diff --git a/.flake8 b/.flake8
index e758ffec4..9311ae8d0 100644
--- a/.flake8
+++ b/.flake8
@@ -1,6 +1,7 @@
[flake8]
max-line-length = 88
exclude = cookiecutter
+ignore = E, W
per-file-ignores =
# Don't require docstrings conventions in private modules
singer_sdk/helpers/_*.py:DAR
diff --git a/.github/ISSUE_TEMPLATE/bug.yml b/.github/ISSUE_TEMPLATE/bug.yml
index 7d3b1a897..1eb15f756 100644
--- a/.github/ISSUE_TEMPLATE/bug.yml
+++ b/.github/ISSUE_TEMPLATE/bug.yml
@@ -15,7 +15,7 @@ body:
attributes:
label: Singer SDK Version
description: Version of the library you are using
- placeholder: "0.30.0"
+ placeholder: "0.34.1"
validations:
required: true
- type: checkboxes
@@ -36,6 +36,7 @@ body:
- "3.8"
- "3.9"
- "3.10"
+ - "3.11"
- "NA"
validations:
required: true
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index 4d01dd881..c4226472c 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -5,7 +5,6 @@ updates:
schedule:
interval: daily
time: "12:00"
- timezone: "UTC"
reviewers: [meltano/engineering]
labels: [deps]
- package-ecosystem: pip
@@ -13,7 +12,6 @@ updates:
schedule:
interval: daily
time: "12:00"
- timezone: "UTC"
reviewers: [meltano/engineering]
labels: [deps]
- package-ecosystem: github-actions
@@ -22,3 +20,7 @@ updates:
interval: weekly
reviewers: [meltano/engineering]
labels: [deps]
+ groups:
+ actions:
+ patterns:
+ - "*"
diff --git a/.github/semantic.yml b/.github/semantic.yml
index 6ddb29231..ff00a1ad4 100644
--- a/.github/semantic.yml
+++ b/.github/semantic.yml
@@ -30,4 +30,4 @@ scopes:
- mappers # mappers only
- templates # cookiecutters
- deps # production dependencies
- - deps-dev # development depencencies (testing, linting, etc.)
+ - deps-dev # development dependencies (testing, linting, etc.)
diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
index 75b4dc276..7f3194489 100644
--- a/.github/workflows/codeql-analysis.yml
+++ b/.github/workflows/codeql-analysis.yml
@@ -9,7 +9,13 @@ name: "CodeQL"
on:
push:
branches: [ "main" ]
- pull_request: {}
+ paths:
+ - .github/workflows/codeql-analysis.yml
+ - '**.py' # Any Python file
+ pull_request:
+ paths:
+ - .github/workflows/codeql-analysis.yml
+ - '**.py' # Any Python file
schedule:
- cron: '37 10 * * 5'
@@ -30,12 +36,10 @@ jobs:
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
steps:
- - name: Checkout repository
- uses: actions/checkout@v3.5.3
+ - uses: actions/checkout@v4
# Initializes the CodeQL tools for scanning.
- - name: Initialize CodeQL
- uses: github/codeql-action/init@v2
+ - uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
@@ -48,8 +52,7 @@ jobs:
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- - name: Autobuild
- uses: github/codeql-action/autobuild@v2
+ - uses: github/codeql-action/autobuild@v3
      # ℹ️ Command-line programs to run using the OS shell.
      # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
@@ -61,5 +64,4 @@ jobs:
# echo "Run, Build Application using script"
# ./location_of_script_within_repo/buildscript.sh
- - name: Perform CodeQL Analysis
- uses: github/codeql-action/analyze@v2
+ - uses: github/codeql-action/analyze@v3
diff --git a/.github/workflows/codspeed.yml b/.github/workflows/codspeed.yml
new file mode 100644
index 000000000..98f8e85ff
--- /dev/null
+++ b/.github/workflows/codspeed.yml
@@ -0,0 +1,54 @@
+name: codspeed
+
+on:
+ push:
+ branches:
+ - "main"
+ paths:
+ - "singer_sdk/**"
+ - "tests/**"
+ - "noxfile.py"
+ - "poetry.lock"
+ - "pyproject.toml"
+ - ".github/workflows/codspeed.yml"
+ pull_request:
+ paths:
+ - "singer_sdk/**"
+ - "tests/**"
+ - "noxfile.py"
+ - "poetry.lock"
+ - "pyproject.toml"
+ - ".github/workflows/codspeed.yml"
+ # `workflow_dispatch` allows CodSpeed to trigger backtest
+ # performance analysis in order to generate initial data.
+ workflow_dispatch:
+
+jobs:
+ benchmarks:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/setup-python@v5
+ with:
+ python-version: 3.12
+ architecture: x64
+
+ - name: Install poetry
+ run: |
+ curl -fsS https://install.python-poetry.org | python - -y
+
+ - name: Configure poetry
+ run: poetry config virtualenvs.create false
+
+ - name: Install project
+ run: >
+ poetry install
+ -vvv
+ --with dev
+ --with benchmark
+ --all-extras
+
+ - uses: CodSpeedHQ/action@v2
+ with:
+ token: ${{ secrets.CODSPEED_TOKEN }}
+ run: pytest tests/ --codspeed
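The workflow above runs the suite under CodSpeed's instrumentation via `pytest tests/ --codspeed`. As a rough sketch of what a measured test can look like, here is a minimal pytest-codspeed-style benchmark; the test body and names are hypothetical and not taken from this repo's `tests/`:

```python
"""Hypothetical micro-benchmark picked up by ``pytest --codspeed``."""

import datetime

import pytest


@pytest.mark.benchmark  # pytest-codspeed measures tests carrying this marker
def test_fromisoformat_benchmark() -> None:
    # A tight loop over a hot path; CodSpeed records its cost on each run.
    for _ in range(1_000):
        datetime.datetime.fromisoformat("2023-12-19T00:00:00+00:00")
```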
diff --git a/.github/workflows/constraints.txt b/.github/workflows/constraints.txt
index 246d39c69..9764b5cdf 100644
--- a/.github/workflows/constraints.txt
+++ b/.github/workflows/constraints.txt
@@ -1,5 +1,6 @@
-pip==23.2
-poetry==1.5.1
-pre-commit==3.3.3
+pip==23.3.2
+poetry==1.7.1
+poetry-plugin-export==1.6.0
+pre-commit==3.6.0
nox==2023.4.22
nox-poetry==1.0.3
diff --git a/.github/workflows/cookiecutter-e2e.yml b/.github/workflows/cookiecutter-e2e.yml
index a88fd6bdd..bb75980c7 100644
--- a/.github/workflows/cookiecutter-e2e.yml
+++ b/.github/workflows/cookiecutter-e2e.yml
@@ -3,10 +3,16 @@ name: E2E Cookiecutters
on:
pull_request:
types: [opened, synchronize, reopened]
- paths: ["cookiecutter/**", "e2e-tests/cookiecutters/**"]
+ paths:
+ - "cookiecutter/**"
+ - "e2e-tests/cookiecutters/**"
+ - ".github/workflows/cookiecutter-e2e.yml"
push:
branches: [main]
- paths: ["cookiecutter/**", "e2e-tests/cookiecutters/**"]
+ paths:
+ - "cookiecutter/**"
+ - "e2e-tests/cookiecutters/**"
+ - ".github/workflows/cookiecutter-e2e.yml"
workflow_dispatch:
concurrency:
@@ -18,18 +24,16 @@ env:
jobs:
lint:
- name: Cookiecutter E2E ${{ matrix.python-version }} ${{ matrix.python-version }} / ${{ matrix.os }}
+ name: Cookiecutter E2E Python ${{ matrix.python-version }} / ${{ matrix.os }}
runs-on: ${{ matrix.os }}
strategy:
fail-fast: true
matrix:
include:
- - { python-version: "3.10", os: "ubuntu-latest" }
+ - { python-version: "3.11", os: "ubuntu-latest" }
steps:
- - name: Check out the repository
- uses: actions/checkout@v3.5.3
-
+ - uses: actions/checkout@v4
- name: Upgrade pip
env:
PIP_CONSTRAINT: .github/workflows/constraints.txt
@@ -38,12 +42,15 @@ jobs:
pip --version
- name: Install Poetry
+ env:
+ PIP_CONSTRAINT: .github/workflows/constraints.txt
run: |
pipx install poetry
+ pipx inject poetry poetry-plugin-export
poetry --version
+ poetry self show plugins
- - name: Setup Python ${{ matrix.python-version }}
- uses: actions/setup-python@v4.7.0
+ - uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
architecture: x64
@@ -59,22 +66,25 @@ jobs:
env:
PIP_CONSTRAINT: .github/workflows/constraints.txt
run: |
- pipx install --pip-args=--constraint=.github/workflows/constraints.txt nox
- pipx inject --pip-args=--constraint=.github/workflows/constraints.txt nox nox-poetry
+ pipx install nox
+ pipx inject nox nox-poetry
nox --version
- name: Run Nox
run: |
nox --python=${{ matrix.python-version }} --session=test_cookiecutter
- - name: Upload build artifacts
+ - uses: actions/upload-artifact@v4
if: always()
- uses: actions/upload-artifact@v3
with:
+ name: cookiecutter-${{ matrix.os }}-py${{ matrix.python-version }}
path: |
/tmp/tap-*
/tmp/target-*
+ /tmp/mapper-*
!/tmp/tap-*/.mypy_cache/
!/tmp/target-*/.mypy_cache/
+ !/tmp/mapper-*/.mypy_cache/
!/tmp/tap-*/.tox/
!/tmp/target-*/.tox/
+ !/tmp/mapper-*/.tox/
diff --git a/.github/workflows/dependency-review.yml b/.github/workflows/dependency-review.yml
index 8048f9cad..0c3551b5b 100644
--- a/.github/workflows/dependency-review.yml
+++ b/.github/workflows/dependency-review.yml
@@ -5,9 +5,6 @@ on:
workflow_dispatch:
inputs: {}
-env:
- FOSSA_CLI_INSTALLER_VERSION: '3.3.10'
-
permissions:
contents: read
@@ -15,27 +12,8 @@ jobs:
dependency-review:
runs-on: ubuntu-latest
steps:
- - name: Checkout the repository
- uses: actions/checkout@v3.5.3
-
- - name: GitHub dependency vulnerability check
+ - uses: actions/checkout@v4
+ - uses: actions/dependency-review-action@v3
if: ${{ github.event_name == 'pull_request_target' }}
- uses: actions/dependency-review-action@v3.0.6
with:
fail-on-severity: high
-
- - name: FOSSA dependency license check
- run: |
- # `$FOSSA_CLI_INSTALLER_VERSION` only controls the version of the installer used - the latest version of `fossa-cli` will always be used.
- curl --no-progress-meter -H 'Cache-Control: no-cache' https://raw.githubusercontent.com/fossas/fossa-cli/v${FOSSA_CLI_INSTALLER_VERSION}/install-latest.sh | bash
-
- echo '## FOSSA dependency license check' >> $GITHUB_STEP_SUMMARY
- echo '' >> $GITHUB_STEP_SUMMARY
-
- fossa analyze --fossa-api-key ${{ secrets.MELTYBOT_FOSSA_API_KEY }} --revision ${{ github.sha }} |& tee fossa_analyze.log
- fossa test --fossa-api-key ${{ secrets.MELTYBOT_FOSSA_API_KEY }} --revision ${{ github.sha }}
-
- TEST_FAILED=$?
- FOSSA_REPORT_LINK="$(grep -A 1 '[ INFO] View FOSSA Report:' fossa_analyze.log | tail -n 1 | sed -e 's/^\[ INFO\]\s*//')"
- echo "[FOSSA detected $([ $TEST_FAILED -ne 0 ] && echo -n '' || echo 'no ')issues](${FOSSA_REPORT_LINK})" >> $GITHUB_STEP_SUMMARY
- exit $TEST_FAILED
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 73eea5953..dff4f2043 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -1,8 +1,9 @@
name: Release
on:
- release:
- types: [published]
+ push:
+ tags:
+ - v*
permissions:
contents: write # Needed to upload artifacts to the release
@@ -16,12 +17,12 @@ jobs:
steps:
- name: Checkout code
- uses: actions/checkout@v3.5.3
+ uses: actions/checkout@v4
- name: Set up Python
- uses: actions/setup-python@v4.7.0
+ uses: actions/setup-python@v5
with:
- python-version: "3.10"
+ python-version: "3.11"
- name: Upgrade pip
env:
@@ -56,4 +57,4 @@ jobs:
file_glob: true
- name: Publish
- uses: pypa/gh-action-pypi-publish@v1.8.8
+ uses: pypa/gh-action-pypi-publish@v1.8.11
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 495b4e58f..c0bfbadaa 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -37,33 +37,36 @@ env:
jobs:
tests:
- name: Test on ${{ matrix.python-version }} (${{ matrix.session }}) / ${{ matrix.os }}
+ name: "Test on ${{ matrix.python-version }} (${{ matrix.session }}) / ${{ matrix.os }} / SQLAlchemy: ${{ matrix.sqlalchemy }}"
runs-on: ${{ matrix.os }}
env:
+ NOXPYTHON: ${{ matrix.python-version }}
NOXSESSION: ${{ matrix.session }}
strategy:
fail-fast: false
matrix:
session: [tests]
os: ["ubuntu-latest", "macos-latest", "windows-latest"]
- python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"]
+ python-version: ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"]
+ sqlalchemy: ["2"]
include:
- - { session: doctest, python-version: "3.10", os: "ubuntu-latest" }
- - { session: mypy, python-version: "3.8", os: "ubuntu-latest" }
+ - { session: tests, python-version: "3.11", os: "ubuntu-latest", sqlalchemy: "1" }
+ - { session: doctest, python-version: "3.11", os: "ubuntu-latest", sqlalchemy: "2" }
+ - { session: mypy, python-version: "3.11", os: "ubuntu-latest", sqlalchemy: "2" }
steps:
- - name: Check out the repository
- uses: actions/checkout@v3.5.3
+ - uses: actions/checkout@v4
- name: Install Poetry
env:
PIP_CONSTRAINT: .github/workflows/constraints.txt
run: |
pipx install poetry
+ pipx inject poetry poetry-plugin-export
poetry --version
+ poetry self show plugins
- - name: Setup Python ${{ matrix.python-version }}
- uses: actions/setup-python@v4.7.0
+ - uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
architecture: x64
@@ -81,19 +84,20 @@ jobs:
env:
PIP_CONSTRAINT: .github/workflows/constraints.txt
run: |
- pipx install --pip-args=--constraint=.github/workflows/constraints.txt nox
- pipx inject --pip-args=--constraint=.github/workflows/constraints.txt nox nox-poetry
+ pipx install nox
+ pipx inject nox nox-poetry
nox --version
- name: Run Nox
+ env:
+ SQLALCHEMY_VERSION: ${{ matrix.sqlalchemy }}
run: |
- nox --python=${{ matrix.python-version }}
+ nox --verbose
- - name: Upload coverage data
+ - uses: actions/upload-artifact@v4
if: always() && (matrix.session == 'tests')
- uses: actions/upload-artifact@v3.1.2
with:
- name: coverage-data
+ name: coverage-data-nox_${{ matrix.session }}-${{ matrix.os }}-py${{ matrix.python-version }}_sqlalchemy_${{ matrix.sqlalchemy }}
path: ".coverage.*"
tests-external:
@@ -101,6 +105,8 @@ jobs:
runs-on: ubuntu-latest
if: ${{ !github.event.pull_request.head.repo.fork }}
env:
+ NOXPYTHON: "3.11"
+ NOXSESSION: tests
SAMPLE_TAP_GITLAB_AUTH_TOKEN: ${{ secrets.SAMPLE_TAP_GITLAB_AUTH_TOKEN }}
SAMPLE_TAP_GITLAB_GROUP_IDS: ${{ secrets.SAMPLE_TAP_GITLAB_GROUP_IDS }}
SAMPLE_TAP_GITLAB_PROJECT_IDS: ${{ secrets.SAMPLE_TAP_GITLAB_PROJECT_IDS }}
@@ -110,20 +116,20 @@ jobs:
SAMPLE_TAP_GOOGLE_ANALYTICS_VIEW_ID: ${{ secrets.SAMPLE_TAP_GOOGLE_ANALYTICS_VIEW_ID }}
steps:
- - name: Check out the repository
- uses: actions/checkout@v3.5.3
+ - uses: actions/checkout@v4
- name: Install Poetry
env:
PIP_CONSTRAINT: .github/workflows/constraints.txt
run: |
pipx install poetry
+ pipx inject poetry poetry-plugin-export
poetry --version
+ poetry self show plugins
- - name: Setup Python 3.10
- uses: actions/setup-python@v4.7.0
+ - uses: actions/setup-python@v5
with:
- python-version: '3.10'
+ python-version: ${{ env.NOXPYTHON }}
architecture: x64
cache: 'pip'
cache-dependency-path: 'poetry.lock'
@@ -139,31 +145,33 @@ jobs:
env:
PIP_CONSTRAINT: .github/workflows/constraints.txt
run: |
- pipx install --pip-args=--constraint=.github/workflows/constraints.txt nox
- pipx inject --pip-args=--constraint=.github/workflows/constraints.txt nox nox-poetry
+ pipx install nox
+ pipx inject nox nox-poetry
nox --version
- name: Run Nox
run: |
- nox -s tests -p 3.10 -- -m "external"
+ nox -- -m "external"
coverage:
name: Coverage
runs-on: ubuntu-latest
needs: tests
steps:
- - name: Check out the repository
- uses: actions/checkout@v3.5.3
+ - uses: actions/checkout@v4
- name: Install Poetry
+ env:
+ PIP_CONSTRAINT: .github/workflows/constraints.txt
run: |
- pipx install --pip-args=--constraint=.github/workflows/constraints.txt poetry
+ pipx install poetry
+ pipx inject poetry poetry-plugin-export
poetry --version
+ poetry self show plugins
- - name: Set up Python
- uses: actions/setup-python@v4.7.0
+ - uses: actions/setup-python@v5
with:
- python-version: '3.10'
+ python-version: '3.11'
cache: 'pip'
cache-dependency-path: 'poetry.lock'
@@ -172,17 +180,17 @@ jobs:
pip install --constraint=.github/workflows/constraints.txt pip
pip --version
- - name: Download coverage data
- uses: actions/download-artifact@v3.0.2
+ - uses: actions/download-artifact@v4
with:
- name: coverage-data
+ pattern: coverage-data-*
+ merge-multiple: true
- name: Install Nox
env:
PIP_CONSTRAINT: .github/workflows/constraints.txt
run: |
- pipx install --pip-args=--constraint=.github/workflows/constraints.txt nox
- pipx inject --pip-args=--constraint=.github/workflows/constraints.txt nox nox-poetry
+ pipx install nox
+ pipx inject nox nox-poetry
nox --version
- name: Combine coverage data and display human readable report
@@ -193,8 +201,7 @@ jobs:
run: |
nox --session=coverage -- xml
- - name: Upload coverage report
- uses: codecov/codecov-action@v3.1.4
+ - uses: codecov/codecov-action@v3
with:
fail_ci_if_error: true
token: ${{ secrets.CODECOV_TOKEN }}
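Note the new `SQLALCHEMY_VERSION` environment variable passed to the Nox run: the matrix now exercises both SQLAlchemy majors. A minimal sketch of how a nox session might consume that variable (names assumed; the repo's actual `noxfile.py` may differ):

```python
"""Sketch of a nox session honoring the ``SQLALCHEMY_VERSION`` matrix variable."""

import os

import nox


@nox.session
def tests(session: nox.Session) -> None:
    session.install(".")
    sqlalchemy_version = os.environ.get("SQLALCHEMY_VERSION")
    if sqlalchemy_version:
        # Pin to the major release requested by the CI matrix ("1" or "2").
        session.install(f"sqlalchemy~={sqlalchemy_version}.0")
    session.run("pytest", *session.posargs)
```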
diff --git a/.github/workflows/version_bump.yml b/.github/workflows/version_bump.yml
index 7503b030f..ed3edcf94 100644
--- a/.github/workflows/version_bump.yml
+++ b/.github/workflows/version_bump.yml
@@ -33,21 +33,21 @@ jobs:
permissions:
contents: write # to create a github release
pull-requests: write # to create and update PRs
+ discussions: write # to create a discussion
steps:
- - uses: actions/checkout@v3.5.3
+ - uses: actions/checkout@v4
with:
fetch-depth: 0
- - name: Set up Python
- uses: actions/setup-python@v4.7.0
+ - uses: actions/setup-python@v5
with:
- python-version: "3.10"
+ python-version: "3.11"
architecture: x64
- name: Bump version
id: cz-bump
- uses: commitizen-tools/commitizen-action@0.18.2
+ uses: commitizen-tools/commitizen-action@0.20.0
with:
increment: ${{ github.event.inputs.bump != 'auto' && github.event.inputs.bump || '' }}
prerelease: ${{ github.event.inputs.prerelease != 'none' && github.event.inputs.prerelease || '' }}
@@ -66,8 +66,8 @@ jobs:
body_path: _changelog_fragment.md
tag_name: v${{ steps.cz-bump.outputs.version }}
prerelease: ${{ github.event.inputs.prerelease != 'none' }}
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ token: ${{ secrets.MELTYBOT_GITHUB_AUTH_TOKEN }}
+ discussion_category_name: ${{ github.event.inputs.prerelease && 'announcements' || '' }}
- name: Set repo file permissions
run: |
@@ -77,6 +77,7 @@ jobs:
uses: peter-evans/create-pull-request@v5
id: create-pull-request
with:
+ token: ${{ secrets.MELTYBOT_GITHUB_AUTH_TOKEN }}
commit-message: "chore: Bump package version"
title: "chore: Release v${{ steps.cz-bump.outputs.version }}"
body: |
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 3c8a18afe..a9707001c 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -5,7 +5,7 @@ ci:
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
- rev: v4.4.0
+ rev: v4.5.0
hooks:
- id: check-json
- id: check-toml
@@ -17,7 +17,9 @@ repos:
exclude: |
(?x)^(
cookiecutter/.*/meltano.yml|
- cookiecutter/.*/.pre-commit-config.yaml
+ cookiecutter/.*/.pre-commit-config.yaml|
+ cookiecutter/.*/dependabot.yml|
+ cookiecutter/.*/test.yml
)$
- id: end-of-file-fixer
exclude: |
@@ -36,14 +38,14 @@ repos:
)$
- repo: https://github.com/python-jsonschema/check-jsonschema
- rev: 0.23.3
+ rev: 0.27.3
hooks:
- id: check-dependabot
- id: check-github-workflows
- id: check-readthedocs
- repo: https://github.com/astral-sh/ruff-pre-commit
- rev: v0.0.278
+ rev: v0.1.11
hooks:
- id: ruff
args: [--fix, --exit-non-zero-on-fix, --show-fixes]
@@ -51,20 +53,14 @@ repos:
(?x)^(
cookiecutter/.*
)$
-
-- repo: https://github.com/psf/black
- rev: 23.7.0
- hooks:
- - id: black
+ - id: ruff-format
exclude: |
- (?x)^(
- cookiecutter/.*|
- singer_sdk/helpers/_simpleeval.py|
- tests/core/test_simpleeval.py
- )$
+ (?x)^(
+ cookiecutter/.*
+ )$
- repo: https://github.com/pycqa/flake8
- rev: 6.0.0
+ rev: 7.0.0
hooks:
- id: flake8
additional_dependencies:
@@ -76,6 +72,6 @@ repos:
)$
- repo: https://github.com/python-poetry/poetry
- rev: 1.5.0
+ rev: 1.7.0
hooks:
- id: poetry-check
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 56198b597..f1b205fc8 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,6 +5,144 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+## v0.34.1 (2023-12-19)
+
+### 🐛 Fixes
+
+- [#2118](https://github.com/meltano/sdk/issues/2118) Output JSONPath expression with match count message -- _**Thanks @mjsqu!**_
+- [#2107](https://github.com/meltano/sdk/issues/2107) Respect forced replication method when retrieving state
+- [#2094](https://github.com/meltano/sdk/issues/2094) Use `nulls_first` when available to order `NULL` results in incremental SQL streams
+
+### ⚙️ Under the Hood
+
+- [#1733](https://github.com/meltano/sdk/issues/1733) Test with Python 3.12 🐍
+- [#2095](https://github.com/meltano/sdk/issues/2095) Use `CursorResult.mappings()` in SQL streams
+- [#2092](https://github.com/meltano/sdk/issues/2092) Use `datetime.fromisoformat` in other places
+- [#2090](https://github.com/meltano/sdk/issues/2090) Explicitly use `T` iso date separator
+
+### 📚 Documentation Improvements
+
+- [#2111](https://github.com/meltano/sdk/issues/2111) Fix broken requests documentation links -- _**Thanks @mjsqu!**_
+
+## v0.34.0 (2023-12-05)
+
+## v0.34.0rc1 (2023-12-05)
+
+### 🐛 Fixes
+
+- [#2076](https://github.com/meltano/sdk/issues/2076) Make an explicit dependency on `python-dateutil`
+
+## v0.34.0b1 (2023-11-28)
+
+### ✨ New
+
+- [#2044](https://github.com/meltano/sdk/issues/2044) Add Parquet as a batch encoding option -- _**Thanks @jamielxcarter!**_
+- [#768](https://github.com/meltano/sdk/issues/768) Better error messages when config validation fails
+- [#1854](https://github.com/meltano/sdk/issues/1854) Make stream logger a child of the tap logger
+
+### ⚙️ Under the Hood
+
+- [#2070](https://github.com/meltano/sdk/issues/2070) Parse dates with `datetime.fromisoformat`/`backports.datetime_fromisoformat` -- _**Thanks @BuzzCutNorman!**_
+- [#2056](https://github.com/meltano/sdk/issues/2056) Break up `TapTestClassFactory._annotate_test_class` into simpler methods
+- [#2058](https://github.com/meltano/sdk/issues/2058) Implement a `SingerWriter` class in `singer_sdk.io_base` and use it to emit Singer messages -- _**Thanks @BuzzCutNorman!**_
+- [#2061](https://github.com/meltano/sdk/issues/2061) Simplify target template file names with `post_gen_project.py` hook -- _**Thanks @vicmattos!**_
+- [#2060](https://github.com/meltano/sdk/issues/2060) Simplify tap template file names with `post_gen_project.py` hook -- _**Thanks @vicmattos!**_
+
+### 📚 Documentation Improvements
+
+- [#2039](https://github.com/meltano/sdk/issues/2039) Add 404 page with `sphinx-notfound-page`
+- [#2037](https://github.com/meltano/sdk/issues/2037) Add flattening configuration examples
+
+## v0.33.1 (2023-11-08)
+
+### 🐛 Fixes
+
+- [#2035](https://github.com/meltano/sdk/issues/2035) Retry all 5xx status codes -- _**Thanks @asamasoma!**_
+
+## v0.33.0 (2023-10-12)
+
+### ✨ New
+
+- [#1999](https://github.com/meltano/sdk/issues/1999) Log JSONPath match count at the INFO level
+- [#1779](https://github.com/meltano/sdk/issues/1779) Cache SQL columns and schemas
+- [#2003](https://github.com/meltano/sdk/issues/2003) Add ability to do list comprehensions in stream map expressions -- _**Thanks @haleemur!**_
+- [#2018](https://github.com/meltano/sdk/issues/2018) Drop Python 3.7 support in cookiecutter templates -- _**Thanks @visch!**_
+
+### 🐛 Fixes
+
+- [#2006](https://github.com/meltano/sdk/issues/2006) Parse record `time_extracted` into `datetime.datetime` instance
+- [#1996](https://github.com/meltano/sdk/issues/1996) Respect nullability of leaf properties when flattening schema
+- [#1844](https://github.com/meltano/sdk/issues/1844) Safely skip parsing record field as date-time if it is missing in schema
+- [#1885](https://github.com/meltano/sdk/issues/1885) Map `record` field to a JSON `object` type
+- [#2015](https://github.com/meltano/sdk/issues/2015) Ensure `default` property is passed to SCHEMA messages -- _**Thanks @prakharcode!**_
+
+### 📚 Documentation Improvements
+
+- [#2017](https://github.com/meltano/sdk/issues/2017) Document support for comprehensions in stream maps
+
+## v0.32.0 (2023-09-22)
+
+### ✨ New
+
+- [#1893](https://github.com/meltano/sdk/issues/1893) Standard configurable load methods
+- [#1861](https://github.com/meltano/sdk/issues/1861) SQLTap connector instance shared with streams -- _**Thanks @BuzzCutNorman!**_
+
+### 🐛 Fixes
+
+- [#1954](https://github.com/meltano/sdk/issues/1954) Missing begin()s related to SQLAlchemy 2.0 -- _**Thanks @andyoneal!**_
+- [#1951](https://github.com/meltano/sdk/issues/1951) Ensure SQL streams are sorted when a replication key is set
+- [#1949](https://github.com/meltano/sdk/issues/1949) Retry SQLAlchemy engine creation for adapters without JSON SerDe support
+- [#1939](https://github.com/meltano/sdk/issues/1939) Handle `decimal.Decimal` instances in flattening
+- [#1927](https://github.com/meltano/sdk/issues/1927) Handle replication key not found in stream schema -- _**Thanks @mjsqu!**_
+- [#1977](https://github.com/meltano/sdk/issues/1977) Fix hanging downstream tests in tap-postgres
+- [#1970](https://github.com/meltano/sdk/issues/1970) Warn instead of crashing when schema helpers cannot append `null` to types
+
+### ⚡ Performance Improvements
+
+- [#1925](https://github.com/meltano/sdk/issues/1925) Add viztracer command for testing targets -- _**Thanks @mjsqu!**_
+
+- [#1962](https://github.com/meltano/sdk/issues/1962) Ensure `raw_schema` in stream mapper is immutable
+
+## v0.31.1 (2023-08-17)
+
+### ✨ New
+
+- [#1905](https://github.com/meltano/sdk/issues/1905) Add email field and use human-readable questions in templates
+
+### 🐛 Fixes
+
+- [#1913](https://github.com/meltano/sdk/issues/1913) Fix tap tests for multiple test classes with different input catalogs
+
+## v0.31.0 (2023-08-07)
+
+### ✨ New
+
+- [#1892](https://github.com/meltano/sdk/issues/1892) Add a mapper cookiecutter template
+- [#1864](https://github.com/meltano/sdk/issues/1864) SQLTarget connector instance shared with sinks -- _**Thanks @BuzzCutNorman!**_
+- [#1878](https://github.com/meltano/sdk/issues/1878) Add `_sdc_sync_started_at` metadata column to indicate the start of the target process
+- [#1484](https://github.com/meltano/sdk/issues/1484) Bump latest supported sqlalchemy from `1.*` to `2.*`
+
+### 🐛 Fixes
+
+- [#1898](https://github.com/meltano/sdk/issues/1898) Correctly serialize `decimal.Decimal` in JSON fields of SQL targets
+- [#1881](https://github.com/meltano/sdk/issues/1881) Expose `add_record_metadata` as a builtin target setting
+- [#1880](https://github.com/meltano/sdk/issues/1880) Append batch config if target supports the batch capability
+- [#1865](https://github.com/meltano/sdk/issues/1865) Handle missing record properties in SQL sinks
+- [#1838](https://github.com/meltano/sdk/issues/1838) Add deprecation warning when importing legacy testing helpers
+- [#1842](https://github.com/meltano/sdk/issues/1842) Ensure all expected tap parameters are passed to `SQLTap` initializer
+- [#1853](https://github.com/meltano/sdk/issues/1853) Check against the unconformed key properties when validating record keys
+- [#1843](https://github.com/meltano/sdk/issues/1843) Target template should not reference `tap_id`
+- [#1708](https://github.com/meltano/sdk/issues/1708) Finalize and write last state message with dedupe
+- [#1835](https://github.com/meltano/sdk/issues/1835) Avoid setting up mapper in discovery mode
+
+### ⚙️ Under the Hood
+
+- [#1877](https://github.com/meltano/sdk/issues/1877) Use `importlib.resources` instead of `__file__` to retrieve sample Singer output files
+
+### 📚 Documentation Improvements
+
+- [#1852](https://github.com/meltano/sdk/issues/1852) Fix stale `pip_url` example that uses shell script workaround for editable installation
+
## v0.30.0 (2023-07-10)
### ✨ New
diff --git a/cookiecutter/mapper-template/README.md b/cookiecutter/mapper-template/README.md
new file mode 100644
index 000000000..70e2e47e8
--- /dev/null
+++ b/cookiecutter/mapper-template/README.md
@@ -0,0 +1,24 @@
+# Singer Mapper Template
+
+To use this cookiecutter template:
+
+```bash
+pip3 install pipx
+pipx ensurepath
+# You may need to reopen your shell at this point
+pipx install cookiecutter
+```
+
+Initialize the Cookiecutter template directly from Git:
+
+```bash
+cookiecutter https://github.com/meltano/sdk --directory="cookiecutter/mapper-template"
+```
+
+Or locally from an already-cloned `sdk` repo:
+
+```bash
+cookiecutter ./sdk/cookiecutter/mapper-template
+```
+
+See the [dev guide](https://sdk.meltano.com/en/latest/dev_guide.html).
diff --git a/cookiecutter/mapper-template/cookiecutter.json b/cookiecutter/mapper-template/cookiecutter.json
new file mode 100644
index 000000000..c42b1cf06
--- /dev/null
+++ b/cookiecutter/mapper-template/cookiecutter.json
@@ -0,0 +1,19 @@
+{
+ "name": "MyMapperName",
+ "admin_name": "FirstName LastName",
+ "admin_email": "firstname.lastname@example.com",
+ "mapper_id": "mapper-{{ cookiecutter.name.lower() }}",
+ "library_name": "{{ cookiecutter.mapper_id.replace('-', '_') }}",
+ "variant": "None (Skip)",
+ "include_ci_files": ["GitHub", "None (Skip)"],
+ "license": ["Apache-2.0"],
+ "__prompts__": {
+ "name": "The name of the mapper, in CamelCase",
+ "admin_name": "Provide your [bold yellow]full name[/]",
+ "admin_email": "Provide your [bold yellow]email[/]",
+    "mapper_id": "The ID of the mapper, in kebab-case",
+ "library_name": "The name of the library, in snake_case. This is how the library will be imported in Python.",
+    "include_ci_files": "Whether to include CI files for a common CI service",
+ "license": "The license for the project"
+ }
+}
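The `__prompts__` block customizes the interactive questions, and every other key doubles as a default that can be overridden non-interactively. A sketch using cookiecutter's Python API (`no_input`, `extra_context`, and `directory` are documented cookiecutter parameters; the values mirror the defaults above):

```python
"""Sketch: rendering the mapper template without interactive prompts."""

from cookiecutter.main import cookiecutter

cookiecutter(
    "https://github.com/meltano/sdk",
    directory="cookiecutter/mapper-template",
    no_input=True,  # skip the __prompts__ questions and use extra_context
    extra_context={
        "name": "MyMapperName",
        "admin_name": "FirstName LastName",
        "admin_email": "firstname.lastname@example.com",
    },
)
```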
diff --git a/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/.github/workflows/{% if cookiecutter.include_ci_files == 'GitHub' %}test.yml{%endif%} b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/.github/workflows/{% if cookiecutter.include_ci_files == 'GitHub' %}test.yml{%endif%}
new file mode 100644
index 000000000..33313a43e
--- /dev/null
+++ b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/.github/workflows/{% if cookiecutter.include_ci_files == 'GitHub' %}test.yml{%endif%}
@@ -0,0 +1,30 @@
+### A CI workflow template that runs linting and Python testing
+### TODO: Modify as needed or as desired.
+
+name: Test {{cookiecutter.mapper_id}}
+
+on: [push]
+
+jobs:
+ pytest:
+ runs-on: ubuntu-latest
+ env:
+ GITHUB_TOKEN: {{ '${{secrets.GITHUB_TOKEN}}' }}
+ strategy:
+ matrix:
+ python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
+ steps:
+ - uses: actions/checkout@v4
+ - name: Set up Python {{ '${{ matrix.python-version }}' }}
+ uses: actions/setup-python@v5
+ with:
+ python-version: {{ '${{ matrix.python-version }}' }}
+ - name: Install Poetry
+ run: |
+ pip install poetry
+ - name: Install dependencies
+ run: |
+ poetry install
+ - name: Test with pytest
+ run: |
+ poetry run pytest
diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/.github/{% if cookiecutter.include_ci_files == 'GitHub' %}dependabot.yml{%endif%} b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/.github/{% if cookiecutter.include_ci_files == 'GitHub' %}dependabot.yml{%endif%}
similarity index 100%
rename from cookiecutter/tap-template/{{cookiecutter.tap_id}}/.github/{% if cookiecutter.include_ci_files == 'GitHub' %}dependabot.yml{%endif%}
rename to cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/.github/{% if cookiecutter.include_ci_files == 'GitHub' %}dependabot.yml{%endif%}
diff --git a/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/.gitignore b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/.gitignore
new file mode 100644
index 000000000..475019c31
--- /dev/null
+++ b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/.gitignore
@@ -0,0 +1,136 @@
+# Secrets and internal config files
+**/.secrets/*
+
+# Ignore meltano internal cache and sqlite systemdb
+
+.meltano/
+
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+pip-wheel-metadata/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+.python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
diff --git a/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/.pre-commit-config.yaml b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/.pre-commit-config.yaml
new file mode 100644
index 000000000..62f5ab41d
--- /dev/null
+++ b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/.pre-commit-config.yaml
@@ -0,0 +1,32 @@
+ci:
+ autofix_prs: true
+ autoupdate_schedule: weekly
+ autoupdate_commit_msg: 'chore: pre-commit autoupdate'
+
+repos:
+- repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v4.5.0
+ hooks:
+ - id: check-json
+ - id: check-toml
+ - id: check-yaml
+ - id: end-of-file-fixer
+ - id: trailing-whitespace
+
+- repo: https://github.com/python-jsonschema/check-jsonschema
+ rev: 0.27.3
+ hooks:
+ - id: check-dependabot
+ - id: check-github-workflows
+
+- repo: https://github.com/astral-sh/ruff-pre-commit
+ rev: v0.1.11
+ hooks:
+ - id: ruff
+ args: [--fix, --exit-non-zero-on-fix, --show-fixes]
+ - id: ruff-format
+
+- repo: https://github.com/pre-commit/mirrors-mypy
+ rev: v1.8.0
+ hooks:
+ - id: mypy
diff --git a/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/.secrets/.gitignore b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/.secrets/.gitignore
new file mode 100644
index 000000000..33c6acd03
--- /dev/null
+++ b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/.secrets/.gitignore
@@ -0,0 +1,10 @@
+# IMPORTANT! This folder is hidden from git - if you need to store config files or other secrets,
+# make sure those are never staged for commit into your git repo. You can store them here or in another
+# secure location.
+#
+# Note: This may be redundant with the project's global .gitignore and is provided
+# here for redundancy. If the `.secrets` folder is not needed, you may delete it
+# from the project.
+
+*
+!.gitignore
diff --git a/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/README.md b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/README.md
new file mode 100644
index 000000000..ded365fb2
--- /dev/null
+++ b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/README.md
@@ -0,0 +1,128 @@
+# {{ cookiecutter.mapper_id }}
+
+`{{ cookiecutter.mapper_id }}` is a Singer mapper for {{ cookiecutter.name }}.
+
+Built with the [Meltano Mapper SDK](https://sdk.meltano.com) for Singer Mappers.
+
+
+
+## Configuration
+
+### Accepted Config Options
+
+
+
+A full list of supported settings and capabilities for this
+mapper is available by running:
+
+```bash
+{{ cookiecutter.mapper_id }} --about
+```
+
+### Configure using environment variables
+
+This Singer mapper will automatically import any environment variables within the working directory's
+`.env` if `--config=ENV` is provided, such that config values will be considered if a matching
+environment variable is set either in the terminal context or in the `.env` file.
+
+### Source Authentication and Authorization
+
+
+
+## Usage
+
+You can easily run `{{ cookiecutter.mapper_id }}` by itself or in a pipeline using [Meltano](https://meltano.com/).
+
+### Executing the Mapper Directly
+
+```bash
+{{ cookiecutter.mapper_id }} --version
+{{ cookiecutter.mapper_id }} --help
+```
+
+## Developer Resources
+
+Follow these instructions to contribute to this project.
+
+### Initialize your Development Environment
+
+```bash
+pipx install poetry
+poetry install
+```
+
+### Create and Run Tests
+
+Create tests within the `tests` subfolder and
+then run:
+
+```bash
+poetry run pytest
+```
+
+You can also test the `{{cookiecutter.mapper_id}}` CLI interface directly using `poetry run`:
+
+```bash
+poetry run {{cookiecutter.mapper_id}} --help
+```
+
+### Testing with [Meltano](https://www.meltano.com)
+
+_**Note:** This mapper will work in any Singer environment and does not require Meltano.
+Examples here are for convenience and to streamline end-to-end orchestration scenarios._
+
+
+
+Next, install Meltano (if you haven't already) and any needed plugins:
+
+```bash
+# Install meltano
+pipx install meltano
+# Initialize meltano within this directory
+cd {{ cookiecutter.mapper_id }}
+meltano install
+```
+
+Now you can test and orchestrate using Meltano:
+
+```bash
+# Run a test `run` pipeline:
+meltano run tap-smoke-test {{ cookiecutter.mapper_id }} target-jsonl
+```
+
+### SDK Dev Guide
+
+See the [dev guide](https://sdk.meltano.com/en/latest/dev_guide.html) for more instructions on how to use the SDK to
+develop your own taps, targets, and mappers.
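The "Configure using environment variables" section above corresponds to the SDK's `parse_env_config` option. A hedged sketch, assuming a generated mapper named `mapper-example` with the template's `example_config` setting; the env-var naming (upper-cased plugin name plus setting name) is the convention I'd expect from the SDK, so verify it against the docs for your version:

```python
"""Sketch of the ``--config=ENV`` behavior described in the README above."""

import os

from mapper_example.mapper import ExampleMapper  # hypothetical generated package

os.environ["MAPPER_EXAMPLE_EXAMPLE_CONFIG"] = "foo"

# ``--config=ENV`` on the CLI corresponds to ``parse_env_config=True`` here.
mapper = ExampleMapper(config=None, parse_env_config=True)
assert mapper.config["example_config"] == "foo"
```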
diff --git a/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/meltano.yml b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/meltano.yml
new file mode 100644
index 000000000..019015d06
--- /dev/null
+++ b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/meltano.yml
@@ -0,0 +1,31 @@
+version: 1
+send_anonymous_usage_stats: true
+project_id: "{{cookiecutter.mapper_id}}"
+default_environment: test
+environments:
+- name: test
+plugins:
+ extractors:
+ - name: tap-smoke-test
+ variant: meltano
+ pip_url: git+https://github.com/meltano/tap-smoke-test.git
+ config:
+ streams:
+ - stream_name: animals
+ input_filename: https://raw.githubusercontent.com/meltano/tap-smoke-test/main/demo-data/animals-data.jsonl
+ loaders:
+ - name: target-jsonl
+ variant: andyh1203
+ pip_url: target-jsonl
+ mappers:
+ - name: "{{cookiecutter.mapper_id}}"
+ pip_url: -e .
+ namespace: "{{cookiecutter.library_name}}"
+ # TODO: replace these with the actual settings
+ settings:
+ - name: example_config
+ kind: string
+ mappings:
+ - name: example
+ config:
+ example_config: foo
diff --git a/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/output/.gitignore b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/output/.gitignore
new file mode 100644
index 000000000..80ff9d2a6
--- /dev/null
+++ b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/output/.gitignore
@@ -0,0 +1,4 @@
+# This directory is used as a target by target-jsonl, so ignore all files
+
+*
+!.gitignore
diff --git a/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/pyproject.toml b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/pyproject.toml
new file mode 100644
index 000000000..d0ae4b1a9
--- /dev/null
+++ b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/pyproject.toml
@@ -0,0 +1,76 @@
+[tool.poetry]
+{%- if cookiecutter.variant != "None (Skip)" %}
+name = "{{cookiecutter.variant}}-{{cookiecutter.mapper_id}}"
+{%- else %}
+name = "{{cookiecutter.mapper_id}}"
+{%- endif %}
+version = "0.0.1"
+description = "`{{cookiecutter.mapper_id}}` is a Singer mapper for {{cookiecutter.name}}, built with the Meltano Singer SDK."
+readme = "README.md"
+authors = ["{{ cookiecutter.admin_name }} <{{ cookiecutter.admin_email }}>"]
+keywords = [
+ "ELT",
+ "Mapper",
+ "{{cookiecutter.name}}",
+]
+classifiers = [
+ "Intended Audience :: Developers",
+ "Operating System :: OS Independent",
+ "Programming Language :: Python :: 3.8",
+ "Programming Language :: Python :: 3.9",
+ "Programming Language :: Python :: 3.10",
+ "Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
+]
+license = "Apache-2.0"
+{%- if cookiecutter.variant != "None (Skip)" %}
+packages = [
+ { include = "{{cookiecutter.library_name}}" },
+]
+{%- endif %}
+
+[tool.poetry.dependencies]
+python = ">=3.8"
+singer-sdk = { version="~=0.34.1" }
+fs-s3fs = { version = "~=1.1.1", optional = true }
+
+[tool.poetry.group.dev.dependencies]
+pytest = ">=7.4.0"
+singer-sdk = { version="~=0.34.1", extras = ["testing"] }
+
+[tool.poetry.extras]
+s3 = ["fs-s3fs"]
+
+[tool.mypy]
+python_version = "3.11"
+warn_unused_configs = true
+
+[tool.ruff]
+src = ["{{cookiecutter.library_name}}"]
+target-version = "py38"
+
+[tool.ruff.lint]
+ignore = [
+ "ANN101", # missing-type-self
+ "ANN102", # missing-type-cls
+ "COM812", # missing-trailing-comma
+ "ISC001", # single-line-implicit-string-concatenation
+]
+select = ["ALL"]
+
+[tool.ruff.lint.flake8-annotations]
+allow-star-arg-any = true
+
+[tool.ruff.lint.isort]
+known-first-party = ["{{cookiecutter.library_name}}"]
+
+[tool.ruff.lint.pydocstyle]
+convention = "google"
+
+[build-system]
+requires = ["poetry-core==1.8.1"]
+build-backend = "poetry.core.masonry.api"
+
+[tool.poetry.scripts]
+# CLI declaration
+{{cookiecutter.mapper_id}} = '{{cookiecutter.library_name}}.mapper:{{cookiecutter.name}}Mapper.cli'
diff --git a/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/tests/__init__.py b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/tests/__init__.py
new file mode 100644
index 000000000..7caba56f7
--- /dev/null
+++ b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/tests/__init__.py
@@ -0,0 +1 @@
+"""Test suite for {{ cookiecutter.mapper_id }}."""
diff --git a/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/tests/conftest.py b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/tests/conftest.py
new file mode 100644
index 000000000..6bb3ec2d7
--- /dev/null
+++ b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/tests/conftest.py
@@ -0,0 +1,3 @@
+"""Test Configuration."""
+
+pytest_plugins = ("singer_sdk.testing.pytest_plugin",)
diff --git a/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/tox.ini b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/tox.ini
new file mode 100644
index 000000000..6be1c116a
--- /dev/null
+++ b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/tox.ini
@@ -0,0 +1,19 @@
+# This file can be used to customize tox tests as well as other tools like flake8 and mypy
+
+[tox]
+envlist = py{38,39,310,311,312}
+isolated_build = true
+
+[testenv]
+allowlist_externals = poetry
+commands =
+ poetry install -v
+ poetry run pytest
+
+[testenv:pytest]
+# Run the python tests.
+# To execute, run `tox -e pytest`
+envlist = py{38,39,310,311,312}
+commands =
+ poetry install -v
+ poetry run pytest
diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{%if 'Apache-2.0' == cookiecutter.license %}LICENSE{%endif%} b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/{%if 'Apache-2.0' == cookiecutter.license %}LICENSE{%endif%}
similarity index 100%
rename from cookiecutter/tap-template/{{cookiecutter.tap_id}}/{%if 'Apache-2.0' == cookiecutter.license %}LICENSE{%endif%}
rename to cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/{%if 'Apache-2.0' == cookiecutter.license %}LICENSE{%endif%}
diff --git a/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/{{cookiecutter.library_name}}/__init__.py b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/{{cookiecutter.library_name}}/__init__.py
new file mode 100644
index 000000000..5781fbbc4
--- /dev/null
+++ b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/{{cookiecutter.library_name}}/__init__.py
@@ -0,0 +1 @@
+"""{{ cookiecutter.name }} Mapper."""
diff --git a/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/{{cookiecutter.library_name}}/mapper.py b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/{{cookiecutter.library_name}}/mapper.py
new file mode 100644
index 000000000..c8c3d23ec
--- /dev/null
+++ b/cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/{{cookiecutter.library_name}}/mapper.py
@@ -0,0 +1,96 @@
+"""{{ cookiecutter.name }} mapper class."""
+
+from __future__ import annotations
+
+import typing as t
+from typing import TYPE_CHECKING
+
+import singer_sdk.typing as th
+from singer_sdk import _singerlib as singer
+from singer_sdk.mapper import PluginMapper
+from singer_sdk.mapper_base import InlineMapper
+
+if TYPE_CHECKING:
+ from pathlib import PurePath
+
+
+class {{ cookiecutter.name }}Mapper(InlineMapper):
+ """Sample mapper for {{ cookiecutter.name }}."""
+
+ name = "{{ cookiecutter.mapper_id }}"
+
+ config_jsonschema = th.PropertiesList(
+ # TODO: Replace or remove this example config based on your needs
+ th.Property(
+ "example_config",
+ th.StringType,
+ description="An example config, replace or remove based on your needs.",
+ ),
+ ).to_dict()
+
+ def __init__(
+ self,
+ *,
+ config: dict | PurePath | str | list[PurePath | str] | None = None,
+ parse_env_config: bool = False,
+ validate_config: bool = True,
+ ) -> None:
+ """Create a new inline mapper.
+
+ Args:
+ config: Mapper configuration. Can be a dictionary, a single path to a
+ configuration file, or a list of paths to multiple configuration
+ files.
+ parse_env_config: Whether to look for configuration values in environment
+ variables.
+ validate_config: True to require validation of config settings.
+ """
+ super().__init__(
+ config=config,
+ parse_env_config=parse_env_config,
+ validate_config=validate_config,
+ )
+
+ self.mapper = PluginMapper(plugin_config=dict(self.config), logger=self.logger)
+
+ def map_schema_message(self, message_dict: dict) -> t.Iterable[singer.Message]:
+ """Map a schema message to zero or more new messages.
+
+ Args:
+ message_dict: A SCHEMA message JSON dictionary.
+ """
+ yield singer.SchemaMessage.from_dict(message_dict)
+
+ def map_record_message(
+ self,
+ message_dict: dict,
+ ) -> t.Iterable[singer.RecordMessage]:
+ """Map a record message to zero or more new messages.
+
+ Args:
+ message_dict: A RECORD message JSON dictionary.
+ """
+ yield singer.RecordMessage.from_dict(message_dict)
+
+ def map_state_message(self, message_dict: dict) -> t.Iterable[singer.Message]:
+ """Map a state message to zero or more new messages.
+
+ Args:
+ message_dict: A STATE message JSON dictionary.
+ """
+ yield singer.StateMessage.from_dict(message_dict)
+
+ def map_activate_version_message(
+ self,
+ message_dict: dict,
+ ) -> t.Iterable[singer.Message]:
+ """Map a version message to zero or more new messages.
+
+ Args:
+ message_dict: An ACTIVATE_VERSION message JSON dictionary.
+ """
+ yield singer.ActivateVersionMessage.from_dict(message_dict)
+
+
+if __name__ == "__main__":
+ {{ cookiecutter.name }}Mapper.cli()
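For orientation, a usage sketch of the hooks defined in the generated class (assuming the default `name = "MyMapperName"`, so the package is `mapper_mymappername`; this illustration is not part of the template):

```python
"""Hedged usage sketch for the generated InlineMapper subclass."""

from mapper_mymappername.mapper import MyMapperNameMapper  # hypothetical package

mapper = MyMapperNameMapper(config={"example_config": "foo"})

record_message = {
    "type": "RECORD",
    "stream": "animals",
    "record": {"id": 1, "name": "aardvark"},
}

# Each hook yields zero or more Singer messages; the template passes
# messages through unchanged, so this prints the same record back.
for message in mapper.map_record_message(record_message):
    print(message.to_dict())
```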
diff --git a/cookiecutter/tap-template/cookiecutter.json b/cookiecutter/tap-template/cookiecutter.json
index 8da7a099b..017b31109 100644
--- a/cookiecutter/tap-template/cookiecutter.json
+++ b/cookiecutter/tap-template/cookiecutter.json
@@ -1,6 +1,7 @@
{
"source_name": "MySourceName",
"admin_name": "FirstName LastName",
+ "admin_email": "firstname.lastname@example.com",
"tap_id": "tap-{{ cookiecutter.source_name.lower() }}",
"library_name": "{{ cookiecutter.tap_id.replace('-', '_') }}",
"variant": "None (Skip)",
@@ -13,6 +14,17 @@
"JWT",
"Custom or N/A"
],
- "include_ci_files": ["GitHub", "None (Skip)"],
- "license": ["Apache-2.0"]
+ "include_ci_files": ["GitHub", "None"],
+ "license": ["Apache-2.0", "None"],
+ "__prompts__": {
+ "source_name": "The name of the source, in CamelCase",
+ "admin_name": "Provide your [bold yellow]full name[/]",
+ "admin_email": "Provide your [bold yellow]email[/]",
+ "tap_id": "The ID of the tap, in kebab-case",
+ "library_name": "The name of the library, in snake_case. This is how the library will be imported in Python.",
+ "stream_type": "The type of stream the source provides",
+ "auth_method": "The [bold red]authentication[/] method used by the source, for REST and GraphQL sources",
+    "include_ci_files": "Whether to include CI files for a common CI service",
+ "license": "The license for the project"
+ }
}
diff --git a/cookiecutter/tap-template/hooks/post_gen_project.py b/cookiecutter/tap-template/hooks/post_gen_project.py
new file mode 100644
index 000000000..775a3e1ed
--- /dev/null
+++ b/cookiecutter/tap-template/hooks/post_gen_project.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python
+import shutil
+from pathlib import Path
+
+
+PACKAGE_PATH = Path("{{cookiecutter.library_name}}")
+
+
+if __name__ == "__main__":
+ # Rename stream type client and delete others
+ target = PACKAGE_PATH / "client.py"
+ raw_client_py = PACKAGE_PATH / "{{cookiecutter.stream_type|lower}}-client.py"
+ raw_client_py.rename(target)
+
+ for client_py in PACKAGE_PATH.rglob("*-client.py"):
+ client_py.unlink()
+
+ if "{{ cookiecutter.auth_method }}" not in ("OAuth2", "JWT"):
+ PACKAGE_PATH.joinpath("auth.py").unlink()
+
+ if "{{ cookiecutter.stream_type }}" == "SQL":
+ PACKAGE_PATH.joinpath("streams.py").unlink()
+
+ if "{{ cookiecutter.license }}" == "None":
+ Path("LICENSE").unlink()
+
+ if "{{ cookiecutter.include_ci_files }}" != "GitHub":
+ shutil.rmtree(".github")
diff --git a/cookiecutter/target-template/{{cookiecutter.target_id}}/.github/{% if cookiecutter.include_ci_files == 'GitHub' %}dependabot.yml{%endif%} b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/.github/dependabot.yml
similarity index 100%
rename from cookiecutter/target-template/{{cookiecutter.target_id}}/.github/{% if cookiecutter.include_ci_files == 'GitHub' %}dependabot.yml{%endif%}
rename to cookiecutter/tap-template/{{cookiecutter.tap_id}}/.github/dependabot.yml
diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/.github/workflows/{% if cookiecutter.include_ci_files == 'GitHub' %}test.yml{%endif%} b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/.github/workflows/test.yml
similarity index 83%
rename from cookiecutter/tap-template/{{cookiecutter.tap_id}}/.github/workflows/{% if cookiecutter.include_ci_files == 'GitHub' %}test.yml{%endif%}
rename to cookiecutter/tap-template/{{cookiecutter.tap_id}}/.github/workflows/test.yml
index 0ea2f9ae7..3922fc4ed 100644
--- a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/.github/workflows/{% if cookiecutter.include_ci_files == 'GitHub' %}test.yml{%endif%}
+++ b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/.github/workflows/test.yml
@@ -12,11 +12,11 @@ jobs:
GITHUB_TOKEN: {{ '${{secrets.GITHUB_TOKEN}}' }}
strategy:
matrix:
- python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"]
+ python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- name: Set up Python {{ '${{ matrix.python-version }}' }}
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
python-version: {{ '${{ matrix.python-version }}' }}
- name: Install Poetry
diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/.pre-commit-config.yaml b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/.pre-commit-config.yaml
index fe3a4d7ca..5e3f6cb83 100644
--- a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/.pre-commit-config.yaml
+++ b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/.pre-commit-config.yaml
@@ -5,7 +5,7 @@ ci:
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
- rev: v4.4.0
+ rev: v4.5.0
hooks:
- id: check-json
- id: check-toml
@@ -14,24 +14,20 @@ repos:
- id: trailing-whitespace
- repo: https://github.com/python-jsonschema/check-jsonschema
- rev: 0.23.0
+ rev: 0.27.3
hooks:
- id: check-dependabot
- id: check-github-workflows
-- repo: https://github.com/charliermarsh/ruff-pre-commit
- rev: v0.0.269
+- repo: https://github.com/astral-sh/ruff-pre-commit
+ rev: v0.1.11
hooks:
- id: ruff
args: [--fix, --exit-non-zero-on-fix, --show-fixes]
-
-- repo: https://github.com/psf/black
- rev: 23.3.0
- hooks:
- - id: black
+ - id: ruff-format
- repo: https://github.com/pre-commit/mirrors-mypy
- rev: v1.3.0
+ rev: v1.8.0
hooks:
- id: mypy
additional_dependencies:
diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/LICENSE b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/LICENSE
new file mode 100644
index 000000000..62913ff3a
--- /dev/null
+++ b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/LICENSE
@@ -0,0 +1,202 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+
+ Copyright {% now 'utc', '%Y' %} {{ cookiecutter.admin_name }}
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/pyproject.toml b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/pyproject.toml
index 3eaed335b..b1902b724 100644
--- a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/pyproject.toml
+++ b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/pyproject.toml
@@ -7,11 +7,20 @@ name = "{{cookiecutter.tap_id}}"
version = "0.0.1"
description = "`{{cookiecutter.tap_id}}` is a Singer tap for {{cookiecutter.source_name}}, built with the Meltano Singer SDK."
readme = "README.md"
-authors = ["{{ cookiecutter.admin_name }}"]
+authors = ["{{ cookiecutter.admin_name }} <{{ cookiecutter.admin_email }}>"]
keywords = [
"ELT",
"{{cookiecutter.source_name}}",
]
+classifiers = [
+ "Intended Audience :: Developers",
+ "Operating System :: OS Independent",
+ "Programming Language :: Python :: 3.8",
+ "Programming Language :: Python :: 3.9",
+ "Programming Language :: Python :: 3.10",
+ "Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
+]
license = "Apache-2.0"
{%- if cookiecutter.variant != "None (Skip)" %}
packages = [
@@ -20,51 +29,52 @@ packages = [
{%- endif %}
[tool.poetry.dependencies]
-python = "<3.12,>=3.7.1"
-singer-sdk = { version="^0.30.0" }
-fs-s3fs = { version = "^1.1.1", optional = true }
+python = ">=3.8"
+importlib-resources = { version = "==6.1.*", python = "<3.9" }
+singer-sdk = { version="~=0.34.1" }
+fs-s3fs = { version = "~=1.1.1", optional = true }
{%- if cookiecutter.stream_type in ["REST", "GraphQL"] %}
-requests = "^2.31.0"
-{%- endif %}
-{%- if cookiecutter.auth_method in ("OAuth2", "JWT") %}
-cached-property = "^1" # Remove after Python 3.7 support is dropped
+requests = "~=2.31.0"
{%- endif %}
[tool.poetry.group.dev.dependencies]
-pytest = "^7.2.1"
-singer-sdk = { version="^0.30.0", extras = ["testing"] }
+pytest = ">=7.4.0"
+singer-sdk = { version="~=0.34.1", extras = ["testing"] }
[tool.poetry.extras]
s3 = ["fs-s3fs"]
[tool.mypy]
-python_version = "3.9"
+python_version = "3.11"
warn_unused_configs = true
{%- if cookiecutter.stream_type == 'SQL' %}
plugins = "sqlmypy"
{%- endif %}
[tool.ruff]
+src = ["{{cookiecutter.library_name}}"]
+target-version = "py38"
+
+[tool.ruff.lint]
ignore = [
"ANN101", # missing-type-self
"ANN102", # missing-type-cls
+ "COM812", # missing-trailing-comma
+ "ISC001", # single-line-implicit-string-concatenation
]
select = ["ALL"]
-src = ["{{cookiecutter.library_name}}"]
-target-version = "py37"
-
-[tool.ruff.flake8-annotations]
+[tool.ruff.lint.flake8-annotations]
allow-star-arg-any = true
-[tool.ruff.isort]
+[tool.ruff.lint.isort]
known-first-party = ["{{cookiecutter.library_name}}"]
-[tool.ruff.pydocstyle]
+[tool.ruff.lint.pydocstyle]
convention = "google"
[build-system]
-requires = ["poetry-core>=1.0.8"]
+requires = ["poetry-core==1.8.1"]
build-backend = "poetry.core.masonry.api"
[tool.poetry.scripts]
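The dependency constraints above trade Poetry's caret (`^`) ranges for PEP 440 compatible-release (`~=`) specifiers, which any standards-aware packaging tool can parse. A minimal sketch of what `~=0.34.1` admits, using the third-party `packaging` library purely for illustration (it is not part of this diff):

```python
# Sketch: semantics of the compatible-release specifier used in the new pins.
from packaging.specifiers import SpecifierSet

spec = SpecifierSet("~=0.34.1")  # equivalent to: >=0.34.1, ==0.34.*

print("0.34.5" in spec)  # True  - patch releases are picked up automatically
print("0.35.0" in spec)  # False - minor bumps need an explicit pin update
```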
diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/tests/{{ 'test' }}_core.py b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/tests/test_core.py
similarity index 100%
rename from cookiecutter/tap-template/{{cookiecutter.tap_id}}/tests/{{ 'test' }}_core.py
rename to cookiecutter/tap-template/{{cookiecutter.tap_id}}/tests/test_core.py
diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/tox.ini b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/tox.ini
index 70b9e4ac7..6be1c116a 100644
--- a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/tox.ini
+++ b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/tox.ini
@@ -1,7 +1,7 @@
# This file can be used to customize tox tests as well as other test frameworks like flake8 and mypy
[tox]
-envlist = py37, py38, py39, py310, py311
+envlist = py{38,39,310,311,312}
isolated_build = true
[testenv]
@@ -13,7 +13,7 @@ commands =
[testenv:pytest]
# Run the python tests.
# To execute, run `tox -e pytest`
-envlist = py37, py38, py39, py310, py311
+envlist = py{38,39,310,311,312}
commands =
poetry install -v
poetry run pytest
diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if cookiecutter.auth_method in ('OAuth2', 'JWT')%}auth.py{%endif%} b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/auth.py
similarity index 100%
rename from cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if cookiecutter.auth_method in ('OAuth2', 'JWT')%}auth.py{%endif%}
rename to cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/auth.py
diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if 'GraphQL' == cookiecutter.stream_type %}client.py{%endif%} b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/graphql-client.py
similarity index 100%
rename from cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if 'GraphQL' == cookiecutter.stream_type %}client.py{%endif%}
rename to cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/graphql-client.py
diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if 'Other' == cookiecutter.stream_type %}client.py{%endif%} b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/other-client.py
similarity index 100%
rename from cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if 'Other' == cookiecutter.stream_type %}client.py{%endif%}
rename to cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/other-client.py
diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if 'REST' == cookiecutter.stream_type %}client.py{%endif%} b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/rest-client.py
similarity index 93%
rename from cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if 'REST' == cookiecutter.stream_type %}client.py{%endif%}
rename to cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/rest-client.py
index f777e6d00..26c1447c9 100644
--- a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if 'REST' == cookiecutter.stream_type %}client.py{%endif%}
+++ b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/rest-client.py
@@ -2,10 +2,10 @@
from __future__ import annotations
-{% if cookiecutter.auth_method in ("OAuth2", "JWT") -%}
import sys
-{% endif -%}
-from pathlib import Path
+{%- if cookiecutter.auth_method in ("OAuth2", "JWT") %}
+from functools import cached_property
+{%- endif %}
from typing import Any, Callable, Iterable
import requests
@@ -41,16 +41,15 @@
{% endif -%}
-{%- if cookiecutter.auth_method in ("OAuth2", "JWT") -%}
-if sys.version_info >= (3, 8):
- from functools import cached_property
+if sys.version_info >= (3, 9):
+ import importlib.resources as importlib_resources
else:
- from cached_property import cached_property
-
-{% endif -%}
+ import importlib_resources
_Auth = Callable[[requests.PreparedRequest], requests.PreparedRequest]
-SCHEMAS_DIR = Path(__file__).parent / Path("./schemas")
+
+# TODO: Delete this if not using JSON files for schema definition
+SCHEMAS_DIR = importlib_resources.files(__package__) / "schemas"
class {{ cookiecutter.source_name }}Stream({{ cookiecutter.stream_type }}Stream):
@@ -159,7 +158,7 @@ def get_new_paginator(self) -> BaseAPIPaginator:
def get_url_params(
self,
context: dict | None, # noqa: ARG002
- next_page_token: Any | None,
+ next_page_token: Any | None, # noqa: ANN401
) -> dict[str, Any]:
"""Return a dictionary of values to be used in URL parameterization.
@@ -181,7 +180,7 @@ def get_url_params(
def prepare_request_payload(
self,
context: dict | None, # noqa: ARG002
- next_page_token: Any | None, # noqa: ARG002
+ next_page_token: Any | None, # noqa: ARG002, ANN401
) -> dict | None:
"""Prepare the data payload for the REST API request.
diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if 'SQL' == cookiecutter.stream_type %}client.py{%endif%} b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/sql-client.py
similarity index 100%
rename from cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if 'SQL' == cookiecutter.stream_type %}client.py{%endif%}
rename to cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/sql-client.py
diff --git a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if 'SQL' != cookiecutter.stream_type %}streams.py{%endif%} b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/streams.py
similarity index 90%
rename from cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if 'SQL' != cookiecutter.stream_type %}streams.py{%endif%}
rename to cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/streams.py
index 420017950..69c955e6f 100644
--- a/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/{%if 'SQL' != cookiecutter.stream_type %}streams.py{%endif%}
+++ b/cookiecutter/tap-template/{{cookiecutter.tap_id}}/{{cookiecutter.library_name}}/streams.py
@@ -2,14 +2,21 @@
from __future__ import annotations
-from pathlib import Path
+import sys
+import typing as t
from singer_sdk import typing as th # JSON Schema typing helpers
from {{ cookiecutter.library_name }}.client import {{ cookiecutter.source_name }}Stream
+if sys.version_info >= (3, 9):
+ import importlib.resources as importlib_resources
+else:
+ import importlib_resources
+
+
# TODO: Delete this if not using JSON files for schema definition
-SCHEMAS_DIR = Path(__file__).parent / Path("./schemas")
+SCHEMAS_DIR = importlib_resources.files(__package__) / "schemas"
{%- if cookiecutter.stream_type == "GraphQL" %}
@@ -54,7 +61,7 @@ class UsersStream({{ cookiecutter.source_name }}Stream):
),
),
).to_dict()
- primary_keys = ["id"]
+ primary_keys: t.ClassVar[list[str]] = ["id"]
replication_key = None
graphql_query = """
users {
@@ -81,7 +88,7 @@ class GroupsStream({{ cookiecutter.source_name }}Stream):
th.Property("id", th.StringType),
th.Property("modified", th.DateTimeType),
).to_dict()
- primary_keys = ["id"]
+ primary_keys: t.ClassVar[list[str]] = ["id"]
replication_key = "modified"
graphql_query = """
groups {
@@ -104,7 +111,7 @@ class UsersStream({{ cookiecutter.source_name }}Stream):
{%- if cookiecutter.stream_type == "REST" %}
path = "/users"
{%- endif %}
- primary_keys = ["id"]
+ primary_keys: t.ClassVar[list[str]] = ["id"]
replication_key = None
# Optionally, you may also use `schema_filepath` in place of `schema`:
# schema_filepath = SCHEMAS_DIR / "users.json" # noqa: ERA001
@@ -143,7 +150,7 @@ class GroupsStream({{ cookiecutter.source_name }}Stream):
{%- if cookiecutter.stream_type == "REST" %}
path = "/groups"
{%- endif %}
- primary_keys = ["id"]
+ primary_keys: t.ClassVar[list[str]] = ["id"]
replication_key = "modified"
schema = th.PropertiesList(
th.Property("name", th.StringType),
diff --git a/cookiecutter/target-template/cookiecutter.json b/cookiecutter/target-template/cookiecutter.json
index 4816b54aa..c7c31835a 100644
--- a/cookiecutter/target-template/cookiecutter.json
+++ b/cookiecutter/target-template/cookiecutter.json
@@ -1,10 +1,21 @@
{
"destination_name": "MyDestinationName",
"admin_name": "FirstName LastName",
+ "admin_email": "firstname.lastname@example.com",
"target_id": "target-{{ cookiecutter.destination_name.lower() }}",
"library_name": "{{ cookiecutter.target_id.replace('-', '_') }}",
"variant": "None (Skip)",
"serialization_method": ["Per record", "Per batch", "SQL"],
"include_ci_files": ["GitHub", "None (Skip)"],
- "license": ["Apache-2.0"]
+ "license": ["Apache-2.0"],
+ "__prompts__": {
+ "name": "The name of the mapper, in CamelCase",
+ "admin_name": "Provide your [bold yellow]full name[/]",
+ "admin_email": "Provide your [bold yellow]email[/]",
+ "mapper_id": "The ID of the tap, in kebab-case",
+ "library_name": "The name of the library, in snake_case. This is how the library will be imported in Python.",
+ "serialization_method": "The serialization method to use for loading data",
+ "include_ci_files": "Whether to include CI files for a common CI services",
+ "license": "The license for the project"
+ }
}
diff --git a/cookiecutter/target-template/hooks/post_gen_project.py b/cookiecutter/target-template/hooks/post_gen_project.py
new file mode 100644
index 000000000..44edd337b
--- /dev/null
+++ b/cookiecutter/target-template/hooks/post_gen_project.py
@@ -0,0 +1,14 @@
+#!/usr/bin/env python
+from pathlib import Path
+import shutil
+
+
+BASE_PATH = Path("{{cookiecutter.library_name}}")
+
+
+if __name__ == "__main__":
+ if "{{ cookiecutter.license }}" != "Apache-2.0":
+ Path("LICENSE").unlink()
+
+ if "{{ cookiecutter.include_ci_files }}" != "GitHub":
+ shutil.rmtree(Path(".github"))
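Cookiecutter renders the Jinja placeholders in this hook before running it, so the comparisons execute against literal strings. A sketch of the rendered result for the `target-per_record` replay file that appears later in this diff (license `Apache-2.0`, CI files skipped):

```python
#!/usr/bin/env python
# Sketch: hooks/post_gen_project.py after cookiecutter substitutes the answers.
from pathlib import Path
import shutil


BASE_PATH = Path("target_per_record")


if __name__ == "__main__":
    if "Apache-2.0" != "Apache-2.0":  # False, so the LICENSE file is kept
        Path("LICENSE").unlink()

    if "None (Skip)" != "GitHub":  # True, so the .github directory is removed
        shutil.rmtree(Path(".github"))
```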
diff --git a/cookiecutter/target-template/{{cookiecutter.target_id}}/.github/dependabot.yml b/cookiecutter/target-template/{{cookiecutter.target_id}}/.github/dependabot.yml
new file mode 100644
index 000000000..933e6b1c2
--- /dev/null
+++ b/cookiecutter/target-template/{{cookiecutter.target_id}}/.github/dependabot.yml
@@ -0,0 +1,26 @@
+# To get started with Dependabot version updates, you'll need to specify which
+# package ecosystems to update and where the package manifests are located.
+# Please see the documentation for all configuration options:
+# https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
+
+version: 2
+updates:
+ - package-ecosystem: pip
+ directory: "/"
+ schedule:
+ interval: "daily"
+ commit-message:
+ prefix: "chore(deps): "
+ prefix-development: "chore(deps-dev): "
+ - package-ecosystem: pip
+ directory: "/.github/workflows"
+ schedule:
+ interval: daily
+ commit-message:
+ prefix: "ci: "
+ - package-ecosystem: github-actions
+ directory: "/"
+ schedule:
+ interval: "weekly"
+ commit-message:
+ prefix: "ci: "
diff --git a/cookiecutter/target-template/{{cookiecutter.target_id}}/.github/workflows/{% if cookiecutter.include_ci_files == 'GitHub' %}test.yml{%endif%} b/cookiecutter/target-template/{{cookiecutter.target_id}}/.github/workflows/test.yml
similarity index 83%
rename from cookiecutter/target-template/{{cookiecutter.target_id}}/.github/workflows/{% if cookiecutter.include_ci_files == 'GitHub' %}test.yml{%endif%}
rename to cookiecutter/target-template/{{cookiecutter.target_id}}/.github/workflows/test.yml
index 4544911a6..6f9d71cae 100644
--- a/cookiecutter/target-template/{{cookiecutter.target_id}}/.github/workflows/{% if cookiecutter.include_ci_files == 'GitHub' %}test.yml{%endif%}
+++ b/cookiecutter/target-template/{{cookiecutter.target_id}}/.github/workflows/test.yml
@@ -12,11 +12,11 @@ jobs:
GITHUB_TOKEN: {{ '${{secrets.GITHUB_TOKEN}}' }}
strategy:
matrix:
- python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"]
+ python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- name: Set up Python {{ '${{ matrix.python-version }}' }}
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
python-version: {{ '${{ matrix.python-version }}' }}
- name: Install Poetry
diff --git a/cookiecutter/target-template/{{cookiecutter.target_id}}/.pre-commit-config.yaml b/cookiecutter/target-template/{{cookiecutter.target_id}}/.pre-commit-config.yaml
index 8d4c83fea..c89f290a1 100644
--- a/cookiecutter/target-template/{{cookiecutter.target_id}}/.pre-commit-config.yaml
+++ b/cookiecutter/target-template/{{cookiecutter.target_id}}/.pre-commit-config.yaml
@@ -5,7 +5,7 @@ ci:
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
- rev: v4.4.0
+ rev: v4.5.0
hooks:
- id: check-json
- id: check-toml
@@ -14,24 +14,20 @@ repos:
- id: trailing-whitespace
- repo: https://github.com/python-jsonschema/check-jsonschema
- rev: 0.23.0
+ rev: 0.27.3
hooks:
- id: check-dependabot
- id: check-github-workflows
-- repo: https://github.com/charliermarsh/ruff-pre-commit
- rev: v0.0.269
+- repo: https://github.com/astral-sh/ruff-pre-commit
+ rev: v0.1.11
hooks:
- id: ruff
args: [--fix, --exit-non-zero-on-fix, --show-fixes]
-
-- repo: https://github.com/psf/black
- rev: 23.3.0
- hooks:
- - id: black
+ - id: ruff-format
- repo: https://github.com/pre-commit/mirrors-mypy
- rev: v1.3.0
+ rev: v1.8.0
hooks:
- id: mypy
additional_dependencies:
diff --git a/cookiecutter/target-template/{{cookiecutter.target_id}}/{%if 'Apache-2.0' == cookiecutter.license %}LICENSE{%endif%} b/cookiecutter/target-template/{{cookiecutter.target_id}}/LICENSE
similarity index 100%
rename from cookiecutter/target-template/{{cookiecutter.target_id}}/{%if 'Apache-2.0' == cookiecutter.license %}LICENSE{%endif%}
rename to cookiecutter/target-template/{{cookiecutter.target_id}}/LICENSE
diff --git a/cookiecutter/target-template/{{cookiecutter.target_id}}/pyproject.toml b/cookiecutter/target-template/{{cookiecutter.target_id}}/pyproject.toml
index b61233111..7bcceaf69 100644
--- a/cookiecutter/target-template/{{cookiecutter.target_id}}/pyproject.toml
+++ b/cookiecutter/target-template/{{cookiecutter.target_id}}/pyproject.toml
@@ -7,11 +7,20 @@ name = "{{cookiecutter.target_id}}"
version = "0.0.1"
description = "`{{cookiecutter.target_id}}` is a Singer target for {{cookiecutter.destination_name}}, built with the Meltano Singer SDK."
readme = "README.md"
-authors = ["{{ cookiecutter.admin_name }}"]
+authors = ["{{ cookiecutter.admin_name }} <{{ cookiecutter.admin_email }}>"]
keywords = [
"ELT",
"{{cookiecutter.destination_name}}",
]
+classifiers = [
+ "Intended Audience :: Developers",
+ "Operating System :: OS Independent",
+ "Programming Language :: Python :: 3.8",
+ "Programming Language :: Python :: 3.9",
+ "Programming Language :: Python :: 3.10",
+ "Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
+]
license = "Apache-2.0"
{%- if cookiecutter.variant != "None (Skip)" %}
packages = [
@@ -20,40 +29,44 @@ packages = [
{%- endif %}
[tool.poetry.dependencies]
-python = "<3.12,>=3.7.1"
-singer-sdk = { version="^0.30.0" }
-fs-s3fs = { version = "^1.1.1", optional = true }
+python = ">=3.8"
+singer-sdk = { version="~=0.34.1" }
+fs-s3fs = { version = "~=1.1.1", optional = true }
{%- if cookiecutter.serialization_method != "SQL" %}
-requests = "^2.31.0"
+requests = "~=2.31.0"
{%- endif %}
[tool.poetry.dev-dependencies]
-pytest = "^7.2.1"
-singer-sdk = { version="^0.30.0", extras = ["testing"] }
+pytest = ">=7.4.0"
+singer-sdk = { version="~=0.34.1", extras = ["testing"] }
[tool.poetry.extras]
s3 = ["fs-s3fs"]
[tool.ruff]
+src = ["{{cookiecutter.library_name}}"]
+target-version = "py38"
+
+[tool.ruff.lint]
ignore = [
"ANN101", # missing-type-self
"ANN102", # missing-type-cls
+ "COM812", # missing-trailing-comma
+ "ISC001", # single-line-implicit-string-concatenation
]
select = ["ALL"]
-src = ["{{cookiecutter.library_name}}"]
-target-version = "py37"
-[tool.ruff.flake8-annotations]
+[tool.ruff.lint.flake8-annotations]
allow-star-arg-any = true
-[tool.ruff.isort]
+[tool.ruff.lint.isort]
known-first-party = ["{{cookiecutter.library_name}}"]
-[tool.ruff.pydocstyle]
+[tool.ruff.lint.pydocstyle]
convention = "google"
[build-system]
-requires = ["poetry-core>=1.0.8"]
+requires = ["poetry-core==1.8.1"]
build-backend = "poetry.core.masonry.api"
[tool.poetry.scripts]
diff --git a/cookiecutter/target-template/{{cookiecutter.target_id}}/tests/{{ 'test' }}_core.py b/cookiecutter/target-template/{{cookiecutter.target_id}}/tests/{{ 'test' }}_core.py
index 2403b2a3a..6db9a5041 100644
--- a/cookiecutter/target-template/{{cookiecutter.target_id}}/tests/{{ 'test' }}_core.py
+++ b/cookiecutter/target-template/{{cookiecutter.target_id}}/tests/{{ 'test' }}_core.py
@@ -20,7 +20,7 @@
)
-class TestTarget{{ cookiecutter.destination_name }}(StandardTargetTests): # type: ignore[misc, valid-type] # noqa: E501
+class TestTarget{{ cookiecutter.destination_name }}(StandardTargetTests): # type: ignore[misc, valid-type]
"""Standard Target Tests."""
@pytest.fixture(scope="class")
diff --git a/cookiecutter/target-template/{{cookiecutter.target_id}}/tox.ini b/cookiecutter/target-template/{{cookiecutter.target_id}}/tox.ini
index 70b9e4ac7..6be1c116a 100644
--- a/cookiecutter/target-template/{{cookiecutter.target_id}}/tox.ini
+++ b/cookiecutter/target-template/{{cookiecutter.target_id}}/tox.ini
@@ -1,7 +1,7 @@
# This file can be used to customize tox tests as well as other test frameworks like flake8 and mypy
[tox]
-envlist = py37, py38, py39, py310, py311
+envlist = py{38,39,310,311,312}
isolated_build = true
[testenv]
@@ -13,7 +13,7 @@ commands =
[testenv:pytest]
# Run the python tests.
# To execute, run `tox -e pytest`
-envlist = py37, py38, py39, py310, py311
+envlist = py{38,39,310,311,312}
commands =
poetry install -v
poetry run pytest
diff --git a/cookiecutter/target-template/{{cookiecutter.target_id}}/{{cookiecutter.library_name}}/sinks.py b/cookiecutter/target-template/{{cookiecutter.target_id}}/{{cookiecutter.library_name}}/sinks.py
index 4e84d1284..9edd13a11 100644
--- a/cookiecutter/target-template/{{cookiecutter.target_id}}/{{cookiecutter.library_name}}/sinks.py
+++ b/cookiecutter/target-template/{{cookiecutter.target_id}}/{{cookiecutter.library_name}}/sinks.py
@@ -35,6 +35,7 @@ class {{ cookiecutter.destination_name }}Connector(SQLConnector):
allow_column_rename: bool = True # Whether RENAME COLUMN is supported.
allow_column_alter: bool = False # Whether altering column types is supported.
allow_merge_upsert: bool = False # Whether MERGE UPSERT is supported.
+ allow_overwrite: bool = False # Whether overwrite load method is supported.
allow_temp_tables: bool = True # Whether temp tables are supported.
def get_sqlalchemy_url(self, config: dict) -> str:
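`allow_overwrite` joins the other capability toggles on `SQLConnector`; a concrete connector opts in by overriding the flag. A minimal sketch with a hypothetical destination:

```python
# Sketch: enabling the overwrite load method on a hypothetical SQL connector.
from singer_sdk import SQLConnector


class MyDestinationConnector(SQLConnector):
    allow_overwrite: bool = True  # this destination can replace a table's contents

    def get_sqlalchemy_url(self, config: dict) -> str:
        # Hypothetical: the target's config supplies a full SQLAlchemy URL.
        return config["sqlalchemy_url"]
```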
diff --git a/docs/code_samples.md b/docs/code_samples.md
index 0cd79f563..6d9efefd1 100644
--- a/docs/code_samples.md
+++ b/docs/code_samples.md
@@ -240,7 +240,7 @@ class SingletonAuthStream(RESTStream):
### Make a stream reuse the same authenticator instance for all requests
```python
-from memoization import cached
+from functools import cached_property
from singer_sdk.authenticators import APIAuthenticatorBase
from singer_sdk.streams import RESTStream
@@ -248,8 +248,7 @@ from singer_sdk.streams import RESTStream
class CachedAuthStream(RESTStream):
"""A stream with singleton authenticator."""
- @property
- @cached
+ @cached_property
def authenticator(self) -> APIAuthenticatorBase:
"""Stream authenticator."""
return APIAuthenticatorBase(stream=self)
diff --git a/docs/conf.py b/docs/conf.py
index 61ac4b071..a784a46f5 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -26,7 +26,7 @@
author = "Meltano Core Team and Contributors"
# The full version, including alpha/beta/rc tags
-release = "0.30.0"
+release = "0.34.1"
# -- General configuration ---------------------------------------------------
@@ -43,6 +43,7 @@
"myst_parser",
"sphinx_reredirects",
"sphinx_inline_tabs",
+ "notfound.extension",
]
# Add any paths that contain templates here, relative to this directory.
@@ -56,6 +57,7 @@
# Show typehints in the description, along with parameter descriptions
autodoc_typehints = "signature"
autodoc_class_signature = "separated"
+autodoc_member_order = "groupwise"
# -- Options for HTML output -------------------------------------------------
@@ -70,7 +72,6 @@
"source_branch": "main",
"source_directory": "docs/",
"sidebar_hide_name": True,
- "announcement": 'Sign up for Public Beta today! Get a 20% discount on purchases before 27th of July!', # noqa: E501
# branding
"light_css_variables": {
"font-stack": "Hanken Grotesk,-apple-system,Helvetica,sans-serif",
diff --git a/docs/deprecation.md b/docs/deprecation.md
index 089355fd9..b27e61b2b 100644
--- a/docs/deprecation.md
+++ b/docs/deprecation.md
@@ -11,3 +11,5 @@ incompatible way, following their deprecation, as indicated in the
[`RESTStream.get_new_paginator`](singer_sdk.RESTStream.get_new_paginator).
See the [migration guide](./guides/pagination-classes.md) for more information.
+
+- The `singer_sdk.testing.get_standard_tap_tests` and `singer_sdk.testing.get_standard_target_tests` functions will be removed. Replace them with the `singer_sdk.testing.get_tap_test_class` and `singer_sdk.testing.get_target_test_class` functions, respectively, to generate a richer test suite.
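As a migration sketch (the tap class and config below are placeholders, not part of this diff):

```python
# Sketch: moving from the deprecated helper to the test-class factory.
# Before (deprecated):
#   from singer_sdk.testing import get_standard_tap_tests
#   tests = get_standard_tap_tests(TapExample, config=SAMPLE_CONFIG)
from singer_sdk.testing import get_tap_test_class

from tap_example.tap import TapExample  # placeholder tap

SAMPLE_CONFIG = {"start_date": "2024-01-01"}  # placeholder config

# After: generates a pytest test class with the richer built-in suite.
TestTapExample = get_tap_test_class(tap_class=TapExample, config=SAMPLE_CONFIG)
```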
diff --git a/docs/dev_guide.md b/docs/dev_guide.md
index b93eacedd..316cca8ad 100644
--- a/docs/dev_guide.md
+++ b/docs/dev_guide.md
@@ -76,6 +76,24 @@ generated `README.md` file to complete your new tap or target. You can also refe
[Meltano Tutorial](https://docs.meltano.com/tutorials/custom-extractor) for a more
detailed guide.
+````{admonition} Avoid repeating yourself
+ If you find yourself repeating the same inputs to the cookiecutter, you can create a
+ `cookiecutterrc` file in your home directory to set default values for the prompts.
+
+ For example, if you want to set the default value for your name and email, and the
+ default stream type and authentication method, you can add the following to your
+ `~/.cookiecutterrc` file:
+
+ ```yaml
+ # ~/.cookiecutterrc
+ default_context:
+ admin_name: Johnny B. Goode
+ admin_email: jbg@example.com
+ stream_type: REST
+ auth_method: Bearer Token
+ ```
+````
+
### Using an existing library
In some cases, there may already be a library that connects to the API and all you need the SDK for
@@ -239,13 +257,14 @@ We've had success using [`viztracer`](https://github.com/gaogaotiantian/viztrace
You can start doing the same in your package. Start by installing `viztracer`.
```console
-$ poetry add --dev viztracer
+$ poetry add --group dev viztracer
```
Then simply run your package's CLI as normal, preceded by the `viztracer` command
```console
$ poetry run viztracer my-tap
+$ poetry run viztracer -- my-target --config=config.json --input=messages.json
```
That command will produce a `result.json` file which you can explore with the `vizviewer` tool.
diff --git a/docs/stream_maps.md b/docs/stream_maps.md
index e4119d640..8d84d9ea0 100644
--- a/docs/stream_maps.md
+++ b/docs/stream_maps.md
@@ -47,6 +47,24 @@ three distinct fields:
- `user__last_name`
- `user__id`
+#### Flattening Example
+
+````{tab} meltano.yml
+```yaml
+flattening_enabled: true
+flattening_max_depth: 1 # flatten only top-level properties
+```
+````
+
+````{tab} JSON
+```json
+{
+ "flattening_enabled": true,
+ "flattening_max_depth": 1
+}
+```
+````
+
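To make the depth limit concrete, here is a sketch of flattening a nested record with `flattening_max_depth: 1`, assuming the SDK's usual behavior of serializing anything below the depth limit rather than unpacking it (data is hypothetical):

```python
# Sketch: effect of flattening_max_depth=1 on a hypothetical nested record.
record = {"user": {"id": 1, "name": {"first": "Ada", "last": "Lovelace"}}}

# Only the first level is unpacked; deeper objects are serialized as strings.
flattened = {
    "user__id": 1,
    "user__name": '{"first": "Ada", "last": "Lovelace"}',
}
```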
## Out-of-scope capabilities
These capabilities are all out of scope _by design_:
@@ -155,6 +173,32 @@ Expressions are defined and parsed using the
accepts most native python expressions and is extended by custom functions which have been declared
within the SDK.
+#### Compound Expressions
+
+Starting in version 0.33.0, the SDK supports simple comprehensions, e.g. `[x + 1 for x in [1, 2, 3]]`. This is a powerful feature that allows you to perform complex transformations on lists of values. For example, you can use a comprehension to filter out values in an array:
+
+````{tab} meltano.yml
+```yaml
+stream_maps:
+ users:
+ id: id
+ fields: "[f for f in fields if f['key'] != 'age']"
+```
+````
+
+````{tab} JSON
+```json
+{
+ "stream_maps": {
+ "users": {
+ "id": "id",
+ "fields": "[f for f in fields if f['key'] != 'age']"
+ }
+ }
+}
+```
+````
+
### Accessing Stream Properties within Mapping Expressions
By default, all stream properties are made available via the property's given name. For
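Since each mapping expression is evaluated once per record with the record's properties in scope, the `fields` expression above behaves like ordinary Python over a plain list. A quick sketch of the equivalent evaluation (record data is hypothetical):

```python
# Sketch: what the `fields` mapping expression computes for one record.
record = {
    "id": 1,
    "fields": [{"key": "name", "value": "Ada"}, {"key": "age", "value": 36}],
}

fields = record["fields"]
result = [f for f in fields if f["key"] != "age"]
print(result)  # [{'key': 'name', 'value': 'Ada'}]
```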
diff --git a/e2e-tests/cookiecutters/mapper-base.json b/e2e-tests/cookiecutters/mapper-base.json
new file mode 100644
index 000000000..390e8a7ba
--- /dev/null
+++ b/e2e-tests/cookiecutters/mapper-base.json
@@ -0,0 +1,14 @@
+{
+ "cookiecutter": {
+ "name": "MyMapperName",
+ "admin_name": "Automatic Tester",
+ "admin_email": "auto.tester@example.com",
+ "mapper_id": "mapper-base",
+ "library_name": "mapper_base",
+ "variant": "None (Skip)",
+ "include_ci_files": "None (Skip)",
+ "license": "Apache-2.0",
+ "_template": "../mapper-template/",
+ "_output_dir": "."
+ }
+}
diff --git a/e2e-tests/cookiecutters/tap-graphql-jwt.json b/e2e-tests/cookiecutters/tap-graphql-jwt.json
index 0c322e06f..5daf4ab8f 100644
--- a/e2e-tests/cookiecutters/tap-graphql-jwt.json
+++ b/e2e-tests/cookiecutters/tap-graphql-jwt.json
@@ -2,6 +2,7 @@
"cookiecutter": {
"source_name": "GraphQLJWTTemplateTest",
"admin_name": "Automatic Tester",
+ "admin_email": "auto.tester@example.com",
"tap_id": "tap-graphql-jwt",
"library_name": "tap_graphql_jwt",
"variant": "None (Skip)",
diff --git a/e2e-tests/cookiecutters/tap-other-custom.json b/e2e-tests/cookiecutters/tap-other-custom.json
index ac3816774..3ea01eaf4 100644
--- a/e2e-tests/cookiecutters/tap-other-custom.json
+++ b/e2e-tests/cookiecutters/tap-other-custom.json
@@ -2,6 +2,7 @@
"cookiecutter": {
"source_name": "AutomaticTestTap",
"admin_name": "Automatic Tester",
+ "admin_email": "auto.tester@example.com",
"tap_id": "tap-other-custom",
"library_name": "tap_other_custom",
"variant": "None (Skip)",
diff --git a/e2e-tests/cookiecutters/tap-rest-api_key-github.json b/e2e-tests/cookiecutters/tap-rest-api_key-github.json
index e65981940..01570aba8 100644
--- a/e2e-tests/cookiecutters/tap-rest-api_key-github.json
+++ b/e2e-tests/cookiecutters/tap-rest-api_key-github.json
@@ -2,6 +2,7 @@
"cookiecutter": {
"source_name": "AutomaticTestTap",
"admin_name": "Automatic Tester",
+ "admin_email": "auto.tester@example.com",
"tap_id": "tap-rest-api_key-github",
"library_name": "tap_rest_api_key_github",
"variant": "None (Skip)",
diff --git a/e2e-tests/cookiecutters/tap-rest-basic_auth.json b/e2e-tests/cookiecutters/tap-rest-basic_auth.json
index 33eb7b625..6c7d7fa19 100644
--- a/e2e-tests/cookiecutters/tap-rest-basic_auth.json
+++ b/e2e-tests/cookiecutters/tap-rest-basic_auth.json
@@ -2,6 +2,7 @@
"cookiecutter": {
"source_name": "AutomaticTestTap",
"admin_name": "Automatic Tester",
+ "admin_email": "auto.tester@example.com",
"tap_id": "tap-rest-basic_auth",
"library_name": "tap_rest_basic_auth",
"variant": "None (Skip)",
diff --git a/e2e-tests/cookiecutters/tap-rest-bearer_token.json b/e2e-tests/cookiecutters/tap-rest-bearer_token.json
index f506061dd..157457462 100644
--- a/e2e-tests/cookiecutters/tap-rest-bearer_token.json
+++ b/e2e-tests/cookiecutters/tap-rest-bearer_token.json
@@ -2,6 +2,7 @@
"cookiecutter": {
"source_name": "AutomaticTestTap",
"admin_name": "Automatic Tester",
+ "admin_email": "auto.tester@example.com",
"tap_id": "tap-rest-bearer_token",
"library_name": "tap_rest_bearer_token",
"variant": "None (Skip)",
diff --git a/e2e-tests/cookiecutters/tap-rest-custom.json b/e2e-tests/cookiecutters/tap-rest-custom.json
index 5d68d60bf..831135b7a 100644
--- a/e2e-tests/cookiecutters/tap-rest-custom.json
+++ b/e2e-tests/cookiecutters/tap-rest-custom.json
@@ -2,6 +2,7 @@
"cookiecutter": {
"source_name": "AutomaticTestTap",
"admin_name": "Automatic Tester",
+ "admin_email": "auto.tester@example.com",
"tap_id": "tap-rest-custom",
"library_name": "tap_rest_custom",
"variant": "None (Skip)",
diff --git a/e2e-tests/cookiecutters/tap-rest-jwt.json b/e2e-tests/cookiecutters/tap-rest-jwt.json
index 80837f244..b46807d49 100644
--- a/e2e-tests/cookiecutters/tap-rest-jwt.json
+++ b/e2e-tests/cookiecutters/tap-rest-jwt.json
@@ -2,6 +2,7 @@
"cookiecutter": {
"source_name": "AutomaticTestTap",
"admin_name": "Automatic Tester",
+ "admin_email": "auto.tester@example.com",
"tap_id": "tap-rest-jwt",
"library_name": "tap_rest_jwt",
"variant": "None (Skip)",
diff --git a/e2e-tests/cookiecutters/tap-rest-oauth2.json b/e2e-tests/cookiecutters/tap-rest-oauth2.json
index 27c7c39df..4a41b80e3 100644
--- a/e2e-tests/cookiecutters/tap-rest-oauth2.json
+++ b/e2e-tests/cookiecutters/tap-rest-oauth2.json
@@ -2,6 +2,7 @@
"cookiecutter": {
"source_name": "AutomaticTestTap",
"admin_name": "Automatic Tester",
+ "admin_email": "auto.tester@example.com",
"tap_id": "tap-rest-oauth2",
"library_name": "tap_rest_oauth2",
"variant": "None (Skip)",
diff --git a/e2e-tests/cookiecutters/tap-sql-custom.json b/e2e-tests/cookiecutters/tap-sql-custom.json
index 96fa379d7..3c5996860 100644
--- a/e2e-tests/cookiecutters/tap-sql-custom.json
+++ b/e2e-tests/cookiecutters/tap-sql-custom.json
@@ -2,6 +2,7 @@
"cookiecutter": {
"source_name": "AutomaticTestTap",
"admin_name": "Automatic Tester",
+ "admin_email": "auto.tester@example.com",
"tap_id": "tap-sql-custom",
"library_name": "tap_sql_custom",
"variant": "None (Skip)",
diff --git a/e2e-tests/cookiecutters/target-per_record.json b/e2e-tests/cookiecutters/target-per_record.json
index 9e0047af1..f5dde1cef 100644
--- a/e2e-tests/cookiecutters/target-per_record.json
+++ b/e2e-tests/cookiecutters/target-per_record.json
@@ -2,6 +2,7 @@
"cookiecutter": {
"destination_name": "MyDestinationName",
"admin_name": "FirstName LastName",
+ "admin_email": "firstname.lastname@example.com",
"target_id": "target-per_record",
"library_name": "target_per_record",
"variant": "None (Skip)",
diff --git a/e2e-tests/cookiecutters/target-sql.json b/e2e-tests/cookiecutters/target-sql.json
index 5802e5ede..63691d718 100644
--- a/e2e-tests/cookiecutters/target-sql.json
+++ b/e2e-tests/cookiecutters/target-sql.json
@@ -2,6 +2,7 @@
"cookiecutter": {
"destination_name": "MyDestinationName",
"admin_name": "FirstName LastName",
+ "admin_email": "firstname.lastname@example.com",
"target_id": "target-sql",
"library_name": "target_sql",
"variant": "None (Skip)",
diff --git a/noxfile.py b/noxfile.py
index 4c4949413..4171a26f8 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -2,7 +2,6 @@
from __future__ import annotations
-import glob
import os
import shutil
import sys
@@ -23,50 +22,53 @@
RUFF_OVERRIDES = """\
extend = "./pyproject.toml"
-extend-ignore = ["TD002", "TD003"]
+extend-ignore = ["TD002", "TD003", "FIX002"]
"""
+COOKIECUTTER_REPLAY_FILES = list(Path("./e2e-tests/cookiecutters").glob("*.json"))
+
package = "singer_sdk"
-python_versions = ["3.11", "3.10", "3.9", "3.8", "3.7"]
-main_python_version = "3.10"
+python_versions = ["3.12", "3.11", "3.10", "3.9", "3.8", "3.7"]
+main_python_version = "3.11"
locations = "singer_sdk", "tests", "noxfile.py", "docs/conf.py"
nox.options.sessions = (
"mypy",
"tests",
+ "benches",
"doctest",
"test_cookiecutter",
)
test_dependencies = [
"coverage[toml]",
+ "duckdb",
+ "duckdb-engine",
"pytest",
- "pytest-snapshot",
+ "pytest-benchmark",
"pytest-durations",
- "freezegun",
- "pandas",
+ "pytest-snapshot",
"pyarrow",
"requests-mock",
- # Cookiecutter tests
- "black",
- "cookiecutter",
- "PyYAML",
- "darglint",
- "flake8",
- "flake8-annotations",
- "flake8-docstrings",
- "mypy",
+ "time-machine",
]
-@session(python=python_versions)
+def _clean_py312_deps(session: Session, dependencies: list[str]) -> None:
+ """Clean dependencies for Python 3.12."""
+ if session.python == "3.12":
+ dependencies.remove("duckdb")
+ dependencies.remove("duckdb-engine")
+
+
+@session(python=main_python_version)
def mypy(session: Session) -> None:
"""Check types with mypy."""
args = session.posargs or ["singer_sdk"]
- session.install(".")
+ session.install(".[s3,testing,parquet]")
session.install(
+ "exceptiongroup",
"mypy",
"pytest",
"importlib-resources",
- "sqlalchemy2-stubs",
"types-jsonschema",
"types-python-dateutil",
"types-pytz",
@@ -82,9 +84,20 @@ def mypy(session: Session) -> None:
@session(python=python_versions)
def tests(session: Session) -> None:
"""Execute pytest tests and compute coverage."""
- session.install(".[s3]")
+ _clean_py312_deps(session, test_dependencies)
+ session.install(".[s3,parquet]")
session.install(*test_dependencies)
+ sqlalchemy_version = os.environ.get("SQLALCHEMY_VERSION")
+ if sqlalchemy_version:
+ # Bypass nox-poetry use of --constraint so we can install a version of
+ # SQLAlchemy that doesn't match what's in poetry.lock.
+ session.poetry.session.install( # type: ignore[attr-defined]
+ f"sqlalchemy=={sqlalchemy_version}.*",
+ )
+
+ env = {"COVERAGE_CORE": "sysmon"} if session.python == "3.12" else {}
+
try:
session.run(
"coverage",
@@ -92,23 +105,43 @@ def tests(session: Session) -> None:
"--parallel",
"-m",
"pytest",
- "-v",
"--durations=10",
+ "--benchmark-skip",
*session.posargs,
- env={
- "SQLALCHEMY_WARN_20": "1",
- },
+ env=env,
)
finally:
if session.interactive:
session.notify("coverage", posargs=[])
+@session(python=main_python_version)
+def benches(session: Session) -> None:
+ """Run benchmarks."""
+ _clean_py312_deps(session, test_dependencies)
+ session.install(".[s3]")
+ session.install(*test_dependencies)
+ sqlalchemy_version = os.environ.get("SQLALCHEMY_VERSION")
+ if sqlalchemy_version:
+ # Bypass nox-poetry use of --constraint so we can install a version of
+ # SQLAlchemy that doesn't match what's in poetry.lock.
+ session.poetry.session.install( # type: ignore[attr-defined]
+ f"sqlalchemy=={sqlalchemy_version}",
+ )
+ session.run(
+ "pytest",
+ "--benchmark-only",
+ "--benchmark-json=output.json",
+ *session.posargs,
+ )
+
+
@session(python=main_python_version)
def update_snapshots(session: Session) -> None:
"""Update pytest snapshots."""
args = session.posargs or ["-m", "snapshot"]
+ _clean_py312_deps(session, test_dependencies)
session.install(".")
session.install(*test_dependencies)
session.run("pytest", "--snapshot-update", *args)
@@ -180,36 +213,37 @@ def docs_serve(session: Session) -> None:
session.run("sphinx-autobuild", *args)
-@nox.parametrize("replay_file_path", glob.glob("./e2e-tests/cookiecutters/*.json"))
+@nox.parametrize("replay_file_path", COOKIECUTTER_REPLAY_FILES)
@session(python=main_python_version)
-def test_cookiecutter(session: Session, replay_file_path) -> None:
+def test_cookiecutter(session: Session, replay_file_path: str) -> None:
"""Uses the tap template to build an empty cookiecutter.
Runs the lint task on the created test project.
"""
- cc_build_path = tempfile.gettempdir()
- folder_base_path = "./cookiecutter"
-
- target_folder = (
- "tap-template"
- if Path(replay_file_path).name.startswith("tap")
- else "target-template"
- )
- tap_template = Path(folder_base_path + "/" + target_folder).resolve()
+ cc_build_path = Path(tempfile.gettempdir())
+ folder_base_path = Path("./cookiecutter")
replay_file = Path(replay_file_path).resolve()
- if not Path(tap_template).exists():
+ if replay_file.name.startswith("tap"):
+ folder = "tap-template"
+ elif replay_file.name.startswith("target"):
+ folder = "target-template"
+ else:
+ folder = "mapper-template"
+ template = folder_base_path.joinpath(folder).resolve()
+
+ if not template.exists():
return
- if not Path(replay_file).is_file():
+ if not replay_file.is_file():
return
- sdk_dir = Path(Path(tap_template).parent).parent
- cc_output_dir = Path(replay_file_path).name.replace(".json", "")
- cc_test_output = cc_build_path + "/" + cc_output_dir
+ sdk_dir = template.parent.parent
+ cc_output_dir = replay_file.name.replace(".json", "")
+ cc_test_output = cc_build_path.joinpath(cc_output_dir)
- if Path(cc_test_output).exists():
- session.run("rm", "-fr", cc_test_output, external=True)
+ if cc_test_output.exists():
+ session.run("rm", "-fr", str(cc_test_output), external=True)
session.install(".")
session.install("cookiecutter", "pythonsed")
@@ -218,9 +252,9 @@ def test_cookiecutter(session: Session, replay_file_path) -> None:
"cookiecutter",
"--replay-file",
str(replay_file),
- str(tap_template),
+ str(template),
"-o",
- cc_build_path,
+ str(cc_build_path),
)
session.chdir(cc_test_output)
@@ -241,3 +275,25 @@ def test_cookiecutter(session: Session, replay_file_path) -> None:
session.run("git", "init", external=True)
session.run("git", "add", ".", external=True)
session.run("pre-commit", "run", "--all-files", external=True)
+
+
+@session(name="version-bump")
+def version_bump(session: Session) -> None:
+ """Run commitizen."""
+ session.install(
+ "commitizen",
+ "commitizen-version-bump @ git+https://github.com/meltano/commitizen-version-bump.git@main",
+ )
+ default_args = [
+ "--changelog",
+ "--files-only",
+ "--check-consistency",
+ "--changelog-to-stdout",
+ ]
+ args = session.posargs or default_args
+
+ session.run(
+ "cz",
+ "bump",
+ *args,
+ )
diff --git a/poetry.lock b/poetry.lock
index 8aa96235c..d9d49f97c 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
[[package]]
name = "alabaster"
@@ -22,47 +22,15 @@ files = [
{file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"},
]
-[[package]]
-name = "argcomplete"
-version = "3.0.8"
-description = "Bash tab completion for argparse"
-optional = false
-python-versions = ">=3.6"
-files = [
- {file = "argcomplete-3.0.8-py3-none-any.whl", hash = "sha256:e36fd646839933cbec7941c662ecb65338248667358dd3d968405a4506a60d9b"},
- {file = "argcomplete-3.0.8.tar.gz", hash = "sha256:b9ca96448e14fa459d7450a4ab5a22bbf9cee4ba7adddf03e65c398b5daeea28"},
-]
-
-[package.dependencies]
-importlib-metadata = {version = ">=0.23,<7", markers = "python_version == \"3.7\""}
-
-[package.extras]
-test = ["coverage", "mypy", "pexpect", "ruff", "wheel"]
-
-[[package]]
-name = "arrow"
-version = "1.2.3"
-description = "Better dates & times for Python"
-optional = false
-python-versions = ">=3.6"
-files = [
- {file = "arrow-1.2.3-py3-none-any.whl", hash = "sha256:5a49ab92e3b7b71d96cd6bfcc4df14efefc9dfa96ea19045815914a6ab6b1fe2"},
- {file = "arrow-1.2.3.tar.gz", hash = "sha256:3934b30ca1b9f292376d9db15b19446088d12ec58629bc3f0da28fd55fb633a1"},
-]
-
-[package.dependencies]
-python-dateutil = ">=2.7.0"
-typing-extensions = {version = "*", markers = "python_version < \"3.8\""}
-
[[package]]
name = "attrs"
-version = "23.1.0"
+version = "23.2.0"
description = "Classes Without Boilerplate"
optional = false
python-versions = ">=3.7"
files = [
- {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"},
- {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"},
+ {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"},
+ {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"},
]
[package.dependencies]
@@ -70,25 +38,29 @@ importlib-metadata = {version = "*", markers = "python_version < \"3.8\""}
[package.extras]
cov = ["attrs[tests]", "coverage[toml] (>=5.3)"]
-dev = ["attrs[docs,tests]", "pre-commit"]
+dev = ["attrs[tests]", "pre-commit"]
docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"]
tests = ["attrs[tests-no-zope]", "zope-interface"]
-tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
+tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"]
+tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"]
[[package]]
name = "babel"
-version = "2.12.1"
+version = "2.14.0"
description = "Internationalization utilities"
optional = true
python-versions = ">=3.7"
files = [
- {file = "Babel-2.12.1-py3-none-any.whl", hash = "sha256:b4246fb7677d3b98f501a39d43396d3cafdc8eadb045f4a31be01863f655c610"},
- {file = "Babel-2.12.1.tar.gz", hash = "sha256:cc2d99999cd01d44420ae725a21c9e3711b3aadc7976d6147f622d8581963455"},
+ {file = "Babel-2.14.0-py3-none-any.whl", hash = "sha256:efb1a25b7118e67ce3a259bed20545c29cb68be8ad2c784c83689981b7a57287"},
+ {file = "Babel-2.14.0.tar.gz", hash = "sha256:6919867db036398ba21eb5c7a0f6b28ab8cbc3ae7a73a44ebe34ae74a4e7d363"},
]
[package.dependencies]
pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""}
+[package.extras]
+dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"]
+
[[package]]
name = "backoff"
version = "2.2.1"
@@ -100,6 +72,74 @@ files = [
{file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"},
]
+[[package]]
+name = "backports-datetime-fromisoformat"
+version = "2.0.1"
+description = "Backport of Python 3.11's datetime.fromisoformat"
+optional = false
+python-versions = ">3"
+files = [
+ {file = "backports-datetime-fromisoformat-2.0.1.tar.gz", hash = "sha256:1b6afca7f47019c22df43062cde73c1af65fbdebc66520f352c690d52fd27127"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b739ccd3f36244f618f1fbc21d89894d9dc9d1d75a68762fcf917d433df38ae3"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:afd072ca32f2ca4e838e0f7b61a56168d98837ee9a182c567a49a834e07c2b98"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a136d85f8b1db4747aa9e56a8caa0ba77c5c25b761b18e2169ea7b1b516f012"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d3a0579958ade7db62c8238163e05d46a4de61c99cebb40031ed7409a44d5f6"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:199df62af8feff5da0f4953fdc4a6994bcd7dbfe1db95901d8b93d05feda2ab5"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:afe32e60a471831058ede14fc226d9f14120e6dc67d66fbbd36e1724826ad70b"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:a1ba7e226a9694b20b713867f71b5ed2f662603c39875f14f968608d331fc96a"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:403f155deecbe94d43d0679a74abb5c9ac441422a9ececcfde030fb133865659"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4d2ee049997d3aa2e714489cb3c34864fb0f25786e7a4ff04ac9d82af58b453"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:20aa422769af9f72ca41d83238d4a3a008d6cd74bcff0a08befb11b0018d6aa5"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8ea8d85c3c9afa4ad51b6644d26516d43493f44c2131c12a2ba959433f4417f6"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:812b8c34e88a7d9615c604f1a0473a4e6d664aba94086bffb0c55627f9e3fb68"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:df5365930320b7a9d404cd6f7bc13988e28355e812aa42e21aa5c93443dcdd2e"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fe3e3968c8dce4a44da2da81a6031e992a4ee62d130c2536696d215a4db2ce3c"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:36a4abb678ab0d6a1965d70e21e424bcf7a52086a7afb1c5f13243a3d44fa2dd"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96b7e806ade09a91d8ce195c197fc799d8fbe6b8ea9cde21f8a01f1090e51e33"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:002a77bd4f549ff5e80f1ef4a9b69982746dd6190786b90abe3d9c69c9883ce4"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7b4ad0013a96b656ebf85079921ffb211623a1e28ff4981b3927690a2ed6df54"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:065421723e735ce8f68dbb4486f07562ce8556ed543ceaa012189b9aa209f303"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a4bf1bec08bc84095ee379202466c948fe12cff1442f58ee1a91fac4c5164c97"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1836aff09b8317e179cc7288856b61a450515d4b411f0ab723dc9692dfa5362e"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:815f85a581b142bcf34632c3ce26f7e21003f101ce88b5649631330e334bbe35"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a6986cfd3bc40b41465a6c54c18a30ca8110333d0b71f6062af136db11c8ff0"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:82741e732d71f78b44a8c3b95f33b3630e7bfbdb02e3fede3938cdf15d5b6a83"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4eac27abb51ee84e08d1dd1e908c16cae2078c217ff5b54092e6cb92107b4c6c"},
+ {file = "backports_datetime_fromisoformat-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:3b730d72061523be9600bcd281ef353f7f73b1df095adbbdc364aac8f430c44c"},
+ {file = "backports_datetime_fromisoformat-2.0.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6e8f28f4a68539192473f427ed86794931502d186e2fffa1926250550c1335a"},
+ {file = "backports_datetime_fromisoformat-2.0.1-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0cef151f1df77e413dc179607edb5bee11949ca5890e81c0bb742d96fec753fe"},
+ {file = "backports_datetime_fromisoformat-2.0.1-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c28c95d6df2a44fa3540e18e484596c03e8ff7112e2f93b664f482fe3a88720b"},
+ {file = "backports_datetime_fromisoformat-2.0.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91042b53de903e3725209ad6d69b6994ae4819614c0decd62d05dfea23f35e2b"},
+]
+
+[[package]]
+name = "backports-zoneinfo"
+version = "0.2.1"
+description = "Backport of the standard library zoneinfo module"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "backports.zoneinfo-0.2.1-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:da6013fd84a690242c310d77ddb8441a559e9cb3d3d59ebac9aca1a57b2e18bc"},
+ {file = "backports.zoneinfo-0.2.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:89a48c0d158a3cc3f654da4c2de1ceba85263fafb861b98b59040a5086259722"},
+ {file = "backports.zoneinfo-0.2.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:1c5742112073a563c81f786e77514969acb58649bcdf6cdf0b4ed31a348d4546"},
+ {file = "backports.zoneinfo-0.2.1-cp36-cp36m-win32.whl", hash = "sha256:e8236383a20872c0cdf5a62b554b27538db7fa1bbec52429d8d106effbaeca08"},
+ {file = "backports.zoneinfo-0.2.1-cp36-cp36m-win_amd64.whl", hash = "sha256:8439c030a11780786a2002261569bdf362264f605dfa4d65090b64b05c9f79a7"},
+ {file = "backports.zoneinfo-0.2.1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:f04e857b59d9d1ccc39ce2da1021d196e47234873820cbeaad210724b1ee28ac"},
+ {file = "backports.zoneinfo-0.2.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:17746bd546106fa389c51dbea67c8b7c8f0d14b5526a579ca6ccf5ed72c526cf"},
+ {file = "backports.zoneinfo-0.2.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5c144945a7752ca544b4b78c8c41544cdfaf9786f25fe5ffb10e838e19a27570"},
+ {file = "backports.zoneinfo-0.2.1-cp37-cp37m-win32.whl", hash = "sha256:e55b384612d93be96506932a786bbcde5a2db7a9e6a4bb4bffe8b733f5b9036b"},
+ {file = "backports.zoneinfo-0.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a76b38c52400b762e48131494ba26be363491ac4f9a04c1b7e92483d169f6582"},
+ {file = "backports.zoneinfo-0.2.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:8961c0f32cd0336fb8e8ead11a1f8cd99ec07145ec2931122faaac1c8f7fd987"},
+ {file = "backports.zoneinfo-0.2.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e81b76cace8eda1fca50e345242ba977f9be6ae3945af8d46326d776b4cf78d1"},
+ {file = "backports.zoneinfo-0.2.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7b0a64cda4145548fed9efc10322770f929b944ce5cee6c0dfe0c87bf4c0c8c9"},
+ {file = "backports.zoneinfo-0.2.1-cp38-cp38-win32.whl", hash = "sha256:1b13e654a55cd45672cb54ed12148cd33628f672548f373963b0bff67b217328"},
+ {file = "backports.zoneinfo-0.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:4a0f800587060bf8880f954dbef70de6c11bbe59c673c3d818921f042f9954a6"},
+ {file = "backports.zoneinfo-0.2.1.tar.gz", hash = "sha256:fadbfe37f74051d024037f223b8e001611eac868b5c5b06144ef4d8b799862f2"},
+]
+
+[package.extras]
+tzdata = ["tzdata"]
+
[[package]]
name = "beautifulsoup4"
version = "4.12.2"
@@ -118,117 +158,56 @@ soupsieve = ">1.2"
html5lib = ["html5lib"]
lxml = ["lxml"]
-[[package]]
-name = "binaryornot"
-version = "0.4.4"
-description = "Ultra-lightweight pure Python package to check if a file is binary or text."
-optional = false
-python-versions = "*"
-files = [
- {file = "binaryornot-0.4.4-py2.py3-none-any.whl", hash = "sha256:b8b71173c917bddcd2c16070412e369c3ed7f0528926f70cac18a6c97fd563e4"},
- {file = "binaryornot-0.4.4.tar.gz", hash = "sha256:359501dfc9d40632edc9fac890e19542db1a287bbcfa58175b66658392018061"},
-]
-
-[package.dependencies]
-chardet = ">=3.0.2"
-
-[[package]]
-name = "black"
-version = "23.3.0"
-description = "The uncompromising code formatter."
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "black-23.3.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:0945e13506be58bf7db93ee5853243eb368ace1c08a24c65ce108986eac65915"},
- {file = "black-23.3.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:67de8d0c209eb5b330cce2469503de11bca4085880d62f1628bd9972cc3366b9"},
- {file = "black-23.3.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:7c3eb7cea23904399866c55826b31c1f55bbcd3890ce22ff70466b907b6775c2"},
- {file = "black-23.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32daa9783106c28815d05b724238e30718f34155653d4d6e125dc7daec8e260c"},
- {file = "black-23.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:35d1381d7a22cc5b2be2f72c7dfdae4072a3336060635718cc7e1ede24221d6c"},
- {file = "black-23.3.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:a8a968125d0a6a404842fa1bf0b349a568634f856aa08ffaff40ae0dfa52e7c6"},
- {file = "black-23.3.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c7ab5790333c448903c4b721b59c0d80b11fe5e9803d8703e84dcb8da56fec1b"},
- {file = "black-23.3.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:a6f6886c9869d4daae2d1715ce34a19bbc4b95006d20ed785ca00fa03cba312d"},
- {file = "black-23.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f3c333ea1dd6771b2d3777482429864f8e258899f6ff05826c3a4fcc5ce3f70"},
- {file = "black-23.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:11c410f71b876f961d1de77b9699ad19f939094c3a677323f43d7a29855fe326"},
- {file = "black-23.3.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:1d06691f1eb8de91cd1b322f21e3bfc9efe0c7ca1f0e1eb1db44ea367dff656b"},
- {file = "black-23.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50cb33cac881766a5cd9913e10ff75b1e8eb71babf4c7104f2e9c52da1fb7de2"},
- {file = "black-23.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e114420bf26b90d4b9daa597351337762b63039752bdf72bf361364c1aa05925"},
- {file = "black-23.3.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:48f9d345675bb7fbc3dd85821b12487e1b9a75242028adad0333ce36ed2a6d27"},
- {file = "black-23.3.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:714290490c18fb0126baa0fca0a54ee795f7502b44177e1ce7624ba1c00f2331"},
- {file = "black-23.3.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:064101748afa12ad2291c2b91c960be28b817c0c7eaa35bec09cc63aa56493c5"},
- {file = "black-23.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:562bd3a70495facf56814293149e51aa1be9931567474993c7942ff7d3533961"},
- {file = "black-23.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:e198cf27888ad6f4ff331ca1c48ffc038848ea9f031a3b40ba36aced7e22f2c8"},
- {file = "black-23.3.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:3238f2aacf827d18d26db07524e44741233ae09a584273aa059066d644ca7b30"},
- {file = "black-23.3.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:f0bd2f4a58d6666500542b26354978218a9babcdc972722f4bf90779524515f3"},
- {file = "black-23.3.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:92c543f6854c28a3c7f39f4d9b7694f9a6eb9d3c5e2ece488c327b6e7ea9b266"},
- {file = "black-23.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a150542a204124ed00683f0db1f5cf1c2aaaa9cc3495b7a3b5976fb136090ab"},
- {file = "black-23.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:6b39abdfb402002b8a7d030ccc85cf5afff64ee90fa4c5aebc531e3ad0175ddb"},
- {file = "black-23.3.0-py3-none-any.whl", hash = "sha256:ec751418022185b0c1bb7d7736e6933d40bbb14c14a0abcf9123d1b159f98dd4"},
- {file = "black-23.3.0.tar.gz", hash = "sha256:1c7b8d606e728a41ea1ccbd7264677e494e87cf630e399262ced92d4a8dac940"},
-]
-
-[package.dependencies]
-click = ">=8.0.0"
-mypy-extensions = ">=0.4.3"
-packaging = ">=22.0"
-pathspec = ">=0.9.0"
-platformdirs = ">=2"
-tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
-typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\" and implementation_name == \"cpython\""}
-typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}
-
-[package.extras]
-colorama = ["colorama (>=0.4.3)"]
-d = ["aiohttp (>=3.7.4)"]
-jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
-uvloop = ["uvloop (>=0.15.2)"]
-
[[package]]
name = "boto3"
-version = "1.26.157"
+version = "1.33.13"
description = "The AWS SDK for Python"
optional = true
python-versions = ">= 3.7"
files = [
- {file = "boto3-1.26.157-py3-none-any.whl", hash = "sha256:718b236aafc3f106d17cd5c4f513fc2f40bfa995c0cb730ecc893e9c808c0385"},
- {file = "boto3-1.26.157.tar.gz", hash = "sha256:7a8117dfe9ba1f203d73b3df32a4ebdb895813189635f126fa256e1dea37ee8d"},
+ {file = "boto3-1.33.13-py3-none-any.whl", hash = "sha256:5f278b95fb2b32f3d09d950759a05664357ba35d81107bab1537c4ddd212cd8c"},
+ {file = "boto3-1.33.13.tar.gz", hash = "sha256:0e966b8a475ecb06cc0846304454b8da2473d4c8198a45dfb2c5304871986883"},
]
[package.dependencies]
-botocore = ">=1.29.157,<1.30.0"
+botocore = ">=1.33.13,<1.34.0"
jmespath = ">=0.7.1,<2.0.0"
-s3transfer = ">=0.6.0,<0.7.0"
+s3transfer = ">=0.8.2,<0.9.0"
[package.extras]
crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
[[package]]
name = "botocore"
-version = "1.29.157"
+version = "1.33.13"
description = "Low-level, data-driven core of boto 3."
optional = true
python-versions = ">= 3.7"
files = [
- {file = "botocore-1.29.157-py3-none-any.whl", hash = "sha256:ccbf948c040d68b6a22570e73dd63cb3b07ce33f4032e9b1d502d2fae55c3b80"},
- {file = "botocore-1.29.157.tar.gz", hash = "sha256:af2a7b6417bf3bbf00ab22aa61a2d7d839a8a8a62e7975c18c80c55c88dc7fcf"},
+ {file = "botocore-1.33.13-py3-none-any.whl", hash = "sha256:aeadccf4b7c674c7d47e713ef34671b834bc3e89723ef96d994409c9f54666e6"},
+ {file = "botocore-1.33.13.tar.gz", hash = "sha256:fb577f4cb175605527458b04571451db1bd1a2036976b626206036acd4496617"},
]
[package.dependencies]
jmespath = ">=0.7.1,<2.0.0"
python-dateutil = ">=2.1,<3.0.0"
-urllib3 = ">=1.25.4,<1.27"
+urllib3 = [
+ {version = ">=1.25.4,<1.27", markers = "python_version < \"3.10\""},
+ {version = ">=1.25.4,<2.1", markers = "python_version >= \"3.10\""},
+]
[package.extras]
-crt = ["awscrt (==0.16.9)"]
+crt = ["awscrt (==0.19.17)"]
[[package]]
name = "certifi"
-version = "2023.5.7"
+version = "2023.11.17"
description = "Python package for providing Mozilla's CA Bundle."
optional = false
python-versions = ">=3.6"
files = [
- {file = "certifi-2023.5.7-py3-none-any.whl", hash = "sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716"},
- {file = "certifi-2023.5.7.tar.gz", hash = "sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7"},
+ {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"},
+ {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"},
]
[[package]]
@@ -307,110 +286,114 @@ files = [
[package.dependencies]
pycparser = "*"
-[[package]]
-name = "chardet"
-version = "5.1.0"
-description = "Universal encoding detector for Python 3"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "chardet-5.1.0-py3-none-any.whl", hash = "sha256:362777fb014af596ad31334fde1e8c327dfdb076e1960d1694662d46a6917ab9"},
- {file = "chardet-5.1.0.tar.gz", hash = "sha256:0d62712b956bc154f85fb0a266e2a3c5913c2967e00348701b32411d6def31e5"},
-]
-
[[package]]
name = "charset-normalizer"
-version = "3.1.0"
+version = "3.3.2"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
optional = false
python-versions = ">=3.7.0"
files = [
- {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"},
- {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"},
+ {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"},
+ {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"},
]
[[package]]
name = "click"
-version = "8.1.5"
+version = "8.1.7"
description = "Composable command line interface toolkit"
optional = false
python-versions = ">=3.7"
files = [
- {file = "click-8.1.5-py3-none-any.whl", hash = "sha256:e576aa487d679441d7d30abb87e1b43d24fc53bffb8758443b1a9e1cee504548"},
- {file = "click-8.1.5.tar.gz", hash = "sha256:4be4b1af8d665c6d942909916d31a213a106800c47d0eeba73d34da3cbc11367"},
+ {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"},
+ {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"},
]
[package.dependencies]
@@ -428,70 +411,6 @@ files = [
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
]
-[[package]]
-name = "commitizen"
-version = "3.4.0"
-description = "Python commitizen client tool"
-optional = false
-python-versions = ">=3.7,<4.0"
-files = [
- {file = "commitizen-3.4.0-py3-none-any.whl", hash = "sha256:5c58052099a6512da66a893f09e98e1f0d94ed02720a4e8d5747d4d409d59cfb"},
- {file = "commitizen-3.4.0.tar.gz", hash = "sha256:ab17db8c4f7258d9cdcc620046aa63d2139756ef78b2174cfa9f9c5e383eaf27"},
-]
-
-[package.dependencies]
-argcomplete = ">=1.12.1,<3.1"
-charset-normalizer = ">=2.1.0,<4"
-colorama = ">=0.4.1,<0.5.0"
-decli = ">=0.6.0,<0.7.0"
-importlib_metadata = ">=4.13,<7"
-jinja2 = ">=2.10.3"
-packaging = ">=19"
-pyyaml = ">=3.08"
-questionary = ">=1.4.0,<2.0.0"
-termcolor = ">=1.1,<3"
-tomlkit = ">=0.5.3,<1.0.0"
-typing-extensions = {version = ">=4.0.1,<5.0.0", markers = "python_version < \"3.8\""}
-
-[[package]]
-name = "commitizen-version-bump"
-version = "0.1.0"
-description = "Commitizen customized for Meltano projects (https://commitizen-tools.github.io/commitizen/customization)"
-optional = false
-python-versions = "^3.7"
-files = []
-develop = false
-
-[package.dependencies]
-commitizen = ">=3.0.0,<4.0.0"
-PyGithub = "^1.57"
-
-[package.source]
-type = "git"
-url = "https://github.com/meltano/commitizen-version-bump.git"
-reference = "main"
-resolved_reference = "e2e6d5d13d39eae1f37e3a275c0d3d3e38c18439"
-
-[[package]]
-name = "cookiecutter"
-version = "2.2.2"
-description = "A command-line utility that creates projects from project templates, e.g. creating a Python package project from a Python package project template."
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "cookiecutter-2.2.2-py3-none-any.whl", hash = "sha256:4feb7485520dd7453e3094d8f3955601156a0fab0d0b90a2c8c74f6dc2cbaac6"},
- {file = "cookiecutter-2.2.2.tar.gz", hash = "sha256:3b475d17573a7785b4a22fab693be249840e235a92c93c0fa088b39e9193f194"},
-]
-
-[package.dependencies]
-arrow = "*"
-binaryornot = ">=0.4.4"
-click = ">=7.0,<9.0.0"
-Jinja2 = ">=2.7,<4.0.0"
-python-slugify = ">=4.0.0"
-pyyaml = ">=5.3.1"
-requests = ">=2.23.0"
-
[[package]]
name = "coverage"
version = "7.2.7"
@@ -567,36 +486,103 @@ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.1
[package.extras]
toml = ["tomli"]
+[[package]]
+name = "coverage"
+version = "7.4.0"
+description = "Code coverage measurement for Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "coverage-7.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36b0ea8ab20d6a7564e89cb6135920bc9188fb5f1f7152e94e8300b7b189441a"},
+ {file = "coverage-7.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0676cd0ba581e514b7f726495ea75aba3eb20899d824636c6f59b0ed2f88c471"},
+ {file = "coverage-7.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ca5c71a5a1765a0f8f88022c52b6b8be740e512980362f7fdbb03725a0d6b9"},
+ {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7c97726520f784239f6c62506bc70e48d01ae71e9da128259d61ca5e9788516"},
+ {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:815ac2d0f3398a14286dc2cea223a6f338109f9ecf39a71160cd1628786bc6f5"},
+ {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:80b5ee39b7f0131ebec7968baa9b2309eddb35b8403d1869e08f024efd883566"},
+ {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5b2ccb7548a0b65974860a78c9ffe1173cfb5877460e5a229238d985565574ae"},
+ {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:995ea5c48c4ebfd898eacb098164b3cc826ba273b3049e4a889658548e321b43"},
+ {file = "coverage-7.4.0-cp310-cp310-win32.whl", hash = "sha256:79287fd95585ed36e83182794a57a46aeae0b64ca53929d1176db56aacc83451"},
+ {file = "coverage-7.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b14b4f8760006bfdb6e08667af7bc2d8d9bfdb648351915315ea17645347137"},
+ {file = "coverage-7.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:04387a4a6ecb330c1878907ce0dc04078ea72a869263e53c72a1ba5bbdf380ca"},
+ {file = "coverage-7.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea81d8f9691bb53f4fb4db603203029643caffc82bf998ab5b59ca05560f4c06"},
+ {file = "coverage-7.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74775198b702868ec2d058cb92720a3c5a9177296f75bd97317c787daf711505"},
+ {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76f03940f9973bfaee8cfba70ac991825611b9aac047e5c80d499a44079ec0bc"},
+ {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485e9f897cf4856a65a57c7f6ea3dc0d4e6c076c87311d4bc003f82cfe199d25"},
+ {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6ae8c9d301207e6856865867d762a4b6fd379c714fcc0607a84b92ee63feff70"},
+ {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bf477c355274a72435ceb140dc42de0dc1e1e0bf6e97195be30487d8eaaf1a09"},
+ {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:83c2dda2666fe32332f8e87481eed056c8b4d163fe18ecc690b02802d36a4d26"},
+ {file = "coverage-7.4.0-cp311-cp311-win32.whl", hash = "sha256:697d1317e5290a313ef0d369650cfee1a114abb6021fa239ca12b4849ebbd614"},
+ {file = "coverage-7.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:26776ff6c711d9d835557ee453082025d871e30b3fd6c27fcef14733f67f0590"},
+ {file = "coverage-7.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:13eaf476ec3e883fe3e5fe3707caeb88268a06284484a3daf8250259ef1ba143"},
+ {file = "coverage-7.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846f52f46e212affb5bcf131c952fb4075b55aae6b61adc9856222df89cbe3e2"},
+ {file = "coverage-7.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f66da8695719ccf90e794ed567a1549bb2644a706b41e9f6eae6816b398c4a"},
+ {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:164fdcc3246c69a6526a59b744b62e303039a81e42cfbbdc171c91a8cc2f9446"},
+ {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:316543f71025a6565677d84bc4df2114e9b6a615aa39fb165d697dba06a54af9"},
+ {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bb1de682da0b824411e00a0d4da5a784ec6496b6850fdf8c865c1d68c0e318dd"},
+ {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0e8d06778e8fbffccfe96331a3946237f87b1e1d359d7fbe8b06b96c95a5407a"},
+ {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a56de34db7b7ff77056a37aedded01b2b98b508227d2d0979d373a9b5d353daa"},
+ {file = "coverage-7.4.0-cp312-cp312-win32.whl", hash = "sha256:51456e6fa099a8d9d91497202d9563a320513fcf59f33991b0661a4a6f2ad450"},
+ {file = "coverage-7.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:cd3c1e4cb2ff0083758f09be0f77402e1bdf704adb7f89108007300a6da587d0"},
+ {file = "coverage-7.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d1bf53c4c8de58d22e0e956a79a5b37f754ed1ffdbf1a260d9dcfa2d8a325e"},
+ {file = "coverage-7.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:109f5985182b6b81fe33323ab4707011875198c41964f014579cf82cebf2bb85"},
+ {file = "coverage-7.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cc9d4bc55de8003663ec94c2f215d12d42ceea128da8f0f4036235a119c88ac"},
+ {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc6d65b21c219ec2072c1293c505cf36e4e913a3f936d80028993dd73c7906b1"},
+ {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a10a4920def78bbfff4eff8a05c51be03e42f1c3735be42d851f199144897ba"},
+ {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b8e99f06160602bc64da35158bb76c73522a4010f0649be44a4e167ff8555952"},
+ {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7d360587e64d006402b7116623cebf9d48893329ef035278969fa3bbf75b697e"},
+ {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:29f3abe810930311c0b5d1a7140f6395369c3db1be68345638c33eec07535105"},
+ {file = "coverage-7.4.0-cp38-cp38-win32.whl", hash = "sha256:5040148f4ec43644702e7b16ca864c5314ccb8ee0751ef617d49aa0e2d6bf4f2"},
+ {file = "coverage-7.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:9864463c1c2f9cb3b5db2cf1ff475eed2f0b4285c2aaf4d357b69959941aa555"},
+ {file = "coverage-7.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:936d38794044b26c99d3dd004d8af0035ac535b92090f7f2bb5aa9c8e2f5cd42"},
+ {file = "coverage-7.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:799c8f873794a08cdf216aa5d0531c6a3747793b70c53f70e98259720a6fe2d7"},
+ {file = "coverage-7.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7defbb9737274023e2d7af02cac77043c86ce88a907c58f42b580a97d5bcca9"},
+ {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1526d265743fb49363974b7aa8d5899ff64ee07df47dd8d3e37dcc0818f09ed"},
+ {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf635a52fc1ea401baf88843ae8708591aa4adff875e5c23220de43b1ccf575c"},
+ {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:756ded44f47f330666843b5781be126ab57bb57c22adbb07d83f6b519783b870"},
+ {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0eb3c2f32dabe3a4aaf6441dde94f35687224dfd7eb2a7f47f3fd9428e421058"},
+ {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bfd5db349d15c08311702611f3dccbef4b4e2ec148fcc636cf8739519b4a5c0f"},
+ {file = "coverage-7.4.0-cp39-cp39-win32.whl", hash = "sha256:53d7d9158ee03956e0eadac38dfa1ec8068431ef8058fe6447043db1fb40d932"},
+ {file = "coverage-7.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfd2a8b6b0d8e66e944d47cdec2f47c48fef2ba2f2dff5a9a75757f64172857e"},
+ {file = "coverage-7.4.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:c530833afc4707fe48524a44844493f36d8727f04dcce91fb978c414a8556cc6"},
+ {file = "coverage-7.4.0.tar.gz", hash = "sha256:707c0f58cb1712b8809ece32b68996ee1e609f71bd14615bd8f87a1293cb610e"},
+]
+
+[package.dependencies]
+tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""}
+
+[package.extras]
+toml = ["tomli"]
+
[[package]]
name = "cryptography"
-version = "41.0.2"
+version = "41.0.7"
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
optional = false
python-versions = ">=3.7"
files = [
- {file = "cryptography-41.0.2-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:01f1d9e537f9a15b037d5d9ee442b8c22e3ae11ce65ea1f3316a41c78756b711"},
- {file = "cryptography-41.0.2-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:079347de771f9282fbfe0e0236c716686950c19dee1b76240ab09ce1624d76d7"},
- {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:439c3cc4c0d42fa999b83ded80a9a1fb54d53c58d6e59234cfe97f241e6c781d"},
- {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f14ad275364c8b4e525d018f6716537ae7b6d369c094805cae45300847e0894f"},
- {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:84609ade00a6ec59a89729e87a503c6e36af98ddcd566d5f3be52e29ba993182"},
- {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:49c3222bb8f8e800aead2e376cbef687bc9e3cb9b58b29a261210456a7783d83"},
- {file = "cryptography-41.0.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:d73f419a56d74fef257955f51b18d046f3506270a5fd2ac5febbfa259d6c0fa5"},
- {file = "cryptography-41.0.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:2a034bf7d9ca894720f2ec1d8b7b5832d7e363571828037f9e0c4f18c1b58a58"},
- {file = "cryptography-41.0.2-cp37-abi3-win32.whl", hash = "sha256:d124682c7a23c9764e54ca9ab5b308b14b18eba02722b8659fb238546de83a76"},
- {file = "cryptography-41.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:9c3fe6534d59d071ee82081ca3d71eed3210f76ebd0361798c74abc2bcf347d4"},
- {file = "cryptography-41.0.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a719399b99377b218dac6cf547b6ec54e6ef20207b6165126a280b0ce97e0d2a"},
- {file = "cryptography-41.0.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:182be4171f9332b6741ee818ec27daff9fb00349f706629f5cbf417bd50e66fd"},
- {file = "cryptography-41.0.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7a9a3bced53b7f09da251685224d6a260c3cb291768f54954e28f03ef14e3766"},
- {file = "cryptography-41.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f0dc40e6f7aa37af01aba07277d3d64d5a03dc66d682097541ec4da03cc140ee"},
- {file = "cryptography-41.0.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:674b669d5daa64206c38e507808aae49904c988fa0a71c935e7006a3e1e83831"},
- {file = "cryptography-41.0.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7af244b012711a26196450d34f483357e42aeddb04128885d95a69bd8b14b69b"},
- {file = "cryptography-41.0.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9b6d717393dbae53d4e52684ef4f022444fc1cce3c48c38cb74fca29e1f08eaa"},
- {file = "cryptography-41.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:192255f539d7a89f2102d07d7375b1e0a81f7478925b3bc2e0549ebf739dae0e"},
- {file = "cryptography-41.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f772610fe364372de33d76edcd313636a25684edb94cee53fd790195f5989d14"},
- {file = "cryptography-41.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b332cba64d99a70c1e0836902720887fb4529ea49ea7f5462cf6640e095e11d2"},
- {file = "cryptography-41.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9a6673c1828db6270b76b22cc696f40cde9043eb90373da5c2f8f2158957f42f"},
- {file = "cryptography-41.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:342f3767e25876751e14f8459ad85e77e660537ca0a066e10e75df9c9e9099f0"},
- {file = "cryptography-41.0.2.tar.gz", hash = "sha256:7d230bf856164de164ecb615ccc14c7fc6de6906ddd5b491f3af90d3514c925c"},
+ {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:3c78451b78313fa81607fa1b3f1ae0a5ddd8014c38a02d9db0616133987b9cdf"},
+ {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:928258ba5d6f8ae644e764d0f996d61a8777559f72dfeb2eea7e2fe0ad6e782d"},
+ {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a1b41bc97f1ad230a41657d9155113c7521953869ae57ac39ac7f1bb471469a"},
+ {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:841df4caa01008bad253bce2a6f7b47f86dc9f08df4b433c404def869f590a15"},
+ {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5429ec739a29df2e29e15d082f1d9ad683701f0ec7709ca479b3ff2708dae65a"},
+ {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:43f2552a2378b44869fe8827aa19e69512e3245a219104438692385b0ee119d1"},
+ {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:af03b32695b24d85a75d40e1ba39ffe7db7ffcb099fe507b39fd41a565f1b157"},
+ {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:49f0805fc0b2ac8d4882dd52f4a3b935b210935d500b6b805f321addc8177406"},
+ {file = "cryptography-41.0.7-cp37-abi3-win32.whl", hash = "sha256:f983596065a18a2183e7f79ab3fd4c475205b839e02cbc0efbbf9666c4b3083d"},
+ {file = "cryptography-41.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:90452ba79b8788fa380dfb587cca692976ef4e757b194b093d845e8d99f612f2"},
+ {file = "cryptography-41.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:079b85658ea2f59c4f43b70f8119a52414cdb7be34da5d019a77bf96d473b960"},
+ {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b640981bf64a3e978a56167594a0e97db71c89a479da8e175d8bb5be5178c003"},
+ {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e3114da6d7f95d2dee7d3f4eec16dacff819740bbab931aff8648cb13c5ff5e7"},
+ {file = "cryptography-41.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d5ec85080cce7b0513cfd233914eb8b7bbd0633f1d1703aa28d1dd5a72f678ec"},
+ {file = "cryptography-41.0.7-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7a698cb1dac82c35fcf8fe3417a3aaba97de16a01ac914b89a0889d364d2f6be"},
+ {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:37a138589b12069efb424220bf78eac59ca68b95696fc622b6ccc1c0a197204a"},
+ {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:68a2dec79deebc5d26d617bfdf6e8aab065a4f34934b22d3b5010df3ba36612c"},
+ {file = "cryptography-41.0.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:09616eeaef406f99046553b8a40fbf8b1e70795a91885ba4c96a70793de5504a"},
+ {file = "cryptography-41.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48a0476626da912a44cc078f9893f292f0b3e4c739caf289268168d8f4702a39"},
+ {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c7f3201ec47d5207841402594f1d7950879ef890c0c495052fa62f58283fde1a"},
+ {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c5ca78485a255e03c32b513f8c2bc39fedb7f5c5f8535545bdc223a03b24f248"},
+ {file = "cryptography-41.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d6c391c021ab1f7a82da5d8d0b3cee2f4b2c455ec86c8aebbc84837a631ff309"},
+ {file = "cryptography-41.0.7.tar.gz", hash = "sha256:13f93ce9bea8016c253b34afc6bd6a75993e5c40672ed5405a9c832f0d4a00bc"},
]
[package.dependencies]
@@ -612,56 +598,6 @@ ssh = ["bcrypt (>=3.1.5)"]
test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"]
test-randomorder = ["pytest-randomly"]
-[[package]]
-name = "darglint"
-version = "1.8.1"
-description = "A utility for ensuring Google-style docstrings stay up to date with the source code."
-optional = false
-python-versions = ">=3.6,<4.0"
-files = [
- {file = "darglint-1.8.1-py3-none-any.whl", hash = "sha256:5ae11c259c17b0701618a20c3da343a3eb98b3bc4b5a83d31cdd94f5ebdced8d"},
- {file = "darglint-1.8.1.tar.gz", hash = "sha256:080d5106df149b199822e7ee7deb9c012b49891538f14a11be681044f0bb20da"},
-]
-
-[[package]]
-name = "decli"
-version = "0.6.1"
-description = "Minimal, easy-to-use, declarative cli tool"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "decli-0.6.1-py3-none-any.whl", hash = "sha256:7815ac58617764e1a200d7cadac6315fcaacc24d727d182f9878dd6378ccf869"},
- {file = "decli-0.6.1.tar.gz", hash = "sha256:ed88ccb947701e8e5509b7945fda56e150e2ac74a69f25d47ac85ef30ab0c0f0"},
-]
-
-[[package]]
-name = "decorator"
-version = "5.1.1"
-description = "Decorators for Humans"
-optional = false
-python-versions = ">=3.5"
-files = [
- {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"},
- {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"},
-]
-
-[[package]]
-name = "deprecated"
-version = "1.2.14"
-description = "Python @deprecated decorator to deprecate old python classes, functions or methods."
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-files = [
- {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"},
- {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"},
-]
-
-[package.dependencies]
-wrapt = ">=1.10,<2"
-
-[package.extras]
-dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"]
-
[[package]]
name = "docutils"
version = "0.19"
@@ -674,80 +610,96 @@ files = [
]
[[package]]
-name = "exceptiongroup"
-version = "1.1.1"
-description = "Backport of PEP 654 (exception groups)"
+name = "duckdb"
+version = "0.9.2"
+description = "DuckDB embedded database"
optional = false
-python-versions = ">=3.7"
-files = [
- {file = "exceptiongroup-1.1.1-py3-none-any.whl", hash = "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e"},
- {file = "exceptiongroup-1.1.1.tar.gz", hash = "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785"},
-]
-
-[package.extras]
-test = ["pytest (>=6)"]
-
-[[package]]
-name = "flake8"
-version = "3.9.2"
-description = "the modular source code checker: pep8 pyflakes and co"
-optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
+python-versions = ">=3.7.0"
files = [
- {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"},
- {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"},
-]
-
-[package.dependencies]
-importlib-metadata = {version = "*", markers = "python_version < \"3.8\""}
-mccabe = ">=0.6.0,<0.7.0"
-pycodestyle = ">=2.7.0,<2.8.0"
-pyflakes = ">=2.3.0,<2.4.0"
-
-[[package]]
-name = "flake8-annotations"
-version = "2.9.1"
-description = "Flake8 Type Annotation Checks"
+ {file = "duckdb-0.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:aadcea5160c586704c03a8a796c06a8afffbefefb1986601104a60cb0bfdb5ab"},
+ {file = "duckdb-0.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:08215f17147ed83cbec972175d9882387366de2ed36c21cbe4add04b39a5bcb4"},
+ {file = "duckdb-0.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ee6c2a8aba6850abef5e1be9dbc04b8e72a5b2c2b67f77892317a21fae868fe7"},
+ {file = "duckdb-0.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ff49f3da9399900fd58b5acd0bb8bfad22c5147584ad2427a78d937e11ec9d0"},
+ {file = "duckdb-0.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd5ac5baf8597efd2bfa75f984654afcabcd698342d59b0e265a0bc6f267b3f0"},
+ {file = "duckdb-0.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:81c6df905589a1023a27e9712edb5b724566587ef280a0c66a7ec07c8083623b"},
+ {file = "duckdb-0.9.2-cp310-cp310-win32.whl", hash = "sha256:a298cd1d821c81d0dec8a60878c4b38c1adea04a9675fb6306c8f9083bbf314d"},
+ {file = "duckdb-0.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:492a69cd60b6cb4f671b51893884cdc5efc4c3b2eb76057a007d2a2295427173"},
+ {file = "duckdb-0.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:061a9ea809811d6e3025c5de31bc40e0302cfb08c08feefa574a6491e882e7e8"},
+ {file = "duckdb-0.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a43f93be768af39f604b7b9b48891f9177c9282a408051209101ff80f7450d8f"},
+ {file = "duckdb-0.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ac29c8c8f56fff5a681f7bf61711ccb9325c5329e64f23cb7ff31781d7b50773"},
+ {file = "duckdb-0.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b14d98d26bab139114f62ade81350a5342f60a168d94b27ed2c706838f949eda"},
+ {file = "duckdb-0.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:796a995299878913e765b28cc2b14c8e44fae2f54ab41a9ee668c18449f5f833"},
+ {file = "duckdb-0.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6cb64ccfb72c11ec9c41b3cb6181b6fd33deccceda530e94e1c362af5f810ba1"},
+ {file = "duckdb-0.9.2-cp311-cp311-win32.whl", hash = "sha256:930740cb7b2cd9e79946e1d3a8f66e15dc5849d4eaeff75c8788d0983b9256a5"},
+ {file = "duckdb-0.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:c28f13c45006fd525001b2011cdf91fa216530e9751779651e66edc0e446be50"},
+ {file = "duckdb-0.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fbce7bbcb4ba7d99fcec84cec08db40bc0dd9342c6c11930ce708817741faeeb"},
+ {file = "duckdb-0.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15a82109a9e69b1891f0999749f9e3265f550032470f51432f944a37cfdc908b"},
+ {file = "duckdb-0.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9490fb9a35eb74af40db5569d90df8a04a6f09ed9a8c9caa024998c40e2506aa"},
+ {file = "duckdb-0.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:696d5c6dee86c1a491ea15b74aafe34ad2b62dcd46ad7e03b1d00111ca1a8c68"},
+ {file = "duckdb-0.9.2-cp37-cp37m-win32.whl", hash = "sha256:4f0935300bdf8b7631ddfc838f36a858c1323696d8c8a2cecbd416bddf6b0631"},
+ {file = "duckdb-0.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:0aab900f7510e4d2613263865570203ddfa2631858c7eb8cbed091af6ceb597f"},
+ {file = "duckdb-0.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:7d8130ed6a0c9421b135d0743705ea95b9a745852977717504e45722c112bf7a"},
+ {file = "duckdb-0.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:974e5de0294f88a1a837378f1f83330395801e9246f4e88ed3bfc8ada65dcbee"},
+ {file = "duckdb-0.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4fbc297b602ef17e579bb3190c94d19c5002422b55814421a0fc11299c0c1100"},
+ {file = "duckdb-0.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1dd58a0d84a424924a35b3772419f8cd78a01c626be3147e4934d7a035a8ad68"},
+ {file = "duckdb-0.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11a1194a582c80dfb57565daa06141727e415ff5d17e022dc5f31888a5423d33"},
+ {file = "duckdb-0.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:be45d08541002a9338e568dca67ab4f20c0277f8f58a73dfc1435c5b4297c996"},
+ {file = "duckdb-0.9.2-cp38-cp38-win32.whl", hash = "sha256:dd6f88aeb7fc0bfecaca633629ff5c986ac966fe3b7dcec0b2c48632fd550ba2"},
+ {file = "duckdb-0.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:28100c4a6a04e69aa0f4a6670a6d3d67a65f0337246a0c1a429f3f28f3c40b9a"},
+ {file = "duckdb-0.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7ae5bf0b6ad4278e46e933e51473b86b4b932dbc54ff097610e5b482dd125552"},
+ {file = "duckdb-0.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e5d0bb845a80aa48ed1fd1d2d285dd352e96dc97f8efced2a7429437ccd1fe1f"},
+ {file = "duckdb-0.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ce262d74a52500d10888110dfd6715989926ec936918c232dcbaddb78fc55b4"},
+ {file = "duckdb-0.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6935240da090a7f7d2666f6d0a5e45ff85715244171ca4e6576060a7f4a1200e"},
+ {file = "duckdb-0.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5cfb93e73911696a98b9479299d19cfbc21dd05bb7ab11a923a903f86b4d06e"},
+ {file = "duckdb-0.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:64e3bc01751f31e7572d2716c3e8da8fe785f1cdc5be329100818d223002213f"},
+ {file = "duckdb-0.9.2-cp39-cp39-win32.whl", hash = "sha256:6e5b80f46487636368e31b61461940e3999986359a78660a50dfdd17dd72017c"},
+ {file = "duckdb-0.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:e6142a220180dbeea4f341708bd5f9501c5c962ce7ef47c1cadf5e8810b4cb13"},
+ {file = "duckdb-0.9.2.tar.gz", hash = "sha256:3843afeab7c3fc4a4c0b53686a4cc1d9cdbdadcbb468d60fef910355ecafd447"},
+]
+
+[[package]]
+name = "duckdb-engine"
+version = "0.10.0"
+description = "SQLAlchemy driver for duckdb"
optional = false
-python-versions = ">=3.7,<4.0"
+python-versions = ">=3.7"
files = [
- {file = "flake8-annotations-2.9.1.tar.gz", hash = "sha256:11f09efb99ae63c8f9d6b492b75fe147fbc323179fddfe00b2e56eefeca42f57"},
- {file = "flake8_annotations-2.9.1-py3-none-any.whl", hash = "sha256:a4385158a7a9fc8af1d8820a2f4c8d03387997006a83f5f8bfe5bc6085bdf88a"},
+ {file = "duckdb_engine-0.10.0-py3-none-any.whl", hash = "sha256:c408d002e83630b6bbb05fc3b26a43406085b1c22dd43e8cab00bf0b9c011ea8"},
+ {file = "duckdb_engine-0.10.0.tar.gz", hash = "sha256:5e3dad3b3513f055a4f5ec5430842249cfe03015743a7597ed1dcc0447dca565"},
]
[package.dependencies]
-attrs = ">=21.4"
-flake8 = ">=3.7"
-typed-ast = {version = ">=1.4,<2.0", markers = "python_version < \"3.8\""}
+duckdb = ">=0.4.0"
+sqlalchemy = ">=1.3.22"
[[package]]
-name = "flake8-docstrings"
-version = "1.7.0"
-description = "Extension for flake8 which uses pydocstyle to check docstrings"
+name = "exceptiongroup"
+version = "1.2.0"
+description = "Backport of PEP 654 (exception groups)"
optional = false
python-versions = ">=3.7"
files = [
- {file = "flake8_docstrings-1.7.0-py2.py3-none-any.whl", hash = "sha256:51f2344026da083fc084166a9353f5082b01f72901df422f74b4d953ae88ac75"},
- {file = "flake8_docstrings-1.7.0.tar.gz", hash = "sha256:4c8cc748dc16e6869728699e5d0d685da9a10b0ea718e090b1ba088e67a941af"},
+ {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"},
+ {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"},
]
-[package.dependencies]
-flake8 = ">=3"
-pydocstyle = ">=2.1"
+[package.extras]
+test = ["pytest (>=6)"]
[[package]]
-name = "freezegun"
-version = "1.2.2"
-description = "Let your Python tests travel through time"
+name = "filelock"
+version = "3.12.2"
+description = "A platform independent file lock."
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
files = [
- {file = "freezegun-1.2.2-py3-none-any.whl", hash = "sha256:ea1b963b993cb9ea195adbd893a48d573fda951b0da64f60883d7e988b606c9f"},
- {file = "freezegun-1.2.2.tar.gz", hash = "sha256:cd22d1ba06941384410cd967d8a99d5ae2442f57dfafeff2fda5de8dc5c05446"},
+ {file = "filelock-3.12.2-py3-none-any.whl", hash = "sha256:cbb791cdea2a72f23da6ac5b5269ab0a0d161e9ef0100e653b69049a7706d1ec"},
+ {file = "filelock-3.12.2.tar.gz", hash = "sha256:002740518d8aa59a26b0c76e10fb8c6e15eae825d34b6fdf670333fd7b938d81"},
]
-[package.dependencies]
-python-dateutil = ">=2.7"
+[package.extras]
+docs = ["furo (>=2023.5.20)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"]
+testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"]
[[package]]
name = "fs"
@@ -803,86 +755,84 @@ sphinx-basic-ng = "*"
[[package]]
name = "greenlet"
-version = "2.0.2"
+version = "3.0.3"
description = "Lightweight in-process concurrent programming"
optional = false
-python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*"
-files = [
- {file = "greenlet-2.0.2-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:bdfea8c661e80d3c1c99ad7c3ff74e6e87184895bbaca6ee8cc61209f8b9b85d"},
- {file = "greenlet-2.0.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9d14b83fab60d5e8abe587d51c75b252bcc21683f24699ada8fb275d7712f5a9"},
- {file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"},
- {file = "greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"},
- {file = "greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"},
- {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"},
- {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"},
- {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"},
- {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d75209eed723105f9596807495d58d10b3470fa6732dd6756595e89925ce2470"},
- {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3a51c9751078733d88e013587b108f1b7a1fb106d402fb390740f002b6f6551a"},
- {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"},
- {file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"},
- {file = "greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"},
- {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"},
- {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"},
- {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"},
- {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:eff4eb9b7eb3e4d0cae3d28c283dc16d9bed6b193c2e1ace3ed86ce48ea8df19"},
- {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5454276c07d27a740c5892f4907c86327b632127dd9abec42ee62e12427ff7e3"},
- {file = "greenlet-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:7cafd1208fdbe93b67c7086876f061f660cfddc44f404279c1585bbf3cdc64c5"},
- {file = "greenlet-2.0.2-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:910841381caba4f744a44bf81bfd573c94e10b3045ee00de0cbf436fe50673a6"},
- {file = "greenlet-2.0.2-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:18a7f18b82b52ee85322d7a7874e676f34ab319b9f8cce5de06067384aa8ff43"},
- {file = "greenlet-2.0.2-cp35-cp35m-win32.whl", hash = "sha256:03a8f4f3430c3b3ff8d10a2a86028c660355ab637cee9333d63d66b56f09d52a"},
- {file = "greenlet-2.0.2-cp35-cp35m-win_amd64.whl", hash = "sha256:4b58adb399c4d61d912c4c331984d60eb66565175cdf4a34792cd9600f21b394"},
- {file = "greenlet-2.0.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:703f18f3fda276b9a916f0934d2fb6d989bf0b4fb5a64825260eb9bfd52d78f0"},
- {file = "greenlet-2.0.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:32e5b64b148966d9cccc2c8d35a671409e45f195864560829f395a54226408d3"},
- {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dd11f291565a81d71dab10b7033395b7a3a5456e637cf997a6f33ebdf06f8db"},
- {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0f72c9ddb8cd28532185f54cc1453f2c16fb417a08b53a855c4e6a418edd099"},
- {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd021c754b162c0fb55ad5d6b9d960db667faad0fa2ff25bb6e1301b0b6e6a75"},
- {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:3c9b12575734155d0c09d6c3e10dbd81665d5c18e1a7c6597df72fd05990c8cf"},
- {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b9ec052b06a0524f0e35bd8790686a1da006bd911dd1ef7d50b77bfbad74e292"},
- {file = "greenlet-2.0.2-cp36-cp36m-win32.whl", hash = "sha256:dbfcfc0218093a19c252ca8eb9aee3d29cfdcb586df21049b9d777fd32c14fd9"},
- {file = "greenlet-2.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:9f35ec95538f50292f6d8f2c9c9f8a3c6540bbfec21c9e5b4b751e0a7c20864f"},
- {file = "greenlet-2.0.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:d5508f0b173e6aa47273bdc0a0b5ba055b59662ba7c7ee5119528f466585526b"},
- {file = "greenlet-2.0.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:f82d4d717d8ef19188687aa32b8363e96062911e63ba22a0cff7802a8e58e5f1"},
- {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9c59a2120b55788e800d82dfa99b9e156ff8f2227f07c5e3012a45a399620b7"},
- {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2780572ec463d44c1d3ae850239508dbeb9fed38e294c68d19a24d925d9223ca"},
- {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:937e9020b514ceedb9c830c55d5c9872abc90f4b5862f89c0887033ae33c6f73"},
- {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:36abbf031e1c0f79dd5d596bfaf8e921c41df2bdf54ee1eed921ce1f52999a86"},
- {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:18e98fb3de7dba1c0a852731c3070cf022d14f0d68b4c87a19cc1016f3bb8b33"},
- {file = "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"},
- {file = "greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"},
- {file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"},
- {file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"},
- {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"},
- {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"},
- {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acd2162a36d3de67ee896c43effcd5ee3de247eb00354db411feb025aa319857"},
- {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0bf60faf0bc2468089bdc5edd10555bab6e85152191df713e2ab1fcc86382b5a"},
- {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"},
- {file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = "sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"},
- {file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"},
- {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"},
- {file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"},
- {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"},
- {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be4ed120b52ae4d974aa40215fcdfde9194d63541c7ded40ee12eb4dda57b76b"},
- {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94c817e84245513926588caf1152e3b559ff794d505555211ca041f032abbb6b"},
- {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1a819eef4b0e0b96bb0d98d797bef17dc1b4a10e8d7446be32d1da33e095dbb8"},
- {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7efde645ca1cc441d6dc4b48c0f7101e8d86b54c8530141b09fd31cef5149ec9"},
- {file = "greenlet-2.0.2-cp39-cp39-win32.whl", hash = "sha256:ea9872c80c132f4663822dd2a08d404073a5a9b5ba6155bea72fb2a79d1093b5"},
- {file = "greenlet-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:db1a39669102a1d8d12b57de2bb7e2ec9066a6f2b3da35ae511ff93b01b5d564"},
- {file = "greenlet-2.0.2.tar.gz", hash = "sha256:e7c8dc13af7db097bed64a051d2dd49e9f0af495c26995c00a9ee842690d34c0"},
+python-versions = ">=3.7"
+files = [
+ {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"},
+ {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"},
+ {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"},
+ {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"},
+ {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"},
+ {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"},
+ {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"},
+ {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"},
+ {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"},
+ {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"},
+ {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"},
+ {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"},
+ {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"},
+ {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"},
+ {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"},
+ {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"},
+ {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"},
+ {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"},
+ {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"},
+ {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"},
+ {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"},
+ {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"},
+ {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"},
+ {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"},
+ {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"},
+ {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"},
+ {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"},
+ {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"},
+ {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"},
+ {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"},
+ {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"},
+ {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"},
+ {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"},
+ {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"},
+ {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"},
+ {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"},
+ {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"},
+ {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"},
+ {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"},
+ {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"},
+ {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"},
+ {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"},
+ {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"},
+ {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"},
+ {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"},
+ {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"},
+ {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"},
+ {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"},
+ {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"},
+ {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"},
+ {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"},
+ {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"},
+ {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"},
+ {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"},
+ {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"},
+ {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"},
+ {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"},
+ {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"},
]
[package.extras]
-docs = ["Sphinx", "docutils (<0.18)"]
+docs = ["Sphinx", "furo"]
test = ["objgraph", "psutil"]
[[package]]
name = "idna"
-version = "3.4"
+version = "3.6"
description = "Internationalized Domain Names in Applications (IDNA)"
optional = false
python-versions = ">=3.5"
files = [
- {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"},
- {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
+ {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"},
+ {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"},
]
[[package]]
@@ -898,13 +848,13 @@ files = [
[[package]]
name = "importlib-metadata"
-version = "4.13.0"
+version = "6.7.0"
description = "Read metadata from Python packages"
optional = false
python-versions = ">=3.7"
files = [
- {file = "importlib_metadata-4.13.0-py3-none-any.whl", hash = "sha256:8a8a81bcf996e74fee46f0d16bd3eaa382a7eb20fd82445c3ad11f4090334116"},
- {file = "importlib_metadata-4.13.0.tar.gz", hash = "sha256:dd0173e8f150d6815e098fd354f6414b0f079af4644ddfe90c71e2fc6174346d"},
+ {file = "importlib_metadata-6.7.0-py3-none-any.whl", hash = "sha256:cb52082e659e97afc5dac71e79de97d8681de3aa07ff18578330904a9d18e5b5"},
+ {file = "importlib_metadata-6.7.0.tar.gz", hash = "sha256:1aaf550d4f73e5d6783e7acb77aec43d49da8017410afae93822cc9cca98c4d4"},
]
[package.dependencies]
@@ -912,9 +862,9 @@ typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""}
zipp = ">=0.5"
[package.extras]
-docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"]
+docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
perf = ["ipython"]
-testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"]
+testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"]
[[package]]
name = "importlib-resources"
@@ -960,7 +910,7 @@ files = [
name = "jinja2"
version = "3.1.2"
description = "A very fast and expressive template engine."
-optional = false
+optional = true
python-versions = ">=3.7"
files = [
{file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"},
@@ -986,31 +936,28 @@ files = [
[[package]]
name = "joblib"
-version = "1.3.1"
+version = "1.3.2"
description = "Lightweight pipelining with Python functions"
optional = false
python-versions = ">=3.7"
files = [
- {file = "joblib-1.3.1-py3-none-any.whl", hash = "sha256:89cf0529520e01b3de7ac7b74a8102c90d16d54c64b5dd98cafcd14307fdf915"},
- {file = "joblib-1.3.1.tar.gz", hash = "sha256:1f937906df65329ba98013dc9692fe22a4c5e4a648112de500508b18a21b41e3"},
+ {file = "joblib-1.3.2-py3-none-any.whl", hash = "sha256:ef4331c65f239985f3f2220ecc87db222f08fd22097a3dd5698f693875f8cbb9"},
+ {file = "joblib-1.3.2.tar.gz", hash = "sha256:92f865e621e17784e7955080b6d042489e3b8e294949cc44c6eac304f59772b1"},
]
[[package]]
name = "jsonpath-ng"
-version = "1.5.3"
+version = "1.6.0"
description = "A final implementation of JSONPath for Python that aims to be standard compliant, including arithmetic and binary comparison operators and providing clear AST for metaprogramming."
optional = false
python-versions = "*"
files = [
- {file = "jsonpath-ng-1.5.3.tar.gz", hash = "sha256:a273b182a82c1256daab86a313b937059261b5c5f8c4fa3fc38b882b344dd567"},
- {file = "jsonpath_ng-1.5.3-py2-none-any.whl", hash = "sha256:f75b95dbecb8a0f3b86fd2ead21c2b022c3f5770957492b9b6196ecccfeb10aa"},
- {file = "jsonpath_ng-1.5.3-py3-none-any.whl", hash = "sha256:292a93569d74029ba75ac2dc3d3630fc0e17b2df26119a165fa1d498ca47bf65"},
+ {file = "jsonpath-ng-1.6.0.tar.gz", hash = "sha256:5483f8e9d74c39c9abfab554c070ae783c1c8cbadf5df60d561bc705ac68a07e"},
+ {file = "jsonpath_ng-1.6.0-py3-none-any.whl", hash = "sha256:6fd04833412c4b3d9299edf369542f5e67095ca84efa17cbb7f06a34958adc9f"},
]
[package.dependencies]
-decorator = "*"
ply = "*"
-six = "*"
[[package]]
name = "jsonschema"
@@ -1035,6 +982,44 @@ typing-extensions = {version = "*", markers = "python_version < \"3.8\""}
format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"]
format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"]
+[[package]]
+name = "jsonschema"
+version = "4.20.0"
+description = "An implementation of JSON Schema validation for Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "jsonschema-4.20.0-py3-none-any.whl", hash = "sha256:ed6231f0429ecf966f5bc8dfef245998220549cbbcf140f913b7464c52c3b6b3"},
+ {file = "jsonschema-4.20.0.tar.gz", hash = "sha256:4f614fd46d8d61258610998997743ec5492a648b33cf478c1ddc23ed4598a5fa"},
+]
+
+[package.dependencies]
+attrs = ">=22.2.0"
+importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""}
+jsonschema-specifications = ">=2023.03.6"
+pkgutil-resolve-name = {version = ">=1.3.10", markers = "python_version < \"3.9\""}
+referencing = ">=0.28.4"
+rpds-py = ">=0.7.1"
+
+[package.extras]
+format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"]
+format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"]
+
+[[package]]
+name = "jsonschema-specifications"
+version = "2023.12.1"
+description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"},
+ {file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"},
+]
+
+[package.dependencies]
+importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""}
+referencing = ">=0.31.0"
+
[[package]]
name = "livereload"
version = "2.6.3"
@@ -1079,7 +1064,7 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"]
name = "markupsafe"
version = "2.1.3"
description = "Safely add untrusted strings to HTML/XML markup."
-optional = false
+optional = true
python-versions = ">=3.7"
files = [
{file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"},
@@ -1102,6 +1087,16 @@ files = [
{file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"},
{file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"},
{file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"},
{file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"},
{file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"},
{file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"},
@@ -1134,17 +1129,6 @@ files = [
{file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"},
]
-[[package]]
-name = "mccabe"
-version = "0.6.1"
-description = "McCabe checker, plugin for flake8"
-optional = false
-python-versions = "*"
-files = [
- {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"},
- {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"},
-]
-
[[package]]
name = "mdit-py-plugins"
version = "0.3.5"
@@ -1232,6 +1216,53 @@ install-types = ["pip"]
python2 = ["typed-ast (>=1.4.0,<2)"]
reports = ["lxml"]
+[[package]]
+name = "mypy"
+version = "1.8.0"
+description = "Optional static typing for Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "mypy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485a8942f671120f76afffff70f259e1cd0f0cfe08f81c05d8816d958d4577d3"},
+ {file = "mypy-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:df9824ac11deaf007443e7ed2a4a26bebff98d2bc43c6da21b2b64185da011c4"},
+ {file = "mypy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afecd6354bbfb6e0160f4e4ad9ba6e4e003b767dd80d85516e71f2e955ab50d"},
+ {file = "mypy-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8963b83d53ee733a6e4196954502b33567ad07dfd74851f32be18eb932fb1cb9"},
+ {file = "mypy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e46f44b54ebddbeedbd3d5b289a893219065ef805d95094d16a0af6630f5d410"},
+ {file = "mypy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae"},
+ {file = "mypy-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3"},
+ {file = "mypy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817"},
+ {file = "mypy-1.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d"},
+ {file = "mypy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835"},
+ {file = "mypy-1.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd"},
+ {file = "mypy-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55"},
+ {file = "mypy-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218"},
+ {file = "mypy-1.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3"},
+ {file = "mypy-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e"},
+ {file = "mypy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:028cf9f2cae89e202d7b6593cd98db6759379f17a319b5faf4f9978d7084cdc6"},
+ {file = "mypy-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4e6d97288757e1ddba10dd9549ac27982e3e74a49d8d0179fc14d4365c7add66"},
+ {file = "mypy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6"},
+ {file = "mypy-1.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42419861b43e6962a649068a61f4a4839205a3ef525b858377a960b9e2de6e0d"},
+ {file = "mypy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b5b6c721bd4aabaadead3a5e6fa85c11c6c795e0c81a7215776ef8afc66de02"},
+ {file = "mypy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c1538c38584029352878a0466f03a8ee7547d7bd9f641f57a0f3017a7c905b8"},
+ {file = "mypy-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259"},
+ {file = "mypy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178def594014aa6c35a8ff411cf37d682f428b3b5617ca79029d8ae72f5402b"},
+ {file = "mypy-1.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592"},
+ {file = "mypy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:99b00bc72855812a60d253420d8a2eae839b0afa4938f09f4d2aa9bb4654263a"},
+ {file = "mypy-1.8.0-py3-none-any.whl", hash = "sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d"},
+ {file = "mypy-1.8.0.tar.gz", hash = "sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07"},
+]
+
+[package.dependencies]
+mypy-extensions = ">=1.0.0"
+tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
+typing-extensions = ">=4.1.0"
+
+[package.extras]
+dmypy = ["psutil (>=4.0)"]
+install-types = ["pip"]
+mypyc = ["setuptools (>=50)"]
+reports = ["lxml"]
+
[[package]]
name = "mypy-extensions"
version = "1.0.0"
@@ -1274,7 +1305,7 @@ testing-docutils = ["pygments", "pytest (>=7,<8)", "pytest-param-files (>=0.3.4,
name = "numpy"
version = "1.21.6"
description = "NumPy is the fundamental package for array computing with Python."
-optional = false
+optional = true
python-versions = ">=3.7,<3.11"
files = [
{file = "numpy-1.21.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8737609c3bbdd48e380d463134a35ffad3b22dc56295eff6f79fd85bd0eeeb25"},
@@ -1312,61 +1343,95 @@ files = [
[[package]]
name = "numpy"
-version = "1.24.3"
+version = "1.24.4"
description = "Fundamental package for array computing in Python"
-optional = false
+optional = true
python-versions = ">=3.8"
files = [
- {file = "numpy-1.24.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3c1104d3c036fb81ab923f507536daedc718d0ad5a8707c6061cdfd6d184e570"},
- {file = "numpy-1.24.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:202de8f38fc4a45a3eea4b63e2f376e5f2dc64ef0fa692838e31a808520efaf7"},
- {file = "numpy-1.24.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8535303847b89aa6b0f00aa1dc62867b5a32923e4d1681a35b5eef2d9591a463"},
- {file = "numpy-1.24.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d926b52ba1367f9acb76b0df6ed21f0b16a1ad87c6720a1121674e5cf63e2b6"},
- {file = "numpy-1.24.3-cp310-cp310-win32.whl", hash = "sha256:f21c442fdd2805e91799fbe044a7b999b8571bb0ab0f7850d0cb9641a687092b"},
- {file = "numpy-1.24.3-cp310-cp310-win_amd64.whl", hash = "sha256:ab5f23af8c16022663a652d3b25dcdc272ac3f83c3af4c02eb8b824e6b3ab9d7"},
- {file = "numpy-1.24.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9a7721ec204d3a237225db3e194c25268faf92e19338a35f3a224469cb6039a3"},
- {file = "numpy-1.24.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d6cc757de514c00b24ae8cf5c876af2a7c3df189028d68c0cb4eaa9cd5afc2bf"},
- {file = "numpy-1.24.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76e3f4e85fc5d4fd311f6e9b794d0c00e7002ec122be271f2019d63376f1d385"},
- {file = "numpy-1.24.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1d3c026f57ceaad42f8231305d4653d5f05dc6332a730ae5c0bea3513de0950"},
- {file = "numpy-1.24.3-cp311-cp311-win32.whl", hash = "sha256:c91c4afd8abc3908e00a44b2672718905b8611503f7ff87390cc0ac3423fb096"},
- {file = "numpy-1.24.3-cp311-cp311-win_amd64.whl", hash = "sha256:5342cf6aad47943286afa6f1609cad9b4266a05e7f2ec408e2cf7aea7ff69d80"},
- {file = "numpy-1.24.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7776ea65423ca6a15255ba1872d82d207bd1e09f6d0894ee4a64678dd2204078"},
- {file = "numpy-1.24.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ae8d0be48d1b6ed82588934aaaa179875e7dc4f3d84da18d7eae6eb3f06c242c"},
- {file = "numpy-1.24.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecde0f8adef7dfdec993fd54b0f78183051b6580f606111a6d789cd14c61ea0c"},
- {file = "numpy-1.24.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4749e053a29364d3452c034827102ee100986903263e89884922ef01a0a6fd2f"},
- {file = "numpy-1.24.3-cp38-cp38-win32.whl", hash = "sha256:d933fabd8f6a319e8530d0de4fcc2e6a61917e0b0c271fded460032db42a0fe4"},
- {file = "numpy-1.24.3-cp38-cp38-win_amd64.whl", hash = "sha256:56e48aec79ae238f6e4395886b5eaed058abb7231fb3361ddd7bfdf4eed54289"},
- {file = "numpy-1.24.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4719d5aefb5189f50887773699eaf94e7d1e02bf36c1a9d353d9f46703758ca4"},
- {file = "numpy-1.24.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0ec87a7084caa559c36e0a2309e4ecb1baa03b687201d0a847c8b0ed476a7187"},
- {file = "numpy-1.24.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea8282b9bcfe2b5e7d491d0bf7f3e2da29700cec05b49e64d6246923329f2b02"},
- {file = "numpy-1.24.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:210461d87fb02a84ef243cac5e814aad2b7f4be953b32cb53327bb49fd77fbb4"},
- {file = "numpy-1.24.3-cp39-cp39-win32.whl", hash = "sha256:784c6da1a07818491b0ffd63c6bbe5a33deaa0e25a20e1b3ea20cf0e43f8046c"},
- {file = "numpy-1.24.3-cp39-cp39-win_amd64.whl", hash = "sha256:d5036197ecae68d7f491fcdb4df90082b0d4960ca6599ba2659957aafced7c17"},
- {file = "numpy-1.24.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:352ee00c7f8387b44d19f4cada524586f07379c0d49270f87233983bc5087ca0"},
- {file = "numpy-1.24.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a7d6acc2e7524c9955e5c903160aa4ea083736fde7e91276b0e5d98e6332812"},
- {file = "numpy-1.24.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:35400e6a8d102fd07c71ed7dcadd9eb62ee9a6e84ec159bd48c28235bbb0f8e4"},
- {file = "numpy-1.24.3.tar.gz", hash = "sha256:ab344f1bf21f140adab8e47fdbc7c35a477dc01408791f8ba00d018dd0bc5155"},
+ {file = "numpy-1.24.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0bfb52d2169d58c1cdb8cc1f16989101639b34c7d3ce60ed70b19c63eba0b64"},
+ {file = "numpy-1.24.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ed094d4f0c177b1b8e7aa9cba7d6ceed51c0e569a5318ac0ca9a090680a6a1b1"},
+ {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79fc682a374c4a8ed08b331bef9c5f582585d1048fa6d80bc6c35bc384eee9b4"},
+ {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ffe43c74893dbf38c2b0a1f5428760a1a9c98285553c89e12d70a96a7f3a4d6"},
+ {file = "numpy-1.24.4-cp310-cp310-win32.whl", hash = "sha256:4c21decb6ea94057331e111a5bed9a79d335658c27ce2adb580fb4d54f2ad9bc"},
+ {file = "numpy-1.24.4-cp310-cp310-win_amd64.whl", hash = "sha256:b4bea75e47d9586d31e892a7401f76e909712a0fd510f58f5337bea9572c571e"},
+ {file = "numpy-1.24.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f136bab9c2cfd8da131132c2cf6cc27331dd6fae65f95f69dcd4ae3c3639c810"},
+ {file = "numpy-1.24.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2926dac25b313635e4d6cf4dc4e51c8c0ebfed60b801c799ffc4c32bf3d1254"},
+ {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:222e40d0e2548690405b0b3c7b21d1169117391c2e82c378467ef9ab4c8f0da7"},
+ {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7215847ce88a85ce39baf9e89070cb860c98fdddacbaa6c0da3ffb31b3350bd5"},
+ {file = "numpy-1.24.4-cp311-cp311-win32.whl", hash = "sha256:4979217d7de511a8d57f4b4b5b2b965f707768440c17cb70fbf254c4b225238d"},
+ {file = "numpy-1.24.4-cp311-cp311-win_amd64.whl", hash = "sha256:b7b1fc9864d7d39e28f41d089bfd6353cb5f27ecd9905348c24187a768c79694"},
+ {file = "numpy-1.24.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1452241c290f3e2a312c137a9999cdbf63f78864d63c79039bda65ee86943f61"},
+ {file = "numpy-1.24.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:04640dab83f7c6c85abf9cd729c5b65f1ebd0ccf9de90b270cd61935eef0197f"},
+ {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5425b114831d1e77e4b5d812b69d11d962e104095a5b9c3b641a218abcc050e"},
+ {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd80e219fd4c71fc3699fc1dadac5dcf4fd882bfc6f7ec53d30fa197b8ee22dc"},
+ {file = "numpy-1.24.4-cp38-cp38-win32.whl", hash = "sha256:4602244f345453db537be5314d3983dbf5834a9701b7723ec28923e2889e0bb2"},
+ {file = "numpy-1.24.4-cp38-cp38-win_amd64.whl", hash = "sha256:692f2e0f55794943c5bfff12b3f56f99af76f902fc47487bdfe97856de51a706"},
+ {file = "numpy-1.24.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2541312fbf09977f3b3ad449c4e5f4bb55d0dbf79226d7724211acc905049400"},
+ {file = "numpy-1.24.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9667575fb6d13c95f1b36aca12c5ee3356bf001b714fc354eb5465ce1609e62f"},
+ {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3a86ed21e4f87050382c7bc96571755193c4c1392490744ac73d660e8f564a9"},
+ {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d11efb4dbecbdf22508d55e48d9c8384db795e1b7b51ea735289ff96613ff74d"},
+ {file = "numpy-1.24.4-cp39-cp39-win32.whl", hash = "sha256:6620c0acd41dbcb368610bb2f4d83145674040025e5536954782467100aa8835"},
+ {file = "numpy-1.24.4-cp39-cp39-win_amd64.whl", hash = "sha256:befe2bf740fd8373cf56149a5c23a0f601e82869598d41f8e188a0e9869926f8"},
+ {file = "numpy-1.24.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:31f13e25b4e304632a4619d0e0777662c2ffea99fcae2029556b17d8ff958aef"},
+ {file = "numpy-1.24.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95f7ac6540e95bc440ad77f56e520da5bf877f87dca58bd095288dce8940532a"},
+ {file = "numpy-1.24.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e98f220aa76ca2a977fe435f5b04d7b3470c0a2e6312907b37ba6068f26787f2"},
+ {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"},
]
[[package]]
-name = "packaging"
-version = "23.1"
-description = "Core utilities for Python packages"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"},
- {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"},
+name = "numpy"
+version = "1.26.2"
+description = "Fundamental package for array computing in Python"
+optional = true
+python-versions = ">=3.9"
+files = [
+ {file = "numpy-1.26.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3703fc9258a4a122d17043e57b35e5ef1c5a5837c3db8be396c82e04c1cf9b0f"},
+ {file = "numpy-1.26.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cc392fdcbd21d4be6ae1bb4475a03ce3b025cd49a9be5345d76d7585aea69440"},
+ {file = "numpy-1.26.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36340109af8da8805d8851ef1d74761b3b88e81a9bd80b290bbfed61bd2b4f75"},
+ {file = "numpy-1.26.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bcc008217145b3d77abd3e4d5ef586e3bdfba8fe17940769f8aa09b99e856c00"},
+ {file = "numpy-1.26.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3ced40d4e9e18242f70dd02d739e44698df3dcb010d31f495ff00a31ef6014fe"},
+ {file = "numpy-1.26.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b272d4cecc32c9e19911891446b72e986157e6a1809b7b56518b4f3755267523"},
+ {file = "numpy-1.26.2-cp310-cp310-win32.whl", hash = "sha256:22f8fc02fdbc829e7a8c578dd8d2e15a9074b630d4da29cda483337e300e3ee9"},
+ {file = "numpy-1.26.2-cp310-cp310-win_amd64.whl", hash = "sha256:26c9d33f8e8b846d5a65dd068c14e04018d05533b348d9eaeef6c1bd787f9919"},
+ {file = "numpy-1.26.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b96e7b9c624ef3ae2ae0e04fa9b460f6b9f17ad8b4bec6d7756510f1f6c0c841"},
+ {file = "numpy-1.26.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:aa18428111fb9a591d7a9cc1b48150097ba6a7e8299fb56bdf574df650e7d1f1"},
+ {file = "numpy-1.26.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06fa1ed84aa60ea6ef9f91ba57b5ed963c3729534e6e54055fc151fad0423f0a"},
+ {file = "numpy-1.26.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96ca5482c3dbdd051bcd1fce8034603d6ebfc125a7bd59f55b40d8f5d246832b"},
+ {file = "numpy-1.26.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:854ab91a2906ef29dc3925a064fcd365c7b4da743f84b123002f6139bcb3f8a7"},
+ {file = "numpy-1.26.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f43740ab089277d403aa07567be138fc2a89d4d9892d113b76153e0e412409f8"},
+ {file = "numpy-1.26.2-cp311-cp311-win32.whl", hash = "sha256:a2bbc29fcb1771cd7b7425f98b05307776a6baf43035d3b80c4b0f29e9545186"},
+ {file = "numpy-1.26.2-cp311-cp311-win_amd64.whl", hash = "sha256:2b3fca8a5b00184828d12b073af4d0fc5fdd94b1632c2477526f6bd7842d700d"},
+ {file = "numpy-1.26.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a4cd6ed4a339c21f1d1b0fdf13426cb3b284555c27ac2f156dfdaaa7e16bfab0"},
+ {file = "numpy-1.26.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5d5244aabd6ed7f312268b9247be47343a654ebea52a60f002dc70c769048e75"},
+ {file = "numpy-1.26.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a3cdb4d9c70e6b8c0814239ead47da00934666f668426fc6e94cce869e13fd7"},
+ {file = "numpy-1.26.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa317b2325f7aa0a9471663e6093c210cb2ae9c0ad824732b307d2c51983d5b6"},
+ {file = "numpy-1.26.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:174a8880739c16c925799c018f3f55b8130c1f7c8e75ab0a6fa9d41cab092fd6"},
+ {file = "numpy-1.26.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f79b231bf5c16b1f39c7f4875e1ded36abee1591e98742b05d8a0fb55d8a3eec"},
+ {file = "numpy-1.26.2-cp312-cp312-win32.whl", hash = "sha256:4a06263321dfd3598cacb252f51e521a8cb4b6df471bb12a7ee5cbab20ea9167"},
+ {file = "numpy-1.26.2-cp312-cp312-win_amd64.whl", hash = "sha256:b04f5dc6b3efdaab541f7857351aac359e6ae3c126e2edb376929bd3b7f92d7e"},
+ {file = "numpy-1.26.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4eb8df4bf8d3d90d091e0146f6c28492b0be84da3e409ebef54349f71ed271ef"},
+ {file = "numpy-1.26.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1a13860fdcd95de7cf58bd6f8bc5a5ef81c0b0625eb2c9a783948847abbef2c2"},
+ {file = "numpy-1.26.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64308ebc366a8ed63fd0bf426b6a9468060962f1a4339ab1074c228fa6ade8e3"},
+ {file = "numpy-1.26.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baf8aab04a2c0e859da118f0b38617e5ee65d75b83795055fb66c0d5e9e9b818"},
+ {file = "numpy-1.26.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d73a3abcac238250091b11caef9ad12413dab01669511779bc9b29261dd50210"},
+ {file = "numpy-1.26.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b361d369fc7e5e1714cf827b731ca32bff8d411212fccd29ad98ad622449cc36"},
+ {file = "numpy-1.26.2-cp39-cp39-win32.whl", hash = "sha256:bd3f0091e845164a20bd5a326860c840fe2af79fa12e0469a12768a3ec578d80"},
+ {file = "numpy-1.26.2-cp39-cp39-win_amd64.whl", hash = "sha256:2beef57fb031dcc0dc8fa4fe297a742027b954949cabb52a2a376c144e5e6060"},
+ {file = "numpy-1.26.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1cc3d5029a30fb5f06704ad6b23b35e11309491c999838c31f124fee32107c79"},
+ {file = "numpy-1.26.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94cc3c222bb9fb5a12e334d0479b97bb2df446fbe622b470928f5284ffca3f8d"},
+ {file = "numpy-1.26.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fe6b44fb8fcdf7eda4ef4461b97b3f63c466b27ab151bec2366db8b197387841"},
+ {file = "numpy-1.26.2.tar.gz", hash = "sha256:f65738447676ab5777f11e6bbbdb8ce11b785e105f690bc45966574816b6d3ea"},
]
[[package]]
-name = "pathspec"
-version = "0.11.1"
-description = "Utility library for gitignore style pattern matching of file paths."
+name = "packaging"
+version = "23.2"
+description = "Core utilities for Python packages"
optional = false
python-versions = ">=3.7"
files = [
- {file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"},
- {file = "pathspec-0.11.1.tar.gz", hash = "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"},
+ {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"},
+ {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"},
]
[[package]]
@@ -1403,6 +1468,107 @@ files = [
python-dateutil = ">=2.6,<3.0"
pytzdata = ">=2020.1"
+[[package]]
+name = "pendulum"
+version = "3.0.0"
+description = "Python datetimes made easy"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pendulum-3.0.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2cf9e53ef11668e07f73190c805dbdf07a1939c3298b78d5a9203a86775d1bfd"},
+ {file = "pendulum-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fb551b9b5e6059377889d2d878d940fd0bbb80ae4810543db18e6f77b02c5ef6"},
+ {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c58227ac260d5b01fc1025176d7b31858c9f62595737f350d22124a9a3ad82d"},
+ {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60fb6f415fea93a11c52578eaa10594568a6716602be8430b167eb0d730f3332"},
+ {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b69f6b4dbcb86f2c2fe696ba991e67347bcf87fe601362a1aba6431454b46bde"},
+ {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:138afa9c373ee450ede206db5a5e9004fd3011b3c6bbe1e57015395cd076a09f"},
+ {file = "pendulum-3.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:83d9031f39c6da9677164241fd0d37fbfc9dc8ade7043b5d6d62f56e81af8ad2"},
+ {file = "pendulum-3.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0c2308af4033fa534f089595bcd40a95a39988ce4059ccd3dc6acb9ef14ca44a"},
+ {file = "pendulum-3.0.0-cp310-none-win_amd64.whl", hash = "sha256:9a59637cdb8462bdf2dbcb9d389518c0263799189d773ad5c11db6b13064fa79"},
+ {file = "pendulum-3.0.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3725245c0352c95d6ca297193192020d1b0c0f83d5ee6bb09964edc2b5a2d508"},
+ {file = "pendulum-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6c035f03a3e565ed132927e2c1b691de0dbf4eb53b02a5a3c5a97e1a64e17bec"},
+ {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:597e66e63cbd68dd6d58ac46cb7a92363d2088d37ccde2dae4332ef23e95cd00"},
+ {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99a0f8172e19f3f0c0e4ace0ad1595134d5243cf75985dc2233e8f9e8de263ca"},
+ {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:77d8839e20f54706aed425bec82a83b4aec74db07f26acd039905d1237a5e1d4"},
+ {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afde30e8146292b059020fbc8b6f8fd4a60ae7c5e6f0afef937bbb24880bdf01"},
+ {file = "pendulum-3.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:660434a6fcf6303c4efd36713ca9212c753140107ee169a3fc6c49c4711c2a05"},
+ {file = "pendulum-3.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dee9e5a48c6999dc1106eb7eea3e3a50e98a50651b72c08a87ee2154e544b33e"},
+ {file = "pendulum-3.0.0-cp311-none-win_amd64.whl", hash = "sha256:d4cdecde90aec2d67cebe4042fd2a87a4441cc02152ed7ed8fb3ebb110b94ec4"},
+ {file = "pendulum-3.0.0-cp311-none-win_arm64.whl", hash = "sha256:773c3bc4ddda2dda9f1b9d51fe06762f9200f3293d75c4660c19b2614b991d83"},
+ {file = "pendulum-3.0.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:409e64e41418c49f973d43a28afe5df1df4f1dd87c41c7c90f1a63f61ae0f1f7"},
+ {file = "pendulum-3.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a38ad2121c5ec7c4c190c7334e789c3b4624798859156b138fcc4d92295835dc"},
+ {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fde4d0b2024b9785f66b7f30ed59281bd60d63d9213cda0eb0910ead777f6d37"},
+ {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b2c5675769fb6d4c11238132962939b960fcb365436b6d623c5864287faa319"},
+ {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8af95e03e066826f0f4c65811cbee1b3123d4a45a1c3a2b4fc23c4b0dff893b5"},
+ {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2165a8f33cb15e06c67070b8afc87a62b85c5a273e3aaa6bc9d15c93a4920d6f"},
+ {file = "pendulum-3.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ad5e65b874b5e56bd942546ea7ba9dd1d6a25121db1c517700f1c9de91b28518"},
+ {file = "pendulum-3.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17fe4b2c844bbf5f0ece69cfd959fa02957c61317b2161763950d88fed8e13b9"},
+ {file = "pendulum-3.0.0-cp312-none-win_amd64.whl", hash = "sha256:78f8f4e7efe5066aca24a7a57511b9c2119f5c2b5eb81c46ff9222ce11e0a7a5"},
+ {file = "pendulum-3.0.0-cp312-none-win_arm64.whl", hash = "sha256:28f49d8d1e32aae9c284a90b6bb3873eee15ec6e1d9042edd611b22a94ac462f"},
+ {file = "pendulum-3.0.0-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:d4e2512f4e1a4670284a153b214db9719eb5d14ac55ada5b76cbdb8c5c00399d"},
+ {file = "pendulum-3.0.0-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:3d897eb50883cc58d9b92f6405245f84b9286cd2de6e8694cb9ea5cb15195a32"},
+ {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e169cc2ca419517f397811bbe4589cf3cd13fca6dc38bb352ba15ea90739ebb"},
+ {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f17c3084a4524ebefd9255513692f7e7360e23c8853dc6f10c64cc184e1217ab"},
+ {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:826d6e258052715f64d05ae0fc9040c0151e6a87aae7c109ba9a0ed930ce4000"},
+ {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2aae97087872ef152a0c40e06100b3665d8cb86b59bc8471ca7c26132fccd0f"},
+ {file = "pendulum-3.0.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ac65eeec2250d03106b5e81284ad47f0d417ca299a45e89ccc69e36130ca8bc7"},
+ {file = "pendulum-3.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a5346d08f3f4a6e9e672187faa179c7bf9227897081d7121866358af369f44f9"},
+ {file = "pendulum-3.0.0-cp37-none-win_amd64.whl", hash = "sha256:235d64e87946d8f95c796af34818c76e0f88c94d624c268693c85b723b698aa9"},
+ {file = "pendulum-3.0.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:6a881d9c2a7f85bc9adafcfe671df5207f51f5715ae61f5d838b77a1356e8b7b"},
+ {file = "pendulum-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d7762d2076b9b1cb718a6631ad6c16c23fc3fac76cbb8c454e81e80be98daa34"},
+ {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e8e36a8130819d97a479a0e7bf379b66b3b1b520e5dc46bd7eb14634338df8c"},
+ {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7dc843253ac373358ffc0711960e2dd5b94ab67530a3e204d85c6e8cb2c5fa10"},
+ {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a78ad3635d609ceb1e97d6aedef6a6a6f93433ddb2312888e668365908c7120"},
+ {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b30a137e9e0d1f751e60e67d11fc67781a572db76b2296f7b4d44554761049d6"},
+ {file = "pendulum-3.0.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c95984037987f4a457bb760455d9ca80467be792236b69d0084f228a8ada0162"},
+ {file = "pendulum-3.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d29c6e578fe0f893766c0d286adbf0b3c726a4e2341eba0917ec79c50274ec16"},
+ {file = "pendulum-3.0.0-cp38-none-win_amd64.whl", hash = "sha256:deaba8e16dbfcb3d7a6b5fabdd5a38b7c982809567479987b9c89572df62e027"},
+ {file = "pendulum-3.0.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b11aceea5b20b4b5382962b321dbc354af0defe35daa84e9ff3aae3c230df694"},
+ {file = "pendulum-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a90d4d504e82ad236afac9adca4d6a19e4865f717034fc69bafb112c320dcc8f"},
+ {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:825799c6b66e3734227756fa746cc34b3549c48693325b8b9f823cb7d21b19ac"},
+ {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad769e98dc07972e24afe0cff8d365cb6f0ebc7e65620aa1976fcfbcadc4c6f3"},
+ {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6fc26907eb5fb8cc6188cc620bc2075a6c534d981a2f045daa5f79dfe50d512"},
+ {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c717eab1b6d898c00a3e0fa7781d615b5c5136bbd40abe82be100bb06df7a56"},
+ {file = "pendulum-3.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3ddd1d66d1a714ce43acfe337190be055cdc221d911fc886d5a3aae28e14b76d"},
+ {file = "pendulum-3.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:822172853d7a9cf6da95d7b66a16c7160cb99ae6df55d44373888181d7a06edc"},
+ {file = "pendulum-3.0.0-cp39-none-win_amd64.whl", hash = "sha256:840de1b49cf1ec54c225a2a6f4f0784d50bd47f68e41dc005b7f67c7d5b5f3ae"},
+ {file = "pendulum-3.0.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3b1f74d1e6ffe5d01d6023870e2ce5c2191486928823196f8575dcc786e107b1"},
+ {file = "pendulum-3.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:729e9f93756a2cdfa77d0fc82068346e9731c7e884097160603872686e570f07"},
+ {file = "pendulum-3.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e586acc0b450cd21cbf0db6bae386237011b75260a3adceddc4be15334689a9a"},
+ {file = "pendulum-3.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22e7944ffc1f0099a79ff468ee9630c73f8c7835cd76fdb57ef7320e6a409df4"},
+ {file = "pendulum-3.0.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fa30af36bd8e50686846bdace37cf6707bdd044e5cb6e1109acbad3277232e04"},
+ {file = "pendulum-3.0.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:440215347b11914ae707981b9a57ab9c7b6983ab0babde07063c6ee75c0dc6e7"},
+ {file = "pendulum-3.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:314c4038dc5e6a52991570f50edb2f08c339debdf8cea68ac355b32c4174e820"},
+ {file = "pendulum-3.0.0-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5acb1d386337415f74f4d1955c4ce8d0201978c162927d07df8eb0692b2d8533"},
+ {file = "pendulum-3.0.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a789e12fbdefaffb7b8ac67f9d8f22ba17a3050ceaaa635cd1cc4645773a4b1e"},
+ {file = "pendulum-3.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:860aa9b8a888e5913bd70d819306749e5eb488e6b99cd6c47beb701b22bdecf5"},
+ {file = "pendulum-3.0.0-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:5ebc65ea033ef0281368217fbf59f5cb05b338ac4dd23d60959c7afcd79a60a0"},
+ {file = "pendulum-3.0.0-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d9fef18ab0386ef6a9ac7bad7e43ded42c83ff7ad412f950633854f90d59afa8"},
+ {file = "pendulum-3.0.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1c134ba2f0571d0b68b83f6972e2307a55a5a849e7dac8505c715c531d2a8795"},
+ {file = "pendulum-3.0.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:385680812e7e18af200bb9b4a49777418c32422d05ad5a8eb85144c4a285907b"},
+ {file = "pendulum-3.0.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9eec91cd87c59fb32ec49eb722f375bd58f4be790cae11c1b70fac3ee4f00da0"},
+ {file = "pendulum-3.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4386bffeca23c4b69ad50a36211f75b35a4deb6210bdca112ac3043deb7e494a"},
+ {file = "pendulum-3.0.0-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dfbcf1661d7146d7698da4b86e7f04814221081e9fe154183e34f4c5f5fa3bf8"},
+ {file = "pendulum-3.0.0-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:04a1094a5aa1daa34a6b57c865b25f691848c61583fb22722a4df5699f6bf74c"},
+ {file = "pendulum-3.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5b0ec85b9045bd49dd3a3493a5e7ddfd31c36a2a60da387c419fa04abcaecb23"},
+ {file = "pendulum-3.0.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:0a15b90129765b705eb2039062a6daf4d22c4e28d1a54fa260892e8c3ae6e157"},
+ {file = "pendulum-3.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:bb8f6d7acd67a67d6fedd361ad2958ff0539445ef51cbe8cd288db4306503cd0"},
+ {file = "pendulum-3.0.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd69b15374bef7e4b4440612915315cc42e8575fcda2a3d7586a0d88192d0c88"},
+ {file = "pendulum-3.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc00f8110db6898360c53c812872662e077eaf9c75515d53ecc65d886eec209a"},
+ {file = "pendulum-3.0.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:83a44e8b40655d0ba565a5c3d1365d27e3e6778ae2a05b69124db9e471255c4a"},
+ {file = "pendulum-3.0.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1a3604e9fbc06b788041b2a8b78f75c243021e0f512447806a6d37ee5214905d"},
+ {file = "pendulum-3.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:92c307ae7accebd06cbae4729f0ba9fa724df5f7d91a0964b1b972a22baa482b"},
+ {file = "pendulum-3.0.0.tar.gz", hash = "sha256:5d034998dea404ec31fae27af6b22cff1708f830a1ed7353be4d1019bb9f584e"},
+]
+
+[package.dependencies]
+"backports.zoneinfo" = {version = ">=0.2.1", markers = "python_version < \"3.9\""}
+importlib-resources = {version = ">=5.9.0", markers = "python_version < \"3.9\""}
+python-dateutil = ">=2.6"
+tzdata = ">=2020.1"
+
+[package.extras]
+test = ["time-machine (>=2.6.0)"]
+
[[package]]
name = "pkgutil-resolve-name"
version = "1.3.10"
@@ -1414,33 +1580,15 @@ files = [
{file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"},
]
-[[package]]
-name = "platformdirs"
-version = "3.6.0"
-description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "platformdirs-3.6.0-py3-none-any.whl", hash = "sha256:ffa199e3fbab8365778c4a10e1fbf1b9cd50707de826eb304b50e57ec0cc8d38"},
- {file = "platformdirs-3.6.0.tar.gz", hash = "sha256:57e28820ca8094678b807ff529196506d7a21e17156cb1cddb3e74cebce54640"},
-]
-
-[package.dependencies]
-typing-extensions = {version = ">=4.6.3", markers = "python_version < \"3.8\""}
-
-[package.extras]
-docs = ["furo (>=2023.5.20)", "proselint (>=0.13)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"]
-test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)"]
-
[[package]]
name = "pluggy"
-version = "1.0.0"
+version = "1.2.0"
description = "plugin and hook calling mechanisms for python"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
files = [
- {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"},
- {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"},
+ {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"},
+ {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"},
]
[package.dependencies]
@@ -1462,24 +1610,21 @@ files = [
]
[[package]]
-name = "prompt-toolkit"
-version = "3.0.38"
-description = "Library for building powerful interactive command lines in Python"
+name = "py-cpuinfo"
+version = "9.0.0"
+description = "Get CPU info with pure Python"
optional = false
-python-versions = ">=3.7.0"
+python-versions = "*"
files = [
- {file = "prompt_toolkit-3.0.38-py3-none-any.whl", hash = "sha256:45ea77a2f7c60418850331366c81cf6b5b9cf4c7fd34616f733c5427e6abbb1f"},
- {file = "prompt_toolkit-3.0.38.tar.gz", hash = "sha256:23ac5d50538a9a38c8bde05fecb47d0b403ecd0662857a86f886f798563d5b9b"},
+ {file = "py-cpuinfo-9.0.0.tar.gz", hash = "sha256:3cdbbf3fac90dc6f118bfd64384f309edeadd902d7c8fb17f02ffa1fc3f49690"},
+ {file = "py_cpuinfo-9.0.0-py3-none-any.whl", hash = "sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5"},
]
-[package.dependencies]
-wcwidth = "*"
-
[[package]]
name = "pyarrow"
version = "12.0.1"
description = "Python library for Apache Arrow"
-optional = false
+optional = true
python-versions = ">=3.7"
files = [
{file = "pyarrow-12.0.1-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:6d288029a94a9bb5407ceebdd7110ba398a00412c5b0155ee9813a40d246c5df"},
@@ -1513,16 +1658,53 @@ files = [
numpy = ">=1.16.6"
[[package]]
-name = "pycodestyle"
-version = "2.7.0"
-description = "Python style guide checker"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+name = "pyarrow"
+version = "14.0.2"
+description = "Python library for Apache Arrow"
+optional = true
+python-versions = ">=3.8"
files = [
- {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"},
- {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"},
+ {file = "pyarrow-14.0.2-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:ba9fe808596c5dbd08b3aeffe901e5f81095baaa28e7d5118e01354c64f22807"},
+ {file = "pyarrow-14.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:22a768987a16bb46220cef490c56c671993fbee8fd0475febac0b3e16b00a10e"},
+ {file = "pyarrow-14.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dbba05e98f247f17e64303eb876f4a80fcd32f73c7e9ad975a83834d81f3fda"},
+ {file = "pyarrow-14.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a898d134d00b1eca04998e9d286e19653f9d0fcb99587310cd10270907452a6b"},
+ {file = "pyarrow-14.0.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:87e879323f256cb04267bb365add7208f302df942eb943c93a9dfeb8f44840b1"},
+ {file = "pyarrow-14.0.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:76fc257559404ea5f1306ea9a3ff0541bf996ff3f7b9209fc517b5e83811fa8e"},
+ {file = "pyarrow-14.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0c4a18e00f3a32398a7f31da47fefcd7a927545b396e1f15d0c85c2f2c778cd"},
+ {file = "pyarrow-14.0.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:87482af32e5a0c0cce2d12eb3c039dd1d853bd905b04f3f953f147c7a196915b"},
+ {file = "pyarrow-14.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:059bd8f12a70519e46cd64e1ba40e97eae55e0cbe1695edd95384653d7626b23"},
+ {file = "pyarrow-14.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f16111f9ab27e60b391c5f6d197510e3ad6654e73857b4e394861fc79c37200"},
+ {file = "pyarrow-14.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06ff1264fe4448e8d02073f5ce45a9f934c0f3db0a04460d0b01ff28befc3696"},
+ {file = "pyarrow-14.0.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:6dd4f4b472ccf4042f1eab77e6c8bce574543f54d2135c7e396f413046397d5a"},
+ {file = "pyarrow-14.0.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:32356bfb58b36059773f49e4e214996888eeea3a08893e7dbde44753799b2a02"},
+ {file = "pyarrow-14.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:52809ee69d4dbf2241c0e4366d949ba035cbcf48409bf404f071f624ed313a2b"},
+ {file = "pyarrow-14.0.2-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:c87824a5ac52be210d32906c715f4ed7053d0180c1060ae3ff9b7e560f53f944"},
+ {file = "pyarrow-14.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a25eb2421a58e861f6ca91f43339d215476f4fe159eca603c55950c14f378cc5"},
+ {file = "pyarrow-14.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c1da70d668af5620b8ba0a23f229030a4cd6c5f24a616a146f30d2386fec422"},
+ {file = "pyarrow-14.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2cc61593c8e66194c7cdfae594503e91b926a228fba40b5cf25cc593563bcd07"},
+ {file = "pyarrow-14.0.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:78ea56f62fb7c0ae8ecb9afdd7893e3a7dbeb0b04106f5c08dbb23f9c0157591"},
+ {file = "pyarrow-14.0.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:37c233ddbce0c67a76c0985612fef27c0c92aef9413cf5aa56952f359fcb7379"},
+ {file = "pyarrow-14.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:e4b123ad0f6add92de898214d404e488167b87b5dd86e9a434126bc2b7a5578d"},
+ {file = "pyarrow-14.0.2-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:e354fba8490de258be7687f341bc04aba181fc8aa1f71e4584f9890d9cb2dec2"},
+ {file = "pyarrow-14.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:20e003a23a13da963f43e2b432483fdd8c38dc8882cd145f09f21792e1cf22a1"},
+ {file = "pyarrow-14.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc0de7575e841f1595ac07e5bc631084fd06ca8b03c0f2ecece733d23cd5102a"},
+ {file = "pyarrow-14.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66e986dc859712acb0bd45601229021f3ffcdfc49044b64c6d071aaf4fa49e98"},
+ {file = "pyarrow-14.0.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:f7d029f20ef56673a9730766023459ece397a05001f4e4d13805111d7c2108c0"},
+ {file = "pyarrow-14.0.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:209bac546942b0d8edc8debda248364f7f668e4aad4741bae58e67d40e5fcf75"},
+ {file = "pyarrow-14.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:1e6987c5274fb87d66bb36816afb6f65707546b3c45c44c28e3c4133c010a881"},
+ {file = "pyarrow-14.0.2-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:a01d0052d2a294a5f56cc1862933014e696aa08cc7b620e8c0cce5a5d362e976"},
+ {file = "pyarrow-14.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a51fee3a7db4d37f8cda3ea96f32530620d43b0489d169b285d774da48ca9785"},
+ {file = "pyarrow-14.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64df2bf1ef2ef14cee531e2dfe03dd924017650ffaa6f9513d7a1bb291e59c15"},
+ {file = "pyarrow-14.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c0fa3bfdb0305ffe09810f9d3e2e50a2787e3a07063001dcd7adae0cee3601a"},
+ {file = "pyarrow-14.0.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c65bf4fd06584f058420238bc47a316e80dda01ec0dfb3044594128a6c2db794"},
+ {file = "pyarrow-14.0.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:63ac901baec9369d6aae1cbe6cca11178fb018a8d45068aaf5bb54f94804a866"},
+ {file = "pyarrow-14.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:75ee0efe7a87a687ae303d63037d08a48ef9ea0127064df18267252cfe2e9541"},
+ {file = "pyarrow-14.0.2.tar.gz", hash = "sha256:36cef6ba12b499d864d1def3e990f97949e0b79400d08b7cf74504ffbd3eb025"},
]
+[package.dependencies]
+numpy = ">=1.16.6"
+
[[package]]
name = "pycparser"
version = "2.21"
@@ -1534,79 +1716,33 @@ files = [
{file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"},
]
-[[package]]
-name = "pydocstyle"
-version = "6.3.0"
-description = "Python docstring style checker"
-optional = false
-python-versions = ">=3.6"
-files = [
- {file = "pydocstyle-6.3.0-py3-none-any.whl", hash = "sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019"},
- {file = "pydocstyle-6.3.0.tar.gz", hash = "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1"},
-]
-
-[package.dependencies]
-importlib-metadata = {version = ">=2.0.0,<5.0.0", markers = "python_version < \"3.8\""}
-snowballstemmer = ">=2.2.0"
-
-[package.extras]
-toml = ["tomli (>=1.2.3)"]
-
-[[package]]
-name = "pyflakes"
-version = "2.3.1"
-description = "passive checker of Python programs"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-files = [
- {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"},
- {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"},
-]
-
-[[package]]
-name = "pygithub"
-version = "1.58.2"
-description = "Use the full Github API v3"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "PyGithub-1.58.2-py3-none-any.whl", hash = "sha256:f435884af617c6debaa76cbc355372d1027445a56fbc39972a3b9ed4968badc8"},
- {file = "PyGithub-1.58.2.tar.gz", hash = "sha256:1e6b1b7afe31f75151fb81f7ab6b984a7188a852bdb123dbb9ae90023c3ce60f"},
-]
-
-[package.dependencies]
-deprecated = "*"
-pyjwt = {version = ">=2.4.0", extras = ["crypto"]}
-pynacl = ">=1.4.0"
-requests = ">=2.14.0"
-
[[package]]
name = "pygments"
-version = "2.15.1"
+version = "2.17.2"
description = "Pygments is a syntax highlighting package written in Python."
optional = true
python-versions = ">=3.7"
files = [
- {file = "Pygments-2.15.1-py3-none-any.whl", hash = "sha256:db2db3deb4b4179f399a09054b023b6a586b76499d36965813c71aa8ed7b5fd1"},
- {file = "Pygments-2.15.1.tar.gz", hash = "sha256:8ace4d3c1dd481894b2005f560ead0f9f19ee64fe983366be1a21e171d12775c"},
+ {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"},
+ {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"},
]
[package.extras]
plugins = ["importlib-metadata"]
+windows-terminal = ["colorama (>=0.4.6)"]
[[package]]
name = "pyjwt"
-version = "2.7.0"
+version = "2.8.0"
description = "JSON Web Token implementation in Python"
optional = false
python-versions = ">=3.7"
files = [
- {file = "PyJWT-2.7.0-py3-none-any.whl", hash = "sha256:ba2b425b15ad5ef12f200dc67dd56af4e26de2331f965c5439994dad075876e1"},
- {file = "PyJWT-2.7.0.tar.gz", hash = "sha256:bd6ca4a3c4285c1a2d4349e5a035fdf8fb94e04ccd0fcbe6ba289dae9cc3e074"},
+ {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"},
+ {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"},
]
[package.dependencies]
-cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"crypto\""}
typing-extensions = {version = "*", markers = "python_version <= \"3.7\""}
[package.extras]
@@ -1615,32 +1751,6 @@ dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pyte
docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"]
tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"]
-[[package]]
-name = "pynacl"
-version = "1.5.0"
-description = "Python binding to the Networking and Cryptography (NaCl) library"
-optional = false
-python-versions = ">=3.6"
-files = [
- {file = "PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1"},
- {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92"},
- {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394"},
- {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0c84947a22519e013607c9be43706dd42513f9e6ae5d39d3613ca1e142fba44d"},
- {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06b8f6fa7f5de8d5d2f7573fe8c863c051225a27b61e6860fd047b1775807858"},
- {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b"},
- {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:61f642bf2378713e2c2e1de73444a3778e5f0a38be6fee0fe532fe30060282ff"},
- {file = "PyNaCl-1.5.0-cp36-abi3-win32.whl", hash = "sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543"},
- {file = "PyNaCl-1.5.0-cp36-abi3-win_amd64.whl", hash = "sha256:20f42270d27e1b6a29f54032090b972d97f0a1b0948cc52392041ef7831fee93"},
- {file = "PyNaCl-1.5.0.tar.gz", hash = "sha256:8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba"},
-]
-
-[package.dependencies]
-cffi = ">=1.4.1"
-
-[package.extras]
-docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"]
-tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"]
-
[[package]]
name = "pyrsistent"
version = "0.19.3"
@@ -1679,13 +1789,13 @@ files = [
[[package]]
name = "pytest"
-version = "7.4.0"
+version = "7.4.4"
description = "pytest: simple powerful testing with Python"
optional = false
python-versions = ">=3.7"
files = [
- {file = "pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"},
- {file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"},
+ {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"},
+ {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"},
]
[package.dependencies]
@@ -1700,6 +1810,48 @@ tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""}
[package.extras]
testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
+[[package]]
+name = "pytest-benchmark"
+version = "4.0.0"
+description = "A ``pytest`` fixture for benchmarking code. It will group the tests into rounds that are calibrated to the chosen timer."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pytest-benchmark-4.0.0.tar.gz", hash = "sha256:fb0785b83efe599a6a956361c0691ae1dbb5318018561af10f3e915caa0048d1"},
+ {file = "pytest_benchmark-4.0.0-py3-none-any.whl", hash = "sha256:fdb7db64e31c8b277dff9850d2a2556d8b60bcb0ea6524e36e28ffd7c87f71d6"},
+]
+
+[package.dependencies]
+py-cpuinfo = "*"
+pytest = ">=3.8"
+
+[package.extras]
+aspect = ["aspectlib"]
+elasticsearch = ["elasticsearch"]
+histogram = ["pygal", "pygaljs"]
+
+[[package]]
+name = "pytest-codspeed"
+version = "2.2.0"
+description = "Pytest plugin to create CodSpeed benchmarks"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pytest_codspeed-2.2.0-py3-none-any.whl", hash = "sha256:5da48b842fc465926d122dd15bb86e86af5d9f0c53ec1b7c736e9a9aed558c13"},
+ {file = "pytest_codspeed-2.2.0.tar.gz", hash = "sha256:665003fc20117b64a98d16ffd1008f5bd6bf3b1e9af142b98c00abff7f626bbd"},
+]
+
+[package.dependencies]
+cffi = ">=1.15.1,<1.16.0"
+filelock = ">=3.12.2,<3.13.0"
+pytest = ">=3.8"
+setuptools = {version = ">=67.8.0,<67.9.0", markers = "python_full_version >= \"3.12.0b1\""}
+
+[package.extras]
+compat = ["pytest-benchmark (>=4.0.0,<4.1.0)", "pytest-xdist (>=2.0.0,<2.1.0)"]
+lint = ["black (>=23.3.0,<23.4.0)", "isort (>=5.12.0,<5.13.0)", "mypy (>=1.3.0,<1.4.0)", "ruff (>=0.0.275,<0.1.0)"]
+test = ["pytest (>=7.0,<8.0)", "pytest-cov (>=4.0.0,<4.1.0)"]
+
[[package]]
name = "pytest-durations"
version = "1.2.0"
@@ -1756,32 +1908,15 @@ files = [
[package.extras]
cli = ["click (>=5.0)"]
-[[package]]
-name = "python-slugify"
-version = "8.0.1"
-description = "A Python slugify application that also handles Unicode"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "python-slugify-8.0.1.tar.gz", hash = "sha256:ce0d46ddb668b3be82f4ed5e503dbc33dd815d83e2eb6824211310d3fb172a27"},
- {file = "python_slugify-8.0.1-py2.py3-none-any.whl", hash = "sha256:70ca6ea68fe63ecc8fa4fcf00ae651fc8a5d02d93dcd12ae6d4fc7ca46c4d395"},
-]
-
-[package.dependencies]
-text-unidecode = ">=1.3"
-
-[package.extras]
-unidecode = ["Unidecode (>=1.1.1)"]
-
[[package]]
name = "pytz"
-version = "2023.3"
+version = "2023.3.post1"
description = "World timezone definitions, modern and historical"
optional = false
python-versions = "*"
files = [
- {file = "pytz-2023.3-py2.py3-none-any.whl", hash = "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"},
- {file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"},
+ {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"},
+ {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"},
]
[[package]]
@@ -1797,69 +1932,77 @@ files = [
[[package]]
name = "pyyaml"
-version = "6.0"
+version = "6.0.1"
description = "YAML parser and emitter for Python"
optional = false
python-versions = ">=3.6"
files = [
- {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"},
- {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"},
- {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"},
- {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"},
- {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"},
- {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"},
- {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"},
- {file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"},
- {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"},
- {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"},
- {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"},
- {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"},
- {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"},
- {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"},
- {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"},
- {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"},
- {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"},
- {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"},
- {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"},
- {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"},
- {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"},
- {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"},
- {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"},
- {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"},
- {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"},
- {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"},
- {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"},
- {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"},
- {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"},
- {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"},
- {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"},
- {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"},
- {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"},
- {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"},
- {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"},
- {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"},
- {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"},
- {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"},
- {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"},
- {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"},
-]
-
-[[package]]
-name = "questionary"
-version = "1.10.0"
-description = "Python library to build pretty command line user prompts âī¸"
-optional = false
-python-versions = ">=3.6,<4.0"
-files = [
- {file = "questionary-1.10.0-py3-none-any.whl", hash = "sha256:fecfcc8cca110fda9d561cb83f1e97ecbb93c613ff857f655818839dac74ce90"},
- {file = "questionary-1.10.0.tar.gz", hash = "sha256:600d3aefecce26d48d97eee936fdb66e4bc27f934c3ab6dd1e292c4f43946d90"},
+ {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"},
+ {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"},
+ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"},
+ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"},
+ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"},
+ {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"},
+ {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"},
+ {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"},
+ {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"},
+ {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"},
+ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"},
+ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"},
+ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"},
+ {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"},
+ {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"},
+ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
+ {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
+ {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
+ {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
+ {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
+ {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
+ {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"},
+ {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"},
+ {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"},
+ {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"},
+ {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"},
+ {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"},
+ {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"},
+ {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"},
+ {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"},
+ {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"},
+ {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"},
+ {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"},
+ {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"},
+ {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"},
+ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"},
+ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"},
+ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"},
+ {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"},
+ {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"},
+ {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"},
+ {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"},
+ {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"},
+ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"},
+ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"},
+ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"},
+ {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"},
+ {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"},
+ {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"},
+ {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
+]
+
+[[package]]
+name = "referencing"
+version = "0.32.0"
+description = "JSON Referencing + Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "referencing-0.32.0-py3-none-any.whl", hash = "sha256:bdcd3efb936f82ff86f993093f6da7435c7de69a3b3a5a06678a6050184bee99"},
+ {file = "referencing-0.32.0.tar.gz", hash = "sha256:689e64fe121843dcfd57b71933318ef1f91188ffb45367332700a86ac8fd6161"},
]
[package.dependencies]
-prompt_toolkit = ">=2.0,<4.0"
-
-[package.extras]
-docs = ["Sphinx (>=3.3,<4.0)", "sphinx-autobuild (>=2020.9.1,<2021.0.0)", "sphinx-autodoc-typehints (>=1.11.1,<2.0.0)", "sphinx-copybutton (>=0.3.1,<0.4.0)", "sphinx-rtd-theme (>=0.5.0,<0.6.0)"]
+attrs = ">=22.2.0"
+rpds-py = ">=0.7.0"
[[package]]
name = "requests"
@@ -1901,32 +2044,140 @@ six = "*"
fixture = ["fixtures"]
test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testtools"]
+[[package]]
+name = "rpds-py"
+version = "0.16.2"
+description = "Python bindings to Rust's persistent data structures (rpds)"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "rpds_py-0.16.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:509b617ac787cd1149600e731db9274ebbef094503ca25158e6f23edaba1ca8f"},
+ {file = "rpds_py-0.16.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:413b9c17388bbd0d87a329d8e30c1a4c6e44e2bb25457f43725a8e6fe4161e9e"},
+ {file = "rpds_py-0.16.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2946b120718eba9af2b4dd103affc1164a87b9e9ebff8c3e4c05d7b7a7e274e2"},
+ {file = "rpds_py-0.16.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:35ae5ece284cf36464eb160880018cf6088a9ac5ddc72292a6092b6ef3f4da53"},
+ {file = "rpds_py-0.16.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc6a7620ba7639a3db6213da61312cb4aa9ac0ca6e00dc1cbbdc21c2aa6eb57"},
+ {file = "rpds_py-0.16.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8cb6fe8ecdfffa0e711a75c931fb39f4ba382b4b3ccedeca43f18693864fe850"},
+ {file = "rpds_py-0.16.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dace7b26a13353e24613417ce2239491b40a6ad44e5776a18eaff7733488b44"},
+ {file = "rpds_py-0.16.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1bdbc5fcb04a7309074de6b67fa9bc4b418ab3fc435fec1f2779a0eced688d04"},
+ {file = "rpds_py-0.16.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f42e25c016927e2a6b1ce748112c3ab134261fc2ddc867e92d02006103e1b1b7"},
+ {file = "rpds_py-0.16.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:eab36eae3f3e8e24b05748ec9acc66286662f5d25c52ad70cadab544e034536b"},
+ {file = "rpds_py-0.16.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0474df4ade9a3b4af96c3d36eb81856cb9462e4c6657d4caecfd840d2a13f3c9"},
+ {file = "rpds_py-0.16.2-cp310-none-win32.whl", hash = "sha256:84c5a4d1f9dd7e2d2c44097fb09fffe728629bad31eb56caf97719e55575aa82"},
+ {file = "rpds_py-0.16.2-cp310-none-win_amd64.whl", hash = "sha256:2bd82db36cd70b3628c0c57d81d2438e8dd4b7b32a6a9f25f24ab0e657cb6c4e"},
+ {file = "rpds_py-0.16.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:adc0c3d6fc6ae35fee3e4917628983f6ce630d513cbaad575b4517d47e81b4bb"},
+ {file = "rpds_py-0.16.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ec23fcad480e77ede06cf4127a25fc440f7489922e17fc058f426b5256ee0edb"},
+ {file = "rpds_py-0.16.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07aab64e2808c3ebac2a44f67e9dc0543812b715126dfd6fe4264df527556cb6"},
+ {file = "rpds_py-0.16.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a4ebb8b20bd09c5ce7884c8f0388801100f5e75e7f733b1b6613c713371feefc"},
+ {file = "rpds_py-0.16.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a3d7e2ea25d3517c6d7e5a1cc3702cffa6bd18d9ef8d08d9af6717fc1c700eed"},
+ {file = "rpds_py-0.16.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f28ac0e8e7242d140f99402a903a2c596ab71550272ae9247ad78f9a932b5698"},
+ {file = "rpds_py-0.16.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19f00f57fdd38db4bb5ad09f9ead1b535332dbf624200e9029a45f1f35527ebb"},
+ {file = "rpds_py-0.16.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3da5a4c56953bdbf6d04447c3410309616c54433146ccdb4a277b9cb499bc10e"},
+ {file = "rpds_py-0.16.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ec2e1cf025b2c0f48ec17ff3e642661da7ee332d326f2e6619366ce8e221f018"},
+ {file = "rpds_py-0.16.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e0441fb4fdd39a230477b2ca9be90868af64425bfe7b122b57e61e45737a653b"},
+ {file = "rpds_py-0.16.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9f0350ef2fba5f34eb0c9000ea328e51b9572b403d2f7f3b19f24085f6f598e8"},
+ {file = "rpds_py-0.16.2-cp311-none-win32.whl", hash = "sha256:5a80e2f83391ad0808b4646732af2a7b67550b98f0cae056cb3b40622a83dbb3"},
+ {file = "rpds_py-0.16.2-cp311-none-win_amd64.whl", hash = "sha256:e04e56b4ca7a770593633556e8e9e46579d66ec2ada846b401252a2bdcf70a6d"},
+ {file = "rpds_py-0.16.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:5e6caa3809e50690bd92fa490f5c38caa86082c8c3315aa438bce43786d5e90d"},
+ {file = "rpds_py-0.16.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2e53b9b25cac9065328901713a7e9e3b12e4f57ef4280b370fbbf6fef2052eef"},
+ {file = "rpds_py-0.16.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af27423662f32d7501a00c5e7342f7dbd1e4a718aea7a239781357d15d437133"},
+ {file = "rpds_py-0.16.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:43d4dd5fb16eb3825742bad8339d454054261ab59fed2fbac84e1d84d5aae7ba"},
+ {file = "rpds_py-0.16.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e061de3b745fe611e23cd7318aec2c8b0e4153939c25c9202a5811ca911fd733"},
+ {file = "rpds_py-0.16.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b811d182ad17ea294f2ec63c0621e7be92a1141e1012383461872cead87468f"},
+ {file = "rpds_py-0.16.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5552f328eaef1a75ff129d4d0c437bf44e43f9436d3996e8eab623ea0f5fcf73"},
+ {file = "rpds_py-0.16.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dcbe1f8dd179e4d69b70b1f1d9bb6fd1e7e1bdc9c9aad345cdeb332e29d40748"},
+ {file = "rpds_py-0.16.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8aad80645a011abae487d356e0ceb359f4938dfb6f7bcc410027ed7ae4f7bb8b"},
+ {file = "rpds_py-0.16.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b6f5549d6ed1da9bfe3631ca9483ae906f21410be2445b73443fa9f017601c6f"},
+ {file = "rpds_py-0.16.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d452817e0d9c749c431a1121d56a777bd7099b720b3d1c820f1725cb40928f58"},
+ {file = "rpds_py-0.16.2-cp312-none-win32.whl", hash = "sha256:888a97002e986eca10d8546e3c8b97da1d47ad8b69726dcfeb3e56348ebb28a3"},
+ {file = "rpds_py-0.16.2-cp312-none-win_amd64.whl", hash = "sha256:d8dda2a806dfa4a9b795950c4f5cc56d6d6159f7d68080aedaff3bdc9b5032f5"},
+ {file = "rpds_py-0.16.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:071980663c273bf3d388fe5c794c547e6f35ba3335477072c713a3176bf14a60"},
+ {file = "rpds_py-0.16.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:726ac36e8a3bb8daef2fd482534cabc5e17334052447008405daca7ca04a3108"},
+ {file = "rpds_py-0.16.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9e557db6a177470316c82f023e5d571811c9a4422b5ea084c85da9aa3c035fc"},
+ {file = "rpds_py-0.16.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:90123853fc8b1747f80b0d354be3d122b4365a93e50fc3aacc9fb4c2488845d6"},
+ {file = "rpds_py-0.16.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a61f659665a39a4d17d699ab3593d7116d66e1e2e3f03ef3fb8f484e91908808"},
+ {file = "rpds_py-0.16.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc97f0640e91d7776530f06e6836c546c1c752a52de158720c4224c9e8053cad"},
+ {file = "rpds_py-0.16.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44a54e99a2b9693a37ebf245937fd6e9228b4cbd64b9cc961e1f3391ec6c7391"},
+ {file = "rpds_py-0.16.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bd4b677d929cf1f6bac07ad76e0f2d5de367e6373351c01a9c0a39f6b21b4a8b"},
+ {file = "rpds_py-0.16.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:5ef00873303d678aaf8b0627e111fd434925ca01c657dbb2641410f1cdaef261"},
+ {file = "rpds_py-0.16.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:349cb40897fd529ca15317c22c0eab67f5ac5178b5bd2c6adc86172045210acc"},
+ {file = "rpds_py-0.16.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:2ddef620e70eaffebed5932ce754d539c0930f676aae6212f8e16cd9743dd365"},
+ {file = "rpds_py-0.16.2-cp38-none-win32.whl", hash = "sha256:882ce6e25e585949c3d9f9abd29202367175e0aab3aba0c58c9abbb37d4982ff"},
+ {file = "rpds_py-0.16.2-cp38-none-win_amd64.whl", hash = "sha256:f4bd4578e44f26997e9e56c96dedc5f1af43cc9d16c4daa29c771a00b2a26851"},
+ {file = "rpds_py-0.16.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:69ac7ea9897ec201ce68b48582f3eb34a3f9924488a5432a93f177bf76a82a7e"},
+ {file = "rpds_py-0.16.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a9880b4656efe36ccad41edc66789e191e5ee19a1ea8811e0aed6f69851a82f4"},
+ {file = "rpds_py-0.16.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee94cb58c0ba2c62ee108c2b7c9131b2c66a29e82746e8fa3aa1a1effbd3dcf1"},
+ {file = "rpds_py-0.16.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:24f7a2eb3866a9e91f4599851e0c8d39878a470044875c49bd528d2b9b88361c"},
+ {file = "rpds_py-0.16.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ca57468da2d9a660bcf8961637c85f2fbb2aa64d9bc3f9484e30c3f9f67b1dd7"},
+ {file = "rpds_py-0.16.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ccd4e400309e1f34a5095bf9249d371f0fd60f8a3a5c4a791cad7b99ce1fd38d"},
+ {file = "rpds_py-0.16.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80443fe2f7b3ea3934c5d75fb0e04a5dbb4a8e943e5ff2de0dec059202b70a8b"},
+ {file = "rpds_py-0.16.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4d6a9f052e72d493efd92a77f861e45bab2f6be63e37fa8ecf0c6fd1a58fedb0"},
+ {file = "rpds_py-0.16.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:35953f4f2b3216421af86fd236b7c0c65935936a94ea83ddbd4904ba60757773"},
+ {file = "rpds_py-0.16.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:981d135c7cdaf6cd8eadae1c950de43b976de8f09d8e800feed307140d3d6d00"},
+ {file = "rpds_py-0.16.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d0dd7ed2f16df2e129496e7fbe59a34bc2d7fc8db443a606644d069eb69cbd45"},
+ {file = "rpds_py-0.16.2-cp39-none-win32.whl", hash = "sha256:703d95c75a72e902544fda08e965885525e297578317989fd15a6ce58414b41d"},
+ {file = "rpds_py-0.16.2-cp39-none-win_amd64.whl", hash = "sha256:e93ec1b300acf89730cf27975ef574396bc04edecc358e9bd116fb387a123239"},
+ {file = "rpds_py-0.16.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:44627b6ca7308680a70766454db5249105fa6344853af6762eaad4158a2feebe"},
+ {file = "rpds_py-0.16.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:3f91df8e6dbb7360e176d1affd5fb0246d2b88d16aa5ebc7db94fd66b68b61da"},
+ {file = "rpds_py-0.16.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d904c5693e08bad240f16d79305edba78276be87061c872a4a15e2c301fa2c0"},
+ {file = "rpds_py-0.16.2-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:290a81cfbe4673285cdf140ec5cd1658ffbf63ab359f2b352ebe172e7cfa5bf0"},
+ {file = "rpds_py-0.16.2-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b634c5ec0103c5cbebc24ebac4872b045cccb9456fc59efdcf6fe39775365bd2"},
+ {file = "rpds_py-0.16.2-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a297a4d08cc67c7466c873c78039d87840fb50d05473db0ec1b7b03d179bf322"},
+ {file = "rpds_py-0.16.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2e75e17bd0bb66ee34a707da677e47c14ee51ccef78ed6a263a4cc965a072a1"},
+ {file = "rpds_py-0.16.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f1b9d9260e06ea017feb7172976ab261e011c1dc2f8883c7c274f6b2aabfe01a"},
+ {file = "rpds_py-0.16.2-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:162d7cd9cd311c1b0ff1c55a024b8f38bd8aad1876b648821da08adc40e95734"},
+ {file = "rpds_py-0.16.2-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:9b32f742ce5b57201305f19c2ef7a184b52f6f9ba6871cc042c2a61f0d6b49b8"},
+ {file = "rpds_py-0.16.2-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:ac08472f41ea77cd6a5dae36ae7d4ed3951d6602833af87532b556c1b4601d63"},
+ {file = "rpds_py-0.16.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:495a14b72bbe217f2695dcd9b5ab14d4f8066a00f5d209ed94f0aca307f85f6e"},
+ {file = "rpds_py-0.16.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:8d6b6937ae9eac6d6c0ca3c42774d89fa311f55adff3970fb364b34abde6ed3d"},
+ {file = "rpds_py-0.16.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a61226465bda9283686db8f17d02569a98e4b13c637be5a26d44aa1f1e361c2"},
+ {file = "rpds_py-0.16.2-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5cf6af100ffb5c195beec11ffaa8cf8523057f123afa2944e6571d54da84cdc9"},
+ {file = "rpds_py-0.16.2-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6df15846ee3fb2e6397fe25d7ca6624af9f89587f3f259d177b556fed6bebe2c"},
+ {file = "rpds_py-0.16.2-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1be2f033df1b8be8c3167ba3c29d5dca425592ee31e35eac52050623afba5772"},
+ {file = "rpds_py-0.16.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96f957d6ab25a78b9e7fc9749d754b98eac825a112b4e666525ce89afcbd9ed5"},
+ {file = "rpds_py-0.16.2-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:088396c7c70e59872f67462fcac3ecbded5233385797021976a09ebd55961dfe"},
+ {file = "rpds_py-0.16.2-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4c46ad6356e1561f2a54f08367d1d2e70a0a1bb2db2282d2c1972c1d38eafc3b"},
+ {file = "rpds_py-0.16.2-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:47713dc4fce213f5c74ca8a1f6a59b622fc1b90868deb8e8e4d993e421b4b39d"},
+ {file = "rpds_py-0.16.2-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:f811771019f063bbd0aa7bb72c8a934bc13ebacb4672d712fc1639cfd314cccc"},
+ {file = "rpds_py-0.16.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f19afcfc0dd0dca35694df441e9b0f95bc231b512f51bded3c3d8ca32153ec19"},
+ {file = "rpds_py-0.16.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a4b682c5775d6a3d21e314c10124599976809455ee67020e8e72df1769b87bc3"},
+ {file = "rpds_py-0.16.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c647ca87fc0ebe808a41de912e9a1bfef9acb85257e5d63691364ac16b81c1f0"},
+ {file = "rpds_py-0.16.2-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:302bd4983bbd47063e452c38be66153760112f6d3635c7eeefc094299fa400a9"},
+ {file = "rpds_py-0.16.2-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bf721ede3eb7b829e4a9b8142bd55db0bdc82902720548a703f7e601ee13bdc3"},
+ {file = "rpds_py-0.16.2-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:358dafc89ce3894c7f486c615ba914609f38277ef67f566abc4c854d23b997fa"},
+ {file = "rpds_py-0.16.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cad0f59ee3dc35526039f4bc23642d52d5f6616b5f687d846bfc6d0d6d486db0"},
+ {file = "rpds_py-0.16.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cffa76b385dfe1e38527662a302b19ffb0e7f5cf7dd5e89186d2c94a22dd9d0c"},
+ {file = "rpds_py-0.16.2-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:83640a5d7cd3bff694747d50436b8b541b5b9b9782b0c8c1688931d6ee1a1f2d"},
+ {file = "rpds_py-0.16.2-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:ed99b4f7179d2111702020fd7d156e88acd533f5a7d3971353e568b6051d5c97"},
+ {file = "rpds_py-0.16.2-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:4022b9dc620e14f30201a8a73898a873c8e910cb642bcd2f3411123bc527f6ac"},
+ {file = "rpds_py-0.16.2.tar.gz", hash = "sha256:781ef8bfc091b19960fc0142a23aedadafa826bc32b433fdfe6fd7f964d7ef44"},
+]
+
[[package]]
name = "s3transfer"
-version = "0.6.1"
+version = "0.8.2"
description = "An Amazon S3 Transfer Manager"
optional = true
python-versions = ">= 3.7"
files = [
- {file = "s3transfer-0.6.1-py3-none-any.whl", hash = "sha256:3c0da2d074bf35d6870ef157158641178a4204a6e689e82546083e31e0311346"},
- {file = "s3transfer-0.6.1.tar.gz", hash = "sha256:640bb492711f4c0c0905e1f62b6aaeb771881935ad27884852411f8e9cacbca9"},
+ {file = "s3transfer-0.8.2-py3-none-any.whl", hash = "sha256:c9e56cbe88b28d8e197cf841f1f0c130f246595e77ae5b5a05b69fe7cb83de76"},
+ {file = "s3transfer-0.8.2.tar.gz", hash = "sha256:368ac6876a9e9ed91f6bc86581e319be08188dc60d50e0d56308ed5765446283"},
]
[package.dependencies]
-botocore = ">=1.12.36,<2.0a.0"
+botocore = ">=1.33.2,<2.0a.0"
[package.extras]
-crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"]
+crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"]
[[package]]
name = "setuptools"
-version = "68.0.0"
+version = "67.8.0"
description = "Easily download, build, install, upgrade, and uninstall Python packages"
optional = false
python-versions = ">=3.7"
files = [
- {file = "setuptools-68.0.0-py3-none-any.whl", hash = "sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f"},
- {file = "setuptools-68.0.0.tar.gz", hash = "sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235"},
+ {file = "setuptools-67.8.0-py3-none-any.whl", hash = "sha256:5df61bf30bb10c6f756eb19e7c9f3b473051f48db77fddbe06ff2ca307df9a6f"},
+ {file = "setuptools-67.8.0.tar.gz", hash = "sha256:62642358adc77ffa87233bc4d2354c4b2682d214048f500964dbe760ccedf102"},
]
[package.extras]
@@ -1934,98 +2185,122 @@ docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-g
testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
+[[package]]
+name = "simpleeval"
+version = "0.9.13"
+description = "A simple, safe single expression evaluator library."
+optional = false
+python-versions = "*"
+files = [
+ {file = "simpleeval-0.9.13-py2.py3-none-any.whl", hash = "sha256:22a2701a5006e4188d125d34accf2405c2c37c93f6b346f2484b6422415ae54a"},
+ {file = "simpleeval-0.9.13.tar.gz", hash = "sha256:4a30f9cc01825fe4c719c785e3762623e350c4840d5e6855c2a8496baaa65fac"},
+]
+
[[package]]
name = "simplejson"
-version = "3.19.1"
+version = "3.19.2"
description = "Simple, fast, extensible JSON encoder/decoder for Python"
optional = false
python-versions = ">=2.5, !=3.0.*, !=3.1.*, !=3.2.*"
files = [
- {file = "simplejson-3.19.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:412e58997a30c5deb8cab5858b8e2e5b40ca007079f7010ee74565cc13d19665"},
- {file = "simplejson-3.19.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e765b1f47293dedf77946f0427e03ee45def2862edacd8868c6cf9ab97c8afbd"},
- {file = "simplejson-3.19.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:3231100edee292da78948fa0a77dee4e5a94a0a60bcba9ed7a9dc77f4d4bb11e"},
- {file = "simplejson-3.19.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:081ea6305b3b5e84ae7417e7f45956db5ea3872ec497a584ec86c3260cda049e"},
- {file = "simplejson-3.19.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:f253edf694ce836631b350d758d00a8c4011243d58318fbfbe0dd54a6a839ab4"},
- {file = "simplejson-3.19.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:5db86bb82034e055257c8e45228ca3dbce85e38d7bfa84fa7b2838e032a3219c"},
- {file = "simplejson-3.19.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:69a8b10a4f81548bc1e06ded0c4a6c9042c0be0d947c53c1ed89703f7e613950"},
- {file = "simplejson-3.19.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:58ee5e24d6863b22194020eb62673cf8cc69945fcad6b283919490f6e359f7c5"},
- {file = "simplejson-3.19.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:73d0904c2471f317386d4ae5c665b16b5c50ab4f3ee7fd3d3b7651e564ad74b1"},
- {file = "simplejson-3.19.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:66d780047c31ff316ee305c3f7550f352d87257c756413632303fc59fef19eac"},
- {file = "simplejson-3.19.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cd4d50a27b065447c9c399f0bf0a993bd0e6308db8bbbfbc3ea03b41c145775a"},
- {file = "simplejson-3.19.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0c16ec6a67a5f66ab004190829eeede01c633936375edcad7cbf06d3241e5865"},
- {file = "simplejson-3.19.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17a963e8dd4d81061cc05b627677c1f6a12e81345111fbdc5708c9f088d752c9"},
- {file = "simplejson-3.19.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7e78d79b10aa92f40f54178ada2b635c960d24fc6141856b926d82f67e56d169"},
- {file = "simplejson-3.19.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ad071cd84a636195f35fa71de2186d717db775f94f985232775794d09f8d9061"},
- {file = "simplejson-3.19.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e7c70f19405e5f99168077b785fe15fcb5f9b3c0b70b0b5c2757ce294922c8c"},
- {file = "simplejson-3.19.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:54fca2b26bcd1c403146fd9461d1da76199442297160721b1d63def2a1b17799"},
- {file = "simplejson-3.19.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:48600a6e0032bed17c20319d91775f1797d39953ccfd68c27f83c8d7fc3b32cb"},
- {file = "simplejson-3.19.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:93f5ac30607157a0b2579af59a065bcfaa7fadeb4875bf927a8f8b6739c8d910"},
- {file = "simplejson-3.19.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b79642a599740603ca86cf9df54f57a2013c47e1dd4dd2ae4769af0a6816900"},
- {file = "simplejson-3.19.1-cp310-cp310-win32.whl", hash = "sha256:d9f2c27f18a0b94107d57294aab3d06d6046ea843ed4a45cae8bd45756749f3a"},
- {file = "simplejson-3.19.1-cp310-cp310-win_amd64.whl", hash = "sha256:5673d27806085d2a413b3be5f85fad6fca4b7ffd31cfe510bbe65eea52fff571"},
- {file = "simplejson-3.19.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:79c748aa61fd8098d0472e776743de20fae2686edb80a24f0f6593a77f74fe86"},
- {file = "simplejson-3.19.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:390f4a8ca61d90bcf806c3ad644e05fa5890f5b9a72abdd4ca8430cdc1e386fa"},
- {file = "simplejson-3.19.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d61482b5d18181e6bb4810b4a6a24c63a490c3a20e9fbd7876639653e2b30a1a"},
- {file = "simplejson-3.19.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2541fdb7467ef9bfad1f55b6c52e8ea52b3ce4a0027d37aff094190a955daa9d"},
- {file = "simplejson-3.19.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46133bc7dd45c9953e6ee4852e3de3d5a9a4a03b068bd238935a5c72f0a1ce34"},
- {file = "simplejson-3.19.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f96def94576f857abf58e031ce881b5a3fc25cbec64b2bc4824824a8a4367af9"},
- {file = "simplejson-3.19.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f14ecca970d825df0d29d5c6736ff27999ee7bdf5510e807f7ad8845f7760ce"},
- {file = "simplejson-3.19.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:66389b6b6ee46a94a493a933a26008a1bae0cfadeca176933e7ff6556c0ce998"},
- {file = "simplejson-3.19.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:22b867205cd258050c2625325fdd9a65f917a5aff22a23387e245ecae4098e78"},
- {file = "simplejson-3.19.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c39fa911e4302eb79c804b221ddec775c3da08833c0a9120041dd322789824de"},
- {file = "simplejson-3.19.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:65dafe413b15e8895ad42e49210b74a955c9ae65564952b0243a18fb35b986cc"},
- {file = "simplejson-3.19.1-cp311-cp311-win32.whl", hash = "sha256:f05d05d99fce5537d8f7a0af6417a9afa9af3a6c4bb1ba7359c53b6257625fcb"},
- {file = "simplejson-3.19.1-cp311-cp311-win_amd64.whl", hash = "sha256:b46aaf0332a8a9c965310058cf3487d705bf672641d2c43a835625b326689cf4"},
- {file = "simplejson-3.19.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b438e5eaa474365f4faaeeef1ec3e8d5b4e7030706e3e3d6b5bee6049732e0e6"},
- {file = "simplejson-3.19.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa9d614a612ad02492f704fbac636f666fa89295a5d22b4facf2d665fc3b5ea9"},
- {file = "simplejson-3.19.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46e89f58e4bed107626edce1cf098da3664a336d01fc78fddcfb1f397f553d44"},
- {file = "simplejson-3.19.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96ade243fb6f3b57e7bd3b71e90c190cd0f93ec5dce6bf38734a73a2e5fa274f"},
- {file = "simplejson-3.19.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed18728b90758d171f0c66c475c24a443ede815cf3f1a91e907b0db0ebc6e508"},
- {file = "simplejson-3.19.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:6a561320485017ddfc21bd2ed5de2d70184f754f1c9b1947c55f8e2b0163a268"},
- {file = "simplejson-3.19.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:2098811cd241429c08b7fc5c9e41fcc3f59f27c2e8d1da2ccdcf6c8e340ab507"},
- {file = "simplejson-3.19.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:8f8d179393e6f0cf6c7c950576892ea6acbcea0a320838c61968ac7046f59228"},
- {file = "simplejson-3.19.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:eff87c68058374e45225089e4538c26329a13499bc0104b52b77f8428eed36b2"},
- {file = "simplejson-3.19.1-cp36-cp36m-win32.whl", hash = "sha256:d300773b93eed82f6da138fd1d081dc96fbe53d96000a85e41460fe07c8d8b33"},
- {file = "simplejson-3.19.1-cp36-cp36m-win_amd64.whl", hash = "sha256:37724c634f93e5caaca04458f267836eb9505d897ab3947b52f33b191bf344f3"},
- {file = "simplejson-3.19.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:74bf802debe68627227ddb665c067eb8c73aa68b2476369237adf55c1161b728"},
- {file = "simplejson-3.19.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70128fb92932524c89f373e17221cf9535d7d0c63794955cc3cd5868e19f5d38"},
- {file = "simplejson-3.19.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8090e75653ea7db75bc21fa5f7bcf5f7bdf64ea258cbbac45c7065f6324f1b50"},
- {file = "simplejson-3.19.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a755f7bfc8adcb94887710dc70cc12a69a454120c6adcc6f251c3f7b46ee6aac"},
- {file = "simplejson-3.19.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ccb2c1877bc9b25bc4f4687169caa925ffda605d7569c40e8e95186e9a5e58b"},
- {file = "simplejson-3.19.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:919bc5aa4d8094cf8f1371ea9119e5d952f741dc4162810ab714aec948a23fe5"},
- {file = "simplejson-3.19.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:e333c5b62e93949f5ac27e6758ba53ef6ee4f93e36cc977fe2e3df85c02f6dc4"},
- {file = "simplejson-3.19.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3a4480e348000d89cf501b5606415f4d328484bbb431146c2971123d49fd8430"},
- {file = "simplejson-3.19.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:cb502cde018e93e75dc8fc7bb2d93477ce4f3ac10369f48866c61b5e031db1fd"},
- {file = "simplejson-3.19.1-cp37-cp37m-win32.whl", hash = "sha256:f41915a4e1f059dfad614b187bc06021fefb5fc5255bfe63abf8247d2f7a646a"},
- {file = "simplejson-3.19.1-cp37-cp37m-win_amd64.whl", hash = "sha256:3844305bc33d52c4975da07f75b480e17af3558c0d13085eaa6cc2f32882ccf7"},
- {file = "simplejson-3.19.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:1cb19eacb77adc5a9720244d8d0b5507421d117c7ed4f2f9461424a1829e0ceb"},
- {file = "simplejson-3.19.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:926957b278de22797bfc2f004b15297013843b595b3cd7ecd9e37ccb5fad0b72"},
- {file = "simplejson-3.19.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b0e9a5e66969f7a47dc500e3dba8edc3b45d4eb31efb855c8647700a3493dd8a"},
- {file = "simplejson-3.19.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79d46e7e33c3a4ef853a1307b2032cfb7220e1a079d0c65488fbd7118f44935a"},
- {file = "simplejson-3.19.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:344a5093b71c1b370968d0fbd14d55c9413cb6f0355fdefeb4a322d602d21776"},
- {file = "simplejson-3.19.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23fbb7b46d44ed7cbcda689295862851105c7594ae5875dce2a70eeaa498ff86"},
- {file = "simplejson-3.19.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d3025e7e9ddb48813aec2974e1a7e68e63eac911dd5e0a9568775de107ac79a"},
- {file = "simplejson-3.19.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:87b190e6ceec286219bd6b6f13547ca433f977d4600b4e81739e9ac23b5b9ba9"},
- {file = "simplejson-3.19.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:dc935d8322ba9bc7b84f99f40f111809b0473df167bf5b93b89fb719d2c4892b"},
- {file = "simplejson-3.19.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3b652579c21af73879d99c8072c31476788c8c26b5565687fd9db154070d852a"},
- {file = "simplejson-3.19.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6aa7ca03f25b23b01629b1c7f78e1cd826a66bfb8809f8977a3635be2ec48f1a"},
- {file = "simplejson-3.19.1-cp38-cp38-win32.whl", hash = "sha256:08be5a241fdf67a8e05ac7edbd49b07b638ebe4846b560673e196b2a25c94b92"},
- {file = "simplejson-3.19.1-cp38-cp38-win_amd64.whl", hash = "sha256:ca56a6c8c8236d6fe19abb67ef08d76f3c3f46712c49a3b6a5352b6e43e8855f"},
- {file = "simplejson-3.19.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6424d8229ba62e5dbbc377908cfee9b2edf25abd63b855c21f12ac596cd18e41"},
- {file = "simplejson-3.19.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:547ea86ca408a6735335c881a2e6208851027f5bfd678d8f2c92a0f02c7e7330"},
- {file = "simplejson-3.19.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:889328873c35cb0b2b4c83cbb83ec52efee5a05e75002e2c0c46c4e42790e83c"},
- {file = "simplejson-3.19.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44cdb4e544134f305b033ad79ae5c6b9a32e7c58b46d9f55a64e2a883fbbba01"},
- {file = "simplejson-3.19.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc2b3f06430cbd4fac0dae5b2974d2bf14f71b415fb6de017f498950da8159b1"},
- {file = "simplejson-3.19.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d125e754d26c0298715bdc3f8a03a0658ecbe72330be247f4b328d229d8cf67f"},
- {file = "simplejson-3.19.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:476c8033abed7b1fd8db62a7600bf18501ce701c1a71179e4ce04ac92c1c5c3c"},
- {file = "simplejson-3.19.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:199a0bcd792811c252d71e3eabb3d4a132b3e85e43ebd93bfd053d5b59a7e78b"},
- {file = "simplejson-3.19.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a79b439a6a77649bb8e2f2644e6c9cc0adb720fc55bed63546edea86e1d5c6c8"},
- {file = "simplejson-3.19.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:203412745fed916fc04566ecef3f2b6c872b52f1e7fb3a6a84451b800fb508c1"},
- {file = "simplejson-3.19.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5ca922c61d87b4c38f37aa706520328ffe22d7ac1553ef1cadc73f053a673553"},
- {file = "simplejson-3.19.1-cp39-cp39-win32.whl", hash = "sha256:3e0902c278243d6f7223ba3e6c5738614c971fd9a887fff8feaa8dcf7249c8d4"},
- {file = "simplejson-3.19.1-cp39-cp39-win_amd64.whl", hash = "sha256:d396b610e77b0c438846607cd56418bfc194973b9886550a98fd6724e8c6cfec"},
- {file = "simplejson-3.19.1-py3-none-any.whl", hash = "sha256:4710806eb75e87919b858af0cba4ffedc01b463edc3982ded7b55143f39e41e1"},
- {file = "simplejson-3.19.1.tar.gz", hash = "sha256:6277f60848a7d8319d27d2be767a7546bc965535b28070e310b3a9af90604a4c"},
+ {file = "simplejson-3.19.2-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3471e95110dcaf901db16063b2e40fb394f8a9e99b3fe9ee3acc6f6ef72183a2"},
+ {file = "simplejson-3.19.2-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:3194cd0d2c959062b94094c0a9f8780ffd38417a5322450a0db0ca1a23e7fbd2"},
+ {file = "simplejson-3.19.2-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:8a390e56a7963e3946ff2049ee1eb218380e87c8a0e7608f7f8790ba19390867"},
+ {file = "simplejson-3.19.2-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:1537b3dd62d8aae644f3518c407aa8469e3fd0f179cdf86c5992792713ed717a"},
+ {file = "simplejson-3.19.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:a8617625369d2d03766413bff9e64310feafc9fc4f0ad2b902136f1a5cd8c6b0"},
+ {file = "simplejson-3.19.2-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:2c433a412e96afb9a3ce36fa96c8e61a757af53e9c9192c97392f72871e18e69"},
+ {file = "simplejson-3.19.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:f1c70249b15e4ce1a7d5340c97670a95f305ca79f376887759b43bb33288c973"},
+ {file = "simplejson-3.19.2-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:287e39ba24e141b046812c880f4619d0ca9e617235d74abc27267194fc0c7835"},
+ {file = "simplejson-3.19.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:6f0a0b41dd05eefab547576bed0cf066595f3b20b083956b1405a6f17d1be6ad"},
+ {file = "simplejson-3.19.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2f98d918f7f3aaf4b91f2b08c0c92b1774aea113334f7cde4fe40e777114dbe6"},
+ {file = "simplejson-3.19.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7d74beca677623481810c7052926365d5f07393c72cbf62d6cce29991b676402"},
+ {file = "simplejson-3.19.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7f2398361508c560d0bf1773af19e9fe644e218f2a814a02210ac2c97ad70db0"},
+ {file = "simplejson-3.19.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ad331349b0b9ca6da86064a3599c425c7a21cd41616e175ddba0866da32df48"},
+ {file = "simplejson-3.19.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:332c848f02d71a649272b3f1feccacb7e4f7e6de4a2e6dc70a32645326f3d428"},
+ {file = "simplejson-3.19.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25785d038281cd106c0d91a68b9930049b6464288cea59ba95b35ee37c2d23a5"},
+ {file = "simplejson-3.19.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18955c1da6fc39d957adfa346f75226246b6569e096ac9e40f67d102278c3bcb"},
+ {file = "simplejson-3.19.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:11cc3afd8160d44582543838b7e4f9aa5e97865322844b75d51bf4e0e413bb3e"},
+ {file = "simplejson-3.19.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b01fda3e95d07a6148702a641e5e293b6da7863f8bc9b967f62db9461330562c"},
+ {file = "simplejson-3.19.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:778331444917108fa8441f59af45886270d33ce8a23bfc4f9b192c0b2ecef1b3"},
+ {file = "simplejson-3.19.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9eb117db8d7ed733a7317c4215c35993b815bf6aeab67523f1f11e108c040672"},
+ {file = "simplejson-3.19.2-cp310-cp310-win32.whl", hash = "sha256:39b6d79f5cbfa3eb63a869639cfacf7c41d753c64f7801efc72692c1b2637ac7"},
+ {file = "simplejson-3.19.2-cp310-cp310-win_amd64.whl", hash = "sha256:5675e9d8eeef0aa06093c1ff898413ade042d73dc920a03e8cea2fb68f62445a"},
+ {file = "simplejson-3.19.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ed628c1431100b0b65387419551e822987396bee3c088a15d68446d92f554e0c"},
+ {file = "simplejson-3.19.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:adcb3332979cbc941b8fff07181f06d2b608625edc0a4d8bc3ffc0be414ad0c4"},
+ {file = "simplejson-3.19.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:08889f2f597ae965284d7b52a5c3928653a9406d88c93e3161180f0abc2433ba"},
+ {file = "simplejson-3.19.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef7938a78447174e2616be223f496ddccdbf7854f7bf2ce716dbccd958cc7d13"},
+ {file = "simplejson-3.19.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a970a2e6d5281d56cacf3dc82081c95c1f4da5a559e52469287457811db6a79b"},
+ {file = "simplejson-3.19.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:554313db34d63eac3b3f42986aa9efddd1a481169c12b7be1e7512edebff8eaf"},
+ {file = "simplejson-3.19.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d36081c0b1c12ea0ed62c202046dca11438bee48dd5240b7c8de8da62c620e9"},
+ {file = "simplejson-3.19.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a3cd18e03b0ee54ea4319cdcce48357719ea487b53f92a469ba8ca8e39df285e"},
+ {file = "simplejson-3.19.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:66e5dc13bfb17cd6ee764fc96ccafd6e405daa846a42baab81f4c60e15650414"},
+ {file = "simplejson-3.19.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:972a7833d4a1fcf7a711c939e315721a88b988553fc770a5b6a5a64bd6ebeba3"},
+ {file = "simplejson-3.19.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3e74355cb47e0cd399ead3477e29e2f50e1540952c22fb3504dda0184fc9819f"},
+ {file = "simplejson-3.19.2-cp311-cp311-win32.whl", hash = "sha256:1dd4f692304854352c3e396e9b5f0a9c9e666868dd0bdc784e2ac4c93092d87b"},
+ {file = "simplejson-3.19.2-cp311-cp311-win_amd64.whl", hash = "sha256:9300aee2a8b5992d0f4293d88deb59c218989833e3396c824b69ba330d04a589"},
+ {file = "simplejson-3.19.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b8d940fd28eb34a7084877747a60873956893e377f15a32ad445fe66c972c3b8"},
+ {file = "simplejson-3.19.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4969d974d9db826a2c07671273e6b27bc48e940738d768fa8f33b577f0978378"},
+ {file = "simplejson-3.19.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c594642d6b13d225e10df5c16ee15b3398e21a35ecd6aee824f107a625690374"},
+ {file = "simplejson-3.19.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2f5a398b5e77bb01b23d92872255e1bcb3c0c719a3be40b8df146570fe7781a"},
+ {file = "simplejson-3.19.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:176a1b524a3bd3314ed47029a86d02d5a95cc0bee15bd3063a1e1ec62b947de6"},
+ {file = "simplejson-3.19.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3c7363a8cb8c5238878ec96c5eb0fc5ca2cb11fc0c7d2379863d342c6ee367a"},
+ {file = "simplejson-3.19.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:346820ae96aa90c7d52653539a57766f10f33dd4be609206c001432b59ddf89f"},
+ {file = "simplejson-3.19.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de9a2792612ec6def556d1dc621fd6b2073aff015d64fba9f3e53349ad292734"},
+ {file = "simplejson-3.19.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1c768e7584c45094dca4b334af361e43b0aaa4844c04945ac7d43379eeda9bc2"},
+ {file = "simplejson-3.19.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:9652e59c022e62a5b58a6f9948b104e5bb96d3b06940c6482588176f40f4914b"},
+ {file = "simplejson-3.19.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9c1a4393242e321e344213a90a1e3bf35d2f624aa8b8f6174d43e3c6b0e8f6eb"},
+ {file = "simplejson-3.19.2-cp312-cp312-win32.whl", hash = "sha256:7cb98be113911cb0ad09e5523d0e2a926c09a465c9abb0784c9269efe4f95917"},
+ {file = "simplejson-3.19.2-cp312-cp312-win_amd64.whl", hash = "sha256:6779105d2fcb7fcf794a6a2a233787f6bbd4731227333a072d8513b252ed374f"},
+ {file = "simplejson-3.19.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:061e81ea2d62671fa9dea2c2bfbc1eec2617ae7651e366c7b4a2baf0a8c72cae"},
+ {file = "simplejson-3.19.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4280e460e51f86ad76dc456acdbfa9513bdf329556ffc8c49e0200878ca57816"},
+ {file = "simplejson-3.19.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11c39fbc4280d7420684494373b7c5904fa72a2b48ef543a56c2d412999c9e5d"},
+ {file = "simplejson-3.19.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bccb3e88ec26ffa90f72229f983d3a5d1155e41a1171190fa723d4135523585b"},
+ {file = "simplejson-3.19.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bb5b50dc6dd671eb46a605a3e2eb98deb4a9af787a08fcdddabe5d824bb9664"},
+ {file = "simplejson-3.19.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:d94245caa3c61f760c4ce4953cfa76e7739b6f2cbfc94cc46fff6c050c2390c5"},
+ {file = "simplejson-3.19.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:d0e5ffc763678d48ecc8da836f2ae2dd1b6eb2d27a48671066f91694e575173c"},
+ {file = "simplejson-3.19.2-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:d222a9ed082cd9f38b58923775152003765016342a12f08f8c123bf893461f28"},
+ {file = "simplejson-3.19.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8434dcdd347459f9fd9c526117c01fe7ca7b016b6008dddc3c13471098f4f0dc"},
+ {file = "simplejson-3.19.2-cp36-cp36m-win32.whl", hash = "sha256:c9ac1c2678abf9270e7228133e5b77c6c3c930ad33a3c1dfbdd76ff2c33b7b50"},
+ {file = "simplejson-3.19.2-cp36-cp36m-win_amd64.whl", hash = "sha256:92c4a4a2b1f4846cd4364855cbac83efc48ff5a7d7c06ba014c792dd96483f6f"},
+ {file = "simplejson-3.19.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0d551dc931638e2102b8549836a1632e6e7cf620af3d093a7456aa642bff601d"},
+ {file = "simplejson-3.19.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:73a8a4653f2e809049999d63530180d7b5a344b23a793502413ad1ecea9a0290"},
+ {file = "simplejson-3.19.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:40847f617287a38623507d08cbcb75d51cf9d4f9551dd6321df40215128325a3"},
+ {file = "simplejson-3.19.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be893258d5b68dd3a8cba8deb35dc6411db844a9d35268a8d3793b9d9a256f80"},
+ {file = "simplejson-3.19.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9eb3cff1b7d71aa50c89a0536f469cb8d6dcdd585d8f14fb8500d822f3bdee4"},
+ {file = "simplejson-3.19.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d0f402e787e6e7ee7876c8b05e2fe6464820d9f35ba3f172e95b5f8b699f6c7f"},
+ {file = "simplejson-3.19.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:fbbcc6b0639aa09b9649f36f1bcb347b19403fe44109948392fbb5ea69e48c3e"},
+ {file = "simplejson-3.19.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:2fc697be37585eded0c8581c4788fcfac0e3f84ca635b73a5bf360e28c8ea1a2"},
+ {file = "simplejson-3.19.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b0a3eb6dd39cce23801a50c01a0976971498da49bc8a0590ce311492b82c44b"},
+ {file = "simplejson-3.19.2-cp37-cp37m-win32.whl", hash = "sha256:49f9da0d6cd17b600a178439d7d2d57c5ef01f816b1e0e875e8e8b3b42db2693"},
+ {file = "simplejson-3.19.2-cp37-cp37m-win_amd64.whl", hash = "sha256:c87c22bd6a987aca976e3d3e23806d17f65426191db36d40da4ae16a6a494cbc"},
+ {file = "simplejson-3.19.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9e4c166f743bb42c5fcc60760fb1c3623e8fda94f6619534217b083e08644b46"},
+ {file = "simplejson-3.19.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0a48679310e1dd5c9f03481799311a65d343748fe86850b7fb41df4e2c00c087"},
+ {file = "simplejson-3.19.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c0521e0f07cb56415fdb3aae0bbd8701eb31a9dfef47bb57206075a0584ab2a2"},
+ {file = "simplejson-3.19.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d2d5119b1d7a1ed286b8af37357116072fc96700bce3bec5bb81b2e7057ab41"},
+ {file = "simplejson-3.19.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2c1467d939932901a97ba4f979e8f2642415fcf02ea12f53a4e3206c9c03bc17"},
+ {file = "simplejson-3.19.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49aaf4546f6023c44d7e7136be84a03a4237f0b2b5fb2b17c3e3770a758fc1a0"},
+ {file = "simplejson-3.19.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60848ab779195b72382841fc3fa4f71698a98d9589b0a081a9399904487b5832"},
+ {file = "simplejson-3.19.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0436a70d8eb42bea4fe1a1c32d371d9bb3b62c637969cb33970ad624d5a3336a"},
+ {file = "simplejson-3.19.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:49e0e3faf3070abdf71a5c80a97c1afc059b4f45a5aa62de0c2ca0444b51669b"},
+ {file = "simplejson-3.19.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ff836cd4041e16003549449cc0a5e372f6b6f871eb89007ab0ee18fb2800fded"},
+ {file = "simplejson-3.19.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3848427b65e31bea2c11f521b6fc7a3145d6e501a1038529da2391aff5970f2f"},
+ {file = "simplejson-3.19.2-cp38-cp38-win32.whl", hash = "sha256:3f39bb1f6e620f3e158c8b2eaf1b3e3e54408baca96a02fe891794705e788637"},
+ {file = "simplejson-3.19.2-cp38-cp38-win_amd64.whl", hash = "sha256:0405984f3ec1d3f8777c4adc33eac7ab7a3e629f3b1c05fdded63acc7cf01137"},
+ {file = "simplejson-3.19.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:445a96543948c011a3a47c8e0f9d61e9785df2544ea5be5ab3bc2be4bd8a2565"},
+ {file = "simplejson-3.19.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4a8c3cc4f9dfc33220246760358c8265dad6e1104f25f0077bbca692d616d358"},
+ {file = "simplejson-3.19.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:af9c7e6669c4d0ad7362f79cb2ab6784d71147503e62b57e3d95c4a0f222c01c"},
+ {file = "simplejson-3.19.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:064300a4ea17d1cd9ea1706aa0590dcb3be81112aac30233823ee494f02cb78a"},
+ {file = "simplejson-3.19.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9453419ea2ab9b21d925d0fd7e3a132a178a191881fab4169b6f96e118cc25bb"},
+ {file = "simplejson-3.19.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e038c615b3906df4c3be8db16b3e24821d26c55177638ea47b3f8f73615111c"},
+ {file = "simplejson-3.19.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16ca9c90da4b1f50f089e14485db8c20cbfff2d55424062791a7392b5a9b3ff9"},
+ {file = "simplejson-3.19.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1018bd0d70ce85f165185d2227c71e3b1e446186f9fa9f971b69eee223e1e3cd"},
+ {file = "simplejson-3.19.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e8dd53a8706b15bc0e34f00e6150fbefb35d2fd9235d095b4f83b3c5ed4fa11d"},
+ {file = "simplejson-3.19.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:2d022b14d7758bfb98405672953fe5c202ea8a9ccf9f6713c5bd0718eba286fd"},
+ {file = "simplejson-3.19.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:febffa5b1eda6622d44b245b0685aff6fb555ce0ed734e2d7b1c3acd018a2cff"},
+ {file = "simplejson-3.19.2-cp39-cp39-win32.whl", hash = "sha256:4edcd0bf70087b244ba77038db23cd98a1ace2f91b4a3ecef22036314d77ac23"},
+ {file = "simplejson-3.19.2-cp39-cp39-win_amd64.whl", hash = "sha256:aad7405c033d32c751d98d3a65801e2797ae77fac284a539f6c3a3e13005edc4"},
+ {file = "simplejson-3.19.2-py3-none-any.whl", hash = "sha256:bcedf4cae0d47839fee7de344f96b5694ca53c786f28b5f773d4f0b265a159eb"},
+ {file = "simplejson-3.19.2.tar.gz", hash = "sha256:9eb442a2442ce417801c912df68e1f6ccfcd41577ae7274953ab3ad24ef7d82c"},
]
[[package]]
@@ -2043,7 +2318,7 @@ files = [
name = "snowballstemmer"
version = "2.2.0"
description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms."
-optional = false
+optional = true
python-versions = "*"
files = [
{file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"},
@@ -2117,13 +2392,13 @@ test = ["pytest", "pytest-cov"]
[[package]]
name = "sphinx-basic-ng"
-version = "1.0.0b1"
+version = "1.0.0b2"
description = "A modern skeleton for Sphinx themes."
optional = true
python-versions = ">=3.7"
files = [
- {file = "sphinx_basic_ng-1.0.0b1-py3-none-any.whl", hash = "sha256:ade597a3029c7865b24ad0eda88318766bcc2f9f4cef60df7e28126fde94db2a"},
- {file = "sphinx_basic_ng-1.0.0b1.tar.gz", hash = "sha256:89374bd3ccd9452a301786781e28c8718e99960f2d4f411845ea75fc7bb5a9b0"},
+ {file = "sphinx_basic_ng-1.0.0b2-py3-none-any.whl", hash = "sha256:eb09aedbabfb650607e9b4b68c9d240b90b1e1be221d6ad71d61c52e29f7932b"},
+ {file = "sphinx_basic_ng-1.0.0b2.tar.gz", hash = "sha256:9ec55a47c90c8c002b5960c57492ec3021f5193cb26cebc2dc4ea226848651c9"},
]
[package.dependencies]
@@ -2168,15 +2443,33 @@ sphinx = ">=3"
doc = ["furo", "myst-parser"]
test = ["pytest", "pytest-cov", "pytest-xdist"]
+[[package]]
+name = "sphinx-notfound-page"
+version = "1.0.0"
+description = "Sphinx extension to build a 404 page with absolute URLs"
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "sphinx_notfound_page-1.0.0-py3-none-any.whl", hash = "sha256:40a5741a6b07245a08fe55dbbd603ad6719e191b1419ab2e5337c706ebd16554"},
+ {file = "sphinx_notfound_page-1.0.0.tar.gz", hash = "sha256:14cd388956de5cdf8710ab4ff31776ef8d85759c4f46014ee30f368e83bd3a3b"},
+]
+
+[package.dependencies]
+sphinx = ">=5"
+
+[package.extras]
+doc = ["sphinx-autoapi", "sphinx-rtd-theme", "sphinx-tabs", "sphinxemoji"]
+test = ["tox"]
+
[[package]]
name = "sphinx-reredirects"
-version = "0.1.2"
+version = "0.1.3"
description = "Handles redirects for moved pages in Sphinx documentation projects"
optional = true
python-versions = ">=3.5"
files = [
- {file = "sphinx_reredirects-0.1.2-py3-none-any.whl", hash = "sha256:3a22161771aadd448bb608a4fe7277252182a337af53c18372b7104531d71489"},
- {file = "sphinx_reredirects-0.1.2.tar.gz", hash = "sha256:a0e7213304759b01edc22f032f1715a1c61176fc8f167164e7a52b9feec9ac64"},
+ {file = "sphinx_reredirects-0.1.3-py3-none-any.whl", hash = "sha256:02c53437c467cf9ed89863eff3addedc01d129624b2f03ab6302518fb77a2c89"},
+ {file = "sphinx_reredirects-0.1.3.tar.gz", hash = "sha256:56e222d285f76c944fd370f36ad3a1a66103a88b552e97d3d24a622bb9465de8"},
]
[package.dependencies]
@@ -2273,117 +2566,225 @@ test = ["pytest"]
[[package]]
name = "sqlalchemy"
-version = "1.4.48"
+version = "2.0.25"
description = "Database Abstraction Library"
optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
-files = [
- {file = "SQLAlchemy-1.4.48-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:4bac3aa3c3d8bc7408097e6fe8bf983caa6e9491c5d2e2488cfcfd8106f13b6a"},
- {file = "SQLAlchemy-1.4.48-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:dbcae0e528d755f4522cad5842f0942e54b578d79f21a692c44d91352ea6d64e"},
- {file = "SQLAlchemy-1.4.48-cp27-cp27m-win32.whl", hash = "sha256:cbbe8b8bffb199b225d2fe3804421b7b43a0d49983f81dc654d0431d2f855543"},
- {file = "SQLAlchemy-1.4.48-cp27-cp27m-win_amd64.whl", hash = "sha256:627e04a5d54bd50628fc8734d5fc6df2a1aa5962f219c44aad50b00a6cdcf965"},
- {file = "SQLAlchemy-1.4.48-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9af1db7a287ef86e0f5cd990b38da6bd9328de739d17e8864f1817710da2d217"},
- {file = "SQLAlchemy-1.4.48-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:ce7915eecc9c14a93b73f4e1c9d779ca43e955b43ddf1e21df154184f39748e5"},
- {file = "SQLAlchemy-1.4.48-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5381ddd09a99638f429f4cbe1b71b025bed318f6a7b23e11d65f3eed5e181c33"},
- {file = "SQLAlchemy-1.4.48-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:87609f6d4e81a941a17e61a4c19fee57f795e96f834c4f0a30cee725fc3f81d9"},
- {file = "SQLAlchemy-1.4.48-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb0808ad34167f394fea21bd4587fc62f3bd81bba232a1e7fbdfa17e6cfa7cd7"},
- {file = "SQLAlchemy-1.4.48-cp310-cp310-win32.whl", hash = "sha256:d53cd8bc582da5c1c8c86b6acc4ef42e20985c57d0ebc906445989df566c5603"},
- {file = "SQLAlchemy-1.4.48-cp310-cp310-win_amd64.whl", hash = "sha256:4355e5915844afdc5cf22ec29fba1010166e35dd94a21305f49020022167556b"},
- {file = "SQLAlchemy-1.4.48-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:066c2b0413e8cb980e6d46bf9d35ca83be81c20af688fedaef01450b06e4aa5e"},
- {file = "SQLAlchemy-1.4.48-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c99bf13e07140601d111a7c6f1fc1519914dd4e5228315bbda255e08412f61a4"},
- {file = "SQLAlchemy-1.4.48-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ee26276f12614d47cc07bc85490a70f559cba965fb178b1c45d46ffa8d73fda"},
- {file = "SQLAlchemy-1.4.48-cp311-cp311-win32.whl", hash = "sha256:49c312bcff4728bffc6fb5e5318b8020ed5c8b958a06800f91859fe9633ca20e"},
- {file = "SQLAlchemy-1.4.48-cp311-cp311-win_amd64.whl", hash = "sha256:cef2e2abc06eab187a533ec3e1067a71d7bbec69e582401afdf6d8cad4ba3515"},
- {file = "SQLAlchemy-1.4.48-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:3509159e050bd6d24189ec7af373359f07aed690db91909c131e5068176c5a5d"},
- {file = "SQLAlchemy-1.4.48-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fc2ab4d9f6d9218a5caa4121bdcf1125303482a1cdcfcdbd8567be8518969c0"},
- {file = "SQLAlchemy-1.4.48-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e1ddbbcef9bcedaa370c03771ebec7e39e3944782bef49e69430383c376a250b"},
- {file = "SQLAlchemy-1.4.48-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f82d8efea1ca92b24f51d3aea1a82897ed2409868a0af04247c8c1e4fef5890"},
- {file = "SQLAlchemy-1.4.48-cp36-cp36m-win32.whl", hash = "sha256:e3e98d4907805b07743b583a99ecc58bf8807ecb6985576d82d5e8ae103b5272"},
- {file = "SQLAlchemy-1.4.48-cp36-cp36m-win_amd64.whl", hash = "sha256:25887b4f716e085a1c5162f130b852f84e18d2633942c8ca40dfb8519367c14f"},
- {file = "SQLAlchemy-1.4.48-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:0817c181271b0ce5df1aa20949f0a9e2426830fed5ecdcc8db449618f12c2730"},
- {file = "SQLAlchemy-1.4.48-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe1dd2562313dd9fe1778ed56739ad5d9aae10f9f43d9f4cf81d65b0c85168bb"},
- {file = "SQLAlchemy-1.4.48-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:68413aead943883b341b2b77acd7a7fe2377c34d82e64d1840860247cec7ff7c"},
- {file = "SQLAlchemy-1.4.48-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fbde5642104ac6e95f96e8ad6d18d9382aa20672008cf26068fe36f3004491df"},
- {file = "SQLAlchemy-1.4.48-cp37-cp37m-win32.whl", hash = "sha256:11c6b1de720f816c22d6ad3bbfa2f026f89c7b78a5c4ffafb220e0183956a92a"},
- {file = "SQLAlchemy-1.4.48-cp37-cp37m-win_amd64.whl", hash = "sha256:eb5464ee8d4bb6549d368b578e9529d3c43265007193597ddca71c1bae6174e6"},
- {file = "SQLAlchemy-1.4.48-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:92e6133cf337c42bfee03ca08c62ba0f2d9695618c8abc14a564f47503157be9"},
- {file = "SQLAlchemy-1.4.48-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44d29a3fc6d9c45962476b470a81983dd8add6ad26fdbfae6d463b509d5adcda"},
- {file = "SQLAlchemy-1.4.48-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:005e942b451cad5285015481ae4e557ff4154dde327840ba91b9ac379be3b6ce"},
- {file = "SQLAlchemy-1.4.48-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c8cfe951ed074ba5e708ed29c45397a95c4143255b0d022c7c8331a75ae61f3"},
- {file = "SQLAlchemy-1.4.48-cp38-cp38-win32.whl", hash = "sha256:2b9af65cc58726129d8414fc1a1a650dcdd594ba12e9c97909f1f57d48e393d3"},
- {file = "SQLAlchemy-1.4.48-cp38-cp38-win_amd64.whl", hash = "sha256:2b562e9d1e59be7833edf28b0968f156683d57cabd2137d8121806f38a9d58f4"},
- {file = "SQLAlchemy-1.4.48-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:a1fc046756cf2a37d7277c93278566ddf8be135c6a58397b4c940abf837011f4"},
- {file = "SQLAlchemy-1.4.48-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d9b55252d2ca42a09bcd10a697fa041e696def9dfab0b78c0aaea1485551a08"},
- {file = "SQLAlchemy-1.4.48-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6dab89874e72a9ab5462997846d4c760cdb957958be27b03b49cf0de5e5c327c"},
- {file = "SQLAlchemy-1.4.48-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1fd8b5ee5a3acc4371f820934b36f8109ce604ee73cc668c724abb054cebcb6e"},
- {file = "SQLAlchemy-1.4.48-cp39-cp39-win32.whl", hash = "sha256:eee09350fd538e29cfe3a496ec6f148504d2da40dbf52adefb0d2f8e4d38ccc4"},
- {file = "SQLAlchemy-1.4.48-cp39-cp39-win_amd64.whl", hash = "sha256:7ad2b0f6520ed5038e795cc2852eb5c1f20fa6831d73301ced4aafbe3a10e1f6"},
- {file = "SQLAlchemy-1.4.48.tar.gz", hash = "sha256:b47bc287096d989a0838ce96f7d8e966914a24da877ed41a7531d44b55cdb8df"},
+python-versions = ">=3.7"
+files = [
+ {file = "SQLAlchemy-2.0.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4344d059265cc8b1b1be351bfb88749294b87a8b2bbe21dfbe066c4199541ebd"},
+ {file = "SQLAlchemy-2.0.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6f9e2e59cbcc6ba1488404aad43de005d05ca56e069477b33ff74e91b6319735"},
+ {file = "SQLAlchemy-2.0.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84daa0a2055df9ca0f148a64fdde12ac635e30edbca80e87df9b3aaf419e144a"},
+ {file = "SQLAlchemy-2.0.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc8b7dabe8e67c4832891a5d322cec6d44ef02f432b4588390017f5cec186a84"},
+ {file = "SQLAlchemy-2.0.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f5693145220517b5f42393e07a6898acdfe820e136c98663b971906120549da5"},
+ {file = "SQLAlchemy-2.0.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:db854730a25db7c956423bb9fb4bdd1216c839a689bf9cc15fada0a7fb2f4570"},
+ {file = "SQLAlchemy-2.0.25-cp310-cp310-win32.whl", hash = "sha256:14a6f68e8fc96e5e8f5647ef6cda6250c780612a573d99e4d881581432ef1669"},
+ {file = "SQLAlchemy-2.0.25-cp310-cp310-win_amd64.whl", hash = "sha256:87f6e732bccd7dcf1741c00f1ecf33797383128bd1c90144ac8adc02cbb98643"},
+ {file = "SQLAlchemy-2.0.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:342d365988ba88ada8af320d43df4e0b13a694dbd75951f537b2d5e4cb5cd002"},
+ {file = "SQLAlchemy-2.0.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f37c0caf14b9e9b9e8f6dbc81bc56db06acb4363eba5a633167781a48ef036ed"},
+ {file = "SQLAlchemy-2.0.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa9373708763ef46782d10e950b49d0235bfe58facebd76917d3f5cbf5971aed"},
+ {file = "SQLAlchemy-2.0.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d24f571990c05f6b36a396218f251f3e0dda916e0c687ef6fdca5072743208f5"},
+ {file = "SQLAlchemy-2.0.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:75432b5b14dc2fff43c50435e248b45c7cdadef73388e5610852b95280ffd0e9"},
+ {file = "SQLAlchemy-2.0.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:884272dcd3ad97f47702965a0e902b540541890f468d24bd1d98bcfe41c3f018"},
+ {file = "SQLAlchemy-2.0.25-cp311-cp311-win32.whl", hash = "sha256:e607cdd99cbf9bb80391f54446b86e16eea6ad309361942bf88318bcd452363c"},
+ {file = "SQLAlchemy-2.0.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d505815ac340568fd03f719446a589162d55c52f08abd77ba8964fbb7eb5b5f"},
+ {file = "SQLAlchemy-2.0.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0dacf67aee53b16f365c589ce72e766efaabd2b145f9de7c917777b575e3659d"},
+ {file = "SQLAlchemy-2.0.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b801154027107461ee992ff4b5c09aa7cc6ec91ddfe50d02bca344918c3265c6"},
+ {file = "SQLAlchemy-2.0.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59a21853f5daeb50412d459cfb13cb82c089ad4c04ec208cd14dddd99fc23b39"},
+ {file = "SQLAlchemy-2.0.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29049e2c299b5ace92cbed0c1610a7a236f3baf4c6b66eb9547c01179f638ec5"},
+ {file = "SQLAlchemy-2.0.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b64b183d610b424a160b0d4d880995e935208fc043d0302dd29fee32d1ee3f95"},
+ {file = "SQLAlchemy-2.0.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4f7a7d7fcc675d3d85fbf3b3828ecd5990b8d61bd6de3f1b260080b3beccf215"},
+ {file = "SQLAlchemy-2.0.25-cp312-cp312-win32.whl", hash = "sha256:cf18ff7fc9941b8fc23437cc3e68ed4ebeff3599eec6ef5eebf305f3d2e9a7c2"},
+ {file = "SQLAlchemy-2.0.25-cp312-cp312-win_amd64.whl", hash = "sha256:91f7d9d1c4dd1f4f6e092874c128c11165eafcf7c963128f79e28f8445de82d5"},
+ {file = "SQLAlchemy-2.0.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:bb209a73b8307f8fe4fe46f6ad5979649be01607f11af1eb94aa9e8a3aaf77f0"},
+ {file = "SQLAlchemy-2.0.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:798f717ae7c806d67145f6ae94dc7c342d3222d3b9a311a784f371a4333212c7"},
+ {file = "SQLAlchemy-2.0.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fdd402169aa00df3142149940b3bf9ce7dde075928c1886d9a1df63d4b8de62"},
+ {file = "SQLAlchemy-2.0.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0d3cab3076af2e4aa5693f89622bef7fa770c6fec967143e4da7508b3dceb9b9"},
+ {file = "SQLAlchemy-2.0.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:74b080c897563f81062b74e44f5a72fa44c2b373741a9ade701d5f789a10ba23"},
+ {file = "SQLAlchemy-2.0.25-cp37-cp37m-win32.whl", hash = "sha256:87d91043ea0dc65ee583026cb18e1b458d8ec5fc0a93637126b5fc0bc3ea68c4"},
+ {file = "SQLAlchemy-2.0.25-cp37-cp37m-win_amd64.whl", hash = "sha256:75f99202324383d613ddd1f7455ac908dca9c2dd729ec8584c9541dd41822a2c"},
+ {file = "SQLAlchemy-2.0.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:420362338681eec03f53467804541a854617faed7272fe71a1bfdb07336a381e"},
+ {file = "SQLAlchemy-2.0.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7c88f0c7dcc5f99bdb34b4fd9b69b93c89f893f454f40219fe923a3a2fd11625"},
+ {file = "SQLAlchemy-2.0.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3be4987e3ee9d9a380b66393b77a4cd6d742480c951a1c56a23c335caca4ce3"},
+ {file = "SQLAlchemy-2.0.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a159111a0f58fb034c93eeba211b4141137ec4b0a6e75789ab7a3ef3c7e7e3"},
+ {file = "SQLAlchemy-2.0.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8b8cb63d3ea63b29074dcd29da4dc6a97ad1349151f2d2949495418fd6e48db9"},
+ {file = "SQLAlchemy-2.0.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:736ea78cd06de6c21ecba7416499e7236a22374561493b456a1f7ffbe3f6cdb4"},
+ {file = "SQLAlchemy-2.0.25-cp38-cp38-win32.whl", hash = "sha256:10331f129982a19df4284ceac6fe87353ca3ca6b4ca77ff7d697209ae0a5915e"},
+ {file = "SQLAlchemy-2.0.25-cp38-cp38-win_amd64.whl", hash = "sha256:c55731c116806836a5d678a70c84cb13f2cedba920212ba7dcad53260997666d"},
+ {file = "SQLAlchemy-2.0.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:605b6b059f4b57b277f75ace81cc5bc6335efcbcc4ccb9066695e515dbdb3900"},
+ {file = "SQLAlchemy-2.0.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:665f0a3954635b5b777a55111ababf44b4fc12b1f3ba0a435b602b6387ffd7cf"},
+ {file = "SQLAlchemy-2.0.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecf6d4cda1f9f6cb0b45803a01ea7f034e2f1aed9475e883410812d9f9e3cfcf"},
+ {file = "SQLAlchemy-2.0.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c51db269513917394faec5e5c00d6f83829742ba62e2ac4fa5c98d58be91662f"},
+ {file = "SQLAlchemy-2.0.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:790f533fa5c8901a62b6fef5811d48980adeb2f51f1290ade8b5e7ba990ba3de"},
+ {file = "SQLAlchemy-2.0.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1b1180cda6df7af84fe72e4530f192231b1f29a7496951db4ff38dac1687202d"},
+ {file = "SQLAlchemy-2.0.25-cp39-cp39-win32.whl", hash = "sha256:555651adbb503ac7f4cb35834c5e4ae0819aab2cd24857a123370764dc7d7e24"},
+ {file = "SQLAlchemy-2.0.25-cp39-cp39-win_amd64.whl", hash = "sha256:dc55990143cbd853a5d038c05e79284baedf3e299661389654551bd02a6a68d7"},
+ {file = "SQLAlchemy-2.0.25-py3-none-any.whl", hash = "sha256:a86b4240e67d4753dc3092d9511886795b3c2852abe599cffe108952f7af7ac3"},
+ {file = "SQLAlchemy-2.0.25.tar.gz", hash = "sha256:a2c69a7664fb2d54b8682dd774c3b54f67f84fa123cf84dda2a5f40dcaa04e08"},
]

[package.dependencies]
-greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\")"}
+greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""}
importlib-metadata = {version = "*", markers = "python_version < \"3.8\""}
+typing-extensions = ">=4.6.0"

[package.extras]
-aiomysql = ["aiomysql", "greenlet (!=0.4.17)"]
-aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"]
+aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"]
+aioodbc = ["aioodbc", "greenlet (!=0.4.17)"]
+aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"]
asyncio = ["greenlet (!=0.4.17)"]
-asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"]
-mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"]
+asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"]
+mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"]
mssql = ["pyodbc"]
mssql-pymssql = ["pymssql"]
mssql-pyodbc = ["pyodbc"]
-mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"]
-mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"]
+mypy = ["mypy (>=0.910)"]
+mysql = ["mysqlclient (>=1.4.0)"]
mysql-connector = ["mysql-connector-python"]
-oracle = ["cx-oracle (>=7)", "cx-oracle (>=7,<8)"]
+oracle = ["cx_oracle (>=8)"]
+oracle-oracledb = ["oracledb (>=1.0.1)"]
postgresql = ["psycopg2 (>=2.7)"]
postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"]
-postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"]
+postgresql-pg8000 = ["pg8000 (>=1.29.1)"]
+postgresql-psycopg = ["psycopg (>=3.0.7)"]
postgresql-psycopg2binary = ["psycopg2-binary"]
postgresql-psycopg2cffi = ["psycopg2cffi"]
-pymysql = ["pymysql", "pymysql (<1)"]
-sqlcipher = ["sqlcipher3-binary"]
+postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"]
+pymysql = ["pymysql"]
+sqlcipher = ["sqlcipher3_binary"]

[[package]]
-name = "sqlalchemy2-stubs"
-version = "0.0.2a35"
-description = "Typing Stubs for SQLAlchemy 1.4"
+name = "time-machine"
+version = "2.10.0"
+description = "Travel through time in your tests."
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
files = [
- {file = "sqlalchemy2-stubs-0.0.2a35.tar.gz", hash = "sha256:bd5d530697d7e8c8504c7fe792ef334538392a5fb7aa7e4f670bfacdd668a19d"},
- {file = "sqlalchemy2_stubs-0.0.2a35-py3-none-any.whl", hash = "sha256:593784ff9fc0dc2ded1895e3322591689db3be06f3ca006e3ef47640baf2d38a"},
+ {file = "time_machine-2.10.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2d5e93c14b935d802a310c1d4694a9fe894b48a733ebd641c9a570d6f9e1f667"},
+ {file = "time_machine-2.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4c0dda6b132c0180941944ede357109016d161d840384c2fb1096a3a2ef619f4"},
+ {file = "time_machine-2.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:900517e4a4121bf88527343d6aea2b5c99df134815bb8271ef589ec792502a71"},
+ {file = "time_machine-2.10.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:860279c7f9413bc763b3d1aee622937c4538472e2e58ad668546b49a797cb9fb"},
+ {file = "time_machine-2.10.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f451be286d50ec9b685198c7f76cea46538b8c57ec816f60edf5eb68d71c4f4"},
+ {file = "time_machine-2.10.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b1b07f5da833b2d8ea170cdf15a322c6fa2c6f7e9097a1bea435adc597cdcb5d"},
+ {file = "time_machine-2.10.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6b3a529ecc819488783e371df5ad315e790b9558c6945a236b13d7cb9ab73b9a"},
+ {file = "time_machine-2.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:51e36491bd4a43f8a937ca7c0d1a2287b8998f41306f47ebed250a02f93d2fe4"},
+ {file = "time_machine-2.10.0-cp310-cp310-win32.whl", hash = "sha256:1e9973091ad3272c719dafae35a5bb08fa5433c2902224d0f745657f9e3ac327"},
+ {file = "time_machine-2.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:ab82ea5a59faa1faa7397465f2edd94789a13f543daa02d16244906339100080"},
+ {file = "time_machine-2.10.0-cp310-cp310-win_arm64.whl", hash = "sha256:55bc6d666966fa2e6283d7433ebe875be37684a847eaa802075433c1ab3a377a"},
+ {file = "time_machine-2.10.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:99fc366cb4fa26d81f12fa36a929db0da89d99909e28231c045e0f1277e0db84"},
+ {file = "time_machine-2.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5969f325c20bdcb7f8917a6ac2ef328ec41cc2e256320a99dfe38b4080eeae71"},
+ {file = "time_machine-2.10.0-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:a1a5e283ab47b28205f33fa3c5a2df3fd9f07f09add63dbe76637c3633893a23"},
+ {file = "time_machine-2.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4083ec185ab9ece3e5a7ca7a7589114a555f04bcff31b29d4eb47a37e87d97fe"},
+ {file = "time_machine-2.10.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cbe45f88399b8af299136435a2363764d5fa6d16a936e4505081b6ea32ff3e18"},
+ {file = "time_machine-2.10.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d149a3fae8a06a3593361496ec036a27906fed478ade23ffc01dd402acd0b37"},
+ {file = "time_machine-2.10.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2e05306f63df3c7760170af6e77e1b37405b7c7c4a97cc9fdf0105f1094b1b1c"},
+ {file = "time_machine-2.10.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3d6d7b7680e34dbe60da34d75d6d5f31b6206c7149c0de8a7b0f0311d0ef7e3a"},
+ {file = "time_machine-2.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:91b8b06e09e1dfd53dafe272d41b60690d6f8806d7194c62982b003a088dc423"},
+ {file = "time_machine-2.10.0-cp311-cp311-win32.whl", hash = "sha256:6241a1742657622ebdcd66cf6045c92e0ec6ca6365c55434cc7fea945008192c"},
+ {file = "time_machine-2.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:48cce6dcb7118ba4a58537c6de4d1dd6e7ad6ea15d0257d6e0003b45c4a839c2"},
+ {file = "time_machine-2.10.0-cp311-cp311-win_arm64.whl", hash = "sha256:8cb6285095efa0833fd0301e159748a06e950c7744dc3d38e92e7607e2232d5a"},
+ {file = "time_machine-2.10.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8829ca7ed939419c2a23c360101edc51e3b57f40708d304b6aed16214d8b2a1f"},
+ {file = "time_machine-2.10.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b5b60bc00ad2efa5fefee117e5611a28b26f563f1a64df118d1d2f2590a679a"},
+ {file = "time_machine-2.10.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b1491fb647568134d38b06e844783d3069f5811405e9a3906eff88d55403e327"},
+ {file = "time_machine-2.10.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e78f2759a63fcc7660d283e22054c7cfa7468fad1ad86d0846819b6ea958d63f"},
+ {file = "time_machine-2.10.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:30881f263332245a665a49d0e30fda135597c4e18f2efa9c6759c224419c36a5"},
+ {file = "time_machine-2.10.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:e93750309093275340e0e95bb270801ec9cbf2ee8702d71031f4ccd8cc91dd7f"},
+ {file = "time_machine-2.10.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a906bb338a6be978b83f09f09d8b24737239330f280c890ecbf1c13828e1838c"},
+ {file = "time_machine-2.10.0-cp37-cp37m-win32.whl", hash = "sha256:10c8b170920d3f83dad2268ae8d5e1d8bb431a85198e32d778e6f3a1f93b172d"},
+ {file = "time_machine-2.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:5efc4cc914d93138944c488fdebd6e4290273e3ac795d5c7a744af29eb04ce0f"},
+ {file = "time_machine-2.10.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:1787887168e36f57d5ca1abf1b9d065a55eb67067df2fa23aaa4382da36f7098"},
+ {file = "time_machine-2.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:26a8cc1f8e9f4f69ea3f50b9b9e3a699e80e44ac9359a867208be6adac30fc60"},
+ {file = "time_machine-2.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e07e2c6c299c5509c72cc221a19f4bf680c87c793727a3127a29e18ddad3db13"},
+ {file = "time_machine-2.10.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f3e5f263a623148a448756a332aad45e65a59876fcb2511f7f61213e6d3ec3e"},
+ {file = "time_machine-2.10.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2b3abcb48d7ca7ed95e5d99220317b7ce31378636bb020cabfa62f9099e7dad"},
+ {file = "time_machine-2.10.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:545a813b7407c33dee388aa380449e79f57f02613ea149c6e907fc9ca3d53e64"},
+ {file = "time_machine-2.10.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:458b52673ec83d10da279d989d7a6ad1e60c93e4ba986210d72e6c78e17102f4"},
+ {file = "time_machine-2.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:acb2ca50d779d39eab1d0fab48697359e4ffc1aedfa58b79cd3a86ee13253834"},
+ {file = "time_machine-2.10.0-cp38-cp38-win32.whl", hash = "sha256:648fec54917a7e67acca38ed8e736b206e8a9688730e13e1cf7a74bcce89dec7"},
+ {file = "time_machine-2.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:3ed92d2a6e2c2b7a0c8161ecca5d012041b7ba147cbdfb2b7f62f45c02615111"},
+ {file = "time_machine-2.10.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6d2588581d3071d556f96954d084b7b99701e54120bb29dfadaab04791ef6ae4"},
+ {file = "time_machine-2.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:185f7a4228e993ddae610e24fb3c7e7891130ebb6a40f42d58ea3be0bfafe1b1"},
+ {file = "time_machine-2.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8225eb813ea9488de99e61569fc1b2d148d236473a84c6758cc436ffef4c043"},
+ {file = "time_machine-2.10.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f03ac22440b00abd1027bfb7dd793dfeffb72dda26f336f4d561835e0ce6117"},
+ {file = "time_machine-2.10.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4252f4daef831556e6685853d7a61b02910d0465528c549f179ea4e36aaeb14c"},
+ {file = "time_machine-2.10.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:58c65bf4775fca62e1678cb234f1ca90254e811d978971c819d2cd24e1b7f136"},
+ {file = "time_machine-2.10.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8527ac8fca7b92556c3c4c0f08e0bea995202db4be5b7d95b9b2ccbcb63649f2"},
+ {file = "time_machine-2.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4684308d749fdb0c22af173b081206d2a5a85d2154a683a7f4a60c4b667f7a65"},
+ {file = "time_machine-2.10.0-cp39-cp39-win32.whl", hash = "sha256:2adc24cf25b7e8d08aea2b109cc42c5db76817b07ee709fae5c66afa4ec7bc6e"},
+ {file = "time_machine-2.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:36f5be6f3042734fca043bedafbfbb6ad4809352e40b3283cb46b151a823674c"},
+ {file = "time_machine-2.10.0-cp39-cp39-win_arm64.whl", hash = "sha256:c1775a949dd830579d1af5a271ec53d920dc01657035ad305f55c5a1ac9b9f1e"},
+ {file = "time_machine-2.10.0.tar.gz", hash = "sha256:64fd89678cf589fc5554c311417128b2782222dd65f703bf248ef41541761da0"},
]

[package.dependencies]
-typing-extensions = ">=3.7.4"
+python-dateutil = "*"

[[package]]
-name = "termcolor"
-version = "2.3.0"
-description = "ANSI color formatting for output in terminal"
+name = "time-machine"
+version = "2.13.0"
+description = "Travel through time in your tests."
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "termcolor-2.3.0-py3-none-any.whl", hash = "sha256:3afb05607b89aed0ffe25202399ee0867ad4d3cb4180d98aaf8eefa6a5f7d475"},
- {file = "termcolor-2.3.0.tar.gz", hash = "sha256:b5b08f68937f138fe92f6c089b99f1e2da0ae56c52b78bf7075fd95420fd9a5a"},
+ {file = "time_machine-2.13.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:685d98593f13649ad5e7ce3e58efe689feca1badcf618ba397d3ab877ee59326"},
+ {file = "time_machine-2.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ccbce292380ebf63fb9a52e6b03d91677f6a003e0c11f77473efe3913a75f289"},
+ {file = "time_machine-2.13.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:679cbf9b15bfde1654cf48124128d3fbe52f821fa158a98fcee5fe7e05db1917"},
+ {file = "time_machine-2.13.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a26bdf3462d5f12a4c1009fdbe54366c6ef22c7b6f6808705b51dedaaeba8296"},
+ {file = "time_machine-2.13.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dabb3b155819811b4602f7e9be936e2024e20dc99a90f103e36b45768badf9c3"},
+ {file = "time_machine-2.13.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0db97f92be3efe0ac62fd3f933c91a78438cef13f283b6dfc2ee11123bfd7d8a"},
+ {file = "time_machine-2.13.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:12eed2e9171c85b703d75c985dab2ecad4fe7025b7d2f842596fce1576238ece"},
+ {file = "time_machine-2.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bdfe4a7f033e6783c3e9a7f8d8fc0b115367330762e00a03ff35fedf663994f3"},
+ {file = "time_machine-2.13.0-cp310-cp310-win32.whl", hash = "sha256:3a7a0a49ce50d9c306c4343a7d6a3baa11092d4399a4af4355c615ccc321a9d3"},
+ {file = "time_machine-2.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:1812e48c6c58707db9988445a219a908a710ea065b2cc808d9a50636291f27d4"},
+ {file = "time_machine-2.13.0-cp310-cp310-win_arm64.whl", hash = "sha256:5aee23cd046abf9caeddc982113e81ba9097a01f3972e9560f5ed64e3495f66d"},
+ {file = "time_machine-2.13.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e9a9d150e098be3daee5c9f10859ab1bd14a61abebaed86e6d71f7f18c05b9d7"},
+ {file = "time_machine-2.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2bd4169b808745d219a69094b3cb86006938d45e7293249694e6b7366225a186"},
+ {file = "time_machine-2.13.0-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:8d526cdcaca06a496877cfe61cc6608df2c3a6fce210e076761964ebac7f77cc"},
+ {file = "time_machine-2.13.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cfef4ebfb4f055ce3ebc7b6c1c4d0dbfcffdca0e783ad8c6986c992915a57ed3"},
+ {file = "time_machine-2.13.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9f128db8997c3339f04f7f3946dd9bb2a83d15e0a40d35529774da1e9e501511"},
+ {file = "time_machine-2.13.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21bef5854d49b62e2c33848b5c3e8acf22a3b46af803ef6ff19529949cb7cf9f"},
+ {file = "time_machine-2.13.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:32b71e50b07f86916ac04bd1eefc2bd2c93706b81393748b08394509ee6585dc"},
+ {file = "time_machine-2.13.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ac8ff145c63cd0dcfd9590fe694b5269aacbc130298dc7209b095d101f8cdde"},
+ {file = "time_machine-2.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:19a3b10161c91ca8e0fd79348665cca711fd2eac6ce336ff9e6b447783817f93"},
+ {file = "time_machine-2.13.0-cp311-cp311-win32.whl", hash = "sha256:5f87787d562e42bf1006a87eb689814105b98c4d5545874a281280d0f8b9a2d9"},
+ {file = "time_machine-2.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:62fd14a80b8b71726e07018628daaee0a2e00937625083f96f69ed6b8e3304c0"},
+ {file = "time_machine-2.13.0-cp311-cp311-win_arm64.whl", hash = "sha256:e9935aff447f5400a2665ab10ed2da972591713080e1befe1bb8954e7c0c7806"},
+ {file = "time_machine-2.13.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:34dcdbbd25c1e124e17fe58050452960fd16a11f9d3476aaa87260e28ecca0fd"},
+ {file = "time_machine-2.13.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e58d82fe0e59d6e096ada3281d647a2e7420f7da5453b433b43880e1c2e8e0c5"},
+ {file = "time_machine-2.13.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71acbc1febbe87532c7355eca3308c073d6e502ee4ce272b5028967847c8e063"},
+ {file = "time_machine-2.13.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dec0ec2135a4e2a59623e40c31d6e8a8ae73305ade2634380e4263d815855750"},
+ {file = "time_machine-2.13.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e3a2611f8788608ebbcb060a5e36b45911bc3b8adc421b1dc29d2c81786ce4d"},
+ {file = "time_machine-2.13.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:42ef5349135626ad6cd889a0a81400137e5c6928502b0817ea9e90bb10702000"},
+ {file = "time_machine-2.13.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6c16d90a597a8c2d3ce22d6be2eb3e3f14786974c11b01886e51b3cf0d5edaf7"},
+ {file = "time_machine-2.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4f2ae8d0e359b216b695f1e7e7256f208c390db0480601a439c5dd1e1e4e16ce"},
+ {file = "time_machine-2.13.0-cp312-cp312-win32.whl", hash = "sha256:f5fa9610f7e73fff42806a2ed8b06d862aa59ce4d178a52181771d6939c3e237"},
+ {file = "time_machine-2.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:02b33a8c19768c94f7ffd6aa6f9f64818e88afce23250016b28583929d20fb12"},
+ {file = "time_machine-2.13.0-cp312-cp312-win_arm64.whl", hash = "sha256:0cc116056a8a2a917a4eec85661dfadd411e0d8faae604ef6a0e19fe5cd57ef1"},
+ {file = "time_machine-2.13.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:de01f33aa53da37530ad97dcd17e9affa25a8df4ab822506bb08101bab0c2673"},
+ {file = "time_machine-2.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:67fa45cd813821e4f5bec0ac0820869e8e37430b15509d3f5fad74ba34b53852"},
+ {file = "time_machine-2.13.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4a2d3db2c3b8e519d5ef436cd405abd33542a7b7761fb05ef5a5f782a8ce0b1"},
+ {file = "time_machine-2.13.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7558622a62243be866a7e7c41da48eacd82c874b015ecf67d18ebf65ca3f7436"},
+ {file = "time_machine-2.13.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab04cf4e56e1ee65bee2adaa26a04695e92eb1ed1ccc65fbdafd0d114399595a"},
+ {file = "time_machine-2.13.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b0c8f24ae611a58782773af34dd356f1f26756272c04be2be7ea73b47e5da37d"},
+ {file = "time_machine-2.13.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4ca20f85a973a4ca8b00cf466cd72c27ccc72372549b138fd48d7e70e5a190ab"},
+ {file = "time_machine-2.13.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9fad549521c4c13bdb1e889b2855a86ec835780d534ffd8f091c2647863243be"},
+ {file = "time_machine-2.13.0-cp38-cp38-win32.whl", hash = "sha256:20205422fcf2caf9a7488394587df86e5b54fdb315c1152094fbb63eec4e9304"},
+ {file = "time_machine-2.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:2dc76ee55a7d915a55960a726ceaca7b9097f67e4b4e681ef89871bcf98f00be"},
+ {file = "time_machine-2.13.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7693704c0f2f6b9beed912ff609781edf5fcf5d63aff30c92be4093e09d94b8e"},
+ {file = "time_machine-2.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:918f8389de29b4f41317d121f1150176fae2cdb5fa41f68b2aee0b9dc88df5c3"},
+ {file = "time_machine-2.13.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fe3fda5fa73fec74278912e438fce1612a79c36fd0cc323ea3dc2d5ce629f31"},
+ {file = "time_machine-2.13.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5c6245db573863b335d9ca64b3230f623caf0988594ae554c0c794e7f80e3e66"},
+ {file = "time_machine-2.13.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e433827eccd6700a34a2ab28fd9361ff6e4d4923f718d2d1dac6d1dcd9d54da6"},
+ {file = "time_machine-2.13.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:924377d398b1c48e519ad86a71903f9f36117f69e68242c99fb762a2465f5ad2"},
+ {file = "time_machine-2.13.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:66fb3877014dca0b9286b0f06fa74062357bd23f2d9d102d10e31e0f8fa9b324"},
+ {file = "time_machine-2.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0c9829b2edfcf6b5d72a6ff330d4380f36a937088314c675531b43d3423dd8af"},
+ {file = "time_machine-2.13.0-cp39-cp39-win32.whl", hash = "sha256:1a22be4df364f49a507af4ac9ea38108a0105f39da3f9c60dce62d6c6ea4ccdc"},
+ {file = "time_machine-2.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:88601de1da06c7cab3d5ed3d5c3801ef683366e769e829e96383fdab6ae2fe42"},
+ {file = "time_machine-2.13.0-cp39-cp39-win_arm64.whl", hash = "sha256:3c87856105dcb25b5bbff031d99f06ef4d1c8380d096222e1bc63b496b5258e6"},
+ {file = "time_machine-2.13.0.tar.gz", hash = "sha256:c23b2408e3adcedec84ea1131e238f0124a5bc0e491f60d1137ad7239b37c01a"},
]

-[package.extras]
-tests = ["pytest", "pytest-cov"]
-
-[[package]]
-name = "text-unidecode"
-version = "1.3"
-description = "The most basic Text::Unidecode port"
-optional = false
-python-versions = "*"
-files = [
- {file = "text-unidecode-1.3.tar.gz", hash = "sha256:bad6603bb14d279193107714b288be206cac565dfa49aa5b105294dd5c4aab93"},
- {file = "text_unidecode-1.3-py2.py3-none-any.whl", hash = "sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8"},
-]
+[package.dependencies]
+python-dateutil = "*"

[[package]]
name = "tomli"
@@ -2396,17 +2797,6 @@ files = [
{file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
]

-[[package]]
-name = "tomlkit"
-version = "0.11.8"
-description = "Style preserving TOML library"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "tomlkit-0.11.8-py3-none-any.whl", hash = "sha256:8c726c4c202bdb148667835f68d68780b9a003a9ec34167b6c673b38eff2a171"},
- {file = "tomlkit-0.11.8.tar.gz", hash = "sha256:9330fc7faa1db67b541b28e62018c17d20be733177d290a13b24c62d1614e0c3"},
-]
-
[[package]]
name = "tornado"
version = "6.2"
@@ -2429,90 +2819,121 @@ files = [
[[package]]
name = "typed-ast"
-version = "1.5.4"
+version = "1.5.5"
description = "a fork of Python 2 and 3 ast modules with type comment support"
optional = false
python-versions = ">=3.6"
files = [
- {file = "typed_ast-1.5.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:669dd0c4167f6f2cd9f57041e03c3c2ebf9063d0757dc89f79ba1daa2bfca9d4"},
- {file = "typed_ast-1.5.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:211260621ab1cd7324e0798d6be953d00b74e0428382991adfddb352252f1d62"},
- {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:267e3f78697a6c00c689c03db4876dd1efdfea2f251a5ad6555e82a26847b4ac"},
- {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c542eeda69212fa10a7ada75e668876fdec5f856cd3d06829e6aa64ad17c8dfe"},
- {file = "typed_ast-1.5.4-cp310-cp310-win_amd64.whl", hash = "sha256:a9916d2bb8865f973824fb47436fa45e1ebf2efd920f2b9f99342cb7fab93f72"},
- {file = "typed_ast-1.5.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:79b1e0869db7c830ba6a981d58711c88b6677506e648496b1f64ac7d15633aec"},
- {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a94d55d142c9265f4ea46fab70977a1944ecae359ae867397757d836ea5a3f47"},
- {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:183afdf0ec5b1b211724dfef3d2cad2d767cbefac291f24d69b00546c1837fb6"},
- {file = "typed_ast-1.5.4-cp36-cp36m-win_amd64.whl", hash = "sha256:639c5f0b21776605dd6c9dbe592d5228f021404dafd377e2b7ac046b0349b1a1"},
- {file = "typed_ast-1.5.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cf4afcfac006ece570e32d6fa90ab74a17245b83dfd6655a6f68568098345ff6"},
- {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed855bbe3eb3715fca349c80174cfcfd699c2f9de574d40527b8429acae23a66"},
- {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6778e1b2f81dfc7bc58e4b259363b83d2e509a65198e85d5700dfae4c6c8ff1c"},
- {file = "typed_ast-1.5.4-cp37-cp37m-win_amd64.whl", hash = "sha256:0261195c2062caf107831e92a76764c81227dae162c4f75192c0d489faf751a2"},
- {file = "typed_ast-1.5.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2efae9db7a8c05ad5547d522e7dbe62c83d838d3906a3716d1478b6c1d61388d"},
- {file = "typed_ast-1.5.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7d5d014b7daa8b0bf2eaef684295acae12b036d79f54178b92a2b6a56f92278f"},
- {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:370788a63915e82fd6f212865a596a0fefcbb7d408bbbb13dea723d971ed8bdc"},
- {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4e964b4ff86550a7a7d56345c7864b18f403f5bd7380edf44a3c1fb4ee7ac6c6"},
- {file = "typed_ast-1.5.4-cp38-cp38-win_amd64.whl", hash = "sha256:683407d92dc953c8a7347119596f0b0e6c55eb98ebebd9b23437501b28dcbb8e"},
- {file = "typed_ast-1.5.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4879da6c9b73443f97e731b617184a596ac1235fe91f98d279a7af36c796da35"},
- {file = "typed_ast-1.5.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3e123d878ba170397916557d31c8f589951e353cc95fb7f24f6bb69adc1a8a97"},
- {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebd9d7f80ccf7a82ac5f88c521115cc55d84e35bf8b446fcd7836eb6b98929a3"},
- {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98f80dee3c03455e92796b58b98ff6ca0b2a6f652120c263efdba4d6c5e58f72"},
- {file = "typed_ast-1.5.4-cp39-cp39-win_amd64.whl", hash = "sha256:0fdbcf2fef0ca421a3f5912555804296f0b0960f0418c440f5d6d3abb549f3e1"},
- {file = "typed_ast-1.5.4.tar.gz", hash = "sha256:39e21ceb7388e4bb37f4c679d72707ed46c2fbf2a5609b8b8ebc4b067d977df2"},
+ {file = "typed_ast-1.5.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4bc1efe0ce3ffb74784e06460f01a223ac1f6ab31c6bc0376a21184bf5aabe3b"},
+ {file = "typed_ast-1.5.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5f7a8c46a8b333f71abd61d7ab9255440d4a588f34a21f126bbfc95f6049e686"},
+ {file = "typed_ast-1.5.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:597fc66b4162f959ee6a96b978c0435bd63791e31e4f410622d19f1686d5e769"},
+ {file = "typed_ast-1.5.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d41b7a686ce653e06c2609075d397ebd5b969d821b9797d029fccd71fdec8e04"},
+ {file = "typed_ast-1.5.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5fe83a9a44c4ce67c796a1b466c270c1272e176603d5e06f6afbc101a572859d"},
+ {file = "typed_ast-1.5.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d5c0c112a74c0e5db2c75882a0adf3133adedcdbfd8cf7c9d6ed77365ab90a1d"},
+ {file = "typed_ast-1.5.5-cp310-cp310-win_amd64.whl", hash = "sha256:e1a976ed4cc2d71bb073e1b2a250892a6e968ff02aa14c1f40eba4f365ffec02"},
+ {file = "typed_ast-1.5.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c631da9710271cb67b08bd3f3813b7af7f4c69c319b75475436fcab8c3d21bee"},
+ {file = "typed_ast-1.5.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b445c2abfecab89a932b20bd8261488d574591173d07827c1eda32c457358b18"},
+ {file = "typed_ast-1.5.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc95ffaaab2be3b25eb938779e43f513e0e538a84dd14a5d844b8f2932593d88"},
+ {file = "typed_ast-1.5.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61443214d9b4c660dcf4b5307f15c12cb30bdfe9588ce6158f4a005baeb167b2"},
+ {file = "typed_ast-1.5.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6eb936d107e4d474940469e8ec5b380c9b329b5f08b78282d46baeebd3692dc9"},
+ {file = "typed_ast-1.5.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e48bf27022897577d8479eaed64701ecaf0467182448bd95759883300ca818c8"},
+ {file = "typed_ast-1.5.5-cp311-cp311-win_amd64.whl", hash = "sha256:83509f9324011c9a39faaef0922c6f720f9623afe3fe220b6d0b15638247206b"},
+ {file = "typed_ast-1.5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:44f214394fc1af23ca6d4e9e744804d890045d1643dd7e8229951e0ef39429b5"},
+ {file = "typed_ast-1.5.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:118c1ce46ce58fda78503eae14b7664163aa735b620b64b5b725453696f2a35c"},
+ {file = "typed_ast-1.5.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be4919b808efa61101456e87f2d4c75b228f4e52618621c77f1ddcaae15904fa"},
+ {file = "typed_ast-1.5.5-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:fc2b8c4e1bc5cd96c1a823a885e6b158f8451cf6f5530e1829390b4d27d0807f"},
+ {file = "typed_ast-1.5.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:16f7313e0a08c7de57f2998c85e2a69a642e97cb32f87eb65fbfe88381a5e44d"},
+ {file = "typed_ast-1.5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:2b946ef8c04f77230489f75b4b5a4a6f24c078be4aed241cfabe9cbf4156e7e5"},
+ {file = "typed_ast-1.5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2188bc33d85951ea4ddad55d2b35598b2709d122c11c75cffd529fbc9965508e"},
+ {file = "typed_ast-1.5.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0635900d16ae133cab3b26c607586131269f88266954eb04ec31535c9a12ef1e"},
+ {file = "typed_ast-1.5.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57bfc3cf35a0f2fdf0a88a3044aafaec1d2f24d8ae8cd87c4f58d615fb5b6311"},
+ {file = "typed_ast-1.5.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:fe58ef6a764de7b4b36edfc8592641f56e69b7163bba9f9c8089838ee596bfb2"},
+ {file = "typed_ast-1.5.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d09d930c2d1d621f717bb217bf1fe2584616febb5138d9b3e8cdd26506c3f6d4"},
+ {file = "typed_ast-1.5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:d40c10326893ecab8a80a53039164a224984339b2c32a6baf55ecbd5b1df6431"},
+ {file = "typed_ast-1.5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fd946abf3c31fb50eee07451a6aedbfff912fcd13cf357363f5b4e834cc5e71a"},
+ {file = "typed_ast-1.5.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ed4a1a42df8a3dfb6b40c3d2de109e935949f2f66b19703eafade03173f8f437"},
+ {file = "typed_ast-1.5.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:045f9930a1550d9352464e5149710d56a2aed23a2ffe78946478f7b5416f1ede"},
+ {file = "typed_ast-1.5.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:381eed9c95484ceef5ced626355fdc0765ab51d8553fec08661dce654a935db4"},
+ {file = "typed_ast-1.5.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bfd39a41c0ef6f31684daff53befddae608f9daf6957140228a08e51f312d7e6"},
+ {file = "typed_ast-1.5.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8c524eb3024edcc04e288db9541fe1f438f82d281e591c548903d5b77ad1ddd4"},
+ {file = "typed_ast-1.5.5-cp38-cp38-win_amd64.whl", hash = "sha256:7f58fabdde8dcbe764cef5e1a7fcb440f2463c1bbbec1cf2a86ca7bc1f95184b"},
+ {file = "typed_ast-1.5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:042eb665ff6bf020dd2243307d11ed626306b82812aba21836096d229fdc6a10"},
+ {file = "typed_ast-1.5.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:622e4a006472b05cf6ef7f9f2636edc51bda670b7bbffa18d26b255269d3d814"},
+ {file = "typed_ast-1.5.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1efebbbf4604ad1283e963e8915daa240cb4bf5067053cf2f0baadc4d4fb51b8"},
+ {file = "typed_ast-1.5.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0aefdd66f1784c58f65b502b6cf8b121544680456d1cebbd300c2c813899274"},
+ {file = "typed_ast-1.5.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:48074261a842acf825af1968cd912f6f21357316080ebaca5f19abbb11690c8a"},
+ {file = "typed_ast-1.5.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:429ae404f69dc94b9361bb62291885894b7c6fb4640d561179548c849f8492ba"},
+ {file = "typed_ast-1.5.5-cp39-cp39-win_amd64.whl", hash = "sha256:335f22ccb244da2b5c296e6f96b06ee9bed46526db0de38d2f0e5a6597b81155"},
+ {file = "typed_ast-1.5.5.tar.gz", hash = "sha256:94282f7a354f36ef5dbce0ef3467ebf6a258e370ab33d5b40c249fa996e590dd"},
]

[[package]]
name = "types-jsonschema"
-version = "4.17.0.9"
+version = "4.17.0.10"
description = "Typing stubs for jsonschema"
optional = false
python-versions = "*"
files = [
- {file = "types-jsonschema-4.17.0.9.tar.gz", hash = "sha256:ddbbf84a37ba19f486e43d2a4ab239c9e49aebb5cc99a17a5d59f54568373376"},
- {file = "types_jsonschema-4.17.0.9-py3-none-any.whl", hash = "sha256:ec83f48c5ce5d3ea6955c3617d8c903e5ba3db8debea0c7f5c8e9bd60d782a9e"},
+ {file = "types-jsonschema-4.17.0.10.tar.gz", hash = "sha256:8e979db34d69bc9f9b3d6e8b89bdbc60b3a41cfce4e1fb87bf191d205c7f5098"},
+ {file = "types_jsonschema-4.17.0.10-py3-none-any.whl", hash = "sha256:3aa2a89afbd9eaa6ce0c15618b36f02692a621433889ce73014656f7d8caf971"},
+]
+
+[[package]]
+name = "types-jsonschema"
+version = "4.20.0.0"
+description = "Typing stubs for jsonschema"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "types-jsonschema-4.20.0.0.tar.gz", hash = "sha256:0de1032d243f1d3dba8b745ad84efe8c1af71665a9deb1827636ac535dcb79c1"},
+ {file = "types_jsonschema-4.20.0.0-py3-none-any.whl", hash = "sha256:e6d5df18aaca4412f0aae246a294761a92040e93d7bc840f002b7329a8b72d26"},
]

+[package.dependencies]
+referencing = "*"
+
[[package]]
name = "types-python-dateutil"
-version = "2.8.19.13"
+version = "2.8.19.14"
description = "Typing stubs for python-dateutil"
optional = false
python-versions = "*"
files = [
- {file = "types-python-dateutil-2.8.19.13.tar.gz", hash = "sha256:09a0275f95ee31ce68196710ed2c3d1b9dc42e0b61cc43acc369a42cb939134f"},
- {file = "types_python_dateutil-2.8.19.13-py3-none-any.whl", hash = "sha256:0b0e7c68e7043b0354b26a1e0225cb1baea7abb1b324d02b50e2d08f1221043f"},
+ {file = "types-python-dateutil-2.8.19.14.tar.gz", hash = "sha256:1f4f10ac98bb8b16ade9dbee3518d9ace017821d94b057a425b069f834737f4b"},
+ {file = "types_python_dateutil-2.8.19.14-py3-none-any.whl", hash = "sha256:f977b8de27787639986b4e28963263fd0e5158942b3ecef91b9335c130cb1ce9"},
]

[[package]]
name = "types-pytz"
-version = "2023.3.0.0"
+version = "2023.3.1.1"
description = "Typing stubs for pytz"
optional = false
python-versions = "*"
files = [
- {file = "types-pytz-2023.3.0.0.tar.gz", hash = "sha256:ecdc70d543aaf3616a7e48631543a884f74205f284cefd6649ddf44c6a820aac"},
- {file = "types_pytz-2023.3.0.0-py3-none-any.whl", hash = "sha256:4fc2a7fbbc315f0b6630e0b899fd6c743705abe1094d007b0e612d10da15e0f3"},
+ {file = "types-pytz-2023.3.1.1.tar.gz", hash = "sha256:cc23d0192cd49c8f6bba44ee0c81e4586a8f30204970fc0894d209a6b08dab9a"},
+ {file = "types_pytz-2023.3.1.1-py3-none-any.whl", hash = "sha256:1999a123a3dc0e39a2ef6d19f3f8584211de9e6a77fe7a0259f04a524e90a5cf"},
]

[[package]]
name = "types-pyyaml"
-version = "6.0.12.10"
+version = "6.0.12.12"
description = "Typing stubs for PyYAML"
optional = false
python-versions = "*"
files = [
- {file = "types-PyYAML-6.0.12.10.tar.gz", hash = "sha256:ebab3d0700b946553724ae6ca636ea932c1b0868701d4af121630e78d695fc97"},
- {file = "types_PyYAML-6.0.12.10-py3-none-any.whl", hash = "sha256:662fa444963eff9b68120d70cda1af5a5f2aa57900003c2006d7626450eaae5f"},
+ {file = "types-PyYAML-6.0.12.12.tar.gz", hash = "sha256:334373d392fde0fdf95af5c3f1661885fa10c52167b14593eb856289e1855062"},
+ {file = "types_PyYAML-6.0.12.12-py3-none-any.whl", hash = "sha256:c05bc6c158facb0676674b7f11fe3960db4f389718e19e62bd2b84d6205cfd24"},
]

[[package]]
name = "types-requests"
-version = "2.31.0.1"
+version = "2.31.0.6"
description = "Typing stubs for requests"
optional = false
-python-versions = "*"
+python-versions = ">=3.7"
files = [
- {file = "types-requests-2.31.0.1.tar.gz", hash = "sha256:3de667cffa123ce698591de0ad7db034a5317457a596eb0b4944e5a9d9e8d1ac"},
- {file = "types_requests-2.31.0.1-py3-none-any.whl", hash = "sha256:afb06ef8f25ba83d59a1d424bd7a5a939082f94b94e90ab5e6116bd2559deaa3"},
+ {file = "types-requests-2.31.0.6.tar.gz", hash = "sha256:cd74ce3b53c461f1228a9b783929ac73a666658f223e28ed29753771477b3bd0"},
+ {file = "types_requests-2.31.0.6-py3-none-any.whl", hash = "sha256:a2db9cb228a81da8348b49ad6db3f5519452dd20a9c1e1a868c83c5fe88fd1a9"},
]

[package.dependencies]
@@ -2520,24 +2941,24 @@ types-urllib3 = "*"
[[package]]
name = "types-simplejson"
-version = "3.19.0.1"
+version = "3.19.0.2"
description = "Typing stubs for simplejson"
optional = false
python-versions = "*"
files = [
- {file = "types-simplejson-3.19.0.1.tar.gz", hash = "sha256:0233df016477bd58a2525df79ac8a34b079910d51ca45ec4f09a94ae58222f02"},
- {file = "types_simplejson-3.19.0.1-py3-none-any.whl", hash = "sha256:0083e84d43b6b36e8af6eb77e6b41440f2aec8842d16cee0f828fb5622196f4f"},
+ {file = "types-simplejson-3.19.0.2.tar.gz", hash = "sha256:ebc81f886f89d99d6b80c726518aa2228bc77c26438f18fd81455e4f79f8ee1b"},
+ {file = "types_simplejson-3.19.0.2-py3-none-any.whl", hash = "sha256:8ba093dc7884f59b3e62aed217144085e675a269debc32678fd80e0b43b2b86f"},
]

[[package]]
name = "types-urllib3"
-version = "1.26.25.13"
+version = "1.26.25.14"
description = "Typing stubs for urllib3"
optional = false
python-versions = "*"
files = [
- {file = "types-urllib3-1.26.25.13.tar.gz", hash = "sha256:3300538c9dc11dad32eae4827ac313f5d986b8b21494801f1bf97a1ac6c03ae5"},
- {file = "types_urllib3-1.26.25.13-py3-none-any.whl", hash = "sha256:5dbd1d2bef14efee43f5318b5d36d805a489f6600252bb53626d4bfafd95e27c"},
+ {file = "types-urllib3-1.26.25.14.tar.gz", hash = "sha256:229b7f577c951b8c1b92c1bc2b2fdb0b49847bd2af6d1cc2a2e3dd340f3bda8f"},
+ {file = "types_urllib3-1.26.25.14-py3-none-any.whl", hash = "sha256:9683bbb7fb72e32bfe9d2be6e04875fbe1b3eeec3cbb4ea231435aa7fd6b4f0e"},
]

[[package]]
@@ -2551,143 +2972,55 @@ files = [
{file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"},
]

+[[package]]
+name = "tzdata"
+version = "2023.4"
+description = "Provider of IANA time zone data"
+optional = false
+python-versions = ">=2"
+files = [
+ {file = "tzdata-2023.4-py2.py3-none-any.whl", hash = "sha256:aa3ace4329eeacda5b7beb7ea08ece826c28d761cda36e747cfbf97996d39bf3"},
+ {file = "tzdata-2023.4.tar.gz", hash = "sha256:dd54c94f294765522c77399649b4fefd95522479a664a0cec87f41bebc6148c9"},
+]
+
[[package]]
name = "urllib3"
-version = "1.26.16"
+version = "1.26.18"
description = "HTTP library with thread-safe connection pooling, file post, and more."
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
files = [
- {file = "urllib3-1.26.16-py2.py3-none-any.whl", hash = "sha256:8d36afa7616d8ab714608411b4a3b13e58f463aee519024578e062e141dce20f"},
- {file = "urllib3-1.26.16.tar.gz", hash = "sha256:8f135f6502756bde6b2a9b28989df5fbe87c9970cecaa69041edcce7f0589b14"},
+ {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"},
+ {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"},
]

[package.extras]
-brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
+brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"]
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]

-[[package]]
-name = "wcwidth"
-version = "0.2.6"
-description = "Measures the displayed width of unicode strings in a terminal"
-optional = false
-python-versions = "*"
-files = [
- {file = "wcwidth-0.2.6-py2.py3-none-any.whl", hash = "sha256:795b138f6875577cd91bba52baf9e445cd5118fd32723b460e30a0af30ea230e"},
- {file = "wcwidth-0.2.6.tar.gz", hash = "sha256:a5220780a404dbe3353789870978e472cfe477761f06ee55077256e509b156d0"},
-]
-
-[[package]]
-name = "wrapt"
-version = "1.15.0"
-description = "Module for decorators, wrappers and monkey patching."
-optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
-files = [
- {file = "wrapt-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1"},
- {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e826aadda3cae59295b95343db8f3d965fb31059da7de01ee8d1c40a60398b29"},
- {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5fc8e02f5984a55d2c653f5fea93531e9836abbd84342c1d1e17abc4a15084c2"},
- {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:96e25c8603a155559231c19c0349245eeb4ac0096fe3c1d0be5c47e075bd4f46"},
- {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:40737a081d7497efea35ab9304b829b857f21558acfc7b3272f908d33b0d9d4c"},
- {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:f87ec75864c37c4c6cb908d282e1969e79763e0d9becdfe9fe5473b7bb1e5f09"},
- {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:1286eb30261894e4c70d124d44b7fd07825340869945c79d05bda53a40caa079"},
- {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:493d389a2b63c88ad56cdc35d0fa5752daac56ca755805b1b0c530f785767d5e"},
- {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:58d7a75d731e8c63614222bcb21dd992b4ab01a399f1f09dd82af17bbfc2368a"},
- {file = "wrapt-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:21f6d9a0d5b3a207cdf7acf8e58d7d13d463e639f0c7e01d82cdb671e6cb7923"},
- {file = "wrapt-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ce42618f67741d4697684e501ef02f29e758a123aa2d669e2d964ff734ee00ee"},
- {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41d07d029dd4157ae27beab04d22b8e261eddfc6ecd64ff7000b10dc8b3a5727"},
- {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54accd4b8bc202966bafafd16e69da9d5640ff92389d33d28555c5fd4f25ccb7"},
- {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fbfbca668dd15b744418265a9607baa970c347eefd0db6a518aaf0cfbd153c0"},
- {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:76e9c727a874b4856d11a32fb0b389afc61ce8aaf281ada613713ddeadd1cfec"},
- {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e20076a211cd6f9b44a6be58f7eeafa7ab5720eb796975d0c03f05b47d89eb90"},
- {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a74d56552ddbde46c246b5b89199cb3fd182f9c346c784e1a93e4dc3f5ec9975"},
- {file = "wrapt-1.15.0-cp310-cp310-win32.whl", hash = "sha256:26458da5653aa5b3d8dc8b24192f574a58984c749401f98fff994d41d3f08da1"},
- {file = "wrapt-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:75760a47c06b5974aa5e01949bf7e66d2af4d08cb8c1d6516af5e39595397f5e"},
- {file = "wrapt-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ba1711cda2d30634a7e452fc79eabcadaffedf241ff206db2ee93dd2c89a60e7"},
- {file = "wrapt-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:56374914b132c702aa9aa9959c550004b8847148f95e1b824772d453ac204a72"},
- {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a89ce3fd220ff144bd9d54da333ec0de0399b52c9ac3d2ce34b569cf1a5748fb"},
- {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bbe623731d03b186b3d6b0d6f51865bf598587c38d6f7b0be2e27414f7f214e"},
- {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3abbe948c3cbde2689370a262a8d04e32ec2dd4f27103669a45c6929bcdbfe7c"},
- {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b67b819628e3b748fd3c2192c15fb951f549d0f47c0449af0764d7647302fda3"},
- {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7eebcdbe3677e58dd4c0e03b4f2cfa346ed4049687d839adad68cc38bb559c92"},
- {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:74934ebd71950e3db69960a7da29204f89624dde411afbfb3b4858c1409b1e98"},
- {file = "wrapt-1.15.0-cp311-cp311-win32.whl", hash = "sha256:bd84395aab8e4d36263cd1b9308cd504f6cf713b7d6d3ce25ea55670baec5416"},
- {file = "wrapt-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:a487f72a25904e2b4bbc0817ce7a8de94363bd7e79890510174da9d901c38705"},
- {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:4ff0d20f2e670800d3ed2b220d40984162089a6e2c9646fdb09b85e6f9a8fc29"},
- {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9ed6aa0726b9b60911f4aed8ec5b8dd7bf3491476015819f56473ffaef8959bd"},
- {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:896689fddba4f23ef7c718279e42f8834041a21342d95e56922e1c10c0cc7afb"},
- {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:75669d77bb2c071333417617a235324a1618dba66f82a750362eccbe5b61d248"},
- {file = "wrapt-1.15.0-cp35-cp35m-win32.whl", hash = "sha256:fbec11614dba0424ca72f4e8ba3c420dba07b4a7c206c8c8e4e73f2e98f4c559"},
- {file = "wrapt-1.15.0-cp35-cp35m-win_amd64.whl", hash = "sha256:fd69666217b62fa5d7c6aa88e507493a34dec4fa20c5bd925e4bc12fce586639"},
- {file = "wrapt-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b0724f05c396b0a4c36a3226c31648385deb6a65d8992644c12a4963c70326ba"},
- {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbeccb1aa40ab88cd29e6c7d8585582c99548f55f9b2581dfc5ba68c59a85752"},
- {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38adf7198f8f154502883242f9fe7333ab05a5b02de7d83aa2d88ea621f13364"},
- {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:578383d740457fa790fdf85e6d346fda1416a40549fe8db08e5e9bd281c6a475"},
- {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:a4cbb9ff5795cd66f0066bdf5947f170f5d63a9274f99bdbca02fd973adcf2a8"},
- {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:af5bd9ccb188f6a5fdda9f1f09d9f4c86cc8a539bd48a0bfdc97723970348418"},
- {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b56d5519e470d3f2fe4aa7585f0632b060d532d0696c5bdfb5e8319e1d0f69a2"},
- {file = "wrapt-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:77d4c1b881076c3ba173484dfa53d3582c1c8ff1f914c6461ab70c8428b796c1"},
- {file = "wrapt-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:077ff0d1f9d9e4ce6476c1a924a3332452c1406e59d90a2cf24aeb29eeac9420"},
- {file = "wrapt-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5c5aa28df055697d7c37d2099a7bc09f559d5053c3349b1ad0c39000e611d317"},
- {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a8564f283394634a7a7054b7983e47dbf39c07712d7b177b37e03f2467a024e"},
- {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780c82a41dc493b62fc5884fb1d3a3b81106642c5c5c78d6a0d4cbe96d62ba7e"},
- {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e169e957c33576f47e21864cf3fc9ff47c223a4ebca8960079b8bd36cb014fd0"},
- {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b02f21c1e2074943312d03d243ac4388319f2456576b2c6023041c4d57cd7019"},
- {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f2e69b3ed24544b0d3dbe2c5c0ba5153ce50dcebb576fdc4696d52aa22db6034"},
- {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d787272ed958a05b2c86311d3a4135d3c2aeea4fc655705f074130aa57d71653"},
- {file = "wrapt-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:02fce1852f755f44f95af51f69d22e45080102e9d00258053b79367d07af39c0"},
- {file = "wrapt-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:abd52a09d03adf9c763d706df707c343293d5d106aea53483e0ec8d9e310ad5e"},
- {file = "wrapt-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cdb4f085756c96a3af04e6eca7f08b1345e94b53af8921b25c72f096e704e145"},
- {file = "wrapt-1.15.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:230ae493696a371f1dbffaad3dafbb742a4d27a0afd2b1aecebe52b740167e7f"},
- {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63424c681923b9f3bfbc5e3205aafe790904053d42ddcc08542181a30a7a51bd"},
- {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6bcbfc99f55655c3d93feb7ef3800bd5bbe963a755687cbf1f490a71fb7794b"},
- {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c99f4309f5145b93eca6e35ac1a988f0dc0a7ccf9ccdcd78d3c0adf57224e62f"},
- {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b130fe77361d6771ecf5a219d8e0817d61b236b7d8b37cc045172e574ed219e6"},
- {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:96177eb5645b1c6985f5c11d03fc2dbda9ad24ec0f3a46dcce91445747e15094"},
- {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5fe3e099cf07d0fb5a1e23d399e5d4d1ca3e6dfcbe5c8570ccff3e9208274f7"},
- {file = "wrapt-1.15.0-cp38-cp38-win32.whl", hash = "sha256:abd8f36c99512755b8456047b7be10372fca271bf1467a1caa88db991e7c421b"},
- {file = "wrapt-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:b06fa97478a5f478fb05e1980980a7cdf2712015493b44d0c87606c1513ed5b1"},
- {file = "wrapt-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2e51de54d4fb8fb50d6ee8327f9828306a959ae394d3e01a1ba8b2f937747d86"},
- {file = "wrapt-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0970ddb69bba00670e58955f8019bec4a42d1785db3faa043c33d81de2bf843c"},
- {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76407ab327158c510f44ded207e2f76b657303e17cb7a572ffe2f5a8a48aa04d"},
- {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd525e0e52a5ff16653a3fc9e3dd827981917d34996600bbc34c05d048ca35cc"},
- {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d37ac69edc5614b90516807de32d08cb8e7b12260a285ee330955604ed9dd29"},
- {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:078e2a1a86544e644a68422f881c48b84fef6d18f8c7a957ffd3f2e0a74a0d4a"},
- {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2cf56d0e237280baed46f0b5316661da892565ff58309d4d2ed7dba763d984b8"},
- {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7dc0713bf81287a00516ef43137273b23ee414fe41a3c14be10dd95ed98a2df9"},
- {file = "wrapt-1.15.0-cp39-cp39-win32.whl", hash = "sha256:46ed616d5fb42f98630ed70c3529541408166c22cdfd4540b88d5f21006b0eff"},
- {file = "wrapt-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:eef4d64c650f33347c1f9266fa5ae001440b232ad9b98f1f43dfe7a79435c0a6"},
- {file = "wrapt-1.15.0-py3-none-any.whl", hash = "sha256:64b1df0f83706b4ef4cfb4fb0e4c2669100fd7ecacfb59e091fad300d4e04640"},
- {file = "wrapt-1.15.0.tar.gz", hash = "sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a"},
-]
-
[[package]]
name = "xdoctest"
-version = "1.1.1"
+version = "1.1.2"
description = "A rewrite of the builtin doctest module"
optional = false
python-versions = ">=3.6"
files = [
- {file = "xdoctest-1.1.1-py3-none-any.whl", hash = "sha256:d59d4ed91cb92e4430ef0ad1b134a2bef02adff7d2fb9c9f057547bee44081a2"},
- {file = "xdoctest-1.1.1.tar.gz", hash = "sha256:2eac8131bdcdf2781b4e5a62d6de87f044b730cc8db8af142a51bb29c245e779"},
+ {file = "xdoctest-1.1.2-py3-none-any.whl", hash = "sha256:ebe133222534f09597cbe461f97cc5f95ad7b36e5d31f3437caffb9baaddbddb"},
+ {file = "xdoctest-1.1.2.tar.gz", hash = "sha256:267d3d4e362547fa917d3deabaf6888232bbf43c8d30298faeb957dbfa7e0ba3"},
]

-[package.dependencies]
-six = "*"
-
[package.extras]
-all = ["IPython", "IPython", "Pygments", "Pygments", "attrs", "codecov", "colorama", "debugpy", "debugpy", "debugpy", "debugpy", "debugpy", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert", "pyflakes", "pytest", "pytest", "pytest", "pytest-cov", "six", "tomli", "typing"]
-all-strict = ["IPython (==7.10.0)", "IPython (==7.23.1)", "Pygments (==2.0.0)", "Pygments (==2.4.1)", "attrs (==19.2.0)", "codecov (==2.0.15)", "colorama (==0.4.1)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.3.0)", "debugpy (==1.6.0)", "ipykernel (==5.2.0)", "ipykernel (==6.0.0)", "ipython-genutils (==0.2.0)", "jedi (==0.16)", "jinja2 (==3.0.0)", "jupyter-client (==6.1.5)", "jupyter-client (==7.0.0)", "jupyter-core (==4.7.0)", "nbconvert (==6.0.0)", "pyflakes (==2.2.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==6.2.5)", "pytest-cov (==3.0.0)", "six (==1.11.0)", "tomli (==0.2.0)", "typing (==3.7.4)"]
+all = ["IPython (>=7.10.0)", "IPython (>=7.23.1)", "Pygments (>=2.0.0)", "Pygments (>=2.4.1)", "attrs (>=19.2.0)", "colorama (>=0.4.1)", "debugpy (>=1.0.0)", "debugpy (>=1.0.0)", "debugpy (>=1.0.0)", "debugpy (>=1.3.0)", "debugpy (>=1.6.0)", "ipykernel (>=5.2.0)", "ipykernel (>=6.0.0)", "ipykernel (>=6.11.0)", "ipython-genutils (>=0.2.0)", "jedi (>=0.16)", "jinja2 (>=3.0.0)", "jupyter-client (>=6.1.5)", "jupyter-client (>=7.0.0)", "jupyter-core (>=4.7.0)", "nbconvert (>=6.0.0)", "nbconvert (>=6.1.0)", "pyflakes (>=2.2.0)", "pytest (>=4.6.0)", "pytest (>=4.6.0)", "pytest (>=6.2.5)", "pytest-cov (>=3.0.0)", "tomli (>=0.2.0)", "typing (>=3.7.4)"]
+all-strict = ["IPython (==7.10.0)", "IPython (==7.23.1)", "Pygments (==2.0.0)", "Pygments (==2.4.1)", "attrs (==19.2.0)", "colorama (==0.4.1)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.3.0)", "debugpy (==1.6.0)", "ipykernel (==5.2.0)", "ipykernel (==6.0.0)", "ipykernel (==6.11.0)", "ipython-genutils (==0.2.0)", "jedi (==0.16)", "jinja2 (==3.0.0)", "jupyter-client (==6.1.5)", "jupyter-client (==7.0.0)", "jupyter-core (==4.7.0)", "nbconvert (==6.0.0)", "nbconvert (==6.1.0)", "pyflakes (==2.2.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==6.2.5)", "pytest-cov (==3.0.0)", "tomli (==0.2.0)", "typing (==3.7.4)"]
colors = ["Pygments", "Pygments", "colorama"]
-jupyter = ["IPython", "IPython", "attrs", "debugpy", "debugpy", "debugpy", "debugpy", "debugpy", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert"]
-optional = ["IPython", "IPython", "Pygments", "Pygments", "attrs", "colorama", "debugpy", "debugpy", "debugpy", "debugpy", "debugpy", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert", "pyflakes", "tomli"]
-optional-strict = ["IPython (==7.10.0)", "IPython (==7.23.1)", "Pygments (==2.0.0)", "Pygments (==2.4.1)", "attrs (==19.2.0)", "colorama (==0.4.1)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.3.0)", "debugpy (==1.6.0)", "ipykernel (==5.2.0)", "ipykernel (==6.0.0)", "ipython-genutils (==0.2.0)", "jedi (==0.16)", "jinja2 (==3.0.0)", "jupyter-client (==6.1.5)", "jupyter-client (==7.0.0)", "jupyter-core (==4.7.0)", "nbconvert (==6.0.0)", "pyflakes (==2.2.0)", "tomli (==0.2.0)"]
-runtime-strict = ["six (==1.11.0)"]
-tests = ["codecov", "pytest", "pytest", "pytest", "pytest-cov", "typing"]
+jupyter = ["IPython", "IPython", "attrs", "debugpy", "debugpy", "debugpy", "debugpy", "debugpy", "ipykernel", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert", "nbconvert"]
+optional = ["IPython (>=7.10.0)", "IPython (>=7.23.1)", "Pygments (>=2.0.0)", "Pygments (>=2.4.1)", "attrs (>=19.2.0)", "colorama (>=0.4.1)", "debugpy (>=1.0.0)", "debugpy (>=1.0.0)", "debugpy (>=1.0.0)", "debugpy (>=1.3.0)", "debugpy (>=1.6.0)", "ipykernel (>=5.2.0)", "ipykernel (>=6.0.0)", "ipykernel (>=6.11.0)", "ipython-genutils (>=0.2.0)", "jedi (>=0.16)", "jinja2 (>=3.0.0)", "jupyter-client (>=6.1.5)", "jupyter-client (>=7.0.0)", "jupyter-core (>=4.7.0)", "nbconvert (>=6.0.0)", "nbconvert (>=6.1.0)", "pyflakes (>=2.2.0)", "tomli (>=0.2.0)"]
+optional-strict = ["IPython (==7.10.0)", "IPython (==7.23.1)", "Pygments (==2.0.0)", "Pygments (==2.4.1)", "attrs (==19.2.0)", "colorama (==0.4.1)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.3.0)", "debugpy (==1.6.0)", "ipykernel (==5.2.0)", "ipykernel (==6.0.0)", "ipykernel (==6.11.0)", "ipython-genutils (==0.2.0)", "jedi (==0.16)", "jinja2 (==3.0.0)", "jupyter-client (==6.1.5)", "jupyter-client (==7.0.0)", "jupyter-core (==4.7.0)", "nbconvert (==6.0.0)", "nbconvert (==6.1.0)", "pyflakes (==2.2.0)", "tomli (==0.2.0)"]
+tests = ["pytest (>=4.6.0)", "pytest (>=4.6.0)", "pytest (>=6.2.5)", "pytest-cov (>=3.0.0)", "typing (>=3.7.4)"]
tests-binary = ["cmake", "cmake", "ninja", "ninja", "pybind11", "pybind11", "scikit-build", "scikit-build"]
tests-binary-strict = ["cmake (==3.21.2)", "cmake (==3.25.0)", "ninja (==1.10.2)", "ninja (==1.11.1)", "pybind11 (==2.10.3)", "pybind11 (==2.7.1)", "scikit-build (==0.11.1)", "scikit-build (==0.16.1)"]
-tests-strict = ["codecov (==2.0.15)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==6.2.5)", "pytest-cov (==3.0.0)", "typing (==3.7.4)"]
+tests-strict = ["pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==6.2.5)", "pytest-cov (==3.0.0)", "typing (==3.7.4)"]
[[package]]
name = "zipp"
@@ -2705,11 +3038,12 @@ docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker
testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"]
[extras]
-docs = ["furo", "myst-parser", "sphinx", "sphinx-autobuild", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-reredirects"]
+docs = ["furo", "myst-parser", "sphinx", "sphinx-autobuild", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-notfound-page", "sphinx-reredirects"]
+parquet = ["numpy", "numpy", "numpy", "pyarrow", "pyarrow"]
s3 = ["fs-s3fs"]
testing = ["pytest", "pytest-durations"]
[metadata]
lock-version = "2.0"
-python-versions = "<3.12,>=3.7.1"
-content-hash = "ef713a1192d52c92e45d00697c48d51df216f225476d2a5744954302b438dc8e"
+python-versions = ">=3.7.1"
+content-hash = "b31a1c8736111f0a95b99d84aec5444b73c6e11ffb4e7a59d15c490257f69b8a"
diff --git a/pyproject.toml b/pyproject.toml
index bac2a0f7d..cb7965d09 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,9 +1,9 @@
[tool.poetry]
name = "singer-sdk"
-version = "0.30.0"
+version = "0.34.1"
description = "A framework for building Singer taps"
-authors = ["Meltano Team and Contributors"]
-maintainers = ["Meltano Team and Contributors"]
+authors = ["Meltano Team and Contributors "]
+maintainers = ["Meltano Team and Contributors "]
readme = "README.md"
homepage = "https://sdk.meltano.com/en/latest/"
repository = "https://github.com/meltano/sdk"
@@ -24,6 +24,7 @@ classifiers = [
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
"Programming Language :: Python :: Implementation :: CPython",
"Topic :: Software Development :: Libraries :: Application Frameworks",
"Typing :: Typed",
@@ -38,47 +39,68 @@ license = "Apache-2.0"
"Youtube" = "https://www.youtube.com/meltano"
[tool.poetry.dependencies]
-python = "<3.12,>=3.7.1"
-backoff = ">=2.0.0,<3.0"
-pendulum = "^2.1.0"
+python = ">=3.7.1"
+backoff = { version = ">=2.0.0", python = "<4" }
+backports-datetime-fromisoformat = { version = ">=2.0.1", python = "<3.11" }
click = "~=8.0"
-fs = "^2.4.16"
-PyJWT = "~=2.4"
-requests = "^2.25.1"
-cryptography = ">=3.4.6,<42.0.0"
-importlib-metadata = {version = "<5.0.0", markers = "python_version < \"3.8\""}
-importlib-resources = {version = "5.12.0", markers = "python_version < \"3.9\""}
-memoization = ">=0.3.2,<0.5.0"
-jsonpath-ng = "^1.5.3"
-joblib = "^1.0.1"
-inflection = "^0.5.1"
-sqlalchemy = "^1.4"
-python-dotenv = ">=0.20,<0.22"
-typing-extensions = "^4.2.0"
-simplejson = "^3.17.6"
-jsonschema = "^4.16.0"
+cryptography = ">=3.4.6"
+fs = ">=2.4.16"
+importlib-metadata = {version = "<7.0.0", python = "<3.12"}
+importlib-resources = {version = ">=5.12.0", python = "<3.9"}
+inflection = ">=0.5.1"
+joblib = ">=1.0.1"
+jsonpath-ng = ">=1.5.3"
+jsonschema = [
+ { version = ">=4.16.0,<4.18", python = "<3.8" },
+ { version = ">=4.16.0", python = ">=3.8" },
+]
+memoization = { version = ">=0.3.2,<0.5.0", python = "<4" }
packaging = ">=23.1"
-pytz = ">=2022.2.1,<2024.0.0"
-PyYAML = "^6.0"
+pendulum = [
+ { version = ">=2.1.0,<3", python = "<3.8" },
+ { version = ">=2.1.0,<4", python = ">=3.8" },
+]
+PyJWT = "~=2.4"
+python-dateutil = ">=2.8.2"
+python-dotenv = ">=0.20"
+pytz = ">=2022.2.1"
+PyYAML = ">=6.0"
+requests = ">=2.25.1"
+simpleeval = ">=0.9.13"
+simplejson = ">=3.17.6"
+sqlalchemy = ">=1.4,<3.0"
+typing-extensions = ">=4.5.0"
# urllib3 2.0 is not compatible with botocore
urllib3 = ">=1.26,<2"
# Sphinx dependencies installed as optional 'docs' extras
# https://github.com/readthedocs/readthedocs.org/issues/4912#issuecomment-664002569
-sphinx = {version = ">=4.5,<6.0", optional = true}
-furo = {version = ">=2022.12.7,<2024.0.0", optional = true}
-sphinx-copybutton = {version = ">=0.3.1,<0.6.0", optional = true}
-myst-parser = {version = ">=0.17.2,<1.1.0", optional = true}
-sphinx-autobuild = {version = "^2021.3.14", optional = true}
-sphinx-reredirects = {version = "^0.1.1", optional = true}
+sphinx = {version = ">=4.5", optional = true}
+furo = {version = ">=2022.12.7", optional = true}
+sphinx-copybutton = {version = ">=0.3.1", optional = true}
+myst-parser = {version = ">=1", optional = true}
+sphinx-autobuild = {version = ">=2021.3.14", optional = true}
sphinx-inline-tabs = {version = ">=2023.4.21", optional = true, markers = "python_version >= \"3.8\""}
+sphinx-notfound-page = {version = ">=1.0.0", optional = true, python = ">=3.8"}
+sphinx-reredirects = {version = ">=0.1.1", optional = true}
# File storage dependencies installed as optional 'filesystem' extras
-fs-s3fs = {version = "^1.1.1", optional = true}
+fs-s3fs = {version = ">=1.1.1", optional = true}
+
+# Parquet file dependencies installed as optional 'parquet' extras
+numpy = [
+ { version = "<1.22", python = "<3.8", optional = true },
+ { version = ">=1.22,<1.25", python = ">=3.8,<3.9", optional = true },
+ { version = ">=1.22", python = ">=3.9", optional = true },
+]
+pyarrow = [
+ { version = ">=11,<13", python = "<3.8", optional = true },
+ { version = ">=13", python = ">=3.8", optional = true }
+]
# Testing dependencies installed as optional 'testing' extras
-pytest = {version="^7.2.1", optional = true}
-pytest-durations = {version = "^1.2.0", optional = true}
+pytest = {version=">=7.2.1", optional = true}
+pytest-durations = {version = ">=1.2.0", optional = true}
[tool.poetry.extras]
docs = [
@@ -87,54 +109,55 @@ docs = [
"sphinx-copybutton",
"myst-parser",
"sphinx-autobuild",
- "sphinx-reredirects",
"sphinx-inline-tabs",
+ "sphinx-notfound-page",
+ "sphinx-reredirects",
]
s3 = ["fs-s3fs"]
testing = [
"pytest",
"pytest-durations"
]
+parquet = ["numpy", "pyarrow"]
[tool.poetry.group.dev.dependencies]
-# snowflake-connector-python = "2.0.4" # Removed: Too many version conflicts!
-commitizen-version-bump = { git = "https://github.com/meltano/commitizen-version-bump.git", branch = "main" }
-xdoctest = "^1.1.1"
-mypy = "^1.0"
-cookiecutter = ">=2.1.1,<2.2.3"
-PyYAML = "^6.0"
-freezegun = "^1.2.2"
-numpy = [
- { version = "<1.22", python = "<3.8" },
- { version = ">=1.22", python = ">=3.8" },
+coverage = [
+ {extras = ["toml"], version = ">=7.2,<7.3", python = "<3.8"},
+ {extras = ["toml"], version = ">=7.2", python = ">=3.8,<3.12"},
+ {extras = ["toml"], version = ">=7.4", python = ">=3.12"},
]
-requests-mock = "^1.10.0"
-sqlalchemy2-stubs = {version = "^0.0.2a32", allow-prereleases = true}
-types-jsonschema = "^4.17.0.6"
-types-python-dateutil = "^2.8.19"
-types-pytz = ">=2022.7.1.2,<2024.0.0.0"
-types-requests = "^2.28.11"
-types-simplejson = "^3.18.0"
-types-PyYAML = "^6.0.12"
-coverage = {extras = ["toml"], version = "^7.2"}
-pyarrow = ">=11,<13"
-pytest-snapshot = "^0.9.0"
-
-# Cookiecutter tests
-black = "^23.1"
-darglint = "^1.8.0"
-flake8 = "^3.9.0"
-flake8-annotations = "^2.9.1"
-flake8-docstrings = "^1.7.0"
-
-[tool.black]
-exclude = ".*simpleeval.*"
-[tool.pytest.ini_options]
-addopts = '-vvv --ignore=singer_sdk/helpers/_simpleeval.py -m "not external"'
-filterwarnings = [
- "error::sqlalchemy.exc.RemovedIn20Warning",
+# TODO: Remove the Python 3.12 marker when DuckDB supports it
+duckdb = { version = ">=0.8.0", python = "<3.12" }
+duckdb-engine = { version = ">=0.9.4", python = "<3.12" }
+
+mypy = [
+ { version = ">=1.0,<1.5", python = "<3.8" },
+ { version = ">=1.0", python = ">=3.8" },
+]
+pytest-benchmark = ">=4.0.0"
+pytest-snapshot = ">=0.9.0"
+requests-mock = ">=1.10.0"
+time-machine = [
+ { version = ">=2.10.0,<2.11", python = "<3.8" },
+ { version = ">=2.10.0", python = ">=3.8" },
]
+types-jsonschema = [
+ { version = ">=4.17.0.6,<4.18", python = "<3.8" },
+ { version = ">=4.17.0.6", python = ">=3.8" },
+]
+types-python-dateutil = ">=2.8.19"
+types-pytz = ">=2022.7.1.2"
+types-requests = ">=2.28.11"
+types-simplejson = ">=3.18.0"
+types-PyYAML = ">=6.0.12"
+xdoctest = ">=1.1.1"
+
+[tool.poetry.group.benchmark.dependencies]
+pytest-codspeed = ">=2.2.0"
+
+[tool.pytest.ini_options]
+addopts = '--ignore=singer_sdk/helpers/_simpleeval.py -m "not external"'
markers = [
"external: Tests relying on external resources",
"windows: Tests that only run on Windows",
@@ -145,13 +168,16 @@ norecursedirs = "cookiecutter"
[tool.commitizen]
name = "cz_version_bump"
-version = "0.30.0"
+version = "0.34.1"
+changelog_merge_prerelease = true
+prerelease_offset = 1
tag_format = "v$major.$minor.$patch$prerelease"
version_files = [
- "docs/conf.py",
+ "docs/conf.py:^release =",
"pyproject.toml:^version =",
"cookiecutter/tap-template/{{cookiecutter.tap_id}}/pyproject.toml:singer-sdk",
"cookiecutter/target-template/{{cookiecutter.target_id}}/pyproject.toml:singer-sdk",
+ "cookiecutter/mapper-template/{{cookiecutter.mapper_id}}/pyproject.toml:singer-sdk",
".github/ISSUE_TEMPLATE/bug.yml:^ placeholder:",
]
@@ -175,8 +201,7 @@ omit = [
]
[tool.coverage.report]
-exclude_lines = [
- "pragma: no cover",
+exclude_also = [
"def __repr__",
"raise AssertionError",
"raise NotImplementedError",
@@ -190,24 +215,19 @@ fail_under = 82
[tool.mypy]
exclude = "tests"
files = "singer_sdk"
-plugins = [
- "sqlalchemy.ext.mypy.plugin",
-]
python_version = "3.8"
+warn_redundant_casts = true
+warn_return_any = true
warn_unused_configs = true
warn_unused_ignores = true
-warn_return_any = true
[[tool.mypy.overrides]]
ignore_missing_imports = true
module = [
- "bcrypt.*",
- "joblib.*",
- "pyarrow.*",
- "pandas.*",
+ "backports.datetime_fromisoformat.*",
+ "joblib.*", # TODO: Remove when https://github.com/joblib/joblib/issues/1516 is shipped
"jsonpath_ng.*",
- "samples.*",
- "sqlalchemy.*",
+ "pyarrow.*", # TODO: Remove when https://github.com/apache/arrow/issues/32609 if implemented and released
]
[build-system]
@@ -215,20 +235,32 @@ requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"
[tool.poetry.scripts]
-pytest11 = { callable = "singer_sdk:testing.pytest_plugin", extras = ["testing"] }
+pytest11 = { reference = "singer_sdk:testing.pytest_plugin", extras = ["testing"], type = "console" }
+
+[tool.poetry.plugins."singer_sdk.batch_encoders"]
+jsonl = "singer_sdk.contrib.batch_encoder_jsonl:JSONLinesBatcher"
+parquet = "singer_sdk.contrib.batch_encoder_parquet:ParquetBatcher"
[tool.ruff]
+line-length = 88
+src = ["samples", "singer_sdk", "tests"]
+target-version = "py37"
+
+[tool.ruff.format]
+docstring-code-format = true
+
+[tool.ruff.lint]
exclude = [
"cookiecutter/*",
- "singer_sdk/helpers/_simpleeval.py",
- "tests/core/test_simpleeval.py",
+ "*simpleeval*",
]
ignore = [
"ANN101", # Missing type annotation for `self` in method
"ANN102", # Missing type annotation for `cls` in class method
"N818", # Exception name should be named with an Error suffix
+ "COM812", # missing-trailing-comma
+ "ISC001", # single-line-implicit-string-concatenation
]
-line-length = 88
select = [
"F", # Pyflakes
"E", # pycodestyle (error)
@@ -249,7 +281,8 @@ select = [
"C4", # flake8-comprehensions
"DTZ", # flake8-datetimezs
"T10", # flake8-debugger
- "EM", # flake8-error-message
+ "EM", # flake8-errmsg
+ "FA", # flake8-future-annotations
"ISC", # flake8-implicit-str-concat
"ICN", # flake8-import-conventions
"G", # flake8-logging-format
@@ -260,6 +293,7 @@ select = [
"Q", # flake8-quotes
"RSE", # flake8-raise
"RET", # flake8-return
+ "SLF", # flake8-self
"SIM", # flake8-simplify
"TID", # flake8-tidy-imports
"TCH", # flake8-type-checking
@@ -271,53 +305,54 @@ select = [
"PLE", # pylint (error)
"PLR", # pylint (refactor)
"PLW", # pylint (warning)
+ "PERF", # perflint
"RUF", # ruff
]
-src = ["samples", "singer_sdk", "tests"]
-target-version = "py37"
+unfixable = [
+ "ERA", # Don't remove commented out code
+]
-[tool.ruff.per-file-ignores]
+[tool.ruff.lint.per-file-ignores]
"docs/conf.py" = [
"D", # pydocstyle/flake8-docstrings
"I002", # isort: missing-required-import
"INP001", # flake8-no-pep420: implicit-namespace-package
]
"noxfile.py" = ["ANN"]
-"tests/*" = ["ANN", "D1", "D2", "FBT001", "FBT003", "PLR2004", "S101"]
+"tests/*" = ["ANN", "D1", "D2", "FBT001", "FBT003", "PLR2004", "S101", "SLF001"]
# Disabled some checks in samples code
"samples/*" = ["ANN", "D"]
-# Don't require docstrings conventions or type annotations in private modules
-"singer_sdk/helpers/_*.py" = ["ANN", "D105"]
# Templates support a generic resource of type Any.
"singer_sdk/testing/*.py" = ["S101"]
"singer_sdk/testing/templates.py" = ["ANN401"]
-[tool.ruff.flake8-annotations]
+[tool.ruff.lint.flake8-annotations]
allow-star-arg-any = true
mypy-init-return = true
suppress-dummy-args = true
-[tool.ruff.flake8-import-conventions]
-banned-from = ["typing"]
+[tool.ruff.lint.flake8-import-conventions]
+banned-from = ["sqlalchemy", "typing"]
-[tool.ruff.flake8-import-conventions.extend-aliases]
+[tool.ruff.lint.flake8-import-conventions.extend-aliases]
+sqlalchemy = "sa"
typing = "t"
-[tool.ruff.flake8-pytest-style]
+[tool.ruff.lint.flake8-pytest-style]
fixture-parentheses = false
parametrize-names-type = "csv"
-[tool.ruff.isort]
+[tool.ruff.lint.isort]
known-first-party = ["singer_sdk", "samples", "tests"]
required-imports = ["from __future__ import annotations"]
-[tool.ruff.pep8-naming]
+[tool.ruff.lint.pep8-naming]
classmethod-decorators = [
"singer_sdk.cli.plugin_cli",
]
-[tool.ruff.pydocstyle]
+[tool.ruff.lint.pydocstyle]
convention = "google"
-[tool.ruff.pylint]
+[tool.ruff.lint.pylint]
max-args = 9
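A note on the new `singer_sdk.batch_encoders` entry-point group declared in this pyproject diff: the SDK resolves batch encoders at runtime through entry points, so the built-in `jsonl` and `parquet` names are discoverable exactly the way a third-party encoder would be. A minimal discovery sketch, assuming only what the plugin table above registers:

    from singer_sdk.helpers._compat import entry_points

    # List every registered batch encoder; the built-ins come from the
    # [tool.poetry.plugins."singer_sdk.batch_encoders"] table above.
    for ep in entry_points(group="singer_sdk.batch_encoders"):
        print(ep.name, "->", ep.value)

    # Loading an entry point by name returns the batcher class itself.
    jsonl_cls = next(
        ep for ep in entry_points(group="singer_sdk.batch_encoders") if ep.name == "jsonl"
    ).load()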
diff --git a/samples/aapl/aapl.py b/samples/aapl/aapl.py
index ecbf032de..7784a34f2 100644
--- a/samples/aapl/aapl.py
+++ b/samples/aapl/aapl.py
@@ -3,11 +3,16 @@
from __future__ import annotations
import json
-from pathlib import Path
+import sys
from singer_sdk import Stream, Tap
-PROJECT_DIR = Path(__file__).parent
+if sys.version_info < (3, 9):
+ import importlib_resources
+else:
+ import importlib.resources as importlib_resources
+
+PROJECT_DIR = importlib_resources.files("samples.aapl")
class AAPL(Stream):
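For context on the `importlib_resources` switch above: `files()` returns a `Traversable`, so path-style joining and reads keep working even when the package is installed as a zip, which `Path(__file__).parent` cannot guarantee. A sketch of the pattern (the `fundamentals.json` filename is illustrative):

    import sys

    if sys.version_info < (3, 9):
        import importlib_resources
    else:
        import importlib.resources as importlib_resources

    PROJECT_DIR = importlib_resources.files("samples.aapl")
    # Traversable supports "/" joining and read_text(), even inside a zip.
    raw = (PROJECT_DIR / "fundamentals.json").read_text()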
diff --git a/samples/sample_custom_sql_adapter/__init__.py b/samples/sample_custom_sql_adapter/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/samples/sample_custom_sql_adapter/connector.py b/samples/sample_custom_sql_adapter/connector.py
new file mode 100644
index 000000000..6f7745a73
--- /dev/null
+++ b/samples/sample_custom_sql_adapter/connector.py
@@ -0,0 +1,32 @@
+from __future__ import annotations
+
+import typing as t
+
+from sqlalchemy.engine.default import DefaultDialect
+
+if t.TYPE_CHECKING:
+ from types import ModuleType
+
+
+class CustomSQLDialect(DefaultDialect):
+ """Custom SQLite dialect that supports JSON."""
+
+ name = "myrdbms"
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+
+ @classmethod
+ def import_dbapi(cls):
+ """Import the sqlite3 DBAPI."""
+ import sqlite3
+
+ return sqlite3
+
+ @classmethod
+ def dbapi(cls) -> ModuleType: # type: ignore[override]
+ """Return the DBAPI module.
+
+ NOTE: This is a legacy method that will stop being used by SQLAlchemy at some point.
+ """ # noqa: E501
+ return cls.import_dbapi()
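The new sample dialect above is only usable once SQLAlchemy can find it by name. One way to do that, a sketch assuming the sample stays at this module path, is the dialect registry:

    from sqlalchemy.dialects import registry

    # Maps the URL scheme "myrdbms://" to the CustomSQLDialect class above.
    registry.register(
        "myrdbms",
        "samples.sample_custom_sql_adapter.connector",
        "CustomSQLDialect",
    )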
diff --git a/samples/sample_tap_countries/countries_streams.py b/samples/sample_tap_countries/countries_streams.py
index 708e1678a..3b68a5571 100644
--- a/samples/sample_tap_countries/countries_streams.py
+++ b/samples/sample_tap_countries/countries_streams.py
@@ -9,12 +9,12 @@
from __future__ import annotations
import abc
-from pathlib import Path
from singer_sdk import typing as th
+from singer_sdk.helpers._compat import importlib_resources
from singer_sdk.streams.graphql import GraphQLStream
-SCHEMAS_DIR = Path(__file__).parent / Path("./schemas")
+SCHEMAS_DIR = importlib_resources.files(__package__) / "schemas"
class CountriesAPIStream(GraphQLStream, metaclass=abc.ABCMeta):
diff --git a/samples/sample_tap_countries/schemas/continents.json b/samples/sample_tap_countries/schemas/continents.json
index 2fab0c1ed..5d4a9a0bb 100644
--- a/samples/sample_tap_countries/schemas/continents.json
+++ b/samples/sample_tap_countries/schemas/continents.json
@@ -1,7 +1,7 @@
{
"type": "object",
"properties": {
- "code": { "type": ["null", "string"] },
- "name": { "type": ["null", "string"] }
+ "code": { "type": ["string", "null"] },
+ "name": { "type": ["string", "null"] }
}
}
\ No newline at end of file
diff --git a/samples/sample_tap_gitlab/gitlab_graphql_streams.py b/samples/sample_tap_gitlab/gitlab_graphql_streams.py
index b29fbc13e..303964615 100644
--- a/samples/sample_tap_gitlab/gitlab_graphql_streams.py
+++ b/samples/sample_tap_gitlab/gitlab_graphql_streams.py
@@ -6,13 +6,12 @@
from __future__ import annotations
-from pathlib import Path
-
+from singer_sdk.helpers._compat import importlib_resources
from singer_sdk.streams import GraphQLStream
SITE_URL = "https://gitlab.com/graphql"
-SCHEMAS_DIR = Path(__file__).parent / Path("./schemas")
+SCHEMAS_DIR = importlib_resources.files(__package__) / "schemas"
class GitlabGraphQLStream(GraphQLStream):
diff --git a/samples/sample_tap_gitlab/gitlab_rest_streams.py b/samples/sample_tap_gitlab/gitlab_rest_streams.py
index 1480a017d..1db629099 100644
--- a/samples/sample_tap_gitlab/gitlab_rest_streams.py
+++ b/samples/sample_tap_gitlab/gitlab_rest_streams.py
@@ -3,9 +3,9 @@
from __future__ import annotations
import typing as t
-from pathlib import Path
from singer_sdk.authenticators import SimpleAuthenticator
+from singer_sdk.helpers._compat import importlib_resources
from singer_sdk.pagination import SimpleHeaderPaginator
from singer_sdk.streams.rest import RESTStream
from singer_sdk.typing import (
@@ -17,7 +17,7 @@
StringType,
)
-SCHEMAS_DIR = Path(__file__).parent / Path("./schemas")
+SCHEMAS_DIR = importlib_resources.files(__package__) / "schemas"
DEFAULT_URL_BASE = "https://gitlab.com/api/v4"
diff --git a/samples/sample_tap_google_analytics/ga_tap_stream.py b/samples/sample_tap_google_analytics/ga_tap_stream.py
index 8d1f09592..5bd0503fb 100644
--- a/samples/sample_tap_google_analytics/ga_tap_stream.py
+++ b/samples/sample_tap_google_analytics/ga_tap_stream.py
@@ -2,17 +2,16 @@
from __future__ import annotations
+import datetime
import typing as t
-from pathlib import Path
-
-import pendulum
from singer_sdk.authenticators import OAuthJWTAuthenticator
+from singer_sdk.helpers._compat import importlib_resources
from singer_sdk.streams import RESTStream
GOOGLE_OAUTH_ENDPOINT = "https://oauth2.googleapis.com/token"
GA_OAUTH_SCOPES = "https://www.googleapis.com/auth/analytics.readonly"
-SCHEMAS_DIR = Path(__file__).parent / Path("./schemas")
+SCHEMAS_DIR = importlib_resources.files(__package__) / "schemas"
class GoogleJWTAuthenticator(OAuthJWTAuthenticator):
@@ -59,7 +58,7 @@ def prepare_request_payload(
request_def["dateRanges"] = [
{
"startDate": self.config.get("start_date"),
- "endDate": pendulum.now(tz="UTC"),
+ "endDate": datetime.datetime.now(datetime.timezone.utc),
},
]
return {"reportRequests": [request_def]}
diff --git a/samples/sample_tap_hostile/hostile_streams.py b/samples/sample_tap_hostile/hostile_streams.py
index e711b769f..0da506242 100644
--- a/samples/sample_tap_hostile/hostile_streams.py
+++ b/samples/sample_tap_hostile/hostile_streams.py
@@ -28,9 +28,7 @@ class HostilePropertyNamesStream(Stream):
@staticmethod
def get_random_lowercase_string():
- return "".join(
- random.choice(string.ascii_lowercase) for _ in range(10) # noqa: S311
- )
+ return "".join(random.choice(string.ascii_lowercase) for _ in range(10)) # noqa: S311
def get_records(
self,
diff --git a/samples/sample_tap_sqlite/__init__.py b/samples/sample_tap_sqlite/__init__.py
index e4a14b3a5..2cd34144f 100644
--- a/samples/sample_tap_sqlite/__init__.py
+++ b/samples/sample_tap_sqlite/__init__.py
@@ -33,6 +33,10 @@ class SQLiteStream(SQLStream):
"""
connector_class = SQLiteConnector
+ supports_nulls_first = True
+
+ # Use a smaller state message frequency to check intermediate state.
+ STATE_MSG_FREQUENCY = 10
class SQLiteTap(SQLTap):
diff --git a/samples/sample_target_sqlite/__init__.py b/samples/sample_target_sqlite/__init__.py
index 4cd6ddd61..8e43a5e87 100644
--- a/samples/sample_target_sqlite/__init__.py
+++ b/samples/sample_target_sqlite/__init__.py
@@ -19,6 +19,7 @@ class SQLiteConnector(SQLConnector):
allow_temp_tables = False
allow_column_alter = False
allow_merge_upsert = True
+ allow_overwrite: bool = True
def get_sqlalchemy_url(self, config: dict[str, t.Any]) -> str:
"""Generates a SQLAlchemy URL for SQLite."""
diff --git a/singer_sdk/_singerlib/catalog.py b/singer_sdk/_singerlib/catalog.py
index 77fe884d8..6a332b52e 100644
--- a/singer_sdk/_singerlib/catalog.py
+++ b/singer_sdk/_singerlib/catalog.py
@@ -31,11 +31,7 @@ def __missing__(self, breadcrumb: Breadcrumb) -> bool:
Returns:
True if the breadcrumb is selected, False otherwise.
"""
- if len(breadcrumb) >= 2: # noqa: PLR2004
- parent = breadcrumb[:-2]
- return self[parent]
-
- return True
+ return self[breadcrumb[:-2]] if len(breadcrumb) >= 2 else True # noqa: PLR2004
@dataclass
@@ -71,7 +67,7 @@ def from_dict(cls: type[Metadata], value: dict[str, t.Any]) -> Metadata:
)
def to_dict(self) -> dict[str, t.Any]:
- """Convert metadata to a JSON-encodeable dictionary.
+ """Convert metadata to a JSON-encodable dictionary.
Returns:
Metadata object.
@@ -90,7 +86,7 @@ def to_dict(self) -> dict[str, t.Any]:
class StreamMetadata(Metadata):
"""Stream metadata."""
- table_key_properties: list[str] | None = None
+ table_key_properties: t.Sequence[str] | None = None
forced_replication_method: str | None = None
valid_replication_keys: list[str] | None = None
schema_name: str | None = None
@@ -163,7 +159,7 @@ def get_standard_metadata(
*,
schema: dict[str, t.Any] | None = None,
schema_name: str | None = None,
- key_properties: list[str] | None = None,
+ key_properties: t.Sequence[str] | None = None,
valid_replication_keys: list[str] | None = None,
replication_method: str | None = None,
selected_by_default: bool | None = None,
@@ -289,7 +285,7 @@ class CatalogEntry:
metadata: MetadataMapping
schema: Schema
stream: str | None = None
- key_properties: list[str] | None = None
+ key_properties: t.Sequence[str] | None = None
replication_key: str | None = None
is_view: bool | None = None
database: str | None = None
@@ -389,7 +385,7 @@ def to_dict(self) -> dict[str, t.Any]:
return {"streams": [stream.to_dict() for stream in self.streams]}
@property
- def streams(self) -> list[CatalogEntry]:
+ def streams(self) -> t.Sequence[CatalogEntry]:
"""Get catalog entries.
Returns:
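For readers skimming the `__missing__` one-liner earlier in this file: selection still resolves recursively through parent breadcrumbs, because `self[parent]` re-enters `__missing__` until it reaches an explicit entry or the empty root. A minimal sketch of the breadcrumb arithmetic:

    breadcrumb = ("properties", "user", "properties", "id")
    parent = breadcrumb[:-2]   # ("properties", "user")
    root = parent[:-2]         # () -- shorter than 2 elements, so selected by default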
diff --git a/singer_sdk/_singerlib/messages.py b/singer_sdk/_singerlib/messages.py
index 7fc17e57d..cbea58527 100644
--- a/singer_sdk/_singerlib/messages.py
+++ b/singer_sdk/_singerlib/messages.py
@@ -6,12 +6,14 @@
import sys
import typing as t
from dataclasses import asdict, dataclass, field
+from datetime import datetime, timezone
-import pytz
import simplejson as json
-if t.TYPE_CHECKING:
- from datetime import datetime
+if sys.version_info < (3, 11):
+ from backports.datetime_fromisoformat import MonkeyPatch
+
+ MonkeyPatch.patch_fromisoformat()
class SingerMessageType(str, enum.Enum):
@@ -24,6 +26,18 @@ class SingerMessageType(str, enum.Enum):
BATCH = "BATCH"
+def _default_encoding(obj: t.Any) -> str: # noqa: ANN401
+ """Default JSON encoder.
+
+ Args:
+ obj: The object to encode.
+
+ Returns:
+ The encoded object.
+ """
+ return obj.isoformat(sep="T") if isinstance(obj, datetime) else str(obj)
+
+
def exclude_null_dict(pairs: list[tuple[str, t.Any]]) -> dict[str, t.Any]:
"""Exclude null values from a dictionary.
@@ -84,6 +98,29 @@ class RecordMessage(Message):
time_extracted: datetime | None = None
"""The time the record was extracted."""
+ @classmethod
+ def from_dict(cls: type[RecordMessage], data: dict[str, t.Any]) -> RecordMessage:
+ """Create a record message from a dictionary.
+
+ This overrides the default conversion logic, since it uses unnecessary
+ deep copying and is very slow.
+
+ Args:
+ data: The dictionary to create the message from.
+
+ Returns:
+ The created message.
+ """
+ time_extracted = data.get("time_extracted")
+ return cls(
+ stream=data["stream"],
+ record=data["record"],
+ version=data.get("version"),
+ time_extracted=datetime.fromisoformat(time_extracted)
+ if time_extracted
+ else None,
+ )
+
def to_dict(self) -> dict[str, t.Any]:
"""Return a dictionary representation of the message.
@@ -119,7 +156,7 @@ def __post_init__(self) -> None:
raise ValueError(msg)
if self.time_extracted:
- self.time_extracted = self.time_extracted.astimezone(pytz.utc)
+ self.time_extracted = self.time_extracted.astimezone(timezone.utc)
@dataclass
@@ -132,7 +169,7 @@ class SchemaMessage(Message):
schema: dict[str, t.Any]
"""The schema definition."""
- key_properties: list[str] | None = None
+ key_properties: t.Sequence[str] | None = None
"""The key properties."""
bookmark_properties: list[str] | None = None
@@ -189,7 +226,7 @@ def format_message(message: Message) -> str:
Returns:
The formatted message.
"""
- return json.dumps(message.to_dict(), use_decimal=True, default=str)
+ return json.dumps(message.to_dict(), use_decimal=True, default=_default_encoding)
def write_message(message: Message) -> None:
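A quick illustration of the new fast-path `RecordMessage.from_dict` and the `_default_encoding` serializer above (the stream name and values are made up):

    from singer_sdk._singerlib.messages import RecordMessage, format_message

    msg = RecordMessage.from_dict(
        {
            "stream": "users",
            "record": {"id": 1},
            # Parsed with datetime.fromisoformat instead of a generic deep copy.
            "time_extracted": "2023-01-01T00:00:00+00:00",
        }
    )
    # time_extracted is re-emitted via isoformat() by _default_encoding.
    print(format_message(msg))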
diff --git a/singer_sdk/_singerlib/schema.py b/singer_sdk/_singerlib/schema.py
index 9ef615e0d..41dd8104b 100644
--- a/singer_sdk/_singerlib/schema.py
+++ b/singer_sdk/_singerlib/schema.py
@@ -21,6 +21,7 @@
"minLength",
"format",
"type",
+ "default",
"required",
"enum",
"pattern",
@@ -47,6 +48,7 @@ class Schema:
"""
type: str | list[str] | None = None # noqa: A003
+ default: t.Any | None = None
properties: dict | None = None
items: t.Any | None = None
description: str | None = None
diff --git a/singer_sdk/_singerlib/utils.py b/singer_sdk/_singerlib/utils.py
index 778b23a49..c86b0f37f 100644
--- a/singer_sdk/_singerlib/utils.py
+++ b/singer_sdk/_singerlib/utils.py
@@ -1,10 +1,15 @@
from __future__ import annotations
+import sys
from datetime import datetime, timedelta
-import dateutil.parser
import pytz
+if sys.version_info < (3, 11):
+ from backports.datetime_fromisoformat import MonkeyPatch
+
+ MonkeyPatch.patch_fromisoformat()
+
DATETIME_FMT = "%04Y-%m-%dT%H:%M:%S.%fZ"
DATETIME_FMT_SAFE = "%Y-%m-%dT%H:%M:%S.%fZ"
@@ -26,7 +31,7 @@ def strptime_to_utc(dtimestr: str) -> datetime:
Returns:
A UTC datetime.datetime object
"""
- d_object: datetime = dateutil.parser.parse(dtimestr)
+ d_object: datetime = datetime.fromisoformat(dtimestr)
if d_object.tzinfo is None:
return d_object.replace(tzinfo=pytz.UTC)
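Why the MonkeyPatch above: before Python 3.11, `datetime.fromisoformat` only parsed the narrow format emitted by `isoformat()` and rejected common variants such as a trailing "Z", which `dateutil.parser.parse` used to accept; `backports-datetime-fromisoformat` patches in the 3.11 behavior. A sketch:

    from datetime import datetime

    # Accepted natively on 3.11+; on older interpreters only after
    # MonkeyPatch.patch_fromisoformat() has run (as in the module above).
    datetime.fromisoformat("2023-01-01T00:00:00Z")
    datetime.fromisoformat("2023-01-01T00:00:00+00:00")  # accepted everywhere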
diff --git a/singer_sdk/about.py b/singer_sdk/about.py
index 78478c417..e5bbf60de 100644
--- a/singer_sdk/about.py
+++ b/singer_sdk/about.py
@@ -138,10 +138,7 @@ def format_about(self, about_info: AboutInfo) -> str:
Returns:
A formatted string.
"""
- max_setting_len = t.cast(
- int,
- max(len(k) for k in about_info.settings["properties"]),
- )
+ max_setting_len = max(len(k) for k in about_info.settings["properties"])
# Set table base for markdown
table_base = (
diff --git a/singer_sdk/authenticators.py b/singer_sdk/authenticators.py
index 61382daba..d31ab1dee 100644
--- a/singer_sdk/authenticators.py
+++ b/singer_sdk/authenticators.py
@@ -5,7 +5,7 @@
import base64
import math
import typing as t
-from datetime import datetime, timedelta
+from datetime import timedelta
from types import MappingProxyType
from urllib.parse import parse_qs, urlencode, urlsplit, urlunsplit
@@ -19,6 +19,8 @@
if t.TYPE_CHECKING:
import logging
+ from pendulum import DateTime
+
from singer_sdk.streams.rest import RESTStream
@@ -378,7 +380,7 @@ def __init__(
# Initialize internal tracking attributes
self.access_token: str | None = None
self.refresh_token: str | None = None
- self.last_refreshed: datetime | None = None
+ self.last_refreshed: DateTime | None = None
self.expires_in: int | None = None
@property
@@ -441,12 +443,12 @@ def oauth_request_body(self) -> dict:
@property
def oauth_request_body(self) -> dict:
return {
- 'grant_type': 'password',
- 'scope': 'https://api.powerbi.com',
- 'resource': 'https://analysis.windows.net/powerbi/api',
- 'client_id': self.config["client_id"],
- 'username': self.config.get("username", self.config["client_id"]),
- 'password': self.config["password"],
+ "grant_type": "password",
+ "scope": "https://api.powerbi.com",
+ "resource": "https://analysis.windows.net/powerbi/api",
+ "client_id": self.config["client_id"],
+ "username": self.config.get("username", self.config["client_id"]),
+ "password": self.config["password"],
}
Raises:
@@ -462,9 +464,7 @@ def client_id(self) -> str | None:
Returns:
Optional client secret from stream config if it has been set.
"""
- if self.config:
- return self.config.get("client_id")
- return None
+ return self.config.get("client_id") if self.config else None
@property
def client_secret(self) -> str | None:
@@ -473,9 +473,7 @@ def client_secret(self) -> str | None:
Returns:
Optional client secret from stream config if it has been set.
"""
- if self.config:
- return self.config.get("client_secret")
- return None
+ return self.config.get("client_secret") if self.config else None
def is_token_valid(self) -> bool:
"""Check if token is valid.
@@ -487,9 +485,7 @@ def is_token_valid(self) -> bool:
return False
if not self.expires_in:
return True
- if self.expires_in > (utc_now() - self.last_refreshed).total_seconds():
- return True
- return False
+ return self.expires_in > (utc_now() - self.last_refreshed).total_seconds()
# Authentication and refresh
def update_access_token(self) -> None:
@@ -520,7 +516,7 @@ def update_access_token(self) -> None:
self.expires_in = int(expiration) if expiration else None
if self.expires_in is None:
self.logger.debug(
- "No expires_in receied in OAuth response and no "
+ "No expires_in received in OAuth response and no "
"default_expiration set. Token will be treated as if it never "
"expires.",
)
@@ -566,7 +562,7 @@ def oauth_request_body(self) -> dict:
@property
def oauth_request_payload(self) -> dict:
- """Return request paytload for OAuth request.
+ """Return request payload for OAuth request.
Returns:
Payload object for OAuth.
diff --git a/singer_sdk/batch.py b/singer_sdk/batch.py
index 0cbf11917..4545285e4 100644
--- a/singer_sdk/batch.py
+++ b/singer_sdk/batch.py
@@ -1,12 +1,12 @@
"""Batching utilities for Singer SDK."""
from __future__ import annotations
-import gzip
import itertools
-import json
import typing as t
+import warnings
from abc import ABC, abstractmethod
-from uuid import uuid4
+
+from singer_sdk.helpers._compat import entry_points
if t.TYPE_CHECKING:
from singer_sdk.helpers._batch import BatchConfig
@@ -14,6 +14,22 @@
_T = t.TypeVar("_T")
+def __getattr__(name: str) -> t.Any: # noqa: ANN401 # pragma: no cover
+ if name == "JSONLinesBatcher":
+ warnings.warn(
+ "The class JSONLinesBatcher was moved to singer_sdk.contrib.batch_encoder_jsonl.", # noqa: E501
+ DeprecationWarning,
+ stacklevel=2,
+ )
+
+ from singer_sdk.contrib.batch_encoder_jsonl import JSONLinesBatcher
+
+ return JSONLinesBatcher
+
+ msg = f"module {__name__} has no attribute {name}"
+ raise AttributeError(msg)
+
+
def lazy_chunked_generator(
iterable: t.Iterable[_T],
chunk_size: int,
@@ -71,41 +87,46 @@ def get_batches(
raise NotImplementedError
-class JSONLinesBatcher(BaseBatcher):
- """JSON Lines Record Batcher."""
+class Batcher(BaseBatcher):
+ """Determines batch type and then serializes batches to that format."""
- def get_batches(
- self,
- records: t.Iterator[dict],
- ) -> t.Iterator[list[str]]:
- """Yield manifest of batches.
+ def get_batches(self, records: t.Iterator[dict]) -> t.Iterator[list[str]]:
+ """Manifest of batches.
Args:
records: The records to batch.
- Yields:
+ Returns:
A list of file paths (called a manifest).
"""
- sync_id = f"{self.tap_name}--{self.stream_name}-{uuid4()}"
- prefix = self.batch_config.storage.prefix or ""
-
- for i, chunk in enumerate(
- lazy_chunked_generator(
- records,
- self.batch_config.batch_size,
- ),
- start=1,
- ):
- filename = f"{prefix}{sync_id}-{i}.json.gz"
- with self.batch_config.storage.fs(create=True) as fs:
- # TODO: Determine compression from config.
- with fs.open(filename, "wb") as f, gzip.GzipFile(
- fileobj=f,
- mode="wb",
- ) as gz:
- gz.writelines(
- (json.dumps(record, default=str) + "\n").encode()
- for record in chunk
- )
- file_url = fs.geturl(filename)
- yield [file_url]
+ encoding_format = self.batch_config.encoding.format
+ batcher_type = self.get_batcher(encoding_format)
+ batcher = batcher_type(
+ self.tap_name,
+ self.stream_name,
+ self.batch_config,
+ )
+ return batcher.get_batches(records)
+
+ @classmethod
+ def get_batcher(cls, name: str) -> type[BaseBatcher]:
+ """Get a batcher by name.
+
+ Args:
+ name: The name of the batcher.
+
+ Returns:
+ The batcher class.
+
+ Raises:
+ ValueError: If the batcher is not found.
+ """
+ plugins = entry_points(group="singer_sdk.batch_encoders")
+
+ try:
+ plugin = next(filter(lambda x: x.name == name, plugins))
+ except StopIteration:
+ message = f"Unsupported batcher: {name}"
+ raise ValueError(message) from None
+
+ return plugin.load() # type: ignore[no-any-return]
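The new `Batcher` is a thin dispatcher: it reads `batch_config.encoding.format` and resolves the concrete encoder through the `singer_sdk.batch_encoders` entry-point group shown in the pyproject diff. A usage sketch, assuming a local file storage root and that `BatchConfig.from_dict` accepts this shape:

    from singer_sdk.batch import Batcher
    from singer_sdk.helpers._batch import BatchConfig

    config = BatchConfig.from_dict(
        {
            "encoding": {"format": "jsonl", "compression": "gzip"},
            "storage": {"root": "file:///tmp/batches", "prefix": "demo-"},
        }
    )
    batcher = Batcher("my-tap", "users", config)
    for manifest in batcher.get_batches(iter([{"id": 1}, {"id": 2}])):
        print(manifest)  # e.g. ["file:///tmp/batches/demo-my-tap--users-<uuid>-1.json.gz"]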
diff --git a/singer_sdk/connectors/sql.py b/singer_sdk/connectors/sql.py
index aecfbb0c1..ae5ff78db 100644
--- a/singer_sdk/connectors/sql.py
+++ b/singer_sdk/connectors/sql.py
@@ -2,6 +2,8 @@
from __future__ import annotations
+import decimal
+import json
import logging
import typing as t
import warnings
@@ -9,14 +11,16 @@
from datetime import datetime
from functools import lru_cache
-import sqlalchemy
-from sqlalchemy.engine import Engine
+import simplejson
+import sqlalchemy as sa
from singer_sdk import typing as th
from singer_sdk._singerlib import CatalogEntry, MetadataMapping, Schema
from singer_sdk.exceptions import ConfigValidationError
+from singer_sdk.helpers.capabilities import TargetLoadMethods
if t.TYPE_CHECKING:
+ from sqlalchemy.engine import Engine
from sqlalchemy.engine.reflection import Inspector
@@ -37,6 +41,7 @@ class SQLConnector:
allow_column_rename: bool = True # Whether RENAME COLUMN is supported.
allow_column_alter: bool = False # Whether altering column types is supported.
allow_merge_upsert: bool = False # Whether MERGE UPSERT is supported.
+ allow_overwrite: bool = False # Whether overwrite load method is supported.
allow_temp_tables: bool = True # Whether temp tables are supported.
_cached_engine: Engine | None = None
@@ -53,6 +58,8 @@ def __init__(
"""
self._config: dict[str, t.Any] = config or {}
self._sqlalchemy_url: str | None = sqlalchemy_url or None
+ self._table_cols_cache: dict[str, dict[str, sa.Column]] = {}
+ self._schema_cache: set[str] = set()
@property
def config(self) -> dict:
@@ -73,11 +80,11 @@ def logger(self) -> logging.Logger:
return logging.getLogger("sqlconnector")
@contextmanager
- def _connect(self) -> t.Iterator[sqlalchemy.engine.Connection]:
+ def _connect(self) -> t.Iterator[sa.engine.Connection]:
with self._engine.connect().execution_options(stream_results=True) as conn:
yield conn
- def create_sqlalchemy_connection(self) -> sqlalchemy.engine.Connection:
+ def create_sqlalchemy_connection(self) -> sa.engine.Connection:
"""(DEPRECATED) Return a new SQLAlchemy connection using the provided config.
Do not use the SQLConnector's connection directly. Instead, if you need
@@ -124,7 +131,7 @@ def create_sqlalchemy_engine(self) -> Engine:
return self._engine
@property
- def connection(self) -> sqlalchemy.engine.Connection:
+ def connection(self) -> sa.engine.Connection:
"""(DEPRECATED) Return or set the SQLAlchemy connection object.
Do not use the SQLConnector's connection directly. Instead, if you need
@@ -179,9 +186,9 @@ def get_sqlalchemy_url(self, config: dict[str, t.Any]) -> str:
@staticmethod
def to_jsonschema_type(
sql_type: (
- str
- | sqlalchemy.types.TypeEngine
- | type[sqlalchemy.types.TypeEngine]
+ str # noqa: ANN401
+ | sa.types.TypeEngine
+ | type[sa.types.TypeEngine]
| t.Any
),
) -> dict:
@@ -203,11 +210,11 @@ def to_jsonschema_type(
Returns:
The JSON Schema representation of the provided type.
"""
- if isinstance(sql_type, (str, sqlalchemy.types.TypeEngine)):
+ if isinstance(sql_type, (str, sa.types.TypeEngine)):
return th.to_jsonschema_type(sql_type)
if isinstance(sql_type, type):
- if issubclass(sql_type, sqlalchemy.types.TypeEngine):
+ if issubclass(sql_type, sa.types.TypeEngine):
return th.to_jsonschema_type(sql_type)
msg = f"Unexpected type received: '{sql_type.__name__}'"
@@ -217,7 +224,7 @@ def to_jsonschema_type(
raise ValueError(msg)
@staticmethod
- def to_sql_type(jsonschema_type: dict) -> sqlalchemy.types.TypeEngine:
+ def to_sql_type(jsonschema_type: dict) -> sa.types.TypeEngine:
"""Return a JSON Schema representation of the provided type.
By default will call `typing.to_sql_type()`.
@@ -280,13 +287,13 @@ def get_fully_qualified_name(
return delimiter.join(parts)
@property
- def _dialect(self) -> sqlalchemy.engine.Dialect:
+ def _dialect(self) -> sa.engine.Dialect:
"""Return the dialect object.
Returns:
The dialect object.
"""
- return t.cast(sqlalchemy.engine.Dialect, self._engine.dialect)
+ return self._engine.dialect
@property
def _engine(self) -> Engine:
@@ -300,7 +307,7 @@ def _engine(self) -> Engine:
"""
if not self._cached_engine:
self._cached_engine = self.create_engine()
- return t.cast(Engine, self._cached_engine)
+ return self._cached_engine
def create_engine(self) -> Engine:
"""Creates and returns a new engine. Do not call outside of _engine.
@@ -316,7 +323,21 @@ def create_engine(self) -> Engine:
Returns:
A new SQLAlchemy Engine.
"""
- return sqlalchemy.create_engine(self.sqlalchemy_url, echo=False)
+ try:
+ return sa.create_engine(
+ self.sqlalchemy_url,
+ echo=False,
+ json_serializer=self.serialize_json,
+ json_deserializer=self.deserialize_json,
+ )
+ except TypeError:
+ self.logger.exception(
+ "Retrying engine creation with fewer arguments due to TypeError.",
+ )
+ return sa.create_engine(
+ self.sqlalchemy_url,
+ echo=False,
+ )
def quote(self, name: str) -> str:
"""Quote a name if it needs quoting, using '.' as a name-part delimiter.
@@ -423,8 +444,10 @@ def discover_catalog_entry(
if pk_def and "constrained_columns" in pk_def:
possible_primary_keys.append(pk_def["constrained_columns"])
+ # An element of the columns list is ``None`` if it's an expression and is
+ # returned in the ``expressions`` list of the reflected index.
possible_primary_keys.extend(
- index_def["column_names"]
+ index_def["column_names"] # type: ignore[misc]
for index_def in inspected.get_indexes(table_name, schema=schema_name)
if index_def.get("unique", False)
)
@@ -436,9 +459,7 @@ def discover_catalog_entry(
for column_def in inspected.get_columns(table_name, schema=schema_name):
column_name = column_def["name"]
is_nullable = column_def.get("nullable", False)
- jsonschema_type: dict = self.to_jsonschema_type(
- t.cast(sqlalchemy.types.TypeEngine, column_def["type"]),
- )
+ jsonschema_type: dict = self.to_jsonschema_type(column_def["type"])
table_schema.append(
th.Property(
name=column_name,
@@ -487,7 +508,7 @@ def discover_catalog_entries(self) -> list[dict]:
"""
result: list[dict] = []
engine = self._engine
- inspected = sqlalchemy.inspect(engine)
+ inspected = sa.inspect(engine)
for schema_name in self.get_schema_names(engine, inspected):
# Iterate through each table and view
for table_name, is_view in self.get_object_names(
@@ -551,10 +572,7 @@ def table_exists(self, full_table_name: str) -> bool:
"""
_, schema_name, table_name = self.parse_full_table_name(full_table_name)
- return t.cast(
- bool,
- sqlalchemy.inspect(self._engine).has_table(table_name, schema_name),
- )
+ return sa.inspect(self._engine).has_table(table_name, schema_name)
def schema_exists(self, schema_name: str) -> bool:
"""Determine if the target database schema already exists.
@@ -565,14 +583,18 @@ def schema_exists(self, schema_name: str) -> bool:
Returns:
True if the database schema exists, False if not.
"""
- schema_names = sqlalchemy.inspect(self._engine).get_schema_names()
- return schema_name in schema_names
+ if schema_name not in self._schema_cache:
+ self._schema_cache = set(
+ sa.inspect(self._engine).get_schema_names(),
+ )
+
+ return schema_name in self._schema_cache
def get_table_columns(
self,
full_table_name: str,
column_names: list[str] | None = None,
- ) -> dict[str, sqlalchemy.Column]:
+ ) -> dict[str, sa.Column]:
"""Return a list of table columns.
Args:
@@ -582,26 +604,30 @@ def get_table_columns(
Returns:
An ordered list of column objects.
"""
- _, schema_name, table_name = self.parse_full_table_name(full_table_name)
- inspector = sqlalchemy.inspect(self._engine)
- columns = inspector.get_columns(table_name, schema_name)
-
- return {
- col_meta["name"]: sqlalchemy.Column(
- col_meta["name"],
- col_meta["type"],
- nullable=col_meta.get("nullable", False),
- )
- for col_meta in columns
- if not column_names
- or col_meta["name"].casefold() in {col.casefold() for col in column_names}
- }
+ if full_table_name not in self._table_cols_cache:
+ _, schema_name, table_name = self.parse_full_table_name(full_table_name)
+ inspector = sa.inspect(self._engine)
+ columns = inspector.get_columns(table_name, schema_name)
+
+ self._table_cols_cache[full_table_name] = {
+ col_meta["name"]: sa.Column(
+ col_meta["name"],
+ col_meta["type"],
+ nullable=col_meta.get("nullable", False),
+ )
+ for col_meta in columns
+ if not column_names
+ or col_meta["name"].casefold()
+ in {col.casefold() for col in column_names}
+ }
+
+ return self._table_cols_cache[full_table_name]
def get_table(
self,
full_table_name: str,
column_names: list[str] | None = None,
- ) -> sqlalchemy.Table:
+ ) -> sa.Table:
"""Return a table object.
Args:
@@ -616,8 +642,8 @@ def get_table(
column_names=column_names,
).values()
_, schema_name, table_name = self.parse_full_table_name(full_table_name)
- meta = sqlalchemy.MetaData()
- return sqlalchemy.schema.Table(
+ meta = sa.MetaData()
+ return sa.schema.Table(
table_name,
meta,
*list(columns),
@@ -642,14 +668,14 @@ def create_schema(self, schema_name: str) -> None:
Args:
schema_name: The target schema to create.
"""
- with self._connect() as conn:
- conn.execute(sqlalchemy.schema.CreateSchema(schema_name))
+ with self._connect() as conn, conn.begin():
+ conn.execute(sa.schema.CreateSchema(schema_name))
def create_empty_table(
self,
full_table_name: str,
schema: dict,
- primary_keys: list[str] | None = None,
+ primary_keys: t.Sequence[str] | None = None,
partition_keys: list[str] | None = None,
as_temp_table: bool = False, # noqa: FBT001, FBT002
) -> None:
@@ -673,8 +699,8 @@ def create_empty_table(
_ = partition_keys # Not supported in generic implementation.
_, schema_name, table_name = self.parse_full_table_name(full_table_name)
- meta = sqlalchemy.MetaData(schema=schema_name)
- columns: list[sqlalchemy.Column] = []
+ meta = sa.MetaData(schema=schema_name)
+ columns: list[sa.Column] = []
primary_keys = primary_keys or []
try:
properties: dict = schema["properties"]
@@ -684,21 +710,21 @@ def create_empty_table(
for property_name, property_jsonschema in properties.items():
is_primary_key = property_name in primary_keys
columns.append(
- sqlalchemy.Column(
+ sa.Column(
property_name,
self.to_sql_type(property_jsonschema),
primary_key=is_primary_key,
),
)
- _ = sqlalchemy.Table(table_name, meta, *columns)
+ _ = sa.Table(table_name, meta, *columns)
meta.create_all(self._engine)
def _create_empty_column(
self,
full_table_name: str,
column_name: str,
- sql_type: sqlalchemy.types.TypeEngine,
+ sql_type: sa.types.TypeEngine,
) -> None:
"""Create a new column.
@@ -736,7 +762,7 @@ def prepare_table(
self,
full_table_name: str,
schema: dict,
- primary_keys: list[str],
+ primary_keys: t.Sequence[str],
partition_keys: list[str] | None = None,
as_temp_table: bool = False, # noqa: FBT002, FBT001
) -> None:
@@ -758,6 +784,16 @@ def prepare_table(
as_temp_table=as_temp_table,
)
return
+ if self.config["load_method"] == TargetLoadMethods.OVERWRITE:
+ self.get_table(full_table_name=full_table_name).drop(self._engine)
+ self.create_empty_table(
+ full_table_name=full_table_name,
+ schema=schema,
+ primary_keys=primary_keys,
+ partition_keys=partition_keys,
+ as_temp_table=as_temp_table,
+ )
+ return
for property_name, property_def in schema["properties"].items():
self.prepare_column(
@@ -770,7 +806,7 @@ def prepare_column(
self,
full_table_name: str,
column_name: str,
- sql_type: sqlalchemy.types.TypeEngine,
+ sql_type: sa.types.TypeEngine,
) -> None:
"""Adapt target table to provided schema if possible.
@@ -813,13 +849,13 @@ def rename_column(self, full_table_name: str, old_name: str, new_name: str) -> N
column_name=old_name,
new_column_name=new_name,
)
- with self._connect() as conn:
+ with self._connect() as conn, conn.begin():
conn.execute(column_rename_ddl)
def merge_sql_types(
self,
- sql_types: list[sqlalchemy.types.TypeEngine],
- ) -> sqlalchemy.types.TypeEngine:
+ sql_types: t.Sequence[sa.types.TypeEngine],
+ ) -> sa.types.TypeEngine:
"""Return a compatible SQL type for the selected type list.
Args:
@@ -850,7 +886,7 @@ def merge_sql_types(
# If greater than two evaluate the first pair then on down the line
if len(sql_types) > 2: # noqa: PLR2004
return self.merge_sql_types(
- [self.merge_sql_types([sql_types[0], sql_types[1]])] + sql_types[2:],
+ [self.merge_sql_types([sql_types[0], sql_types[1]]), *sql_types[2:]],
)
# Get the generic type class
@@ -862,10 +898,7 @@ def merge_sql_types(
if isinstance(generic_type, type):
if issubclass(
generic_type,
- (sqlalchemy.types.String, sqlalchemy.types.Unicode),
- ) or issubclass(
- generic_type,
- (sqlalchemy.types.String, sqlalchemy.types.Unicode),
+ (sa.types.String, sa.types.Unicode),
):
# If length None or 0 then is varchar max ?
if (
@@ -884,8 +917,8 @@ def merge_sql_types(
def _sort_types(
self,
- sql_types: t.Iterable[sqlalchemy.types.TypeEngine],
- ) -> list[sqlalchemy.types.TypeEngine]:
+ sql_types: t.Iterable[sa.types.TypeEngine],
+ ) -> t.Sequence[sa.types.TypeEngine]:
"""Return the input types sorted from most to least compatible.
For example, [Smallint, Integer, Datetime, String, Double] would become
@@ -896,14 +929,14 @@ def _sort_types(
length will be sorted earlier.
Args:
- sql_types (List[sqlalchemy.types.TypeEngine]): [description]
+ sql_types (List[sa.types.TypeEngine]): [description]
Returns:
The sorted list.
"""
def _get_type_sort_key(
- sql_type: sqlalchemy.types.TypeEngine,
+ sql_type: sa.types.TypeEngine,
) -> tuple[int, int]:
# return rank, with higher numbers ranking first
@@ -927,7 +960,7 @@ def _get_column_type(
self,
full_table_name: str,
column_name: str,
- ) -> sqlalchemy.types.TypeEngine:
+ ) -> sa.types.TypeEngine:
"""Get the SQL type of the declared column.
Args:
@@ -946,14 +979,14 @@ def _get_column_type(
msg = f"Column `{column_name}` does not exist in table `{full_table_name}`."
raise KeyError(msg) from ex
- return t.cast(sqlalchemy.types.TypeEngine, column.type)
+ return column.type
@staticmethod
def get_column_add_ddl(
table_name: str,
column_name: str,
- column_type: sqlalchemy.types.TypeEngine,
- ) -> sqlalchemy.DDL:
+ column_type: sa.types.TypeEngine,
+ ) -> sa.DDL:
"""Get the create column DDL statement.
Override this if your database uses a different syntax for creating columns.
@@ -966,13 +999,13 @@ def get_column_add_ddl(
Returns:
A sqlalchemy DDL instance.
"""
- create_column_clause = sqlalchemy.schema.CreateColumn(
- sqlalchemy.Column(
+ create_column_clause = sa.schema.CreateColumn(
+ sa.Column(
column_name,
column_type,
),
)
- return sqlalchemy.DDL(
+ return sa.DDL(
"ALTER TABLE %(table_name)s ADD COLUMN %(create_column_clause)s",
{
"table_name": table_name,
@@ -985,7 +1018,7 @@ def get_column_rename_ddl(
table_name: str,
column_name: str,
new_column_name: str,
- ) -> sqlalchemy.DDL:
+ ) -> sa.DDL:
"""Get the create column DDL statement.
Override this if your database uses a different syntax for renaming columns.
@@ -998,7 +1031,7 @@ def get_column_rename_ddl(
Returns:
A sqlalchemy DDL instance.
"""
- return sqlalchemy.DDL(
+ return sa.DDL(
"ALTER TABLE %(table_name)s "
"RENAME COLUMN %(column_name)s to %(new_column_name)s",
{
@@ -1012,8 +1045,8 @@ def get_column_rename_ddl(
def get_column_alter_ddl(
table_name: str,
column_name: str,
- column_type: sqlalchemy.types.TypeEngine,
- ) -> sqlalchemy.DDL:
+ column_type: sa.types.TypeEngine,
+ ) -> sa.DDL:
"""Get the alter column DDL statement.
Override this if your database uses a different syntax for altering columns.
@@ -1026,7 +1059,7 @@ def get_column_alter_ddl(
Returns:
A sqlalchemy DDL instance.
"""
- return sqlalchemy.DDL(
+ return sa.DDL(
"ALTER TABLE %(table_name)s ALTER COLUMN %(column_name)s (%(column_type)s)",
{
"table_name": table_name,
@@ -1037,7 +1070,7 @@ def get_column_alter_ddl(
@staticmethod
def remove_collation(
- column_type: sqlalchemy.types.TypeEngine,
+ column_type: sa.types.TypeEngine,
) -> str | None:
"""Removes collation for the given column TypeEngine instance.
@@ -1055,7 +1088,7 @@ def remove_collation(
@staticmethod
def update_collation(
- column_type: sqlalchemy.types.TypeEngine,
+ column_type: sa.types.TypeEngine,
collation: str | None,
) -> None:
"""Sets column collation if column type has a collation attribute.
@@ -1071,7 +1104,7 @@ def _adapt_column_type(
self,
full_table_name: str,
column_name: str,
- sql_type: sqlalchemy.types.TypeEngine,
+ sql_type: sa.types.TypeEngine,
) -> None:
"""Adapt table column type to support the new JSON schema type.
@@ -1083,7 +1116,7 @@ def _adapt_column_type(
Raises:
NotImplementedError: if altering columns is not supported.
"""
- current_type: sqlalchemy.types.TypeEngine = self._get_column_type(
+ current_type: sa.types.TypeEngine = self._get_column_type(
full_table_name,
column_name,
)
@@ -1122,5 +1155,37 @@ def _adapt_column_type(
column_name=column_name,
column_type=compatible_sql_type,
)
- with self._connect() as conn:
+ with self._connect() as conn, conn.begin():
conn.execute(alter_column_ddl)
+
+ def serialize_json(self, obj: object) -> str:
+ """Serialize an object to a JSON string.
+
+ Target connectors may override this method to provide custom serialization logic
+ for JSON types.
+
+ Args:
+ obj: The object to serialize.
+
+ Returns:
+ The JSON string.
+
+ .. versionadded:: 0.31.0
+ """
+ return simplejson.dumps(obj, use_decimal=True)
+
+ def deserialize_json(self, json_str: str) -> object:
+ """Deserialize a JSON string to an object.
+
+ Tap connectors may override this method to provide custom deserialization
+ logic for JSON types.
+
+ Args:
+ json_str: The JSON string to deserialize.
+
+ Returns:
+ The deserialized object.
+
+ .. versionadded:: 0.31.0
+ """
+ return json.loads(json_str, parse_float=decimal.Decimal)
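The `json_serializer`/`json_deserializer` hooks wired into `create_engine` above make the new `serialize_json`/`deserialize_json` methods the single override point for JSON column handling. A sketch of a connector customizing them (the `sort_keys` choice is illustrative, not an SDK requirement):

    import simplejson

    from singer_sdk.connectors import SQLConnector


    class MyConnector(SQLConnector):
        def serialize_json(self, obj: object) -> str:
            # Stable key order can help diff-based change detection downstream.
            return simplejson.dumps(obj, use_decimal=True, sort_keys=True)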
diff --git a/singer_sdk/contrib/__init__.py b/singer_sdk/contrib/__init__.py
new file mode 100644
index 000000000..ebad0cbc7
--- /dev/null
+++ b/singer_sdk/contrib/__init__.py
@@ -0,0 +1 @@
+"""Singer SDK contrib modules."""
diff --git a/singer_sdk/contrib/batch_encoder_jsonl.py b/singer_sdk/contrib/batch_encoder_jsonl.py
new file mode 100644
index 000000000..6ce4c8793
--- /dev/null
+++ b/singer_sdk/contrib/batch_encoder_jsonl.py
@@ -0,0 +1,52 @@
+"""JSON Lines Record Batcher."""
+
+from __future__ import annotations
+
+import gzip
+import json
+import typing as t
+from uuid import uuid4
+
+from singer_sdk.batch import BaseBatcher, lazy_chunked_generator
+
+__all__ = ["JSONLinesBatcher"]
+
+
+class JSONLinesBatcher(BaseBatcher):
+ """JSON Lines Record Batcher."""
+
+ def get_batches(
+ self,
+ records: t.Iterator[dict],
+ ) -> t.Iterator[list[str]]:
+ """Yield manifest of batches.
+
+ Args:
+ records: The records to batch.
+
+ Yields:
+ A list of file paths (called a manifest).
+ """
+ sync_id = f"{self.tap_name}--{self.stream_name}-{uuid4()}"
+ prefix = self.batch_config.storage.prefix or ""
+
+ for i, chunk in enumerate(
+ lazy_chunked_generator(
+ records,
+ self.batch_config.batch_size,
+ ),
+ start=1,
+ ):
+ filename = f"{prefix}{sync_id}-{i}.json.gz"
+ with self.batch_config.storage.fs(create=True) as fs:
+ # TODO: Determine compression from config.
+ with fs.open(filename, "wb") as f, gzip.GzipFile(
+ fileobj=f,
+ mode="wb",
+ ) as gz:
+ gz.writelines(
+ (json.dumps(record, default=str) + "\n").encode()
+ for record in chunk
+ )
+ file_url = fs.geturl(filename)
+ yield [file_url]
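Each manifest entry produced above points at a gzip-compressed JSON-Lines file. A consumer-side sketch for reading one back (the local path is hypothetical):

    import gzip
    import json

    with gzip.open("/tmp/batches/demo-batch-1.json.gz", "rt") as gz:
        records = [json.loads(line) for line in gz]  # one record dict per line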
diff --git a/singer_sdk/contrib/batch_encoder_parquet.py b/singer_sdk/contrib/batch_encoder_parquet.py
new file mode 100644
index 000000000..1d5ad9cc1
--- /dev/null
+++ b/singer_sdk/contrib/batch_encoder_parquet.py
@@ -0,0 +1,54 @@
+"""Parquet Record Batcher."""
+
+from __future__ import annotations
+
+import typing as t
+from uuid import uuid4
+
+from singer_sdk.batch import BaseBatcher, lazy_chunked_generator
+
+__all__ = ["ParquetBatcher"]
+
+
+class ParquetBatcher(BaseBatcher):
+ """Parquet Record Batcher."""
+
+ def get_batches(
+ self,
+ records: t.Iterator[dict],
+ ) -> t.Iterator[list[str]]:
+ """Yield manifest of batches.
+
+ Args:
+ records: The records to batch.
+
+ Yields:
+ A list of file paths (called a manifest).
+ """
+ import pyarrow as pa
+ import pyarrow.parquet as pq
+
+ sync_id = f"{self.tap_name}--{self.stream_name}-{uuid4()}"
+ prefix = self.batch_config.storage.prefix or ""
+
+ for i, chunk in enumerate(
+ lazy_chunked_generator(
+ records,
+ self.batch_config.batch_size,
+ ),
+ start=1,
+ ):
+ filename = f"{prefix}{sync_id}={i}.parquet"
+ if self.batch_config.encoding.compression == "gzip":
+ filename = f"{filename}.gz"
+ with self.batch_config.storage.fs() as fs:
+ with fs.open(filename, "wb") as f:
+ pylist = list(chunk)
+ table = pa.Table.from_pylist(pylist)
+ if self.batch_config.encoding.compression == "gzip":
+ pq.write_table(table, f, compression="GZIP")
+ else:
+ pq.write_table(table, f)
+
+ file_url = fs.geturl(filename)
+ yield [file_url]
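And the Parquet counterpart: pyarrow reads these batch files straight back into Python dicts (path again hypothetical):

    import pyarrow.parquet as pq

    table = pq.read_table("/tmp/batches/demo-batch=1.parquet")
    records = table.to_pylist()  # list[dict], mirroring the rows given to from_pylist()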
diff --git a/singer_sdk/exceptions.py b/singer_sdk/exceptions.py
index 23325aa2a..75135e800 100644
--- a/singer_sdk/exceptions.py
+++ b/singer_sdk/exceptions.py
@@ -12,11 +12,30 @@
class ConfigValidationError(Exception):
"""Raised when a user's config settings fail validation."""
+ def __init__(
+ self,
+ message: str,
+ *,
+ errors: list[str] | None = None,
+ ) -> None:
+ """Initialize a ConfigValidationError.
+
+ Args:
+ message: A message describing the error.
+ errors: A list of errors which caused the validation error.
+ """
+ super().__init__(message)
+ self.errors = errors or []
+
class FatalAPIError(Exception):
"""Exception raised when a failed request should not be considered retriable."""
+class InvalidReplicationKeyException(Exception):
+ """Exception to raise if the replication key is not in the stream properties."""
+
+
class InvalidStreamSortException(Exception):
"""Exception to raise if sorting errors are found while syncing the records."""
diff --git a/singer_sdk/helpers/_batch.py b/singer_sdk/helpers/_batch.py
index 62447ddb3..490e2ef8b 100644
--- a/singer_sdk/helpers/_batch.py
+++ b/singer_sdk/helpers/_batch.py
@@ -25,6 +25,9 @@ class BatchFileFormat(str, enum.Enum):
JSONL = "jsonl"
"""JSON Lines format."""
+ PARQUET = "parquet"
+ """Parquet format."""
+
@dataclass
class BaseBatchFileEncoding:
@@ -69,6 +72,13 @@ class JSONLinesEncoding(BaseBatchFileEncoding):
__encoding_format__ = "jsonl"
+@dataclass
+class ParquetEncoding(BaseBatchFileEncoding):
+ """Parquet encoding for batch files."""
+
+ __encoding_format__ = "parquet"
+
+
@dataclass
class SDKBatchMessage(Message):
"""Singer batch message in the Meltano Singer SDK flavor."""
@@ -82,7 +92,7 @@ class SDKBatchMessage(Message):
manifest: list[str] = field(default_factory=list)
"""The manifest of files in the batch."""
- def __post_init__(self):
+ def __post_init__(self) -> None:
if isinstance(self.encoding, dict):
self.encoding = BaseBatchFileEncoding.from_dict(self.encoding)
@@ -102,7 +112,7 @@ class StorageTarget:
params: dict = field(default_factory=dict)
""""The storage parameters."""
- def asdict(self):
+ def asdict(self) -> dict[str, t.Any]:
"""Return a dictionary representation of the message.
Returns:
@@ -134,7 +144,7 @@ def split_url(url: str) -> tuple[str, str]:
"""
if platform.system() == "Windows" and "\\" in url:
# Original code from pyFileSystem split
- # Augemnted slitly to properly Windows paths
+ # Augmented slightly to properly handle Windows paths
split = url.rsplit("\\", 1)
return (split[0] or "\\", split[1])
@@ -214,7 +224,7 @@ class BatchConfig:
batch_size: int = DEFAULT_BATCH_SIZE
"""The max number of records in a batch."""
- def __post_init__(self):
+ def __post_init__(self) -> None:
if isinstance(self.encoding, dict):
self.encoding = BaseBatchFileEncoding.from_dict(self.encoding)
@@ -224,7 +234,7 @@ def __post_init__(self):
if self.batch_size is None:
self.batch_size = DEFAULT_BATCH_SIZE
- def asdict(self):
+ def asdict(self) -> dict[str, t.Any]:
"""Return a dictionary representation of the message.
Returns:
diff --git a/singer_sdk/helpers/_compat.py b/singer_sdk/helpers/_compat.py
index 87033ea4c..c9a7df6cc 100644
--- a/singer_sdk/helpers/_compat.py
+++ b/singer_sdk/helpers/_compat.py
@@ -2,6 +2,7 @@
from __future__ import annotations
+import datetime
import sys
if sys.version_info < (3, 8):
@@ -11,4 +12,39 @@
from importlib import metadata
from typing import final # noqa: ICN003
-__all__ = ["metadata", "final"]
+if sys.version_info < (3, 9):
+ import importlib_resources
+else:
+ from importlib import resources as importlib_resources
+
+if sys.version_info < (3, 9):
+ from importlib_resources.abc import Traversable
+elif sys.version_info < (3, 12):
+ from importlib.abc import Traversable
+else:
+ from importlib.resources.abc import Traversable
+
+if sys.version_info < (3, 12):
+ from importlib_metadata import entry_points
+else:
+ from importlib.metadata import entry_points
+
+if sys.version_info < (3, 11):
+ from backports.datetime_fromisoformat import MonkeyPatch
+
+ MonkeyPatch.patch_fromisoformat()
+
+datetime_fromisoformat = datetime.datetime.fromisoformat
+date_fromisoformat = datetime.date.fromisoformat
+time_fromisoformat = datetime.time.fromisoformat
+
+__all__ = [
+ "metadata",
+ "final",
+ "entry_points",
+ "datetime_fromisoformat",
+ "date_fromisoformat",
+ "time_fromisoformat",
+ "importlib_resources",
+ "Traversable",
+]
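
What the new shims provide, sketched: uniform ISO 8601 parsing on every supported interpreter, since before Python 3.11 the stdlib parser rejects variants such as a trailing `Z` (hence the monkeypatch above):

```python
from singer_sdk.helpers._compat import date_fromisoformat, datetime_fromisoformat

print(datetime_fromisoformat("2023-01-01T00:00:00+00:00"))
print(datetime_fromisoformat("2023-01-01T00:00:00Z"))  # also works on 3.8-3.10
print(date_fromisoformat("2023-01-01"))
```
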
diff --git a/singer_sdk/helpers/_conformers.py b/singer_sdk/helpers/_conformers.py
index 46963284e..0ca70e85c 100644
--- a/singer_sdk/helpers/_conformers.py
+++ b/singer_sdk/helpers/_conformers.py
@@ -16,11 +16,13 @@ def snakecase(string: str) -> str:
"""
string = re.sub(r"[\-\.\s]", "_", string)
string = (
- string[0].lower()
- + re.sub(
- r"[A-Z]",
- lambda matched: "_" + str(matched.group(0).lower()),
- string[1:],
+ (
+ string[0].lower()
+ + re.sub(
+ r"[A-Z]",
+ lambda matched: f"_{matched.group(0).lower()!s}",
+ string[1:],
+ )
)
if string
else string
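
The reformatting above is behavior-preserving; for reference, the expected outputs of this internal helper:

```python
from singer_sdk.helpers._conformers import snakecase

assert snakecase("createdAt") == "created_at"
assert snakecase("My-Field.Name") == "my__field__name"  # separators first become "_"
```
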
diff --git a/singer_sdk/helpers/_flattening.py b/singer_sdk/helpers/_flattening.py
index 29ef35cc2..866eb8a48 100644
--- a/singer_sdk/helpers/_flattening.py
+++ b/singer_sdk/helpers/_flattening.py
@@ -1,15 +1,15 @@
-"""Internal helper library for record flatteting functions."""
+"""Internal helper library for record flattening functions."""
from __future__ import annotations
import collections
import itertools
-import json
import re
import typing as t
from copy import deepcopy
import inflection
+import simplejson as json
DEFAULT_FLATTENING_SEPARATOR = "__"
@@ -70,7 +70,7 @@ def flatten_key(key_name: str, parent_keys: list[str], separator: str = "__") ->
inflection.camelize(inflected_key[reducer_index]),
)
inflected_key[reducer_index] = (
- reduced_key if len(reduced_key) > 1 else inflected_key[reducer_index][0:3]
+ reduced_key if len(reduced_key) > 1 else inflected_key[reducer_index][:3]
).lower()
reducer_index += 1
@@ -96,9 +96,7 @@ def flatten_schema(
>>> schema = {
... "type": "object",
... "properties": {
- ... "id": {
- ... "type": "string"
- ... },
+ ... "id": {"type": "string"},
... "foo": {
... "type": "object",
... "properties": {
@@ -107,17 +105,13 @@ def flatten_schema(
... "properties": {
... "baz": {
... "type": "object",
- ... "properties": {
- ... "qux": {
- ... "type": "string"
- ... }
- ... }
+ ... "properties": {"qux": {"type": "string"}},
... }
- ... }
+ ... },
... }
- ... }
- ... }
- ... }
+ ... },
+ ... },
+ ... },
... }
>>> print(json.dumps(flatten_schema(schema, 0), indent=2))
{
@@ -155,17 +149,7 @@ def flatten_schema(
"type": "string"
},
"foo__bar": {
- "type": "object",
- "properties": {
- "baz": {
- "type": "object",
- "properties": {
- "qux": {
- "type": "string"
- }
- }
- }
- }
+ "type": "string"
}
}
}
@@ -178,25 +162,109 @@ def flatten_schema(
"type": "string"
},
"foo__bar__baz": {
- "type": "object",
+ "type": "string"
+ }
+ }
+ }
+
+ >>> print(json.dumps(flatten_schema(schema, 3), indent=2))
+ {
+ "type": "object",
+ "properties": {
+ "id": {
+ "type": "string"
+ },
+ "foo__bar__baz__qux": {
+ "type": "string"
+ }
+ }
+ }
+
+ >>> nullable_leaves_schema = {
+ ... "type": "object",
+ ... "properties": {
+ ... "id": {"type": "string"},
+ ... "foo": {
+ ... "type": ["object", "null"],
+ ... "properties": {
+ ... "bar": {
+ ... "type": ["object", "null"],
+ ... "properties": {
+ ... "baz": {
+ ... "type": ["object", "null"],
+ ... "properties": {"qux": {"type": "string"}},
+ ... }
+ ... },
+ ... }
+ ... },
+ ... },
+ ... },
+ ... }
+ >>> print(json.dumps(flatten_schema(nullable_leaves_schema, 0), indent=2))
+ {
+ "type": "object",
+ "properties": {
+ "id": {
+ "type": "string"
+ },
+ "foo": {
+ "type": [
+ "object",
+ "null"
+ ],
"properties": {
- "qux": {
- "type": "string"
+ "bar": {
+ "type": [
+ "object",
+ "null"
+ ],
+ "properties": {
+ "baz": {
+ "type": [
+ "object",
+ "null"
+ ],
+ "properties": {
+ "qux": {
+ "type": "string"
+ }
+ }
+ }
+ }
}
}
}
}
}
- >>> print(json.dumps(flatten_schema(schema, 3), indent=2))
+ >>> print(json.dumps(flatten_schema(nullable_leaves_schema, 1), indent=2))
{
"type": "object",
"properties": {
"id": {
"type": "string"
},
- "foo__bar__baz__qux": {
+ "foo__bar": {
+ "type": [
+ "string",
+ "null"
+ ]
+ }
+ }
+ }
+
+ >>> print(json.dumps(flatten_schema(nullable_leaves_schema, 2), indent=2))
+ {
+ "type": "object",
+ "properties": {
+ "id": {
"type": "string"
+ },
+ "foo__bar__baz": {
+ "type": [
+ "string",
+ "null"
+ ]
}
}
}
@@ -210,7 +278,7 @@ def flatten_schema(
return new_schema
-def _flatten_schema( # noqa: C901
+def _flatten_schema( # noqa: C901, PLR0912
schema_node: dict,
parent_keys: list[str] | None = None,
separator: str = "__",
@@ -236,40 +304,55 @@ def _flatten_schema( # noqa: C901
if "properties" not in schema_node:
return {}
- for k, v in schema_node["properties"].items():
- new_key = flatten_key(k, parent_keys, separator)
- if "type" in v:
- if "object" in v["type"] and "properties" in v and level < max_level:
+ for field_name, field_schema in schema_node["properties"].items():
+ new_key = flatten_key(field_name, parent_keys, separator)
+ if "type" in field_schema:
+ if (
+ "object" in field_schema["type"]
+ and "properties" in field_schema
+ and level < max_level
+ ):
items.extend(
_flatten_schema(
- v,
- [*parent_keys, k],
+ field_schema,
+ [*parent_keys, field_name],
separator=separator,
level=level + 1,
max_level=max_level,
).items(),
)
+ elif (
+ "array" in field_schema["type"]
+ or "object" in field_schema["type"]
+ and max_level > 0
+ ):
+ types = (
+ ["string", "null"] if "null" in field_schema["type"] else "string"
+ )
+ items.append((new_key, {"type": types}))
else:
- items.append((new_key, v))
- elif len(v.values()) > 0:
- if list(v.values())[0][0]["type"] == "string":
- list(v.values())[0][0]["type"] = ["null", "string"]
- items.append((new_key, list(v.values())[0][0]))
- elif list(v.values())[0][0]["type"] == "array":
- list(v.values())[0][0]["type"] = ["null", "array"]
- items.append((new_key, list(v.values())[0][0]))
- elif list(v.values())[0][0]["type"] == "object":
- list(v.values())[0][0]["type"] = ["null", "object"]
- items.append((new_key, list(v.values())[0][0]))
+ items.append((new_key, field_schema))
+ # TODO: Figure out what this really does, try breaking it.
+ # If it's not needed, remove it.
+ elif len(field_schema.values()) > 0:
+ if next(iter(field_schema.values()))[0]["type"] == "string":
+ next(iter(field_schema.values()))[0]["type"] = ["null", "string"]
+ items.append((new_key, next(iter(field_schema.values()))[0]))
+ elif next(iter(field_schema.values()))[0]["type"] == "array":
+ next(iter(field_schema.values()))[0]["type"] = ["null", "array"]
+ items.append((new_key, next(iter(field_schema.values()))[0]))
+ elif next(iter(field_schema.values()))[0]["type"] == "object":
+ next(iter(field_schema.values()))[0]["type"] = ["null", "object"]
+ items.append((new_key, next(iter(field_schema.values()))[0]))
# Sort and check for duplicates
- def _key_func(item):
- return item[0] # first item is tuple is the key name.
+ def _key_func(item: tuple[str, dict]) -> str:
+ return item[0] # first item in tuple is the key name.
sorted_items = sorted(items, key=_key_func)
- for k, g in itertools.groupby(sorted_items, key=_key_func):
+ for field_name, g in itertools.groupby(sorted_items, key=_key_func):
if len(list(g)) > 1:
- msg = f"Duplicate column name produced in schema: {k}"
+ msg = f"Duplicate column name produced in schema: {field_name}"
raise ValueError(msg)
# Return the (unsorted) result as a dict.
@@ -347,7 +430,7 @@ def _flatten_record(
items.append(
(
new_key,
- json.dumps(v)
+ json.dumps(v, use_decimal=True)
if _should_jsondump_value(k, v, flattened_schema)
else v,
),
@@ -356,7 +439,11 @@ def _flatten_record(
return dict(items)
-def _should_jsondump_value(key: str, value: t.Any, flattened_schema=None) -> bool:
+def _should_jsondump_value(
+ key: str,
+ value: t.Any, # noqa: ANN401
+ flattened_schema: dict[str, t.Any] | None = None,
+) -> bool:
"""Return True if json.dump() should be used to serialize the value.
Args:
@@ -370,12 +457,9 @@ def _should_jsondump_value(key: str, value: t.Any, flattened_schema=None) -> boo
if isinstance(value, (dict, list)):
return True
- if (
+ return bool(
flattened_schema
and key in flattened_schema
and "type" in flattened_schema[key]
and set(flattened_schema[key]["type"]) == {"null", "object", "array"}
- ):
- return True
-
- return False
+ )
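
Note the import swap at the top of this file: nested values are now serialized through simplejson with `use_decimal=True`, so `Decimal` fields survive flattening. The difference in isolation:

```python
from decimal import Decimal

import simplejson

nested = {"price": Decimal("1.23")}
print(simplejson.dumps(nested, use_decimal=True))  # {"price": 1.23}
# json.dumps(nested) with the stdlib would raise TypeError instead.
```
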
diff --git a/singer_sdk/helpers/_secrets.py b/singer_sdk/helpers/_secrets.py
index ad7d05032..bbababa70 100644
--- a/singer_sdk/helpers/_secrets.py
+++ b/singer_sdk/helpers/_secrets.py
@@ -28,7 +28,7 @@ def is_common_secret_key(key_name: str) -> bool:
class SecretString(str):
"""For now, this class wraps a sensitive string to be identified as such later."""
- def __init__(self, contents):
+ def __init__(self, contents: str) -> None:
"""Initialize secret string."""
self.contents = contents
diff --git a/singer_sdk/helpers/_simpleeval.py b/singer_sdk/helpers/_simpleeval.py
deleted file mode 100644
index c3fb41c3f..000000000
--- a/singer_sdk/helpers/_simpleeval.py
+++ /dev/null
@@ -1,679 +0,0 @@
-"""
-Simpleeval module originally imported on 2021-09-16 from:
-- https://github.com/danthedeckie/simpleeval
-
-For more information:
-- https://gitlab.com/meltano/sdk/-/issues/213
-
--------------------------------------
-SimpleEval - (C) 2013-2019 Daniel Fairhead
--------------------------------------
-
-An short, easy to use, safe and reasonably extensible expression evaluator.
-Designed for things like in a website where you want to allow the user to
-generate a string, or a number from some other input, without allowing full
-eval() or other unsafe or needlessly complex linguistics.
-
--------------------------------------
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-
--------------------------------------
-
-Initial idea copied from J.F. Sebastian on Stack Overflow
-( http://stackoverflow.com/a/9558001/1973500 ) with
-modifications and many improvements.
-
--------------------------------------
-Contributors:
-- corro (Robin Baumgartner) (py3k)
-- dratchkov (David R) (nested dicts)
-- marky1991 (Mark Young) (slicing)
-- T045T (Nils Berg) (!=, py3kstr, obj.
-- perkinslr (Logan Perkins) (.__globals__ or .func_ breakouts)
-- impala2 (Kirill Stepanov) (massive _eval refactor)
-- gk (ugik) (Other iterables than str can DOS too, and can be made)
-- daveisfera (Dave Johansen) 'not' Boolean op, Pycharm, pep8, various other fixes
-- xaled (Khalid Grandi) method chaining correctly, double-eval bugfix.
-- EdwardBetts (Edward Betts) spelling correction.
-- charlax (Charles-Axel Dein charlax) Makefile and cleanups
-- mommothazaz123 (Andrew Zhu) f"string" support, Python 3.8 support
-- lubieowoce (Uryga) various potential vulnerabilities
-- JCavallo (Jean Cavallo) names dict shouldn't be modified
-- Birne94 (Daniel Birnstiel) for fixing leaking generators.
-- patricksurry (Patrick Surry) or should return last value, even if falsy.
-- shughes-uk (Samantha Hughes) python w/o 'site' should not fail to import.
-
--------------------------------------
-Basic Usage:
-
->>> s = SimpleEval()
->>> s.eval("20 + 30")
-50
-
-You can add your own functions easily too:
-
-if file.txt contents is "11"
-
->>> def get_file():
-... with open("file.txt", 'r') as f:
-... return f.read()
-
->>> s.functions["get_file"] = get_file
->>> s.eval("int(get_file()) + 31")
-42
-
-For more information, see the full package documentation on pypi, or the github
-repo.
-
------------
-
-If you don't need to re-use the evaluator (with it's names, functions, etc),
-then you can use the simple_eval() function:
-
->>> simple_eval("21 + 19")
-40
-
-You can pass names, operators and functions to the simple_eval function as
-well:
-
->>> simple_eval("40 + two", names={"two": 2})
-42
-
-"""
-# flake8: noqa # Ignoring flake errors in imported module
-# isort: dont-add-imports
-
-import ast
-import operator as op
-import sys
-import warnings
-from random import random
-
-PYTHON3 = sys.version_info[0] == 3
-
-########################################
-# Module wide 'globals'
-
-MAX_STRING_LENGTH = 100000
-MAX_COMPREHENSION_LENGTH = 10000
-MAX_POWER = 4000000 # highest exponent
-DISALLOW_PREFIXES = ["_", "func_"]
-DISALLOW_METHODS = ["format", "format_map", "mro"]
-
-# Disallow functions:
-# This, strictly speaking, is not necessary. These /should/ never be accessable anyway,
-# if DISALLOW_PREFIXES and DISALLOW_METHODS are all right. This is here to try and help
-# people not be stupid. Allowing these functions opens up all sorts of holes - if any of
-# their functionality is required, then please wrap them up in a safe container. And think
-# very hard about it first. And don't say I didn't warn you.
-# builtins is a dict in python >3.6 but a module before
-DISALLOW_FUNCTIONS = {type, isinstance, eval, getattr, setattr, repr, compile, open}
-if hasattr(__builtins__, "help") or (
- hasattr(__builtins__, "__contains__") and "help" in __builtins__
-):
- # PyInstaller environment doesn't include this module.
- DISALLOW_FUNCTIONS.add(help)
-
-
-if PYTHON3:
- exec("DISALLOW_FUNCTIONS.add(exec)") # exec is not a function in Python2...
-
-
-########################################
-# Exceptions:
-
-
-class InvalidExpression(Exception):
- """ Generic Exception """
-
- pass
-
-
-class FunctionNotDefined(InvalidExpression):
- """ sorry! That function isn't defined! """
-
- def __init__(self, func_name, expression):
- self.message = "Function '{0}' not defined," " for expression '{1}'.".format(
- func_name, expression
- )
- setattr(self, "func_name", func_name) # bypass 2to3 confusion.
- self.expression = expression
-
- # pylint: disable=bad-super-call
- super(InvalidExpression, self).__init__(self.message)
-
-
-class NameNotDefined(InvalidExpression):
- """ a name isn't defined. """
-
- def __init__(self, name, expression):
- self.name = name
- self.message = "'{0}' is not defined for expression '{1}'".format(
- name, expression
- )
- self.expression = expression
-
- # pylint: disable=bad-super-call
- super(InvalidExpression, self).__init__(self.message)
-
-
-class AttributeDoesNotExist(InvalidExpression):
- """attribute does not exist"""
-
- def __init__(self, attr, expression):
- self.message = "Attribute '{0}' does not exist in expression '{1}'".format(
- attr, expression
- )
- self.attr = attr
- self.expression = expression
-
-
-class FeatureNotAvailable(InvalidExpression):
- """ What you're trying to do is not allowed. """
-
- pass
-
-
-class NumberTooHigh(InvalidExpression):
- """Sorry! That number is too high. I don't want to spend the
- next 10 years evaluating this expression!"""
-
- pass
-
-
-class IterableTooLong(InvalidExpression):
- """ That iterable is **way** too long, baby. """
-
- pass
-
-
-class AssignmentAttempted(UserWarning):
- pass
-
-
-########################################
-# Default simple functions to include:
-
-
-def random_int(top):
- """ return a random int below """
-
- return int(random() * top)
-
-
-def safe_power(a, b): # pylint: disable=invalid-name
- """ a limited exponent/to-the-power-of function, for safety reasons """
-
- if abs(a) > MAX_POWER or abs(b) > MAX_POWER:
- raise NumberTooHigh("Sorry! I don't want to evaluate {0} ** {1}".format(a, b))
- return a ** b
-
-
-def safe_mult(a, b): # pylint: disable=invalid-name
- """ limit the number of times an iterable can be repeated... """
-
- if hasattr(a, "__len__") and b * len(a) > MAX_STRING_LENGTH:
- raise IterableTooLong("Sorry, I will not evalute something that long.")
- if hasattr(b, "__len__") and a * len(b) > MAX_STRING_LENGTH:
- raise IterableTooLong("Sorry, I will not evalute something that long.")
-
- return a * b
-
-
-def safe_add(a, b): # pylint: disable=invalid-name
- """ iterable length limit again """
-
- if hasattr(a, "__len__") and hasattr(b, "__len__"):
- if len(a) + len(b) > MAX_STRING_LENGTH:
- raise IterableTooLong(
- "Sorry, adding those two together would" " make something too long."
- )
- return a + b
-
-
-########################################
-# Defaults for the evaluator:
-
-DEFAULT_OPERATORS = {
- ast.Add: safe_add,
- ast.Sub: op.sub,
- ast.Mult: safe_mult,
- ast.Div: op.truediv,
- ast.FloorDiv: op.floordiv,
- ast.Pow: safe_power,
- ast.Mod: op.mod,
- ast.Eq: op.eq,
- ast.NotEq: op.ne,
- ast.Gt: op.gt,
- ast.Lt: op.lt,
- ast.GtE: op.ge,
- ast.LtE: op.le,
- ast.Not: op.not_,
- ast.USub: op.neg,
- ast.UAdd: op.pos,
- ast.In: lambda x, y: op.contains(y, x),
- ast.NotIn: lambda x, y: not op.contains(y, x),
- ast.Is: lambda x, y: x is y,
- ast.IsNot: lambda x, y: x is not y,
-}
-
-DEFAULT_FUNCTIONS = {
- "rand": random,
- "randint": random_int,
- "int": int,
- "float": float,
- "str": str if PYTHON3 else unicode, # type: ignore # 'unicode' not defined
-}
-
-DEFAULT_NAMES = {"True": True, "False": False, "None": None}
-
-ATTR_INDEX_FALLBACK = True
-
-
-########################################
-# And the actual evaluator:
-
-
-class SimpleEval(object): # pylint: disable=too-few-public-methods
- """A very simple expression parser.
- >>> s = SimpleEval()
- >>> s.eval("20 + 30 - ( 10 * 5)")
- 0
- """
-
- expr = ""
-
- def __init__(self, operators=None, functions=None, names=None):
- """
- Create the evaluator instance. Set up valid operators (+,-, etc)
- functions (add, random, get_val, whatever) and names."""
-
- if not operators:
- operators = DEFAULT_OPERATORS.copy()
- if not functions:
- functions = DEFAULT_FUNCTIONS.copy()
- if not names:
- names = DEFAULT_NAMES.copy()
-
- self.operators = operators
- self.functions = functions
- self.names = names
-
- self.nodes = {
- ast.Expr: self._eval_expr,
- ast.Assign: self._eval_assign,
- ast.AugAssign: self._eval_aug_assign,
- ast.Import: self._eval_import,
- ast.Num: self._eval_num,
- ast.Str: self._eval_str,
- ast.Name: self._eval_name,
- ast.UnaryOp: self._eval_unaryop,
- ast.BinOp: self._eval_binop,
- ast.BoolOp: self._eval_boolop,
- ast.Compare: self._eval_compare,
- ast.IfExp: self._eval_ifexp,
- ast.Call: self._eval_call,
- ast.keyword: self._eval_keyword,
- ast.Subscript: self._eval_subscript,
- ast.Attribute: self._eval_attribute,
- ast.Index: self._eval_index,
- ast.Slice: self._eval_slice,
- }
-
- # py3k stuff:
- if hasattr(ast, "NameConstant"):
- self.nodes[ast.NameConstant] = self._eval_constant
-
- # py3.6, f-strings
- if hasattr(ast, "JoinedStr"):
- self.nodes[ast.JoinedStr] = self._eval_joinedstr # f-string
- self.nodes[
- ast.FormattedValue
- ] = self._eval_formattedvalue # formatted value in f-string
-
- # py3.8 uses ast.Constant instead of ast.Num, ast.Str, ast.NameConstant
- if hasattr(ast, "Constant"):
- self.nodes[ast.Constant] = self._eval_constant
-
- # Defaults:
-
- self.ATTR_INDEX_FALLBACK = ATTR_INDEX_FALLBACK
-
- # Check for forbidden functions:
-
- for f in self.functions.values():
- if f in DISALLOW_FUNCTIONS:
- raise FeatureNotAvailable(
- "This function {} is a really bad idea.".format(f)
- )
-
- def eval(self, expr):
- """evaluate an expresssion, using the operators, functions and
- names previously set up."""
-
- # set a copy of the expression aside, so we can give nice errors...
-
- self.expr = expr
-
- # and evaluate:
- return self._eval(ast.parse(expr.strip()).body[0])
-
- def _eval(self, node):
- """ The internal evaluator used on each node in the parsed tree. """
-
- try:
- handler = self.nodes[type(node)]
- except KeyError:
- raise FeatureNotAvailable(
- "Sorry, {0} is not available in this "
- "evaluator".format(type(node).__name__)
- )
-
- return handler(node)
-
- def _eval_expr(self, node):
- return self._eval(node.value)
-
- def _eval_assign(self, node):
- warnings.warn(
- "Assignment ({}) attempted, but this is ignored".format(self.expr),
- AssignmentAttempted,
- )
- return self._eval(node.value)
-
- def _eval_aug_assign(self, node):
- warnings.warn(
- "Assignment ({}) attempted, but this is ignored".format(self.expr),
- AssignmentAttempted,
- )
- return self._eval(node.value)
-
- def _eval_import(self, node):
- raise FeatureNotAvailable("Sorry, 'import' is not allowed.")
- return self._eval(node.value)
-
- @staticmethod
- def _eval_num(node):
- return node.n
-
- @staticmethod
- def _eval_str(node):
- if len(node.s) > MAX_STRING_LENGTH:
- raise IterableTooLong(
- "String Literal in statement is too long!"
- " ({0}, when {1} is max)".format(len(node.s), MAX_STRING_LENGTH)
- )
- return node.s
-
- @staticmethod
- def _eval_constant(node):
- if hasattr(node.value, "__len__") and len(node.value) > MAX_STRING_LENGTH:
- raise IterableTooLong(
- "Literal in statement is too long!"
- " ({0}, when {1} is max)".format(len(node.value), MAX_STRING_LENGTH)
- )
- return node.value
-
- def _eval_unaryop(self, node):
- return self.operators[type(node.op)](self._eval(node.operand))
-
- def _eval_binop(self, node):
- return self.operators[type(node.op)](
- self._eval(node.left), self._eval(node.right)
- )
-
- def _eval_boolop(self, node):
- if isinstance(node.op, ast.And):
- vout = False
- for value in node.values:
- vout = self._eval(value)
- if not vout:
- return vout
- return vout
- elif isinstance(node.op, ast.Or):
- for value in node.values:
- vout = self._eval(value)
- if vout:
- return vout
- return vout
-
- def _eval_compare(self, node):
- right = self._eval(node.left)
- to_return = True
- for operation, comp in zip(node.ops, node.comparators):
- if not to_return:
- break
- left = right
- right = self._eval(comp)
- to_return = self.operators[type(operation)](left, right)
- return to_return
-
- def _eval_ifexp(self, node):
- return (
- self._eval(node.body) if self._eval(node.test) else self._eval(node.orelse)
- )
-
- def _eval_call(self, node):
- if isinstance(node.func, ast.Attribute):
- func = self._eval(node.func)
- else:
- try:
- func = self.functions[node.func.id]
- except KeyError:
- raise FunctionNotDefined(node.func.id, self.expr)
- except AttributeError as e:
- raise FeatureNotAvailable("Lambda Functions not implemented")
-
- if func in DISALLOW_FUNCTIONS:
- raise FeatureNotAvailable("This function is forbidden")
-
- return func(
- *(self._eval(a) for a in node.args),
- **dict(self._eval(k) for k in node.keywords)
- )
-
- def _eval_keyword(self, node):
- return node.arg, self._eval(node.value)
-
- def _eval_name(self, node):
- try:
- # This happens at least for slicing
- # This is a safe thing to do because it is impossible
- # that there is a true exression assigning to none
- # (the compiler rejects it, so you can't even
- # pass that to ast.parse)
- if hasattr(self.names, "__getitem__"):
- return self.names[node.id]
- elif callable(self.names):
- return self.names(node)
- else:
- raise InvalidExpression(
- 'Trying to use name (variable) "{0}"'
- ' when no "names" defined for'
- " evaluator".format(node.id)
- )
-
- except KeyError:
- if node.id in self.functions:
- return self.functions[node.id]
-
- raise NameNotDefined(node.id, self.expr)
-
- def _eval_subscript(self, node):
- container = self._eval(node.value)
- key = self._eval(node.slice)
- try:
- return container[key]
- except KeyError:
- raise
-
- def _eval_attribute(self, node):
- for prefix in DISALLOW_PREFIXES:
- if node.attr.startswith(prefix):
- raise FeatureNotAvailable(
- "Sorry, access to __attributes "
- " or func_ attributes is not available. "
- "({0})".format(node.attr)
- )
- if node.attr in DISALLOW_METHODS:
- raise FeatureNotAvailable(
- "Sorry, this method is not available. " "({0})".format(node.attr)
- )
- # eval node
- node_evaluated = self._eval(node.value)
-
- # Maybe the base object is an actual object, not just a dict
- try:
- return getattr(node_evaluated, node.attr)
- except (AttributeError, TypeError):
- pass
-
- # TODO: is this a good idea? Try and look for [x] if .x doesn't work?
- if self.ATTR_INDEX_FALLBACK:
- try:
- return node_evaluated[node.attr]
- except (KeyError, TypeError):
- pass
-
- # If it is neither, raise an exception
- raise AttributeDoesNotExist(node.attr, self.expr)
-
- def _eval_index(self, node):
- return self._eval(node.value)
-
- def _eval_slice(self, node):
- lower = upper = step = None
- if node.lower is not None:
- lower = self._eval(node.lower)
- if node.upper is not None:
- upper = self._eval(node.upper)
- if node.step is not None:
- step = self._eval(node.step)
- return slice(lower, upper, step)
-
- def _eval_joinedstr(self, node):
- length = 0
- evaluated_values = []
- for n in node.values:
- val = str(self._eval(n))
- if len(val) + length > MAX_STRING_LENGTH:
- raise IterableTooLong("Sorry, I will not evaluate something this long.")
- evaluated_values.append(val)
- return "".join(evaluated_values)
-
- def _eval_formattedvalue(self, node):
- if node.format_spec:
- fmt = "{:" + self._eval(node.format_spec) + "}"
- return fmt.format(self._eval(node.value))
- return self._eval(node.value)
-
-
-class EvalWithCompoundTypes(SimpleEval):
- """
- SimpleEval with additional Compound Types, and their respective
- function editions. (list, tuple, dict, set).
- """
-
- def __init__(self, operators=None, functions=None, names=None):
- super(EvalWithCompoundTypes, self).__init__(operators, functions, names)
-
- self.functions.update(list=list, tuple=tuple, dict=dict, set=set)
-
- self.nodes.update(
- {
- ast.Dict: self._eval_dict,
- ast.Tuple: self._eval_tuple,
- ast.List: self._eval_list,
- ast.Set: self._eval_set,
- ast.ListComp: self._eval_comprehension,
- ast.GeneratorExp: self._eval_comprehension,
- }
- )
-
- def eval(self, expr):
- self._max_count = 0
- return super(EvalWithCompoundTypes, self).eval(expr)
-
- def _eval_dict(self, node):
- return {self._eval(k): self._eval(v) for (k, v) in zip(node.keys, node.values)}
-
- def _eval_tuple(self, node):
- return tuple(self._eval(x) for x in node.elts)
-
- def _eval_list(self, node):
- return list(self._eval(x) for x in node.elts)
-
- def _eval_set(self, node):
- return set(self._eval(x) for x in node.elts)
-
- def _eval_comprehension(self, node):
- to_return = []
-
- extra_names = {}
-
- previous_name_evaller = self.nodes[ast.Name]
-
- def eval_names_extra(node):
- """
- Here we hide our extra scope for within this comprehension
- """
- if node.id in extra_names:
- return extra_names[node.id]
- return previous_name_evaller(node)
-
- self.nodes.update({ast.Name: eval_names_extra})
-
- def recurse_targets(target, value):
- """
- Recursively (enter, (into, (nested, name), unpacking)) = \
- and, (assign, (values, to), each
- """
- if isinstance(target, ast.Name):
- extra_names[target.id] = value
- else:
- for t, v in zip(target.elts, value):
- recurse_targets(t, v)
-
- def do_generator(gi=0):
- g = node.generators[gi]
- for i in self._eval(g.iter):
- self._max_count += 1
-
- if self._max_count > MAX_COMPREHENSION_LENGTH:
- raise IterableTooLong("Comprehension generates too many elements")
- recurse_targets(g.target, i)
- if all(self._eval(iff) for iff in g.ifs):
- if len(node.generators) > gi + 1:
- do_generator(gi + 1)
- else:
- to_return.append(self._eval(node.elt))
-
- try:
- do_generator()
- finally:
- self.nodes.update({ast.Name: previous_name_evaller})
-
- return to_return
-
-
-def simple_eval(expr, operators=None, functions=None, names=None):
- """ Simply evaluate an expresssion """
- s = SimpleEval(operators=operators, functions=functions, names=names)
- return s.eval(expr)
diff --git a/singer_sdk/helpers/_state.py b/singer_sdk/helpers/_state.py
index 9d0102186..ba5121bba 100644
--- a/singer_sdk/helpers/_state.py
+++ b/singer_sdk/helpers/_state.py
@@ -18,12 +18,12 @@
STARTING_MARKER = "starting_replication_value"
-def get_state_if_exists( # noqa: PLR0911
+def get_state_if_exists(
tap_state: dict,
tap_stream_id: str,
state_partition_context: dict | None = None,
key: str | None = None,
-) -> t.Any | None:
+) -> t.Any | None: # noqa: ANN401
"""Return the stream or partition state, creating a new one if it does not exist.
Args:
@@ -47,9 +47,7 @@ def get_state_if_exists( # noqa: PLR0911
stream_state = tap_state["bookmarks"][tap_stream_id]
if not state_partition_context:
- if key:
- return stream_state.get(key, None)
- return stream_state
+ return stream_state.get(key, None) if key else stream_state
if "partitions" not in stream_state:
return None # No partitions defined
@@ -59,14 +57,12 @@ def get_state_if_exists( # noqa: PLR0911
)
if matched_partition is None:
return None # Partition definition not present
- if key:
- return matched_partition.get(key, None)
- return matched_partition
+ return matched_partition.get(key, None) if key else matched_partition
def get_state_partitions_list(tap_state: dict, tap_stream_id: str) -> list[dict] | None:
"""Return a list of partitions defined in the state, or None if not defined."""
- return (get_state_if_exists(tap_state, tap_stream_id) or {}).get("partitions", None)
+ return (get_state_if_exists(tap_state, tap_stream_id) or {}).get("partitions", None) # type: ignore[no-any-return]
def _find_in_partitions_list(
@@ -84,10 +80,7 @@ def _find_in_partitions_list(
f"{{state_partition_context}}.\nMatching state values were: {found!s}"
)
raise ValueError(msg)
- if found:
- return t.cast(dict, found[0])
-
- return None
+ return found[0] if found else None
def _create_in_partitions_list(
@@ -142,10 +135,10 @@ def get_writeable_state_dict(
def write_stream_state(
- tap_state,
+ tap_state: dict,
tap_stream_id: str,
- key,
- val,
+ key: str,
+ val: t.Any, # noqa: ANN401
*,
state_partition_context: dict | None = None,
) -> None:
@@ -172,7 +165,7 @@ def reset_state_progress_markers(stream_or_partition_state: dict) -> dict | None
def write_replication_key_signpost(
stream_or_partition_state: dict,
- new_signpost_value: t.Any,
+ new_signpost_value: t.Any, # noqa: ANN401
) -> None:
"""Write signpost value."""
stream_or_partition_state[SIGNPOST_MARKER] = to_json_compatible(new_signpost_value)
@@ -180,13 +173,13 @@ def write_replication_key_signpost(
def write_starting_replication_value(
stream_or_partition_state: dict,
- initial_value: t.Any,
+ initial_value: t.Any, # noqa: ANN401
) -> None:
"""Write initial replication value to state."""
stream_or_partition_state[STARTING_MARKER] = to_json_compatible(initial_value)
-def get_starting_replication_value(stream_or_partition_state: dict):
+def get_starting_replication_value(stream_or_partition_state: dict) -> t.Any | None: # noqa: ANN401
"""Retrieve initial replication marker value from state."""
if not stream_or_partition_state:
return None
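
For reference, the read path these internal helpers serve, against a state dict shaped per the Singer spec (a sketch):

```python
from singer_sdk.helpers._state import get_state_if_exists

tap_state = {
    "bookmarks": {
        "users": {"replication_key_value": "2023-01-01"},
    },
}
print(get_state_if_exists(tap_state, "users", key="replication_key_value"))
# -> '2023-01-01'
print(get_state_if_exists(tap_state, "orders"))  # -> None (stream not in state)
```
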
diff --git a/singer_sdk/helpers/_typing.py b/singer_sdk/helpers/_typing.py
index d3df38a5b..8b937d973 100644
--- a/singer_sdk/helpers/_typing.py
+++ b/singer_sdk/helpers/_typing.py
@@ -4,21 +4,21 @@
import copy
import datetime
+import logging
import typing as t
from enum import Enum
from functools import lru_cache
import pendulum
-if t.TYPE_CHECKING:
- import logging
-
_MAX_TIMESTAMP = "9999-12-31 23:59:59.999999"
_MAX_TIME = "23:59:59.999999"
JSONSCHEMA_ANNOTATION_SECRET = "secret" # noqa: S105
JSONSCHEMA_ANNOTATION_WRITEONLY = "writeOnly"
UTC = datetime.timezone.utc
+logger = logging.getLogger(__name__)
+
class DatetimeErrorTreatmentEnum(Enum):
"""Enum for treatment options for date parsing error."""
@@ -39,13 +39,13 @@ def __init__(self, *args: object) -> None:
super().__init__(msg, *args)
-def to_json_compatible(val: t.Any) -> t.Any:
+def to_json_compatible(val: t.Any) -> t.Any: # noqa: ANN401
"""Return as string if datetime. JSON does not support proper datetime types.
If given a naive datetime object, pendulum automatically makes it utc
"""
if isinstance(val, (datetime.datetime, pendulum.DateTime)):
- return pendulum.instance(val).isoformat()
+ return pendulum.instance(val).isoformat("T")
return val
@@ -67,18 +67,19 @@ def append_type(type_dict: dict, new_type: str) -> dict:
result["type"] = [*type_array, new_type]
return result
- msg = (
+ logger.warning(
"Could not append type because the JSON schema for the dictionary "
- f"`{type_dict}` appears to be invalid."
+ "`%s` appears to be invalid.",
+ type_dict,
)
- raise ValueError(msg)
+ return result
def is_secret_type(type_dict: dict) -> bool:
"""Return True if JSON Schema type definition appears to be a secret.
Will return true if either `writeOnly` or `secret` are true on this type
- or any of the type's subproperties.
+ or any of the type's sub-properties.
Args:
type_dict: The JSON Schema type to check.
@@ -95,7 +96,7 @@ def is_secret_type(type_dict: dict) -> bool:
return True
if "properties" in type_dict:
- # Recursively check subproperties and return True if any child is secret.
+ # Recursively check sub-properties and return True if any child is secret.
return any(
is_secret_type(child_type_dict)
for child_type_dict in type_dict["properties"].values()
@@ -184,7 +185,7 @@ def get_datelike_property_type(property_schema: dict) -> str | None:
return None
-def _is_string_with_format(type_dict):
+def _is_string_with_format(type_dict: dict[str, t.Any]) -> bool | None:
if "string" in type_dict.get("type", []) and type_dict.get("format") in {
"date-time",
"time",
@@ -195,14 +196,14 @@ def _is_string_with_format(type_dict):
def handle_invalid_timestamp_in_record(
- record, # noqa: ARG001
+ record: dict[str, t.Any], # noqa: ARG001
key_breadcrumb: list[str],
invalid_value: str,
datelike_typename: str,
ex: Exception,
treatment: DatetimeErrorTreatmentEnum | None,
logger: logging.Logger,
-) -> t.Any:
+) -> t.Any: # noqa: ANN401
"""Apply treatment or raise an error for invalid time values."""
treatment = treatment or DatetimeErrorTreatmentEnum.ERROR
msg = (
@@ -330,7 +331,7 @@ def _warn_unmapped_properties(
stream_name: str,
property_names: tuple[str],
logger: logging.Logger,
-):
+) -> None:
logger.warning(
"Properties %s were present in the '%s' stream but "
"not found in catalog schema. Ignoring.",
@@ -387,6 +388,7 @@ def conform_record_data_types(
return rec
+# TODO: This is in dire need of refactoring. It's a mess.
def _conform_record_data_types( # noqa: PLR0912
input_object: dict[str, t.Any],
schema: dict,
@@ -404,7 +406,7 @@ def _conform_record_data_types( # noqa: PLR0912
input_object: A single record
schema: JSON schema the given input_object is expected to meet
level: Specifies how recursive the conformance process should be
- parent: '.' seperated path to this element from the object root (for logging)
+ parent: '.' separated path to this element from the object root (for logging)
"""
output_object: dict[str, t.Any] = {}
unmapped_properties: list[str] = []
@@ -468,9 +470,9 @@ def _conform_record_data_types( # noqa: PLR0912
def _conform_primitive_property( # noqa: PLR0911
- elem: t.Any,
+ elem: t.Any, # noqa: ANN401
property_schema: dict,
-) -> t.Any:
+) -> t.Any: # noqa: ANN401
"""Converts a primitive (i.e. not object or array) to a json compatible type."""
if isinstance(elem, (datetime.datetime, pendulum.DateTime)):
return to_json_compatible(elem)
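
The `append_type` change above is behavioral, not cosmetic: an unrecognized schema node now logs a warning and returns the input copy rather than raising `ValueError`. A sketch, assuming the usual deep copy at the top of the helper:

```python
from singer_sdk.helpers._typing import append_type

print(append_type({"type": "string"}, "null"))  # {'type': ['string', 'null']}
print(append_type({}, "null"))  # logs a warning, returns {} unchanged
```
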
diff --git a/singer_sdk/helpers/capabilities.py b/singer_sdk/helpers/capabilities.py
index 55aff8ce3..c3d124093 100644
--- a/singer_sdk/helpers/capabilities.py
+++ b/singer_sdk/helpers/capabilities.py
@@ -62,7 +62,7 @@
Property(
"format",
StringType,
- allowed_values=["jsonl"],
+ allowed_values=["jsonl", "parquet"],
description="Format to use for batch files.",
),
Property(
@@ -99,6 +99,47 @@
description="The default target database schema name to use for all streams.",
),
).to_dict()
+ADD_RECORD_METADATA_CONFIG = PropertiesList(
+ Property(
+ "add_record_metadata",
+ BooleanType(),
+ description="Add metadata to records.",
+ ),
+).to_dict()
+
+
+class TargetLoadMethods(str, Enum):
+ """Target-specific capabilities."""
+
+ # always write all input records whether the record already exists or not
+ APPEND_ONLY = "append-only"
+
+ # update existing records and insert new records
+ UPSERT = "upsert"
+
+ # delete all existing records and insert all input records
+ OVERWRITE = "overwrite"
+
+
+TARGET_LOAD_METHOD_CONFIG = PropertiesList(
+ Property(
+ "load_method",
+ StringType(),
+ description=(
+ "The method to use when loading data into the destination. "
+ "`append-only` will always write all input records whether that records "
+ "already exists or not. `upsert` will update existing records and insert "
+ "new records. `overwrite` will delete all existing records and insert all "
+ "input records."
+ ),
+ allowed_values=[
+ TargetLoadMethods.APPEND_ONLY,
+ TargetLoadMethods.UPSERT,
+ TargetLoadMethods.OVERWRITE,
+ ],
+ default=TargetLoadMethods.APPEND_ONLY,
+ ),
+).to_dict()
class DeprecatedEnum(Enum):
@@ -120,7 +161,7 @@ def __new__(
"""
member: DeprecatedEnum = object.__new__(cls)
member._value_ = value
- member._deprecation = deprecation
+ member.deprecation = deprecation
return member
@property
@@ -130,8 +171,8 @@ def deprecation_message(self) -> str | None:
Returns:
Deprecation message.
"""
- self._deprecation: str | None
- return self._deprecation
+ self.deprecation: str | None
+ return self.deprecation
def emit_warning(self) -> None:
"""Emit deprecation warning."""
diff --git a/singer_sdk/helpers/jsonpath.py b/singer_sdk/helpers/jsonpath.py
index 9e2956f19..33e65c7fa 100644
--- a/singer_sdk/helpers/jsonpath.py
+++ b/singer_sdk/helpers/jsonpath.py
@@ -2,6 +2,7 @@
from __future__ import annotations
+import logging
import typing as t
import memoization
@@ -11,6 +12,9 @@
import jsonpath_ng
+logger = logging.getLogger(__name__)
+
+
def extract_jsonpath(
expression: str,
input: dict | list, # noqa: A002
@@ -27,7 +31,11 @@ def extract_jsonpath(
compiled_jsonpath = _compile_jsonpath(expression)
match: jsonpath_ng.DatumInContext
- for match in compiled_jsonpath.find(input):
+ matches = compiled_jsonpath.find(input)
+
+ logger.info("JSONPath %s match count: %d", expression, len(matches))
+
+ for match in matches:
yield match.value
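
Usage is unchanged; the only observable difference is the new match-count log line (matches are now materialized up front so they can be counted):

```python
from singer_sdk.helpers.jsonpath import extract_jsonpath

payload = {"data": {"items": [{"id": 1}, {"id": 2}]}}
print(list(extract_jsonpath("$.data.items[*].id", payload)))
# [1, 2] -- and the logger reports: JSONPath $.data.items[*].id match count: 2
```
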
diff --git a/singer_sdk/io_base.py b/singer_sdk/io_base.py
index 07da6e63e..f7c2ed668 100644
--- a/singer_sdk/io_base.py
+++ b/singer_sdk/io_base.py
@@ -10,7 +10,9 @@
import typing as t
from collections import Counter, defaultdict
-from singer_sdk._singerlib import SingerMessageType
+from singer_sdk._singerlib.messages import Message, SingerMessageType
+from singer_sdk._singerlib.messages import format_message as singer_format_message
+from singer_sdk._singerlib.messages import write_message as singer_write_message
from singer_sdk.helpers._compat import final
logger = logging.getLogger(__name__)
@@ -48,7 +50,7 @@ def _assert_line_requires(line_dict: dict, requires: set[str]) -> None:
if not requires.issubset(line_dict):
missing = requires - set(line_dict)
msg = f"Line is missing required {', '.join(missing)} key(s): {line_dict}"
- raise Exception(msg)
+ raise Exception(msg) # TODO: Raise a more specific exception
def deserialize_json(self, line: str) -> dict:
"""Deserialize a line of json.
@@ -143,3 +145,26 @@ def _process_unknown_message(self, message_dict: dict) -> None:
def _process_endofpipe(self) -> None:
logger.debug("End of pipe reached")
+
+
+class SingerWriter:
+ """Interface for all plugins writting Singer messages to stdout."""
+
+ def format_message(self, message: Message) -> str:
+ """Format a message as a JSON string.
+
+ Args:
+ message: The message to format.
+
+ Returns:
+ The formatted message.
+ """
+ return singer_format_message(message)
+
+ def write_message(self, message: Message) -> None:
+ """Write a message to stdout.
+
+ Args:
+ message: The message to write.
+ """
+ singer_write_message(message)
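
A sketch of the new mixin in use (the message construction is illustrative):

```python
import singer_sdk._singerlib as singer
from singer_sdk.io_base import SingerWriter

writer = SingerWriter()
msg = singer.StateMessage(value={"bookmarks": {}})
print(writer.format_message(msg))  # JSON string, not yet written
writer.write_message(msg)  # serialized and flushed to stdout
```
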
diff --git a/singer_sdk/mapper.py b/singer_sdk/mapper.py
index 031ca0c82..aeea46812 100644
--- a/singer_sdk/mapper.py
+++ b/singer_sdk/mapper.py
@@ -6,14 +6,17 @@
from __future__ import annotations
import abc
+import ast
import copy
import datetime
import hashlib
import logging
import typing as t
+import simpleeval # type: ignore[import-untyped]
+
+import singer_sdk.typing as th
from singer_sdk.exceptions import MapExpressionError, StreamMapConfigError
-from singer_sdk.helpers import _simpleeval as simpleeval
from singer_sdk.helpers._catalog import get_selected_schema
from singer_sdk.helpers._flattening import (
FlatteningOptions,
@@ -21,15 +24,6 @@
flatten_schema,
get_flattening_options,
)
-from singer_sdk.typing import (
- CustomType,
- IntegerType,
- JSONTypeHelper,
- NumberType,
- PropertiesList,
- Property,
- StringType,
-)
if t.TYPE_CHECKING:
import sys
@@ -72,7 +66,7 @@ def __init__(
self,
stream_alias: str,
raw_schema: dict,
- key_properties: list[str] | None,
+ key_properties: t.Sequence[str] | None,
flattening_options: FlatteningOptions | None,
) -> None:
"""Initialize mapper.
@@ -84,7 +78,7 @@ def __init__(
flattening_options: Flattening options, or None to skip flattening.
"""
self.stream_alias = stream_alias
- self.raw_schema = raw_schema
+ self.raw_schema = copy.deepcopy(raw_schema)
self.raw_key_properties = key_properties
self.transformed_schema = raw_schema
self.transformed_key_properties = key_properties
@@ -238,7 +232,7 @@ def __init__(
stream_alias: str,
map_config: dict,
raw_schema: dict,
- key_properties: list[str] | None,
+ key_properties: t.Sequence[str] | None,
map_transform: dict,
flattening_options: FlatteningOptions | None,
) -> None:
@@ -267,6 +261,7 @@ def __init__(
self._transform_fn,
self.transformed_schema,
) = self._init_functions_and_schema(stream_map=map_transform)
+ self.expr_evaluator = simpleeval.EvalWithCompoundTypes(functions=self.functions)
def transform(self, record: dict) -> dict | None:
"""Return a transformed record.
@@ -278,10 +273,7 @@ def transform(self, record: dict) -> dict | None:
The transformed record.
"""
transformed_record = self._transform_fn(record)
- if not transformed_record:
- return None
-
- return super().transform(transformed_record)
+ return super().transform(transformed_record) if transformed_record else None
def get_filter_result(self, record: dict) -> bool:
"""Return True to include or False to exclude.
@@ -296,7 +288,7 @@ def get_filter_result(self, record: dict) -> bool:
@property
def functions(self) -> dict[str, t.Callable]:
- """Get availabale transformation functions.
+ """Get available transformation functions.
Returns:
Functions which should be available for expression evaluation.
@@ -304,18 +296,21 @@ def functions(self) -> dict[str, t.Callable]:
funcs: dict[str, t.Any] = simpleeval.DEFAULT_FUNCTIONS.copy()
funcs["md5"] = md5
funcs["datetime"] = datetime
+ funcs["bool"] = bool
return funcs
def _eval(
self,
expr: str,
+ expr_parsed: ast.Expr,
record: dict,
property_name: str | None,
) -> str | int | float:
"""Solve an expression.
Args:
- expr: String expression to evaluate.
+ expr: String expression to evaluate (used to raise human-readable errors).
+ expr_parsed: Parsed expression abstract syntax tree.
record: Individual stream record.
property_name: Name of property to transform in the record.
@@ -333,10 +328,10 @@ def _eval(
# Allow access to original property value if applicable
names["self"] = record[property_name]
try:
- result: str | int | float = simpleeval.simple_eval(
+ self.expr_evaluator.names = names
+ result: str | int | float = self.expr_evaluator.eval(
expr,
- functions=self.functions,
- names=names,
+ previously_parsed=expr_parsed,
)
except (simpleeval.InvalidExpression, SyntaxError) as ex:
msg = f"Failed to evaluate simpleeval expressions {expr}."
@@ -349,8 +344,8 @@ def _eval(
def _eval_type(
self,
expr: str,
- default: JSONTypeHelper | None = None,
- ) -> JSONTypeHelper:
+ default: th.JSONTypeHelper | None = None,
+ ) -> th.JSONTypeHelper:
"""Evaluate an expression's type.
Args:
@@ -367,21 +362,25 @@ def _eval_type(
msg = "Expression should be str, not None"
raise ValueError(msg)
- default = default or StringType()
+ default = default or th.StringType()
+
+ # If a field is set to "record", then it should be an "object" in the schema
+ if expr == "record":
+ return th.CustomType(self.raw_schema)
if expr.startswith("float("):
- return NumberType()
+ return th.NumberType()
if expr.startswith("int("):
- return IntegerType()
+ return th.IntegerType()
if expr.startswith("str("):
- return StringType()
+ return th.StringType()
- if expr[0] == "'" and expr[-1] == "'":
- return StringType()
+ if expr.startswith("bool("):
+ return th.BooleanType()
- return default
+ return th.StringType() if expr[0] == "'" and expr[-1] == "'" else default
def _init_functions_and_schema( # noqa: PLR0912, PLR0915, C901
self,
@@ -398,6 +397,7 @@ def _init_functions_and_schema( # noqa: PLR0912, PLR0915, C901
Raises:
NotImplementedError: TODO
StreamMapConfigError: TODO
+ MapExpressionError: TODO
"""
stream_map = copy.copy(stream_map)
@@ -405,6 +405,12 @@ def _init_functions_and_schema( # noqa: PLR0912, PLR0915, C901
include_by_default = True
if stream_map and MAPPER_FILTER_OPTION in stream_map:
filter_rule = stream_map.pop(MAPPER_FILTER_OPTION)
+ try:
+ filter_rule_parsed: ast.Expr = ast.parse(filter_rule).body[0] # type: ignore[arg-type,assignment]
+ except (SyntaxError, IndexError) as ex:
+ msg = f"Failed to parse expression {filter_rule}."
+ raise MapExpressionError(msg) from ex
+
logging.info(
"Found '%s' filter rule: %s",
self.stream_alias,
@@ -442,11 +448,12 @@ def _init_functions_and_schema( # noqa: PLR0912, PLR0915, C901
transformed_schema = copy.copy(self.raw_schema)
if not include_by_default:
# Start with only the defined (or transformed) key properties
- transformed_schema = PropertiesList().to_dict()
+ transformed_schema = th.PropertiesList().to_dict()
if "properties" not in transformed_schema:
transformed_schema["properties"] = {}
+ stream_map_parsed: list[tuple[str, str | None, ast.Expr | None]] = []
for prop_key, prop_def in list(stream_map.items()):
if prop_def in {None, NULL_STRING}:
if prop_key in (self.transformed_key_properties or []):
@@ -459,8 +466,9 @@ def _init_functions_and_schema( # noqa: PLR0912, PLR0915, C901
)
raise StreamMapConfigError(msg)
transformed_schema["properties"].pop(prop_key, None)
+ stream_map_parsed.append((prop_key, prop_def, None))
elif isinstance(prop_def, str):
- default_type: JSONTypeHelper = StringType() # Fallback to string
+ default_type: th.JSONTypeHelper = th.StringType() # Fallback to string
existing_schema: dict = (
# Use transformed schema if available
transformed_schema["properties"].get(prop_key, {})
@@ -469,14 +477,21 @@ def _init_functions_and_schema( # noqa: PLR0912, PLR0915, C901
)
if existing_schema:
# Set default type if property exists already in JSON Schema
- default_type = CustomType(existing_schema)
+ default_type = th.CustomType(existing_schema)
transformed_schema["properties"].update(
- Property(
+ th.Property(
prop_key,
self._eval_type(prop_def, default=default_type),
).to_dict(),
)
+ try:
+ parsed_def: ast.Expr = ast.parse(prop_def).body[0] # type: ignore[assignment]
+ stream_map_parsed.append((prop_key, prop_def, parsed_def))
+ except (SyntaxError, IndexError) as ex:
+ msg = f"Failed to parse expression {prop_def}."
+ raise MapExpressionError(msg) from ex
+
else:
msg = (
f"Unexpected type '{type(prop_def).__name__}' in stream map for "
@@ -498,10 +513,14 @@ def _init_functions_and_schema( # noqa: PLR0912, PLR0915, C901
# Declare function variables
- def eval_filter(filter_rule: str) -> t.Callable[[dict], bool]:
+ def eval_filter(
+ filter_rule: str,
+ filter_rule_parsed: ast.Expr,
+ ) -> t.Callable[[dict], bool]:
def _inner(record: dict) -> bool:
filter_result = self._eval(
expr=filter_rule,
+ expr_parsed=filter_rule_parsed,
record=record,
property_name=None,
)
@@ -523,7 +542,7 @@ def always_true(record: dict) -> bool:
return True
if isinstance(filter_rule, str):
- filter_fn = eval_filter(filter_rule)
+ filter_fn = eval_filter(filter_rule, filter_rule_parsed)
elif filter_rule is None:
filter_fn = always_true
else:
@@ -548,16 +567,17 @@ def transform_fn(record: dict) -> dict | None:
if key_property in record:
result[key_property] = record[key_property]
- for prop_key, prop_def in list(stream_map.items()):
+ for prop_key, prop_def, prop_def_parsed in stream_map_parsed:
if prop_def in {None, NULL_STRING}:
# Remove property from result
result.pop(prop_key, None)
continue
- if isinstance(prop_def, str):
+ if isinstance(prop_def_parsed, ast.Expr):
# Apply property transform
result[prop_key] = self._eval(
- expr=prop_def,
+ expr=prop_def, # type: ignore[arg-type]
+ expr_parsed=prop_def_parsed,
record=record,
property_name=prop_key,
)
@@ -646,7 +666,7 @@ def register_raw_stream_schema( # noqa: PLR0912, C901
self,
stream_name: str,
schema: dict,
- key_properties: list[str] | None,
+ key_properties: t.Sequence[str] | None,
) -> None:
"""Register a new stream as described by its name and schema.
diff --git a/singer_sdk/mapper_base.py b/singer_sdk/mapper_base.py
index b0be198bd..2cc943a46 100644
--- a/singer_sdk/mapper_base.py
+++ b/singer_sdk/mapper_base.py
@@ -7,14 +7,16 @@
import click
-import singer_sdk._singerlib as singer
from singer_sdk.helpers._classproperty import classproperty
from singer_sdk.helpers.capabilities import CapabilitiesEnum, PluginCapabilities
-from singer_sdk.io_base import SingerReader
+from singer_sdk.io_base import SingerReader, SingerWriter
from singer_sdk.plugin_base import PluginBase
+if t.TYPE_CHECKING:
+ import singer_sdk._singerlib as singer
-class InlineMapper(PluginBase, SingerReader, metaclass=abc.ABCMeta):
+
+class InlineMapper(PluginBase, SingerReader, SingerWriter, metaclass=abc.ABCMeta):
"""Abstract base class for inline mappers."""
@classproperty
@@ -28,10 +30,9 @@ def capabilities(self) -> list[CapabilitiesEnum]:
PluginCapabilities.STREAM_MAPS,
]
- @staticmethod
- def _write_messages(messages: t.Iterable[singer.Message]) -> None:
+ def _write_messages(self, messages: t.Iterable[singer.Message]) -> None:
for message in messages:
- singer.write_message(message)
+ self.write_message(message)
def _process_schema_message(self, message_dict: dict) -> None:
self._write_messages(self.map_schema_message(message_dict))
diff --git a/singer_sdk/metrics.py b/singer_sdk/metrics.py
index 89d51a338..e4191eadf 100644
--- a/singer_sdk/metrics.py
+++ b/singer_sdk/metrics.py
@@ -263,10 +263,9 @@ def __exit__(
exc_tb: The exception traceback.
"""
if Tag.STATUS not in self.tags:
- if exc_type is None:
- self.tags[Tag.STATUS] = Status.SUCCEEDED
- else:
- self.tags[Tag.STATUS] = Status.FAILED
+ self.tags[Tag.STATUS] = (
+ Status.SUCCEEDED if exc_type is None else Status.FAILED
+ )
log(self.logger, Point("timer", self.metric, self.elapsed(), self.tags))
def elapsed(self) -> float:
diff --git a/singer_sdk/pagination.py b/singer_sdk/pagination.py
index f00bb0920..238740768 100644
--- a/singer_sdk/pagination.py
+++ b/singer_sdk/pagination.py
@@ -30,7 +30,7 @@ def first(iterable: t.Iterable[T]) -> T:
Returns:
The first element of the iterable.
- >>> first('ABC')
+ >>> first("ABC")
'A'
"""
return next(iter(iterable))
@@ -205,6 +205,7 @@ class MyHATEOASPaginator(BaseHATEOASPaginator):
def get_next_url(self, response):
return response.json().get("next")
+
class MyStream(Stream):
def get_new_paginator(self):
return MyHATEOASPaginator()
diff --git a/singer_sdk/plugin_base.py b/singer_sdk/plugin_base.py
index d81e8f7c3..171e326e6 100644
--- a/singer_sdk/plugin_base.py
+++ b/singer_sdk/plugin_base.py
@@ -6,6 +6,7 @@
import logging
import os
import sys
+import time
import typing as t
from pathlib import Path, PurePath
from types import MappingProxyType
@@ -71,6 +72,43 @@ def __init__(self) -> None:
super().__init__("Mapper not initialized. Please call setup_mapper() first.")
+class SingerCommand(click.Command):
+ """Custom click command class for Singer packages."""
+
+ def __init__(
+ self,
+ *args: t.Any,
+ logger: logging.Logger,
+ **kwargs: t.Any,
+ ) -> None:
+ """Initialize the command.
+
+ Args:
+ *args: Positional `click.Command` arguments.
+ logger: A logger instance.
+ **kwargs: Keyword `click.Command` arguments.
+ """
+ super().__init__(*args, **kwargs)
+ self.logger = logger
+
+ def invoke(self, ctx: click.Context) -> t.Any: # noqa: ANN401
+ """Invoke the command, capturing warnings and logging them.
+
+ Args:
+ ctx: The `click` context.
+
+ Returns:
+ The result of the command invocation.
+ """
+ logging.captureWarnings(capture=True)
+ try:
+ return super().invoke(ctx)
+ except ConfigValidationError as exc:
+ for error in exc.errors:
+ self.logger.error("Config validation error: %s", error)
+ sys.exit(1)
+
+
class PluginBase(metaclass=abc.ABCMeta):
"""Abstract base class for taps."""
@@ -100,7 +138,7 @@ def logger(cls) -> logging.Logger: # noqa: N805
logger = logging.getLogger(cls.name)
- if log_level is not None and log_level.upper() in logging._levelToName.values():
+ if log_level is not None and log_level.upper() in logging._levelToName.values(): # noqa: SLF001
logger.setLevel(log_level.upper())
return logger
@@ -149,11 +187,14 @@ def __init__(
if self._is_secret_config(k):
config_dict[k] = SecretString(v)
self._config = config_dict
+ metrics._setup_logging(self.config) # noqa: SLF001
+ self.metrics_logger = metrics.get_metrics_logger()
+
self._validate_config(raise_errors=validate_config)
self._mapper: PluginMapper | None = None
- metrics._setup_logging(self.config)
- self.metrics_logger = metrics.get_metrics_logger()
+ # Initialization timestamp
+ self.__initialized_at = int(time.time() * 1000)
def setup_mapper(self) -> None:
"""Initialize the plugin mapper for this tap."""
@@ -185,6 +226,15 @@ def mapper(self, mapper: PluginMapper) -> None:
"""
self._mapper = mapper
+ @property
+ def initialized_at(self) -> int:
+ """Start time of the plugin.
+
+ Returns:
+ The start time of the plugin.
+ """
+ return self.__initialized_at
+
@classproperty
def capabilities(self) -> list[CapabilitiesEnum]:
"""Get capabilities.
@@ -338,27 +388,19 @@ def _is_secret_config(config_key: str) -> bool:
"""
return is_common_secret_key(config_key)
- def _validate_config(
- self,
- *,
- raise_errors: bool = True,
- warnings_as_errors: bool = False,
- ) -> tuple[list[str], list[str]]:
+ def _validate_config(self, *, raise_errors: bool = True) -> list[str]:
"""Validate configuration input against the plugin configuration JSON schema.
Args:
raise_errors: Flag to throw an exception if any validation errors are found.
- warnings_as_errors: Flag to throw an exception if any warnings were emitted.
Returns:
- A tuple of configuration validation warnings and errors.
+ A list of validation errors.
Raises:
ConfigValidationError: If raise_errors is True and validation fails.
"""
- warnings: list[str] = []
errors: list[str] = []
- log_fn = self.logger.info
config_jsonschema = self.config_jsonschema
if config_jsonschema:
@@ -376,19 +418,11 @@ def _validate_config(
f"JSONSchema was: {config_jsonschema}"
)
if raise_errors:
- raise ConfigValidationError(summary)
+ raise ConfigValidationError(summary, errors=errors)
- log_fn = self.logger.warning
- else:
- summary = f"Config validation passed with {len(warnings)} warnings."
- for warning in warnings:
- summary += f"\n{warning}"
+ self.logger.warning(summary)
- if warnings_as_errors and raise_errors and warnings:
- msg = f"One or more warnings ocurred during validation: {warnings}"
- raise ConfigValidationError(msg)
- log_fn(summary)
- return warnings, errors
+ return errors
@classmethod
def print_version(
@@ -542,7 +576,7 @@ def get_singer_command(cls: type[PluginBase]) -> click.Command:
Returns:
A callable CLI object.
"""
- return click.Command(
+ return SingerCommand(
name=cls.name,
callback=cls.invoke,
context_settings={"help_option_names": ["--help"]},
@@ -583,6 +617,7 @@ def get_singer_command(cls: type[PluginBase]) -> click.Command:
is_eager=True,
),
],
+ logger=cls.logger,
)
@plugin_cli
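The new `SingerCommand` wraps `click.Command.invoke` so config validation failures are logged one error per line and exit with status 1 instead of dumping a traceback. A rough standalone sketch of the same wrapping pattern (the `ConfigError` class below is a stand-in for the SDK's `ConfigValidationError`):

```python
from __future__ import annotations

import logging
import sys
import typing as t

import click


class ConfigError(Exception):
    """Stand-in for ConfigValidationError, which carries per-field errors."""

    def __init__(self, message: str, errors: list[str]) -> None:
        super().__init__(message)
        self.errors = errors


class LoggedCommand(click.Command):
    def __init__(self, *args: t.Any, logger: logging.Logger, **kwargs: t.Any) -> None:
        super().__init__(*args, **kwargs)
        self.logger = logger

    def invoke(self, ctx: click.Context) -> t.Any:
        logging.captureWarnings(capture=True)  # route warnings through logging
        try:
            return super().invoke(ctx)
        except ConfigError as exc:
            for error in exc.errors:
                self.logger.error("Config validation error: %s", error)
            sys.exit(1)
```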
diff --git a/singer_sdk/sinks/core.py b/singer_sdk/sinks/core.py
index ec43c4060..f1a7c0f92 100644
--- a/singer_sdk/sinks/core.py
+++ b/singer_sdk/sinks/core.py
@@ -5,6 +5,7 @@
import abc
import copy
import datetime
+import importlib.util
import json
import time
import typing as t
@@ -12,8 +13,7 @@
from gzip import open as gzip_open
from types import MappingProxyType
-from dateutil import parser
-from jsonschema import Draft7Validator, FormatChecker
+from jsonschema import Draft7Validator
from singer_sdk.exceptions import MissingKeyPropertiesError
from singer_sdk.helpers._batch import (
@@ -22,7 +22,12 @@
BatchFileFormat,
StorageTarget,
)
-from singer_sdk.helpers._compat import final
+from singer_sdk.helpers._compat import (
+ date_fromisoformat,
+ datetime_fromisoformat,
+ final,
+ time_fromisoformat,
+)
from singer_sdk.helpers._typing import (
DatetimeErrorTreatmentEnum,
get_datelike_property_type,
@@ -32,7 +37,7 @@
if t.TYPE_CHECKING:
from logging import Logger
- from singer_sdk.plugin_base import PluginBase
+ from singer_sdk.target_base import Target
JSONSchemaValidator = Draft7Validator
@@ -48,10 +53,10 @@ class Sink(metaclass=abc.ABCMeta):
def __init__(
self,
- target: PluginBase,
+ target: Target,
stream_name: str,
schema: dict,
- key_properties: list[str] | None,
+ key_properties: t.Sequence[str] | None,
) -> None:
"""Initialize target sink.
@@ -61,7 +66,8 @@ def __init__(
schema: Schema of the stream to sink.
key_properties: Primary key of the stream to sink.
"""
- self.logger = target.logger
+ self.logger = target.logger.getChild(stream_name)
+ self.sync_started_at = target.initialized_at
self._config = dict(target.config)
self._pending_batch: dict | None = None
self.stream_name = stream_name
@@ -89,7 +95,10 @@ def __init__(
self._batch_records_read: int = 0
self._batch_dupe_records_merged: int = 0
- self._validator = Draft7Validator(schema, format_checker=FormatChecker())
+ self._validator = Draft7Validator(
+ schema,
+ format_checker=Draft7Validator.FORMAT_CHECKER,
+ )
def _get_context(self, record: dict) -> dict: # noqa: ARG002
"""Return an empty dictionary by default.
@@ -212,9 +221,12 @@ def datetime_error_treatment(self) -> DatetimeErrorTreatmentEnum:
return DatetimeErrorTreatmentEnum.ERROR
@property
- def key_properties(self) -> list[str]:
+ def key_properties(self) -> t.Sequence[str]:
"""Return key properties.
+ Override this method to return a list of key properties in a format that is
+ compatible with the target.
+
Returns:
A list of stream key properties.
"""
@@ -235,7 +247,7 @@ def _add_sdc_metadata_to_record(
Args:
record: Individual record in the stream.
- message: TODO
+ message: The record message.
context: Stream partition or context dictionary.
"""
record["_sdc_extracted_at"] = message.get("time_extracted")
@@ -243,12 +255,13 @@ def _add_sdc_metadata_to_record(
tz=datetime.timezone.utc,
).isoformat()
record["_sdc_batched_at"] = (
- context.get("batch_start_time", None)
+ context.get("batch_start_time")
or datetime.datetime.now(tz=datetime.timezone.utc)
).isoformat()
record["_sdc_deleted_at"] = record.get("_sdc_deleted_at")
record["_sdc_sequence"] = int(round(time.time() * 1000))
record["_sdc_table_version"] = message.get("version")
+ record["_sdc_sync_started_at"] = self.sync_started_at
def _add_sdc_metadata_to_schema(self) -> None:
"""Add _sdc metadata columns.
@@ -267,7 +280,7 @@ def _add_sdc_metadata_to_schema(self) -> None:
"type": ["null", "string"],
"format": "date-time",
}
- for col in ("_sdc_sequence", "_sdc_table_version"):
+ for col in ("_sdc_sequence", "_sdc_table_version", "_sdc_sync_started_at"):
properties_dict[col] = {"type": ["null", "integer"]}
def _remove_sdc_metadata_from_schema(self) -> None:
@@ -284,6 +297,7 @@ def _remove_sdc_metadata_from_schema(self) -> None:
"_sdc_deleted_at",
"_sdc_sequence",
"_sdc_table_version",
+ "_sdc_sync_started_at",
):
properties_dict.pop(col, None)
@@ -302,6 +316,7 @@ def _remove_sdc_metadata_from_record(self, record: dict) -> None:
record.pop("_sdc_deleted_at", None)
record.pop("_sdc_sequence", None)
record.pop("_sdc_table_version", None)
+ record.pop("_sdc_sync_started_at", None)
# Record validation
@@ -331,10 +346,10 @@ def _singer_validate_message(self, record: dict) -> None:
Raises:
MissingKeyPropertiesError: If record is missing one or more key properties.
"""
- if not all(key_property in record for key_property in self.key_properties):
+ if any(key_property not in record for key_property in self._key_properties):
msg = (
f"Record is missing one or more key_properties. \n"
- f"Key Properties: {self.key_properties}, "
+ f"Key Properties: {self._key_properties}, "
f"Record Keys: {list(record.keys())}"
)
raise MissingKeyPropertiesError(
@@ -358,14 +373,22 @@ def _parse_timestamps_in_record(
schema: TODO
treatment: TODO
"""
- for key in record:
+ for key, value in record.items():
+ if key not in schema["properties"]:
+ self.logger.warning("No schema for record field '%s'", key)
+ continue
datelike_type = get_datelike_property_type(schema["properties"][key])
if datelike_type:
- date_val = record[key]
+ date_val = value
try:
- if record[key] is not None:
- date_val = parser.parse(date_val)
- except parser.ParserError as ex:
+ if value is not None:
+ if datelike_type == "time":
+ date_val = time_fromisoformat(date_val)
+ elif datelike_type == "date":
+ date_val = date_fromisoformat(date_val)
+ else:
+ date_val = datetime_fromisoformat(date_val)
+ except ValueError as ex:
date_val = handle_invalid_timestamp_in_record(
record,
[key],
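The hunk above drops `dateutil` in favor of the stdlib `fromisoformat` family, dispatching on whether the schema declares the field as `time`, `date`, or `date-time` (the SDK's `_compat` shims cover pre-3.11 parsing gaps such as a trailing `Z`). A minimal stdlib-only sketch of the dispatch; parse failures raise `ValueError`, which is why the `except` clause changed:

```python
from __future__ import annotations

import datetime


def parse_datelike(
    value: str,
    datelike_type: str,
) -> datetime.time | datetime.date | datetime.datetime:
    """Parse an ISO-8601 string according to its JSON Schema format."""
    if datelike_type == "time":
        return datetime.time.fromisoformat(value)  # e.g. "13:45:00"
    if datelike_type == "date":
        return datetime.date.fromisoformat(value)  # e.g. "2024-01-31"
    return datetime.datetime.fromisoformat(value)  # e.g. "2024-01-31T13:45:00+00:00"


assert parse_datelike("2024-01-31", "date") == datetime.date(2024, 1, 31)
```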
@@ -518,15 +541,24 @@ def process_batch_files(
tail,
mode="rb",
) as file:
- open_file = (
+ context_file = (
gzip_open(file) if encoding.compression == "gzip" else file
)
- context = {
- "records": [
- json.loads(line)
- for line in open_file # type: ignore[attr-defined]
- ],
- }
+ context = {"records": [json.loads(line) for line in context_file]} # type: ignore[attr-defined]
+ self.process_batch(context)
+ elif (
+ importlib.util.find_spec("pyarrow")
+ and encoding.format == BatchFileFormat.PARQUET
+ ):
+ import pyarrow.parquet as pq
+
+ with storage.fs(create=False) as batch_fs, batch_fs.open(
+ tail,
+ mode="rb",
+ ) as file:
+ context_file = file
+ table = pq.read_table(context_file)
+ context = {"records": table.to_pylist()}
self.process_batch(context)
else:
msg = f"Unsupported batch encoding format: {encoding.format}"
diff --git a/singer_sdk/sinks/sql.py b/singer_sdk/sinks/sql.py
index 6b6f8d121..eb6dcfef6 100644
--- a/singer_sdk/sinks/sql.py
+++ b/singer_sdk/sinks/sql.py
@@ -8,7 +8,7 @@
from copy import copy
from textwrap import dedent
-import sqlalchemy
+import sqlalchemy as sa
from pendulum import now
from sqlalchemy.sql.expression import bindparam
@@ -20,7 +20,7 @@
if t.TYPE_CHECKING:
from sqlalchemy.sql import Executable
- from singer_sdk.plugin_base import PluginBase
+ from singer_sdk.target_base import Target
class SQLSink(BatchSink):
@@ -32,10 +32,10 @@ class SQLSink(BatchSink):
def __init__(
self,
- target: PluginBase,
+ target: Target,
stream_name: str,
schema: dict,
- key_properties: list[str] | None,
+ key_properties: t.Sequence[str] | None,
connector: SQLConnector | None = None,
) -> None:
"""Initialize SQL Sink.
@@ -61,7 +61,7 @@ def connector(self) -> SQLConnector:
return self._connector
@property
- def connection(self) -> sqlalchemy.engine.Connection:
+ def connection(self) -> sa.engine.Connection:
"""Get or set the SQLAlchemy connection for this sink.
Returns:
@@ -98,13 +98,7 @@ def schema_name(self) -> str | None:
if default_target_schema:
return default_target_schema
- if len(parts) in {2, 3}:
- # Stream name is a two-part or three-part identifier.
- # Use the second-to-last part as the schema name.
- return self.conform_name(parts[-2], "schema")
-
- # Schema name not detected.
- return None
+ return self.conform_name(parts[-2], "schema") if len(parts) in {2, 3} else None
@property
def database_name(self) -> str | None:
@@ -245,7 +239,7 @@ def setup(self) -> None:
)
@property
- def key_properties(self) -> list[str]:
+ def key_properties(self) -> t.Sequence[str]:
"""Return key properties, conformed to target system naming requirements.
Returns:
@@ -320,17 +314,23 @@ def bulk_insert_records(
schema,
)
if isinstance(insert_sql, str):
- insert_sql = sqlalchemy.text(insert_sql)
+ insert_sql = sa.text(insert_sql)
+
+ conformed_records = [self.conform_record(record) for record in records]
+ property_names = list(self.conform_schema(schema)["properties"].keys())
+
+ # Create new record dicts with missing properties filled in with None
+ new_records = [
+ {name: record.get(name) for name in property_names}
+ for record in conformed_records
+ ]
- conformed_records = (
- [self.conform_record(record) for record in records]
- if isinstance(records, list)
- else (self.conform_record(record) for record in records)
- )
self.logger.info("Inserting with SQL: %s", insert_sql)
- with self.connector._connect() as conn, conn.begin():
- conn.execute(insert_sql, conformed_records)
- return len(conformed_records) if isinstance(conformed_records, list) else None
+
+ with self.connector._connect() as conn, conn.begin(): # noqa: SLF001
+ result = conn.execute(insert_sql, new_records)
+
+ return result.rowcount
def merge_upsert_from_table(
self,
@@ -375,13 +375,13 @@ def activate_version(self, new_version: int) -> None:
self.connector.prepare_column(
self.full_table_name,
self.version_column_name,
- sql_type=sqlalchemy.types.Integer(),
+ sql_type=sa.types.Integer(),
)
if self.config.get("hard_delete", True):
- with self.connector._connect() as conn, conn.begin():
+ with self.connector._connect() as conn, conn.begin(): # noqa: SLF001
conn.execute(
- sqlalchemy.text(
+ sa.text(
f"DELETE FROM {self.full_table_name} " # noqa: S608
f"WHERE {self.version_column_name} <= {new_version}",
),
@@ -395,20 +395,20 @@ def activate_version(self, new_version: int) -> None:
self.connector.prepare_column(
self.full_table_name,
self.soft_delete_column_name,
- sql_type=sqlalchemy.types.DateTime(),
+ sql_type=sa.types.DateTime(),
)
- query = sqlalchemy.text(
+ query = sa.text(
f"UPDATE {self.full_table_name}\n"
f"SET {self.soft_delete_column_name} = :deletedate \n"
f"WHERE {self.version_column_name} < :version \n"
f" AND {self.soft_delete_column_name} IS NULL\n",
)
query = query.bindparams(
- bindparam("deletedate", value=deleted_at, type_=sqlalchemy.types.DateTime),
- bindparam("version", value=new_version, type_=sqlalchemy.types.Integer),
+ bindparam("deletedate", value=deleted_at, type_=sa.types.DateTime),
+ bindparam("version", value=new_version, type_=sa.types.Integer),
)
- with self.connector._connect() as conn, conn.begin():
+ with self.connector._connect() as conn, conn.begin(): # noqa: SLF001
conn.execute(query)
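`bulk_insert_records` now pads every conformed record out to the full property list (absent keys become `None`) so the executemany-style call binds identical parameter sets for every row, and it returns the driver-reported `rowcount` instead of `len(...)`. A rough sketch of the idea against an in-memory SQLite database:

```python
import sqlalchemy as sa

engine = sa.create_engine("sqlite://")
with engine.begin() as conn:
    conn.execute(sa.text("CREATE TABLE users (id INTEGER, name TEXT)"))

property_names = ["id", "name"]
records = [{"id": 1, "name": "a"}, {"id": 2}]  # second row is missing "name"

# Fill missing properties with None so every row binds the same parameters.
new_records = [{name: r.get(name) for name in property_names} for r in records]

insert_sql = sa.text("INSERT INTO users (id, name) VALUES (:id, :name)")
with engine.begin() as conn:
    result = conn.execute(insert_sql, new_records)

# rowcount reflects inserted rows on drivers that report executemany counts.
print(result.rowcount)
```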
diff --git a/singer_sdk/streams/core.py b/singer_sdk/streams/core.py
index 35bc2e79e..459b9e761 100644
--- a/singer_sdk/streams/core.py
+++ b/singer_sdk/streams/core.py
@@ -15,10 +15,11 @@
import singer_sdk._singerlib as singer
from singer_sdk import metrics
-from singer_sdk.batch import JSONLinesBatcher
+from singer_sdk.batch import Batcher
from singer_sdk.exceptions import (
AbortedSyncFailedException,
AbortedSyncPausedException,
+ InvalidReplicationKeyException,
InvalidStreamSortException,
MaxRecordsLimitException,
)
@@ -53,6 +54,7 @@
if t.TYPE_CHECKING:
import logging
+ from singer_sdk.helpers._compat import Traversable
from singer_sdk.tap_base import Tap
# Replication methods
@@ -123,7 +125,7 @@ def __init__(
msg = "Missing argument or class variable 'name'."
raise ValueError(msg)
- self.logger: logging.Logger = tap.logger
+ self.logger: logging.Logger = tap.logger.getChild(self.name)
self.metrics_logger = tap.metrics_logger
self.tap_name: str = tap.name
self._config: dict = dict(tap.config)
@@ -133,9 +135,9 @@ def __init__(
self._stream_maps: list[StreamMap] | None = None
self.forced_replication_method: str | None = None
self._replication_key: str | None = None
- self._primary_keys: list[str] | None = None
+ self._primary_keys: t.Sequence[str] | None = None
self._state_partitioning_keys: list[str] | None = None
- self._schema_filepath: Path | None = None
+ self._schema_filepath: Path | Traversable | None = None
self._metadata: singer.MetadataMapping | None = None
self._mask: singer.SelectionMask | None = None
self._schema: dict
@@ -159,7 +161,7 @@ def __init__(
raise ValueError(msg)
if self.schema_filepath:
- self._schema = json.loads(Path(self.schema_filepath).read_text())
+ self._schema = json.loads(self.schema_filepath.read_text())
if not self.schema:
msg = (
@@ -211,13 +213,23 @@ def is_timestamp_replication_key(self) -> bool:
Returns:
True if the stream uses a timestamp-based replication key.
+
+ Raises:
+ InvalidReplicationKeyException: If the schema does not contain the
+ replication key.
"""
if not self.replication_key:
return False
type_dict = self.schema.get("properties", {}).get(self.replication_key)
+ if type_dict is None:
+ msg = f"Field '{self.replication_key}' is not in schema for stream '{self.name}'" # noqa: E501
+ raise InvalidReplicationKeyException(msg)
return is_datetime_type(type_dict)
- def get_starting_replication_key_value(self, context: dict | None) -> t.Any | None:
+ def get_starting_replication_key_value(
+ self,
+ context: dict | None,
+ ) -> t.Any | None: # noqa: ANN401
"""Get starting replication key.
Will return the value of the stream's replication key when `--state` is passed.
@@ -235,7 +247,11 @@ def get_starting_replication_key_value(self, context: dict | None) -> t.Any | No
"""
state = self.get_context_state(context)
- return get_starting_replication_value(state)
+ return (
+ get_starting_replication_value(state)
+ if self.replication_method != REPLICATION_FULL_TABLE
+ else None
+ )
def get_starting_timestamp(self, context: dict | None) -> datetime.datetime | None:
"""Get starting replication timestamp.
@@ -308,7 +324,7 @@ def descendent_streams(self) -> list[Stream]:
Returns:
A list of all children, recursively.
"""
- result: list[Stream] = list(self.child_streams) or []
+ result: list[Stream] = [*self.child_streams]
for child in self.child_streams:
result += child.descendent_streams or []
return result
@@ -385,7 +401,7 @@ def _write_starting_replication_value(self, context: dict | None) -> None:
def get_replication_key_signpost(
self,
context: dict | None, # noqa: ARG002
- ) -> datetime.datetime | t.Any | None:
+ ) -> datetime.datetime | t.Any | None: # noqa: ANN401
"""Get the replication signpost.
For timestamp-based replication keys, this defaults to `utc_now()`. For
@@ -406,7 +422,7 @@ def get_replication_key_signpost(
return utc_now() if self.is_timestamp_replication_key else None
@property
- def schema_filepath(self) -> Path | None:
+ def schema_filepath(self) -> Path | Traversable | None:
"""Get path to schema file.
Returns:
@@ -424,7 +440,7 @@ def schema(self) -> dict:
return self._schema
@property
- def primary_keys(self) -> list[str] | None:
+ def primary_keys(self) -> t.Sequence[str] | None:
"""Get primary keys.
Returns:
@@ -433,7 +449,7 @@ def primary_keys(self) -> list[str] | None:
return self._primary_keys or []
@primary_keys.setter
- def primary_keys(self, new_value: list[str] | None) -> None:
+ def primary_keys(self, new_value: t.Sequence[str] | None) -> None:
"""Set primary key(s) for the stream.
Args:
@@ -758,7 +774,7 @@ def _write_state_message(self) -> None:
if (not self._is_state_flushed) and (
self.tap_state != self._last_emitted_state
):
- singer.write_message(singer.StateMessage(value=self.tap_state))
+ self._tap.write_message(singer.StateMessage(value=self.tap_state))
self._last_emitted_state = copy.deepcopy(self.tap_state)
self._is_state_flushed = True
@@ -786,7 +802,7 @@ def _generate_schema_messages(
def _write_schema_message(self) -> None:
"""Write out a SCHEMA message with the stream schema."""
for schema_message in self._generate_schema_messages():
- singer.write_message(schema_message)
+ self._tap.write_message(schema_message)
@property
def mask(self) -> singer.SelectionMask:
@@ -838,7 +854,7 @@ def _write_record_message(self, record: dict) -> None:
record: A single stream record.
"""
for record_message in self._generate_record_messages(record):
- singer.write_message(record_message)
+ self._tap.write_message(record_message)
self._is_state_flushed = False
@@ -853,7 +869,7 @@ def _write_batch_message(
encoding: The encoding to use for the batch.
manifest: A list of filenames for the batch.
"""
- singer.write_message(
+ self._tap.write_message(
SDKBatchMessage(
stream=self.name,
encoding=encoding,
@@ -1255,7 +1271,7 @@ def get_child_context(self, record: dict, context: dict | None) -> dict | None:
Raises:
NotImplementedError: If the stream has children but this method is not
- overriden.
+ overridden.
"""
if context is None:
for child_stream in self.child_streams:
@@ -1338,7 +1354,7 @@ def get_batches(
Yields:
A tuple of (encoding, manifest) for each batch.
"""
- batcher = JSONLinesBatcher(
+ batcher = Batcher(
tap_name=self.tap_name,
stream_name=self.name,
batch_config=batch_config,
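Streams now log under a child of the tap's logger (sinks got the same treatment in `sinks/core.py` above), so output can be filtered per stream. `Logger.getChild` is plain dotted-name composition:

```python
import logging

tap_logger = logging.getLogger("tap-example")
stream_logger = tap_logger.getChild("users")

assert stream_logger.name == "tap-example.users"
# Child loggers propagate to the parent's handlers by default, so
# configuring "tap-example" also covers every per-stream child logger.
```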
diff --git a/singer_sdk/streams/graphql.py b/singer_sdk/streams/graphql.py
index 01e5d41ee..fde4f99b9 100644
--- a/singer_sdk/streams/graphql.py
+++ b/singer_sdk/streams/graphql.py
@@ -8,8 +8,10 @@
from singer_sdk.helpers._classproperty import classproperty
from singer_sdk.streams.rest import RESTStream
+_TToken = t.TypeVar("_TToken")
-class GraphQLStream(RESTStream, metaclass=abc.ABCMeta):
+
+class GraphQLStream(RESTStream, t.Generic[_TToken], metaclass=abc.ABCMeta):
"""Abstract base class for API-type streams.
GraphQL streams inherit from the class `GraphQLStream`, which in turn inherits from
@@ -43,7 +45,7 @@ def query(self) -> str:
def prepare_request_payload(
self,
context: dict | None,
- next_page_token: t.Any | None,
+ next_page_token: _TToken | None,
) -> dict | None:
"""Prepare the data payload for the GraphQL API request.
diff --git a/singer_sdk/streams/rest.py b/singer_sdk/streams/rest.py
index 563956a5f..f8dbeadc9 100644
--- a/singer_sdk/streams/rest.py
+++ b/singer_sdk/streams/rest.py
@@ -140,7 +140,7 @@ def requests_session(self) -> requests.Session:
The `requests.Session`_ object for HTTP requests.
.. _requests.Session:
- https://requests.readthedocs.io/en/latest/api/#request-sessions
+ https://requests.readthedocs.io/en/latest/api.html#requests.Session
"""
if not self._requests_session:
self._requests_session = requests.Session()
@@ -149,7 +149,7 @@ def requests_session(self) -> requests.Session:
def validate_response(self, response: requests.Response) -> None:
"""Validate HTTP response.
- Checks for error status codes and wether they are fatal or retriable.
+ Checks for error status codes and whether they are fatal or retriable.
In case an error is deemed transient and can be safely retried, then this
method should raise an :class:`singer_sdk.exceptions.RetriableAPIError`.
@@ -175,13 +175,11 @@ def validate_response(self, response: requests.Response) -> None:
RetriableAPIError: If the request is retriable.
.. _requests.Response:
- https://requests.readthedocs.io/en/latest/api/#requests.Response
+ https://requests.readthedocs.io/en/latest/api.html#requests.Response
"""
if (
response.status_code in self.extra_retry_statuses
- or HTTPStatus.INTERNAL_SERVER_ERROR
- <= response.status_code
- <= max(HTTPStatus)
+ or response.status_code >= HTTPStatus.INTERNAL_SERVER_ERROR
):
msg = self.response_error_message(response)
raise RetriableAPIError(msg, response)
@@ -294,6 +292,7 @@ def get_url_params(
from urllib.parse import urlencode
+
class MyStream(RESTStream):
def get_url_params(self, context, next_page_token):
params = {"key": "(a,b,c)"}
@@ -330,9 +329,9 @@ def build_prepared_request(
A `requests.PreparedRequest`_ object.
.. _requests.PreparedRequest:
- https://requests.readthedocs.io/en/latest/api/#requests.PreparedRequest
+ https://requests.readthedocs.io/en/latest/api.html#requests.PreparedRequest
.. _requests.Request:
- https://requests.readthedocs.io/en/latest/api/#requests.Request
+ https://requests.readthedocs.io/en/latest/api.html#requests.Request
"""
request = requests.Request(*args, **kwargs)
self.requests_session.auth = self.authenticator
@@ -590,7 +589,7 @@ def parse_response(self, response: requests.Response) -> t.Iterable[dict]:
One item for every item found in the response.
.. _requests.Response:
- https://requests.readthedocs.io/en/latest/api/#requests.Response
+ https://requests.readthedocs.io/en/latest/api.html#requests.Response
"""
yield from extract_jsonpath(self.records_jsonpath, input=response.json())
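The retry predicate no longer brackets the status between 500 and `max(HTTPStatus)`; anything at or above 500, plus any code listed in `extra_retry_statuses`, is treated as retriable. The predicate in isolation:

```python
from http import HTTPStatus

# 429 is the SDK's default extra retry status.
extra_retry_statuses = {HTTPStatus.TOO_MANY_REQUESTS}


def is_retriable(status_code: int) -> bool:
    """True for configured extras and for every status of 500 or higher."""
    return (
        status_code in extra_retry_statuses
        or status_code >= HTTPStatus.INTERNAL_SERVER_ERROR
    )


assert is_retriable(429) and is_retriable(503) and not is_retriable(404)
```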
diff --git a/singer_sdk/streams/sql.py b/singer_sdk/streams/sql.py
index d5fb52219..48e67d63f 100644
--- a/singer_sdk/streams/sql.py
+++ b/singer_sdk/streams/sql.py
@@ -5,6 +5,8 @@
import abc
import typing as t
+import sqlalchemy as sa
+
import singer_sdk.helpers._catalog as catalog
from singer_sdk._singerlib import CatalogEntry, MetadataMapping
from singer_sdk.connectors import SQLConnector
@@ -20,6 +22,9 @@ class SQLStream(Stream, metaclass=abc.ABCMeta):
connector_class = SQLConnector
_cached_schema: dict | None = None
+ supports_nulls_first: bool = False
+ """Whether the database supports the NULLS FIRST/LAST syntax."""
+
def __init__(
self,
tap: Tap,
@@ -51,7 +56,7 @@ def _singer_catalog_entry(self) -> CatalogEntry:
Returns:
A CatalogEntry object.
"""
- return t.cast(CatalogEntry, CatalogEntry.from_dict(self.catalog_entry))
+ return CatalogEntry.from_dict(self.catalog_entry)
@property
def connector(self) -> SQLConnector:
@@ -105,7 +110,7 @@ def tap_stream_id(self) -> str:
return self._singer_catalog_entry.tap_stream_id
@property
- def primary_keys(self) -> list[str] | None:
+ def primary_keys(self) -> t.Sequence[str] | None:
"""Get primary keys from the catalog entry definition.
Returns:
@@ -114,7 +119,7 @@ def primary_keys(self) -> list[str] | None:
return self._singer_catalog_entry.metadata.root.table_key_properties or []
@primary_keys.setter
- def primary_keys(self, new_value: list[str]) -> None:
+ def primary_keys(self, new_value: t.Sequence[str]) -> None:
"""Set or reset the primary key(s) in the stream's catalog entry.
Args:
@@ -189,7 +194,12 @@ def get_records(self, context: dict | None) -> t.Iterable[dict[str, t.Any]]:
if self.replication_key:
replication_key_col = table.columns[self.replication_key]
- query = query.order_by(replication_key_col)
+ order_by = (
+ sa.nulls_first(replication_key_col.asc())
+ if self.supports_nulls_first
+ else replication_key_col.asc()
+ )
+ query = query.order_by(order_by)
start_val = self.get_starting_replication_key_value(context)
if start_val:
@@ -202,13 +212,27 @@ def get_records(self, context: dict | None) -> t.Iterable[dict[str, t.Any]]:
# processed.
query = query.limit(self.ABORT_AT_RECORD_COUNT + 1)
- with self.connector._connect() as conn:
- for record in conn.execute(query):
- transformed_record = self.post_process(dict(record._mapping))
+ with self.connector._connect() as conn: # noqa: SLF001
+ for record in conn.execute(query).mappings():
+ # TODO: Standardize record mapping type
+ # https://github.com/meltano/sdk/issues/2096
+ transformed_record = self.post_process(dict(record))
if transformed_record is None:
# Record filtered out during post_process()
continue
yield transformed_record
+ @property
+ def is_sorted(self) -> bool:
+ """Expect stream to be sorted.
+
+ When `True`, incremental streams will attempt to resume if unexpectedly
+ interrupted.
+
+ Returns:
+ `True` if stream is sorted. Defaults to `False`.
+ """
+ return self.replication_key is not None
+
__all__ = ["SQLStream", "SQLConnector"]
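With `supports_nulls_first` enabled, the incremental query orders the replication key ascending with NULLS FIRST, so rows with a null bookmark are emitted before any real values rather than last (the ASC default on some databases). A sketch of what the flag toggles, in SQLAlchemy core:

```python
import sqlalchemy as sa

metadata = sa.MetaData()
users = sa.Table(
    "users",
    metadata,
    sa.Column("id", sa.Integer),
    sa.Column("updated_at", sa.DateTime),
)

replication_key_col = users.c.updated_at
supports_nulls_first = True

order_by = (
    sa.nulls_first(replication_key_col.asc())
    if supports_nulls_first
    else replication_key_col.asc()
)
query = sa.select(users).order_by(order_by)
# Renders roughly: SELECT ... FROM users ORDER BY users.updated_at ASC NULLS FIRST
```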
diff --git a/singer_sdk/tap_base.py b/singer_sdk/tap_base.py
index f00b6b4bc..679d22fa8 100644
--- a/singer_sdk/tap_base.py
+++ b/singer_sdk/tap_base.py
@@ -11,9 +11,13 @@
import click
-from singer_sdk._singerlib import Catalog, StateMessage, write_message
+from singer_sdk._singerlib import Catalog, StateMessage
from singer_sdk.configuration._dict_config import merge_missing_config_jsonschema
-from singer_sdk.exceptions import AbortedSyncFailedException, AbortedSyncPausedException
+from singer_sdk.exceptions import (
+ AbortedSyncFailedException,
+ AbortedSyncPausedException,
+ ConfigValidationError,
+)
from singer_sdk.helpers import _state
from singer_sdk.helpers._classproperty import classproperty
from singer_sdk.helpers._compat import final
@@ -25,11 +29,13 @@
PluginCapabilities,
TapCapabilities,
)
+from singer_sdk.io_base import SingerWriter
from singer_sdk.plugin_base import PluginBase
if t.TYPE_CHECKING:
from pathlib import PurePath
+ from singer_sdk.connectors import SQLConnector
from singer_sdk.mapper import PluginMapper
from singer_sdk.streams import SQLStream, Stream
@@ -44,7 +50,7 @@ class CliTestOptionValue(Enum):
Disabled = "disabled"
-class Tap(PluginBase, metaclass=abc.ABCMeta):
+class Tap(PluginBase, SingerWriter, metaclass=abc.ABCMeta):
"""Abstract base class for taps.
The Tap class governs configuration, validation, and stream discovery for tap
@@ -124,10 +130,10 @@ def streams(self) -> dict[str, Stream]:
Returns:
A mapping of names to streams, using discovery or a provided catalog.
"""
- input_catalog = self.input_catalog
-
if self._streams is None:
self._streams = {}
+ input_catalog = self.input_catalog
+
for stream in self.load_streams():
if input_catalog is not None:
stream.apply_catalog(input_catalog)
@@ -278,7 +284,7 @@ def write_schemas(self) -> None:
"""Write a SCHEMA message for all known streams to STDOUT."""
for stream in self.streams.values():
stream.selected = True
- stream._write_schema_message()
+ stream._write_schema_message() # noqa: SLF001
# Stream detection:
@@ -318,7 +324,7 @@ def _singer_catalog(self) -> Catalog:
:class:`singer_sdk._singerlib.Catalog`.
"""
return Catalog(
- (stream.tap_stream_id, stream._singer_catalog_entry)
+ (stream.tap_stream_id, stream._singer_catalog_entry) # noqa: SLF001
for stream in self.streams.values()
)
@@ -411,7 +417,7 @@ def load_state(self, state: dict[str, t.Any]) -> None:
def _reset_state_progress_markers(self) -> None:
"""Clear prior jobs' progress markers at beginning of sync."""
- for _, state in self.state.get("bookmarks", {}).items():
+ for state in self.state.get("bookmarks", {}).values():
_state.reset_state_progress_markers(state)
for partition_state in state.get("partitions", []):
_state.reset_state_progress_markers(partition_state)
@@ -442,7 +448,7 @@ def sync_all(self) -> None:
"""Sync all streams."""
self._reset_state_progress_markers()
self._set_compatible_replication_methods()
- write_message(StateMessage(value=self.state))
+ self.write_message(StateMessage(value=self.state))
stream: Stream
for stream in self.streams.values():
@@ -522,12 +528,17 @@ def cb_discover(
config_args = ctx.params.get("config", ())
config_files, parse_env_config = cls.config_from_cli_args(*config_args)
- tap = cls(
- config=config_files, # type: ignore[arg-type]
- parse_env_config=parse_env_config,
- validate_config=cls.dynamic_catalog,
- setup_mapper=False,
- )
+ try:
+ tap = cls(
+ config=config_files, # type: ignore[arg-type]
+ parse_env_config=parse_env_config,
+ validate_config=cls.dynamic_catalog,
+ setup_mapper=False,
+ )
+ except ConfigValidationError as exc:
+ for error in exc.errors:
+ cls.logger.error("Config validation error: %s", error)
+ ctx.exit(1)
tap.run_discovery()
ctx.exit()
@@ -617,37 +628,32 @@ class SQLTap(Tap):
default_stream_class: type[SQLStream]
dynamic_catalog: bool = True
- def __init__(
- self,
- *,
- config: dict | PurePath | str | list[PurePath | str] | None = None,
- catalog: PurePath | str | dict | None = None,
- state: PurePath | str | dict | None = None,
- parse_env_config: bool = False,
- validate_config: bool = True,
- ) -> None:
+ _tap_connector: SQLConnector | None = None
+
+ def __init__(self, *args: t.Any, **kwargs: t.Any) -> None:
"""Initialize the SQL tap.
The SQLTap initializer additionally creates a cache variable for _catalog_dict.
Args:
- config: Tap configuration. Can be a dictionary, a single path to a
- configuration file, or a list of paths to multiple configuration
- files.
- catalog: Tap catalog. Can be a dictionary or a path to the catalog file.
- state: Tap state. Can be dictionary or a path to the state file.
- parse_env_config: Whether to look for configuration values in environment
- variables.
- validate_config: True to require validation of config settings.
+ *args: Positional arguments for the Tap initializer.
+ **kwargs: Keyword arguments for the Tap initializer.
"""
self._catalog_dict: dict | None = None
- super().__init__(
- config=config,
- catalog=catalog,
- state=state,
- parse_env_config=parse_env_config,
- validate_config=validate_config,
- )
+ super().__init__(*args, **kwargs)
+
+ @property
+ def tap_connector(self) -> SQLConnector:
+ """The connector object.
+
+ Returns:
+ The connector object.
+ """
+ if self._tap_connector is None:
+ self._tap_connector = self.default_stream_class.connector_class(
+ dict(self.config),
+ )
+ return self._tap_connector
@property
def catalog_dict(self) -> dict:
@@ -662,7 +668,7 @@ def catalog_dict(self) -> dict:
if self.input_catalog:
return self.input_catalog.to_dict()
- connector = self.default_stream_class.connector_class(dict(self.config))
+ connector = self.tap_connector
result: dict[str, list[dict]] = {"streams": []}
result["streams"].extend(connector.discover_catalog_entries())
@@ -670,14 +676,17 @@ def catalog_dict(self) -> dict:
self._catalog_dict = result
return self._catalog_dict
- def discover_streams(self) -> list[Stream]:
- """Initialize all available streams and return them as a list.
+ def discover_streams(self) -> t.Sequence[Stream]:
+ """Initialize all available streams and return them as a sequence.
Returns:
- List of discovered Stream objects.
+ A sequence of discovered Stream objects.
"""
- result: list[Stream] = []
- for catalog_entry in self.catalog_dict["streams"]:
- result.append(self.default_stream_class(self, catalog_entry))
-
- return result
+ return [
+ self.default_stream_class(
+ tap=self,
+ catalog_entry=catalog_entry,
+ connector=self.tap_connector,
+ )
+ for catalog_entry in self.catalog_dict["streams"]
+ ]
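`SQLTap` now builds its `SQLConnector` once and shares it between catalog discovery and every stream, instead of instantiating a fresh connector (and hence a fresh engine) per `catalog_dict` access. The caching is a plain lazy property; a standalone sketch using a stand-in connector class:

```python
from __future__ import annotations


class Connector:
    """Stand-in for SQLConnector; the real one owns an engine/connection pool."""

    def __init__(self, config: dict) -> None:
        self.config = config


class SQLTapSketch:
    _tap_connector: Connector | None = None

    def __init__(self, config: dict) -> None:
        self.config = config

    @property
    def tap_connector(self) -> Connector:
        # Built on first access, then reused by discovery and all streams.
        if self._tap_connector is None:
            self._tap_connector = Connector(dict(self.config))
        return self._tap_connector


tap = SQLTapSketch({"sqlalchemy_url": "sqlite://"})
assert tap.tap_connector is tap.tap_connector  # same instance every time
```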
diff --git a/singer_sdk/target_base.py b/singer_sdk/target_base.py
index d62bbbfd8..01aed9891 100644
--- a/singer_sdk/target_base.py
+++ b/singer_sdk/target_base.py
@@ -17,6 +17,9 @@
from singer_sdk.helpers._classproperty import classproperty
from singer_sdk.helpers._compat import final
from singer_sdk.helpers.capabilities import (
+ ADD_RECORD_METADATA_CONFIG,
+ BATCH_CONFIG,
+ TARGET_LOAD_METHOD_CONFIG,
TARGET_SCHEMA_CONFIG,
CapabilitiesEnum,
PluginCapabilities,
@@ -28,8 +31,9 @@
if t.TYPE_CHECKING:
from pathlib import PurePath
+ from singer_sdk.connectors import SQLConnector
from singer_sdk.mapper import PluginMapper
- from singer_sdk.sinks import Sink
+ from singer_sdk.sinks import Sink, SQLSink
_MAX_PARALLELISM = 8
@@ -48,7 +52,7 @@ class Target(PluginBase, SingerReader, metaclass=abc.ABCMeta):
# Default class to use for creating new sink objects.
# Required if `Target.get_sink_class()` is not defined.
- default_sink_class: type[Sink] | None = None
+ default_sink_class: type[Sink]
def __init__(
self,
@@ -133,7 +137,7 @@ def get_sink(
*,
record: dict | None = None,
schema: dict | None = None,
- key_properties: list[str] | None = None,
+ key_properties: t.Sequence[str] | None = None,
) -> Sink:
"""Return a sink for the given stream name.
@@ -222,7 +226,7 @@ def add_sink(
self,
stream_name: str,
schema: dict,
- key_properties: list[str] | None = None,
+ key_properties: t.Sequence[str] | None = None,
) -> Sink:
"""Create a sink and register it.
@@ -328,23 +332,23 @@ def _process_record_message(self, message_dict: dict) -> None:
continue
sink = self.get_sink(stream_map.stream_alias, record=transformed_record)
- context = sink._get_context(transformed_record)
+ context = sink._get_context(transformed_record) # noqa: SLF001
if sink.include_sdc_metadata_properties:
- sink._add_sdc_metadata_to_record(
+ sink._add_sdc_metadata_to_record( # noqa: SLF001
transformed_record,
message_dict,
context,
)
else:
- sink._remove_sdc_metadata_from_record(transformed_record)
+ sink._remove_sdc_metadata_from_record(transformed_record) # noqa: SLF001
- sink._validate_and_parse(transformed_record)
+ sink._validate_and_parse(transformed_record) # noqa: SLF001
transformed_record = sink.preprocess_record(transformed_record, context)
- sink._singer_validate_message(transformed_record)
+ sink._singer_validate_message(transformed_record) # noqa: SLF001
sink.tally_record_read()
sink.process_record(transformed_record, context)
- sink._after_process_record(context)
+ sink._after_process_record(context) # noqa: SLF001
if sink.is_full:
self.logger.info(
@@ -366,7 +370,7 @@ def _process_schema_message(self, message_dict: dict) -> None:
stream_name = message_dict["stream"]
schema = message_dict["schema"]
- key_properties = message_dict.get("key_properties", None)
+ key_properties = message_dict.get("key_properties")
do_registration = False
if stream_name not in self.mapper.stream_maps:
do_registration = True
@@ -570,10 +574,60 @@ def get_singer_command(cls: type[Target]) -> click.Command:
return command
+ @classmethod
+ def append_builtin_config(cls: type[Target], config_jsonschema: dict) -> None:
+ """Appends built-in config to `config_jsonschema` if not already set.
+
+ To customize or disable this behavior, developers may either override this class
+ method or override the `capabilities` property to disabled any unwanted
+ built-in capabilities.
+
+ For all but the most advanced use cases, we recommend leaving these
+ implementations "as-is", since this provides the most choice to users and is
+ the most "future proof" in terms of taking advantage of built-in capabilities
+ which may be added in the future.
+
+ Args:
+ config_jsonschema: The config JSON schema to which built-in settings are appended.
+ """
+
+ def _merge_missing(source_jsonschema: dict, target_jsonschema: dict) -> None:
+ # Append any missing properties in the target with those from source.
+ for k, v in source_jsonschema["properties"].items():
+ if k not in target_jsonschema["properties"]:
+ target_jsonschema["properties"][k] = v
+
+ _merge_missing(ADD_RECORD_METADATA_CONFIG, config_jsonschema)
+ _merge_missing(TARGET_LOAD_METHOD_CONFIG, config_jsonschema)
+
+ capabilities = cls.capabilities
+
+ if PluginCapabilities.BATCH in capabilities:
+ _merge_missing(BATCH_CONFIG, config_jsonschema)
+
+ super().append_builtin_config(config_jsonschema)
+
class SQLTarget(Target):
"""Target implementation for SQL destinations."""
+ _target_connector: SQLConnector | None = None
+
+ default_sink_class: type[SQLSink]
+
+ @property
+ def target_connector(self) -> SQLConnector:
+ """The connector object.
+
+ Returns:
+ The connector object.
+ """
+ if self._target_connector is None:
+ self._target_connector = self.default_sink_class.connector_class(
+ dict(self.config),
+ )
+ return self._target_connector
+
@classproperty
def capabilities(self) -> list[CapabilitiesEnum]:
"""Get target capabilities.
@@ -616,4 +670,113 @@ def _merge_missing(source_jsonschema: dict, target_jsonschema: dict) -> None:
super().append_builtin_config(config_jsonschema)
- pass
+ @final
+ def add_sqlsink(
+ self,
+ stream_name: str,
+ schema: dict,
+ key_properties: t.Sequence[str] | None = None,
+ ) -> Sink:
+ """Create a sink and register it.
+
+ This method is internal to the SDK and should not need to be overridden.
+
+ Args:
+ stream_name: Name of the stream.
+ schema: Schema of the stream.
+ key_properties: Primary key of the stream.
+
+ Returns:
+ A new sink for the stream.
+ """
+ self.logger.info("Initializing '%s' target sink...", self.name)
+ sink_class = self.get_sink_class(stream_name=stream_name)
+ sink = sink_class(
+ target=self,
+ stream_name=stream_name,
+ schema=schema,
+ key_properties=key_properties,
+ connector=self.target_connector,
+ )
+ sink.setup()
+ self._sinks_active[stream_name] = sink
+
+ return sink
+
+ def get_sink_class(self, stream_name: str) -> type[SQLSink]:
+ """Get sink for a stream.
+
+ Developers can override this method to return a custom Sink type depending
+ on the value of `stream_name`. Optional when `default_sink_class` is set.
+
+ Args:
+ stream_name: Name of the stream.
+
+ Raises:
+ ValueError: If no :class:`singer_sdk.sinks.Sink` class is defined.
+
+ Returns:
+ The sink class to be used with the stream.
+ """
+ if self.default_sink_class:
+ return self.default_sink_class
+
+ msg = (
+ f"No sink class defined for '{stream_name}' and no default sink class "
+ "available."
+ )
+ raise ValueError(msg)
+
+ def get_sink(
+ self,
+ stream_name: str,
+ *,
+ record: dict | None = None,
+ schema: dict | None = None,
+ key_properties: t.Sequence[str] | None = None,
+ ) -> Sink:
+ """Return a sink for the given stream name.
+
+ A new sink will be created if `schema` is provided and if either `schema` or
+ `key_properties` has changed. If so, the old sink becomes archived and held
+ until the next drain_all() operation.
+
+ Developers only need to override this method if they want to provide a different
+ sink depending on the values within the `record` object. Otherwise, please see
+ the `default_sink_class` property and/or the `get_sink_class()` method.
+
+ Raises :class:`singer_sdk.exceptions.RecordsWithoutSchemaException` if sink does
+ not exist and schema is not sent.
+
+ Args:
+ stream_name: Name of the stream.
+ record: Record being processed.
+ schema: Stream schema.
+ key_properties: Primary key of the stream.
+
+ Returns:
+ The sink used for this target.
+ """
+ _ = record # Custom implementations may use record in sink selection.
+ if schema is None:
+ self._assert_sink_exists(stream_name)
+ return self._sinks_active[stream_name]
+
+ existing_sink = self._sinks_active.get(stream_name, None)
+ if not existing_sink:
+ return self.add_sqlsink(stream_name, schema, key_properties)
+
+ if (
+ existing_sink.schema != schema
+ or existing_sink.key_properties != key_properties
+ ):
+ self.logger.info(
+ "Schema or key properties for '%s' stream have changed. "
+ "Initializing a new '%s' sink...",
+ stream_name,
+ stream_name,
+ )
+ self._sinks_to_clear.append(self._sinks_active.pop(stream_name))
+ return self.add_sqlsink(stream_name, schema, key_properties)
+
+ return existing_sink
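The `_merge_missing` helper used by `append_builtin_config` copies a built-in setting into the plugin's config schema only when the key is absent, so developer-declared settings always win. A tiny self-contained sketch (the schema below is illustrative, not the SDK's actual `ADD_RECORD_METADATA_CONFIG`):

```python
def merge_missing(source_jsonschema: dict, target_jsonschema: dict) -> None:
    """Copy properties from source that the target does not define yet."""
    for k, v in source_jsonschema["properties"].items():
        if k not in target_jsonschema["properties"]:
            target_jsonschema["properties"][k] = v


builtin = {"properties": {"add_record_metadata": {"type": "boolean"}}}
config_schema = {
    "properties": {"add_record_metadata": {"type": "boolean", "default": True}},
}

merge_missing(builtin, config_schema)
# The developer's definition wins because the key was already present.
assert config_schema["properties"]["add_record_metadata"]["default"] is True
```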
diff --git a/singer_sdk/testing/__init__.py b/singer_sdk/testing/__init__.py
index 24ce4ac9f..83ca9aacc 100644
--- a/singer_sdk/testing/__init__.py
+++ b/singer_sdk/testing/__init__.py
@@ -2,13 +2,14 @@
from __future__ import annotations
+import typing as t
+import warnings
+
from .config import SuiteConfig
from .factory import get_tap_test_class, get_target_test_class
from .legacy import (
_get_tap_catalog,
_select_all,
- get_standard_tap_tests,
- get_standard_target_tests,
sync_end_to_end,
tap_sync_test,
tap_to_target_sync_test,
@@ -16,13 +17,42 @@
)
from .runners import SingerTestRunner, TapTestRunner, TargetTestRunner
+
+def __getattr__(name: str) -> t.Any: # noqa: ANN401
+ if name == "get_standard_tap_tests":
+ warnings.warn(
+ "The function singer_sdk.testing.get_standard_tap_tests is deprecated "
+ "and will be removed in a future release. Use get_tap_test_class instead.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+
+ from .legacy import get_standard_tap_tests
+
+ return get_standard_tap_tests
+
+ if name == "get_standard_target_tests":
+ warnings.warn(
+ "The function singer_sdk.testing.get_standard_target_tests is deprecated "
+ "and will be removed in a future release. Use get_target_test_class "
+ "instead.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+
+ from .legacy import get_standard_target_tests
+
+ return get_standard_target_tests
+
+ msg = f"module {__name__} has no attribute {name}"
+ raise AttributeError(msg)
+
+
__all__ = [
"get_tap_test_class",
"get_target_test_class",
"_get_tap_catalog",
"_select_all",
- "get_standard_tap_tests",
- "get_standard_target_tests",
"sync_end_to_end",
"tap_sync_test",
"tap_to_target_sync_test",
diff --git a/singer_sdk/testing/factory.py b/singer_sdk/testing/factory.py
index c7611955f..30740b6ef 100644
--- a/singer_sdk/testing/factory.py
+++ b/singer_sdk/testing/factory.py
@@ -8,6 +8,7 @@
from .config import SuiteConfig
from .runners import TapTestRunner, TargetTestRunner
from .suites import (
+ TestSuite,
tap_stream_attribute_tests,
tap_stream_tests,
tap_tests,
@@ -15,14 +16,31 @@
)
if t.TYPE_CHECKING:
- from singer_sdk import Tap, Target
+ from singer_sdk import Stream, Tap, Target
+ from singer_sdk.testing.templates import (
+ AttributeTestTemplate,
+ StreamTestTemplate,
+ TapTestTemplate,
+ )
class BaseTestClass:
"""Base test class."""
- params: t.ClassVar[dict] = {}
- param_ids: t.ClassVar[dict] = {}
+ params: dict[str, t.Any]
+ param_ids: dict[str, list[str]]
+
+ def __init_subclass__(cls, **kwargs: t.Any) -> None:
+ """Initialize a subclass.
+
+ Args:
+ **kwargs: Keyword arguments.
+ """
+ # Add empty params and param_ids attributes to a direct subclass but not to
+ # subclasses of subclasses
+ if cls.__base__ == BaseTestClass:
+ cls.params = {}
+ cls.param_ids = {}
class TapTestClassFactory:
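`params` and `param_ids` were previously `ClassVar` dicts shared by every generated test class, so parametrization from one suite could leak into another. The `__init_subclass__` hook above gives each direct subclass its own fresh dicts. A compact sketch of the hook's effect:

```python
from __future__ import annotations

import typing as t


class BaseTestClass:
    params: dict[str, t.Any]
    param_ids: dict[str, list[str]]

    def __init_subclass__(cls, **kwargs: t.Any) -> None:
        super().__init_subclass__(**kwargs)
        # Fresh dicts for direct subclasses only; deeper subclasses keep
        # sharing their parent's dicts via normal attribute lookup.
        if cls.__base__ == BaseTestClass:
            cls.params = {}
            cls.param_ids = {}


class TapTests(BaseTestClass): ...


class TargetTests(BaseTestClass): ...


TapTests.params["x"] = [1]
assert "x" not in TargetTests.params  # no shared mutable state
```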
@@ -132,7 +150,7 @@ def runner(self) -> TapTestRunner | TargetTestRunner:
return TapTestClass
- def _annotate_test_class( # noqa: C901
+ def _annotate_test_class(
self,
empty_test_class: type[BaseTestClass],
test_suites: list,
@@ -150,81 +168,101 @@ def _annotate_test_class( # noqa: C901
"""
for suite in test_suites:
if suite.kind == "tap":
- for test_class in suite.tests:
- test = test_class()
- test_name = f"test_{suite.kind}_{test.name}"
- setattr(empty_test_class, test_name, test.run)
+ self._with_tap_tests(empty_test_class, suite)
if suite.kind in {"tap_stream", "tap_stream_attribute"}:
streams = list(test_runner.new_tap().streams.values())
if suite.kind == "tap_stream":
- params = [
+ self._with_stream_tests(empty_test_class, suite, streams)
+
+ if suite.kind == "tap_stream_attribute":
+ self._with_stream_attribute_tests(empty_test_class, suite, streams)
+
+ return empty_test_class
+
+ def _with_tap_tests(
+ self,
+ empty_test_class: type[BaseTestClass],
+ suite: TestSuite[TapTestTemplate],
+ ) -> None:
+ for test_class in suite.tests:
+ test = test_class()
+ test_name = f"test_{suite.kind}_{test.name}"
+ setattr(empty_test_class, test_name, test.run)
+
+ def _with_stream_tests(
+ self,
+ empty_test_class: type[BaseTestClass],
+ suite: TestSuite[StreamTestTemplate],
+ streams: list[Stream],
+ ) -> None:
+ params = [
+ {
+ "stream": stream,
+ }
+ for stream in streams
+ ]
+ param_ids = [stream.name for stream in streams]
+
+ for test_class in suite.tests:
+ test = test_class()
+ test_name = f"test_{suite.kind}_{test.name}"
+ setattr(
+ empty_test_class,
+ test_name,
+ test.run,
+ )
+ empty_test_class.params[test_name] = params
+ empty_test_class.param_ids[test_name] = param_ids
+
+ def _with_stream_attribute_tests(
+ self,
+ empty_test_class: type[BaseTestClass],
+ suite: TestSuite[AttributeTestTemplate],
+ streams: list[Stream],
+ ) -> None:
+ for test_class in suite.tests:
+ test = test_class()
+ test_name = f"test_{suite.kind}_{test.name}"
+ test_params = []
+ test_ids: list[str] = []
+ for stream in streams:
+ final_schema = stream.stream_maps[-1].transformed_schema["properties"]
+ test_params.extend(
+ [
{
"stream": stream,
+ "attribute_name": prop_name,
}
- for stream in streams
- ]
- param_ids = [stream.name for stream in streams]
-
- for test_class in suite.tests:
- test = test_class()
- test_name = f"test_{suite.kind}_{test.name}"
- setattr(
- empty_test_class,
- test_name,
- test.run,
+ for prop_name, prop_schema in final_schema.items()
+ if test_class.evaluate(
+ stream=stream,
+ property_name=prop_name,
+ property_schema=prop_schema,
)
- empty_test_class.params[test_name] = params
- empty_test_class.param_ids[test_name] = param_ids
-
- if suite.kind == "tap_stream_attribute":
- for test_class in suite.tests:
- test = test_class()
- test_name = f"test_{suite.kind}_{test.name}"
- test_params = []
- test_ids = []
- for stream in streams:
- test_params.extend(
- [
- {
- "stream": stream,
- "attribute_name": property_name,
- }
- for property_name, property_schema in stream.schema[
- "properties"
- ].items()
- if test_class.evaluate(
- stream=stream,
- property_name=property_name,
- property_schema=property_schema,
- )
- ],
- )
- test_ids.extend(
- [
- f"{stream.name}.{property_name}"
- for property_name, property_schema in stream.schema[
- "properties"
- ].items()
- if test_class.evaluate(
- stream=stream,
- property_name=property_name,
- property_schema=property_schema,
- )
- ],
- )
-
- if test_params:
- setattr(
- empty_test_class,
- test_name,
- test.run,
- )
- empty_test_class.params[test_name] = test_params
- empty_test_class.param_ids[test_name] = test_ids
+ ],
+ )
+ test_ids.extend(
+ [
+ f"{stream.name}.{prop_name}"
+ for prop_name, prop_schema in final_schema.items()
+ if test_class.evaluate(
+ stream=stream,
+ property_name=prop_name,
+ property_schema=prop_schema,
+ )
+ ],
+ )
- return empty_test_class
+ if test_params:
+ setattr(
+ empty_test_class,
+ test_name,
+ test.run,
+ )
+ empty_test_class.params[test_name] = test_params
+ empty_test_class.param_ids[test_name] = test_ids
class TargetTestClassFactory:
diff --git a/singer_sdk/testing/legacy.py b/singer_sdk/testing/legacy.py
index 5baa94034..a47d3e770 100644
--- a/singer_sdk/testing/legacy.py
+++ b/singer_sdk/testing/legacy.py
@@ -150,10 +150,7 @@ def _get_tap_catalog(
# Test discovery
tap.run_discovery()
catalog_dict = tap.catalog_dict
- if select_all:
- return _select_all(catalog_dict)
-
- return catalog_dict
+ return _select_all(catalog_dict) if select_all else catalog_dict
def _select_all(catalog_dict: dict) -> dict:
@@ -194,9 +191,9 @@ def target_sync_test(
with redirect_stdout(stdout_buf), redirect_stderr(stderr_buf):
if input is not None:
- target._process_lines(input)
+ target._process_lines(input) # noqa: SLF001
if finalize:
- target._process_endofpipe()
+ target._process_endofpipe() # noqa: SLF001
stdout_buf.seek(0)
stderr_buf.seek(0)
diff --git a/singer_sdk/testing/runners.py b/singer_sdk/testing/runners.py
index 96416de95..c5ec10a10 100644
--- a/singer_sdk/testing/runners.py
+++ b/singer_sdk/testing/runners.py
@@ -8,11 +8,15 @@
import typing as t
from collections import defaultdict
from contextlib import redirect_stderr, redirect_stdout
-from pathlib import Path
from singer_sdk import Tap, Target
from singer_sdk.testing.config import SuiteConfig
+if t.TYPE_CHECKING:
+ from pathlib import Path
+
+ from singer_sdk.helpers._compat import Traversable
+
class SingerTestRunner(metaclass=abc.ABCMeta):
"""Base Singer Test Runner."""
@@ -197,7 +201,7 @@ def __init__(
target_class: type[Target],
config: dict | None = None,
suite_config: SuiteConfig | None = None,
- input_filepath: Path | None = None,
+ input_filepath: Path | Traversable | None = None,
input_io: io.StringIO | None = None,
**kwargs: t.Any,
) -> None:
@@ -242,7 +246,7 @@ def target_input(self) -> t.IO[str]:
if self.input_io:
self._input = self.input_io
elif self.input_filepath:
- self._input = Path(self.input_filepath).open(encoding="utf8")
+ self._input = self.input_filepath.open(encoding="utf8")
return t.cast(t.IO[str], self._input)
@target_input.setter
@@ -295,9 +299,9 @@ def _execute_sync(
with redirect_stdout(stdout_buf), redirect_stderr(stderr_buf):
if target_input is not None:
- target._process_lines(target_input)
+ target._process_lines(target_input) # noqa: SLF001
if finalize:
- target._process_endofpipe()
+ target._process_endofpipe() # noqa: SLF001
stdout_buf.seek(0)
stderr_buf.seek(0)
diff --git a/singer_sdk/testing/suites.py b/singer_sdk/testing/suites.py
index 0f8a9fabe..df93c86d2 100644
--- a/singer_sdk/testing/suites.py
+++ b/singer_sdk/testing/suites.py
@@ -37,21 +37,22 @@
TargetOptionalAttributes,
TargetRecordBeforeSchemaTest,
TargetRecordMissingKeyProperty,
+ TargetRecordMissingOptionalFields,
TargetSchemaNoProperties,
TargetSchemaUpdates,
TargetSpecialCharsInAttributes,
)
+from .templates import TestTemplate
-if t.TYPE_CHECKING:
- from .templates import TapTestTemplate, TargetTestTemplate, TestTemplate
+T = t.TypeVar("T", bound=TestTemplate)
@dataclass
-class TestSuite:
+class TestSuite(t.Generic[T]):
"""Test Suite container class."""
kind: str
- tests: list[type[TestTemplate] | type[TapTestTemplate] | type[TargetTestTemplate]]
+ tests: list[type[T]]
# Tap Test Suites
@@ -103,6 +104,7 @@ class TestSuite:
TargetOptionalAttributes,
TargetRecordBeforeSchemaTest,
TargetRecordMissingKeyProperty,
+ TargetRecordMissingOptionalFields,
TargetSchemaNoProperties,
TargetSchemaUpdates,
TargetSpecialCharsInAttributes,
diff --git a/singer_sdk/testing/tap_tests.py b/singer_sdk/testing/tap_tests.py
index a95720d57..0ce51db10 100644
--- a/singer_sdk/testing/tap_tests.py
+++ b/singer_sdk/testing/tap_tests.py
@@ -5,11 +5,11 @@
import typing as t
import warnings
-from dateutil import parser
from jsonschema import Draft7Validator
import singer_sdk.helpers._typing as th
from singer_sdk import Tap
+from singer_sdk.helpers._compat import datetime_fromisoformat
from .templates import AttributeTestTemplate, StreamTestTemplate, TapTestTemplate
@@ -93,16 +93,20 @@ def test(self) -> None:
class StreamCatalogSchemaMatchesRecordTest(StreamTestTemplate):
"""Test all attributes in the catalog schema are present in the record schema."""
- name = "catalog_schema_matches_record"
+ name = "transformed_catalog_schema_matches_record"
def test(self) -> None:
"""Run test."""
- stream_catalog_keys = set(self.stream.schema["properties"].keys())
+ stream_transformed_keys = set(
+ self.stream.stream_maps[-1].transformed_schema["properties"].keys(),
+ )
stream_record_keys = set().union(*(d.keys() for d in self.stream_records))
- diff = stream_catalog_keys - stream_record_keys
+ diff = stream_transformed_keys - stream_record_keys
if diff:
warnings.warn(
- UserWarning(f"Fields in catalog but not in records: ({diff})"),
+ UserWarning(
+ f"Fields in transformed catalog but not in records: ({diff})",
+ ),
stacklevel=2,
)
@@ -110,14 +114,16 @@ def test(self) -> None:
class StreamRecordSchemaMatchesCatalogTest(StreamTestTemplate):
"""Test all attributes in the record schema are present in the catalog schema."""
- name = "record_schema_matches_catalog"
+ name = "record_schema_matches_transformed_catalog"
def test(self) -> None:
"""Run test."""
- stream_catalog_keys = set(self.stream.schema["properties"].keys())
+ stream_transformed_keys = set(
+ self.stream.stream_maps[-1].transformed_schema["properties"].keys(),
+ )
stream_record_keys = set().union(*(d.keys() for d in self.stream_records))
- diff = stream_record_keys - stream_catalog_keys
- assert not diff, f"Fields in records but not in catalog: ({diff})"
+ diff = stream_record_keys - stream_transformed_keys
+ assert not diff, f"Fields in records but not in transformed catalog: ({diff})"
class StreamRecordMatchesStreamSchema(StreamTestTemplate):
@@ -185,12 +191,12 @@ def test(self) -> None:
Raises:
AssertionError: if value cannot be parsed as a datetime.
"""
- for v in self.non_null_attribute_values:
- try:
+ try:
+ for v in self.non_null_attribute_values:
error_message = f"Unable to parse value ('{v}') with datetime parser."
- assert parser.parse(v), error_message
- except parser.ParserError as e:
- raise AssertionError(error_message) from e
+ assert datetime_fromisoformat(v), error_message
+ except ValueError as e:
+ raise AssertionError(error_message) from e
@classmethod
def evaluate(
diff --git a/singer_sdk/testing/target_test_streams/__init__.py b/singer_sdk/testing/target_test_streams/__init__.py
new file mode 100644
index 000000000..14d313288
--- /dev/null
+++ b/singer_sdk/testing/target_test_streams/__init__.py
@@ -0,0 +1 @@
+"""Singer output samples, used for testing target behavior."""
diff --git a/singer_sdk/testing/target_test_streams/record_missing_fields.singer b/singer_sdk/testing/target_test_streams/record_missing_fields.singer
new file mode 100644
index 000000000..a398f6bd6
--- /dev/null
+++ b/singer_sdk/testing/target_test_streams/record_missing_fields.singer
@@ -0,0 +1,4 @@
+{"type": "SCHEMA", "stream": "record_missing_fields", "key_properties": ["id"], "schema": {"type": "object", "properties": {"id": {"type": "integer"}, "optional": {"type": "string"}}, "required": ["id"]}}
+{"type": "RECORD", "stream": "record_missing_fields", "record": {"id": 1, "optional": "now you see me"}}
+{"type": "RECORD", "stream": "record_missing_fields", "record": {"id": 2}}
+{"type": "STATE", "value": {}}
diff --git a/singer_sdk/testing/target_tests.py b/singer_sdk/testing/target_tests.py
index 8412329c5..96e0b0d59 100644
--- a/singer_sdk/testing/target_tests.py
+++ b/singer_sdk/testing/target_tests.py
@@ -139,3 +139,9 @@ class TargetSpecialCharsInAttributes(TargetFileTestTemplate):
"""Test Target handles special chars in attributes."""
name = "special_chars_in_attributes"
+
+
+class TargetRecordMissingOptionalFields(TargetFileTestTemplate):
+ """Test Target handles record missing optional fields."""
+
+ name = "record_missing_fields"
diff --git a/singer_sdk/testing/templates.py b/singer_sdk/testing/templates.py
index b43d37830..4a16feb05 100644
--- a/singer_sdk/testing/templates.py
+++ b/singer_sdk/testing/templates.py
@@ -5,9 +5,12 @@
import contextlib
import typing as t
import warnings
-from pathlib import Path
+
+from singer_sdk.helpers._compat import importlib_resources
+from singer_sdk.testing import target_test_streams
if t.TYPE_CHECKING:
+ from singer_sdk.helpers._compat import Traversable
from singer_sdk.streams import Stream
from .config import SuiteConfig
@@ -319,14 +322,14 @@ def run( # type: ignore[override]
"""
# get input from file
if getattr(self, "singer_filepath", None):
- assert Path(
- self.singer_filepath,
- ).exists(), f"Singer file {self.singer_filepath} does not exist."
+ assert (
+ self.singer_filepath.is_file()
+ ), f"Singer file {self.singer_filepath} does not exist."
runner.input_filepath = self.singer_filepath
super().run(config, resource, runner)
@property
- def singer_filepath(self) -> Path:
+ def singer_filepath(self) -> Traversable:
"""Get path to singer JSONL formatted messages file.
Files will be sourced from `./target_test_streams/.singer`.
@@ -334,5 +337,4 @@ def singer_filepath(self) -> Path:
Returns:
The expected Path to this tests singer file.
"""
- current_dir = Path(__file__).resolve().parent
- return current_dir / "target_test_streams" / f"{self.name}.singer"
+ return importlib_resources.files(target_test_streams) / f"{self.name}.singer" # type: ignore[no-any-return]
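Fixture files are now resolved through `importlib.resources` rather than `Path(__file__)`, so they are found even when the package ships without real files on disk (zipapps, some installers). `files()` is stdlib from Python 3.9; the SDK's `_compat.importlib_resources` shim falls back to the backport on older versions. The traversable API in brief, demonstrated against a stdlib package:

```python
import importlib.resources

# files() returns a Traversable anchored at the package; "/" descends into it.
traversable = importlib.resources.files("json") / "__init__.py"
assert traversable.is_file()
text = traversable.read_text(encoding="utf-8")
```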
diff --git a/singer_sdk/typing.py b/singer_sdk/typing.py
index a8f654c90..80e553574 100644
--- a/singer_sdk/typing.py
+++ b/singer_sdk/typing.py
@@ -57,8 +57,11 @@
import json
import typing as t
-import sqlalchemy
-from jsonschema import ValidationError, Validator, validators
+import sqlalchemy as sa
+from jsonschema import ValidationError, validators
+
+if t.TYPE_CHECKING:
+ from jsonschema.protocols import Validator
from singer_sdk.helpers._typing import (
JSONSCHEMA_ANNOTATION_SECRET,
@@ -490,6 +493,26 @@ def type_dict(self) -> dict: # type: ignore[override]
return {"type": "array", "items": self.wrapped_type.type_dict, **self.extras}
+class AnyType(JSONTypeHelper):
+ """Any type."""
+
+ def __init__(
+ self,
+ *args: t.Any,
+ **kwargs: t.Any,
+ ) -> None:
+ super().__init__(*args, **kwargs)
+
+ @DefaultInstanceProperty
+ def type_dict(self) -> dict:
+ """Get type dictionary.
+
+ Returns:
+ A dictionary describing the type.
+ """
+ return {**self.extras}
+
+
class Property(JSONTypeHelper[T], t.Generic[T]):
"""Generic Property. Should be nested within a `PropertiesList`."""
@@ -897,7 +920,7 @@ def append(self, property: Property) -> None: # noqa: A002
def to_jsonschema_type(
- from_type: str | sqlalchemy.types.TypeEngine | type[sqlalchemy.types.TypeEngine],
+ from_type: str | sa.types.TypeEngine | type[sa.types.TypeEngine],
) -> dict:
"""Return the JSON Schema dict that describes the sql type.
@@ -931,23 +954,25 @@ def to_jsonschema_type(
}
if isinstance(from_type, str):
type_name = from_type
- elif isinstance(from_type, sqlalchemy.types.TypeEngine):
+ elif isinstance(from_type, sa.types.TypeEngine):
type_name = type(from_type).__name__
elif isinstance(from_type, type) and issubclass(
from_type,
- sqlalchemy.types.TypeEngine,
+ sa.types.TypeEngine,
):
type_name = from_type.__name__
else:
msg = "Expected `str` or a SQLAlchemy `TypeEngine` object or type."
raise ValueError(msg)
- # Look for the type name within the known SQL type names:
- for sqltype, jsonschema_type in sqltype_lookup.items():
- if sqltype.lower() in type_name.lower():
- return jsonschema_type
-
- return sqltype_lookup["string"] # safe failover to str
+ return next(
+ (
+ jsonschema_type
+ for sqltype, jsonschema_type in sqltype_lookup.items()
+ if sqltype.lower() in type_name.lower()
+ ),
+ sqltype_lookup["string"], # safe failover to str
+ )
def _jsonschema_type_check(jsonschema_type: dict, type_check: tuple[str]) -> bool:
@@ -958,28 +983,24 @@ def _jsonschema_type_check(jsonschema_type: dict, type_check: tuple[str]) -> boo
type_check: A tuple of type strings to look for.
Returns:
- True if the schema suports the type.
+ True if the schema supports the type.
"""
if "type" in jsonschema_type:
if isinstance(jsonschema_type["type"], (list, tuple)):
for schema_type in jsonschema_type["type"]:
if schema_type in type_check:
return True
- else:
- if jsonschema_type.get("type") in type_check: # noqa: PLR5501
- return True
+ elif jsonschema_type.get("type") in type_check:
+ return True
- if any(
+ return any(
_jsonschema_type_check(t, type_check) for t in jsonschema_type.get("anyOf", ())
- ):
- return True
-
- return False
+ )
def to_sql_type( # noqa: PLR0911, C901
jsonschema_type: dict,
-) -> sqlalchemy.types.TypeEngine:
+) -> sa.types.TypeEngine:
"""Convert JSON Schema type to a SQL type.
Args:
@@ -992,26 +1013,26 @@ def to_sql_type( # noqa: PLR0911, C901
datelike_type = get_datelike_property_type(jsonschema_type)
if datelike_type:
if datelike_type == "date-time":
- return t.cast(sqlalchemy.types.TypeEngine, sqlalchemy.types.DATETIME())
+ return sa.types.DATETIME()
if datelike_type in "time":
- return t.cast(sqlalchemy.types.TypeEngine, sqlalchemy.types.TIME())
+ return sa.types.TIME()
if datelike_type == "date":
- return t.cast(sqlalchemy.types.TypeEngine, sqlalchemy.types.DATE())
+ return sa.types.DATE()
maxlength = jsonschema_type.get("maxLength")
- return t.cast(sqlalchemy.types.TypeEngine, sqlalchemy.types.VARCHAR(maxlength))
+ return sa.types.VARCHAR(maxlength)
if _jsonschema_type_check(jsonschema_type, ("integer",)):
- return t.cast(sqlalchemy.types.TypeEngine, sqlalchemy.types.INTEGER())
+ return sa.types.INTEGER()
if _jsonschema_type_check(jsonschema_type, ("number",)):
- return t.cast(sqlalchemy.types.TypeEngine, sqlalchemy.types.DECIMAL())
+ return sa.types.DECIMAL()
if _jsonschema_type_check(jsonschema_type, ("boolean",)):
- return t.cast(sqlalchemy.types.TypeEngine, sqlalchemy.types.BOOLEAN())
+ return sa.types.BOOLEAN()
if _jsonschema_type_check(jsonschema_type, ("object",)):
- return t.cast(sqlalchemy.types.TypeEngine, sqlalchemy.types.VARCHAR())
+ return sa.types.VARCHAR()
if _jsonschema_type_check(jsonschema_type, ("array",)):
- return t.cast(sqlalchemy.types.TypeEngine, sqlalchemy.types.VARCHAR())
+ return sa.types.VARCHAR()
- return t.cast(sqlalchemy.types.TypeEngine, sqlalchemy.types.VARCHAR())
+ return sa.types.VARCHAR()
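Two behaviors in this file are worth calling out: `AnyType` emits a schema with no `type` key at all (only extras such as `description`), and `to_sql_type` degrades to `VARCHAR` for anything it cannot classify. A quick sketch of both; the output comments reflect expected behavior:

```python
# Quick sketch of the new helpers.
from singer_sdk import typing as th

prop = th.Property("payload", th.AnyType, description="Can be anything")
print(prop.to_dict())  # {'payload': {'description': 'Can be anything'}}

print(th.to_sql_type({"type": "integer"}))                  # INTEGER
print(th.to_sql_type({"type": "string", "maxLength": 10}))  # VARCHAR(10)
print(th.to_sql_type({"no": "type info"}))                  # VARCHAR (safe failover)
```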
diff --git a/tests/_singerlib/test_messages.py b/tests/_singerlib/test_messages.py
index 47a36aca6..e10259497 100644
--- a/tests/_singerlib/test_messages.py
+++ b/tests/_singerlib/test_messages.py
@@ -1,14 +1,16 @@
from __future__ import annotations
+import datetime
import io
from contextlib import redirect_stdout
-from datetime import datetime
import pytest
-from pytz import UTC, timezone
+from pytz import timezone
import singer_sdk._singerlib as singer
-from singer_sdk._singerlib.messages import format_message
+from singer_sdk.io_base import SingerWriter
+
+UTC = datetime.timezone.utc
def test_exclude_null_dict():
@@ -17,22 +19,24 @@ def test_exclude_null_dict():
def test_format_message():
+ singerwriter = SingerWriter()
message = singer.RecordMessage(
stream="test",
record={"id": 1, "name": "test"},
)
- assert format_message(message) == (
+ assert singerwriter.format_message(message) == (
'{"type": "RECORD", "stream": "test", "record": {"id": 1, "name": "test"}}'
)
def test_write_message():
+ singerwriter = SingerWriter()
message = singer.RecordMessage(
stream="test",
record={"id": 1, "name": "test"},
)
with redirect_stdout(io.StringIO()) as out:
- singer.write_message(message)
+ singerwriter.write_message(message)
assert out.getvalue() == (
'{"type": "RECORD", "stream": "test", "record": {"id": 1, "name": "test"}}\n'
@@ -55,19 +59,33 @@ def test_record_message():
assert singer.RecordMessage.from_dict(record.to_dict()) == record
+def test_record_message_parse_time_extracted():
+ message_dic = {
+ "type": "RECORD",
+ "stream": "test",
+ "record": {"id": 1, "name": "test"},
+ "time_extracted": "2021-01-01T00:00:00Z",
+ }
+ record = singer.RecordMessage.from_dict(message_dic)
+ assert record.type == "RECORD"
+ assert record.stream == "test"
+ assert record.record == {"id": 1, "name": "test"}
+ assert record.time_extracted == datetime.datetime(2021, 1, 1, 0, 0, 0, tzinfo=UTC)
+
+
def test_record_message_naive_time_extracted():
"""Check that record message' time_extracted must be timezone-aware."""
with pytest.raises(ValueError, match="must be either None or an aware datetime"):
singer.RecordMessage(
stream="test",
record={"id": 1, "name": "test"},
- time_extracted=datetime(2021, 1, 1), # noqa: DTZ001
+ time_extracted=datetime.datetime(2021, 1, 1), # noqa: DTZ001
)
def test_record_message_time_extracted_to_utc():
"""Check that record message's time_extracted is converted to UTC."""
- naive = datetime(2021, 1, 1, 12) # noqa: DTZ001
+ naive = datetime.datetime(2021, 1, 1, 12) # noqa: DTZ001
nairobi = timezone("Africa/Nairobi")
record = singer.RecordMessage(
@@ -75,7 +93,7 @@ def test_record_message_time_extracted_to_utc():
record={"id": 1, "name": "test"},
time_extracted=nairobi.localize(naive),
)
- assert record.time_extracted == datetime(2021, 1, 1, 9, tzinfo=UTC)
+ assert record.time_extracted == datetime.datetime(2021, 1, 1, 9, tzinfo=UTC)
def test_schema_message():
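The message-formatting entry point moved from a module-level `format_message` function onto the `SingerWriter` class, which is why these tests now instantiate a writer first. The resulting API surface:

```python
# Sketch of the writer-based API exercised above.
import singer_sdk._singerlib as singer
from singer_sdk.io_base import SingerWriter

writer = SingerWriter()
msg = singer.RecordMessage(stream="test", record={"id": 1})
line = writer.format_message(msg)  # JSON text, no trailing newline
writer.write_message(msg)          # writes `line` plus "\n" to stdout
```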
diff --git a/tests/_singerlib/test_schema.py b/tests/_singerlib/test_schema.py
index 07589f431..4fa72c5a8 100644
--- a/tests/_singerlib/test_schema.py
+++ b/tests/_singerlib/test_schema.py
@@ -4,10 +4,10 @@
from singer_sdk._singerlib import Schema, resolve_schema_references
-STRING_SCHEMA = Schema(type="string", maxLength=32)
-STRING_DICT = {"type": "string", "maxLength": 32}
-INTEGER_SCHEMA = Schema(type="integer", maximum=1000000)
-INTEGER_DICT = {"type": "integer", "maximum": 1000000}
+STRING_SCHEMA = Schema(type="string", maxLength=32, default="")
+STRING_DICT = {"type": "string", "maxLength": 32, "default": ""}
+INTEGER_SCHEMA = Schema(type="integer", maximum=1000000, default=0)
+INTEGER_DICT = {"type": "integer", "maximum": 1000000, "default": 0}
ARRAY_SCHEMA = Schema(type="array", items=INTEGER_SCHEMA)
ARRAY_DICT = {"type": "array", "items": INTEGER_DICT}
OBJECT_SCHEMA = Schema(
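The fixture change exercises the newly supported `default` keyword on `Schema`, which should survive a round trip through `to_dict()`:

```python
# Round-trip sketch for the `default` keyword.
from singer_sdk._singerlib import Schema

schema = Schema(type="integer", maximum=1_000_000, default=0)
assert schema.to_dict() == {"type": "integer", "maximum": 1_000_000, "default": 0}
assert Schema.from_dict(schema.to_dict()) == schema
```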
diff --git a/tests/conftest.py b/tests/conftest.py
index 142e76fe1..b898deaa9 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -8,14 +8,21 @@
import typing as t
import pytest
+import sqlalchemy as sa
+from singer_sdk import SQLConnector
from singer_sdk import typing as th
-from singer_sdk.sinks import BatchSink
-from singer_sdk.target_base import Target
+from singer_sdk.helpers._typing import DatetimeErrorTreatmentEnum
+from singer_sdk.helpers.capabilities import PluginCapabilities
+from singer_sdk.sinks import BatchSink, SQLSink
+from singer_sdk.target_base import SQLTarget, Target
if t.TYPE_CHECKING:
from _pytest.config import Config
+ from singer_sdk.helpers.capabilities import CapabilitiesEnum
+
+
SYSTEMS = {"linux", "darwin", "windows"}
pytest_plugins = ("singer_sdk.testing.pytest_plugin",)
@@ -39,9 +46,14 @@ def pytest_runtest_setup(item):
pytest.skip(f"cannot run on platform {system}")
+def pytest_report_header() -> list[str]:
+ """Return a list of strings to be displayed in the header of the report."""
+ return [f"sqlalchemy: {sa.__version__}"]
+
+
@pytest.fixture(scope="class")
def outdir() -> t.Generator[str, None, None]:
- """Create a temporary directory for cookiecutters and target output."""
+ """Create a temporary directory for target output."""
name = ".output/"
try:
pathlib.Path(name).mkdir(parents=True)
@@ -64,6 +76,7 @@ class BatchSinkMock(BatchSink):
"""A mock Sink class."""
name = "batch-sink-mock"
+ datetime_error_treatment = DatetimeErrorTreatmentEnum.MAX
def __init__(
self,
@@ -86,6 +99,10 @@ def process_batch(self, context: dict) -> None:
self.target.records_written.extend(context["records"])
self.target.num_batches_processed += 1
+ @property
+ def key_properties(self) -> list[str]:
+ return [key.upper() for key in super().key_properties]
+
class TargetMock(Target):
"""A mock Target class."""
@@ -93,6 +110,70 @@ class TargetMock(Target):
name = "target-mock"
config_jsonschema = th.PropertiesList().to_dict()
default_sink_class = BatchSinkMock
+ capabilities: t.ClassVar[list[CapabilitiesEnum]] = [
+ *Target.capabilities,
+ PluginCapabilities.BATCH,
+ ]
+
+ def __init__(self, *args, **kwargs):
+ """Create the Mock target sync."""
+ super().__init__(*args, **kwargs)
+ self.state_messages_written: list[dict] = []
+ self.records_written: list[dict] = []
+ self.num_records_processed: int = 0
+ self.num_batches_processed: int = 0
+
+ def _write_state_message(self, state: dict):
+ """Emit the stream's latest state."""
+ super()._write_state_message(state)
+ self.state_messages_written.append(state)
+
+
+class SQLConnectorMock(SQLConnector):
+ """A Mock SQLConnector class."""
+
+
+class SQLSinkMock(SQLSink):
+ """A mock Sink class."""
+
+ name = "sql-sink-mock"
+ connector_class = SQLConnectorMock
+
+ def __init__(
+ self,
+ target: SQLTargetMock,
+ stream_name: str,
+ schema: dict,
+ key_properties: list[str] | None,
+ connector: SQLConnector | None = None,
+ ):
+ """Create the Mock batch-based sink."""
+ self._connector: SQLConnector
+ self._connector = connector or self.connector_class(dict(target.config))
+ super().__init__(target, stream_name, schema, key_properties, connector)
+ self.target = target
+
+ def process_record(self, record: dict, context: dict) -> None:
+ """Tracks the count of processed records."""
+ self.target.num_records_processed += 1
+ super().process_record(record, context)
+
+ def process_batch(self, context: dict) -> None:
+ """Write to mock trackers."""
+ self.target.records_written.extend(context["records"])
+ self.target.num_batches_processed += 1
+
+ @property
+ def key_properties(self) -> list[str]:
+ return [key.upper() for key in super().key_properties]
+
+
+class SQLTargetMock(SQLTarget):
+ """A mock Target class."""
+
+ name = "sql-target-mock"
+ config_jsonschema = th.PropertiesList().to_dict()
+ default_sink_class = SQLSinkMock
def __init__(self, *args, **kwargs):
"""Create the Mock target sync."""
diff --git a/tests/contrib/__init__.py b/tests/contrib/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/tests/contrib/test_batch_encoder_parquet.py b/tests/contrib/test_batch_encoder_parquet.py
new file mode 100644
index 000000000..0318e41d3
--- /dev/null
+++ b/tests/contrib/test_batch_encoder_parquet.py
@@ -0,0 +1,49 @@
+"""Tests for the Parquet batch encoder."""
+
+from __future__ import annotations
+
+import typing as t
+
+from singer_sdk.contrib.batch_encoder_parquet import ParquetBatcher
+from singer_sdk.helpers._batch import BatchConfig, ParquetEncoding, StorageTarget
+
+if t.TYPE_CHECKING:
+ from pathlib import Path
+
+
+def test_batcher(tmp_path: Path) -> None:
+ root = tmp_path.joinpath("batches")
+ root.mkdir()
+ config = BatchConfig(
+ encoding=ParquetEncoding(),
+ storage=StorageTarget(root=str(root)),
+ batch_size=2,
+ )
+ batcher = ParquetBatcher("tap", "stream", config)
+ records = [
+ {"id": 1, "numeric": "1.0"},
+ {"id": 2, "numeric": "2.0"},
+ {"id": 3, "numeric": "3.0"},
+ ]
+ batches = list(batcher.get_batches(records))
+ assert len(batches) == 2
+ assert batches[0][0].endswith(".parquet")
+
+
+def test_batcher_gzip(tmp_path: Path) -> None:
+ root = tmp_path.joinpath("batches")
+ root.mkdir()
+ config = BatchConfig(
+ encoding=ParquetEncoding(compression="gzip"),
+ storage=StorageTarget(root=str(root)),
+ batch_size=2,
+ )
+ batcher = ParquetBatcher("tap", "stream", config)
+ records = [
+ {"id": 1, "numeric": "1.0"},
+ {"id": 2, "numeric": "2.0"},
+ {"id": 3, "numeric": "3.0"},
+ ]
+ batches = list(batcher.get_batches(records))
+ assert len(batches) == 2
+ assert batches[0][0].endswith(".parquet.gz")
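Both assertions rest on the same arithmetic: with `batch_size=2`, three records yield one full file plus one remainder file, and the gzip variant only changes the filename suffix.

```python
# The batch-count arithmetic behind `len(batches) == 2`.
import math

records, batch_size = 3, 2
assert math.ceil(records / batch_size) == 2  # one full batch + one remainder
```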
diff --git a/tests/core/conftest.py b/tests/core/conftest.py
new file mode 100644
index 000000000..97eb76e7f
--- /dev/null
+++ b/tests/core/conftest.py
@@ -0,0 +1,114 @@
+"""Tap, target and stream test fixtures."""
+
+from __future__ import annotations
+
+import typing as t
+from contextlib import contextmanager
+
+import pendulum
+import pytest
+from typing_extensions import override
+
+from singer_sdk import Stream, Tap
+from singer_sdk.typing import (
+ DateTimeType,
+ IntegerType,
+ PropertiesList,
+ Property,
+ StringType,
+)
+
+
+class SimpleTestStream(Stream):
+ """Test stream class."""
+
+ name = "test"
+ schema = PropertiesList(
+ Property("id", IntegerType, required=True),
+ Property("value", StringType, required=True),
+ Property("updatedAt", DateTimeType, required=True),
+ ).to_dict()
+ replication_key = "updatedAt"
+
+ def __init__(self, tap: Tap):
+ """Create a new stream."""
+ super().__init__(tap, schema=self.schema, name=self.name)
+
+ @override
+ def get_records(
+ self,
+ context: dict | None,
+ ) -> t.Iterable[dict[str, t.Any]]:
+ """Generate records."""
+ yield {"id": 1, "value": "Egypt"}
+ yield {"id": 2, "value": "Germany"}
+ yield {"id": 3, "value": "India"}
+
+ @contextmanager
+ def with_replication_method(self, method: str | None) -> t.Iterator[None]:
+ """Context manager to temporarily override the replication method."""
+ original_method = self.forced_replication_method
+ self.forced_replication_method = method
+ yield
+ self.forced_replication_method = original_method
+
+
+class UnixTimestampIncrementalStream(SimpleTestStream):
+ name = "unix_ts"
+ schema = PropertiesList(
+ Property("id", IntegerType, required=True),
+ Property("value", StringType, required=True),
+ Property("updatedAt", IntegerType, required=True),
+ ).to_dict()
+ replication_key = "updatedAt"
+
+
+class UnixTimestampIncrementalStream2(UnixTimestampIncrementalStream):
+ name = "unix_ts_override"
+
+ @override
+ def compare_start_date(self, value: str, start_date_value: str) -> str:
+ """Compare a value to a start date value."""
+
+ start_timestamp = pendulum.parse(start_date_value).format("X")
+ return max(value, start_timestamp, key=float)
+
+
+class SimpleTestTap(Tap):
+ """Test tap class."""
+
+ name = "test-tap"
+ config_jsonschema = PropertiesList(
+ Property("username", StringType, required=True),
+ Property("password", StringType, required=True),
+ Property("start_date", DateTimeType),
+ additional_properties=False,
+ ).to_dict()
+
+ @override
+ def discover_streams(self) -> list[Stream]:
+ """List all streams."""
+ return [
+ SimpleTestStream(self),
+ UnixTimestampIncrementalStream(self),
+ UnixTimestampIncrementalStream2(self),
+ ]
+
+
+@pytest.fixture
+def tap_class():
+ """Return the tap class."""
+ return SimpleTestTap
+
+
+@pytest.fixture
+def tap() -> SimpleTestTap:
+ """Tap instance."""
+ return SimpleTestTap(
+ config={
+ "username": "utest",
+ "password": "ptest",
+ "start_date": "2021-01-01",
+ },
+ parse_env_config=False,
+ )
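A sketch of how these fixtures are typically consumed: `SimpleTestTap` registers the three streams, and `with_replication_method` temporarily forces a replication mode around a sync.

```python
# Sketch only; mirrors the `tap` fixture defined above.
tap = SimpleTestTap(
    config={"username": "u", "password": "p", "start_date": "2021-01-01"},
)
stream = tap.streams["test"]
with stream.with_replication_method("FULL_TABLE"):
    records = list(stream.get_records(context=None))
assert [r["id"] for r in records] == [1, 2, 3]
```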
diff --git a/tests/core/resources/continents.parquet.gz b/tests/core/resources/continents.parquet.gz
new file mode 100644
index 000000000..8df64eb5a
Binary files /dev/null and b/tests/core/resources/continents.parquet.gz differ
diff --git a/tests/core/resources/countries.parquet.gz b/tests/core/resources/countries.parquet.gz
new file mode 100644
index 000000000..e02065064
Binary files /dev/null and b/tests/core/resources/countries.parquet.gz differ
diff --git a/tests/core/rest/test_authenticators.py b/tests/core/rest/test_authenticators.py
index 0226c4aef..7e1da91eb 100644
--- a/tests/core/rest/test_authenticators.py
+++ b/tests/core/rest/test_authenticators.py
@@ -2,10 +2,12 @@
from __future__ import annotations
+import datetime
import typing as t
import jwt
import pytest
+import time_machine
from cryptography.hazmat.primitives.asymmetric.rsa import (
RSAPrivateKey,
RSAPublicKey,
@@ -125,7 +127,7 @@ def test_oauth_authenticator_token_expiry_handling(
requests_mock: requests_mock.Mocker,
oauth_response_expires_in: int,
default_expiration: int,
- result: bool,
+ result: int | None,
):
"""Validate various combinations of expires_in and default_expiration."""
response = {"access_token": "an-access-token"}
@@ -143,10 +145,26 @@ def test_oauth_authenticator_token_expiry_handling(
auth_endpoint="https://example.com/oauth",
default_expiration=default_expiration,
)
- authenticator.update_access_token()
+ with time_machine.travel(
+ datetime.datetime(2023, 1, 1, tzinfo=datetime.timezone.utc),
+ tick=False,
+ ):
+ authenticator.update_access_token()
assert authenticator.expires_in == result
+ with time_machine.travel(
+ datetime.datetime(2023, 1, 1, 0, 1, tzinfo=datetime.timezone.utc),
+ tick=False,
+ ):
+ assert authenticator.is_token_valid()
+
+ with time_machine.travel(
+ datetime.datetime(2023, 1, 1, 0, 5, tzinfo=datetime.timezone.utc),
+ tick=False,
+ ):
+ assert not authenticator.expires_in or not authenticator.is_token_valid()
+
@pytest.fixture
def private_key() -> RSAPrivateKey:
diff --git a/tests/core/rest/test_backoff.py b/tests/core/rest/test_backoff.py
index fbe391e55..7a2ba39b8 100644
--- a/tests/core/rest/test_backoff.py
+++ b/tests/core/rest/test_backoff.py
@@ -74,6 +74,14 @@ def custom_validation_stream(rest_tap):
match=r"503 Server Error: Service Unavailable for path: /dummy",
),
),
+ (
+ 521, # Cloudflare custom status code higher than max(HTTPStatus)
+ "Web Server Is Down",
+ pytest.raises(
+ RetriableAPIError,
+ match=r"521 Server Error: Web Server Is Down for path: /dummy",
+ ),
+ ),
(
429,
"Too Many Requests",
@@ -84,7 +92,7 @@ def custom_validation_stream(rest_tap):
),
(200, "OK", nullcontext()),
],
- ids=["client-error", "server-error", "rate-limited", "ok"],
+ ids=["client-error", "server-error", "server-error", "rate-limited", "ok"],
)
def test_status_code_api(basic_rest_stream, status_code, reason, expectation):
fake_response = requests.Response()
diff --git a/tests/core/rest/test_pagination.py b/tests/core/rest/test_pagination.py
index 23dce9841..09e9d04b2 100644
--- a/tests/core/rest/test_pagination.py
+++ b/tests/core/rest/test_pagination.py
@@ -27,7 +27,7 @@ def test_paginator_base_missing_implementation():
with pytest.raises(
TypeError,
- match="Can't instantiate abstract class .* get_next",
+ match="Can't instantiate abstract class .* '?get_next'?",
):
BaseAPIPaginator(0)
@@ -52,7 +52,7 @@ def test_paginator_page_number_missing_implementation():
with pytest.raises(
TypeError,
- match="Can't instantiate abstract class .* has_more",
+ match="Can't instantiate abstract class .* '?has_more'?",
):
BasePageNumberPaginator(1)
@@ -62,7 +62,7 @@ def test_paginator_offset_missing_implementation():
with pytest.raises(
TypeError,
- match="Can't instantiate abstract class .* has_more",
+ match="Can't instantiate abstract class .* '?has_more'?",
):
BaseOffsetPaginator(0, 100)
@@ -72,7 +72,7 @@ def test_paginator_hateoas_missing_implementation():
with pytest.raises(
TypeError,
- match="Can't instantiate abstract class .* get_next_url",
+ match="Can't instantiate abstract class .* '?get_next_url'?",
):
BaseHATEOASPaginator()
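These regex tweaks accommodate Python 3.12, which quotes method names in the abstract-class error ("... without an implementation for abstract method 'get_next'") where older versions did not. Both message shapes satisfy the relaxed pattern:

```python
# pytest.raises(match=...) uses re.search, so both variants must match.
import re

pattern = r"Can't instantiate abstract class .* '?get_next'?"
old = "Can't instantiate abstract class P with abstract method get_next"
new = (
    "Can't instantiate abstract class P without an implementation "
    "for abstract method 'get_next'"
)
assert re.search(pattern, old) and re.search(pattern, new)
```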
diff --git a/tests/core/sinks/__init__.py b/tests/core/sinks/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/tests/core/sinks/test_sdc_metadata.py b/tests/core/sinks/test_sdc_metadata.py
new file mode 100644
index 000000000..dbb9a7976
--- /dev/null
+++ b/tests/core/sinks/test_sdc_metadata.py
@@ -0,0 +1,63 @@
+from __future__ import annotations
+
+import datetime
+
+import time_machine
+
+from tests.conftest import BatchSinkMock, TargetMock
+
+
+def test_sdc_metadata():
+ with time_machine.travel(
+ datetime.datetime(2023, 1, 1, tzinfo=datetime.timezone.utc),
+ tick=False,
+ ):
+ target = TargetMock()
+
+ sink = BatchSinkMock(
+ target,
+ "users",
+ {"type": "object", "properties": {"id": {"type": "integer"}}},
+ ["id"],
+ )
+
+ record_message = {
+ "type": "RECORD",
+ "stream": "users",
+ "record": {"id": 1},
+ "time_extracted": "2021-01-01T00:00:00+00:00",
+ "version": 100,
+ }
+ record = record_message["record"]
+
+ with time_machine.travel(
+ datetime.datetime(2023, 1, 1, 0, 5, tzinfo=datetime.timezone.utc),
+ tick=False,
+ ):
+ sink._add_sdc_metadata_to_record(record, record_message, {})
+
+ assert record == {
+ "id": 1,
+ "_sdc_extracted_at": "2021-01-01T00:00:00+00:00",
+ "_sdc_received_at": "2023-01-01T00:05:00+00:00",
+ "_sdc_batched_at": "2023-01-01T00:05:00+00:00",
+ "_sdc_deleted_at": None,
+ "_sdc_sequence": 1672531500000,
+ "_sdc_table_version": 100,
+ "_sdc_sync_started_at": 1672531200000,
+ }
+
+ sink._add_sdc_metadata_to_schema()
+ assert sink.schema == {
+ "type": "object",
+ "properties": {
+ "id": {"type": "integer"},
+ "_sdc_extracted_at": {"type": ["null", "string"], "format": "date-time"},
+ "_sdc_received_at": {"type": ["null", "string"], "format": "date-time"},
+ "_sdc_batched_at": {"type": ["null", "string"], "format": "date-time"},
+ "_sdc_deleted_at": {"type": ["null", "string"], "format": "date-time"},
+ "_sdc_sequence": {"type": ["null", "integer"]},
+ "_sdc_table_version": {"type": ["null", "integer"]},
+ "_sdc_sync_started_at": {"type": ["null", "integer"]},
+ },
+ }
diff --git a/tests/core/sinks/test_validation.py b/tests/core/sinks/test_validation.py
new file mode 100644
index 000000000..0672c9f49
--- /dev/null
+++ b/tests/core/sinks/test_validation.py
@@ -0,0 +1,128 @@
+from __future__ import annotations
+
+import datetime
+import itertools
+
+import pytest
+
+from tests.conftest import BatchSinkMock, TargetMock
+
+
+def test_validate_record():
+ target = TargetMock()
+ sink = BatchSinkMock(
+ target,
+ "users",
+ {
+ "type": "object",
+ "properties": {
+ "id": {"type": "integer"},
+ "created_at": {"type": "string", "format": "date-time"},
+ "created_at_date": {"type": "string", "format": "date"},
+ "created_at_time": {"type": "string", "format": "time"},
+ "invalid_datetime": {"type": "string", "format": "date-time"},
+ },
+ },
+ ["id"],
+ )
+
+ record = {
+ "id": 1,
+ "created_at": "2021-01-01T00:00:00+00:00",
+ "created_at_date": "2021-01-01",
+ "created_at_time": "00:01:00+00:00",
+ "missing_datetime": "2021-01-01T00:00:00+00:00",
+ "invalid_datetime": "not a datetime",
+ }
+ updated_record = sink._validate_and_parse(record)
+
+ assert updated_record["created_at"] == datetime.datetime(
+ 2021,
+ 1,
+ 1,
+ 0,
+ 0,
+ tzinfo=datetime.timezone.utc,
+ )
+ assert updated_record["created_at_date"] == datetime.date(
+ 2021,
+ 1,
+ 1,
+ )
+ assert updated_record["created_at_time"] == datetime.time(
+ 0,
+ 1,
+ tzinfo=datetime.timezone.utc,
+ )
+ assert updated_record["missing_datetime"] == "2021-01-01T00:00:00+00:00"
+ assert updated_record["invalid_datetime"] == "9999-12-31 23:59:59.999999"
+
+
+@pytest.fixture
+def bench_sink() -> BatchSinkMock:
+ target = TargetMock()
+ return BatchSinkMock(
+ target,
+ "users",
+ {
+ "type": "object",
+ "properties": {
+ "id": {"type": "integer"},
+ "created_at": {"type": "string", "format": "date-time"},
+ "updated_at": {"type": "string", "format": "date-time"},
+ "deleted_at": {"type": "string", "format": "date-time"},
+ },
+ },
+ ["id"],
+ )
+
+
+@pytest.fixture
+def bench_record():
+ return {
+ "id": 1,
+ "created_at": "2021-01-01T00:08:00-07:00",
+ "updated_at": "2022-01-02T00:09:00-07:00",
+ "deleted_at": "2023-01-03T00:10:00.0000",
+ }
+
+
+def test_bench_parse_timestamps_in_record(benchmark, bench_sink, bench_record):
+ """Run benchmark for Sink method _parse_timestamps_in_record."""
+ number_of_runs = 1000
+
+ sink: BatchSinkMock = bench_sink
+
+ def run_parse_timestamps_in_record():
+ for record in itertools.repeat(bench_record, number_of_runs):
+ _ = sink._parse_timestamps_in_record(
+ record.copy(), sink.schema, sink.datetime_error_treatment
+ )
+
+ benchmark(run_parse_timestamps_in_record)
+
+
+def test_bench_validate_and_parse(benchmark, bench_sink, bench_record):
+ """Run benchmark for Sink method _validate_and_parse."""
+ number_of_runs = 1000
+
+ sink: BatchSinkMock = bench_sink
+
+ def run_validate_and_parse():
+ for record in itertools.repeat(bench_record, number_of_runs):
+ _ = sink._validate_and_parse(record.copy())
+
+ benchmark(run_validate_and_parse)
+
+
+def test_bench_validate_record_with_schema(benchmark, bench_sink, bench_record):
+ """Run benchmark for Sink._validator method validate."""
+ number_of_runs = 1000
+
+ sink: BatchSinkMock = bench_sink
+
+ def run_validate_record_with_schema():
+ for record in itertools.repeat(bench_record, number_of_runs):
+ sink._validator.validate(record)
+
+ benchmark(run_validate_record_with_schema)
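The `invalid_datetime` expectation above follows from `BatchSinkMock` setting `DatetimeErrorTreatmentEnum.MAX` in `tests/conftest.py`: unparseable datetimes are replaced with the maximum representable datetime rather than raising.

```python
# Why "not a datetime" becomes "9999-12-31 23:59:59.999999" under MAX treatment.
import datetime

assert str(datetime.datetime.max) == "9999-12-31 23:59:59.999999"
```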
diff --git a/tests/core/test_batch.py b/tests/core/test_batch.py
index 6efb3b34a..c2076e37b 100644
--- a/tests/core/test_batch.py
+++ b/tests/core/test_batch.py
@@ -1,40 +1,80 @@
from __future__ import annotations
import decimal
+import importlib.util
import re
from dataclasses import asdict
import pytest
-from singer_sdk.batch import JSONLinesBatcher
+from singer_sdk.batch import Batcher
+from singer_sdk.contrib.batch_encoder_jsonl import JSONLinesBatcher
+from singer_sdk.contrib.batch_encoder_parquet import ParquetBatcher
from singer_sdk.helpers._batch import (
BaseBatchFileEncoding,
BatchConfig,
JSONLinesEncoding,
+ ParquetEncoding,
StorageTarget,
)
+def is_pyarrow_installed():
+ module_spec = importlib.util.find_spec("pyarrow")
+ return module_spec is not None
+
+
+skip_if_no_pyarrow = pytest.mark.skipif(
+ not is_pyarrow_installed(),
+ reason="requires pyarrow",
+)
+
+
@pytest.mark.parametrize(
"encoding,expected",
[
(JSONLinesEncoding("gzip"), {"compression": "gzip", "format": "jsonl"}),
(JSONLinesEncoding(), {"compression": None, "format": "jsonl"}),
+ (ParquetEncoding("gzip"), {"compression": "gzip", "format": "parquet"}),
+ (ParquetEncoding(), {"compression": None, "format": "parquet"}),
+ ],
+ ids=[
+ "jsonl-compression-gzip",
+ "jsonl-compression-none",
+ "parquet-compression-gzip",
+ "parquet-compression-none",
],
- ids=["jsonl-compression-gzip", "jsonl-compression-none"],
)
def test_encoding_as_dict(encoding: BaseBatchFileEncoding, expected: dict) -> None:
"""Test encoding as dict."""
assert asdict(encoding) == expected
-def test_storage_get_url():
- storage = StorageTarget("file://root_dir")
+@pytest.mark.parametrize(
+ "file_scheme,root,prefix,expected",
+ [
+ (
+ "file://",
+ "root_dir",
+ "prefix--file.jsonl.gz",
+ "root_dir/prefix--file.jsonl.gz",
+ ),
+ (
+ "file://",
+ "root_dir",
+ "prefix--file.parquet.gz",
+ "root_dir/prefix--file.parquet.gz",
+ ),
+ ],
+ ids=["jsonl-url", "parquet-url"],
+)
+def test_storage_get_url(file_scheme, root, prefix, expected):
+ storage = StorageTarget(file_scheme + root)
with storage.fs(create=True) as fs:
- url = fs.geturl("prefix--file.jsonl.gz")
- assert url.startswith("file://")
- assert url.replace("\\", "/").endswith("root_dir/prefix--file.jsonl.gz")
+ url = fs.geturl(prefix)
+ assert url.startswith(file_scheme)
+ assert url.replace("\\", "/").endswith(expected)
def test_storage_get_s3_url():
@@ -69,6 +109,11 @@ def test_storage_from_url(file_url: str, root: str):
assert target.root == root
+def test_get_unsupported_batcher():
+ with pytest.raises(ValueError, match="Unsupported batcher"):
+ Batcher.get_batcher("unsupported")
+
+
@pytest.mark.parametrize(
"file_url,expected",
[
@@ -125,3 +170,83 @@ def test_json_lines_batcher():
for batch in batches
for filepath in batch
)
+
+
+def test_batcher_with_jsonl_encoding():
+ batcher = Batcher(
+ "tap-test",
+ "stream-test",
+ batch_config=BatchConfig(
+ encoding=JSONLinesEncoding("gzip"),
+ storage=StorageTarget("file:///tmp/sdk-batches"),
+ batch_size=2,
+ ),
+ )
+ records = [
+ {"id": 1, "numeric": decimal.Decimal("1.0")},
+ {"id": 2, "numeric": decimal.Decimal("2.0")},
+ {"id": 3, "numeric": decimal.Decimal("3.0")},
+ ]
+
+ batches = list(batcher.get_batches(records))
+ assert len(batches) == 2
+ assert all(len(batch) == 1 for batch in batches)
+ assert all(
+ re.match(r".*tap-test--stream-test-.*\.json.gz", filepath)
+ for batch in batches
+ for filepath in batch
+ )
+
+
+@skip_if_no_pyarrow
+def test_parquet_batcher():
+ batcher = ParquetBatcher(
+ "tap-test",
+ "stream-test",
+ batch_config=BatchConfig(
+ encoding=ParquetEncoding("gzip"),
+ storage=StorageTarget("file:///tmp/sdk-batches"),
+ batch_size=2,
+ ),
+ )
+ records = [
+ {"id": 1, "numeric": decimal.Decimal("1.0")},
+ {"id": 2, "numeric": decimal.Decimal("2.0")},
+ {"id": 3, "numeric": decimal.Decimal("3.0")},
+ ]
+
+ batches = list(batcher.get_batches(records))
+ assert len(batches) == 2
+ assert all(len(batch) == 1 for batch in batches)
+ assert all(
+ re.match(r".*tap-test--stream-test-.*\.parquet.gz", filepath)
+ for batch in batches
+ for filepath in batch
+ )
+
+
+@skip_if_no_pyarrow
+def test_batcher_with_parquet_encoding():
+ batcher = Batcher(
+ "tap-test",
+ "stream-test",
+ batch_config=BatchConfig(
+ encoding=ParquetEncoding("gzip"),
+ storage=StorageTarget("file:///tmp/sdk-batches"),
+ batch_size=2,
+ ),
+ )
+ records = [
+ {"id": 1, "numeric": decimal.Decimal("1.0")},
+ {"id": 2, "numeric": decimal.Decimal("2.0")},
+ {"id": 3, "numeric": decimal.Decimal("3.0")},
+ ]
+
+ batches = list(batcher.get_batches(records))
+ assert len(batches) == 2
+ assert all(len(batch) == 1 for batch in batches)
+ assert all(
+ re.match(r".*tap-test--stream-test-.*\.parquet.gz", filepath)
+ for batch in batches
+ for filepath in batch
+ )
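`Batcher` now dispatches to pluggable encoders by format name, with the JSONL and Parquet implementations living under `singer_sdk.contrib`; unrecognized names raise the `ValueError` covered above. A dispatch sketch, assuming `get_batcher` returns the registered batcher class for a format name:

```python
# Dispatch sketch: the encoding's `format` string selects the batcher.
from singer_sdk.batch import Batcher

jsonl_batcher_cls = Batcher.get_batcher("jsonl")      # JSONLinesBatcher
parquet_batcher_cls = Batcher.get_batcher("parquet")  # ParquetBatcher
```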
diff --git a/tests/core/test_connector_sql.py b/tests/core/test_connector_sql.py
index 1c04dbcdd..5175465ff 100644
--- a/tests/core/test_connector_sql.py
+++ b/tests/core/test_connector_sql.py
@@ -1,14 +1,21 @@
from __future__ import annotations
+import sys
+import typing as t
+from decimal import Decimal
from unittest import mock
import pytest
-import sqlalchemy
-from sqlalchemy.dialects import sqlite
+import sqlalchemy as sa
+from sqlalchemy.dialects import registry, sqlite
+from sqlalchemy.exc import NoSuchModuleError
from singer_sdk.connectors import SQLConnector
from singer_sdk.exceptions import ConfigValidationError
+if t.TYPE_CHECKING:
+ from sqlalchemy.engine import Engine
+
def stringify(in_dict):
return {k: str(v) for k, v in in_dict.items()}
@@ -29,14 +36,14 @@ def connector(self):
{
"table_name": "full.table.name",
"column_name": "column_name",
- "column_type": sqlalchemy.types.Text(),
+ "column_type": sa.types.Text(),
},
{
"table_name": "full.table.name",
- "create_column_clause": sqlalchemy.schema.CreateColumn(
- sqlalchemy.Column(
+ "create_column_clause": sa.schema.CreateColumn(
+ sa.Column(
"column_name",
- sqlalchemy.types.Text(),
+ sa.types.Text(),
),
),
},
@@ -63,12 +70,12 @@ def connector(self):
{
"table_name": "full.table.name",
"column_name": "column_name",
- "column_type": sqlalchemy.types.String(),
+ "column_type": sa.types.String(),
},
{
"table_name": "full.table.name",
"column_name": "column_name",
- "column_type": sqlalchemy.types.String(),
+ "column_type": sa.types.String(),
},
"ALTER TABLE %(table_name)s ALTER COLUMN %(column_name)s (%(column_type)s)", # noqa: E501
"ALTER TABLE full.table.name ALTER COLUMN column_name (VARCHAR)",
@@ -101,7 +108,7 @@ def test_get_column_ddl(
def test_remove_collation_text_type(self):
remove_collation = SQLConnector.remove_collation
test_collation = "SQL_Latin1_General_CP1_CI_AS"
- current_type = sqlalchemy.types.Text(collation=test_collation)
+ current_type = sa.types.Text(collation=test_collation)
current_type_collation = remove_collation(current_type)
# Check collation was set to None by the function
assert current_type.collation is None
@@ -110,7 +117,7 @@ def test_remove_collation_text_type(self):
def test_remove_collation_non_text_type(self):
remove_collation = SQLConnector.remove_collation
- current_type = sqlalchemy.types.Integer()
+ current_type = sa.types.Integer()
current_type_collation = remove_collation(current_type)
# Check there is not a collation attribute
assert not hasattr(current_type, "collation")
@@ -122,7 +129,7 @@ def test_remove_collation_non_text_type(self):
def test_update_collation_text_type(self):
update_collation = SQLConnector.update_collation
test_collation = "SQL_Latin1_General_CP1_CI_AS"
- compatible_type = sqlalchemy.types.Text(collation=None)
+ compatible_type = sa.types.Text(collation=None)
update_collation(compatible_type, test_collation)
# Check collation was set to the value we put in
assert compatible_type.collation == test_collation
@@ -130,7 +137,7 @@ def test_update_collation_text_type(self):
def test_update_collation_non_text_type(self):
update_collation = SQLConnector.update_collation
test_collation = "SQL_Latin1_General_CP1_CI_AS"
- compatible_type = sqlalchemy.types.Integer()
+ compatible_type = sa.types.Integer()
update_collation(compatible_type, test_collation)
# Check there is not a collation attribute
assert not hasattr(compatible_type, "collation")
@@ -173,9 +180,9 @@ def test_connect_calls_connect(self, connector):
def test_connect_raises_on_operational_failure(self, connector):
with pytest.raises(
- sqlalchemy.exc.OperationalError,
+ sa.exc.OperationalError,
) as _, connector._connect() as conn:
- conn.execute(sqlalchemy.text("SELECT * FROM fake_table"))
+ conn.execute(sa.text("SELECT * FROM fake_table"))
def test_rename_column_uses_connect_correctly(self, connector):
attached_engine = connector._engine
@@ -198,30 +205,30 @@ def test_dialect_uses_engine(self, connector):
res = connector._dialect
assert res == attached_engine.dialect
- def test_merge_sql_types_text_current_max(self, connector):
- current_type = sqlalchemy.types.VARCHAR(length=None)
- sql_type = sqlalchemy.types.VARCHAR(length=255)
+ def test_merge_sql_types_text_current_max(self, connector: SQLConnector):
+ current_type = sa.types.VARCHAR(length=None)
+ sql_type = sa.types.VARCHAR(length=255)
compatible_sql_type = connector.merge_sql_types([current_type, sql_type])
# Check that the current VARCHAR(MAX) type is kept
assert compatible_sql_type is current_type
- def test_merge_sql_types_text_current_greater_than(self, connector):
- current_type = sqlalchemy.types.VARCHAR(length=255)
- sql_type = sqlalchemy.types.VARCHAR(length=64)
+ def test_merge_sql_types_text_current_greater_than(self, connector: SQLConnector):
+ current_type = sa.types.VARCHAR(length=255)
+ sql_type = sa.types.VARCHAR(length=64)
compatible_sql_type = connector.merge_sql_types([current_type, sql_type])
# Check the current greater VARCHAR(255) is kept
assert compatible_sql_type is current_type
def test_merge_sql_types_text_proposed_max(self, connector):
- current_type = sqlalchemy.types.VARCHAR(length=64)
- sql_type = sqlalchemy.types.VARCHAR(length=None)
+ current_type = sa.types.VARCHAR(length=64)
+ sql_type = sa.types.VARCHAR(length=None)
compatible_sql_type = connector.merge_sql_types([current_type, sql_type])
# Check the current VARCHAR(64) is chosen over default VARCHAR(max)
assert compatible_sql_type is current_type
def test_merge_sql_types_text_current_less_than(self, connector):
- current_type = sqlalchemy.types.VARCHAR(length=64)
- sql_type = sqlalchemy.types.VARCHAR(length=255)
+ current_type = sa.types.VARCHAR(length=64)
+ sql_type = sa.types.VARCHAR(length=255)
compatible_sql_type = connector.merge_sql_types([current_type, sql_type])
# Check that VARCHAR(255) is chosen over the lesser current VARCHAR(64)
assert compatible_sql_type is sql_type
@@ -230,22 +237,22 @@ def test_merge_sql_types_text_current_less_than(self, connector):
"types,expected_type",
[
pytest.param(
- [sqlalchemy.types.Integer(), sqlalchemy.types.Numeric()],
- sqlalchemy.types.Integer,
+ [sa.types.Integer(), sa.types.Numeric()],
+ sa.types.Integer,
id="integer-numeric",
),
pytest.param(
- [sqlalchemy.types.Numeric(), sqlalchemy.types.Integer()],
- sqlalchemy.types.Numeric,
+ [sa.types.Numeric(), sa.types.Integer()],
+ sa.types.Numeric,
id="numeric-integer",
),
pytest.param(
[
- sqlalchemy.types.Integer(),
- sqlalchemy.types.String(),
- sqlalchemy.types.Numeric(),
+ sa.types.Integer(),
+ sa.types.String(),
+ sa.types.Numeric(),
],
- sqlalchemy.types.String,
+ sa.types.String,
id="integer-string-numeric",
),
],
@@ -253,8 +260,123 @@ def test_merge_sql_types_text_current_less_than(self, connector):
def test_merge_generic_sql_types(
self,
connector: SQLConnector,
- types: list[sqlalchemy.types.TypeEngine],
- expected_type: type[sqlalchemy.types.TypeEngine],
+ types: list[sa.types.TypeEngine],
+ expected_type: type[sa.types.TypeEngine],
):
merged_type = connector.merge_sql_types(types)
assert isinstance(merged_type, expected_type)
+
+ def test_engine_json_serialization(self, connector: SQLConnector):
+ engine = connector._engine
+ meta = sa.MetaData()
+ table = sa.Table(
+ "test_table",
+ meta,
+ sa.Column("id", sa.Integer, primary_key=True),
+ sa.Column("attrs", sa.JSON),
+ )
+ meta.create_all(engine)
+ with engine.connect() as conn:
+ conn.execute(
+ table.insert(),
+ [
+ {"attrs": {"x": Decimal("1.0")}},
+ {"attrs": {"x": Decimal("2.0"), "y": [1, 2, 3]}},
+ ],
+ )
+ result = conn.execute(table.select())
+ assert result.fetchall() == [
+ (1, {"x": Decimal("1.0")}),
+ (2, {"x": Decimal("2.0"), "y": [1, 2, 3]}),
+ ]
+
+
+class DuckDBConnector(SQLConnector):
+ allow_column_alter = True
+
+ @staticmethod
+ def get_column_alter_ddl(
+ table_name: str,
+ column_name: str,
+ column_type: sa.types.TypeEngine,
+ ) -> sa.DDL:
+ return sa.DDL(
+ "ALTER TABLE %(table_name)s ALTER COLUMN %(column_name)s TYPE %(column_type)s", # noqa: E501
+ {
+ "table_name": table_name,
+ "column_name": column_name,
+ "column_type": column_type,
+ },
+ )
+
+
+@pytest.mark.xfail(
+ reason="DuckDB does not build on Python 3.12 yet",
+ condition=sys.version_info >= (3, 12),
+ raises=NoSuchModuleError,
+)
+class TestDuckDBConnector:
+ @pytest.fixture
+ def connector(self):
+ return DuckDBConnector(config={"sqlalchemy_url": "duckdb:///"})
+
+ def test_create_schema(self, connector: DuckDBConnector):
+ engine = connector._engine
+ connector.create_schema("test_schema")
+ inspector = sa.inspect(engine)
+ assert "memory.test_schema" in inspector.get_schema_names()
+
+ def test_column_rename(self, connector: DuckDBConnector):
+ engine = connector._engine
+ meta = sa.MetaData()
+ _ = sa.Table(
+ "test_table",
+ meta,
+ sa.Column("id", sa.Integer),
+ sa.Column("old_name", sa.String),
+ )
+ meta.create_all(engine)
+
+ connector.rename_column("test_table", "old_name", "new_name")
+
+ with engine.connect() as conn:
+ result = conn.execute(
+ sa.text("SELECT * FROM test_table"),
+ )
+ assert result.keys() == ["id", "new_name"]
+
+ def test_adapt_column_type(self, connector: DuckDBConnector):
+ connector.allow_column_alter = True
+ engine = connector._engine
+ meta = sa.MetaData()
+ _ = sa.Table(
+ "test_table",
+ meta,
+ sa.Column("id", sa.Integer),
+ sa.Column("name", sa.Integer),
+ )
+ meta.create_all(engine)
+
+ connector._adapt_column_type("test_table", "name", sa.types.String())
+
+ with engine.connect() as conn:
+ result = conn.execute(
+ sa.text("SELECT * FROM test_table"),
+ )
+ assert result.keys() == ["id", "name"]
+ assert result.cursor.description[1][1] == "STRING"
+
+
+def test_adapter_without_json_serde():
+ registry.register(
+ "myrdbms",
+ "samples.sample_custom_sql_adapter.connector",
+ "CustomSQLDialect",
+ )
+
+ class CustomConnector(SQLConnector):
+ def create_engine(self) -> Engine:
+ return super().create_engine()
+
+ connector = CustomConnector(config={"sqlalchemy_url": "myrdbms:///"})
+ connector.create_engine()
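`test_engine_json_serialization` matters because stock `json.dumps` cannot handle the `Decimal` values Singer records routinely carry, so the engine must be created with a serializer that can:

```python
# What the engine-level JSON serializer has to cover that stdlib json does not.
import json
from decimal import Decimal

try:
    json.dumps({"x": Decimal("1.0")})
except TypeError as e:
    print(e)  # Object of type Decimal is not JSON serializable
```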
diff --git a/tests/core/test_io.py b/tests/core/test_io.py
index c8de02447..0fcce614b 100644
--- a/tests/core/test_io.py
+++ b/tests/core/test_io.py
@@ -3,12 +3,14 @@
from __future__ import annotations
import decimal
+import itertools
import json
from contextlib import nullcontext
import pytest
-from singer_sdk.io_base import SingerReader
+from singer_sdk._singerlib import RecordMessage
+from singer_sdk.io_base import SingerReader, SingerWriter
class DummyReader(SingerReader):
@@ -38,7 +40,7 @@ def _process_state_message(self, message_dict: dict) -> None:
id="unparsable",
),
pytest.param(
- '{"type": "RECORD", "stream": "users", "record": {"id": 1, "value": 1.23}}', # noqa: E501
+ '{"type": "RECORD", "stream": "users", "record": {"id": 1, "value": 1.23}}',
{
"type": "RECORD",
"stream": "users",
@@ -53,3 +55,59 @@ def test_deserialize(line, expected, exception):
reader = DummyReader()
with exception:
assert reader.deserialize_json(line) == expected
+
+
+# Benchmark Tests
+
+
+@pytest.fixture
+def bench_record():
+ return {
+ "stream": "users",
+ "record": {
+ "Id": 1,
+ "created_at": "2021-01-01T00:08:00-07:00",
+ "updated_at": "2022-01-02T00:09:00-07:00",
+ "deleted_at": "2023-01-03T00:10:00-07:00",
+ "value": 1.23,
+ "RelatedtId": 32412,
+ "TypeId": 1,
+ },
+ "time_extracted": "2023-01-01T11:00:00.00000-07:00",
+ }
+
+
+@pytest.fixture
+def bench_record_message(bench_record):
+ return RecordMessage.from_dict(bench_record)
+
+
+@pytest.fixture
+def bench_encoded_record(bench_record):
+ return json.dumps(bench_record)
+
+
+def test_bench_format_message(benchmark, bench_record_message):
+ """Run benchmark for Sink._validator method validate."""
+ number_of_runs = 1000
+
+ writer = SingerWriter()
+
+ def run_format_message():
+ for record in itertools.repeat(bench_record_message, number_of_runs):
+ writer.format_message(record)
+
+ benchmark(run_format_message)
+
+
+def test_bench_deserialize_json(benchmark, bench_encoded_record):
+ """Run benchmark for Sink._validator method validate."""
+ number_of_runs = 1000
+
+ reader = DummyReader()
+
+ def run_deserialize_json():
+ for record in itertools.repeat(bench_encoded_record, number_of_runs):
+ reader.deserialize_json(record)
+
+ benchmark(run_deserialize_json)
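Both benchmarks use pytest-benchmark's `benchmark` fixture and amortize per-call overhead by repeating the operation 1000 times inside the timed closure, the same shape as the sink benchmarks in `tests/core/sinks/test_validation.py`:

```python
# The shared benchmark shape (pytest-benchmark fixture).
import itertools


def test_bench_example(benchmark):
    def run():
        for item in itertools.repeat("payload", 1000):
            len(item)  # stand-in for the operation under test

    benchmark(run)
```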
diff --git a/tests/core/test_jsonschema_helpers.py b/tests/core/test_jsonschema_helpers.py
index 3e4ba6eca..8438a6168 100644
--- a/tests/core/test_jsonschema_helpers.py
+++ b/tests/core/test_jsonschema_helpers.py
@@ -4,6 +4,7 @@
import re
import typing as t
+from logging import WARNING
from textwrap import dedent
import pytest
@@ -26,6 +27,7 @@
)
from singer_sdk.tap_base import Tap
from singer_sdk.typing import (
+ AnyType,
ArrayType,
BooleanType,
CustomType,
@@ -130,6 +132,26 @@ def test_to_json():
)
+def test_any_type(caplog: pytest.LogCaptureFixture):
+ schema = PropertiesList(
+ Property("any_type", AnyType, description="Can be anything"),
+ )
+ with caplog.at_level(WARNING):
+ assert schema.to_dict() == {
+ "type": "object",
+ "properties": {
+ "any_type": {
+ "description": "Can be anything",
+ },
+ },
+ }
+ assert caplog.records[0].levelname == "WARNING"
+ assert caplog.records[0].message == (
+ "Could not append type because the JSON schema for the dictionary `{}` "
+ "appears to be invalid."
+ )
+
+
def test_nested_complex_objects():
test1a = Property(
"Datasets",
@@ -490,7 +512,7 @@ def test_property_creation(
property_dict = property_obj.to_dict()
assert property_dict == expected_jsonschema
for check_fn in TYPE_FN_CHECKS:
- property_name = list(property_dict.keys())[0]
+ property_name = next(iter(property_dict.keys()))
property_node = property_dict[property_name]
if check_fn in type_fn_checks_true:
assert (
diff --git a/tests/core/test_mapper.py b/tests/core/test_mapper.py
index 036d7586a..10f65cf8e 100644
--- a/tests/core/test_mapper.py
+++ b/tests/core/test_mapper.py
@@ -3,14 +3,16 @@
from __future__ import annotations
import copy
+import datetime
import io
import json
import logging
import typing as t
from contextlib import redirect_stdout
+from decimal import Decimal
import pytest
-from freezegun import freeze_time
+import time_machine
from singer_sdk._singerlib import Catalog
from singer_sdk.exceptions import MapExpressionError
@@ -19,8 +21,12 @@
from singer_sdk.streams.core import Stream
from singer_sdk.tap_base import Tap
from singer_sdk.typing import (
+ ArrayType,
+ BooleanType,
IntegerType,
+ NumberType,
ObjectType,
+ OneOf,
PropertiesList,
Property,
StringType,
@@ -52,6 +58,18 @@ def sample_catalog_dict() -> dict:
Property("the", StringType),
Property("brown", StringType),
).to_dict()
+ nested_jellybean_schema = PropertiesList(
+ Property("id", IntegerType),
+ Property(
+ "custom_fields",
+ ArrayType(
+ ObjectType(
+ Property("id", IntegerType),
+ Property("value", OneOf(StringType, IntegerType, BooleanType)),
+ ),
+ ),
+ ),
+ ).to_dict()
return {
"streams": [
{
@@ -64,6 +82,11 @@ def sample_catalog_dict() -> dict:
"tap_stream_id": "foobars",
"schema": foobars_schema,
},
+ {
+ "stream": "nested_jellybean",
+ "tap_stream_id": "nested_jellybean",
+ "schema": nested_jellybean_schema,
+ },
],
}
@@ -106,6 +129,24 @@ def sample_stream():
{"the": "quick"},
{"brown": "fox"},
],
+ "nested_jellybean": [
+ {
+ "id": 123,
+ "custom_fields": [
+ {"id": 1, "value": "abc"},
+ {"id": 2, "value": 1212},
+ {"id": 3, "value": None},
+ ],
+ },
+ {
+ "id": 124,
+ "custom_fields": [
+ {"id": 1, "value": "foo"},
+ {"id": 2, "value": 9009},
+ {"id": 3, "value": True},
+ ],
+ },
+ ],
}
@@ -114,6 +155,19 @@ def sample_stream():
@pytest.fixture
def transform_stream_maps():
+ nested_jellybean_custom_field_1 = (
+ 'dict([(x["id"], x["value"]) for x in custom_fields]).get(1)'
+ )
+ nested_jellybean_custom_field_2 = (
+ 'int(dict([(x["id"], x["value"]) for x in custom_fields]).get(2)) '
+ 'if dict([(x["id"], x["value"]) for x in custom_fields]).get(2) '
+ "else None"
+ )
+ nested_jellybean_custom_field_3 = (
+ 'bool(dict([(x["id"], x["value"]) for x in custom_fields]).get(3)) '
+ 'if dict([(x["id"], x["value"]) for x in custom_fields]).get(3) '
+ "else None"
+ )
return {
"repositories": {
"repo_name": "_['name']",
@@ -125,6 +179,12 @@ def transform_stream_maps():
"int_test": "int('0')",
"__else__": None,
},
+ "nested_jellybean": {
+ "custom_fields": "__NULL__",
+ "custom_field_1": nested_jellybean_custom_field_1,
+ "custom_field_2": nested_jellybean_custom_field_2,
+ "custom_field_3": nested_jellybean_custom_field_3,
+ },
}
@@ -181,6 +241,20 @@ def transformed_result(stream_map_config):
{"the": "quick"},
{"brown": "fox"},
],
+ "nested_jellybean": [
+ {
+ "id": 123,
+ "custom_field_1": "abc",
+ "custom_field_2": 1212,
+ "custom_field_3": None,
+ },
+ {
+ "id": 124,
+ "custom_field_1": "foo",
+ "custom_field_2": 9009,
+ "custom_field_3": True,
+ },
+ ],
}
@@ -200,6 +274,12 @@ def transformed_schemas():
Property("the", StringType),
Property("brown", StringType),
).to_dict(),
+ "nested_jellybean": PropertiesList(
+ Property("id", IntegerType),
+ Property("custom_field_1", StringType),
+ Property("custom_field_2", IntegerType),
+ Property("custom_field_3", BooleanType),
+ ).to_dict(),
}
@@ -354,17 +434,15 @@ def test_filter_transforms_w_error(
)
-def _test_transform(
- test_name: str,
+def _run_transform(
*,
stream_maps,
stream_map_config,
- expected_result,
- expected_schemas,
sample_stream,
sample_catalog_obj,
):
output: dict[str, list[dict]] = {}
+ output_schemas = {}
mapper = PluginMapper(
plugin_config={
"stream_maps": stream_maps,
@@ -379,15 +457,7 @@ def _test_transform(
if isinstance(stream_map, RemoveRecordTransform):
logging.info("Skipping ignored stream '%s'", stream_name)
continue
-
- assert (
- expected_schemas[stream_map.stream_alias]
- == stream_map.transformed_schema
- ), (
- f"Failed '{test_name}' schema test. Generated schema was "
- f"{json.dumps(stream_map.transformed_schema, indent=2)}"
- )
-
+ output_schemas[stream_map.stream_alias] = stream_map.transformed_schema
output[stream_map.stream_alias] = []
for record in stream:
result = stream_map.transform(record)
@@ -396,6 +466,39 @@ def _test_transform(
continue
output[stream_map.stream_alias].append(result)
+ return output, output_schemas
+
+
+def _test_transform(
+ test_name: str,
+ *,
+ stream_maps,
+ stream_map_config,
+ expected_result,
+ expected_schemas,
+ sample_stream,
+ sample_catalog_obj,
+):
+ output, output_schemas = _run_transform(
+ stream_maps=stream_maps,
+ stream_map_config=stream_map_config,
+ sample_stream=sample_stream,
+ sample_catalog_obj=sample_catalog_obj,
+ )
+
+ assert set(expected_schemas.keys()) == set(output_schemas.keys()), (
+ f"Failed `{test_name}` schema test. "
+ f"'{set(expected_schemas.keys()) - set(output_schemas.keys())}' "
+ "schemas not found. "
+ f"'{set(output_schemas.keys()) - set(expected_schemas.keys())}' "
+ "schemas not expected. "
+ )
+ for expected_schema_name, expected_schema in expected_schemas.items():
+ output_schema = output_schemas[expected_schema_name]
+ assert expected_schema == output_schema, (
+ f"Failed '{test_name}' schema test. Generated schema was "
+ f"{json.dumps(output_schema, indent=2)}"
+ )
assert expected_result == output, (
f"Failed '{test_name}' record result test. "
@@ -415,6 +518,7 @@ class MappedStream(Stream):
ObjectType(
Property("id", IntegerType()),
Property("sub", ObjectType(Property("num", IntegerType()))),
+ Property("some_numbers", ArrayType(NumberType())),
),
),
).to_dict()
@@ -423,17 +527,29 @@ def get_records(self, context): # noqa: ARG002
yield {
"email": "alice@example.com",
"count": 21,
- "user": {"id": 1, "sub": {"num": 1}},
+ "user": {
+ "id": 1,
+ "sub": {"num": 1},
+ "some_numbers": [Decimal("3.14"), Decimal("2.718")],
+ },
}
yield {
"email": "bob@example.com",
"count": 13,
- "user": {"id": 2, "sub": {"num": 2}},
+ "user": {
+ "id": 2,
+ "sub": {"num": 2},
+ "some_numbers": [Decimal("10.32"), Decimal("1.618")],
+ },
}
yield {
"email": "charlie@example.com",
"count": 19,
- "user": {"id": 3, "sub": {"num": 3}},
+ "user": {
+ "id": 3,
+ "sub": {"num": 3},
+ "some_numbers": [Decimal("1.414"), Decimal("1.732")],
+ },
}
@@ -454,7 +570,10 @@ def _clear_schema_cache() -> None:
get_selected_schema.cache_clear()
-@freeze_time("2022-01-01T00:00:00Z")
+@time_machine.travel(
+ datetime.datetime(2022, 1, 1, tzinfo=datetime.timezone.utc),
+ tick=False,
+)
@pytest.mark.snapshot()
@pytest.mark.usefixtures("_clear_schema_cache")
@pytest.mark.parametrize(
@@ -545,6 +664,13 @@ def _clear_schema_cache() -> None:
"aliased_stream.jsonl",
id="aliased_stream",
),
+ pytest.param(
+ {},
+ True,
+ 0,
+ "flatten_depth_0.jsonl",
+ id="flatten_depth_0",
+ ),
pytest.param(
{},
True,
@@ -601,6 +727,18 @@ def _clear_schema_cache() -> None:
"non_pk_passthrough.jsonl",
id="non_pk_passthrough",
),
+ pytest.param(
+ {
+ "mystream": {
+ "_data": "record",
+ "__else__": None,
+ },
+ },
+ False,
+ 0,
+ "record_to_column.jsonl",
+ id="record_to_column",
+ ),
],
)
def test_mapped_stream(
@@ -626,3 +764,37 @@ def test_mapped_stream(
buf.seek(0)
snapshot.assert_match(buf.read(), snapshot_name)
+
+
+def test_bench_simple_map_transforms(
+ benchmark,
+ sample_stream,
+ sample_catalog_dict,
+ transform_stream_maps,
+ stream_map_config,
+):
+ """Run benchmark tests using the "repositories" stream."""
+ stream_size_scale = 1000
+
+ repositories_catalog = {
+ "streams": [
+ x
+ for x in sample_catalog_dict["streams"]
+ if x["tap_stream_id"] == "repositories"
+ ],
+ }
+
+ repositories_sample_stream = {
+ "repositories": sample_stream["repositories"] * stream_size_scale,
+ }
+ repositories_transform_stream_maps = {
+ "repositories": transform_stream_maps["repositories"],
+ }
+ repositories_sample_catalog_obj = Catalog.from_dict(repositories_catalog)
+ benchmark(
+ _run_transform,
+ stream_maps=repositories_transform_stream_maps,
+ stream_map_config=stream_map_config,
+ sample_stream=repositories_sample_stream,
+ sample_catalog_obj=repositories_sample_catalog_obj,
+ )
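For reference, the `nested_jellybean` map expressions flatten an id/value list into typed columns, and `__NULL__` drops the original property. Evaluated in plain Python against the first sample record, they compute:

```python
# What the custom-field expressions produce for the first sample record.
custom_fields = [
    {"id": 1, "value": "abc"},
    {"id": 2, "value": 1212},
    {"id": 3, "value": None},
]
lookup = dict([(x["id"], x["value"]) for x in custom_fields])
custom_field_1 = lookup.get(1)                                   # 'abc'
custom_field_2 = int(lookup.get(2)) if lookup.get(2) else None   # 1212
custom_field_3 = bool(lookup.get(3)) if lookup.get(3) else None  # None
```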
diff --git a/tests/core/test_mapper_class.py b/tests/core/test_mapper_class.py
new file mode 100644
index 000000000..0f0c1192a
--- /dev/null
+++ b/tests/core/test_mapper_class.py
@@ -0,0 +1,54 @@
+from __future__ import annotations
+
+import json
+from contextlib import nullcontext
+
+import pytest
+from click.testing import CliRunner
+
+from samples.sample_mapper.mapper import StreamTransform
+from singer_sdk.exceptions import ConfigValidationError
+
+
+@pytest.mark.parametrize(
+ "config_dict,expectation,errors",
+ [
+ pytest.param(
+ {},
+ pytest.raises(ConfigValidationError, match="Config validation failed"),
+ ["'stream_maps' is a required property"],
+ id="missing_stream_maps",
+ ),
+ pytest.param(
+ {"stream_maps": {}},
+ nullcontext(),
+ [],
+ id="valid_config",
+ ),
+ ],
+)
+def test_config_errors(config_dict: dict, expectation, errors: list[str]):
+ with expectation as exc:
+ StreamTransform(config=config_dict, validate_config=True)
+
+ if isinstance(exc, pytest.ExceptionInfo):
+ assert exc.value.errors == errors
+
+
+def test_cli_help():
+ """Test the CLI help message."""
+ runner = CliRunner(mix_stderr=False)
+ result = runner.invoke(StreamTransform.cli, ["--help"])
+ assert result.exit_code == 0
+ assert "Show this message and exit." in result.output
+
+
+def test_cli_config_validation(tmp_path):
+ """Test the CLI config validation."""
+ runner = CliRunner(mix_stderr=False)
+ config_path = tmp_path / "config.json"
+ config_path.write_text(json.dumps({}))
+ result = runner.invoke(StreamTransform.cli, ["--config", str(config_path)])
+ assert result.exit_code == 1
+ assert not result.stdout
+ assert "'stream_maps' is a required property" in result.stderr
diff --git a/tests/core/test_simpleeval.py b/tests/core/test_simpleeval.py
deleted file mode 100644
index d5cacb30f..000000000
--- a/tests/core/test_simpleeval.py
+++ /dev/null
@@ -1,1146 +0,0 @@
-"""
-Simpleeval tests originally imported on 2021-09-16 from:
-- https://github.com/danthedeckie/simpleeval
-
-For more information:
-- https://gitlab.com/meltano/sdk/-/issues/213
-
-"""
-from __future__ import annotations
-
-import ast
-import operator
-import os
-
-# flake8: noqa # Ignoring flake errors in imported module
-# pylint: disable=too-many-public-methods, missing-docstring
-import sys
-import unittest
-import warnings
-
-from singer_sdk.helpers import _simpleeval as simpleeval
-from singer_sdk.helpers._simpleeval import (
- AttributeDoesNotExist,
- EvalWithCompoundTypes,
- FeatureNotAvailable,
- FunctionNotDefined,
- InvalidExpression,
- NameNotDefined,
- SimpleEval,
- simple_eval,
-)
-
-
-class DRYTest(unittest.TestCase):
- """Stuff we need to do every test, let's do here instead..
- Don't Repeat Yourself."""
-
- def setUp(self):
- """ initialize a SimpleEval """
- self.s = SimpleEval()
-
- def t(self, expr, shouldbe): # pylint: disable=invalid-name
- """ test an evaluation of an expression against an expected answer """
- return self.assertEqual(self.s.eval(expr), shouldbe)
-
-
-class TestBasic(DRYTest):
- """ Simple expressions. """
-
- def test_maths_with_ints(self):
- """ simple maths expressions """
-
- self.t("21 + 21", 42)
- self.t("6*7", 42)
- self.t("20 + 1 + (10*2) + 1", 42)
- self.t("100/10", 10)
- self.t("12*12", 144)
- self.t("2 ** 10", 1024)
- self.t("100 % 9", 1)
-
- def test_bools_and_or(self):
- self.t('True and ""', "")
- self.t("True and False", False)
- self.t("True or False", True)
- self.t("False or False", False)
- self.t("1 - 1 or 21", 21)
- self.t("1 - 1 and 11", 0)
- self.t("110 == 100 + 10 and True", True)
- self.t("110 != 100 + 10 and True", False)
- self.t("False or 42", 42)
-
- self.t("False or None", None)
- self.t("None or None", None)
-
- self.s.names = {"out": True, "position": 3}
- self.t(
- "(out and position <=6 and -10)"
- " or (out and position > 6 and -5)"
- " or (not out and 15)",
- -10,
- )
-
- def test_not(self):
- self.t("not False", True)
- self.t("not True", False)
- self.t("not 0", True)
- self.t("not 1", False)
-
- def test_maths_with_floats(self):
- self.t("11.02 - 9.1", 1.92)
- self.t("29.1+39", 68.1)
-
- def test_comparisons(self):
- # GT & LT:
- self.t("1 > 0", True)
- self.t("100000 < 28", False)
- self.t("-2 < 11", True)
- self.t("+2 < 5", True)
- self.t("0 == 0", True)
-
- # GtE, LtE
- self.t("-2 <= -2", True)
- self.t("2 >= 2", True)
- self.t("1 >= 12", False)
- self.t("1.09 <= 1967392", True)
-
- self.t("1 < 2 < 3 < 4", 1 < 2 < 3 < 4)
- self.t("1 < 2 > 3 < 4", 1 < 2 > 3 < 4)
-
- self.t("1<2<1+1", 1 < 2 < 1 + 1)
- self.t("1 == 1 == 2", 1 == 1 == 2)
- self.t("1 == 1 < 2", 1 == 1 < 2)
-
- def test_mixed_comparisons(self):
- self.t("1 > 0.999999", True)
- self.t("1 == True", True) # Note ==, not 'is'.
- self.t("0 == False", True) # Note ==, not 'is'.
- self.t("False == False", True)
- self.t("False < True", True)
-
- def test_if_else(self):
- """ x if y else z """
-
- # and test if/else expressions:
- self.t("'a' if 1 == 1 else 'b'", "a")
- self.t("'a' if 1 > 2 else 'b'", "b")
-
- # and more complex expressions:
- self.t("'a' if 4 < 1 else 'b' if 1 == 2 else 'c'", "c")
-
- def test_default_conversions(self):
- """ conversion between types """
-
- self.t('int("20") + int(0.22*100)', 42)
- self.t('float("42")', 42.0)
- self.t('"Test Stuff!" + str(11)', "Test Stuff!11")
-
- def test_slicing(self):
- self.s.operators[ast.Slice] = (
- operator.getslice if hasattr(operator, "getslice") else operator.getitem
- )
- self.t("'hello'[1]", "e")
- self.t("'hello'[:]", "hello")
- self.t("'hello'[:3]", "hel")
- self.t("'hello'[3:]", "lo")
- self.t("'hello'[::2]", "hlo")
- self.t("'hello'[::-1]", "olleh")
- self.t("'hello'[3::]", "lo")
- self.t("'hello'[:3:]", "hel")
- self.t("'hello'[1:3]", "el")
- self.t("'hello'[1:3:]", "el")
- self.t("'hello'[1::2]", "el")
- self.t("'hello'[:1:2]", "h")
- self.t("'hello'[1:3:1]", "el")
- self.t("'hello'[1:3:2]", "e")
-
- with self.assertRaises(IndexError):
- self.t("'hello'[90]", 0)
-
- self.t('"spam" not in "my breakfast"', True)
- self.t('"silly" in "ministry of silly walks"', True)
- self.t('"I" not in "team"', True)
- self.t('"U" in "RUBBISH"', True)
-
- def test_is(self):
- self.t("1 is 1", True)
- self.t("1 is 2", False)
- self.t('1 is "a"', False)
- self.t("1 is None", False)
- self.t("None is None", True)
-
- self.t("1 is not 1", False)
- self.t("1 is not 2", True)
- self.t('1 is not "a"', True)
- self.t("1 is not None", True)
- self.t("None is not None", False)
-
- def test_fstring(self):
- if sys.version_info >= (3, 6, 0):
- self.t('f""', "")
- self.t('f"stuff"', "stuff")
- self.t('f"one is {1} and two is {2}"', "one is 1 and two is 2")
- self.t('f"1+1 is {1+1}"', "1+1 is 2")
- self.t("f\"{'dramatic':!<11}\"", "dramatic!!!")
-
- def test_set_not_allowed(self):
- with self.assertRaises(FeatureNotAvailable):
- self.t("{22}", False)
-
-
-class TestFunctions(DRYTest):
- """ Functions for expressions to play with """
-
- def test_load_file(self):
- """ add in a function which loads data from an external file. """
-
- # write to the file:
-
- with open("testfile.txt", "w") as f:
- f.write("42")
-
- # define the function we'll send to the eval'er
-
- def load_file(filename):
- """ load a file and return its contents """
- with open(filename) as f2:
- return f2.read()
-
- # simple load:
-
- self.s.functions = {"read": load_file}
- self.t("read('testfile.txt')", "42")
-
- # and we should have *replaced* the default functions. Let's check:
-
- with self.assertRaises(simpleeval.FunctionNotDefined):
- self.t("int(read('testfile.txt'))", 42)
-
- # OK, so we can load in the default functions as well...
-
- self.s.functions.update(simpleeval.DEFAULT_FUNCTIONS)
-
- # now it works:
-
- self.t("int(read('testfile.txt'))", 42)
-
- os.remove("testfile.txt")
-
- def test_randoms(self):
- """ test the rand() and randint() functions """
-
- i = self.s.eval("randint(1000)")
- self.assertEqual(type(i), int)
- self.assertLessEqual(i, 1000)
-
- f = self.s.eval("rand()")
- self.assertEqual(type(f), float)
-
- self.t("randint(20)<20", True)
- self.t("rand()<1.0", True)
-
- # I don't know how to further test these functions. Ideas?
-
- def test_methods(self):
- self.t('"WORD".lower()', "word")
- x = simpleeval.DISALLOW_METHODS
- simpleeval.DISALLOW_METHODS = []
- self.t('"{}:{}".format(1, 2)', "1:2")
- simpleeval.DISALLOW_METHODS = x
-
- def test_function_args_none(self):
- def foo():
- return 42
-
- self.s.functions["foo"] = foo
- self.t("foo()", 42)
-
- def test_function_args_required(self):
- def foo(toret):
- return toret
-
- self.s.functions["foo"] = foo
- with self.assertRaises(TypeError):
- self.t("foo()", 42)
-
- self.t("foo(12)", 12)
- self.t("foo(toret=100)", 100)
-
- def test_function_args_defaults(self):
- def foo(toret=9999):
- return toret
-
- self.s.functions["foo"] = foo
- self.t("foo()", 9999)
-
- self.t("foo(12)", 12)
- self.t("foo(toret=100)", 100)
-
- def test_function_args_bothtypes(self):
- def foo(mult, toret=100):
- return toret * mult
-
- self.s.functions["foo"] = foo
- with self.assertRaises(TypeError):
- self.t("foo()", 9999)
-
- self.t("foo(2)", 200)
-
- with self.assertRaises(TypeError):
- self.t("foo(toret=100)", 100)
-
- self.t("foo(4, toret=4)", 16)
- self.t("foo(mult=2, toret=4)", 8)
- self.t("foo(2, 10)", 20)
-
-
-class TestOperators(DRYTest):
- """ Test adding in new operators, removing them, make sure it works. """
-
- # TODO
- pass
-
-
-class TestNewFeatures(DRYTest):
- """ Tests which will break when new features are added..."""
-
- def test_lambda(self):
- with self.assertRaises(FeatureNotAvailable):
- self.t("lambda x:22", None)
-
- def test_lambda_application(self):
- with self.assertRaises(FeatureNotAvailable):
- self.t("(lambda x:22)(44)", None)
-
-
-class TestTryingToBreakOut(DRYTest):
- """ Test various weird methods to break the security sandbox... """
-
- def test_import(self):
- """ usual suspect. import """
- # cannot import things:
- with self.assertRaises(FeatureNotAvailable):
- self.t("import sys", None)
-
- def test_long_running(self):
- """ exponent operations can take a long time. """
- old_max = simpleeval.MAX_POWER
-
- self.t("9**9**5", 9 ** 9 ** 5)
-
- with self.assertRaises(simpleeval.NumberTooHigh):
- self.t("9**9**8", 0)
-
- # and does limiting work?
-
- simpleeval.MAX_POWER = 100
-
- with self.assertRaises(simpleeval.NumberTooHigh):
- self.t("101**2", 0)
-
- # good, so set it back:
-
- simpleeval.MAX_POWER = old_max
-
- def test_encode_bignums(self):
- # thanks gk
- if hasattr(1, "from_bytes"): # python3 only
- with self.assertRaises(simpleeval.IterableTooLong):
- self.t(
- '(1).from_bytes(("123123123123123123123123").encode()*999999, "big")',
- 0,
- )
-
- def test_string_length(self):
- with self.assertRaises(simpleeval.IterableTooLong):
- self.t("50000*'text'", 0)
-
- with self.assertRaises(simpleeval.IterableTooLong):
- self.t("'text'*50000", 0)
-
- with self.assertRaises(simpleeval.IterableTooLong):
- self.t("('text'*50000)*1000", 0)
-
- with self.assertRaises(simpleeval.IterableTooLong):
- self.t("(50000*'text')*1000", 0)
-
- self.t("'stuff'*20000", 20000 * "stuff")
-
- self.t("20000*'stuff'", 20000 * "stuff")
-
- with self.assertRaises(simpleeval.IterableTooLong):
- self.t("('stuff'*20000) + ('stuff'*20000) ", 0)
-
- with self.assertRaises(simpleeval.IterableTooLong):
- self.t("'stuff'*100000", 100000 * "stuff")
-
- with self.assertRaises(simpleeval.IterableTooLong):
- self.t("'" + (10000 * "stuff") + "'*100", 0)
-
- with self.assertRaises(simpleeval.IterableTooLong):
- self.t("'" + (50000 * "stuff") + "'", 0)
-
- if sys.version_info >= (3, 6, 0):
- with self.assertRaises(simpleeval.IterableTooLong):
- self.t("f'{\"foo\"*50000}'", 0)
-
- def test_bytes_array_test(self):
- self.t(
- "'20000000000000000000'.encode() * 5000",
- "20000000000000000000".encode() * 5000,
- )
-
- with self.assertRaises(simpleeval.IterableTooLong):
- self.t("'123121323123131231223'.encode() * 5000", 20)
-
- def test_list_length_test(self):
- self.t("'spam spam spam'.split() * 5000", ["spam", "spam", "spam"] * 5000)
-
- with self.assertRaises(simpleeval.IterableTooLong):
- self.t("('spam spam spam' * 5000).split() * 5000", None)
-
- def test_python_stuff(self):
- """ other various pythony things. """
- # it only evaluates the first statement:
- self.t("11; x = 21; x + x", 11)
-
- def test_function_globals_breakout(self):
- """ by accessing function.__globals__ or func_... """
- # thanks perkinslr.
-
- self.s.functions["x"] = lambda y: y + y
- self.t("x(100)", 200)
-
- with self.assertRaises(simpleeval.FeatureNotAvailable):
- self.t("x.__globals__", None)
-
- class EscapeArtist(object):
- @staticmethod
- def trapdoor():
- return 42
-
- @staticmethod
- def _quasi_private():
- return 84
-
- self.s.names["houdini"] = EscapeArtist()
-
- with self.assertRaises(simpleeval.FeatureNotAvailable):
- self.t("houdini.trapdoor.__globals__", 0)
-
- with self.assertRaises(simpleeval.FeatureNotAvailable):
- self.t("houdini.trapdoor.func_globals", 0)
-
- with self.assertRaises(simpleeval.FeatureNotAvailable):
- self.t("houdini._quasi_private()", 0)
-
- # and test for changing '_' to '__':
-
- dis = simpleeval.DISALLOW_PREFIXES
- simpleeval.DISALLOW_PREFIXES = ["func_"]
-
- self.t("houdini.trapdoor()", 42)
- self.t("houdini._quasi_private()", 84)
-
- # and return things to normal
-
- simpleeval.DISALLOW_PREFIXES = dis
-
- def test_mro_breakout(self):
- class Blah(object):
- x = 42
-
- self.s.names["b"] = Blah
-
- with self.assertRaises(simpleeval.FeatureNotAvailable):
- self.t("b.mro()", None)
-
- def test_builtins_private_access(self):
- # explicit attempt of the exploit from perkinslr
- with self.assertRaises(simpleeval.FeatureNotAvailable):
- self.t(
- "True.__class__.__class__.__base__.__subclasses__()[-1]"
- ".__init__.func_globals['sys'].exit(1)",
- 42,
- )
-
- def test_string_format(self):
- # python has so many ways to break out!
- with self.assertRaises(simpleeval.FeatureNotAvailable):
- self.t('"{string.__class__}".format(string="things")', 0)
-
- with self.assertRaises(simpleeval.FeatureNotAvailable):
- self.s.names["x"] = {"a": 1}
- self.t('"{a.__class__}".format_map(x)', 0)
-
- if sys.version_info >= (3, 6, 0):
- self.s.names["x"] = 42
-
- with self.assertRaises(simpleeval.FeatureNotAvailable):
- self.t('f"{x.__class__}"', 0)
-
- self.s.names["x"] = lambda y: y
-
- with self.assertRaises(simpleeval.FeatureNotAvailable):
- self.t('f"{x.__globals__}"', 0)
-
- class EscapeArtist(object):
- @staticmethod
- def trapdoor():
- return 42
-
- @staticmethod
- def _quasi_private():
- return 84
-
- self.s.names[
- "houdini"
- ] = EscapeArtist() # let's just retest this, but in a f-string
-
- with self.assertRaises(simpleeval.FeatureNotAvailable):
- self.t('f"{houdini.trapdoor.__globals__}"', 0)
-
- with self.assertRaises(simpleeval.FeatureNotAvailable):
- self.t('f"{houdini.trapdoor.func_globals}"', 0)
-
- with self.assertRaises(simpleeval.FeatureNotAvailable):
- self.t('f"{houdini._quasi_private()}"', 0)
-
- # and test for changing '_' to '__':
-
- dis = simpleeval.DISALLOW_PREFIXES
- simpleeval.DISALLOW_PREFIXES = ["func_"]
-
- self.t('f"{houdini.trapdoor()}"', "42")
- self.t('f"{houdini._quasi_private()}"', "84")
-
- # and return things to normal
-
- simpleeval.DISALLOW_PREFIXES = dis
-
-
-class TestCompoundTypes(DRYTest):
- """ Test the compound-types edition of the library """
-
- def setUp(self):
- self.s = EvalWithCompoundTypes()
-
- def test_dict(self):
- self.t("{}", {})
- self.t('{"foo": "bar"}', {"foo": "bar"})
- self.t('{"foo": "bar"}["foo"]', "bar")
- self.t("dict()", {})
- self.t("dict(a=1)", {"a": 1})
-
- def test_dict_contains(self):
- self.t('{"a":22}["a"]', 22)
- with self.assertRaises(KeyError):
- self.t('{"a":22}["b"]', 22)
-
- self.t('{"a": 24}.get("b", 11)', 11)
- self.t('"a" in {"a": 24}', True)
-
- def test_tuple(self):
- self.t("()", ())
- self.t("(1,)", (1,))
- self.t("(1, 2, 3, 4, 5, 6)", (1, 2, 3, 4, 5, 6))
- self.t("(1, 2) + (3, 4)", (1, 2, 3, 4))
- self.t("(1, 2, 3)[1]", 2)
- self.t("tuple()", ())
- self.t('tuple("foo")', ("f", "o", "o"))
-
- def test_tuple_contains(self):
- self.t('("a","b")[1]', "b")
- with self.assertRaises(IndexError):
- self.t('("a","b")[5]', "b")
- self.t('"a" in ("b","c","a")', True)
-
- def test_list(self):
- self.t("[]", [])
- self.t("[1]", [1])
- self.t("[1, 2, 3, 4, 5]", [1, 2, 3, 4, 5])
- self.t("[1, 2, 3][1]", 2)
- self.t("list()", [])
- self.t('list("foo")', ["f", "o", "o"])
-
- def test_list_contains(self):
- self.t('["a","b"][1]', "b")
- with self.assertRaises(IndexError):
- self.t('("a","b")[5]', "b")
-
- self.t('"b" in ["a","b"]', True)
-
- def test_set(self):
- self.t("{1}", {1})
- self.t("{1, 2, 1, 2, 1, 2, 1}", {1, 2})
- self.t("set()", set())
- self.t('set("foo")', {"f", "o"})
-
- self.t("2 in {1,2,3,4}", True)
- self.t("22 not in {1,2,3,4}", True)
-
- def test_not(self):
- self.t("not []", True)
- self.t("not [0]", False)
- self.t("not {}", True)
- self.t("not {0: 1}", False)
- self.t("not {0}", False)
-
- def test_use_func(self):
- self.s = EvalWithCompoundTypes(functions={"map": map, "str": str})
- self.t("list(map(str, [-1, 0, 1]))", ["-1", "0", "1"])
- with self.assertRaises(NameNotDefined):
- self.s.eval("list(map(bad, [-1, 0, 1]))")
-
- with self.assertRaises(FunctionNotDefined):
- self.s.eval("dir(str)")
- with self.assertRaises(FeatureNotAvailable):
- self.s.eval("str.__dict__")
-
- self.s = EvalWithCompoundTypes(functions={"dir": dir, "str": str})
- self.t("dir(str)", dir(str))
-
-
-class TestComprehensions(DRYTest):
- """ Test the comprehensions support of the compound-types edition of the class. """
-
- def setUp(self):
- self.s = EvalWithCompoundTypes()
-
- def test_basic(self):
- self.t("[a + 1 for a in [1,2,3]]", [2, 3, 4])
-
- def test_with_self_reference(self):
- self.t("[a + a for a in [1,2,3]]", [2, 4, 6])
-
- def test_with_if(self):
- self.t("[a for a in [1,2,3,4,5] if a <= 3]", [1, 2, 3])
-
- def test_with_multiple_if(self):
- self.t("[a for a in [1,2,3,4,5] if a <= 3 and a > 1 ]", [2, 3])
-
- def test_attr_access_fails(self):
- with self.assertRaises(FeatureNotAvailable):
- self.t("[a.__class__ for a in [1,2,3]]", None)
-
- def test_unpack(self):
- self.t("[a+b for a,b in ((1,2),(3,4))]", [3, 7])
-
- def test_nested_unpack(self):
- self.t("[a+b+c for a, (b, c) in ((1,(1,1)),(3,(2,2)))]", [3, 7])
-
- def test_other_places(self):
- self.s.functions = {"sum": sum}
- self.t("sum([a+1 for a in [1,2,3,4,5]])", 20)
- self.t("sum(a+1 for a in [1,2,3,4,5])", 20)
-
- def test_external_names_work(self):
- self.s.names = {"x": [22, 102, 12.3]}
- self.t("[a/2 for a in x]", [11.0, 51.0, 6.15])
-
- self.s.names = lambda x: ord(x.id)
- self.t("[a + a for a in [b, c, d]]", [ord(x) * 2 for x in "bcd"])
-
- def test_multiple_generators(self):
- self.s.functions = {"range": range}
- s = "[j for i in range(100) if i > 10 for j in range(i) if j < 20]"
- self.t(s, eval(s))
-
- def test_triple_generators(self):
- self.s.functions = {"range": range}
- s = "[(a,b,c) for a in range(4) for b in range(a) for c in range(b)]"
- self.t(s, eval(s))
-
- def test_too_long_generator(self):
- self.s.functions = {"range": range}
- s = "[j for i in range(1000) if i > 10 for j in range(i) if j < 20]"
- with self.assertRaises(simpleeval.IterableTooLong):
- self.s.eval(s)
-
- def test_too_long_generator_2(self):
- self.s.functions = {"range": range}
- s = "[j for i in range(100) if i > 1 for j in range(i+10) if j < 100 for k in range(i*j)]"
- with self.assertRaises(simpleeval.IterableTooLong):
- self.s.eval(s)
-
- def test_nesting_generators_to_cheat(self):
- self.s.functions = {"range": range}
- s = "[[[c for c in range(a)] for a in range(b)] for b in range(200)]"
-
- with self.assertRaises(simpleeval.IterableTooLong):
- self.s.eval(s)
-
- def test_no_leaking_names(self):
- # see issue #52, failing list comprehensions could leak locals
- with self.assertRaises(simpleeval.NameNotDefined):
- self.s.eval('[x if x == "2" else y for x in "123"]')
-
- with self.assertRaises(simpleeval.NameNotDefined):
- self.s.eval("x")
-
-
-class TestNames(DRYTest):
- """ 'names', what other languages call variables... """
-
- def test_none(self):
- """ what to do when names isn't defined, or is 'none' """
- with self.assertRaises(NameNotDefined):
- self.t("a == 2", None)
-
- self.s.names["s"] = 21
-
- with self.assertRaises(NameNotDefined):
- with warnings.catch_warnings(record=True) as ws:
- self.t("s += a", 21)
-
- self.s.names = None
-
- with self.assertRaises(InvalidExpression):
- self.t("s", 21)
-
- self.s.names = {"a": {"b": {"c": 42}}}
-
- with self.assertRaises(AttributeDoesNotExist):
- self.t("a.b.d**2", 42)
-
- def test_dict(self):
- """ using a normal dict for names lookup """
-
- self.s.names = {"a": 42}
- self.t("a + a", 84)
-
- self.s.names["also"] = 100
-
- self.t("a + also - a", 100)
-
- # however, you can't assign to those names:
- with warnings.catch_warnings(record=True) as ws:
- self.t("a = 200", 200)
-
- self.assertEqual(self.s.names["a"], 42)
-
- # or assign to lists
-
- self.s.names["b"] = [0]
-
- with warnings.catch_warnings(record=True) as ws:
- self.t("b[0] = 11", 11)
-
- self.assertEqual(self.s.names["b"], [0])
-
- # but you can get items from a list:
-
- self.s.names["b"] = [6, 7]
-
- self.t("b[0] * b[1]", 42)
-
- # or from a dict
-
- self.s.names["c"] = {"i": 11}
-
- self.t("c['i']", 11)
- self.t("c.get('i')", 11)
- self.t("c.get('j', 11)", 11)
- self.t("c.get('j')", None)
-
- # you still can't assign though:
-
- with warnings.catch_warnings(record=True) as ws:
- self.t("c['b'] = 99", 99)
-
- self.assertFalse("b" in self.s.names["c"])
-
- # and going all 'inception' on it doesn't work either:
-
- self.s.names["c"]["c"] = {"c": 11}
-
- with warnings.catch_warnings(record=True) as ws:
- self.t("c['c']['c'] = 21", 21)
-
- self.assertEqual(self.s.names["c"]["c"]["c"], 11)
-
- def test_dict_attr_access(self):
- # nested dict
-
- self.assertEqual(self.s.ATTR_INDEX_FALLBACK, True)
-
- self.s.names = {"a": {"b": {"c": 42}}}
-
- self.t("a.b.c*2", 84)
-
- with warnings.catch_warnings(record=True) as ws:
- self.t("a.b.c = 11", 11)
-
- self.assertEqual(self.s.names["a"]["b"]["c"], 42)
-
- # TODO: Wat?
- with warnings.catch_warnings(record=True) as ws:
- self.t("a.d = 11", 11)
-
- with self.assertRaises(KeyError):
- self.assertEqual(self.s.names["a"]["d"], 11)
-
- def test_dict_attr_access_disabled(self):
- # nested dict
-
- self.s.ATTR_INDEX_FALLBACK = False
- self.assertEqual(self.s.ATTR_INDEX_FALLBACK, False)
-
- self.s.names = {"a": {"b": {"c": 42}}}
-
- with self.assertRaises(simpleeval.AttributeDoesNotExist):
- self.t("a.b.c * 2", 84)
-
- self.t("a['b']['c'] * 2", 84)
-
- self.assertEqual(self.s.names["a"]["b"]["c"], 42)
-
- def test_object(self):
- """ using an object for name lookup """
-
- class TestObject(object):
- @staticmethod
- def method_thing():
- return 42
-
- o = TestObject()
- o.a = 23
- o.b = 42
- o.c = TestObject()
- o.c.d = 9001
-
- self.s.names = {"o": o}
-
- self.t("o", o)
- self.t("o.a", 23)
- self.t("o.b + o.c.d", 9043)
-
- self.t("o.method_thing()", 42)
-
- with self.assertRaises(AttributeDoesNotExist):
- self.t("o.d", None)
-
- def test_func(self):
- """ using a function for 'names lookup' """
-
- def resolver(_):
- """ all names now equal 1024! """
- return 1024
-
- self.s.names = resolver
-
- self.t("a", 1024)
- self.t("a + b - c - d", 0)
-
- # the function can do stuff with the value it's sent:
-
- def my_name(node):
- """ all names equal their textual name, twice. """
- return node.id + node.id
-
- self.s.names = my_name
-
- self.t("a", "aa")
-
- def test_from_doc(self):
- """ the 'name first letter as value' example from the docs """
-
- def name_handler(node):
- """return the alphabet number of the first letter of
- the name's textual name"""
- return ord(node.id[0].lower()) - 96
-
- self.s.names = name_handler
- self.t("a", 1)
- self.t("a + b", 3)
-
-
-class TestWhitespace(DRYTest):
- """ test that incorrect whitespace (preceding/trailing) doesn't matter. """
-
- def test_no_whitespace(self):
- self.t("200 + 200", 400)
-
- def test_trailing(self):
- self.t("200 + 200 ", 400)
-
- def test_preciding_whitespace(self):
- self.t(" 200 + 200", 400)
-
- def test_preceding_tab_whitespace(self):
- self.t("\t200 + 200", 400)
-
- def test_preceding_mixed_whitespace(self):
- self.t(" \t 200 + 200", 400)
-
- def test_both_ends_whitespace(self):
- self.t(" \t 200 + 200 ", 400)
-
-
-class TestSimpleEval(unittest.TestCase):
- """ test the 'simple_eval' wrapper function """
-
- def test_basic_run(self):
- self.assertEqual(simple_eval("6*7"), 42)
-
- def test_default_functions(self):
- self.assertEqual(simple_eval("rand() < 1.0 and rand() > -0.01"), True)
- self.assertEqual(simple_eval("randint(200) < 200 and rand() > 0"), True)
-
-
-class TestMethodChaining(unittest.TestCase):
- def test_chaining_correct(self):
- """
- Contributed by Khalid Grandi (xaled).
- """
-
- class A(object):
- def __init__(self):
- self.a = "0"
-
- def add(self, b):
- self.a += "-add" + str(b)
- return self
-
- def sub(self, b):
- self.a += "-sub" + str(b)
- return self
-
- def tostring(self):
- return str(self.a)
-
- x = A()
- self.assertEqual(
- simple_eval("x.add(1).sub(2).sub(3).tostring()", names={"x": x}),
- "0-add1-sub2-sub3",
- )
-
-
-class TestExtendingClass(unittest.TestCase):
- """
- It should be pretty easy to extend / inherit from the SimpleEval class,
- to further lock things down, or unlock stuff, or whatever.
- """
-
- def test_methods_forbidden(self):
- # Example from README
- class EvalNoMethods(simpleeval.SimpleEval):
- def _eval_call(self, node):
- if isinstance(node.func, ast.Attribute):
- raise simpleeval.FeatureNotAvailable(
- "No methods please, we're British"
- )
- return super(EvalNoMethods, self)._eval_call(node)
-
- e = EvalNoMethods()
-
- self.assertEqual(e.eval('"stuff happens"'), "stuff happens")
- self.assertEqual(e.eval("22 + 20"), 42)
- self.assertEqual(e.eval('int("42")'), 42)
-
- with self.assertRaises(simpleeval.FeatureNotAvailable):
- e.eval('" blah ".strip()')
-
-
-class TestExceptions(unittest.TestCase):
- """
- confirm a few attributes exist properly and haven't been
- eaten by 2to3 or whatever... (see #41)
- """
-
- def test_functionnotdefined(self):
- try:
- raise FunctionNotDefined("foo", "foo in bar")
- except FunctionNotDefined as e:
- assert hasattr(e, "func_name")
- assert getattr(e, "func_name") == "foo"
- assert hasattr(e, "expression")
- assert getattr(e, "expression") == "foo in bar"
-
- def test_namenotdefined(self):
- try:
- raise NameNotDefined("foo", "foo in bar")
- except NameNotDefined as e:
- assert hasattr(e, "name")
- assert getattr(e, "name") == "foo"
- assert hasattr(e, "expression")
- assert getattr(e, "expression") == "foo in bar"
-
- def test_attributedoesnotexist(self):
- try:
- raise AttributeDoesNotExist("foo", "foo in bar")
- except AttributeDoesNotExist as e:
- assert hasattr(e, "attr")
- assert getattr(e, "attr") == "foo"
- assert hasattr(e, "expression")
- assert getattr(e, "expression") == "foo in bar"
-
-
-class TestUnusualComparisons(DRYTest):
- def test_custom_comparison_returner(self):
- class Blah(object):
- def __gt__(self, other):
- return self
-
- b = Blah()
- self.s.names = {"b": b}
- self.t("b > 2", b)
-
- def test_custom_comparison_doesnt_return_boolable(self):
- """
- SqlAlchemy, bless it's cotton socks, returns BinaryExpression objects
- when asking for comparisons between things. These BinaryExpressions
- raise a TypeError if you try and check for Truthyiness.
- """
-
- class BinaryExpression(object):
- def __init__(self, value):
- self.value = value
-
- def __eq__(self, other):
- return self.value == getattr(other, "value", other)
-
- def __repr__(self):
- return "".format(self.value)
-
- def __bool__(self):
- # This is the only important part, to match SqlAlchemy - the rest
- # of the methods are just to make testing a bit easier...
- raise TypeError("Boolean value of this clause is not defined")
-
- class Blah(object):
- def __gt__(self, other):
- return BinaryExpression("GT")
-
- def __lt__(self, other):
- return BinaryExpression("LT")
-
- b = Blah()
- self.s.names = {"b": b}
- # This should not crash:
- e = eval("b > 2", self.s.names)
-
- self.t("b > 2", BinaryExpression("GT"))
- self.t("1 < 5 > b", BinaryExpression("LT"))
-
-
-class TestGetItemUnhappy(DRYTest):
- # Again, SqlAlchemy doing unusual things. Throwing it's own errors, rather than
- # expected types...
-
- def test_getitem_not_implemented(self):
- class Meh(object):
- def __getitem__(self, key):
- raise NotImplementedError("booya!")
-
- def __getattr__(self, key):
- return 42
-
- m = Meh()
-
- self.assertEqual(m.anything, 42)
- with self.assertRaises(NotImplementedError):
- m["nothing"]
-
- self.s.names = {"m": m}
- self.t("m.anything", 42)
-
- with self.assertRaises(NotImplementedError):
- self.t("m['nothing']", None)
-
- self.s.ATTR_INDEX_FALLBACK = False
-
- self.t("m.anything", 42)
-
- with self.assertRaises(NotImplementedError):
- self.t("m['nothing']", None)
-
-
-class TestShortCircuiting(DRYTest):
- def test_shortcircuit_if(self):
- x = []
-
- def foo(y):
- x.append(y)
- return y
-
- self.s.functions = {"foo": foo}
- self.t("foo(1) if foo(2) else foo(3)", 1)
- self.assertListEqual(x, [2, 1])
-
- x = []
- self.t("42 if True else foo(99)", 42)
- self.assertListEqual(x, [])
-
- def test_shortcircuit_comparison(self):
- x = []
-
- def foo(y):
- x.append(y)
- return y
-
- self.s.functions = {"foo": foo}
- self.t("foo(11) < 12", True)
- self.assertListEqual(x, [11])
- x = []
-
- self.t("1 > 2 < foo(22)", False)
- self.assertListEqual(x, [])
-
-
-class TestDisallowedFunctions(DRYTest):
- def test_functions_are_disallowed_at_init(self):
- DISALLOWED = [
- type,
- isinstance,
- eval,
- getattr,
- setattr,
- help,
- repr,
- compile,
- open,
- ]
- if simpleeval.PYTHON3:
- exec("DISALLOWED.append(exec)") # exec is not a function in Python2...
-
- for f in simpleeval.DISALLOW_FUNCTIONS:
- assert f in DISALLOWED
-
- for x in DISALLOWED:
- with self.assertRaises(FeatureNotAvailable):
- s = SimpleEval(functions={"foo": x})
-
- def test_functions_are_disallowed_in_expressions(self):
- DISALLOWED = [
- type,
- isinstance,
- eval,
- getattr,
- setattr,
- help,
- repr,
- compile,
- open,
- ]
-
- if simpleeval.PYTHON3:
- exec("DISALLOWED.append(exec)") # exec is not a function in Python2...
-
- for f in simpleeval.DISALLOW_FUNCTIONS:
- assert f in DISALLOWED
-
- DF = simpleeval.DEFAULT_FUNCTIONS.copy()
-
- for x in DISALLOWED:
- simpleeval.DEFAULT_FUNCTIONS = DF.copy()
- with self.assertRaises(FeatureNotAvailable):
- s = SimpleEval()
- s.functions["foo"] = x
- s.eval("foo(42)")
-
- simpleeval.DEFAULT_FUNCTIONS = DF.copy()
-
-
-if __name__ == "__main__": # pragma: no cover
- unittest.main()
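The suite deleted above covered the vendored `simpleeval` module: a sandboxed expression evaluator that walks a restricted AST instead of calling `eval()`. For readers who lose that reference, here is a from-scratch sketch of the core idea those tests exercised — illustrative only, not the vendored implementation, and far less complete (no power limits, string-length caps, or attribute-access guards):

```python
from __future__ import annotations

import ast
import operator

# Whitelist of permitted binary operators; anything else is rejected.
_OPS = {
    ast.Add: operator.add,
    ast.Sub: operator.sub,
    ast.Mult: operator.mul,
    ast.Div: operator.truediv,
}


def safe_eval(expr: str, names: dict | None = None):
    """Evaluate simple arithmetic over literals and known names only."""
    names = names or {}

    def _eval(node: ast.AST):
        if isinstance(node, ast.Expression):
            return _eval(node.body)
        if isinstance(node, ast.BinOp) and type(node.op) in _OPS:
            return _OPS[type(node.op)](_eval(node.left), _eval(node.right))
        if isinstance(node, ast.Constant):
            return node.value
        if isinstance(node, ast.Name):
            if node.id not in names:
                raise NameError(node.id)
            return names[node.id]
        raise ValueError(f"Disallowed syntax: {type(node).__name__}")

    return _eval(ast.parse(expr, mode="eval"))


assert safe_eval("21 + 21") == 42
assert safe_eval("a * 2", names={"a": 21}) == 42
```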
diff --git a/tests/core/test_singer_messages.py b/tests/core/test_singer_messages.py
index 3a2253611..5ea161e8e 100644
--- a/tests/core/test_singer_messages.py
+++ b/tests/core/test_singer_messages.py
@@ -3,7 +3,11 @@
import pytest
from singer_sdk._singerlib import SingerMessageType
-from singer_sdk.helpers._batch import JSONLinesEncoding, SDKBatchMessage
+from singer_sdk.helpers._batch import (
+ JSONLinesEncoding,
+ ParquetEncoding,
+ SDKBatchMessage,
+)
@pytest.mark.parametrize(
@@ -28,8 +32,27 @@
],
},
),
+ (
+ SDKBatchMessage(
+ stream="test_stream",
+ encoding=ParquetEncoding("gzip"),
+ manifest=[
+ "path/to/file1.parquet.gz",
+ "path/to/file2.parquet.gz",
+ ],
+ ),
+ {
+ "type": SingerMessageType.BATCH,
+ "stream": "test_stream",
+ "encoding": {"compression": "gzip", "format": "parquet"},
+ "manifest": [
+ "path/to/file1.parquet.gz",
+ "path/to/file2.parquet.gz",
+ ],
+ },
+ ),
],
- ids=["batch-message-jsonl"],
+ ids=["batch-message-jsonl", "batch-message-parquet"],
)
def test_batch_message_as_dict(message, expected):
"""Test batch message as dict."""
diff --git a/tests/core/test_sql_typing.py b/tests/core/test_sql_typing.py
index 0d2c4bac0..4248ea06d 100644
--- a/tests/core/test_sql_typing.py
+++ b/tests/core/test_sql_typing.py
@@ -3,7 +3,7 @@
from __future__ import annotations
import pytest
-import sqlalchemy
+import sqlalchemy as sa
from singer_sdk import typing as th
@@ -11,25 +11,25 @@
@pytest.mark.parametrize(
"jsonschema_type,sql_type",
[
- (th.StringType().to_dict(), sqlalchemy.types.VARCHAR()),
- (th.IntegerType().to_dict(), sqlalchemy.types.INTEGER()),
- (th.BooleanType().to_dict(), sqlalchemy.types.BOOLEAN()),
- (th.NumberType().to_dict(), sqlalchemy.types.DECIMAL()),
- (th.ObjectType().to_dict(), sqlalchemy.types.VARCHAR()),
- (th.DateTimeType().to_dict(), sqlalchemy.types.DATETIME()),
- (th.DateType().to_dict(), sqlalchemy.types.DATE()),
+ (th.StringType().to_dict(), sa.types.VARCHAR()),
+ (th.IntegerType().to_dict(), sa.types.INTEGER()),
+ (th.BooleanType().to_dict(), sa.types.BOOLEAN()),
+ (th.NumberType().to_dict(), sa.types.DECIMAL()),
+ (th.ObjectType().to_dict(), sa.types.VARCHAR()),
+ (th.DateTimeType().to_dict(), sa.types.DATETIME()),
+ (th.DateType().to_dict(), sa.types.DATE()),
# Unhandled types end up as 'varchar':
(
th.CustomType({"type": "array", "items": "something"}).to_dict(),
- sqlalchemy.types.VARCHAR(),
+ sa.types.VARCHAR(),
),
(
th.CustomType({"cannot": "compute"}).to_dict(),
- sqlalchemy.types.VARCHAR(),
+ sa.types.VARCHAR(),
),
(
th.CustomType({"type": "string", "maxLength": 10}).to_dict(),
- sqlalchemy.types.VARCHAR(10),
+ sa.types.VARCHAR(10),
),
],
ids=[
@@ -47,7 +47,7 @@
)
def test_convert_jsonschema_type_to_sql_type(
jsonschema_type: dict,
- sql_type: sqlalchemy.types.TypeEngine,
+ sql_type: sa.types.TypeEngine,
):
result = th.to_sql_type(jsonschema_type)
assert isinstance(result, sql_type.__class__)
@@ -57,17 +57,17 @@ def test_convert_jsonschema_type_to_sql_type(
@pytest.mark.parametrize(
"sql_type,is_of_jsonschema_type",
[
- (sqlalchemy.types.VARCHAR, th.StringType().to_dict()),
- (sqlalchemy.types.INTEGER, th.IntegerType().to_dict()),
- (sqlalchemy.types.BOOLEAN, th.BooleanType().to_dict()),
- (sqlalchemy.types.DATETIME, th.DateTimeType().to_dict()),
- (sqlalchemy.types.DATE, th.DateType().to_dict()),
+ (sa.types.VARCHAR, th.StringType().to_dict()),
+ (sa.types.INTEGER, th.IntegerType().to_dict()),
+ (sa.types.BOOLEAN, th.BooleanType().to_dict()),
+ (sa.types.DATETIME, th.DateTimeType().to_dict()),
+ (sa.types.DATE, th.DateType().to_dict()),
# Unhandled types end up as 'string':
- (sqlalchemy.types.CLOB, th.StringType().to_dict()),
+ (sa.types.CLOB, th.StringType().to_dict()),
],
)
def test_convert_sql_type_to_jsonschema_type(
- sql_type: sqlalchemy.types.TypeEngine,
+ sql_type: sa.types.TypeEngine,
is_of_jsonschema_type: dict,
):
result = th.to_jsonschema_type(sql_type)
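Taken together, the two parametrized tests above pin down a round trip between JSON Schema dicts and SQLAlchemy type objects. A short usage sketch, assuming only the behavior the parametrize tables assert (unhandled inputs degrade to `VARCHAR` in one direction and string in the other):

```python
# Usage sketch of the converters exercised above.
import sqlalchemy as sa

from singer_sdk import typing as th

# JSON Schema dict -> SQLAlchemy type instance
assert isinstance(th.to_sql_type(th.StringType().to_dict()), sa.types.VARCHAR)

# SQLAlchemy type -> JSON Schema dict
assert th.to_jsonschema_type(sa.types.INTEGER) == th.IntegerType().to_dict()
```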
diff --git a/tests/core/test_streams.py b/tests/core/test_streams.py
index 34bbc7514..f3d9aba84 100644
--- a/tests/core/test_streams.py
+++ b/tests/core/test_streams.py
@@ -10,71 +10,23 @@
import requests
from singer_sdk._singerlib import Catalog, MetadataMapping
+from singer_sdk.exceptions import (
+ InvalidReplicationKeyException,
+)
from singer_sdk.helpers._classproperty import classproperty
from singer_sdk.helpers.jsonpath import _compile_jsonpath, extract_jsonpath
from singer_sdk.pagination import first
-from singer_sdk.streams.core import (
- REPLICATION_FULL_TABLE,
- REPLICATION_INCREMENTAL,
- Stream,
-)
+from singer_sdk.streams.core import REPLICATION_FULL_TABLE, REPLICATION_INCREMENTAL
from singer_sdk.streams.graphql import GraphQLStream
from singer_sdk.streams.rest import RESTStream
-from singer_sdk.tap_base import Tap
-from singer_sdk.typing import (
- DateTimeType,
- IntegerType,
- PropertiesList,
- Property,
- StringType,
-)
+from singer_sdk.typing import IntegerType, PropertiesList, Property, StringType
+from tests.core.conftest import SimpleTestStream
CONFIG_START_DATE = "2021-01-01"
-
-class SimpleTestStream(Stream):
- """Test stream class."""
-
- name = "test"
- schema = PropertiesList(
- Property("id", IntegerType, required=True),
- Property("value", StringType, required=True),
- Property("updatedAt", DateTimeType, required=True),
- ).to_dict()
- replication_key = "updatedAt"
-
- def __init__(self, tap: Tap):
- """Create a new stream."""
- super().__init__(tap, schema=self.schema, name=self.name)
-
- def get_records(
- self,
- context: dict | None, # noqa: ARG002
- ) -> t.Iterable[dict[str, t.Any]]:
- """Generate records."""
- yield {"id": 1, "value": "Egypt"}
- yield {"id": 2, "value": "Germany"}
- yield {"id": 3, "value": "India"}
-
-
-class UnixTimestampIncrementalStream(SimpleTestStream):
- name = "unix_ts"
- schema = PropertiesList(
- Property("id", IntegerType, required=True),
- Property("value", StringType, required=True),
- Property("updatedAt", IntegerType, required=True),
- ).to_dict()
- replication_key = "updatedAt"
-
-
-class UnixTimestampIncrementalStream2(UnixTimestampIncrementalStream):
- name = "unix_ts_override"
-
- def compare_start_date(self, value: str, start_date_value: str) -> str:
- """Compare a value to a start date value."""
-
- start_timestamp = pendulum.parse(start_date_value).format("X")
- return max(value, start_timestamp, key=float)
+if t.TYPE_CHECKING:
+ from singer_sdk import Stream, Tap
+ from tests.core.conftest import SimpleTestTap
class RestTestStream(RESTStream):
@@ -94,19 +46,18 @@ def get_next_page_token(
response: requests.Response,
previous_token: str | None, # noqa: ARG002
) -> str | None:
- if self.next_page_token_jsonpath:
- all_matches = extract_jsonpath(
- self.next_page_token_jsonpath,
- response.json(),
- )
- try:
- return first(all_matches)
- except StopIteration:
- return None
-
- else:
+ if not self.next_page_token_jsonpath:
return response.headers.get("X-Next-Page", None)
+ all_matches = extract_jsonpath(
+ self.next_page_token_jsonpath,
+ response.json(),
+ )
+ try:
+ return first(all_matches)
+ except StopIteration:
+ return None
+
class GraphqlTestStream(GraphQLStream):
"""Test Graphql stream class."""
@@ -121,43 +72,13 @@ class GraphqlTestStream(GraphQLStream):
replication_key = "updatedAt"
-class SimpleTestTap(Tap):
- """Test tap class."""
-
- name = "test-tap"
- settings_jsonschema = PropertiesList(Property("start_date", DateTimeType)).to_dict()
-
- def discover_streams(self) -> list[Stream]:
- """List all streams."""
- return [
- SimpleTestStream(self),
- UnixTimestampIncrementalStream(self),
- UnixTimestampIncrementalStream2(self),
- ]
-
-
@pytest.fixture
-def tap() -> SimpleTestTap:
- """Tap instance."""
- return SimpleTestTap(
- config={"start_date": CONFIG_START_DATE},
- parse_env_config=False,
- )
-
-
-@pytest.fixture
-def stream(tap: SimpleTestTap) -> SimpleTestStream:
- """Create a new stream instance."""
- return t.cast(SimpleTestStream, tap.load_streams()[0])
-
-
-@pytest.fixture
-def unix_timestamp_stream(tap: SimpleTestTap) -> UnixTimestampIncrementalStream:
+def stream(tap):
"""Create a new stream instance."""
- return t.cast(UnixTimestampIncrementalStream, tap.load_streams()[1])
+ return tap.load_streams()[0]
-def test_stream_apply_catalog(stream: SimpleTestStream):
+def test_stream_apply_catalog(stream: Stream):
"""Applying a catalog to a stream should overwrite fields."""
assert stream.primary_keys == []
assert stream.replication_key == "updatedAt"
@@ -189,22 +110,32 @@ def test_stream_apply_catalog(stream: SimpleTestStream):
@pytest.mark.parametrize(
- "stream_name,bookmark_value,expected_starting_value",
+ "stream_name,forced_replication_method,bookmark_value,expected_starting_value",
[
pytest.param(
"test",
None,
+ None,
pendulum.parse(CONFIG_START_DATE),
id="datetime-repl-key-no-state",
),
pytest.param(
"test",
+ None,
"2021-02-01",
pendulum.datetime(2021, 2, 1),
id="datetime-repl-key-recent-bookmark",
),
pytest.param(
"test",
+ REPLICATION_FULL_TABLE,
+ "2021-02-01",
+ None,
+ id="datetime-forced-full-table",
+ ),
+ pytest.param(
+ "test",
+ None,
"2020-01-01",
pendulum.parse(CONFIG_START_DATE),
id="datetime-repl-key-old-bookmark",
@@ -212,17 +143,20 @@ def test_stream_apply_catalog(stream: SimpleTestStream):
pytest.param(
"unix_ts",
None,
+ None,
CONFIG_START_DATE,
id="naive-unix-ts-repl-key-no-state",
),
pytest.param(
"unix_ts",
+ None,
"1612137600",
"1612137600",
id="naive-unix-ts-repl-key-recent-bookmark",
),
pytest.param(
"unix_ts",
+ None,
"1577858400",
"1577858400",
id="naive-unix-ts-repl-key-old-bookmark",
@@ -230,17 +164,20 @@ def test_stream_apply_catalog(stream: SimpleTestStream):
pytest.param(
"unix_ts_override",
None,
+ None,
CONFIG_START_DATE,
id="unix-ts-repl-key-no-state",
),
pytest.param(
"unix_ts_override",
+ None,
"1612137600",
"1612137600",
id="unix-ts-repl-key-recent-bookmark",
),
pytest.param(
"unix_ts_override",
+ None,
"1577858400",
pendulum.parse(CONFIG_START_DATE).format("X"),
id="unix-ts-repl-key-old-bookmark",
@@ -248,8 +185,9 @@ def test_stream_apply_catalog(stream: SimpleTestStream):
],
)
def test_stream_starting_timestamp(
- tap: SimpleTestTap,
+ tap: Tap,
stream_name: str,
+ forced_replication_method: str | None,
bookmark_value: str,
expected_starting_value: t.Any,
):
@@ -272,7 +210,27 @@ def test_stream_starting_timestamp(
},
)
stream._write_starting_replication_value(None)
- assert get_starting_value(None) == expected_starting_value
+
+ with stream.with_replication_method(forced_replication_method):
+ assert get_starting_value(None) == expected_starting_value
+
+
+def test_stream_invalid_replication_key(tap: SimpleTestTap):
+ """Validate an exception is raised if replication_key not in schema."""
+
+ class InvalidReplicationKeyStream(SimpleTestStream):
+ replication_key = "INVALID"
+
+ stream = InvalidReplicationKeyStream(tap)
+
+ with pytest.raises(
+ InvalidReplicationKeyException,
+ match=(
+ f"Field '{stream.replication_key}' is not in schema for stream "
+ f"'{stream.name}'"
+ ),
+ ):
+ _check = stream.is_timestamp_replication_key
@pytest.mark.parametrize(
@@ -332,12 +290,7 @@ def test_stream_starting_timestamp(
"nested_values",
],
)
-def test_jsonpath_rest_stream(
- tap: SimpleTestTap,
- path: str,
- content: str,
- result: list[dict],
-):
+def test_jsonpath_rest_stream(tap: Tap, path: str, content: str, result: list[dict]):
"""Validate records are extracted correctly from the API response."""
fake_response = requests.Response()
fake_response._content = str.encode(content)
@@ -350,7 +303,7 @@ def test_jsonpath_rest_stream(
assert list(records) == result
-def test_jsonpath_graphql_stream_default(tap: SimpleTestTap):
+def test_jsonpath_graphql_stream_default(tap: Tap):
"""Validate graphql JSONPath, defaults to the stream name."""
content = """{
"data": {
@@ -370,7 +323,7 @@ def test_jsonpath_graphql_stream_default(tap: SimpleTestTap):
assert list(records) == [{"id": 1, "value": "abc"}, {"id": 2, "value": "def"}]
-def test_jsonpath_graphql_stream_override(tap: SimpleTestTap):
+def test_jsonpath_graphql_stream_override(tap: Tap):
"""Validate graphql jsonpath can be updated."""
content = """[
{"id": 1, "value": "abc"},
@@ -457,7 +410,7 @@ def records_jsonpath(cls): # noqa: N805
],
)
def test_next_page_token_jsonpath(
- tap: SimpleTestTap,
+ tap: Tap,
path: str,
content: str,
headers: dict,
@@ -489,7 +442,7 @@ def test_cached_jsonpath():
assert recompiled is compiled
-def test_sync_costs_calculation(tap: SimpleTestTap, caplog):
+def test_sync_costs_calculation(tap: Tap, caplog):
"""Test sync costs are added up correctly."""
fake_request = requests.PreparedRequest()
fake_response = requests.Response()
@@ -574,7 +527,7 @@ def calculate_test_cost(
),
],
)
-def test_stream_class_selection(input_catalog, selection):
+def test_stream_class_selection(tap_class, input_catalog, selection):
"""Test stream class selection."""
class SelectedStream(RESTStream):
@@ -586,11 +539,12 @@ class UnselectedStream(SelectedStream):
name = "unselected_stream"
selected_by_default = False
- class MyTap(SimpleTestTap):
+ class MyTap(tap_class):
def discover_streams(self):
return [SelectedStream(self), UnselectedStream(self)]
# Check that the selected stream is selected
- tap = MyTap(config=None, catalog=input_catalog)
- for stream in selection:
- assert tap.streams[stream].selected is selection[stream]
+ tap = MyTap(config=None, catalog=input_catalog, validate_config=False)
+ assert all(
+ tap.streams[stream].selected is selection[stream] for stream in selection
+ )
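The reworked `test_stream_starting_timestamp` relies on `stream.with_replication_method(...)`, apparently a context manager that temporarily forces a replication method (and is a no-op when passed `None`, which most of the parametrized cases do). A sketch of how such a temporary override could work — illustrative, not the SDK's implementation:

```python
from __future__ import annotations

from contextlib import contextmanager


class StreamLike:
    """Stand-in object; real streams carry much more state."""

    replication_method = "INCREMENTAL"

    @contextmanager
    def with_replication_method(self, method: str | None):
        """Force the replication method inside the block, then restore it."""
        previous = self.replication_method
        if method is not None:
            self.replication_method = method
        try:
            yield self
        finally:
            self.replication_method = previous


stream = StreamLike()
with stream.with_replication_method("FULL_TABLE"):
    assert stream.replication_method == "FULL_TABLE"
with stream.with_replication_method(None):  # no-op case
    assert stream.replication_method == "INCREMENTAL"
assert stream.replication_method == "INCREMENTAL"
```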
diff --git a/tests/core/test_tap_class.py b/tests/core/test_tap_class.py
new file mode 100644
index 000000000..93015fbb1
--- /dev/null
+++ b/tests/core/test_tap_class.py
@@ -0,0 +1,92 @@
+from __future__ import annotations
+
+import json
+import typing as t
+from contextlib import nullcontext
+
+import pytest
+from click.testing import CliRunner
+
+from singer_sdk.exceptions import ConfigValidationError
+
+if t.TYPE_CHECKING:
+ from singer_sdk import Tap
+
+
+@pytest.mark.parametrize(
+ "config_dict,expectation,errors",
+ [
+ pytest.param(
+ {},
+ pytest.raises(ConfigValidationError, match="Config validation failed"),
+ ["'username' is a required property", "'password' is a required property"],
+ id="missing_username_and_password",
+ ),
+ pytest.param(
+ {"username": "utest"},
+ pytest.raises(ConfigValidationError, match="Config validation failed"),
+ ["'password' is a required property"],
+ id="missing_password",
+ ),
+ pytest.param(
+ {"username": "utest", "password": "ptest", "extra": "not valid"},
+ pytest.raises(ConfigValidationError, match="Config validation failed"),
+ ["Additional properties are not allowed ('extra' was unexpected)"],
+ id="extra_property",
+ ),
+ pytest.param(
+ {"username": "utest", "password": "ptest"},
+ nullcontext(),
+ [],
+ id="valid_config",
+ ),
+ ],
+)
+def test_config_errors(
+ tap_class: type[Tap],
+ config_dict: dict,
+ expectation,
+ errors: list[str],
+):
+ with expectation as exc:
+ tap_class(config=config_dict, validate_config=True)
+
+ if isinstance(exc, pytest.ExceptionInfo):
+ assert exc.value.errors == errors
+
+
+def test_cli(tap_class: type[Tap]):
+ """Test the CLI."""
+ runner = CliRunner(mix_stderr=False)
+ result = runner.invoke(tap_class.cli, ["--help"])
+ assert result.exit_code == 0
+ assert "Show this message and exit." in result.output
+
+
+def test_cli_config_validation(tap_class: type[Tap], tmp_path):
+ """Test the CLI config validation."""
+ runner = CliRunner(mix_stderr=False)
+ config_path = tmp_path / "config.json"
+ config_path.write_text(json.dumps({}))
+ result = runner.invoke(tap_class.cli, ["--config", str(config_path)])
+ assert result.exit_code == 1
+ assert not result.stdout
+ assert "'username' is a required property" in result.stderr
+ assert "'password' is a required property" in result.stderr
+
+
+def test_cli_discover(tap_class: type[Tap], tmp_path):
+ """Test the CLI discover command."""
+ runner = CliRunner(mix_stderr=False)
+ config_path = tmp_path / "config.json"
+ config_path.write_text(json.dumps({}))
+ result = runner.invoke(
+ tap_class.cli,
+ [
+ "--config",
+ str(config_path),
+ "--discover",
+ ],
+ )
+ assert result.exit_code == 0
+ assert "streams" in json.loads(result.stdout)
diff --git a/tests/core/test_target_base.py b/tests/core/test_target_base.py
index 778fab722..de344c7e3 100644
--- a/tests/core/test_target_base.py
+++ b/tests/core/test_target_base.py
@@ -2,7 +2,14 @@
import copy
-from tests.conftest import BatchSinkMock, TargetMock
+import pytest
+
+from singer_sdk.exceptions import (
+ MissingKeyPropertiesError,
+ RecordsWithoutSchemaException,
+)
+from singer_sdk.helpers.capabilities import PluginCapabilities
+from tests.conftest import BatchSinkMock, SQLSinkMock, SQLTargetMock, TargetMock
def test_get_sink():
@@ -28,3 +35,109 @@ def test_get_sink():
key_properties=key_properties,
)
assert sink_returned == sink
+
+
+def test_validate_record():
+ target = TargetMock()
+ sink = BatchSinkMock(
+ target=target,
+ stream_name="test",
+ schema={
+ "properties": {
+ "id": {"type": ["integer"]},
+ "name": {"type": ["string"]},
+ },
+ },
+ key_properties=["id"],
+ )
+
+ # Test valid record
+ sink._singer_validate_message({"id": 1, "name": "test"})
+
+ # Test invalid record
+ with pytest.raises(MissingKeyPropertiesError):
+ sink._singer_validate_message({"name": "test"})
+
+
+def test_target_about_info():
+ target = TargetMock()
+ about = target._get_about_info()
+
+ assert about.capabilities == [
+ PluginCapabilities.ABOUT,
+ PluginCapabilities.STREAM_MAPS,
+ PluginCapabilities.FLATTENING,
+ PluginCapabilities.BATCH,
+ ]
+
+ assert "stream_maps" in about.settings["properties"]
+ assert "stream_map_config" in about.settings["properties"]
+ assert "flattening_enabled" in about.settings["properties"]
+ assert "flattening_max_depth" in about.settings["properties"]
+ assert "batch_config" in about.settings["properties"]
+ assert "add_record_metadata" in about.settings["properties"]
+
+
+def test_sql_get_sink():
+ input_schema_1 = {
+ "properties": {
+ "id": {
+ "type": ["string", "null"],
+ },
+ "col_ts": {
+ "format": "date-time",
+ "type": ["string", "null"],
+ },
+ },
+ }
+ input_schema_2 = copy.deepcopy(input_schema_1)
+ key_properties = []
+ target = SQLTargetMock(config={"sqlalchemy_url": "sqlite:///"})
+ sink = SQLSinkMock(
+ target=target,
+ stream_name="foo",
+ schema=input_schema_1,
+ key_properties=key_properties,
+ connector=target.target_connector,
+ )
+ target._sinks_active["foo"] = sink
+ sink_returned = target.get_sink(
+ "foo",
+ schema=input_schema_2,
+ key_properties=key_properties,
+ )
+ assert sink_returned is sink
+
+
+def test_add_sqlsink_and_get_sink():
+ input_schema_1 = {
+ "properties": {
+ "id": {
+ "type": ["string", "null"],
+ },
+ "col_ts": {
+ "format": "date-time",
+ "type": ["string", "null"],
+ },
+ },
+ }
+ input_schema_2 = copy.deepcopy(input_schema_1)
+ key_properties = []
+ target = SQLTargetMock(config={"sqlalchemy_url": "sqlite:///"})
+ sink = target.add_sqlsink(
+ "foo",
+ schema=input_schema_2,
+ key_properties=key_properties,
+ )
+
+ sink_returned = target.get_sink(
+ "foo",
+ )
+
+ assert sink_returned is sink
+
+ # Test invalid call
+ with pytest.raises(RecordsWithoutSchemaException):
+ target.get_sink(
+ "bar",
+ )
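The contract these sink tests pin down is a per-stream registry: requesting a known stream name returns the same sink instance, while requesting a stream that never received a schema raises `RecordsWithoutSchemaException`. A stripped-down sketch of such a registry, illustrative only:

```python
from __future__ import annotations


class RecordsWithoutSchemaException(Exception):
    """Raised when records arrive for a stream with no known schema."""


class MiniTarget:
    def __init__(self) -> None:
        self._sinks_active: dict[str, dict] = {}

    def add_sink(self, stream_name: str, schema: dict) -> dict:
        # Register (or replace) the sink for a stream.
        sink = {"stream": stream_name, "schema": schema}
        self._sinks_active[stream_name] = sink
        return sink

    def get_sink(self, stream_name: str) -> dict:
        if stream_name not in self._sinks_active:
            raise RecordsWithoutSchemaException(stream_name)
        return self._sinks_active[stream_name]


target = MiniTarget()
sink = target.add_sink("foo", schema={"properties": {}})
assert target.get_sink("foo") is sink
```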
diff --git a/tests/core/test_target_class.py b/tests/core/test_target_class.py
new file mode 100644
index 000000000..f84ae1dae
--- /dev/null
+++ b/tests/core/test_target_class.py
@@ -0,0 +1,54 @@
+from __future__ import annotations
+
+import json
+from contextlib import nullcontext
+
+import pytest
+from click.testing import CliRunner
+
+from samples.sample_target_sqlite import SQLiteTarget
+from singer_sdk.exceptions import ConfigValidationError
+
+
+@pytest.mark.parametrize(
+ "config_dict,expectation,errors",
+ [
+ pytest.param(
+ {},
+ pytest.raises(ConfigValidationError, match="Config validation failed"),
+ ["'path_to_db' is a required property"],
+ id="missing_path_to_db",
+ ),
+ pytest.param(
+ {"path_to_db": "sqlite://test.db"},
+ nullcontext(),
+ [],
+ id="valid_config",
+ ),
+ ],
+)
+def test_config_errors(config_dict: dict, expectation, errors: list[str]):
+ with expectation as exc:
+ SQLiteTarget(config=config_dict, validate_config=True)
+
+ if isinstance(exc, pytest.ExceptionInfo):
+ assert exc.value.errors == errors
+
+
+def test_cli():
+ """Test the CLI."""
+ runner = CliRunner(mix_stderr=False)
+ result = runner.invoke(SQLiteTarget.cli, ["--help"])
+ assert result.exit_code == 0
+ assert "Show this message and exit." in result.output
+
+
+def test_cli_config_validation(tmp_path):
+ """Test the CLI config validation."""
+ runner = CliRunner(mix_stderr=False)
+ config_path = tmp_path / "config.json"
+ config_path.write_text(json.dumps({}))
+ result = runner.invoke(SQLiteTarget.cli, ["--config", str(config_path)])
+ assert result.exit_code == 1
+ assert not result.stdout
+ assert "'path_to_db' is a required property" in result.stderr
diff --git a/tests/core/test_testing.py b/tests/core/test_testing.py
new file mode 100644
index 000000000..5715cd1e1
--- /dev/null
+++ b/tests/core/test_testing.py
@@ -0,0 +1,43 @@
+"""Test the plugin testing helpers."""
+
+from __future__ import annotations
+
+import pytest
+
+from singer_sdk.testing.factory import BaseTestClass
+
+
+def test_module_deprecations():
+ with pytest.deprecated_call():
+ from singer_sdk.testing import get_standard_tap_tests # noqa: F401
+
+ with pytest.deprecated_call():
+ from singer_sdk.testing import get_standard_target_tests # noqa: F401
+
+ from singer_sdk import testing
+
+ with pytest.raises(
+ AttributeError,
+ match="module singer_sdk.testing has no attribute",
+ ):
+ testing.foo # noqa: B018
+
+
+def test_test_class_mro():
+ class PluginTestClass(BaseTestClass):
+ pass
+
+ PluginTestClass.params["x"] = 1
+
+ class AnotherPluginTestClass(BaseTestClass):
+ pass
+
+ AnotherPluginTestClass.params["x"] = 2
+ AnotherPluginTestClass.params["y"] = 3
+
+ class SubPluginTestClass(PluginTestClass):
+ pass
+
+ assert PluginTestClass.params == {"x": 1}
+ assert AnotherPluginTestClass.params == {"x": 2, "y": 3}
+ assert SubPluginTestClass.params == {"x": 1}
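`test_test_class_mro` asserts that every `BaseTestClass` subclass gets its own `params` mapping: siblings do not share mutations, and a subclass inherits a snapshot of its parent's params at definition time. One plausible mechanism is `__init_subclass__` copying the inherited dict — an assumption; the SDK may implement this differently:

```python
from __future__ import annotations


class BaseTestClass:
    params: dict = {}

    def __init_subclass__(cls, **kwargs):
        super().__init_subclass__(**kwargs)
        # Each new subclass copies whatever it inherits, so later
        # mutations stay local to that class.
        cls.params = dict(cls.params)


class PluginTests(BaseTestClass):
    pass


PluginTests.params["x"] = 1


class OtherTests(BaseTestClass):
    pass


class SubTests(PluginTests):  # defined after PluginTests gained "x"
    pass


assert BaseTestClass.params == {}
assert OtherTests.params == {}
assert SubTests.params == {"x": 1}
```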
diff --git a/tests/core/test_typing.py b/tests/core/test_typing.py
index b2cf9c691..7bb0ab362 100644
--- a/tests/core/test_typing.py
+++ b/tests/core/test_typing.py
@@ -6,7 +6,7 @@
import logging
import pytest
-import sqlalchemy
+import sqlalchemy as sa
from singer_sdk.helpers._typing import (
TypeConformanceLevel,
@@ -297,23 +297,23 @@ def test_conform_primitives():
@pytest.mark.parametrize(
"jsonschema_type,expected",
[
- ({"type": ["string", "null"]}, sqlalchemy.types.VARCHAR),
- ({"type": ["integer", "null"]}, sqlalchemy.types.INTEGER),
- ({"type": ["number", "null"]}, sqlalchemy.types.DECIMAL),
- ({"type": ["boolean", "null"]}, sqlalchemy.types.BOOLEAN),
- ({"type": "object", "properties": {}}, sqlalchemy.types.VARCHAR),
- ({"type": "array"}, sqlalchemy.types.VARCHAR),
- ({"format": "date", "type": ["string", "null"]}, sqlalchemy.types.DATE),
- ({"format": "time", "type": ["string", "null"]}, sqlalchemy.types.TIME),
+ ({"type": ["string", "null"]}, sa.types.VARCHAR),
+ ({"type": ["integer", "null"]}, sa.types.INTEGER),
+ ({"type": ["number", "null"]}, sa.types.DECIMAL),
+ ({"type": ["boolean", "null"]}, sa.types.BOOLEAN),
+ ({"type": "object", "properties": {}}, sa.types.VARCHAR),
+ ({"type": "array"}, sa.types.VARCHAR),
+ ({"format": "date", "type": ["string", "null"]}, sa.types.DATE),
+ ({"format": "time", "type": ["string", "null"]}, sa.types.TIME),
(
{"format": "date-time", "type": ["string", "null"]},
- sqlalchemy.types.DATETIME,
+ sa.types.DATETIME,
),
(
{"anyOf": [{"type": "string", "format": "date-time"}, {"type": "null"}]},
- sqlalchemy.types.DATETIME,
+ sa.types.DATETIME,
),
- ({"anyOf": [{"type": "integer"}, {"type": "null"}]}, sqlalchemy.types.INTEGER),
+ ({"anyOf": [{"type": "integer"}, {"type": "null"}]}, sa.types.INTEGER),
],
)
def test_to_sql_type(jsonschema_type, expected):
diff --git a/tests/samples/conftest.py b/tests/samples/conftest.py
index 60f277412..b9ce33319 100644
--- a/tests/samples/conftest.py
+++ b/tests/samples/conftest.py
@@ -5,7 +5,7 @@
from pathlib import Path
import pytest
-from sqlalchemy import text
+import sqlalchemy as sa
from samples.sample_tap_sqlite import SQLiteConnector, SQLiteTap
from singer_sdk._singerlib import Catalog
@@ -23,23 +23,18 @@ def _sqlite_sample_db(sqlite_connector):
"""Return a path to a newly constructed sample DB."""
with sqlite_connector._connect() as conn, conn.begin():
for t in range(3):
- conn.execute(text(f"DROP TABLE IF EXISTS t{t}"))
+ conn.execute(sa.text(f"DROP TABLE IF EXISTS t{t}"))
conn.execute(
- text(f"CREATE TABLE t{t} (c1 int PRIMARY KEY, c2 varchar(10))"),
+ sa.text(f"CREATE TABLE t{t} (c1 int PRIMARY KEY, c2 varchar(10))"),
)
for x in range(100):
conn.execute(
- text(f"INSERT INTO t{t} VALUES ({x}, 'x={x}')"), # noqa: S608
+ sa.text(f"INSERT INTO t{t} VALUES ({x}, 'x={x}')"), # noqa: S608
)
@pytest.fixture
-def sqlite_sample_tap(
- _sqlite_sample_db,
- sqlite_sample_db_config,
- sqlite_sample_db_state,
-) -> SQLiteTap:
- _ = _sqlite_sample_db
+def sqlite_sample_db_catalog(sqlite_sample_db_config) -> Catalog:
catalog_obj = Catalog.from_dict(
_get_tap_catalog(SQLiteTap, config=sqlite_sample_db_config, select_all=True),
)
@@ -55,9 +50,20 @@ def sqlite_sample_tap(
t2.key_properties = ["c1"]
t2.replication_key = "c1"
t2.replication_method = "INCREMENTAL"
+ return catalog_obj
+
+
+@pytest.fixture
+def sqlite_sample_tap(
+ _sqlite_sample_db,
+ sqlite_sample_db_config,
+ sqlite_sample_db_state,
+ sqlite_sample_db_catalog,
+) -> SQLiteTap:
+ _ = _sqlite_sample_db
return SQLiteTap(
config=sqlite_sample_db_config,
- catalog=catalog_obj.to_dict(),
+ catalog=sqlite_sample_db_catalog.to_dict(),
state=sqlite_sample_db_state,
)
@@ -73,7 +79,7 @@ def path_to_sample_data_db(tmp_path: Path) -> Path:
@pytest.fixture
-def sqlite_sample_db_config(path_to_sample_data_db: str) -> dict:
+def sqlite_sample_db_config(path_to_sample_data_db: Path) -> dict:
"""Get configuration dictionary for target-csv."""
return {"path_to_db": str(path_to_sample_data_db)}
diff --git a/tests/samples/test_tap_sqlite.py b/tests/samples/test_tap_sqlite.py
index 5e1349f94..b5ed7b549 100644
--- a/tests/samples/test_tap_sqlite.py
+++ b/tests/samples/test_tap_sqlite.py
@@ -1,12 +1,20 @@
from __future__ import annotations
+import datetime
+import json
import typing as t
+import pytest
+import time_machine
+from click.testing import CliRunner
+
+from samples.sample_tap_sqlite import SQLiteTap
from samples.sample_target_csv.csv_target import SampleTargetCSV
from singer_sdk import SQLStream
from singer_sdk._singerlib import MetadataMapping, StreamMetadata
from singer_sdk.testing import (
get_standard_tap_tests,
+ tap_sync_test,
tap_to_target_sync_test,
)
@@ -24,10 +32,27 @@ def _discover_and_select_all(tap: SQLTap) -> None:
catalog_entry["metadata"] = md.to_list()
+def test_tap_sqlite_cli(sqlite_sample_db_config: dict[str, t.Any], tmp_path: Path):
+ runner = CliRunner()
+ filepath = tmp_path / "config.json"
+
+ with filepath.open("w") as f:
+ json.dump(sqlite_sample_db_config, f)
+
+ result = runner.invoke(
+ SQLiteTap.cli,
+ ["--discover", "--config", str(filepath)],
+ )
+ assert result.exit_code == 0
+
+ catalog = json.loads(result.stdout)
+ assert "streams" in catalog
+
+
def test_sql_metadata(sqlite_sample_tap: SQLTap):
stream = t.cast(SQLStream, sqlite_sample_tap.streams["main-t1"])
detected_metadata = stream.catalog_entry["metadata"]
- detected_root_md = [md for md in detected_metadata if md["breadcrumb"] == []][0]
+ detected_root_md = next(md for md in detected_metadata if md["breadcrumb"] == [])
detected_root_md = detected_root_md["metadata"]
translated_metadata = StreamMetadata.from_dict(detected_root_md)
assert detected_root_md["schema-name"] == translated_metadata.schema_name
@@ -95,3 +120,27 @@ def test_sync_sqlite_to_csv(sqlite_sample_tap: SQLTap, tmp_path: Path):
sqlite_sample_tap,
SampleTargetCSV(config={"target_folder": f"{tmp_path}/"}),
)
+
+
+@pytest.fixture
+@time_machine.travel(
+ datetime.datetime(2022, 1, 1, tzinfo=datetime.timezone.utc),
+ tick=False,
+)
+def sqlite_sample_tap_state_messages(sqlite_sample_tap: SQLTap) -> list[dict]:
+ stdout, _ = tap_sync_test(sqlite_sample_tap)
+ state_messages = []
+ for line in stdout.readlines():
+ message = json.loads(line)
+ if message["type"] == "STATE":
+ state_messages.append(message)
+
+ return state_messages
+
+
+def test_sqlite_state(sqlite_sample_tap_state_messages):
+ assert all(
+ "progress_markers" not in bookmark
+ for message in sqlite_sample_tap_state_messages
+ for bookmark in message["value"]["bookmarks"].values()
+ )
diff --git a/tests/samples/test_target_csv.py b/tests/samples/test_target_csv.py
index 715edbb65..a0a3f3497 100644
--- a/tests/samples/test_target_csv.py
+++ b/tests/samples/test_target_csv.py
@@ -1,6 +1,7 @@
"""Test tap-to-target sync."""
from __future__ import annotations
+import datetime
import json
import shutil
import typing as t
@@ -8,12 +9,13 @@
from pathlib import Path
import pytest
+import time_machine
from click.testing import CliRunner
-from freezegun import freeze_time
from samples.sample_mapper.mapper import StreamTransform
from samples.sample_tap_countries.countries_tap import SampleTapCountries
from samples.sample_target_csv.csv_target import SampleTargetCSV
+from singer_sdk.helpers._compat import importlib_resources
from singer_sdk.testing import (
get_target_test_class,
sync_end_to_end,
@@ -77,12 +79,33 @@ def test_target_batching():
buf, _ = tap_sync_test(tap)
- mocked_starttime = "2012-01-01 12:00:00"
- mocked_jumptotime2 = "2012-01-01 12:31:00"
- mocked_jumptotime3 = "2012-01-01 13:02:00"
+ mocked_starttime = datetime.datetime(
+ 2012,
+ 1,
+ 1,
+ 12,
+ 0,
+ tzinfo=datetime.timezone.utc,
+ )
+ mocked_jumptotime2 = datetime.datetime(
+ 2012,
+ 1,
+ 1,
+ 12,
+ 31,
+ tzinfo=datetime.timezone.utc,
+ )
+ mocked_jumptotime3 = datetime.datetime(
+ 2012,
+ 1,
+ 1,
+ 13,
+ 2,
+ tzinfo=datetime.timezone.utc,
+ )
countries_record_count = 257
- with freeze_time(mocked_starttime):
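+    # time_machine.travel with tick=False pins the clock, mirroring freeze_time.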
+ with time_machine.travel(mocked_starttime, tick=False):
target = TargetMock(config={})
target.max_parallelism = 1 # Limit unit test to 1 process
assert target.num_records_processed == 0
@@ -96,7 +119,7 @@ def test_target_batching():
assert len(target.records_written) == 0 # Drain not yet called
assert len(target.state_messages_written) == 0 # Drain not yet called
- with freeze_time(mocked_jumptotime2):
+ with time_machine.travel(mocked_jumptotime2, tick=False):
buf.seek(0)
target_sync_test(target, buf, finalize=False)
@@ -105,7 +128,7 @@ def test_target_batching():
assert len(target.records_written) == countries_record_count + 1
assert len(target.state_messages_written) == 1
- with freeze_time(mocked_jumptotime3):
+ with time_machine.travel(mocked_jumptotime3, tick=False):
buf.seek(0)
target_sync_test(target, buf, finalize=False)
@@ -124,7 +147,9 @@ def test_target_batching():
}
-SAMPLE_FILENAME = Path(__file__).parent / Path("./resources/messages.jsonl")
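+# Resolve the sample file via package resources rather than __file__ paths.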
+SAMPLE_FILENAME = (
+ importlib_resources.files("tests.samples") / "resources/messages.jsonl"
+)
EXPECTED_OUTPUT = """"id" "name"
1 "Chris"
2 "Mike"
diff --git a/tests/samples/test_target_sqlite.py b/tests/samples/test_target_sqlite.py
index a7ca3b3c5..bddcbcf07 100644
--- a/tests/samples/test_target_sqlite.py
+++ b/tests/samples/test_target_sqlite.py
@@ -12,20 +12,20 @@
from uuid import uuid4
import pytest
-import sqlalchemy
+import sqlalchemy as sa
from samples.sample_tap_hostile import SampleTapHostile
from samples.sample_tap_sqlite import SQLiteTap
from samples.sample_target_sqlite import SQLiteSink, SQLiteTarget
from singer_sdk import typing as th
from singer_sdk.testing import (
- _get_tap_catalog,
tap_sync_test,
tap_to_target_sync_test,
target_sync_test,
)
if t.TYPE_CHECKING:
+ from singer_sdk._singerlib import Catalog
from singer_sdk.tap_base import SQLTap
from singer_sdk.target_base import SQLTarget
@@ -36,7 +36,7 @@ def path_to_target_db(tmp_path: Path) -> Path:
@pytest.fixture
-def sqlite_target_test_config(path_to_target_db: str) -> dict:
+def sqlite_target_test_config(path_to_target_db: Path) -> dict:
"""Get configuration dictionary for target-csv."""
return {"path_to_db": str(path_to_target_db)}
@@ -67,6 +67,7 @@ def sqlite_sample_target_batch(sqlite_target_test_config):
def test_sync_sqlite_to_sqlite(
sqlite_sample_tap: SQLTap,
sqlite_sample_target: SQLTarget,
+ sqlite_sample_db_catalog: Catalog,
):
"""End-to-end-to-end test for SQLite tap and target.
@@ -84,8 +85,10 @@ def test_sync_sqlite_to_sqlite(
)
orig_stdout.seek(0)
tapped_config = dict(sqlite_sample_target.config)
- catalog = _get_tap_catalog(SQLiteTap, config=tapped_config, select_all=True)
- tapped_target = SQLiteTap(config=tapped_config, catalog=catalog)
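+    # Reuse the pre-discovered catalog fixture instead of re-running discovery.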
+ tapped_target = SQLiteTap(
+ config=tapped_config,
+ catalog=sqlite_sample_db_catalog.to_dict(),
+ )
new_stdout, _ = tap_sync_test(tapped_target)
orig_stdout.seek(0)
@@ -157,7 +160,7 @@ def test_sqlite_schema_addition(sqlite_sample_target: SQLTarget):
]
)
    # SQLite doesn't support schema creation
- with pytest.raises(sqlalchemy.exc.OperationalError) as excinfo:
+ with pytest.raises(sa.exc.OperationalError) as excinfo:
target_sync_test(
sqlite_sample_target,
input=StringIO(tap_output),
@@ -350,6 +353,52 @@ def test_sqlite_process_batch_message(
assert cursor.fetchone()[0] == 4
+def test_sqlite_process_batch_parquet(
+ sqlite_target_test_config: dict,
+ sqlite_sample_target_batch: SQLiteTarget,
+):
+ """Test handling a Parquet batch message for the SQLite target."""
+ config = {
+ **sqlite_target_test_config,
+ "batch_config": {
+ "encoding": {"format": "parquet", "compression": "gzip"},
+ "batch_size": 100,
+ },
+ }
+ schema_message = {
+ "type": "SCHEMA",
+ "stream": "continents",
+ "key_properties": ["id"],
+ "schema": {
+ "required": ["id"],
+ "type": "object",
+ "properties": {
+ "code": {"type": "string"},
+ "name": {"type": "string"},
+ },
+ },
+ }
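+    # The BATCH message points the target at a pre-encoded gzip Parquet file.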
+ batch_message = {
+ "type": "BATCH",
+ "stream": "continents",
+ "encoding": {"format": "parquet", "compression": "gzip"},
+ "manifest": [
+ "file://tests/core/resources/continents.parquet.gz",
+ ],
+ }
+ tap_output = "\n".join([json.dumps(schema_message), json.dumps(batch_message)])
+
+ target_sync_test(
+ sqlite_sample_target_batch,
+ input=StringIO(tap_output),
+ finalize=True,
+ )
+ db = sqlite3.connect(config["path_to_db"])
+ cursor = db.cursor()
+ cursor.execute("SELECT COUNT(*) as count FROM continents")
+ assert cursor.fetchone()[0] == 7
+
+
def test_sqlite_column_no_morph(sqlite_sample_target: SQLTarget):
"""End-to-end-to-end test for SQLite tap and target.
@@ -396,6 +445,35 @@ def test_sqlite_column_no_morph(sqlite_sample_target: SQLTarget):
target_sync_test(sqlite_sample_target, input=StringIO(tap_output_b), finalize=True)
+def test_record_with_missing_properties(
+ sqlite_sample_target: SQLTarget,
+):
+ """Test handling of records with missing properties."""
+ tap_output = "\n".join(
+ json.dumps(msg)
+ for msg in [
+ {
+ "type": "SCHEMA",
+ "stream": "test_stream",
+ "schema": {
+ "type": "object",
+ "properties": {
+ "id": {"type": "integer"},
+ "name": {"type": "string"},
+ },
+ },
+ "key_properties": ["id"],
+ },
+ {
+ "type": "RECORD",
+ "stream": "test_stream",
+ "record": {"id": 1},
+ },
+ ]
+ )
+ target_sync_test(sqlite_sample_target, input=StringIO(tap_output), finalize=True)
+
+
@pytest.mark.parametrize(
"stream_name,schema,key_properties,expected_dml",
[
@@ -479,3 +557,48 @@ def test_hostile_to_sqlite(
"hname_starts_with_number",
"name_with_emoji_",
}
+
+
+def test_overwrite_load_method(
+ sqlite_target_test_config: dict,
+):
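+    """A second sync with load_method=overwrite should replace existing rows."""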
+ sqlite_target_test_config["load_method"] = "overwrite"
+ target = SQLiteTarget(config=sqlite_target_test_config)
+ test_tbl = f"zzz_tmp_{str(uuid4()).split('-')[-1]}"
+ schema_msg = {
+ "type": "SCHEMA",
+ "stream": test_tbl,
+ "schema": {
+ "type": "object",
+ "properties": {"col_a": th.StringType().to_dict()},
+ },
+ }
+
+ tap_output_a = "\n".join(
+ json.dumps(msg)
+ for msg in [
+ schema_msg,
+ {"type": "RECORD", "stream": test_tbl, "record": {"col_a": "123"}},
+ ]
+ )
+    # Connect to the database so table contents can be verified after each sync.
+ db = sqlite3.connect(sqlite_target_test_config["path_to_db"])
+ cursor = db.cursor()
+
+ target_sync_test(target, input=StringIO(tap_output_a), finalize=True)
+    cursor.execute(f"SELECT col_a FROM {test_tbl};")  # noqa: S608
+ records = [res[0] for res in cursor.fetchall()]
+ assert records == ["123"]
+
+ tap_output_b = "\n".join(
+ json.dumps(msg)
+ for msg in [
+ schema_msg,
+ {"type": "RECORD", "stream": test_tbl, "record": {"col_a": "456"}},
+ ]
+ )
+ target = SQLiteTarget(config=sqlite_target_test_config)
+ target_sync_test(target, input=StringIO(tap_output_b), finalize=True)
+    cursor.execute(f"SELECT col_a FROM {test_tbl};")  # noqa: S608
+ records = [res[0] for res in cursor.fetchall()]
+ assert records == ["456"]
diff --git a/tests/snapshots/countries_write_schemas/countries_write_schemas b/tests/snapshots/countries_write_schemas/countries_write_schemas
index b0808ce23..02b567fc9 100644
--- a/tests/snapshots/countries_write_schemas/countries_write_schemas
+++ b/tests/snapshots/countries_write_schemas/countries_write_schemas
@@ -1,2 +1,2 @@
-{"type": "SCHEMA", "stream": "continents", "schema": {"properties": {"code": {"type": ["null", "string"]}, "name": {"type": ["null", "string"]}}, "type": "object"}, "key_properties": ["code"]}
+{"type": "SCHEMA", "stream": "continents", "schema": {"properties": {"code": {"type": ["string", "null"]}, "name": {"type": ["string", "null"]}}, "type": "object"}, "key_properties": ["code"]}
{"type": "SCHEMA", "stream": "countries", "schema": {"properties": {"code": {"type": ["string", "null"]}, "name": {"type": ["string", "null"]}, "native": {"type": ["string", "null"]}, "phone": {"type": ["string", "null"]}, "capital": {"type": ["string", "null"]}, "currency": {"type": ["string", "null"]}, "emoji": {"type": ["string", "null"]}, "continent": {"properties": {"code": {"type": ["string", "null"]}, "name": {"type": ["string", "null"]}}, "type": ["object", "null"]}, "languages": {"items": {"properties": {"code": {"type": ["string", "null"]}, "name": {"type": ["string", "null"]}}, "type": "object"}, "type": ["array", "null"]}}, "type": "object"}, "key_properties": ["code"]}
diff --git a/tests/snapshots/mapped_stream/aliased_stream.jsonl b/tests/snapshots/mapped_stream/aliased_stream.jsonl
index 46d5daffe..8df28ddf4 100644
--- a/tests/snapshots/mapped_stream/aliased_stream.jsonl
+++ b/tests/snapshots/mapped_stream/aliased_stream.jsonl
@@ -1,6 +1,6 @@
{"type": "STATE", "value": {}}
-{"type": "SCHEMA", "stream": "aliased_stream", "schema": {"properties": {"email": {"type": ["string", "null"]}, "count": {"type": ["integer", "null"]}, "user": {"properties": {"id": {"type": ["integer", "null"]}, "sub": {"properties": {"num": {"type": ["integer", "null"]}}, "type": ["object", "null"]}}, "type": ["object", "null"]}}, "type": "object"}, "key_properties": []}
-{"type": "RECORD", "stream": "aliased_stream", "record": {"email": "alice@example.com", "count": 21, "user": {"id": 1, "sub": {"num": 1}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
-{"type": "RECORD", "stream": "aliased_stream", "record": {"email": "bob@example.com", "count": 13, "user": {"id": 2, "sub": {"num": 2}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
-{"type": "RECORD", "stream": "aliased_stream", "record": {"email": "charlie@example.com", "count": 19, "user": {"id": 3, "sub": {"num": 3}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "SCHEMA", "stream": "aliased_stream", "schema": {"properties": {"email": {"type": ["string", "null"]}, "count": {"type": ["integer", "null"]}, "user": {"properties": {"id": {"type": ["integer", "null"]}, "sub": {"properties": {"num": {"type": ["integer", "null"]}}, "type": ["object", "null"]}, "some_numbers": {"items": {"type": ["number"]}, "type": ["array", "null"]}}, "type": ["object", "null"]}}, "type": "object"}, "key_properties": []}
+{"type": "RECORD", "stream": "aliased_stream", "record": {"email": "alice@example.com", "count": 21, "user": {"id": 1, "sub": {"num": 1}, "some_numbers": [3.14, 2.718]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "aliased_stream", "record": {"email": "bob@example.com", "count": 13, "user": {"id": 2, "sub": {"num": 2}, "some_numbers": [10.32, 1.618]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "aliased_stream", "record": {"email": "charlie@example.com", "count": 19, "user": {"id": 3, "sub": {"num": 3}, "some_numbers": [1.414, 1.732]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
{"type": "STATE", "value": {"bookmarks": {"mystream": {}}}}
diff --git a/tests/snapshots/mapped_stream/drop_property.jsonl b/tests/snapshots/mapped_stream/drop_property.jsonl
index 8694f4736..aece20a30 100644
--- a/tests/snapshots/mapped_stream/drop_property.jsonl
+++ b/tests/snapshots/mapped_stream/drop_property.jsonl
@@ -1,6 +1,6 @@
{"type": "STATE", "value": {}}
-{"type": "SCHEMA", "stream": "mystream", "schema": {"properties": {"count": {"type": ["integer", "null"]}, "user": {"properties": {"id": {"type": ["integer", "null"]}, "sub": {"properties": {"num": {"type": ["integer", "null"]}}, "type": ["object", "null"]}}, "type": ["object", "null"]}}, "type": "object"}, "key_properties": []}
-{"type": "RECORD", "stream": "mystream", "record": {"count": 21, "user": {"id": 1, "sub": {"num": 1}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
-{"type": "RECORD", "stream": "mystream", "record": {"count": 13, "user": {"id": 2, "sub": {"num": 2}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
-{"type": "RECORD", "stream": "mystream", "record": {"count": 19, "user": {"id": 3, "sub": {"num": 3}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "SCHEMA", "stream": "mystream", "schema": {"properties": {"count": {"type": ["integer", "null"]}, "user": {"properties": {"id": {"type": ["integer", "null"]}, "sub": {"properties": {"num": {"type": ["integer", "null"]}}, "type": ["object", "null"]}, "some_numbers": {"items": {"type": ["number"]}, "type": ["array", "null"]}}, "type": ["object", "null"]}}, "type": "object"}, "key_properties": []}
+{"type": "RECORD", "stream": "mystream", "record": {"count": 21, "user": {"id": 1, "sub": {"num": 1}, "some_numbers": [3.14, 2.718]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "mystream", "record": {"count": 13, "user": {"id": 2, "sub": {"num": 2}, "some_numbers": [10.32, 1.618]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "mystream", "record": {"count": 19, "user": {"id": 3, "sub": {"num": 3}, "some_numbers": [1.414, 1.732]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
{"type": "STATE", "value": {"bookmarks": {"mystream": {}}}}
diff --git a/tests/snapshots/mapped_stream/drop_property_null_string.jsonl b/tests/snapshots/mapped_stream/drop_property_null_string.jsonl
index 8694f4736..aece20a30 100644
--- a/tests/snapshots/mapped_stream/drop_property_null_string.jsonl
+++ b/tests/snapshots/mapped_stream/drop_property_null_string.jsonl
@@ -1,6 +1,6 @@
{"type": "STATE", "value": {}}
-{"type": "SCHEMA", "stream": "mystream", "schema": {"properties": {"count": {"type": ["integer", "null"]}, "user": {"properties": {"id": {"type": ["integer", "null"]}, "sub": {"properties": {"num": {"type": ["integer", "null"]}}, "type": ["object", "null"]}}, "type": ["object", "null"]}}, "type": "object"}, "key_properties": []}
-{"type": "RECORD", "stream": "mystream", "record": {"count": 21, "user": {"id": 1, "sub": {"num": 1}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
-{"type": "RECORD", "stream": "mystream", "record": {"count": 13, "user": {"id": 2, "sub": {"num": 2}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
-{"type": "RECORD", "stream": "mystream", "record": {"count": 19, "user": {"id": 3, "sub": {"num": 3}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "SCHEMA", "stream": "mystream", "schema": {"properties": {"count": {"type": ["integer", "null"]}, "user": {"properties": {"id": {"type": ["integer", "null"]}, "sub": {"properties": {"num": {"type": ["integer", "null"]}}, "type": ["object", "null"]}, "some_numbers": {"items": {"type": ["number"]}, "type": ["array", "null"]}}, "type": ["object", "null"]}}, "type": "object"}, "key_properties": []}
+{"type": "RECORD", "stream": "mystream", "record": {"count": 21, "user": {"id": 1, "sub": {"num": 1}, "some_numbers": [3.14, 2.718]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "mystream", "record": {"count": 13, "user": {"id": 2, "sub": {"num": 2}, "some_numbers": [10.32, 1.618]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "mystream", "record": {"count": 19, "user": {"id": 3, "sub": {"num": 3}, "some_numbers": [1.414, 1.732]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
{"type": "STATE", "value": {"bookmarks": {"mystream": {}}}}
diff --git a/tests/snapshots/mapped_stream/flatten_all.jsonl b/tests/snapshots/mapped_stream/flatten_all.jsonl
index c54db1563..e588c9dc1 100644
--- a/tests/snapshots/mapped_stream/flatten_all.jsonl
+++ b/tests/snapshots/mapped_stream/flatten_all.jsonl
@@ -1,6 +1,6 @@
{"type": "STATE", "value": {}}
-{"type": "SCHEMA", "stream": "mystream", "schema": {"properties": {"email": {"type": ["string", "null"]}, "count": {"type": ["integer", "null"]}, "user__id": {"type": ["integer", "null"]}, "user__sub__num": {"type": ["integer", "null"]}}, "type": "object"}, "key_properties": []}
-{"type": "RECORD", "stream": "mystream", "record": {"email": "alice@example.com", "count": 21, "user__id": 1, "user__sub__num": 1}, "time_extracted": "2022-01-01T00:00:00+00:00"}
-{"type": "RECORD", "stream": "mystream", "record": {"email": "bob@example.com", "count": 13, "user__id": 2, "user__sub__num": 2}, "time_extracted": "2022-01-01T00:00:00+00:00"}
-{"type": "RECORD", "stream": "mystream", "record": {"email": "charlie@example.com", "count": 19, "user__id": 3, "user__sub__num": 3}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "SCHEMA", "stream": "mystream", "schema": {"properties": {"email": {"type": ["string", "null"]}, "count": {"type": ["integer", "null"]}, "user__id": {"type": ["integer", "null"]}, "user__sub__num": {"type": ["integer", "null"]}, "user__some_numbers": {"type": ["string", "null"]}}, "type": "object"}, "key_properties": []}
+{"type": "RECORD", "stream": "mystream", "record": {"email": "alice@example.com", "count": 21, "user__id": 1, "user__sub__num": 1, "user__some_numbers": "[3.14, 2.718]"}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "mystream", "record": {"email": "bob@example.com", "count": 13, "user__id": 2, "user__sub__num": 2, "user__some_numbers": "[10.32, 1.618]"}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "mystream", "record": {"email": "charlie@example.com", "count": 19, "user__id": 3, "user__sub__num": 3, "user__some_numbers": "[1.414, 1.732]"}, "time_extracted": "2022-01-01T00:00:00+00:00"}
{"type": "STATE", "value": {"bookmarks": {"mystream": {}}}}
diff --git a/tests/snapshots/mapped_stream/flatten_depth_0.jsonl b/tests/snapshots/mapped_stream/flatten_depth_0.jsonl
new file mode 100644
index 000000000..7a8b54581
--- /dev/null
+++ b/tests/snapshots/mapped_stream/flatten_depth_0.jsonl
@@ -0,0 +1,6 @@
+{"type": "STATE", "value": {}}
+{"type": "SCHEMA", "stream": "mystream", "schema": {"properties": {"email": {"type": ["string", "null"]}, "count": {"type": ["integer", "null"]}, "user": {"properties": {"id": {"type": ["integer", "null"]}, "sub": {"properties": {"num": {"type": ["integer", "null"]}}, "type": ["object", "null"]}, "some_numbers": {"items": {"type": ["number"]}, "type": ["array", "null"]}}, "type": ["object", "null"]}}, "type": "object"}, "key_properties": []}
+{"type": "RECORD", "stream": "mystream", "record": {"email": "alice@example.com", "count": 21, "user": {"id": 1, "sub": {"num": 1}, "some_numbers": [3.14, 2.718]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "mystream", "record": {"email": "bob@example.com", "count": 13, "user": {"id": 2, "sub": {"num": 2}, "some_numbers": [10.32, 1.618]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "mystream", "record": {"email": "charlie@example.com", "count": 19, "user": {"id": 3, "sub": {"num": 3}, "some_numbers": [1.414, 1.732]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "STATE", "value": {"bookmarks": {"mystream": {}}}}
diff --git a/tests/snapshots/mapped_stream/flatten_depth_1.jsonl b/tests/snapshots/mapped_stream/flatten_depth_1.jsonl
index 275e3295c..e3f6cd05f 100644
--- a/tests/snapshots/mapped_stream/flatten_depth_1.jsonl
+++ b/tests/snapshots/mapped_stream/flatten_depth_1.jsonl
@@ -1,6 +1,6 @@
{"type": "STATE", "value": {}}
-{"type": "SCHEMA", "stream": "mystream", "schema": {"properties": {"email": {"type": ["string", "null"]}, "count": {"type": ["integer", "null"]}, "user__id": {"type": ["integer", "null"]}, "user__sub": {"properties": {"num": {"type": ["integer", "null"]}}, "type": ["object", "null"]}}, "type": "object"}, "key_properties": []}
-{"type": "RECORD", "stream": "mystream", "record": {"email": "alice@example.com", "count": 21, "user__id": 1, "user__sub": "{\"num\": 1}"}, "time_extracted": "2022-01-01T00:00:00+00:00"}
-{"type": "RECORD", "stream": "mystream", "record": {"email": "bob@example.com", "count": 13, "user__id": 2, "user__sub": "{\"num\": 2}"}, "time_extracted": "2022-01-01T00:00:00+00:00"}
-{"type": "RECORD", "stream": "mystream", "record": {"email": "charlie@example.com", "count": 19, "user__id": 3, "user__sub": "{\"num\": 3}"}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "SCHEMA", "stream": "mystream", "schema": {"properties": {"email": {"type": ["string", "null"]}, "count": {"type": ["integer", "null"]}, "user__id": {"type": ["integer", "null"]}, "user__sub": {"type": ["string", "null"]}, "user__some_numbers": {"type": ["string", "null"]}}, "type": "object"}, "key_properties": []}
+{"type": "RECORD", "stream": "mystream", "record": {"email": "alice@example.com", "count": 21, "user__id": 1, "user__sub": "{\"num\": 1}", "user__some_numbers": "[3.14, 2.718]"}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "mystream", "record": {"email": "bob@example.com", "count": 13, "user__id": 2, "user__sub": "{\"num\": 2}", "user__some_numbers": "[10.32, 1.618]"}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "mystream", "record": {"email": "charlie@example.com", "count": 19, "user__id": 3, "user__sub": "{\"num\": 3}", "user__some_numbers": "[1.414, 1.732]"}, "time_extracted": "2022-01-01T00:00:00+00:00"}
{"type": "STATE", "value": {"bookmarks": {"mystream": {}}}}
diff --git a/tests/snapshots/mapped_stream/keep_all_fields.jsonl b/tests/snapshots/mapped_stream/keep_all_fields.jsonl
index 13ddce438..da476f6a5 100644
--- a/tests/snapshots/mapped_stream/keep_all_fields.jsonl
+++ b/tests/snapshots/mapped_stream/keep_all_fields.jsonl
@@ -1,6 +1,6 @@
{"type": "STATE", "value": {}}
-{"type": "SCHEMA", "stream": "mystream", "schema": {"properties": {"email": {"type": ["string", "null"]}, "count": {"type": ["integer", "null"]}, "user": {"properties": {"id": {"type": ["integer", "null"]}, "sub": {"properties": {"num": {"type": ["integer", "null"]}}, "type": ["object", "null"]}}, "type": ["object", "null"]}, "email_hash": {"type": ["string", "null"]}}, "type": "object"}, "key_properties": []}
-{"type": "RECORD", "stream": "mystream", "record": {"email": "alice@example.com", "count": 21, "user": {"id": 1, "sub": {"num": 1}}, "email_hash": "c160f8cc69a4f0bf2b0362752353d060"}, "time_extracted": "2022-01-01T00:00:00+00:00"}
-{"type": "RECORD", "stream": "mystream", "record": {"email": "bob@example.com", "count": 13, "user": {"id": 2, "sub": {"num": 2}}, "email_hash": "4b9bb80620f03eb3719e0a061c14283d"}, "time_extracted": "2022-01-01T00:00:00+00:00"}
-{"type": "RECORD", "stream": "mystream", "record": {"email": "charlie@example.com", "count": 19, "user": {"id": 3, "sub": {"num": 3}}, "email_hash": "426b189df1e2f359efe6ee90f2d2030f"}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "SCHEMA", "stream": "mystream", "schema": {"properties": {"email": {"type": ["string", "null"]}, "count": {"type": ["integer", "null"]}, "user": {"properties": {"id": {"type": ["integer", "null"]}, "sub": {"properties": {"num": {"type": ["integer", "null"]}}, "type": ["object", "null"]}, "some_numbers": {"items": {"type": ["number"]}, "type": ["array", "null"]}}, "type": ["object", "null"]}, "email_hash": {"type": ["string", "null"]}}, "type": "object"}, "key_properties": []}
+{"type": "RECORD", "stream": "mystream", "record": {"email": "alice@example.com", "count": 21, "user": {"id": 1, "sub": {"num": 1}, "some_numbers": [3.14, 2.718]}, "email_hash": "c160f8cc69a4f0bf2b0362752353d060"}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "mystream", "record": {"email": "bob@example.com", "count": 13, "user": {"id": 2, "sub": {"num": 2}, "some_numbers": [10.32, 1.618]}, "email_hash": "4b9bb80620f03eb3719e0a061c14283d"}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "mystream", "record": {"email": "charlie@example.com", "count": 19, "user": {"id": 3, "sub": {"num": 3}, "some_numbers": [1.414, 1.732]}, "email_hash": "426b189df1e2f359efe6ee90f2d2030f"}, "time_extracted": "2022-01-01T00:00:00+00:00"}
{"type": "STATE", "value": {"bookmarks": {"mystream": {}}}}
diff --git a/tests/snapshots/mapped_stream/map_and_flatten.jsonl b/tests/snapshots/mapped_stream/map_and_flatten.jsonl
index bf2620184..921094d05 100644
--- a/tests/snapshots/mapped_stream/map_and_flatten.jsonl
+++ b/tests/snapshots/mapped_stream/map_and_flatten.jsonl
@@ -1,6 +1,6 @@
{"type": "STATE", "value": {}}
-{"type": "SCHEMA", "stream": "mystream", "schema": {"properties": {"email": {"type": ["string", "null"]}, "count": {"type": ["integer", "null"]}, "user__id": {"type": ["integer", "null"]}, "user__sub__num": {"type": ["integer", "null"]}, "email_hash": {"type": ["string", "null"]}}, "type": "object"}, "key_properties": ["email_hash"]}
-{"type": "RECORD", "stream": "mystream", "record": {"email": "alice@example.com", "count": 21, "user__id": 1, "user__sub__num": 1, "email_hash": "c160f8cc69a4f0bf2b0362752353d060"}, "time_extracted": "2022-01-01T00:00:00+00:00"}
-{"type": "RECORD", "stream": "mystream", "record": {"email": "bob@example.com", "count": 13, "user__id": 2, "user__sub__num": 2, "email_hash": "4b9bb80620f03eb3719e0a061c14283d"}, "time_extracted": "2022-01-01T00:00:00+00:00"}
-{"type": "RECORD", "stream": "mystream", "record": {"email": "charlie@example.com", "count": 19, "user__id": 3, "user__sub__num": 3, "email_hash": "426b189df1e2f359efe6ee90f2d2030f"}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "SCHEMA", "stream": "mystream", "schema": {"properties": {"email": {"type": ["string", "null"]}, "count": {"type": ["integer", "null"]}, "user__id": {"type": ["integer", "null"]}, "user__sub__num": {"type": ["integer", "null"]}, "user__some_numbers": {"type": ["string", "null"]}, "email_hash": {"type": ["string", "null"]}}, "type": "object"}, "key_properties": ["email_hash"]}
+{"type": "RECORD", "stream": "mystream", "record": {"email": "alice@example.com", "count": 21, "user__id": 1, "user__sub__num": 1, "user__some_numbers": "[3.14, 2.718]", "email_hash": "c160f8cc69a4f0bf2b0362752353d060"}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "mystream", "record": {"email": "bob@example.com", "count": 13, "user__id": 2, "user__sub__num": 2, "user__some_numbers": "[10.32, 1.618]", "email_hash": "4b9bb80620f03eb3719e0a061c14283d"}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "mystream", "record": {"email": "charlie@example.com", "count": 19, "user__id": 3, "user__sub__num": 3, "user__some_numbers": "[1.414, 1.732]", "email_hash": "426b189df1e2f359efe6ee90f2d2030f"}, "time_extracted": "2022-01-01T00:00:00+00:00"}
{"type": "STATE", "value": {"bookmarks": {"mystream": {}}}}
diff --git a/tests/snapshots/mapped_stream/no_map.jsonl b/tests/snapshots/mapped_stream/no_map.jsonl
index 019b1f9d9..7a8b54581 100644
--- a/tests/snapshots/mapped_stream/no_map.jsonl
+++ b/tests/snapshots/mapped_stream/no_map.jsonl
@@ -1,6 +1,6 @@
{"type": "STATE", "value": {}}
-{"type": "SCHEMA", "stream": "mystream", "schema": {"properties": {"email": {"type": ["string", "null"]}, "count": {"type": ["integer", "null"]}, "user": {"properties": {"id": {"type": ["integer", "null"]}, "sub": {"properties": {"num": {"type": ["integer", "null"]}}, "type": ["object", "null"]}}, "type": ["object", "null"]}}, "type": "object"}, "key_properties": []}
-{"type": "RECORD", "stream": "mystream", "record": {"email": "alice@example.com", "count": 21, "user": {"id": 1, "sub": {"num": 1}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
-{"type": "RECORD", "stream": "mystream", "record": {"email": "bob@example.com", "count": 13, "user": {"id": 2, "sub": {"num": 2}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
-{"type": "RECORD", "stream": "mystream", "record": {"email": "charlie@example.com", "count": 19, "user": {"id": 3, "sub": {"num": 3}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "SCHEMA", "stream": "mystream", "schema": {"properties": {"email": {"type": ["string", "null"]}, "count": {"type": ["integer", "null"]}, "user": {"properties": {"id": {"type": ["integer", "null"]}, "sub": {"properties": {"num": {"type": ["integer", "null"]}}, "type": ["object", "null"]}, "some_numbers": {"items": {"type": ["number"]}, "type": ["array", "null"]}}, "type": ["object", "null"]}}, "type": "object"}, "key_properties": []}
+{"type": "RECORD", "stream": "mystream", "record": {"email": "alice@example.com", "count": 21, "user": {"id": 1, "sub": {"num": 1}, "some_numbers": [3.14, 2.718]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "mystream", "record": {"email": "bob@example.com", "count": 13, "user": {"id": 2, "sub": {"num": 2}, "some_numbers": [10.32, 1.618]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "mystream", "record": {"email": "charlie@example.com", "count": 19, "user": {"id": 3, "sub": {"num": 3}, "some_numbers": [1.414, 1.732]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
{"type": "STATE", "value": {"bookmarks": {"mystream": {}}}}
diff --git a/tests/snapshots/mapped_stream/record_to_column.jsonl b/tests/snapshots/mapped_stream/record_to_column.jsonl
new file mode 100644
index 000000000..8fc3efb21
--- /dev/null
+++ b/tests/snapshots/mapped_stream/record_to_column.jsonl
@@ -0,0 +1,6 @@
+{"type": "STATE", "value": {}}
+{"type": "SCHEMA", "stream": "mystream", "schema": {"type": "object", "properties": {"_data": {"properties": {"email": {"type": ["string", "null"]}, "count": {"type": ["integer", "null"]}, "user": {"properties": {"id": {"type": ["integer", "null"]}, "sub": {"properties": {"num": {"type": ["integer", "null"]}}, "type": ["object", "null"]}, "some_numbers": {"items": {"type": ["number"]}, "type": ["array", "null"]}}, "type": ["object", "null"]}}, "type": ["object", "null"]}}}, "key_properties": []}
+{"type": "RECORD", "stream": "mystream", "record": {"_data": {"email": "alice@example.com", "count": 21, "user": {"id": 1, "sub": {"num": 1}, "some_numbers": [3.14, 2.718]}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "mystream", "record": {"_data": {"email": "bob@example.com", "count": 13, "user": {"id": 2, "sub": {"num": 2}, "some_numbers": [10.32, 1.618]}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "mystream", "record": {"_data": {"email": "charlie@example.com", "count": 19, "user": {"id": 3, "sub": {"num": 3}, "some_numbers": [1.414, 1.732]}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "STATE", "value": {"bookmarks": {"mystream": {}}}}
diff --git a/tests/snapshots/mapped_stream/sourced_stream_1.jsonl b/tests/snapshots/mapped_stream/sourced_stream_1.jsonl
index e63d03815..a68add19c 100644
--- a/tests/snapshots/mapped_stream/sourced_stream_1.jsonl
+++ b/tests/snapshots/mapped_stream/sourced_stream_1.jsonl
@@ -1,6 +1,6 @@
{"type": "STATE", "value": {}}
-{"type": "SCHEMA", "stream": "sourced_stream_1", "schema": {"properties": {"email": {"type": ["string", "null"]}, "count": {"type": ["integer", "null"]}, "user": {"properties": {"id": {"type": ["integer", "null"]}, "sub": {"properties": {"num": {"type": ["integer", "null"]}}, "type": ["object", "null"]}}, "type": ["object", "null"]}}, "type": "object"}, "key_properties": []}
-{"type": "RECORD", "stream": "sourced_stream_1", "record": {"email": "alice@example.com", "count": 21, "user": {"id": 1, "sub": {"num": 1}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
-{"type": "RECORD", "stream": "sourced_stream_1", "record": {"email": "bob@example.com", "count": 13, "user": {"id": 2, "sub": {"num": 2}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
-{"type": "RECORD", "stream": "sourced_stream_1", "record": {"email": "charlie@example.com", "count": 19, "user": {"id": 3, "sub": {"num": 3}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "SCHEMA", "stream": "sourced_stream_1", "schema": {"properties": {"email": {"type": ["string", "null"]}, "count": {"type": ["integer", "null"]}, "user": {"properties": {"id": {"type": ["integer", "null"]}, "sub": {"properties": {"num": {"type": ["integer", "null"]}}, "type": ["object", "null"]}, "some_numbers": {"items": {"type": ["number"]}, "type": ["array", "null"]}}, "type": ["object", "null"]}}, "type": "object"}, "key_properties": []}
+{"type": "RECORD", "stream": "sourced_stream_1", "record": {"email": "alice@example.com", "count": 21, "user": {"id": 1, "sub": {"num": 1}, "some_numbers": [3.14, 2.718]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "sourced_stream_1", "record": {"email": "bob@example.com", "count": 13, "user": {"id": 2, "sub": {"num": 2}, "some_numbers": [10.32, 1.618]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "sourced_stream_1", "record": {"email": "charlie@example.com", "count": 19, "user": {"id": 3, "sub": {"num": 3}, "some_numbers": [1.414, 1.732]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
{"type": "STATE", "value": {"bookmarks": {"mystream": {}}}}
diff --git a/tests/snapshots/mapped_stream/sourced_stream_1_null_string.jsonl b/tests/snapshots/mapped_stream/sourced_stream_1_null_string.jsonl
index e63d03815..a68add19c 100644
--- a/tests/snapshots/mapped_stream/sourced_stream_1_null_string.jsonl
+++ b/tests/snapshots/mapped_stream/sourced_stream_1_null_string.jsonl
@@ -1,6 +1,6 @@
{"type": "STATE", "value": {}}
-{"type": "SCHEMA", "stream": "sourced_stream_1", "schema": {"properties": {"email": {"type": ["string", "null"]}, "count": {"type": ["integer", "null"]}, "user": {"properties": {"id": {"type": ["integer", "null"]}, "sub": {"properties": {"num": {"type": ["integer", "null"]}}, "type": ["object", "null"]}}, "type": ["object", "null"]}}, "type": "object"}, "key_properties": []}
-{"type": "RECORD", "stream": "sourced_stream_1", "record": {"email": "alice@example.com", "count": 21, "user": {"id": 1, "sub": {"num": 1}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
-{"type": "RECORD", "stream": "sourced_stream_1", "record": {"email": "bob@example.com", "count": 13, "user": {"id": 2, "sub": {"num": 2}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
-{"type": "RECORD", "stream": "sourced_stream_1", "record": {"email": "charlie@example.com", "count": 19, "user": {"id": 3, "sub": {"num": 3}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "SCHEMA", "stream": "sourced_stream_1", "schema": {"properties": {"email": {"type": ["string", "null"]}, "count": {"type": ["integer", "null"]}, "user": {"properties": {"id": {"type": ["integer", "null"]}, "sub": {"properties": {"num": {"type": ["integer", "null"]}}, "type": ["object", "null"]}, "some_numbers": {"items": {"type": ["number"]}, "type": ["array", "null"]}}, "type": ["object", "null"]}}, "type": "object"}, "key_properties": []}
+{"type": "RECORD", "stream": "sourced_stream_1", "record": {"email": "alice@example.com", "count": 21, "user": {"id": 1, "sub": {"num": 1}, "some_numbers": [3.14, 2.718]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "sourced_stream_1", "record": {"email": "bob@example.com", "count": 13, "user": {"id": 2, "sub": {"num": 2}, "some_numbers": [10.32, 1.618]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "sourced_stream_1", "record": {"email": "charlie@example.com", "count": 19, "user": {"id": 3, "sub": {"num": 3}, "some_numbers": [1.414, 1.732]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
{"type": "STATE", "value": {"bookmarks": {"mystream": {}}}}
diff --git a/tests/snapshots/mapped_stream/sourced_stream_2.jsonl b/tests/snapshots/mapped_stream/sourced_stream_2.jsonl
index 41cce23d7..234551006 100644
--- a/tests/snapshots/mapped_stream/sourced_stream_2.jsonl
+++ b/tests/snapshots/mapped_stream/sourced_stream_2.jsonl
@@ -1,6 +1,6 @@
{"type": "STATE", "value": {}}
-{"type": "SCHEMA", "stream": "sourced_stream_2", "schema": {"properties": {"email": {"type": ["string", "null"]}, "count": {"type": ["integer", "null"]}, "user": {"properties": {"id": {"type": ["integer", "null"]}, "sub": {"properties": {"num": {"type": ["integer", "null"]}}, "type": ["object", "null"]}}, "type": ["object", "null"]}}, "type": "object"}, "key_properties": []}
-{"type": "RECORD", "stream": "sourced_stream_2", "record": {"email": "alice@example.com", "count": 21, "user": {"id": 1, "sub": {"num": 1}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
-{"type": "RECORD", "stream": "sourced_stream_2", "record": {"email": "bob@example.com", "count": 13, "user": {"id": 2, "sub": {"num": 2}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
-{"type": "RECORD", "stream": "sourced_stream_2", "record": {"email": "charlie@example.com", "count": 19, "user": {"id": 3, "sub": {"num": 3}}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "SCHEMA", "stream": "sourced_stream_2", "schema": {"properties": {"email": {"type": ["string", "null"]}, "count": {"type": ["integer", "null"]}, "user": {"properties": {"id": {"type": ["integer", "null"]}, "sub": {"properties": {"num": {"type": ["integer", "null"]}}, "type": ["object", "null"]}, "some_numbers": {"items": {"type": ["number"]}, "type": ["array", "null"]}}, "type": ["object", "null"]}}, "type": "object"}, "key_properties": []}
+{"type": "RECORD", "stream": "sourced_stream_2", "record": {"email": "alice@example.com", "count": 21, "user": {"id": 1, "sub": {"num": 1}, "some_numbers": [3.14, 2.718]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "sourced_stream_2", "record": {"email": "bob@example.com", "count": 13, "user": {"id": 2, "sub": {"num": 2}, "some_numbers": [10.32, 1.618]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
+{"type": "RECORD", "stream": "sourced_stream_2", "record": {"email": "charlie@example.com", "count": 19, "user": {"id": 3, "sub": {"num": 3}, "some_numbers": [1.414, 1.732]}}, "time_extracted": "2022-01-01T00:00:00+00:00"}
{"type": "STATE", "value": {"bookmarks": {"mystream": {}}}}