From f31688a2eb870f7bd5533ec840fb35f8a6ae804d Mon Sep 17 00:00:00 2001 From: Mark Date: Fri, 23 Sep 2022 17:56:48 +1000 Subject: [PATCH] feat: Change to a cli package. (#7) --- .flake8 | 10 - .github/CODEOWNERS | 2 + .github/ISSUE_TEMPLATE/bug-report.yml | 48 + .github/ISSUE_TEMPLATE/feature-request.yml | 31 + .github/dependabot.yml | 26 + .github/labeler.yml | 11 + .github/release.yml | 18 + .github/workflows/app-tests.yml | 84 -- .github/workflows/code-analysis.yml | 81 ++ .github/workflows/codeql-analysis.yml | 59 -- .github/workflows/create-package.yml | 127 +++ .github/workflows/labeler.yml | 21 + .gitignore | 125 ++- .idea/.gitignore | 8 - .idea/gather-vision.iml | 33 +- .idea/inspectionProfiles/Project_Default.xml | 2 +- .idea/misc.xml | 2 +- .idea/sqldialects.xml | 6 - .idea/watcherTasks.xml | 2 +- CHANGELOG.md | 4 + CONTRIBUTING.md | 133 +++ MANIFEST.in | 3 + README.md | 16 +- VERSION | 1 + docs/doc-search.html | 141 +++ docs/gather_vision/app.html | 556 +++++++++++ docs/gather_vision/cli.html | 534 ++++++++++ docs/gather_vision/index.html | 155 +++ docs/gather_vision/model.html | 299 ++++++ docs/gather_vision/plugin.html | 260 +++++ docs/gather_vision/utils.html | 316 ++++++ docs/index.html | 21 + docs/index.js | 217 ++++ gather_vision/__init__.py | 0 gather_vision/admin/__init__.py | 19 - gather_vision/admin/outages.py | 67 -- gather_vision/admin/petitions.py | 32 - gather_vision/admin/playlists.py | 41 - gather_vision/admin/shared.py | 8 - gather_vision/admin/transport.py | 41 - gather_vision/apps.py | 9 - gather_vision/management/__init__.py | 0 gather_vision/management/commands/__init__.py | 0 .../management/commands/visionprocess.py | 85 -- gather_vision/migrations/0001_initial.py | 12 - gather_vision/migrations/0002_initial.py | 194 ---- ...3_alter_petitionchange_options_and_more.py | 55 -- gather_vision/migrations/__init__.py | 0 gather_vision/models/__init__.py | 20 - gather_vision/models/abstract_base.py | 16 - gather_vision/models/information_source.py | 41 - gather_vision/models/outage_group.py | 57 -- gather_vision/models/outage_item.py | 69 -- gather_vision/models/petition_change.py | 37 - gather_vision/models/petition_item.py | 77 -- gather_vision/models/playlist_entry.py | 45 - gather_vision/models/playlist_item.py | 22 - gather_vision/models/playlist_track.py | 50 - gather_vision/models/transport_item.py | 311 ------ gather_vision/models/transport_line.py | 15 - gather_vision/process/__init__.py | 0 gather_vision/process/cache/__init__.py | 0 .../cache/available_expiration_time.py | 46 - .../process/cache/external_http_cache.py | 79 -- gather_vision/process/cache/local_cache.py | 83 -- gather_vision/process/component/__init__.py | 0 gather_vision/process/component/cache.py | 24 - .../process/component/html_extract.py | 49 - .../process/component/http_client.py | 69 -- gather_vision/process/component/ical.py | 90 -- gather_vision/process/component/logger.py | 56 -- gather_vision/process/component/metadata.py | 72 -- gather_vision/process/component/normalise.py | 187 ---- .../process/component/spotify_client.py | 269 ----- .../process/component/sqlite_client.py | 36 - .../process/component/time_series.py | 100 -- .../process/component/youtube_music_client.py | 124 --- gather_vision/process/item/__init__.py | 3 - gather_vision/process/item/contact_trace.py | 62 -- gather_vision/process/item/playlist.py | 42 - gather_vision/process/item/playlist_conf.py | 12 - gather_vision/process/item/track.py | 81 -- gather_vision/process/item/transport_event.py | 39 - 
gather_vision/process/manage/__init__.py | 0 .../process/manage/contact_tracing.py | 14 - gather_vision/process/manage/outages.py | 48 - gather_vision/process/manage/petitions.py | 65 -- gather_vision/process/manage/playlists.py | 375 ------- gather_vision/process/manage/transport.py | 191 ---- gather_vision/process/service/__init__.py | 0 .../service/contact_tracing/__init__.py | 0 .../process/service/contact_tracing/au_qld.py | 190 ---- .../process/service/discord/__init__.py | 0 .../process/service/discord/client.py | 47 - .../process/service/discord/embed.py | 90 -- .../process/service/discord/embed_field.py | 16 - .../process/service/discord/webhook.py | 43 - .../process/service/outages/__init__.py | 0 .../process/service/outages/energex_events.py | 190 ---- .../process/service/outages/energex_import.py | 224 ----- .../process/service/petition/__init__.py | 0 .../process/service/petition/au_qld.py | 205 ---- .../process/service/petition/au_qld_bcc.py | 193 ---- .../service/petition/petition_import.py | 184 ---- .../process/service/playlist/__init__.py | 0 .../process/service/playlist/abc_radio.py | 247 ----- .../process/service/playlist/abstract.py | 103 -- .../process/service/playlist/last_fm.py | 186 ---- .../process/service/playlist/radio_4zzz.py | 302 ------ .../process/service/playlist/spotify.py | 246 ----- .../process/service/playlist/youtube_music.py | 211 ---- .../process/service/transport/__init__.py | 0 .../service/transport/qld_rail_events.py | 138 --- .../service/transport/translink_notices.py | 220 ----- gather_vision/process/support/__init__.py | 0 .../process/support/select_format_mixin.py | 40 - .../process/support/transport/__init__.py | 0 .../support/transport/render_csv_mixin.py | 39 - .../support/transport/render_ics_mixin.py | 50 - .../support/transport/render_json_mixin.py | 10 - .../support/transport/render_txt_mixin.py | 20 - .../gather_vision/android-chrome-192x192.png | Bin 7717 -> 0 bytes .../gather_vision/android-chrome-512x512.png | Bin 25774 -> 0 bytes .../static/gather_vision/apple-touch-icon.png | Bin 7356 -> 0 bytes .../static/gather_vision/favicon-16x16.png | Bin 372 -> 0 bytes .../static/gather_vision/favicon-32x32.png | Bin 731 -> 0 bytes .../static/gather_vision/favicon.ico | Bin 15406 -> 0 bytes .../gather_vision/graph-change-over-time.js | 5 - .../static/gather_vision/site.webmanifest | 1 - .../templates/gather_vision/about/index.html | 251 ----- .../gather_vision/contact_tracing/index.html | 8 - .../templates/gather_vision/home/index.html | 76 -- .../templates/gather_vision/layout.html | 81 -- .../gather_vision/nav_breadcrumb.html | 14 - .../templates/gather_vision/nav_link.html | 1 - .../gather_vision/outages/index.html | 20 - .../gather_vision/petitions/index.html | 19 - .../gather_vision/playlists/index.html | 8 - .../gather_vision/transport/index.html | 94 -- gather_vision/templatetags/__init__.py | 0 .../templatetags/gather_vision_extras.py | 40 - gather_vision/tests/__init__.py | 0 gather_vision/tests/process/__init__.py | 0 .../tests/process/component/__init__.py | 0 .../tests/process/component/test_abc_radio.py | 136 --- .../tests/process/component/test_last_fm.py | 118 --- gather_vision/tests/support.py | 71 -- gather_vision/tests/test_cache.py | 62 -- .../tests/test_component_normalise.py | 32 - .../tests/test_mgmt_cmd_contact_tracing.py | 163 --- gather_vision/tests/test_mgmt_cmd_outages.py | 312 ------ .../tests/test_mgmt_cmd_petitions.py | 321 ------ .../tests/test_mgmt_cmd_playlists.py | 10 - .../tests/test_mgmt_cmd_transport.py | 
282 ------ gather_vision/urls.py | 60 -- gather_vision/views/__init__.py | 0 gather_vision/views/contact_tracing.py | 7 - gather_vision/views/general.py | 12 - gather_vision/views/outages.py | 83 -- gather_vision/views/petitions.py | 77 -- gather_vision/views/playlists.py | 7 - gather_vision/views/transport.py | 84 -- gather_vision_proj/__init__.py | 0 gather_vision_proj/admin.py | 8 - gather_vision_proj/apps.py | 5 - gather_vision_proj/asgi.py | 16 - gather_vision_proj/gather_vision_env.py | 97 -- gather_vision_proj/settings.py | 153 --- gather_vision_proj/urls.py | 58 -- gather_vision_proj/wsgi.py | 16 - manage.py | 22 - poetry.lock | 925 ------------------ pyproject.toml | 154 +-- requirements-dev.txt | 41 + requirements.txt | 5 + setup.cfg | 4 + src/gather_vision/__init__.py | 5 + src/gather_vision/app.py | 108 ++ src/gather_vision/cli.py | 179 ++++ src/gather_vision/model.py | 39 + src/gather_vision/plugin.py | 33 + src/gather_vision/utils.py | 64 ++ templates/admin/base.html | 17 - tests/test_cli.py | 92 ++ 184 files changed, 3797 insertions(+), 10784 deletions(-) delete mode 100644 .flake8 create mode 100644 .github/CODEOWNERS create mode 100644 .github/ISSUE_TEMPLATE/bug-report.yml create mode 100644 .github/ISSUE_TEMPLATE/feature-request.yml create mode 100644 .github/dependabot.yml create mode 100644 .github/labeler.yml create mode 100644 .github/release.yml delete mode 100644 .github/workflows/app-tests.yml create mode 100644 .github/workflows/code-analysis.yml delete mode 100644 .github/workflows/codeql-analysis.yml create mode 100644 .github/workflows/create-package.yml create mode 100644 .github/workflows/labeler.yml delete mode 100644 .idea/.gitignore delete mode 100644 .idea/sqldialects.xml create mode 100644 CHANGELOG.md create mode 100644 CONTRIBUTING.md create mode 100644 MANIFEST.in create mode 100644 VERSION create mode 100644 docs/doc-search.html create mode 100644 docs/gather_vision/app.html create mode 100644 docs/gather_vision/cli.html create mode 100644 docs/gather_vision/index.html create mode 100644 docs/gather_vision/model.html create mode 100644 docs/gather_vision/plugin.html create mode 100644 docs/gather_vision/utils.html create mode 100644 docs/index.html create mode 100644 docs/index.js delete mode 100644 gather_vision/__init__.py delete mode 100644 gather_vision/admin/__init__.py delete mode 100644 gather_vision/admin/outages.py delete mode 100644 gather_vision/admin/petitions.py delete mode 100644 gather_vision/admin/playlists.py delete mode 100644 gather_vision/admin/shared.py delete mode 100644 gather_vision/admin/transport.py delete mode 100644 gather_vision/apps.py delete mode 100644 gather_vision/management/__init__.py delete mode 100644 gather_vision/management/commands/__init__.py delete mode 100644 gather_vision/management/commands/visionprocess.py delete mode 100644 gather_vision/migrations/0001_initial.py delete mode 100644 gather_vision/migrations/0002_initial.py delete mode 100644 gather_vision/migrations/0003_alter_petitionchange_options_and_more.py delete mode 100644 gather_vision/migrations/__init__.py delete mode 100644 gather_vision/models/__init__.py delete mode 100644 gather_vision/models/abstract_base.py delete mode 100644 gather_vision/models/information_source.py delete mode 100644 gather_vision/models/outage_group.py delete mode 100644 gather_vision/models/outage_item.py delete mode 100644 gather_vision/models/petition_change.py delete mode 100644 gather_vision/models/petition_item.py delete mode 100644 
gather_vision/models/playlist_entry.py delete mode 100644 gather_vision/models/playlist_item.py delete mode 100644 gather_vision/models/playlist_track.py delete mode 100644 gather_vision/models/transport_item.py delete mode 100644 gather_vision/models/transport_line.py delete mode 100644 gather_vision/process/__init__.py delete mode 100644 gather_vision/process/cache/__init__.py delete mode 100644 gather_vision/process/cache/available_expiration_time.py delete mode 100644 gather_vision/process/cache/external_http_cache.py delete mode 100644 gather_vision/process/cache/local_cache.py delete mode 100644 gather_vision/process/component/__init__.py delete mode 100644 gather_vision/process/component/cache.py delete mode 100644 gather_vision/process/component/html_extract.py delete mode 100644 gather_vision/process/component/http_client.py delete mode 100644 gather_vision/process/component/ical.py delete mode 100644 gather_vision/process/component/logger.py delete mode 100644 gather_vision/process/component/metadata.py delete mode 100644 gather_vision/process/component/normalise.py delete mode 100644 gather_vision/process/component/spotify_client.py delete mode 100644 gather_vision/process/component/sqlite_client.py delete mode 100644 gather_vision/process/component/time_series.py delete mode 100644 gather_vision/process/component/youtube_music_client.py delete mode 100644 gather_vision/process/item/__init__.py delete mode 100644 gather_vision/process/item/contact_trace.py delete mode 100644 gather_vision/process/item/playlist.py delete mode 100644 gather_vision/process/item/playlist_conf.py delete mode 100644 gather_vision/process/item/track.py delete mode 100644 gather_vision/process/item/transport_event.py delete mode 100644 gather_vision/process/manage/__init__.py delete mode 100644 gather_vision/process/manage/contact_tracing.py delete mode 100644 gather_vision/process/manage/outages.py delete mode 100644 gather_vision/process/manage/petitions.py delete mode 100644 gather_vision/process/manage/playlists.py delete mode 100644 gather_vision/process/manage/transport.py delete mode 100644 gather_vision/process/service/__init__.py delete mode 100644 gather_vision/process/service/contact_tracing/__init__.py delete mode 100644 gather_vision/process/service/contact_tracing/au_qld.py delete mode 100644 gather_vision/process/service/discord/__init__.py delete mode 100644 gather_vision/process/service/discord/client.py delete mode 100644 gather_vision/process/service/discord/embed.py delete mode 100644 gather_vision/process/service/discord/embed_field.py delete mode 100644 gather_vision/process/service/discord/webhook.py delete mode 100644 gather_vision/process/service/outages/__init__.py delete mode 100644 gather_vision/process/service/outages/energex_events.py delete mode 100644 gather_vision/process/service/outages/energex_import.py delete mode 100644 gather_vision/process/service/petition/__init__.py delete mode 100644 gather_vision/process/service/petition/au_qld.py delete mode 100644 gather_vision/process/service/petition/au_qld_bcc.py delete mode 100644 gather_vision/process/service/petition/petition_import.py delete mode 100644 gather_vision/process/service/playlist/__init__.py delete mode 100644 gather_vision/process/service/playlist/abc_radio.py delete mode 100644 gather_vision/process/service/playlist/abstract.py delete mode 100644 gather_vision/process/service/playlist/last_fm.py delete mode 100644 gather_vision/process/service/playlist/radio_4zzz.py delete mode 100644 
gather_vision/process/service/playlist/spotify.py delete mode 100644 gather_vision/process/service/playlist/youtube_music.py delete mode 100644 gather_vision/process/service/transport/__init__.py delete mode 100644 gather_vision/process/service/transport/qld_rail_events.py delete mode 100644 gather_vision/process/service/transport/translink_notices.py delete mode 100644 gather_vision/process/support/__init__.py delete mode 100644 gather_vision/process/support/select_format_mixin.py delete mode 100644 gather_vision/process/support/transport/__init__.py delete mode 100644 gather_vision/process/support/transport/render_csv_mixin.py delete mode 100644 gather_vision/process/support/transport/render_ics_mixin.py delete mode 100644 gather_vision/process/support/transport/render_json_mixin.py delete mode 100644 gather_vision/process/support/transport/render_txt_mixin.py delete mode 100644 gather_vision/static/gather_vision/android-chrome-192x192.png delete mode 100644 gather_vision/static/gather_vision/android-chrome-512x512.png delete mode 100644 gather_vision/static/gather_vision/apple-touch-icon.png delete mode 100644 gather_vision/static/gather_vision/favicon-16x16.png delete mode 100644 gather_vision/static/gather_vision/favicon-32x32.png delete mode 100644 gather_vision/static/gather_vision/favicon.ico delete mode 100644 gather_vision/static/gather_vision/graph-change-over-time.js delete mode 100644 gather_vision/static/gather_vision/site.webmanifest delete mode 100644 gather_vision/templates/gather_vision/about/index.html delete mode 100644 gather_vision/templates/gather_vision/contact_tracing/index.html delete mode 100644 gather_vision/templates/gather_vision/home/index.html delete mode 100644 gather_vision/templates/gather_vision/layout.html delete mode 100644 gather_vision/templates/gather_vision/nav_breadcrumb.html delete mode 100644 gather_vision/templates/gather_vision/nav_link.html delete mode 100644 gather_vision/templates/gather_vision/outages/index.html delete mode 100644 gather_vision/templates/gather_vision/petitions/index.html delete mode 100644 gather_vision/templates/gather_vision/playlists/index.html delete mode 100644 gather_vision/templates/gather_vision/transport/index.html delete mode 100644 gather_vision/templatetags/__init__.py delete mode 100644 gather_vision/templatetags/gather_vision_extras.py delete mode 100644 gather_vision/tests/__init__.py delete mode 100644 gather_vision/tests/process/__init__.py delete mode 100644 gather_vision/tests/process/component/__init__.py delete mode 100644 gather_vision/tests/process/component/test_abc_radio.py delete mode 100644 gather_vision/tests/process/component/test_last_fm.py delete mode 100644 gather_vision/tests/support.py delete mode 100644 gather_vision/tests/test_cache.py delete mode 100644 gather_vision/tests/test_component_normalise.py delete mode 100644 gather_vision/tests/test_mgmt_cmd_contact_tracing.py delete mode 100644 gather_vision/tests/test_mgmt_cmd_outages.py delete mode 100644 gather_vision/tests/test_mgmt_cmd_petitions.py delete mode 100644 gather_vision/tests/test_mgmt_cmd_playlists.py delete mode 100644 gather_vision/tests/test_mgmt_cmd_transport.py delete mode 100644 gather_vision/urls.py delete mode 100644 gather_vision/views/__init__.py delete mode 100644 gather_vision/views/contact_tracing.py delete mode 100644 gather_vision/views/general.py delete mode 100644 gather_vision/views/outages.py delete mode 100644 gather_vision/views/petitions.py delete mode 100644 gather_vision/views/playlists.py delete 
mode 100644 gather_vision/views/transport.py delete mode 100644 gather_vision_proj/__init__.py delete mode 100644 gather_vision_proj/admin.py delete mode 100644 gather_vision_proj/apps.py delete mode 100644 gather_vision_proj/asgi.py delete mode 100644 gather_vision_proj/gather_vision_env.py delete mode 100644 gather_vision_proj/settings.py delete mode 100644 gather_vision_proj/urls.py delete mode 100644 gather_vision_proj/wsgi.py delete mode 100644 manage.py delete mode 100644 poetry.lock create mode 100644 requirements-dev.txt create mode 100644 requirements.txt create mode 100644 setup.cfg create mode 100644 src/gather_vision/__init__.py create mode 100644 src/gather_vision/app.py create mode 100644 src/gather_vision/cli.py create mode 100644 src/gather_vision/model.py create mode 100644 src/gather_vision/plugin.py create mode 100644 src/gather_vision/utils.py delete mode 100644 templates/admin/base.html create mode 100644 tests/test_cli.py diff --git a/.flake8 b/.flake8 deleted file mode 100644 index 50e40cc..0000000 --- a/.flake8 +++ /dev/null @@ -1,10 +0,0 @@ -[flake8] -extend-exclude = - __pycache__, - .pytest_cache, - .venv/, - gather_vision/tests/, - gather_vision/migrations/ -max-line-length = 88 -max-complexity = 15 -ignore = W504 diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 0000000..c672d00 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,2 @@ +# These owners will be the default owners for everything in the repo. +* @cofiem diff --git a/.github/ISSUE_TEMPLATE/bug-report.yml b/.github/ISSUE_TEMPLATE/bug-report.yml new file mode 100644 index 0000000..a8dd4bc --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug-report.yml @@ -0,0 +1,48 @@ +name: Bug Report +description: Create a report to help us improve +title: "[Bug]: " +labels: + - bug +assignees: + - cofiem +body: + - type: textarea + id: description + attributes: + label: Describe the problem + description: A summary of the problem you've seen. + placeholder: Tell us what you see! + value: "e.g. When the program is given these arguments, it does this action I don't want ..." + validations: + required: true + - type: textarea + id: reproduce + attributes: + label: Steps to reproduce the behavior + description: Your step-by-step guide to help use reproduce the problem. + placeholder: Tell us what you see! + value: "e.g. Run using these arguments ... Look in at the output in this folder ..." + validations: + required: true + - type: textarea + id: expected + attributes: + label: Expected behavior + description: What do you expect to happen? + placeholder: Tell us what you want to see! + value: "e.g. Run using these arguments ... should do this ..." + validations: + required: true + - type: input + id: program-details + attributes: + label: Program Details + description: What operating system and what version of the program is being used? + placeholder: "e.g. OS: [e.g. iOS], Program version [e.g. 2.1]" + validations: + required: false + - type: markdown + attributes: + value: | + Please attach any screenshots or logging output to help explain your problem. + Thanks for taking the time to fill out this bug report! 
diff --git a/.github/ISSUE_TEMPLATE/feature-request.yml b/.github/ISSUE_TEMPLATE/feature-request.yml new file mode 100644 index 0000000..7ce1ec1 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature-request.yml @@ -0,0 +1,31 @@ +name: Feature Request +description: Suggest an idea for this project +title: "[Feature]: " +labels: + - enhancement +assignees: + - cofiem +body: + - type: textarea + id: description + attributes: + label: Describe the feature + description: A summary of the new functionality you'd like + placeholder: Tell us what you want to see! + value: "e.g. It would be nice to be able to do this and get this output ..." + validations: + required: true + - type: textarea + id: alternative + attributes: + label: Are there other approaches? + description: Any alternative solutions or features you've considered + placeholder: Tell us what else could be done + value: "e.g. Your program could interact with this other program like this ..." + validations: + required: true + - type: markdown + attributes: + value: | + Please attach any screenshots or other files to help explain your feature. + Thanks for taking the time to fill out this feature request! diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..bd934b1 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,26 @@ +# To get started with Dependabot version updates, you'll need to specify which +# package ecosystems to update and where the package manifests are located. +# Please see the documentation for all configuration options: +# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates + +version: 2 +updates: + + # Maintain dependencies for GitHub Actions + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "weekly" + target-branch: "main" + open-pull-requests-limit: 3 + + # Maintain dependencies for pip packages + - package-ecosystem: "pip" + directory: "/" + schedule: + interval: "weekly" + target-branch: "main" + commit-message: + prefix: "pip stable" + prefix-development: "pip dev" + open-pull-requests-limit: 3 diff --git a/.github/labeler.yml b/.github/labeler.yml new file mode 100644 index 0000000..3997662 --- /dev/null +++ b/.github/labeler.yml @@ -0,0 +1,11 @@ +documentation: + - docs/**/* + +dependencies: + - requirements.txt + - requirements-dev.txt + - pyproject.toml + - MANIFEST.in + +tests: + - tests/**/* diff --git a/.github/release.yml b/.github/release.yml new file mode 100644 index 0000000..3714950 --- /dev/null +++ b/.github/release.yml @@ -0,0 +1,18 @@ +# Automatically generate release notes +# .github/release.yml +# see https://docs.github.com/en/repositories/releasing-projects-on-github/automatically-generated-release-notes#configuring-automatically-generated-release-notes + +changelog: + categories: + - title: New Features + labels: + - enhancement + - title: Bug Fixes + labels: + - bug + - title: Documentation changes + labels: + - documentation + - title: Dependency updates + labels: + - dependencies diff --git a/.github/workflows/app-tests.yml b/.github/workflows/app-tests.yml deleted file mode 100644 index babe1fe..0000000 --- a/.github/workflows/app-tests.yml +++ /dev/null @@ -1,84 +0,0 @@ -name: Test and build - -on: - push: - branches: - - main - -defaults: - run: - shell: bash - -jobs: - build: - name: Test and build - runs-on: ubuntu-latest - - strategy: - fail-fast: false - max-parallel: 4 - matrix: - python-version: - - '3.9' - - '3.10' - poetry-version: - - '1.1.12' - - steps: 
- # These steps are trying to keep the Python package and the testing tools separate. - # This way, the dev tools won't conflict with the package dependencies. - - name: Check out code - uses: actions/checkout@v2 - - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 - with: - python-version: ${{ matrix.python-version }} - # this caches the pip global cache - cache: 'pip' - cache-dependency-path: '**/poetry.lock' - - - name: Cache Python virtual environment - uses: actions/cache@v2 - with: - path: | - .venv - key: python-venv-${{ runner.os }}-${{ matrix.python-version }}-${{ hashFiles('poetry.lock') }} - restore-keys: | - python-venv-${{ runner.os }}-${{ matrix.python-version }} - python-venv-${{ runner.os }} - python-venv - - - name: Install Python Poetry - run: | - curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/install-poetry.py | python - - echo "$HOME/.local/bin" >> $GITHUB_PATH - - - name: Create and populate Python virtual environment - run: | - if [[ ! -d $PWD/.venv ]]; then - python -m venv $PWD/.venv - fi - poetry config virtualenvs.in-project true - poetry run python -m pip install --upgrade pip setuptools wheel - poetry install --remove-untracked - - - name: Run Django tests with coverage - run: | - poetry run coverage run -m pytest --junitxml=test-results/test-${{ matrix.python-version }}.xml - poetry run coverage report - poetry run coverage html --directory htmlcov/coverage-${{ matrix.python-version }} - - - name: Run Python linters - run: | - poetry run flake8 . --count --show-source --statistics - poetry run black --check . - - - name: Upload test results - uses: actions/upload-artifact@v2 - with: - name: test-${{ matrix.python-version }} - path: | - test-results/test-${{ matrix.python-version }}.xml - htmlcov/coverage-${{ matrix.python-version }}/ - if: ${{ always() }} diff --git a/.github/workflows/code-analysis.yml b/.github/workflows/code-analysis.yml new file mode 100644 index 0000000..84d1ec5 --- /dev/null +++ b/.github/workflows/code-analysis.yml @@ -0,0 +1,81 @@ +name: Code Analysis + +on: + push: + branches: [ "main" ] + pull_request: + branches: [ "main" ] + schedule: + - cron: '40 0 * * 1' + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + analyze: + name: Analyze + runs-on: ubuntu-latest + permissions: + actions: read + contents: read + security-events: write + + strategy: + fail-fast: false + matrix: + language: [ "python" ] + python-version: [ "3.7", "3.8", "3.9", "3.10", "3.11.0-rc.1" ] + + steps: + - name: Checkout repository + uses: actions/checkout@v3 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + cache: pip + + - name: Install dependencies + run: | + echo "::group::Pip dependencies" + python -m pip install --upgrade pip setuptools wheel + python -m pip install --upgrade -r requirements-dev.txt -r requirements.txt + echo "::endgroup::" + + - name: Initialize CodeQL + uses: github/codeql-action/init@v2 + with: + languages: ${{ matrix.language }} + setup-python-dependencies: false + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v2 + + - name: Install snyk to check for vulnerabilities + uses: snyk/actions/setup@master + + - name: Run Snyk over the runtime dependencies + run: snyk test --sarif-file-output=snyk-requirements-${{ matrix.language }}.sarif --command=python --file=requirements.txt --package-manager=pip --skip-unresolved + 
continue-on-error: true + env: + SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }} + + - name: Upload Snyk runtime dependencies scan result to GitHub Code Scanning + uses: github/codeql-action/upload-sarif@v2 + with: + sarif_file: snyk-requirements-${{ matrix.language }}.sarif + category: requirements-runtime-${{ matrix.language }} + + - name: Run Snyk over the development dependencies + run: snyk test --sarif-file-output=snyk-requirements-dev-${{ matrix.language }}.sarif --command=python --file=requirements-dev.txt --package-manager=pip --skip-unresolved + continue-on-error: true + env: + SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }} + + - name: Upload Snyk development dependencies scan result to GitHub Code Scanning + uses: github/codeql-action/upload-sarif@v2 + with: + sarif_file: snyk-requirements-dev-${{ matrix.language }}.sarif + category: requirements-dev-${{ matrix.language }} diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml deleted file mode 100644 index be54694..0000000 --- a/.github/workflows/codeql-analysis.yml +++ /dev/null @@ -1,59 +0,0 @@ -name: CodeQL - -on: - push: - branches: - - main - pull_request: - # The branches below must be a subset of the branches above - branches: - - main - schedule: - - cron: '42 13 * * 1' - -defaults: - run: - shell: bash - -jobs: - analyze: - name: Analyze - runs-on: ubuntu-latest - permissions: - actions: read - contents: read - security-events: write - - strategy: - fail-fast: false - max-parallel: 4 - matrix: - language: - - 'python' - python-version: - - '3.9' - - '3.10' - poetry-version: - - '1.1.12' - - steps: - - name: Check out code - uses: actions/checkout@v2 - - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 - with: - python-version: ${{ matrix.python-version }} - cache: 'pip' - cache-dependency-path: '**/poetry.lock' - - - name: Initialize CodeQL - uses: github/codeql-action/init@v1 - with: - languages: ${{ matrix.language }} - - - name: Autobuild - uses: github/codeql-action/autobuild@v1 - - - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v1 diff --git a/.github/workflows/create-package.yml b/.github/workflows/create-package.yml new file mode 100644 index 0000000..dbb527b --- /dev/null +++ b/.github/workflows/create-package.yml @@ -0,0 +1,127 @@ +name: Create Package + +on: + push: + branches: [ "main" ] + pull_request: + branches: [ "main" ] + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + test_lint: + name: Test and lint + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python-version: [ "3.7", "3.8", "3.9", "3.10", "3.11.0-rc.1" ] + steps: + - name: Checkout repository + uses: actions/checkout@v3 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + cache: pip + + - name: Install dependencies + run: | + echo "::group::Pip dependencies" + python -m pip install --upgrade pip setuptools wheel + python -m pip install --upgrade -r requirements-dev.txt -r requirements.txt + echo "::endgroup::" + + - name: Run tests + run: | + echo "::group::Tests - Run tests with coverage" + ( + set -o pipefail + python -X dev -m pytest --doctest-modules \ + --junitxml=artifact-${{ matrix.python-version }}-pytest-coverage.xml \ + --cov-report=term-missing:skip-covered --cov=src/ tests/ | tee artifact-${{ matrix.python-version }}-pytest-coverage.txt + ) + echo "::endgroup::" + + echo "::group::Tests - Coverage 
report" + python -X dev -m coverage report || true + echo "::endgroup::" + + - name: Pytest coverage comment + if: matrix.python-version == '3.9' + uses: MishaKav/pytest-coverage-comment@v1.1.35 + with: + pytest-coverage-path: ./artifact-${{ matrix.python-version }}-pytest-coverage.txt + junitxml-path: ./artifact-${{ matrix.python-version }}-pytest-coverage.xml + + - name: Run linters + run: | + echo "::group::Linter - mypy" + python -X dev -m mypy --junit-xml artifact-${{ matrix.python-version }}-mypy.xml src || true + echo "::endgroup::" + + echo "::group::Linter - black" + python -X dev -m black --check src || true + echo "::endgroup::" + + echo "::group::Linter - pylint" + python -X dev -m pylint src || true + echo "::endgroup::" + + echo "::group::Linter - pydocstyle" + python -X dev -m pydocstyle src || true + echo "::endgroup::" + + echo "::group::Linter - pyright" + python -X dev -m pyright src || true + echo "::endgroup::" + + echo "::group::Linter - flake8" + python -X dev -m flake8 src --count --show-source --statistics --exit-zero + echo "::endgroup::" + + echo "::group::Linter - pytype" + python -X dev -m pytype -j auto || true + echo "::endgroup::" + + - name: Upload pytest test results + uses: actions/upload-artifact@v3 + with: + name: artifacts-${{ matrix.python-version }} + path: artifact-${{ matrix.python-version }}* + # Use always() to always run this step to publish test results when there are test failures + if: ${{ always() }} + + build_publish: + name: Build and publish + runs-on: ubuntu-latest + needs: test_lint + strategy: + fail-fast: true + matrix: + python-version: [ "3.9" ] + steps: + - name: Checkout repository + uses: actions/checkout@v3 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + cache: pip + + - name: Install dependencies + run: | + echo "::group::Pip dependencies" + python -m pip install --upgrade pip setuptools wheel + python -m pip install --upgrade -r requirements-dev.txt -r requirements.txt + echo "::endgroup::" + + - name: Build a binary wheel and a source tarball + run: | + echo "::group::Build package" + python -m build --sdist --wheel --outdir dist/ . 
+ echo "::endgroup::" diff --git a/.github/workflows/labeler.yml b/.github/workflows/labeler.yml new file mode 100644 index 0000000..4a5644e --- /dev/null +++ b/.github/workflows/labeler.yml @@ -0,0 +1,21 @@ +name: Labeler + +on: + pull_request_target: + types: [ "opened" ] + branches: [ "main" ] + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + label: + permissions: + contents: read + pull-requests: write + runs-on: ubuntu-latest + steps: + - uses: actions/labeler@v4 + with: + repo-token: "${{ secrets.GITHUB_TOKEN }}" diff --git a/.gitignore b/.gitignore index f68c127..59e3143 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,4 @@ +# From: https://github.com/github/gitignore/blob/main/Python.gitignore # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] @@ -20,7 +21,6 @@ parts/ sdist/ var/ wheels/ -pip-wheel-metadata/ share/python-wheels/ *.egg-info/ .installed.cfg @@ -50,7 +50,7 @@ coverage.xml *.py,cover .hypothesis/ .pytest_cache/ -.test-results/ +cover/ # Translations *.mo @@ -73,6 +73,7 @@ instance/ docs/_build/ # PyBuilder +.pybuilder/ target/ # Jupyter Notebook @@ -83,7 +84,9 @@ profile_default/ ipython_config.py # pyenv -.python-version +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version # pipenv # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. @@ -92,7 +95,22 @@ ipython_config.py # install all needed dependencies. #Pipfile.lock -# PEP 582; used by e.g. github.com/David-OConnor/pyflow +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/#use-with-ide +.pdm.toml + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm __pypackages__/ # Celery stuff @@ -104,14 +122,16 @@ celerybeat.pid # Environments .env -.env-test .venv -.venv-test env/ venv/ ENV/ env.bak/ venv.bak/ +.env-test +.local/ +.venv-test/ +.pypi-test/ # Spyder project settings .spyderproject @@ -131,5 +151,94 @@ dmypy.json # Pyre type checker .pyre/ -# local data -.local/ +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. 
+#.idea/ + +# From: https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider +# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 + +# User-specific stuff +.idea/**/workspace.xml +.idea/**/tasks.xml +.idea/**/usage.statistics.xml +.idea/**/dictionaries +.idea/**/shelf + +# AWS User-specific +.idea/**/aws.xml + +# Generated files +.idea/**/contentModel.xml + +# Sensitive or high-churn files +.idea/**/dataSources/ +.idea/**/dataSources.ids +.idea/**/dataSources.local.xml +.idea/**/sqlDataSources.xml +.idea/**/dynamic.xml +.idea/**/uiDesigner.xml +.idea/**/dbnavigator.xml + +# Gradle +.idea/**/gradle.xml +.idea/**/libraries + +# Gradle and Maven with auto-import +# When using Gradle or Maven with auto-import, you should exclude module files, +# since they will be recreated, and may cause churn. Uncomment if using +# auto-import. +# .idea/artifacts +# .idea/compiler.xml +# .idea/jarRepositories.xml +# .idea/modules.xml +# .idea/*.iml +# .idea/modules +# *.iml +# *.ipr + +# CMake +cmake-build-*/ + +# Mongo Explorer plugin +.idea/**/mongoSettings.xml + +# File-based project format +*.iws + +# IntelliJ +out/ + +# mpeltonen/sbt-idea plugin +.idea_modules/ + +# JIRA plugin +atlassian-ide-plugin.xml + +# Cursive Clojure plugin +.idea/replstate.xml + +# SonarLint plugin +.idea/sonarlint/ + +# Crashlytics plugin (for Android Studio and IntelliJ) +com_crashlytics_export_strings.xml +crashlytics.properties +crashlytics-build.properties +fabric.properties + +# Editor-based Rest Client +.idea/httpRequests + +# Android studio 3.1+ serialized cache file +.idea/caches/build_file_checksums.ser diff --git a/.idea/.gitignore b/.idea/.gitignore deleted file mode 100644 index 73f69e0..0000000 --- a/.idea/.gitignore +++ /dev/null @@ -1,8 +0,0 @@ -# Default ignored files -/shelf/ -/workspace.xml -# Datasource local storage ignored files -/dataSources/ -/dataSources.local.xml -# Editor-based HTTP Client requests -/httpRequests/ diff --git a/.idea/gather-vision.iml b/.idea/gather-vision.iml index cb4f085..fde6929 100644 --- a/.idea/gather-vision.iml +++ b/.idea/gather-vision.iml @@ -1,35 +1,20 @@ - - - - - - + + - + - - + + - + - - + + \ No newline at end of file diff --git a/.idea/inspectionProfiles/Project_Default.xml b/.idea/inspectionProfiles/Project_Default.xml index 9c707b5..4617371 100644 --- a/.idea/inspectionProfiles/Project_Default.xml +++ b/.idea/inspectionProfiles/Project_Default.xml @@ -4,4 +4,4 @@ - \ No newline at end of file + diff --git a/.idea/misc.xml b/.idea/misc.xml index f9dc4b3..ba1b8e7 100644 --- a/.idea/misc.xml +++ b/.idea/misc.xml @@ -1,4 +1,4 @@ - + \ No newline at end of file diff --git a/.idea/sqldialects.xml b/.idea/sqldialects.xml deleted file mode 100644 index 8581627..0000000 --- a/.idea/sqldialects.xml +++ /dev/null @@ -1,6 +0,0 @@ - - - - - - diff --git a/.idea/watcherTasks.xml b/.idea/watcherTasks.xml index 78808a4..35633d4 100644 --- a/.idea/watcherTasks.xml +++ b/.idea/watcherTasks.xml @@ -22,4 +22,4 @@ - \ No newline at end of file + diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..8ed5ded --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,4 @@ +# Change log + +*unreleased* + diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..4ce15f9 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,133 @@ +# Gather vision contributing guide + +## Development + +Create a virtual 
environment: + +```bash +python -m venv .venv +``` + +Install runtime dependencies and development dependencies: + +```bash +# Windows +.venv\Scripts\activate.ps1 + +# Linux +source .venv/bin/activate + +# install dependencies +python -m pip install --upgrade pip setuptools wheel +python -m pip install --upgrade -r requirements-dev.txt -r requirements.txt + +# check for outdated packages +pip list --outdated +``` + +## Run tests and linters + +```bash +# Tests - multiple python versions using tox +# (it might be necessary to (un)comment `recreate = true` in pyproject.toml) +python -X dev -m tox + +# Tests - Run tests with coverage +python -X dev -m coverage run -m pytest --tb=line --doctest-modules +( + set -o pipefail + python -X dev -m pytest --doctest-modules --junitxml=pytest.xml \ + --cov-report=term-missing:skip-covered --cov=src/ tests/ | tee pytest-coverage.txt +) + +# Tests - Coverage report +python -X dev -m coverage report + +# Linter - flake8 +python -X dev -m flake8 src --count --show-source --statistics + +# Linter - mypy +python -X dev -m mypy src + +# Linter - black +python -X dev -m black --check src + +# Linter - pylint +python -X dev -m pylint src + +# Linter - pydocstyle +python -X dev -m pydocstyle src + +# Linter - pyright +python -X dev -m pyright src + +# Linter - pytype +python -X dev -m pytype -j auto +``` + +## Generate docs + +Generate the docs using pdoc3: + +```bash +pdoc --html --output-dir docs src/gather_vision --force \ + --config "lunr_search={'fuzziness': 1, 'index_docstrings': True}" \ + --config "git_link_template='https://github.com/anotherbyte-net/gather-vision/blob/{commit}/{path}#L{start_line}-L{end_line}'" +``` + +## Create and upload release + +Generate the distribution package archives. + +```bash +python -X dev -m build +``` + +Upload archives to Test PyPI first. + +```bash +python -X dev -m twine upload --repository testpypi dist/* +``` + +When uploading: + +- for username, use `__token__` +- for password, create a token at https://test.pypi.org/manage/account/#api-tokens + +Go to the [test project page](https://test.pypi.org/project/gather-vision) and check that it looks ok. + +Then create a new virtual environment, install the dependencies, and install from Test PyPI. + +```bash +python -m venv .venv-test +source .venv-test/bin/activate +python -m pip install --upgrade pip setuptools wheel +python -m pip install --upgrade -r requirements.txt + +GATHER_VISION_VERSION='0.0.1' +pip install --index-url https://test.pypi.org/simple/ --no-deps gather-vision==$GATHER_VISION_VERSION +# or +pip install dist/gather_vision-$GATHER_VISION_VERSION-py3-none-any.whl +``` + +Test the installed package. + +```bash +gather-vision --version +gather-vision --help +``` + +If the package seems to work as expected, upload it to the live PyPI. + +```bash +python -X dev -m twine upload dist/* +``` + +When uploading: + +- for username, use `__token__` +- for password, create a token at https://pypi.org/manage/account/#api-tokens + +Go to the [live project page](https://pypi.org/project/gather-vision) and check that it looks ok. + +Done! diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 0000000..4ba9177 --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1,3 @@ +include VERSION +include requirements.txt +include requirements-dev.txt diff --git a/README.md b/README.md index 9f495e8..f5804b5 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,15 @@ # gather-vision -A collection of data and ideas related to society and government in Queensland, Australia. 
-[![Test and build](https://github.com/anotherbyte-net/gather-vision/actions/workflows/app-tests.yml/badge.svg?branch=main)](https://github.com/anotherbyte-net/gather-vision/actions/workflows/app-tests.yml) -[![CodeQL](https://github.com/anotherbyte-net/gather-vision/actions/workflows/codeql-analysis.yml/badge.svg?branch=main)](https://github.com/anotherbyte-net/gather-vision/actions/workflows/codeql-analysis.yml) +Obtain, extract, organise, and store information. + +## Install + +Install from PyPI using pip: + +```bash +pip install gather-vision +``` + +[![PyPI](https://img.shields.io/pypi/v/gather-vision)](https://pypi.org/project/gather-vision/) +![PyPI - Python Version](https://img.shields.io/pypi/pyversions/gather-vision) +![GitHub Workflow Status (branch)](https://img.shields.io/github/workflow/status/anotherbyte-net/gather-vision/Create%20Package/main) diff --git a/VERSION b/VERSION new file mode 100644 index 0000000..8acdd82 --- /dev/null +++ b/VERSION @@ -0,0 +1 @@ +0.0.1 diff --git a/docs/doc-search.html b/docs/doc-search.html new file mode 100644 index 0000000..0f0cf80 --- /dev/null +++ b/docs/doc-search.html @@ -0,0 +1,141 @@ + + + + + + Search + + + + + + + +
    +Search
    +
    +Search results provided by Lunr.js
    +
    + + + + + \ No newline at end of file diff --git a/docs/gather_vision/app.html b/docs/gather_vision/app.html new file mode 100644 index 0000000..383dadf --- /dev/null +++ b/docs/gather_vision/app.html @@ -0,0 +1,556 @@ + + + + + + +gather_vision.app API documentation + + + + + + + + + + + +
    +Module gather_vision.app
    +
    +The main application features.
    +
    """The main application features."""
    +import typing
    +
    +from importlib_metadata import EntryPoints, entry_points
    +
    +from gather_vision import model, plugin, utils
    +
    +
    +class App:
    +    """The main application."""
    +
    +    group = "gather_vision.plugin"
    +
    +    entry_points: typing.Optional[EntryPoints] = None
    +    plugins: typing.Dict[str, plugin.Entry] = {}
    +
    +    def collect(self) -> EntryPoints:
    +        """Collect the available plugins.
    +
    +        Returns:
    +            A collection of EntryPoints.
    +        """
    +        if self.entry_points is None:
    +            self.entry_points = entry_points(group=self.group)
    +        return self.entry_points
    +
    +    def load(self) -> typing.Dict[str, plugin.Entry]:
    +        """Load the plugin class for each plugin.
    +
    +        Returns:
    +            A dict of plugin names mapped to their loaded plugin Entry classes.
    +        """
    +        if not self.plugins:
    +            for entry_point in self.collect():
    +                self.plugins[entry_point.name] = entry_point.load()
    +        return self.plugins
    +
    +    def get(self, name: str) -> typing.Optional[plugin.Entry]:
    +        """Get the class for a plugin.
    +
    +        Args:
    +            name: The name of the plugin.
    +
    +        Returns:
    +            The plugin entry class.
    +        """
    +        if name in self.plugins:
    +            return self.plugins[name]
    +
    +        entry_pts = entry_points(group=self.group, name=name)
    +        if entry_pts and len(entry_pts) == 1:
    +            entry_point = entry_pts[0]
    +            self.plugins[entry_point.name] = entry_point.load()
    +
    +        return self.plugins.get(name)
    +
    +    def update(self, args: model.UpdateArgs) -> model.UpdateResult:
    +        """Execute the update action for the plugin with the given name.
    +
    +        Args:
    +            args: The update arguments.
    +
    +        Returns:
    +            The result of running the plugin's update process.
    +        """
    +        named_plugin = self.plugins.get(args.name)
    +        if not named_plugin:
    +            raise utils.GatherVisionException(
    +                f"Could not find plugin named '{args.name}'."
    +            )
    +        result = named_plugin.update(args)
    +        return result
    +
    +    def show(self, args: model.ShowArgs) -> model.ShowResult:
    +        """Execute the show action for the plugin with the given name.
    +
    +        Args:
    +            args: The show arguments.
    +
    +        Returns:
    +            The details of the plugin.
    +        """
    +        named_plugin = self.plugins.get(args.name)
    +        if not named_plugin:
    +            raise utils.GatherVisionException(
    +                f"Could not find plugin named '{args.name}'."
    +            )
    +        result = named_plugin.show(args)
    +        return result
    +
    +    def list(
    +        self, args: model.ListArgs  # noqa: U100 pylint: disable=unused-argument
    +    ) -> model.ListResult:
    +        """List all available plugins.
    +
    +        Args:
    +            args: The list arguments.
    +
    +        Returns:
    +            A list of plugins.
    +        """
    +        names = []
    +        for item in self.collect():
    +            if not item:
    +                continue
    +            names.append(item.name)
    +        result = model.ListResult(sorted(names))
    +        return result
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +

    Classes

    +
    +
    +class App +
    +
    +

    The main application.

    +
    + +Expand source code +Browse git + +
    class App:
    +    """The main application."""
    +
    +    group = "gather_vision.plugin"
    +
    +    entry_points: typing.Optional[EntryPoints] = None
    +    plugins: typing.Dict[str, plugin.Entry] = {}
    +
    +    def collect(self) -> EntryPoints:
    +        """Collect the available plugins.
    +
    +        Returns:
    +            A collection of EntryPoints.
    +        """
    +        if self.entry_points is None:
    +            self.entry_points = entry_points(group=self.group)
    +        return self.entry_points
    +
    +    def load(self) -> typing.Dict[str, plugin.Entry]:
    +        """Load the plugin class for each plugin.
    +
    +        Returns:
    +            A list of
    +        """
    +        if not self.plugins:
    +            for entry_point in self.collect():
    +                self.plugins[entry_point.name] = entry_point.load()
    +        return self.plugins
    +
    +    def get(self, name: str) -> typing.Optional[plugin.Entry]:
    +        """Get the class for a plugin.
    +
    +        Args:
    +            name: The name of the plugin.
    +
    +        Returns:
    +            The plugin entry class.
    +        """
    +        if name in self.plugins:
    +            return self.plugins[name]
    +
    +        entry_pts = entry_points(group=self.group, name=name)
    +        if entry_pts and len(entry_pts) == 1:
    +            entry_point = entry_pts[0]
    +            self.plugins[entry_point.name] = entry_point.load()
    +
    +        return self.plugins.get(name)
    +
    +    def update(self, args: model.UpdateArgs) -> model.UpdateResult:
    +        """Execute the update action for the plugin with the given name.
    +
    +        Args:
    +            args: The update arguments.
    +
    +        Returns:
    +            The result of running the plugin's update process.
    +        """
    +        named_plugin = self.plugins.get(args.name)
    +        if not named_plugin:
    +            raise utils.GatherVisionException(
    +                f"Could not find plugin named '{args.name}'."
    +            )
    +        result = named_plugin.update(args)
    +        return result
    +
    +    def show(self, args: model.ShowArgs) -> model.ShowResult:
    +        """Execute the show action for the plugin with the given name.
    +
    +        Args:
    +            args: The show arguments.
    +
    +        Returns:
    +            The details of the plugin.
    +        """
    +        named_plugin = self.plugins.get(args.name)
    +        if not named_plugin:
    +            raise utils.GatherVisionException(
    +                f"Could not find plugin named '{args.name}'."
    +            )
    +        result = named_plugin.show(args)
    +        return result
    +
    +    def list(
    +        self, args: model.ListArgs  # noqa: U100 pylint: disable=unused-argument
    +    ) -> model.ListResult:
    +        """List all available plugins.
    +
    +        Args:
    +            args: The list arguments.
    +
    +        Returns:
    +            A list of plugins.
    +        """
    +        names = []
    +        for item in self.collect():
    +            if not item:
    +                continue
    +            names.append(item.name)
    +        result = model.ListResult(sorted(names))
    +        return result
    +
    +

    Class variables

    +
    +
    var entry_points : Optional[importlib_metadata.EntryPoints]
    +
    +
    +
    +
    var group
    +
    +
    +
    +
    var plugins : Dict[str, Entry]
    +
    +
    +
    +
    +

    Methods

    +
    +
    +def collect(self) ‑> importlib_metadata.EntryPoints +
    +
    +

    Collect the available plugins.

    +

    Returns

    +

    A collection of EntryPoints.

    +
    + +Expand source code +Browse git + +
    def collect(self) -> EntryPoints:
    +    """Collect the available plugins.
    +
    +    Returns:
    +        A collection of EntryPoints.
    +    """
    +    if self.entry_points is None:
    +        self.entry_points = entry_points(group=self.group)
    +    return self.entry_points
    +
    +
    +
    +def get(self, name: str) ‑> Optional[Entry] +
    +
    +

    Get the class for a plugin.

    +

    Args

    +
    +
    name
    +
    The name of the plugin.
    +
    +

    Returns

    +

    The plugin entry class.

    +
    + +Expand source code +Browse git + +
    def get(self, name: str) -> typing.Optional[plugin.Entry]:
    +    """Get the class for a plugin.
    +
    +    Args:
    +        name: The name of the plugin.
    +
    +    Returns:
    +        The plugin entry class.
    +    """
    +    if name in self.plugins:
    +        return self.plugins[name]
    +
    +    entry_pts = entry_points(group=self.group, name=name)
    +    if entry_pts and len(entry_pts) == 1:
    +        entry_point = entry_pts[0]
    +        self.plugins[entry_point.name] = entry_point.load()
    +
    +    return self.plugins.get(name)
    +
    +
    +
    +def list(self, args: ListArgs) ‑> ListResult +
    +
    +

    List all available plugins.

    +

    Args

    +
    +
    args
    +
    The list arguments.
    +
    +

    Returns

    +

    A list of plugins.

    +
    + +Expand source code +Browse git + +
    def list(
    +    self, args: model.ListArgs  # noqa: U100 pylint: disable=unused-argument
    +) -> model.ListResult:
    +    """List all available plugins.
    +
    +    Args:
    +        args: The list arguments.
    +
    +    Returns:
    +        A list of plugins.
    +    """
    +    names = []
    +    for item in self.collect():
    +        if not item:
    +            continue
    +        names.append(item.name)
    +    result = model.ListResult(sorted(names))
    +    return result
    +
    +
    +
    +def load(self) ‑> Dict[str, Entry] +
    +
    +

    Load the plugin class for each plugin.

    +

    Returns

    +

    A list of

    +
    + +Expand source code +Browse git + +
    def load(self) -> typing.Dict[str, plugin.Entry]:
    +    """Load the plugin class for each plugin.
    +
    +    Returns:
    +        A list of
    +    """
    +    if not self.plugins:
    +        for entry_point in self.collect():
    +            self.plugins[entry_point.name] = entry_point.load()
    +    return self.plugins
    +
    +
    +
    +def show(self, args: ShowArgs) ‑> ShowResult +
    +
    +

    Execute the show action for the plugin with the given name.

    +

    Args

    +
    +
    args
    +
    The show arguments.
    +
    +

    Returns

    +

    The details of the plugin.

    +
    + +Expand source code +Browse git + +
    def show(self, args: model.ShowArgs) -> model.ShowResult:
    +    """Execute the show action for the plugin with the given name.
    +
    +    Args:
    +        args: The show arguments.
    +
    +    Returns:
    +        The details of the plugin.
    +    """
    +    named_plugin = self.plugins.get(args.name)
    +    if not named_plugin:
    +        raise utils.GatherVisionException(
    +            f"Could not find plugin named '{args.name}'."
    +        )
    +    result = named_plugin.show(args)
    +    return result
    +
    +
    +
    +def update(self, args: UpdateArgs) ‑> UpdateResult +
    +
    +

    Execute the update action for the plugin with the given name.

    +

    Args

    +
    +
    args
    +
    The update arguments.
    +
    +

    Returns

    +

    The result of running the plugin's update process.

    +
    def update(self, args: model.UpdateArgs) -> model.UpdateResult:
    +    """Execute the update action for the plugin with the given name.
    +
    +    Args:
    +        args: The update arguments.
    +
    +    Returns:
    +        The result of running the plugin's update process.
    +    """
    +    named_plugin = self.plugins.get(args.name)
    +    if not named_plugin:
    +        raise utils.GatherVisionException(
    +            f"Could not find plugin named '{args.name}'."
    +        )
    +    result = named_plugin.update(args)
    +    return result
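Putting the App methods together, a hedged end-to-end sketch; the plugin name is illustrative, and load() is called first because show() and update() only look up plugins already in the name-to-class mapping and raise GatherVisionException otherwise.

from gather_vision import app, model

main_app = app.App()

listed = main_app.list(model.ListArgs())
print(listed.names)  # sorted names of every installed plugin

main_app.load()  # populate the plugin name -> Entry class mapping
main_app.show(model.ShowArgs(name="example-plugin"))      # hypothetical name
main_app.update(model.UpdateArgs(name="example-plugin"))  # hypothetical name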
    +
    +
    +
    +
    +
    +
    +
    + +
+
\ No newline at end of file
diff --git a/docs/gather_vision/cli.html b/docs/gather_vision/cli.html
new file mode 100644
index 0000000..8890fb3
--- /dev/null
+++ b/docs/gather_vision/cli.html
@@ -0,0 +1,534 @@
+gather_vision.cli API documentation
    +
    +
    +

    Module gather_vision.cli

    +
    +
    +

    Command line for gather vision.

    +
    """Command line for gather vision."""
    +
    +import argparse
    +import logging
    +import sys
    +import typing
    +
    +from gather_vision import app, model, utils
    +
    +
    +def cli_update(args: argparse.Namespace) -> bool:
    +    """Run the update action from the cli.
    +
    +    Args:
    +        args: The arguments for the update action.
    +
    +    Returns:
    +        True if there were no errors.
    +    """
    +    logger = logging.getLogger(__name__)
    +
    +    app_args = model.UpdateArgs(name=args.name)
    +    main_app = app.App()
    +
    +    logger.info("Updating '%s'.", args.name)
    +    main_app.update(app_args)
    +    return True
    +
    +
    +def cli_show(args: argparse.Namespace) -> bool:
    +    """Run the show action from the cli.
    +
    +    Args:
    +        args: The arguments for the show action.
    +
    +    Returns:
    +        True if there were no errors.
    +    """
    +    logger = logging.getLogger(__name__)
    +
    +    app_args = model.ShowArgs(name=args.name)
    +    main_app = app.App()
    +
    +    logger.info("Showing '%s'.", args.name)
    +    main_app.show(app_args)
    +    return True
    +
    +
    +def cli_list(
    +    args: argparse.Namespace,  # noqa: U100 pylint: disable=unused-argument
    +) -> bool:
    +    """Run the list action from the cli.
    +
    +    Args:
    +        args: The arguments for the list action.
    +
    +    Returns:
    +        True if there were no errors.
    +    """
    +    logger = logging.getLogger(__name__)
    +
    +    app_args = model.ListArgs()
    +    main_app = app.App()
    +    result = main_app.list(app_args)
    +
    +    logger.info("Listing %s plugins.", len(result.names))
    +    for index, name in enumerate(result.names):
    +        logger.info("  %s) %s", index + 1, name)
    +    return True
    +
    +
    +def main(args: typing.Optional[typing.List[str]] = None) -> int:
    +    """Run as a command line program.
    +
    +    Args:
    +        args: The program arguments.
    +
    +    Returns:
    +        int: Program exit code.
    +    """
    +    if args is None:
    +        args = sys.argv[1:]
    +
    +    # configure logging
    +    logging.basicConfig(
    +        format="%(asctime)s [%(levelname)-8s] %(message)s", level=logging.DEBUG
    +    )
    +    logger = logging.getLogger(__name__)
    +
    +    # create the top-level parser
    +    parser = argparse.ArgumentParser(
    +        prog=utils.get_name_dash(),
    +        description="Obtain, extract, organise, and store information.",
    +    )
    +    parser.add_argument(
    +        "--version",
    +        action="version",
    +        version=f"%(prog)s {utils.get_version()}",
    +    )
    +    parser.add_argument(
    +        "--log-level",
    +        default="info",
    +        choices=["debug", "info", "warning", "error", "critical"],
    +        help="the log level: debug, info, warning, error, critical",
    +    )
    +    subparsers = parser.add_subparsers(
    +        title="Available subcommands",
    +        description="The actions available for plugins",
    +        dest="subcommand_action",
    +        required=False,
    +        help="The subcommands available to interact with installed plugins.",
    +        metavar="action",
    +    )
    +
    +    # create the parser for the "update" command
    +    parser_update = subparsers.add_parser("update")
    +    parser_update.add_argument(
    +        "name",
    +        help="The name of the update to run.",
    +    )
    +    parser_update.set_defaults(func=cli_update)
    +
    +    # create the parser for the "show" command
    +    parser_show = subparsers.add_parser("show")
    +    parser_show.add_argument(
    +        "name",
    +        help="The name of the group of information to show.",
    +    )
    +    parser_show.set_defaults(func=cli_show)
    +
    +    # create the parser for the "list" command
    +    parser_list = subparsers.add_parser("list")
    +    parser_list.set_defaults(func=cli_list)
    +
    +    try:
    +        parsed_args = parser.parse_args(args)
    +
    +        logging.getLogger().setLevel((parsed_args.log_level or "info").upper())
    +
    +        if not parsed_args.subcommand_action:
    +            parser.print_help(file=sys.stderr)
    +            sys.exit(1)
    +
    +        if logger.isEnabledFor(logging.DEBUG):
    +            logger.debug(
    +                "Starting %s with arguments '%s'.", utils.get_name_dash(), args
    +            )
    +        else:
    +            logger.info("Starting %s.", utils.get_name_dash())
    +
    +        if parsed_args.subcommand_action and hasattr(parsed_args, "func"):
    +            result = parsed_args.func(parsed_args)
    +        else:
    +            logger.warning("Not sure what to do with arguments '%s'.", args)
    +            result = False
    +
    +        outcome = 0 if result is True else 1
    +        if outcome == 0:
    +            logger.info("Finished.")
    +        else:
    +            logger.info("Finished with exit code %s.", outcome)
    +
    +        return sys.exit(outcome)
    +
    +    except utils.GatherVisionException as error:
    +        if logger.isEnabledFor(logging.DEBUG):
    +            raise
    +        logger.error("Error: %s - %s", error.__class__.__name__, str(error))
    +        return sys.exit(1)
    +
    +    except Exception as error:  # pylint: disable=broad-except
    +        if logger.isEnabledFor(logging.DEBUG):
    +            raise
    +        logger.error("Error: %s - %s", error.__class__.__name__, str(error))
    +        return sys.exit(2)
    +
    +
    +if __name__ == "__main__":
    +    main()
    +
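A small sketch of driving the command line from Python; main() finishes with sys.exit, so it raises SystemExit rather than returning normally. The console-script name in the comment is an assumption about how the package is installed.

from gather_vision import cli

try:
    cli.main(["list"])
except SystemExit as exit_info:
    print("exit code:", exit_info.code)

# Assumed equivalent shell usage, if a "gather-vision" console script is installed:
#   gather-vision list
#   gather-vision update example-plugin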
    +
    +
    +
    +
    +
    +
    +

    Functions

    +
    +
    +def cli_list(args: argparse.Namespace) ‑> bool +
    +
    +

    Run the list action from the cli.

    +

    Args

    +
    +
    args
    +
    The arguments for the list action.
    +
    +

    Returns

    +

    True if there were no errors.

    +
    def cli_list(
    +    args: argparse.Namespace,  # noqa: U100 pylint: disable=unused-argument
    +) -> bool:
    +    """Run the list action from the cli.
    +
    +    Args:
    +        args: The arguments for the list action.
    +
    +    Returns:
    +        True if there were no errors.
    +    """
    +    logger = logging.getLogger(__name__)
    +
    +    app_args = model.ListArgs()
    +    main_app = app.App()
    +    result = main_app.list(app_args)
    +
    +    logger.info("Listing %s plugins.", len(result.names))
    +    for index, name in enumerate(result.names):
    +        logger.info("  %s) %s", index + 1, name)
    +    return True
    +
    +
    +
    +def cli_show(args: argparse.Namespace) ‑> bool +
    +
    +

    Run the show action from the cli.

    +

    Args

    +
    +
    args
    +
    The arguments for the show action.
    +
    +

    Returns

    +

    True if there were no errors.

    +
    def cli_show(args: argparse.Namespace) -> bool:
    +    """Run the show action from the cli.
    +
    +    Args:
    +        args: The arguments for the show action.
    +
    +    Returns:
    +        True if there were no errors.
    +    """
    +    logger = logging.getLogger(__name__)
    +
    +    app_args = model.ShowArgs(name=args.name)
    +    main_app = app.App()
    +
    +    logger.info("Showing '%s'.", args.name)
    +    main_app.show(app_args)
    +    return True
    +
    +
    +
    +def cli_update(args: argparse.Namespace) ‑> bool +
    +
    +

    Run the update action from the cli.

    +

    Args

    +
    +
    args
    +
    The arguments for the update action.
    +
    +

    Returns

    +

    True if there were no errors.

    +
    def cli_update(args: argparse.Namespace) -> bool:
    +    """Run the update action from the cli.
    +
    +    Args:
    +        args: The arguments for the update action.
    +
    +    Returns:
    +        True if there were no errors.
    +    """
    +    logger = logging.getLogger(__name__)
    +
    +    app_args = model.UpdateArgs(name=args.name)
    +    main_app = app.App()
    +
    +    logger.info("Updating '%s'.", args.name)
    +    main_app.update(app_args)
    +    return True
    +
    +
    +
    +def main(args: Optional[List[str]] = None) ‑> int +
    +
    +

    Run as a command line program.

    +

    Args

    +
    +
    args
    +
    The program arguments.
    +
    +

    Returns

    +
    +
    int
    +
    Program exit code.
    +
    +
    def main(args: typing.Optional[typing.List[str]] = None) -> int:
    +    """Run as a command line program.
    +
    +    Args:
    +        args: The program arguments.
    +
    +    Returns:
    +        int: Program exit code.
    +    """
    +    if args is None:
    +        args = sys.argv[1:]
    +
    +    # configure logging
    +    logging.basicConfig(
    +        format="%(asctime)s [%(levelname)-8s] %(message)s", level=logging.DEBUG
    +    )
    +    logger = logging.getLogger(__name__)
    +
    +    # create the top-level parser
    +    parser = argparse.ArgumentParser(
    +        prog=utils.get_name_dash(),
    +        description="Obtain, extract, organise, and store information.",
    +    )
    +    parser.add_argument(
    +        "--version",
    +        action="version",
    +        version=f"%(prog)s {utils.get_version()}",
    +    )
    +    parser.add_argument(
    +        "--log-level",
    +        default="info",
    +        choices=["debug", "info", "warning", "error", "critical"],
    +        help="the log level: debug, info, warning, error, critical",
    +    )
    +    subparsers = parser.add_subparsers(
    +        title="Available subcommands",
    +        description="The actions available for plugins",
    +        dest="subcommand_action",
    +        required=False,
    +        help="The subcommands available to interact with installed plugins.",
    +        metavar="action",
    +    )
    +
    +    # create the parser for the "update" command
    +    parser_update = subparsers.add_parser("update")
    +    parser_update.add_argument(
    +        "name",
    +        help="The name of the update to run.",
    +    )
    +    parser_update.set_defaults(func=cli_update)
    +
    +    # create the parser for the "show" command
    +    parser_show = subparsers.add_parser("show")
    +    parser_show.add_argument(
    +        "name",
    +        help="The name of the group of information to show.",
    +    )
    +    parser_show.set_defaults(func=cli_show)
    +
    +    # create the parser for the "list" command
    +    parser_list = subparsers.add_parser("list")
    +    parser_list.set_defaults(func=cli_list)
    +
    +    try:
    +        parsed_args = parser.parse_args(args)
    +
    +        logging.getLogger().setLevel((parsed_args.log_level or "info").upper())
    +
    +        if not parsed_args.subcommand_action:
    +            parser.print_help(file=sys.stderr)
    +            sys.exit(1)
    +
    +        if logger.isEnabledFor(logging.DEBUG):
    +            logger.debug(
    +                "Starting %s with arguments '%s'.", utils.get_name_dash(), args
    +            )
    +        else:
    +            logger.info("Starting %s.", utils.get_name_dash())
    +
    +        if parsed_args.subcommand_action and hasattr(parsed_args, "func"):
    +            result = parsed_args.func(parsed_args)
    +        else:
    +            logger.warning("Not sure what to do with arguments '%s'.", args)
    +            result = False
    +
    +        outcome = 0 if result is True else 1
    +        if outcome == 0:
    +            logger.info("Finished.")
    +        else:
    +            logger.info("Finished with exit code %s.", outcome)
    +
    +        return sys.exit(outcome)
    +
    +    except utils.GatherVisionException as error:
    +        if logger.isEnabledFor(logging.DEBUG):
    +            raise
    +        logger.error("Error: %s - %s", error.__class__.__name__, str(error))
    +        return sys.exit(1)
    +
    +    except Exception as error:  # pylint: disable=broad-except
    +        if logger.isEnabledFor(logging.DEBUG):
    +            raise
    +        logger.error("Error: %s - %s", error.__class__.__name__, str(error))
    +        return sys.exit(2)
    +
    +
    +
    +
    +
    +
    +
    + +
+
\ No newline at end of file
diff --git a/docs/gather_vision/index.html b/docs/gather_vision/index.html
new file mode 100644
index 0000000..0edc87f
--- /dev/null
+++ b/docs/gather_vision/index.html
@@ -0,0 +1,155 @@
+gather_vision API documentation
    +
    +
    +

    Package gather_vision

    +
    +
    +

Documentation for the gather-vision package.

    +

    gather-vision

    +

    Obtain, extract, organise, and store information.

    +

    Install

    +

    Install from PyPI using pip:

    +
    pip install gather-vision
    +
    +

    PyPI +PyPI - Python Version +GitHub Workflow Status (branch)

    +

    Change log

    +

    unreleased

    +
    """Documentation for the leaf focus package.
    +
    +.. include:: ../../README.md
    +.. include:: ../../CHANGELOG.md
    +"""
    +
    +
    +
    +

    Sub-modules

    +
    +
    gather_vision.app
    +
    +

    The main application features.

    +
    +
    gather_vision.cli
    +
    +

    Command line for gather vision.

    +
    +
    gather_vision.model
    +
    +

    Models used by other modules.

    +
    +
    gather_vision.plugin
    +
    +

    Available to plugins.

    +
    +
    gather_vision.utils
    +
    +

    Small utility functions.

    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    + +
+
\ No newline at end of file
diff --git a/docs/gather_vision/model.html b/docs/gather_vision/model.html
new file mode 100644
index 0000000..6661fd3
--- /dev/null
+++ b/docs/gather_vision/model.html
@@ -0,0 +1,299 @@
+gather_vision.model API documentation
    +
    +
    +

    Module gather_vision.model

    +
    +
    +

    Models used by other modules.

    +
    """Models used by other modules."""
    +import dataclasses
    +import typing
    +
    +
    +@dataclasses.dataclass
    +class UpdateArgs:
    +    """The arguments for the update command."""
    +
    +    name: str
    +
    +
    +@dataclasses.dataclass
    +class UpdateResult:
    +    """The result from the update command."""
    +
    +
    +@dataclasses.dataclass
    +class ShowArgs:
    +    """The arguments for the show command."""
    +
    +    name: str
    +
    +
    +@dataclasses.dataclass
    +class ShowResult:
    +    """The result from the show command."""
    +
    +
    +@dataclasses.dataclass
    +class ListArgs:
    +    """The arguments for the list command."""
    +
    +
    +@dataclasses.dataclass
    +class ListResult:
    +    """The result from the list command."""
    +
    +    names: typing.List[str]
    +
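A brief sketch of these dataclasses in use (the plugin names are placeholders):

from gather_vision import model

update_args = model.UpdateArgs(name="example-plugin")
list_result = model.ListResult(names=["example-plugin", "another-plugin"])

print(update_args.name)
print(sorted(list_result.names))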
    +
    +
    +
    +
    +
    +
    +
    +
    +

    Classes

    +
    +
    +class ListArgs +
    +
    +

    The arguments for the list command.

    +
    @dataclasses.dataclass
    +class ListArgs:
    +    """The arguments for the list command."""
    +
    +
    +
    +class ListResult +(names: List[str]) +
    +
    +

    The result from the list command.

    +
    @dataclasses.dataclass
    +class ListResult:
    +    """The result from the list command."""
    +
    +    names: typing.List[str]
    +
    +

    Class variables

    +
    +
    var names : List[str]
    +
    +
    +
    +
    +
    +
    +class ShowArgs +(name: str) +
    +
    +

    The arguments for the show command.

    +
    @dataclasses.dataclass
    +class ShowArgs:
    +    """The arguments for the show command."""
    +
    +    name: str
    +
    +

    Class variables

    +
    +
    var name : str
    +
    +
    +
    +
    +
    +
    +class ShowResult +
    +
    +

    The result from the show command.

    +
    @dataclasses.dataclass
    +class ShowResult:
    +    """The result from the show command."""
    +
    +
    +
    +class UpdateArgs +(name: str) +
    +
    +

    The arguments for the update command.

    +
    @dataclasses.dataclass
    +class UpdateArgs:
    +    """The arguments for the update command."""
    +
    +    name: str
    +
    +

    Class variables

    +
    +
    var name : str
    +
    +
    +
    +
    +
    +
    +class UpdateResult +
    +
    +

    The result from the update command.

    +
    @dataclasses.dataclass
    +class UpdateResult:
    +    """The result from the update command."""
    +
    +
    +
    +
    +
    + +
+
\ No newline at end of file
diff --git a/docs/gather_vision/plugin.html b/docs/gather_vision/plugin.html
new file mode 100644
index 0000000..84a0e94
--- /dev/null
+++ b/docs/gather_vision/plugin.html
@@ -0,0 +1,260 @@
+gather_vision.plugin API documentation
    +
    +
    +

    Module gather_vision.plugin

    +
    +
    +

    Available to plugins.

    +
    """Available to plugins."""
    +import abc
    +
    +from gather_vision import model
    +
    +
    +class Entry(abc.ABC):
    +    """The entry point class for plugins.
    +    Compatible plugins must implement this class."""
    +
    +    @abc.abstractmethod
    +    def update(self, args: model.UpdateArgs) -> model.UpdateResult:  # noqa: U100
    +        """Run the update action.
    +
    +        Args:
    +            args: The arguments for update.
    +
    +        Returns:
    +            The result of the update action.
    +        """
    +        raise NotImplementedError("Must implement 'update'.")
    +
    +    @abc.abstractmethod
    +    def show(self, args: model.ShowArgs) -> model.ShowResult:  # noqa: U100
    +        """Run the show action.
    +
    +        Args:
    +            args: The arguments for show.
    +
    +        Returns:
    +            The result of the show action.
    +        """
    +        raise NotImplementedError("Must implement 'show'.")
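A minimal sketch of a compatible plugin, assuming a hypothetical package example_plugin; the entry-point group name in the comment is also an assumption, since the real group value lives in gather_vision.app.

from gather_vision import model, plugin


class ExamplePlugin(plugin.Entry):
    """An illustrative plugin that returns empty results."""

    def update(self, args: model.UpdateArgs) -> model.UpdateResult:
        # Obtain and store the data identified by args.name here.
        return model.UpdateResult()

    def show(self, args: model.ShowArgs) -> model.ShowResult:
        # Gather the details identified by args.name here.
        return model.ShowResult()


# Advertised to gather-vision via an entry point in the plugin's pyproject.toml:
#
#   [project.entry-points."gather_vision.plugin"]   # group name is an assumption
#   example-plugin = "example_plugin:ExamplePlugin"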
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +

    Classes

    +
    +
    +class Entry +
    +
    +

The entry point class for plugins. Compatible plugins must implement this class.

    +
    class Entry(abc.ABC):
    +    """The entry point class for plugins.
    +    Compatible plugins must implement this class."""
    +
    +    @abc.abstractmethod
    +    def update(self, args: model.UpdateArgs) -> model.UpdateResult:  # noqa: U100
    +        """Run the update action.
    +
    +        Args:
    +            args: The arguments for update.
    +
    +        Returns:
    +            The result of the update action.
    +        """
    +        raise NotImplementedError("Must implement 'update'.")
    +
    +    @abc.abstractmethod
    +    def show(self, args: model.ShowArgs) -> model.ShowResult:  # noqa: U100
    +        """Run the show action.
    +
    +        Args:
    +            args: The arguments for show.
    +
    +        Returns:
    +            The result of the show action.
    +        """
    +        raise NotImplementedError("Must implement 'show'.")
    +
    +

    Ancestors

    +
• abc.ABC

    Methods

    +
    +
    +def show(self, args: ShowArgs) ‑> ShowResult +
    +
    +

    Run the show action.

    +

    Args

    +
    +
    args
    +
    The arguments for show.
    +
    +

    Returns

    +

    The result of the show action.

    +
    @abc.abstractmethod
    +def show(self, args: model.ShowArgs) -> model.ShowResult:  # noqa: U100
    +    """Run the show action.
    +
    +    Args:
    +        args: The arguments for show.
    +
    +    Returns:
    +        The result of the show action.
    +    """
    +    raise NotImplementedError("Must implement 'show'.")
    +
    +
    +
    +def update(self, args: UpdateArgs) ‑> UpdateResult +
    +
    +

    Run the update action.

    +

    Args

    +
    +
    args
    +
    The arguments for update.
    +
    +

    Returns

    +

    The result of the update action.

    +
    @abc.abstractmethod
    +def update(self, args: model.UpdateArgs) -> model.UpdateResult:  # noqa: U100
    +    """Run the update action.
    +
    +    Args:
    +        args: The arguments for update.
    +
    +    Returns:
    +        The result of the update action.
    +    """
    +    raise NotImplementedError("Must implement 'update'.")
    +
    +
    +
    +
    +
    +
    +
    + +
+
\ No newline at end of file
diff --git a/docs/gather_vision/utils.html b/docs/gather_vision/utils.html
new file mode 100644
index 0000000..89bc7a4
--- /dev/null
+++ b/docs/gather_vision/utils.html
@@ -0,0 +1,316 @@
+gather_vision.utils API documentation
    +
    +
    +

    Module gather_vision.utils

    +
    +
    +

    Small utility functions.

    +
    """Small utility functions."""
    +import pathlib
    +import typing
    +
    +from importlib_metadata import PackageNotFoundError, distribution
    +from importlib_resources import as_file, files
    +
    +
    +def get_name_dash() -> str:
    +    """Get the package name with word separated by dashes."""
    +    return "gather-vision"
    +
    +
    +def get_name_under() -> str:
    +    """Get the package name with word separated by underscores."""
    +    return "gather_vision"
    +
    +
    +def get_version() -> typing.Optional[str]:
    +    """Get the package version."""
    +    try:
    +        dist = distribution(get_name_dash())
    +        return dist.version
    +    except PackageNotFoundError:
    +        pass
    +
    +    try:
    +        with as_file(files(get_name_under()).joinpath("cli.py")) as file_path:
    +            return (file_path.parent.parent.parent / "VERSION").read_text().strip()
    +    except FileNotFoundError:
    +        pass
    +
    +    return None
    +
    +
    +def validate(name: str, value, expected: typing.List) -> None:
    +    """Validate that a value is one of the expected values."""
    +    if value is not None and value not in expected:
    +        opts = ", ".join(sorted([str(i) for i in expected]))
    +        raise GatherVisionException(
    +            f"Invalid {name} '{value}'. Expected one of '{opts}'."
    +        )
    +
    +
    +def validate_path(
    +    name: str, value: pathlib.Path, must_exist: bool = False
    +) -> pathlib.Path:
    +    """Validate a path."""
    +    if not value:
    +        raise GatherVisionException(f"Must provide path {name}.")
    +
    +    try:
    +        if must_exist is True:
    +            abs_path = value.resolve(strict=True)
    +        else:
    +            abs_path = value.absolute()
    +
    +        return abs_path
    +    except Exception as error:
    +        raise GatherVisionException(f"Invalid path '{value}'.") from error
    +
    +
    +class GatherVisionException(Exception):
    +    """A gather vision error."""
    +
    +
    +
    +
    +
    +
    +
    +

    Functions

    +
    +
    +def get_name_dash() ‑> str +
    +
    +

Get the package name with words separated by dashes.

    +
    def get_name_dash() -> str:
    +    """Get the package name with word separated by dashes."""
    +    return "gather-vision"
    +
    +
    +
    +def get_name_under() ‑> str +
    +
    +

Get the package name with words separated by underscores.

    +
    def get_name_under() -> str:
    +    """Get the package name with word separated by underscores."""
    +    return "gather_vision"
    +
    +
    +
    +def get_version() ‑> Optional[str] +
    +
    +

    Get the package version.

    +
    def get_version() -> typing.Optional[str]:
    +    """Get the package version."""
    +    try:
    +        dist = distribution(get_name_dash())
    +        return dist.version
    +    except PackageNotFoundError:
    +        pass
    +
    +    try:
    +        with as_file(files(get_name_under()).joinpath("cli.py")) as file_path:
    +            return (file_path.parent.parent.parent / "VERSION").read_text().strip()
    +    except FileNotFoundError:
    +        pass
    +
    +    return None
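As a quick illustration, get_version() reports the installed distribution's version, falls back to the repository's VERSION file, and returns None when neither is available (the printed value is only an example):

from gather_vision import utils

print(utils.get_version())  # e.g. "0.0.1" when installed, otherwise None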
    +
    +
    +
    +def validate(name: str, value, expected: List) ‑> None +
    +
    +

    Validate that a value is one of the expected values.

    +
    def validate(name: str, value, expected: typing.List) -> None:
    +    """Validate that a value is one of the expected values."""
    +    if value is not None and value not in expected:
    +        opts = ", ".join(sorted([str(i) for i in expected]))
    +        raise GatherVisionException(
    +            f"Invalid {name} '{value}'. Expected one of '{opts}'."
    +        )
    +
    +
    +
    +def validate_path(name: str, value: pathlib.Path, must_exist: bool = False) ‑> pathlib.Path +
    +
    +

    Validate a path.

    +
    def validate_path(
    +    name: str, value: pathlib.Path, must_exist: bool = False
    +) -> pathlib.Path:
    +    """Validate a path."""
    +    if not value:
    +        raise GatherVisionException(f"Must provide path {name}.")
    +
    +    try:
    +        if must_exist is True:
    +            abs_path = value.resolve(strict=True)
    +        else:
    +            abs_path = value.absolute()
    +
    +        return abs_path
    +    except Exception as error:
    +        raise GatherVisionException(f"Invalid path '{value}'.") from error
    +
    +
    +
    +
    +
    +

    Classes

    +
    +
    +class GatherVisionException +(*args, **kwargs) +
    +
    +

    A gather vision error.

    +
    class GatherVisionException(Exception):
    +    """A gather vision error."""
    +
    +

    Ancestors

    +
• builtins.Exception
• builtins.BaseException
    +
    +
    +
    + +
+
\ No newline at end of file
diff --git a/docs/index.html b/docs/index.html
new file mode 100644
index 0000000..4d0db3b
--- /dev/null
+++ b/docs/index.html
@@ -0,0 +1,21 @@
+gather-vision

    gather-vision documentation

    + + + + + + diff --git a/docs/index.js b/docs/index.js new file mode 100644 index 0000000..aff25c7 --- /dev/null +++ b/docs/index.js @@ -0,0 +1,217 @@ +URLS=[ +"gather_vision/index.html", +"gather_vision/app.html", +"gather_vision/cli.html", +"gather_vision/model.html", +"gather_vision/plugin.html", +"gather_vision/utils.html" +]; +INDEX=[ +{ +"ref":"gather_vision", +"url":0, +"doc":"Documentation for the leaf focus package. gather-vision Obtain, extract, organise, and store information. Install Install from PyPI using pip: pip install gather-vision [![PyPI](https: img.shields.io/pypi/v/gather-vision)](https: pypi.org/project/gather-vision/) ![PyPI - Python Version](https: img.shields.io/pypi/pyversions/gather-vision) ![GitHub Workflow Status (branch)](https: img.shields.io/github/workflow/status/anotherbyte-net/gather-vision/Create%20Package/main) Change log unreleased " +}, +{ +"ref":"gather_vision.app", +"url":1, +"doc":"The main application features." +}, +{ +"ref":"gather_vision.app.App", +"url":1, +"doc":"The main application." +}, +{ +"ref":"gather_vision.app.App.group", +"url":1, +"doc":"" +}, +{ +"ref":"gather_vision.app.App.entry_points", +"url":1, +"doc":"" +}, +{ +"ref":"gather_vision.app.App.plugins", +"url":1, +"doc":"" +}, +{ +"ref":"gather_vision.app.App.collect", +"url":1, +"doc":"Collect the available plugins. Returns: A collection of EntryPoints.", +"func":1 +}, +{ +"ref":"gather_vision.app.App.load", +"url":1, +"doc":"Load the plugin class for each plugin. Returns: A list of", +"func":1 +}, +{ +"ref":"gather_vision.app.App.get", +"url":1, +"doc":"Get the class for a plugin. Args: name: The name of the plugin. Returns: The plugin entry class.", +"func":1 +}, +{ +"ref":"gather_vision.app.App.update", +"url":1, +"doc":"Execute the update action for the plugin with the given name. Args: args: The update arguments. Returns: The result of running the plugin's update process.", +"func":1 +}, +{ +"ref":"gather_vision.app.App.show", +"url":1, +"doc":"Execute the show action for the plugin with the given name. Args: args: The show arguments. Returns: The details of the plugin.", +"func":1 +}, +{ +"ref":"gather_vision.app.App.list", +"url":1, +"doc":"List all available plugins. Args: args: The list arguments. Returns: A list of plugins.", +"func":1 +}, +{ +"ref":"gather_vision.cli", +"url":2, +"doc":"Command line for gather vision." +}, +{ +"ref":"gather_vision.cli.cli_update", +"url":2, +"doc":"Run the update action from the cli. Args: args: The arguments for the update action. Returns: True if there were no errors.", +"func":1 +}, +{ +"ref":"gather_vision.cli.cli_show", +"url":2, +"doc":"Run the show action from the cli. Args: args: The arguments for the show action. Returns: True if there were no errors.", +"func":1 +}, +{ +"ref":"gather_vision.cli.cli_list", +"url":2, +"doc":"Run the list action from the cli. Args: args: The arguments for the list action. Returns: True if there were no errors.", +"func":1 +}, +{ +"ref":"gather_vision.cli.main", +"url":2, +"doc":"Run as a command line program. Args: args: The program arguments. Returns: int: Program exit code.", +"func":1 +}, +{ +"ref":"gather_vision.model", +"url":3, +"doc":"Models used by other modules." +}, +{ +"ref":"gather_vision.model.UpdateArgs", +"url":3, +"doc":"The arguments for the update command." +}, +{ +"ref":"gather_vision.model.UpdateArgs.name", +"url":3, +"doc":"" +}, +{ +"ref":"gather_vision.model.UpdateResult", +"url":3, +"doc":"The result from the update command." 
+}, +{ +"ref":"gather_vision.model.ShowArgs", +"url":3, +"doc":"The arguments for the show command." +}, +{ +"ref":"gather_vision.model.ShowArgs.name", +"url":3, +"doc":"" +}, +{ +"ref":"gather_vision.model.ShowResult", +"url":3, +"doc":"The result from the show command." +}, +{ +"ref":"gather_vision.model.ListArgs", +"url":3, +"doc":"The arguments for the list command." +}, +{ +"ref":"gather_vision.model.ListResult", +"url":3, +"doc":"The result from the list command." +}, +{ +"ref":"gather_vision.model.ListResult.names", +"url":3, +"doc":"" +}, +{ +"ref":"gather_vision.plugin", +"url":4, +"doc":"Available to plugins." +}, +{ +"ref":"gather_vision.plugin.Entry", +"url":4, +"doc":"The entry point class for plugins. Compatible plugins must implement this class." +}, +{ +"ref":"gather_vision.plugin.Entry.update", +"url":4, +"doc":"Run the update action. Args: args: The arguments for update. Returns: The result of the update action.", +"func":1 +}, +{ +"ref":"gather_vision.plugin.Entry.show", +"url":4, +"doc":"Run the show action. Args: args: The arguments for show. Returns: The result of the show action.", +"func":1 +}, +{ +"ref":"gather_vision.utils", +"url":5, +"doc":"Small utility functions." +}, +{ +"ref":"gather_vision.utils.get_name_dash", +"url":5, +"doc":"Get the package name with word separated by dashes.", +"func":1 +}, +{ +"ref":"gather_vision.utils.get_name_under", +"url":5, +"doc":"Get the package name with word separated by underscores.", +"func":1 +}, +{ +"ref":"gather_vision.utils.get_version", +"url":5, +"doc":"Get the package version.", +"func":1 +}, +{ +"ref":"gather_vision.utils.validate", +"url":5, +"doc":"Validate that a value is one of the expected values.", +"func":1 +}, +{ +"ref":"gather_vision.utils.validate_path", +"url":5, +"doc":"Validate a path.", +"func":1 +}, +{ +"ref":"gather_vision.utils.GatherVisionException", +"url":5, +"doc":"A gather vision error." 
+} +] \ No newline at end of file diff --git a/gather_vision/__init__.py b/gather_vision/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/gather_vision/admin/__init__.py b/gather_vision/admin/__init__.py deleted file mode 100644 index 84d3dc5..0000000 --- a/gather_vision/admin/__init__.py +++ /dev/null @@ -1,19 +0,0 @@ -# shared -from .shared import InformationSourceAdmin # noqa: F401 - -# outages -from .outages import OutageGroupAdmin # noqa: F401 -from .outages import OutageItemAdmin # noqa: F401 - -# petitions -from .petitions import PetitionItemAdmin # noqa: F401 -from .petitions import PetitionChangeAdmin # noqa: F401 - -# playlists -from .playlists import PlaylistItemAdmin # noqa: F401 -from .playlists import PlaylistEntryAdmin # noqa: F401 -from .playlists import PlaylistTrackAdmin # noqa: F401 - -# transport -from .transport import TransportItemAdmin # noqa: F401 -from .transport import TransportLineAdmin # noqa: F401 diff --git a/gather_vision/admin/outages.py b/gather_vision/admin/outages.py deleted file mode 100644 index 8a4a910..0000000 --- a/gather_vision/admin/outages.py +++ /dev/null @@ -1,67 +0,0 @@ -from django.contrib import admin - -import gather_vision.models as app_models - - -@admin.register(app_models.OutageItem) -class OutageItemAdmin(admin.ModelAdmin): - list_display = ( - "group", - "customers", - "council", - "suburb", - "post_code", - "cause", - "restored_date", - "source", - ) - date_hierarchy = "restored_date" - list_filter = ("source__title", "council", "cause") - search_fields = ( - "event_name", - "council", - "suburb", - "post_code", - "cause", - "streets", - ) - ordering = ("-modified_date",) - - -class OutageItemInlineAdmin(admin.TabularInline): - model = app_models.OutageItem - can_delete = False - show_change_link = True - extra = 0 - fields = ( - "customers", - "council", - "suburb", - "post_code", - "cause", - "restored_date", - ) - readonly_fields = ( - "customers", - "council", - "suburb", - "post_code", - "cause", - "restored_date", - ) - - -@admin.register(app_models.OutageGroup) -class OutageGroupAdmin(admin.ModelAdmin): - list_display = ( - "total_customers", - "demand", - "rating", - "source_updated_date", - "retrieved_date", - ) - date_hierarchy = "source_updated_date" - list_filter = ("rating",) - search_fields = ("demand", "total_customers") - inlines = [OutageItemInlineAdmin] - ordering = ("-retrieved_date",) diff --git a/gather_vision/admin/petitions.py b/gather_vision/admin/petitions.py deleted file mode 100644 index ab76ae8..0000000 --- a/gather_vision/admin/petitions.py +++ /dev/null @@ -1,32 +0,0 @@ -from django.contrib import admin - -import gather_vision.models as app_models - - -@admin.register(app_models.PetitionChange) -class PetitionChangeAdmin(admin.ModelAdmin): - list_display = ("retrieved_date", "signatures", "petition") - date_hierarchy = "retrieved_date" - list_filter = ("petition__source__title",) - search_fields = ("signatures", "petition__title", "petition__code") - ordering = ("-retrieved_date",) - - -class PetitionChangeInlineAdmin(admin.TabularInline): - model = app_models.PetitionChange - can_delete = False - show_change_link = True - extra = 0 - readonly_fields = ("retrieved_date", "signatures") - - -@admin.register(app_models.PetitionItem) -class PetitionItemAdmin(admin.ModelAdmin): - list_display = ("title", "code", "opened_date", "closed_date") - date_hierarchy = "closed_date" - list_filter = ("source__title", "eligibility") - search_fields = ("title", "code", "principal", "sponsor", 
"body") - inlines = [ - PetitionChangeInlineAdmin, - ] - ordering = ("-closed_date",) diff --git a/gather_vision/admin/playlists.py b/gather_vision/admin/playlists.py deleted file mode 100644 index 46297f6..0000000 --- a/gather_vision/admin/playlists.py +++ /dev/null @@ -1,41 +0,0 @@ -from django.contrib import admin - -import gather_vision.models as app_models - - -@admin.register(app_models.PlaylistItem) -class PlaylistItemAdmin(admin.ModelAdmin): - list_display = ("source", "retrieved_date") - date_hierarchy = "retrieved_date" - list_filter = ("source__title",) - search_fields = ("entries__position", "entries__position_change") - - -@admin.register(app_models.PlaylistEntry) -class PlaylistEntryAdmin(admin.ModelAdmin): - list_display = ("playlist", "position", "position_change") - date_hierarchy = "modified_date" - list_filter = ("playlist__source__title",) - search_fields = ( - "position", - "position_change", - "tracks__title", - "tracks__artists", - "tracks__code", - ) - ordering = ("playlist", "position") - filter_horizontal = ("tracks",) - - -@admin.register(app_models.PlaylistTrack) -class PlaylistTrackAdmin(admin.ModelAdmin): - list_display = ( - "title", - "artists", - "source", - "code", - "musicbrainz_code", - ) - date_hierarchy = "modified_date" - list_filter = ("source__title",) - search_fields = ("code", "title", "artists") diff --git a/gather_vision/admin/shared.py b/gather_vision/admin/shared.py deleted file mode 100644 index 7c7134e..0000000 --- a/gather_vision/admin/shared.py +++ /dev/null @@ -1,8 +0,0 @@ -from django.contrib import admin - -import gather_vision.models as app_models - - -@admin.register(app_models.InformationSource) -class InformationSourceAdmin(admin.ModelAdmin): - list_display = ("name", "title", "info_url") diff --git a/gather_vision/admin/transport.py b/gather_vision/admin/transport.py deleted file mode 100644 index 9ce5efb..0000000 --- a/gather_vision/admin/transport.py +++ /dev/null @@ -1,41 +0,0 @@ -from django.contrib import admin - -import gather_vision.models as app_models - - -@admin.register(app_models.TransportItem) -class TransportItemAdmin(admin.ModelAdmin): - list_display = ( - "source_identifier", - "title", - "start_date", - "stop_date", - "notice_type", - "category", - "severity", - "timing", - "is_train", - ) - - list_filter = ( - "notice_type", - "category", - "severity", - "timing", - "is_train", - "source__title", - "lines__title", - ) - search_fields = ( - "title", - "body", - ) - ordering = ("-start_date", "-stop_date") - - -@admin.register(app_models.TransportLine) -class TransportLineAdmin(admin.ModelAdmin): - list_display = ("title",) - date_hierarchy = "modified_date" - list_filter = ("notices__source__title",) - ordering = ("title",) diff --git a/gather_vision/apps.py b/gather_vision/apps.py deleted file mode 100644 index ca95a60..0000000 --- a/gather_vision/apps.py +++ /dev/null @@ -1,9 +0,0 @@ -from django.apps import AppConfig -from django.utils.translation import gettext_lazy as _ - - -class GatherVisionConfig(AppConfig): - # ref: https://docs.djangoproject.com/en/4.0/ref/contrib/admin/#overriding-the-default-admin-site # noqa: E501 - default_auto_field = "django.db.models.BigAutoField" - name = "gather_vision" - verbose_name = _("Gather Vision") diff --git a/gather_vision/management/__init__.py b/gather_vision/management/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/gather_vision/management/commands/__init__.py b/gather_vision/management/commands/__init__.py deleted file mode 100644 index 
e69de29..0000000 diff --git a/gather_vision/management/commands/visionprocess.py b/gather_vision/management/commands/visionprocess.py deleted file mode 100644 index 867198f..0000000 --- a/gather_vision/management/commands/visionprocess.py +++ /dev/null @@ -1,85 +0,0 @@ -import os -from pathlib import Path -from zoneinfo import ZoneInfo - -from django.core.management.base import BaseCommand, CommandError -from django.utils import timezone - -from gather_vision.process.component.http_client import HttpClient -from gather_vision.process.component.logger import Logger -from gather_vision.process.manage.contact_tracing import ContactTracing -from gather_vision.process.manage.outages import Outages -from gather_vision.process.manage.petitions import Petitions -from gather_vision.process.manage.playlists import Playlists -from gather_vision.process.manage.transport import Transport - - -class Command(BaseCommand): - help = "Run a gather-vision management command." - - _processes = { - "contacttracing": ContactTracing, - "outages": Outages, - "petitions": Petitions, - "playlists": Playlists, - "transport": Transport, - } - - def add_arguments(self, parser): - parser.add_argument( - "process", - choices=sorted(self._processes.keys()), - help="The name of the process to run.", - ) - parser.add_argument( - "operation", - choices=["init", "import", "update"], - help="The operation to run.", - ) - parser.add_argument( - "timezone", - type=ZoneInfo, - help="The name of the timezone to use for dates and times.", - ) - parser.add_argument( - "--data-path", type=Path, help="The path to the data file to import." - ) - - def handle(self, *args, **options): - logger = Logger(stdout=self.stdout, style=self.style) - - http_client = HttpClient(logger) - process = options["process"] - operation = options["operation"] - tz = options["timezone"] - data_path = options.get("data_path") - - try: - timezone.activate(tz) - os.environ["TZ"] = str(tz) - - process_class = self._processes[process] - process_obj = process_class(logger, tz, http_client) - - attr = f"run_{operation}" - if not hasattr(process_obj, attr): - raise ValueError(f"Process '{process}' has no operation '{operation}'.") - - if operation == "import" and not data_path: - raise ValueError("The data path is required to run import.") - - attr_ref = getattr(process_obj, attr) - if operation == "init": - attr_ref() - elif operation == "import": - attr_ref(data_path) - elif operation == "update": - attr_ref() - - logger.info("Finished.") - - except Exception as e: - raise CommandError(e) - - finally: - timezone.deactivate() diff --git a/gather_vision/migrations/0001_initial.py b/gather_vision/migrations/0001_initial.py deleted file mode 100644 index b44f802..0000000 --- a/gather_vision/migrations/0001_initial.py +++ /dev/null @@ -1,12 +0,0 @@ -# Generated by Django 3.2.9 on 2021-11-25 11:08 - -from django.db import migrations - - -class Migration(migrations.Migration): - - dependencies = [ - ] - - operations = [ - ] diff --git a/gather_vision/migrations/0002_initial.py b/gather_vision/migrations/0002_initial.py deleted file mode 100644 index 503426a..0000000 --- a/gather_vision/migrations/0002_initial.py +++ /dev/null @@ -1,194 +0,0 @@ -# Generated by Django 3.2.9 on 2021-11-30 12:41 - -from django.db import migrations, models -import django.db.models.deletion - - -class Migration(migrations.Migration): - - initial = True - - dependencies = [ - ('gather_vision', '0001_initial'), - ] - - operations = [ - migrations.CreateModel( - name='InformationSource', - 
fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('created_date', models.DateTimeField(auto_now_add=True, help_text='The date this item was created.')), - ('modified_date', models.DateTimeField(auto_now=True, help_text='The date this item was last changed.')), - ('name', models.SlugField(help_text='The name of the information source.', unique=True)), - ('title', models.CharField(help_text='The displayed title.', max_length=100)), - ('info_url', models.URLField(blank=True, help_text='A link to details about the information source.')), - ], - options={ - 'ordering': ['modified_date'], - 'abstract': False, - }, - ), - migrations.CreateModel( - name='OutageGroup', - fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('created_date', models.DateTimeField(auto_now_add=True, help_text='The date this item was created.')), - ('modified_date', models.DateTimeField(auto_now=True, help_text='The date this item was last changed.')), - ('retrieved_date', models.DateTimeField(help_text='The date this outage update was retrieved.')), - ('source_updated_date', models.DateTimeField(blank=True, help_text='The date the outage info was last updated by the source.', null=True)), - ('demand', models.PositiveIntegerField(help_text='The amount of demand.')), - ('rating', models.PositiveIntegerField(help_text='The rating of the demand level.')), - ('total_customers', models.PositiveIntegerField(help_text='The total number of customers affected.')), - ], - options={ - 'ordering': ['modified_date'], - 'abstract': False, - }, - ), - migrations.CreateModel( - name='TransportLine', - fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('created_date', models.DateTimeField(auto_now_add=True, help_text='The date this item was created.')), - ('modified_date', models.DateTimeField(auto_now=True, help_text='The date this item was last changed.')), - ('title', models.CharField(help_text='The displayed title of the transport network line.', max_length=100)), - ], - options={ - 'ordering': ['modified_date'], - 'abstract': False, - }, - ), - migrations.CreateModel( - name='TransportItem', - fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('created_date', models.DateTimeField(auto_now_add=True, help_text='The date this item was created.')), - ('modified_date', models.DateTimeField(auto_now=True, help_text='The date this item was last changed.')), - ('source_identifier', models.CharField(help_text='The identifier for this transport notice from the source.', max_length=200)), - ('title', models.CharField(help_text='The title of the transport notice.', max_length=200)), - ('body', models.TextField(blank=True, help_text='The text of the transport notice.')), - ('start_date', models.DateField(blank=True, help_text='The start date of this transport notice.', null=True)), - ('stop_date', models.DateField(blank=True, help_text='The finish date this transport notice.', null=True)), - ('is_train', models.BooleanField(help_text='Whether the transport notice includes train services.')), - ('view_url', models.URLField(blank=True, help_text='The url to view the transport notice.')), - ('notice_type', models.CharField(blank=True, help_text='The type of the transport notice.', max_length=200)), - ('category', models.CharField(blank=True, help_text='The category of the transport notice.', 
max_length=200)), - ('severity', models.CharField(blank=True, help_text='The severity of the transport notice.', max_length=200)), - ('timing', models.CharField(blank=True, help_text='The timing of the transport notice.', max_length=200)), - ('location', models.CharField(blank=True, help_text='The location covered by the transport notice.', max_length=500)), - ('lines', models.ManyToManyField(help_text='The lines involved in this transport notice.', related_name='notices', to='gather_vision.TransportLine')), - ('source', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='transport_items', to='gather_vision.informationsource')), - ], - options={ - 'ordering': ['modified_date'], - 'abstract': False, - }, - ), - migrations.CreateModel( - name='PlaylistTrack', - fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('created_date', models.DateTimeField(auto_now_add=True, help_text='The date this item was created.')), - ('modified_date', models.DateTimeField(auto_now=True, help_text='The date this item was last changed.')), - ('code', models.CharField(help_text='The unique code assigned to this track by the music source.', max_length=100)), - ('title', models.CharField(help_text='The title of the track.', max_length=500)), - ('artists', models.CharField(help_text='The artists for the track.', max_length=800)), - ('info_url', models.URLField(blank=True, help_text='A link to the information provided by the source about the track.')), - ('image_url', models.URLField(blank=True, help_text='A link to the art for the track cover.')), - ('musicbrainz_code', models.UUIDField(blank=True, help_text='The MusicBrainz recording id for this track.', null=True)), - ('source', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='tracks', to='gather_vision.informationsource')), - ], - ), - migrations.CreateModel( - name='PlaylistItem', - fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('created_date', models.DateTimeField(auto_now_add=True, help_text='The date this item was created.')), - ('modified_date', models.DateTimeField(auto_now=True, help_text='The date this item was last changed.')), - ('retrieved_date', models.DateTimeField(help_text='The date and time this playlist was retrieved.')), - ('source', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='playlists', to='gather_vision.informationsource')), - ], - options={ - 'ordering': ['modified_date'], - 'abstract': False, - }, - ), - migrations.CreateModel( - name='PlaylistEntry', - fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('created_date', models.DateTimeField(auto_now_add=True, help_text='The date this item was created.')), - ('modified_date', models.DateTimeField(auto_now=True, help_text='The date this item was last changed.')), - ('position_change', models.IntegerField(blank=True, help_text='The position change of this entry compared to the previously generated playlist.', null=True)), - ('order', models.PositiveIntegerField(help_text='The order of this entry in the playlist.')), - ('playlist', models.ForeignKey(help_text='The playlist that contains this entry.', on_delete=django.db.models.deletion.CASCADE, related_name='entries', to='gather_vision.playlistitem')), - ('tracks', models.ManyToManyField(help_text='The tracks that match this playlist entry.', related_name='entries', 
to='gather_vision.PlaylistTrack')), - ], - options={ - 'verbose_name_plural': 'Playlist entries', - }, - ), - migrations.CreateModel( - name='PetitionItem', - fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('created_date', models.DateTimeField(auto_now_add=True, help_text='The date this item was created.')), - ('modified_date', models.DateTimeField(auto_now=True, help_text='The date this item was last changed.')), - ('title', models.CharField(help_text='The title of the petition.', max_length=1000)), - ('code', models.CharField(help_text='The petition reference code.', max_length=50)), - ('view_url', models.URLField(help_text='The url to the petition.')), - ('principal', models.CharField(help_text='The name (and address) of the principal petitioner.', max_length=300)), - ('sponsor', models.CharField(help_text='The name of the sponsor of the petition.', max_length=100)), - ('eligibility', models.CharField(help_text='The eligibility to sign the petition.', max_length=100)), - ('body', models.TextField(help_text='The text of the petition.')), - ('opened_date', models.DateField(blank=True, help_text='The date this petition opened.', null=True)), - ('closed_date', models.DateField(help_text='The date this petition closed.')), - ('source', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='petitions', to='gather_vision.informationsource')), - ], - ), - migrations.CreateModel( - name='PetitionChange', - fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('created_date', models.DateTimeField(auto_now_add=True, help_text='The date this item was created.')), - ('modified_date', models.DateTimeField(auto_now=True, help_text='The date this item was last changed.')), - ('retrieved_date', models.DateTimeField(help_text='The date this petition update was retrieved.')), - ('signatures', models.PositiveIntegerField(help_text='The number of signatures.')), - ('petition', models.ForeignKey(help_text='The petition.', on_delete=django.db.models.deletion.CASCADE, related_name='signature_changes', to='gather_vision.petitionitem')), - ], - options={ - 'ordering': ['modified_date'], - 'abstract': False, - }, - ), - migrations.CreateModel( - name='OutageItem', - fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('created_date', models.DateTimeField(auto_now_add=True, help_text='The date this item was created.')), - ('modified_date', models.DateTimeField(auto_now=True, help_text='The date this item was last changed.')), - ('event_name', models.CharField(blank=True, help_text='The name of the outage event.', max_length=500)), - ('council', models.CharField(help_text='The name of the council that covers the outage location.', max_length=500)), - ('suburb', models.CharField(help_text='The name of the suburb that covers the outage location.', max_length=500)), - ('post_code', models.CharField(blank=True, help_text='The location post code.', max_length=4)), - ('cause', models.CharField(help_text='The cause of the outage.', max_length=500)), - ('streets', models.CharField(blank=True, help_text='The name of the streets involved in the outage.', max_length=400)), - ('restored_date', models.DateTimeField(blank=True, help_text='The date this outage ended.', null=True)), - ('customers', models.PositiveIntegerField(help_text='The number of customers affected.')), - ('group', 
models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='outages', to='gather_vision.outagegroup')), - ('source', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='outages', to='gather_vision.informationsource')), - ], - ), - migrations.AddConstraint( - model_name='playlisttrack', - constraint=models.UniqueConstraint(fields=('source', 'code'), name='music_track_unique_source_code'), - ), - migrations.AddConstraint( - model_name='playlistentry', - constraint=models.UniqueConstraint(fields=('playlist', 'order'), name='playlist_entry_unique_playlist_order'), - ), - migrations.AddConstraint( - model_name='outageitem', - constraint=models.UniqueConstraint(fields=('group', 'event_name'), name='outage_item_unique_group_event_name'), - ), - ] diff --git a/gather_vision/migrations/0003_alter_petitionchange_options_and_more.py b/gather_vision/migrations/0003_alter_petitionchange_options_and_more.py deleted file mode 100644 index ffef8c8..0000000 --- a/gather_vision/migrations/0003_alter_petitionchange_options_and_more.py +++ /dev/null @@ -1,55 +0,0 @@ -# Generated by Django 4.0 on 2021-12-11 00:22 - -from django.db import migrations, models -import django.db.models.deletion - - -class Migration(migrations.Migration): - - dependencies = [ - ('gather_vision', '0002_initial'), - ] - - operations = [ - migrations.AlterModelOptions( - name='petitionchange', - options={'ordering': ['retrieved_date']}, - ), - migrations.RemoveConstraint( - model_name='playlistentry', - name='playlist_entry_unique_playlist_order', - ), - migrations.RenameField( - model_name='playlistentry', - old_name='order', - new_name='position', - ), - migrations.AlterField( - model_name='petitionitem', - name='eligibility', - field=models.CharField(blank=True, help_text='The eligibility to sign the petition.', max_length=100), - ), - migrations.AlterField( - model_name='petitionitem', - name='sponsor', - field=models.CharField(blank=True, help_text='The name of the sponsor of the petition.', max_length=100), - ), - migrations.AlterField( - model_name='playlistitem', - name='source', - field=models.ForeignKey(help_text='The source for this playlist.', on_delete=django.db.models.deletion.CASCADE, related_name='playlists', to='gather_vision.informationsource'), - ), - migrations.AlterField( - model_name='playlisttrack', - name='source', - field=models.ForeignKey(help_text='The source for this track information.', on_delete=django.db.models.deletion.CASCADE, related_name='tracks', to='gather_vision.informationsource'), - ), - migrations.AddConstraint( - model_name='petitionitem', - constraint=models.UniqueConstraint(fields=('source', 'code'), name='petition_item_unique_source_code'), - ), - migrations.AddConstraint( - model_name='playlistentry', - constraint=models.UniqueConstraint(fields=('playlist', 'position'), name='playlist_entry_unique_playlist_position'), - ), - ] diff --git a/gather_vision/migrations/__init__.py b/gather_vision/migrations/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/gather_vision/models/__init__.py b/gather_vision/models/__init__.py deleted file mode 100644 index 54b4b6a..0000000 --- a/gather_vision/models/__init__.py +++ /dev/null @@ -1,20 +0,0 @@ -# shared -from .abstract_base import AbstractBase # noqa: F401 -from .information_source import InformationSource # noqa: F401 - -# outages -from .outage_item import OutageItem # noqa: F401 -from .outage_group import OutageGroup # noqa: F401 - -# petitions -from .petition_change import 
PetitionChange # noqa: F401 -from .petition_item import PetitionItem # noqa: F401 - -# playlists -from .playlist_item import PlaylistItem # noqa: F401 -from .playlist_track import PlaylistTrack # noqa: F401 -from .playlist_entry import PlaylistEntry # noqa: F401 - -# transport -from .transport_item import TransportItem # noqa: F401 -from .transport_line import TransportLine # noqa: F401 diff --git a/gather_vision/models/abstract_base.py b/gather_vision/models/abstract_base.py deleted file mode 100644 index d1723bf..0000000 --- a/gather_vision/models/abstract_base.py +++ /dev/null @@ -1,16 +0,0 @@ -from django.db import models - - -class AbstractBase(models.Model): - """The abstract base model for hill models.""" - - created_date = models.DateTimeField( - auto_now_add=True, help_text="The date this item was created." - ) - modified_date = models.DateTimeField( - auto_now=True, help_text="The date this item was last changed." - ) - - class Meta: - abstract = True - ordering = ["modified_date"] diff --git a/gather_vision/models/information_source.py b/gather_vision/models/information_source.py deleted file mode 100644 index 4cf17d6..0000000 --- a/gather_vision/models/information_source.py +++ /dev/null @@ -1,41 +0,0 @@ -from django.db import models - -from gather_vision.models.abstract_base import AbstractBase - - -class InformationSource(AbstractBase): - """A source that provides information.""" - - name = models.SlugField( - unique=True, - help_text="The name of the information source.", - ) - title = models.CharField( - max_length=100, - help_text="The displayed title.", - ) - info_url = models.URLField( - blank=True, - help_text="A link to details about the information source.", - ) - - def __str__(self): - return self.name - - def long_dict(self): - return { - "name": self.name, - "title": self.title, - "info_url": self.info_url, - "created_date": self.created_date, - "modified_date": self.modified_date, - } - - def long_csv(self): - return { - "name": self.name, - "title": self.title, - "info_url": self.info_url, - "created_date": self.created_date, - "modified_date": self.modified_date, - } diff --git a/gather_vision/models/outage_group.py b/gather_vision/models/outage_group.py deleted file mode 100644 index 259482b..0000000 --- a/gather_vision/models/outage_group.py +++ /dev/null @@ -1,57 +0,0 @@ -from django.db import models -from django.db.models import Max, Min - -from gather_vision.models.abstract_base import AbstractBase - - -class OutageGroup(AbstractBase): - """A group of outages.""" - - retrieved_date = models.DateTimeField( - help_text="The date this outage update was retrieved.", - ) - source_updated_date = models.DateTimeField( - null=True, - blank=True, - help_text="The date the outage info was last updated by the source.", - ) - demand = models.PositiveIntegerField( - help_text="The amount of demand.", - ) - rating = models.PositiveIntegerField( - help_text="The rating of the demand level.", - ) - total_customers = models.PositiveIntegerField( - help_text="The total number of customers affected.", - ) - - def __str__(self): - date = self.retrieved_date.date() if self.retrieved_date else None - msg = f"{self.total_customers} customers" - if date: - msg += f" affected on {date}" - return msg - - @classmethod - def get_retrieved_date_range(cls): - query = cls.objects.aggregate(Max("retrieved_date"), Min("retrieved_date")) - return { - "min": query.get("retrieved_date__min"), - "max": query.get("retrieved_date__max"), - } - - @classmethod - def get_items(cls, **kwargs): - 
query = cls.objects.order_by("source_updated_date", "retrieved_date") - if kwargs: - query = query.filter(**kwargs) - return query - - @classmethod - def get_data_items(cls, start_date, stop_date): - date_filters = { - "retrieved_date__gte": start_date, - "retrieved_date__lte": stop_date, - } - query = cls.get_items(**date_filters) - return query diff --git a/gather_vision/models/outage_item.py b/gather_vision/models/outage_item.py deleted file mode 100644 index 3f798a3..0000000 --- a/gather_vision/models/outage_item.py +++ /dev/null @@ -1,69 +0,0 @@ -from django.db import models - -from gather_vision.models.abstract_base import AbstractBase -from gather_vision.models.information_source import InformationSource - - -class OutageItem(AbstractBase): - """An electricity outage.""" - - source = models.ForeignKey( - InformationSource, - related_name="outages", - on_delete=models.CASCADE, - ) - group = models.ForeignKey( - "OutageGroup", - related_name="outages", - on_delete=models.CASCADE, - ) - event_name = models.CharField( - blank=True, - max_length=500, - help_text="The name of the outage event.", - ) - council = models.CharField( - max_length=500, - help_text="The name of the council that covers the outage location.", - ) - suburb = models.CharField( - max_length=500, - help_text="The name of the suburb that covers the outage location.", - ) - post_code = models.CharField( - blank=True, - max_length=4, - help_text="The location post code.", - ) - cause = models.CharField( - max_length=500, - help_text="The cause of the outage.", - ) - streets = models.CharField( - blank=True, - max_length=400, - help_text="The name of the streets involved in the outage.", - ) - restored_date = models.DateTimeField( - null=True, - blank=True, - help_text="The date this outage ended.", - ) - customers = models.PositiveIntegerField( - help_text="The number of customers affected.", - ) - - class Meta: - constraints = [ - models.UniqueConstraint( - fields=["group", "event_name"], - name="outage_item_unique_group_event_name", - ) - ] - - def __str__(self): - return ( - f"{self.event_name} " - f"caused by {self.cause} " - f"restored on {self.restored_date}" - ) diff --git a/gather_vision/models/petition_change.py b/gather_vision/models/petition_change.py deleted file mode 100644 index a759d5e..0000000 --- a/gather_vision/models/petition_change.py +++ /dev/null @@ -1,37 +0,0 @@ -from django.db import models -from django.db.models import Min, Max - -from gather_vision.models.petition_item import PetitionItem - -from gather_vision.models.abstract_base import AbstractBase - - -class PetitionChange(AbstractBase): - """A change in the number of signatures for a petition.""" - - petition = models.ForeignKey( - PetitionItem, - models.CASCADE, - related_name="signature_changes", - help_text="The petition.", - ) - retrieved_date = models.DateTimeField( - help_text="The date this petition update was retrieved.", - ) - signatures = models.PositiveIntegerField( - help_text="The number of signatures.", - ) - - class Meta: - ordering = ["retrieved_date"] - - def __str__(self): - return f"{self.signatures} total signatures by {self.retrieved_date}" - - @classmethod - def get_retrieved_date_range(cls): - query = cls.objects.aggregate(Max("retrieved_date"), Min("retrieved_date")) - return { - "min": query.get("retrieved_date__min"), - "max": query.get("retrieved_date__max"), - } diff --git a/gather_vision/models/petition_item.py b/gather_vision/models/petition_item.py deleted file mode 100644 index 764c560..0000000 --- 
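The query helpers being removed above (PetitionChange.get_retrieved_date_range, OutageGroup.get_items / get_data_items) are thin wrappers over the Django ORM. A minimal usage sketch, assuming the removed gather_vision.models package is importable and that django.utils.timezone supplies the date bounds:

from datetime import timedelta

from django.utils import timezone

from gather_vision.models import OutageGroup, PetitionChange

# Aggregated Max/Min over retrieved_date, returned as {"min": ..., "max": ...}.
signature_date_range = PetitionChange.get_retrieved_date_range()

# Outage groups whose retrieved_date falls inside the last week,
# ordered by source_updated_date then retrieved_date.
stop_date = timezone.now()
start_date = stop_date - timedelta(days=7)
recent_groups = OutageGroup.get_data_items(start_date, stop_date)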
a/gather_vision/models/petition_item.py +++ /dev/null @@ -1,77 +0,0 @@ -from django.db import models - -from gather_vision.models import AbstractBase, InformationSource - - -class PetitionItem(AbstractBase): - """A petition to a governing body.""" - - source = models.ForeignKey( - InformationSource, - related_name="petitions", - on_delete=models.CASCADE, - ) - title = models.CharField( - max_length=1000, - help_text="The title of the petition.", - ) - code = models.CharField( - max_length=50, - help_text="The petition reference code.", - ) - view_url = models.URLField( - help_text="The url to the petition.", - ) - principal = models.CharField( - max_length=300, - help_text="The name (and address) of the principal petitioner.", - ) - sponsor = models.CharField( - blank=True, - max_length=100, - help_text="The name of the sponsor of the petition.", - ) - eligibility = models.CharField( - blank=True, - max_length=100, - help_text="The eligibility to sign the petition.", - ) - body = models.TextField( - help_text="The text of the petition.", - ) - opened_date = models.DateField( - blank=True, - null=True, - help_text="The date this petition opened.", - ) - closed_date = models.DateField( - help_text="The date this petition closed.", - ) - - class Meta: - constraints = [ - models.UniqueConstraint( - fields=["source", "code"], - name="petition_item_unique_source_code", - ) - ] - - def __str__(self): - return f'Started {self.opened_date}: "{self.title}"' - - @classmethod - def get_items(cls, **kwargs): - query = cls.objects.order_by("opened_date", "closed_date", "code") - if kwargs: - query = query.filter(**kwargs) - query = query.prefetch_related("source", "signature_changes") - return query - - @classmethod - def get_data_items(cls, start_date, stop_date): - date_filters = { - "signature_changes__retrieved_date__gte": start_date, - "signature_changes__retrieved_date__lte": stop_date, - } - query = cls.get_items(**date_filters) - return query diff --git a/gather_vision/models/playlist_entry.py b/gather_vision/models/playlist_entry.py deleted file mode 100644 index 7eea681..0000000 --- a/gather_vision/models/playlist_entry.py +++ /dev/null @@ -1,45 +0,0 @@ -from django.db import models - -from gather_vision.models import PlaylistTrack -from gather_vision.models import PlaylistItem -from gather_vision.models.abstract_base import AbstractBase - - -class PlaylistEntry(AbstractBase): - """An entry in a playlist that is linked to a number of tracks.""" - - playlist = models.ForeignKey( - PlaylistItem, - models.CASCADE, - related_name="entries", - help_text="The playlist that contains this entry.", - ) - tracks = models.ManyToManyField( - PlaylistTrack, - related_name="entries", - help_text="The tracks that match this playlist entry.", - ) - position_change = models.IntegerField( - blank=True, - null=True, - help_text="The position change of this entry " - "compared to the previously generated playlist.", - ) - position = models.PositiveIntegerField( - help_text="The order of this entry in the playlist.", - ) - - class Meta: - verbose_name_plural = "Playlist entries" - constraints = [ - models.UniqueConstraint( - fields=["playlist", "position"], - name="playlist_entry_unique_playlist_position", - ) - ] - - def __str__(self): - if self.position_change: - return f"in {self.position} (changed {self.position_change})" - else: - return f"in {self.position}" diff --git a/gather_vision/models/playlist_item.py b/gather_vision/models/playlist_item.py deleted file mode 100644 index 5533363..0000000 --- 
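PlaylistEntry ties a PlaylistItem to one or more PlaylistTrack rows through a many-to-many field, with (playlist, position) enforced as unique. A minimal sketch, assuming playlist_item, track_a and track_b are existing saved instances of the removed models above:

from django.db import IntegrityError

from gather_vision.models import PlaylistEntry

entry = PlaylistEntry.objects.create(
    playlist=playlist_item,  # assumed existing PlaylistItem
    position=1,
)
# Many-to-many links can only be assigned once the entry has a primary key.
entry.tracks.set([track_a, track_b])  # assumed existing PlaylistTrack rows

# A second entry at position 1 in the same playlist violates
# playlist_entry_unique_playlist_position.
try:
    PlaylistEntry.objects.create(playlist=playlist_item, position=1)
except IntegrityError:
    pass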
a/gather_vision/models/playlist_item.py +++ /dev/null @@ -1,22 +0,0 @@ -from django.db import models - - -from gather_vision.models import InformationSource -from gather_vision.models.abstract_base import AbstractBase - - -class PlaylistItem(AbstractBase): - """An ordered collection of playlist entries.""" - - source = models.ForeignKey( - InformationSource, - related_name="playlists", - on_delete=models.CASCADE, - help_text="The source for this playlist.", - ) - retrieved_date = models.DateTimeField( - help_text="The date and time this playlist was retrieved.", - ) - - def __str__(self): - return f"updated on {self.retrieved_date}" diff --git a/gather_vision/models/playlist_track.py b/gather_vision/models/playlist_track.py deleted file mode 100644 index 20b9158..0000000 --- a/gather_vision/models/playlist_track.py +++ /dev/null @@ -1,50 +0,0 @@ -from django.db import models - -from gather_vision.models import InformationSource -from gather_vision.models.abstract_base import AbstractBase - - -class PlaylistTrack(AbstractBase): - """A music track.""" - - source = models.ForeignKey( - InformationSource, - related_name="tracks", - on_delete=models.CASCADE, - help_text="The source for this track information.", - ) - code = models.CharField( - max_length=100, - help_text="The unique code assigned to this track by the music source.", - ) - title = models.CharField( - max_length=500, - help_text="The title of the track.", - ) - artists = models.CharField( - max_length=800, - help_text="The artists for the track.", - ) - info_url = models.URLField( - blank=True, - help_text="A link to the information provided by the source about the track.", - ) - image_url = models.URLField( - blank=True, - help_text="A link to the art for the track cover.", - ) - musicbrainz_code = models.UUIDField( - blank=True, - null=True, - help_text="The MusicBrainz recording id for this track.", - ) - - class Meta: - constraints = [ - models.UniqueConstraint( - fields=["source", "code"], name="music_track_unique_source_code" - ) - ] - - def __str__(self): - return f"{self.title} - {self.artists} ({self.code})" diff --git a/gather_vision/models/transport_item.py b/gather_vision/models/transport_item.py deleted file mode 100644 index 5396b20..0000000 --- a/gather_vision/models/transport_item.py +++ /dev/null @@ -1,311 +0,0 @@ -from django.db import models -from django.utils.translation import gettext as _ -from gather_vision.models import InformationSource -from gather_vision.models.abstract_base import AbstractBase -from gather_vision.process.service.transport.qld_rail_events import QldRailEvents -from gather_vision.process.service.transport.translink_notices import TranslinkNotices - - -class TransportItem(AbstractBase): - """A transport notice item.""" - - source = models.ForeignKey( - InformationSource, - related_name="transport_items", - on_delete=models.CASCADE, - ) - source_identifier = models.CharField( - max_length=200, - help_text="The identifier for this transport notice from the source.", - ) - lines = models.ManyToManyField( - "TransportLine", - related_name="notices", - help_text="The lines involved in this transport notice.", - ) - title = models.CharField( - max_length=200, - help_text="The title of the transport notice.", - ) - body = models.TextField( - blank=True, - help_text="The text of the transport notice.", - ) - start_date = models.DateField( - blank=True, - null=True, - help_text="The start date of this transport notice.", - ) - stop_date = models.DateField( - blank=True, - null=True, - 
help_text="The finish date this transport notice.", - ) - is_train = models.BooleanField( - # IsTrain - help_text="Whether the transport notice includes train services.", - ) - view_url = models.URLField( - # Link - blank=True, - help_text="The url to view the transport notice.", - ) - notice_type = models.CharField( - # EventType - blank=True, - max_length=200, - help_text="The type of the transport notice.", - ) - category = models.CharField( - # Category - blank=True, - max_length=200, - help_text="The category of the transport notice.", - ) - severity = models.CharField( - # Severity - blank=True, - max_length=200, - help_text="The severity of the transport notice.", - ) - timing = models.CharField( - # When - blank=True, - max_length=200, - help_text="The timing of the transport notice.", - ) - location = models.CharField( - # Locations - blank=True, - max_length=500, - help_text="The location covered by the transport notice.", - ) - - def __str__(self): - txt = f'{self.source}: "{self.title}"' - if self.start_date: - txt += f" starting {self.start_date.isoformat()}" - if self.stop_date: - txt += f" ending {self.stop_date.isoformat()}" - return txt - - def long_str(self): - text = "" - sep = "; " - - if self.source.name: - text += { - QldRailEvents.code: QldRailEvents.short_title, - TranslinkNotices.code: TranslinkNotices.short_title, - }[self.source.name] + ":" - - if self.start_date: - text += f" starting {self.start_date.isoformat()}" - if self.stop_date: - text += f" ending {self.stop_date.isoformat()}" - - if self.title and self.body: - text += " " + self.title + " - " + self.body - elif self.title and not self.body: - text += " " + self.title - elif not self.title and self.body: - text += " " + self.body - - if self.is_train: - text += sep + "Train" - - if self.lines: - text += sep + "Lines : " + ", ".join([str(i) for i in self.get_lines()]) - - if self.notice_type: - text += sep + self.prop_notice_type - - if self.category: - text += sep + self.prop_category - - if self.severity: - text += sep + self.prop_severity - - if self.timing: - text += sep + self.prop_timing - - if self.location: - text += sep + self.prop_location - - if self.view_url: - text += sep + "Url: " + self.view_url - - return text - - def long_dict(self): - return { - "source": self.source.long_dict(), - "source_identifier": self.source_identifier, - "lines": [str(i) for i in self.get_lines()], - "title": self.title, - "body": self.body, - "start_date": self.start_date, - "stop_date": self.stop_date, - "is_train": self.is_train, - "view_url": self.view_url, - "notice_type": self.prop_notice_type, - "category": self.prop_category, - "severity": self.prop_severity, - "timing": self.prop_timing, - "location": self.prop_location, - "created_date": self.created_date, - "modified_date": self.modified_date, - } - - @classmethod - def long_csv_headers(cls): - return [ - "source_name", - "source_title", - "source_identifier", - "lines", - "title", - "body", - "start_date", - "stop_date", - "is_train", - "view_url", - "notice_type", - "category", - "severity", - "timing", - "location", - "created_date", - "modified_date", - "source_created_date", - "source_modified_date", - "source_info_url", - ] - - def long_csv(self): - source = self.source.long_csv() - source_name = source["name"] - source_title = source["title"] - source_info_url = source["info_url"] - source_created_date = source["created_date"] - source_modified_date = source["modified_date"] - return { - "source_name": source_name, - "source_title": source_title, 
- "source_info_url": source_info_url, - "source_created_date": source_created_date, - "source_modified_date": source_modified_date, - "source_identifier": self.source_identifier, - "lines": ";".join([str(i) for i in self.get_lines()]), - "title": self.title, - "body": self.body, - "start_date": self.start_date, - "stop_date": self.stop_date, - "is_train": self.is_train, - "view_url": self.view_url, - "notice_type": self.prop_notice_type, - "category": self.prop_category, - "severity": self.prop_severity, - "timing": self.prop_timing, - "location": self.prop_location, - "created_date": self.created_date, - "modified_date": self.modified_date, - } - - @property - def prop_notice_type(self): - return self.notice_type.title() if self.notice_type else "" - - @property - def prop_category(self): - return self.category.title() if self.category else "" - - @property - def prop_severity(self): - return self.severity.title() if self.severity else "" - - @property - def prop_timing(self): - return self.timing.title() if self.timing else "" - - @property - def prop_location(self): - return self.location.title() if self.location else "" - - @property - def date_range(self): - same_day = ( - self.start_date.isoformat() - if self.start_date - else "" == self.stop_date.isoformat() - if self.stop_date - else "" - ) - f = "%a, %d %b %Y" - if self.start_date and self.stop_date and not same_day: - return _( - f"From {self.start_date.strftime(f)} " - f"to {self.stop_date.strftime(f)}" - ) - if self.start_date and self.stop_date and same_day: - return _(f"On {self.start_date.strftime(f)}") - elif self.start_date and not self.stop_date: - return _(f"From {self.start_date.strftime(f)}") - elif not self.start_date and self.stop_date: - return _(f"To {self.stop_date.strftime(f)}") - else: - raise ValueError() - - @classmethod - def get_items(cls, **kwargs): - query = cls.objects.order_by("start_date", "stop_date") - if kwargs: - query = query.filter(**kwargs) - query = query.prefetch_related("source", "lines") - return query - - @classmethod - def get_items_track_closures(cls): - return cls.get_items(is_train=True, category__in=["station", "track"]) - - @classmethod - def get_items_track_access(cls): - return cls.get_items( - is_train=True, category__in=["station", "track", "accessibility", "carpark"] - ) - - @classmethod - def get_items_available(cls): - return { - "all": cls.get_items, - "track-closures": cls.get_items_track_closures, - "track-access": cls.get_items_track_access, - } - - def get_lines(self): - return sorted((i for i in self.lines.all()), key=lambda x: str(x.title)) - - @property - def tags(self): - if self.is_train: - yield {"title": "Train", "classes": "bg-dark"} - if self.notice_type: - yield {"title": f"Type: {self.prop_notice_type}", "classes": "bg-secondary"} - if self.category: - yield { - "title": f"Category: {self.prop_category}", - "classes": "bg-info text-dark", - } - if self.severity: - s = self.prop_severity - yield { - "title": f"Severity: {s}", - "classes": "bg-warning text-dark" if s != "Major" else "bg-danger", - } - if self.timing: - yield {"title": f"Timing: {self.prop_timing}", "classes": "bg-secondary"} - if self.location: - yield {"title": f"Where: {self.prop_location}", "classes": "bg-secondary"} - if self.lines: - for line in self.get_lines(): - yield {"title": f"Line: {str(line)}", "classes": "bg-secondary"} diff --git a/gather_vision/models/transport_line.py b/gather_vision/models/transport_line.py deleted file mode 100644 index 6fcd05d..0000000 --- 
a/gather_vision/models/transport_line.py +++ /dev/null @@ -1,15 +0,0 @@ -from django.db import models - -from gather_vision.models.abstract_base import AbstractBase - - -class TransportLine(AbstractBase): - """A transport network line name.""" - - title = models.CharField( - max_length=100, - help_text="The displayed title of the transport network line.", - ) - - def __str__(self): - return self.title diff --git a/gather_vision/process/__init__.py b/gather_vision/process/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/gather_vision/process/cache/__init__.py b/gather_vision/process/cache/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/gather_vision/process/cache/available_expiration_time.py b/gather_vision/process/cache/available_expiration_time.py deleted file mode 100644 index 09fbf7c..0000000 --- a/gather_vision/process/cache/available_expiration_time.py +++ /dev/null @@ -1,46 +0,0 @@ -from datetime import timedelta -from enum import Enum - - -class AvailableExpirationTime(Enum): - """ - Enumeration of available cache times. - - - NEVER_EXPIRE - cache without expiry (i.e. keep) - - DONT_CACHE - don't cache at all - - others are the given time period - """ - - NEVER_EXPIRE = "never-expire" - DONT_CACHE = "dont-cache" - FIVE_MINUTES = timedelta(minutes=5) - TEN_MINUTES = timedelta(minutes=10) - THIRTY_MINUTES = timedelta(minutes=30) - ONE_DAY = timedelta(days=1) - ONE_WEEK = timedelta(weeks=1) - ONE_MONTH = timedelta(weeks=4) - - def to_django_cache_value(self): - """Get the cache timeout in seconds.""" - if self == AvailableExpirationTime.DONT_CACHE: - # A timeout of 0 won't cache the value. - return 0 - - if self == AvailableExpirationTime.NEVER_EXPIRE: - # Passing in None for timeout will cache the value forever. 
- return None - - # django built-in cache needs the numbers of seconds for cache timeout - return self.value.seconds - - def to_requests_cache_value(self): - """Get the cache timeout value.""" - if self == AvailableExpirationTime.DONT_CACHE: - # DO_NOT_CACHE = 0 - return 0 - - if self == AvailableExpirationTime.NEVER_EXPIRE: - # Never expire = None - return None - - return self.value diff --git a/gather_vision/process/cache/external_http_cache.py b/gather_vision/process/cache/external_http_cache.py deleted file mode 100644 index 6c67ded..0000000 --- a/gather_vision/process/cache/external_http_cache.py +++ /dev/null @@ -1,79 +0,0 @@ -from importlib import import_module - -from django.core import signals -from django.core.cache.backends.base import ( - InvalidCacheBackendError, -) -from django.utils.connection import BaseConnectionHandler, ConnectionProxy -from requests import Session -from requests_cache import CachedSession - -__all__ = [ - "external_http_cache", - "external_http_caches", - "DEFAULT_EXTERNAL_HTTP_CACHE_ALIAS", -] - -from gather_vision.process.cache.available_expiration_time import ( - AvailableExpirationTime, -) - -DEFAULT_EXTERNAL_HTTP_CACHE_ALIAS = "default" - - -class ExternalHttpCacheHandler(BaseConnectionHandler): - settings_name = "EXTERNAL_HTTP_CACHES" - exception_class = InvalidCacheBackendError - - def create_connection(self, alias): - params = self.settings[alias].copy() - backend: str = params.pop("BACKEND", None) - cache_name: str = params.pop("LOCATION", None) - expires: str = params.pop("EXPIRES", None) - backend_params: dict = params.pop("BACKEND_PARAMS", {}) - - if backend is None: - return Session() - - try: - backend_package, backend_resource = backend.rsplit(".", maxsplit=1) - backend_class = getattr(import_module(backend_package), backend_resource) - backend_instance = backend_class(**backend_params) - - cache_obj = CachedSession( - cache_name=cache_name, - backend=backend_instance, - cache_control=True, - expire_after=AvailableExpirationTime[expires].to_requests_cache_value(), - timeout=30, - ) - except ImportError as e: - msg = f"Could not find backend '{backend}': {e}" - raise InvalidCacheBackendError(msg) from e - - return cache_obj - - def all(self, initialized_only=False): - return [ - self[alias] - for alias in self - # If initialized_only is True, return only initialized caches. - if not initialized_only or hasattr(self._connections, alias) - ] - - -external_http_caches = ExternalHttpCacheHandler() - -external_http_cache = ConnectionProxy( - external_http_caches, DEFAULT_EXTERNAL_HTTP_CACHE_ALIAS -) - - -def close_external_http_caches(**kwargs): - # Some caches need to do a cleanup at the end of a request cycle. If not - # implemented in a particular backend cache.close() is a no-op. 
- for item in external_http_caches.all(initialized_only=True): - item.close() - - -signals.request_finished.connect(close_external_http_caches) diff --git a/gather_vision/process/cache/local_cache.py b/gather_vision/process/cache/local_cache.py deleted file mode 100644 index 16afef9..0000000 --- a/gather_vision/process/cache/local_cache.py +++ /dev/null @@ -1,83 +0,0 @@ -from typing import Optional, Any - -from django.core.cache import caches - -from gather_vision.process.cache.available_expiration_time import ( - AvailableExpirationTime, -) -from gather_vision.process.component.logger import Logger - - -class LocalCache: - """A local cache.""" - - def __init__(self, logger: Logger, cache_alias: Optional[str] = "default"): - self._logger = logger - self._cache_alias = cache_alias - if cache_alias: - self._cache = caches[cache_alias] - self._logger.debug(f"Using local cache '{cache_alias}'.") - else: - self._cache = None - self._logger.debug(f"Not using local cache.") - - def get( - self, key: str, default=None, version: Optional[int] = None - ) -> tuple[bool, Any]: - """Retrieve a value from the cache.""" - if self._cache is None: - return False, None - - # If the literal value None can be stored in the cache, - # use a sentinel object as the default - # to determine whether the object exists in the cache. - sentinel = object() - result = self._cache.get( - key=key, - default=sentinel if default is None else default, - version=version, - ) - if result is sentinel: - return False, None - return True, result - - def set( - self, - key: str, - value: Any, - cache_time: AvailableExpirationTime = AvailableExpirationTime.NEVER_EXPIRE, - version: Optional[int] = None, - ) -> None: - """Store a value in the cache.""" - if self._cache is None: - return - - self._cache.set( - key=key, - value=value, - timeout=cache_time.to_django_cache_value(), - version=version, - ) - - def get_or_set( - self, - key: str, - value: Any, - cache_time: AvailableExpirationTime = AvailableExpirationTime.NEVER_EXPIRE, - version: Optional[int] = None, - ) -> Any: - """Store a value if there is no existing value, then retrieve the value.""" - if self._cache is None: - return None - - return self._cache.get_or_set( - key=key, - default=value, - timeout=cache_time.to_django_cache_value(), - version=version, - ) - - def clear(self): - """Empty the cache.""" - if self._cache: - self._cache.clear() diff --git a/gather_vision/process/component/__init__.py b/gather_vision/process/component/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/gather_vision/process/component/cache.py b/gather_vision/process/component/cache.py deleted file mode 100644 index e9b7f81..0000000 --- a/gather_vision/process/component/cache.py +++ /dev/null @@ -1,24 +0,0 @@ -from gather_vision.process.component.logger import Logger - -from enum import Enum -from typing import Optional, Any - -from django.core.cache import caches - - -class CacheTime(Enum): - """ - Enumeration of available cache times. - - - DONT_CACHE - don't cache. 
- - KEEP - cache without expiry - - others are the number of seconds in the given time period - """ - - DONT_CACHE = 0 - KEEP = 1 - FIVE_MINUTES = 300 - THIRTY_MINUTES = 1800 - ONE_DAY = 86400 - ONE_WEEK = 604800 - ONE_MONTH = 2629800 diff --git a/gather_vision/process/component/html_extract.py b/gather_vision/process/component/html_extract.py deleted file mode 100644 index 8d78a68..0000000 --- a/gather_vision/process/component/html_extract.py +++ /dev/null @@ -1,49 +0,0 @@ -from html.parser import HTMLParser - - -class HtmlUrlParser(HTMLParser): - links = [] - current_url = None - current_text = "" - - def handle_starttag(self, tag, attrs): - if tag != "a": - return - for name, value in attrs: - if name != "href": - continue - if self.current_url and self.current_text: - self.links.append((self.current_url, self.current_text)) - if self.current_url or self.current_text: - raise ValueError() - self.current_url = value - break - - def handle_endtag(self, tag): - if tag != "a": - return - if self.current_url and self.current_text: - self.links.append((self.current_url, self.current_text)) - - def handle_data(self, data): - if self.current_url: - self.current_text += data - - def extract(self, html: str): - self.links = [] - self.current_url = None - self.current_text = "" - self.feed(html) - return self.links - - -class HtmlDataParser(HTMLParser): - text = "" - - def handle_data(self, data): - self.text += data - - def extract(self, html: str): - self.text = "" - self.feed(html) - return self.text diff --git a/gather_vision/process/component/http_client.py b/gather_vision/process/component/http_client.py deleted file mode 100644 index 13876f5..0000000 --- a/gather_vision/process/component/http_client.py +++ /dev/null @@ -1,69 +0,0 @@ -from typing import Optional - -import requests -from requests import Session - -from gather_vision.process.cache.external_http_cache import external_http_caches -from gather_vision.process.component.logger import Logger -from gather_vision.process.component.metadata import Metadata - - -class HttpClient: - def __init__(self, logger: Logger, cache_alias: Optional[str] = "default"): - self._logger = logger - self._metadata = Metadata() - - agent_url = self._metadata.documentation_url() - agent = f"gather-vision (+{agent_url})" - self._headers = {"user-agent": agent} - self._logger.debug(f"User agent set to '{agent}'.") - - if cache_alias: - self._session = external_http_caches[cache_alias] - logger.debug(f"Using external http client cache '{cache_alias}'.") - else: - self._session = Session() - logger.debug(f"Not using an external http client cache.") - - @property - def session(self): - return self._session - - def get(self, url: str, **kwargs): - """HTTP GET""" - return self._send_request("GET", url, **kwargs) - - def head(self, url: str, **kwargs): - """HTTP HEAD""" - return self._send_request("HEAD", url, **kwargs) - - def put(self, url: str, **kwargs): - """HTTP PUT""" - return self._send_request("PUT", url, **kwargs) - - def post(self, url: str, **kwargs): - """HTTP POST""" - return self._send_request("POST", url, **kwargs) - - def _send_request(self, method: str, url: str, **kwargs): - """Send a request using the 'requests' library.""" - - # Add default headers. - # The default headers can be customised via the kwargs. - kwargs["headers"] = {**self._headers, **kwargs.get("headers", {})} - - if "timeout" not in kwargs: - # Set a default timeout. - # Surprisingly, requests does not set a default timeout. 
- # Requests only provides an easy way to set a timeout on individual requests. - kwargs["timeout"] = 30 - - result = self._session.request(method, url, **kwargs) - - if result.status_code != requests.codes.ok or not result.content: - self._logger.warning( - f"Response {result.status_code} '{result.reason}' " - f"length {len(result.content)} for {method} {url}" - ) - return None - return result diff --git a/gather_vision/process/component/ical.py b/gather_vision/process/component/ical.py deleted file mode 100644 index b849fb4..0000000 --- a/gather_vision/process/component/ical.py +++ /dev/null @@ -1,90 +0,0 @@ -from datetime import datetime -from typing import Optional - -import icalendar as cal - - -class ICal: - """Create and specify an iCalendar with events.""" - - def __init__(self, provider: str, title: str, description: str, tz: str, ttl: str): - c = cal.Calendar() - c.add("prodid", f"-//{title}//{provider}//EN") - c.add("version", "2.0") - c.add("calscale", "GREGORIAN") - c.add("method", "PUBLISH") - c.add("X-WR-CALNAME", title) - c.add("X-WR-CALDESC", description) - c.add("X-WR-TIMEZONE", tz) - c.add("X-PUBLISHED-TTL", ttl) - - self._c = c - - def get_calendar(self): - return self._c - - def add_event( - self, - title: str, - body: str, - date_start: datetime, - date_stop: datetime, - location: Optional[str] = None, - url: Optional[str] = None, - event_class: str = "PUBLIC", - uid: Optional[str] = None, - date_stamp: Optional[datetime] = None, - date_modified: Optional[datetime] = None, - date_created: Optional[datetime] = None, - sequence_num: Optional[int] = None, - ): - """Add a new event to the calendar.""" - - # vevent: - # https://datatracker.ietf.org/doc/html/rfc5545#section-3.6.1 - e = cal.Event() - e.add("summary", title) - e.add("description", body) - - # dtstart: - # https://datatracker.ietf.org/doc/html/rfc5545#section-3.8.2.4 - e.add("dtstart", date_start) - - # dtend: - # https://datatracker.ietf.org/doc/html/rfc5545#section-3.8.2.2 - e.add("dtend", date_stop) - - if location: - e.add("location", location) - - if url: - e.add("url", location) - - if event_class: - e.add("class", event_class) - - if uid: - # uid: (persistent, globally unique identifier) - e.add("uid", uid) - - if date_stamp: - # dtstamp: - # https://datatracker.ietf.org/doc/html/rfc5545#section-3.8.7.2 - e.add("dtstamp", date_stamp) - - if date_modified: - # last-modified: - # https://datatracker.ietf.org/doc/html/rfc5545#section-3.8.7.3 - e.add("last-modified", date_modified) - - if date_created: - # created: - # https://datatracker.ietf.org/doc/html/rfc5545#section-3.8.7.1 - e.add("created", date_created) - - if sequence_num is not None: - # sequence: - # https://datatracker.ietf.org/doc/html/rfc5545#section-3.8.7.4 - e.add("sequence", sequence_num) - - self._c.add_component(e) diff --git a/gather_vision/process/component/logger.py b/gather_vision/process/component/logger.py deleted file mode 100644 index 40e23e1..0000000 --- a/gather_vision/process/component/logger.py +++ /dev/null @@ -1,56 +0,0 @@ -import logging -from typing import Optional - -from django.utils import timezone - - -class Logger: - def __init__( - self, logger: Optional[logging.Logger] = None, stdout=None, style=None - ): - if logger and (stdout or style): - raise ValueError( - "Must specify at most one of logger and (stdout or style)." 
- ) - - if (stdout and not style) or (not stdout and style): - raise ValueError("Must both stdout and style.") - - if not logger and not stdout and not style: - log_fmt = "%(asctime)s [%(levelname)-8s] %(message)s" - date_fmt = "%Y-%m-%dT%H:M:%S%z" - logging.basicConfig(level=logging.DEBUG, format=log_fmt, datefmt=date_fmt) - logger = logging.getLogger() - - self._stdout = stdout - self._style = style - self._logger = logger - - def debug(self, message: str): - if self._stdout: - self._stdout_style("debug", self._style.NOTICE, message) - else: - self._logger.debug(message) - - def info(self, message: str): - if self._stdout: - self._stdout_style("info", self._style.SUCCESS, message) - else: - self._logger.info(message) - - def warning(self, message: str): - if self._stdout: - self._stdout_style("warning", self._style.WARNING, message) - else: - self._logger.warning(message) - - def error(self, message: str): - if self._stdout: - self._stdout_style("error", self._style.ERROR, message) - else: - self._logger.error(message) - - def _stdout_style(self, level: str, style, message: str): - timestamp = timezone.now().isoformat(timespec="seconds") - msg = f"{timestamp} [{level.upper():8}] {message}" - self._stdout.write(style(msg)) diff --git a/gather_vision/process/component/metadata.py b/gather_vision/process/component/metadata.py deleted file mode 100644 index 5042feb..0000000 --- a/gather_vision/process/component/metadata.py +++ /dev/null @@ -1,72 +0,0 @@ -class Metadata: - def version(self): - from_file = None - try: - from_file = self._from_file("tool", "poetry", "version") - except ValueError: - pass - - if from_file: - return from_file - - return self._from_package_version() - - def documentation_url(self): - from_file = None - try: - from_file = self._from_file("tool", "poetry", "documentation") - except ValueError: - pass - - if from_file: - return from_file - - from_package = self._from_package("Project-URL") - prefix = "Documentation" - prefix_len = len(prefix) - for i in from_package: - if i.startswith(prefix): - result = i[prefix_len:].strip(" ,") - return result - return None - - def _from_file(self, *args): - from importlib.resources import path - - with path("gather_vision", "apps.py") as p: - tom_path = (p.parent.parent / "pyproject.toml").absolute() - if tom_path.exists(): - import toml - - with open(tom_path, "rt") as f: - current = toml.load(f) - levels = [] - for arg in args: - levels.append(arg) - new_item = current.get(arg) - if not new_item: - raise ValueError( - f"Cannot find '{'.'.join(levels)}' in pyproject.toml." 
- ) - current = new_item - return current - return None - - def _from_package(self, key: str): - from importlib import metadata - - data = metadata.metadata("gather-vision") - - result = {} - for index, i in enumerate(data): - if i not in result: - result[i] = [] - result[i].append(data.values()[index]) - - return result.get(key) - - def _from_package_version(self): - from importlib import metadata - - ver = metadata.version("gather-vision") - return ver diff --git a/gather_vision/process/component/normalise.py b/gather_vision/process/component/normalise.py deleted file mode 100644 index 19eb271..0000000 --- a/gather_vision/process/component/normalise.py +++ /dev/null @@ -1,187 +0,0 @@ -import re -import unicodedata -from datetime import datetime -from typing import Iterable, Union, Optional -from zoneinfo import ZoneInfo - -from django.utils.timezone import is_aware - -from gather_vision.process.component.html_extract import HtmlUrlParser, HtmlDataParser - - -class Normalise: - - track_sep = "|" - track_sep_spaced = " | " - track_seps = [ - "[", - "]", - "{", - "}", - "(", - ")", - " ft ", - " ft. ", - " feat ", - " feat. ", - " featuring ", - " w/ ", - " x ", - ",", - " & ", - " - ", - " live at ", - " from the ", - ] - - def track( - self, - track_title: str, - primary_artists: Union[str, Iterable[str]], - featured_artists: Union[str, Iterable[str]], - ): - # normalise title - name = track_title - titles = self._track_norm_text(name) - - # normalise artists - if primary_artists and isinstance(primary_artists, str): - artist = primary_artists or "" - else: - artist = self.track_sep_spaced.join(primary_artists) - - if featured_artists and isinstance(featured_artists, str): - artist += self.track_sep_spaced + featured_artists - else: - artist += self.track_sep_spaced + self.track_sep_spaced.join( - featured_artists - ) - - artists = self._track_norm_text(artist) - - # extract title and artists - title = ([titles[0]] if len(titles) > 0 else [""])[0] - other = titles[1:] if len(titles) > 1 else [] - - primary = [artists[0]] if len(artists) > 0 else [""] - featured = artists[1:] if len(artists) > 1 else [] - featured = sorted(set(featured + other)) - - if not title or not primary: - raise ValueError(f"Invalid title '{title}' or primary artist '{primary}'.") - - # build the query strings - queries = set() - featured_count = len(featured) - while featured_count > -1: - featured_str = ", ".join(featured[0:featured_count]) - if featured_str: - featured_str = ", " + featured_str - - queries.add(f"{title} - {primary[0]}{featured_str}") - featured_count -= 1 - - queries.add(f"{title} - {primary[0]}") - - queries = list(sorted(queries, key=lambda x: len(x), reverse=True)) - - result = title, primary, featured, queries - return result - - def _track_norm_text(self, value: str): - norm = value or "" - - for sep in self.track_seps: - norm = norm.replace(sep, self.track_sep_spaced) - - norm = norm.split(self.track_sep) - - result = [] - for item in norm: - item = unicodedata.normalize("NFKC", item) - item = item.lower() - item = re.sub(r"[^\w\s\-\\'\.]+", " ", item) - item = item.replace("-", " ").replace("_", " ") - item = re.sub(r"\s+", " ", item) - item = item.strip() - if item: - result.append(item) - - return result - - def parse_date(self, value: str, tz: ZoneInfo): - if not value or not value.strip(): - return None - patterns = [ - "%a, %d %b %Y %H:%M:%S", - "%Y-%m-%dT%H:%M:%S%z", - "%Y-%m-%d %H:%M:%S", - # 4/17/2014 10:00:00 PM - "%m/%d/%Y %I:%M:%S %p", - # 16/05/2020 12:00 AM - "%d/%m/%Y %I:%M 
%p", - "%d/%m/%Y", - "%a, %d %b %Y", - # 12:00 AM - "%I:%M %p", - "%d %B %Y %I:%M %p", - "%Y-%m-%dT%H%M%S%z", - ] - for pattern in patterns: - try: - result = datetime.strptime(value.strip(), pattern) - if not is_aware(result): - result = result.replace(tzinfo=tz) - return result - except ValueError: - continue - except OverflowError: - continue - raise ValueError(f"No datetime pattern matched '{value}'.") - - def petition_text(self, value: str): - if not value: - value = "" - value = value.replace("\r\n", "\n") - value = value.replace("\n\r", "\n") - value = value.replace("\r", "\n") - value = value.replace("\n", ", ") - return value.strip() - - def regex_match( - self, - patterns: list[re.Pattern], - value: str, - unmatched_key: Optional[str] = None, - ): - for pattern in patterns: - match = pattern.search(value) - if not match: - continue - return match.groupdict() - if unmatched_key: - return {unmatched_key: value} - raise ValueError(f"No patterns matched '{value}'.") - - def norm_signatures(self, value: str): - value = value.replace("(View signature)", "") - value = value.replace("\t", "") - value = value.replace("\r", "") - value = value.replace("\n", "") - value = value.replace("signatures", "") - value = value.replace("signature", "") - value = value.strip() - sig = int(value, 10) if value else 0 - return sig - - def extract_url_text(self, html: str): - """Extract html anchor href and data.""" - parser = HtmlUrlParser() - links = parser.extract(html) - return links - - def extract_html_data(self, html: str): - """Extract plain text from html.""" - parser = HtmlDataParser() - links = parser.extract(html) - return links diff --git a/gather_vision/process/component/spotify_client.py b/gather_vision/process/component/spotify_client.py deleted file mode 100644 index bdfc418..0000000 --- a/gather_vision/process/component/spotify_client.py +++ /dev/null @@ -1,269 +0,0 @@ -import base64 -import secrets -import webbrowser -from urllib.parse import urlencode - -from zoneinfo import ZoneInfo -from requests import Response, codes - -from gather_vision.process.component.http_client import HttpClient -from gather_vision.process.component.logger import Logger - - -class SpotifyClient: - """Spotify Music client.""" - - def __init__(self, logger: Logger, http_client: HttpClient, time_zone: ZoneInfo): - self._logger = logger - self._http_client = http_client - self._time_zone = time_zone - - def playlist_get( - self, - access_token: str, - playlist_id: str, - market: str = "AU", - ) -> tuple[int, dict]: - """Get the details for a playlist and the tracks.""" - if not access_token or not playlist_id or not market: - raise ValueError("Must provide access token and playlist id and market.") - - url = f"https://api.spotify.com/v1/playlists/{playlist_id}" - params = { - "fields": "id,href,name,description,public,type,uri", - "market": market, - } - headers = { - "Authorization": f"Bearer {access_token}", - } - r = self._http_client.get(url, params=params, headers=headers) - self._check_status(r) - return r.status_code, r.json() - - def playlist_tracks_get( - self, - access_token: str, - playlist_id: str, - limit: int, - offset: int = 0, - market: str = "AU", - ) -> tuple[int, dict]: - """Get the tracks in a playlist.""" - if not access_token or not playlist_id or not market: - raise ValueError("Must provide access token and playlist id and market.") - if not limit: - limit = 100 - if not offset: - offset = 0 - - url = f"https://api.spotify.com/v1/playlists/{playlist_id}/tracks" - params = { - "fields": 
"items(track(name,id,href,external_urls," - "artists(name,images),album(images)))", - "market": market, - "limit": limit, - "offset": offset, - } - headers = { - "Authorization": f"Bearer {access_token}", - } - r = self._http_client.get(url, params=params, headers=headers) - self._check_status(r) - return r.status_code, r.json() - - def playlist_tracks_set( - self, - access_token: str, - playlist_id: str, - song_ids: list[str], - ): - """Replace songs in a playlist.""" - if not access_token or not playlist_id or not song_ids: - raise ValueError("Must provide access token and playlist id and song ids.") - - url = f"https://api.spotify.com/v1/playlists/{playlist_id}/tracks" - params = {"uris": [f"spotify:track:{song_id}" for song_id in song_ids]} - headers = { - "Authorization": f"Bearer {access_token}", - } - r = self._http_client.put(url, json=params, headers=headers) - self._check_status(r) - return r.status_code, r.json() - - def playlist_details_set( - self, - access_token: str, - playlist_id: str, - title: str, - description: str, - is_public: bool, - ): - """Set playlist details.""" - if not access_token or not playlist_id: - raise ValueError("Must provide access token and playlist id.") - if not title or not description or is_public is None: - raise ValueError("Must provide title and description and is public.") - - url = f"https://api.spotify.com/v1/playlists/{playlist_id}" - data = { - "name": title, - "description": description, - "public": True if is_public else False, - } - headers = {"Authorization": f"Bearer {access_token}"} - r = self._http_client.put(url, json=data, headers=headers) - self._check_status(r) - return r.status_code, None - - def track_query_get( - self, access_token: str, query: str, limit: int = 5 - ) -> tuple[int, dict]: - """Find matching tracks.""" - if not access_token or not query: - raise ValueError("Must provide access token and query.") - if not limit: - limit = 5 - - url = "https://api.spotify.com/v1/search" - params = { - "q": query, - "limit": limit, - "offset": 0, - "type": "track", - "market": "AU", - } - headers = { - "Authorization": f"Bearer {access_token}", - } - r = self._http_client.get(url, params=params, headers=headers) - self._check_status(r) - return r.status_code, r.json() - - def login_authorise( - self, client_id: str, redirect_uri: str, request_state: str - ) -> None: - """Get the url to obtain the Authorization Code.""" - if not client_id or not redirect_uri or not request_state: - raise ValueError( - "Must provide client id and redirect uri and request state." - ) - - qs = urlencode( - { - "client_id": client_id, - "response_type": "code", - "redirect_uri": redirect_uri, - "scope": "playlist-modify-public", - "state": request_state, - } - ) - url = "https://accounts.spotify.com/authorize?{qs}".format(qs=qs) - webbrowser.open(url, new=2) - - def login_token_first( - self, client_id: str, client_secret: str, auth_code: str, redirect_uri: str - ) -> tuple[str, str, int]: - """Get the initial access token and refresh token.""" - if not client_id or not client_secret or not auth_code or not redirect_uri: - raise ValueError( - "Must provide client id and client secret " - "and auth code and redirect uri." 
- ) - data = { - "grant_type": "authorization_code", - "code": auth_code, - "redirect_uri": redirect_uri, - "client_id": client_id, - "client_secret": client_secret, - } - r = self._http_client.post("https://accounts.spotify.com/api/token", data=data) - self._check_status(r) - response = r.json() - - access_token = response.get("access_token") - expires_in = response.get("expires_in") - refresh_token = response.get("refresh_token") - return access_token, refresh_token, expires_in - - def login_token_next( - self, - client_id: str, - client_secret: str, - refresh_token: str, - ) -> str: - """Get the next login token.""" - if not client_id or not client_secret or not refresh_token: - raise ValueError( - "Must provide client id and client secret and refresh token." - ) - - self._logger.info("Get next Spotify login.") - - data = { - "grant_type": "refresh_token", - "refresh_token": refresh_token, - } - auth_basic_token = self._login_client_auth(client_id, client_secret) - headers = { - "Authorization": f"Basic {auth_basic_token}", - } - - url = "https://accounts.spotify.com/api/token" - r = self._http_client.post(url, data=data, headers=headers) - self._check_status(r) - response = r.json() - access_token = response.get("access_token") - - if not access_token: - raise ValueError("Invalid access token.") - - return access_token - - def login_init( - self, - client_id: str, - client_secret: str, - redirect_uri: str, - ) -> tuple[str, str, int]: - """Run the initial authorisation flow.""" - if not client_id or not client_secret or not redirect_uri: - raise ValueError( - "Must provide client id and client secret and redirect uri." - ) - - # docs: - # https://developer.spotify.com/documentation/general/guides/authorization-guide/#authorization-code-flow - self._logger.info("Initialise Spotify login.") - - if not client_id or not client_secret: - raise ValueError("Must provide client_id and client_secret.") - - request_state = secrets.token_hex(10) - - self.login_authorise(client_id, redirect_uri, request_state) - auth_code = input("Enter the 'code' from the authorisation url:") - - access_token, refresh_token, expires_in = self.login_token_first( - client_id, client_secret, auth_code, redirect_uri - ) - - self._logger.warning(f"Spotify access_token: {access_token}") - self._logger.warning(f"Spotify refresh_token: {refresh_token}") - self._logger.warning(f"Spotify expires_in: {expires_in / 60.0 / 60.0} hours") - - if not access_token or not refresh_token: - raise ValueError("Invalid access token or refresh token.") - - return access_token, refresh_token, expires_in - - def _login_client_auth(self, client_id: str, client_secret: str): - """Encode the client auth token.""" - basic = f"{client_id}:{client_secret}" - basic_b64 = base64.b64encode(basic.encode()) - return basic_b64.decode() - - def _check_status(self, r: Response): - """Check the http response code.""" - expected_codes = [codes.ok, codes.created] - if r.status_code not in expected_codes: - raise ValueError(f"Error in response - {r.status_code}:{r.text}.") diff --git a/gather_vision/process/component/sqlite_client.py b/gather_vision/process/component/sqlite_client.py deleted file mode 100644 index 48184ef..0000000 --- a/gather_vision/process/component/sqlite_client.py +++ /dev/null @@ -1,36 +0,0 @@ -import sqlite3 -from pathlib import Path -from sqlite3 import Connection - -from django.utils.text import slugify - - -class SqliteClient: - def __init__(self, path: Path): - self._path = path - - def get_sqlite_db(self) -> Connection: - conn = 
sqlite3.connect(self._path) - conn.row_factory = sqlite3.Row - return conn - - def get_table_names(self, conn: Connection): - sql = ( - "SELECT name " - "FROM sqlite_master " - "WHERE type = 'table' AND name <> 'sqlite_sequence'" - ) - with conn: - c = conn.execute(sql).fetchall() - for col_names in c: - for col_name in col_names: - yield col_name - - def get_table_data(self, conn: Connection, table: str): - name = slugify(table).replace("-", "_") - sql = f"SELECT * from {name}" - with conn: - cur = conn.cursor() - cur.execute(sql) - for row in cur: - yield row diff --git a/gather_vision/process/component/time_series.py b/gather_vision/process/component/time_series.py deleted file mode 100644 index 157ab2e..0000000 --- a/gather_vision/process/component/time_series.py +++ /dev/null @@ -1,100 +0,0 @@ -from datetime import datetime, timedelta -from zoneinfo import ZoneInfo - - -class TimeSeries: - """Create data appropriate to use as Plotly trace data.""" - - def __init__(self, start_date: datetime, stop_date: datetime, tz: ZoneInfo): - self.start_date = start_date.astimezone(tz) - self.stop_date = stop_date.astimezone(tz) - self.days = (stop_date - start_date).days - - def build_date_range(self): - def daterange(range_start_date, days): - for n in range(days): - yield range_start_date + timedelta(n) - - date_range = daterange(self.start_date, self.days) - for index, current_date in enumerate(date_range): - yield index, current_date - - def petitions(self, query): - # raw = self.build_date_range() - traces = {} - for item in query: - # source_name = item.source.name - # source_title = item.source.title - - petition_title = item.title - petition_code = item.code - # petition_opened_date = item.opened_date - # petition_closed_date = item.closed_date - - traces[petition_code] = { - "type": "scatter", - "mode": "lines", - "name": petition_title, - "x": [], - "y": [], - } - - x = traces[petition_code]["x"] - y = traces[petition_code]["y"] - - for change_item in item.signature_changes.all(): - date_key = change_item.retrieved_date.strftime("%Y-%m-%d") - change_signatures = change_item.signatures - - if date_key not in x: - x.append(date_key) - y.append(change_signatures) - - else: - day_index = x.index(date_key) - if y[day_index] < change_signatures: - y[day_index] = change_signatures - - result = list(traces.values()) - return result - - def outages(self, query): - traces = [] - demand = { - "type": "scatter", - "mode": "lines", - "name": "Demand", - "x": [], - "y": [], - } - rating = { - "type": "scatter", - "mode": "lines", - "name": "Rating", - "x": [], - "y": [], - "yaxis": "y2", - } - outages = { - "type": "scatter", - "mode": "lines", - "name": "Customers Affected", - "x": [], - "y": [], - } - - traces = [demand, rating, outages] - - for item in query: - date_key = item.retrieved_date.strftime("%Y-%m-%d %H:%M:00") - if date_key not in demand["x"]: - demand["x"].append(date_key) - demand["y"].append(item.demand) - if date_key not in rating["x"]: - rating["x"].append(date_key) - rating["y"].append(item.rating) - if date_key not in outages["x"]: - outages["x"].append(date_key) - outages["y"].append(item.total_customers) - - return traces diff --git a/gather_vision/process/component/youtube_music_client.py b/gather_vision/process/component/youtube_music_client.py deleted file mode 100644 index ed989c7..0000000 --- a/gather_vision/process/component/youtube_music_client.py +++ /dev/null @@ -1,124 +0,0 @@ -from pathlib import Path -from typing import Optional, Iterable - -from zoneinfo import 
ZoneInfo -from ytmusicapi import YTMusic - -from gather_vision.process.component.http_client import HttpClient -from gather_vision.process.component.logger import Logger -from gather_vision.process.item.track import Track - - -class YoutubeMusicClient: - """YouTube Music client.""" - - def __init__(self, logger: Logger, http_client: HttpClient, time_zone: ZoneInfo): - self._logger = logger - self._http_client = http_client - self._time_zone = time_zone - - self._client: Optional[YTMusic] = None - - def playlist_tracks_get( - self, playlist_id: str, limit: Optional[int] = None - ) -> dict: - """Get the tracks in a playlist.""" - if not playlist_id: - raise ValueError("Must provide playlist id.") - - if limit is None: - raw = self._client.get_playlist(playlist_id) or {} - else: - raw = self._client.get_playlist(playlist_id, limit) or {} - return raw - - def playlist_tracks_set( - self, playlist_id: str, new_tracks: Iterable[Track], old_tracks: Iterable[Track] - ) -> bool: - """Replace songs in a playlist.""" - if not playlist_id or (not new_tracks and not old_tracks): - raise ValueError( - "Must provide playlist id and " - "at least one of new tracks and old tracks." - ) - - if old_tracks: - result = self._client.remove_playlist_items( - playlist_id, - [ - { - "videoId": t.track_id, - "setVideoId": t.raw.get("setVideoId"), - } - for t in old_tracks - ], - ) - - if result != "STATUS_SUCCEEDED": - return False - - result = self._client.add_playlist_items( - playlist_id, - [t.track_id for t in new_tracks], - source_playlist=None, - duplicates=False, - ) - if "status" not in result or result.get("status") != "STATUS_SUCCEEDED": - return False - - return True - - def playlist_details_set( - self, - playlist_id: str, - title: str = None, - description: str = None, - is_public: bool = None, - ): - """Set playlist details.""" - if not playlist_id: - raise ValueError("Must provide playlist id.") - if not title or not description or is_public is None: - raise ValueError("Must provide title and description and is public.") - - result = self._client.edit_playlist( - playlistId=playlist_id, - title=title, - description=description, - privacyStatus="PUBLIC" if is_public else "PRIVATE", - ) - return result == "STATUS_SUCCEEDED" - - def track_query_get(self, query: str, limit: int = 5) -> Iterable[dict]: - """Find matching tracks.""" - result = self._client.search( - query=query, filter="songs", limit=limit, ignore_spelling=False - ) - return result - - def login_token_next(self, credentials: str): - """Get the next login token.""" - self._logger.info("Get next YouTube Music login.") - self._client = YTMusic( - auth=credentials, requests_session=self._http_client.session - ) - - def login_init(self): - """Prompt for the initial YouTube headers.""" - self._logger.info("Initialise YouTube Music login.") - - msg = "Paste path to the request header file from https://music.youtube.com:" - file_path = input(msg) - if not file_path: - raise ValueError("Provide the file path.") - - path = Path(file_path) - if not path.is_file(): - raise ValueError(f"Invalid file path '{path}'.") - - request_headers = path.read_text() - creds_json = YTMusic.setup(filepath=None, headers_raw=request_headers) - - self._logger.warning(f"YouTubeMusic credentials: {creds_json}") - - return creds_json diff --git a/gather_vision/process/item/__init__.py b/gather_vision/process/item/__init__.py deleted file mode 100644 index ca92233..0000000 --- a/gather_vision/process/item/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .playlist import 
Playlist # noqa: F401 -from .track import Track # noqa: F401 -from .transport_event import TransportEvent # noqa: F401 diff --git a/gather_vision/process/item/contact_trace.py b/gather_vision/process/item/contact_trace.py deleted file mode 100644 index cd55636..0000000 --- a/gather_vision/process/item/contact_trace.py +++ /dev/null @@ -1,62 +0,0 @@ -from dataclasses import dataclass -from datetime import datetime - - -@dataclass -class ContactTrace: - start_datetime: datetime - stop_datetime: datetime - added_datetime: datetime - retrieved_datetime: datetime - - category: str - lgas: str - suburb: str - location: str - address: str - - @classmethod - def csv_headers(cls): - return [ - "start_datetime", - "stop_datetime", - "added_datetime", - "retrieved_datetime", - "lgas", - "suburb", - "location", - "address", - ] - - def csv_row(self): - return { - "start_datetime": self._format_datetime(self.start_datetime), - "stop_datetime": self._format_datetime(self.stop_datetime), - "added_datetime": self._format_datetime(self.added_datetime), - "retrieved_datetime": self._format_datetime(self.retrieved_datetime), - "lgas": self.lgas, - "suburb": self.suburb, - "location": self.location, - "address": self.address, - } - - def display_text(self): - return "" - - def compare(self, other: "ContactTrace"): - return all( - [ - self.start_datetime == other.start_datetime, - self.stop_datetime == other.stop_datetime, - self.added_datetime == other.added_datetime, - self.retrieved_datetime == other.retrieved_datetime, - self.category == other.category, - self.lgas == other.lgas, - self.suburb == other.suburb, - self.location == other.location, - self.address == other.address, - ] - ) - - def _format_datetime(self, value: datetime): - return value.isoformat(timespec="seconds") diff --git a/gather_vision/process/item/playlist.py b/gather_vision/process/item/playlist.py deleted file mode 100644 index 6f23859..0000000 --- a/gather_vision/process/item/playlist.py +++ /dev/null @@ -1,42 +0,0 @@ -from dataclasses import dataclass, field -from typing import Iterable - - -@dataclass(frozen=True) -class Playlist: - - name: str - title: str - - tracks: list["Track"] = field(default_factory=list, repr=False, compare=False) - - def add_track( - self, - service_name: str, - collection_name: str, - track_number: int, - track_id: str, - title: str, - primary_artists: Iterable[str], - featured_artists: Iterable[str], - queries: Iterable[str], - raw: dict, - ) -> None: - from gather_vision.process.item.track import Track - - self.tracks.append( - Track( - service_name=service_name, - collection_name=collection_name, - track_number=track_number, - track_id=track_id, - title=title, - primary_artists=list(primary_artists), - featured_artists=list(featured_artists), - queries=list(queries), - raw=raw, - ) - ) - - def __str__(self): - return f"{self.title} ({self.name}) with {len(self.tracks)} tracks" diff --git a/gather_vision/process/item/playlist_conf.py b/gather_vision/process/item/playlist_conf.py deleted file mode 100644 index 9ad214b..0000000 --- a/gather_vision/process/item/playlist_conf.py +++ /dev/null @@ -1,12 +0,0 @@ -from dataclasses import dataclass - - -@dataclass(frozen=True) -class PlaylistConf: - - source_code: str - source_collection: str - - target_code: str - target_playlist_id: str - target_title: str diff --git a/gather_vision/process/item/track.py b/gather_vision/process/item/track.py deleted file mode 100644 index bf7c119..0000000 --- a/gather_vision/process/item/track.py +++ /dev/null @@ -1,81 +0,0 @@ 
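# A minimal sketch of how the removed Playlist and Track items fit together
# (runnable only against the pre-change tree, since this patch deletes the
# modules; all argument values are illustrative):
from gather_vision.process.item.playlist import Playlist

playlist = Playlist(name="radio_4zzz_most_played", title="4ZZZ Most Played")
playlist.add_track(
    service_name="radio_4zzz",
    collection_name="most_played",
    track_number=1,
    track_id="example-track-id",
    title="Example Song",
    primary_artists=["Example Artist"],
    featured_artists=[],
    queries=["Example Artist Example Song"],
    raw={},
)
print(playlist)  # 4ZZZ Most Played (radio_4zzz_most_played) with 1 tracks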
-from dataclasses import dataclass - -from django.utils.text import slugify - - -@dataclass(frozen=True) -class Track: - service_name: str - collection_name: str - track_number: int - track_id: str - title: str - primary_artists: list[str] - featured_artists: list[str] - queries: list[str] - raw: dict - - def matches_model(self, model: "PlaylistTrack"): - """Match this track to a playlist track model using service and id.""" - track_source_name = f"{self.service_name}_{self.collection_name}" - return track_source_name == model.source.name and model.code == self.track_id - - def matches_track_title_artists(self, other: "Track"): - """Match this track to another track using title and artists.""" - - if slugify(self.title) != slugify(other.title): - return False - - self_primary = sorted([slugify(i) for i in self.primary_artists]) - self_featured = sorted([slugify(i) for i in self.featured_artists]) - self_artists = sorted(self_primary + self_featured) - - other_primary = sorted([slugify(i) for i in other.primary_artists]) - other_featured = sorted([slugify(i) for i in other.featured_artists]) - other_artists = sorted(other_primary + other_featured) - - if self_primary == other_primary and self_featured == other_featured: - return True - - if self_artists == other_artists: - return True - - if all([i in self_artists for i in other_artists]): - return True - - if all([i in other_artists for i in self_artists]): - return True - - return False - - def matches_track_service(self, other: "Track"): - """Match this track to another track using service and id.""" - return all( - [ - self.service_name == other.service_name, - self.collection_name == other.collection_name, - self.track_id == other.track_id, - ] - ) - - @property - def iter_artists(self): - artists = (self.primary_artists or []) + (self.featured_artists or []) - for i in sorted(range(len(artists)), reverse=True): - start_index = 0 - stop_index = i + 1 - yield artists[start_index:stop_index] - - def __str__(self): - return ":".join( - [ - self.service_name, - self.collection_name, - str(self.track_number), - ", ".join(self.primary_artists), - self.title, - ] - ) - - def _compare_artists(self, a: list[str], b: list[str]): - return sorted([slugify(i) for i in a]) == sorted([slugify(i) for i in b]) diff --git a/gather_vision/process/item/transport_event.py b/gather_vision/process/item/transport_event.py deleted file mode 100644 index 379092d..0000000 --- a/gather_vision/process/item/transport_event.py +++ /dev/null @@ -1,39 +0,0 @@ -from dataclasses import dataclass -from datetime import datetime - - -@dataclass(frozen=True) -class TransportEvent: - raw: dict - - title: str - description: str - - tags: list[tuple[str, str]] - - lines: list[str] - - source_id: str - source_name: str - - event_start: datetime - event_stop: datetime - - @property - def sort_str(self): - return "-".join( - [ - str(self.event_start) if self.event_start else "", - str(self.event_stop) if self.event_stop else "", - ] - ) - - def __str__(self): - result = [ - ("source", f"{self.source_name}-{self.source_id}"), - ("title", self.title), - ("start", self.event_start.isoformat() if self.event_start else ""), - ("stop", self.event_stop.isoformat() if self.event_stop else ""), - ("lines", ", ".join(self.lines or [])), - ] - return "; ".join(f"{k}={v}" for k, v in result) diff --git a/gather_vision/process/manage/__init__.py b/gather_vision/process/manage/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git 
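# The removed Track.iter_artists property above drives the search fallback in
# the playlist manager: it yields progressively shorter artist lists, widest
# first, so a streaming-service search can be retried with fewer artists.
# A standalone restatement of that slicing (artist names are illustrative):
artists = ["Primary Artist", "Featured Artist A", "Featured Artist B"]
for stop in range(len(artists), 0, -1):
    print(artists[:stop])
# ['Primary Artist', 'Featured Artist A', 'Featured Artist B']
# ['Primary Artist', 'Featured Artist A']
# ['Primary Artist']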
a/gather_vision/process/manage/contact_tracing.py b/gather_vision/process/manage/contact_tracing.py deleted file mode 100644 index 3b5a507..0000000 --- a/gather_vision/process/manage/contact_tracing.py +++ /dev/null @@ -1,14 +0,0 @@ -from zoneinfo import ZoneInfo - -from gather_vision.process.component.http_client import HttpClient -from gather_vision.process.component.logger import Logger - - -class ContactTracing: - def __init__(self, logger: Logger, tz: ZoneInfo, http_client: HttpClient): - self._logger = logger - self._http_client = http_client - self._tz = tz - - def run_update(self): - pass diff --git a/gather_vision/process/manage/outages.py b/gather_vision/process/manage/outages.py deleted file mode 100644 index e0ba3a9..0000000 --- a/gather_vision/process/manage/outages.py +++ /dev/null @@ -1,48 +0,0 @@ -from pathlib import Path -from zoneinfo import ZoneInfo - -import gather_vision.models as app_models -from gather_vision.process.component.http_client import HttpClient -from gather_vision.process.component.logger import Logger -from gather_vision.process.component.normalise import Normalise -from gather_vision.process.service.outages.energex_events import EnergexEvents -from gather_vision.process.service.outages.energex_import import EnergexImport - - -class Outages: - def __init__(self, logger: Logger, tz: ZoneInfo, http_client: HttpClient): - normalise = Normalise() - - self._logger = logger - self._http_client = http_client - self._normalise = normalise - self._tz = tz - - def run_update(self): - self.create_energex() - ee = EnergexEvents( - self._logger, - self._http_client, - self._normalise, - self._tz, - ) - ee.update_outages() - - def run_import(self, path: Path): - self.create_energex() - ei = EnergexImport( - self._logger, - self._normalise, - self._tz, - ) - ei.import_outages(path) - - def create_energex(self): - obj, created = app_models.InformationSource.objects.get_or_create( - name=EnergexEvents.code, - defaults={ - "title": "Energex", - "info_url": "https://www.energex.com.au", - }, - ) - return obj, created diff --git a/gather_vision/process/manage/petitions.py b/gather_vision/process/manage/petitions.py deleted file mode 100644 index cf5a203..0000000 --- a/gather_vision/process/manage/petitions.py +++ /dev/null @@ -1,65 +0,0 @@ -from pathlib import Path -from zoneinfo import ZoneInfo - -from gather_vision import models as app_models -from gather_vision.process.component.http_client import HttpClient -from gather_vision.process.component.logger import Logger -from gather_vision.process.component.normalise import Normalise -from gather_vision.process.service.petition.au_qld import AuQld -from gather_vision.process.service.petition.au_qld_bcc import AuQldBcc -from gather_vision.process.service.petition.petition_import import PetitionImport - - -class Petitions: - def __init__(self, logger: Logger, tz: ZoneInfo, http_client: HttpClient): - normalise = Normalise() - self._logger = logger - self._http_client = http_client - self._normalise = normalise - self._tz = tz - - def run_update(self) -> None: - self.create_au_qld() - pu_au_qld = AuQld( - self._logger, - self._http_client, - self._normalise, - self._tz, - ) - pu_au_qld.update_petitions() - - self.create_au_qld_bcc() - pu_au_qld_bcc = AuQldBcc( - self._logger, - self._http_client, - self._normalise, - self._tz, - ) - pu_au_qld_bcc.update_petitions() - - def run_import(self, path: Path) -> None: - self.create_au_qld() - self.create_au_qld_bcc() - pi = PetitionImport(self._logger, self._normalise, self._tz) - 
pi.import_petitions(path) - - def create_au_qld(self): - url = "https://www.parliament.qld.gov.au/Work-of-the-Assembly/Petitions" - obj, created = app_models.InformationSource.objects.get_or_create( - name=AuQld.code, - defaults={ - "title": "Queensland Government Petitions", - "info_url": url, - }, - ) - return obj, created - - def create_au_qld_bcc(self): - obj, created = app_models.InformationSource.objects.get_or_create( - name=AuQldBcc.code, - defaults={ - "title": "Brisbane City Council Petitions", - "info_url": "https://www.epetitions.brisbane.qld.gov.au/", - }, - ) - return obj, created diff --git a/gather_vision/process/manage/playlists.py b/gather_vision/process/manage/playlists.py deleted file mode 100644 index 51e38dd..0000000 --- a/gather_vision/process/manage/playlists.py +++ /dev/null @@ -1,375 +0,0 @@ -from datetime import datetime, timedelta -from typing import Union, Iterable -from zoneinfo import ZoneInfo - -from django.conf import settings -from django.utils import timezone - -from gather_vision import models as app_models -from gather_vision.process import item as app_items -from gather_vision.process.component.http_client import HttpClient -from gather_vision.process.component.logger import Logger -from gather_vision.process.component.normalise import Normalise -from gather_vision.process.service.playlist import abstract as service_mixins -from gather_vision.process.service.playlist.abc_radio import AbcRadio -from gather_vision.process.service.playlist.abstract import ( - PlaylistDetails, - PlaylistSource, - PlaylistTarget, -) -from gather_vision.process.service.playlist.last_fm import LastFm -from gather_vision.process.service.playlist.radio_4zzz import Radio4zzz -from gather_vision.process.service.playlist.spotify import Spotify -from gather_vision.process.service.playlist.youtube_music import ( - YoutubeMusic, -) - - -class Playlists: - def __init__(self, logger: Logger, tz: ZoneInfo, http_client: HttpClient): - normalise = Normalise() - - spotify = Spotify(logger, http_client, normalise, tz) - yt_music = YoutubeMusic(logger, http_client, normalise, tz) - - abc_radio = AbcRadio(logger, http_client, normalise, tz) - last_fm = LastFm(logger, http_client, normalise, tz) - radio_4zzz = Radio4zzz(logger, http_client, normalise, tz) - - self._logger = logger - self._tz = tz - - self._http_client = http_client - self._normalise = normalise - - self._spotify = spotify - self._yt_music = yt_music - - self._abc_radio = abc_radio - self._last_fm = last_fm - self._radio_4zzz = radio_4zzz - - self._services: dict[ - str, Union[PlaylistDetails, PlaylistSource, PlaylistTarget] - ] = dict( - [ - (s.code, s) - for s in [ - self._spotify, - self._yt_music, - self._abc_radio, - self._last_fm, - self._radio_4zzz, - ] - ] - ) - - def run_init(self): - self._logger.info("Initialising playlists.") - - # self.init_spotify() - self.init_youtube_music() - - self._logger.info("Finished initialising playlists.") - - def init_spotify(self): - client_id = settings.SPOTIFY_AUTH_CLIENT_ID - client_secret = settings.SPOTIFY_AUTH_CLIENT_SECRET - redirect_uri = settings.SPOTIFY_AUTH_REDIRECT_URI - self._spotify.login_init(client_id, client_secret, redirect_uri) - - def init_youtube_music(self): - self._yt_music.login_init() - - def run_update(self): - self._logger.info("Updating playlists.") - - # create info sources - self.create_information_sources() - - # login - sp_client_id = settings.SPOTIFY_AUTH_CLIENT_ID - sp_client_secret = settings.SPOTIFY_AUTH_CLIENT_SECRET - sp_refresh_token = 
settings.SPOTIFY_AUTH_REFRESH_TOKEN - self._spotify.login_next(sp_client_id, sp_client_secret, sp_refresh_token) - - ym_config = settings.YOUTUBE_MUSIC_AUTH_CONFIG - self._yt_music.login_next(ym_config) - - lf_api_key = settings.LASTFM_AUTH_API_KEY - self._last_fm.login_next(lf_api_key) - - # record when retrieval was performed - retrieved_date = timezone.now() - - # don't update recently changed playlist entries, source tracks, and - # streaming service tracks - playlist_entries_time = 3 - playlist_entries_age = retrieved_date - timedelta(hours=playlist_entries_time) - - # build parameters - current_time = datetime.now(tz=self._tz) - start_date = current_time - timedelta(days=8) - end_date = current_time - timedelta(days=1) - limit = 100 - - # for each playlist source - playlist_settings = settings.PLAYLIST_SOURCES_TARGETS - for playlist_setting in playlist_settings: - source_service: PlaylistSource = self._services.get( - playlist_setting.source_code - ) - source_collection = playlist_setting.source_collection - - target_service: PlaylistTarget = self._services.get( - playlist_setting.target_code - ) - target_playlist_id = playlist_setting.target_playlist_id - target_title = playlist_setting.target_title - - playlist = source_service.get_playlist_tracks( - playlist_setting.source_code, - source_collection, - target_title, - start_date, - end_date, - limit, - ) - - # create/update the playlist model - source_model = app_models.InformationSource.objects.get( - name=source_service.code - ) - ( - playlist_model, - playlist_created, - ) = app_models.PlaylistItem.objects.update_or_create( - source=source_model, defaults={"retrieved_date": retrieved_date} - ) - old_tracks_model = list( - app_models.PlaylistTrack.objects.filter( - source__name=playlist.name, entries__playlist=playlist_model - ).prefetch_related("source", "entries") - ) - new_tracks_source = playlist.tracks - - # update the playlist entries if they weren't recently updated - # TODO: if not updating, then use the existing stored playlist entries / tracks from the source - # Otherwise, the source might've been updated, and the streaming tracks won't match. - if ( - not playlist_created - and playlist_model.retrieved_date > playlist_entries_age - ): - self._logger.warning( - f"Not updating playlist '{playlist.name}' as it was " - f"last updated less than {playlist_entries_time} hours ago." 
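# Each settings.PLAYLIST_SOURCES_TARGETS entry read in the loop above exposes
# source_code, source_collection, target_code, target_playlist_id and
# target_title, which appears to match the removed PlaylistConf dataclass.
# A sketch of what one entry would look like (all concrete values are
# placeholders, not real configuration):
from gather_vision.process.item.playlist_conf import PlaylistConf

PLAYLIST_SOURCES_TARGETS = [
    PlaylistConf(
        source_code="radio_4zzz",
        source_collection="most_played",
        target_code="spotify",
        target_playlist_id="example-spotify-playlist-id",
        target_title="4ZZZ Most Played",
    ),
]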
- ) - else: - # update the playlist entries - self.update_playlist_model( - source_model, - playlist_model, - source_service, - old_tracks_model, - new_tracks_source, - ) - - # update the streaming service tracks - self.update_playlist_tracks( - playlist_model, - playlist, - target_service, - target_playlist_id, - new_tracks_source, - ) - - self._logger.info("Finished updating playlists.") - - def create_information_sources(self): - # playlist sources - raw = [ - (self._radio_4zzz, "https://4zzz.org.au/"), - (self._abc_radio, "https://www.abc.net.au/triplej/"), - (self._last_fm, "https://www.last.fm/"), - ] - for service, url in raw: - app_models.InformationSource.objects.update_or_create( - name=service.code, - defaults={ - "title": service.title, - "info_url": url, - }, - ) - - # streaming services - raw = [ - (self._spotify, "https://www.spotify.com/"), - (self._yt_music, "https://music.youtube.com/"), - ] - for service, url in raw: - app_models.InformationSource.objects.update_or_create( - name=service.code, - defaults={ - "title": service.title, - "info_url": url, - }, - ) - - def update_playlist_model( - self, - source_model: app_models.InformationSource, - playlist_model: app_models.PlaylistItem, - playlist_service: Union[ - service_mixins.PlaylistSource, service_mixins.PlaylistDetails - ], - old_tracks_model: Iterable[app_models.PlaylistTrack], - new_tracks_source: Iterable[app_items.Track], - ): - """Update the stored playlist entries and source tracks.""" - - # for each new playlist entry - new_entries = [] - for new_track_source in new_tracks_source: - # check if there is a matching existing playlist entry - old_track_model = [ - i for i in old_tracks_model if new_track_source.matches_model(i) - ] - if len(old_track_model) > 1: - raise ValueError([new_track_source, old_track_model]) - - entry = None - if old_track_model: - old_track_model = old_track_model[0] - try: - entry = app_models.PlaylistEntry.objects.get( - playlist=playlist_model, tracks__pk=old_track_model.pk - ) - if entry.position and new_track_source.track_number: - entry.position_change = ( - entry.position - new_track_source.track_number - ) - if new_track_source.track_number: - entry.position = new_track_source.track_number - - except app_models.PlaylistEntry.DoesNotExist: - pass - - # create/update the model for the new track - new_playlist_track_model = playlist_service.get_model_track( - source_model, new_track_source - ) - - # build new playlist entry if none exists - if not entry: - entry = app_models.PlaylistEntry( - playlist=playlist_model, position=new_track_source.track_number - ) - - new_entries.append((entry, new_playlist_track_model)) - - # delete the current entries in the database for this playlist - delete_result = app_models.PlaylistEntry.objects.filter( - playlist=playlist_model - ).delete() - self._logger.info(f"Deleted old entries: {delete_result}.") - - # save the new entries and tracks for this playlist - for new_entry, new_entry_track in new_entries: - new_entry.save() - new_entry.tracks.add(new_entry_track) - self._logger.info(f"Created {len(new_entries)} new playlist entries.") - - def update_playlist_tracks( - self, - playlist_model: app_models.PlaylistItem, - playlist_source: app_items.Playlist, - streaming_service: Union[ - service_mixins.PlaylistDetails, - service_mixins.PlaylistTarget, - service_mixins.PlaylistSource, - ], - playlist_service_id: str, - new_tracks_source: Iterable[app_items.Track], - ): - """Update the stored playlist tracks from a streaming service.""" - - # get the 
streaming service information - service_model = app_models.InformationSource.objects.get( - name=streaming_service.code - ) - old_tracks_service = streaming_service.get_playlist_tracks( - playlist_service_id, playlist_source.name - ) - - # assess the tracks and build the models - new_entries = [] - old_track_service_matches = [] - for new_track_source in new_tracks_source: - try: - entry = app_models.PlaylistEntry.objects.get( - playlist=playlist_model, tracks__code=new_track_source.track_id - ) - except app_models.PlaylistEntry.DoesNotExist: - a = 1 - - # service - track_service = [ - i - for i in old_tracks_service.tracks - if new_track_source.matches_track_title_artists(i) - ] - if len(track_service) > 1: - raise ValueError([new_track_source, track_service]) - elif track_service: - track_service = track_service[0] - old_track_service_matches.append(track_service) - else: - track_service = self.select_service_track( - streaming_service, new_track_source - ) - - # create/update the model for the new track - if track_service: - new_playlist_track_model = streaming_service.get_model_track( - service_model, track_service - ) - - # store - new_entries.append((entry, new_playlist_track_model)) - - # tracks that didn't match - for old_track_service in old_tracks_service.tracks: - if old_track_service not in old_track_service_matches: - self._logger.warning( - f"Did not find '{old_track_service}' " - f"in '{streaming_service.code}'." - ) - - # save the new entries and tracks for this playlist - for new_entry, new_entry_track in new_entries: - new_entry.tracks.add(new_entry_track) - self._logger.info(f"Created {len(new_entries)} new playlist entries.") - - return None - - def select_service_track( - self, - streaming_service: service_mixins.PlaylistTarget, - new_track_source: app_items.Track, - ): - for artists in new_track_source.iter_artists: - tracks_service = streaming_service.search_tracks( - playlist_name=new_track_source.collection_name, - track=new_track_source.title, - artists=artists, - ) - # compare all artist - for track_service in tracks_service: - if new_track_source.matches_track_title_artists(track_service): - return track_service - return None - - def _build_streaming_service_identifier( - self, playlist_code: str, playlist_collection: str - ): - return "_".join([playlist_code, playlist_collection]).casefold().upper() diff --git a/gather_vision/process/manage/transport.py b/gather_vision/process/manage/transport.py deleted file mode 100644 index f2e8e27..0000000 --- a/gather_vision/process/manage/transport.py +++ /dev/null @@ -1,191 +0,0 @@ -from datetime import datetime, timedelta -from typing import Optional -from zoneinfo import ZoneInfo - -from django.utils import timezone -from django.utils.text import slugify - -from gather_vision import models as app_models -from gather_vision.process.component.http_client import HttpClient -from gather_vision.process.component.logger import Logger -from gather_vision.process.component.normalise import Normalise -from gather_vision.process.item.transport_event import TransportEvent -from gather_vision.process.service.transport.qld_rail_events import QldRailEvents -from gather_vision.process.service.transport.translink_notices import TranslinkNotices - - -class Transport: - - train_lines = { - "ferny-grove": "D02130", - "beenleigh": "D02130", - "shorncliffe": "00467E", - "cleveland": "00467E", - "airport": "FFC420", - "gold-coast": "FFC420", - "caboolture": "008752", - "sunshine-coast": "008752", - "ipswich": "008752", - "rosewood": 
"008752", - "ipswichrosewood": "008752", - "redcliffe-peninsula": "0B79BD", - "springfield": "0B79BD", - "doomben": "A5449A", - "inner-city": "FFFFFF", - } - - def __init__(self, logger: Logger, tz: ZoneInfo, http_client: HttpClient): - normalise = Normalise() - - self._logger = logger - self._http_client = http_client - self._normalise = normalise - self._tz = tz - - def run_update(self): - self._logger.info("Updating transport notices.") - self.create_au_qld_translink() - self.create_au_qld_rail() - - tl = TranslinkNotices( - self._logger, - self._http_client, - self._normalise, - self._tz, - ) - qr = QldRailEvents( - self._logger, - self._http_client, - self._normalise, - self._tz, - ) - - month_ago = timezone.now() - timedelta(days=30) - - sources = { - TranslinkNotices.code: app_models.InformationSource.objects.get( - name=TranslinkNotices.code - ), - QldRailEvents.code: app_models.InformationSource.objects.get( - name=QldRailEvents.code - ), - } - - events_seen = 0 - events_created = 0 - events_updated = 0 - - events = self.get_events(tl, qr, month_ago) - for event in events: - self._tidy(event) - obj, created = self.update_event(sources, event) - - events_seen += 1 - if created: - events_created += 1 - else: - events_updated += 1 - - if events_seen % 20 == 0: - self._logger.info( - f"Running total notices {events_seen} " - f"({events_updated} updated, {events_created} created)." - ) - - self._logger.info( - f"Notices {events_seen} " - f"({events_updated} updated, {events_created} created)." - ) - self._logger.info("Finished updating transport notices.") - - def update_event(self, sources, event: TransportEvent): - lines = [] - for line_str in event.lines: - line, _ = app_models.TransportLine.objects.get_or_create(title=line_str) - lines.append(line) - - obj, created = app_models.TransportItem.objects.update_or_create( - source=sources[event.source_name], - source_identifier=event.source_id, - defaults={ - "title": event.title, - "body": event.description, - "start_date": event.event_start, - "stop_date": event.event_stop, - "is_train": self._get_tag_values(event.tags, "IsTrain") == "Yes", - "view_url": self._get_tag_values(event.tags, "Link"), - "notice_type": self._get_tag_values(event.tags, "EventType"), - "category": self._get_tag_values(event.tags, "Category"), - "severity": self._get_tag_values(event.tags, "Severity"), - "timing": self._get_tag_values(event.tags, "When"), - "location": self._get_tag_values(event.tags, "Locations"), - }, - ) - obj.lines.set(lines) - return obj, created - - def get_events(self, tl, qr, threshold_date: datetime): - for event in tl.fetch(): - if self.keep(event, threshold_date): - yield event - - for event in qr.fetch(): - if self.keep(event, threshold_date): - yield event - - def keep( - self, event: TransportEvent, threshold_date: datetime - ) -> Optional[TransportEvent]: - """Remove events without dates or ended before threshold date.""" - has_start = event.event_start is not None - has_stop = event.event_stop is not None - if not has_start and not has_stop: - # ignore events with no dates - return None - - elif has_stop and event.event_stop < threshold_date: - # ignore events where the stop is before the filter date - return None - - # include events where start date is before filter date - # as some events have only start dates (continuing events) - return event - - def _tidy(self, event: TransportEvent): - is_train = False - for index, line in enumerate(event.lines): - line_str = str(line) - if line_str.endswith(" Line"): - line_str = 
line_str[0:-5] - event.lines[index] = line_str - line_str = slugify(line_str) - if line_str in self.train_lines: - is_train = True - - if is_train: - event.tags.append(("IsTrain", "Yes")) - - def _get_tag_values(self, tags: list[tuple[str, str]], key: str): - value = ", ".join(sorted(set([v for k, v in tags if k == key]))) - return value - - def create_au_qld_translink(self): - - (obj, created) = app_models.InformationSource.objects.get_or_create( - name=TranslinkNotices.code, - defaults={ - "title": TranslinkNotices.title, - "info_url": TranslinkNotices.page_url, - }, - ) - return obj, created - - def create_au_qld_rail(self): - obj, created = app_models.InformationSource.objects.get_or_create( - name=QldRailEvents.code, - defaults={ - "title": QldRailEvents.title, - "info_url": QldRailEvents.page_url, - }, - ) - return obj, created diff --git a/gather_vision/process/service/__init__.py b/gather_vision/process/service/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/gather_vision/process/service/contact_tracing/__init__.py b/gather_vision/process/service/contact_tracing/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/gather_vision/process/service/contact_tracing/au_qld.py b/gather_vision/process/service/contact_tracing/au_qld.py deleted file mode 100644 index c972e2b..0000000 --- a/gather_vision/process/service/contact_tracing/au_qld.py +++ /dev/null @@ -1,190 +0,0 @@ -import re -from datetime import datetime, tzinfo -from typing import Optional - -from lxml import html - -from gather_vision.process.component.http_client import HttpClient -from gather_vision.process.item.contact_trace import ContactTrace - - -class AuQld: - """Retrieves and updates info about QLD contact tracing.""" - - default_url = "https://www.qld.gov.au/health/conditions/health-alerts/coronavirus-covid-19/current-status/contact-tracing" - - def __init__( - self, - logger: logging.Logger, - timezone: tzinfo, - http_client: HttpClient, - url: Optional[str] = None, - ): - self._logger = logger - self._timezone = timezone - self._url = url or self.default_url - self._http_client = http_client - - def get_new_items(self): - response = self.get_page() - last_updated, rows = self.get_data(response) - items = self.get_items(rows) - return items - - def get_page(self): - return self._http_client.get(self._url) - - def get_data(self, response): - tree = html.fromstring(response.text) - - if tree is None: - raise ValueError("No html available.") - - marker = tree.xpath('//div[@id="newrows-202041"]') - if len(marker) != 1: - raise ValueError("Could not find marker.") - - marker = marker[0] - - last_updated = marker.xpath("./preceding-sibling::p/text()") - if len(last_updated) < 1: - raise ValueError("Could not find paras.") - - last_updated = last_updated[-1] - if "This table last updated" not in last_updated: - raise ValueError("Could not find last updated.") - - table = marker.xpath(".//table") - if len(table) != 1: - raise ValueError("Could not find table.") - - table = table[0] - - rows = table.xpath(".//tbody//tr") - return last_updated, rows - - def get_items(self, rows): - date_retrieved = datetime.now(tz=self._timezone) - - for row in rows: - data_date = row.get("data-date") - data_lgas = row.get("data-lgas") - # data_advice = row.get("data-advice") - # data_location = row.get("data-location") - # data_address = row.get("data-address") - # data_suburb = row.get("data-suburb") - # data_datetext = row.get("data-datetext") - # data_timetext = row.get("data-timetext") - - # 
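# Transport events above carry their metadata as (key, value) tag pairs; the
# removed Transport._get_tag_values collapses duplicate values for a key like
# this (the sample tags are illustrative):
tags = [("Category", "Major"), ("Link", "https://example.org/notice"), ("Category", "Major")]
key = "Category"
print(", ".join(sorted(set(v for k, v in tags if k == key))))  # Major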
e.g. '2021-12-19T15:35' - data_added = datetime.strptime( - self._tidy(row.get("data-added")), "%Y-%m-%dT%H:%M" - ) - - cells = row.xpath("./td") - - td_exposure_date = cells[0].text - - # td_place = cells[1].xpath(".//text()") - td_location = cells[1].xpath('./span[@class="location"]') - if len(td_location) == 1: - td_location = td_location[0].text - else: - td_location = None - - td_address = cells[1].xpath('./span[@class="address"]') - if len(td_address) == 1: - td_address = td_address[0].text - else: - td_address = None - - td_suburb = cells[2].text - td_exposure_times = cells[3].text - td_category = cells[4].text - - start_datetime, stop_datetime = self._parse_exposures( - data_date, td_exposure_date, td_exposure_times - ) - - yield ContactTrace( - start_datetime=start_datetime, - stop_datetime=stop_datetime, - added_datetime=data_added, - retrieved_datetime=date_retrieved, - category=self._tidy(td_category), - lgas=self._tidy(data_lgas), - suburb=self._tidy(td_suburb), - location=self._tidy(td_location), - address=self._tidy(td_address), - ) - - def _parse_exposures( - self, - data_date: str, - td_exposure_date: str, - td_exposure_times: str, - ): - # td date - # e.g. 'Sunday 19 December 2021' - parsed_date = datetime.strptime(self._tidy(td_exposure_date), "%A %d %B %Y") - - # td times - # e.g. '11.32am - 11.40am': '11.32am' and '11.40am' - parsed_times = [] - for i in td_exposure_times.split(" - "): - parsed = False - for pattern in ["%I.%M%p", "%I%p"]: - try: - parsed_times.append(datetime.strptime(i, pattern)) - parsed = True - except ValueError: - continue - if not parsed: - raise ValueError(f"Could not parse '{i}'.") - - if not parsed_times: - raise ValueError() - - # date datetime start - # e.g. '2021-12-19T15:35' - parsed_datetime_start = datetime.strptime( - self._tidy(data_date), "%Y-%m-%dT%H:%M" - ) - parsed_datetime_start = parsed_datetime_start.replace(tzinfo=self._timezone) - - if len(parsed_times) > 0: - start_datetime = datetime( - parsed_date.year, - parsed_date.month, - parsed_date.day, - parsed_times[0].hour, - parsed_times[0].minute, - tzinfo=self._timezone, - ) - else: - raise ValueError() - - if parsed_datetime_start != start_datetime: - raise ValueError() - - if len(parsed_times) == 2: - stop_datetime = datetime( - parsed_date.year, - parsed_date.month, - parsed_date.day, - parsed_times[1].hour, - parsed_times[1].minute, - tzinfo=self._timezone, - ) - - else: - stop_datetime = start_datetime - - return start_datetime, stop_datetime - - def _tidy(self, value: str): - if not value or not value.strip(): - return "" - value = value.strip() - value = re.sub(r"\s+", " ", value) - return value diff --git a/gather_vision/process/service/discord/__init__.py b/gather_vision/process/service/discord/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/gather_vision/process/service/discord/client.py b/gather_vision/process/service/discord/client.py deleted file mode 100644 index b86c3d4..0000000 --- a/gather_vision/process/service/discord/client.py +++ /dev/null @@ -1,47 +0,0 @@ -from typing import Optional - -from gather_vision.process.component.http_client import HttpClient -from gather_vision.process.component.logger import Logger -from gather_vision.process.service.discord.webhook import Webhook - - -class Client: - """Interface to the Discord API.""" - - def __init__( - self, - logger: Logger, - http_client: HttpClient, - bot_name: str, - bot_url: str, - bot_version: str, - base_url: Optional[str] = None, - ): - self._logger = logger - 
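# The removed AuQld._parse_exposures above parses exposure windows such as
# '11.32am - 11.40am' by trying two strptime patterns for each part. A
# standalone restatement with an illustrative input:
from datetime import datetime

raw = "11.32am - 11.40am"
parsed = []
for part in raw.split(" - "):
    for pattern in ("%I.%M%p", "%I%p"):
        try:
            parsed.append(datetime.strptime(part, pattern))
            break
        except ValueError:
            continue
print([t.strftime("%H:%M") for t in parsed])  # ['11:32', '11:40']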
self._http_client = http_client - self._bot_name = bot_name - self._bot_url = bot_url - self._bot_version = bot_version - - # https://discord.com/developers/docs/reference#http-api - self._base_url = base_url or f"https://discord.com/api/v9" - - def execute_webhook(self, info: Webhook): - """Execute a discord webhook.""" - # https://discord.com/developers/docs/resources/webhook#execute-webhook - - params = {"wait": "true" if info.wait else "false"} - - if info.thread_id: - params["thread_id"] = info.thread_id - - json_data = info.to_json() - url = self._base_url + f"/webhooks/{info.webhook_id}/{info.webhook_token}" - self._http_client.post( - url, - params=params, - json=json_data, - headers={ - "user-agent": f"{self._bot_name} ({self._bot_url},{self._bot_version})" - }, - ) diff --git a/gather_vision/process/service/discord/embed.py b/gather_vision/process/service/discord/embed.py deleted file mode 100644 index ee63df6..0000000 --- a/gather_vision/process/service/discord/embed.py +++ /dev/null @@ -1,90 +0,0 @@ -from dataclasses import field -from datetime import datetime -from typing import Optional - -from gather_vision.process.service.discord.embed_field import EmbedField - - -class Embed: - """A Discord embed.""" - - # https://discord.com/developers/docs/resources/channel#embed-object - - title: Optional[str] = None - embed_type: Optional[str] = None - description: Optional[str] = None - url: Optional[str] = None - timestamp: Optional[datetime] = None - color: Optional[int] = None - - footer_text: Optional[str] = None - footer_icon_url: Optional[str] = None - footer_proxy_icon_url: Optional[str] = None - - provider_name: Optional[str] = None - provider_url: Optional[str] = None - - author_name: Optional[str] = None - author_url: Optional[str] = None - author_icon_url: Optional[str] = None - author_proxy_icon_url: Optional[str] = None - - fields: list[EmbedField] = field(default_factory=list) - - def to_json(self): - result = {} - - if self.title: - result["title"] = self.title - if self.embed_type: - result["type"] = self.embed_type - if self.description: - result["description"] = self.description - if self.url: - result["url"] = self.url - if self.timestamp: - result["timestamp"] = self.timestamp.isoformat(timespec="seconds") - if self.color: - result["color"] = self.color - - if any([self.footer_text, self.footer_icon_url, self.footer_proxy_icon_url]): - data = {} - if self.footer_text: - data["text"] = self.footer_text - if self.footer_icon_url: - data["icon_url"] = self.footer_icon_url - if self.footer_proxy_icon_url: - data["proxy_icon_url"] = self.footer_proxy_icon_url - result["footer"] = data - - if any([self.provider_name, self.provider_url]): - data = {} - if self.provider_name: - data["name"] = self.provider_name - if self.provider_url: - data["url"] = self.provider_url - result["provider"] = data - - if any( - [ - self.author_name, - self.author_url, - self.author_icon_url, - self.author_proxy_icon_url, - ] - ): - data = {} - if self.author_name: - data["name"] = self.author_name - if self.author_url: - data["url"] = self.author_url - if self.author_icon_url: - data["icon_url"] = self.author_icon_url - if self.author_proxy_icon_url: - data["proxy_icon_url"] = self.author_proxy_icon_url - result["author"] = data - - if self.fields: - result["fields"] = [i.to_json() for i in self.fields] - - return result diff --git a/gather_vision/process/service/discord/embed_field.py b/gather_vision/process/service/discord/embed_field.py deleted file mode 100644 index 4b57615..0000000 --- 
a/gather_vision/process/service/discord/embed_field.py +++ /dev/null @@ -1,16 +0,0 @@ -from typing import Optional - - -class EmbedField: - """A Discord embed field.""" - - # https://discord.com/developers/docs/resources/channel#embed-object-embed-field-structure - name: str - value: str - inline: Optional[bool] = None - - def to_json(self): - result = {"name": self.name, "value": self.value} - if self.inline is not None: - result["inline"] = "true" if self.inline else "false" - return result diff --git a/gather_vision/process/service/discord/webhook.py b/gather_vision/process/service/discord/webhook.py deleted file mode 100644 index 2818aa0..0000000 --- a/gather_vision/process/service/discord/webhook.py +++ /dev/null @@ -1,43 +0,0 @@ -from dataclasses import field -from typing import Optional - -from gather_vision.process.service.discord.embed import Embed - - -class Webhook: - """A discord webhook. Supports content and embeds.""" - - # https://discord.com/developers/docs/resources/webhook#execute-webhook-jsonform-params - - webhook_id: str - webhook_token: str - - content: Optional[str] = None - embeds: list[Embed] = field(default_factory=list) - - tts: bool = False - - username: Optional[str] = None - avatar_url: Optional[str] = None - - wait: bool = False - thread_id: Optional[str] = None - - def to_json(self): - if self.content and self.embeds: - raise ValueError("Cannot provide both content and embeds.") - - result = { - "tts": "true" if self.tts else "false", - } - - if self.content: - result["content"] = self.content - if self.embeds: - result["embeds"] = [i.to_json() for i in self.embeds] - if self.username: - result["username"] = self.username - if self.avatar_url: - result["avatar_url"] = self.avatar_url - - return result diff --git a/gather_vision/process/service/outages/__init__.py b/gather_vision/process/service/outages/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/gather_vision/process/service/outages/energex_events.py b/gather_vision/process/service/outages/energex_events.py deleted file mode 100644 index 3a252c4..0000000 --- a/gather_vision/process/service/outages/energex_events.py +++ /dev/null @@ -1,190 +0,0 @@ -from zoneinfo import ZoneInfo - -from django.utils import timezone -from django.utils.text import slugify - -import gather_vision.models as app_models -from gather_vision.process.component.http_client import HttpClient -from gather_vision.process.component.logger import Logger -from gather_vision.process.component.normalise import Normalise - - -class EnergexEvents: - code = "energex" - - base_url = "https://www.energex.com.au" - api_url = f"{base_url}/api/outages/v0.3" - usage_url = f"{base_url}/static/Energex/Network%20Demand/networkdemand.txt" - outage_summary_url = f"{api_url}/summary" - outage_councils_url = f"{api_url}/council" - outage_council_suburbs_url = f"{api_url}/suburb" - outage_suburb_url = f"{api_url}/search" - - def __init__( - self, - logger: Logger, - http_client: HttpClient, - normalise: Normalise, - tz: ZoneInfo, - ): - self._logger = logger - self._http_client = http_client - self._normalise = normalise - self._tz = tz - - self._source = app_models.InformationSource.objects.get(name=self.code) - - def update_outages(self): - self._logger.info("Updating outages.") - - groups_seen = 0 - groups_imported = 0 - items_seen = 0 - items_imported = 0 - - retrieved_date = timezone.now() - - usage = self.update_usage() - demand_amount = usage.get("demand") - demand_rating = usage.get("rating") - - summary = 
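# The removed Discord Client.execute_webhook above posts the Webhook.to_json
# payload to the endpoint shown in the deleted code. A minimal sketch using
# requests directly in place of the project's HttpClient wrapper; the webhook
# id and token are placeholders.
import requests

url = "https://discord.com/api/v9/webhooks/<webhook_id>/<webhook_token>"
response = requests.post(
    url,
    params={"wait": "true"},
    json={"tts": "false", "content": "Example notification"},
)
print(response.status_code)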
self.update_summary() - customer_count = summary.get("total_cust") - updated_date = summary.get("updated_date") - - group, group_created = app_models.OutageGroup.objects.get_or_create( - source_updated_date=updated_date, - retrieved_date=retrieved_date, - defaults={ - "demand": demand_amount, - "rating": demand_rating, - "total_customers": customer_count, - }, - ) - groups_seen += 1 - if group_created: - groups_imported += 1 - - for event in self.update_events(): - event_name = event.get("event_name") - council = event.get("council") - suburb = event.get("suburb") - post_code = event.get("post_code") - customers = event.get("cust") - cause = event.get("cause") - restored_date = event.get("restore_date") - streets = event.get("streets") - item, item_created = app_models.OutageItem.objects.update_or_create( - source=self._source, - event_name=event_name, - group=group, - defaults={ - "council": council or "", - "suburb": suburb or "", - "post_code": post_code or "", - "cause": cause or "", - "streets": streets or "", - "restored_date": restored_date, - "customers": customers or "", - }, - ) - items_seen += 1 - if item_created: - items_imported += 1 - - if groups_seen % 10 == 0: - self._logger.info( - f"Running total groups {groups_seen} ({groups_imported} imported) " - f"items {items_seen} ({items_imported} imported)." - ) - - self._logger.info( - f"Groups {groups_seen} ({groups_imported} imported) " - f"total items {items_seen} ({items_imported} imported)." - ) - - self._logger.info("Finished updating outages.") - - def update_usage(self): - r = self._http_client.get(self.usage_url) - demand = r.text - rating = self.demand_rating(demand) - return { - "demand": demand, - "rating": rating, - } - - def update_summary(self): - r = self._http_client.get(self.outage_summary_url) - summary = r.json() - data = summary.get("data", {}) - total_cust = data.get("totalCustomersAffected", 0) - - updated_at = data.get("lastUpdated", "") - updated_at = self._normalise.parse_date(updated_at, self._tz) - - return { - "total_cust": total_cust, - "updated_date": updated_at, - } - - def update_events(self): - r = self._http_client.get(self.outage_councils_url, params={"council": ""}) - councils = r.json().get("data", []) - for council in councils: - council_name = council.get("name", "") - - r = self._http_client.get( - self.outage_council_suburbs_url, - params={"council": council_name, "suburb": ""}, - ) - suburbs = r.json().get("data", []) - for suburb in suburbs: - suburb_name = suburb.get("name", "") - - r = self._http_client.get( - self.outage_suburb_url, params={"suburb": suburb_name} - ) - events = r.json().get("data", []) - for event in events: - restore_date = self._normalise.parse_date( - event.get("restoreTime", "").replace(":", ""), self._tz - ) - streets = str.join( - ",", sorted(s.title() for s in event.get("streets", [])) - ) - yield { - "event_name": slugify(event.get("event", "")), - "council": event.get("council", "").title(), - "suburb": event.get("suburb", "").title(), - "post_code": event.get("postcode", ""), - "cust": event.get("customersAffected", ""), - "cause": event.get("cause", ""), - "restore_date": restore_date, - "streets": streets, - } - - def demand_rating(self, demand: str): - demand = int(demand) - - # demand min: 0, demand max: 5500 - # found in: - # https://www.energex.com.au/__data/assets/js_file_folder/0011/653996/main.js?version=0.3.59 - - # divided into 4 equal parts: low, moderate, high, extreme - # then into 3 parts = approx 458.3 per smallest part - # demand_min = 0 - 
demand_max = 5500 - rating_min = 1 - rating_max = 12 - - demand_part = demand_max / 4 / 3 - rating = int(demand / demand_part) - - if rating < rating_min: - rating = rating_min - - if rating > rating_max: - rating = rating_max - - return rating diff --git a/gather_vision/process/service/outages/energex_import.py b/gather_vision/process/service/outages/energex_import.py deleted file mode 100644 index 426c660..0000000 --- a/gather_vision/process/service/outages/energex_import.py +++ /dev/null @@ -1,224 +0,0 @@ -import hashlib -from pathlib import Path -from zoneinfo import ZoneInfo - -from django.utils.text import slugify - -import gather_vision.models as app_models -from gather_vision.process.component.logger import Logger -from gather_vision.process.component.normalise import Normalise -from gather_vision.process.component.sqlite_client import SqliteClient -from gather_vision.process.service.outages.energex_events import EnergexEvents - - -class EnergexImport: - def __init__( - self, - logger: Logger, - normalise: Normalise, - tz: ZoneInfo, - ): - self._logger = logger - self._normalise = normalise - self._tz = tz - - self._source = app_models.InformationSource.objects.get(name=EnergexEvents.code) - - def get_demand(self, data: dict): - demand = data.get("demand") - rating = data.get("rating") - return { - "demand_amount": demand, - "demand_rating": rating, - } - - def get_summary(self, data: dict): - total_cust = data.get("total_cust") - updated_date = self._normalise.parse_date(data.get("updated_at"), self._tz) - return { - "customer_count": total_cust, - "customer_updated_date": updated_date, - } - - def get_info(self, data: dict): - event_name = data.get("event_name") - council = data.get("council") - suburb = data.get("suburb") - post_code = data.get("post_code") - customers = data.get("cust") - cause = data.get("cause") - streets = data.get("streets") - restored_date = self._normalise.parse_date(data.get("restore_at"), self._tz) - return { - "event_name": event_name, - "council": council, - "suburb": suburb, - "post_code": post_code, - "customers": customers, - "cause": cause, - "streets": streets, - "restored_date": restored_date, - } - - def is_demand(self, data: dict): - actual = sorted(data.keys()) - expected = ["demand", "rating", "retrieved_at"] - return actual == expected - - def is_summary(self, data: dict): - actual = sorted(data.keys()) - expected = ["retrieved_at", "total_cust", "updated_at"] - return actual == expected - - def is_info(self, data: dict): - actual = sorted(data.keys()) - expected = [ - "cause", - "council", - "cust", - "event_name", - "id", - "post_code", - "restore_at", - "retrieved_at", - "streets", - "suburb", - ] - return actual == expected - - def import_outages(self, path: Path) -> None: - self._logger.info("Importing outages.") - - db = SqliteClient(path) - conn = db.get_sqlite_db() - table_names = list(db.get_table_names(conn)) - - results = {} - for table_name in table_names: - for row in db.get_table_data(conn, table_name): - row_keys = list(row.keys()) - row_values = list(row) - data = dict(zip(row_keys, row_values)) - retrieved_date = data.get("retrieved_at") - retrieved_date = self._normalise.parse_date(retrieved_date, self._tz) - - if retrieved_date not in results: - results[retrieved_date] = {"demand": [], "summary": [], "info": []} - - if self.is_demand(data): - demand = self.get_demand(data) - results[retrieved_date]["demand"].append(demand) - - elif self.is_summary(data): - summary = self.get_summary(data) - 
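# The removed EnergexEvents.demand_rating above maps network demand (0..5500)
# onto a 1..12 rating by splitting the range into 4 bands of 3 steps each,
# roughly 458.3 per step. A worked example with an illustrative demand value:
demand_max = 5500
demand_part = demand_max / 4 / 3  # ~458.33 per rating step
demand = 2800
rating = min(max(int(demand / demand_part), 1), 12)
print(rating)  # 6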
results[retrieved_date]["summary"].append(summary) - - elif self.is_info(data): - info = self.get_info(data) - results[retrieved_date]["info"].append(info) - else: - raise ValueError(f"Unrecognised data format: '{data}'.") - - self.import_data(results) - self._logger.info("Finished importing outages.") - - def import_data(self, data: dict) -> None: - groups_seen = 0 - groups_imported = 0 - items_seen = 0 - items_imported = 0 - - for retrieved_date, details in data.items(): - demand_data = details.get("demand") - if len(demand_data) == 1: - demand_amount = demand_data[0].get("demand_amount") - demand_rating = demand_data[0].get("demand_rating") - else: - demand_amount = 0 - demand_rating = 0 - - summary_data = details.get("summary") - if len(summary_data) == 1: - customer_count = summary_data[0].get("customer_count") - updated_date = summary_data[0].get("customer_updated_date") - else: - customer_count = 0 - updated_date = None - - group, group_created = app_models.OutageGroup.objects.get_or_create( - retrieved_date=retrieved_date, - source_updated_date=updated_date, - defaults={ - "demand": demand_amount, - "rating": demand_rating, - "total_customers": customer_count, - }, - ) - groups_seen += 1 - if group_created: - groups_imported += 1 - - info_data = details.get("info") - for info in info_data: - event_name = info.get("event_name") - council = info.get("council") - suburb = info.get("suburb") - post_code = info.get("post_code") - customers = info.get("customers") - cause = info.get("cause") - streets = info.get("streets") - restored_date = info.get("restored_date") - - if not event_name: - event_name = self._event_name( - [ - i - for i in [ - self._source.name, - str(retrieved_date) if retrieved_date else "", - str(updated_date) if updated_date else "", - council, - suburb, - str(post_code) if post_code else "", - str(customers) if customers else "", - cause, - streets, - str(restored_date) if restored_date else "", - ] - if i and i.strip() - ] - ) - - item, item_created = app_models.OutageItem.objects.get_or_create( - source=self._source, - group=group, - event_name=event_name, - defaults={ - "council": council or "", - "suburb": suburb or "", - "post_code": post_code or "", - "cause": cause or "", - "streets": streets or "", - "restored_date": restored_date, - "customers": customers or "", - }, - ) - items_seen += 1 - if item_created: - items_imported += 1 - - if groups_seen % 200 == 0: - self._logger.info( - f"Running total groups {groups_seen} ({groups_imported} imported) " - f"items {items_seen} ({items_imported} imported)." - ) - - self._logger.info( - f"Groups {groups_seen} ({groups_imported} imported) " - f"total items {items_seen} ({items_imported} imported)." 
- ) - - def _event_name(self, value: list[str]): - slug = slugify("-".join(value)) - hashed = hashlib.sha256(slug.encode()).hexdigest() - return hashed diff --git a/gather_vision/process/service/petition/__init__.py b/gather_vision/process/service/petition/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/gather_vision/process/service/petition/au_qld.py b/gather_vision/process/service/petition/au_qld.py deleted file mode 100644 index 75da367..0000000 --- a/gather_vision/process/service/petition/au_qld.py +++ /dev/null @@ -1,205 +0,0 @@ -from zoneinfo import ZoneInfo -from lxml import html - -from django.utils import timezone -from gather_vision import models as app_models -from gather_vision.process.component.http_client import HttpClient -from gather_vision.process.component.logger import Logger -from gather_vision.process.component.normalise import Normalise - - -class AuQld: - - code = "au_qld" - - def __init__( - self, - logger: Logger, - http_client: HttpClient, - normalise: Normalise, - tz: ZoneInfo, - ): - self._logger = logger - self._http_client = http_client - self._normalise = normalise - self._tz = tz - - self._source = app_models.InformationSource.objects.get(name=self.code) - - def update_petitions(self): - self._logger.info("Updating Queensland Government petitions.") - - retrieved_date = timezone.now() - - petitions_seen = 0 - petitions_created = 0 - changes_seen = 0 - changes_added = 0 - - petitions = self.get_petitions() - for petition in petitions: - data = self.get_petition(petition) - - body = data["body"] - eligibility = data["eligibility"] - principal = data["principal"] - signatures = data["signatures"] - sponsor = data["sponsor"] - opened_date = data["posted_date"] - closed_date = data["closing_date"] - title = data["title"] - - ref_id = petition["ref_id"] - view_url = petition["view_url"] - - obj, created = app_models.PetitionItem.objects.update_or_create( - source=self._source, - code=ref_id, - defaults={ - "title": title, - "view_url": view_url, - "principal": principal, - "body": body, - "closed_date": closed_date, - "sponsor": sponsor, - "opened_date": opened_date, - "eligibility": eligibility, - }, - ) - - petitions_seen += 1 - if created: - petitions_created += 1 - - change, created = app_models.PetitionChange.objects.get_or_create( - petition=obj, - retrieved_date=retrieved_date, - defaults={ - "signatures": signatures, - }, - ) - changes_seen += 1 - if created: - changes_added += 1 - - if petitions_seen % 5 == 0: - self._logger.info( - f"Running total petitions {petitions_seen} " - f"({petitions_created} created) " - f"changes {changes_seen} ({changes_added} added)." - ) - - self._logger.info( - f"Petitions {petitions_seen} ({petitions_created} created) " - f"changes {changes_seen} ({changes_added} added)." 
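# The removed EnergexImport._event_name above derives a stable identifier for
# outage rows that lack one: it slugifies the joined field values and hashes
# the slug with SHA-256. A standalone restatement (a pre-slugified value is
# used here for brevity, and it is illustrative only):
import hashlib

slug = "energex-2021-01-01-brisbane-example-st"
print(hashlib.sha256(slug.encode()).hexdigest())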
- ) - self._logger.info("Finished updating petitions.") - - def get_petitions(self): - url_base = "https://www.parliament.qld.gov.au" - url = f"{url_base}/Work-of-the-Assembly/Petitions/Current-EPetitions" - r = self._http_client.get(url) - tree = html.fromstring(r.text) - - if tree is None: - raise ValueError("No html available.") - - table = tree.xpath('//div[contains(@class, "current-petitions")]') - if len(table) != 1: - raise ValueError("Found other than 1 table in html.") - - rows = table[0].xpath('div[contains(@class,"petitions-listing")]') - for row in rows: - title = row.xpath(".//a/text()")[0].strip() - closed_date = ( - row.xpath('.//span[@class="petitions-listing__subtext"]/text()')[0] - .split(":")[-1] - .strip() - ) - view_url = row.xpath(".//a/@href")[0].strip() - signatures = ( - row.xpath( - './/span[@class="petitions-listing__signatures-highlight"]/text()' - )[0] - .strip() - .replace("Signatures", "") - .replace("Signature", "") - .replace(",", "") - ) - ref_id = view_url.split("=")[-1] - item = { - "ref_id": ref_id, - "title": title, - "view_url": url_base + view_url, - "signatures": int(signatures), - "closed_at": self._normalise.parse_date(closed_date, self._tz), - } - yield item - - def get_petition(self, data: dict): - url = data.get("view_url") - r = self._http_client.get(url) - tree = html.fromstring(r.text) - - if tree is None: - raise ValueError("No html available.") - - table = tree.xpath('//div[@class="petition-details"]') - if len(table) != 1: - raise ValueError("Found other than 1 table in html.") - - details = table[0] - title = details.xpath("./h3/text()")[0].strip() - eligibility = ( - details.xpath('.//span[@class="petition-details__elegibility"]/text()')[0] - .replace("Eligibility - ", "") - .strip() - ) - principal = details.xpath( - './/div[@class="petition-details__petitioner-details-wrapper"]//text()' - ) - principal = ", ".join([i.strip() for i in principal if i.strip()]) - signatures = ( - " ".join( - details.xpath('.//div[@class="petition-details__signatures"]//text()') - ) - .replace("Total Signatures", "") - .replace(",", "") - .replace("-", "") - .strip() - ) - body = ", ".join( - [ - i.strip() - for i in details.xpath( - './/div[@class="petition-details__content--body"]//text()' - ) - if i.strip() - ] - ).strip() - - sponsor = None - posted_date = None - closing_date = None - - props = details.xpath('.//div[@class="petition-details__prop"]') - for prop in props: - prop_text = " ".join([i.strip() for i in prop.xpath(".//text()")]).strip() - if prop_text.startswith("Sponsoring Member:"): - sponsor = prop_text[18:].strip() - if prop_text.startswith("Posting Date:"): - posted_date = prop_text[13:].strip() - if prop_text.startswith("Closing Date:"): - closing_date = prop_text[13:].strip() - - item = { - "title": title, - "body": body, - "eligibility": eligibility, - "principal": principal, - "signatures": int(signatures), - "sponsor": sponsor, - "posted_date": self._normalise.parse_date(posted_date, self._tz), - "closing_date": self._normalise.parse_date(closing_date, self._tz), - } - return item diff --git a/gather_vision/process/service/petition/au_qld_bcc.py b/gather_vision/process/service/petition/au_qld_bcc.py deleted file mode 100644 index 8a247ba..0000000 --- a/gather_vision/process/service/petition/au_qld_bcc.py +++ /dev/null @@ -1,193 +0,0 @@ -import re -import string -import urllib.parse -from itertools import groupby - -from zoneinfo import ZoneInfo -from lxml import html - -from django.utils import timezone -from gather_vision import 
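# The petition scrapers above and below rely on lxml's html.fromstring plus
# XPath to pull fields out of the fetched pages. A self-contained sketch with
# an illustrative HTML fragment:
from lxml import html

fragment = '<div class="petition-details"><h3>Example petition title</h3></div>'
tree = html.fromstring(fragment)
print(tree.xpath('//div[@class="petition-details"]/h3/text()')[0])  # Example petition title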
models as app_models -from gather_vision.process.component.http_client import HttpClient -from gather_vision.process.component.logger import Logger -from gather_vision.process.component.normalise import Normalise - - -class AuQldBcc: - code = "au_qld_bcc" - - _regex_collapse_newline = re.compile(r"[\n\r]+") - _regex_collapse_whitespace = re.compile(r"\s{2,}") - _regex_signatures = re.compile("signatures.*", re.DOTALL) - - def __init__( - self, - logger: Logger, - http_client: HttpClient, - normalise: Normalise, - tz: ZoneInfo, - ): - self._logger = logger - self._http_client = http_client - self._normalise = normalise - self._tz = tz - - self._source = app_models.InformationSource.objects.get(name=self.code) - - def update_petitions(self): - self._logger.info("Updating Brisbane City Council petitions.") - - retrieved_date = timezone.now() - - petitions_seen = 0 - petitions_created = 0 - changes_seen = 0 - changes_added = 0 - - petitions = self.get_petitions() - for petition in petitions: - data = self.get_petition(petition) - - ref_id = petition["ref_id"] - title = petition["title"] - closed_date = petition["closed_at"] - - view_url = data["view_url"] - principal = data["principal"] - body = data["body"] - - signatures = data["signatures"] - - obj, created = app_models.PetitionItem.objects.update_or_create( - source=self._source, - code=ref_id, - defaults={ - "title": title, - "view_url": view_url, - "principal": principal, - "body": body, - "closed_date": closed_date, - }, - ) - - petitions_seen += 1 - if created: - petitions_created += 1 - - change, created = app_models.PetitionChange.objects.get_or_create( - petition=obj, - retrieved_date=retrieved_date, - defaults={ - "signatures": signatures, - }, - ) - changes_seen += 1 - if created: - changes_added += 1 - - if petitions_seen % 5 == 0: - self._logger.info( - f"Running total petitions {petitions_seen} " - f"({petitions_created} created) " - f"changes {changes_seen} ({changes_added} added)." - ) - - self._logger.info( - f"Petitions {petitions_seen} ({petitions_created} created) " - f"changes {changes_seen} ({changes_added} added)." 
- ) - self._logger.info("Finished updating petitions.") - - def get_petitions(self): - url = "https://epetitions.brisbane.qld.gov.au/" - r = self._http_client.get(url) - tree = html.fromstring(r.text) - - if tree is None: - raise ValueError("No html available.") - - table = tree.xpath('//table[contains(@class, "petitions")]') - if len(table) != 1: - raise ValueError("Found other than 1 table in html.") - - rows = table[0].xpath("//tr") - for row in rows: - if len(row.xpath("th")) == 3 and len(row.xpath("td")) == 0: - continue - cells = row.xpath("td") - title = cells[0].xpath(".//text()")[0].strip() - principal = cells[1].xpath("text()")[0].strip() - closed_date = cells[2].xpath("text()")[0].strip() - view_url = cells[0].xpath("a/@href")[0].strip() - ref_id = view_url.split("/")[-1] - item = { - "ref_id": ref_id, - "title": title, - "view_url": view_url, - "principal": principal, - "closed_at": self._normalise.parse_date(closed_date, self._tz), - } - yield item - - def get_petition(self, data: dict): - ref_id = data["ref_id"] - url = "https://epetitions.brisbane.qld.gov.au/petition/view/pid/" - url += urllib.parse.quote(ref_id) - r = self._http_client.get(url) - tree = html.fromstring(r.text) - - title = tree.xpath( - '//div[@class="page-title"]/h1/text()', - )[0].strip() - - principal = tree.xpath( - '((//table[@class="petition-details"]//tr)[1]/td)[2]/text()', - )[0].strip() - - closed_at = tree.xpath( - '((//table[@class="petition-details"]//tr)[2]/td)[2]/text()', - )[0].strip() - - sig_xpath = '((//table[@class="petition-details"]//tr)[3]/td)[2]' - signatures = tree.xpath(sig_xpath)[0].text_content() or "" - signatures = ( - signatures.casefold() - .replace("(view signatures)", "") - .replace("(view signature)", "") - .replace("signatures", "") - .replace("signature", "") - ).strip() - - body = tree.xpath('//div[@id="petition-details"]')[0].text_content() or "" - body = self._regex_collapse_whitespace.sub( - " ", self._regex_collapse_newline.sub("\n", body) - ).strip() - - item = { - "title": title, - "principal": principal, - "body": body, - "signatures": int(signatures) if signatures else 0, - "closed_at": self._normalise.parse_date(closed_at, self._tz), - "view_url": url, - "ref_id": ref_id, - } - - return item - - def _allowed_chars(self): - return string.digits + string.ascii_letters + string.punctuation - - def _normalise_string(self, value): - if not value: - return "" - - value = value.replace("’", "'") - remove_newlines = value.replace("\n", " ").replace("\r", " ").strip() - result = "".join( - c if c in self._allowed_chars() else " " for c in remove_newlines - ).strip() - return result - - def _custom_split(self, value, chars): - return ["".join(gp) for _, gp in groupby(value, lambda char: char in chars)] diff --git a/gather_vision/process/service/petition/petition_import.py b/gather_vision/process/service/petition/petition_import.py deleted file mode 100644 index e7c8818..0000000 --- a/gather_vision/process/service/petition/petition_import.py +++ /dev/null @@ -1,184 +0,0 @@ -from pathlib import Path - -from zoneinfo import ZoneInfo - -from gather_vision import models as app_models -from gather_vision.process.component.logger import Logger -from gather_vision.process.component.normalise import Normalise -from gather_vision.process.component.sqlite_client import SqliteClient -from gather_vision.process.service.petition.au_qld import AuQld -from gather_vision.process.service.petition.au_qld_bcc import AuQldBcc - - -class PetitionImport: - def __init__(self, logger: Logger, 
normalise: Normalise, tz: ZoneInfo): - self._logger = logger - self._normalise = normalise - self._tz = tz - - self._source_au_qld = app_models.InformationSource.objects.get(name=AuQld.code) - self._source_au_qld_bcc = app_models.InformationSource.objects.get( - name=AuQldBcc.code - ) - - def import_petitions(self, path: Path): - self._logger.info("Importing petitions.") - - petitions_seen = 0 - petitions_imported = 0 - changes_seen = 0 - changes_imported = 0 - - db = SqliteClient(path) - conn = db.get_sqlite_db() - table_names = list(db.get_table_names(conn)) - for table_name in table_names: - for row in db.get_table_data(conn, table_name): - row_keys = list(row.keys()) - row_values = list(row) - data = dict(zip(row_keys, row_values)) - - if self.is_au_qld(data): - ( - petition, - petition_created, - change, - change_created, - ) = self.import_au_qld(data) - elif self._is_au_qld_bcc(data): - ( - petition, - petition_created, - change, - change_created, - ) = self.import_au_qld_bcc(data) - else: - raise ValueError(f"Unrecognised data format: '{data}'.") - - petitions_seen += 1 - if petition_created: - petitions_imported += 1 - - changes_seen += 1 - if change_created: - changes_imported += 1 - - if petitions_seen % 1000 == 0: - self._logger.info( - f"Running total petitions {petitions_seen} " - f"({petitions_imported} imported) " - f"changes {changes_seen} ({changes_imported} imported)." - ) - - self._logger.info( - f"petitions {petitions_seen} ({petitions_imported} imported) " - f"changes {changes_seen} ({changes_imported} imported)." - ) - self._logger.info("Finished importing petitions.") - - def import_au_qld(self, data: dict): - title = self._normalise.petition_text(data.get("subject")) - code = data.get("reference_num") - view_url = data.get("url") - principal = self._normalise.petition_text(data.get("principal")) - body = data.get("body") - opened_date = self._normalise.parse_date(data.get("posted_at"), self._tz) - closed_date = self._normalise.parse_date(data.get("closed_at"), self._tz) - eligibility = self._normalise.petition_text(data.get("eligibility")) - sponsor = self._normalise.petition_text(data.get("sponsor")) - - retrieved_date = self._normalise.parse_date(data.get("retrieved_at"), self._tz) - signatures = str(data.get("signatures", "")) - signatures = int(signatures, 10) if signatures else 0 - - petition, petition_created = app_models.PetitionItem.objects.get_or_create( - source=self._source_au_qld, - code=code, - defaults={ - "title": title, - "view_url": view_url, - "principal": principal, - "body": body, - "opened_date": opened_date, - "closed_date": closed_date, - "eligibility": eligibility, - "sponsor": sponsor, - }, - ) - - change, change_created = app_models.PetitionChange.objects.get_or_create( - petition=petition, - retrieved_date=retrieved_date, - defaults={"signatures": signatures}, - ) - - return petition, petition_created, change, change_created - - def import_au_qld_bcc(self, data: dict): - title = self._normalise.petition_text(data.get("title")) - code = data.get("reference_id") - view_url = data.get("url") - principal = self._normalise.petition_text(data.get("principal")) - body = data.get("body") - opened_date = ( - None # self._normalise.parse_date(data.get("posted_at"), self._tz) - ) - closed_date = self._normalise.parse_date(data.get("closed_at"), self._tz) - - retrieved_date = self._normalise.parse_date(data.get("retrieved_at"), self._tz) - signatures = self._normalise.norm_signatures(str(data.get("signatures", ""))) - - petition, petition_created = 
app_models.PetitionItem.objects.get_or_create( - source=self._source_au_qld_bcc, - code=code, - defaults={ - "title": title, - "view_url": view_url, - "principal": principal, - "body": body, - "opened_date": opened_date, - "closed_date": closed_date, - }, - ) - - change, change_created = app_models.PetitionChange.objects.get_or_create( - petition=petition, - retrieved_date=retrieved_date, - defaults={"signatures": signatures}, - ) - - return petition, petition_created, change, change_created - - def is_au_qld(self, data: dict): - actual = sorted(data.keys()) - expected = [ - "addressed_to", - "body", - "closed_at", - "eligibility", - "posted_at", - "principal", - "reference_name", - "reference_num", - "retrieved_at", - "signatures", - "sponsor", - "subject", - "url", - ] - return actual == expected - - def _is_au_qld_bcc(self, data: dict): - actual = sorted(data.keys()) - expected = [ - "body", - "closed_at", - "principal", - "reference_id", - "retrieved_at", - "sign_uri", - "signatures", - "title", - "url", - ] - return actual == expected diff --git a/gather_vision/process/service/playlist/__init__.py b/gather_vision/process/service/playlist/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/gather_vision/process/service/playlist/abc_radio.py b/gather_vision/process/service/playlist/abc_radio.py deleted file mode 100644 index 095910d..0000000 --- a/gather_vision/process/service/playlist/abc_radio.py +++ /dev/null @@ -1,247 +0,0 @@ -from datetime import datetime, timedelta -from typing import Optional -from zoneinfo import ZoneInfo - -from gather_vision import models as app_models -from gather_vision.process import item as app_items -from gather_vision.process.component.http_client import HttpClient -from gather_vision.process.component.logger import Logger -from gather_vision.process.component.normalise import Normalise -from gather_vision.process.service.playlist import abstract as service_mixins - - -class AbcRadio(service_mixins.PlaylistDetails, service_mixins.PlaylistSource): - """Get playlists from ABC Radio.""" - - @property - def code(self): - return "abcradio" - - @property - def title(self): - return "ABC Radio" - - @property - def collections(self): - return ["doublej_most_played", "triplej_most_played", "unearthed_most_played"] - - def __init__( - self, - logger: Logger, - http_client: HttpClient, - normalise: Normalise, - tz: ZoneInfo, - ): - self._logger = logger - self._http_client = http_client - self._normalise = normalise - self._tz = tz - - self._url = "https://music.abcradio.net.au/api/v1/recordings/plays.json" - - self._collection_config = { - "doublej_most_played": "doublej", - "triplej_most_played": "triplej", - "unearthed_most_played": "unearthed", - } - - def get_playlist(self, identifier: str, name: str, title: str): - playlist = app_items.Playlist(name=name, title=title) - return playlist - - def get_playlist_tracks( - self, - identifier: str, - name: str, - title: str, - start_date: Optional[datetime] = None, - end_date: Optional[datetime] = None, - limit: Optional[int] = None, - ): - - # set the limit - if not limit: - limit = 100 - elif limit < 1: - limit = 1 - - # construct the time span (1 week) - current_time = datetime.now(tz=self._tz) - current_day = current_time.date() - if not start_date and not end_date: - start_date = current_day - timedelta(days=8) - end_date = current_day - timedelta(days=1) - elif start_date and not end_date: - end_date = start_date + timedelta(days=7) - elif not start_date and end_date: - start_date = 
end_date - timedelta(days=7) - - # build the url - url_name = self._collection_config[name] - qs = self.build_qs( - name=url_name, - start_date=start_date, - end_date=end_date, - limit=limit, - ) - - # download the tracks - self._logger.info( - f"Downloading up to {limit} tracks " - f"from '{self.code}' collection '{name}'." - ) - data = self._http_client.get(self._url, params=qs) - - # build the playlist - playlist = self.get_playlist(identifier, name, title) - for index, item in enumerate(data.json()["items"]): - track_number = index + 1 - track_title = item["title"] - track_id = item["arid"] - original_artists = item["artists"] - - # get primary artist and featured artists - sep = self._normalise.track_sep_spaced - primary_artists = "" - featured_artists = "" - for raw_artist in original_artists: - artist_type = raw_artist["type"] - artist_name = raw_artist["name"] - - if artist_type == "primary": - primary_artists = f"{primary_artists}{sep} {artist_name}" - - elif artist_type == "featured": - featured_artists = f"{featured_artists}{sep} {artist_name}" - - else: - raise Exception( - f"Unrecognised artist '{artist_name}' ({artist_type})." - ) - - # normalise title and artists - ( - title_norm, - primary_artists_norm, - featured_artists_norm, - queries, - ) = self._normalise.track(track_title, primary_artists, featured_artists) - - # add track to playlist - playlist.add_track( - service_name=self.code, - collection_name=name, - track_number=track_number, - track_id=track_id, - title=title_norm, - primary_artists=primary_artists_norm, - featured_artists=featured_artists_norm, - queries=queries, - raw=item, - ) - - self._logger.info( - f"Retrieved {len(playlist.tracks)} tracks " - f"from '{self.code}' collection '{name}'." - ) - return playlist - - def build_qs( - self, - name: str, - start_date: datetime, - end_date: datetime, - order: str = "desc", - limit: int = 50, - ) -> dict: - - if not name: - raise ValueError("Must provide name.") - if not start_date: - raise ValueError("Must provide start date.") - if not end_date: - raise ValueError("Must provide end date.") - if not order or order not in ["desc", "asc"]: - raise ValueError("Must provide order, one of 'desc' or 'asc'.") - if not limit or limit < 1: - raise ValueError("Must provide limit greater than 0.") - - qs = { - "order": order, - "limit": limit, - "service": name, - "from": f"{start_date.strftime('%Y-%m-%dT%H:%M:%SZ')}", - "to": f"{end_date.strftime('%Y-%m-%dT%H:%M:%SZ')}", - } - return qs - - def get_model_track( - self, - info: app_models.InformationSource, - track: app_items.Track, - ): - if not info or not track or not track.raw: - raise ValueError( - f"Cannot build spotify playlist track from '{info}' '{track}'." 
- ) - - code = track.raw.get("arid", "") - title = track.raw.get("title", "") - artists = ", ".join( - [ - i.get("name", "") - for i in track.raw.get("artists", []) - if i.get("name", "") - ] - ) - - urls1 = track.raw.get("links", []) - urls2 = [j for i in track.raw.get("releases", []) for j in i.get("links", [])] - urls3 = [j for i in track.raw.get("artists", []) for j in i.get("links", [])] - urls = urls1 + urls2 + urls3 - info_url = next( - (i.get("url") for i in urls if i and "musicbrainz" not in i.get("url")), "" - ) - - images = [ - j for i in track.raw.get("releases", []) for j in i.get("artwork", []) - ] + track.raw.get("artwork", []) - - image_urls = sorted(images, reverse=True, key=lambda x: x.get("width")) - image_url = next((i.get("url") for i in image_urls if self._valid_url(i)), "") - - musicbrainz_links = track.raw.get("links", []) + [ - j for i in track.raw.get("releases", []) for j in i.get("links", []) - ] - musicbrainz_code = next( - ( - i.get("id_component") - for i in musicbrainz_links - if i and "musicbrainz" in i.get("url") - ), - None, - ) - - obj, created = app_models.PlaylistTrack.objects.update_or_create( - source=info, - code=code, - defaults={ - "title": title, - "artists": artists, - "info_url": info_url, - "image_url": image_url, - "musicbrainz_code": musicbrainz_code, - }, - ) - return obj - - def _valid_url(self, item: dict): - image_max = 600 - return all( - [ - item, - item.get("url"), - item.get("width") < image_max, - item.get("height") < image_max, - ] - ) diff --git a/gather_vision/process/service/playlist/abstract.py b/gather_vision/process/service/playlist/abstract.py deleted file mode 100644 index 7778026..0000000 --- a/gather_vision/process/service/playlist/abstract.py +++ /dev/null @@ -1,103 +0,0 @@ -from abc import ABC -from datetime import datetime -from typing import Optional, Iterable - -from gather_vision.process import item as app_items -from gather_vision import models as app_models - - -class PlaylistSource(ABC): - """A service that provides playlists.""" - - @property - def collections(self) -> list: - """ - The internal codes for the playlists available from this service. 
- """ - raise NotImplementedError() - - def get_playlist( - self, identifier: str, name: str, title: str - ) -> app_items.Playlist: - """Get a playlist.""" - raise NotImplementedError() - - def get_playlist_tracks( - self, - identifier: str, - name: str, - title: str, - start_date: Optional[datetime] = None, - end_date: Optional[datetime] = None, - limit: Optional[int] = None, - ) -> app_items.Playlist: - """Get a playlist and tracks.""" - raise NotImplementedError() - - def get_model_track( - self, - info: app_models.InformationSource, - track: app_items.Track, - ) -> app_models.PlaylistTrack: - """Convert from a track from this source to the model used to store a track.""" - raise NotImplementedError() - - -class PlaylistTarget(ABC): - """A service that can store playlists.""" - - def set_playlist_tracks( - self, - identifier: str, - new_tracks: Iterable[app_items.Track], - old_tracks: Iterable[app_items.Track], - ) -> bool: - """Set the tracks for a playlist.""" - raise NotImplementedError() - - def set_playlist_details( - self, - collection_name: str, - playlist_id: str, - title: str = None, - description: str = None, - is_public: bool = None, - ) -> bool: - """Set playlist details.""" - raise NotImplementedError() - - def search_tracks( - self, - playlist_name: str, - track: str, - artists: list[str], - limit: int = 5, - ) -> Iterable[app_items.Track]: - """Search the tracks available from a service.""" - raise NotImplementedError() - - -class AuthRequiredService(ABC): - """A service that requires authentication.""" - - def login_init(self, *args, **kwargs) -> None: - """Get the initial set of login details.""" - raise NotImplementedError() - - def login_next(self, *args, **kwargs) -> None: - """Get the next set of login details.""" - raise NotImplementedError() - - -class PlaylistDetails(ABC): - """Provided basic information about a playlist service.""" - - @property - def code(self) -> str: - """The internal code for the service.""" - raise NotImplementedError() - - @property - def title(self) -> str: - """The displayed title for the service.""" - raise NotImplementedError() diff --git a/gather_vision/process/service/playlist/last_fm.py b/gather_vision/process/service/playlist/last_fm.py deleted file mode 100644 index 2040a56..0000000 --- a/gather_vision/process/service/playlist/last_fm.py +++ /dev/null @@ -1,186 +0,0 @@ -from datetime import datetime -from typing import Optional - -from zoneinfo import ZoneInfo - -from gather_vision.process import item as app_items -from gather_vision import models as app_models -from gather_vision.process.component.http_client import HttpClient -from gather_vision.process.component.logger import Logger -from gather_vision.process.component.normalise import Normalise -from gather_vision.process.service.playlist import abstract as service_mixins - - -class LastFm( - service_mixins.PlaylistDetails, - service_mixins.AuthRequiredService, - service_mixins.PlaylistSource, -): - """Get playlists from Last.fm.""" - - @property - def code(self): - return "lastfm" - - @property - def title(self): - return "Last.fm" - - @property - def collections(self): - return ["most_popular"] - - def __init__( - self, - logger: Logger, - http_client: HttpClient, - normalise: Normalise, - tz: ZoneInfo, - ): - self._logger = logger - self._http_client = http_client - self._normalise = normalise - self._tz = tz - - self._api_key = None - - self._url = "https://ws.audioscrobbler.com/2.0/" - self._collection_config = { - "most_popular": { - "method": "geo.gettoptracks", - 
"country": "australia", - } - } - - def get_playlist(self, identifier: str, name: str, title: str): - playlist = app_items.Playlist(name=name, title=title) - return playlist - - def get_playlist_tracks( - self, - identifier: str, - name: str, - title: str, - start_date: Optional[datetime] = None, - end_date: Optional[datetime] = None, - limit: Optional[int] = None, - ): - # set the limit - if not limit: - limit = 100 - elif limit < 1: - limit = 1 - - # build the url - if name not in self._collection_config: - raise ValueError(f"Unrecognised collection name '{name}'.") - - # download the tracks - self._logger.info( - f"Downloading up to {limit} tracks " - f"from '{self.code}' collection '{name}'." - ) - - config = self._collection_config[name] - params = self.build_qs(**config, limit=limit) - - data = self._http_client.get(self._url, params=params) - if data: - data = data.json().get("tracks", {}).get("track", {}) - else: - data = [] - - # build the playlist - playlist = self.get_playlist(identifier, name, title) - for index, item in enumerate(data): - track_number = index + 1 - track_id = item.get("url") - track_title = item["name"] - artists = item["artist"]["name"] - - # normalise title and artists - ( - title_norm, - primary_artists_norm, - featured_artists_norm, - queries, - ) = self._normalise.track(track_title, artists, "") - - # add track to playlist - playlist.add_track( - service_name=self.code, - collection_name=name, - track_number=track_number, - track_id=track_id, - title=title_norm, - primary_artists=primary_artists_norm, - featured_artists=featured_artists_norm, - queries=queries, - raw=item, - ) - - self._logger.info( - f"Retrieved {len(playlist.tracks)} tracks " - f"from '{self.code}' collection '{name}'." - ) - return playlist - - def get_model_track( - self, - info: app_models.InformationSource, - track: app_items.Track, - ): - code = None - title = None - artists = None - info_url = None - image_url = None - musicbrainz_code = None - obj, created = app_models.PlaylistTrack.objects.update_or_create( - source=info, - code=code, - defaults={ - "title": title, - "artists": artists, - "info_url": info_url, - "image_url": image_url, - "musicbrainz_code": musicbrainz_code, - }, - ) - return obj - - def login_init(self, *args, **kwargs): - pass - - def login_next(self, api_key: str): - """Get the next login token.""" - self._api_key = api_key - - def build_qs( - self, - method: str, - country: str, - output_format: str = "json", - limit: int = 50, - page: int = 1, - ): - if not method: - raise ValueError("Must provide method.") - if not country: - raise ValueError("Must provide country.") - if not output_format or output_format not in ["json"]: - raise ValueError("Must provide output format, one of 'json'.") - if not limit or limit < 1: - raise ValueError("Must provide limit greater than 0.") - if not page or page < 1: - raise ValueError("Must provide page greater than 0.") - - qs = { - "api_key": self._api_key, - "method": method, - "country": country, - "format": output_format, - "limit": limit, - "page": page, - } - return qs diff --git a/gather_vision/process/service/playlist/radio_4zzz.py b/gather_vision/process/service/playlist/radio_4zzz.py deleted file mode 100644 index c2a702c..0000000 --- a/gather_vision/process/service/playlist/radio_4zzz.py +++ /dev/null @@ -1,302 +0,0 @@ -from datetime import datetime, timedelta -from typing import Optional -from zoneinfo import ZoneInfo - -from django.utils.text import slugify - -from gather_vision import models as app_models 
-from gather_vision.process import item as app_items -from gather_vision.process.component.http_client import HttpClient -from gather_vision.process.component.logger import Logger -from gather_vision.process.component.normalise import Normalise -from gather_vision.process.service.playlist import abstract as service_mixins - - -class Radio4zzz(service_mixins.PlaylistDetails, service_mixins.PlaylistSource): - """Get playlists from Radio 4zzz.""" - - @property - def code(self): - return "radio4zzz" - - @property - def title(self): - return "Radio 4zzz" - - @property - def collections(self): - return ["most_played"] - - def __init__( - self, - logger: Logger, - http_client: HttpClient, - normalise: Normalise, - tz: ZoneInfo, - ): - self._logger = logger - self._http_client = http_client - self._normalise = normalise - self._tz = tz - - self._url_programs = "https://airnet.org.au/rest/stations/4ZZZ/programs" - - self._collection_config = { - "most_played": self.build_most_played, - } - - def get_playlist(self, identifier: str, name: str, title: str): - playlist = app_items.Playlist(name=name, title=title) - return playlist - - def get_playlist_tracks( - self, - identifier: str, - name: str, - title: str, - start_date: Optional[datetime] = None, - end_date: Optional[datetime] = None, - limit: Optional[int] = None, - ): - if name not in self._collection_config: - raise ValueError(f"Unrecognised collection name '{name}'.") - - # set the limit - if not limit: - limit = 100 - elif limit < 1: - limit = 1 - - # construct the time span (1 week) - current_time = datetime.now(tz=self._tz) - if not start_date and not end_date: - start_date = current_time - timedelta(days=8) - end_date = current_time - timedelta(days=1) - elif start_date and not end_date: - end_date = start_date + timedelta(days=7) - elif not start_date and end_date: - start_date = end_date - timedelta(days=7) - - # download the tracks - self._logger.info( - f"Downloading up to {limit} tracks " - f"from '{self.code}' collection '{name}'." 
- ) - - # first collect the tracks - tracks = {} - - # programs - for program in self.get_programs(): - program_url = program.get("programRestUrl") - program_archived = program.get("archived") or False - - if program_archived: - continue - - # episodes - for episode in self.get_episodes(program_url, start_date, end_date): - episode_url = episode.get("episodeRestUrl") - - # tracks - for track in self.get_tracks(episode_url): - track_key = track.get("key") - track["program_info"] = program - track["episode_info"] = episode - - if track_key in tracks: - tracks[track_key].append(track) - else: - tracks[track_key] = [track] - - playlist = self._collection_config[name](tracks, limit) - return playlist - - def get_model_track( - self, - info: app_models.InformationSource, - track: app_items.Track, - ): - code = None - title = None - artists = None - info_url = None - image_url = None - musicbrainz_code = None - obj, created = app_models.PlaylistTrack.objects.update_or_create( - source=info, - code=code, - defaults={ - "title": title, - "artists": artists, - "info_url": info_url, - "image_url": image_url, - "musicbrainz_code": musicbrainz_code, - }, - ) - return obj - - def get_programs(self): - data = self._http_client.get(self._url_programs) - return data.json() - - def get_episodes(self, program_url: str, start_date: datetime, end_date: datetime): - tz = self._tz - url = f"{program_url}/episodes" - data = self._http_client.get(url) - episodes = data.json() or [] - for episode in episodes: - episode_start = self._normalise.parse_date(episode.get("start", ""), tz) - episode_end = self._normalise.parse_date(episode.get("end", ""), tz) - - # must be fully inside the from -> to dates - if episode_start < start_date or episode_end > end_date: - continue - - episode["start_date"] = episode_start - episode["end_date"] = episode_end - - yield episode - - def get_tracks(self, episode_url: str): - url = f"{episode_url}/playlists" - data = self._http_client.get(url) - tracks = data.json() or [] - for track in tracks: - track_type = track.get("type") - track_artist = track.get("artist") - track_title = track.get("title") - track_track = track.get("track") - - if track_type != "track": - raise ValueError( - f"Track type is expected to be 'track', but is {track_type}." 
- ) - - if track_title != track_track: - raise ValueError( - "Title and track are expected to match, " - f"but do not: '{track_title}' != '{track_track}'" - ) - - track_key = "-".join([slugify(track_artist), slugify(track_track)]) - track["key"] = track_key - - yield track - - def build_most_played(self, tracks, limit: int): - name = "most_played" - - # find the top {limit} most played tracks - most_played_tracks = sorted( - [(len(v), k, v) for k, v in tracks.items()], reverse=True - )[:limit] - - # build playlist - playlist = self.get_playlist(name, name) - for index, item in enumerate(most_played_tracks): - track_number = index + 1 - play_count = item[0] - track_key = item[1] - track_infos = item[2] - - info = {"play_count": play_count} - - for track_info in track_infos: - # program - program = track_info.get("program_info", {}) - self._i(info, program, "program", "slug") - self._i(info, program, "program", "broadcasters") - self._i(info, program, "program", "gridDescription") - self._i(info, program, "program", "name") - - # episode - episode = track_info.get("episode_info", {}) - self._i(info, episode, "ep", "url") - self._i(info, episode, "ep", "start") - self._i(info, episode, "ep", "end") - self._i(info, episode, "ep", "duration") - self._i(info, episode, "ep", "multipleEpsOnDay") - self._i(info, episode, "ep", "title") - self._i(info, episode, "ep", "description") - self._i(info, episode, "ep", "currentEpisode") - self._i(info, episode, "ep", "imageUrl") - self._i(info, episode, "ep", "smallImageUrl") - self._i(info, episode, "ep", "episodeRestUrl") - self._i(info, episode, "ep", "start_date") - self._i(info, episode, "ep", "end_date") - - # track - self._i(info, track_info, "", "key") - self._i(info, track_info, "", "type") - self._i(info, track_info, "", "id") - self._i(info, track_info, "", "artist") - self._i(info, track_info, "", "title") - self._i(info, track_info, "", "track") - self._i(info, track_info, "", "release") - self._i(info, track_info, "", "time") - self._i(info, track_info, "", "notes") - self._i(info, track_info, "", "twitterHandle") - self._i(info, track_info, "", "wikipedia") - self._i(info, track_info, "", "image") - self._i(info, track_info, "", "video") - self._i(info, track_info, "", "url") - self._i(info, track_info, "", "approximateTime") - self._i(info, track_info, "", "thispart") - - # track - content - track_content = track_info.get("contentDescriptors", {}) - self._i(info, track_content, "track", "isAustralian") - self._i(info, track_content, "track", "isLocal") - self._i(info, track_content, "track", "isFemale") - self._i(info, track_content, "track", "isIndigenous") - self._i(info, track_content, "track", "isNew") - - # track - testing - track_testing = track_info.get("testing") - self._i(info, track_testing, "track_test", "date") - self._i(info, track_testing, "track_test", "timezone_type") - self._i(info, track_testing, "track_test", "timezone") - - track_track = list(info.get("track", [])) - track_track = track_track[0] if len(track_track) == 1 else "" - - track_artists = list(info.get("artist", [])) - track_artists = track_artists[0] if len(track_artists) == 1 else "" - - # normalise title and artists - ( - title_norm, - primary_artists_norm, - featured_artists_norm, - queries, - ) = self._normalise.track(track_track, track_artists, []) - - # add track to playlist - playlist.add_track( - service_name=self.code, - collection_name=name, - track_number=track_number, - track_id=track_key, - title=title_norm, - primary_artists=primary_artists_norm, 
- featured_artists=featured_artists_norm, - queries=queries, - raw=info, - ) - return playlist - - def _i(self, container: dict, raw_container: dict, prefix: str, key: str) -> None: - if not container or not raw_container or not key: - return - - value = raw_container.get(key) or "" - value = str(value) - if not value or not value.strip(): - return - - container_key = f"{prefix}-{key}" if prefix else key - - if container_key not in container: - container[container_key] = set() - container[container_key].add(value) diff --git a/gather_vision/process/service/playlist/spotify.py b/gather_vision/process/service/playlist/spotify.py deleted file mode 100644 index e223c7f..0000000 --- a/gather_vision/process/service/playlist/spotify.py +++ /dev/null @@ -1,246 +0,0 @@ -from datetime import datetime -from typing import Optional, Iterable -from zoneinfo import ZoneInfo - -from requests import codes - -from gather_vision.process import item as app_items -from gather_vision import models as app_models -from gather_vision.process.component.http_client import HttpClient -from gather_vision.process.component.logger import Logger -from gather_vision.process.component.normalise import Normalise -from gather_vision.process.component.spotify_client import SpotifyClient -from gather_vision.process.service.playlist import abstract as service_mixins - - -class Spotify( - service_mixins.PlaylistDetails, - service_mixins.AuthRequiredService, - service_mixins.PlaylistSource, - service_mixins.PlaylistTarget, -): - """Retrieve and set playlists for the Spotify music streaming service.""" - - @property - def code(self): - return "spotify" - - @property - def title(self): - return "Spotify" - - @property - def collections(self): - return [] - - def __init__( - self, - logger: Logger, - http_client: HttpClient, - normalise: Normalise, - tz: ZoneInfo, - market: str = "AU", - ): - self._logger = logger - self._http_client = http_client - self._normalise = normalise - self._tz = tz - self._market = market - - self._client = SpotifyClient(logger, http_client, tz) - self._access_token: str = "" - self._refresh_token: str = "" - - def get_playlist(self, identifier: str, name: str, title: str): - # get playlist details - status, content = self._client.playlist_get( - access_token=self._access_token, - playlist_id=identifier, - market=self._market, - ) - playlist = app_items.Playlist(name=name, title=content.get("name")) - return playlist - - def get_playlist_tracks( - self, - identifier: str, - name: str, - title: str, - start_date: Optional[datetime] = None, - end_date: Optional[datetime] = None, - limit: Optional[int] = None, - ): - self._logger.info("Retrieving tracks for Spotify playlist.") - - playlist = self.get_playlist(identifier, name, "") - - # get playlist tracks - status, content = self._client.playlist_tracks_get( - access_token=self._access_token, - playlist_id=identifier, - limit=limit, - offset=0, - market=self._market, - ) - for index, track in enumerate(content.get("items", [])): - track_number = index + 1 - item = track.get("track", {}) - track = self._build_track(name, track_number, item) - playlist.tracks.append(track) - return playlist - - def set_playlist_tracks( - self, - identifier: str, - new_tracks: Iterable[app_items.Track], - old_tracks: Iterable[app_items.Track], - ): - self._logger.info(f"Setting new tracks for Spotify playlist '{identifier}'.") - song_ids = [t.track_id for t in new_tracks] - status, content = self._client.playlist_tracks_set( - access_token=self._access_token, - 
playlist_id=identifier, - song_ids=song_ids, - ) - return status == codes.created - - def set_playlist_details( - self, - collection_name: str, - playlist_id: str, - title: str = None, - description: str = None, - is_public: bool = None, - ): - self._logger.info(f"Setting details for Spotify playlist '{collection_name}'.") - status, content = self._client.playlist_details_set( - access_token=self._access_token, - playlist_id=playlist_id, - title=title, - description=description, - is_public=is_public, - ) - return status == codes.ok - - def search_tracks( - self, playlist_name: str, track: str, artists: list[str], limit: int = 5 - ): - query_status, query_result = self._client.track_query_get( - access_token=self._access_token, - query=f"{' '.join(artists)} {track}", - limit=limit, - ) - track_hits = query_result.get("tracks", {}).get("items", []) - - # stop if there are no results - if not query_result or not track_hits: - self._logger.warning( - f"No match for {self.code} track matching " - f"'{playlist_name}': '{track}' - '{artists}'." - ) - return [] - - tracks = [] - for index, item in enumerate(track_hits): - track_number = index + 1 - track = self._build_track(playlist_name, track_number, item) - tracks.append(track) - return tracks - - def login_init(self, client_id: str, client_secret: str, redirect_uri: str): - """ - Run the initial authorisation flow to get an access token and refresh token. - """ - self._access_token, self._refresh_token, expires_in = self._client.login_init( - client_id, client_secret, redirect_uri - ) - - def login_next(self, client_id: str, client_secret: str, refresh_token: str): - """Get the next login token.""" - self._access_token = self._client.login_token_next( - client_id, client_secret, refresh_token - ) - - def get_model_track( - self, info: app_models.InformationSource, track: app_items.Track - ): - if not info or not track or not track.raw: - raise ValueError(f"Cannot build playlist track from '{info}' '{track}'.") - - code = track.raw.get("id", "") - title = track.raw.get("name", "") - artists = ", ".join( - [ - i.get("name", "") - for i in track.raw.get("artists", []) - if i.get("name", "") - ] - ) - info_url = next( - ( - i - for i in [ - track.raw.get("href", "").strip(), - track.raw.get("external_urls", {}).get("spotify", "").strip(), - ] - if i - ), - "", - ) - images = track.raw.get("album", {}).get("images", []) - image_urls = sorted(images, reverse=True, key=lambda x: x.get("width")) - image_url = next((i.get("url") for i in image_urls if self._valid_url(i)), "") - - obj, created = app_models.PlaylistTrack.objects.update_or_create( - source=info, - code=code, - defaults={ - "title": title, - "artists": artists, - "info_url": info_url, - "image_url": image_url, - "musicbrainz_code": None, - }, - ) - return obj - - def _build_track( - self, - collection_name: str, - track_number: int, - item: dict, - ) -> app_items.Track: - track_id = item.get("id") - track_name = item.get("name") - track_artists = [a.get("name") for a in item.get("artists")] - - ( - title_norm, - primary_artists_norm, - featured_artists_norm, - queries, - ) = self._normalise.track(track_name, track_artists, []) - - track = app_items.Track( - service_name=self.code, - collection_name=collection_name, - track_number=track_number, - track_id=track_id, - title=title_norm, - primary_artists=primary_artists_norm, - featured_artists=featured_artists_norm, - queries=queries, - raw=item, - ) - return track - - def _valid_url(self, item: dict): - image_max = 600 - return all( - [ - 
item, - item.get("url"), - item.get("width") < image_max, - item.get("height") < image_max, - ] - ) diff --git a/gather_vision/process/service/playlist/youtube_music.py b/gather_vision/process/service/playlist/youtube_music.py deleted file mode 100644 index ad74aa8..0000000 --- a/gather_vision/process/service/playlist/youtube_music.py +++ /dev/null @@ -1,211 +0,0 @@ -from datetime import datetime -from typing import Optional, Iterable -from zoneinfo import ZoneInfo - -from gather_vision.process import item as app_items -from gather_vision import models as app_models -from gather_vision.process.component.http_client import HttpClient -from gather_vision.process.component.logger import Logger -from gather_vision.process.component.normalise import Normalise -from gather_vision.process.component.youtube_music_client import ( - YoutubeMusicClient, -) -from gather_vision.process.service.playlist import abstract as service_mixins - - -class YoutubeMusic( - service_mixins.PlaylistDetails, - service_mixins.AuthRequiredService, - service_mixins.PlaylistSource, - service_mixins.PlaylistTarget, -): - @property - def code(self): - return "ytmusic" - - @property - def title(self): - return "YouTube Music" - - @property - def collections(self): - return [] - - def __init__( - self, - logger: Logger, - http_client: HttpClient, - normalise: Normalise, - tz: ZoneInfo, - ): - self._logger = logger - self._http_client = http_client - self._normalise = normalise - self._tz = tz - - self._client = YoutubeMusicClient(logger, http_client, tz) - self._credentials: str = "" - - def get_playlist(self, identifier: str, name: str, title: str): - playlist_raw = self._client.playlist_tracks_get(identifier, limit=1) - playlist = app_items.Playlist(name=name, title=playlist_raw.get("title")) - return playlist - - def get_playlist_tracks( - self, - identifier: str, - name: str, - title: str, - start_date: Optional[datetime] = None, - end_date: Optional[datetime] = None, - limit: Optional[int] = None, - ): - self._logger.info("Retrieving tracks for YouTube Music playlist.") - playlist_raw = self._client.playlist_tracks_get(identifier, limit) - - playlist = app_items.Playlist(name=name, title=playlist_raw.get("title")) - - items = playlist_raw.get("tracks", []) - for index, item in enumerate(items): - track_number = index + 1 - track = self._build_track(name, track_number, item) - playlist.tracks.append(track) - return playlist - - def set_playlist_tracks( - self, - identifier: str, - new_tracks: Iterable[app_items.Track], - old_tracks: Iterable[app_items.Track], - ): - """Replace songs in a playlist.""" - msg = f"Setting new tracks for YouTube Music playlist '{identifier}'." - self._logger.info(msg) - return self._client.playlist_tracks_set(identifier, new_tracks, old_tracks) - - def set_playlist_details( - self, - collection_name: str, - playlist_id: str, - title: str = None, - description: str = None, - is_public: bool = None, - ): - """Set playlist details.""" - self._logger.info( - f"Setting details for YouTube Music playlist '{collection_name}'." 
- ) - result = self._client.playlist_details_set( - playlist_id=playlist_id, - title=title, - description=description, - is_public=is_public, - ) - return result == "STATUS_SUCCEEDED" - - def search_tracks( - self, playlist_name: str, track: str, artists: list[str], limit: int = 5 - ): - """Find matching tracks.""" - query_result = self._client.track_query_get( - query=f"{track} {' '.join(artists)}", - limit=limit, - ) - - # stop if there are no results - if not query_result: - self._logger.warning( - f"No match for {self.code} track matching " - f"'{playlist_name}': '{track}' - '{artists}'." - ) - return [] - - tracks = [] - for index, item in enumerate(query_result): - track_number = index + 1 - track = self._build_track(playlist_name, track_number, item) - tracks.append(track) - return tracks - - def login_init(self): - """Run the initial authorisation flow to get the credentials.""" - self._credentials = self._client.login_init() - - def login_next(self, credentials: str): - """Get the next login token.""" - self._credentials = credentials - self._client.login_token_next(credentials) - - def get_model_track( - self, info: app_models.InformationSource, track: app_items.Track - ): - if not info or not track or not track.raw: - raise ValueError(f"Cannot build playlist track from '{info}' '{track}'.") - - code = track.raw.get("videoId", "") - title = track.raw.get("title", "") - artists = ", ".join( - [ - i.get("name", "") - for i in track.raw.get("artists", []) - if i.get("name", "") - ] - ) - info_url = "https://music.youtube.com/watch?v=" + code - images = track.raw.get("thumbnails", []) - image_urls = sorted(images, reverse=True, key=lambda x: x.get("width")) - image_url = next((i.get("url") for i in image_urls if self._valid_url(i)), "") - - obj, created = app_models.PlaylistTrack.objects.update_or_create( - source=info, - code=code, - defaults={ - "title": title, - "artists": artists, - "info_url": info_url, - "image_url": image_url, - "musicbrainz_code": None, - }, - ) - return obj - - def _build_track( - self, - collection_name: str, - track_number: int, - item: dict, - ) -> app_items.Track: - track_id = item.get("videoId") - track_title = item.get("title") - track_artists = [a.get("name") for a in item.get("artists")] - - ( - title_norm, - primary_artists_norm, - featured_artists_norm, - queries, - ) = self._normalise.track(track_title, track_artists, []) - - track = app_items.Track( - service_name=self.code, - collection_name=collection_name, - track_number=track_number, - track_id=track_id, - title=title_norm, - primary_artists=primary_artists_norm, - featured_artists=featured_artists_norm, - queries=queries, - raw=item, - ) - return track - - def _valid_url(self, item: dict): - image_max = 600 - return all( - [ - item, - item.get("url"), - item.get("width") < image_max, - item.get("height") < image_max, - ] - ) diff --git a/gather_vision/process/service/transport/__init__.py b/gather_vision/process/service/transport/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/gather_vision/process/service/transport/qld_rail_events.py b/gather_vision/process/service/transport/qld_rail_events.py deleted file mode 100644 index f65972b..0000000 --- a/gather_vision/process/service/transport/qld_rail_events.py +++ /dev/null @@ -1,138 +0,0 @@ -import json - -from zoneinfo import ZoneInfo -from django.utils.text import slugify - -from gather_vision.process.component.http_client import HttpClient -from gather_vision.process.component.logger import Logger -from 
gather_vision.process.component.normalise import Normalise -from gather_vision.process.item.transport_event import TransportEvent - - -class QldRailEvents: - code = "qldrail" - title = "Queensland Rail Track Closures" - short_title = "Qld Rail" - - page_url = ( - "https://www.queenslandrail.com.au/forcustomers/trackclosures/12monthcalendar" - ) - cal_url = "https://www.queenslandrail.com.au/SPWebApp/api/ContentQuery/GetItems" - params = { - "WebUrl": "/Customers", - "ListName": "Planned Track Closings", - "ViewFields": [ - "Title", - "Description", - "EventDate", - "EndDate", - "ID", - "TrackClosureName0", - "LineAffected", - "fRecurrence", - "fAllDayEvent", - "WorksInclude", - "Is_x0020_CRR_x0020_Event", - ], - "RowLimit": 3000, - } - headers = { - "Accept": "application/json", - "Host": "www.queenslandrail.com.au", - "Origin": "https://www.queenslandrail.com.au", - "Referer": "https://www.queenslandrail.com.au/forcustomers/trackclosures/12monthcalendar", # noqa: E501 - } - - def __init__( - self, - logger: Logger, - http_client: HttpClient, - normalise: Normalise, - tz: ZoneInfo, - ): - self._logger = logger - self._http_client = http_client - self._normalise = normalise - self._tz = tz - - def fetch(self): - items = self.get_items() - for item in items: - yield self.get_event(item) - - def get_items(self): - # GET first to populate cookies - self._http_client.get(self.page_url) - # POST to retrieve event data - r2 = self._http_client.post( - self.cal_url, json=self.params, headers=self.headers - ) - # the json has syntax errors - fix_syntax_errors = r2.text.replace(r"\":,", r"\":\"\",") - items1 = json.loads(fix_syntax_errors) - items2 = json.loads(items1) - return items2 - - def get_event(self, item: dict) -> TransportEvent: - tags = self.get_links(item) - - all_day = item.get("fAllDayEvent") - if all_day is True or all_day == "True": - tags.append(("AllDayEvent", "Yes")) - - recurrence = item.get("fRecurrence") - if recurrence != "False": - tags.append(("IsReoccurrence", "Yes")) - - crr = item.get("Is_x0020_CRR_x0020_Event") - if crr is True or crr == "True": - tags.append(("IsDueToCrossRiverRail", "Yes")) - - tags.append(("Severity", "Major")) - tags.append(("Category", "track")) - - title = item.get("Title", "") - description = self.get_description(item) - source_id = slugify(item.get("ID")) - lines = self.get_lines(item) - event_start = self._normalise.parse_date(item.get("EventDate"), self._tz) - event_stop = self._normalise.parse_date(item.get("EndDate"), self._tz) - - result = TransportEvent( - raw=item, - title=title, - description=description, - tags=tags, - lines=lines, - source_id=source_id, - source_name=self.code, - event_start=event_start, - event_stop=event_stop, - ) - return result - - def get_description(self, data: dict) -> str: - items = [data.get("Description"), data.get("WorksInclude")] - items = [i for i in items if i] - result = "; ".join(items).strip(" ;") - return result - - def get_links(self, data: dict) -> list[tuple[str, str]]: - result = [] - - raw = data.get("TrackClosureName0") - - service_updates_url = "https://translink.com.au/service-updates" - - for url, title in self._normalise.extract_url_text(raw): - if url == service_updates_url: - continue - if url: - result.append((url, title)) - - return result - - def get_lines(self, data: dict) -> list[str]: - result = data.get("LineAffected", "").split(";#") - result = [i for i in result if i] - return result diff --git a/gather_vision/process/service/transport/translink_notices.py 
b/gather_vision/process/service/transport/translink_notices.py deleted file mode 100644 index ad9595d..0000000 --- a/gather_vision/process/service/transport/translink_notices.py +++ /dev/null @@ -1,220 +0,0 @@ -import re -from zoneinfo import ZoneInfo - -import xmltodict -from django.utils.text import slugify - -from gather_vision.process.component.http_client import HttpClient -from gather_vision.process.component.logger import Logger -from gather_vision.process.component.normalise import Normalise -from gather_vision.process.item.transport_event import TransportEvent - - -class TranslinkNotices: - code = "translink" - title = "Translink Service Updates" - short_title = "Translink" - - # from https://translink.com.au/about-translink/open-data - # see also https://translink.com.au/service-updates - notice_url = "https://translink.com.au/service-updates/rss" - page_url = "https://translink.com.au/service-updates" - - summary_patterns = [ - re.compile( - r"^\((?P[^)]+)\)\s*(?P.+)\.\s*Starts\s*affecting:\s*(?P.+)\s*Finishes affecting:\s*(?P.+)$" # noqa: E501 - ), - re.compile( - r"^Start\s*date:\s*(?P[^a-z]+),\s*End\s*date:\s*(?P[^a-z]+),\s*Services:\s*(?P.+)$" # noqa: E501 - ), - re.compile( - r"^\((?P[^)]+)\)\s*(?P.+)\.\s*Starts\s*affecting:\s*(?P.+)$" # noqa: E501 - ), - re.compile( - r"^Start\s*date:\s*(?P[^a-z]+),\s*Services:\s*(?P.+)$" - ), - ] - - title_patterns = [ - re.compile( - r"^(?P.+)\s*[:-]\s*temporary\s*stop\s*closure$", - re.IGNORECASE, - ), - re.compile( - r"^(?P.+)\s*[:-]\s*temporary\s*stop\s*closures$", - re.IGNORECASE, - ), - re.compile(r"^(?P.+)\s*carpark\s*closure$", re.IGNORECASE), - ] - - tag_keys = { - "Current": "When", - "Upcoming": "When", - "Minor": "Severity", - "Major": "Severity", - "Informative": "EventType", - } - - def __init__( - self, - logger: Logger, - http_client: HttpClient, - normalise: Normalise, - tz: ZoneInfo, - ): - self._logger = logger - self._http_client = http_client - self._normalise = normalise - self._tz = tz - - def fetch(self): - items = self.get_data() - for item in items.get("rss", {}).get("channel", {}).get("item", []): - event = self.get_event(item) - - # ignore events where the lines are all numbers - lines = event.lines - if lines and all(i[-1].isnumeric() for i in lines): - continue - yield event - - def get_data(self): - r = self._http_client.get(self.notice_url) - data = xmltodict.parse(r.content) - return data - - def get_event(self, item: dict) -> TransportEvent: - tz = self._tz - - tags = [] - - # item data - title = item.get("title", "").strip("⚠ⓘ☒").strip() - description = self._normalise.extract_html_data(item.get("description")) - link = item.get("link", "").strip() - guid = slugify(item.get("guid" "").split("/")[-1]) - categories = item.get("category") - - # links - if link: - tags.append(("Link", link)) - - # categories - if isinstance(categories, list): - for category in categories: - tags.append((self.tag_keys[category], category)) - elif isinstance(categories, str): - tags.append((self.tag_keys[categories], categories)) - else: - raise ValueError() - - # description - summary_match = self._normalise.regex_match( - self.summary_patterns, description, unmatched_key="description" - ) - - event_type = summary_match.get("type") - if event_type: - tags.append(("EventType", event_type)) - - description = summary_match.get("description", "") - - lines = summary_match.get("services", "").split(",") - lines = sorted([i.strip(" .") for i in lines if i and i.strip(" .")]) - - event_start = 
self._normalise.parse_date(summary_match.get("date_start"), tz) - event_stop = self._normalise.parse_date(summary_match.get("date_stop"), tz) - - # title data - title_text = self._normalise.extract_html_data(title) - title_match = self._normalise.regex_match( - self.title_patterns, title_text, unmatched_key="description" - ) - - locations = [title_match.get("location")] + title_match.get( - "locations", "" - ).split(",") - locations = [i.strip() for i in locations if i and i.strip()] - if locations: - tags.append(("Locations", ", ".join(locations))) - - if self._is_station_closure(title_text): - tags.append(("Category", "station")) - if self._is_carpark_closure(title_text): - tags.append(("Category", "carpark")) - if self._is_track_closure(title_text): - tags.append(("Category", "track")) - if self._is_accessibility_closure(title_text): - tags.append(("Category", "accessibility")) - if self._is_stop_closure(description): - tags.append(("Category", "stop")) - - description += ", " + title_match.get("description", "") - description = description.strip(" ,") - - result = TransportEvent( - raw=item, - title=title, - description=description if description != title else "", - tags=tags, - lines=lines, - source_id=guid, - source_name=self.code, - event_start=event_start, - event_stop=event_stop, - ) - return result - - def _is_station_closure(self, value: str): - value = slugify(value) - return all( - [ - "station" in value, - ("closure" in value or "reopening" in value), - "park" not in value, - "escalator" not in value, - "lift" not in value, - ] - ) - - def _is_carpark_closure(self, value: str): - value = slugify(value) - return all( - [ - "station" in value, - ("closure" in value or "changes" in value or "alternatives" in value), - ("park" in value or "parking" in value), - ] - ) - - def _is_track_closure(self, value: str): - value = slugify(value) - return all( - [ - "track" in value, - "closure" in value, - "park" not in value, - "car" not in value, - "station" not in value, - "escalator" not in value, - "lift" not in value, - ] - ) - - def _is_accessibility_closure(self, value: str): - value = slugify(value) - return all( - [ - ("closure" in value or "outage" in value), - ("lift" in value or "escalator" in value), - ] - ) - - def _is_stop_closure(self, value: str): - value = slugify(value) - return any( - [ - "temporary-stop-closure" == value, - "temporary-stop-closures" == value, - ] - ) diff --git a/gather_vision/process/support/__init__.py b/gather_vision/process/support/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/gather_vision/process/support/select_format_mixin.py b/gather_vision/process/support/select_format_mixin.py deleted file mode 100644 index e0f166a..0000000 --- a/gather_vision/process/support/select_format_mixin.py +++ /dev/null @@ -1,40 +0,0 @@ -from django.utils.html import escape - - -class SelectFormatMixin: - def select_format(self, extension: str): - """ - Select the format to render data. - The format can be selected by - extension in the final path segment or querystring 'ext'. - - Returns dict with keys 'status_code', 'message', 'extension', 'media_type'. 
- """ - - available_formats = { - "txt": "text/plain", - "json": "application/json", - "csv": "text/csv", - "ics": "text/calendar", - } - default_ext = "txt" - - if not extension or not extension.strip(): - extension = default_ext - - extension = extension.strip().strip(".") - - if extension not in available_formats: - return { - "status_code": 406, - "message": escape(f"Format {extension} is not available."), - "extension": None, - "media_type": None, - } - - return { - "status_code": 200, - "message": escape("Found matching media type."), - "extension": escape(extension), - "media_type": available_formats[extension], - } diff --git a/gather_vision/process/support/transport/__init__.py b/gather_vision/process/support/transport/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/gather_vision/process/support/transport/render_csv_mixin.py b/gather_vision/process/support/transport/render_csv_mixin.py deleted file mode 100644 index 4bebdf7..0000000 --- a/gather_vision/process/support/transport/render_csv_mixin.py +++ /dev/null @@ -1,39 +0,0 @@ -import csv -from typing import Iterable - -from django.http import StreamingHttpResponse -from django.utils import timezone - -from gather_vision import models as app_models - - -class RenderCsvMixin: - class Echo: - """An object that implements just the write method of the file-like - interface. - """ - - def write(self, value): - """Write the value by returning it, instead of storing in a buffer.""" - return value - - def get_data_csv(self, items: Iterable[app_models.TransportItem]): - time_stamp = timezone.now() - time_formatted = time_stamp.isoformat(timespec="seconds") - time_filename = time_formatted.replace(":", "_").replace("+", "_") - file_name = f"transport-{time_filename}.csv" - - return StreamingHttpResponse( - self._write_csv(items), - content_type="text/csv;charset=utf-8", - headers={"Content-Disposition": f'attachment; filename="{file_name}"'}, - ) - - def _write_csv(self, items: Iterable[app_models.TransportItem]): - pseudo_buffer = RenderCsvMixin.Echo() - csv_headers = app_models.TransportItem.long_csv_headers() - writer = csv.DictWriter(pseudo_buffer, csv_headers) - yield writer.writeheader() - - for item in items: - yield writer.writerow(item.long_csv()) diff --git a/gather_vision/process/support/transport/render_ics_mixin.py b/gather_vision/process/support/transport/render_ics_mixin.py deleted file mode 100644 index 40d446c..0000000 --- a/gather_vision/process/support/transport/render_ics_mixin.py +++ /dev/null @@ -1,50 +0,0 @@ -from datetime import datetime, time -from typing import Iterable - -from django.http import HttpResponse -from django.utils import timezone - -from gather_vision import models as app_models -from gather_vision.process.component.ical import ICal - - -class RenderIcsMixin: - def get_data_ics(self, items: Iterable[app_models.TransportItem]): - time_stamp = timezone.now() - time_formatted = time_stamp.isoformat(timespec="seconds") - time_filename = time_formatted.replace(":", "_").replace("+", "_") - file_name = f"transport-{time_filename}.ics" - - return HttpResponse( - self._write_ics(items), - content_type="text/plain;charset=utf-8", - headers={"Content-Disposition": f'attachment; filename="{file_name}"'}, - ) - - def _write_ics(self, items: Iterable[app_models.TransportItem]): - calendar = ICal( - provider="gather-vision", - title="Public Transport Notices", - description="Changes and closures in the public transport network.", - tz="Australia/Brisbane", - ttl="PT6H", - ) - - for item in 
items: - calendar.add_event( - title=item.title, - body=item.body, - date_start=datetime.combine(item.start_date, time.min), - date_stop=datetime.combine(item.stop_date or item.start_date, time.max), - location="", - url=item.view_url or "", - uid="-".join([item.source.name, item.source_identifier]), - date_stamp=item.modified_date, - date_modified=item.modified_date, - date_created=item.created_date, - # sequence_num="", - ) - - cal = calendar.get_calendar() - cal_str = cal.to_ical() - return cal_str diff --git a/gather_vision/process/support/transport/render_json_mixin.py b/gather_vision/process/support/transport/render_json_mixin.py deleted file mode 100644 index 3fa67c0..0000000 --- a/gather_vision/process/support/transport/render_json_mixin.py +++ /dev/null @@ -1,10 +0,0 @@ -from typing import Iterable - -from django.http import JsonResponse - -from gather_vision import models as app_models - - -class RenderJsonMixin: - def get_data_json(self, items: Iterable[app_models.TransportItem]): - return JsonResponse({"items": [i.long_dict() for i in items]}) diff --git a/gather_vision/process/support/transport/render_txt_mixin.py b/gather_vision/process/support/transport/render_txt_mixin.py deleted file mode 100644 index 1c99bcd..0000000 --- a/gather_vision/process/support/transport/render_txt_mixin.py +++ /dev/null @@ -1,20 +0,0 @@ -import os -from typing import Iterable - -from django.http import StreamingHttpResponse -from django.utils import timezone - -from gather_vision import models as app_models - - -class RenderTxtMixin: - def get_data_txt(self, items: Iterable[app_models.TransportItem]): - time_stamp = timezone.now() - time_formatted = time_stamp.isoformat(timespec="seconds") - time_filename = time_formatted.replace(":", "_").replace("+", "_") - file_name = f"transport-{time_filename}.txt" - return StreamingHttpResponse( - (item.long_str() + os.linesep for item in items), - content_type="text/plain;charset=utf-8", - headers={"Content-Disposition": f'filename="{file_name}"'}, - ) diff --git a/gather_vision/static/gather_vision/android-chrome-192x192.png b/gather_vision/static/gather_vision/android-chrome-192x192.png deleted file mode 100644 index 43729a81a9ec6da79dd709e44d6549baf452a626..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 7717 zcmbVRbx>4M+rO|#vw(|)2nfg`u%HMM0+NbLxJWlkhe$~;v2>{*-Hmj2H%LiK%2J{t zAs}7v_5Hq?@BeRRYv$fFd(U&ud43h9@=~6H^foB~02Ig&gkl*t=X7Cs z+RF*^q`QxG%E;rHIO91`T%62nJ+8L>;&(ap4t^q;*Iv~TD&}tZ#fZe597E(H>g1>a zR+GC&y4Uhy=fDpl+tk+4r-T8-#WtAV(<8&z=e2XsFJ;3<8omyzxX{Yfjs@P%Wf(MN zReuMkasCypU}0ir9lW2|DEigHxZugW9klyMP@&u#kxx z7usY~Ej)A(RWA&s!ksqpq( zoNQA1cYFq2y(+Jg=@*0qUi)-UXRu+f$ z=6p8WlOw0>$5iHblk=$EZN5&XF?CL6*fb(1?3VGSG_9HN^axnzHvhVMa+pSc@N`II z&!`UB4(l)X^;5Md-C7$U-F!;NaDJ946g26+C=7&t1ka{mKzm_)u^vcaz+gf_f`QGBgD=k9{{ zS(LxF7-zouWYU_eqpOotYvt~KlSJs_V*sHo`1C17F*7N$G%$(Z?x8k)(M!B9RtjRt z!p4hYsfX*R`B+>~8ZXP|r+nK3&SMo7F^b@I`Tp`QFs+6qngKm9etpls0nCKR@>h_NpDZ1!MZ;}Srb{8@>UA#1EI||D zo%@q8v*>fy) zU+$V6b83VGnNx zahHAjhrn{#`FL%3`|^G*-QW(3G7TVA*h)pm;uC2lUiG&xp!f44Z0py<062W^0WHff zKqWJb1oLE=KK(bOdDwMKn=QPrKc5EAZwt6^D0ZD2$RPEbl)ys1-Kr=6{Jn@eWRyh4 z4Dj-seg9qROjb_oo_|c|AyS7Aq=Vuvq0!MI40SKS=x3Vu!5_f!OkTP#= zF^#^3O+LEm?k{iw@9avE`n95Rh2Ie*k(7Q>xIr>vg6gN$xs1PsZFMYgdc>w=zl!@+BZhRm)(V@bU2Y$)-3fA*|NB#{_DKT*k6dRPNvLGE{d;ZyEaJMUK z$XBDMH9O1%SmxX6OfRKBAP+l(J2aj3Ao5G@X640?GMI%Z zaU-^nark&If@R~_a0CMmFVZPZOYx#~7uYQd-wvNEIU1viXh?wkAcxOl_iQGV&QfP_ z1|Rf)kks6e5-OHhoFl~CqCd)2=mZ>um(i&2U0gg}Bfkc`x1(Hv7w|?xxl{3NmfozP 
[GIT binary patch data omitted: the remainder of the android-chrome-192x192.png deletion, plus the deletions of android-chrome-512x512.png and apple-touch-icon.png, contain no readable content.]
diff --git 
a/gather_vision/static/gather_vision/favicon-16x16.png b/gather_vision/static/gather_vision/favicon-16x16.png deleted file mode 100644 index 2d3acc5b817cd3e6fe6f6ba50ffcbffc8c6ca4ab..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 372 zcmV-)0gL{LP)(6L1Cu7NsQ;6~cQRc&dCxkT4^6LZZ zWMKr{9>2If3gc~^EU-LITc30vE{eNEn0ljtF!fN}B{tt~AHX=pHf07x2QQ*P}QYR|@u`Lav-hLy8he{A!C+bSZ z*@^uX1v@JRqLCESnyCZDAnL7*gPMB=##UgAq3%}Xve8t>iYA$M&%Z7S5eQa)9ec3ji>Ec4Hxu0OVR3OrKo< zK;N^H1LRvrn9WCPh+gXw%7d^R5;e|eM~Ab^+#5D}T+0fbM^3(f`D$;1bU zH~^90djWPb5h@2={0;$3gwieqpc56j%iVHxW)~ZP?n6WPX=;Vj5!;bRQZxHq$8gcqM4Or90Y> z=ihJ1x8Nnryj{Y|+aT!-7%f}j=i1xy%#*Ir}AD57mk0qheNo*XBcC%6Dfgk|4A zvTvT?0`xtL8$1cX$4m%~Wamx+eb3?wOVXh<-#S8HbSOh)N6vxNX(yJc4bFw(bi)W9XNrGq!%5MQh3JZ6U&!P$h5wK7M4h?_cn|pPu);;W9=@grd+dkI+(^ z|72`kK6AA`pH;#7`o?LB8tmt9O_R%xtILJcGN!w~N7pd=B99@tZ~ZV^P29e)MApR{ zf2%&Je)@^q7AnU2`)Gf(?F-d=txsilepP%Iq~{%D7#pPL@6666B|QA}hL$m5w5HF` zqvuMmqPo)hdF=YLQ_bDmD78uLI@77KnfLxD^MjqSlc~u3Oy~v&2X%!9edaw+Xw3Yc zls3AW%2;Fv>CJndhTf0Gw9(h#2M2BX%=@UP-&WwD`fch;%Q$eNEA4VTCJynL3+mFdw&0oT0hJSc7`sjLaW!WWeuA7wb72qO&%8-6k>D|XNF0A zSt{`C%5T^>UGsvcO7Lq81-mqbu#U)B7}@B)%4uW&0Zp$XRx1 zy*@W=J-t_LmA^A`h9L&iW{izKsvmmdD6uT8GRMuowTs@LYT38$+%DRmHW;u2bHUg! z9`qq2xL=IlI#2Hf=`&eKPMW!jNYs0Tnb|}$1ARAdW`MgVup+s+=&5e%bp!Z9=wxW>|y60iSjS@3-&Db zvfzGNPG|F8@}I*)xo@Jpmwj{m*5vsovWq!zKGT&n7c}~~Jcnd-jmS^d`FGU4!2jVL z_at&f>%Aeh+&xMCx>--xH&3mpZ^<^*<+8o?3GXdj<|*+Q!2~v0*DGtk&%XFd>x5@S zH4p4E_wQ)jvhEY}e}J?aYfI)`bf|BhTvH9~qg!X_yIk&KN4L(bxLw5Nz08Ben#M}o z{Jkx6RhQf^NEFonXumIgm6n4qi5_Pw&9~`Dt|advmajaLuRQ+3dPo%GqjcWZ^Rqbu z7N70X7e$lkU6tZY`yU62xJ_fH^d-WWGA@jfwZ|R&D9L49<)_7uEM$&po#iYu3>(<@ z_a0N{uU%42;=iLdPIcm!2LGs*sjBgCPGt6J*=cmZ2JoQ_+`g%n%Gt_AhotzyzVzTa zEen~*21A%_UHX{EMu{ai1Um&CRvm3An~NvGPcBT1LIwqy%tIW%4busme0^|jQHSfd z2eb}R^AHz5GQbXRgB_Xlhhd7_2Os?3@Gs{}aqz?2?=#3?|ELspqboThI=J|fD}L^7 zf<6OwWEr`Yd}0$9Uvj~Z|E)ZFZ%(mGC4I)GchQ$_6?`?!m%{Mx@aPXthW9z zUz)q8NsY`;Q8PN0s~^uC)Ri`UbM~xwXg*`pA?QnMj&Ig}LjQ+tpX+Mk$?9XRdQMoY z)r>PXthRpUVa}dLwfcCg&Qm^aU#xyPe?ncj-mfcd`p9d(6CD@}jg4Q@esU!Ile0SK z-Ex(f!nYs3@r;!356GP)I4|G4TJ(L>xP0@P)`2nj65j@(g-1C#yayB5YE&Vsef5azVlQY4AYWvSc{Ab2}_Gdnb51jYOzc~ZA z)D5z)vKCpBdi`!+WLdvg_1Z43U)j&tr(I+)228|UFa~`m?cXMT68nh{#31IuA3rCx zFVgQA!yXrvKcvMIhdy|oStEM@^u!s~GZnw0kNtt-n#aY#kz_1(51wb(oA4#p12L&X zDzTOQfp;c( z;SA%K`=EU(?0&{}26n-ZE)U9=TztlFTUY$hz`L5h1pgABak!s}gWu>7=YGj=tEjfV zKa@K>Tb!}c#>EexFkfPCNppWVVcUY@{cW84nI!npA?Qood#Aa-)%WM;j%poN=(|+6 zoG&SUqXTEid-vz*zWo)xUo40=l}`Rz$A^S1nspx1@&zc5Dojr`^W?B+Kpg?Y%X5IxRTV@;>k8%+GxL7t8LZo^;) M|Ksx50!c0KFMhR&iU0rr diff --git a/gather_vision/static/gather_vision/graph-change-over-time.js b/gather_vision/static/gather_vision/graph-change-over-time.js deleted file mode 100644 index 1499787..0000000 --- a/gather_vision/static/gather_vision/graph-change-over-time.js +++ /dev/null @@ -1,5 +0,0 @@ -const element = 'graph-change-over-time-container'; -const data = JSON.parse(document.getElementById('graph-change-over-time-data').textContent); -const layout = JSON.parse(document.getElementById('graph-change-over-time-layout').textContent); -const config = JSON.parse(document.getElementById('graph-change-over-time-config').textContent); -Plotly.newPlot(element, data, layout, config); diff --git a/gather_vision/static/gather_vision/site.webmanifest b/gather_vision/static/gather_vision/site.webmanifest deleted file mode 100644 index 12ecd16..0000000 --- a/gather_vision/static/gather_vision/site.webmanifest +++ /dev/null @@ -1 +0,0 @@ 
-{"name":"Vision","short_name":"Vision","icons":[{"src":"/android-chrome-192x192.png","sizes":"192x192","type":"image/png"},{"src":"/android-chrome-512x512.png","sizes":"512x512","type":"image/png"}],"theme_color":"#004000","background_color":"#004000","display":"standalone"} diff --git a/gather_vision/templates/gather_vision/about/index.html b/gather_vision/templates/gather_vision/about/index.html deleted file mode 100644 index b076fca..0000000 --- a/gather_vision/templates/gather_vision/about/index.html +++ /dev/null @@ -1,251 +0,0 @@ -{% extends "gather_vision/layout.html" %} -{% load gather_vision_extras %} -{% load static %} - -{% block page_title %}{% view_attr 'page_title' %}{% endblock %} - -{% block page_body %} - {% bs_breadcrumb 'vision:home-index' 'vision:about-index' %} - -

    About

    - -

    About the website and the legal notices.

    - -
    - -
    Purpose
    -

    What are we doing?

    -

    - This website gathers data and tries to - provide a new vision and insights from the data. -

    - -
    - -
    Contact
    -

    Let's chat!

    -

    - Get in touch with the creators of this website on - Twitter or - Github. -

    -

    - If you find a problem with the website, missing or incorrect data, - or something else isn't right, please - let us know. -

    - -
    -
    Feedback
    -

    We welcome feedback and suggestions.

    -

    - All types of feedback are welcome – - corrections, feature suggestions, questions, criticisms. -

    -

    You can contact the creators of this website - using the methods in the Contact section of this page. -

    - -
    - - -

    The website code and the data.

    -

    - This website uses data from other websites. - That data remains the copyright of the source website. -

    -

    - The - source code - is available under the - Apache License 2.0. -

    -

    - The website icon is the 'sparkle' emoji image by - Twitter Emoji (CC-BY). -

    - - -
    -
    Privacy
    -

    This website collects information about your browser.

    -

    - This website collects information about your web browser and computer - and pages you visit on this website. - This information must be collected to be able to serve the website. - Any information collected will not be shared unless required by law. -

    -

    - Please note that browsers may have built-in or add-on - functionality that can access webpages and send information - to remote locations. This is not under our control. -

    -

    - This website may use local browser storage mechanisms, - including but not limited to - cookies - and - local storage. -

    -

    - This website may use third-party tools to - monitor errors and analyse website usage. - These tools have their own privacy statements. -

    -

    - This website uses - Honeybadger - to monitor the website and address errors. -

    - -
    - -
    Terms of Service
    -

    This website is maintained as a hobby project.

    -

    There are no guarantees of future updates or continuing support.

    -

    - Your use of this website and other data, content, or source code from - this website does not create or imply a relationship between you and us. -

    -

    - Essentially, this website aims to be useful and helpful, - but this website is maintained in spare time. - So, please take it easy. -

    - -
    - -
    Disclaimer
    -

    - This website aims to contain relevant and up-to-date information; - however, some content or links may be out of date or incorrect. -

    -

    - The data, content, and source code on the website are provided "as is", - without warranties of any kind, to the extent permitted by law. -

    -

    - Your use of the information on this website is at your own discretion and risk. -

    -

    - Please consult any relevant official information to ensure - your decisions are based on reliable information. -

    - -
    -
    Data Sources and APIs
    -

    Where is the data sourced from and sent to?

    -

    - This website uses data and APIs from various places. - Please consult these places directly to obtain up-to-date information. -

    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    Type of data - Website - Links
    Playlist - - Spotify - - - - API - -
    Playlist - - YouTube Music - - - - ytmusic library - -
    Outages - - Energex - - - - Emergency outages - -
    Petitions - - Brisbane City Council - - - - Brisbane City Council Petitions - -
    Petitions - - Queensland Government - - - - Queensland Government Petitions - -
    Transport - - Queensland Rail - - - - Queensland Rail Track Closures - -
    Transport - - Translink - - - - Translink Service Updates - -
    - -
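    (The Playlist rows in the table above are exercised by the deleted playlist tests later in this patch, which hit the ABC Radio "plays" endpoint with a fixed set of query-string keys. The standalone sketch below reuses that endpoint and those keys via the requests library; the service id and date range are example values, and the project itself goes through its own HttpClient wrapper rather than calling requests directly.)

        from datetime import datetime, timedelta

        import requests  # stand-in for the project's HttpClient wrapper

        # Query-string keys mirror the deleted AbcRadio.build_qs helper.
        start = datetime.now() - timedelta(days=8)
        end = start + timedelta(days=7)
        params = {
            "order": "desc",
            "limit": 20,
            "service": "doublej",  # example collection id taken from the deleted tests
            "from": start.strftime("%Y-%m-%dT%H:%M:%SZ"),
            "to": end.strftime("%Y-%m-%dT%H:%M:%SZ"),
        }
        response = requests.get(
            "https://music.abcradio.net.au/api/v1/recordings/plays.json", params=params
        )
        for item in response.json().get("items", []):
            print(item["title"], [a["name"] for a in item.get("artists", [])])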
    - -{% endblock %} - - - diff --git a/gather_vision/templates/gather_vision/contact_tracing/index.html b/gather_vision/templates/gather_vision/contact_tracing/index.html deleted file mode 100644 index eac831d..0000000 --- a/gather_vision/templates/gather_vision/contact_tracing/index.html +++ /dev/null @@ -1,8 +0,0 @@ -{% extends "gather_vision/layout.html" %} -{% load gather_vision_extras %} - -{% block page_title %}{% view_attr 'page_title' %}{% endblock %} - -{% block page_body %} - {% bs_breadcrumb 'vision:home-index' 'vision:contact-tracing-index' %} -{% endblock %} diff --git a/gather_vision/templates/gather_vision/home/index.html b/gather_vision/templates/gather_vision/home/index.html deleted file mode 100644 index 46c649c..0000000 --- a/gather_vision/templates/gather_vision/home/index.html +++ /dev/null @@ -1,76 +0,0 @@ -{% extends "gather_vision/layout.html" %} -{% load gather_vision_extras %} - -{% block page_title %}{% view_attr 'page_title' %}{% endblock %} - -{% block page_body %} - {% bs_breadcrumb 'vision:home-index' %} - -
    -
    -
    -
    Transport
    -
    -
    Public transport notices
    -

    - Notices collected from the agencies that operate the services. - Options to filter by category. -

    -

    - Provided in iCalendar (ics), web page, CSV, and JSON API formats. -

    - - Check the notices - -
    -
    -
    -
    -
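    (The format list in the Transport card above corresponds to the extension-to-media-type lookup removed near the top of this patch. A condensed, framework-free sketch of that lookup follows; the function and constant names are ours, and the HTML escaping of the message, which the deleted code does via Django, is left out.)

        AVAILABLE_FORMATS = {
            "txt": "text/plain",
            "json": "application/json",
            "csv": "text/csv",
            "ics": "text/calendar",
        }

        def select_format(extension: str, default_ext: str = "txt") -> dict:
            # Fall back to the default when no extension is supplied.
            if not extension or not extension.strip():
                extension = default_ext
            extension = extension.strip().strip(".")
            if extension not in AVAILABLE_FORMATS:
                return {"status_code": 406, "message": f"Format {extension} is not available."}
            return {
                "status_code": 200,
                "extension": extension,
                "media_type": AVAILABLE_FORMATS[extension],
            }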
    -
    Petitions
    -
    -
    Online petitions
    -

    - Keeps track of the number of signatures for petitions over time. - Can reveal the growth pattern of signature counts. -

    - - Follow the signature counts - -
    -
    -
    -
    -
    -
    Outages
    - -
    -
    Electricity outages
    -

    - A log of the electricity outages over time. - Shows how long outages lasted and - any changes in the number of customers affected. -

    - - Have a look at the outages - -
    -
    -
    -
    -
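    (The outages and petitions pages removed in this patch draw their change-over-time graph by serialising three template variables with Django's json_script tag; the deleted graph-change-over-time.js then parses those blobs and hands them to Plotly.newPlot(element, data, layout, config). The view code that builds the variables is not part of this patch, so the values below are only a hypothetical sketch of Plotly-style data, layout and config payloads.)

        import json

        # Hypothetical payloads; the project's actual outage data model is not shown in this patch.
        context = {
            "graph_data": [
                {
                    "x": ["2022-09-01", "2022-09-02"],
                    "y": [120, 80],
                    "type": "scatter",
                    "name": "customers affected",
                },
            ],
            "graph_layout": {"title": "Electricity outages over time"},
            "graph_config": {"displayModeBar": False},
        }

        # json_script renders each value as a <script type="application/json"> element,
        # which the deleted JS reads back with JSON.parse before calling Plotly.newPlot.
        print(json.dumps(context["graph_data"]))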
    -
    Playlists
    -
    -
    Music playlists
    -

    - Builds music playlists for music streaming services - from a number of online music charts. -

    - - See which tracks are on the playlists - -
    -
    -
    -
    -{% endblock %} diff --git a/gather_vision/templates/gather_vision/layout.html b/gather_vision/templates/gather_vision/layout.html deleted file mode 100644 index 0720949..0000000 --- a/gather_vision/templates/gather_vision/layout.html +++ /dev/null @@ -1,81 +0,0 @@ -{% load gather_vision_extras %} -{% load static %} - - - - - - - - - - - - - - - - - {% block page_links %}{% endblock %} - - {% block page_title %}Page{% endblock %} | Vision - - - - - -
    - {% block page_body %}Vision{% endblock %} -
    - - - - -{% block page_scripts %}{% endblock %} - - - diff --git a/gather_vision/templates/gather_vision/nav_breadcrumb.html b/gather_vision/templates/gather_vision/nav_breadcrumb.html deleted file mode 100644 index 30f193f..0000000 --- a/gather_vision/templates/gather_vision/nav_breadcrumb.html +++ /dev/null @@ -1,14 +0,0 @@ - diff --git a/gather_vision/templates/gather_vision/nav_link.html b/gather_vision/templates/gather_vision/nav_link.html deleted file mode 100644 index 978e719..0000000 --- a/gather_vision/templates/gather_vision/nav_link.html +++ /dev/null @@ -1 +0,0 @@ -{{ display }} diff --git a/gather_vision/templates/gather_vision/outages/index.html b/gather_vision/templates/gather_vision/outages/index.html deleted file mode 100644 index a2f25a7..0000000 --- a/gather_vision/templates/gather_vision/outages/index.html +++ /dev/null @@ -1,20 +0,0 @@ -{% extends "gather_vision/layout.html" %} -{% load gather_vision_extras %} -{% load static %} - -{% block page_title %}{% view_attr 'page_title' %}{% endblock %} - -{% block page_body %} - {% bs_breadcrumb 'vision:home-index' 'vision:outages-index' %} - - {{ graph_data | json_script:"graph-change-over-time-data" }} - {{ graph_layout | json_script:"graph-change-over-time-layout" }} - {{ graph_config | json_script:"graph-change-over-time-config" }} - -
    - -{% endblock %} - -{% block page_scripts %} - -{% endblock %} diff --git a/gather_vision/templates/gather_vision/petitions/index.html b/gather_vision/templates/gather_vision/petitions/index.html deleted file mode 100644 index 5e64fe2..0000000 --- a/gather_vision/templates/gather_vision/petitions/index.html +++ /dev/null @@ -1,19 +0,0 @@ -{% extends "gather_vision/layout.html" %} -{% load gather_vision_extras %} -{% load static %} - -{% block page_title %}{% view_attr 'page_title' %}{% endblock %} - -{% block page_body %} - {% bs_breadcrumb 'vision:home-index' 'vision:petitions-index' %} - - {{ graph_data | json_script:"graph-change-over-time-data" }} - {{ graph_layout | json_script:"graph-change-over-time-layout" }} - {{ graph_config | json_script:"graph-change-over-time-config" }} - -
    - -{% endblock %} -{% block page_scripts %} - -{% endblock %} diff --git a/gather_vision/templates/gather_vision/playlists/index.html b/gather_vision/templates/gather_vision/playlists/index.html deleted file mode 100644 index 08566d2..0000000 --- a/gather_vision/templates/gather_vision/playlists/index.html +++ /dev/null @@ -1,8 +0,0 @@ -{% extends "gather_vision/layout.html" %} -{% load gather_vision_extras %} - -{% block page_title %}{% view_attr 'page_title' %}{% endblock %} - -{% block page_body %} - {% bs_breadcrumb 'vision:home-index' 'vision:playlists-index' %} -{% endblock %} diff --git a/gather_vision/templates/gather_vision/transport/index.html b/gather_vision/templates/gather_vision/transport/index.html deleted file mode 100644 index 3ddb998..0000000 --- a/gather_vision/templates/gather_vision/transport/index.html +++ /dev/null @@ -1,94 +0,0 @@ -{% extends "gather_vision/layout.html" %} -{% load gather_vision_extras %} - -{% block page_title %}{% view_attr 'page_title' %}{% endblock %} - -{% block page_body %} - {% bs_breadcrumb 'vision:home-index' 'vision:transport-index' %} - -

    Transport Notices

    - -
    -
    -
    -
    -
    Choose a format
    -
    View transport events in different formats
    - -
    -
    -
    -
    -
    -
    -
    Choose the events to display
    -
    Show the transport events you are interested in
    - -
    -
    -
    -
    - -
    - -

    Notices ({{ items | length }})

    - -
    - - {% for item in items %} -
    -
    -
    -
    {{ item.title }}
    -
    -
    {{ item.body }}
    - {{ item.date_range }} -
    - {% for tag in item.tags %} - {{ tag.title }} - {% endfor %} -
    -
    -
    - {% endfor %} - -
    - -
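    (The notices listed above are also offered as an iCalendar feed by the render mixin removed near the top of this patch, which maps each item's title, body, dates, view URL and source identifier onto calendar events through the project's ICal wrapper. The rough standalone equivalent below uses the icalendar package directly; treating icalendar as the underlying library is an assumption, and every field value is a placeholder.)

        from datetime import date, datetime, time

        from icalendar import Calendar, Event  # assumption: the ICal wrapper builds on this package

        cal = Calendar()
        cal.add("prodid", "-//gather-vision//Public Transport Notices//EN")
        cal.add("version", "2.0")

        event = Event()
        event.add("summary", "Example notice title")                          # item.title
        event.add("description", "Example notice body")                       # item.body
        event.add("dtstart", datetime.combine(date(2022, 9, 23), time.min))   # item.start_date
        event.add("dtend", datetime.combine(date(2022, 9, 24), time.max))     # item.stop_date or start_date
        event.add("url", "https://example.invalid/notice")                    # item.view_url (placeholder)
        event.add("uid", "source-name-source-identifier")                     # source name + identifier
        cal.add_component(event)

        print(cal.to_ical().decode("utf-8"))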
    - -{% endblock %} diff --git a/gather_vision/templatetags/__init__.py b/gather_vision/templatetags/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/gather_vision/templatetags/gather_vision_extras.py b/gather_vision/templatetags/gather_vision_extras.py deleted file mode 100644 index 132ed7b..0000000 --- a/gather_vision/templatetags/gather_vision_extras.py +++ /dev/null @@ -1,40 +0,0 @@ -from django import template -from django.urls import reverse, resolve -from django.utils.html import format_html - -register = template.Library() - - -@register.inclusion_tag("gather_vision/nav_link.html", takes_context=True) -def bs_nav_link(context, url_name, display: str = None, **kwargs): - """Render a bootstrap nav anchor element.""" - item_url = reverse(url_name, kwargs=kwargs) - if not display: - view_class = resolve(item_url).func.view_class - display = view_class.page_title - is_active = context.request.path.startswith(item_url) - return { - "is_active": " active" if is_active else "", - "url": item_url, - "aria": format_html(' aria-current="page"') if is_active else "", - "display": display, - } - - -@register.simple_tag(takes_context=True) -def view_attr(context, attr): - """Render an attribute from the view class.""" - view_class = resolve(context.request.path).func.view_class - return getattr(view_class, attr) - - -@register.inclusion_tag("gather_vision/nav_breadcrumb.html", takes_context=True) -def bs_breadcrumb(context, *args): - """Render a list of breadcrumbs.""" - items = [] - for url_name in args: - item_url = reverse(url_name) - view_class = resolve(item_url).func.view_class - page_title = view_class.page_title - items.append({"text": page_title, "url": item_url}) - return {"items": items} diff --git a/gather_vision/tests/__init__.py b/gather_vision/tests/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/gather_vision/tests/process/__init__.py b/gather_vision/tests/process/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/gather_vision/tests/process/component/__init__.py b/gather_vision/tests/process/component/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/gather_vision/tests/process/component/test_abc_radio.py b/gather_vision/tests/process/component/test_abc_radio.py deleted file mode 100644 index 4a39a89..0000000 --- a/gather_vision/tests/process/component/test_abc_radio.py +++ /dev/null @@ -1,136 +0,0 @@ -import uuid -from datetime import datetime, timedelta - -from zoneinfo import ZoneInfo -import requests_mock -from django.test import TestCase - -from gather_vision.process.component.http_client import HttpClient -from gather_vision.process.component.logger import Logger -from gather_vision.process.component.normalise import Normalise -from gather_vision.process.service.playlist.abc_radio import AbcRadio - - -class TestPlaylistsComponentAbcRadio(TestCase): - def setUp(self) -> None: - logger = Logger() - tz = ZoneInfo("Australia/Brisbane") - normalise = Normalise() - http_client = HttpClient(logger) - - self._logger = logger - self._normalise = normalise - self._http_client = http_client - self._tz = tz - - self._service = AbcRadio(logger, http_client, normalise, tz) - - def test_build_url_missing(self): - # arrange - - # act - with self.assertRaises(ValueError) as ar: - self._service.build_qs( - name=None, start_date=datetime.now(), end_date=datetime.now() - ) - - # assert - self.assertEqual(str(ar.exception), "Must provide name.") - - def test_build_qs(self): - # arrange - 
collection_name = "testing" - start = datetime.now() - timedelta(days=8) - end = start + timedelta(days=7) - order = "desc" - limit = 20 - - # act - qs = self._service.build_qs( - name=collection_name, - start_date=start, - end_date=end, - order=order, - limit=limit, - ) - - # assert - expected_qs = { - "order": order, - "limit": limit, - "service": collection_name, - "from": f"{start.year:04}-{start.month:02}-{start.day:02}T" - f"{start.hour:02}:{start.minute:02}:{start.second:02}Z", - "to": f"{end.year:04}-{end.month:02}-{end.day:02}T" - f"{end.hour:02}:{end.minute:02}:{end.second:02}Z", - } - self.assertEqual(qs, expected_qs) - - def test_get_playlist(self): - # arrange - service_name = "abcradio" - collection_name = "doublej_most_played" - collection_id = "doublej" - start_date = datetime.now() - timedelta(days=8) - end_date = start_date + timedelta(days=7) - order = "desc" - limit = 20 - self.maxDiff = None - - track_title = "title" - track_arid = f"id-{uuid.uuid4()}" - artist1 = f"artist1-{uuid.uuid4()}".replace("-", " ") - artist2 = f"artist2-{uuid.uuid4()}".replace("-", " ") - - expected_url = ( - "https://music.abcradio.net.au/api/v1/recordings/plays.json?" - + f"order={order}&limit={limit}&service={collection_id}&" - + f"from={start_date.year:04}-{start_date.month:02}-{start_date.day:02}T" - + f"{start_date.hour:02}%3A{start_date.minute:02}%3A{start_date.second:02}Z&" - + f"to={end_date.year:04}-{end_date.month:02}-{end_date.day:02}T" - + f"{end_date.hour:02}%3A{end_date.minute:02}%3A{end_date.second:02}Z" - ) - - # act - with requests_mock.Mocker() as m, self.assertLogs() as al: - m.get( - expected_url, - json={ - "items": [ - { - "title": track_title, - "arid": track_arid, - "artists": [ - {"type": "primary", "name": artist1}, - {"type": "featured", "name": artist2}, - ], - } - ] - }, - ) - - playlist = self._service.get_playlist_tracks( - identifier=collection_name, - name=collection_name, - start_date=start_date, - end_date=end_date, - limit=limit, - ) - - # assert - self.assertEqual( - [str(i) for i in playlist.tracks], - [f"{service_name}:{collection_name}:1:{artist1}:{track_title}"], - ) - self.assertEqual( - al.output, - [ - f"INFO:root:Downloading up to {limit} tracks from '{service_name}' collection '{collection_name}'.", - f"INFO:root:Retrieved 1 tracks from '{service_name}' collection '{collection_name}'.", - ], - ) - self.assertEqual(len(playlist.tracks), 1) - self.assertEqual( - str(playlist.tracks[0]), - f"{service_name}:{collection_name}:1:{artist1}:{track_title}", - ) diff --git a/gather_vision/tests/process/component/test_last_fm.py b/gather_vision/tests/process/component/test_last_fm.py deleted file mode 100644 index 4cd72df..0000000 --- a/gather_vision/tests/process/component/test_last_fm.py +++ /dev/null @@ -1,118 +0,0 @@ -import uuid - -from zoneinfo import ZoneInfo -import requests_mock -from django.test import TestCase - -from gather_vision.process.component.http_client import HttpClient -from gather_vision.process.component.logger import Logger -from gather_vision.process.component.normalise import Normalise -from gather_vision.process.service.playlist.last_fm import LastFm - - -class TestPlaylistsComponentLastFm(TestCase): - def setUp(self) -> None: - logger = Logger() - tz = ZoneInfo("Australia/Brisbane") - normalise = Normalise() - api_key = f"api_key-{uuid.uuid4()}" - http_client = HttpClient(logger) - - self._logger = logger - self._normalise = normalise - self._http_client = http_client - self._api_key = api_key - self._tz = tz - - self._service = 
LastFm(logger, http_client, normalise, tz) - self._service.login_next(api_key) - - def test_build_qs_missing(self): - # arrange - - # act - with self.assertRaises(ValueError) as ar: - self._service.build_qs(None, None) - - # assert - self.assertEqual(str(ar.exception), "Must provide method.") - - def test_build_url(self): - # arrange - - method = f"method1-{uuid.uuid4()}" - country = f"country1-{uuid.uuid4()}" - order = "desc" - limit = 20 - - # act - qs = self._service.build_qs( - method=method, - country=country, - limit=limit, - ) - - # assert - expected_qs = { - "api_key": self._api_key, - "method": method, - "country": country, - "format": "json", - "limit": limit, - "page": 1, - } - - self.assertEqual(qs, expected_qs) - - def test_get_playlist(self): - # arrange - service_name = "lastfm" - collection_name = "most_popular" - limit = 20 - - track_title = "title" - track_arid = f"id-{uuid.uuid4()}" - artist1 = f"artist1-{uuid.uuid4()}".replace("-", " ") - artist2 = f"artist2-{uuid.uuid4()}".replace("-", " ") - - # act - with requests_mock.Mocker() as m, self.assertLogs() as al: - m.get( - f"https://ws.audioscrobbler.com/2.0/?api_key={self._api_key}&method=geo.gettoptracks&country=australia&format=json&limit=20&page=1", - json={ - "tracks": { - "track": [ - { - "name": track_title, - "url": track_arid, - "artist": { - "name": f"{artist1}, {artist2}", - }, - } - ] - } - }, - ) - playlist = self._service.get_playlist_tracks( - identifier=collection_name, - name=collection_name, - limit=limit, - ) - - # assert - self.assertEqual( - [str(i) for i in playlist.tracks], - [f"{service_name}:{collection_name}:1:{artist1}:{track_title}"], - ) - self.assertEqual( - al.output, - [ - f"INFO:root:Downloading up to {limit} tracks from '{service_name}' collection '{collection_name}'.", - f"INFO:root:Retrieved 1 tracks from '{service_name}' collection '{collection_name}'.", - ], - ) - self.assertEqual(len(playlist.tracks), 1) - self.assertEqual( - str(playlist.tracks[0]), - f"{service_name}:{collection_name}:1:{artist1}:{track_title}", - ) diff --git a/gather_vision/tests/support.py b/gather_vision/tests/support.py deleted file mode 100644 index b55a7b7..0000000 --- a/gather_vision/tests/support.py +++ /dev/null @@ -1,71 +0,0 @@ -import contextlib -from collections import namedtuple -from typing import Iterable, Optional -from unittest import mock - - -def example_data_dir(): - from importlib import resources - - with resources.path("gather_vision_proj", "settings.py") as p: - return p.resolve().parent.parent / ".local" / "example_data" - - -@contextlib.contextmanager -def mock_http_client_send_request(set_side_effect): - with mock.patch( - "gather_vision.management.commands.visionprocess.HttpClient._send_request", - spec=True, - spec_set=True, - ) as http_request: - http_request.side_effect = set_side_effect - try: - yield http_request - finally: - pass - - -def match_output( - context, - actual: str, - expected: list[tuple[str, str]], - start: Optional[int] = None, - stop: Optional[int] = None, -): - actual = actual.strip() if actual else "" - actual = actual.split("\n") - actual = [i.strip() for i in actual if i and i.strip()] - - if start is None: - start = 0 - if stop is None: - stop = len(actual) - actual = actual[start:stop] - - if len(actual) != len(expected): - context.fail( - f"Actual has {len(actual)} entries, expected has {len(expected)}: {actual}; {expected}" - ) - - for index, (act_line, (exp_lvl, exp_line)) in enumerate(zip(actual, expected)): - act_date, act_rest = act_line.split(" ", 
maxsplit=1) - - exp_lvl_str = f"[{exp_lvl:8}] " - context.assertTrue( - act_rest.startswith(exp_lvl_str), - f"Actual did not match expected log level '{exp_lvl_str}'.", - ) - - exp_rest = f"{exp_lvl_str}{exp_line}" - context.assertEqual( - exp_rest, - act_rest, - f"Actual did not match expected '{act_rest}' != '{exp_rest}'.", - ) - - -RequestsTestResponse = namedtuple( - "RequestsTestResponse", - ["text", "json", "content"], - defaults=[None, None, None], -) diff --git a/gather_vision/tests/test_cache.py b/gather_vision/tests/test_cache.py deleted file mode 100644 index 925033d..0000000 --- a/gather_vision/tests/test_cache.py +++ /dev/null @@ -1,62 +0,0 @@ -import uuid - -from django.test import TestCase - -from gather_vision.process.cache.local_cache import LocalCache -from gather_vision.process.component.logger import Logger - - -class TestComponentLocalCache(TestCase): - def setUp(self) -> None: - self._logger = Logger() - self._cache = LocalCache(self._logger) - - def tearDown(self) -> None: - self._cache.clear() - - def test_get_miss(self): - # arrange - key = f"test-{uuid.uuid4()}" - - # act - found, result = self._cache.get(key) - - # assert - self.assertFalse(found) - self.assertIsNone(result) - - def test_get_hit(self): - # arrange - key = "test-" + str(uuid.uuid4()) - value = str(uuid.uuid4()) - - # act - self._cache.set(key, value) - found, result = self._cache.get(key) - - # assert - self.assertTrue(found) - self.assertEqual(value, result) - - def test_get_or_set_miss(self): - # arrange - key = str(uuid.uuid4()) - value = str(uuid.uuid4()) - - # act - result = self._cache.get_or_set(key, value) - - # assert - self.assertEqual(value, result) - - def test_get_or_set_hit(self): - # arrange - key = str(uuid.uuid4()) - value = str(uuid.uuid4()) - - # act - self._cache.set(key, value) - result = self._cache.get_or_set(key, value) - - # assert - self.assertEqual(value, result) diff --git a/gather_vision/tests/test_component_normalise.py b/gather_vision/tests/test_component_normalise.py deleted file mode 100644 index 9bfeb34..0000000 --- a/gather_vision/tests/test_component_normalise.py +++ /dev/null @@ -1,32 +0,0 @@ -from django.test import TestCase - -from gather_vision.process.component.normalise import Normalise - - -class TestPlaylistsComponentNormalise(TestCase): - def test_tracks(self): - n = Normalise() - - tricky = [ - ( - ("Intimidated (feat. H.E.R.)", ["KAYTRANADA"], "H.E.R."), - ("intimidated", ["kaytranada"], ["h.e.r."]), - ), - ( - ("Call My Name (feat. 
Robyn)", ["Smile"], ["Robyn"]), - ("call my name", ["smile"], ["robyn"]), - ), - ( - ("Kiss Of Life", "Kylie Minogue & Jessie Ware", []), - ("kiss of life", ["kylie minogue"], ["jessie ware"]), - ), - ] - - for (in_t, in_p, in_f), (exp_t, exp_p, exp_f) in tricky: - with self.subTest( - in_t=in_t, in_p=in_p, in_f=in_f, exp_t=exp_t, exp_p=exp_p, exp_f=exp_f - ): - act_t, act_p, act_f, act_q = n.track(in_t, in_p, in_f) - self.assertEqual(act_t, exp_t) - self.assertEqual(act_p, exp_p) - self.assertEqual(act_f, exp_f) diff --git a/gather_vision/tests/test_mgmt_cmd_contact_tracing.py b/gather_vision/tests/test_mgmt_cmd_contact_tracing.py deleted file mode 100644 index c4b8f5f..0000000 --- a/gather_vision/tests/test_mgmt_cmd_contact_tracing.py +++ /dev/null @@ -1,163 +0,0 @@ -from io import StringIO - -from django.core.management import call_command, CommandError -from django.test import TestCase - -from gather_vision import models as app_models -from gather_vision.process.component.metadata import Metadata -from gather_vision.tests.support import mock_http_client_send_request, match_output - - -class ContactTracingTest(TestCase): - _cmd = "visionprocess" - _process = "contacttracing" - _tz = "Australia/Melbourne" - - def test_init(self): - # arrange - metadata = Metadata() - user_agent = f"gather-vision (+{metadata.documentation_url()})" - - stdout = StringIO() - stderr = StringIO() - - operation = "init" - - def http_request_side_effect(*args, **kwargs): - raise ValueError() - - with_raises = self.assertRaisesMessage( - CommandError, f"Process '{self._process}' has no operation '{operation}'." - ) - with_http = mock_http_client_send_request(http_request_side_effect) - - with with_http, with_raises: - # act - call_command( - self._cmd, - self._process, - operation, - self._tz, - stdout=stdout, - stderr=stderr, - ) - - # assert - match_output(self, stderr.getvalue(), []) - match_output( - self, - stdout.getvalue(), - [ - ("DEBUG", f"User agent set to '{user_agent}'."), - ("DEBUG", "Using external http client cache 'default'."), - ], - ) - - def test_import(self): - # arrange - metadata = Metadata() - user_agent = f"gather-vision (+{metadata.documentation_url()})" - stdout = StringIO() - stderr = StringIO() - operation = "import" - - def http_request_side_effect(*args, **kwargs): - raise ValueError() - - with_raises = self.assertRaisesMessage( - CommandError, f"Process '{self._process}' has no operation '{operation}'." 
- ) - with_http = mock_http_client_send_request(http_request_side_effect) - - with with_http, with_raises: - # act - call_command( - self._cmd, - self._process, - operation, - self._tz, - stdout=stdout, - stderr=stderr, - ) - - # assert - match_output(self, stderr.getvalue(), []) - match_output( - self, - stdout.getvalue(), - [ - ("DEBUG", f"User agent set to '{user_agent}'."), - ("DEBUG", "Using external http client cache 'default'."), - ], - ) - - def test_incorrect(self): - # arrange - stdout = StringIO() - stderr = StringIO() - - operation = "incorrect" - - def http_request_side_effect(*args, **kwargs): - raise ValueError() - - with_raises = self.assertRaisesMessage( - CommandError, - f"Error: argument operation: invalid choice: '{operation}' " - "(choose from 'init', 'import', 'update')", - ) - with_http = mock_http_client_send_request(http_request_side_effect) - - with with_http, with_raises: - # act - call_command( - self._cmd, - self._process, - operation, - self._tz, - stdout=stdout, - stderr=stderr, - ) - - # assert - match_output(self, stderr.getvalue(), []) - match_output(self, stdout.getvalue(), []) - - def test_update(self): - # arrange - metadata = Metadata() - user_agent = f"gather-vision (+{metadata.documentation_url()})" - stdout = StringIO() - stderr = StringIO() - - operation = "update" - - def http_request_side_effect(*args, **kwargs): - raise ValueError() - - with mock_http_client_send_request(http_request_side_effect): - # act - call_command( - self._cmd, - self._process, - operation, - self._tz, - stdout=stdout, - stderr=stderr, - ) - - # assert - match_output(self, stderr.getvalue(), []) - match_output( - self, - stdout.getvalue(), - [ - ("DEBUG", f"User agent set to '{user_agent}'."), - ("DEBUG", "Using external http client cache 'default'."), - ("INFO", "Finished."), - ], - ) - - objs = app_models.InformationSource.objects.all() - self.assertEquals(len(objs), 0) - # self.assertEqual(repr(objs[0]), "") diff --git a/gather_vision/tests/test_mgmt_cmd_outages.py b/gather_vision/tests/test_mgmt_cmd_outages.py deleted file mode 100644 index 6b3ab8a..0000000 --- a/gather_vision/tests/test_mgmt_cmd_outages.py +++ /dev/null @@ -1,312 +0,0 @@ -import unittest -from io import StringIO - -from django.core.management import call_command, CommandError -from django.test import TestCase - -from gather_vision import models as app_models -from gather_vision.process.component.metadata import Metadata -from gather_vision.tests.support import ( - mock_http_client_send_request, - match_output, - example_data_dir, - RequestsTestResponse, -) - - -class ManagementCommandOutageTest(TestCase): - - _cmd = "visionprocess" - _process = "outages" - _tz = "Australia/Melbourne" - - def test_init(self): - # arrange - metadata = Metadata() - user_agent = f"gather-vision (+{metadata.documentation_url()})" - - stdout = StringIO() - stderr = StringIO() - - operation = "init" - - def http_request_side_effect(*args, **kwargs): - raise ValueError() - - with_raises = self.assertRaisesMessage( - CommandError, f"Process '{self._process}' has no operation '{operation}'." 
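
Each of these deleted tests drives the visionprocess command through Django's call_command, captures stdout and stderr in StringIO buffers, and asserts either on the emitted log lines or on a raised CommandError. A compressed sketch of that invocation pattern follows; it assumes a configured Django project that provides the command, and the process, operation and timezone arguments are placeholders taken from the tests above.

# Sketch of the call_command capture pattern shared by the deleted tests.
from io import StringIO

from django.core.management import CommandError, call_command
from django.test import TestCase


class CommandInvocationExample(TestCase):
    def test_unknown_operation_is_rejected(self):
        stdout, stderr = StringIO(), StringIO()
        with self.assertRaisesMessage(CommandError, "has no operation 'init'"):
            call_command(
                "visionprocess",
                "outages",
                "init",
                "Australia/Melbourne",
                stdout=stdout,
                stderr=stderr,
            )
        # Nothing should have been written to stderr; stdout holds the debug
        # lines emitted before the operation check failed.
        self.assertEqual(stderr.getvalue().strip(), "")
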
- ) - with_http = mock_http_client_send_request(http_request_side_effect) - - with with_http, with_raises: - # act - call_command( - self._cmd, - self._process, - operation, - self._tz, - stdout=stdout, - stderr=stderr, - ) - - # assert - match_output(self, stderr.getvalue(), []) - match_output( - self, - stdout.getvalue(), - [ - ("DEBUG", f"User agent set to '{user_agent}'."), - ("DEBUG", "Using external http client cache 'default'."), - ], - ) - - def test_import_no_path(self): - # arrange - metadata = Metadata() - user_agent = f"gather-vision (+{metadata.documentation_url()})" - stdout = StringIO() - stderr = StringIO() - operation = "import" - - def http_request_side_effect(*args, **kwargs): - raise ValueError() - - with_raises = self.assertRaisesMessage( - CommandError, f"The data path is required to run {operation}." - ) - with_http = mock_http_client_send_request(http_request_side_effect) - - with with_http, with_raises: - # act - call_command( - self._cmd, - self._process, - operation, - self._tz, - stdout=stdout, - stderr=stderr, - ) - - # assert - match_output(self, stderr.getvalue(), []) - match_output( - self, - stdout.getvalue(), - [ - ("DEBUG", f"User agent set to '{user_agent}'."), - ("DEBUG", "Using external http client cache 'default'."), - ], - ) - - @unittest.skip("Takes a while to run") - def test_import_with_path(self): - # arrange - metadata = Metadata() - user_agent = f"gather-vision (+{metadata.documentation_url()})" - stdout = StringIO() - stderr = StringIO() - operation = "import" - - def http_request_side_effect(*args, **kwargs): - raise ValueError() - - with_http = mock_http_client_send_request(http_request_side_effect) - - can_import = ["data(3).sqlite", "data(4).sqlite"] - - # act - with with_http: - for path in example_data_dir().iterdir(): - if not path.is_file() or path.suffix != ".sqlite": - continue - with self.subTest(file_name=path.name): - - command_name = self._cmd - args = [ - self._process, - operation, - self._tz, - "--data-path", - str(path), - ] - options = {"stdout": stdout, "stderr": stderr} - - if path.name not in can_import: - with self.assertRaisesRegex( - CommandError, "^Unrecognised data format: .*" - ): - call_command(command_name, *args, **options) - else: - call_command(command_name, *args, **options) - - # assert - match_output(self, stderr.getvalue(), []) - - stdout_act = stdout.getvalue() - match_output( - self, - stdout_act, - [ - ("DEBUG", f"User agent set to '{user_agent}'."), - ("DEBUG", "Using external http client cache 'default'."), - ("INFO", "Importing outages."), - ], - start=0, - stop=3, - ) - if path.name in can_import: - match_output( - self, - stdout_act, - [ - ("INFO", "Finished importing outages."), - ("INFO", "Finished."), - ], - start=-2, - ) - - def test_incorrect(self): - # arrange - stdout = StringIO() - stderr = StringIO() - - operation = "incorrect" - - def http_request_side_effect(*args, **kwargs): - raise ValueError() - - with_raises = self.assertRaisesMessage( - CommandError, - f"Error: argument operation: invalid choice: '{operation}' " - "(choose from 'init', 'import', 'update')", - ) - with_http = mock_http_client_send_request(http_request_side_effect) - - with with_http, with_raises: - # act - call_command( - self._cmd, - self._process, - operation, - self._tz, - stdout=stdout, - stderr=stderr, - ) - - # assert - match_output(self, stderr.getvalue(), []) - match_output(self, stdout.getvalue(), []) - - def test_update(self): - # arrange - metadata = Metadata() - user_agent = f"gather-vision 
(+{metadata.documentation_url()})" - stdout = StringIO() - stderr = StringIO() - operation = "update" - - base_url = "https://www.energex.com.au/" - - demand_args = ( - "GET", - f"{base_url}static/Energex/Network%20Demand/networkdemand.txt", - ) - network_demand = "2057" - - summary_args = ("GET", f"{base_url}api/outages/v0.3/summary") - - def summary_data(): - return { - "data": { - "totalCustomersAffected": 123, - "lastUpdated": "2021-12-23 10:10:10", - } - } - - council_args = ("GET", f"{base_url}api/outages/v0.3/council") - council_kwargs = {"params": {"council": ""}} - - def council_data(): - return {"data": [{"name": "council1"}]} - - suburb_args = ("GET", f"{base_url}api/outages/v0.3/suburb") - suburb_kwargs = {"params": {"council": "council1", "suburb": ""}} - - def suburb_data(): - return {"data": [{"name": "suburb1"}]} - - search_args = ("GET", "https://www.energex.com.au/api/outages/v0.3/search") - search_kwargs = {"params": {"suburb": "suburb1"}} - - def search_data(): - return { - "data": [ - { - "restoreTime": "2021-12-24T101010+1000", - "streets": ["street1"], - "event": "event1", - "council": "council1", - "suburb": "suburb1", - "postcode": "1234", - "customersAffected": "50", - "cause": "cause1", - } - ] - } - - def http_request_side_effect(*args, **kwargs): - if args == demand_args and not kwargs: - return RequestsTestResponse(text=network_demand) - elif args == summary_args and not kwargs: - return RequestsTestResponse(json=summary_data) - elif args == council_args and kwargs == council_kwargs: - return RequestsTestResponse(json=council_data) - elif args == suburb_args and kwargs == suburb_kwargs: - return RequestsTestResponse(json=suburb_data) - elif args == search_args and kwargs == search_kwargs: - return RequestsTestResponse(json=search_data) - else: - raise ValueError(f"args: '{args}'; kwargs: '{kwargs}'.") - - with mock_http_client_send_request(http_request_side_effect): - # act - call_command( - self._cmd, - self._process, - operation, - self._tz, - stdout=stdout, - stderr=stderr, - ) - - # assert - match_output(self, stderr.getvalue(), []) - match_output( - self, - stdout.getvalue(), - [ - ("DEBUG", f"User agent set to '{user_agent}'."), - ("DEBUG", "Using external http client cache 'default'."), - ("INFO", "Updating outages."), - ("INFO", "Groups 1 (1 imported) total items 1 (1 imported)."), - ("INFO", "Finished updating outages."), - ("INFO", "Finished."), - ], - ) - - objs = app_models.InformationSource.objects.all() - self.assertEquals(len(objs), 1) - self.assertEqual(repr(objs[0]), "") - - objs = app_models.OutageGroup.objects.all() - self.assertEquals(len(objs), 1) - self.assertEqual( - repr(objs[0]), "" - ) - - objs = app_models.OutageItem.objects.all() - self.assertEquals(len(objs), 1) - self.assertEquals( - repr(objs[0]), - "", - ) diff --git a/gather_vision/tests/test_mgmt_cmd_petitions.py b/gather_vision/tests/test_mgmt_cmd_petitions.py deleted file mode 100644 index 387eb07..0000000 --- a/gather_vision/tests/test_mgmt_cmd_petitions.py +++ /dev/null @@ -1,321 +0,0 @@ -import unittest -from io import StringIO - -from django.core.management import call_command, CommandError -from django.test import TestCase - -from gather_vision import models as app_models -from gather_vision.process.component.metadata import Metadata -from gather_vision.tests.support import ( - mock_http_client_send_request, - match_output, - example_data_dir, - RequestsTestResponse, -) - - -class ManagementCommandPetitionsTest(TestCase): - - _cmd = "visionprocess" - _process = 
"petitions" - _tz = "Australia/Melbourne" - - def test_init(self): - # arrange - metadata = Metadata() - user_agent = f"gather-vision (+{metadata.documentation_url()})" - - stdout = StringIO() - stderr = StringIO() - - operation = "init" - - def http_request_side_effect(*args, **kwargs): - raise ValueError() - - with_raises = self.assertRaisesMessage( - CommandError, f"Process '{self._process}' has no operation '{operation}'." - ) - with_http = mock_http_client_send_request(http_request_side_effect) - - with with_http, with_raises: - # act - call_command( - self._cmd, - self._process, - operation, - self._tz, - stdout=stdout, - stderr=stderr, - ) - - # assert - match_output(self, stderr.getvalue(), []) - match_output( - self, - stdout.getvalue(), - [ - ("DEBUG", f"User agent set to '{user_agent}'."), - ("DEBUG", "Using external http client cache 'default'."), - ], - ) - - def test_import_no_path(self): - # arrange - metadata = Metadata() - user_agent = f"gather-vision (+{metadata.documentation_url()})" - stdout = StringIO() - stderr = StringIO() - operation = "import" - - def http_request_side_effect(*args, **kwargs): - raise ValueError() - - with_raises = self.assertRaisesMessage( - CommandError, f"The data path is required to run {operation}." - ) - with_http = mock_http_client_send_request(http_request_side_effect) - - with with_http, with_raises: - # act - call_command( - self._cmd, - self._process, - operation, - self._tz, - stdout=stdout, - stderr=stderr, - ) - - # assert - match_output(self, stderr.getvalue(), []) - match_output( - self, - stdout.getvalue(), - [ - ("DEBUG", f"User agent set to '{user_agent}'."), - ("DEBUG", "Using external http client cache 'default'."), - ], - ) - - @unittest.skip("Takes a while to run") - def test_import_with_path(self): - # arrange - metadata = Metadata() - user_agent = f"gather-vision (+{metadata.documentation_url()})" - stdout = StringIO() - stderr = StringIO() - operation = "import" - - def http_request_side_effect(*args, **kwargs): - raise ValueError() - - with_http = mock_http_client_send_request(http_request_side_effect) - - can_import = [ - "data.sqlite", - "data(2).sqlite", - "data(5).sqlite", - "data(6).sqlite", - ] - - # act - with with_http: - for path in example_data_dir().iterdir(): - if not path.is_file() or path.suffix != ".sqlite": - continue - with self.subTest(file_name=path.name): - - command_name = self._cmd - args = [ - self._process, - operation, - self._tz, - "--data-path", - str(path), - ] - options = {"stdout": stdout, "stderr": stderr} - - if path.name not in can_import: - with self.assertRaisesRegex( - CommandError, "^Unrecognised data format: .*" - ): - call_command(command_name, *args, **options) - else: - call_command(command_name, *args, **options) - - # assert - match_output(self, stderr.getvalue(), []) - - stdout_act = stdout.getvalue() - match_output( - self, - stdout_act, - [ - ("DEBUG", f"User agent set to '{user_agent}'."), - ("DEBUG", "Using external http client cache 'default'."), - ("INFO", "Importing petitions."), - ], - start=0, - stop=3, - ) - if path.name in can_import: - match_output( - self, - stdout_act, - [ - ("INFO", "Finished importing petitions."), - ("INFO", "Finished."), - ], - start=-2, - ) - - def test_incorrect(self): - # arrange - stdout = StringIO() - stderr = StringIO() - - operation = "incorrect" - - def http_request_side_effect(*args, **kwargs): - raise ValueError() - - with_raises = self.assertRaisesMessage( - CommandError, - f"Error: argument operation: invalid choice: '{operation}' " 
- "(choose from 'init', 'import', 'update')", - ) - with_http = mock_http_client_send_request(http_request_side_effect) - - with with_http, with_raises: - # act - call_command( - self._cmd, - self._process, - operation, - self._tz, - stdout=stdout, - stderr=stderr, - ) - - # assert - match_output(self, stderr.getvalue(), []) - match_output(self, stdout.getvalue(), []) - - def test_update(self): - # arrange - metadata = Metadata() - user_agent = f"gather-vision (+{metadata.documentation_url()})" - stdout = StringIO() - stderr = StringIO() - operation = "update" - - base_url1 = "https://www.parliament.qld.gov.au" - base_url2 = f"/Work-of-the-Assembly/Petitions/" - - current_args = ( - "GET", - f"{base_url1}{base_url2}Current-EPetitions", - ) - current_data = f"""
    -
    - title1 - closed date: 24/12/2021 - 20 Signatures -
    -
    """ - - instance_args = ("GET", f"{base_url1}{base_url2}viewurl1=instanceid1") - - instance_data = """ -
    -

    title1

    - Eligibility - el1 -
    pet1
    -
    Total Signatures - 20
    -
    body1
    -
    Sponsoring Member: sponsor1
    -
    Posting Date: 23/12/2021
    -
    Closing Date: 24/12/2021
    -
    - """ - - base_url3 = "https://epetitions.brisbane.qld.gov.au/" - - list_args = ("GET", base_url3) - list_data = """ - - - -
    titlewhoclosed date
    title2principal224/12/2021
    """ - - instance2_args = ("GET", f"{base_url3}petition/view/pid/instanceid2") - instance2_data = """ -

    title2

    - - - - -
    whoprincipal2
    closed date24/12/2021
    signaturessignatures 40
    -
    body2
    - """ - - def http_request_side_effect(*args, **kwargs): - if args == current_args and not kwargs: - return RequestsTestResponse(text=current_data) - elif args == instance_args and not kwargs: - return RequestsTestResponse(text=instance_data) - elif args == list_args and not kwargs: - return RequestsTestResponse(text=list_data) - elif args == instance2_args and not kwargs: - return RequestsTestResponse(text=instance2_data) - else: - raise ValueError(f"args: '{args}'; kwargs: '{kwargs}'.") - - with mock_http_client_send_request(http_request_side_effect): - # act - call_command( - self._cmd, - self._process, - operation, - self._tz, - stdout=stdout, - stderr=stderr, - ) - - # assert - match_output(self, stderr.getvalue(), []) - match_output( - self, - stdout.getvalue(), - [ - ("DEBUG", f"User agent set to '{user_agent}'."), - ("DEBUG", "Using external http client cache 'default'."), - ("INFO", "Updating Queensland Government petitions."), - ("INFO", "Petitions 1 (1 created) changes 1 (1 added)."), - ("INFO", "Finished updating petitions."), - ("INFO", "Updating Brisbane City Council petitions."), - ("INFO", "Petitions 1 (1 created) changes 1 (1 added)."), - ("INFO", "Finished updating petitions."), - ("INFO", "Finished."), - ], - ) - - objs = app_models.InformationSource.objects.all() - self.assertEquals(len(objs), 2) - self.assertEqual(repr(objs[0]), "") - self.assertEqual(repr(objs[1]), "") - - objs = app_models.PetitionItem.objects.all() - self.assertEquals(len(objs), 2) - self.assertEqual(repr(objs[0]), '') - self.assertEqual(repr(objs[1]), '') - - objs = app_models.PetitionChange.objects.all() - self.assertEquals(len(objs), 2) - self.assertTrue( - repr(objs[0]).startswith(" - - - TransLink service notices feed - Current and upcoming service notices or the TransLink public transport network - {date1.strftime('%a, %d %b %Y %H:%M:%S %z')} - Laminas_Feed_Writer 2 (https://getlaminas.org) - https://translink.com.au/service-updates - - - McKean Street, Caboolture - temporary stop closure -
  • (Minor) McKean Street, Caboolture - temporary stop closure. - Starts affecting: {date2.strftime('%Y-%m-%dT%H:%M:%S%z')[:-2] + ':00'}
  • ]]>
    - https://translink.com.au/updates/14396 - https://translink.com.au/updates/14396 - - - 0 -
    - - Long weekend track closure - Beenleigh, Gold Coast and Cleveland lines -
  • (Major) Long weekend track closure - Beenleigh, Gold Coast and Cleveland lines. - Starts affecting: {date3.strftime('%Y-%m-%dT%H:%M:%S%z')[:-2] + ':00'} - Finishes affecting: {date4.strftime('%Y-%m-%dT%H:%M:%S%z')[:-2] + ':00'}
  • ]]>
    - https://translink.com.au/updates/66966 - https://translink.com.au/updates/66966 - - - 0 -
    -
    -
    - """.encode( - "utf-8" - ) - - qld_rail1_args = ( - "GET", - "https://www.queenslandrail.com.au/forcustomers/trackclosures/12monthcalendar", - ) - qld_rail1_data = "" - - qld_rail2_args = ( - "POST", - "https://www.queenslandrail.com.au/SPWebApp/api/ContentQuery/GetItems", - ) - qld_rail2_kwargs = { - "json": QldRailEvents.params, - "headers": QldRailEvents.headers, - } - - qld_rail2_data = r""" - "[{\"Title\":\"Narangba to Gympie North\",\"Description\":\"\",\"EventDate\":\"%s\",\"EndDate\":\"%s\",\"ID\":\"2710\",\"TrackClosureName0\":\"Narangba to Gympie North\",\"LineAffected\":\";#Caboolture;#Sunshine Coast;#\",\"fRecurrence\":\"False\",\"fAllDayEvent\":,\"WorksInclude\":\"Overhead maintenance, track maintenance\",\"Is_x0020_CRR_x0020_Event\":\"\"},{\"Title\":\"Ipswich to Rosewood\",\"Description\":\"\",\"EventDate\":\"%s\",\"EndDate\":\"%s\",\"ID\":\"2711\",\"TrackClosureName0\":\"Ipswich to Rosewood\",\"LineAffected\":\";#Ipswich/Rosewood;#\",\"fRecurrence\":\"False\",\"fAllDayEvent\":,\"WorksInclude\":\"Overhead maintenance, track maintenance\",\"Is_x0020_CRR_x0020_Event\":\"\"}]" - """ % ( - date5.strftime("%m/%d/%Y %I:%M:%S %p"), - date6.strftime("%m/%d/%Y %I:%M:%S %p"), - date7.strftime("%m/%d/%Y %I:%M:%S %p"), - date8.strftime("%m/%d/%Y %I:%M:%S %p"), - ) - - def http_request_side_effect(*args, **kwargs): - if args == rss_args and not kwargs: - return RequestsTestResponse(content=rss_data) - elif args == qld_rail1_args and not kwargs: - return RequestsTestResponse(content=qld_rail1_data) - elif args == qld_rail2_args and kwargs == qld_rail2_kwargs: - return RequestsTestResponse(text=qld_rail2_data) - else: - raise ValueError(f"args: '{args}'; kwargs: '{kwargs}'.") - - with mock_http_client_send_request(http_request_side_effect): - # act - call_command( - self._cmd, - self._process, - operation, - self._tz, - stdout=stdout, - stderr=stderr, - ) - - # assert - match_output(self, stderr.getvalue(), []) - match_output( - self, - stdout.getvalue(), - [ - ("DEBUG", f"User agent set to '{user_agent}'."), - ("DEBUG", "Using external http client cache 'default'."), - ("INFO", "Updating transport notices."), - ("INFO", "Notices 4 (0 updated, 4 created)."), - ("INFO", "Finished updating transport notices."), - ("INFO", "Finished."), - ], - ) - - objs = app_models.InformationSource.objects.all() - self.assertEquals(len(objs), 2) - self.assertEqual(repr(objs[0]), "") - self.assertEqual(repr(objs[1]), "") - - objs = app_models.TransportItem.objects.all() - self.assertEquals(len(objs), 4) - self.assertEqual( - repr(objs[0]), - f'', - ) - self.assertEqual( - repr(objs[1]), - f'', - ) - self.assertEqual( - repr(objs[2]), - f'', - ) - self.assertEqual( - repr(objs[3]), - f'', - ) - - objs = app_models.TransportLine.objects.all() - self.assertEquals(len(objs), 3) - self.assertEqual(repr(objs[0]), "") - self.assertEqual(repr(objs[1]), "") - self.assertEqual(repr(objs[2]), "") diff --git a/gather_vision/urls.py b/gather_vision/urls.py deleted file mode 100644 index 0a7213c..0000000 --- a/gather_vision/urls.py +++ /dev/null @@ -1,60 +0,0 @@ -from django.urls import path -from django.views.decorators.cache import cache_page - -from gather_vision.views.contact_tracing import ContactTracingIndexView -from gather_vision.views.general import HomeIndexView, AboutIndexView -from gather_vision.views.outages import OutageIndexView -from gather_vision.views.petitions import PetitionIndexView -from gather_vision.views.playlists import PlaylistIndexView -from gather_vision.views.transport import 
TransportIndexView, TransportDataView - -_cache_sec = 60 * 10 - -app_name = "gather_vision" -urlpatterns = [ - path( - "", - cache_page(_cache_sec)(HomeIndexView.as_view()), - name="home-index", - ), - path( - "about/", - cache_page(_cache_sec)(AboutIndexView.as_view()), - name="about-index", - ), - path( - "outages/", - cache_page(_cache_sec)(OutageIndexView.as_view()), - name="outages-index", - ), - path( - "petitions/", - cache_page(_cache_sec)(PetitionIndexView.as_view()), - name="petitions-index", - ), - path( - "playlists/", - cache_page(_cache_sec)(PlaylistIndexView.as_view()), - name="playlists-index", - ), - path( - "contact-tracing/", - cache_page(_cache_sec)(ContactTracingIndexView.as_view()), - name="contact-tracing-index", - ), - path( - "transport//", - cache_page(_cache_sec)(TransportIndexView.as_view()), - name="transport-filter", - ), - path( - "transport/", - cache_page(_cache_sec)(TransportIndexView.as_view()), - name="transport-index", - ), - path( - "transport//data.", - cache_page(_cache_sec)(TransportDataView.as_view()), - name="transport-data", - ), -] diff --git a/gather_vision/views/__init__.py b/gather_vision/views/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/gather_vision/views/contact_tracing.py b/gather_vision/views/contact_tracing.py deleted file mode 100644 index 36fe012..0000000 --- a/gather_vision/views/contact_tracing.py +++ /dev/null @@ -1,7 +0,0 @@ -from django.views.generic import TemplateView -from django.utils.translation import gettext as _ - - -class ContactTracingIndexView(TemplateView): - template_name = "gather_vision/contact_tracing/index.html" - page_title = _("Contact Tracing") diff --git a/gather_vision/views/general.py b/gather_vision/views/general.py deleted file mode 100644 index 1f730db..0000000 --- a/gather_vision/views/general.py +++ /dev/null @@ -1,12 +0,0 @@ -from django.views.generic import TemplateView -from django.utils.translation import gettext as _ - - -class HomeIndexView(TemplateView): - template_name = "gather_vision/home/index.html" - page_title = _("Home") - - -class AboutIndexView(TemplateView): - template_name = "gather_vision/about/index.html" - page_title = _("About") diff --git a/gather_vision/views/outages.py b/gather_vision/views/outages.py deleted file mode 100644 index 6e18114..0000000 --- a/gather_vision/views/outages.py +++ /dev/null @@ -1,83 +0,0 @@ -from datetime import timedelta -from zoneinfo import ZoneInfo - -from django.views.generic import TemplateView -from django.utils.translation import gettext as _ - -from gather_vision import models as app_models -from gather_vision.process.component.time_series import TimeSeries - - -class OutageIndexView(TemplateView): - template_name = "gather_vision/outages/index.html" - page_title = _("Outages") - - def get_context_data(self, **kwargs): - context = super().get_context_data(**kwargs) - context["graph_data"] = self._graph_data() - context["graph_layout"] = self._graph_layout() - context["graph_config"] = self._graph_config() - return context - - def _graph_data(self): - # get date range - date_range = app_models.OutageGroup.get_retrieved_date_range() - stop_date = date_range.get("max") - start_date = stop_date - timedelta(days=30 * 12 * 2) - ts = TimeSeries(start_date, stop_date, ZoneInfo("Australia/Brisbane")) - - # get data - query = app_models.OutageGroup.get_data_items(start_date, stop_date) - petition_data = ts.outages(query) - return petition_data - - def _graph_layout(self): - return { - "title": "Electricity Outages and Demand 
Over Time", - "xaxis": { - "autorange": True, - "rangeselector": { - "buttons": [ - { - "count": 1, - "label": "1m", - "step": "month", - "stepmode": "backward", - }, - { - "count": 6, - "label": "6m", - "step": "month", - "stepmode": "backward", - }, - { - "count": 1, - "label": "1y", - "step": "year", - "stepmode": "backward", - }, - { - "step": "all", - }, - ] - }, - "rangeslider": True, - "type": "date", - }, - "yaxis": { - "title": "Demand and Outage Count", - "autorange": True, - "type": "linear", - }, - "yaxis2": { - "title": "Rating", - "autorange": True, - "overlaying": "y", - "side": "right", - }, - } - - def _graph_config(self): - return { - "responsive": True, - } diff --git a/gather_vision/views/petitions.py b/gather_vision/views/petitions.py deleted file mode 100644 index d531375..0000000 --- a/gather_vision/views/petitions.py +++ /dev/null @@ -1,77 +0,0 @@ -from datetime import timedelta -from zoneinfo import ZoneInfo - -from django.utils.translation import gettext as _ -from django.views.generic import TemplateView - -from gather_vision import models as app_models -from gather_vision.process.component.time_series import TimeSeries - - -class PetitionIndexView(TemplateView): - template_name = "gather_vision/petitions/index.html" - page_title = _("Petitions") - - def get_context_data(self, **kwargs): - context = super().get_context_data(**kwargs) - context["graph_data"] = self._graph_data() - context["graph_layout"] = self._graph_layout() - context["graph_config"] = self._graph_config() - return context - - def _graph_data(self): - # get date range - date_range = app_models.PetitionChange.get_retrieved_date_range() - stop_date = date_range.get("max") - start_date = stop_date - timedelta(days=30 * 12 * 2) - ts = TimeSeries(start_date, stop_date, ZoneInfo("Australia/Brisbane")) - - # get data - query = app_models.PetitionItem.get_data_items(start_date, stop_date) - petition_data = ts.petitions(query) - return petition_data - - def _graph_layout(self): - return { - "title": "Petition Signature Counts Over Time", - "showlegend": False, - "xaxis": { - "autorange": True, - "rangeselector": { - "buttons": [ - { - "count": 1, - "label": "1m", - "step": "month", - "stepmode": "backward", - }, - { - "count": 6, - "label": "6m", - "step": "month", - "stepmode": "backward", - }, - { - "count": 1, - "label": "1y", - "step": "year", - "stepmode": "backward", - }, - { - "step": "all", - }, - ] - }, - "rangeslider": True, - "type": "date", - }, - "yaxis": { - "autorange": True, - "type": "linear", - }, - } - - def _graph_config(self): - return { - "responsive": True, - } diff --git a/gather_vision/views/playlists.py b/gather_vision/views/playlists.py deleted file mode 100644 index 4ae38be..0000000 --- a/gather_vision/views/playlists.py +++ /dev/null @@ -1,7 +0,0 @@ -from django.views.generic import TemplateView -from django.utils.translation import gettext as _ - - -class PlaylistIndexView(TemplateView): - template_name = "gather_vision/playlists/index.html" - page_title = _("Playlists") diff --git a/gather_vision/views/transport.py b/gather_vision/views/transport.py deleted file mode 100644 index 156fa0c..0000000 --- a/gather_vision/views/transport.py +++ /dev/null @@ -1,84 +0,0 @@ -from typing import Optional - -from django.http import HttpResponse -from django.utils.translation import gettext as _ -from django.views import View -from django.views.generic import TemplateView - -from gather_vision.process.support.select_format_mixin import SelectFormatMixin -from 
gather_vision.process.support.transport.render_csv_mixin import RenderCsvMixin -from gather_vision.process.support.transport.render_ics_mixin import RenderIcsMixin -from gather_vision.process.support.transport.render_json_mixin import RenderJsonMixin -from gather_vision.process.support.transport.render_txt_mixin import RenderTxtMixin -from gather_vision import models as app_models - - -class TransportIndexView(TemplateView): - template_name = "gather_vision/transport/index.html" - page_title = _("Transport") - - def get_context_data(self, **kwargs): - context = super().get_context_data(**kwargs) - - if "transport_filter" not in context or not context["transport_filter"]: - context["transport_filter"] = "all" - - transport_filter = context["transport_filter"] - available = app_models.TransportItem.get_items_available() - if transport_filter not in available: - context["items"] = [] - else: - context["items"] = available[transport_filter]() - - return context - - -class TransportDataView( - RenderCsvMixin, - RenderIcsMixin, - RenderJsonMixin, - RenderTxtMixin, - SelectFormatMixin, - View, -): - def get( - self, - request, - transport_filter: Optional[str] = None, - cust_ext: Optional[str] = None, - ): - """ - Get the transport data in various formats. - The available formats are csv, json, ics. - - The format can be selected by the extension in the final path segment. - """ - - selected_format = self.select_format(cust_ext) - - if selected_format["status_code"] != 200: - return HttpResponse( - selected_format["message"], status=selected_format["status_code"] - ) - - if not transport_filter: - transport_filter = "all" - - available = app_models.TransportItem.get_items_available() - if transport_filter not in available: - data = [] - else: - data = available[transport_filter]() - - if selected_format["extension"] == "csv": - response = self.get_data_csv(data) - elif selected_format["extension"] == "ics": - response = self.get_data_ics(data) - elif selected_format["extension"] == "json": - response = self.get_data_json(data) - elif selected_format["extension"] == "txt": - response = self.get_data_txt(data) - else: - response = HttpResponse("Invalid format.", status=400) - - return response diff --git a/gather_vision_proj/__init__.py b/gather_vision_proj/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/gather_vision_proj/admin.py b/gather_vision_proj/admin.py deleted file mode 100644 index c71ee0e..0000000 --- a/gather_vision_proj/admin.py +++ /dev/null @@ -1,8 +0,0 @@ -from django.contrib import admin -from django.utils.translation import gettext as _ - - -class GatherVisionAdminSite(admin.AdminSite): - site_title = _("Vision site admin") - site_header = _("Vision admin") - index_title = _("Site admin") diff --git a/gather_vision_proj/apps.py b/gather_vision_proj/apps.py deleted file mode 100644 index 80df4b6..0000000 --- a/gather_vision_proj/apps.py +++ /dev/null @@ -1,5 +0,0 @@ -from django.contrib.admin.apps import AdminConfig - - -class GatherVisionAdminConfig(AdminConfig): - default_site = "gather_vision_proj.admin.GatherVisionAdminSite" diff --git a/gather_vision_proj/asgi.py b/gather_vision_proj/asgi.py deleted file mode 100644 index 39256c5..0000000 --- a/gather_vision_proj/asgi.py +++ /dev/null @@ -1,16 +0,0 @@ -""" -ASGI config for gather_vision project. - -It exposes the ASGI callable as a module-level variable named ``application``. 
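
The deleted TransportDataView above composes one render mixin per output format (csv, ics, json, txt) and picks the renderer from the extension in the final path segment. The following is a stand-alone sketch of that dispatch idea only, independent of Django and of the project's mixins; the function names, the omission of ics, and the default format are simplifications of my own.

# Generic extension-based format dispatch, mirroring the idea in the deleted
# TransportDataView: one renderer per extension, a default, and a clear error
# for unknown formats. Names are illustrative only.
import csv
import io
import json
from typing import Iterable, Mapping, Optional


def render_csv(rows: Iterable[Mapping]) -> str:
    rows = list(rows)
    buffer = io.StringIO()
    writer = csv.DictWriter(buffer, fieldnames=sorted({k for r in rows for k in r}))
    writer.writeheader()
    writer.writerows(rows)
    return buffer.getvalue()


def render_json(rows: Iterable[Mapping]) -> str:
    return json.dumps(list(rows), indent=2, default=str)


def render_txt(rows: Iterable[Mapping]) -> str:
    return "\n".join(str(dict(r)) for r in rows)


RENDERERS = {
    "csv": render_csv,
    "json": render_json,
    "txt": render_txt,
}


def render(rows: Iterable[Mapping], extension: Optional[str]) -> str:
    renderer = RENDERERS.get((extension or "json").lower())
    if renderer is None:
        raise ValueError(f"Unsupported format: {extension!r}")
    return renderer(rows)


if __name__ == "__main__":
    data = [{"title": "Track closure", "severity": "Major"}]
    print(render(data, "csv"))
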
- -For more information on this file, see -https://docs.djangoproject.com/en/4.0/howto/deployment/asgi/ -""" - -import os - -from django.core.asgi import get_asgi_application - -os.environ.setdefault("DJANGO_SETTINGS_MODULE", "gather_vision_proj.settings") - -application = get_asgi_application() diff --git a/gather_vision_proj/gather_vision_env.py b/gather_vision_proj/gather_vision_env.py deleted file mode 100644 index 4f3b0b2..0000000 --- a/gather_vision_proj/gather_vision_env.py +++ /dev/null @@ -1,97 +0,0 @@ -from urllib.parse import urlparse, parse_qs - -from environ import FileAwareEnv, ImproperlyConfigured - -from gather_vision.process.item.playlist_conf import PlaylistConf - - -class GatherVisionEnv(FileAwareEnv): - - DEFAULT_EXTERNAL_HTTP_CACHE_ENV = "EXTERNAL_HTTP_CACHE_URL" - - # https://requests-cache.readthedocs.io/en/stable/user_guide/backends.html - EXTERNAL_HTTP_CACHE_SCHEMES = { - "sqlite": "requests_cache.backends.sqlite.SQLiteCache", - "noop": None, - "filesystem": "requests_cache.backends.filesystem.FileCache", - "memory": "requests_cache.backends.base.BaseCache", - } - - DEFAULT_PLAYLIST_SOURCES_TARGETS_ENV = "PLAYLIST_SOURCES_TARGETS" - - def external_http_cache_url( - self, - var=DEFAULT_EXTERNAL_HTTP_CACHE_ENV, - default=FileAwareEnv.NOTSET, - backend=None, - ): - """Returns a config dictionary, defaulting to EXTERNAL_HTTP_CACHE_URL. - - :rtype: dict - """ - return self.external_http_cache_url_config( - self.url(var, default=default), backend=backend - ) - - @classmethod - def external_http_cache_url_config(cls, url, backend=None): - """Pulled from DJ-Cache-URL, parse an arbitrary Cache URL. - - :param url: - :param backend: - :return: - """ - if not isinstance(url, cls.URL_CLASS): - if not url: - return {} - else: - url = urlparse(url) - - if url.scheme not in cls.EXTERNAL_HTTP_CACHE_SCHEMES: - raise ImproperlyConfigured("Invalid cache schema {}".format(url.scheme)) - - location = url.netloc.split(",") - if len(location) == 1: - location = location[0] - - querystring = parse_qs(url.query) if url.query else {} - backend_params = {} - for key, values in querystring.items(): - if len(values) == 0: - backend_params[key] = None - elif len(values) == 1: - backend_params[key] = values[0] - else: - backend_params[key] = values - - config = { - "BACKEND": cls.EXTERNAL_HTTP_CACHE_SCHEMES[url.scheme], - "LOCATION": location or url.path, - "EXPIRES": backend_params.pop( - "expires", backend_params.pop("EXPIRES", None) - ), - "BACKEND_PARAMS": backend_params, - } - - return config - - def playlist_sources_targets( - self, - var=DEFAULT_PLAYLIST_SOURCES_TARGETS_ENV, - default=None, - backend=None, - ) -> list[PlaylistConf]: - - items = self.json(var, default or []) - result = [] - for item in items: - result.append( - PlaylistConf( - source_code=item.get("source", {}).get("code"), - source_collection=item.get("source", {}).get("collection"), - target_code=item.get("target", {}).get("code"), - target_playlist_id=item.get("target", {}).get("playlist_id"), - target_title=item.get("target", {}).get("title"), - ) - ) - return result diff --git a/gather_vision_proj/settings.py b/gather_vision_proj/settings.py deleted file mode 100644 index 469452e..0000000 --- a/gather_vision_proj/settings.py +++ /dev/null @@ -1,153 +0,0 @@ -""" -Django settings for gather_vision project. - -Generated by 'django-admin startproject' using Django 3.2.9. 
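
The deleted GatherVisionEnv.external_http_cache_url_config above turns a single cache URL into a backend configuration dict, in the spirit of dj-cache-url: the scheme selects the requests-cache backend, the netloc or path becomes the location, and query parameters (minus "expires") pass through as backend parameters. Here is a simplified, standard-library-only sketch of that parsing step; the scheme-to-backend mapping comes from the deleted module, while the example URL and the single-location handling are simplifications.

# Simplified sketch of the cache-URL parsing done by the deleted
# external_http_cache_url_config helper.
from urllib.parse import parse_qs, urlparse

# Mapping copied from the deleted module (requests-cache backends).
BACKENDS = {
    "sqlite": "requests_cache.backends.sqlite.SQLiteCache",
    "noop": None,
    "filesystem": "requests_cache.backends.filesystem.FileCache",
    "memory": "requests_cache.backends.base.BaseCache",
}


def cache_url_config(url: str) -> dict:
    parsed = urlparse(url)
    if parsed.scheme not in BACKENDS:
        raise ValueError(f"Invalid cache schema {parsed.scheme}")

    # Flatten single-valued query parameters, keep multi-valued ones as lists.
    params = {k: v[0] if len(v) == 1 else v for k, v in parse_qs(parsed.query).items()}
    return {
        "BACKEND": BACKENDS[parsed.scheme],
        "LOCATION": parsed.netloc or parsed.path,
        "EXPIRES": params.pop("expires", None),
        "BACKEND_PARAMS": params,
    }


if __name__ == "__main__":
    # Illustrative value, e.g. EXTERNAL_HTTP_CACHE_URL=sqlite:///tmp/http_cache.sqlite?expires=3600
    print(cache_url_config("sqlite:///tmp/http_cache.sqlite?expires=3600"))
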
- -For more information on this file, see -https://docs.djangoproject.com/en/4.0/topics/settings/ - -For the full list of settings and their values, see -https://docs.djangoproject.com/en/4.0/ref/settings/ -""" - -from importlib import resources -from gather_vision_proj.gather_vision_env import GatherVisionEnv - - -# Use django-environ to load settings. -env = GatherVisionEnv() - -# Build paths inside the project like this: BASE_DIR / 'subdir'. -with resources.path("gather_vision_proj", "settings.py") as p: - BASE_DIR = p.resolve().parent.parent - -# Load the settings from the env file. -# Check the DJANGO_ENV_FILE env var first, then default to local .env file. -env.read_env(env.str("DJANGO_ENV_FILE", BASE_DIR / ".env")) - -# SECURITY WARNING: keep the secret key used in production secret! -SECRET_KEY = env.str("SECRET_KEY") - -# SECURITY WARNING: don't run with debug turned on in production! -DEBUG = env.bool("DEBUG", False) - -ALLOWED_HOSTS = env.list("ALLOWED_HOSTS", str, []) -INTERNAL_IPS = env.list("INTERNAL_IPS", str, ["127.0.0.1"]) - - -# Installed applications -# https://docs.djangoproject.com/en/4.0/ref/settings/#std:setting-INSTALLED_APPS -INSTALLED_APPS = [ - "django.contrib.admindocs", - "gather_vision_proj.apps.GatherVisionAdminConfig", - "django.contrib.auth", - "django.contrib.contenttypes", - "django.contrib.sessions", - "django.contrib.messages", - "django.contrib.staticfiles", - "gather_vision.apps.GatherVisionConfig", - "debug_toolbar", -] - -# Middleware: Django 'plugins' to alter input and output -# https://docs.djangoproject.com/en/4.0/topics/http/middleware/ -MIDDLEWARE = [ - "debug_toolbar.middleware.DebugToolbarMiddleware", - "django.middleware.security.SecurityMiddleware", - "django.contrib.sessions.middleware.SessionMiddleware", - "django.middleware.common.CommonMiddleware", - "django.middleware.csrf.CsrfViewMiddleware", - "django.contrib.auth.middleware.AuthenticationMiddleware", - "django.contrib.messages.middleware.MessageMiddleware", - "django.middleware.clickjacking.XFrameOptionsMiddleware", -] - -# Root URL conf: import path for the project-level url conf. -# https://docs.djangoproject.com/en/4.0/ref/settings/#root-urlconf -ROOT_URLCONF = "gather_vision_proj.urls" - -# Django template engines. -# https://docs.djangoproject.com/en/4.0/ref/settings/#templates -TEMPLATES = [ - { - "BACKEND": "django.template.backends.django.DjangoTemplates", - "DIRS": [BASE_DIR / "templates"], - "APP_DIRS": True, - "OPTIONS": { - "context_processors": [ - "django.template.context_processors.debug", - "django.template.context_processors.request", - "django.contrib.auth.context_processors.auth", - "django.contrib.messages.context_processors.messages", - ], - }, - }, -] - -# WSGI app for the Django dev server. 
-# https://docs.djangoproject.com/en/4.0/ref/settings/#wsgi-application -WSGI_APPLICATION = "gather_vision_proj.wsgi.application" - -# Database -# https://docs.djangoproject.com/en/4.0/ref/settings/#databases -DATABASES = {"default": env.db_url()} - -# Password validation -# https://docs.djangoproject.com/en/4.0/ref/settings/#auth-password-validators -AUTH_PASSWORD_VALIDATORS = [ - { - "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator", # noqa: E501 - }, - { - "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator", - }, - { - "NAME": "django.contrib.auth.password_validation.CommonPasswordValidator", - }, - { - "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator", - }, -] - - -# Internationalization -# https://docs.djangoproject.com/en/4.0/topics/i18n/ -LANGUAGE_CODE = "en-au" - -TIME_ZONE = "UTC" - -USE_I18N = True - -USE_L10N = True - -USE_TZ = True - -# Static files (CSS, JavaScript, Images) -# https://docs.djangoproject.com/en/4.0/howto/static-files/ -STATIC_URL = "/static/" - -# Default primary key field type -# https://docs.djangoproject.com/en/4.0/ref/settings/#default-auto-field -DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField" - -# Cache -# https://docs.djangoproject.com/en/4.0/topics/cache -CACHES = {"default": env.cache_url()} - -# (custom) External HTTP client cache -# Works in a similar way to Django's cache settings. -# https://requests-cache.readthedocs.io/en/stable/user_guide/backends.html -EXTERNAL_HTTP_CACHES = {"default": env.external_http_cache_url()} - -# (custom) Playlist sources and targets. -PLAYLIST_SOURCES_TARGETS = env.playlist_sources_targets() - -# (custom) Playlist service config -SPOTIFY_AUTH_REFRESH_TOKEN = env.str("SPOTIFY_AUTH_REFRESH_TOKEN", None) -SPOTIFY_AUTH_CLIENT_ID = env.str("SPOTIFY_AUTH_CLIENT_ID", None) -SPOTIFY_AUTH_CLIENT_SECRET = env.str("SPOTIFY_AUTH_CLIENT_SECRET", None) -SPOTIFY_AUTH_REDIRECT_URI = env.str("SPOTIFY_AUTH_REDIRECT_URI", None) - -YOUTUBE_MUSIC_AUTH_CONFIG = env.str("YOUTUBE_MUSIC_AUTH_CONFIG", None) - -LASTFM_AUTH_API_KEY = env.str("LASTFM_AUTH_API_KEY", None) diff --git a/gather_vision_proj/urls.py b/gather_vision_proj/urls.py deleted file mode 100644 index 22fb4a7..0000000 --- a/gather_vision_proj/urls.py +++ /dev/null @@ -1,58 +0,0 @@ -"""gather_vision URL Configuration - -The `urlpatterns` list routes URLs to views. For more information please see: - https://docs.djangoproject.com/en/4.0/topics/http/urls/ -Examples: -Function views - 1. Add an import: from my_app import views - 2. Add a URL to urlpatterns: path('', views.home, name='home') -Class-based views - 1. Add an import: from other_app.views import Home - 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') -Including another URLconf - 1. Import the include() function: from django.urls import include, path - 2. 
Add a URL to urlpatterns: path('blog/', include('blog.urls')) -""" -import debug_toolbar -from django.contrib import admin -from django.contrib.auth import views as auth_views -from django.urls import path, include - -urlpatterns = [ - path( - "admin/password_reset/", - auth_views.PasswordResetView.as_view(), - name="admin_password_reset", - ), - path( - "admin/password_reset/done/", - auth_views.PasswordResetDoneView.as_view(), - name="password_reset_done", - ), - path( - "reset///", - auth_views.PasswordResetConfirmView.as_view(), - name="password_reset_confirm", - ), - path( - "reset/done/", - auth_views.PasswordResetCompleteView.as_view(), - name="password_reset_complete", - ), - path( - "admin/doc/", - include("django.contrib.admindocs.urls"), - ), - path( - "admin/", - admin.site.urls, - ), - path( - "", - include("gather_vision.urls", namespace="vision"), - ), - path( - "__debug__/", - include(debug_toolbar.urls), - ), -] diff --git a/gather_vision_proj/wsgi.py b/gather_vision_proj/wsgi.py deleted file mode 100644 index c825520..0000000 --- a/gather_vision_proj/wsgi.py +++ /dev/null @@ -1,16 +0,0 @@ -""" -WSGI config for gather_vision project. - -It exposes the WSGI callable as a module-level variable named ``application``. - -For more information on this file, see -https://docs.djangoproject.com/en/4.0/howto/deployment/wsgi/ -""" - -import os - -from django.core.wsgi import get_wsgi_application - -os.environ.setdefault("DJANGO_SETTINGS_MODULE", "gather_vision_proj.settings") - -application = get_wsgi_application() diff --git a/manage.py b/manage.py deleted file mode 100644 index 139ec48..0000000 --- a/manage.py +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env python -"""Django's command-line utility for administrative tasks.""" -import os -import sys - - -def main(): - """Run administrative tasks.""" - os.environ.setdefault("DJANGO_SETTINGS_MODULE", "gather_vision_proj.settings") - try: - from django.core.management import execute_from_command_line - except ImportError as exc: - raise ImportError( - "Couldn't import Django. Are you sure it's installed and " - "available on your PYTHONPATH environment variable? Did you " - "forget to activate a virtual environment?" - ) from exc - execute_from_command_line(sys.argv) - - -if __name__ == "__main__": - main() diff --git a/poetry.lock b/poetry.lock deleted file mode 100644 index 807eb41..0000000 --- a/poetry.lock +++ /dev/null @@ -1,925 +0,0 @@ -[[package]] -name = "appdirs" -version = "1.4.4" -description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "arrow" -version = "0.14.7" -description = "Better dates & times for Python" -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - -[package.dependencies] -python-dateutil = "*" - -[[package]] -name = "asgiref" -version = "3.4.1" -description = "ASGI specs, helper code, and adapters" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.extras] -tests = ["pytest", "pytest-asyncio", "mypy (>=0.800)"] - -[[package]] -name = "atomicwrites" -version = "1.4.0" -description = "Atomic file writes." 
-category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[[package]] -name = "attrs" -version = "21.2.0" -description = "Classes Without Boilerplate" -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - -[package.extras] -dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit"] -docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] -tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface"] -tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins"] - -[[package]] -name = "black" -version = "21.12b0" -description = "The uncompromising code formatter." -category = "dev" -optional = false -python-versions = ">=3.6.2" - -[package.dependencies] -click = ">=7.1.2" -mypy-extensions = ">=0.4.3" -pathspec = ">=0.9.0,<1" -platformdirs = ">=2" -tomli = ">=0.2.6,<2.0.0" -typing-extensions = [ - {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}, - {version = "!=3.10.0.1", markers = "python_version >= \"3.10\""}, -] - -[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)"] -jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] -python2 = ["typed-ast (>=1.4.3)"] -uvloop = ["uvloop (>=0.15.2)"] - -[[package]] -name = "cattrs" -version = "1.9.0" -description = "Composable complex class support for attrs and dataclasses." -category = "main" -optional = false -python-versions = ">=3.7,<4.0" - -[package.dependencies] -attrs = ">=20" - -[[package]] -name = "certifi" -version = "2021.10.8" -description = "Python package for providing Mozilla's CA Bundle." -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "charset-normalizer" -version = "2.0.9" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -category = "main" -optional = false -python-versions = ">=3.5.0" - -[package.extras] -unicode_backport = ["unicodedata2"] - -[[package]] -name = "click" -version = "8.0.3" -description = "Composable command line interface toolkit" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[[package]] -name = "colorama" -version = "0.4.4" -description = "Cross-platform colored terminal text." -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - -[[package]] -name = "coverage" -version = "6.2" -description = "Code coverage measurement for Python" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.extras] -toml = ["tomli"] - -[[package]] -name = "django" -version = "4.0" -description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design." 
-category = "main" -optional = false -python-versions = ">=3.8" - -[package.dependencies] -asgiref = ">=3.4.1,<4" -sqlparse = ">=0.2.2" -tzdata = {version = "*", markers = "sys_platform == \"win32\""} - -[package.extras] -argon2 = ["argon2-cffi (>=19.1.0)"] -bcrypt = ["bcrypt"] - -[[package]] -name = "django-coverage-plugin" -version = "2.0.2" -description = "Django template coverage.py plugin" -category = "dev" -optional = false -python-versions = "*" - -[package.dependencies] -coverage = "*" -six = ">=1.4.0" - -[[package]] -name = "django-debug-toolbar" -version = "3.2.4" -description = "A configurable set of panels that display various debug information about the current request/response." -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -Django = ">=2.2" -sqlparse = ">=0.2.0" - -[[package]] -name = "django-environ" -version = "0.8.1" -description = "A package that allows you to utilize 12factor inspired environment variables to configure your Django application." -category = "main" -optional = false -python-versions = ">=3.4,<4" - -[package.extras] -develop = ["coverage[toml] (>=5.0a4)", "pytest (>=4.6.11)", "furo (>=2021.8.17b43,<2021.9.0)", "sphinx (>=3.5.0)", "sphinx-notfound-page"] -docs = ["furo (>=2021.8.17b43,<2021.9.0)", "sphinx (>=3.5.0)", "sphinx-notfound-page"] -testing = ["coverage[toml] (>=5.0a4)", "pytest (>=4.6.11)"] - -[[package]] -name = "docutils" -version = "0.18.1" -description = "Docutils -- Python Documentation Utilities" -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - -[[package]] -name = "flake8" -version = "3.9.2" -description = "the modular source code checker: pep8 pyflakes and co" -category = "dev" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" - -[package.dependencies] -mccabe = ">=0.6.0,<0.7.0" -pycodestyle = ">=2.7.0,<2.8.0" -pyflakes = ">=2.3.0,<2.4.0" - -[[package]] -name = "flake8-bugbear" -version = "21.11.29" -description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -attrs = ">=19.2.0" -flake8 = ">=3.0.0" - -[package.extras] -dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit"] - -[[package]] -name = "flake8-django" -version = "1.1.2" -description = "Plugin to catch bad style specific to Django Projects." 
-category = "dev" -optional = false -python-versions = ">=3.6,<4.0" - -[package.dependencies] -flake8 = ">=3.8.4,<4.0.0" - -[[package]] -name = "icalendar" -version = "4.0.9" -description = "iCalendar parser/generator" -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[package.dependencies] -python-dateutil = "*" -pytz = "*" - -[[package]] -name = "ics" -version = "0.7" -description = "Python icalendar (rfc5545) parser" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -arrow = ">=0.11,<0.15" -python-dateutil = "*" -six = ">1.5" -tatsu = ">4.2" - -[[package]] -name = "idna" -version = "3.3" -description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" -optional = false -python-versions = ">=3.5" - -[[package]] -name = "iniconfig" -version = "1.1.1" -description = "iniconfig: brain-dead simple config-ini parsing" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "lxml" -version = "4.7.1" -description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" - -[package.extras] -cssselect = ["cssselect (>=0.7)"] -html5 = ["html5lib"] -htmlsoup = ["beautifulsoup4"] -source = ["Cython (>=0.29.7)"] - -[[package]] -name = "mccabe" -version = "0.6.1" -description = "McCabe checker, plugin for flake8" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "mypy-extensions" -version = "0.4.3" -description = "Experimental type system extensions for programs checked with the mypy typechecker." -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "packaging" -version = "21.3" -description = "Core utilities for Python packages" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" - -[[package]] -name = "pathspec" -version = "0.9.0" -description = "Utility library for gitignore style pattern matching of file paths." -category = "dev" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" - -[[package]] -name = "platformdirs" -version = "2.4.0" -description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
-category = "dev" -optional = false -python-versions = ">=3.6" - -[package.extras] -docs = ["Sphinx (>=4)", "furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)"] -test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] - -[[package]] -name = "pluggy" -version = "1.0.0" -description = "plugin and hook calling mechanisms for python" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - -[[package]] -name = "py" -version = "1.11.0" -description = "library with cross-python path, ini-parsing, io, code, log facilities" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - -[[package]] -name = "pycodestyle" -version = "2.7.0" -description = "Python style guide checker" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[[package]] -name = "pyflakes" -version = "2.3.1" -description = "passive checker of Python programs" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[[package]] -name = "pyparsing" -version = "3.0.6" -description = "Python parsing module" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.extras] -diagrams = ["jinja2", "railroad-diagrams"] - -[[package]] -name = "pytest" -version = "6.2.5" -description = "pytest: simple powerful testing with Python" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} -attrs = ">=19.2.0" -colorama = {version = "*", markers = "sys_platform == \"win32\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=0.12,<2.0" -py = ">=1.8.2" -toml = "*" - -[package.extras] -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] - -[[package]] -name = "python-dateutil" -version = "2.8.2" -description = "Extensions to the standard Python datetime module" -category = "main" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" - -[package.dependencies] -six = ">=1.5" - -[[package]] -name = "pytz" -version = "2021.3" -description = "World timezone definitions, modern and historical" -category = "main" -optional = false -python-versions = "*" - -[[package]] -name = "requests" -version = "2.26.0" -description = "Python HTTP for Humans." 
-category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""} -idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""} -urllib3 = ">=1.21.1,<1.27" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] -use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] - -[[package]] -name = "requests-cache" -version = "0.8.1" -description = "A transparent persistent cache for the requests library" -category = "main" -optional = false -python-versions = ">=3.7,<4.0" - -[package.dependencies] -appdirs = ">=1.4.4,<2.0.0" -attrs = ">=21.2,<22.0" -cattrs = ">=1.8,<2.0" -requests = ">=2.22,<3.0" -url-normalize = ">=1.4,<2.0" -urllib3 = ">=1.25.5,<2.0.0" - -[package.extras] -dynamodb = ["boto3 (>=1.15,<2.0)", "botocore (>=1.18,<2.0)"] -all = ["boto3 (>=1.15,<2.0)", "botocore (>=1.18,<2.0)", "pymongo (>=3.0,<4.0)", "redis (>=3.0,<4.0)", "itsdangerous (>=2.0,<3.0)", "pyyaml (>=5.4)", "ujson (>=4.0)"] -mongodb = ["pymongo (>=3.0,<4.0)"] -redis = ["redis (>=3.0,<4.0)"] -bson = ["bson (>=0.5)"] -security = ["itsdangerous (>=2.0,<3.0)"] -yaml = ["pyyaml (>=5.4)"] -json = ["ujson (>=4.0)"] -docs = ["furo (>=2021.8.11-beta.42)", "linkify-it-py (>=1.0.1,<2.0.0)", "myst-parser (>=0.15.1,<0.16.0)", "sphinx (==4.1.2)", "sphinx-autodoc-typehints (>=1.11,<2.0)", "sphinx-automodapi (>=0.13,<0.14)", "sphinx-copybutton (>=0.3,<0.5)", "sphinx-inline-tabs (>=2021.4.11-beta.9,<2022.0.0)", "sphinx-notfound-page", "sphinx-panels (>=0.6,<0.7)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] - -[[package]] -name = "requests-mock" -version = "1.9.3" -description = "Mock out responses from the requests package" -category = "dev" -optional = false -python-versions = "*" - -[package.dependencies] -requests = ">=2.3,<3" -six = "*" - -[package.extras] -fixture = ["fixtures"] -test = ["fixtures", "mock", "purl", "pytest", "sphinx", "testrepository (>=0.0.18)", "testtools"] - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" - -[[package]] -name = "sqlparse" -version = "0.4.2" -description = "A non-validating SQL parser." -category = "main" -optional = false -python-versions = ">=3.5" - -[[package]] -name = "tatsu" -version = "5.7.0" -description = "TatSu takes a grammar in a variation of EBNF as input, and outputs a memoizing PEG/Packrat parser in Python." -category = "main" -optional = false -python-versions = ">=3.8" - -[package.extras] -future-regex = ["regex"] - -[[package]] -name = "tblib" -version = "1.7.0" -description = "Traceback serialization library." 
-category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - -[[package]] -name = "toml" -version = "0.10.2" -description = "Python Library for Tom's Obvious, Minimal Language" -category = "dev" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" - -[[package]] -name = "tomli" -version = "1.2.3" -description = "A lil' TOML parser" -category = "dev" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "typing-extensions" -version = "4.0.1" -description = "Backported and Experimental Type Hints for Python 3.6+" -category = "dev" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "tzdata" -version = "2021.5" -description = "Provider of IANA time zone data" -category = "main" -optional = false -python-versions = ">=2" - -[[package]] -name = "url-normalize" -version = "1.4.3" -description = "URL normalization for Python" -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" - -[package.dependencies] -six = "*" - -[[package]] -name = "urllib3" -version = "1.26.7" -description = "HTTP library with thread-safe connection pooling, file post, and more." -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" - -[package.extras] -brotli = ["brotlipy (>=0.6.0)"] -secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] -socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] - -[[package]] -name = "xmltodict" -version = "0.12.0" -description = "Makes working with XML feel like you are working with JSON" -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[[package]] -name = "ytmusicapi" -version = "0.19.5" -description = "Unofficial API for YouTube Music" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -requests = ">=2.22" - -[package.extras] -dev = ["pre-commit", "flake8", "yapf", "coverage", "sphinx", "sphinx-rtd-theme"] - -[metadata] -lock-version = "1.1" -python-versions = "^3.9" -content-hash = "825dc29b2fbb478aa33c963c0948a02deaace0365bace562f94e627814935bfb" - -[metadata.files] -appdirs = [ - {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, - {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, -] -arrow = [ - {file = "arrow-0.14.7-py2.py3-none-any.whl", hash = "sha256:4bfacea734ead51495dc47df00421ecfd4ca1f2c0fbe58b9a26eaeddedc31caf"}, - {file = "arrow-0.14.7.tar.gz", hash = "sha256:67f8be7c0cf420424bc62d8d7dc40b44e4bb2f7b515f9cc2954fb36e35797656"}, -] -asgiref = [ - {file = "asgiref-3.4.1-py3-none-any.whl", hash = "sha256:ffc141aa908e6f175673e7b1b3b7af4fdb0ecb738fc5c8b88f69f055c2415214"}, - {file = "asgiref-3.4.1.tar.gz", hash = "sha256:4ef1ab46b484e3c706329cedeff284a5d40824200638503f5768edb6de7d58e9"}, -] -atomicwrites = [ - {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, - {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, -] -attrs = [ - {file = "attrs-21.2.0-py2.py3-none-any.whl", hash = "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1"}, - {file = "attrs-21.2.0.tar.gz", hash = 
"sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"}, -] -black = [ - {file = "black-21.12b0-py3-none-any.whl", hash = "sha256:a615e69ae185e08fdd73e4715e260e2479c861b5740057fde6e8b4e3b7dd589f"}, - {file = "black-21.12b0.tar.gz", hash = "sha256:77b80f693a569e2e527958459634f18df9b0ba2625ba4e0c2d5da5be42e6f2b3"}, -] -cattrs = [ - {file = "cattrs-1.9.0-py3-none-any.whl", hash = "sha256:8eca49962b1bfc09c24d442aa55688be88efe5c24aeef89d3be135614b95c678"}, - {file = "cattrs-1.9.0.tar.gz", hash = "sha256:1ef33f089e0a494e8d1b487508356f055c865b1955b125c00c991a4358543c80"}, -] -certifi = [ - {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, - {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"}, -] -charset-normalizer = [ - {file = "charset-normalizer-2.0.9.tar.gz", hash = "sha256:b0b883e8e874edfdece9c28f314e3dd5badf067342e42fb162203335ae61aa2c"}, - {file = "charset_normalizer-2.0.9-py3-none-any.whl", hash = "sha256:1eecaa09422db5be9e29d7fc65664e6c33bd06f9ced7838578ba40d58bdf3721"}, -] -click = [ - {file = "click-8.0.3-py3-none-any.whl", hash = "sha256:353f466495adaeb40b6b5f592f9f91cb22372351c84caeb068132442a4518ef3"}, - {file = "click-8.0.3.tar.gz", hash = "sha256:410e932b050f5eed773c4cda94de75971c89cdb3155a72a0831139a79e5ecb5b"}, -] -colorama = [ - {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, - {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, -] -coverage = [ - {file = "coverage-6.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6dbc1536e105adda7a6312c778f15aaabe583b0e9a0b0a324990334fd458c94b"}, - {file = "coverage-6.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:174cf9b4bef0db2e8244f82059a5a72bd47e1d40e71c68ab055425172b16b7d0"}, - {file = "coverage-6.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:92b8c845527eae547a2a6617d336adc56394050c3ed8a6918683646328fbb6da"}, - {file = "coverage-6.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c7912d1526299cb04c88288e148c6c87c0df600eca76efd99d84396cfe00ef1d"}, - {file = "coverage-6.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d5d2033d5db1d58ae2d62f095e1aefb6988af65b4b12cb8987af409587cc0739"}, - {file = "coverage-6.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3feac4084291642165c3a0d9eaebedf19ffa505016c4d3db15bfe235718d4971"}, - {file = "coverage-6.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:276651978c94a8c5672ea60a2656e95a3cce2a3f31e9fb2d5ebd4c215d095840"}, - {file = "coverage-6.2-cp310-cp310-win32.whl", hash = "sha256:f506af4f27def639ba45789fa6fde45f9a217da0be05f8910458e4557eed020c"}, - {file = "coverage-6.2-cp310-cp310-win_amd64.whl", hash = "sha256:3f7c17209eef285c86f819ff04a6d4cbee9b33ef05cbcaae4c0b4e8e06b3ec8f"}, - {file = "coverage-6.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:13362889b2d46e8d9f97c421539c97c963e34031ab0cb89e8ca83a10cc71ac76"}, - {file = "coverage-6.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:22e60a3ca5acba37d1d4a2ee66e051f5b0e1b9ac950b5b0cf4aa5366eda41d47"}, - {file = "coverage-6.2-cp311-cp311-win_amd64.whl", hash = 
"sha256:b637c57fdb8be84e91fac60d9325a66a5981f8086c954ea2772efe28425eaf64"}, - {file = "coverage-6.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f467bbb837691ab5a8ca359199d3429a11a01e6dfb3d9dcc676dc035ca93c0a9"}, - {file = "coverage-6.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2641f803ee9f95b1f387f3e8f3bf28d83d9b69a39e9911e5bfee832bea75240d"}, - {file = "coverage-6.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1219d760ccfafc03c0822ae2e06e3b1248a8e6d1a70928966bafc6838d3c9e48"}, - {file = "coverage-6.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9a2b5b52be0a8626fcbffd7e689781bf8c2ac01613e77feda93d96184949a98e"}, - {file = "coverage-6.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8e2c35a4c1f269704e90888e56f794e2d9c0262fb0c1b1c8c4ee44d9b9e77b5d"}, - {file = "coverage-6.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:5d6b09c972ce9200264c35a1d53d43ca55ef61836d9ec60f0d44273a31aa9f17"}, - {file = "coverage-6.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e3db840a4dee542e37e09f30859f1612da90e1c5239a6a2498c473183a50e781"}, - {file = "coverage-6.2-cp36-cp36m-win32.whl", hash = "sha256:4e547122ca2d244f7c090fe3f4b5a5861255ff66b7ab6d98f44a0222aaf8671a"}, - {file = "coverage-6.2-cp36-cp36m-win_amd64.whl", hash = "sha256:01774a2c2c729619760320270e42cd9e797427ecfddd32c2a7b639cdc481f3c0"}, - {file = "coverage-6.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fb8b8ee99b3fffe4fd86f4c81b35a6bf7e4462cba019997af2fe679365db0c49"}, - {file = "coverage-6.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:619346d57c7126ae49ac95b11b0dc8e36c1dd49d148477461bb66c8cf13bb521"}, - {file = "coverage-6.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0a7726f74ff63f41e95ed3a89fef002916c828bb5fcae83b505b49d81a066884"}, - {file = "coverage-6.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cfd9386c1d6f13b37e05a91a8583e802f8059bebfccde61a418c5808dea6bbfa"}, - {file = "coverage-6.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:17e6c11038d4ed6e8af1407d9e89a2904d573be29d51515f14262d7f10ef0a64"}, - {file = "coverage-6.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c254b03032d5a06de049ce8bca8338a5185f07fb76600afff3c161e053d88617"}, - {file = "coverage-6.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:dca38a21e4423f3edb821292e97cec7ad38086f84313462098568baedf4331f8"}, - {file = "coverage-6.2-cp37-cp37m-win32.whl", hash = "sha256:600617008aa82032ddeace2535626d1bc212dfff32b43989539deda63b3f36e4"}, - {file = "coverage-6.2-cp37-cp37m-win_amd64.whl", hash = "sha256:bf154ba7ee2fd613eb541c2bc03d3d9ac667080a737449d1a3fb342740eb1a74"}, - {file = "coverage-6.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f9afb5b746781fc2abce26193d1c817b7eb0e11459510fba65d2bd77fe161d9e"}, - {file = "coverage-6.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edcada2e24ed68f019175c2b2af2a8b481d3d084798b8c20d15d34f5c733fa58"}, - {file = "coverage-6.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a9c8c4283e17690ff1a7427123ffb428ad6a52ed720d550e299e8291e33184dc"}, - {file = "coverage-6.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:f614fc9956d76d8a88a88bb41ddc12709caa755666f580af3a688899721efecd"}, - {file = "coverage-6.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9365ed5cce5d0cf2c10afc6add145c5037d3148585b8ae0e77cc1efdd6aa2953"}, - {file = "coverage-6.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8bdfe9ff3a4ea37d17f172ac0dff1e1c383aec17a636b9b35906babc9f0f5475"}, - {file = "coverage-6.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:63c424e6f5b4ab1cf1e23a43b12f542b0ec2e54f99ec9f11b75382152981df57"}, - {file = "coverage-6.2-cp38-cp38-win32.whl", hash = "sha256:49dbff64961bc9bdd2289a2bda6a3a5a331964ba5497f694e2cbd540d656dc1c"}, - {file = "coverage-6.2-cp38-cp38-win_amd64.whl", hash = "sha256:9a29311bd6429be317c1f3fe4bc06c4c5ee45e2fa61b2a19d4d1d6111cb94af2"}, - {file = "coverage-6.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03b20e52b7d31be571c9c06b74746746d4eb82fc260e594dc662ed48145e9efd"}, - {file = "coverage-6.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:215f8afcc02a24c2d9a10d3790b21054b58d71f4b3c6f055d4bb1b15cecce685"}, - {file = "coverage-6.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a4bdeb0a52d1d04123b41d90a4390b096f3ef38eee35e11f0b22c2d031222c6c"}, - {file = "coverage-6.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c332d8f8d448ded473b97fefe4a0983265af21917d8b0cdcb8bb06b2afe632c3"}, - {file = "coverage-6.2-cp39-cp39-win32.whl", hash = "sha256:6e1394d24d5938e561fbeaa0cd3d356207579c28bd1792f25a068743f2d5b282"}, - {file = "coverage-6.2-cp39-cp39-win_amd64.whl", hash = "sha256:86f2e78b1eff847609b1ca8050c9e1fa3bd44ce755b2ec30e70f2d3ba3844644"}, - {file = "coverage-6.2-pp36.pp37.pp38-none-any.whl", hash = "sha256:5829192582c0ec8ca4a2532407bc14c2f338d9878a10442f5d03804a95fac9de"}, - {file = "coverage-6.2.tar.gz", hash = "sha256:e2cad8093172b7d1595b4ad66f24270808658e11acf43a8f95b41276162eb5b8"}, -] -django = [ - {file = "Django-4.0-py3-none-any.whl", hash = "sha256:59304646ebc6a77b9b6a59adc67d51ecb03c5e3d63ed1f14c909cdfda84e8010"}, - {file = "Django-4.0.tar.gz", hash = "sha256:d5a8a14da819a8b9237ee4d8c78dfe056ff6e8a7511987be627192225113ee75"}, -] -django-coverage-plugin = [ - {file = "django_coverage_plugin-2.0.2-py3-none-any.whl", hash = "sha256:4206c85ffba0301f83aecc38e5b01b1b9a4b45a545d9456a827e3fabea18d952"}, - {file = "django_coverage_plugin-2.0.2.tar.gz", hash = "sha256:e91e3a0c8de2b3766a144cdd30dbbf7a79e5c532a5dcc1373ce7eaad83b358b3"}, -] -django-debug-toolbar = [ - {file = "django-debug-toolbar-3.2.4.tar.gz", hash = "sha256:644bbd5c428d3283aa9115722471769cac1bec189edf3a0c855fd8ff870375a9"}, - {file = "django_debug_toolbar-3.2.4-py3-none-any.whl", hash = "sha256:6b633b6cfee24f232d73569870f19aa86c819d750e7f3e833f2344a9eb4b4409"}, -] -django-environ = [ - {file = "django-environ-0.8.1.tar.gz", hash = "sha256:6f0bc902b43891656b20486938cba0861dc62892784a44919170719572a534cb"}, - {file = "django_environ-0.8.1-py2.py3-none-any.whl", hash = "sha256:42593bee519a527602a467c7b682aee1a051c2597f98c45f4f4f44169ecdb6e5"}, -] -docutils = [ - {file = "docutils-0.18.1-py2.py3-none-any.whl", hash = "sha256:23010f129180089fbcd3bc08cfefccb3b890b0050e1ca00c867036e9d161b98c"}, - {file = "docutils-0.18.1.tar.gz", hash = "sha256:679987caf361a7539d76e584cbeddc311e3aee937877c87346f31debc63e9d06"}, -] -flake8 = [ - {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"}, - 
{file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"}, -] -flake8-bugbear = [ - {file = "flake8-bugbear-21.11.29.tar.gz", hash = "sha256:8b04cb2fafc6a78e1a9d873bd3988e4282f7959bb6b0d7c1ae648ec09b937a7b"}, - {file = "flake8_bugbear-21.11.29-py36.py37.py38-none-any.whl", hash = "sha256:179e41ddae5de5e3c20d1f61736feeb234e70958fbb56ab3c28a67739c8e9a82"}, -] -flake8-django = [ - {file = "flake8-django-1.1.2.tar.gz", hash = "sha256:b4314abb5bacda450d2eae564a0604447111b1b98188e46bca41682ad2ab59d6"}, - {file = "flake8_django-1.1.2-py3-none-any.whl", hash = "sha256:f8bfdbe8352c2c5f3788c2a2f6652dd2604af24af07a5aa112206d63ae228fdc"}, -] -icalendar = [ - {file = "icalendar-4.0.9-py2.py3-none-any.whl", hash = "sha256:cf1446ffdf1b6ad469451a8966cfa7694f5fac796ac6fc7cd93e28c51a637d2c"}, - {file = "icalendar-4.0.9.tar.gz", hash = "sha256:cc73fa9c848744843046228cb66ea86cd8c18d73a51b140f7c003f760b84a997"}, -] -ics = [ - {file = "ics-0.7-py2.py3-none-any.whl", hash = "sha256:bf5fbdef6e1e073afdadf1b996f0271186dd114a148e38e795919a1ae644d6ac"}, - {file = "ics-0.7-py3.7.egg", hash = "sha256:3b606205b9582ad27dff77f9b227a30d02fdac532731927fe39df1f1ddf8673f"}, - {file = "ics-0.7.tar.gz", hash = "sha256:81113a2bb3166c1afcd71cd450c968d40efc385601e9d8344733e00ad8f53429"}, -] -idna = [ - {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, - {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, -] -iniconfig = [ - {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, - {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, -] -lxml = [ - {file = "lxml-4.7.1-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:d546431636edb1d6a608b348dd58cc9841b81f4116745857b6cb9f8dadb2725f"}, - {file = "lxml-4.7.1-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6308062534323f0d3edb4e702a0e26a76ca9e0e23ff99be5d82750772df32a9e"}, - {file = "lxml-4.7.1-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:f76dbe44e31abf516114f6347a46fa4e7c2e8bceaa4b6f7ee3a0a03c8eba3c17"}, - {file = "lxml-4.7.1-cp27-cp27m-win32.whl", hash = "sha256:d5618d49de6ba63fe4510bdada62d06a8acfca0b4b5c904956c777d28382b419"}, - {file = "lxml-4.7.1-cp27-cp27m-win_amd64.whl", hash = "sha256:9393a05b126a7e187f3e38758255e0edf948a65b22c377414002d488221fdaa2"}, - {file = "lxml-4.7.1-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:50d3dba341f1e583265c1a808e897b4159208d814ab07530202b6036a4d86da5"}, - {file = "lxml-4.7.1-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:44f552e0da3c8ee3c28e2eb82b0b784200631687fc6a71277ea8ab0828780e7d"}, - {file = "lxml-4.7.1-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:e662c6266e3a275bdcb6bb049edc7cd77d0b0f7e119a53101d367c841afc66dc"}, - {file = "lxml-4.7.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4c093c571bc3da9ebcd484e001ba18b8452903cd428c0bc926d9b0141bcb710e"}, - {file = "lxml-4.7.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:3e26ad9bc48d610bf6cc76c506b9e5ad9360ed7a945d9be3b5b2c8535a0145e3"}, - {file = "lxml-4.7.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = 
"sha256:a5f623aeaa24f71fce3177d7fee875371345eb9102b355b882243e33e04b7175"}, - {file = "lxml-4.7.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7b5e2acefd33c259c4a2e157119c4373c8773cf6793e225006a1649672ab47a6"}, - {file = "lxml-4.7.1-cp310-cp310-win32.whl", hash = "sha256:67fa5f028e8a01e1d7944a9fb616d1d0510d5d38b0c41708310bd1bc45ae89f6"}, - {file = "lxml-4.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:b1d381f58fcc3e63fcc0ea4f0a38335163883267f77e4c6e22d7a30877218a0e"}, - {file = "lxml-4.7.1-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:38d9759733aa04fb1697d717bfabbedb21398046bd07734be7cccc3d19ea8675"}, - {file = "lxml-4.7.1-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:dfd0d464f3d86a1460683cd742306d1138b4e99b79094f4e07e1ca85ee267fe7"}, - {file = "lxml-4.7.1-cp35-cp35m-win32.whl", hash = "sha256:534e946bce61fd162af02bad7bfd2daec1521b71d27238869c23a672146c34a5"}, - {file = "lxml-4.7.1-cp35-cp35m-win_amd64.whl", hash = "sha256:6ec829058785d028f467be70cd195cd0aaf1a763e4d09822584ede8c9eaa4b03"}, - {file = "lxml-4.7.1-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:ade74f5e3a0fd17df5782896ddca7ddb998845a5f7cd4b0be771e1ffc3b9aa5b"}, - {file = "lxml-4.7.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:41358bfd24425c1673f184d7c26c6ae91943fe51dfecc3603b5e08187b4bcc55"}, - {file = "lxml-4.7.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:6e56521538f19c4a6690f439fefed551f0b296bd785adc67c1777c348beb943d"}, - {file = "lxml-4.7.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5b0f782f0e03555c55e37d93d7a57454efe7495dab33ba0ccd2dbe25fc50f05d"}, - {file = "lxml-4.7.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:490712b91c65988012e866c411a40cc65b595929ececf75eeb4c79fcc3bc80a6"}, - {file = "lxml-4.7.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:34c22eb8c819d59cec4444d9eebe2e38b95d3dcdafe08965853f8799fd71161d"}, - {file = "lxml-4.7.1-cp36-cp36m-win32.whl", hash = "sha256:2a906c3890da6a63224d551c2967413b8790a6357a80bf6b257c9a7978c2c42d"}, - {file = "lxml-4.7.1-cp36-cp36m-win_amd64.whl", hash = "sha256:36b16fecb10246e599f178dd74f313cbdc9f41c56e77d52100d1361eed24f51a"}, - {file = "lxml-4.7.1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:a5edc58d631170de90e50adc2cc0248083541affef82f8cd93bea458e4d96db8"}, - {file = "lxml-4.7.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:87c1b0496e8c87ec9db5383e30042357b4839b46c2d556abd49ec770ce2ad868"}, - {file = "lxml-4.7.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:0a5f0e4747f31cff87d1eb32a6000bde1e603107f632ef4666be0dc065889c7a"}, - {file = "lxml-4.7.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:bf6005708fc2e2c89a083f258b97709559a95f9a7a03e59f805dd23c93bc3986"}, - {file = "lxml-4.7.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fc15874816b9320581133ddc2096b644582ab870cf6a6ed63684433e7af4b0d3"}, - {file = "lxml-4.7.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:0b5e96e25e70917b28a5391c2ed3ffc6156513d3db0e1476c5253fcd50f7a944"}, - {file = "lxml-4.7.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ec9027d0beb785a35aa9951d14e06d48cfbf876d8ff67519403a2522b181943b"}, - {file = "lxml-4.7.1-cp37-cp37m-win32.whl", hash = 
"sha256:9fbc0dee7ff5f15c4428775e6fa3ed20003140560ffa22b88326669d53b3c0f4"}, - {file = "lxml-4.7.1-cp37-cp37m-win_amd64.whl", hash = "sha256:1104a8d47967a414a436007c52f533e933e5d52574cab407b1e49a4e9b5ddbd1"}, - {file = "lxml-4.7.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:fc9fb11b65e7bc49f7f75aaba1b700f7181d95d4e151cf2f24d51bfd14410b77"}, - {file = "lxml-4.7.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:317bd63870b4d875af3c1be1b19202de34c32623609ec803b81c99193a788c1e"}, - {file = "lxml-4.7.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:610807cea990fd545b1559466971649e69302c8a9472cefe1d6d48a1dee97440"}, - {file = "lxml-4.7.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:09b738360af8cb2da275998a8bf79517a71225b0de41ab47339c2beebfff025f"}, - {file = "lxml-4.7.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6a2ab9d089324d77bb81745b01f4aeffe4094306d939e92ba5e71e9a6b99b71e"}, - {file = "lxml-4.7.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:eed394099a7792834f0cb4a8f615319152b9d801444c1c9e1b1a2c36d2239f9e"}, - {file = "lxml-4.7.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:735e3b4ce9c0616e85f302f109bdc6e425ba1670a73f962c9f6b98a6d51b77c9"}, - {file = "lxml-4.7.1-cp38-cp38-win32.whl", hash = "sha256:772057fba283c095db8c8ecde4634717a35c47061d24f889468dc67190327bcd"}, - {file = "lxml-4.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:13dbb5c7e8f3b6a2cf6e10b0948cacb2f4c9eb05029fe31c60592d08ac63180d"}, - {file = "lxml-4.7.1-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:718d7208b9c2d86aaf0294d9381a6acb0158b5ff0f3515902751404e318e02c9"}, - {file = "lxml-4.7.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:5bee1b0cbfdb87686a7fb0e46f1d8bd34d52d6932c0723a86de1cc532b1aa489"}, - {file = "lxml-4.7.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:e410cf3a2272d0a85526d700782a2fa92c1e304fdcc519ba74ac80b8297adf36"}, - {file = "lxml-4.7.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:585ea241ee4961dc18a95e2f5581dbc26285fcf330e007459688096f76be8c42"}, - {file = "lxml-4.7.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a555e06566c6dc167fbcd0ad507ff05fd9328502aefc963cb0a0547cfe7f00db"}, - {file = "lxml-4.7.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:adaab25be351fff0d8a691c4f09153647804d09a87a4e4ea2c3f9fe9e8651851"}, - {file = "lxml-4.7.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:82d16a64236970cb93c8d63ad18c5b9f138a704331e4b916b2737ddfad14e0c4"}, - {file = "lxml-4.7.1-cp39-cp39-win32.whl", hash = "sha256:59e7da839a1238807226f7143c68a479dee09244d1b3cf8c134f2fce777d12d0"}, - {file = "lxml-4.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:a1bbc4efa99ed1310b5009ce7f3a1784698082ed2c1ef3895332f5df9b3b92c2"}, - {file = "lxml-4.7.1-pp37-pypy37_pp73-macosx_10_14_x86_64.whl", hash = "sha256:0607ff0988ad7e173e5ddf7bf55ee65534bd18a5461183c33e8e41a59e89edf4"}, - {file = "lxml-4.7.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:6c198bfc169419c09b85ab10cb0f572744e686f40d1e7f4ed09061284fc1303f"}, - {file = "lxml-4.7.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = 
"sha256:a58d78653ae422df6837dd4ca0036610b8cb4962b5cfdbd337b7b24de9e5f98a"}, - {file = "lxml-4.7.1-pp38-pypy38_pp73-macosx_10_14_x86_64.whl", hash = "sha256:e18281a7d80d76b66a9f9e68a98cf7e1d153182772400d9a9ce855264d7d0ce7"}, - {file = "lxml-4.7.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8e54945dd2eeb50925500957c7c579df3cd07c29db7810b83cf30495d79af267"}, - {file = "lxml-4.7.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:447d5009d6b5447b2f237395d0018901dcc673f7d9f82ba26c1b9f9c3b444b60"}, - {file = "lxml-4.7.1.tar.gz", hash = "sha256:a1613838aa6b89af4ba10a0f3a972836128801ed008078f8c1244e65958f1b24"}, -] -mccabe = [ - {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, - {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, -] -mypy-extensions = [ - {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, - {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, -] -packaging = [ - {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, - {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, -] -pathspec = [ - {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"}, - {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, -] -platformdirs = [ - {file = "platformdirs-2.4.0-py3-none-any.whl", hash = "sha256:8868bbe3c3c80d42f20156f22e7131d2fb321f5bc86a2a345375c6481a67021d"}, - {file = "platformdirs-2.4.0.tar.gz", hash = "sha256:367a5e80b3d04d2428ffa76d33f124cf11e8fff2acdaa9b43d545f5c7d661ef2"}, -] -pluggy = [ - {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, - {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, -] -py = [ - {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, - {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, -] -pycodestyle = [ - {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"}, - {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, -] -pyflakes = [ - {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, - {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, -] -pyparsing = [ - {file = "pyparsing-3.0.6-py3-none-any.whl", hash = "sha256:04ff808a5b90911829c55c4e26f75fa5ca8a2f5f36aa3a51f68e27033341d3e4"}, - {file = "pyparsing-3.0.6.tar.gz", hash = "sha256:d9bdec0013ef1eb5a84ab39a3b3868911598afa494f5faa038647101504e2b81"}, -] -pytest = [ - {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, - {file = 
"pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, -] -python-dateutil = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, -] -pytz = [ - {file = "pytz-2021.3-py2.py3-none-any.whl", hash = "sha256:3672058bc3453457b622aab7a1c3bfd5ab0bdae451512f6cf25f64ed37f5b87c"}, - {file = "pytz-2021.3.tar.gz", hash = "sha256:acad2d8b20a1af07d4e4c9d2e9285c5ed9104354062f275f3fcd88dcef4f1326"}, -] -requests = [ - {file = "requests-2.26.0-py2.py3-none-any.whl", hash = "sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24"}, - {file = "requests-2.26.0.tar.gz", hash = "sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7"}, -] -requests-cache = [ - {file = "requests-cache-0.8.1.tar.gz", hash = "sha256:27d3eb276ab3affa9864dfc0475241d6d960dd566d57ec46ffa7759c2c74ed1c"}, - {file = "requests_cache-0.8.1-py3-none-any.whl", hash = "sha256:f36104f95ee78bba3d13a2692fe7c0fa8623872275949836789d98d359e79390"}, -] -requests-mock = [ - {file = "requests-mock-1.9.3.tar.gz", hash = "sha256:8d72abe54546c1fc9696fa1516672f1031d72a55a1d66c85184f972a24ba0eba"}, - {file = "requests_mock-1.9.3-py2.py3-none-any.whl", hash = "sha256:0a2d38a117c08bb78939ec163522976ad59a6b7fdd82b709e23bb98004a44970"}, -] -six = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] -sqlparse = [ - {file = "sqlparse-0.4.2-py3-none-any.whl", hash = "sha256:48719e356bb8b42991bdbb1e8b83223757b93789c00910a616a071910ca4a64d"}, - {file = "sqlparse-0.4.2.tar.gz", hash = "sha256:0c00730c74263a94e5a9919ade150dfc3b19c574389985446148402998287dae"}, -] -tatsu = [ - {file = "TatSu-5.7.0-py2.py3-none-any.whl", hash = "sha256:9eebadfc2889d8e82e197df22913df56ff204bf4cfc62db49a5c7edd084e10b4"}, - {file = "TatSu-5.7.0.zip", hash = "sha256:428136cd4aa9600fcd01428bd5667fc752062f54bd0148dc1e64fee7b8d05fa4"}, -] -tblib = [ - {file = "tblib-1.7.0-py2.py3-none-any.whl", hash = "sha256:289fa7359e580950e7d9743eab36b0691f0310fce64dee7d9c31065b8f723e23"}, - {file = "tblib-1.7.0.tar.gz", hash = "sha256:059bd77306ea7b419d4f76016aef6d7027cc8a0785579b5aad198803435f882c"}, -] -toml = [ - {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, - {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, -] -tomli = [ - {file = "tomli-1.2.3-py3-none-any.whl", hash = "sha256:e3069e4be3ead9668e21cb9b074cd948f7b3113fd9c8bba083f48247aab8b11c"}, - {file = "tomli-1.2.3.tar.gz", hash = "sha256:05b6166bff487dc068d322585c7ea4ef78deed501cc124060e0f238e89a9231f"}, -] -typing-extensions = [ - {file = "typing_extensions-4.0.1-py3-none-any.whl", hash = "sha256:7f001e5ac290a0c0401508864c7ec868be4e701886d5b573a9528ed3973d9d3b"}, - {file = "typing_extensions-4.0.1.tar.gz", hash = "sha256:4ca091dea149f945ec56afb48dae714f21e8692ef22a395223bcd328961b6a0e"}, -] -tzdata = [ - {file = "tzdata-2021.5-py2.py3-none-any.whl", hash = "sha256:3eee491e22ebfe1e5cfcc97a4137cd70f092ce59144d81f8924a844de05ba8f5"}, - {file = "tzdata-2021.5.tar.gz", hash = 
"sha256:68dbe41afd01b867894bbdfd54fa03f468cfa4f0086bfb4adcd8de8f24f3ee21"}, -] -url-normalize = [ - {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"}, - {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"}, -] -urllib3 = [ - {file = "urllib3-1.26.7-py2.py3-none-any.whl", hash = "sha256:c4fdf4019605b6e5423637e01bc9fe4daef873709a7973e195ceba0a62bbc844"}, - {file = "urllib3-1.26.7.tar.gz", hash = "sha256:4987c65554f7a2dbf30c18fd48778ef124af6fab771a377103da0585e2336ece"}, -] -xmltodict = [ - {file = "xmltodict-0.12.0-py2.py3-none-any.whl", hash = "sha256:8bbcb45cc982f48b2ca8fe7e7827c5d792f217ecf1792626f808bf41c3b86051"}, - {file = "xmltodict-0.12.0.tar.gz", hash = "sha256:50d8c638ed7ecb88d90561beedbf720c9b4e851a9fa6c47ebd64e99d166d8a21"}, -] -ytmusicapi = [ - {file = "ytmusicapi-0.19.5-py3-none-any.whl", hash = "sha256:23ed35df2b7977e3e7662c0ac478864fb23d025ba5de8f2c269cabf2903b8d25"}, - {file = "ytmusicapi-0.19.5.tar.gz", hash = "sha256:85a65ee5a7f0914f300fcb3696b98726e568e933b709c43fd13b896c73e6aaf5"}, -] diff --git a/pyproject.toml b/pyproject.toml index d121c3e..df97623 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,88 +1,79 @@ -[tool.poetry] -name = "gather-vision" -version = "0.2.0" -description = "A collection of data and ideas related to society and government in Queensland, Australia." -license = "Apache-2.0" -authors = [ - "Mark Cottman-Fields " +[build-system] +requires = [ + "setuptools>=63.0.0", ] +build-backend = "setuptools.build_meta" + +[project] +name = "gather-vision" +description = "Obtain, extract, organise, and store information. " readme = "README.md" -homepage = "https://github.com/anotherbyte-net/gather-vision" -repository = "https://github.com/anotherbyte-net/gather-vision" -documentation = "https://github.com/anotherbyte-net/gather-vision" +requires-python = ">=3.7" classifiers = [ - "Development Status :: 4 - Beta", - "Framework :: Django", + "Programming Language :: Python :: 3", + "License :: OSI Approved :: Apache Software License", + "Operating System :: POSIX :: Linux", + "Operating System :: Microsoft :: Windows", + "Development Status :: 3 - Alpha", + "Topic :: Scientific/Engineering :: Information Analysis", + "Topic :: Utilities", +] +dynamic = [ + "version", + "dependencies", + "optional-dependencies", ] +[project.urls] +"Homepage" = "https://github.com/anotherbyte-net/gather-vision" +"Changelog" = "https://github.com/anotherbyte-net/gather-vision/blob/main/CHANGELOG.md" +"Source" = "https://github.com/anotherbyte-net/gather-vision" +"Tracker" = "https://github.com/anotherbyte-net/gather-vision/issues" -[tool.poetry.dependencies] -python = "^3.9" - -# general -Django = "^4.0" -docutils = "^0.18" -requests = "^2.26.0" -requests-cache = "^0.8.1" -django-environ = "^0.8.1" - -# calendar -ics = "^0.7" -icalendar = "^4.0.9" - -# music -ytmusicapi = "^0.19.4" -xmltodict = "^0.12.0" -lxml = "^4.6.4" -django-debug-toolbar = "^3.2.2" - +[project.scripts] +gather-vision = 'gather_vision.cli:main' -[tool.poetry.dev-dependencies] -black = "^21.10b0" -flake8 = "^3.9.2" -requests-mock = "^1.9.3" -coverage = "^6.2" -django-coverage-plugin = "^2.0.2" -pytest = "^6.2.5" -flake8-django = "^1.1.2" -flake8-bugbear = "^21.9.2" -tblib = "^1.7.0" +[tool.setuptools.packages.find] +where = [ + "src", +] +# include and exclude accept strings representing glob patterns. 
+include = [ + "gather_vision*", +] -[build-system] -requires = ["poetry-core>=1.0.0"] -build-backend = "poetry.core.masonry.api" +[tool.setuptools.dynamic] +version = { file = [ + "VERSION", +] } +dependencies = { file = [ + "requirements.txt", +] } -[tool.poetry.scripts] -gather-vision = "manage:main" +[tool.setuptools.dynamic.optional-dependencies] +dev = { file = [ + "requirements-dev.txt", +] } [tool.pytest.ini_options] -minversion = "6.0" +minversion = "7.0" addopts = "-ra --quiet" -testpaths = [ - "gather_vision/tests", +pythonpath = [ + "src", ] -python_files = [ - 'tests.py', - 'test_*.py', - '*_tests.py', +testpaths = [ + "tests", ] -DJANGO_SETTINGS_MODULE = 'gather_vision_proj.settings' [tool.coverage.run] # "Specifying the source option enables coverage.py to report on unexecuted files, # since it can search the source tree for files that haven’t been measured at all." source = [ - "gather_vision", - "gather_vision_proj", - "templates", + 'src', ] omit = [ - "*/site-packages/*", - "*/tests/*", - "gather_vision/migrations/", -] -plugins = [ - "django_coverage_plugin", + '*/site-packages/*', + 'tests/*', ] [tool.coverage.report] @@ -91,15 +82,30 @@ skip_empty = true [tool.coverage.html] directory = "coverage-html" -[tool.black] -line-length = 88 -target-version = [ - "py38", - "py39", - "py310", +[tool.isort] +profile = "black" +src_paths = [ + "src", ] -extend-exclude = """ -# Exclude files in addition to the defaults. -# A regex preceded with ^/ will apply only to files and directories in the root of the project. -gather_vision/migrations + +[tool.tox] +legacy_tox_ini = """ +[tox] +isolated_build = True +envlist = py37,py38,py39,py310,py311 + +[testenv] +# recreate = true +deps = + -r requirements.txt + -r requirements-dev.txt +commands = + gather-vision --help + python -X dev -m pytest --doctest-modules --junitxml=pytest-coverage.xml.cover --cov-report=term-missing:skip-covered --cov=src/ tests/ """ + +[tool.pydocstyle] +convention = 'google' + +[tool.mypy] +ignore_missing_imports = true diff --git a/requirements-dev.txt b/requirements-dev.txt new file mode 100644 index 0000000..9da9fca --- /dev/null +++ b/requirements-dev.txt @@ -0,0 +1,41 @@ +# package management +pip==22.2.2 +setuptools==65.3.0 +wheel==0.37.1 +build==0.8.0 +twine==4.0.1 + +# tests +pytest==7.1.3 +pytest-mock==3.8.2 +pytest-cov==3.0.0 +tblib==1.7.0 +tox==3.26.0 +coverage==6.4.4 +hypothesis==6.54.6 + +# linters +black==22.8.0 +flake8==5.0.4 +flake8-annotations-coverage==0.0.6 +flake8-black==0.3.3 +flake8-bugbear==22.9.11 +flake8-comprehensions==3.10.0 +flake8-unused-arguments==0.0.11 +flake8-requirements==1.7.1 + +# type checking +mypy==0.971 +pylint==2.15.3 +pydocstyle[toml]==6.1.1 +pyright==1.1.272 +types-dateparser==1.1.4 +types-PyYAML==6.0.11 +types-requests==2.28.11 +types-backports==0.1.3 +types-urllib3==1.26.24 +pytype==2022.8.3;python_version<="3.10" and platform_system != "Windows" +pyre-check==0.9.16; platform_system != "Windows" + +# docs +pdoc3==0.10.0 diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..609e663 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,5 @@ +# version support +importlib_metadata==4.2.0;python_version<"3.8" +importlib_metadata==4.12.0;python_version>="3.8" +importlib_resources==5.9.0 +typing_inspect==0.8.0;python_version<"3.8" diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000..fd4d9b9 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,4 @@ +[flake8] +max-line-length=88 +modern-annotations-type-alias=always 
+docstring-convention=google
diff --git a/src/gather_vision/__init__.py b/src/gather_vision/__init__.py
new file mode 100644
index 0000000..0533994
--- /dev/null
+++ b/src/gather_vision/__init__.py
@@ -0,0 +1,5 @@
+"""Documentation for the gather vision package.
+
+.. include:: ../../README.md
+.. include:: ../../CHANGELOG.md
+"""
diff --git a/src/gather_vision/app.py b/src/gather_vision/app.py
new file mode 100644
index 0000000..2fb8a09
--- /dev/null
+++ b/src/gather_vision/app.py
@@ -0,0 +1,108 @@
+"""The main application features."""
+import typing
+
+from importlib_metadata import EntryPoints, entry_points
+
+from gather_vision import model, plugin, utils
+
+
+class App:
+    """The main application."""
+
+    group = "gather_vision.plugin"
+
+    entry_points: typing.Optional[EntryPoints] = None
+    plugins: typing.Dict[str, plugin.Entry] = {}
+
+    def collect(self) -> EntryPoints:
+        """Collect the available plugins.
+
+        Returns:
+            A collection of EntryPoints.
+        """
+        if self.entry_points is None:
+            self.entry_points = entry_points(group=self.group)
+        return self.entry_points
+
+    def load(self) -> typing.Dict[str, plugin.Entry]:
+        """Load the plugin class for each plugin.
+
+        Returns:
+            A dict of plugin names to plugin entry classes.
+        """
+        if not self.plugins:
+            for entry_point in self.collect():
+                self.plugins[entry_point.name] = entry_point.load()
+        return self.plugins
+
+    def get(self, name: str) -> typing.Optional[plugin.Entry]:
+        """Get the class for a plugin.
+
+        Args:
+            name: The name of the plugin.
+
+        Returns:
+            The plugin entry class.
+        """
+        if name in self.plugins:
+            return self.plugins[name]
+
+        entry_pts = entry_points(group=self.group, name=name)
+        if entry_pts and len(entry_pts) == 1:
+            entry_point = entry_pts[0]
+            self.plugins[entry_point.name] = entry_point.load()
+
+        return self.plugins.get(name)
+
+    def update(self, args: model.UpdateArgs) -> model.UpdateResult:
+        """Execute the update action for the plugin with the given name.
+
+        Args:
+            args: The update arguments.
+
+        Returns:
+            The result of running the plugin's update process.
+        """
+        named_plugin = self.plugins.get(args.name)
+        if not named_plugin:
+            raise utils.GatherVisionException(
+                f"Could not find plugin named '{args.name}'."
+            )
+        result = named_plugin.update(args)
+        return result
+
+    def show(self, args: model.ShowArgs) -> model.ShowResult:
+        """Execute the show action for the plugin with the given name.
+
+        Args:
+            args: The show arguments.
+
+        Returns:
+            The details of the plugin.
+        """
+        named_plugin = self.plugins.get(args.name)
+        if not named_plugin:
+            raise utils.GatherVisionException(
+                f"Could not find plugin named '{args.name}'."
+            )
+        result = named_plugin.show(args)
+        return result
+
+    def list(
+        self, args: model.ListArgs  # noqa: U100 pylint: disable=unused-argument
+    ) -> model.ListResult:
+        """List all available plugins.
+
+        Args:
+            args: The list arguments.
+
+        Returns:
+            A list of plugins.
+        """
+        names = []
+        for item in self.collect():
+            if not item:
+                continue
+            names.append(item.name)
+        result = model.ListResult(sorted(names))
+        return result
diff --git a/src/gather_vision/cli.py b/src/gather_vision/cli.py
new file mode 100644
index 0000000..27659a3
--- /dev/null
+++ b/src/gather_vision/cli.py
@@ -0,0 +1,179 @@
+"""Command line for gather vision."""
+
+import argparse
+import logging
+import sys
+import typing
+
+from gather_vision import app, model, utils
+
+
+def cli_update(args: argparse.Namespace) -> bool:
+    """Run the update action from the cli.
+
+    Args:
+        args: The arguments for the update action.
+ + Returns: + True if there were no errors. + """ + logger = logging.getLogger(__name__) + + app_args = model.UpdateArgs(name=args.name) + main_app = app.App() + + logger.info("Updating '%s'.", args.name) + main_app.update(app_args) + return True + + +def cli_show(args: argparse.Namespace) -> bool: + """Run the show action from the cli. + + Args: + args: The arguments for the show action. + + Returns: + True if there were no errors. + """ + logger = logging.getLogger(__name__) + + app_args = model.ShowArgs(name=args.name) + main_app = app.App() + + logger.info("Showing '%s'.", args.name) + main_app.show(app_args) + return True + + +def cli_list( + args: argparse.Namespace, # noqa: U100 pylint: disable=unused-argument +) -> bool: + """Run the list action from the cli. + + Args: + args: The arguments for the list action. + + Returns: + True if there were no errors. + """ + logger = logging.getLogger(__name__) + + app_args = model.ListArgs() + main_app = app.App() + result = main_app.list(app_args) + + logger.info("Listing %s plugins.", len(result.names)) + for index, name in enumerate(result.names): + logger.info(" %s) %s", index + 1, name) + return True + + +def main(args: typing.Optional[typing.List[str]] = None) -> int: + """Run as a command line program. + + Args: + args: The program arguments. + + Returns: + int: Program exit code. + """ + if args is None: + args = sys.argv[1:] + + # configure logging + logging.basicConfig( + format="%(asctime)s [%(levelname)-8s] %(message)s", level=logging.DEBUG + ) + logger = logging.getLogger(__name__) + + # create the top-level parser + parser = argparse.ArgumentParser( + prog=utils.get_name_dash(), + description="Obtain, extract, organise, and store information.", + ) + parser.add_argument( + "--version", + action="version", + version=f"%(prog)s {utils.get_version()}", + ) + parser.add_argument( + "--log-level", + default="info", + choices=["debug", "info", "warning", "error", "critical"], + help="the log level: debug, info, warning, error, critical", + ) + subparsers = parser.add_subparsers( + title="Available subcommands", + description="The actions available for plugins", + dest="subcommand_action", + required=False, + help="The subcommands available to interact with installed plugins.", + metavar="action", + ) + + # create the parser for the "update" command + parser_update = subparsers.add_parser("update") + parser_update.add_argument( + "name", + help="The name of the update to run.", + ) + parser_update.set_defaults(func=cli_update) + + # create the parser for the "show" command + parser_show = subparsers.add_parser("show") + parser_show.add_argument( + "name", + help="The name of the group of information to show.", + ) + parser_show.set_defaults(func=cli_show) + + # create the parser for the "list" command + parser_list = subparsers.add_parser("list") + parser_list.set_defaults(func=cli_list) + + try: + parsed_args = parser.parse_args(args) + + logging.getLogger().setLevel((parsed_args.log_level or "info").upper()) + + if not parsed_args.subcommand_action: + parser.print_help(file=sys.stderr) + sys.exit(1) + + if logger.isEnabledFor(logging.DEBUG): + logger.debug( + "Starting %s with arguments '%s'.", utils.get_name_dash(), args + ) + else: + logger.info("Starting %s.", utils.get_name_dash()) + + if parsed_args.subcommand_action and hasattr(parsed_args, "func"): + result = parsed_args.func(parsed_args) + else: + logger.warning("Not sure what to do with arguments '%s'.", args) + result = False + + outcome = 0 if result is True else 1 + if 
outcome == 0: + logger.info("Finished.") + else: + logger.info("Finished with exit code %s.", outcome) + + return sys.exit(outcome) + + except utils.GatherVisionException as error: + if logger.isEnabledFor(logging.DEBUG): + raise + logger.error("Error: %s - %s", error.__class__.__name__, str(error)) + return sys.exit(1) + + except Exception as error: # pylint: disable=broad-except + if logger.isEnabledFor(logging.DEBUG): + raise + logger.error("Error: %s - %s", error.__class__.__name__, str(error)) + return sys.exit(2) + + +if __name__ == "__main__": + main() diff --git a/src/gather_vision/model.py b/src/gather_vision/model.py new file mode 100644 index 0000000..b859b2a --- /dev/null +++ b/src/gather_vision/model.py @@ -0,0 +1,39 @@ +"""Models used by other modules.""" +import dataclasses +import typing + + +@dataclasses.dataclass +class UpdateArgs: + """The arguments for the update command.""" + + name: str + + +@dataclasses.dataclass +class UpdateResult: + """The result from the update command.""" + + +@dataclasses.dataclass +class ShowArgs: + """The arguments for the show command.""" + + name: str + + +@dataclasses.dataclass +class ShowResult: + """The result from the show command.""" + + +@dataclasses.dataclass +class ListArgs: + """The arguments for the list command.""" + + +@dataclasses.dataclass +class ListResult: + """The result from the list command.""" + + names: typing.List[str] diff --git a/src/gather_vision/plugin.py b/src/gather_vision/plugin.py new file mode 100644 index 0000000..641d6fa --- /dev/null +++ b/src/gather_vision/plugin.py @@ -0,0 +1,33 @@ +"""Available to plugins.""" +import abc + +from gather_vision import model + + +class Entry(abc.ABC): + """The entry point class for plugins. + Compatible plugins must implement this class.""" + + @abc.abstractmethod + def update(self, args: model.UpdateArgs) -> model.UpdateResult: # noqa: U100 + """Run the update action. + + Args: + args: The arguments for update. + + Returns: + The result of the update action. + """ + raise NotImplementedError("Must implement 'update'.") + + @abc.abstractmethod + def show(self, args: model.ShowArgs) -> model.ShowResult: # noqa: U100 + """Run the show action. + + Args: + args: The arguments for show. + + Returns: + The result of the show action. 
+ """ + raise NotImplementedError("Must implement 'show'.") diff --git a/src/gather_vision/utils.py b/src/gather_vision/utils.py new file mode 100644 index 0000000..925942c --- /dev/null +++ b/src/gather_vision/utils.py @@ -0,0 +1,64 @@ +"""Small utility functions.""" +import pathlib +import typing + +from importlib_metadata import PackageNotFoundError, distribution +from importlib_resources import as_file, files + + +def get_name_dash() -> str: + """Get the package name with word separated by dashes.""" + return "gather-vision" + + +def get_name_under() -> str: + """Get the package name with word separated by underscores.""" + return "gather_vision" + + +def get_version() -> typing.Optional[str]: + """Get the package version.""" + try: + dist = distribution(get_name_dash()) + return dist.version + except PackageNotFoundError: + pass + + try: + with as_file(files(get_name_under()).joinpath("cli.py")) as file_path: + return (file_path.parent.parent.parent / "VERSION").read_text().strip() + except FileNotFoundError: + pass + + return None + + +def validate(name: str, value, expected: typing.List) -> None: + """Validate that a value is one of the expected values.""" + if value is not None and value not in expected: + opts = ", ".join(sorted([str(i) for i in expected])) + raise GatherVisionException( + f"Invalid {name} '{value}'. Expected one of '{opts}'." + ) + + +def validate_path( + name: str, value: pathlib.Path, must_exist: bool = False +) -> pathlib.Path: + """Validate a path.""" + if not value: + raise GatherVisionException(f"Must provide path {name}.") + + try: + if must_exist is True: + abs_path = value.resolve(strict=True) + else: + abs_path = value.absolute() + + return abs_path + except Exception as error: + raise GatherVisionException(f"Invalid path '{value}'.") from error + + +class GatherVisionException(Exception): + """A gather vision error.""" diff --git a/templates/admin/base.html b/templates/admin/base.html deleted file mode 100644 index 1c1c1ff..0000000 --- a/templates/admin/base.html +++ /dev/null @@ -1,17 +0,0 @@ -{% extends 'admin/base.html' %} - -{% block extrastyle %}{{ block.super }} - -{% endblock %} diff --git a/tests/test_cli.py b/tests/test_cli.py new file mode 100644 index 0000000..4577eb0 --- /dev/null +++ b/tests/test_cli.py @@ -0,0 +1,92 @@ +import pytest + +from gather_vision.cli import main + + +@pytest.mark.parametrize("main_args,exit_code", [([], 1), (["--help"], 0)]) +def test_cli_no_args(capsys, caplog, main_args, exit_code): + with pytest.raises(SystemExit, match=str(exit_code)): + main(main_args) + + prog_help = ( + "usage: gather-vision [-h] [--version]\n" + " [--log-level {debug,info,warning,error,critical}]\n" + " action ...\n" + "\n" + "Obtain, extract, organise, and store information.\n" + "\n" + "options:\n" + " -h, --help show this help message and exit\n" + " --version show program's version number and exit\n" + " --log-level {debug,info,warning,error,critical}\n" + " the log level: debug, info, warning, error, " + "critical\n" + "\n" + "Available subcommands:\n" + " The actions available for plugins\n" + "\n" + " action The subcommands available to interact with " + "installed\n" + " plugins.\n" + ) + + stdout, stderr = capsys.readouterr() + if main_args == ["--help"]: + assert stdout == prog_help + assert stderr == "" + assert caplog.record_tuples == [] + + if main_args == []: + assert stdout == "" + assert stderr == prog_help + assert caplog.record_tuples == [] + + +def test_cli_list(capsys, caplog): + with pytest.raises(SystemExit, 
match="0"): + main(["list"]) + + stdout, stderr = capsys.readouterr() + assert stdout == "" + assert stderr == "" + assert caplog.record_tuples == [ + ("gather_vision.cli", 20, "Starting gather-vision."), + ("gather_vision.cli", 20, "Listing 0 plugins."), + ("gather_vision.cli", 20, "Finished."), + ] + + +def test_cli_show_not_available(capsys, caplog): + with pytest.raises(SystemExit, match="1"): + main(["show", "not-available"]) + + stdout, stderr = capsys.readouterr() + assert stdout == "" + assert stderr == "" + assert caplog.record_tuples == [ + ("gather_vision.cli", 20, "Starting gather-vision."), + ("gather_vision.cli", 20, "Showing 'not-available'."), + ( + "gather_vision.cli", + 40, + "Error: GatherVisionException - Could not find plugin named 'not-available'.", + ), + ] + + +def test_cli_update_not_available(capsys, caplog): + with pytest.raises(SystemExit, match="1"): + main(["update", "not-available"]) + + stdout, stderr = capsys.readouterr() + assert stdout == "" + assert stderr == "" + assert caplog.record_tuples == [ + ("gather_vision.cli", 20, "Starting gather-vision."), + ("gather_vision.cli", 20, "Updating 'not-available'."), + ( + "gather_vision.cli", + 40, + "Error: GatherVisionException - Could not find plugin named 'not-available'.", + ), + ]