diff --git a/.DS_Store b/.DS_Store
index 4ae40d263..e20c5b107 100644
Binary files a/.DS_Store and b/.DS_Store differ
diff --git a/.github/workflows/cleanup-images.yaml b/.github/workflows/cleanup-images.yaml
new file mode 100644
index 000000000..97e5b992c
--- /dev/null
+++ b/.github/workflows/cleanup-images.yaml
@@ -0,0 +1,93 @@
+name: Scheduled cleanup unused images
+on:
+  workflow_dispatch:
+  schedule:
+    - cron: "0 0 * * 0"
+# At 00:00 on Sunday.
+
+jobs:
+  cleanup-images:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Log in to Openshift
+        uses: redhat-actions/oc-login@v1.3
+        with:
+          openshift_server_url: ${{ secrets.OPENSHIFT_SERVER }}
+          openshift_token: ${{ secrets.OPENSHIFT_TOKEN }}
+          insecure_skip_tls_verify: true
+          namespace: ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-tools
+
+      - name: cleanup-images
+        continue-on-error: true
+        run: |
+
+          #!/bin/bash
+
+          # This script deletes all image tags for both frontend and backend except the one currently referenced
+
+          # A sample search_string is lcfs-backend:0.2.0-20240403221450
+          # A sample oc_output could include
+          #   lcfs-backend:0.2.0-20240403210040
+          #   lcfs-backend:0.2.0-20240403211844
+          #   lcfs-backend:0.2.0-20240403221450
+          # The script will remove the first two image tags
+
+          delete_resources() {
+            local search_string="$1"
+            local oc_output="$2"
+            local namespace="$3"
+
+            # Check if the oc_output is empty
+            if [ -z "$oc_output" ]; then
+              echo "Error: No output provided."
+              return 1
+            fi
+
+            # Loop through each line in the oc output
+            while IFS= read -r line; do
+              # Check if the line contains the search string
+              if [[ "$line" != *"$search_string"* ]]; then
+                # Extract the name of the resource from the line
+                resource_name=$(echo "$line" | awk '{print $1}')
+                # Delete the resource
+                oc -n "$namespace" delete imagetag/"$resource_name"
+              fi
+            done <<< "$oc_output"
+          }
+
+          # Define the search string
+          search_string=$(oc -n ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-dev describe deployment/lcfs-backend-dev | grep Image | awk -F '/' '{print $NF}')
+          # Run the oc command and store the output in a variable
+          oc_output=$(oc -n ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-dev get imagetags | grep lcfs-backend | awk '{print $1}')
+          namespace="${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-dev"
+          echo "Will delete all lcfs-backend image tags in ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-dev except $search_string"
+          delete_resources "$search_string" "$oc_output" "$namespace"
+
+          # Define the search string
+          search_string=$(oc -n ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-dev describe deployment/lcfs-frontend-dev | grep Image | awk -F '/' '{print $NF}')
+          # Run the oc command and store the output in a variable
+          oc_output=$(oc -n ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-dev get imagetags | grep lcfs-frontend | awk '{print $1}')
+          echo "Will delete all lcfs-frontend image tags in ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-dev except $search_string"
+          namespace="${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-dev"
+          delete_resources "$search_string" "$oc_output" "$namespace"
+
+          echo "Will delete images in tools env"
+          frontendimages=$(oc -n ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-tools get imagetags | grep lcfs-frontend | awk '{print $1}')
+          if [ ! -z "$frontendimages" ]; then
+            oc -n ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-tools get imagetags | grep lcfs-frontend | awk '{print $1}' | xargs oc -n ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-tools delete imagetag
+          fi
+
+          backendimages=$(oc -n ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-tools get imagetags | grep lcfs-backend | awk '{print $1}')
+          if [ ! -z "$backendimages" ]; then
+            oc -n ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-tools get imagetags | grep lcfs-backend | awk '{print $1}' | xargs oc -n ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-tools delete imagetag
+          fi
+
+          echo "Cleaning up Completed pods on Dev except CrunchyDB pods"
+          oc -n ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-dev get pods | grep Completed | grep -v crunchy | awk '{print $1}' | xargs oc -n ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-dev delete pod || true
+
+          echo "Cleaning up Complete and Failed builds on Tools"
+          oc -n ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-tools get builds | grep Complete | awk '{print $1}' | xargs oc -n ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-tools delete build || true
+          oc -n ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-tools get builds | grep Failed | awk '{print $1}' | xargs oc -n ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-tools delete build || true
+
+          echo "Cleaning up buildconfigs on Tools"
+          oc -n ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-tools get buildconfig | awk '{print $1}' | xargs oc -n ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-tools delete buildconfig || true
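The `delete_resources` filter above is easy to exercise before pointing it at a cluster. A minimal dry-run sketch using the sample tags from the workflow's own comments; the `demo-dev` namespace is a placeholder, and `echo oc` stands in for the real CLI so nothing is actually deleted:

```bash
#!/bin/bash
# Dry run of the tag-filtering logic: prints the oc commands the
# workflow would execute, without touching a cluster.
delete_resources() {
  local search_string="$1" oc_output="$2" namespace="$3"
  [ -z "$oc_output" ] && { echo "Error: No output provided."; return 1; }
  while IFS= read -r line; do
    # Every tag that does NOT match the referenced tag gets deleted
    if [[ "$line" != *"$search_string"* ]]; then
      echo oc -n "$namespace" delete imagetag/"$(echo "$line" | awk '{print $1}')"
    fi
  done <<< "$oc_output"
}

oc_output='lcfs-backend:0.2.0-20240403210040
lcfs-backend:0.2.0-20240403211844
lcfs-backend:0.2.0-20240403221450'

# Keeps the tag the deployment references; prints deletes for the other two.
delete_resources "lcfs-backend:0.2.0-20240403221450" "$oc_output" "demo-dev"
```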
-z "$frontendimages" ]; then + oc -n ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-tools get imagetags | grep lcfs-frontend | awk '{print $1}' | xargs oc -n ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-tools delete imagetag + fi + + backendimages=$(oc -n ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-tools get imagetags | grep lcfs-backend | awk '{print $1}') + if [ ! -z "$backendimages" ]; then + oc -n ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-tools get imagetags | grep lcfs-backend | awk '{print $1}' | xargs oc -n ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-tools delete imagetag + fi + + echo "Cleaning up Completed pods on Dev except CrunchyDB pods" + oc -n ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-dev get pods | grep Completed | grep -v crunchy | awk '{print $1}' | xargs oc -n ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-dev delete pod || true + + echo "Cleaning up Complete and Failed builds on Tools" + oc -n ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-tools get builds | grep Complete | awk '{print $1}' | xargs oc -n ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-tools delete build || true + oc -n ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-tools get builds | grep Failed | awk '{print $1}' | xargs oc -n ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-tools delete build || true + + echo "Cleaning up buildconfigs on Tools" + oc -n ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-tools get buildconfig | awk '{print $1}' | xargs oc -n ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-tools delete buildconfig || true diff --git a/.github/workflows/cron-cleanup-workflow-runs.yaml b/.github/workflows/cron-cleanup-workflow-runs.yaml new file mode 100644 index 000000000..33e95e297 --- /dev/null +++ b/.github/workflows/cron-cleanup-workflow-runs.yaml @@ -0,0 +1,19 @@ +name: Scheduled cleanup old workflow runs +on: + workflow_dispatch: + schedule: + - cron: "0 8 * * *" + +jobs: + del_runs: + runs-on: ubuntu-latest + permissions: + actions: write + steps: + - name: Delete workflow runs + uses: Mattraks/delete-workflow-runs@v2.0.6 + with: + token: ${{ github.token }} + repository: ${{ github.repository }} + retain_days: 15 + keep_minimum_runs: 0 diff --git a/.github/workflows/cypress-auto-test.yaml b/.github/workflows/cypress-auto-test.yaml new file mode 100644 index 000000000..26f153f58 --- /dev/null +++ b/.github/workflows/cypress-auto-test.yaml @@ -0,0 +1,125 @@ +# # Workflow starts automatically on push or can be triggered manually. +# # This is a desirable pattern as it allows for adhoc test runs without a code push +# name: Cypress tests +# on: [push, workflow_dispatch] +# # on: +# # push: +# # branches: +# # - 'release-0.2.0' +# # pull_request: +# # workflow_dispatch: +# jobs: +# cypress-run: +# runs-on: ubuntu-latest +# strategy: +# fail-fast: false +# # In order for Cypress to run well, it needs to be fed the needed parameters. 
diff --git a/.github/workflows/cypress-auto-test.yaml b/.github/workflows/cypress-auto-test.yaml
new file mode 100644
index 000000000..26f153f58
--- /dev/null
+++ b/.github/workflows/cypress-auto-test.yaml
@@ -0,0 +1,125 @@
+# # Workflow starts automatically on push or can be triggered manually.
+# # This is a desirable pattern as it allows for ad hoc test runs without a code push
+# name: Cypress tests
+# on: [push, workflow_dispatch]
+# # on:
+# #   push:
+# #     branches:
+# #       - 'release-0.2.0'
+# #   pull_request:
+# #   workflow_dispatch:
+# jobs:
+#   cypress-run:
+#     runs-on: ubuntu-latest
+#     strategy:
+#       fail-fast: false
+#     # In order for Cypress to run well, it needs to be fed the needed parameters.
+#     # The below is a mix of Cypress environment variables (all capitals) and ones that are needed for this specific set of scripts (a mix of capital and lowercase)
+#     env:
+#       admin_idir_username: ${{ secrets.ADMIN_IDIR_USERNAME }},
+#       admin_idir_password: ${{ secrets.ADMIN_IDIR_PASSWORD }},
+#       org1_bceid_username: ${{ secrets.ORG1_BCEID_USERNAME }},
+#       org1_bceid_password: ${{ secrets.ORG1_BCEID_PASSWORD }},
+#       org1_bceid_id: ${{ secrets.ORG1_BCEID_ID }},
+#       org1_bceid_userId: ${{ secrets.ORG1_BCEID_USERID }},
+#       org2_bceid_username: ${{ secrets.ORG2_BCEID_USERNAME }},
+#       org2_bceid_password: ${{ secrets.ORG2_BCEID_PASSWORD }},
+#       org2_bceid_id: ${{ secrets.ORG2_BCEID_ID }},
+#       org2_bceid_userId: ${{ secrets.ORG2_BCEID_USERID }}
+#     steps:
+#       # Checkout the PR branch
+#       - name: Checkout Target Branch
+#         uses: actions/checkout@v4
+
+#       # We are caching our node modules to slightly speed up execution in the future.
+#       - name: Cache node modules
+#         id: cache-npm
+#         uses: actions/cache@v4
+#         env:
+#           cache-name: cache-node-modules
+#         with:
+#           # npm cache files are stored in `~/.npm` on Linux/macOS
+#           path: ~/.npm
+#           key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }}
+#           restore-keys: |
+#             ${{ runner.os }}-build-${{ env.cache-name }}-
+#             ${{ runner.os }}-build-
+#             ${{ runner.os }}-
+#       - if: ${{ steps.cache-npm.outputs.cache-hit != 'true' }}
+#         name: List the state of node modules
+#         continue-on-error: true
+#         run: npm list
+#       - name: Install dependencies
+#         run: sudo apt-get install -y libgtk2.0-0 libgtk-3-0 libgbm-dev
+
+
+#       # Many solutions have an API and APP that start fairly slowly (specifically when not many resources are available).
+#       # Since the APP needs the API and Cypress needs the APP, we first have to test whether the API is running and accessible.
+#       # Often an OK from the OpenShift deploy does not automatically mean that the API/APP is functional and/or reachable.
+#       # We like to use a straightforward curl command, which we have included as a script in this repo.
+#       # - name: Wait for API response
+#       #   env:
+#       #     url: ${{ secrets.CYPRESS_API_HOST }}
+#       #     code: 200 # The HTTP code we expect to get back
+#       #     wait_time: 10 # Wait time in seconds between each attempt; total wait time = wait_time * max_attempts
+#       #     max_attempts: 5
+#       #   run: |
+#       #     cd frontend
+#       #     chmod +x waitforconnection.sh
+#       #     bash ./waitforconnection.sh
+
+#       # - name: Wait for APP response
+#       #   env:
+#       #     url: ${{ secrets.CYPRESS_HOST }}
+#       #     code: 200
+#       #     wait_time: 10
+#       #     max_attempts: 5
+#       #   run: |
+#       #     cd frontend
+#       #     chmod +x waitforconnection.sh
+#       #     bash ./waitforconnection.sh
+
+#       # This is the actual Cypress action, invoked with a set of extra parameters.
+#       # wait-on/wait-on-timeout: This is the Cypress way of waiting for a response from the application under test, but it does not address API readiness, hence the waits above.
+#       # record: Switches on recording to the cloud-based Cypress dashboard.
+#       # install-command: Since we are forcing our libraries (keycloak) to be installed because of version issues, we have to specify "npm ci -f"; the normal "npm ci" would result in errors.
+#       #   If you do not use additional libraries or force them to install, this option can be left out.
+#       # working-directory: points to where your test scripts are located.
+#       # browser: Specifies which browser you want to use. The default is 'Electron', but we would typically use 'Chrome' as this represents the majority of our users.
+# # ci-build-id: to communicate the PR/Build number to the Cypress Dashboard +# # - name: E2E Smoke tests +# # uses: cypress-io/github-action@v6 +# # # let's give this action an ID so we can refer +# # # to its output values later +# # id: smoke +# # continue-on-error: false +# # with: +# # wait-on: ${{ secrets.CYPRESS_HOST }} +# # wait-on-timeout: 120 +# # start: npm run cypress:dev +# # install-command: npm ci -f +# # working-directory: frontend +# # browser: chrome +# # ci-build-id: ${{ github.event.number }} +# - name: Install Dependencies +# run: npm ci -f +# working-directory: frontend + +# - name: Run Cypress Tests +# run: npm run cypress:dev +# working-directory: frontend + +# - name: Debug Cypress Run +# if: failure() +# run: cat /home/runner/work/lcfs/lcfs/frontend/cypress/reports/*.json +# - uses: actions/upload-artifact@v4 +# if: failure() +# with: +# name: cypress-screenshots +# path: /home/runner/work/lcfs/lcfs/frontend/cypress/screenshots + +# # For review/debug purposes, just print out key parameters +# - name: Print Env Vars +# run: | +# echo Git Base Ref: ${{ github.base_ref }} +# echo Git Change ID: ${{ github.event.number }} \ No newline at end of file diff --git a/.github/workflows/dev-ci.yml b/.github/workflows/dev-ci.yml new file mode 100644 index 000000000..6de1db940 --- /dev/null +++ b/.github/workflows/dev-ci.yml @@ -0,0 +1,146 @@ +## For each release, the value of workflow name, branches and VERSION need to be adjusted accordingly + +name: LCFS 0.2.0 Dev CI + +on: + push: + branches: [ release-0.2.0 ] + paths: + - frontend/** + - backend/** + workflow_dispatch: + +env: + VERSION: 0.2.0 + GIT_URL: https://github.com/bcgov/lcfs.git + TOOLS_NAMESPACE: ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-tools + DEV_NAMESPACE: ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-dev + + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + + install-oc: + runs-on: ubuntu-latest + outputs: + cache-hit: ${{ steps.cache.outputs.cache-hit }} + steps: + - name: Check out repository + uses: actions/checkout@v4.1.1 + + - name: Set up cache for OpenShift CLI + id: cache + uses: actions/cache@v4.2.0 + with: + path: /usr/local/bin/oc # Path where the `oc` binary will be installed + key: oc-cli-${{ runner.os }} + + - name: Install OpenShift CLI (if not cached) + if: steps.cache.outputs.cache-hit != 'true' + run: | + curl -LO https://mirror.openshift.com/pub/openshift-v4/clients/ocp/stable/openshift-client-linux.tar.gz + tar -xvf openshift-client-linux.tar.gz + sudo mv oc /usr/local/bin/ + oc version --client + + - name: Confirm OpenShift CLI is Available + run: oc version --client + + set-pre-release: + name: Calculate pre-release number + runs-on: ubuntu-latest + needs: [install-oc] + + outputs: + output1: ${{ steps.set-pre-release.outputs.PRE_RELEASE }} + + steps: + - id: set-pre-release + run: echo "PRE_RELEASE=$(date +'%Y%m%d%H%M%S')" >> $GITHUB_OUTPUT + + build: + + name: Build LCFS + runs-on: ubuntu-latest + needs: set-pre-release + timeout-minutes: 60 + + env: + PRE_RELEASE: ${{ needs.set-pre-release.outputs.output1 }} + + steps: + + - name: Check out repository + uses: actions/checkout@v4.1.1 + + - name: Restore oc command from Cache + uses: actions/cache@v4.2.0 + with: + path: /usr/local/bin/oc + key: oc-cli-${{ runner.os }} + + - name: Log in to Openshift + uses: redhat-actions/oc-login@v1.3 + with: + openshift_server_url: ${{ secrets.OPENSHIFT_SERVER }} + openshift_token: ${{ secrets.OPENSHIFT_TOKEN }} + insecure_skip_tls_verify: true + 
namespace: ${{ env.TOOLS_NAMESPACE }} + + - name: Build LCFS Backend + run: | + cd openshift/templates + oc process -f ./backend-bc.yaml VERSION=${{ env.VERSION }}-${{ env.PRE_RELEASE }} GIT_URL=${{ env.GIT_URL }} GIT_REF=release-${{ env.VERSION }} | oc apply --wait=true -f - -n ${{ env.TOOLS_NAMESPACE }} + sleep 2s + oc -n ${{ env.TOOLS_NAMESPACE }} start-build lcfs-backend-${{ env.VERSION }}-${{ env.PRE_RELEASE }} --wait=true + oc tag ${{ env.TOOLS_NAMESPACE }}/lcfs-backend:${{ env.VERSION }}-${{ env.PRE_RELEASE }} ${{ env.DEV_NAMESPACE }}/lcfs-backend:${{ env.VERSION }}-${{ env.PRE_RELEASE }} + + - name: Build LCFS Frontend + run: | + cd openshift/templates + oc process -f ./frontend-bc.yaml VERSION=${{ env.VERSION }}-${{ env.PRE_RELEASE }} GIT_URL=${{ env.GIT_URL }} GIT_REF=release-${{ env.VERSION }} | oc apply --wait=true -f - -n ${{ env.TOOLS_NAMESPACE }} + sleep 2s + oc -n ${{ env.TOOLS_NAMESPACE }} start-build lcfs-frontend-${{ env.VERSION }}-${{ env.PRE_RELEASE }} --wait=true + oc tag ${{ env.TOOLS_NAMESPACE }}/lcfs-frontend:${{ env.VERSION }}-${{ env.PRE_RELEASE }} ${{ env.DEV_NAMESPACE }}/lcfs-frontend:${{ env.VERSION }}-${{ env.PRE_RELEASE }} + + deploy: + + name: Deploy LCFS + runs-on: ubuntu-latest + timeout-minutes: 60 + needs: [set-pre-release, build] + + env: + PRE_RELEASE: ${{ needs.set-pre-release.outputs.output1 }} + + steps: + + - name: Checkout Manifest repository + uses: actions/checkout@v4.1.1 + with: + repository: bcgov-c/tenant-gitops-d2bd59 + ref: main + ssh-key: ${{ secrets.MANIFEST_REPO_DEPLOY_KEY }} + + - name: Update frontend tag + uses: mikefarah/yq@v4.40.5 + with: + cmd: yq -i '.image.tag = "${{ env.VERSION }}-${{ env.PRE_RELEASE }}"' lcfs/charts/lcfs-frontend/values-dev.yaml + + - name: Update backend tag + uses: mikefarah/yq@v4.40.5 + with: + cmd: yq -i '.image.tag = "${{ env.VERSION }}-${{ env.PRE_RELEASE }}"' lcfs/charts/lcfs-backend/values-dev.yaml + + - name: GitHub Commit & Push + run: | + git config --global user.email "actions@github.com" + git config --global user.name "GitHub Actions" + git add lcfs/charts/lcfs-frontend/values-dev.yaml + git add lcfs/charts/lcfs-backend/values-dev.yaml + git commit -m "update the image tag to ${{ env.VERSION }}-${{ env.PRE_RELEASE }}" + git push + \ No newline at end of file diff --git a/.github/workflows/docker-auto-test.yaml b/.github/workflows/docker-auto-test.yaml new file mode 100644 index 000000000..70ee4fe57 --- /dev/null +++ b/.github/workflows/docker-auto-test.yaml @@ -0,0 +1,149 @@ +name: Testing pipeline + +on: + pull_request: + branches: + - "*" + +jobs: + backend-tests: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + with: + ref: ${{ github.event.pull_request.head.sha }} + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: "3.10.13" + + - name: Install Docker Compose + run: | + sudo apt-get update + sudo apt-get install -y docker-compose + + - name: Cache Poetry dependencies + uses: actions/cache@v3 + with: + path: ~/.cache/pypoetry + key: ${{ runner.os }}-poetry-${{ hashFiles('backend/poetry.lock') }} + restore-keys: | + ${{ runner.os }}-poetry- + + - name: Install Poetry + run: pip install poetry==1.6.1 + + - name: Install backend dependencies + run: | + pip install --upgrade pip setuptools wheel + cd backend + poetry config virtualenvs.create false + poetry install + pip install pytest-github-actions-annotate-failures typing_extensions + + - name: Fix docker-compose.yml + run: | + sed -i 's/: true/: "true"/g; s/: false/: "false"/g' 
docker-compose.yml + + - name: Build and start services + run: | + docker-compose build + docker-compose up -d + + - name: Run backend tests + id: backend_tests + run: | + cd backend + poetry run pytest --junitxml=pytest-results.xml + env: + LCFS_DB_HOST: localhost + LCFS_DB_PORT: 5432 + LCFS_DB_USER: lcfs + LCFS_DB_PASS: development_only + LCFS_DB_BASE: lcfs + LCFS_REDIS_HOST: localhost + LCFS_REDIS_PORT: 6379 + LCFS_REDIS_PASSWORD: development_only + APP_ENVIRONMENT: dev + LCFS_CHES_CLIENT_ID: mock_client_id + LCFS_CHES_CLIENT_SECRET: mock_client_secret + LCFS_CHES_AUTH_URL: http://mock_auth_url + LCFS_CHES_SENDER_EMAIL: noreply@gov.bc.ca + LCFS_CHES_SENDER_NAME: Mock Notification System + LCFS_CHES_EMAIL_URL: http://mock_email_url + + - name: Upload pytest results + if: always() + uses: actions/upload-artifact@v4 + with: + name: pytest-results + path: backend/pytest-results.xml + + - name: Stop services + if: always() + run: docker-compose down + + - name: Publish Backend Test Results + uses: EnricoMi/publish-unit-test-result-action@v2 + if: always() + with: + files: backend/pytest-results.xml + github_token: ${{ secrets.GITHUB_TOKEN }} + comment_title: "Backend Test Results" + check_name: "Backend Test Results" + fail_on: "errors" + report_individual_runs: "true" + deduplicate_classes_by_file_name: "true" + + frontend-tests: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + with: + ref: ${{ github.event.pull_request.head.sha }} + + - name: Set up Node.js + uses: actions/setup-node@v3 + with: + node-version: "20" + + - name: Cache npm dependencies + uses: actions/cache@v3 + with: + path: ~/.npm + key: ${{ runner.os }}-node-${{ hashFiles('frontend/package-lock.json') }} + restore-keys: | + ${{ runner.os }}-node- + + - name: Install frontend dependencies + run: | + cd frontend + npm ci + + - name: Run frontend tests + id: frontend_tests + run: | + cd frontend + npm run test:run -- --reporter=junit --outputFile=vitest-results.xml + env: + CI: true + + - name: Upload Vitest results + if: always() + uses: actions/upload-artifact@v4 + with: + name: vitest-results + path: frontend/vitest-results.xml + + - name: Publish Frontend Test Results + uses: EnricoMi/publish-unit-test-result-action@v2 + if: always() + with: + files: frontend/vitest-results.xml + github_token: ${{ secrets.GITHUB_TOKEN }} + comment_title: "Frontend Test Results" + check_name: "Frontend Test Results" + fail_on: "errors" + report_individual_runs: "true" + deduplicate_classes_by_file_name: "true" diff --git a/.github/workflows/pr-build.yaml b/.github/workflows/pr-build.yaml new file mode 100644 index 000000000..7086a5834 --- /dev/null +++ b/.github/workflows/pr-build.yaml @@ -0,0 +1,211 @@ +name: PR Build on Dev + +on: + pull_request: + types: [labeled, synchronize] + paths: + - frontend/** + - backend/** + +env: + GIT_URL: https://github.com/bcgov/lcfs.git + TOOLS_NAMESPACE: ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-tools + DEV_NAMESPACE: ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-dev + PR_NUMBER: ${{ github.event.pull_request.number }} + GIT_REF: ${{ github.event.pull_request.head.ref }} + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + install-oc: + runs-on: ubuntu-latest + outputs: + cache-hit: ${{ steps.cache.outputs.cache-hit }} + steps: + - name: Check out repository + uses: actions/checkout@v4.1.1 + + - name: Set up cache for OpenShift CLI + id: cache + uses: actions/cache@v4.2.0 + with: + path: /usr/local/bin/oc # Path where the `oc` binary will be 
installed
+          key: oc-cli-${{ runner.os }}
+
+      - name: Install OpenShift CLI (if not cached)
+        if: steps.cache.outputs.cache-hit != 'true'
+        run: |
+          curl -LO https://mirror.openshift.com/pub/openshift-v4/clients/ocp/stable/openshift-client-linux.tar.gz
+          tar -xvf openshift-client-linux.tar.gz
+          sudo mv oc /usr/local/bin/
+          oc version --client
+
+      - name: Confirm OpenShift CLI is Available
+        run: oc version --client
+
+  get-version:
+    if: >
+      (github.event.action == 'labeled' && github.event.label.name == 'build' && github.event.pull_request.base.ref == github.event.repository.default_branch) ||
+      (github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'build') && github.event.pull_request.base.ref == github.event.repository.default_branch)
+    name: Retrieve version
+    runs-on: ubuntu-latest
+    needs: [install-oc]
+
+    outputs:
+      output1: ${{ steps.get-version.outputs.VERSION }}
+
+    steps:
+
+      - name: Restore oc command from Cache
+        uses: actions/cache@v4.2.0
+        with:
+          path: /usr/local/bin/oc
+          key: oc-cli-${{ runner.os }}
+
+      - name: Log in to Openshift
+        uses: redhat-actions/oc-login@v1.3
+        with:
+          openshift_server_url: ${{ secrets.OPENSHIFT_SERVER }}
+          openshift_token: ${{ secrets.OPENSHIFT_TOKEN }}
+          insecure_skip_tls_verify: true
+          namespace: ${{ env.TOOLS_NAMESPACE }}
+
+      - id: get-version
+        run: |
+
+          pr_deployed=$(helm -n ${{ env.DEV_NAMESPACE }} list | grep lcfs-frontend-dev- | wc -l | tr -d '[:space:]')
+          if [ "$pr_deployed" -gt 1 ]; then
+            echo "At least 2 pull requests are already deployed on dev. Please uninstall one of them to make room for this pull request. Exiting with code 99"
+            exit 99
+          else
+            echo "There are $pr_deployed pull request builds on dev. Will deploy a new one."
+            version=$(echo "${{ github.event.repository.default_branch }}" | sed -E 's/release-(.*)/\1/')
+            echo "VERSION=$version" >> $GITHUB_OUTPUT
+          fi
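Two pieces of shell logic in `get-version` are worth sanity-checking in isolation: the `sed` that derives the version from the default branch name, and the `helm list` guard that caps Dev at two PR environments. A small offline sketch with canned data (no cluster access needed):

```bash
#!/bin/bash
# 1. Version extraction: strip the "release-" prefix from the branch name.
echo "release-0.2.0" | sed -E 's/release-(.*)/\1/'   # prints: 0.2.0

# 2. Capacity guard: stand-in for `helm list | grep lcfs-frontend-dev- | wc -l`
#    using a canned release listing with two PR environments already deployed.
helm_list='lcfs-frontend-dev-101
lcfs-frontend-dev-102'
pr_deployed=$(printf '%s\n' "$helm_list" | grep -c 'lcfs-frontend-dev-')
if [ "$pr_deployed" -gt 1 ]; then
  echo "Would exit 99: $pr_deployed PR environments already on Dev"
fi
```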
+
+  build:
+    if: >
+      (github.event.action == 'labeled' && github.event.label.name == 'build' && github.event.pull_request.base.ref == github.event.repository.default_branch) ||
+      (github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'build') && github.event.pull_request.base.ref == github.event.repository.default_branch)
+    name: Build LCFS
+    runs-on: ubuntu-latest
+    needs: [get-version]
+    timeout-minutes: 60
+
+    env:
+      VERSION: ${{ needs.get-version.outputs.output1 }}
+
+    steps:
+      - name: Check out repository
+        uses: actions/checkout@v4.1.1
+        with:
+          ref: ${{ github.event.pull_request.head.ref }}
+
+      - name: Restore oc command from Cache
+        uses: actions/cache@v4.2.0
+        with:
+          path: /usr/local/bin/oc
+          key: oc-cli-${{ runner.os }}
+
+      - name: Log in to Openshift
+        uses: redhat-actions/oc-login@v1.3
+        with:
+          openshift_server_url: ${{ secrets.OPENSHIFT_SERVER }}
+          openshift_token: ${{ secrets.OPENSHIFT_TOKEN }}
+          insecure_skip_tls_verify: true
+          namespace: ${{ env.TOOLS_NAMESPACE }}
+
+      - name: Build LCFS Frontend
+        run: |
+          cd openshift/templates
+          oc process -f ./frontend-bc.yaml VERSION=${{ env.VERSION }}-${{ env.PR_NUMBER }} GIT_URL=${{ env.GIT_URL }} GIT_REF=${{ github.event.pull_request.head.ref }} | oc apply --wait=true -f - -n ${{ env.TOOLS_NAMESPACE }}
+          sleep 2s
+          for build in $(oc -n ${{ env.TOOLS_NAMESPACE }} get builds -l buildconfig=lcfs-frontend-${{ env.VERSION }}-${{ env.PR_NUMBER }} -o jsonpath='{.items[?(@.status.phase=="Running")].metadata.name}'); do
+            oc -n ${{ env.TOOLS_NAMESPACE }} cancel-build $build --wait=true
+          done
+          sleep 2s
+          oc -n ${{ env.TOOLS_NAMESPACE }} start-build lcfs-frontend-${{ env.VERSION }}-${{ env.PR_NUMBER }} --wait=true
+          sleep 2s
+          oc tag ${{ env.TOOLS_NAMESPACE }}/lcfs-frontend:${{ env.VERSION }}-${{ env.PR_NUMBER }} ${{ env.DEV_NAMESPACE }}/lcfs-frontend:${{ env.VERSION }}-${{ env.PR_NUMBER }}
+
+      - name: Build LCFS Backend
+        run: |
+          cd openshift/templates
+          oc process -f ./backend-bc.yaml VERSION=${{ env.VERSION }}-${{ env.PR_NUMBER }} GIT_URL=${{ env.GIT_URL }} GIT_REF=${{ github.event.pull_request.head.ref }} | oc apply --wait=true -f - -n ${{ env.TOOLS_NAMESPACE }}
+          sleep 2s
+          for build in $(oc -n ${{ env.TOOLS_NAMESPACE }} get builds -l buildconfig=lcfs-backend-${{ env.VERSION }}-${{ env.PR_NUMBER }} -o jsonpath='{.items[?(@.status.phase=="Running")].metadata.name}'); do
+            oc -n ${{ env.TOOLS_NAMESPACE }} cancel-build $build --wait=true
+          done
+          sleep 2s
+          oc -n ${{ env.TOOLS_NAMESPACE }} start-build lcfs-backend-${{ env.VERSION }}-${{ env.PR_NUMBER }} --wait=true
+          sleep 2s
+          oc tag ${{ env.TOOLS_NAMESPACE }}/lcfs-backend:${{ env.VERSION }}-${{ env.PR_NUMBER }} ${{ env.DEV_NAMESPACE }}/lcfs-backend:${{ env.VERSION }}-${{ env.PR_NUMBER }}
+
+  deploy:
+    if: >
+      (github.event.action == 'labeled' && github.event.label.name == 'build' && github.event.pull_request.base.ref == github.event.repository.default_branch) ||
+      (github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'build') && github.event.pull_request.base.ref == github.event.repository.default_branch)
+    name: Deploy LCFS
+    runs-on: ubuntu-latest
+    timeout-minutes: 60
+    needs: [get-version, build]
+
+    env:
+      VERSION: ${{ needs.get-version.outputs.output1 }}
+
+    steps:
+      - name: Checkout Manifest repository
+        uses: actions/checkout@v4.1.1
+        with:
+          repository: bcgov-c/tenant-gitops-d2bd59
+          ref: main
+          ssh-key: ${{ secrets.MANIFEST_REPO_DEPLOY_KEY }}
+
+      - name: Restore oc command from Cache
+        uses: actions/cache@v4.2.0
+        with:
+          path: /usr/local/bin/oc
+          key: oc-cli-${{ runner.os }}
+
+      - name: Log in to Openshift
+        uses: redhat-actions/oc-login@v1.3
+        with:
+          openshift_server_url: ${{ secrets.OPENSHIFT_SERVER }}
+          openshift_token: ${{ secrets.OPENSHIFT_TOKEN }}
+          insecure_skip_tls_verify: true
+          namespace: ${{ env.TOOLS_NAMESPACE }}
+
+      - name: Update values-dev-pr.yaml for frontend and backend
+        uses: mikefarah/yq@v4.40.5
+        with:
+          cmd: |
+            yq -i '.image.tag = "${{ env.VERSION }}-${{ env.PR_NUMBER }}"' lcfs/charts/lcfs-frontend/values-dev-pr.yaml
+            yq -i '.route.host = "lcfs-dev-${{ env.PR_NUMBER }}.apps.silver.devops.gov.bc.ca"' lcfs/charts/lcfs-frontend/values-dev-pr.yaml
+            yq -i '.volumes[0].configMap.name = "lcfs-frontend-dev-${{ env.PR_NUMBER }}"' lcfs/charts/lcfs-frontend/values-dev-pr.yaml
+            yq -i '.apiBase = "https://lcfs-backend-dev-${{ env.PR_NUMBER }}.apps.silver.devops.gov.bc.ca/api"' lcfs/charts/lcfs-frontend/values-dev-pr.yaml
+            yq -i '.keycloak.postLogoutUrl = "https://lcfs-dev-${{ env.PR_NUMBER }}.apps.silver.devops.gov.bc.ca/"' lcfs/charts/lcfs-frontend/values-dev-pr.yaml
+            yq -i '.image.tag = "${{ env.VERSION }}-${{ env.PR_NUMBER }}"' lcfs/charts/lcfs-backend/values-dev-pr.yaml
+            yq -i '.route.host = "lcfs-backend-dev-${{ env.PR_NUMBER }}.apps.silver.devops.gov.bc.ca"' lcfs/charts/lcfs-backend/values-dev-pr.yaml
+            yq -i eval '(.env[] | select(.name == "LCFS_DB_HOST") | .value) = "lcfs-postgres-dev-${{ env.PR_NUMBER }}-postgresql.${{ env.DEV_NAMESPACE }}.svc.cluster.local"' lcfs/charts/lcfs-backend/values-dev-pr.yaml
+            yq -i eval '(.env[] | select(.name == "LCFS_REDIS_HOST") | .value) = "lcfs-redis-dev-${{ env.PR_NUMBER }}-master.${{ env.DEV_NAMESPACE }}.svc.cluster.local"' lcfs/charts/lcfs-backend/values-dev-pr.yaml
+            yq -i eval '(.env[] | select(.name == "LCFS_S3_ENDPOINT") | .value) = "lcfs-minio-dev-${{ env.PR_NUMBER }}.${{ env.DEV_NAMESPACE }}.svc.cluster.local"' lcfs/charts/lcfs-backend/values-dev-pr.yaml
+            yq -i eval '(.env[] | select(.name == "LCFS_S3_SECRET_KEY") | .valueFrom.secretKeyRef.name) = "lcfs-minio-dev-${{ env.PR_NUMBER }}"' lcfs/charts/lcfs-backend/values-dev-pr.yaml
+            yq -i eval '(.env[] | select(.name == "LCFS_RABBITMQ_TRANSACTION_QUEUE") | .value) = "transaction_queue_${{ env.PR_NUMBER }}"' lcfs/charts/lcfs-backend/values-dev-pr.yaml
+
+      - name: Helm Deployment
+        shell: bash {0}
+        run: |
+          cd lcfs/charts/lcfs-postgres-pr
+          helm -n ${{ env.DEV_NAMESPACE }} -f ./values-dev-pr.yaml upgrade --install lcfs-postgres-dev-${{ env.PR_NUMBER }} oci://registry-1.docker.io/bitnamicharts/postgresql --version 15.5.17
+          cd ../lcfs-redis-pr
+          helm -n ${{ env.DEV_NAMESPACE }} -f ./values-dev-pr.yaml upgrade --install lcfs-redis-dev-${{ env.PR_NUMBER }} oci://registry-1.docker.io/bitnamicharts/redis --version 19.6.1
+          cd ../lcfs-minio-pr
+          helm -n ${{ env.DEV_NAMESPACE }} -f ./values-dev-pr.yaml upgrade --install lcfs-minio-dev-${{ env.PR_NUMBER }} oci://registry-1.docker.io/bitnamicharts/minio --version 14.8.0
+          cd ../lcfs-frontend
+          # refresh the pod annotation every time in order to force the rollout
+          helm -n ${{ env.DEV_NAMESPACE }} -f ./values-dev-pr.yaml upgrade --install lcfs-frontend-dev-${{ env.PR_NUMBER }} . --set podAnnotations.rolloutTriggered="A$(date +%s)E"
+          sleep 60s
+          cd ../lcfs-backend
+          helm -n ${{ env.DEV_NAMESPACE }} -f ./values-dev-pr.yaml upgrade --install lcfs-backend-dev-${{ env.PR_NUMBER }} . --set podAnnotations.rolloutTriggered="A$(date +%s)E"
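The `podAnnotations.rolloutTriggered="A$(date +%s)E"` flag in the deploy step above is a generic force-rollout trick: the annotation is copied into the Deployment's pod template, so a new value changes the pod template and Kubernetes rolls the pods even when the image tag is unchanged. A sketch under assumed names (`demo` release, `./chart` with a standard podAnnotations passthrough):

```bash
#!/bin/bash
# Two upgrades with different timestamps produce different pod-template
# annotations, so each upgrade triggers a fresh rollout.
helm upgrade --install demo ./chart \
  --set podAnnotations.rolloutTriggered="A$(date +%s)E"

sleep 1   # ensure a different epoch timestamp, hence a changed template

helm upgrade --install demo ./chart \
  --set podAnnotations.rolloutTriggered="A$(date +%s)E"

# Watch the new ReplicaSet roll out (deployment name is an assumption).
oc rollout status deployment/demo
```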
diff --git a/.github/workflows/pr-teardown.yaml b/.github/workflows/pr-teardown.yaml
new file mode 100644
index 000000000..201c08e04
--- /dev/null
+++ b/.github/workflows/pr-teardown.yaml
@@ -0,0 +1,75 @@
+name: PR Teardown on Dev
+
+on:
+  pull_request:
+    types: [unlabeled, closed]
+
+env:
+  DEV_NAMESPACE: ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-dev
+  PR_NUMBER: ${{ github.event.pull_request.number }}
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
+jobs:
+  install-oc:
+    runs-on: ubuntu-latest
+    outputs:
+      cache-hit: ${{ steps.cache.outputs.cache-hit }}
+    steps:
+      - name: Check out repository
+        uses: actions/checkout@v4.1.1
+
+      - name: Set up cache for OpenShift CLI
+        id: cache
+        uses: actions/cache@v4.2.0
+        with:
+          path: /usr/local/bin/oc # Path where the `oc` binary will be installed
+          key: oc-cli-${{ runner.os }}
+
+      - name: Install OpenShift CLI (if not cached)
+        if: steps.cache.outputs.cache-hit != 'true'
+        run: |
+          curl -LO https://mirror.openshift.com/pub/openshift-v4/clients/ocp/stable/openshift-client-linux.tar.gz
+          tar -xvf openshift-client-linux.tar.gz
+          sudo mv oc /usr/local/bin/
+          oc version --client
+
+      - name: Confirm OpenShift CLI is Available
+        run: oc version --client
+
+  teardown:
+    if: >
+      (github.event.action == 'unlabeled' && github.event.label.name == 'build') ||
+      (github.event.action == 'closed' && contains(github.event.pull_request.labels.*.name, 'build'))
+    name: PR Teardown
+    runs-on: ubuntu-latest
+    needs: [install-oc]
+    timeout-minutes: 60
+
+    steps:
+
+      - name: Restore oc command from Cache
+        uses: actions/cache@v4.2.0
+        with:
+          path: /usr/local/bin/oc
+          key: oc-cli-${{ runner.os }}
+
+      - name: Log in to Openshift
+        uses: redhat-actions/oc-login@v1.3
+        with:
+          openshift_server_url: ${{ secrets.OPENSHIFT_SERVER }}
+          openshift_token: ${{ secrets.OPENSHIFT_TOKEN }}
+          insecure_skip_tls_verify: true
+          namespace: ${{ env.DEV_NAMESPACE }}
+
+      - name: Teardown the pull request
+        run: |
+          helm -n ${{ env.DEV_NAMESPACE }} uninstall lcfs-frontend-dev-${{ env.PR_NUMBER }}
+          helm -n ${{ env.DEV_NAMESPACE }} uninstall lcfs-backend-dev-${{ env.PR_NUMBER }}
+          helm -n ${{ env.DEV_NAMESPACE }} uninstall lcfs-redis-dev-${{ env.PR_NUMBER }}
+          helm -n ${{ env.DEV_NAMESPACE }} uninstall lcfs-postgres-dev-${{ env.PR_NUMBER }}
+          helm -n ${{ env.DEV_NAMESPACE }} uninstall lcfs-minio-dev-${{ env.PR_NUMBER }}
+          oc -n ${{ env.DEV_NAMESPACE }} delete pvc -l app.kubernetes.io/instance=lcfs-postgres-dev-${{ env.PR_NUMBER }}
+          oc -n ${{ env.DEV_NAMESPACE }} delete pvc -l app.kubernetes.io/instance=lcfs-redis-dev-${{ env.PR_NUMBER }}
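The Prod workflow that follows reads the promoted tag from `values-test.yaml` and carries a to-do to verify that the frontend and backend tags agree. One possible shape for that check, run from the root of the manifest repo before tagging anything into Prod (a sketch, not part of the workflow):

```bash
#!/bin/bash
# Fail fast if the two charts point at different image tags on Test.
frontend_tag=$(yq eval '.image.tag' lcfs/charts/lcfs-frontend/values-test.yaml)
backend_tag=$(yq eval '.image.tag' lcfs/charts/lcfs-backend/values-test.yaml)

if [ "$frontend_tag" != "$backend_tag" ]; then
  echo "Tag mismatch: frontend=$frontend_tag backend=$backend_tag" >&2
  exit 1
fi
echo "Frontend and backend both at $frontend_tag; safe to promote"
```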
diff --git a/.github/workflows/prod-ci.yaml b/.github/workflows/prod-ci.yaml
new file mode 100644
index 000000000..b3a1eab61
--- /dev/null
+++ b/.github/workflows/prod-ci.yaml
@@ -0,0 +1,148 @@
+
+name: LCFS 0.2.0 Prod CI
+
+on:
+  workflow_dispatch:
+
+env:
+  GIT_URL: https://github.com/bcgov/lcfs.git
+  TEST_NAMESPACE: ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-test
+  PROD_NAMESPACE: ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-prod
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
+jobs:
+  install-oc:
+    runs-on: ubuntu-latest
+    outputs:
+      cache-hit: ${{ steps.cache.outputs.cache-hit }}
+    steps:
+      - name: Check out repository
+        uses: actions/checkout@v4.1.1
+
+      - name: Set up cache for OpenShift CLI
+        id: cache
+        uses: actions/cache@v4.2.0
+        with:
+          path: /usr/local/bin/oc # Path where the `oc` binary will be installed
+          key: oc-cli-${{ runner.os }}
+
+      - name: Install OpenShift CLI (if not cached)
+        if: steps.cache.outputs.cache-hit != 'true'
+        run: |
+          curl -LO https://mirror.openshift.com/pub/openshift-v4/clients/ocp/stable/openshift-client-linux.tar.gz
+          tar -xvf openshift-client-linux.tar.gz
+          sudo mv oc /usr/local/bin/
+          oc version --client
+
+      - name: Confirm OpenShift CLI is Available
+        run: oc version --client
+
+  # Read the image tag from the test environment
+  get-image-tag:
+
+    name: Get the image-tag from values-test.yaml
+    runs-on: ubuntu-latest
+    needs: [install-oc]
+
+    outputs:
+      IMAGE_TAG: ${{ steps.get-image-tag.outputs.IMAGE_TAG }}
+
+    steps:
+      - name: Checkout Manifest repository
+        uses: actions/checkout@v3
+        with:
+          repository: bcgov-c/tenant-gitops-d2bd59
+          ref: main
+          ssh-key: ${{ secrets.MANIFEST_REPO_DEPLOY_KEY }}
+
+      # to do: verify the frontend image tag is the same as the backend image tag
+      - name: Get the image tag from values-test.yaml
+        id: get-image-tag
+        uses: mikefarah/yq@v4.40.5
+        with:
+          cmd: |
+            imagetag=$(yq eval '.image.tag' lcfs/charts/lcfs-frontend/values-test.yaml)
+            echo "IMAGE_TAG retrieved from Test is $imagetag"
+            echo "IMAGE_TAG=$imagetag" >> $GITHUB_OUTPUT
+
+  get-current-time:
+    name: Get Current Time
+    runs-on: ubuntu-latest
+    needs: get-image-tag
+
+    outputs:
+      CURRENT_TIME: ${{ steps.get-current-time.outputs.CURRENT_TIME }}
+
+    steps:
+      - id: get-current-time
+        run: |
+          export TZ="America/Vancouver"
+          echo "CURRENT_TIME=$(date '+%Y-%m-%d %H:%M:%S %Z')" >> $GITHUB_OUTPUT
+
+  # Deploy the image which is running on Test to Prod
+  deploy-on-prod:
+
+    name: Deploy LCFS on Prod
+    runs-on: ubuntu-latest
+    needs: [get-image-tag, get-current-time]
+    timeout-minutes: 60
+
+    env:
+      IMAGE_TAG: ${{ needs.get-image-tag.outputs.IMAGE_TAG }}
+      CURRENT_TIME: ${{ needs.get-current-time.outputs.CURRENT_TIME }}
+
+    steps:
+
+      - name: Checkout Manifest repository
+        uses: actions/checkout@v3
+        with:
+          repository: bcgov-c/tenant-gitops-d2bd59
+          ref: main
+          ssh-key: ${{ secrets.MANIFEST_REPO_DEPLOY_KEY }}
+
+      - name: Ask for approval for LCFS ${{env.IMAGE_TAG }} Prod deployment
+        uses: trstringer/manual-approval@v1.6.0
+        with:
+          secret: ${{ github.TOKEN }}
+          approvers: AlexZorkin,kuanfandevops,hamed-valiollahi,airinggov,areyeslo,dhaselhan,Grulin
+          minimum-approvals: 2
+          issue-title: "LCFS ${{env.IMAGE_TAG }} Prod Deployment at ${{ env.CURRENT_TIME }}."
+ + - name: Restore oc command from Cache + uses: actions/cache@v4.2.0 + with: + path: /usr/local/bin/oc + key: oc-cli-${{ runner.os }} + + - name: Log in to Openshift + uses: redhat-actions/oc-login@v1.3 + with: + openshift_server_url: ${{ secrets.OPENSHIFT_SERVER }} + openshift_token: ${{ secrets.OPENSHIFT_TOKEN }} + insecure_skip_tls_verify: true + namespace: ${{ env.PROD_NAMESPACE }} + + - name: Tag LCFS images from Test to Prod + run: | + oc tag ${{ env.TEST_NAMESPACE }}/lcfs-backend:${{env.IMAGE_TAG }} ${{ env.PROD_NAMESPACE }}/lcfs-backend:${{env.IMAGE_TAG }} + oc tag ${{ env.TEST_NAMESPACE }}/lcfs-frontend:${{env.IMAGE_TAG }} ${{ env.PROD_NAMESPACE }}/lcfs-frontend:${{env.IMAGE_TAG }} + + - name: Update frontend tag for prod deployment + uses: mikefarah/yq@v4.40.5 + with: + cmd: | + yq -i '.image.tag = "${{env.IMAGE_TAG }}"' lcfs/charts/lcfs-frontend/values-prod.yaml + yq -i '.image.tag = "${{env.IMAGE_TAG }}"' lcfs/charts/lcfs-backend/values-prod.yaml + + - name: GitHub Commit & Push + run: | + git config --global user.email "actions@github.com" + git config --global user.name "GitHub Actions" + git add lcfs/charts/lcfs-frontend/values-prod.yaml + git add lcfs/charts/lcfs-backend/values-prod.yaml + git commit -m "Update image tag ${{env.IMAGE_TAG }} for prod" + git push + \ No newline at end of file diff --git a/.github/workflows/test-ci.yaml b/.github/workflows/test-ci.yaml new file mode 100644 index 000000000..f7643604b --- /dev/null +++ b/.github/workflows/test-ci.yaml @@ -0,0 +1,315 @@ +name: LCFS 0.2.0 Test CI + +on: + workflow_dispatch: + +env: + VERSION: 0.2.0 + GIT_URL: https://github.com/bcgov/lcfs.git + DEV_NAMESPACE: ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-dev + TEST_NAMESPACE: ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-test + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + install-oc: + runs-on: ubuntu-latest + outputs: + cache-hit: ${{ steps.cache.outputs.cache-hit }} + steps: + - name: Set up cache for OpenShift CLI + id: cache + uses: actions/cache@v4.2.0 + with: + path: /usr/local/bin/oc # Path where the `oc` binary will be installed + key: oc-cli-${{ runner.os }} + + - name: Install OpenShift CLI (if not cached) + if: steps.cache.outputs.cache-hit != 'true' + run: | + curl -LO https://mirror.openshift.com/pub/openshift-v4/clients/ocp/stable/openshift-client-linux.tar.gz + tar -xvf openshift-client-linux.tar.gz + sudo mv oc /usr/local/bin/ + oc version --client + + - name: Confirm OpenShift CLI is Available + run: oc version --client + + run-tests: + name: Run Tests + runs-on: ubuntu-latest + needs: [install-oc] + + steps: + - uses: actions/checkout@v3 + with: + repository: bcgov/lcfs + ref: ${{ github.ref }} + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: "3.10.13" + + - name: Set up Node.js + uses: actions/setup-node@v3 + with: + node-version: "20" + + - name: Install Docker Compose + run: | + sudo apt-get update + sudo apt-get install -y docker-compose + + - name: Cache Poetry dependencies + uses: actions/cache@v3 + with: + path: ~/.cache/pypoetry + key: ${{ runner.os }}-poetry-${{ hashFiles('**/poetry.lock') }} + restore-keys: | + ${{ runner.os }}-poetry- + + - name: Cache npm dependencies + uses: actions/cache@v3 + with: + path: ~/.npm + key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} + restore-keys: | + ${{ runner.os }}-node- + + - name: Install Poetry + run: pip install poetry==1.6.1 + + - name: Install backend dependencies + run: | + cd backend + 
poetry config virtualenvs.create false + poetry install + pip install pytest-github-actions-annotate-failures typing_extensions + + - name: Install frontend dependencies + run: | + cd frontend + npm ci + + - name: Fix docker-compose.yml + run: | + sed -i 's/: true/: "true"/g; s/: false/: "false"/g' docker-compose.yml + + - name: Build and start services + run: | + docker-compose build + docker-compose up -d + + - name: Run backend tests + id: backend_tests + continue-on-error: true + run: | + cd backend + poetry run pytest --junitxml=pytest-results.xml + env: + LCFS_DB_HOST: localhost + LCFS_DB_PORT: 5432 + LCFS_DB_USER: lcfs + LCFS_DB_PASS: development_only + LCFS_DB_BASE: lcfs + LCFS_REDIS_HOST: localhost + LCFS_REDIS_PORT: 6379 + LCFS_REDIS_PASSWORD: development_only + APP_ENVIRONMENT: dev + LCFS_CHES_CLIENT_ID: mock_client_id + LCFS_CHES_CLIENT_SECRET: mock_client_secret + LCFS_CHES_AUTH_URL: http://mock_auth_url + LCFS_CHES_SENDER_EMAIL: noreply@gov.bc.ca + LCFS_CHES_SENDER_NAME: Mock Notification System + LCFS_CHES_EMAIL_URL: http://mock_email_url + + - name: Run frontend tests + id: frontend_tests + continue-on-error: true + run: | + cd frontend + npm run test:run -- --reporter=junit --outputFile=vitest-results.xml + env: + CI: true + # Comment until all cypress tests are fixed for success. + # - name: Create cypress.env.json + # run: | + # echo '{ + # "admin_idir_username": "${{ secrets.ADMIN_IDIR_USERNAME }}", + # "admin_idir_password": "${{ secrets.ADMIN_IDIR_PASSWORD }}", + # "org1_bceid_username": "${{ secrets.ORG1_BCEID_USERNAME }}", + # "org1_bceid_password": "${{ secrets.ORG1_BCEID_PASSWORD }}", + # "org1_bceid_id": "${{ secrets.ORG1_BCEID_ID }}", + # "org1_bceid_userId": "${{ secrets.ORG1_BCEID_USERID }}", + # "org2_bceid_username": "${{ secrets.ORG2_BCEID_USERNAME }}", + # "org2_bceid_password": "${{ secrets.ORG2_BCEID_PASSWORD }}", + # "org2_bceid_id": "${{ secrets.ORG2_BCEID_ID }}", + # "org2_bceid_userId": "${{ secrets.ORG2_BCEID_USERID }}" + # }' > frontend/cypress.env.json + + # - name: Run Cypress tests + # id: cypress_tests + # continue-on-error: true + # uses: cypress-io/github-action@v6 + # with: + # command: npm run cypress:run + # wait-on: 'http://localhost:3000' + # working-directory: frontend + + - name: Upload test results + if: always() + uses: actions/upload-artifact@v4 + with: + name: test-results + path: | + backend/pytest-results.xml + frontend/vitest-results.xml + # frontend/cypress/reports/ + # frontend/cypress/screenshots/ + + - name: Stop services + if: always() + run: docker-compose down + + - name: Check test results + if: always() + run: | + if [ "${{ steps.backend_tests.outcome }}" == "failure" ] || \ + [ "${{ steps.frontend_tests.outcome }}" == "failure" ]; then + echo "One or more tests failed" + exit 1 + fi + + set-pre-release: + name: Find Dev deployment pre-release number + needs: run-tests + runs-on: ubuntu-latest + + outputs: + output1: ${{ steps.set-pre-release.outputs.PRE_RELEASE }} + + steps: + - name: Restore oc command from Cache + uses: actions/cache@v4.2.0 + with: + path: /usr/local/bin/oc + key: oc-cli-${{ runner.os }} + + - name: Log in to Openshift + uses: redhat-actions/oc-login@v1.3 + with: + openshift_server_url: ${{ secrets.OPENSHIFT_SERVER }} + openshift_token: ${{ secrets.OPENSHIFT_TOKEN }} + insecure_skip_tls_verify: true + namespace: ${{ env.DEV_NAMESPACE }} + + - id: set-pre-release + run: | + check_string() { + local string="$1" + + if [ ${#string} -ne 14 ]; then + echo "String length must be 14 characters" + return 1 + 
fi + + if ! [[ $string =~ ^[0-9]+$ ]]; then + echo "String can only contain numbers" + return 1 + fi + + local year="${string:0:4}" + local month="${string:4:2}" + local day="${string:6:2}" + local hour="${string:8:2}" + local minute="${string:10:2}" + local second="${string:12:2}" + + if ! date -d "$year-$month-$day $hour:$minute:$second" &> /dev/null; then + echo "String format must be yyyymmddhhmmss" + return 1 + fi + + return 0 + } + + input_string=$(oc -n ${{ env.DEV_NAMESPACE }} describe deployment/lcfs-frontend-dev | grep Image | awk -F '-' '{print $NF}') + + echo "The retrieved pre-release number on Dev is $input_string " + if check_string "$input_string"; then + echo "It is valid" + echo "PRE_RELEASE=$input_string" >> $GITHUB_OUTPUT + else + echo "It is not valid" + exit 1 + fi + + deploy-on-test: + name: Deploy LCFS on Test + runs-on: ubuntu-latest + timeout-minutes: 60 + needs: [set-pre-release] + + env: + PRE_RELEASE: ${{ needs.set-pre-release.outputs.output1 }} + + steps: + - id: get-current-time + run: | + echo "CURRENT_TIME=$(TZ='America/Vancouver' date '+%Y-%m-%d %H:%M:%S %Z')" >> $GITHUB_OUTPUT + + - name: Ask for approval for LCFS release-${{ env.VERSION }} Test deployment + uses: trstringer/manual-approval@v1.6.0 + with: + secret: ${{ github.TOKEN }} + approvers: AlexZorkin,kuanfandevops,hamed-valiollahi,airinggov,areyeslo,dhaselhan,Grulin,kevin-hashimoto + minimum-approvals: 1 + issue-title: "LCFS ${{ env.VERSION }}-${{ env.PRE_RELEASE }} Test Deployment at ${{ steps.get-current-time.outputs.CURRENT_TIME }}" + + - name: Restore oc command from Cache + uses: actions/cache@v4.2.0 + with: + path: /usr/local/bin/oc + key: oc-cli-${{ runner.os }} + + - name: Log in to Openshift + uses: redhat-actions/oc-login@v1.3 + with: + openshift_server_url: ${{ secrets.OPENSHIFT_SERVER }} + openshift_token: ${{ secrets.OPENSHIFT_TOKEN }} + insecure_skip_tls_verify: true + namespace: ${{ env.DEV_NAMESPACE }} + + - name: Tag LCFS images to Test + run: | + oc tag ${{ env.DEV_NAMESPACE }}/lcfs-backend:${{ env.VERSION }}-$PRE_RELEASE ${{ env.TEST_NAMESPACE }}/lcfs-backend:${{ env.VERSION }}-$PRE_RELEASE + oc tag ${{ env.DEV_NAMESPACE }}/lcfs-frontend:${{ env.VERSION }}-$PRE_RELEASE ${{ env.TEST_NAMESPACE }}/lcfs-frontend:${{ env.VERSION }}-$PRE_RELEASE + + - name: Checkout Manifest repository + uses: actions/checkout@v3 + with: + repository: bcgov-c/tenant-gitops-d2bd59 + ref: main + ssh-key: ${{ secrets.MANIFEST_REPO_DEPLOY_KEY }} + + - name: Update frontend tag + uses: mikefarah/yq@v4.40.5 + with: + cmd: yq -i '.image.tag = "${{ env.VERSION }}-${{ env.PRE_RELEASE }}"' lcfs/charts/lcfs-frontend/values-test.yaml + + - name: Update backend tag + uses: mikefarah/yq@v4.40.5 + with: + cmd: yq -i '.image.tag = "${{ env.VERSION }}-${{ env.PRE_RELEASE }}"' lcfs/charts/lcfs-backend/values-test.yaml + + - name: GitHub Commit & Push + run: | + git config --global user.email "actions@github.com" + git config --global user.name "GitHub Actions" + git add lcfs/charts/lcfs-frontend/values-test.yaml + git add lcfs/charts/lcfs-backend/values-test.yaml + git commit -m "Update the image tag to ${{ env.VERSION }}-${{ env.PRE_RELEASE }} on LCFS Test Environment" + git push diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 79c8e9e69..e99644bc2 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -1,123 +1,123 @@ -name: Testing lcfs +# name: Testing lcfs -on: push +# on: [push, workflow_dispatch] -jobs: - black: - runs-on: ubuntu-latest - steps: - - uses: 
actions/checkout@v2 - - name: Set up Python - uses: actions/setup-python@v2 - with: - python-version: '3.9' - - name: Install deps - uses: knowsuchagency/poetry-install@v1 - env: - POETRY_VIRTUALENVS_CREATE: false - - name: Run black check - run: poetry run black --check . - flake8: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - name: Set up Python - uses: actions/setup-python@v2 - with: - python-version: '3.9' - - name: Install deps - uses: knowsuchagency/poetry-install@v1 - env: - POETRY_VIRTUALENVS_CREATE: false - - name: Run flake8 check - run: poetry run flake8 --count . - mypy: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - name: Set up Python - uses: actions/setup-python@v2 - with: - python-version: '3.9' - - name: Install deps - uses: knowsuchagency/poetry-install@v1 - env: - POETRY_VIRTUALENVS_CREATE: false - - name: Run mypy check - run: poetry run mypy . - pytest: - runs-on: ubuntu-latest - services: +# jobs: +# black: +# runs-on: ubuntu-latest +# steps: +# - uses: actions/checkout@v2 +# - name: Set up Python +# uses: actions/setup-python@v2 +# with: +# python-version: '3.9' +# - name: Install deps +# uses: knowsuchagency/poetry-install@v1 +# env: +# POETRY_VIRTUALENVS_CREATE: false +# - name: Run black check +# run: poetry run black --check . +# flake8: +# runs-on: ubuntu-latest +# steps: +# - uses: actions/checkout@v2 +# - name: Set up Python +# uses: actions/setup-python@v2 +# with: +# python-version: '3.9' +# - name: Install deps +# uses: knowsuchagency/poetry-install@v1 +# env: +# POETRY_VIRTUALENVS_CREATE: false +# - name: Run flake8 check +# run: poetry run flake8 --count . +# mypy: +# runs-on: ubuntu-latest +# steps: +# - uses: actions/checkout@v2 +# - name: Set up Python +# uses: actions/setup-python@v2 +# with: +# python-version: '3.9' +# - name: Install deps +# uses: knowsuchagency/poetry-install@v1 +# env: +# POETRY_VIRTUALENVS_CREATE: false +# - name: Run mypy check +# run: poetry run mypy . +# pytest: +# runs-on: ubuntu-latest +# services: - lcfs-db: - image: postgres:13.8-bullseye - env: - POSTGRES_PASSWORD: lcfs - POSTGRES_USER: lcfs - POSTGRES_DB: lcfs - options: >- - --health-cmd="pg_isready" - --health-interval=10s - --health-timeout=5s - --health-retries=5 - ports: - - 5432:5432 - steps: - - uses: actions/checkout@v2 - - name: Set up Python - uses: actions/setup-python@v2 - with: - python-version: '3.9' - - name: Install deps - uses: knowsuchagency/poetry-install@v1 - env: - POETRY_VIRTUALENVS_CREATE: false - - name: Run pytest check - run: poetry run pytest -vv --cov="lcfs" . 
- env: - LCFS_HOST: "0.0.0.0" - LCFS_DB_HOST: localhost - cypress: - runs-on: ubuntu-latest - services: - db: - image: postgres:14.2 - env: - POSTGRES_DB: lcfs - POSTGRES_USER: lcfs - POSTGRES_PASSWORD: development_only - ports: - - 5432:5432 - redis: - image: bitnami/redis:6.2.5 - env: - ALLOW_EMPTY_PASSWORD: "yes" - ports: - - 6379:6379 - steps: - - uses: actions/checkout@v2 - - name: Build and Run Backend Service - run: | - docker build -t backend-service ./backend/Dockerfile - docker run -d --name backend -e LCFS_DB_HOST=localhost -e LCFS_REDIS_HOST=localhost -p 8000:8000 backend-service - - name: Data Seeding - run: docker exec backend poetry run python /app/lcfs/db/seeders/seed_database.py - - name: Build and Run Frontend Service - run: | - docker build -t frontend-service ./fontend/Dockerfile.dev - docker run -d --name frontend -p 3000:3000 frontend-service - - name: Cypress run - uses: cypress-io/github-action@v2 - with: - browser: chrome - wait-on: 'http://localhost:3000' - wait-on-timeout: 60 - env: - CYPRESS_IDIR_TEST_USER: ${{ secrets.CYPRESS_IDIR_TEST_USER }} - CYPRESS_IDIR_TEST_PASS: ${{ secrets.CYPRESS_IDIR_TEST_PASS }} - CYPRESS_BCEID_TEST_USER: ${{ secrets.CYPRESS_BCEID_TEST_USER }} - CYPRESS_BCEID_TEST_PASS: ${{ secrets.CYPRESS_BCEID_TEST_PASS }} - - name: Cleanup - run: | - docker stop backend frontend - docker rm backend frontend \ No newline at end of file +# lcfs-db: +# image: postgres:13.8-bullseye +# env: +# POSTGRES_PASSWORD: lcfs +# POSTGRES_USER: lcfs +# POSTGRES_DB: lcfs +# options: >- +# --health-cmd="pg_isready" +# --health-interval=10s +# --health-timeout=5s +# --health-retries=5 +# ports: +# - 5432:5432 +# steps: +# - uses: actions/checkout@v2 +# - name: Set up Python +# uses: actions/setup-python@v2 +# with: +# python-version: '3.9' +# - name: Install deps +# uses: knowsuchagency/poetry-install@v1 +# env: +# POETRY_VIRTUALENVS_CREATE: false +# - name: Run pytest check +# run: poetry run pytest -vv --cov="lcfs" . 
+#         env:
+#           LCFS_HOST: "0.0.0.0"
+#           LCFS_DB_HOST: localhost
+#   cypress:
+#     runs-on: ubuntu-latest
+#     services:
+#       db:
+#         image: postgres:14.2
+#         env:
+#           POSTGRES_DB: lcfs
+#           POSTGRES_USER: lcfs
+#           POSTGRES_PASSWORD: development_only
+#         ports:
+#           - 5432:5432
+#       redis:
+#         image: bitnami/redis:6.2.5
+#         env:
+#           ALLOW_EMPTY_PASSWORD: "yes"
+#         ports:
+#           - 6379:6379
+#     steps:
+#       - uses: actions/checkout@v2
+#       - name: Build and Run Backend Service
+#         run: |
+#           docker build -t backend-service ./backend/Dockerfile
+#           docker run -d --name backend -e LCFS_DB_HOST=localhost -e LCFS_REDIS_HOST=localhost -p 8000:8000 backend-service
+#       - name: Data Seeding
+#         run: docker exec backend poetry run python /app/lcfs/db/seeders/seed_database.py
+#       - name: Build and Run Frontend Service
+#         run: |
+#           docker build -t frontend-service ./fontend/Dockerfile.dev
+#           docker run -d --name frontend -p 3000:3000 frontend-service
+#       - name: Cypress run
+#         uses: cypress-io/github-action@v2
+#         with:
+#           browser: chrome
+#           wait-on: 'http://localhost:3000'
+#           wait-on-timeout: 60
+#         env:
+#           CYPRESS_IDIR_TEST_USER: ${{ secrets.CYPRESS_IDIR_TEST_USER }}
+#           CYPRESS_IDIR_TEST_PASS: ${{ secrets.CYPRESS_IDIR_TEST_PASS }}
+#           CYPRESS_BCEID_TEST_USER: ${{ secrets.CYPRESS_BCEID_TEST_USER }}
+#           CYPRESS_BCEID_TEST_PASS: ${{ secrets.CYPRESS_BCEID_TEST_PASS }}
+#       - name: Cleanup
+#         run: |
+#           docker stop backend frontend
+#           docker rm backend frontend
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index 4173d1dc8..e437ac444 100644
--- a/.gitignore
+++ b/.gitignore
@@ -8,6 +8,7 @@ __pycache__/
 *.py[cod]
 *$py.class
 docs/
+.DS_Store
 
 # C extensions
 *.so
@@ -104,7 +105,6 @@ ENV/
 env.bak/
 venv.bak/
 .git/
-.github/
 
 # Spyder project settings
 .spyderproject
@@ -131,4 +131,5 @@ dmypy.json
 cython_debug/
 
 # local database file
-LCFS Local DB.session.sql
\ No newline at end of file
+LCFS Local DB.session.sql
+frontend/cucumber-report.html
diff --git a/.vscode/launch.json b/.vscode/launch.json
index 5be5965b5..c13baa925 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -5,25 +5,47 @@
   "version": "0.2.0",
   "configurations": [
     {
-      "name": "FastAPI - LCFS",
-      "type": "python",
+      "name": "LCFS - pytests",
+      "type": "debugpy",
       "request": "launch",
       "cwd": "${workspaceFolder}/backend",
       "module": "poetry",
-      "python": "${workspaceFolder}/backend/.venv/bin/python",
+      "python": "${workspaceFolder}/backend/venv/bin/python3",
       "args": [
         "run",
-        "python",
-        "-m",
-        "uvicorn",
+        "pytest",
+        "-v",
+        "lcfs/tests/"
+      ]
+    },
+    {
+      "name": "LCFS Seeders",
+      "type": "debugpy",
+      "request": "launch",
+      "cwd": "${workspaceFolder}/backend",
+      "program": "${workspaceFolder}/backend/lcfs/db/seeders/seed_database.py",
+      "python": "${workspaceFolder}/backend/venv/bin/python3",
+      "args": [
+        "dev"
+      ]
+    },
+    {
+      "name": "FastAPI - LCFS",
+      "type": "debugpy",
+      "request": "launch",
+      "cwd": "${workspaceFolder}/backend",
+      "module": "uvicorn",
+      "python": "${workspaceFolder}/backend/venv/bin/python3",
+      "args": [
         "lcfs.web.application:get_app",
         "--port",
-        "8000"
+        "8000",
+        "--reload"
       ]
     },
     {
       "name": "LCFS FastAPI: Remote Attach",
-      "type": "python",
+      "type": "debugpy",
       "request": "attach",
       "connect": {
         "host": "localhost",
diff --git a/LCFS_ERD.drawio b/LCFS_ERD_v0.2.0.drawio
similarity index 50%
rename from LCFS_ERD.drawio
rename to LCFS_ERD_v0.2.0.drawio
index e2c781fa3..7fafc16e7 100644
--- a/LCFS_ERD.drawio
+++ b/LCFS_ERD_v0.2.0.drawio
@@ -1,7883 +1,6688 @@
[hunk body omitted: the draw.io XML of the ERD (7,883 lines before the rename, 6,688 after) survived extraction only as stripped +/- markers and is not recoverable here.]
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + - - - + + + - - + + - + - - - + + + - - + + - + - - - + + + - - + + - + - - - + + + - - - - + + + + + + - - + + + + - - + + - + - - - + + + - - + + - + - - - + + + - - + + - + - - - + + + - - + + - + - - - + + + + + + + + - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - - + + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - - - - - - - - - - - + + - + - + - - + + - + - + - - + + - + - + - - - - - - - + + + - - + + - - + + - - - - + + - - - + + + + - - - - - - + + + + - - - + + + - - + + - - - - + + - - - - + + + + + - - + + - - - - + + - - - - + + - - + + - - - + + + + + + + + + - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - + - - + + + + + + + - - + + - + + + + + + + + + + + + + + + + + + + + + + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - - + + - - + + + + - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - + - - - + + + - - + + - + - - - + + + - - + + - + - - - + + + - - + + - + - - - + + + - - + + - + - - - + + + - - + + - + - - - + + + - - - - - + + - - - - + + + - - + + + + - - + + - + - + - - + + + + + - + - - - + + + - - + + - + - - - + + + + + + + + + + + - - - - + + + - - - - - + + - + - - - + + + - - + + - + - - - + + + - - + + - + - - - + + + - + - - + + - - + + - - + + - + - - - + + + - - + + - + - - - + + + - - - - - + + - - - - + + + - - - - - + + + + - - + + - - + + + + - - + + + + - - + + + + + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - - - - - - - - - - - + + + + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - - + + - - - + + + + - - - - + + - - - - + + + + + + - - + + + + - - + + - - + + + + - - + + + + - - + + - + - - - + + + - - + + - + - - - + + + - - + + - + - - - + + + - - + + - + - - - + + + - - + + - + - - - + + + - - + + - - - - - - - - - - - - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - - + + - + + + + - - - + + + - - + + - + - - - + + + - - + + - + - - - + + + - - + + - + - - - + + + - - + + - + - - - + + + - - - - - - - - + + - + - - - + + + - - + + - + - - - + + + - - + + - + - - - + + + - - - - - - + + + + + + + - - - - + + - - + + - + - - - + + + - - - - - + + - + - - - + + + - - + + - + - - - + + + - - - - - - + + + + - - - - + + - - + + - + - - - + + + - - - - - + + - + - - - + + + - - + + - + - - - + + + - - - - - - + + + + - - - - + + - - + + - + - - - + + + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + - + - - - + + + - - + + - + - - - + + + - - - - - - + + + + - - - + + + + - - + + + + + - - - - + + - - - - + + - - + + - - - - + + - - - - + + - - + + - + - - - + + + - - + + - + - - - + + + - - + + - + - - - + + + - - - - - + + - + - - - + + + - - + + - + - - - + + + - - + + - + - - - + + + - - + + - + - - - + + + - - + + - + - - - + + + - - + + - - - - + + - - - - + + - + - - + + + + + + + + - - + + - - + + - - - - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - - + + - + + + + - - - + + + - - + + - + - - - + + + + + + + + + + + + + + + + - - + + - + - - - + + + - - + + - + - - - + + + - - + + - + - - 
- + + + - - + + + + + - + - + - - - - - - - + + + + + + - - - - - - - + + + - - - - - - - + + + + + + - - + + + + - - + + - + - + - - + + + + + - + - + - - + + - + - + - - + + - + - + - - + + - + - + - - + + - + - + - + - + - + - + - + - + - + - - + + + + + + + + + + + - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - + - - + + - + - + - - + + - + - + - - - - - - - - + + - - - - + + + - - - - + + + - - - - - + + - + - - - + + + - - - - - + + - + - - - + + + - - - - - + + - + - - - + + + - - - - - + + - + - - - + + + - - - - - + + - - - - + + + - - - - + + + - - - - + + + + + + - - + + + + - - + + - + - - - + + + - - + + - + - - - + + + - - + + - + - - - + + + - - + + + + + - + - - - + + + - - + + - + - - - + + + - - + + - + - - - + + + - - + + - + - - - + + + - - + + - + - - - + + + + + + + + + - - + + + + + - + - + - - + + - + - + - - + + - + - + - - + + - + - + - - + + - + - + - - - - - - - - - - - - - - + + - + - - - + + + - - + + - + - - - + + + - - + + - + - - - + + + - - + + - + - - - - - - - - - - - + + + - - - + + + + + + + - - + + - - + + - + - - - + + + - - + + - + - - - + + + - - + + - + - - - + + + - - + + - + - - - + + + - - + + - + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + - - - - - + + - - - - - + + - - - - - + + - - + + - - + + - - - - + + - - - - + + - - - - - - - - + + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - - + + - - - - + + + - - - - + + + - - + + - - + + - + - - - + + + - - + + - + - - - + + + - - + + - + - - - + + + - - + + - + - - - + + + - - + + - + - - - + + + + + + + + + + + + + + + + - - + + - + - + - + - - + + - - + + - + - - - + + + - - + + - + - - - + + + - - + + - + - - - + + + - - + + - + - - - - - - - - - - - - - - - + + + - - - - + - + - - + + - - + + - + - - - + + + - - + + - + - - - + + + - - + + - + - - - - - - - - - + + + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - - + + + + + - + - - - - - - - - - - - - + + + - - - - - + + - + - - - + + + - - + + - + - - - + + + - - + + - + - - - + + + - - + + - + - - - - + + + + - - + + - + - - - + + + - - + + - + - + - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + - + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + - - - - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - - + + - + + + + - - - + + + - - - - - + + - + - + - - + + - + - + - - + + - + - + - - + + - + - + - - + + - + - + - - + + - + - + - - - - - + + - + - - + + + + + - + - - + + - - + + + + + - + - - + + - - + + - - + + - + - - - + + + - - + + - + - - - + + + - - + + - + - - - + + + - - + + - + - - - + + + - - + + - + - - - + + + - - - - - + + - - - - + + + - - - - + + + - + - - + + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - - - - - - - - - - - + + - + - + - - + + - + - + - - - - - - + + + + - - - + + + + - - + + - + + + + - - - + + + - - + + - + - - - + + + - - + + - + - - - + + + - - + + - + - - - - - - - - - - - - - - - + + + - - - - - + + - - - - + + + - - - - + + + - - - - - + + - + - - - + + + - - + + - + - - - + + + - - + + - + - - - + + + - - + + - + - - - + + + - - - - - - + + + + - - - + + + + - + - - + + + + + - - + + - - + + - + - - - + + + - - + + - + - - - + + + - - + + - + - - - + + + - - + + - + - - - + + + - - + + - + - 
- - + + + - - - - + + + + + + - - - - + + + - - + + - - - + + + - - - - - + + + + - - + + - - + + + + - - + + + + - - + + - - + + - - + + - - + + + + + + + + diff --git a/README.md b/README.md index a983f2361..18be9307c 100644 --- a/README.md +++ b/README.md @@ -2,137 +2,9 @@ An official online application designed for fuel and electricity suppliers in British Columbia to manage compliance obligations under the Low Carbon Fuels Act. This tool streamlines the process, ensuring efficiency and adherence to regulations. -## Project Components +# Documentation -- **Frontend**: Developed with React.js and Material-UI for base components, providing a modern and intuitive user interface. -- **Backend**: Built on FastAPI, the backend handles API requests, business logic, and data management. -- **Database**: Utilizes PostgreSQL for secure and reliable data storage. -- **Authentication**: Implements Keycloak for robust identity and access management. -- **Migrations**: Manages database changes over time with Alembic. -- **ORM**: Employs SQLAlchemy for database entity mapping. -- **Validation**: Utilizes Pydantic for data validation within FastAPI. -- **Bundling**: Leverages Webpack for optimizing frontend assets. -- **Testing**: Uses Jest for frontend unit tests and Cypress for end-to-end testing. - -## Getting Started - -### Prerequisites - -- Docker and Docker Compose -- Node.js and npm (for frontend) -- Python 3.8+ and Poetry (for backend) - -### Environment Setup - -```bash -git clone https://github.com/bcgov/lcfs.git -cd lcfs -``` - -### Running with Docker - -```bash -docker-compose up --build -``` - -This command will build and start the services defined in docker-compose.yml, setting up the frontend, backend, and the database. - -### Frontend Development - -The frontend application is bootstrapped with Create React App and interacts with the backend for data operations. -The project utilizes government hosted authentication via Keycloak. - -To start the frontend separately: - -```bash -cd frontend -npm install -npm start -``` - -Access the UI at [http://localhost:3000](http://localhost:3000). - -### Backend Development - -The backend API is built using FastAPI and manages interactions with the database. - -To run the backend locally with Poetry: - -```bash -cd backend -poetry install -poetry run python -m lcfs -``` - -The API documentation is available at `/api/docs` once the server is running. - -### Database Migrations with Alembic - -```bash -alembic revision --autogenerate -m "Your message" -alembic upgrade head -``` - -Ensure the database service is active before running migrations. - -## Testing - -### Jest Tests - -```bash -cd frontend -npm test -``` - -### Cypress End-to-End Tests - -Cypress is used for end-to-end testing of the application. These tests simulate real user interactions and ensure the integrity of the user flows. - -#### Running Cypress Tests - -To run Cypress tests interactively: - -```bash -cd frontend -npm run cypress:open -``` - -This opens the Cypress Test Runner, from which you can execute individual tests or the entire test suite. - -#### Running in Headless Mode - -For headless execution (useful for CI/CD pipelines): - -```bash -cd frontend -npm run cypress:run -``` - -#### Writing Cypress Tests - -When contributing new tests: - -1. Add your test files under `frontend/cypress/e2e`. -2. Use descriptive names for test files and test cases. -3. Follow established patterns for structuring tests, such as using `beforeEach` and custom commands for routine tasks. -4. 
Utilize data attributes like `data-test` for more stable element selection. - -#### Configuration and Environmental Variables -- To update the configuration file for Cypress, please go to `frontend/cypress.config.js`. -- For viewing Cypress environmental variables, refer to the file located at `frontend/cypress.env.json`. - -Refer to the [Cypress Documentation](https://docs.cypress.io) for best practices and detailed guidance. - -### Backend Tests - -```bash -cd backend -poetry run pytest -``` - -## Deployment - -Refer to the provided deployment scripts and guidelines for moving to production. +Check out our [wiki docs](https://github.com/bcgov/lcfs/wiki) for documentation! ## Contributing diff --git a/backend/.dockerignore b/backend/.dockerignore index 00ce79b20..e26615916 100644 --- a/backend/.dockerignore +++ b/backend/.dockerignore @@ -142,3 +142,6 @@ dmypy.json # Cython debug symbols cython_debug/ + +# ignore migrate script +migrate.sh \ No newline at end of file diff --git a/backend/.python-version b/backend/.python-version new file mode 100644 index 000000000..a5c4c7633 --- /dev/null +++ b/backend/.python-version @@ -0,0 +1 @@ +3.9.0 diff --git a/backend/Dockerfile b/backend/Dockerfile index be572135e..2f863e90b 100644 --- a/backend/Dockerfile +++ b/backend/Dockerfile @@ -1,5 +1,5 @@ # Base stage for common setup -FROM python:3.9.6-slim-buster as base +FROM python:3.11-slim-bullseye as base RUN apt-get update && apt-get install -y \ gcc \ @@ -25,14 +25,14 @@ RUN apt-get purge -y \ # Copying the actual application, wait-for-it script, and prestart script COPY . /app/ -# Note: We mount the local directory using docker-compose so ensure these scripts also have execute permissions +# Note: We mount the local directory using docker-compose so ensure these scripts also have execute permissions # by running the following command on your host machine from the root of this project: # chmod +x ./backend/wait-for-it.sh ./backend/lcfs/prestart.sh ./backend/lcfs/start.sh ./backend/lcfs/start-reload.sh # Make all startup scripts executable RUN chmod +x /app/wait-for-it.sh /app/lcfs/prestart.sh /app/lcfs/start.sh -CMD ["/app/lcfs/start.sh"] +CMD /bin/bash /app/lcfs/start.sh # Production stage FROM base as prod diff --git a/backend/Dockerfile.openshift b/backend/Dockerfile.openshift new file mode 100644 index 000000000..61f02b187 --- /dev/null +++ b/backend/Dockerfile.openshift @@ -0,0 +1,48 @@ +# Base stage for common setup +FROM artifacts.developer.gov.bc.ca/docker-remote/python:3.11-bullseye as base + +RUN apt-get update && apt-get install -y --no-install-recommends procps \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +RUN pip install poetry==1.6.1 + +# Configuring poetry +RUN poetry config virtualenvs.create true + +# Copying requirements of a project +COPY pyproject.toml poetry.lock /app/ +WORKDIR /app + +# Set PYTHONPATH to include the lcfs directory +ENV PYTHONPATH="${PYTHONPATH}:/app/lcfs" + +ENV POETRY_CACHE_DIR=/.cache/pypoetry + +# Installing requirements +RUN poetry install --only main + +# Removing gcc +RUN apt-get purge -y \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +# Copying the actual application, wait-for-it script, and prestart script +COPY . 
/app/ + +# Note: We mount the local directory using docker-compose so ensure these scripts also have execute permissions +# by running the following command on your host machine from the root of this project: +# chmod +x ./backend/wait-for-it.sh ./backend/lcfs/prestart.sh ./backend/lcfs/start.sh ./backend/lcfs/start-reload.sh + +# Update the group ownership and grant the read/write permission for /app +RUN chgrp -R root /app && \ + chmod -R g+rw /app && \ + chgrp -R root /.cache && \ + chmod -R g+rw /.cache +# Make all startup scripts executable +RUN chmod +x /app/wait-for-it.sh /app/lcfs/prestart.sh /app/lcfs/start.sh + +# Set the APP_ENVIRONMENT variable to 'production' +ENV APP_ENVIRONMENT=prod + +CMD ["/app/lcfs/start.sh"] \ No newline at end of file diff --git a/backend/README.md b/backend/README.md index f9fea50d7..26d7f2e1b 100644 --- a/backend/README.md +++ b/backend/README.md @@ -48,7 +48,7 @@ This application can be configured with environment variables. You can create `.env` file in the root directory and place all environment variables here. -All environment variables should start with "LCFS_" prefix. +All environment variables should start with "LCFS\_" prefix. For example if you see in your "lcfs/settings.py" a variable named like `random_parameter`, you should provide the "LCFS_RANDOM_PARAMETER" @@ -60,6 +60,7 @@ You can read more about BaseSettings class here: https://pydantic-docs.helpmanua
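For illustration, a minimal sketch of the prefix mechanism, assuming a pydantic `BaseSettings`-style class (`random_parameter` is the illustrative name from the paragraph above, not a real LCFS setting):

```python
from pydantic import BaseSettings  # pydantic v1-style import


class Settings(BaseSettings):
    random_parameter: str = "default value"  # illustrative field, not a real setting

    class Config:
        env_prefix = "LCFS_"  # LCFS_RANDOM_PARAMETER populates random_parameter
        env_file = ".env"


settings = Settings()
```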
## Pre-commit To install pre-commit simply run inside the shell: + ```bash pre-commit install ``` @@ -68,75 +69,114 @@ pre-commit is very useful to check your code before publishing it. It's configured using .pre-commit-config.yaml file. By default it runs: -* black (formats your code); -* mypy (validates types); -* isort (sorts imports in all files); -* flake8 (spots possibe bugs); +- black (formats your code); +- mypy (validates types); +- isort (sorts imports in all files); +- flake8 (spots possible bugs); You can read more about pre-commit here: https://pre-commit.com/ - ## Migrations ## Documentation for `migrate.sh` ### Purpose + The `migrate.sh` script is a versatile tool for managing database migrations in this FastAPI project. It automates various tasks related to Alembic, including generating new migrations, upgrading and downgrading the database, and managing the virtual environment and dependencies. ### Features -- **Virtual Environment Management**: Automatically creates and manages a virtual environment for migration operations. -- **Dependency Management**: Installs project dependencies using Poetry. -- **Migration Generation**: Generates Alembic migration files based on SQLAlchemy model changes. -- **Database Upgrade**: Upgrades the database schema to a specified revision or the latest version. -- **Database Downgrade**: Reverts the database schema to a specified revision or to the base state. + +- **Virtual Environment Management**: Automatically creates and manages a virtual environment for migration operations. +- **Dependency Management**: Installs project dependencies using Poetry. +- **Migration Generation**: Generates Alembic migration files based on SQLAlchemy model changes. +- **Database Upgrade**: Upgrades the database schema to a specified revision or the latest version. +- **Database Downgrade**: Reverts the database schema to a specified revision or to the base state. ### Usage + 1. **Make Script Executable** + ```bash chmod +x migrate.sh ``` 2. **Generating Migrations** -Generate a new migration with a descriptive message: + Generate a new migration with a descriptive message: + ```bash ./migrate.sh -g "Description of changes" ``` 3. **Upgrading Database** -Upgrade the database to a specific revision or to the latest version: + Upgrade the database to a specific revision or to the latest version: + ```bash ./migrate.sh -u [revision] ``` + Omit `[revision]` to upgrade to the latest version (head). 4. **Downgrading Database** -Downgrade the database to a specific revision or to the base state: + Downgrade the database to a specific revision or to the base state: + ```bash ./migrate.sh -d [revision] ``` + Omit `[revision]` to revert to the base state. 5. **Help Manual** -Display the help manual for script usage: + Display the help manual for script usage: + ```bash ./migrate.sh -h ``` ### Notes -- Ensure Python 3.9+ and Poetry are installed on your system. -- The script assumes that it is located in the same directory as `alembic.ini`. -- Always test migrations in a development environment before applying them to production. -- The script automatically activates and deactivates the virtual environment as needed. +- Ensure Python 3.9+ and Poetry are installed on your system. +- The script assumes that it is located in the same directory as `alembic.ini`. +- Always test migrations in a development environment before applying them to production. +- The script automatically activates and deactivates the virtual environment as needed. + +## Running Tests +To ensure the quality and correctness of the code, it's important to run tests regularly. This project uses `pytest` for testing. Follow these steps to run tests on your local machine: -## Running tests +### Prerequisites -For running tests on your local machine. -1. you need to start a database, ideally with the docker-compose. +Before running the tests, ensure the following prerequisites are met: + +1. **PostgreSQL Instance**: A running instance of PostgreSQL is required. Ideally, use the provided `docker-compose` file to start a PostgreSQL container. This ensures consistency in the testing environment. + +2. **Python Environment**: Make sure your Python environment is set up with all necessary dependencies. This can be achieved using Poetry: + + ```bash + poetry install + ``` + +### Running Tests with Pytest + +The project's tests can be executed using the `pytest` command. Our testing framework is configured to handle the setup and teardown of the test environment automatically. Here's what happens when you run the tests: + +- **Test Database Setup**: A test database is automatically created. This is separate from your development or production databases to avoid any unintended data modifications. + +- **Database Migrations**: Alembic migrations are run against the test database to ensure it has the correct schema. + +- **Data Seeding**: The `test_seeder` is used to populate the test database with necessary data for the tests. + +- **Test Execution**: All test cases are run against the configured test database. + +- **Teardown**: After the tests have completed, the test database is dropped to clean up the environment. + +To run the tests, use the following command in your terminal: -2. Run the pytest. ```bash -pytest -vv . +poetry run pytest -s -v ``` + +Options: + +- `-s`: Disables per-test capturing of stdout/stderr. This is useful for observing print statements and other console outputs in real time. +- `-v`: Verbose mode. Provides detailed information about each test being run.
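For orientation, an individual test typically exercises the app through the async `client` fixture defined in `backend/lcfs/conftest.py` (shown further down in this diff); a minimal sketch with an illustrative path:

```python
import pytest
from httpx import AsyncClient


@pytest.mark.anyio
async def test_example_endpoint(client: AsyncClient):
    # The client is wired to the app with the test database and mocked auth.
    response = await client.get("/api/example")  # illustrative path
    assert response.status_code == 200
```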
diff --git a/backend/lcfs/__main__.py b/backend/lcfs/__main__.py index 397e0a313..146f7cff8 100644 --- a/backend/lcfs/__main__.py +++ b/backend/lcfs/__main__.py @@ -14,6 +14,7 @@ def main() -> None: reload=settings.reload, log_level=settings.log_level.value.lower(), factory=True, + timeout_keep_alive=settings.timeout_keep_alive, ) except Exception as e: print(e) diff --git a/backend/lcfs/conftest.py b/backend/lcfs/conftest.py index c3400666e..cea7bae8a 100644 --- a/backend/lcfs/conftest.py +++ b/backend/lcfs/conftest.py @@ -1,9 +1,23 @@ -from typing import Any, AsyncGenerator +import structlog +import warnings + +# Suppress the PendingDeprecationWarning for multipart +warnings.filterwarnings( + "ignore", + message="Please use `import python_multipart` instead.", + category=PendingDeprecationWarning, +) +import subprocess +import warnings +from typing import Any, AsyncGenerator, List, Callable import pytest -from fakeredis import FakeServer +from fakeredis import FakeServer, aioredis from fakeredis.aioredis import FakeConnection from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware +from fastapi_cache import FastAPICache +from fastapi_cache.backends.redis import RedisBackend from httpx import AsyncClient from redis.asyncio import ConnectionPool from sqlalchemy.ext.asyncio import ( @@ -12,14 +26,25 @@ async_sessionmaker, create_async_engine, ) +from starlette.authentication import AuthCredentials, AuthenticationBackend +from starlette.middleware.authentication import AuthenticationMiddleware -from lcfs.db.dependencies import get_db_session -from lcfs.db.utils import create_database, drop_database -from lcfs.services.redis.dependency import get_redis_pool +from lcfs.db.dependencies import get_async_db_session +from lcfs.db.models.organization.Organization import Organization +from lcfs.db.models.user.Role import Role +from lcfs.db.models.user.Role import RoleEnum +from lcfs.db.models.user.UserProfile import UserProfile +from lcfs.db.models.user.UserRole import UserRole +from lcfs.db.seeders.seed_database import seed_database +from lcfs.db.utils import create_test_database, drop_test_database +from lcfs.services.redis.dependency import get_redis_client from lcfs.settings import settings from lcfs.web.application import get_app +logger = structlog.get_logger(__name__) + + @pytest.fixture(scope="session") def anyio_backend() -> str: """ @@ -33,26 +58,27 @@ def anyio_backend() -> str: @pytest.fixture(scope="session") async def _engine() -> AsyncGenerator[AsyncEngine, None]: """ - Create engine and databases. + Create engine and run Alembic migrations. :yield: new engine. """ - from lcfs.db.meta import meta # noqa: WPS433 - from lcfs.db.models import load_all_models # noqa: WPS433 + # Create the test database + await create_test_database() - load_all_models() + # Run Alembic migrations + subprocess.run(["alembic", "upgrade", "head"], check=True) - await create_database() + # Create AsyncEngine instance + engine = create_async_engine(str(settings.db_test_url)) - engine = create_async_engine(str(settings.db_url)) - async with engine.begin() as conn: - await conn.run_sync(meta.create_all) + # Seed the database with test data + await seed_database("test") try: yield engine finally: await engine.dispose() - await drop_database() + await drop_test_database() @pytest.fixture @@ -68,53 +94,86 @@ async def dbsession( :param _engine: current engine. :yields: async session. 
""" - connection = await _engine.connect() - trans = await connection.begin() + async with _engine.begin() as connection: + session_maker = async_sessionmaker( + connection, + expire_on_commit=False, + ) + session = session_maker() - session_maker = async_sessionmaker( - connection, - expire_on_commit=False, - ) - session = session_maker() + # Add test user info into the session + user_info = UserProfile( + user_profile_id=1, keycloak_username="test_user", organization_id=1 + ) # Mocked user ID - try: - yield session - finally: - await session.close() - await trans.rollback() - await connection.close() + session.info["user"] = user_info + + try: + yield session + finally: + # Rolling back the session here prevents data persistence, + # which causes issues for tests that depend on others. + await session.rollback() + await session.close() @pytest.fixture -async def fake_redis_pool() -> AsyncGenerator[ConnectionPool, None]: +async def fake_redis_client() -> AsyncGenerator[aioredis.FakeRedis, None]: """ - Get instance of a fake redis. + Get instance of a fake Redis client. - :yield: FakeRedis instance. + :yield: FakeRedis client instance. """ server = FakeServer() server.connected = True - pool = ConnectionPool(connection_class=FakeConnection, server=server) + redis_client = aioredis.FakeRedis(server=server, decode_responses=True) - yield pool + try: + yield redis_client + finally: + await redis_client.close() + + +@pytest.fixture +async def dbsession_factory( + _engine: AsyncEngine, +) -> Callable[[], AsyncGenerator[AsyncSession, None]]: + """ + Get a factory function for database sessions. - await pool.disconnect() + :param _engine: current engine. + :return: A factory function that returns an async session. + """ + session_factory = async_sessionmaker( + _engine, + expire_on_commit=False, + ) + return session_factory @pytest.fixture def fastapi_app( dbsession: AsyncSession, - fake_redis_pool: ConnectionPool, + fake_redis_client: aioredis.FakeRedis, + set_mock_user, # Fixture for setting up mock authentication + user_roles: List[RoleEnum] = [RoleEnum.ADMINISTRATOR], # Default role ) -> FastAPI: - """ - Fixture for creating FastAPI app. - - :return: fastapi app with mocked dependencies. - """ + # Create the FastAPI application instance application = get_app() - application.dependency_overrides[get_db_session] = lambda: dbsession - application.dependency_overrides[get_redis_pool] = lambda: fake_redis_pool - return application # noqa: WPS331 + application.dependency_overrides[get_async_db_session] = lambda: dbsession + application.dependency_overrides[get_redis_client] = lambda: fake_redis_client + + # Set up application state for testing + application.state.redis_client = fake_redis_client + application.state.settings = settings + + # Set up mock authentication backend with the specified roles + set_mock_user(application, user_roles) + + # Initialize the cache with fake Redis backend + FastAPICache.init(RedisBackend(fake_redis_client), prefix="lcfs") + + return application @pytest.fixture @@ -130,3 +189,192 @@ async def client( """ async with AsyncClient(app=fastapi_app, base_url="http://test") as ac: yield ac + + +def role_enum_member(role_name): + for role in RoleEnum: + if role.value == role_name: + return role + raise ValueError(f"Invalid role name: {role_name}") + + +class MockAuthenticationBackend(AuthenticationBackend): + def __init__(self, user_roles: List[RoleEnum], user_details: dict): + """ + Initialize mock authentication backend with user details from a dictionary. 
+ + Args: + user_roles (List[RoleEnum]): List of user roles. + user_details (dict): A dictionary containing customizable user attributes (username, email, etc.). + """ + # Convert list of role names (strings) to RoleEnum members + self.user_roles_enum = user_roles + self.role_count = 0 + + # Use the provided user details + self.user_profile_id = user_details["user_profile_id"] + self.keycloak_username = user_details["keycloak_username"] + self.keycloak_email = user_details["keycloak_email"] + self.username = user_details["username"] + self.organization_id = user_details["organization_id"] + self.email = user_details["email"] + self.first_name = user_details["first_name"] + self.last_name = user_details["last_name"] + self.is_active = user_details["is_active"] + self.organization_name = user_details["organization_name"] + + async def authenticate(self, request): + # Simulate a user object based on the role + user = UserProfile( + user_profile_id=self.user_profile_id, + keycloak_username=self.keycloak_username, + keycloak_email=self.keycloak_email, + organization_id=self.organization_id, + email=self.email, + first_name=self.first_name, + last_name=self.last_name, + is_active=self.is_active, + ) + + organization = Organization( + organization_id=self.organization_id, name=self.organization_name + ) + user.organization = organization + + # Create UserRole instances based on the RoleEnum members provided + user.user_roles = [ + self.create_user_role(user, role_enum) for role_enum in self.user_roles_enum + ] + + return AuthCredentials(["authenticated"]), user + + def create_user_role(self, user_profile, role_enum): + role = Role( + role_id=self.role_count, + name=role_enum, + description=f"Mocked role for {role_enum}", + is_government_role=role_enum + in [RoleEnum.GOVERNMENT, RoleEnum.ANALYST, RoleEnum.ADMINISTRATOR], + ) + user_role = UserRole( + user_role_id=self.role_count, user_profile=user_profile, role=role + ) + self.role_count += 1 + return user_role + + +@pytest.fixture +def set_mock_user(): + def _set_mock_auth( + application: FastAPI, roles: List[RoleEnum], user_details: dict = None + ): + """ + Fixture to mock user authentication with customizable user details. + + Args: + application (FastAPI): The FastAPI application instance. + roles (List[RoleEnum]): The roles to be assigned to the user. + user_details (dict): A dictionary containing customizable user attributes (username, email, etc.). + """ + default_user_details = { + "user_profile_id": 1, + "keycloak_username": "mockuser", + "keycloak_email": "test@test.com", + "username": "mockuser", + "organization_id": 1, + "email": "test@test.com", + "first_name": "Test", + "last_name": "User", + "is_active": True, + "organization_name": "Test Organization", + } + + # Merge default values with provided user details + user_details = {**default_user_details, **(user_details or {})} + + # Clear existing middleware + application.user_middleware = [] + + # Add necessary middleware for testing, excluding LazyAuthenticationBackend + application.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_methods=["*"], + allow_headers=["*"], + ) + + # Add the Mock Authentication Middleware + mock_auth_backend = MockAuthenticationBackend( + user_roles=roles, user_details=user_details + ) + application.add_middleware(AuthenticationMiddleware, backend=mock_auth_backend) + + return _set_mock_auth + + +@pytest.fixture +async def add_models(dbsession): + """ + Fixture to add and flush a list of model instances to the database. 
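+    Models are flushed but not committed, so they stay inside the enclosing test transaction and are rolled back at teardown.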
+ + Args: + dbsession: The active database session for transaction management. + + Returns: + A function that takes a list of models and flushes them to the database. It includes + error handling to manage any exceptions that occur during database operations. + """ + + async def _add(models): + try: + dbsession.add_all(models) + await dbsession.flush() + except Exception as e: + logger.error( + "Error adding models to the database", + error=str(e), + exc_info=e, + ) + await dbsession.rollback() + raise + + return _add + + +@pytest.fixture +async def update_model(dbsession): + """ + Fixture to update and flush a model instance to the database. + + Args: + dbsession: The active database session for transaction management. + + Returns: + A function that takes a model instance, updates it in the database, and handles exceptions. + """ + + async def _update(model): + try: + dbsession.add(model) + await dbsession.flush() + except Exception as e: + logger.error( + "Error updating models to the database", + error=str(e), + exc_info=e, + ) + await dbsession.rollback() + raise + + return _update + + +@pytest.fixture(autouse=True) +def ignore_specific_warnings(): + with warnings.catch_warnings(): + warnings.filterwarnings( + "ignore", message="This endpoint is accessible by all roles" + ) + warnings.filterwarnings("ignore", category=DeprecationWarning) + # Add more specific warnings to ignore as needed + yield
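Taken together, these fixtures let a test pick its authenticated role and seed rows inside the test transaction. A minimal sketch (the endpoint path is illustrative, and any seeded model instances would be hypothetical):

```python
import pytest
from httpx import AsyncClient

from lcfs.db.models.user.Role import RoleEnum


@pytest.mark.anyio
async def test_example_as_analyst(client: AsyncClient, fastapi_app, set_mock_user):
    # Swap the mocked authentication backend to an Analyst for this test.
    set_mock_user(fastapi_app, [RoleEnum.ANALYST])

    # Rows could be seeded here with the add_models fixture, e.g.:
    # await add_models([SomeModel(...)])  # hypothetical model instances

    response = await client.get("/api/example")  # illustrative path
    assert response.status_code == 200
```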
diff --git a/backend/lcfs/db/base.py b/backend/lcfs/db/base.py index a14c149f1..cd9788188 100644 --- a/backend/lcfs/db/base.py +++ b/backend/lcfs/db/base.py @@ -1,27 +1,65 @@ -from sqlalchemy.ext.declarative import declarative_base, AbstractConcreteBase -from sqlalchemy import String, Column, DateTime, Integer, Date, text, TIMESTAMP -from datetime import datetime +import uuid +import enum +from contextvars import ContextVar +from sqlalchemy.ext.declarative import AbstractConcreteBase +from sqlalchemy import ( + String, + Column, + Integer, + Date, + text, + TIMESTAMP, + func, + Boolean, + MetaData, + Enum, +) +from sqlalchemy.orm import declarative_base -Base = declarative_base() +# Define naming conventions for all constraints +naming_convention = { + "ix": "ix_%(table_name)s_%(column_0_name)s", # Index + "uq": "uq_%(table_name)s_%(column_0_name)s", # Unique constraint + "ck": "ck_%(table_name)s_%(constraint_name)s", # Check constraint + "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s", # Foreign key + "pk": "pk_%(table_name)s", # Primary key +} + +# Apply this naming convention to the MetaData object +metadata = MetaData(naming_convention=naming_convention) + +Base = declarative_base(metadata=metadata) + +# Define a context variable to store the user +current_user_var = ContextVar("current_user", default=None) + + +def get_current_user(): + user_info = current_user_var.get() + username = getattr(user_info, "keycloak_username", "no_user") + return username class BaseModel(AbstractConcreteBase, Base): """Base for all models.""" - __table_args__ = {'schema': 'metadata'} + __table_args__ = {"schema": "metadata"} create_date = Column( TIMESTAMP(timezone=True), - server_default=text('now()'), - comment='Date and time (UTC) when the physical record was created in the database.') - update_date = Column(TIMESTAMP(timezone=True), - server_default=text('now()'), - onupdate=datetime.now, - comment='Date and time (UTC) when the physical record was updated in the' - ' database. ' - 'It will be the same as the create_date until the record is first ' - 'updated after creation.') - + server_default=text("now()"), + comment="Date and time (UTC) when the physical record was created in the database.", + ) + update_date = Column( + TIMESTAMP(timezone=True), + server_default=text("now()"), + onupdate=func.now(), + comment="Date and time (UTC) when the physical record was updated in the" + " database. " + "It will be the same as the create_date until the record is first " + "updated after creation.", + ) + def __init_subclass__(cls, **kwargs): super().__init_subclass__(**kwargs) # Import the module to ensure SQLAlchemy recognizes it @@ -29,25 +67,78 @@ def __init_subclass__(cls, **kwargs): class Auditable(AbstractConcreteBase, Base): - __table_args__ = {'schema': 'metadata'} + __table_args__ = {"schema": "metadata"} create_user = Column( String, - comment='The user who created this record in the database.') + comment="The user who created this record in the database.", + default=get_current_user, + ) update_user = Column( String, - comment='The user who last updated this record in the database.') + comment="The user who last updated this record in the database.", + default=get_current_user, + onupdate=get_current_user, + ) + class DisplayOrder(Base): __abstract__ = True - display_order = Column(Integer, comment='Relative rank in display sorting order') + display_order = Column(Integer, comment="Relative rank in display sorting order") class EffectiveDates(Base): __abstract__ = True - effective_date = Column(Date, nullable=True, - comment='The calendar date the value became valid.') - expiration_date = Column(Date, nullable=True, - comment='The calendar date the value is no longer valid.') + effective_date = Column( + Date, nullable=True, comment="The calendar date the value became valid." + ) + effective_status = Column( + Boolean, + nullable=False, + default=True, + comment="True if the value is currently valid, False if it is no longer valid.", + ) + expiration_date = Column( + Date, nullable=True, comment="The calendar date the value is no longer valid."
+ ) + + +class UserTypeEnum(enum.Enum): + SUPPLIER = "SUPPLIER" + GOVERNMENT = "GOVERNMENT" + + +class ActionTypeEnum(enum.Enum): + CREATE = "CREATE" + UPDATE = "UPDATE" + DELETE = "DELETE" + + +class Versioning(Base): + __abstract__ = True + + group_uuid = Column( + String(36), + nullable=False, + default=lambda: str(uuid.uuid4()), + comment="UUID that groups all versions of a record series", + ) + version = Column( + Integer, + nullable=False, + default=0, + comment="Version number of the record", + ) + user_type = Column( + Enum(UserTypeEnum), + nullable=False, + comment="Indicates whether the record was created/modified by a supplier or government user", + ) + action_type = Column( + Enum(ActionTypeEnum), + nullable=False, + server_default=text("'CREATE'"), + comment="Action type for this record", + )
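As a quick illustration of how these declarative mixins are meant to be used, here is a minimal sketch with a hypothetical table (not an actual LCFS model; the real version-chaining logic lives in the application services):

```python
# Hypothetical model combining the mixins above (illustrative only).
from sqlalchemy import Column, Integer, String, UniqueConstraint


class ExampleRecord(BaseModel, Auditable, Versioning):
    __tablename__ = "example_record"
    __table_args__ = (
        UniqueConstraint("name"),
        {"schema": "metadata"},
    )

    example_record_id = Column(Integer, primary_key=True)
    name = Column(String(100), nullable=False)


# The metadata-level naming convention renders the unique constraint as
# "uq_example_record_name", and create_user/update_user default to
# get_current_user(), which reads current_user_var.

# Version 0 of a record series: group_uuid, version, and action_type are
# filled by their column defaults when the row is inserted.
draft = ExampleRecord(name="example", user_type=UserTypeEnum.SUPPLIER)

# A later government edit is stored as a new row in the same series
# (assumes `draft` has been flushed so its defaults are populated):
revised = ExampleRecord(
    name="example",
    group_uuid=draft.group_uuid,  # same record series
    version=draft.version + 1,    # next version in the chain
    user_type=UserTypeEnum.GOVERNMENT,
    action_type=ActionTypeEnum.UPDATE,
)
```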
""" - session: AsyncSession = AsyncSession(async_engine) - - try: # noqa: WPS501 - yield session - finally: - await session.commit() - await session.close() - - -def transactional(func): - @wraps(func) - async def wrapper(*args, **kwargs): - db: AsyncSession = kwargs.get("db") - if not db: - raise HTTPException(status_code=500, - detail="Database session not available") - - try: - result = None - async with db.begin(): - result = await func(*args, **kwargs) - return result - except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) - - return wrapper + async with AsyncSession(async_engine) as session: + async with session.begin(): + if request.user: + current_user_var.set(request.user) + current_user = get_current_user() + await set_user_context(session, current_user) + try: + yield session + await session.flush() + await session.commit() + except Exception as e: + await session.rollback() # Roll back the transaction on error + raise e + finally: + await session.close() # Always close the session to free up the connection diff --git a/backend/lcfs/db/migrations/env.py b/backend/lcfs/db/migrations/env.py index 21a991a3a..dc0e8daa3 100644 --- a/backend/lcfs/db/migrations/env.py +++ b/backend/lcfs/db/migrations/env.py @@ -1,4 +1,5 @@ import asyncio +import alembic_postgresql_enum from logging.config import fileConfig from alembic import context @@ -21,22 +22,32 @@ if config.config_file_name is not None: fileConfig(config.config_file_name) -# add your model's MetaData object here +# Add your model's MetaData object here # for 'autogenerate' support -# from myapp import mymodel target_metadata = Base.metadata -# other values from the config, defined by the needs of env.py, -# can be acquired: -# my_important_option = config.get_main_option("my_important_option") -# ... etc. +# Exclude specific tables (views and materialized views) from autogenerate +exclude_tables = [ + "mv_transaction_aggregate", + "mv_transaction_count", + "mv_director_review_transaction_count", + "mv_org_compliance_report_count", + "transaction_status_view", +] + +def include_object(object, name, type_, reflected, compare_to): + if type_ == "table" and name in exclude_tables: + # Exclude these tables from autogenerate + return False + else: + return True async def run_migrations_offline() -> None: """Run migrations in 'offline' mode. This configures the context with just a URL and not an Engine, though an Engine is acceptable - here as well. By skipping the Engine creation + here as well. By skipping the Engine creation we don't even need a DBAPI to be available. Calls to context.execute() here emit the given string to the @@ -48,24 +59,27 @@ async def run_migrations_offline() -> None: target_metadata=target_metadata, literal_binds=True, dialect_opts={"paramstyle": "named"}, + include_object=include_object, ) with context.begin_transaction(): context.run_migrations() - def do_run_migrations(connection: Connection) -> None: """ Run actual sync migrations. :param connection: connection to the database. """ - context.configure(connection=connection, target_metadata=target_metadata) + context.configure( + connection=connection, + target_metadata=target_metadata, + include_object=include_object, + ) with context.begin_transaction(): context.run_migrations() - async def run_migrations_online() -> None: """ Run migrations in 'online' mode. 
@@ -78,7 +92,6 @@ async def run_migrations_online() -> None: async with connectable.connect() as connection: await connection.run_sync(do_run_migrations) - loop = asyncio.get_event_loop() if context.is_offline_mode(): task = run_migrations_offline() diff --git a/backend/lcfs/db/migrations/versions/2023-11-23-13-16_4bf9af8c64f9.py b/backend/lcfs/db/migrations/versions/2023-11-23-13-16_4bf9af8c64f9.py deleted file mode 100644 index 45b7bcd91..000000000 --- a/backend/lcfs/db/migrations/versions/2023-11-23-13-16_4bf9af8c64f9.py +++ /dev/null @@ -1,1478 +0,0 @@ -"""initial migration - -Revision ID: 4bf9af8c64f9 -Revises: -Create Date: 2023-11-23 13:16:29.992060 - -""" -import sqlalchemy as sa -from alembic import op - -# revision identifiers, used by Alembic. -revision = "4bf9af8c64f9" -down_revision = None -branch_labels = None -depends_on = None - - -def upgrade() -> None: - # ### commands auto generated by Alembic - please adjust! ### - op.create_table( - "category", - sa.Column( - "category_id", - sa.Integer(), - autoincrement=True, - nullable=False, - comment="Unique identifier for the transfer category", - ), - sa.Column( - "category", - sa.String(length=500), - nullable=True, - comment="Transfer category", - ), - sa.Column( - "create_date", - sa.TIMESTAMP(timezone=True), - server_default=sa.text("now()"), - nullable=True, - comment="Date and time (UTC) when the physical record was created in the database.", - ), - sa.Column( - "update_date", - sa.TIMESTAMP(timezone=True), - server_default=sa.text("now()"), - nullable=True, - comment="Date and time (UTC) when the physical record was updated in the database. It will be the same as the create_date until the record is first updated after creation.", - ), - sa.Column( - "create_user", - sa.String(), - nullable=True, - comment="The user who created this record in the database.", - ), - sa.Column( - "update_user", - sa.String(), - nullable=True, - comment="The user who last updated this record in the database.", - ), - sa.Column( - "effective_date", - sa.Date(), - nullable=True, - comment="The calendar date the value became valid.", - ), - sa.Column( - "expiration_date", - sa.Date(), - nullable=True, - comment="The calendar date the value is no longer valid.", - ), - sa.PrimaryKeyConstraint("category_id"), - sa.UniqueConstraint("category_id"), - comment="Transfer Category", - ) - op.create_table( - "comment", - sa.Column( - "comment_id", - sa.Integer(), - autoincrement=True, - nullable=False, - comment="Unique identifier for comment", - ), - sa.Column( - "comment", sa.String(length=500), nullable=True, comment="Transfer category" - ), - sa.Column( - "create_date", - sa.TIMESTAMP(timezone=True), - server_default=sa.text("now()"), - nullable=True, - comment="Date and time (UTC) when the physical record was created in the database.", - ), - sa.Column( - "update_date", - sa.TIMESTAMP(timezone=True), - server_default=sa.text("now()"), - nullable=True, - comment="Date and time (UTC) when the physical record was updated in the database. 
It will be the same as the create_date until the record is first updated after creation.", - ), - sa.Column( - "create_user", - sa.String(), - nullable=True, - comment="The user who created this record in the database.", - ), - sa.Column( - "update_user", - sa.String(), - nullable=True, - comment="The user who last updated this record in the database.", - ), - sa.Column( - "effective_date", - sa.Date(), - nullable=True, - comment="The calendar date the value became valid.", - ), - sa.Column( - "expiration_date", - sa.Date(), - nullable=True, - comment="The calendar date the value is no longer valid.", - ), - sa.PrimaryKeyConstraint("comment_id"), - sa.UniqueConstraint("comment_id"), - comment="Comment for transaction", - ) - op.create_table( - "notification_channel", - sa.Column( - "notification_channel_id", sa.Integer(), autoincrement=True, nullable=False - ), - sa.Column( - "channel_name", - sa.Enum("EMAIL", "IN_APP", name="channel_enum"), - nullable=False, - ), - sa.Column("enabled", sa.Boolean(), nullable=True), - sa.Column("subscribe_by_default", sa.Boolean(), nullable=True), - sa.Column( - "create_date", - sa.TIMESTAMP(timezone=True), - server_default=sa.text("now()"), - nullable=True, - comment="Date and time (UTC) when the physical record was created in the database.", - ), - sa.Column( - "update_date", - sa.TIMESTAMP(timezone=True), - server_default=sa.text("now()"), - nullable=True, - comment="Date and time (UTC) when the physical record was updated in the database. It will be the same as the create_date until the record is first updated after creation.", - ), - sa.Column( - "create_user", - sa.String(), - nullable=True, - comment="The user who created this record in the database.", - ), - sa.Column( - "update_user", - sa.String(), - nullable=True, - comment="The user who last updated this record in the database.", - ), - sa.PrimaryKeyConstraint("notification_channel_id"), - comment="Tracks the state and defaults for communication channels", - ) - op.create_table( - "notification_type", - sa.Column( - "notification_type_id", sa.Integer(), autoincrement=True, nullable=False - ), - sa.Column( - "name", - sa.Enum( - "CREDIT_TRANSFER_CREATED", - "CREDIT_TRANSFER_SIGNED_1OF2", - "CREDIT_TRANSFER_SIGNED_2OF2", - "CREDIT_TRANSFER_PROPOSAL_REFUSED", - "CREDIT_TRANSFER_PROPOSAL_ACCEPTED", - "CREDIT_TRANSFER_RECOMMENDED_FOR_APPROVAL", - "CREDIT_TRANSFER_RECOMMENDED_FOR_DECLINATION", - "CREDIT_TRANSFER_DECLINED", - "CREDIT_TRANSFER_APPROVED", - "CREDIT_TRANSFER_RESCINDED", - "CREDIT_TRANSFER_COMMENT", - "CREDIT_TRANSFER_INTERNAL_COMMENT", - "PVR_CREATED", - "PVR_RECOMMENDED_FOR_APPROVAL", - "PVR_RESCINDED", - "PVR_PULLED_BACK", - "PVR_DECLINED", - "PVR_APPROVED", - "PVR_COMMENT", - "PVR_INTERNAL_COMMENT", - "PVR_RETURNED_TO_ANALYST", - "DOCUMENT_PENDING_SUBMISSION", - "DOCUMENT_SUBMITTED", - "DOCUMENT_SCAN_FAILED", - "DOCUMENT_RECEIVED", - "DOCUMENT_ARCHIVED", - "COMPLIANCE_REPORT_DRAFT", - "COMPLIANCE_REPORT_SUBMITTED", - "COMPLIANCE_REPORT_RECOMMENDED_FOR_ACCEPTANCE_ANALYST", - "COMPLIANCE_REPORT_RECOMMENDED_FOR_REJECTION_ANALYST", - "COMPLIANCE_REPORT_RECOMMENDED_FOR_ACCEPTANCE_MANAGER", - "COMPLIANCE_REPORT_RECOMMENDED_FOR_REJECTION_MANAGER", - "COMPLIANCE_REPORT_ACCEPTED", - "COMPLIANCE_REPORT_REJECTED", - "COMPLIANCE_REPORT_REQUESTED_SUPPLEMENTAL", - "EXCLUSION_REPORT_DRAFT", - "EXCLUSION_REPORT_SUBMITTED", - "EXCLUSION_REPORT_RECOMMENDED_FOR_ACCEPTANCE_ANALYST", - "EXCLUSION_REPORT_RECOMMENDED_FOR_REJECTION_ANALYST", - "EXCLUSION_REPORT_RECOMMENDED_FOR_ACCEPTANCE_MANAGER", - 
"EXCLUSION_REPORT_RECOMMENDED_FOR_REJECTION_MANAGER", - "EXCLUSION_REPORT_ACCEPTED", - "EXCLUSION_REPORT_REJECTED", - "EXCLUSION_REPORT_REQUESTED_SUPPLEMENTAL", - name="notification_type_enum", - ), - nullable=False, - ), - sa.Column("description", sa.Text(), nullable=True), - sa.Column("email_content", sa.Text(), nullable=True), - sa.Column( - "create_date", - sa.TIMESTAMP(timezone=True), - server_default=sa.text("now()"), - nullable=True, - comment="Date and time (UTC) when the physical record was created in the database.", - ), - sa.Column( - "update_date", - sa.TIMESTAMP(timezone=True), - server_default=sa.text("now()"), - nullable=True, - comment="Date and time (UTC) when the physical record was updated in the database. It will be the same as the create_date until the record is first updated after creation.", - ), - sa.Column( - "create_user", - sa.String(), - nullable=True, - comment="The user who created this record in the database.", - ), - sa.Column( - "update_user", - sa.String(), - nullable=True, - comment="The user who last updated this record in the database.", - ), - sa.PrimaryKeyConstraint("notification_type_id"), - comment="Represents a Notification type", - ) - op.create_table( - "organization_address", - sa.Column("organization_address_id", sa.Integer(), nullable=False), - sa.Column( - "name", sa.String(length=500), nullable=True, comment="Organization name" - ), - sa.Column("street_address", sa.String(length=500), nullable=True), - sa.Column("address_other", sa.String(length=100), nullable=True), - sa.Column("city", sa.String(length=100), nullable=True), - sa.Column("province_state", sa.String(length=50), nullable=True), - sa.Column("country", sa.String(length=100), nullable=True), - sa.Column("postalCode_zipCode", sa.String(length=10), nullable=True), - sa.Column( - "create_date", - sa.TIMESTAMP(timezone=True), - server_default=sa.text("now()"), - nullable=True, - comment="Date and time (UTC) when the physical record was created in the database.", - ), - sa.Column( - "update_date", - sa.TIMESTAMP(timezone=True), - server_default=sa.text("now()"), - nullable=True, - comment="Date and time (UTC) when the physical record was updated in the database. 
It will be the same as the create_date until the record is first updated after creation.", - ), - sa.Column( - "create_user", - sa.String(), - nullable=True, - comment="The user who created this record in the database.", - ), - sa.Column( - "update_user", - sa.String(), - nullable=True, - comment="The user who last updated this record in the database.", - ), - sa.Column( - "effective_date", - sa.Date(), - nullable=True, - comment="The calendar date the value became valid.", - ), - sa.Column( - "expiration_date", - sa.Date(), - nullable=True, - comment="The calendar date the value is no longer valid.", - ), - sa.PrimaryKeyConstraint("organization_address_id"), - comment="Represents an organization's address.", - ) - op.create_table( - "organization_attorney_address", - sa.Column("organization_attorney_address_id", sa.Integer(), nullable=False), - sa.Column( - "name", - sa.String(length=500), - nullable=True, - comment="Attorney's Organization name", - ), - sa.Column("street_address", sa.String(length=500), nullable=True), - sa.Column("address_other", sa.String(length=100), nullable=True), - sa.Column("city", sa.String(length=100), nullable=True), - sa.Column("province_state", sa.String(length=50), nullable=True), - sa.Column("country", sa.String(length=100), nullable=True), - sa.Column("postalCode_zipCode", sa.String(length=10), nullable=True), - sa.Column( - "create_date", - sa.TIMESTAMP(timezone=True), - server_default=sa.text("now()"), - nullable=True, - comment="Date and time (UTC) when the physical record was created in the database.", - ), - sa.Column( - "update_date", - sa.TIMESTAMP(timezone=True), - server_default=sa.text("now()"), - nullable=True, - comment="Date and time (UTC) when the physical record was updated in the database. It will be the same as the create_date until the record is first updated after creation.", - ), - sa.Column( - "create_user", - sa.String(), - nullable=True, - comment="The user who created this record in the database.", - ), - sa.Column( - "update_user", - sa.String(), - nullable=True, - comment="The user who last updated this record in the database.", - ), - sa.Column( - "effective_date", - sa.Date(), - nullable=True, - comment="The calendar date the value became valid.", - ), - sa.Column( - "expiration_date", - sa.Date(), - nullable=True, - comment="The calendar date the value is no longer valid.", - ), - sa.PrimaryKeyConstraint("organization_attorney_address_id"), - comment="Represents an organization attorney's address.", - ) - op.create_table( - "organization_status", - sa.Column( - "organization_status_id", - sa.Integer(), - autoincrement=True, - nullable=False, - comment="Unique identifier for the organization", - ), - sa.Column( - "status", - sa.Enum( - "Unregistered", - "Registered", - "Suspended", - "Canceled", - name="org_status_enum", - ), - nullable=True, - comment="Organization's status", - ), - sa.Column( - "description", - sa.String(length=500), - nullable=True, - comment="Organization description", - ), - sa.Column( - "create_date", - sa.TIMESTAMP(timezone=True), - server_default=sa.text("now()"), - nullable=True, - comment="Date and time (UTC) when the physical record was created in the database.", - ), - sa.Column( - "update_date", - sa.TIMESTAMP(timezone=True), - server_default=sa.text("now()"), - nullable=True, - comment="Date and time (UTC) when the physical record was updated in the database. 
It will be the same as the create_date until the record is first updated after creation.", - ), - sa.Column( - "create_user", - sa.String(), - nullable=True, - comment="The user who created this record in the database.", - ), - sa.Column( - "update_user", - sa.String(), - nullable=True, - comment="The user who last updated this record in the database.", - ), - sa.Column( - "display_order", - sa.Integer(), - nullable=True, - comment="Relative rank in display sorting order", - ), - sa.PrimaryKeyConstraint("organization_status_id"), - comment="Contains list of organization type", - ) - op.create_table( - "organization_type", - sa.Column( - "organization_type_id", - sa.Integer(), - autoincrement=True, - nullable=False, - comment="Unique identifier for the organization_type", - ), - sa.Column( - "org_type", - sa.Enum( - "fuel_supplier", - "electricity_supplier", - "broker", - "utilities", - name="org_type_enum", - ), - nullable=True, - comment="Organization's Types", - ), - sa.Column( - "description", - sa.String(length=500), - nullable=True, - comment="Organization Types", - ), - sa.Column( - "create_date", - sa.TIMESTAMP(timezone=True), - server_default=sa.text("now()"), - nullable=True, - comment="Date and time (UTC) when the physical record was created in the database.", - ), - sa.Column( - "update_date", - sa.TIMESTAMP(timezone=True), - server_default=sa.text("now()"), - nullable=True, - comment="Date and time (UTC) when the physical record was updated in the database. It will be the same as the create_date until the record is first updated after creation.", - ), - sa.Column( - "create_user", - sa.String(), - nullable=True, - comment="The user who created this record in the database.", - ), - sa.Column( - "update_user", - sa.String(), - nullable=True, - comment="The user who last updated this record in the database.", - ), - sa.Column( - "display_order", - sa.Integer(), - nullable=True, - comment="Relative rank in display sorting order", - ), - sa.PrimaryKeyConstraint("organization_type_id"), - comment="Represents a Organization types", - ) - op.create_table( - "role", - sa.Column("role_id", sa.Integer(), autoincrement=True, nullable=False), - sa.Column( - "name", - sa.Enum( - "ADMINISTRATOR", - "ANALYST", - "COMPLIANCE_MANAGER", - "DIRECTOR", - "MANAGE_USERS", - "TRANSFER", - "COMPLIANCE_REPORTING", - "SIGNING_AUTHORITY", - "READ_ONLY", - name="role_enum", - ), - nullable=False, - comment="Role code. Natural key. Used internally. eg Admin, GovUser, GovDirector, etc", - ), - sa.Column( - "description", - sa.String(length=1000), - nullable=True, - comment="Descriptive text explaining this role. This is what's shown to the user.", - ), - sa.Column( - "is_government_role", - sa.Boolean(), - nullable=True, - comment="Flag. True if this is a government role (eg. Analyst, Administrator)", - ), - sa.Column( - "display_order", - sa.Integer(), - nullable=True, - comment="Relative rank in display sorting order", - ), - sa.Column( - "create_date", - sa.TIMESTAMP(timezone=True), - server_default=sa.text("now()"), - nullable=True, - comment="Date and time (UTC) when the physical record was created in the database.", - ), - sa.Column( - "update_date", - sa.TIMESTAMP(timezone=True), - server_default=sa.text("now()"), - nullable=True, - comment="Date and time (UTC) when the physical record was updated in the database. 
It will be the same as the create_date until the record is first updated after creation.", - ), - sa.Column( - "create_user", - sa.String(), - nullable=True, - comment="The user who created this record in the database.", - ), - sa.Column( - "update_user", - sa.String(), - nullable=True, - comment="The user who last updated this record in the database.", - ), - sa.PrimaryKeyConstraint("role_id"), - sa.UniqueConstraint("name"), - comment="To hold all the available roles and their descriptions.", - ) - op.create_table( - "transaction_type", - sa.Column( - "transaction_type_id", sa.Integer(), autoincrement=True, nullable=False - ), - sa.Column( - "type", - sa.Enum( - "administrative_adjustment", - "initiative_agreement", - "assessment", - "transfer", - name="transaction_type_enum", - ), - nullable=True, - comment="Transaction Types", - ), - sa.Column( - "create_date", - sa.TIMESTAMP(timezone=True), - server_default=sa.text("now()"), - nullable=True, - comment="Date and time (UTC) when the physical record was created in the database.", - ), - sa.Column( - "update_date", - sa.TIMESTAMP(timezone=True), - server_default=sa.text("now()"), - nullable=True, - comment="Date and time (UTC) when the physical record was updated in the database. It will be the same as the create_date until the record is first updated after creation.", - ), - sa.Column( - "create_user", - sa.String(), - nullable=True, - comment="The user who created this record in the database.", - ), - sa.Column( - "update_user", - sa.String(), - nullable=True, - comment="The user who last updated this record in the database.", - ), - sa.Column( - "display_order", - sa.Integer(), - nullable=True, - comment="Relative rank in display sorting order", - ), - sa.PrimaryKeyConstraint("transaction_type_id"), - comment="Represents a Transaction type", - ) - op.create_table( - "transfer_status", - sa.Column( - "transfer_status_id", sa.Integer(), autoincrement=True, nullable=False - ), - sa.Column( - "status", - sa.Enum( - "draft", - "deleted", - "sent", - "submitted", - "recommended", - "recorded", - "refused", - "declined", - "rescinded", - name="transfer_type_enum", - ), - nullable=True, - comment="Transfer Status", - ), - sa.Column( - "create_date", - sa.TIMESTAMP(timezone=True), - server_default=sa.text("now()"), - nullable=True, - comment="Date and time (UTC) when the physical record was created in the database.", - ), - sa.Column( - "update_date", - sa.TIMESTAMP(timezone=True), - server_default=sa.text("now()"), - nullable=True, - comment="Date and time (UTC) when the physical record was updated in the database. 
It will be the same as the create_date until the record is first updated after creation.", - ), - sa.Column( - "create_user", - sa.String(), - nullable=True, - comment="The user who created this record in the database.", - ), - sa.Column( - "update_user", - sa.String(), - nullable=True, - comment="The user who last updated this record in the database.", - ), - sa.Column( - "display_order", - sa.Integer(), - nullable=True, - comment="Relative rank in display sorting order", - ), - sa.PrimaryKeyConstraint("transfer_status_id"), - comment="Represents a Transfer Status", - ) - op.create_table( - "user_login_history", - sa.Column("user_login_history_id", sa.Integer(), nullable=False), - sa.Column( - "keycloak_email", - sa.String(), - nullable=False, - comment="Keycloak email address to associate on first login.", - ), - sa.Column( - "external_username", - sa.String(length=150), - nullable=True, - comment="BCeID or IDIR username", - ), - sa.Column( - "keycloak_user_id", - sa.String(length=150), - nullable=True, - comment="This is the unique id returned from Keycloak and is the main mapping key between the LCFS user and the Keycloak user. The identity provider type will be appended as a suffix after an @ symbol. For ex. asdf1234@bceidbasic or asdf1234@idir", - ), - sa.Column( - "is_login_successful", - sa.Boolean(), - nullable=True, - comment="True if this login attempt was successful, false on failure.", - ), - sa.Column( - "login_error_message", - sa.String(length=500), - nullable=True, - comment="Error text on unsuccessful login attempt.", - ), - sa.Column( - "create_date", - sa.TIMESTAMP(timezone=True), - server_default=sa.text("now()"), - nullable=True, - comment="Date and time (UTC) when the physical record was created in the database.", - ), - sa.Column( - "update_date", - sa.TIMESTAMP(timezone=True), - server_default=sa.text("now()"), - nullable=True, - comment="Date and time (UTC) when the physical record was updated in the database. It will be the same as the create_date until the record is first updated after creation.", - ), - sa.PrimaryKeyConstraint("user_login_history_id"), - comment="Keeps track of all user login attempts", - ) - op.create_table( - "organization", - sa.Column( - "organization_id", - sa.Integer(), - autoincrement=True, - nullable=False, - comment="Unique identifier for the organization", - ), - sa.Column( - "name", - sa.String(length=500), - nullable=True, - comment="Organization's legal name", - ), - sa.Column("organization_status_id", sa.Integer(), nullable=True), - sa.Column( - "organization_type_id", - sa.Integer(), - nullable=True, - comment="Organization's type", - ), - sa.Column("organization_address_id", sa.Integer(), nullable=True), - sa.Column("organization_attorney_address_id", sa.Integer(), nullable=True), - sa.Column( - "create_date", - sa.TIMESTAMP(timezone=True), - server_default=sa.text("now()"), - nullable=True, - comment="Date and time (UTC) when the physical record was created in the database.", - ), - sa.Column( - "update_date", - sa.TIMESTAMP(timezone=True), - server_default=sa.text("now()"), - nullable=True, - comment="Date and time (UTC) when the physical record was updated in the database. 
It will be the same as the create_date until the record is first updated after creation.", - ), - sa.Column( - "create_user", - sa.String(), - nullable=True, - comment="The user who created this record in the database.", - ), - sa.Column( - "update_user", - sa.String(), - nullable=True, - comment="The user who last updated this record in the database.", - ), - sa.Column( - "effective_date", - sa.Date(), - nullable=True, - comment="The calendar date the value became valid.", - ), - sa.Column( - "expiration_date", - sa.Date(), - nullable=True, - comment="The calendar date the value is no longer valid.", - ), - sa.ForeignKeyConstraint( - ["organization_address_id"], - ["organization_address.organization_address_id"], - ), - sa.ForeignKeyConstraint( - ["organization_attorney_address_id"], - ["organization_attorney_address.organization_attorney_address_id"], - ), - sa.ForeignKeyConstraint( - ["organization_status_id"], - ["organization_status.organization_status_id"], - ), - sa.ForeignKeyConstraint( - ["organization_type_id"], - ["organization_type.organization_type_id"], - ), - sa.PrimaryKeyConstraint("organization_id"), - comment="Contains a list of all of the recognized Part 3 fuel suppliers, both past and present, as well as an entry for the government which is also considered an organization.", - ) - op.create_table( - "transaction", - sa.Column( - "transaction_id", - sa.Integer(), - autoincrement=True, - nullable=False, - comment="Unique identifier for the transactions", - ), - sa.Column( - "compliance_units", - sa.BigInteger(), - nullable=True, - comment="Compliance Units", - ), - sa.Column("transaction_type_id", sa.Integer(), nullable=True), - sa.Column("organization_id", sa.Integer(), nullable=True), - sa.Column( - "create_date", - sa.TIMESTAMP(timezone=True), - server_default=sa.text("now()"), - nullable=True, - comment="Date and time (UTC) when the physical record was created in the database.", - ), - sa.Column( - "update_date", - sa.TIMESTAMP(timezone=True), - server_default=sa.text("now()"), - nullable=True, - comment="Date and time (UTC) when the physical record was updated in the database. 
It will be the same as the create_date until the record is first updated after creation.", - ), - sa.Column( - "create_user", - sa.String(), - nullable=True, - comment="The user who created this record in the database.", - ), - sa.Column( - "update_user", - sa.String(), - nullable=True, - comment="The user who last updated this record in the database.", - ), - sa.Column( - "effective_date", - sa.Date(), - nullable=True, - comment="The calendar date the value became valid.", - ), - sa.Column( - "expiration_date", - sa.Date(), - nullable=True, - comment="The calendar date the value is no longer valid.", - ), - sa.ForeignKeyConstraint( - ["organization_id"], - ["organization.organization_id"], - ), - sa.ForeignKeyConstraint( - ["transaction_type_id"], - ["transaction_type.transaction_type_id"], - ), - sa.PrimaryKeyConstraint("transaction_id"), - sa.UniqueConstraint("transaction_id"), - comment="Contains a list of all of the government to organization and Organization to Organization transaction.", - ) - op.create_table( - "user_profile", - sa.Column("user_profile_id", sa.Integer(), autoincrement=True, nullable=False), - sa.Column( - "keycloak_user_id", - sa.String(length=150), - nullable=True, - comment="Unique id returned from Keycloak", - ), - sa.Column( - "keycloak_email", - sa.String(length=255), - nullable=True, - comment="keycloak email address", - ), - sa.Column( - "keycloak_username", - sa.String(length=150), - nullable=False, - comment="keycloak Username", - ), - sa.Column( - "email", - sa.String(length=255), - nullable=True, - comment="Primary email address", - ), - sa.Column( - "username", sa.String(length=150), nullable=False, comment="Login Username" - ), - sa.Column( - "display_name", - sa.String(length=500), - nullable=True, - comment="Displayed name for user", - ), - sa.Column( - "title", sa.String(length=100), nullable=True, comment="Professional Title" - ), - sa.Column( - "phone", sa.String(length=50), nullable=True, comment="Primary phone number" - ), - sa.Column( - "mobile_phone", - sa.String(length=50), - nullable=True, - comment="Mobile phone number", - ), - sa.Column("organization_id", sa.Integer(), nullable=True), - sa.Column( - "create_date", - sa.TIMESTAMP(timezone=True), - server_default=sa.text("now()"), - nullable=True, - comment="Date and time (UTC) when the physical record was created in the database.", - ), - sa.Column( - "update_date", - sa.TIMESTAMP(timezone=True), - server_default=sa.text("now()"), - nullable=True, - comment="Date and time (UTC) when the physical record was updated in the database. 
It will be the same as the create_date until the record is first updated after creation.", - ), - sa.Column( - "create_user", - sa.String(), - nullable=True, - comment="The user who created this record in the database.", - ), - sa.Column( - "update_user", - sa.String(), - nullable=True, - comment="The user who last updated this record in the database.", - ), - sa.ForeignKeyConstraint( - ["organization_id"], - ["organization.organization_id"], - ), - sa.PrimaryKeyConstraint("user_profile_id"), - sa.UniqueConstraint("keycloak_username"), - sa.UniqueConstraint("username"), - comment="Users who may access the application", - ) - op.create_table( - "issuance", - sa.Column( - "issuance_id", - sa.Integer(), - autoincrement=True, - nullable=False, - comment="Unique identifier for the issuance", - ), - sa.Column( - "compliance_units", - sa.BigInteger(), - nullable=True, - comment="Compliance Units", - ), - sa.Column( - "transaction_effective_date", - sa.DateTime(), - nullable=True, - comment="Transaction effective date", - ), - sa.Column("organization_id", sa.Integer(), nullable=True), - sa.Column("transaction_id", sa.Integer(), nullable=True), - sa.Column("comment_id", sa.Integer(), nullable=True), - sa.Column( - "create_date", - sa.TIMESTAMP(timezone=True), - server_default=sa.text("now()"), - nullable=True, - comment="Date and time (UTC) when the physical record was created in the database.", - ), - sa.Column( - "update_date", - sa.TIMESTAMP(timezone=True), - server_default=sa.text("now()"), - nullable=True, - comment="Date and time (UTC) when the physical record was updated in the database. It will be the same as the create_date until the record is first updated after creation.", - ), - sa.Column( - "create_user", - sa.String(), - nullable=True, - comment="The user who created this record in the database.", - ), - sa.Column( - "update_user", - sa.String(), - nullable=True, - comment="The user who last updated this record in the database.", - ), - sa.Column( - "effective_date", - sa.Date(), - nullable=True, - comment="The calendar date the value became valid.", - ), - sa.Column( - "expiration_date", - sa.Date(), - nullable=True, - comment="The calendar date the value is no longer valid.", - ), - sa.ForeignKeyConstraint( - ["comment_id"], - ["comment.comment_id"], - ), - sa.ForeignKeyConstraint( - ["organization_id"], - ["organization.organization_id"], - ), - sa.ForeignKeyConstraint( - ["transaction_id"], - ["transaction.transaction_id"], - ), - sa.PrimaryKeyConstraint("issuance_id"), - sa.UniqueConstraint("issuance_id"), - comment="Government to organization compliance units issuance", - ) - op.create_table( - "notification_channel_subscription", - sa.Column( - "notification_channel_subscription_id", - sa.Integer(), - autoincrement=True, - nullable=False, - ), - sa.Column("is_enabled", sa.Boolean(), nullable=True), - sa.Column("user_profile_id", sa.Integer(), nullable=True), - sa.Column("notification_type_id", sa.Integer(), nullable=True), - sa.Column("notification_channel_id", sa.Integer(), nullable=True), - sa.Column( - "create_date", - sa.TIMESTAMP(timezone=True), - server_default=sa.text("now()"), - nullable=True, - comment="Date and time (UTC) when the physical record was created in the database.", - ), - sa.Column( - "update_date", - sa.TIMESTAMP(timezone=True), - server_default=sa.text("now()"), - nullable=True, - comment="Date and time (UTC) when the physical record was updated in the database. 
It will be the same as the create_date until the record is first updated after creation.", - ), - sa.Column( - "create_user", - sa.String(), - nullable=True, - comment="The user who created this record in the database.", - ), - sa.Column( - "update_user", - sa.String(), - nullable=True, - comment="The user who last updated this record in the database.", - ), - sa.ForeignKeyConstraint( - ["notification_channel_id"], - ["notification_channel.notification_channel_id"], - ), - sa.ForeignKeyConstraint( - ["notification_type_id"], - ["notification_type.notification_type_id"], - ), - sa.ForeignKeyConstraint( - ["user_profile_id"], - ["user_profile.user_profile_id"], - ), - sa.PrimaryKeyConstraint("notification_channel_subscription_id"), - comment="Represents a user's subscription to notification events", - ) - op.create_table( - "notification_message", - sa.Column( - "notification_message_id", sa.Integer(), autoincrement=True, nullable=False - ), - sa.Column("is_read", sa.Boolean(), nullable=True), - sa.Column("is_warning", sa.Boolean(), nullable=True), - sa.Column("is_error", sa.Boolean(), nullable=True), - sa.Column("is_archived", sa.Boolean(), nullable=True), - sa.Column("related_organization_id", sa.Integer(), nullable=True), - sa.Column("origin_user_profile_id", sa.Integer(), nullable=True), - sa.Column("related_user_profile_id", sa.Integer(), nullable=True), - sa.Column("notification_type_id", sa.Integer(), nullable=True), - sa.Column( - "create_date", - sa.TIMESTAMP(timezone=True), - server_default=sa.text("now()"), - nullable=True, - comment="Date and time (UTC) when the physical record was created in the database.", - ), - sa.Column( - "update_date", - sa.TIMESTAMP(timezone=True), - server_default=sa.text("now()"), - nullable=True, - comment="Date and time (UTC) when the physical record was updated in the database. 
It will be the same as the create_date until the record is first updated after creation.", - ), - sa.Column( - "create_user", - sa.String(), - nullable=True, - comment="The user who created this record in the database.", - ), - sa.Column( - "update_user", - sa.String(), - nullable=True, - comment="The user who last updated this record in the database.", - ), - sa.ForeignKeyConstraint( - ["notification_type_id"], - ["notification_type.notification_type_id"], - ), - sa.ForeignKeyConstraint( - ["origin_user_profile_id"], - ["user_profile.user_profile_id"], - ), - sa.ForeignKeyConstraint( - ["related_organization_id"], - ["organization.organization_id"], - ), - sa.ForeignKeyConstraint( - ["related_user_profile_id"], - ["user_profile.user_profile_id"], - ), - sa.PrimaryKeyConstraint("notification_message_id"), - comment="Represents a notification message sent to an application user", - ) - op.create_table( - "transfer", - sa.Column( - "transfer_id", - sa.Integer(), - autoincrement=True, - nullable=False, - comment="Unique identifier for the org to org transfer record", - ), - sa.Column("from_organization_id", sa.Integer(), nullable=True), - sa.Column("to_organization_id", sa.Integer(), nullable=True), - sa.Column("transaction_id", sa.Integer(), nullable=True), - sa.Column( - "transaction_effective_date", - sa.DateTime(), - nullable=True, - comment="transaction effective date", - ), - sa.Column("comment_id", sa.Integer(), nullable=True), - sa.Column("transfer_status_id", sa.Integer(), nullable=True), - sa.Column("transfer_category_id", sa.Integer(), nullable=True), - sa.Column( - "create_date", - sa.TIMESTAMP(timezone=True), - server_default=sa.text("now()"), - nullable=True, - comment="Date and time (UTC) when the physical record was created in the database.", - ), - sa.Column( - "update_date", - sa.TIMESTAMP(timezone=True), - server_default=sa.text("now()"), - nullable=True, - comment="Date and time (UTC) when the physical record was updated in the database. 
It will be the same as the create_date until the record is first updated after creation.", - ), - sa.Column( - "create_user", - sa.String(), - nullable=True, - comment="The user who created this record in the database.", - ), - sa.Column( - "update_user", - sa.String(), - nullable=True, - comment="The user who last updated this record in the database.", - ), - sa.Column( - "effective_date", - sa.Date(), - nullable=True, - comment="The calendar date the value became valid.", - ), - sa.Column( - "expiration_date", - sa.Date(), - nullable=True, - comment="The calendar date the value is no longer valid.", - ), - sa.ForeignKeyConstraint( - ["comment_id"], - ["comment.comment_id"], - ), - sa.ForeignKeyConstraint( - ["from_organization_id"], - ["organization.organization_id"], - ), - sa.ForeignKeyConstraint( - ["to_organization_id"], - ["organization.organization_id"], - ), - sa.ForeignKeyConstraint( - ["transaction_id"], - ["transaction.transaction_id"], - ), - sa.ForeignKeyConstraint( - ["transfer_category_id"], - ["category.category_id"], - ), - sa.ForeignKeyConstraint( - ["transfer_status_id"], - ["transfer_status.transfer_status_id"], - ), - sa.PrimaryKeyConstraint("transfer_id"), - sa.UniqueConstraint("transfer_id"), - comment="Records of transfer from Organization to Organization", - ) - op.create_table( - "user_role", - sa.Column( - "user_role_id", - sa.Integer(), - autoincrement=True, - nullable=False, - comment="Unique ID for the user role", - ), - sa.Column( - "user_profile_id", - sa.Integer(), - nullable=True, - comment="Foreign key to user_profile", - ), - sa.Column( - "role_id", sa.Integer(), nullable=True, comment="Foreign key to role" - ), - sa.Column( - "create_date", - sa.TIMESTAMP(timezone=True), - server_default=sa.text("now()"), - nullable=True, - comment="Date and time (UTC) when the physical record was created in the database.", - ), - sa.Column( - "update_date", - sa.TIMESTAMP(timezone=True), - server_default=sa.text("now()"), - nullable=True, - comment="Date and time (UTC) when the physical record was updated in the database. 
It will be the same as the create_date until the record is first updated after creation.", - ), - sa.Column( - "create_user", - sa.String(), - nullable=True, - comment="The user who created this record in the database.", - ), - sa.Column( - "update_user", - sa.String(), - nullable=True, - comment="The user who last updated this record in the database.", - ), - sa.ForeignKeyConstraint( - ["role_id"], - ["role.role_id"], - ), - sa.ForeignKeyConstraint( - ["user_profile_id"], - ["user_profile.user_profile_id"], - ), - sa.PrimaryKeyConstraint("user_role_id"), - sa.UniqueConstraint( - "user_profile_id", "role_id", name="user_role_unique_constraint" - ), - comment="Contains the user and role relationships", - ) - op.create_table( - "issuance_history", - sa.Column( - "issuance_history_id", - sa.Integer(), - autoincrement=True, - nullable=False, - comment="Unique identifier for the issuance", - ), - sa.Column( - "compliance_units", - sa.BigInteger(), - nullable=True, - comment="Issued compliance units record", - ), - sa.Column("issuance_id", sa.Integer(), nullable=True), - sa.Column("organization_id", sa.Integer(), nullable=True), - sa.Column("transaction_id", sa.Integer(), nullable=True), - sa.Column( - "transaction_effective_date", - sa.DateTime(), - nullable=True, - comment="Transaction Effective date", - ), - sa.Column( - "create_date", - sa.TIMESTAMP(timezone=True), - server_default=sa.text("now()"), - nullable=True, - comment="Date and time (UTC) when the physical record was created in the database.", - ), - sa.Column( - "update_date", - sa.TIMESTAMP(timezone=True), - server_default=sa.text("now()"), - nullable=True, - comment="Date and time (UTC) when the physical record was updated in the database. It will be the same as the create_date until the record is first updated after creation.", - ), - sa.Column( - "create_user", - sa.String(), - nullable=True, - comment="The user who created this record in the database.", - ), - sa.Column( - "update_user", - sa.String(), - nullable=True, - comment="The user who last updated this record in the database.", - ), - sa.Column( - "effective_date", - sa.Date(), - nullable=True, - comment="The calendar date the value became valid.", - ), - sa.Column( - "expiration_date", - sa.Date(), - nullable=True, - comment="The calendar date the value is no longer valid.", - ), - sa.ForeignKeyConstraint( - ["issuance_id"], - ["issuance.issuance_id"], - ), - sa.ForeignKeyConstraint( - ["organization_id"], - ["organization.organization_id"], - ), - sa.ForeignKeyConstraint( - ["transaction_id"], - ["transaction.transaction_id"], - ), - sa.PrimaryKeyConstraint("issuance_history_id"), - sa.UniqueConstraint("issuance_history_id"), - comment="History record for issuance from government to Organization", - ) - op.create_table( - "transfer_history", - sa.Column( - "transfer_history_id", - sa.Integer(), - autoincrement=True, - nullable=False, - comment="Unique identifier for the org to org transfer record", - ), - sa.Column("transfer_id", sa.Integer(), nullable=True), - sa.Column("from_organization_id", sa.Integer(), nullable=True), - sa.Column("to_organization_id", sa.Integer(), nullable=True), - sa.Column("transaction_id", sa.Integer(), nullable=True), - sa.Column( - "transaction_effective_date", - sa.DateTime(), - nullable=True, - comment="Transaction effective date", - ), - sa.Column("transfer_status_id", sa.Integer(), nullable=True), - sa.Column("transfer_category_id", sa.Integer(), nullable=True), - sa.Column( - "create_date", - sa.TIMESTAMP(timezone=True), - 
server_default=sa.text("now()"), - nullable=True, - comment="Date and time (UTC) when the physical record was created in the database.", - ), - sa.Column( - "update_date", - sa.TIMESTAMP(timezone=True), - server_default=sa.text("now()"), - nullable=True, - comment="Date and time (UTC) when the physical record was updated in the database. It will be the same as the create_date until the record is first updated after creation.", - ), - sa.Column( - "create_user", - sa.String(), - nullable=True, - comment="The user who created this record in the database.", - ), - sa.Column( - "update_user", - sa.String(), - nullable=True, - comment="The user who last updated this record in the database.", - ), - sa.Column( - "effective_date", - sa.Date(), - nullable=True, - comment="The calendar date the value became valid.", - ), - sa.Column( - "expiration_date", - sa.Date(), - nullable=True, - comment="The calendar date the value is no longer valid.", - ), - sa.ForeignKeyConstraint( - ["from_organization_id"], - ["organization.organization_id"], - ), - sa.ForeignKeyConstraint( - ["to_organization_id"], - ["organization.organization_id"], - ), - sa.ForeignKeyConstraint( - ["transaction_id"], - ["transaction.transaction_id"], - ), - sa.ForeignKeyConstraint( - ["transfer_category_id"], - ["category.category_id"], - ), - sa.ForeignKeyConstraint( - ["transfer_id"], - ["transfer.transfer_id"], - ), - sa.ForeignKeyConstraint( - ["transfer_status_id"], - ["transfer_status.transfer_status_id"], - ), - sa.PrimaryKeyConstraint("transfer_history_id"), - comment="Records of transfer from Organization to Organization", - ) - # ### end Alembic commands ### - - -def downgrade() -> None: - # ### commands auto generated by Alembic - please adjust! ### - op.drop_table("transfer_history") - op.drop_table("issuance_history") - op.drop_table("user_role") - op.drop_table("transfer") - op.drop_table("notification_message") - op.drop_table("notification_channel_subscription") - op.drop_table("issuance") - op.drop_table("user_profile") - op.drop_table("transaction") - op.drop_table("organization") - op.drop_table("user_login_history") - op.drop_table("transfer_status") - op.drop_table("transaction_type") - op.drop_table("role") - op.drop_table("organization_type") - op.drop_table("organization_status") - op.drop_table("organization_attorney_address") - op.drop_table("organization_address") - op.drop_table("notification_type") - op.drop_table("notification_channel") - op.drop_table("comment") - op.drop_table("category") - # ### end Alembic commands ### diff --git a/backend/lcfs/db/migrations/versions/2023-11-23-13-17_9d24914c3013.py b/backend/lcfs/db/migrations/versions/2023-11-23-13-17_9d24914c3013.py deleted file mode 100644 index cb1462b40..000000000 --- a/backend/lcfs/db/migrations/versions/2023-11-23-13-17_9d24914c3013.py +++ /dev/null @@ -1,44 +0,0 @@ -"""fix unique constraints and enums - -Revision ID: 9d24914c3013 -Revises: 4bf9af8c64f9 -Create Date: 2023-11-23 13:17:53.957485 - -""" -import sqlalchemy as sa -from alembic import op - -# revision identifiers, used by Alembic. 
-revision = "9d24914c3013" -down_revision = "4bf9af8c64f9" -branch_labels = None -depends_on = None - - -def upgrade() -> None: - op.create_unique_constraint("uq_category_id", "category", ["category_id"]) - op.create_unique_constraint("uq_comment_id", "comment", ["comment_id"]) - op.create_unique_constraint("uq_issuance_id", "issuance", ["issuance_id"]) - op.create_unique_constraint("uq_issuance_history_id", "issuance_history", ["issuance_history_id"]) - op.create_unique_constraint("uq_transaction_id", "transaction", ["transaction_id"]) - op.create_unique_constraint("uq_transfer_id", "transfer", ["transfer_id"]) - - -def downgrade() -> None: - op.drop_constraint("uq_transfer_id", "transfer", type_="unique") - op.drop_constraint("uq_transaction_id", "transaction", type_="unique") - op.drop_constraint("uq_issuance_history_id", "issuance_history", type_="unique") - op.drop_constraint("uq_issuance_id", "issuance", type_="unique") - op.drop_constraint("uq_comment_id", "comment", type_="unique") - op.drop_constraint("uq_category_id", "category", type_="unique") - - # Manually drop ENUM types - op.execute('DROP TYPE IF EXISTS channel_enum CASCADE;') - op.execute('DROP TYPE IF EXISTS notification_type_enum CASCADE;') - op.execute('DROP TYPE IF EXISTS org_status_enum CASCADE;') - op.execute('DROP TYPE IF EXISTS org_type_enum CASCADE;') - op.execute('DROP TYPE IF EXISTS role_enum CASCADE;') - op.execute('DROP TYPE IF EXISTS transaction_type_enum CASCADE;') - op.execute('DROP TYPE IF EXISTS transfer_type_enum CASCADE;') - - diff --git a/backend/lcfs/db/migrations/versions/2024-11-27-17-35_a2240b2b5629.py b/backend/lcfs/db/migrations/versions/2024-11-27-17-35_a2240b2b5629.py new file mode 100644 index 000000000..e7e768fa3 --- /dev/null +++ b/backend/lcfs/db/migrations/versions/2024-11-27-17-35_a2240b2b5629.py @@ -0,0 +1,5549 @@ +"""initial migration + +Revision ID: a2240b2b5629 +Revises: +Create Date: 2024-11-27 17:35:34.810640 + +""" + +import sqlalchemy as sa +from alembic import op +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = "a2240b2b5629" +down_revision = None +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + sa.Enum( + "GOVERNMENT", + "ADMINISTRATOR", + "ANALYST", + "COMPLIANCE_MANAGER", + "DIRECTOR", + "SUPPLIER", + "MANAGE_USERS", + "TRANSFER", + "COMPLIANCE_REPORTING", + "SIGNING_AUTHORITY", + "READ_ONLY", + name="role_enum", + ).create(op.get_bind()) + sa.Enum( + "Draft", + "Deleted", + "Sent", + "Submitted", + "Recommended", + "Recorded", + "Refused", + "Declined", + "Rescinded", + name="transfer_type_enum", + ).create(op.get_bind()) + sa.Enum("A", "B", "C", "D", name="transfercategoryenum").create(op.get_bind()) + sa.Enum("Record", "Refuse", name="transfer_recommendation_enum").create( + op.get_bind() + ) + sa.Enum( + "Adjustment", "Reserved", "Released", name="transaction_action_enum" + ).create(op.get_bind()) + sa.Enum( + "TRANSFER_PARTNER_UPDATE", + "TRANSFER_DIRECTOR_REVIEW", + "INITIATIVE_APPROVED", + "INITIATIVE_DA_REQUEST", + "SUPPLEMENTAL_REQUESTED", + "DIRECTOR_ASSESSMENT", + name="notification_type_enum", + ).create(op.get_bind()) + sa.Enum( + "fuel_supplier", + "electricity_supplier", + "broker", + "utilities", + name="org_type_enum", + ).create(op.get_bind()) + sa.Enum( + "Unregistered", "Registered", "Suspended", "Canceled", name="org_status_enum" + ).create(op.get_bind()) + sa.Enum("EMAIL", "IN_APP", name="channel_enum").create(op.get_bind()) + sa.Enum( + "Draft", + "Recommended", + "Approved", + "Deleted", + name="initiative_agreement_type_enum", + ).create(op.get_bind()) + sa.Enum("Draft", "Approved", "Deleted", name="fuel_code_status_enum").create( + op.get_bind() + ) + sa.Enum("Gasoline", "Diesel", "Jet fuel", name="fuel_category_enum").create( + op.get_bind() + ) + sa.Enum("Received", "Transferred", name="receivedortransferredenum").create( + op.get_bind() + ) + sa.Enum("Q1", "Q2", "Q3", "Q4", name="quarter").create(op.get_bind()) + sa.Enum("CREATE", "UPDATE", "DELETE", name="actiontypeenum").create(op.get_bind()) + sa.Enum("SUPPLIER", "GOVERNMENT", name="usertypeenum").create(op.get_bind()) + sa.Enum( + "Litres", "Kilograms", "Kilowatt_hour", "Cubic_metres", name="quantityunitsenum" + ).create(op.get_bind()) + sa.Enum("Single port", "Dual port", name="ports_enum").create(op.get_bind()) + sa.Enum( + "Draft", + "Submitted", + "Recommended_by_analyst", + "Recommended_by_manager", + "Assessed", + "ReAssessed", + name="compliancereportstatusenum", + ).create(op.get_bind()) + sa.Enum("ANNUAL", "QUARTERLY", name="reportingfrequency").create(op.get_bind()) + sa.Enum( + "SUPPLIER_SUPPLEMENTAL", + "GOVERNMENT_REASSESSMENT", + name="supplementalinitiatortype", + ).create(op.get_bind()) + sa.Enum("Director", "Analyst", "Compliance Manager", name="audience_scope").create( + op.get_bind() + ) + sa.Enum( + "Draft", "Recommended", "Approved", "Deleted", name="admin_adjustment_type_enum" + ).create(op.get_bind()) + op.create_table( + "admin_adjustment_status", + sa.Column( + "admin_adjustment_status_id", + sa.Integer(), + autoincrement=True, + nullable=False, + ), + sa.Column( + "status", + postgresql.ENUM( + "Draft", + "Recommended", + "Approved", + "Deleted", + name="admin_adjustment_type_enum", + create_type=False, + ), + nullable=True, + comment="Admin Adjustment Status", + ), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. 
It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.Column( + "display_order", + sa.Integer(), + nullable=True, + comment="Relative rank in display sorting order", + ), + sa.PrimaryKeyConstraint( + "admin_adjustment_status_id", name=op.f("pk_admin_adjustment_status") + ), + comment="Represents a Admin Adjustment Status", + ) + op.create_table( + "allocation_transaction_type", + sa.Column( + "allocation_transaction_type_id", + sa.Integer(), + autoincrement=True, + nullable=False, + comment="Unique identifier for the allocation transaction type", + ), + sa.Column( + "type", + sa.String(), + nullable=False, + comment="Type of the allocation transaction", + ), + sa.Column( + "description", + sa.String(), + nullable=True, + comment="Description of the allocation transaction type", + ), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "effective_date", + sa.Date(), + nullable=True, + comment="The calendar date the value became valid.", + ), + sa.Column( + "effective_status", + sa.Boolean(), + nullable=False, + comment="True if the value is currently valid, False if it is no longer valid.", + ), + sa.Column( + "expiration_date", + sa.Date(), + nullable=True, + comment="The calendar date the value is no longer valid.", + ), + sa.Column( + "display_order", + sa.Integer(), + nullable=True, + comment="Relative rank in display sorting order", + ), + sa.PrimaryKeyConstraint( + "allocation_transaction_type_id", + name=op.f("pk_allocation_transaction_type"), + ), + comment="Lookup table for allocation transaction types.", + ) + op.create_table( + "audit_log", + sa.Column( + "audit_log_id", + sa.Integer(), + autoincrement=True, + nullable=False, + comment="Unique identifier for each audit log entry.", + ), + sa.Column( + "table_name", + sa.Text(), + nullable=False, + comment="Name of the table where the action occurred.", + ), + sa.Column( + "operation", + sa.Text(), + nullable=False, + comment="Type of operation: 'INSERT', 'UPDATE', or 'DELETE'.", + ), + sa.Column( + "row_id", + postgresql.JSONB(astext_type=sa.Text()), + nullable=False, + comment="Primary key of the affected row, stored as JSONB to support composite keys.", + ), + sa.Column( + "old_values", + postgresql.JSONB(astext_type=sa.Text()), + nullable=True, + comment="Previous values before the operation.", + ), + sa.Column( + "new_values", + postgresql.JSONB(astext_type=sa.Text()), + nullable=True, + comment="New values after the operation.", + ), + sa.Column( + "delta", + postgresql.JSONB(astext_type=sa.Text()), + nullable=True, + comment="JSONB delta of the changes.", + ), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + 
sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.PrimaryKeyConstraint("audit_log_id", name=op.f("pk_audit_log")), + comment="Track changes in defined tables.", + ) + op.create_index( + "idx_audit_log_create_date", "audit_log", ["create_date"], unique=False + ) + op.create_index( + "idx_audit_log_create_user", "audit_log", ["create_user"], unique=False + ) + op.create_index( + "idx_audit_log_delta", + "audit_log", + ["delta"], + unique=False, + postgresql_using="gin", + ) + op.create_index("idx_audit_log_operation", "audit_log", ["operation"], unique=False) + op.create_table( + "compliance_period", + sa.Column( + "compliance_period_id", + sa.Integer(), + autoincrement=True, + nullable=False, + comment="Unique identifier for the compliance period", + ), + sa.Column( + "description", + sa.String(), + nullable=False, + comment="Year description for the compliance period", + ), + sa.Column( + "display_order", + sa.Integer(), + nullable=True, + comment="Display order for the compliance period", + ), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "effective_date", + sa.Date(), + nullable=True, + comment="The calendar date the value became valid.", + ), + sa.Column( + "effective_status", + sa.Boolean(), + nullable=False, + comment="True if the value is currently valid, False if it is no longer valid.", + ), + sa.Column( + "expiration_date", + sa.Date(), + nullable=True, + comment="The calendar date the value is no longer valid.", + ), + sa.PrimaryKeyConstraint( + "compliance_period_id", name=op.f("pk_compliance_period") + ), + comment="The compliance year associated with compliance reports and other related tables. 
The description field should be the year.", + ) + op.create_table( + "compliance_report_status", + sa.Column( + "compliance_report_status_id", + sa.Integer(), + autoincrement=True, + nullable=False, + comment="Unique identifier for the compliance report status", + ), + sa.Column( + "display_order", + sa.Integer(), + nullable=True, + comment="Display order for the compliance report status", + ), + sa.Column( + "status", + postgresql.ENUM( + "Draft", + "Submitted", + "Recommended_by_analyst", + "Recommended_by_manager", + "Assessed", + "ReAssessed", + name="compliancereportstatusenum", + create_type=False, + ), + nullable=False, + comment="Status of the compliance report", + ), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "effective_date", + sa.Date(), + nullable=True, + comment="The calendar date the value became valid.", + ), + sa.Column( + "effective_status", + sa.Boolean(), + nullable=False, + comment="True if the value is currently valid, False if it is no longer valid.", + ), + sa.Column( + "expiration_date", + sa.Date(), + nullable=True, + comment="The calendar date the value is no longer valid.", + ), + sa.PrimaryKeyConstraint( + "compliance_report_status_id", name=op.f("pk_compliance_report_status") + ), + comment="Lookup table for compliance reports status", + ) + op.create_table( + "document", + sa.Column( + "document_id", + sa.Integer(), + autoincrement=True, + nullable=False, + comment="Unique identifier for the document", + ), + sa.Column("file_key", sa.String(), nullable=False), + sa.Column("file_name", sa.String(), nullable=False), + sa.Column("file_size", sa.Integer(), nullable=False), + sa.Column("mime_type", sa.String(), nullable=False), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. 
It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.PrimaryKeyConstraint("document_id", name=op.f("pk_document")), + sa.UniqueConstraint("document_id", name=op.f("uq_document_document_id")), + comment="Main document table for storing base document information", + ) + op.create_table( + "end_use_type", + sa.Column("end_use_type_id", sa.Integer(), nullable=False), + sa.Column("type", sa.Text(), nullable=False), + sa.Column("sub_type", sa.Text(), nullable=True), + sa.Column("intended_use", sa.Boolean(), nullable=False), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.Column( + "display_order", + sa.Integer(), + nullable=True, + comment="Relative rank in display sorting order", + ), + sa.PrimaryKeyConstraint("end_use_type_id", name=op.f("pk_end_use_type")), + comment="Represents a end use types for various fuel types and categories", + ) + op.create_table( + "end_user_type", + sa.Column( + "end_user_type_id", + sa.Integer(), + autoincrement=True, + nullable=False, + comment="The unique identifier for the end user type.", + ), + sa.Column( + "type_name", + sa.String(), + nullable=False, + comment="The name of the end user type.", + ), + sa.Column("intended_use", sa.Boolean(), nullable=False), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. 
It will be the same as the create_date until the record is first updated after creation.", + ), + sa.PrimaryKeyConstraint("end_user_type_id", name=op.f("pk_end_user_type")), + sa.UniqueConstraint("type_name", name=op.f("uq_end_user_type_type_name")), + comment="Types of intended users for supply equipment", + ) + op.create_table( + "expected_use_type", + sa.Column( + "expected_use_type_id", + sa.Integer(), + autoincrement=True, + nullable=False, + comment="Unique identifier for the expected use type", + ), + sa.Column( + "name", sa.Text(), nullable=False, comment="Name of the expected use type" + ), + sa.Column( + "description", + sa.Text(), + nullable=True, + comment="Description of the expected use type", + ), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.Column( + "display_order", + sa.Integer(), + nullable=True, + comment="Relative rank in display sorting order", + ), + sa.Column( + "effective_date", + sa.Date(), + nullable=True, + comment="The calendar date the value became valid.", + ), + sa.Column( + "effective_status", + sa.Boolean(), + nullable=False, + comment="True if the value is currently valid, False if it is no longer valid.", + ), + sa.Column( + "expiration_date", + sa.Date(), + nullable=True, + comment="The calendar date the value is no longer valid.", + ), + sa.PrimaryKeyConstraint( + "expected_use_type_id", name=op.f("pk_expected_use_type") + ), + comment="Represents an expected use type for other fuels", + ) + op.create_table( + "final_supply_equipment_reg_number", + sa.Column( + "organization_code", + sa.String(), + nullable=False, + comment="The organization code for the final supply equipment.", + ), + sa.Column( + "postal_code", + sa.String(), + nullable=False, + comment="The postal code for the final supply equipment.", + ), + sa.Column( + "current_sequence_number", + sa.Integer(), + nullable=False, + comment="Current sequence number used for the postal code.", + ), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. 
It will be the same as the create_date until the record is first updated after creation.", + ), + sa.PrimaryKeyConstraint( + "organization_code", + "postal_code", + name=op.f("pk_final_supply_equipment_reg_number"), + ), + comment="Tracks the highest sequence numbers for final supply equipment registration numbers by postal code and organization code.", + ) + op.create_table( + "fuel_category", + sa.Column( + "fuel_category_id", + sa.Integer(), + autoincrement=True, + nullable=False, + comment="Unique identifier for the fuel category", + ), + sa.Column( + "category", + postgresql.ENUM( + "Gasoline", + "Diesel", + "Jet fuel", + name="fuel_category_enum", + create_type=False, + ), + nullable=False, + comment="Name of the fuel category", + ), + sa.Column( + "description", + sa.Text(), + nullable=True, + comment="Description of the fuel category", + ), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.Column( + "display_order", + sa.Integer(), + nullable=True, + comment="Relative rank in display sorting order", + ), + sa.Column( + "effective_date", + sa.Date(), + nullable=True, + comment="The calendar date the value became valid.", + ), + sa.Column( + "effective_status", + sa.Boolean(), + nullable=False, + comment="True if the value is currently valid, False if it is no longer valid.", + ), + sa.Column( + "expiration_date", + sa.Date(), + nullable=True, + comment="The calendar date the value is no longer valid.", + ), + sa.PrimaryKeyConstraint("fuel_category_id", name=op.f("pk_fuel_category")), + comment="Represents a static table for fuel categories", + ) + op.create_table( + "fuel_code_prefix", + sa.Column( + "fuel_code_prefix_id", sa.Integer(), autoincrement=True, nullable=False + ), + sa.Column("prefix", sa.Text(), nullable=False), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. 
It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.Column( + "display_order", + sa.Integer(), + nullable=True, + comment="Relative rank in display sorting order", + ), + sa.PrimaryKeyConstraint( + "fuel_code_prefix_id", name=op.f("pk_fuel_code_prefix") + ), + comment="Represents a Fuel code prefix", + ) + op.create_table( + "fuel_code_status", + sa.Column( + "fuel_code_status_id", sa.Integer(), autoincrement=True, nullable=False + ), + sa.Column( + "status", + postgresql.ENUM( + "Draft", + "Approved", + "Deleted", + name="fuel_code_status_enum", + create_type=False, + ), + nullable=True, + comment="Fuel code status", + ), + sa.Column( + "description", + sa.String(length=500), + nullable=True, + comment="Organization description", + ), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.Column( + "display_order", + sa.Integer(), + nullable=True, + comment="Relative rank in display sorting order", + ), + sa.PrimaryKeyConstraint( + "fuel_code_status_id", name=op.f("pk_fuel_code_status") + ), + comment="Represents fuel code status", + ) + op.create_table( + "fuel_measurement_type", + sa.Column( + "fuel_measurement_type_id", + sa.Integer(), + autoincrement=True, + nullable=False, + comment="Unique identifier for the fuel measurement type", + ), + sa.Column( + "type", + sa.String(), + nullable=False, + comment="Name of the fuel measurement type", + ), + sa.Column( + "description", + sa.Text(), + nullable=True, + comment="Description of the fuel measurement type", + ), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. 
It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.Column( + "display_order", + sa.Integer(), + nullable=True, + comment="Relative rank in display sorting order", + ), + sa.PrimaryKeyConstraint( + "fuel_measurement_type_id", name=op.f("pk_fuel_measurement_type") + ), + comment="Fuel measurement type", + ) + op.create_table( + "initiative_agreement_status", + sa.Column( + "initiative_agreement_status_id", + sa.Integer(), + autoincrement=True, + nullable=False, + ), + sa.Column( + "status", + postgresql.ENUM( + "Draft", + "Recommended", + "Approved", + "Deleted", + name="initiative_agreement_type_enum", + create_type=False, + ), + nullable=True, + comment="Initiative Agreement Status", + ), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.Column( + "display_order", + sa.Integer(), + nullable=True, + comment="Relative rank in display sorting order", + ), + sa.PrimaryKeyConstraint( + "initiative_agreement_status_id", + name=op.f("pk_initiative_agreement_status"), + ), + comment="Represents a InitiativeAgreement Status", + ) + op.create_table( + "internal_comment", + sa.Column( + "internal_comment_id", + sa.Integer(), + autoincrement=True, + nullable=False, + comment="Primary key, unique identifier for each internal comment.", + ), + sa.Column("comment", sa.Text(), nullable=True, comment="Text of the comment."), + sa.Column( + "audience_scope", + postgresql.ENUM( + "Director", + "Analyst", + "Compliance Manager", + name="audience_scope", + create_type=False, + ), + nullable=False, + comment="Defines the audience scope for the comment, e.g., Director, Analyst, Compliance Manager", + ), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. 
It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.PrimaryKeyConstraint( + "internal_comment_id", name=op.f("pk_internal_comment") + ), + sa.UniqueConstraint( + "internal_comment_id", name=op.f("uq_internal_comment_internal_comment_id") + ), + comment="Stores internal comments with scope and related metadata.", + ) + op.create_table( + "level_of_equipment", + sa.Column( + "level_of_equipment_id", sa.Integer(), autoincrement=True, nullable=False + ), + sa.Column("name", sa.String(length=500), nullable=False), + sa.Column("description", sa.Text(), nullable=True), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.Column( + "display_order", + sa.Integer(), + nullable=True, + comment="Relative rank in display sorting order", + ), + sa.PrimaryKeyConstraint( + "level_of_equipment_id", name=op.f("pk_level_of_equipment") + ), + comment="Represents a level of equipment for fuel supply equipment", + ) + op.create_table( + "notification_channel", + sa.Column( + "notification_channel_id", sa.Integer(), autoincrement=True, nullable=False + ), + sa.Column( + "channel_name", + postgresql.ENUM("EMAIL", "IN_APP", name="channel_enum", create_type=False), + nullable=False, + ), + sa.Column("enabled", sa.Boolean(), nullable=True), + sa.Column("subscribe_by_default", sa.Boolean(), nullable=True), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database.
It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.PrimaryKeyConstraint( + "notification_channel_id", name=op.f("pk_notification_channel") + ), + comment="Tracks the state and defaults for communication channels", + ) + op.create_table( + "notification_type", + sa.Column( + "notification_type_id", sa.Integer(), autoincrement=True, nullable=False + ), + sa.Column( + "name", + postgresql.ENUM( + "TRANSFER_PARTNER_UPDATE", + "TRANSFER_DIRECTOR_REVIEW", + "INITIATIVE_APPROVED", + "INITIATIVE_DA_REQUEST", + "SUPPLEMENTAL_REQUESTED", + "DIRECTOR_ASSESSMENT", + name="notification_type_enum", + create_type=False, + ), + nullable=False, + ), + sa.Column("description", sa.Text(), nullable=True), + sa.Column("email_content", sa.Text(), nullable=True), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.PrimaryKeyConstraint( + "notification_type_id", name=op.f("pk_notification_type") + ), + comment="Represents a Notification type", + ) + op.create_table( + "organization_address", + sa.Column( + "organization_address_id", sa.Integer(), autoincrement=True, nullable=False + ), + sa.Column( + "name", sa.String(length=500), nullable=True, comment="Organization name" + ), + sa.Column("street_address", sa.String(length=500), nullable=True), + sa.Column("address_other", sa.String(length=100), nullable=True), + sa.Column("city", sa.String(length=100), nullable=True), + sa.Column("province_state", sa.String(length=50), nullable=True), + sa.Column("country", sa.String(length=100), nullable=True), + sa.Column("postalCode_zipCode", sa.String(length=10), nullable=True), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. 
It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.Column( + "effective_date", + sa.Date(), + nullable=True, + comment="The calendar date the value became valid.", + ), + sa.Column( + "effective_status", + sa.Boolean(), + nullable=False, + comment="True if the value is currently valid, False if it is no longer valid.", + ), + sa.Column( + "expiration_date", + sa.Date(), + nullable=True, + comment="The calendar date the value is no longer valid.", + ), + sa.PrimaryKeyConstraint( + "organization_address_id", name=op.f("pk_organization_address") + ), + comment="Represents an organization's address.", + ) + op.create_table( + "organization_attorney_address", + sa.Column( + "organization_attorney_address_id", + sa.Integer(), + autoincrement=True, + nullable=False, + ), + sa.Column( + "name", + sa.String(length=500), + nullable=True, + comment="Attorney's Organization name", + ), + sa.Column("street_address", sa.String(length=500), nullable=True), + sa.Column("address_other", sa.String(length=100), nullable=True), + sa.Column("city", sa.String(length=100), nullable=True), + sa.Column("province_state", sa.String(length=50), nullable=True), + sa.Column("country", sa.String(length=100), nullable=True), + sa.Column("postalCode_zipCode", sa.String(length=10), nullable=True), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. 
It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.Column( + "effective_date", + sa.Date(), + nullable=True, + comment="The calendar date the value became valid.", + ), + sa.Column( + "effective_status", + sa.Boolean(), + nullable=False, + comment="True if the value is currently valid, False if it is no longer valid.", + ), + sa.Column( + "expiration_date", + sa.Date(), + nullable=True, + comment="The calendar date the value is no longer valid.", + ), + sa.PrimaryKeyConstraint( + "organization_attorney_address_id", + name=op.f("pk_organization_attorney_address"), + ), + comment="Represents an organization attorney's address.", + ) + op.create_table( + "organization_status", + sa.Column( + "organization_status_id", + sa.Integer(), + autoincrement=True, + nullable=False, + comment="Unique identifier for the organization status", + ), + sa.Column( + "status", + postgresql.ENUM( + "Unregistered", + "Registered", + "Suspended", + "Canceled", + name="org_status_enum", + create_type=False, + ), + nullable=True, + comment="Organization's status", + ), + sa.Column( + "description", + sa.String(length=500), + nullable=True, + comment="Organization status description", + ), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.Column( + "display_order", + sa.Integer(), + nullable=True, + comment="Relative rank in display sorting order", + ), + sa.PrimaryKeyConstraint( + "organization_status_id", name=op.f("pk_organization_status") + ), + comment="Contains the list of organization statuses", + ) + op.create_table( + "organization_type", + sa.Column( + "organization_type_id", + sa.Integer(), + autoincrement=True, + nullable=False, + comment="Unique identifier for the organization_type", + ), + sa.Column( + "org_type", + postgresql.ENUM( + "fuel_supplier", + "electricity_supplier", + "broker", + "utilities", + name="org_type_enum", + create_type=False, + ), + nullable=True, + comment="Organization's type", + ), + sa.Column( + "description", + sa.String(length=500), + nullable=True, + comment="Organization type description", + ), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database.
It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.Column( + "display_order", + sa.Integer(), + nullable=True, + comment="Relative rank in display sorting order", + ), + sa.PrimaryKeyConstraint( + "organization_type_id", name=op.f("pk_organization_type") + ), + comment="Represents an organization type", + ) + op.create_table( + "provision_of_the_act", + sa.Column( + "provision_of_the_act_id", + sa.Integer(), + autoincrement=True, + nullable=False, + comment="Unique identifier for the provision of the act", + ), + sa.Column( + "name", + sa.String(length=100), + nullable=False, + comment="Name of the Provision. e.g. Section 19 (a)", + ), + sa.Column( + "description", + sa.String(length=1000), + nullable=False, + comment="Description of the provision. This is the displayed name. e.g. Prescribed Carbon Intensity, Approved Fuel Code.", + ), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.Column( + "display_order", + sa.Integer(), + nullable=True, + comment="Relative rank in display sorting order", + ), + sa.Column( + "effective_date", + sa.Date(), + nullable=True, + comment="The calendar date the value became valid.", + ), + sa.Column( + "effective_status", + sa.Boolean(), + nullable=False, + comment="True if the value is currently valid, False if it is no longer valid.", + ), + sa.Column( + "expiration_date", + sa.Date(), + nullable=True, + comment="The calendar date the value is no longer valid.", + ), + sa.PrimaryKeyConstraint( + "provision_of_the_act_id", name=op.f("pk_provision_of_the_act") + ), + sa.UniqueConstraint("name", name=op.f("uq_provision_of_the_act_name")), + comment="List of provisions within Greenhouse Gas Reduction\n (Renewable and Low Carbon Fuel Requirement) Act. e.g. Section 19 (a).\n Used in determining carbon intensity needed for compliance reporting calculation.", + ) + op.create_table( + "role", + sa.Column("role_id", sa.Integer(), autoincrement=True, nullable=False), + sa.Column( + "name", + postgresql.ENUM( + "GOVERNMENT", + "ADMINISTRATOR", + "ANALYST", + "COMPLIANCE_MANAGER", + "DIRECTOR", + "SUPPLIER", + "MANAGE_USERS", + "TRANSFER", + "COMPLIANCE_REPORTING", + "SIGNING_AUTHORITY", + "READ_ONLY", + name="role_enum", + create_type=False, + ), + nullable=False, + comment="Role code. Natural key. Used internally. eg Admin, GovUser, GovDirector, etc", + ), + sa.Column( + "description", + sa.String(length=1000), + nullable=True, + comment="Descriptive text explaining this role.
This is what's shown to the user.", + ), + sa.Column( + "is_government_role", + sa.Boolean(), + nullable=True, + comment="Flag. True if this is a government role (eg. Analyst, Administrator)", + ), + sa.Column( + "display_order", + sa.Integer(), + nullable=True, + comment="Relative rank in display sorting order", + ), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.PrimaryKeyConstraint("role_id", name=op.f("pk_role")), + sa.UniqueConstraint("name", name=op.f("uq_role_name")), + comment="To hold all the available roles and their descriptions.", + ) + op.create_table( + "transfer_category", + sa.Column( + "transfer_category_id", + sa.Integer(), + autoincrement=True, + nullable=False, + comment="Unique identifier for the transfer category", + ), + sa.Column( + "category", + postgresql.ENUM( + "A", "B", "C", "D", name="transfercategoryenum", create_type=False + ), + nullable=True, + comment="Transfer category", + ), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database.
It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.Column( + "effective_date", + sa.Date(), + nullable=True, + comment="The calendar date the value became valid.", + ), + sa.Column( + "effective_status", + sa.Boolean(), + nullable=False, + comment="True if the value is currently valid, False if it is no longer valid.", + ), + sa.Column( + "expiration_date", + sa.Date(), + nullable=True, + comment="The calendar date the value is no longer valid.", + ), + sa.PrimaryKeyConstraint( + "transfer_category_id", name=op.f("pk_transfer_category") + ), + sa.UniqueConstraint( + "transfer_category_id", + name=op.f("uq_transfer_category_transfer_category_id"), + ), + comment="Transfer Category", + ) + op.create_table( + "transfer_status", + sa.Column( + "transfer_status_id", sa.Integer(), autoincrement=True, nullable=False + ), + sa.Column( + "status", + postgresql.ENUM( + "Draft", + "Deleted", + "Sent", + "Submitted", + "Recommended", + "Recorded", + "Refused", + "Declined", + "Rescinded", + name="transfer_type_enum", + create_type=False, + ), + nullable=True, + comment="Transfer Status", + ), + sa.Column( + "visible_to_transferor", + sa.Boolean(), + nullable=True, + comment="Visibility for transferor entities", + ), + sa.Column( + "visible_to_transferee", + sa.Boolean(), + nullable=True, + comment="Visibility for transferee entities", + ), + sa.Column( + "visible_to_government", + sa.Boolean(), + nullable=True, + comment="Visibility for government entities", + ), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.Column( + "display_order", + sa.Integer(), + nullable=True, + comment="Relative rank in display sorting order", + ), + sa.PrimaryKeyConstraint("transfer_status_id", name=op.f("pk_transfer_status")), + comment="Represents a Transfer Status", + ) + op.create_table( + "transport_mode", + sa.Column( + "transport_mode_id", sa.Integer(), autoincrement=True, nullable=False + ), + sa.Column("transport_mode", sa.Text(), nullable=False), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. 
It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.Column( + "display_order", + sa.Integer(), + nullable=True, + comment="Relative rank in display sorting order", + ), + sa.PrimaryKeyConstraint("transport_mode_id", name=op.f("pk_transport_mode")), + comment="Represents a Transport Mode Type", + ) + op.create_table( + "unit_of_measure", + sa.Column("uom_id", sa.Integer(), autoincrement=True, nullable=False), + sa.Column("name", sa.Text(), nullable=False), + sa.Column("description", sa.Text(), nullable=True), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.Column( + "display_order", + sa.Integer(), + nullable=True, + comment="Relative rank in display sorting order", + ), + sa.PrimaryKeyConstraint("uom_id", name=op.f("pk_unit_of_measure")), + comment="Units used to measure energy densities", + ) + op.create_table( + "user_login_history", + sa.Column("user_login_history_id", sa.Integer(), nullable=False), + sa.Column( + "keycloak_email", + sa.String(), + nullable=False, + comment="Keycloak email address to associate on first login.", + ), + sa.Column( + "external_username", + sa.String(length=150), + nullable=True, + comment="BCeID or IDIR username", + ), + sa.Column( + "keycloak_user_id", + sa.String(length=150), + nullable=True, + comment="This is the unique id returned from Keycloak and is the main mapping key between the LCFS user and the Keycloak user. The identity provider type will be appended as a suffix after an @ symbol. For ex. asdf1234@bceidbasic or asdf1234@idir", + ), + sa.Column( + "is_login_successful", + sa.Boolean(), + nullable=True, + comment="True if this login attempt was successful, false on failure.", + ), + sa.Column( + "login_error_message", + sa.String(length=500), + nullable=True, + comment="Error text on unsuccessful login attempt.", + ), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. 
It will be the same as the create_date until the record is first updated after creation.", + ), + sa.PrimaryKeyConstraint( + "user_login_history_id", name=op.f("pk_user_login_history") + ), + comment="Keeps track of all user login attempts", + ) + op.create_table( + "fuel_type", + sa.Column("fuel_type_id", sa.Integer(), autoincrement=True, nullable=False), + sa.Column("fuel_type", sa.Text(), nullable=False), + sa.Column( + "fossil_derived", + sa.Boolean(), + nullable=True, + comment="Indicates whether the fuel is fossil-derived", + ), + sa.Column( + "other_uses_fossil_derived", + sa.Boolean(), + nullable=True, + comment="Indicates whether the fuel is fossil-derived for other uses", + ), + sa.Column("provision_1_id", sa.Integer(), nullable=True), + sa.Column("provision_2_id", sa.Integer(), nullable=True), + sa.Column( + "default_carbon_intensity", + sa.Numeric(precision=10, scale=2), + nullable=True, + comment="Carbon intensities: default & prescribed (gCO2e/MJ)", + ), + sa.Column( + "units", + postgresql.ENUM( + "Litres", + "Kilograms", + "Kilowatt_hour", + "Cubic_metres", + name="quantityunitsenum", + create_type=False, + ), + nullable=False, + comment="Units of fuel quantity", + ), + sa.Column( + "unrecognized", + sa.Boolean(), + nullable=False, + comment="Indicates if the fuel type is unrecognized", + ), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.Column( + "display_order", + sa.Integer(), + nullable=True, + comment="Relative rank in display sorting order", + ), + sa.ForeignKeyConstraint( + ["provision_1_id"], + ["provision_of_the_act.provision_of_the_act_id"], + name=op.f("fk_fuel_type_provision_1_id_provision_of_the_act"), + ), + sa.ForeignKeyConstraint( + ["provision_2_id"], + ["provision_of_the_act.provision_of_the_act_id"], + name=op.f("fk_fuel_type_provision_2_id_provision_of_the_act"), + ), + sa.PrimaryKeyConstraint("fuel_type_id", name=op.f("pk_fuel_type")), + comment="Represents a Fuel Type", + ) + op.create_table( + "organization", + sa.Column( + "organization_id", + sa.Integer(), + autoincrement=True, + nullable=False, + comment="Unique identifier for the organization", + ), + sa.Column( + "organization_code", + sa.String(length=4), + nullable=False, + comment="Unique 4-character alphanumeric ID", + ), + sa.Column( + "name", + sa.String(length=500), + nullable=True, + comment="Organization's legal name", + ), + sa.Column( + "operating_name", + sa.String(length=500), + nullable=True, + comment="Organization's Operating name", + ), + sa.Column( + "email", + sa.String(length=255), + nullable=True, + comment="Organization's email address", + ), + sa.Column( + "phone", + sa.String(length=50), + nullable=True, + comment="Organization's phone number", + ), + sa.Column( + "edrms_record", + sa.String(length=100), + nullable=True, + comment="Organization's EDRMS record 
number", + ), + sa.Column( + "total_balance", + sa.BigInteger(), + server_default="0", + nullable=False, + comment="The total balance of compliance units for the specified organization.", + ), + sa.Column( + "reserved_balance", + sa.BigInteger(), + server_default="0", + nullable=False, + comment="The reserved balance of compliance units for the specified organization.", + ), + sa.Column( + "count_transfers_in_progress", + sa.Integer(), + server_default="0", + nullable=False, + comment="The count of transfers in progress for the specified organization.", + ), + sa.Column("organization_status_id", sa.Integer(), nullable=True), + sa.Column( + "organization_type_id", + sa.Integer(), + nullable=True, + comment="Organization's type", + ), + sa.Column("organization_address_id", sa.Integer(), nullable=True), + sa.Column("organization_attorney_address_id", sa.Integer(), nullable=True), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.Column( + "effective_date", + sa.Date(), + nullable=True, + comment="The calendar date the value became valid.", + ), + sa.Column( + "effective_status", + sa.Boolean(), + nullable=False, + comment="True if the value is currently valid, False if it is no longer valid.", + ), + sa.Column( + "expiration_date", + sa.Date(), + nullable=True, + comment="The calendar date the value is no longer valid.", + ), + sa.ForeignKeyConstraint( + ["organization_address_id"], + ["organization_address.organization_address_id"], + name=op.f("fk_organization_organization_address_id_organization_address"), + ), + sa.ForeignKeyConstraint( + ["organization_attorney_address_id"], + ["organization_attorney_address.organization_attorney_address_id"], + name=op.f( + "fk_organization_organization_attorney_address_id_organization_attorney_address" + ), + ), + sa.ForeignKeyConstraint( + ["organization_status_id"], + ["organization_status.organization_status_id"], + name=op.f("fk_organization_organization_status_id_organization_status"), + ), + sa.ForeignKeyConstraint( + ["organization_type_id"], + ["organization_type.organization_type_id"], + name=op.f("fk_organization_organization_type_id_organization_type"), + ), + sa.PrimaryKeyConstraint("organization_id", name=op.f("pk_organization")), + sa.UniqueConstraint( + "organization_code", name=op.f("uq_organization_organization_code") + ), + comment="Contains a list of all of the recognized Part 3 fuel suppliers, both past and present, as well as an entry for the government which is also considered an organization.", + ) + op.create_table( + "target_carbon_intensity", + sa.Column( + "target_carbon_intensity_id", + sa.Integer(), + autoincrement=True, + nullable=False, + ), + sa.Column( + "compliance_period_id", + sa.Integer(), + nullable=False, + comment="Compliance period ID", + ), + sa.Column( + "fuel_category_id", sa.Integer(), nullable=False, 
comment="Fuel category ID" + ), + sa.Column( + "target_carbon_intensity", + sa.Numeric(precision=10, scale=2), + nullable=False, + comment="Target Carbon Intensity (gCO2e/MJ)", + ), + sa.Column( + "reduction_target_percentage", + sa.Float(), + nullable=False, + comment="Reduction target percentage", + ), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.Column( + "effective_date", + sa.Date(), + nullable=True, + comment="The calendar date the value became valid.", + ), + sa.Column( + "effective_status", + sa.Boolean(), + nullable=False, + comment="True if the value is currently valid, False if it is no longer valid.", + ), + sa.Column( + "expiration_date", + sa.Date(), + nullable=True, + comment="The calendar date the value is no longer valid.", + ), + sa.ForeignKeyConstraint( + ["compliance_period_id"], + ["compliance_period.compliance_period_id"], + name=op.f( + "fk_target_carbon_intensity_compliance_period_id_compliance_period" + ), + ), + sa.ForeignKeyConstraint( + ["fuel_category_id"], + ["fuel_category.fuel_category_id"], + name=op.f("fk_target_carbon_intensity_fuel_category_id_fuel_category"), + ), + sa.PrimaryKeyConstraint( + "target_carbon_intensity_id", name=op.f("pk_target_carbon_intensity") + ), + comment="Target carbon intensity values for various fuel categories", + ) + op.create_table( + "additional_carbon_intensity", + sa.Column( + "additional_uci_id", sa.Integer(), autoincrement=True, nullable=False + ), + sa.Column("fuel_type_id", sa.Integer(), nullable=True), + sa.Column("end_use_type_id", sa.Integer(), nullable=True), + sa.Column("uom_id", sa.Integer(), nullable=False), + sa.Column("intensity", sa.Numeric(precision=10, scale=2), nullable=False), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. 
It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.Column( + "display_order", + sa.Integer(), + nullable=True, + comment="Relative rank in display sorting order", + ), + sa.ForeignKeyConstraint( + ["end_use_type_id"], + ["end_use_type.end_use_type_id"], + name=op.f("fk_additional_carbon_intensity_end_use_type_id_end_use_type"), + ), + sa.ForeignKeyConstraint( + ["fuel_type_id"], + ["fuel_type.fuel_type_id"], + name=op.f("fk_additional_carbon_intensity_fuel_type_id_fuel_type"), + ), + sa.ForeignKeyConstraint( + ["uom_id"], + ["unit_of_measure.uom_id"], + name=op.f("fk_additional_carbon_intensity_uom_id_unit_of_measure"), + ), + sa.PrimaryKeyConstraint( + "additional_uci_id", name=op.f("pk_additional_carbon_intensity") + ), + comment="Additional carbon intensity attributable to the use of fuel. UCIs are added to the recorded carbon intensity of the fuel to account for additional carbon intensity attributed to the use of the fuel.", + ) + op.create_table( + "energy_density", + sa.Column( + "energy_density_id", sa.Integer(), autoincrement=True, nullable=False + ), + sa.Column("fuel_type_id", sa.Integer(), nullable=False), + sa.Column("density", sa.Numeric(precision=10, scale=2), nullable=False), + sa.Column("uom_id", sa.Integer(), nullable=False), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. 
It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.Column( + "display_order", + sa.Integer(), + nullable=True, + comment="Relative rank in display sorting order", + ), + sa.ForeignKeyConstraint( + ["fuel_type_id"], + ["fuel_type.fuel_type_id"], + name=op.f("fk_energy_density_fuel_type_id_fuel_type"), + ), + sa.ForeignKeyConstraint( + ["uom_id"], + ["unit_of_measure.uom_id"], + name=op.f("fk_energy_density_uom_id_unit_of_measure"), + ), + sa.PrimaryKeyConstraint("energy_density_id", name=op.f("pk_energy_density")), + comment="Represents Energy Density data table", + ) + op.create_table( + "energy_effectiveness_ratio", + sa.Column("eer_id", sa.Integer(), autoincrement=True, nullable=False), + sa.Column( + "fuel_category_id", sa.Integer(), nullable=False, comment="Fuel category" + ), + sa.Column("fuel_type_id", sa.Integer(), nullable=True, comment="Fuel type"), + sa.Column( + "end_use_type_id", sa.Integer(), nullable=True, comment="End use type" + ), + sa.Column( + "ratio", + sa.Float(precision=3, decimal_return_scale=2), + nullable=False, + comment="Energy effectiveness ratio constant", + ), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. 
It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.Column( + "display_order", + sa.Integer(), + nullable=True, + comment="Relative rank in display sorting order", + ), + sa.Column( + "effective_date", + sa.Date(), + nullable=True, + comment="The calendar date the value became valid.", + ), + sa.Column( + "effective_status", + sa.Boolean(), + nullable=False, + comment="True if the value is currently valid, False if it is no longer valid.", + ), + sa.Column( + "expiration_date", + sa.Date(), + nullable=True, + comment="The calendar date the value is no longer valid.", + ), + sa.ForeignKeyConstraint( + ["end_use_type_id"], + ["end_use_type.end_use_type_id"], + name=op.f("fk_energy_effectiveness_ratio_end_use_type_id_end_use_type"), + ), + sa.ForeignKeyConstraint( + ["fuel_category_id"], + ["fuel_category.fuel_category_id"], + name=op.f("fk_energy_effectiveness_ratio_fuel_category_id_fuel_category"), + ), + sa.ForeignKeyConstraint( + ["fuel_type_id"], + ["fuel_type.fuel_type_id"], + name=op.f("fk_energy_effectiveness_ratio_fuel_type_id_fuel_type"), + ), + sa.PrimaryKeyConstraint("eer_id", name=op.f("pk_energy_effectiveness_ratio")), + comment="Energy effectiveness ratio (EERs)", + ) + op.create_table( + "fuel_code", + sa.Column( + "fuel_code_id", + sa.Integer(), + autoincrement=True, + nullable=False, + comment="Unique identifier for the fuel code", + ), + sa.Column( + "fuel_status_id", sa.Integer(), nullable=True, comment="Fuel code status" + ), + sa.Column("prefix_id", sa.Integer(), nullable=False, comment="Prefix ID"), + sa.Column( + "fuel_suffix", sa.String(length=20), nullable=False, comment="Fuel suffix" + ), + sa.Column( + "company", sa.String(length=500), nullable=False, comment="Company name" + ), + sa.Column( + "contact_name", sa.String(length=500), nullable=True, comment="Contact name" + ), + sa.Column( + "contact_email", + sa.String(length=500), + nullable=True, + comment="Contact email", + ), + sa.Column( + "carbon_intensity", sa.Numeric(precision=10, scale=2), nullable=False + ), + sa.Column("edrms", sa.String(length=255), nullable=False, comment="EDRMS #"), + sa.Column( + "last_updated", + sa.DateTime(timezone=True), + server_default=sa.text("now()"), + nullable=False, + comment="Date at which the record was last updated.", + ), + sa.Column( + "application_date", + sa.Date(), + nullable=False, + comment="application recorded date.", + ), + sa.Column( + "approval_date", + sa.Date(), + nullable=True, + comment="Date at which the record was approved.", + ), + sa.Column("fuel_type_id", sa.Integer(), nullable=False, comment="Fuel type ID"), + sa.Column( + "feedstock", sa.String(length=255), nullable=False, comment="Feedstock" + ), + sa.Column( + "feedstock_location", + sa.String(length=1000), + nullable=False, + comment="Feedstock location", + ), + sa.Column( + "feedstock_misc", + sa.String(length=500), + nullable=True, + comment="Feedstock misc", + ), + sa.Column( + "fuel_production_facility_city", + sa.String(length=1000), + nullable=True, + comment="City of the fuel production", + ), + sa.Column( + "fuel_production_facility_province_state", + sa.String(length=1000), + nullable=True, + comment="Province or state of the fuel production", + ), + sa.Column( + 
"fuel_production_facility_country", + sa.String(length=1000), + nullable=True, + comment="Country of the fuel production", + ), + sa.Column( + "facility_nameplate_capacity", + sa.Integer(), + nullable=True, + comment="Nameplate capacity", + ), + sa.Column( + "facility_nameplate_capacity_unit", + postgresql.ENUM( + "Litres", + "Kilograms", + "Kilowatt_hour", + "Cubic_metres", + name="quantityunitsenum", + create_type=False, + ), + nullable=True, + comment="Units of fuel quantity", + ), + sa.Column( + "former_company", + sa.String(length=500), + nullable=True, + comment="Former company", + ), + sa.Column("notes", sa.String(length=1000), nullable=True, comment="Notes"), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.Column( + "effective_date", + sa.Date(), + nullable=True, + comment="The calendar date the value became valid.", + ), + sa.Column( + "effective_status", + sa.Boolean(), + nullable=False, + comment="True if the value is currently valid, False if it is no longer valid.", + ), + sa.Column( + "expiration_date", + sa.Date(), + nullable=True, + comment="The calendar date the value is no longer valid.", + ), + sa.ForeignKeyConstraint( + ["fuel_status_id"], + ["fuel_code_status.fuel_code_status_id"], + name=op.f("fk_fuel_code_fuel_status_id_fuel_code_status"), + ), + sa.ForeignKeyConstraint( + ["fuel_type_id"], + ["fuel_type.fuel_type_id"], + name=op.f("fk_fuel_code_fuel_type_id_fuel_type"), + ), + sa.ForeignKeyConstraint( + ["prefix_id"], + ["fuel_code_prefix.fuel_code_prefix_id"], + name=op.f("fk_fuel_code_prefix_id_fuel_code_prefix"), + ), + sa.PrimaryKeyConstraint("fuel_code_id", name=op.f("pk_fuel_code")), + comment="Contains a list of all of fuel codes", + ) + op.create_table( + "fuel_instance", + sa.Column("fuel_instance_id", sa.Integer(), autoincrement=True, nullable=False), + sa.Column( + "fuel_type_id", sa.Integer(), nullable=False, comment="ID of the fuel type" + ), + sa.Column( + "fuel_category_id", + sa.Integer(), + nullable=False, + comment="ID of the fuel category", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="User who created this record in the database", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="User who last updated this record in the database", + ), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the record was created", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the record was last updated", + ), + sa.ForeignKeyConstraint( + ["fuel_category_id"], + ["fuel_category.fuel_category_id"], + name=op.f("fk_fuel_instance_fuel_category_id_fuel_category"), + ), + sa.ForeignKeyConstraint( + 
["fuel_type_id"], + ["fuel_type.fuel_type_id"], + name=op.f("fk_fuel_instance_fuel_type_id_fuel_type"), + ), + sa.PrimaryKeyConstraint("fuel_instance_id", name=op.f("pk_fuel_instance")), + comment="Table linking fuel types and fuel categories", + ) + op.create_table( + "transaction", + sa.Column( + "transaction_id", + sa.Integer(), + autoincrement=True, + nullable=False, + comment="Unique identifier for the transactions", + ), + sa.Column( + "compliance_units", + sa.BigInteger(), + nullable=True, + comment="Compliance Units", + ), + sa.Column("organization_id", sa.Integer(), nullable=True), + sa.Column( + "transaction_action", + postgresql.ENUM( + "Adjustment", + "Reserved", + "Released", + name="transaction_action_enum", + create_type=False, + ), + nullable=True, + comment="Action type for the transaction, e.g., Adjustment, Reserved, or Released.", + ), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.Column( + "effective_date", + sa.Date(), + nullable=True, + comment="The calendar date the value became valid.", + ), + sa.Column( + "effective_status", + sa.Boolean(), + nullable=False, + comment="True if the value is currently valid, False if it is no longer valid.", + ), + sa.Column( + "expiration_date", + sa.Date(), + nullable=True, + comment="The calendar date the value is no longer valid.", + ), + sa.ForeignKeyConstraint( + ["organization_id"], + ["organization.organization_id"], + name=op.f("fk_transaction_organization_id_organization"), + ), + sa.PrimaryKeyConstraint("transaction_id", name=op.f("pk_transaction")), + sa.UniqueConstraint( + "transaction_id", name=op.f("uq_transaction_transaction_id") + ), + comment="Contains a list of all of the government to organization and Organization to Organization transaction.", + ) + op.create_table( + "user_profile", + sa.Column("user_profile_id", sa.Integer(), autoincrement=True, nullable=False), + sa.Column( + "keycloak_user_id", + sa.String(length=150), + nullable=True, + comment="Unique id returned from Keycloak", + ), + sa.Column( + "keycloak_email", + sa.String(length=255), + nullable=True, + comment="keycloak email address", + ), + sa.Column( + "keycloak_username", + sa.String(length=150), + nullable=False, + comment="keycloak Username", + ), + sa.Column( + "email", + sa.String(length=255), + nullable=True, + comment="Primary email address", + ), + sa.Column( + "title", sa.String(length=100), nullable=True, comment="Professional Title" + ), + sa.Column( + "phone", sa.String(length=50), nullable=True, comment="Primary phone number" + ), + sa.Column( + "mobile_phone", + sa.String(length=50), + nullable=True, + comment="Mobile phone number", + ), + sa.Column( + "first_name", sa.String(length=100), nullable=True, comment="First name" + ), + sa.Column( + "last_name", sa.String(length=100), nullable=True, comment="Last name" + ), + 
sa.Column( + "is_active", sa.Boolean(), nullable=False, comment="Is the user active?" + ), + sa.Column("organization_id", sa.Integer(), nullable=True), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.ForeignKeyConstraint( + ["organization_id"], + ["organization.organization_id"], + name=op.f("fk_user_profile_organization_id_organization"), + ), + sa.PrimaryKeyConstraint("user_profile_id", name=op.f("pk_user_profile")), + sa.UniqueConstraint( + "keycloak_username", name=op.f("uq_user_profile_keycloak_username") + ), + comment="Users who may access the application", + ) + op.create_table( + "admin_adjustment", + sa.Column( + "admin_adjustment_id", + sa.Integer(), + autoincrement=True, + nullable=False, + comment="Unique identifier for the admin_adjustment", + ), + sa.Column( + "compliance_units", + sa.BigInteger(), + nullable=True, + comment="Compliance Units", + ), + sa.Column( + "transaction_effective_date", + sa.DateTime(), + nullable=True, + comment="Transaction effective date", + ), + sa.Column( + "gov_comment", + sa.String(length=1500), + nullable=True, + comment="Comment from the government to the organization", + ), + sa.Column("to_organization_id", sa.Integer(), nullable=True), + sa.Column("transaction_id", sa.Integer(), nullable=True), + sa.Column("current_status_id", sa.Integer(), nullable=True), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database.
It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.Column( + "effective_date", + sa.Date(), + nullable=True, + comment="The calendar date the value became valid.", + ), + sa.Column( + "effective_status", + sa.Boolean(), + nullable=False, + comment="True if the value is currently valid, False if it is no longer valid.", + ), + sa.Column( + "expiration_date", + sa.Date(), + nullable=True, + comment="The calendar date the value is no longer valid.", + ), + sa.ForeignKeyConstraint( + ["current_status_id"], + ["admin_adjustment_status.admin_adjustment_status_id"], + name=op.f("fk_admin_adjustment_current_status_id_admin_adjustment_status"), + ), + sa.ForeignKeyConstraint( + ["to_organization_id"], + ["organization.organization_id"], + name=op.f("fk_admin_adjustment_to_organization_id_organization"), + ), + sa.ForeignKeyConstraint( + ["transaction_id"], + ["transaction.transaction_id"], + name=op.f("fk_admin_adjustment_transaction_id_transaction"), + ), + sa.PrimaryKeyConstraint( + "admin_adjustment_id", name=op.f("pk_admin_adjustment") + ), + sa.UniqueConstraint( + "admin_adjustment_id", name=op.f("uq_admin_adjustment_admin_adjustment_id") + ), + comment="Government to organization compliance units admin adjustment", + ) + op.create_table( + "compliance_report", + sa.Column( + "compliance_report_id", + sa.Integer(), + autoincrement=True, + nullable=False, + comment="Unique identifier for the compliance report version", + ), + sa.Column( + "compliance_period_id", + sa.Integer(), + nullable=False, + comment="Foreign key to the compliance period", + ), + sa.Column( + "organization_id", + sa.Integer(), + nullable=False, + comment="Identifier for the organization", + ), + sa.Column( + "current_status_id", + sa.Integer(), + nullable=True, + comment="Identifier for the current compliance report status", + ), + sa.Column( + "transaction_id", + sa.Integer(), + nullable=True, + comment="Identifier for the transaction", + ), + sa.Column( + "compliance_report_group_uuid", + sa.String(length=36), + nullable=False, + comment="UUID that groups all versions of a compliance report", + ), + sa.Column( + "version", + sa.Integer(), + nullable=False, + comment="Version number of the compliance report", + ), + sa.Column( + "supplemental_initiator", + postgresql.ENUM( + "SUPPLIER_SUPPLEMENTAL", + "GOVERNMENT_REASSESSMENT", + name="supplementalinitiatortype", + create_type=False, + ), + nullable=True, + comment="Indicates whether supplier or government initiated the supplemental", + ), + sa.Column( + "reporting_frequency", + postgresql.ENUM( + "ANNUAL", "QUARTERLY", name="reportingfrequency", create_type=False + ), + nullable=False, + comment="Reporting frequency", + ), + sa.Column( + "nickname", + sa.String(), + nullable=True, + comment="Nickname for the compliance report", + ), + sa.Column( + "supplemental_note", + sa.String(), + nullable=True, + comment="Supplemental note for the compliance report", + ), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), +
nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.ForeignKeyConstraint( + ["compliance_period_id"], + ["compliance_period.compliance_period_id"], + name=op.f("fk_compliance_report_compliance_period_id_compliance_period"), + ), + sa.ForeignKeyConstraint( + ["current_status_id"], + ["compliance_report_status.compliance_report_status_id"], + name=op.f( + "fk_compliance_report_current_status_id_compliance_report_status" + ), + ), + sa.ForeignKeyConstraint( + ["organization_id"], + ["organization.organization_id"], + name=op.f("fk_compliance_report_organization_id_organization"), + ), + sa.ForeignKeyConstraint( + ["transaction_id"], + ["transaction.transaction_id"], + name=op.f("fk_compliance_report_transaction_id_transaction"), + ), + sa.PrimaryKeyConstraint( + "compliance_report_id", name=op.f("pk_compliance_report") + ), + comment="Main tracking table for all the sub-tables associated with a supplier's compliance report", + ) + op.create_table( + "feedstock_fuel_transport_mode", + sa.Column( + "feedstock_fuel_transport_mode_id", + sa.Integer(), + autoincrement=True, + nullable=False, + comment="Unique identifier", + ), + sa.Column( + "fuel_code_id", sa.Integer(), nullable=True, comment="Fuel code identifier" + ), + sa.Column( + "transport_mode_id", + sa.Integer(), + nullable=True, + comment="Transport mode identifier", + ), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. 
It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.ForeignKeyConstraint( + ["fuel_code_id"], + ["fuel_code.fuel_code_id"], + name=op.f("fk_feedstock_fuel_transport_mode_fuel_code_id_fuel_code"), + ondelete="CASCADE", + ), + sa.ForeignKeyConstraint( + ["transport_mode_id"], + ["transport_mode.transport_mode_id"], + name=op.f( + "fk_feedstock_fuel_transport_mode_transport_mode_id_transport_mode" + ), + ondelete="CASCADE", + ), + sa.PrimaryKeyConstraint( + "feedstock_fuel_transport_mode_id", + name=op.f("pk_feedstock_fuel_transport_mode"), + ), + sa.UniqueConstraint( + "fuel_code_id", + "transport_mode_id", + name=op.f("uq_feedstock_fuel_transport_mode_fuel_code_id"), + ), + comment="Contains a list of transport modes associated with feedstock fuel", + ) + op.create_table( + "finished_fuel_transport_mode", + sa.Column( + "finished_fuel_transport_mode_id", + sa.Integer(), + autoincrement=True, + nullable=False, + comment="Unique identifier", + ), + sa.Column( + "fuel_code_id", sa.Integer(), nullable=True, comment="Fuel code identifier" + ), + sa.Column( + "transport_mode_id", + sa.Integer(), + nullable=True, + comment="Transport mode identifier", + ), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. 
It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.ForeignKeyConstraint( + ["fuel_code_id"], + ["fuel_code.fuel_code_id"], + name=op.f("fk_finished_fuel_transport_mode_fuel_code_id_fuel_code"), + ondelete="CASCADE", + ), + sa.ForeignKeyConstraint( + ["transport_mode_id"], + ["transport_mode.transport_mode_id"], + name=op.f( + "fk_finished_fuel_transport_mode_transport_mode_id_transport_mode" + ), + ondelete="CASCADE", + ), + sa.PrimaryKeyConstraint( + "finished_fuel_transport_mode_id", + name=op.f("pk_finished_fuel_transport_mode"), + ), + sa.UniqueConstraint( + "fuel_code_id", + "transport_mode_id", + name=op.f("uq_finished_fuel_transport_mode_fuel_code_id"), + ), + comment="Contains a list of transport modes associated with finished fuel", + ) + op.create_table( + "initiative_agreement", + sa.Column( + "initiative_agreement_id", + sa.Integer(), + autoincrement=True, + nullable=False, + comment="Unique identifier for the initiative_agreement", + ), + sa.Column( + "compliance_units", + sa.BigInteger(), + nullable=True, + comment="Compliance Units", + ), + sa.Column( + "transaction_effective_date", + sa.DateTime(), + nullable=True, + comment="Transaction effective date", + ), + sa.Column( + "gov_comment", + sa.String(length=1500), + nullable=True, + comment="Comment from the government to organization", + ), + sa.Column("to_organization_id", sa.Integer(), nullable=True), + sa.Column("transaction_id", sa.Integer(), nullable=True), + sa.Column("current_status_id", sa.Integer(), nullable=True), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. 
It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.Column( + "effective_date", + sa.Date(), + nullable=True, + comment="The calendar date the value became valid.", + ), + sa.Column( + "effective_status", + sa.Boolean(), + nullable=False, + comment="True if the value is currently valid, False if it is no longer valid.", + ), + sa.Column( + "expiration_date", + sa.Date(), + nullable=True, + comment="The calendar date the value is no longer valid.", + ), + sa.ForeignKeyConstraint( + ["current_status_id"], + ["initiative_agreement_status.initiative_agreement_status_id"], + name=op.f( + "fk_initiative_agreement_current_status_id_initiative_agreement_status" + ), + ), + sa.ForeignKeyConstraint( + ["to_organization_id"], + ["organization.organization_id"], + name=op.f("fk_initiative_agreement_to_organization_id_organization"), + ), + sa.ForeignKeyConstraint( + ["transaction_id"], + ["transaction.transaction_id"], + name=op.f("fk_initiative_agreement_transaction_id_transaction"), + ), + sa.PrimaryKeyConstraint( + "initiative_agreement_id", name=op.f("pk_initiative_agreement") + ), + sa.UniqueConstraint( + "initiative_agreement_id", + name=op.f("uq_initiative_agreement_initiative_agreement_id"), + ), + comment="Government to organization compliance units initiative agreement", + ) + op.create_table( + "notification_channel_subscription", + sa.Column( + "notification_channel_subscription_id", + sa.Integer(), + autoincrement=True, + nullable=False, + ), + sa.Column("is_enabled", sa.Boolean(), nullable=True), + sa.Column("user_profile_id", sa.Integer(), nullable=True), + sa.Column("notification_type_id", sa.Integer(), nullable=True), + sa.Column("notification_channel_id", sa.Integer(), nullable=True), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. 
It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.ForeignKeyConstraint( + ["notification_channel_id"], + ["notification_channel.notification_channel_id"], + name=op.f( + "fk_notification_channel_subscription_notification_channel_id_notification_channel" + ), + ), + sa.ForeignKeyConstraint( + ["notification_type_id"], + ["notification_type.notification_type_id"], + name=op.f( + "fk_notification_channel_subscription_notification_type_id_notification_type" + ), + ), + sa.ForeignKeyConstraint( + ["user_profile_id"], + ["user_profile.user_profile_id"], + name=op.f( + "fk_notification_channel_subscription_user_profile_id_user_profile" + ), + ), + sa.PrimaryKeyConstraint( + "notification_channel_subscription_id", + name=op.f("pk_notification_channel_subscription"), + ), + sa.UniqueConstraint( + "user_profile_id", + "notification_channel_id", + "notification_type_id", + name="uq_user_channel_type", + ), + comment="Represents a user's subscription to notification events", + ) + op.create_table( + "notification_message", + sa.Column( + "notification_message_id", sa.Integer(), autoincrement=True, nullable=False + ), + sa.Column("is_read", sa.Boolean(), nullable=True), + sa.Column("is_warning", sa.Boolean(), nullable=True), + sa.Column("is_error", sa.Boolean(), nullable=True), + sa.Column("is_archived", sa.Boolean(), nullable=True), + sa.Column("message", sa.Text(), nullable=False), + sa.Column("related_organization_id", sa.Integer(), nullable=True), + sa.Column("origin_user_profile_id", sa.Integer(), nullable=True), + sa.Column("related_user_profile_id", sa.Integer(), nullable=True), + sa.Column("notification_type_id", sa.Integer(), nullable=True), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. 
It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.ForeignKeyConstraint( + ["notification_type_id"], + ["notification_type.notification_type_id"], + name=op.f("fk_notification_message_notification_type_id_notification_type"), + ), + sa.ForeignKeyConstraint( + ["origin_user_profile_id"], + ["user_profile.user_profile_id"], + name=op.f("fk_notification_message_origin_user_profile_id_user_profile"), + ), + sa.ForeignKeyConstraint( + ["related_organization_id"], + ["organization.organization_id"], + name=op.f("fk_notification_message_related_organization_id_organization"), + ), + sa.ForeignKeyConstraint( + ["related_user_profile_id"], + ["user_profile.user_profile_id"], + name=op.f("fk_notification_message_related_user_profile_id_user_profile"), + ), + sa.PrimaryKeyConstraint( + "notification_message_id", name=op.f("pk_notification_message") + ), + comment="Represents a notification message sent to an application user", + ) + op.create_table( + "transfer", + sa.Column( + "transfer_id", + sa.Integer(), + autoincrement=True, + nullable=False, + comment="Unique identifier for the org to org transfer record", + ), + sa.Column("from_organization_id", sa.Integer(), nullable=True), + sa.Column("to_organization_id", sa.Integer(), nullable=True), + sa.Column("from_transaction_id", sa.Integer(), nullable=True), + sa.Column("to_transaction_id", sa.Integer(), nullable=True), + sa.Column( + "agreement_date", + sa.DateTime(), + nullable=True, + comment="Agreement date of the transfer", + ), + sa.Column( + "transaction_effective_date", + sa.DateTime(), + nullable=True, + comment="transaction effective date", + ), + sa.Column( + "price_per_unit", + sa.Numeric(precision=10, scale=2), + nullable=True, + comment="Price per unit with two decimal places", + ), + sa.Column("quantity", sa.Integer(), nullable=True, comment="Quantity of units"), + sa.Column( + "from_org_comment", + sa.String(length=1000), + nullable=True, + comment="Comment from the from-organization", + ), + sa.Column( + "to_org_comment", + sa.String(length=1000), + nullable=True, + comment="Comment from the to-organization", + ), + sa.Column( + "gov_comment", + sa.String(length=1500), + nullable=True, + comment="Comment from the government to organizations", + ), + sa.Column("transfer_category_id", sa.Integer(), nullable=True), + sa.Column("current_status_id", sa.Integer(), nullable=True), + sa.Column( + "recommendation", + postgresql.ENUM( + "Record", + "Refuse", + name="transfer_recommendation_enum", + create_type=False, + ), + nullable=True, + comment="Analyst recommendation for the transfer.", + ), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. 
It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.Column( + "effective_date", + sa.Date(), + nullable=True, + comment="The calendar date the value became valid.", + ), + sa.Column( + "effective_status", + sa.Boolean(), + nullable=False, + comment="True if the value is currently valid, False if it is no longer valid.", + ), + sa.Column( + "expiration_date", + sa.Date(), + nullable=True, + comment="The calendar date the value is no longer valid.", + ), + sa.ForeignKeyConstraint( + ["current_status_id"], + ["transfer_status.transfer_status_id"], + name=op.f("fk_transfer_current_status_id_transfer_status"), + ), + sa.ForeignKeyConstraint( + ["from_organization_id"], + ["organization.organization_id"], + name=op.f("fk_transfer_from_organization_id_organization"), + ), + sa.ForeignKeyConstraint( + ["from_transaction_id"], + ["transaction.transaction_id"], + name=op.f("fk_transfer_from_transaction_id_transaction"), + ), + sa.ForeignKeyConstraint( + ["to_organization_id"], + ["organization.organization_id"], + name=op.f("fk_transfer_to_organization_id_organization"), + ), + sa.ForeignKeyConstraint( + ["to_transaction_id"], + ["transaction.transaction_id"], + name=op.f("fk_transfer_to_transaction_id_transaction"), + ), + sa.ForeignKeyConstraint( + ["transfer_category_id"], + ["transfer_category.transfer_category_id"], + name=op.f("fk_transfer_transfer_category_id_transfer_category"), + ), + sa.PrimaryKeyConstraint("transfer_id", name=op.f("pk_transfer")), + sa.UniqueConstraint("transfer_id", name=op.f("uq_transfer_transfer_id")), + comment="Records of transfers from Organization to Organization", + ) + op.create_table( + "user_role", + sa.Column( + "user_role_id", + sa.Integer(), + autoincrement=True, + nullable=False, + comment="Unique ID for the user role", + ), + sa.Column( + "user_profile_id", + sa.Integer(), + nullable=True, + comment="Foreign key to user_profile", + ), + sa.Column( + "role_id", sa.Integer(), nullable=True, comment="Foreign key to role" + ), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. 
It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.ForeignKeyConstraint( + ["role_id"], ["role.role_id"], name=op.f("fk_user_role_role_id_role") + ), + sa.ForeignKeyConstraint( + ["user_profile_id"], + ["user_profile.user_profile_id"], + name=op.f("fk_user_role_user_profile_id_user_profile"), + ), + sa.PrimaryKeyConstraint("user_role_id", name=op.f("pk_user_role")), + sa.UniqueConstraint( + "user_profile_id", "role_id", name="user_role_unique_constraint" + ), + comment="Contains the user and role relationships", + ) + op.create_table( + "admin_adjustment_history", + sa.Column( + "admin_adjustment_history_id", + sa.Integer(), + autoincrement=True, + nullable=False, + comment="Unique identifier for the admin_adjustment history record", + ), + sa.Column("admin_adjustment_id", sa.Integer(), nullable=True), + sa.Column("admin_adjustment_status_id", sa.Integer(), nullable=True), + sa.Column( + "user_profile_id", + sa.Integer(), + nullable=True, + comment="Foreign key to user_profile", + ), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.Column( + "effective_date", + sa.Date(), + nullable=True, + comment="The calendar date the value became valid.", + ), + sa.Column( + "effective_status", + sa.Boolean(), + nullable=False, + comment="True if the value is currently valid, False if it is no longer valid.", + ), + sa.Column( + "expiration_date", + sa.Date(), + nullable=True, + comment="The calendar date the value is no longer valid.", + ), + sa.ForeignKeyConstraint( + ["admin_adjustment_id"], + ["admin_adjustment.admin_adjustment_id"], + name=op.f( + "fk_admin_adjustment_history_admin_adjustment_id_admin_adjustment" + ), + ), + sa.ForeignKeyConstraint( + ["admin_adjustment_status_id"], + ["admin_adjustment_status.admin_adjustment_status_id"], + name=op.f( + "fk_admin_adjustment_history_admin_adjustment_status_id_admin_adjustment_status" + ), + ), + sa.ForeignKeyConstraint( + ["user_profile_id"], + ["user_profile.user_profile_id"], + name=op.f("fk_admin_adjustment_history_user_profile_id_user_profile"), + ), + sa.PrimaryKeyConstraint( + "admin_adjustment_history_id", name=op.f("pk_admin_adjustment_history") + ), + sa.UniqueConstraint( + "admin_adjustment_history_id", + name=op.f("uq_admin_adjustment_history_admin_adjustment_history_id"), + ), + comment="History record for admin_adjustment status change.", + ) + op.create_table( + "admin_adjustment_internal_comment", + sa.Column( + "admin_adjustment_id", + sa.Integer(), + nullable=False, + comment="Foreign key to admin_adjustment, part 
of the composite primary key.", + ), + sa.Column( + "internal_comment_id", + sa.Integer(), + nullable=False, + comment="Foreign key to internal_comment, part of the composite primary key.", + ), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. It will be the same as the create_date until the record is first updated after creation.", + ), + sa.ForeignKeyConstraint( + ["admin_adjustment_id"], + ["admin_adjustment.admin_adjustment_id"], + name=op.f( + "fk_admin_adjustment_internal_comment_admin_adjustment_id_admin_adjustment" + ), + ), + sa.ForeignKeyConstraint( + ["internal_comment_id"], + ["internal_comment.internal_comment_id"], + name=op.f( + "fk_admin_adjustment_internal_comment_internal_comment_id_internal_comment" + ), + ), + sa.PrimaryKeyConstraint( + "admin_adjustment_id", + "internal_comment_id", + name=op.f("pk_admin_adjustment_internal_comment"), + ), + comment="Associates internal comments with admin adjustments.", + ) + op.create_table( + "allocation_agreement", + sa.Column( + "allocation_agreement_id", + sa.Integer(), + autoincrement=True, + nullable=False, + comment="Unique identifier for the allocation agreement", + ), + sa.Column( + "transaction_partner", + sa.String(), + nullable=False, + comment="Partner involved in the transaction", + ), + sa.Column( + "postal_address", + sa.String(), + nullable=False, + comment="Postal address of the transaction partner", + ), + sa.Column( + "transaction_partner_email", + sa.String(), + nullable=False, + comment="Transaction Partner email", + ), + sa.Column( + "transaction_partner_phone", + sa.String(), + nullable=False, + comment="Transaction Partner phone number", + ), + sa.Column( + "ci_of_fuel", + sa.Numeric(precision=10, scale=2), + nullable=False, + comment="The Carbon intensity of fuel", + ), + sa.Column( + "quantity", + sa.Integer(), + nullable=False, + comment="Quantity of fuel involved in the transaction", + ), + sa.Column( + "units", + sa.String(), + nullable=False, + comment="Units of the fuel quantity. 
Auto-selected, locked field.", + ), + sa.Column( + "fuel_type_other", + sa.String(length=1000), + nullable=True, + comment="Other fuel type if one provided", + ), + sa.Column( + "allocation_transaction_type_id", + sa.Integer(), + nullable=False, + comment="Foreign key to the transaction type", + ), + sa.Column( + "fuel_type_id", + sa.Integer(), + nullable=False, + comment="Foreign key to the fuel type", + ), + sa.Column( + "fuel_category_id", + sa.Integer(), + nullable=False, + comment="Foreign key to the fuel category", + ), + sa.Column( + "provision_of_the_act_id", + sa.Integer(), + nullable=False, + comment="Foreign key to the provision of the act", + ), + sa.Column( + "fuel_code_id", + sa.Integer(), + nullable=True, + comment="Foreign key to the fuel code", + ), + sa.Column( + "compliance_report_id", + sa.Integer(), + nullable=False, + comment="Foreign key to the compliance report", + ), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.Column( + "display_order", + sa.Integer(), + nullable=True, + comment="Relative rank in display sorting order", + ), + sa.ForeignKeyConstraint( + ["allocation_transaction_type_id"], + ["allocation_transaction_type.allocation_transaction_type_id"], + name=op.f( + "fk_allocation_agreement_allocation_transaction_type_id_allocation_transaction_type" + ), + ), + sa.ForeignKeyConstraint( + ["compliance_report_id"], + ["compliance_report.compliance_report_id"], + name=op.f("fk_allocation_agreement_compliance_report_id_compliance_report"), + ), + sa.ForeignKeyConstraint( + ["fuel_category_id"], + ["fuel_category.fuel_category_id"], + name=op.f("fk_allocation_agreement_fuel_category_id_fuel_category"), + ), + sa.ForeignKeyConstraint( + ["fuel_code_id"], + ["fuel_code.fuel_code_id"], + name=op.f("fk_allocation_agreement_fuel_code_id_fuel_code"), + ), + sa.ForeignKeyConstraint( + ["fuel_type_id"], + ["fuel_type.fuel_type_id"], + name=op.f("fk_allocation_agreement_fuel_type_id_fuel_type"), + ), + sa.ForeignKeyConstraint( + ["provision_of_the_act_id"], + ["provision_of_the_act.provision_of_the_act_id"], + name=op.f( + "fk_allocation_agreement_provision_of_the_act_id_provision_of_the_act" + ), + ), + sa.PrimaryKeyConstraint( + "allocation_agreement_id", name=op.f("pk_allocation_agreement") + ), + comment="Records allocation agreements where the reporting obligation is passed from one party to another. 
Each party must report their end of the transaction.", + ) + op.create_table( + "compliance_report_document_association", + sa.Column("compliance_report_id", sa.Integer(), nullable=False), + sa.Column("document_id", sa.Integer(), nullable=False), + sa.ForeignKeyConstraint( + ["compliance_report_id"], + ["compliance_report.compliance_report_id"], + name=op.f( + "fk_compliance_report_document_association_compliance_report_id_compliance_report" + ), + ondelete="CASCADE", + ), + sa.ForeignKeyConstraint( + ["document_id"], + ["document.document_id"], + name=op.f("fk_compliance_report_document_association_document_id_document"), + ), + sa.PrimaryKeyConstraint( + "compliance_report_id", + "document_id", + name=op.f("pk_compliance_report_document_association"), + ), + ) + op.create_table( + "compliance_report_history", + sa.Column( + "compliance_report_history_id", + sa.Integer(), + autoincrement=True, + nullable=False, + comment="Unique identifier for the compliance report history", + ), + sa.Column( + "compliance_report_id", + sa.Integer(), + nullable=False, + comment="Foreign key to the compliance report", + ), + sa.Column( + "status_id", + sa.Integer(), + nullable=False, + comment="Foreign key to the compliance report status", + ), + sa.Column( + "user_profile_id", + sa.Integer(), + nullable=True, + comment="Identifier for the user associated with the status change", + ), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. 
It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.ForeignKeyConstraint( + ["compliance_report_id"], + ["compliance_report.compliance_report_id"], + name=op.f( + "fk_compliance_report_history_compliance_report_id_compliance_report" + ), + ), + sa.ForeignKeyConstraint( + ["status_id"], + ["compliance_report_status.compliance_report_status_id"], + name=op.f( + "fk_compliance_report_history_status_id_compliance_report_status" + ), + ), + sa.ForeignKeyConstraint( + ["user_profile_id"], + ["user_profile.user_profile_id"], + name=op.f("fk_compliance_report_history_user_profile_id_user_profile"), + ), + sa.PrimaryKeyConstraint( + "compliance_report_history_id", name=op.f("pk_compliance_report_history") + ), + comment="Tracks status changes of compliance reports", + ) + op.create_table( + "compliance_report_internal_comment", + sa.Column( + "compliance_report_id", + sa.Integer(), + nullable=False, + comment="Foreign key to compliance_report, part of the composite primary key.", + ), + sa.Column( + "internal_comment_id", + sa.Integer(), + nullable=False, + comment="Foreign key to internal_comment, part of the composite primary key.", + ), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. 
It will be the same as the create_date until the record is first updated after creation.", + ), + sa.ForeignKeyConstraint( + ["compliance_report_id"], + ["compliance_report.compliance_report_id"], + name=op.f( + "fk_compliance_report_internal_comment_compliance_report_id_compliance_report" + ), + ), + sa.ForeignKeyConstraint( + ["internal_comment_id"], + ["internal_comment.internal_comment_id"], + name=op.f( + "fk_compliance_report_internal_comment_internal_comment_id_internal_comment" + ), + ), + sa.PrimaryKeyConstraint( + "compliance_report_id", + "internal_comment_id", + name=op.f("pk_compliance_report_internal_comment"), + ), + comment="Associates internal comments with compliance report.", + ) + op.create_table( + "compliance_report_summary", + sa.Column("summary_id", sa.Integer(), autoincrement=True, nullable=False), + sa.Column("compliance_report_id", sa.Integer(), nullable=True), + sa.Column("quarter", sa.Integer(), nullable=True), + sa.Column("is_locked", sa.Boolean(), nullable=True), + sa.Column( + "line_1_fossil_derived_base_fuel_gasoline", sa.Float(), nullable=False + ), + sa.Column("line_1_fossil_derived_base_fuel_diesel", sa.Float(), nullable=False), + sa.Column( + "line_1_fossil_derived_base_fuel_jet_fuel", sa.Float(), nullable=False + ), + sa.Column( + "line_2_eligible_renewable_fuel_supplied_gasoline", + sa.Float(), + nullable=False, + ), + sa.Column( + "line_2_eligible_renewable_fuel_supplied_diesel", sa.Float(), nullable=False + ), + sa.Column( + "line_2_eligible_renewable_fuel_supplied_jet_fuel", + sa.Float(), + nullable=False, + ), + sa.Column( + "line_3_total_tracked_fuel_supplied_gasoline", sa.Float(), nullable=False + ), + sa.Column( + "line_3_total_tracked_fuel_supplied_diesel", sa.Float(), nullable=False + ), + sa.Column( + "line_3_total_tracked_fuel_supplied_jet_fuel", sa.Float(), nullable=False + ), + sa.Column( + "line_4_eligible_renewable_fuel_required_gasoline", + sa.Float(), + nullable=False, + ), + sa.Column( + "line_4_eligible_renewable_fuel_required_diesel", sa.Float(), nullable=False + ), + sa.Column( + "line_4_eligible_renewable_fuel_required_jet_fuel", + sa.Float(), + nullable=False, + ), + sa.Column( + "line_5_net_notionally_transferred_gasoline", sa.Float(), nullable=False + ), + sa.Column( + "line_5_net_notionally_transferred_diesel", sa.Float(), nullable=False + ), + sa.Column( + "line_5_net_notionally_transferred_jet_fuel", sa.Float(), nullable=False + ), + sa.Column( + "line_6_renewable_fuel_retained_gasoline", sa.Float(), nullable=False + ), + sa.Column("line_6_renewable_fuel_retained_diesel", sa.Float(), nullable=False), + sa.Column( + "line_6_renewable_fuel_retained_jet_fuel", sa.Float(), nullable=False + ), + sa.Column("line_7_previously_retained_gasoline", sa.Float(), nullable=False), + sa.Column("line_7_previously_retained_diesel", sa.Float(), nullable=False), + sa.Column("line_7_previously_retained_jet_fuel", sa.Float(), nullable=False), + sa.Column("line_8_obligation_deferred_gasoline", sa.Float(), nullable=False), + sa.Column("line_8_obligation_deferred_diesel", sa.Float(), nullable=False), + sa.Column("line_8_obligation_deferred_jet_fuel", sa.Float(), nullable=False), + sa.Column("line_9_obligation_added_gasoline", sa.Float(), nullable=False), + sa.Column("line_9_obligation_added_diesel", sa.Float(), nullable=False), + sa.Column("line_9_obligation_added_jet_fuel", sa.Float(), nullable=False), + sa.Column( + "line_10_net_renewable_fuel_supplied_gasoline", sa.Float(), nullable=False + ), + sa.Column( + 
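+ # NOTE (editor): the line_N_* columns in this table mirror the numbered lines
+ # of the compliance report summary schedule; most renewable-fuel lines are
+ # stored as a gasoline/diesel/jet_fuel triplet of floats (assumption: one
+ # column per fuel class keeps the summary a single flat row per report).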
"line_10_net_renewable_fuel_supplied_diesel", sa.Float(), nullable=False + ), + sa.Column( + "line_10_net_renewable_fuel_supplied_jet_fuel", sa.Float(), nullable=False + ), + sa.Column("line_11_non_compliance_penalty_gasoline", sa.Float(), nullable=True), + sa.Column("line_11_non_compliance_penalty_diesel", sa.Float(), nullable=True), + sa.Column("line_11_non_compliance_penalty_jet_fuel", sa.Float(), nullable=True), + sa.Column("line_12_low_carbon_fuel_required", sa.Float(), nullable=False), + sa.Column("line_13_low_carbon_fuel_supplied", sa.Float(), nullable=False), + sa.Column("line_14_low_carbon_fuel_surplus", sa.Float(), nullable=False), + sa.Column("line_15_banked_units_used", sa.Float(), nullable=False), + sa.Column("line_16_banked_units_remaining", sa.Float(), nullable=False), + sa.Column("line_17_non_banked_units_used", sa.Float(), nullable=False), + sa.Column("line_18_units_to_be_banked", sa.Float(), nullable=False), + sa.Column("line_19_units_to_be_exported", sa.Float(), nullable=False), + sa.Column("line_20_surplus_deficit_units", sa.Float(), nullable=False), + sa.Column("line_21_surplus_deficit_ratio", sa.Float(), nullable=False), + sa.Column("line_22_compliance_units_issued", sa.Float(), nullable=False), + sa.Column( + "line_11_fossil_derived_base_fuel_gasoline", sa.Float(), nullable=False + ), + sa.Column( + "line_11_fossil_derived_base_fuel_diesel", sa.Float(), nullable=False + ), + sa.Column( + "line_11_fossil_derived_base_fuel_jet_fuel", sa.Float(), nullable=False + ), + sa.Column("line_11_fossil_derived_base_fuel_total", sa.Float(), nullable=False), + sa.Column("line_21_non_compliance_penalty_payable", sa.Float(), nullable=False), + sa.Column("total_non_compliance_penalty_payable", sa.Float(), nullable=False), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. 
It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.ForeignKeyConstraint( + ["compliance_report_id"], + ["compliance_report.compliance_report_id"], + name=op.f( + "fk_compliance_report_summary_compliance_report_id_compliance_report" + ), + ), + sa.PrimaryKeyConstraint( + "summary_id", name=op.f("pk_compliance_report_summary") + ), + comment="Summary of all compliance calculations displaying the compliance units over a compliance period", + ) + op.create_table( + "final_supply_equipment", + sa.Column( + "final_supply_equipment_id", + sa.Integer(), + autoincrement=True, + nullable=False, + comment="The unique identifier for the final supply equipment.", + ), + sa.Column( + "compliance_report_id", + sa.Integer(), + nullable=False, + comment="The foreign key referencing the compliance report.", + ), + sa.Column( + "supply_from_date", + sa.Date(), + nullable=False, + comment="The date from which the equipment is supplied.", + ), + sa.Column( + "supply_to_date", + sa.Date(), + nullable=False, + comment="The date until which the equipment is supplied.", + ), + sa.Column( + "kwh_usage", + sa.Double(precision=2), + nullable=True, + comment="Optional kWh usage with up to 2 decimal places.", + ), + sa.Column( + "registration_nbr", + sa.String(), + nullable=True, + comment="Unique registration number in format ORGCODE-POSTAL-SEQ (e.g., AB55-V3B0G2-001)", + ), + sa.Column( + "serial_nbr", + sa.String(), + nullable=False, + comment="The serial number of the equipment.", + ), + sa.Column( + "manufacturer", + sa.String(), + nullable=False, + comment="The manufacturer of the equipment.", + ), + sa.Column( + "model", + sa.String(), + nullable=True, + comment="Optional model of the equipment, following 'Make' field.", + ), + sa.Column( + "level_of_equipment_id", + sa.Integer(), + nullable=False, + comment="The foreign key referencing the level of equipment.", + ), + sa.Column( + "ports", + postgresql.ENUM( + "Single port", "Dual port", name="ports_enum", create_type=False + ), + nullable=True, + comment="Port type with options 'Single port' and 'Dual port.'", + ), + sa.Column( + "fuel_measurement_type_id", + sa.Integer(), + nullable=False, + comment="The foreign key referencing the fuel measurement type.", + ), + sa.Column( + "street_address", + sa.String(), + nullable=False, + comment="The street address of the equipment location.", + ), + sa.Column( + "city", + sa.String(), + nullable=False, + comment="The city of the equipment location.", + ), + sa.Column( + "postal_code", + sa.String(), + nullable=False, + comment="The postcode of the equipment location.", + ), + sa.Column( + "latitude", + sa.Double(), + nullable=False, + comment="The latitude of the equipment location.", + ), + sa.Column( + "longitude", + sa.Double(), + nullable=False, + comment="The longitude of the equipment location.", + ), + sa.Column( + "notes", + sa.Text(), + nullable=True, + comment="Any additional notes related to the equipment.", + ), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + 
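+ # NOTE (editor): server_default=sa.text("now()") below populates update_date
+ # on INSERT only; PostgreSQL does not re-evaluate a server default on UPDATE.
+ # Keeping update_date current presumably relies on an ORM onupdate hook or a
+ # database trigger defined outside this migration (assumption).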
server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.ForeignKeyConstraint( + ["compliance_report_id"], + ["compliance_report.compliance_report_id"], + name=op.f( + "fk_final_supply_equipment_compliance_report_id_compliance_report" + ), + ), + sa.ForeignKeyConstraint( + ["fuel_measurement_type_id"], + ["fuel_measurement_type.fuel_measurement_type_id"], + name=op.f( + "fk_final_supply_equipment_fuel_measurement_type_id_fuel_measurement_type" + ), + ), + sa.ForeignKeyConstraint( + ["level_of_equipment_id"], + ["level_of_equipment.level_of_equipment_id"], + name=op.f( + "fk_final_supply_equipment_level_of_equipment_id_level_of_equipment" + ), + ), + sa.PrimaryKeyConstraint( + "final_supply_equipment_id", name=op.f("pk_final_supply_equipment") + ), + comment="Final Supply Equipment", + ) + op.create_index( + op.f("ix_final_supply_equipment_compliance_report_id"), + "final_supply_equipment", + ["compliance_report_id"], + unique=False, + ) + op.create_index( + op.f("ix_final_supply_equipment_fuel_measurement_type_id"), + "final_supply_equipment", + ["fuel_measurement_type_id"], + unique=False, + ) + op.create_index( + op.f("ix_final_supply_equipment_level_of_equipment_id"), + "final_supply_equipment", + ["level_of_equipment_id"], + unique=False, + ) + op.create_index( + op.f("ix_final_supply_equipment_registration_nbr"), + "final_supply_equipment", + ["registration_nbr"], + unique=False, + ) + op.create_table( + "fuel_export", + sa.Column( + "fuel_export_id", + sa.Integer(), + autoincrement=True, + nullable=False, + comment="Unique identifier for the fuel supply", + ), + sa.Column( + "compliance_report_id", + sa.Integer(), + nullable=False, + comment="Foreign key to the compliance report", + ), + sa.Column( + "export_date", sa.Date(), nullable=False, comment="Date of fuel supply" + ), + sa.Column( + "quarter", + postgresql.ENUM("Q1", "Q2", "Q3", "Q4", name="quarter", create_type=False), + nullable=True, + comment="Quarter for quarterly reports", + ), + sa.Column( + "quantity", + sa.Integer(), + nullable=False, + comment="Quantity of fuel supplied", + ), + sa.Column( + "units", + postgresql.ENUM( + "Litres", + "Kilograms", + "Kilowatt_hour", + "Cubic_metres", + name="quantityunitsenum", + create_type=False, + ), + nullable=False, + comment="Units of fuel quantity", + ), + sa.Column( + "compliance_units", + sa.Integer(), + nullable=True, + comment="Compliance units for the fuel supply", + ), + sa.Column( + "target_ci", + sa.Numeric(precision=10, scale=2), + nullable=True, + comment="CI limit for the fuel supply", + ), + sa.Column( + "ci_of_fuel", + sa.Numeric(precision=10, scale=2), + nullable=True, + comment="CI of fuel for the fuel supply", + ), + sa.Column( + "energy_density", + sa.Numeric(precision=10, scale=2), + nullable=True, + comment="Energy density of the fuel supplied", + ), + sa.Column( + "eer", + sa.Numeric(precision=10, scale=2), + nullable=True, + comment="Energy effectiveness ratio of the fuel supplied", + ), + sa.Column( + "energy", + sa.Numeric(precision=10, scale=2), + nullable=True, + comment="Energy content of the fuel supplied", + ), + 
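+ # NOTE (editor): target_ci, ci_of_fuel, energy_density, eer and energy above
+ # look like denormalized snapshots captured at reporting time, so later
+ # changes to reference data do not silently alter historical compliance
+ # calculations (assumption; the live reference values remain reachable via
+ # the foreign keys that follow).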
sa.Column( + "fuel_category_id", + sa.Integer(), + nullable=False, + comment="Foreign key to the fuel category", + ), + sa.Column( + "fuel_code_id", + sa.Integer(), + nullable=True, + comment="Foreign key to the fuel code", + ), + sa.Column( + "fuel_type_id", + sa.Integer(), + nullable=False, + comment="Foreign key to the fuel type", + ), + sa.Column( + "provision_of_the_act_id", + sa.Integer(), + nullable=False, + comment="Foreign key to the provision of the act", + ), + sa.Column( + "end_use_id", + sa.Integer(), + nullable=True, + comment="Foreign key to the end use type", + ), + sa.Column( + "fuel_type_other", + sa.String(length=1000), + nullable=True, + comment="Other fuel type if one provided", + ), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.Column( + "group_uuid", + sa.String(length=36), + nullable=False, + comment="UUID that groups all versions of a record series", + ), + sa.Column( + "version", + sa.Integer(), + nullable=False, + comment="Version number of the record", + ), + sa.Column( + "user_type", + postgresql.ENUM( + "SUPPLIER", "GOVERNMENT", name="usertypeenum", create_type=False + ), + nullable=False, + comment="Indicates whether the record was created/modified by a supplier or government user", + ), + sa.Column( + "action_type", + postgresql.ENUM( + "CREATE", "UPDATE", "DELETE", name="actiontypeenum", create_type=False + ), + server_default=sa.text("'CREATE'"), + nullable=False, + comment="Action type for this record", + ), + sa.ForeignKeyConstraint( + ["compliance_report_id"], + ["compliance_report.compliance_report_id"], + name=op.f("fk_fuel_export_compliance_report_id_compliance_report"), + ), + sa.ForeignKeyConstraint( + ["end_use_id"], + ["end_use_type.end_use_type_id"], + name=op.f("fk_fuel_export_end_use_id_end_use_type"), + ), + sa.ForeignKeyConstraint( + ["fuel_category_id"], + ["fuel_category.fuel_category_id"], + name=op.f("fk_fuel_export_fuel_category_id_fuel_category"), + ), + sa.ForeignKeyConstraint( + ["fuel_code_id"], + ["fuel_code.fuel_code_id"], + name=op.f("fk_fuel_export_fuel_code_id_fuel_code"), + ), + sa.ForeignKeyConstraint( + ["fuel_type_id"], + ["fuel_type.fuel_type_id"], + name=op.f("fk_fuel_export_fuel_type_id_fuel_type"), + ), + sa.ForeignKeyConstraint( + ["provision_of_the_act_id"], + ["provision_of_the_act.provision_of_the_act_id"], + name=op.f("fk_fuel_export_provision_of_the_act_id_provision_of_the_act"), + ), + sa.PrimaryKeyConstraint("fuel_export_id", name=op.f("pk_fuel_export")), + comment="Records the export of fuel for compliance purposes, including changes in supplemental reports", + ) + op.create_table( + "fuel_supply", + sa.Column( + "fuel_supply_id", + sa.Integer(), + autoincrement=True, + nullable=False, + comment="Unique identifier for the fuel supply version", + ), + sa.Column( + "compliance_report_id", + sa.Integer(), 
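+ # NOTE (editor): like fuel_export above, fuel_supply is versioned through the
+ # group_uuid/version/action_type columns declared further down. A sketch of
+ # how an edit might be persisted under this append-only scheme (assumption;
+ # not part of this migration):
+ #   INSERT INTO fuel_supply (group_uuid, version, action_type, ...)
+ #   SELECT group_uuid, version + 1, 'UPDATE', ...
+ #     FROM fuel_supply
+ #    WHERE group_uuid = :group_uuid
+ #    ORDER BY version DESC LIMIT 1;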
+ nullable=False, + comment="Foreign key to the compliance report", + ), + sa.Column( + "quantity", + sa.Integer(), + nullable=False, + comment="Quantity of fuel supplied", + ), + sa.Column( + "units", + postgresql.ENUM( + "Litres", + "Kilograms", + "Kilowatt_hour", + "Cubic_metres", + name="quantityunitsenum", + create_type=False, + ), + nullable=False, + comment="Units of fuel quantity", + ), + sa.Column( + "compliance_units", sa.Integer(), nullable=True, comment="Compliance units" + ), + sa.Column( + "target_ci", + sa.Numeric(precision=10, scale=2), + nullable=True, + comment="Target Carbon Intensity", + ), + sa.Column( + "ci_of_fuel", + sa.Numeric(precision=10, scale=2), + nullable=True, + comment="CI of the fuel", + ), + sa.Column( + "energy_density", + sa.Numeric(precision=10, scale=2), + nullable=True, + comment="Energy density", + ), + sa.Column( + "eer", + sa.Numeric(precision=10, scale=2), + nullable=True, + comment="Energy Effectiveness Ratio", + ), + sa.Column("energy", sa.BigInteger(), nullable=True, comment="Energy content"), + sa.Column( + "fuel_type_other", + sa.String(length=1000), + nullable=True, + comment="Other fuel type if one provided", + ), + sa.Column( + "fuel_category_id", + sa.Integer(), + nullable=False, + comment="Foreign key to the fuel category", + ), + sa.Column( + "fuel_code_id", + sa.Integer(), + nullable=True, + comment="Foreign key to the fuel code", + ), + sa.Column( + "fuel_type_id", + sa.Integer(), + nullable=False, + comment="Foreign key to the fuel type", + ), + sa.Column( + "provision_of_the_act_id", + sa.Integer(), + nullable=False, + comment="Foreign key to the provision of the act", + ), + sa.Column( + "end_use_id", + sa.Integer(), + nullable=True, + comment="Foreign key to the end use type", + ), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. 
It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.Column( + "group_uuid", + sa.String(length=36), + nullable=False, + comment="UUID that groups all versions of a record series", + ), + sa.Column( + "version", + sa.Integer(), + nullable=False, + comment="Version number of the record", + ), + sa.Column( + "user_type", + postgresql.ENUM( + "SUPPLIER", "GOVERNMENT", name="usertypeenum", create_type=False + ), + nullable=False, + comment="Indicates whether the record was created/modified by a supplier or government user", + ), + sa.Column( + "action_type", + postgresql.ENUM( + "CREATE", "UPDATE", "DELETE", name="actiontypeenum", create_type=False + ), + server_default=sa.text("'CREATE'"), + nullable=False, + comment="Action type for this record", + ), + sa.ForeignKeyConstraint( + ["compliance_report_id"], + ["compliance_report.compliance_report_id"], + name=op.f("fk_fuel_supply_compliance_report_id_compliance_report"), + ), + sa.ForeignKeyConstraint( + ["end_use_id"], + ["end_use_type.end_use_type_id"], + name=op.f("fk_fuel_supply_end_use_id_end_use_type"), + ), + sa.ForeignKeyConstraint( + ["fuel_category_id"], + ["fuel_category.fuel_category_id"], + name=op.f("fk_fuel_supply_fuel_category_id_fuel_category"), + ), + sa.ForeignKeyConstraint( + ["fuel_code_id"], + ["fuel_code.fuel_code_id"], + name=op.f("fk_fuel_supply_fuel_code_id_fuel_code"), + ), + sa.ForeignKeyConstraint( + ["fuel_type_id"], + ["fuel_type.fuel_type_id"], + name=op.f("fk_fuel_supply_fuel_type_id_fuel_type"), + ), + sa.ForeignKeyConstraint( + ["provision_of_the_act_id"], + ["provision_of_the_act.provision_of_the_act_id"], + name=op.f("fk_fuel_supply_provision_of_the_act_id_provision_of_the_act"), + ), + sa.PrimaryKeyConstraint("fuel_supply_id", name=op.f("pk_fuel_supply")), + comment="Records the supply of fuel for compliance purposes, including changes in supplemental reports", + ) + op.create_table( + "initiative_agreement_history", + sa.Column( + "initiative_agreement_history_id", + sa.Integer(), + autoincrement=True, + nullable=False, + comment="Unique identifier for the initiative agreement history record", + ), + sa.Column("initiative_agreement_id", sa.Integer(), nullable=True), + sa.Column("initiative_agreement_status_id", sa.Integer(), nullable=True), + sa.Column( + "user_profile_id", + sa.Integer(), + nullable=True, + comment="Foreign key to user_profile", + ), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. 
It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.Column( + "effective_date", + sa.Date(), + nullable=True, + comment="The calendar date the value became valid.", + ), + sa.Column( + "effective_status", + sa.Boolean(), + nullable=False, + comment="True if the value is currently valid, False if it is no longer valid.", + ), + sa.Column( + "expiration_date", + sa.Date(), + nullable=True, + comment="The calendar date the value is no longer valid.", + ), + sa.ForeignKeyConstraint( + ["initiative_agreement_id"], + ["initiative_agreement.initiative_agreement_id"], + name=op.f( + "fk_initiative_agreement_history_initiative_agreement_id_initiative_agreement" + ), + ), + sa.ForeignKeyConstraint( + ["initiative_agreement_status_id"], + ["initiative_agreement_status.initiative_agreement_status_id"], + name=op.f( + "fk_initiative_agreement_history_initiative_agreement_status_id_initiative_agreement_status" + ), + ), + sa.ForeignKeyConstraint( + ["user_profile_id"], + ["user_profile.user_profile_id"], + name=op.f("fk_initiative_agreement_history_user_profile_id_user_profile"), + ), + sa.PrimaryKeyConstraint( + "initiative_agreement_history_id", + name=op.f("pk_initiative_agreement_history"), + ), + sa.UniqueConstraint( + "initiative_agreement_history_id", + name=op.f( + "uq_initiative_agreement_history_initiative_agreement_history_id" + ), + ), + comment="History record for initiative agreement status change.", + ) + op.create_table( + "initiative_agreement_internal_comment", + sa.Column( + "initiative_agreement_id", + sa.Integer(), + nullable=False, + comment="Foreign key to initiative_agreement, part of the composite primary key.", + ), + sa.Column( + "internal_comment_id", + sa.Integer(), + nullable=False, + comment="Foreign key to internal_comment, part of the composite primary key.", + ), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. 
It will be the same as the create_date until the record is first updated after creation.", + ), + sa.ForeignKeyConstraint( + ["initiative_agreement_id"], + ["initiative_agreement.initiative_agreement_id"], + name=op.f( + "fk_initiative_agreement_internal_comment_initiative_agreement_id_initiative_agreement" + ), + ), + sa.ForeignKeyConstraint( + ["internal_comment_id"], + ["internal_comment.internal_comment_id"], + name=op.f( + "fk_initiative_agreement_internal_comment_internal_comment_id_internal_comment" + ), + ), + sa.PrimaryKeyConstraint( + "initiative_agreement_id", + "internal_comment_id", + name=op.f("pk_initiative_agreement_internal_comment"), + ), + comment="Associates internal comments with initiative agreements.", + ) + op.create_table( + "notional_transfer", + sa.Column( + "notional_transfer_id", + sa.Integer(), + autoincrement=True, + nullable=False, + comment="Unique identifier for the notional transfer", + ), + sa.Column( + "compliance_report_id", + sa.Integer(), + nullable=False, + comment="Foreign key to the compliance report", + ), + sa.Column( + "quantity", + sa.Integer(), + nullable=False, + comment="Quantity of fuel being notionally transferred. Cannot be negative.", + ), + sa.Column( + "legal_name", + sa.String(), + nullable=False, + comment="Legal name of the trading partner", + ), + sa.Column( + "address_for_service", + sa.String(), + nullable=False, + comment="Address for service of the trading partner", + ), + sa.Column( + "fuel_category_id", + sa.Integer(), + nullable=False, + comment="Foreign key to the fuel category", + ), + sa.Column( + "received_or_transferred", + postgresql.ENUM( + "Received", + "Transferred", + name="receivedortransferredenum", + create_type=False, + ), + nullable=False, + comment="Indicates whether the transfer is Received or Transferred", + ), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. 
It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.Column( + "group_uuid", + sa.String(length=36), + nullable=False, + comment="UUID that groups all versions of a record series", + ), + sa.Column( + "version", + sa.Integer(), + nullable=False, + comment="Version number of the record", + ), + sa.Column( + "user_type", + postgresql.ENUM( + "SUPPLIER", "GOVERNMENT", name="usertypeenum", create_type=False + ), + nullable=False, + comment="Indicates whether the record was created/modified by a supplier or government user", + ), + sa.Column( + "action_type", + postgresql.ENUM( + "CREATE", "UPDATE", "DELETE", name="actiontypeenum", create_type=False + ), + server_default=sa.text("'CREATE'"), + nullable=False, + comment="Action type for this record", + ), + sa.ForeignKeyConstraint( + ["compliance_report_id"], + ["compliance_report.compliance_report_id"], + name=op.f("fk_notional_transfer_compliance_report_id_compliance_report"), + ), + sa.ForeignKeyConstraint( + ["fuel_category_id"], + ["fuel_category.fuel_category_id"], + name=op.f("fk_notional_transfer_fuel_category_id_fuel_category"), + ), + sa.PrimaryKeyConstraint( + "notional_transfer_id", name=op.f("pk_notional_transfer") + ), + comment="Records notional transfers for compliance reports.", + ) + op.create_table( + "other_uses", + sa.Column( + "other_uses_id", + sa.Integer(), + autoincrement=True, + nullable=False, + comment="Unique identifier for the other uses record", + ), + sa.Column( + "compliance_report_id", + sa.Integer(), + nullable=False, + comment="Foreign key to the compliance report", + ), + sa.Column( + "fuel_type_id", + sa.Integer(), + nullable=False, + comment="Foreign key to the fuel type", + ), + sa.Column( + "fuel_category_id", + sa.Integer(), + nullable=False, + comment="Foreign key to the fuel category", + ), + sa.Column( + "provision_of_the_act_id", + sa.Integer(), + nullable=False, + comment="Foreign key to the provision of the act", + ), + sa.Column( + "fuel_code_id", + sa.Integer(), + nullable=True, + comment="Foreign key to the fuel code", + ), + sa.Column( + "ci_of_fuel", + sa.Numeric(precision=10, scale=2), + nullable=False, + comment="The Carbon intensity of fuel", + ), + sa.Column( + "quantity_supplied", + sa.Integer(), + nullable=False, + comment="Quantity of fuel used. Cannot be negative.", + ), + sa.Column( + "units", + sa.String(), + nullable=False, + comment="Units of the fuel quantity. Auto-selected, locked field.", + ), + sa.Column( + "expected_use_id", + sa.Integer(), + nullable=False, + comment="Foreign key to the expected use type", + ), + sa.Column( + "rationale", + sa.String(), + nullable=True, + comment="Rationale for the use of the fuel, required if 'Other' is selected as expected use", + ), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. 
It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.Column( + "group_uuid", + sa.String(length=36), + nullable=False, + comment="UUID that groups all versions of a record series", + ), + sa.Column( + "version", + sa.Integer(), + nullable=False, + comment="Version number of the record", + ), + sa.Column( + "user_type", + postgresql.ENUM( + "SUPPLIER", "GOVERNMENT", name="usertypeenum", create_type=False + ), + nullable=False, + comment="Indicates whether the record was created/modified by a supplier or government user", + ), + sa.Column( + "action_type", + postgresql.ENUM( + "CREATE", "UPDATE", "DELETE", name="actiontypeenum", create_type=False + ), + server_default=sa.text("'CREATE'"), + nullable=False, + comment="Action type for this record", + ), + sa.ForeignKeyConstraint( + ["compliance_report_id"], + ["compliance_report.compliance_report_id"], + name=op.f("fk_other_uses_compliance_report_id_compliance_report"), + ), + sa.ForeignKeyConstraint( + ["expected_use_id"], + ["expected_use_type.expected_use_type_id"], + name=op.f("fk_other_uses_expected_use_id_expected_use_type"), + ), + sa.ForeignKeyConstraint( + ["fuel_category_id"], + ["fuel_category.fuel_category_id"], + name=op.f("fk_other_uses_fuel_category_id_fuel_category"), + ), + sa.ForeignKeyConstraint( + ["fuel_code_id"], + ["fuel_code.fuel_code_id"], + name=op.f("fk_other_uses_fuel_code_id_fuel_code"), + ), + sa.ForeignKeyConstraint( + ["fuel_type_id"], + ["fuel_type.fuel_type_id"], + name=op.f("fk_other_uses_fuel_type_id_fuel_type"), + ), + sa.ForeignKeyConstraint( + ["provision_of_the_act_id"], + ["provision_of_the_act.provision_of_the_act_id"], + name=op.f("fk_other_uses_provision_of_the_act_id_provision_of_the_act"), + ), + sa.PrimaryKeyConstraint("other_uses_id", name=op.f("pk_other_uses")), + comment="Records other uses of fuels that are subject to renewable requirements but do not earn credits.", + ) + op.create_table( + "transfer_history", + sa.Column( + "transfer_history_id", + sa.Integer(), + autoincrement=True, + nullable=False, + comment="Unique identifier for the transfer history record", + ), + sa.Column("transfer_id", sa.Integer(), nullable=True), + sa.Column("transfer_status_id", sa.Integer(), nullable=True), + sa.Column( + "user_profile_id", + sa.Integer(), + nullable=True, + comment="Foreign key to user_profile", + ), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. 
It will be the same as the create_date until the record is first updated after creation.", + ), + sa.Column( + "create_user", + sa.String(), + nullable=True, + comment="The user who created this record in the database.", + ), + sa.Column( + "update_user", + sa.String(), + nullable=True, + comment="The user who last updated this record in the database.", + ), + sa.Column( + "effective_date", + sa.Date(), + nullable=True, + comment="The calendar date the value became valid.", + ), + sa.Column( + "effective_status", + sa.Boolean(), + nullable=False, + comment="True if the value is currently valid, False if it is no longer valid.", + ), + sa.Column( + "expiration_date", + sa.Date(), + nullable=True, + comment="The calendar date the value is no longer valid.", + ), + sa.ForeignKeyConstraint( + ["transfer_id"], + ["transfer.transfer_id"], + name=op.f("fk_transfer_history_transfer_id_transfer"), + ), + sa.ForeignKeyConstraint( + ["transfer_status_id"], + ["transfer_status.transfer_status_id"], + name=op.f("fk_transfer_history_transfer_status_id_transfer_status"), + ), + sa.ForeignKeyConstraint( + ["user_profile_id"], + ["user_profile.user_profile_id"], + name=op.f("fk_transfer_history_user_profile_id_user_profile"), + ), + sa.PrimaryKeyConstraint( + "transfer_history_id", name=op.f("pk_transfer_history") + ), + comment="Records the status changes of a transfer.", + ) + op.create_table( + "transfer_internal_comment", + sa.Column( + "transfer_id", + sa.Integer(), + nullable=False, + comment="Foreign key to transfer, part of the composite primary key.", + ), + sa.Column( + "internal_comment_id", + sa.Integer(), + nullable=False, + comment="Foreign key to internal_comment, part of the composite primary key.", + ), + sa.Column( + "create_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was created in the database.", + ), + sa.Column( + "update_date", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the physical record was updated in the database. 
It will be the same as the create_date until the record is first updated after creation.", + ), + sa.ForeignKeyConstraint( + ["internal_comment_id"], + ["internal_comment.internal_comment_id"], + name=op.f( + "fk_transfer_internal_comment_internal_comment_id_internal_comment" + ), + ), + sa.ForeignKeyConstraint( + ["transfer_id"], + ["transfer.transfer_id"], + name=op.f("fk_transfer_internal_comment_transfer_id_transfer"), + ), + sa.PrimaryKeyConstraint( + "transfer_id", + "internal_comment_id", + name=op.f("pk_transfer_internal_comment"), + ), + comment="Associates internal comments with transfers.", + ) + op.create_table( + "final_supply_intended_use_association", + sa.Column("final_supply_equipment_id", sa.Integer(), nullable=False), + sa.Column("end_use_type_id", sa.Integer(), nullable=False), + sa.ForeignKeyConstraint( + ["end_use_type_id"], + ["end_use_type.end_use_type_id"], + name=op.f( + "fk_final_supply_intended_use_association_end_use_type_id_end_use_type" + ), + ), + sa.ForeignKeyConstraint( + ["final_supply_equipment_id"], + ["final_supply_equipment.final_supply_equipment_id"], + name=op.f( + "fk_final_supply_intended_use_association_final_supply_equipment_id_final_supply_equipment" + ), + ondelete="CASCADE", + ), + sa.PrimaryKeyConstraint( + "final_supply_equipment_id", + "end_use_type_id", + name=op.f("pk_final_supply_intended_use_association"), + ), + ) + op.create_table( + "final_supply_intended_user_association", + sa.Column("final_supply_equipment_id", sa.Integer(), nullable=False), + sa.Column("end_user_type_id", sa.Integer(), nullable=False), + sa.ForeignKeyConstraint( + ["end_user_type_id"], + ["end_user_type.end_user_type_id"], + name=op.f( + "fk_final_supply_intended_user_association_end_user_type_id_end_user_type" + ), + ), + sa.ForeignKeyConstraint( + ["final_supply_equipment_id"], + ["final_supply_equipment.final_supply_equipment_id"], + name=op.f( + "fk_final_supply_intended_user_association_final_supply_equipment_id_final_supply_equipment" + ), + ondelete="CASCADE", + ), + sa.PrimaryKeyConstraint( + "final_supply_equipment_id", + "end_user_type_id", + name=op.f("pk_final_supply_intended_user_association"), + ), + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_table("final_supply_intended_user_association") + op.drop_table("final_supply_intended_use_association") + op.drop_table("transfer_internal_comment") + op.drop_table("transfer_history") + op.drop_table("other_uses") + op.drop_table("notional_transfer") + op.drop_table("initiative_agreement_internal_comment") + op.drop_table("initiative_agreement_history") + op.drop_table("fuel_supply") + op.drop_table("fuel_export") + op.drop_index( + op.f("ix_final_supply_equipment_registration_nbr"), + table_name="final_supply_equipment", + ) + op.drop_index( + op.f("ix_final_supply_equipment_level_of_equipment_id"), + table_name="final_supply_equipment", + ) + op.drop_index( + op.f("ix_final_supply_equipment_fuel_measurement_type_id"), + table_name="final_supply_equipment", + ) + op.drop_index( + op.f("ix_final_supply_equipment_compliance_report_id"), + table_name="final_supply_equipment", + ) + op.drop_table("final_supply_equipment") + op.drop_table("compliance_report_summary") + op.drop_table("compliance_report_internal_comment") + op.drop_table("compliance_report_history") + op.drop_table("compliance_report_document_association") + op.drop_table("allocation_agreement") + op.drop_table("admin_adjustment_internal_comment") + op.drop_table("admin_adjustment_history") + op.drop_table("user_role") + op.drop_table("transfer") + op.drop_table("notification_message") + op.drop_table("notification_channel_subscription") + op.drop_table("initiative_agreement") + op.drop_table("finished_fuel_transport_mode") + op.drop_table("feedstock_fuel_transport_mode") + op.drop_table("compliance_report") + op.drop_table("admin_adjustment") + op.drop_table("user_profile") + op.drop_table("transaction") + op.drop_table("fuel_instance") + op.drop_table("fuel_code") + op.drop_table("energy_effectiveness_ratio") + op.drop_table("energy_density") + op.drop_table("additional_carbon_intensity") + op.drop_table("target_carbon_intensity") + op.drop_table("organization") + op.drop_table("fuel_type") + op.drop_table("user_login_history") + op.drop_table("unit_of_measure") + op.drop_table("transport_mode") + op.drop_table("transfer_status") + op.drop_table("transfer_category") + op.drop_table("role") + op.drop_table("provision_of_the_act") + op.drop_table("organization_type") + op.drop_table("organization_status") + op.drop_table("organization_attorney_address") + op.drop_table("organization_address") + op.drop_table("notification_type") + op.drop_table("notification_channel") + op.drop_table("level_of_equipment") + op.drop_table("internal_comment") + op.drop_table("initiative_agreement_status") + op.drop_table("fuel_measurement_type") + op.drop_table("fuel_code_status") + op.drop_table("fuel_code_prefix") + op.drop_table("fuel_category") + op.drop_table("final_supply_equipment_reg_number") + op.drop_table("expected_use_type") + op.drop_table("end_user_type") + op.drop_table("end_use_type") + op.drop_table("document") + op.drop_table("compliance_report_status") + op.drop_table("compliance_period") + op.drop_index("idx_audit_log_operation", table_name="audit_log") + op.drop_index("idx_audit_log_delta", table_name="audit_log", postgresql_using="gin") + op.drop_index("idx_audit_log_create_user", table_name="audit_log") + op.drop_index("idx_audit_log_create_date", table_name="audit_log") + op.drop_table("audit_log") + op.drop_table("allocation_transaction_type") + op.drop_table("admin_adjustment_status") + sa.Enum( + "Draft", "Recommended", "Approved", "Deleted", name="admin_adjustment_type_enum" + ).drop(op.get_bind()) + 
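+ # NOTE: these sa.Enum(...).drop() calls must run after the op.drop_table()
+ # calls above, since PostgreSQL refuses to drop an ENUM type while any
+ # table column still depends on it.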
sa.Enum("Director", "Analyst", "Compliance Manager", name="audience_scope").drop( + op.get_bind() + ) + sa.Enum( + "SUPPLIER_SUPPLEMENTAL", + "GOVERNMENT_REASSESSMENT", + name="supplementalinitiatortype", + ).drop(op.get_bind()) + sa.Enum("ANNUAL", "QUARTERLY", name="reportingfrequency").drop(op.get_bind()) + sa.Enum( + "Draft", + "Submitted", + "Recommended_by_analyst", + "Recommended_by_manager", + "Assessed", + "ReAssessed", + name="compliancereportstatusenum", + ).drop(op.get_bind()) + sa.Enum("Single port", "Dual port", name="ports_enum").drop(op.get_bind()) + sa.Enum( + "Litres", "Kilograms", "Kilowatt_hour", "Cubic_metres", name="quantityunitsenum" + ).drop(op.get_bind()) + sa.Enum("SUPPLIER", "GOVERNMENT", name="usertypeenum").drop(op.get_bind()) + sa.Enum("CREATE", "UPDATE", "DELETE", name="actiontypeenum").drop(op.get_bind()) + sa.Enum("Q1", "Q2", "Q3", "Q4", name="quarter").drop(op.get_bind()) + sa.Enum("Received", "Transferred", name="receivedortransferredenum").drop( + op.get_bind() + ) + sa.Enum("Gasoline", "Diesel", "Jet fuel", name="fuel_category_enum").drop( + op.get_bind() + ) + sa.Enum("Draft", "Approved", "Deleted", name="fuel_code_status_enum").drop( + op.get_bind() + ) + sa.Enum( + "Draft", + "Recommended", + "Approved", + "Deleted", + name="initiative_agreement_type_enum", + ).drop(op.get_bind()) + sa.Enum("EMAIL", "IN_APP", name="channel_enum").drop(op.get_bind()) + sa.Enum( + "Unregistered", "Registered", "Suspended", "Canceled", name="org_status_enum" + ).drop(op.get_bind()) + sa.Enum( + "fuel_supplier", + "electricity_supplier", + "broker", + "utilities", + name="org_type_enum", + ).drop(op.get_bind()) + sa.Enum( + "TRANSFER_PARTNER_UPDATE", + "TRANSFER_DIRECTOR_REVIEW", + "INITIATIVE_APPROVED", + "INITIATIVE_DA_REQUEST", + "SUPPLEMENTAL_REQUESTED", + "DIRECTOR_ASSESSMENT", + name="notification_type_enum", + ).drop(op.get_bind()) + sa.Enum("Adjustment", "Reserved", "Released", name="transaction_action_enum").drop( + op.get_bind() + ) + sa.Enum("Record", "Refuse", name="transfer_recommendation_enum").drop(op.get_bind()) + sa.Enum("A", "B", "C", "D", name="transfercategoryenum").drop(op.get_bind()) + sa.Enum( + "Draft", + "Deleted", + "Sent", + "Submitted", + "Recommended", + "Recorded", + "Refused", + "Declined", + "Rescinded", + name="transfer_type_enum", + ).drop(op.get_bind()) + sa.Enum( + "GOVERNMENT", + "ADMINISTRATOR", + "ANALYST", + "COMPLIANCE_MANAGER", + "DIRECTOR", + "SUPPLIER", + "MANAGE_USERS", + "TRANSFER", + "COMPLIANCE_REPORTING", + "SIGNING_AUTHORITY", + "READ_ONLY", + name="role_enum", + ).drop(op.get_bind()) + # ### end Alembic commands ### diff --git a/backend/lcfs/db/migrations/versions/2024-11-27-17-51_4038ff8d8c49.py b/backend/lcfs/db/migrations/versions/2024-11-27-17-51_4038ff8d8c49.py new file mode 100644 index 000000000..ce9205687 --- /dev/null +++ b/backend/lcfs/db/migrations/versions/2024-11-27-17-51_4038ff8d8c49.py @@ -0,0 +1,797 @@ +"""Consolidated Migration: Materialized Views, Audit Logging Functions, and Triggers + +Revision ID: 4038ff8d8c49 +Revises: a2240b2b5629 +Create Date: 2024-11-02 10:00:00.000000 + +""" + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. 
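+ # A note on attribution: the audit_trigger_func() created below stamps each
+ # audit_log row with current_setting('app.username'). A minimal sketch of how
+ # application code might set that per-transaction variable follows; the
+ # session handle and helper name are illustrative assumptions, not part of
+ # this migration.
+ def _example_set_audit_username(session, username):
+     # Illustrative sketch only; never invoked by this migration.
+     # set_config(..., true) scopes the value to the current transaction,
+     # so each request can be attributed independently.
+     session.execute(
+         sa.text("SELECT set_config('app.username', :username, true)"),
+         {"username": username},
+     )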
+revision = "4038ff8d8c49" +down_revision = "a2240b2b5629" +branch_labels = None +depends_on = None + + +def upgrade(): + # ---------------------------------------- + # Part 1: Drop Existing Triggers, Functions, and Views + # ---------------------------------------- + drop_existing_triggers_functions_views() + + # ---------------------------------------- + # Part 2: Create Audit Log Functions and Triggers + # ---------------------------------------- + create_audit_log_functions_and_triggers() + + # ---------------------------------------- + # Part 3: Create Materialized Views and Triggers + # ---------------------------------------- + create_materialized_views_and_triggers() + + # ---------------------------------------- + # Part 4: Create Update Organization Balance Function and Trigger + # ---------------------------------------- + create_update_organization_balance_function_and_trigger() + + +def downgrade(): + # ---------------------------------------- + # Part 4 Downgrade: Drop Update Organization Balance Function and Trigger + # ---------------------------------------- + op.execute( + """DROP TRIGGER IF EXISTS update_organization_balance_trigger ON "transaction";""" + ) + op.execute("""DROP FUNCTION IF EXISTS update_organization_balance();""") + + # ---------------------------------------- + # Part 3 Downgrade: Drop Materialized Views and Triggers + # ---------------------------------------- + drop_materialized_views_and_triggers() + + # ---------------------------------------- + # Part 2 Downgrade: Drop Audit Log Functions and Triggers + # ---------------------------------------- + drop_audit_log_functions_and_triggers() + + +# --------------------------- +# Helper Functions +# --------------------------- + + +def drop_existing_triggers_functions_views(): + # Drop existing triggers + op.execute( + """DROP TRIGGER IF EXISTS refresh_transaction_view_after_transfer ON transfer;""" + ) + op.execute( + """DROP TRIGGER IF EXISTS refresh_transaction_view_after_initiative_agreement ON initiative_agreement;""" + ) + op.execute( + """DROP TRIGGER IF EXISTS refresh_transaction_view_after_admin_adjustment ON admin_adjustment;""" + ) + op.execute( + """DROP TRIGGER IF EXISTS refresh_transaction_view_after_transfer_history ON transfer_history;""" + ) + op.execute( + """DROP TRIGGER IF EXISTS refresh_transaction_view_after_initiative_agreement_history ON initiative_agreement_history;""" + ) + op.execute( + """DROP TRIGGER IF EXISTS refresh_transaction_view_after_admin_adjustment_history ON admin_adjustment_history;""" + ) + op.execute( + """DROP TRIGGER IF EXISTS refresh_mv_transaction_count_after_transfer ON transfer;""" + ) + op.execute( + """DROP TRIGGER IF EXISTS refresh_mv_transaction_count_after_initiative_agreement ON initiative_agreement;""" + ) + op.execute( + """DROP TRIGGER IF EXISTS refresh_mv_transaction_count_after_admin_adjustment ON admin_adjustment;""" + ) + op.execute( + """DROP TRIGGER IF EXISTS refresh_mv_director_review_transaction_count_after_transfer ON transfer;""" + ) + op.execute( + """DROP TRIGGER IF EXISTS refresh_mv_director_review_transaction_count_after_compliance_report ON compliance_report;""" + ) + op.execute( + """DROP TRIGGER IF EXISTS refresh_mv_director_review_transaction_count_after_initiative_agreement ON initiative_agreement;""" + ) + op.execute( + """DROP TRIGGER IF EXISTS refresh_mv_director_review_transaction_count_after_admin_adjustment ON admin_adjustment;""" + ) + op.execute( + """DROP TRIGGER IF EXISTS 
refresh_mv_org_compliance_report_count_after_compliance_report ON compliance_report;""" + ) + + # Drop existing functions + op.execute("""DROP FUNCTION IF EXISTS refresh_transaction_aggregate();""") + op.execute("""DROP FUNCTION IF EXISTS refresh_mv_transaction_count();""") + op.execute( + """DROP FUNCTION IF EXISTS refresh_mv_director_review_transaction_count();""" + ) + op.execute("""DROP FUNCTION IF EXISTS refresh_mv_org_compliance_report_count();""") + + # Drop existing materialized views and views + op.execute("""DROP MATERIALIZED VIEW IF EXISTS mv_transaction_aggregate;""") + op.execute("""DROP VIEW IF EXISTS transaction_status_view;""") + op.execute("""DROP MATERIALIZED VIEW IF EXISTS mv_transaction_count;""") + op.execute( + """DROP MATERIALIZED VIEW IF EXISTS mv_director_review_transaction_count;""" + ) + op.execute("""DROP MATERIALIZED VIEW IF EXISTS mv_org_compliance_report_count;""") + + +def create_audit_log_functions_and_triggers(): + # Step 1: Create JSONB_DIFF FUNCTION + op.execute( + """ + CREATE OR REPLACE FUNCTION jsonb_diff( + old_row JSONB, + new_row JSONB + ) RETURNS JSONB AS $$ + BEGIN + RETURN ( + SELECT jsonb_object_agg(key, value) + FROM ( + SELECT key, value + FROM jsonb_each(new_row) + EXCEPT + SELECT key, value + FROM jsonb_each(old_row) + ) diff + ); + END; + $$ LANGUAGE plpgsql; + """ + ) + + # Step 2: Add JSON delta function + op.execute( + """ + CREATE OR REPLACE FUNCTION generate_json_delta( + old_row JSONB, + new_row JSONB + ) RETURNS JSONB AS $$ + BEGIN + RETURN jsonb_diff(old_row, new_row); + END; + $$ LANGUAGE plpgsql; + """ + ) + + # Step 3: Add audit trigger function + op.execute( + """ + CREATE OR REPLACE FUNCTION audit_trigger_func() + RETURNS TRIGGER AS $$ + DECLARE + v_operation TEXT; + v_table_name TEXT := TG_TABLE_NAME; + v_row_id JSONB; + v_old_values JSONB; + v_new_values JSONB; + v_delta JSONB; + v_pk_col TEXT; + BEGIN + SELECT c.column_name INTO v_pk_col + FROM information_schema.table_constraints tc + JOIN information_schema.constraint_column_usage AS ccu USING (constraint_schema, constraint_name) + JOIN information_schema.columns AS c ON c.table_schema = tc.constraint_schema + AND tc.table_name = c.table_name AND ccu.column_name = c.column_name + WHERE tc.constraint_type = 'PRIMARY KEY' AND tc.table_name = TG_TABLE_NAME + LIMIT 1; + + IF (TG_OP = 'INSERT') THEN + v_operation := 'INSERT'; + v_new_values := to_jsonb(NEW); + EXECUTE format('SELECT ($1).%I', v_pk_col) INTO v_row_id USING NEW; + ELSIF (TG_OP = 'UPDATE') THEN + v_operation := 'UPDATE'; + v_old_values := to_jsonb(OLD); + v_new_values := to_jsonb(NEW); + v_delta := generate_json_delta(v_old_values, v_new_values); + EXECUTE format('SELECT ($1).%I', v_pk_col) INTO v_row_id USING NEW; + ELSIF (TG_OP = 'DELETE') THEN + v_operation := 'DELETE'; + v_old_values := to_jsonb(OLD); + EXECUTE format('SELECT ($1).%I', v_pk_col) INTO v_row_id USING OLD; + END IF; + + INSERT INTO audit_log ( + create_user, + update_user, + table_name, + operation, + row_id, + delta, + old_values, + new_values + ) + VALUES ( + current_setting('app.username', true), + current_setting('app.username', true), + v_table_name, + v_operation, + v_row_id, + v_delta, + v_old_values, + v_new_values + ); + + RETURN NULL; + END; + $$ LANGUAGE plpgsql; + """ + ) + + # Step 4: Add audit triggers to relevant tables + op.execute( + """ + DO $$ + DECLARE + r RECORD; + BEGIN + FOR r IN SELECT tablename + FROM pg_tables + WHERE schemaname = 'public' + AND tablename IN ('transaction', 'compliance_report', 
'compliance_report_history', + 'compliance_report_status', 'compliance_report_summary', 'compliance_period', + 'initiative_agreement', 'initiative_agreement_status', 'initiative_agreement_history', + 'allocation_agreement', 'allocation_transaction_type', 'custom_fuel_type', 'fuel_code', + 'fuel_code_prefix', 'fuel_code_status', 'fuel_category', 'fuel_instance', 'fuel_type', + 'fuel_export', 'organization', 'organization_address', 'organization_attorney_address', + 'organization_status', 'organization_type', 'transfer', 'transfer_category', 'transfer_history', + 'transfer_status', 'internal_comment', 'user_profile', 'user_role', 'role', 'notification_message', + 'notification_type', 'admin_adjustment', 'admin_adjustment_status', 'admin_adjustment_history', + 'provision_of_the_act', 'supplemental_report', 'final_supply_equipment', 'notional_transfer', + 'fuel_supply', 'additional_carbon_intensity', 'document', 'end_use_type', 'energy_density', + 'energy_effectiveness_ratio', 'transport_mode', 'final_supply_equipment', 'level_of_equipment', + 'user_login_history', 'unit_of_measure', 'target_carbon_intensity') + LOOP + EXECUTE format(' + CREATE TRIGGER audit_%I_insert_update_delete + AFTER INSERT OR UPDATE OR DELETE ON %I + FOR EACH ROW EXECUTE FUNCTION audit_trigger_func();', + r.tablename, r.tablename); + END LOOP; + END $$; + """ + ) + + +def create_materialized_views_and_triggers(): + # Create mv_transaction_aggregate materialized view + op.execute( + """ + CREATE MATERIALIZED VIEW mv_transaction_aggregate AS + SELECT + t.transfer_id AS transaction_id, + 'Transfer' AS transaction_type, + org_from.organization_id AS from_organization_id, + org_from.name AS from_organization, + org_to.organization_id AS to_organization_id, + org_to.name AS to_organization, + t.quantity, + t.price_per_unit, + ts.status::text AS status, + NULL AS compliance_period, + t.from_org_comment AS comment, + tc.category, + ( + SELECT th.create_date + FROM transfer_history th + WHERE th.transfer_id = t.transfer_id AND th.transfer_status_id = 6 + ) AS recorded_date, + NULL AS approved_date, + t.transaction_effective_date, + t.update_date, + t.create_date + FROM transfer t + JOIN organization org_from ON t.from_organization_id = org_from.organization_id + JOIN organization org_to ON t.to_organization_id = org_to.organization_id + JOIN transfer_status ts ON t.current_status_id = ts.transfer_status_id + LEFT JOIN transfer_category tc ON t.transfer_category_id = tc.transfer_category_id + UNION ALL + SELECT + ia.initiative_agreement_id AS transaction_id, + 'InitiativeAgreement' AS transaction_type, + NULL AS from_organization_id, + NULL AS from_organization, + org.organization_id AS to_organization_id, + org.name AS to_organization, + ia.compliance_units AS quantity, + NULL AS price_per_unit, + ias.status::text AS status, + NULL AS compliance_period, + ia.gov_comment AS comment, + NULL AS category, + NULL AS recorded_date, + ( + SELECT iah.create_date + FROM initiative_agreement_history iah + WHERE iah.initiative_agreement_id = ia.initiative_agreement_id AND iah.initiative_agreement_status_id = 3 + ) AS approved_date, + ia.transaction_effective_date, + ia.update_date, + ia.create_date + FROM initiative_agreement ia + JOIN organization org ON ia.to_organization_id = org.organization_id + JOIN initiative_agreement_status ias ON ia.current_status_id = ias.initiative_agreement_status_id + UNION ALL + SELECT + aa.admin_adjustment_id AS transaction_id, + 'AdminAdjustment' AS transaction_type, + NULL AS from_organization_id, + NULL 
AS from_organization, + org.organization_id AS to_organization_id, + org.name AS to_organization, + aa.compliance_units AS quantity, + NULL AS price_per_unit, + aas.status::text AS status, + NULL AS compliance_period, + aa.gov_comment AS comment, + NULL AS category, + NULL AS recorded_date, + ( + SELECT aah.create_date + FROM admin_adjustment_history aah + WHERE aah.admin_adjustment_id = aa.admin_adjustment_id AND aah.admin_adjustment_status_id = 3 + ) AS approved_date, + aa.transaction_effective_date, + aa.update_date, + aa.create_date + FROM admin_adjustment aa + JOIN organization org ON aa.to_organization_id = org.organization_id + JOIN admin_adjustment_status aas ON aa.current_status_id = aas.admin_adjustment_status_id; + """ + ) + + # Create unique index on mv_transaction_aggregate + op.execute( + """ + CREATE UNIQUE INDEX mv_transaction_aggregate_unique_idx ON mv_transaction_aggregate (transaction_id, transaction_type); + """ + ) + + # Create transaction_status_view + op.execute( + """ + CREATE OR REPLACE VIEW transaction_status_view AS + SELECT + status::text, + create_date, + update_date + FROM initiative_agreement_status + UNION + SELECT + status::text, + create_date, + update_date + FROM admin_adjustment_status + UNION + SELECT + status::text, + create_date, + update_date + FROM transfer_status; + """ + ) + + # Create refresh_transaction_aggregate function + op.execute( + """ + CREATE OR REPLACE FUNCTION refresh_transaction_aggregate() + RETURNS TRIGGER AS $$ + BEGIN + REFRESH MATERIALIZED VIEW CONCURRENTLY mv_transaction_aggregate; + RETURN NULL; + END; + $$ LANGUAGE plpgsql; + """ + ) + + # Create triggers to refresh mv_transaction_aggregate + op.execute( + """ + CREATE TRIGGER refresh_transaction_view_after_transfer + AFTER INSERT OR UPDATE OR DELETE ON transfer + FOR EACH STATEMENT EXECUTE FUNCTION refresh_transaction_aggregate(); + """ + ) + op.execute( + """ + CREATE TRIGGER refresh_transaction_view_after_initiative_agreement + AFTER INSERT OR UPDATE OR DELETE ON initiative_agreement + FOR EACH STATEMENT EXECUTE FUNCTION refresh_transaction_aggregate(); + """ + ) + op.execute( + """ + CREATE TRIGGER refresh_transaction_view_after_admin_adjustment + AFTER INSERT OR UPDATE OR DELETE ON admin_adjustment + FOR EACH STATEMENT EXECUTE FUNCTION refresh_transaction_aggregate(); + """ + ) + op.execute( + """ + CREATE TRIGGER refresh_transaction_view_after_transfer_history + AFTER INSERT OR UPDATE OR DELETE ON transfer_history + FOR EACH STATEMENT EXECUTE FUNCTION refresh_transaction_aggregate(); + """ + ) + op.execute( + """ + CREATE TRIGGER refresh_transaction_view_after_initiative_agreement_history + AFTER INSERT OR UPDATE OR DELETE ON initiative_agreement_history + FOR EACH STATEMENT EXECUTE FUNCTION refresh_transaction_aggregate(); + """ + ) + op.execute( + """ + CREATE TRIGGER refresh_transaction_view_after_admin_adjustment_history + AFTER INSERT OR UPDATE OR DELETE ON admin_adjustment_history + FOR EACH STATEMENT EXECUTE FUNCTION refresh_transaction_aggregate(); + """ + ) + + # Create mv_transaction_count materialized view + op.execute( + """ + CREATE MATERIALIZED VIEW mv_transaction_count AS + SELECT + 'transfers' AS transaction_type, + COUNT(*) FILTER ( + WHERE + t.current_status_id IN (4, 5) -- Submitted, Recommended + ) AS count_in_progress + FROM transfer t + UNION ALL + SELECT + 'initiative_agreements' AS transaction_type, + COUNT(*) FILTER ( + WHERE + ia.current_status_id IN (1, 2) -- Draft, Recommended + ) AS count_in_progress + FROM initiative_agreement ia + UNION ALL 
+ SELECT + 'admin_adjustments' AS transaction_type, + COUNT(*) FILTER ( + WHERE + aa.current_status_id IN (1, 2) -- Draft, Recommended + ) AS count_in_progress + FROM admin_adjustment aa; + """ + ) + + # Create unique index on mv_transaction_count + op.execute( + """ + CREATE UNIQUE INDEX mv_transaction_count_unique_idx ON mv_transaction_count (transaction_type); + """ + ) + + # Create refresh_mv_transaction_count function + op.execute( + """ + CREATE OR REPLACE FUNCTION refresh_mv_transaction_count() + RETURNS TRIGGER AS $$ + BEGIN + REFRESH MATERIALIZED VIEW CONCURRENTLY mv_transaction_count; + RETURN NULL; + END; + $$ LANGUAGE plpgsql; + """ + ) + + # Create triggers to refresh mv_transaction_count + op.execute( + """ + CREATE TRIGGER refresh_mv_transaction_count_after_transfer + AFTER INSERT OR UPDATE OR DELETE ON transfer + FOR EACH STATEMENT EXECUTE FUNCTION refresh_mv_transaction_count(); + """ + ) + op.execute( + """ + CREATE TRIGGER refresh_mv_transaction_count_after_initiative_agreement + AFTER INSERT OR UPDATE OR DELETE ON initiative_agreement + FOR EACH STATEMENT EXECUTE FUNCTION refresh_mv_transaction_count(); + """ + ) + op.execute( + """ + CREATE TRIGGER refresh_mv_transaction_count_after_admin_adjustment + AFTER INSERT OR UPDATE OR DELETE ON admin_adjustment + FOR EACH STATEMENT EXECUTE FUNCTION refresh_mv_transaction_count(); + """ + ) + + # Create mv_director_review_transaction_count materialized view + op.execute( + """ + CREATE MATERIALIZED VIEW mv_director_review_transaction_count AS + SELECT + 'transfers' AS transaction_type, + COUNT(*) FILTER ( + WHERE + t.current_status_id = 5 -- Recommended + ) AS count_for_review + FROM transfer t + UNION ALL + SELECT + 'compliance_reports' AS transaction_type, + COUNT(*) FILTER ( + WHERE + cr.current_status_id = 4 -- Recommended by Manager + ) AS count_for_review + FROM compliance_report cr + UNION ALL + SELECT + 'initiative_agreements' AS transaction_type, + COUNT(*) FILTER ( + WHERE + ia.current_status_id = 2 -- Recommended + ) AS count_for_review + FROM initiative_agreement ia + UNION ALL + SELECT + 'admin_adjustments' AS transaction_type, + COUNT(*) FILTER ( + WHERE + aa.current_status_id = 2 -- Recommended + ) AS count_for_review + FROM admin_adjustment aa; + """ + ) + + # Create unique index on mv_director_review_transaction_count + op.execute( + """ + CREATE UNIQUE INDEX mv_director_review_transaction_count_unique_idx ON mv_director_review_transaction_count (transaction_type); + """ + ) + + # Create refresh_mv_director_review_transaction_count function + op.execute( + """ + CREATE OR REPLACE FUNCTION refresh_mv_director_review_transaction_count() + RETURNS TRIGGER AS $$ + BEGIN + REFRESH MATERIALIZED VIEW CONCURRENTLY mv_director_review_transaction_count; + RETURN NULL; + END; + $$ LANGUAGE plpgsql; + """ + ) + + # Create triggers to refresh mv_director_review_transaction_count + op.execute( + """ + CREATE TRIGGER refresh_mv_director_review_transaction_count_after_transfer + AFTER INSERT OR UPDATE OR DELETE ON transfer + FOR EACH STATEMENT EXECUTE FUNCTION refresh_mv_director_review_transaction_count(); + """ + ) + op.execute( + """ + CREATE TRIGGER refresh_mv_director_review_transaction_count_after_compliance_report + AFTER INSERT OR UPDATE OR DELETE ON compliance_report + FOR EACH STATEMENT EXECUTE FUNCTION refresh_mv_director_review_transaction_count(); + """ + ) + op.execute( + """ + CREATE TRIGGER refresh_mv_director_review_transaction_count_after_initiative_agreement + AFTER INSERT OR UPDATE OR DELETE ON 
initiative_agreement + FOR EACH STATEMENT EXECUTE FUNCTION refresh_mv_director_review_transaction_count(); + """ + ) + op.execute( + """ + CREATE TRIGGER refresh_mv_director_review_transaction_count_after_admin_adjustment + AFTER INSERT OR UPDATE OR DELETE ON admin_adjustment + FOR EACH STATEMENT EXECUTE FUNCTION refresh_mv_director_review_transaction_count(); + """ + ) + + # Create mv_org_compliance_report_count materialized view + op.execute( + """ + CREATE MATERIALIZED VIEW mv_org_compliance_report_count AS + SELECT + organization_id, + COUNT(*) FILTER (WHERE current_status_id = 1) AS count_in_progress, + COUNT(*) FILTER (WHERE current_status_id = 2) AS count_awaiting_gov_review + FROM + compliance_report + GROUP BY + organization_id; + """ + ) + + # Create unique index on mv_org_compliance_report_count + op.execute( + """ + CREATE UNIQUE INDEX mv_org_compliance_report_count_org_id_idx ON mv_org_compliance_report_count (organization_id); + """ + ) + + # Create refresh_mv_org_compliance_report_count function + op.execute( + """ + CREATE OR REPLACE FUNCTION refresh_mv_org_compliance_report_count() + RETURNS TRIGGER AS $$ + BEGIN + REFRESH MATERIALIZED VIEW CONCURRENTLY mv_org_compliance_report_count; + RETURN NULL; + END; + $$ LANGUAGE plpgsql; + """ + ) + + # Create trigger to refresh mv_org_compliance_report_count + op.execute( + """ + CREATE TRIGGER refresh_mv_org_compliance_report_count_after_compliance_report + AFTER INSERT OR UPDATE OR DELETE ON compliance_report + FOR EACH STATEMENT EXECUTE FUNCTION refresh_mv_org_compliance_report_count(); + """ + ) + + +def create_update_organization_balance_function_and_trigger(): + # Create update_organization_balance function + op.execute( + """ + CREATE OR REPLACE FUNCTION update_organization_balance() + RETURNS TRIGGER AS $$ + DECLARE + new_total_balance BIGINT; + new_reserved_balance BIGINT; + org_id INT := COALESCE(NEW.organization_id, OLD.organization_id); + BEGIN + -- Calculate new total balance for specific organization_id + SELECT COALESCE(SUM(compliance_units), 0) INTO new_total_balance + FROM "transaction" + WHERE organization_id = org_id + AND transaction_action = 'Adjustment'; + + -- Calculate new reserved balance for specific organization_id + SELECT COALESCE(SUM(compliance_units), 0) INTO new_reserved_balance + FROM "transaction" + WHERE organization_id = org_id + AND transaction_action = 'Reserved'; + + -- Update the organization with the new balances + UPDATE organization + SET total_balance = new_total_balance, + reserved_balance = new_reserved_balance + WHERE organization_id = org_id; + + RETURN NEW; + END; + $$ LANGUAGE plpgsql; + """ + ) + + # Create trigger to update organization balance + op.execute( + """ + CREATE TRIGGER update_organization_balance_trigger + AFTER INSERT OR UPDATE OR DELETE ON "transaction" + FOR EACH ROW EXECUTE FUNCTION update_organization_balance(); + """ + ) + + +def drop_materialized_views_and_triggers(): + # Drop triggers related to materialized views + op.execute( + """DROP TRIGGER IF EXISTS refresh_transaction_view_after_transfer ON transfer;""" + ) + op.execute( + """DROP TRIGGER IF EXISTS refresh_transaction_view_after_initiative_agreement ON initiative_agreement;""" + ) + op.execute( + """DROP TRIGGER IF EXISTS refresh_transaction_view_after_admin_adjustment ON admin_adjustment;""" + ) + op.execute( + """DROP TRIGGER IF EXISTS refresh_transaction_view_after_transfer_history ON transfer_history;""" + ) + op.execute( + """DROP TRIGGER IF EXISTS 
refresh_transaction_view_after_initiative_agreement_history ON initiative_agreement_history;""" + ) + op.execute( + """DROP TRIGGER IF EXISTS refresh_transaction_view_after_admin_adjustment_history ON admin_adjustment_history;""" + ) + op.execute( + """DROP TRIGGER IF EXISTS refresh_mv_transaction_count_after_transfer ON transfer;""" + ) + op.execute( + """DROP TRIGGER IF EXISTS refresh_mv_transaction_count_after_initiative_agreement ON initiative_agreement;""" + ) + op.execute( + """DROP TRIGGER IF EXISTS refresh_mv_transaction_count_after_admin_adjustment ON admin_adjustment;""" + ) + op.execute( + """DROP TRIGGER IF EXISTS refresh_mv_director_review_transaction_count_after_transfer ON transfer;""" + ) + op.execute( + """DROP TRIGGER IF EXISTS refresh_mv_director_review_transaction_count_after_compliance_report ON compliance_report;""" + ) + op.execute( + """DROP TRIGGER IF EXISTS refresh_mv_director_review_transaction_count_after_initiative_agreement ON initiative_agreement;""" + ) + op.execute( + """DROP TRIGGER IF EXISTS refresh_mv_director_review_transaction_count_after_admin_adjustment ON admin_adjustment;""" + ) + op.execute( + """DROP TRIGGER IF EXISTS refresh_mv_org_compliance_report_count_after_compliance_report ON compliance_report;""" + ) + + # Drop functions related to materialized views + op.execute("""DROP FUNCTION IF EXISTS refresh_transaction_aggregate();""") + op.execute("""DROP FUNCTION IF EXISTS refresh_mv_transaction_count();""") + op.execute( + """DROP FUNCTION IF EXISTS refresh_mv_director_review_transaction_count();""" + ) + op.execute("""DROP FUNCTION IF EXISTS refresh_mv_org_compliance_report_count();""") + + # Drop materialized views and views + op.execute("""DROP MATERIALIZED VIEW IF EXISTS mv_transaction_aggregate;""") + op.execute("""DROP VIEW IF EXISTS transaction_status_view;""") + op.execute("""DROP MATERIALIZED VIEW IF EXISTS mv_transaction_count;""") + op.execute( + """DROP MATERIALIZED VIEW IF EXISTS mv_director_review_transaction_count;""" + ) + op.execute("""DROP MATERIALIZED VIEW IF EXISTS mv_org_compliance_report_count;""") + + +def drop_audit_log_functions_and_triggers(): + # Remove audit triggers + op.execute( + """ + DO $$ + DECLARE + r RECORD; + BEGIN + FOR r IN SELECT tablename + FROM pg_tables + WHERE schemaname = 'public' + AND tablename IN ('transaction', 'compliance_report', 'compliance_report_history', + 'compliance_report_status', 'compliance_report_summary', 'compliance_period', + 'initiative_agreement', 'initiative_agreement_status', 'initiative_agreement_history', + 'allocation_agreement', 'allocation_transaction_type', 'custom_fuel_type', 'fuel_code', + 'fuel_code_prefix', 'fuel_code_status', 'fuel_category', 'fuel_instance', 'fuel_type', + 'fuel_export', 'organization', 'organization_address', 'organization_attorney_address', + 'organization_status', 'organization_type', 'transfer', 'transfer_category', 'transfer_history', + 'transfer_status', 'internal_comment', 'user_profile', 'user_role', 'role', 'notification_message', + 'notification_type', 'admin_adjustment', 'admin_adjustment_status', 'admin_adjustment_history', + 'provision_of_the_act', 'supplemental_report', 'final_supply_equipment', 'notional_transfer', + 'fuel_supply', 'additional_carbon_intensity', 'document', 'end_use_type', 'energy_density', + 'energy_effectiveness_ratio', 'transport_mode', 'final_supply_equipment', 'level_of_equipment', + 'user_login_history', 'unit_of_measure', 'target_carbon_intensity') + LOOP + EXECUTE format(' + DROP TRIGGER IF EXISTS 
audit_%I_insert_update_delete ON %I;',
+ r.tablename, r.tablename);
+ END LOOP;
+ END $$;
+ """
+ )
+
+ # Drop audit trigger function
+ op.execute("DROP FUNCTION IF EXISTS audit_trigger_func;")
+
+ # Drop generate_json_delta function
+ op.execute("DROP FUNCTION IF EXISTS generate_json_delta;")
+
+ # Drop JSONB_DIFF FUNCTION
+ op.execute("DROP FUNCTION IF EXISTS jsonb_diff;")
diff --git a/backend/lcfs/db/migrations/versions/2024-11-27-18-05_043c52082a3b.py b/backend/lcfs/db/migrations/versions/2024-11-27-18-05_043c52082a3b.py
new file mode 100644
index 000000000..c71a8aae3
--- /dev/null
+++ b/backend/lcfs/db/migrations/versions/2024-11-27-18-05_043c52082a3b.py
@@ -0,0 +1,67 @@
+"""Add options to the Feedstock Transport mode for fuel codes
+
+Revision ID: 043c52082a3b
+Revises: 4038ff8d8c49
+Create Date: 2024-11-27 18:05:12.015327
+
+"""
+
+from datetime import datetime
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "043c52082a3b"
+down_revision = "4038ff8d8c49"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ current_time = datetime.now()
+
+ # Update Marine to Marine-domestic
+ op.execute(
+ """
+ UPDATE transport_mode
+ SET transport_mode = 'Marine-domestic',
+ update_date = '{}',
+ update_user = 'no_user'
+ WHERE transport_mode = 'Marine'
+ """.format(
+ current_time
+ )
+ )
+
+ # Insert Marine-international
+ op.execute(
+ """
+ INSERT INTO transport_mode (transport_mode, create_date, update_date, create_user, update_user)
+ VALUES ('Marine-international', '{}', '{}', 'no_user', 'no_user')
+ """.format(
+ current_time, current_time
+ )
+ )
+
+
+def downgrade():
+ # Revert Marine-domestic back to Marine
+ op.execute(
+ """
+ UPDATE transport_mode
+ SET transport_mode = 'Marine',
+ update_date = '{}',
+ update_user = 'no_user'
+ WHERE transport_mode = 'Marine-domestic'
+ """.format(
+ datetime.utcnow()
+ )
+ )
+
+ # Remove Marine-international
+ op.execute(
+ """
+ DELETE FROM transport_mode
+ WHERE transport_mode = 'Marine-international'
+ """
+ )
diff --git a/backend/lcfs/db/migrations/versions/2024-11-28-01-09_b4da565bb711.py b/backend/lcfs/db/migrations/versions/2024-11-28-01-09_b4da565bb711.py
new file mode 100644
index 000000000..49a0936d1
--- /dev/null
+++ b/backend/lcfs/db/migrations/versions/2024-11-28-01-09_b4da565bb711.py
@@ -0,0 +1,46 @@
+"""Add UCI Columns to FS and Export
+
+Revision ID: b4da565bb711
+Revises: 043c52082a3b
+Create Date: 2024-11-28 01:09:21.241693
+
+"""
+
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "b4da565bb711"
+down_revision = "043c52082a3b"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.add_column(
+ "fuel_export",
+ sa.Column(
+ "uci",
+ sa.Numeric(precision=10, scale=2),
+ nullable=True,
+ comment="Additional Carbon Intensity",
+ ),
+ )
+ op.add_column(
+ "fuel_supply",
+ sa.Column(
+ "uci",
+ sa.Numeric(precision=10, scale=2),
+ nullable=True,
+ comment="Additional Carbon Intensity",
+ ),
+ )
+ # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust!
### + op.drop_column("fuel_supply", "uci") + op.drop_column("fuel_export", "uci") + # ### end Alembic commands ### diff --git a/backend/lcfs/db/migrations/versions/2024-12-02-22-52_aeaa26f5cdd5.py b/backend/lcfs/db/migrations/versions/2024-12-02-22-52_aeaa26f5cdd5.py new file mode 100644 index 000000000..80ab64168 --- /dev/null +++ b/backend/lcfs/db/migrations/versions/2024-12-02-22-52_aeaa26f5cdd5.py @@ -0,0 +1,38 @@ +"""Replace 'Other' with detailed description in 'level_of_equipment' table + +Revision ID: aeaa26f5cdd5 +Revises: b4da565bb711 +Create Date: 2024-12-02 22:52:12.302543 + +""" + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "aeaa26f5cdd5" +down_revision = "b4da565bb711" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # Update the 'name' column in the 'level_of_equipment' table + op.execute( + """ + UPDATE level_of_equipment + SET name = 'Other - Additional information provided in notes field' + WHERE name = 'Other' + """ + ) + + +def downgrade() -> None: + # Revert the 'name' column update in the 'level_of_equipment' table + op.execute( + """ + UPDATE level_of_equipment + SET name = 'Other' + WHERE name = 'Other - Additional information provided in notes field' + """ + ) diff --git a/backend/lcfs/db/migrations/versions/2024-12-04-23-00_8491890dd688.py b/backend/lcfs/db/migrations/versions/2024-12-04-23-00_8491890dd688.py new file mode 100644 index 000000000..d12c0a57a --- /dev/null +++ b/backend/lcfs/db/migrations/versions/2024-12-04-23-00_8491890dd688.py @@ -0,0 +1,57 @@ +"""Data Fixes + +Revision ID: 8491890dd688 +Revises: aeaa26f5cdd5 +Create Date: 2024-12-04 23:00:10.708533 + +""" + +from alembic import op +from sqlalchemy import update + +from lcfs.db.models import FuelType, AllocationTransactionType +from lcfs.db.models.fuel.FuelType import QuantityUnitsEnum + +# revision identifiers, used by Alembic. +revision = "8491890dd688" +down_revision = "aeaa26f5cdd5" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.execute( + update(FuelType) + .where(FuelType.fuel_type_id == 6) + .values(units=QuantityUnitsEnum.Kilograms) + ) + + op.execute( + update(FuelType).where(FuelType.fuel_type_id == 20).values(fossil_derived=False) + ) + + # Update 'type' and 'description' in allocation_transaction_type where allocation_transaction_type_id = 2 + op.execute( + update(AllocationTransactionType) + .where(AllocationTransactionType.allocation_transaction_type_id == 2) + .values( + type="Allocated to", + description="Fuel allocated to another supplier under an allocation agreement", + ) + ) + + # Update 'type' and 'description' in allocation_transaction_type where allocation_transaction_type_id = 1 + op.execute( + update(AllocationTransactionType) + .where(AllocationTransactionType.allocation_transaction_type_id == 1) + .values( + type="Allocated from", + description="Fuel allocated from another supplier under an allocation agreement", + ) + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + pass diff --git a/backend/lcfs/db/migrations/versions/2024-12-05-02-19_b69a33bbd135.py b/backend/lcfs/db/migrations/versions/2024-12-05-02-19_b69a33bbd135.py new file mode 100644 index 000000000..3f1477892 --- /dev/null +++ b/backend/lcfs/db/migrations/versions/2024-12-05-02-19_b69a33bbd135.py @@ -0,0 +1,34 @@ +"""Add notifications email to user_profile + +Revision ID: b69a33bbd135 +Revises: 8491890dd688 +Create Date: 2024-12-05 20:48:24.724112 + +""" + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "b69a33bbd135" +down_revision = "8491890dd688" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # Add notifications_email column to user_profile table + op.add_column( + "user_profile", + sa.Column( + "notifications_email", + sa.String(length=255), + nullable=True, + comment="Email address used for notifications", + ), + ) + + +def downgrade() -> None: + # Remove notifications_email column from user_profile table + op.drop_column("user_profile", "notifications_email") diff --git a/backend/lcfs/db/migrations/versions/2024-12-05-02-20_d4104af84f2b.py b/backend/lcfs/db/migrations/versions/2024-12-05-02-20_d4104af84f2b.py new file mode 100644 index 000000000..2ccc2ba9e --- /dev/null +++ b/backend/lcfs/db/migrations/versions/2024-12-05-02-20_d4104af84f2b.py @@ -0,0 +1,90 @@ +"""Update Notification Types and remove data + +Revision ID: d4104af84f2b +Revises: b69a33bbd135 +Create Date: 2024-12-05 02:20:33.898150 + +""" + +import sqlalchemy as sa +from alembic import op +from sqlalchemy.dialects.postgresql import ENUM + +# Revision identifiers, used by Alembic. 
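+ # For context: on PostgreSQL the batch_alter_table block below reduces to a
+ # single ALTER TABLE statement. A hedged sketch of roughly the SQL Alembic
+ # emits (illustrative only; the exact output can differ by version):
+ #
+ #   ALTER TABLE notification_type
+ #       ALTER COLUMN name TYPE VARCHAR(255) USING name::text;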
+revision = "d4104af84f2b" +down_revision = "b69a33bbd135" +branch_labels = None +depends_on = None + + +def upgrade(): + # Remove the notification types added in the previous migration + op.execute("DELETE FROM notification_type;") + + # Alter the `name` column in `notification_type` to be a VARCHAR + with op.batch_alter_table("notification_type") as batch_op: + batch_op.alter_column( + "name", + existing_type=ENUM( + "TRANSFER_PARTNER_UPDATE", + "TRANSFER_DIRECTOR_REVIEW", + "INITIATIVE_APPROVED", + "INITIATIVE_DA_REQUEST", + "SUPPLEMENTAL_REQUESTED", + "DIRECTOR_ASSESSMENT", + name="notification_type_enum_v2", + ), + type_=sa.String(length=255), + existing_nullable=False, + ) + + # Drop the old enum types + op.execute("DROP TYPE IF EXISTS notification_type_enum_v2;") + + +def downgrade(): + # First, remove all existing data that might not match the enum + op.execute("DELETE FROM notification_type;") + + # Re-create the old enum type + notification_type_enum_v2 = ENUM( + "TRANSFER_PARTNER_UPDATE", + "TRANSFER_DIRECTOR_REVIEW", + "INITIATIVE_APPROVED", + "INITIATIVE_DA_REQUEST", + "SUPPLEMENTAL_REQUESTED", + "DIRECTOR_ASSESSMENT", + name="notification_type_enum_v2", + ) + notification_type_enum_v2.create(op.get_bind(), checkfirst=False) + + # Alter the `name` column back to the old enum + with op.batch_alter_table("notification_type") as batch_op: + batch_op.alter_column( + "name", + type_=notification_type_enum_v2, + existing_type=sa.String(length=255), + postgresql_using="name::notification_type_enum_v2", + existing_nullable=False, + ) + + # Re-insert the previous notification types + op.execute( + """ + INSERT INTO notification_type (notification_type_id, name, description, email_content, create_user, update_user) + VALUES + (1, 'TRANSFER_PARTNER_UPDATE', 'Transfer partner update notification', 'Email content for transfer partner update', 'system', 'system'), + (2, 'TRANSFER_DIRECTOR_REVIEW', 'Director review notification', 'Email content for director review', 'system', 'system'), + (3, 'INITIATIVE_APPROVED', 'Initiative approved notification', 'Email content for initiative approval', 'system', 'system'), + (4, 'INITIATIVE_DA_REQUEST', 'DA request notification', 'Email content for DA request', 'system', 'system'), + (5, 'SUPPLEMENTAL_REQUESTED', 'Supplemental requested notification', 'Email content for supplemental request', 'system', 'system'), + (6, 'DIRECTOR_ASSESSMENT', 'Director assessment notification', 'Email content for director assessment', 'system', 'system'); + """ + ) + + # Reset the sequence for the id column with correct sequence name + op.execute( + """ + SELECT setval('notification_type_notification_type_id_seq', (SELECT MAX(notification_type_id) FROM notification_type)); + """ + ) diff --git a/backend/lcfs/db/migrations/versions/2024-12-06-01-23_26ab15f8ab18.py b/backend/lcfs/db/migrations/versions/2024-12-06-01-23_26ab15f8ab18.py new file mode 100644 index 000000000..5a7c88e10 --- /dev/null +++ b/backend/lcfs/db/migrations/versions/2024-12-06-01-23_26ab15f8ab18.py @@ -0,0 +1,282 @@ +"""update end use table with new values + +Revision ID: 26ab15f8ab18 +Revises: d4104af84f2b +Create Date: 2024-12-06 01:23:21.598991 + +""" + +import sqlalchemy as sa +from alembic import op +from datetime import datetime + +# revision identifiers, used by Alembic. 
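+ # The updates below build SQL with str.format(), which is workable for these
+ # trusted literal values but fragile if a name ever contains a quote. A
+ # minimal sketch of one of the same updates written with bound parameters
+ # instead (helper name is an assumption; this migration does not call it):
+ def _example_update_end_use_type(end_use_type_id, type_name):
+     # Bound parameters let the driver handle quoting and escaping.
+     op.get_bind().execute(
+         sa.text(
+             "UPDATE end_use_type "
+             "SET type = :type_name, sub_type = NULL, intended_use = true, "
+             "update_date = now(), update_user = 'no_user' "
+             "WHERE end_use_type_id = :end_use_type_id"
+         ),
+         {"type_name": type_name, "end_use_type_id": end_use_type_id},
+     )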
+revision = "26ab15f8ab18" +down_revision = "d4104af84f2b" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + current_time = datetime.now() + + # Update existing end use types 14-21 to new values + updates = [ + (14, 'Aircraft'), + (15, 'Compression-ignition engine- Marine, general'), + (16, 'Compression-ignition engine- Marine, operated within 51 to 75% of load range'), + (17, 'Compression-ignition engine- Marine, operated within 76 to 100% of load range'), + (18, 'Compression-ignition engine- Marine, with methane slip reduction kit- General'), + (19, 'Compression-ignition engine- Marine, with methane slip reduction kit- Operated within 51 to 75% of load range'), + (20, 'Compression-ignition engine- Marine, with methane slip reduction kit- Operated within 76 to 100% of load range'), + (21, 'Compression-ignition engine- Marine, unknown whether kit is installed or average operating load range') + ] + + for end_use_id, type_name in updates: + op.execute( + """ + UPDATE end_use_type + SET type = '{}', + sub_type = NULL, + intended_use = true, + update_date = '{}', + update_user = 'no_user' + WHERE end_use_type_id = {} + """.format(type_name, current_time, end_use_id) + ) + + # Update existing UCI values for IDs 1-9 + uci_updates = [ + (1, 7, 5, None, 0), # Remove end_use_type_id 14 + (2, None, 5, None, 0), # Remove fuel_type_id and end_use_type_id + (3, 7, 5, 15, 27.3), # Update with new end_use_type_id and intensity + (4, 7, 5, 16, 17.8), + (5, 7, 5, 17, 12.2), + (6, 7, 5, 18, 10.6), + (7, 7, 5, 19, 8.4), + (8, 7, 5, 20, 8.0), + (9, 7, 5, 21, 27.3) + ] + + for uci_id, fuel_type_id, uom_id, end_use_type_id, intensity in uci_updates: + if fuel_type_id and end_use_type_id: + op.execute( + """ + UPDATE additional_carbon_intensity + SET fuel_type_id = {}, + uom_id = {}, + end_use_type_id = {}, + intensity = {}, + update_date = '{}', + update_user = 'no_user' + WHERE additional_uci_id = {} + """.format(fuel_type_id, uom_id, end_use_type_id, intensity, current_time, uci_id) + ) + elif fuel_type_id: + op.execute( + """ + UPDATE additional_carbon_intensity + SET fuel_type_id = {}, + uom_id = {}, + end_use_type_id = NULL, + intensity = {}, + update_date = '{}', + update_user = 'no_user' + WHERE additional_uci_id = {} + """.format(fuel_type_id, uom_id, intensity, current_time, uci_id) + ) + else: + op.execute( + """ + UPDATE additional_carbon_intensity + SET fuel_type_id = NULL, + uom_id = {}, + end_use_type_id = NULL, + intensity = {}, + update_date = '{}', + update_user = 'no_user' + WHERE additional_uci_id = {} + """.format(uom_id, intensity, current_time, uci_id) + ) + + # Update existing EER values for IDs 14-24 + eer_updates = [ + (14, 2, 3, 10, 2.8), # Changed to Shore power + (15, 2, 3, 11, 2.4), # Changed to Trolley bus + (16, 2, 3, 2, 1.0), # Changed to Other or unknown + (17, 2, 6, 3, 1.8), # Changed to Fuel cell vehicle + (18, 2, 6, 2, 0.9), # Changed to Other or unknown + (19, 2, 13, None, 0.9), # Changed to default ratio + (20, 3, 3, None, 2.5), # Changed to default ratio + (21, 3, 11, None, 1.0), # Changed to default ratio + (22, 2, 7, 15, 1.0), # Changed to new marine type + (23, 2, 7, 16, 1.0), # Changed to new marine type + (24, 2, 7, 17, 1.0) # Changed to new marine type + ] + + for eer_id, fuel_category_id, fuel_type_id, end_use_type_id, ratio in eer_updates: + if end_use_type_id: + op.execute( + """ + UPDATE energy_effectiveness_ratio + SET fuel_category_id = {}, + fuel_type_id = {}, + end_use_type_id = {}, + ratio = {}, + update_date = '{}', + update_user = 
'no_user' + WHERE eer_id = {} + """.format(fuel_category_id, fuel_type_id, end_use_type_id, ratio, current_time, eer_id) + ) + else: + op.execute( + """ + UPDATE energy_effectiveness_ratio + SET fuel_category_id = {}, + fuel_type_id = {}, + end_use_type_id = NULL, + ratio = {}, + update_date = '{}', + update_user = 'no_user' + WHERE eer_id = {} + """.format(fuel_category_id, fuel_type_id, ratio, current_time, eer_id) + ) + + +def downgrade() -> None: + current_time = datetime.now() + + # Restore original end use types 14-21 + original_values = [ + (14, 'Marine', 'General'), + (15, 'Marine', 'Operated within 51 to 75% of load range'), + (16, 'Marine', 'Operated within 76 to 100% of load range'), + (17, 'Marine, w/ methane slip reduction kit', 'General'), + (18, 'Marine, w/ methane slip reduction kit', + 'Operated within 51 to 75% of load range'), + (19, 'Marine, w/ methane slip reduction kit', + 'Operated within 76 to 100% of load range'), + (20, 'Unknown', None), + (21, 'Aircraft', None) + ] + + for end_use_id, type_name, sub_type in original_values: + if sub_type: + op.execute( + """ + UPDATE end_use_type + SET type = '{}', + sub_type = '{}', + update_date = '{}', + update_user = 'no_user' + WHERE end_use_type_id = {} + """.format(type_name, sub_type, current_time, end_use_id) + ) + else: + op.execute( + """ + UPDATE end_use_type + SET type = '{}', + sub_type = NULL, + update_date = '{}', + update_user = 'no_user' + WHERE end_use_type_id = {} + """.format(type_name, current_time, end_use_id) + ) + + # Restore original UCI values + uci_originals = [ + (1, 7, 5, 14, 27.3), + (2, 7, 5, 15, 17.8), + (3, 7, 5, 16, 12.2), + (4, 7, 5, 17, 10.6), + (5, 7, 5, 18, 8.4), + (6, 7, 5, 19, 8.0), + (7, 7, 5, 20, 27.3), + (8, 7, 5, None, 0), + (9, None, 5, None, 0) + ] + + for uci_id, fuel_type_id, uom_id, end_use_type_id, intensity in uci_originals: + if fuel_type_id and end_use_type_id: + op.execute( + """ + UPDATE additional_carbon_intensity + SET fuel_type_id = {}, + uom_id = {}, + end_use_type_id = {}, + intensity = {}, + update_date = '{}', + update_user = 'no_user' + WHERE additional_uci_id = {} + """.format(fuel_type_id, uom_id, end_use_type_id, intensity, current_time, uci_id) + ) + elif fuel_type_id: + op.execute( + """ + UPDATE additional_carbon_intensity + SET fuel_type_id = {}, + uom_id = {}, + end_use_type_id = NULL, + intensity = {}, + update_date = '{}', + update_user = 'no_user' + WHERE additional_uci_id = {} + """.format(fuel_type_id, uom_id, intensity, current_time, uci_id) + ) + else: + op.execute( + """ + UPDATE additional_carbon_intensity + SET fuel_type_id = NULL, + uom_id = {}, + end_use_type_id = NULL, + intensity = {}, + update_date = '{}', + update_user = 'no_user' + WHERE additional_uci_id = {} + """.format(uom_id, intensity, current_time, uci_id) + ) + + # Restore original EER values + eer_originals = [ + (14, 2, 3, 14, 2.5), # Restore to Marine + (15, 2, 3, 10, 2.8), # Restore to Shore power + (16, 2, 3, 11, 2.4), # Restore to Trolley bus + (17, 2, 3, 2, 1.0), # Restore to Other or unknown + (18, 2, 6, 3, 1.8), # Restore to Fuel cell vehicle + (19, 2, 6, 2, 0.9), # Restore to Other or unknown + (20, 2, 7, 12, 1.0), # Restore to Compression-ignition engine + (21, 2, 7, 2, 0.9), # Restore to Other or unknown + (22, 2, 13, None, 0.9), # Restore to default ratio + (23, 3, 3, None, 2.5), # Restore to default ratio + (24, 3, 11, None, 1.0) # Restore to default ratio + ] + + for eer_id, fuel_category_id, fuel_type_id, end_use_type_id, ratio in eer_originals: + if end_use_type_id: + 
op.execute(
+ """
+ UPDATE energy_effectiveness_ratio
+ SET fuel_category_id = {},
+ fuel_type_id = {},
+ end_use_type_id = {},
+ ratio = {},
+ update_date = '{}',
+ update_user = 'no_user'
+ WHERE eer_id = {}
+ """.format(fuel_category_id, fuel_type_id, end_use_type_id, ratio, current_time, eer_id)
+ )
+ else:
+ op.execute(
+ """
+ UPDATE energy_effectiveness_ratio
+ SET fuel_category_id = {},
+ fuel_type_id = {},
+ end_use_type_id = NULL,
+ ratio = {},
+ update_date = '{}',
+ update_user = 'no_user'
+ WHERE eer_id = {}
+ """.format(fuel_category_id, fuel_type_id, ratio, current_time, eer_id)
+ )
diff --git a/backend/lcfs/db/migrations/versions/2024-12-06-09-59_9206124a098b.py b/backend/lcfs/db/migrations/versions/2024-12-06-09-59_9206124a098b.py
new file mode 100644
index 000000000..d12cf71d4
--- /dev/null
+++ b/backend/lcfs/db/migrations/versions/2024-12-06-09-59_9206124a098b.py
@@ -0,0 +1,37 @@
+"""Add Organization name to FSE
+
+Revision ID: 9206124a098b
+Revises: 26ab15f8ab18
+Create Date: 2024-12-04 09:59:22.876386
+
+"""
+
+from alembic import op
+import sqlalchemy as sa
+
+# revision identifiers, used by Alembic.
+revision = "9206124a098b"
+down_revision = "26ab15f8ab18"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ # Add the column 'organization_name' to 'final_supply_equipment' table with a default value
+ op.add_column(
+ "final_supply_equipment",
+ sa.Column("organization_name", sa.String(), nullable=False, server_default=""),
+ )
+
+ # Update existing rows to have the default value
+ op.execute(
+ "UPDATE final_supply_equipment SET organization_name = '' WHERE organization_name IS NULL"
+ )
+
+ # Remove the server default to prevent future rows from automatically getting the default value
+ op.alter_column("final_supply_equipment", "organization_name", server_default=None)
+
+
+def downgrade():
+ # Remove the column 'organization_name' from 'final_supply_equipment' table
+ op.drop_column("final_supply_equipment", "organization_name")
diff --git a/backend/lcfs/db/migrations/versions/2024-12-09-22-33_cd8698fe40e6.py b/backend/lcfs/db/migrations/versions/2024-12-09-22-33_cd8698fe40e6.py
new file mode 100644
index 000000000..8ec2b8223
--- /dev/null
+++ b/backend/lcfs/db/migrations/versions/2024-12-09-22-33_cd8698fe40e6.py
@@ -0,0 +1,34 @@
+"""Remove notifications email from user_profile
+
+Revision ID: cd8698fe40e6
+Revises: 9206124a098b
+Create Date: 2024-12-09 22:33:29.554360
+
+"""
+
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
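+ # For context: the 9206124a098b migration above shows the standard recipe for
+ # adding a NOT NULL column to a table that may already hold rows: create the
+ # column with a server_default so existing rows satisfy the constraint, then
+ # drop the default so future inserts must provide a value. Condensed sketch
+ # (table and column names are placeholders):
+ #
+ #   op.add_column("t", sa.Column("c", sa.String(), nullable=False, server_default=""))
+ #   op.alter_column("t", "c", server_default=None)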
+revision = "cd8698fe40e6" +down_revision = "9206124a098b" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # Remove notifications_email column from user_profile table + op.drop_column("user_profile", "notifications_email") + + +def downgrade() -> None: + # Add notifications_email column to user_profile table + op.add_column( + "user_profile", + sa.Column( + "notifications_email", + sa.String(length=255), + nullable=True, + comment="Email address used for notifications", + ), + ) diff --git a/backend/lcfs/db/migrations/versions/2024-12-09-23-31_7ae38a8413ab.py b/backend/lcfs/db/migrations/versions/2024-12-09-23-31_7ae38a8413ab.py new file mode 100644 index 000000000..51856f8c4 --- /dev/null +++ b/backend/lcfs/db/migrations/versions/2024-12-09-23-31_7ae38a8413ab.py @@ -0,0 +1,95 @@ +"""Update Fuel Types measured in volume to be other-uses + +Revision ID: 7ae38a8413ab +Revises: 26ab15f8ab18 +Create Date: 2024-12-09 19:31:18.199089 + +""" + +import sqlalchemy as sa +from alembic import op +from datetime import datetime + +# revision identifiers, used by Alembic. +revision = "7ae38a8413ab" +down_revision = "cd8698fe40e6" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + current_time = datetime.now() + + # Update the `other_uses_fossil_derived` field for all specified fuel types + op.execute( + f""" + UPDATE fuel_type + SET other_uses_fossil_derived = true, + update_date = '{current_time}', + update_user = 'no_user' + WHERE fuel_type IN ( + 'Alternative jet fuel', + 'Biodiesel', + 'Ethanol', + 'HDRD', + 'Renewable gasoline', + 'Renewable naphtha' + ) + """ + ) + + # Update the `other_uses_fossil_derived` field for all specified fuel types + op.execute( + f""" + UPDATE fuel_type + SET other_uses_fossil_derived = false, + update_date = '{current_time}', + update_user = 'no_user' + WHERE fuel_type IN ( + 'CNG', + 'Electricity', + 'Hydrogen', + 'LNG', + 'Propane' + ) + """ + ) + + +def downgrade() -> None: + current_time = datetime.now() + + # Revert the `other_uses_fossil_derived` field to false for the first set of fuel types + op.execute( + f""" + UPDATE fuel_type + SET other_uses_fossil_derived = false, + update_date = '{current_time}', + update_user = 'no_user' + WHERE fuel_type IN ( + 'Alternative jet fuel', + 'Biodiesel', + 'Ethanol', + 'HDRD', + 'Renewable gasoline', + 'Renewable naphtha' + ) + """ + ) + + # Revert the `other_uses_fossil_derived` field to true for the second set of fuel types + op.execute( + f""" + UPDATE fuel_type + SET other_uses_fossil_derived = true, + update_date = '{current_time}', + update_user = 'no_user' + WHERE fuel_type IN ( + 'CNG', + 'Electricity', + 'Hydrogen', + 'LNG', + 'Propane' + ) + """ + ) diff --git a/backend/lcfs/db/migrations/versions/2024-12-12-21-43_5d729face5ab.py b/backend/lcfs/db/migrations/versions/2024-12-12-21-43_5d729face5ab.py new file mode 100644 index 000000000..558720d38 --- /dev/null +++ b/backend/lcfs/db/migrations/versions/2024-12-12-21-43_5d729face5ab.py @@ -0,0 +1,35 @@ +"""Update default_carbon_intensity for 'Other diesel' fuel type + +Revision ID: 5d729face5ab +Revises: 7ae38a8413ab +Create Date: 2024-12-12 21:43:01.414475 +""" + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. 
+revision = "5d729face5ab" +down_revision = "7ae38a8413ab" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + op.execute( + """ + UPDATE fuel_type + SET default_carbon_intensity = 100.21 + WHERE fuel_type_id = 20 + """ + ) + + +def downgrade() -> None: + op.execute( + """ + UPDATE fuel_type + SET default_carbon_intensity = 94.38 + WHERE fuel_type_id = 20 + """ + ) diff --git a/backend/lcfs/db/migrations/versions/2024-12-17-11-23_f93546eaec61.py b/backend/lcfs/db/migrations/versions/2024-12-17-11-23_f93546eaec61.py new file mode 100644 index 000000000..4fbabc280 --- /dev/null +++ b/backend/lcfs/db/migrations/versions/2024-12-17-11-23_f93546eaec61.py @@ -0,0 +1,33 @@ +"""update notification message model + +Revision ID: f93546eaec61 +Revises: 5d729face5ab +Create Date: 2024-12-17 11:23:19.563138 + +""" + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "f93546eaec61" +down_revision = "5d729face5ab" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.add_column("notification_message", sa.Column("type", sa.Text(), nullable=False)) + op.add_column( + "notification_message", + sa.Column("related_transaction_id", sa.Text(), nullable=False), + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column("notification_message", "related_transaction_id") + op.drop_column("notification_message", "type") + # ### end Alembic commands ### diff --git a/backend/lcfs/db/migrations/versions/2024-12-17-12-25_5b374dd97469.py b/backend/lcfs/db/migrations/versions/2024-12-17-12-25_5b374dd97469.py new file mode 100644 index 000000000..3c7475040 --- /dev/null +++ b/backend/lcfs/db/migrations/versions/2024-12-17-12-25_5b374dd97469.py @@ -0,0 +1,36 @@ +"""Add legacy id to compliance reports + +Revision ID: 5b374dd97469 +Revises: f93546eaec61 +Create Date: 2024-17-13 12:25:32.076684 + +""" + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "5b374dd97469" +down_revision = "f93546eaec61" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.add_column( + "compliance_report", + sa.Column( + "legacy_id", + sa.Integer(), + nullable=True, + comment="ID from TFRS if this is a transferred application, NULL otherwise", + ), + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column("compliance_report", "legacy_id") + # ### end Alembic commands ### diff --git a/backend/lcfs/db/migrations/versions/2024-12-17-23-58_851e09cf8661.py b/backend/lcfs/db/migrations/versions/2024-12-17-23-58_851e09cf8661.py new file mode 100644 index 000000000..e7f10a2c3 --- /dev/null +++ b/backend/lcfs/db/migrations/versions/2024-12-17-23-58_851e09cf8661.py @@ -0,0 +1,62 @@ +"""Add Default CI to Categories + +Revision ID: 851e09cf8661 +Revises: 5b374dd97469 +Create Date: 2024-12-17 23:58:07.462215 + +""" + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "851e09cf8661" +down_revision = "5b374dd97469" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.add_column( + "fuel_category", + sa.Column( + "default_carbon_intensity", + sa.Numeric(precision=10, scale=2), + nullable=True, + comment="Default carbon intensity of the fuel category", + ), + ) + + # Populate default values for existing records + op.execute( + """ + UPDATE "fuel_category" SET "default_carbon_intensity" = 88.83 WHERE "description" = 'Jet fuel'; + """ + ) + op.execute( + """ + UPDATE "fuel_category" SET "default_carbon_intensity" = 100.21 WHERE "description" = 'Diesel'; + """ + ) + op.execute( + """ + UPDATE "fuel_category" SET "default_carbon_intensity" = 93.67 WHERE "description" = 'Gasoline'; + """ + ) + + # Now set the column to NOT NULL after populating defaults + op.alter_column( + "fuel_category", + "default_carbon_intensity", + existing_type=sa.Numeric(precision=10, scale=2), + nullable=False, + ) + + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column("fuel_category", "default_carbon_intensity") + # ### end Alembic commands ### diff --git a/backend/lcfs/db/migrations/versions/2024-12-20-05-17_59873cafbcd8.py b/backend/lcfs/db/migrations/versions/2024-12-20-05-17_59873cafbcd8.py new file mode 100644 index 000000000..79914e3b0 --- /dev/null +++ b/backend/lcfs/db/migrations/versions/2024-12-20-05-17_59873cafbcd8.py @@ -0,0 +1,40 @@ +"""update other diesel + +Revision ID: 59873cafbcd8 +Revises: 851e09cf8661 +Create Date: 2024-12-20 05:17:40.638826 + +""" + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "59873cafbcd8" +down_revision = "851e09cf8661" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.execute( + """ + UPDATE fuel_type + SET fossil_derived = false, other_uses_fossil_derived = false + WHERE fuel_type = 'Other diesel' + """ + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.execute( + """ + UPDATE fuel_type + SET fossil_derived = true, other_uses_fossil_derived = true + WHERE fuel_type = 'Other diesel' + """ + ) + # ### end Alembic commands ### diff --git a/backend/lcfs/db/migrations/versions/2024-12-21-00-18_5fbcb508c1be.py b/backend/lcfs/db/migrations/versions/2024-12-21-00-18_5fbcb508c1be.py new file mode 100644 index 000000000..2477b1308 --- /dev/null +++ b/backend/lcfs/db/migrations/versions/2024-12-21-00-18_5fbcb508c1be.py @@ -0,0 +1,40 @@ +"""LNG end use naming change + +Revision ID: 5fbcb508c1be +Revises: 59873cafbcd8 +Create Date: 2024-12-21 00:18:12.324520 + +""" + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "5fbcb508c1be" +down_revision = "59873cafbcd8" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.execute( + """ + UPDATE end_use_type + SET type = 'Compression-ignition engine- Marine, with methane slip reduction kit- Operated within 26 to 75% of load range' + WHERE end_use_type_id = 19 + """ + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.execute( + """ + UPDATE end_use_type + SET type = 'Compression-ignition engine- Marine, with methane slip reduction kit- Operated within 51 to 75% of load range' + WHERE end_use_type_id = 19 + """ + ) + # ### end Alembic commands ### diff --git a/backend/lcfs/db/models/Category.py b/backend/lcfs/db/models/Category.py deleted file mode 100644 index 60a23591d..000000000 --- a/backend/lcfs/db/models/Category.py +++ /dev/null @@ -1,22 +0,0 @@ -from sqlalchemy import Column, Integer, String -from sqlalchemy.orm import relationship -from sqlalchemy import UniqueConstraint -from lcfs.db.base import BaseModel, Auditable, EffectiveDates - -class Category(BaseModel, Auditable, EffectiveDates): - __tablename__ = 'category' - __table_args__ = (UniqueConstraint('category_id'), - {'comment': "Transfer Category"} - ) - - - category_id = Column(Integer, primary_key=True, autoincrement=True, comment="Unique identifier for the transfer category") - category = Column(String(500), comment="Transfer category") - - transfer = relationship('Transfer', back_populates='category') - transfer_history = relationship('TransferHistory', back_populates='transfer_category') - - - def __repr__(self): - return self.category - diff --git a/backend/lcfs/db/models/Comment.py b/backend/lcfs/db/models/Comment.py deleted file mode 100644 index 5e5b4d216..000000000 --- a/backend/lcfs/db/models/Comment.py +++ /dev/null @@ -1,22 +0,0 @@ -from sqlalchemy import Column, Integer, String -from sqlalchemy.orm import relationship -from sqlalchemy import UniqueConstraint -from lcfs.db.base import BaseModel, Auditable, EffectiveDates - -class Comment(BaseModel, Auditable, EffectiveDates): - __tablename__ = 'comment' - __table_args__ = (UniqueConstraint('comment_id'), - {'comment': "Comment for transaction"} - ) - - - comment_id = Column(Integer, primary_key=True, autoincrement=True, comment="Unique identifier for comment") - comment = Column(String(500), comment="Transfer category") - - transfer = relationship('Transfer', back_populates='comments') - issuance = relationship('Issuance', back_populates='comments') - - - def __repr__(self): - return self.comment - diff --git a/backend/lcfs/db/models/Issuance.py b/backend/lcfs/db/models/Issuance.py deleted file mode 100644 index 9fca76ca8..000000000 --- a/backend/lcfs/db/models/Issuance.py +++ /dev/null @@ -1,28 +0,0 @@ -from sqlalchemy import Column, Integer, BigInteger, ForeignKey, DateTime -from sqlalchemy.orm import relationship -from sqlalchemy import UniqueConstraint -from lcfs.db.base import BaseModel, Auditable, EffectiveDates - -class Issuance(BaseModel, Auditable, EffectiveDates): - __tablename__ = 'issuance' - __table_args__ = (UniqueConstraint('issuance_id'), - {'comment': "Goverment to organization compliance units issuance"} - ) - - - issuance_id = Column(Integer, primary_key=True, autoincrement=True, comment="Unique identifier for the issuance") - compliance_units = Column(BigInteger, comment="Compliance Units") - transaction_effective_date = Column(DateTime, comment='Transaction effective date') - # compliance_period = Column(Integer ) - organization_id = Column(Integer, ForeignKey('organization.organization_id')) - transaction_id = Column(Integer, ForeignKey('transaction.transaction_id')) - comment_id = Column(Integer, ForeignKey('comment.comment_id')) - - organization = relationship('Organization', back_populates='issuances') - transaction = relationship('Transaction') - comments = relationship('Comment', back_populates='issuance') - issuance_history_records = 
relationship('IssuanceHistory', back_populates='issuance') - - def __repr__(self): - return self.compliance_units - diff --git a/backend/lcfs/db/models/IssuanceHistory.py b/backend/lcfs/db/models/IssuanceHistory.py deleted file mode 100644 index 8813e2373..000000000 --- a/backend/lcfs/db/models/IssuanceHistory.py +++ /dev/null @@ -1,27 +0,0 @@ -from sqlalchemy import Column, Integer, BigInteger, ForeignKey, DateTime -from sqlalchemy.orm import relationship -from sqlalchemy import UniqueConstraint -from lcfs.db.base import BaseModel, Auditable, EffectiveDates - -class IssuanceHistory(BaseModel, Auditable, EffectiveDates): - __tablename__ = 'issuance_history' - __table_args__ = (UniqueConstraint('issuance_history_id'), - {'comment': "History record for issuance from governmnent to Organization"} - ) - - issuance_history_id = Column(Integer, primary_key=True, autoincrement=True, comment="Unique identifier for the issuance") - compliance_units = Column(BigInteger, comment='Issued compliance units record') - issuance_id = Column(Integer, ForeignKey('issuance.issuance_id')) - organization_id = Column(Integer, ForeignKey('organization.organization_id')) - transaction_id = Column(Integer, ForeignKey('transaction.transaction_id')) - transaction_effective_date = Column(DateTime, comment="Transaction Effective date") - # compliance_period = Column(Integer, ) - - issuance = relationship('Issuance', back_populates='issuance_history_records') - transaction = relationship('Transaction', back_populates='issuance_history_record') - organization = relationship('Organization') - # issuance_status = relationship('IssuanceStatus') Missing table - - def __repr__(self): - return self.compliance_units - diff --git a/backend/lcfs/db/models/NotificationChannel.py b/backend/lcfs/db/models/NotificationChannel.py deleted file mode 100644 index 5479b0452..000000000 --- a/backend/lcfs/db/models/NotificationChannel.py +++ /dev/null @@ -1,19 +0,0 @@ -import enum -from lcfs.db.base import BaseModel, Auditable -from sqlalchemy import Column, Integer, Enum, Boolean -from sqlalchemy.orm import relationship - -class ChannelEnum(enum.Enum): - EMAIL = "Email" - IN_APP = "In-Application" - -class NotificationChannel(BaseModel, Auditable): - __tablename__ = 'notification_channel' - __table_args__ = {'comment': "Tracks the state and defaults for communication channels"} - - notification_channel_id = Column(Integer, primary_key=True, autoincrement=True) - channel_name = Column(Enum(ChannelEnum, name="channel_enum", create_type=True), nullable=False) - enabled = Column(Boolean, default=False) - subscribe_by_default = Column(Boolean, default=False) - - notification_channel_subscriptions = relationship('NotificationChannelSubscription', back_populates='notification_channel') diff --git a/backend/lcfs/db/models/NotificationChannelSubscription.py b/backend/lcfs/db/models/NotificationChannelSubscription.py deleted file mode 100644 index 24c312f67..000000000 --- a/backend/lcfs/db/models/NotificationChannelSubscription.py +++ /dev/null @@ -1,19 +0,0 @@ -from lcfs.db.base import BaseModel, Auditable -from sqlalchemy import Column, Integer, ForeignKey, Boolean -from sqlalchemy.orm import relationship - -class NotificationChannelSubscription(BaseModel, Auditable): - __tablename__ = 'notification_channel_subscription' - __table_args__ = {'comment': "Represents a user's subscription to notification events"} - - notification_channel_subscription_id = Column(Integer, primary_key=True, autoincrement=True) - - is_enabled = Column(Boolean, 
default=False) - - user_profile_id = Column(Integer, ForeignKey('user_profile.user_profile_id')) - notification_type_id = Column(Integer, ForeignKey('notification_type.notification_type_id')) - notification_channel_id = Column(Integer, ForeignKey('notification_channel.notification_channel_id')) - - user_profile = relationship('UserProfile', back_populates='notification_channel_subscriptions') - notification_type = relationship('NotificationType') - notification_channel = relationship('NotificationChannel', back_populates='notification_channel_subscriptions') \ No newline at end of file diff --git a/backend/lcfs/db/models/NotificationMessage.py b/backend/lcfs/db/models/NotificationMessage.py deleted file mode 100644 index 80ffe6c9f..000000000 --- a/backend/lcfs/db/models/NotificationMessage.py +++ /dev/null @@ -1,39 +0,0 @@ -from sqlalchemy import Column, Integer, ForeignKey, Boolean -from sqlalchemy.orm import relationship -from lcfs.db.base import BaseModel, Auditable -# from lcfs.db.models.NotificationType import NotificationType -from lcfs.db.models.Organization import Organization # Adjust according to your project structure - -class NotificationMessage(BaseModel, Auditable): - __tablename__ = 'notification_message' - __table_args__ = {'comment': "Represents a notification message sent to an application user"} - - notification_message_id = Column(Integer, primary_key=True, autoincrement=True) - - is_read = Column(Boolean, default=False) - is_warning = Column(Boolean, default=False) - is_error = Column(Boolean, default=False) - is_archived = Column(Boolean, default=False) - - related_organization_id = Column(Integer, ForeignKey('organization.organization_id')) - origin_user_profile_id = Column(Integer, ForeignKey('user_profile.user_profile_id')) - related_user_profile_id = Column(Integer, ForeignKey('user_profile.user_profile_id')) - notification_type_id = Column(Integer, ForeignKey('notification_type.notification_type_id')) - - # Models not created yet - # related_transaction_id = Column(Integer,ForeignKey('')) - # related_document_id = Column(Integer, ForeignKey('document.id')) - # related_report_id = Column(Integer, ForeignKey('compliance_report.id')) - - related_organization = relationship('Organization') - notification_type = relationship('NotificationType') - origin_user_profile = relationship( - 'UserProfile', - foreign_keys=[origin_user_profile_id], - back_populates='originated_notifications' - ) - related_user_profile = relationship( - 'UserProfile', - foreign_keys=[related_user_profile_id], - back_populates='notification_messages' - ) \ No newline at end of file diff --git a/backend/lcfs/db/models/NotificationType.py b/backend/lcfs/db/models/NotificationType.py deleted file mode 100644 index bbf24c6de..000000000 --- a/backend/lcfs/db/models/NotificationType.py +++ /dev/null @@ -1,77 +0,0 @@ -import enum -from lcfs.db.base import BaseModel, Auditable -from sqlalchemy import Column, Integer, Enum, Text - -class NotificationTypeEnum(enum.Enum): - CREDIT_TRANSFER_CREATED = "Credit Transfer Proposal Created" - CREDIT_TRANSFER_SIGNED_1OF2 = "Credit Transfer Proposal Signed 1/2" - CREDIT_TRANSFER_SIGNED_2OF2 = "Credit Transfer Proposal Signed 2/2" - CREDIT_TRANSFER_PROPOSAL_REFUSED = "Credit Transfer Proposal Refused" - CREDIT_TRANSFER_PROPOSAL_ACCEPTED = "Credit Transfer Proposal Accepted" - CREDIT_TRANSFER_RECOMMENDED_FOR_APPROVAL = \ - "Credit Transfer Proposal Recommended For Approval" - CREDIT_TRANSFER_RECOMMENDED_FOR_DECLINATION = \ - "Credit Transfer Proposal Recommended For 
Declination" - CREDIT_TRANSFER_DECLINED = "Credit Transfer Proposal Declined" - CREDIT_TRANSFER_APPROVED = "Credit Transfer Proposal Approved" - CREDIT_TRANSFER_RESCINDED = "Credit Transfer Proposal Rescinded" - CREDIT_TRANSFER_COMMENT = \ - "Credit Transfer Proposal Comment Created Or Updated" - CREDIT_TRANSFER_INTERNAL_COMMENT = \ - "Credit Transfer Proposal Internal Comment Created Or Updated" - - PVR_CREATED = "PVR Created" - PVR_RECOMMENDED_FOR_APPROVAL = "PVR Recommended For Approval" - PVR_RESCINDED = "PVR Rescinded" - PVR_PULLED_BACK = "PVR Pulled Back" - PVR_DECLINED = "PVR Declined" - PVR_APPROVED = "PVR Approved" - PVR_COMMENT = "PVR Comment Created Or Updated" - PVR_INTERNAL_COMMENT = "PVR Internal Comment Created Or Updated" - PVR_RETURNED_TO_ANALYST = "PVR Returned to Analyst" - - DOCUMENT_PENDING_SUBMISSION = "Document Pending Submission" - DOCUMENT_SUBMITTED = "Document Submitted" - DOCUMENT_SCAN_FAILED = "Document Security Scan Failed" - DOCUMENT_RECEIVED = "Document Received" - DOCUMENT_ARCHIVED = "Document Archived" - - COMPLIANCE_REPORT_DRAFT = "Compliance Report Draft Saved" - COMPLIANCE_REPORT_SUBMITTED = "Compliance Report Submitted" - COMPLIANCE_REPORT_RECOMMENDED_FOR_ACCEPTANCE_ANALYST = \ - "Compliance Report Recommended for Acceptance - Analyst" - COMPLIANCE_REPORT_RECOMMENDED_FOR_REJECTION_ANALYST = \ - "Compliance Report Recommended for Rejection - Analyst" - COMPLIANCE_REPORT_RECOMMENDED_FOR_ACCEPTANCE_MANAGER = \ - "Compliance Report Recommended for Acceptance - Manager" - COMPLIANCE_REPORT_RECOMMENDED_FOR_REJECTION_MANAGER = \ - "Compliance Report Recommended for Rejection - Manager" - COMPLIANCE_REPORT_ACCEPTED = "Compliance Report Accepted" - COMPLIANCE_REPORT_REJECTED = "Compliance Report Rejected" - COMPLIANCE_REPORT_REQUESTED_SUPPLEMENTAL = \ - "Compliance Report Requested Supplemental" - - EXCLUSION_REPORT_DRAFT = "Exclusion Report Draft Saved" - EXCLUSION_REPORT_SUBMITTED = "Exclusion Report Submitted" - EXCLUSION_REPORT_RECOMMENDED_FOR_ACCEPTANCE_ANALYST = \ - "Exclusion Report Recommended for Acceptance - Analyst" - EXCLUSION_REPORT_RECOMMENDED_FOR_REJECTION_ANALYST = \ - "Exclusion Report Recommended for Rejection - Analyst" - EXCLUSION_REPORT_RECOMMENDED_FOR_ACCEPTANCE_MANAGER = \ - "Exclusion Report Recommended for Acceptance - Manager" - EXCLUSION_REPORT_RECOMMENDED_FOR_REJECTION_MANAGER = \ - "Exclusion Report Recommended for Rejection - Manager" - EXCLUSION_REPORT_ACCEPTED = "Exclusion Report Accepted" - EXCLUSION_REPORT_REJECTED = "Exclusion Report Rejected" - EXCLUSION_REPORT_REQUESTED_SUPPLEMENTAL = \ - "Exclusion Report Requested Supplemental" - - -class NotificationType(BaseModel, Auditable): - __tablename__ = 'notification_type' - __table_args__ = {'comment': "Represents a Notification type"} - - notification_type_id = Column(Integer, primary_key=True, autoincrement=True) - name = Column(Enum(NotificationTypeEnum, name="notification_type_enum", create_type=True), nullable=False) - description = Column(Text, nullable=True) - email_content = Column(Text) \ No newline at end of file diff --git a/backend/lcfs/db/models/Organization.py b/backend/lcfs/db/models/Organization.py deleted file mode 100644 index 289d83167..000000000 --- a/backend/lcfs/db/models/Organization.py +++ /dev/null @@ -1,29 +0,0 @@ -import enum - -from sqlalchemy import Column, Integer, String, ForeignKey -from sqlalchemy.orm import relationship - -from lcfs.db.base import BaseModel, Auditable, EffectiveDates - -class Organization(BaseModel, Auditable, EffectiveDates): - 
__tablename__ = 'organization' - __table_args__ = {'comment': "Contains a list of all of the recognized Part 3 " - "fuel suppliers, both past and present, as well as " - "an entry for the government which is also " - "considered an organization."} - - organization_id = Column(Integer, primary_key=True, autoincrement=True, comment="Unique identifier for the organization") - name = Column(String(500), comment="Organization's legal name") - - organization_status_id = Column(Integer, ForeignKey('organization_status.organization_status_id')) - organization_type_id = Column(Integer, ForeignKey('organization_type.organization_type_id'), comment="Organization's type") - organization_address_id = Column(Integer, ForeignKey('organization_address.organization_address_id')) - organization_attorney_address_id = Column(Integer, ForeignKey('organization_attorney_address.organization_attorney_address_id')) - - org_type = relationship('OrganizationType', back_populates='organizations') - org_status = relationship('OrganizationStatus', back_populates='organizations') - org_address = relationship('OrganizationAddress', back_populates='organization') - org_attorney_address = relationship('OrganizationAttorneyAddress', back_populates='organization') - user_profiles = relationship('UserProfile', back_populates='organization') - transactions = relationship('Transaction', back_populates='organization') - issuances = relationship('Issuance', back_populates='organization') diff --git a/backend/lcfs/db/models/OrganizationStatus.py b/backend/lcfs/db/models/OrganizationStatus.py deleted file mode 100644 index 70622c500..000000000 --- a/backend/lcfs/db/models/OrganizationStatus.py +++ /dev/null @@ -1,20 +0,0 @@ -from sqlalchemy import Column, Integer, String, Enum -from sqlalchemy.orm import relationship -import enum -from lcfs.db.base import BaseModel, Auditable, DisplayOrder - -class OrgStatusEnum(enum.Enum): - Unregistered = "Unregistered" - Registered = "Registered" - Suspended = "Suspended" - Canceled = "Canceled" - -class OrganizationStatus(BaseModel, Auditable,DisplayOrder): - __tablename__ = 'organization_status' - __table_args__ = {'comment': "Contains list of organization type"} - - organization_status_id = Column(Integer, primary_key=True, autoincrement=True, comment="Unique identifier for the organization") - status = Column(Enum(OrgStatusEnum, name="org_status_enum", create_type=True), default=OrgStatusEnum.Unregistered, comment="Organization's status") - description = Column(String(500), nullable=True, comment="Organization description") - - organizations = relationship('Organization', back_populates='org_status') \ No newline at end of file diff --git a/backend/lcfs/db/models/OrganizationType.py b/backend/lcfs/db/models/OrganizationType.py deleted file mode 100644 index 9a28cff77..000000000 --- a/backend/lcfs/db/models/OrganizationType.py +++ /dev/null @@ -1,20 +0,0 @@ -from sqlalchemy import Column, Integer, String, Enum -from sqlalchemy.orm import relationship -from lcfs.db.base import BaseModel, Auditable, DisplayOrder -import enum - -class OrgTypeEnum(enum.Enum): - fuel_supplier = "Fuel Supplier" - electricity_supplier = "Electricity Supplier" - broker = "Broker" - utilities = "Utilities (local or public)" - -class OrganizationType(BaseModel, Auditable, DisplayOrder): - - __tablename__ = 'organization_type' - __table_args__ = {'comment': "Represents a Organization types"} - - organization_type_id = Column(Integer, primary_key=True, autoincrement=True, nullable=False, comment="Unique identifier for 
the organization_type") - org_type = Column(Enum(OrgTypeEnum, name="org_type_enum", create_type=True), default=OrgTypeEnum.fuel_supplier, comment="Organization's Types") - description = Column(String(500), nullable=True, comment="Organization Types") - organizations = relationship('Organization', back_populates='org_type') diff --git a/backend/lcfs/db/models/Role.py b/backend/lcfs/db/models/Role.py deleted file mode 100644 index 61fe4ba79..000000000 --- a/backend/lcfs/db/models/Role.py +++ /dev/null @@ -1,48 +0,0 @@ -from sqlalchemy import Column, Integer, String, Boolean, UniqueConstraint, Enum -from sqlalchemy.orm import relationship -from lcfs.db.base import Auditable, BaseModel -import enum - -class RoleEnum(enum.Enum): - ADMINISTRATOR = 'Administrator' - ANALYST = 'Analyst' - COMPLIANCE_MANAGER = 'Compliance Manager' - DIRECTOR = 'Director' - - MANAGE_USERS = 'Manage Users' - TRANSFER = 'Transfer' - COMPLIANCE_REPORTING = 'Compliance Reporting' - SIGNING_AUTHORITY = 'Signing Authority' - READ_ONLY = 'Read Only' - - -class Role(BaseModel, Auditable): - __tablename__ = 'role' - __table_args__ = ( - UniqueConstraint('name'), - {'comment': 'To hold all the available roles and their descriptions.'} - ) - role_id = Column(Integer, primary_key=True, autoincrement=True) - name = Column(Enum(RoleEnum, name="role_enum", create_type=True), unique=True, nullable=False, - comment="Role code. Natural key. Used internally. eg Admin, GovUser, GovDirector, etc") - description = Column(String(1000), - comment="Descriptive text explaining this role. This is what's shown to the user.") - is_government_role = Column(Boolean, default=False, - comment="Flag. True if this is a government role (eg. Analyst, Administrator)") - display_order = Column(Integer, comment="Relative rank in display sorting order") - - user_roles = relationship('UserRole', back_populates='role') - - def __repr__(self): - return '' % self.name - - def __str__(self): - return self.name - - def to_dict(self): - return { - 'id': self.id, - 'name': self.name, - 'description': self.description, - 'isGovernmentRole': self.is_government_role - } diff --git a/backend/lcfs/db/models/Transaction.py b/backend/lcfs/db/models/Transaction.py deleted file mode 100644 index 68f7d7e3c..000000000 --- a/backend/lcfs/db/models/Transaction.py +++ /dev/null @@ -1,25 +0,0 @@ -from sqlalchemy import Column, Integer, BigInteger, ForeignKey -from sqlalchemy.orm import relationship -from sqlalchemy import UniqueConstraint -from lcfs.db.base import BaseModel, Auditable, EffectiveDates - -class Transaction(BaseModel,Auditable, EffectiveDates): - __tablename__ = 'transaction' - __table_args__ = (UniqueConstraint('transaction_id'), - {'comment': "Contains a list of all of the government to organization and Organization to Organization transaction."} - ) - - - transaction_id = Column(Integer, primary_key=True, autoincrement=True, comment="Unique identifier for the transactions") - compliance_units = Column(BigInteger, comment="Compliance Units") - transaction_type_id = Column(Integer, ForeignKey('transaction_type.transaction_type_id')) - organization_id = Column(Integer, ForeignKey('organization.organization_id')) - - organization = relationship('Organization', back_populates='transactions') - transaction_type = relationship('TransactionType') - issuance_history_record = relationship('IssuanceHistory', back_populates='transaction') - transfer_history_record = relationship('TransferHistory', back_populates='transaction') - - def __repr__(self): - return 
self.compliance_units - diff --git a/backend/lcfs/db/models/TransactionType.py b/backend/lcfs/db/models/TransactionType.py deleted file mode 100644 index 970d95cfe..000000000 --- a/backend/lcfs/db/models/TransactionType.py +++ /dev/null @@ -1,21 +0,0 @@ -from sqlalchemy import Column, Integer, Enum -from sqlalchemy.orm import relationship -from sqlalchemy import UniqueConstraint -from lcfs.db.base import BaseModel, Auditable, DisplayOrder -import enum - -class TransactionTypeEnum(enum.Enum): - administrative_adjustment = "Administrative Adjustment" - initiative_agreement = "Initiative Agreement" - assessment = "Assessment" - transfer = "Transfer" - -class TransactionType(BaseModel, Auditable, DisplayOrder): - - __tablename__ = 'transaction_type' - __table_args__ = {'comment': "Represents a Transaction types"} - - transaction_type_id = Column(Integer, primary_key=True, autoincrement=True) - type = Column(Enum(TransactionTypeEnum, name="transaction_type_enum", create_type=True), comment="Transaction Types") - - transactions = relationship('Transaction', back_populates='transaction_type') diff --git a/backend/lcfs/db/models/Transfer.py b/backend/lcfs/db/models/Transfer.py deleted file mode 100644 index ff898fde5..000000000 --- a/backend/lcfs/db/models/Transfer.py +++ /dev/null @@ -1,39 +0,0 @@ -from sqlalchemy import Column, Integer, ForeignKey, DateTime -from sqlalchemy.orm import relationship -from sqlalchemy import UniqueConstraint -from lcfs.db.base import BaseModel, Auditable, EffectiveDates - -class Transfer(BaseModel, Auditable, EffectiveDates): - __tablename__ = 'transfer' - __table_args__ = (UniqueConstraint('transfer_id'), - {'comment': "Records of tranfer from Organization to Organization"} - ) - - - transfer_id = Column(Integer, primary_key=True, autoincrement=True, comment="Unique identifier for the org to org transfer record") - from_organization_id = Column(Integer, ForeignKey('organization.organization_id')) - to_organization_id = Column(Integer, ForeignKey('organization.organization_id')) - transaction_id = Column(Integer, ForeignKey('transaction.transaction_id')) - transaction_effective_date = Column(DateTime, comment="transaction effective date") - # compliance_period = Column(Integer, ) - comment_id = Column(Integer, ForeignKey('comment.comment_id')) - transfer_status_id = Column(Integer, ForeignKey('transfer_status.transfer_status_id')) - transfer_category_id = Column(Integer, ForeignKey('category.category_id')) - - category = relationship('Category') - transaction = relationship('Transaction') - transfer_status = relationship('TransferStatus') - comments = relationship('Comment', back_populates='transfer') - transfer_history_records = relationship('TransferHistory', back_populates='transfer') - - from_organization = relationship( - 'Organization', - foreign_keys=[from_organization_id] - ) - to_organization = relationship( - 'Organization', - foreign_keys=[to_organization_id] - ) - - - diff --git a/backend/lcfs/db/models/TransferHistory.py b/backend/lcfs/db/models/TransferHistory.py deleted file mode 100644 index bbfc66c25..000000000 --- a/backend/lcfs/db/models/TransferHistory.py +++ /dev/null @@ -1,34 +0,0 @@ -from sqlalchemy import Column, Integer, ForeignKey, DateTime -from sqlalchemy.orm import relationship -from lcfs.db.base import BaseModel, Auditable, EffectiveDates - -class TransferHistory(BaseModel, Auditable, EffectiveDates): - __tablename__ = 'transfer_history' - __table_args__ = {'comment': "Records of tranfer from Organization to Organization"} - - - 
transfer_history_id = Column(Integer, primary_key=True, autoincrement=True, comment="Unique identifier for the org to org transfer record") - transfer_id = Column(Integer, ForeignKey('transfer.transfer_id')) - from_organization_id = Column(Integer, ForeignKey('organization.organization_id')) - to_organization_id = Column(Integer, ForeignKey('organization.organization_id')) - transaction_id = Column(Integer, ForeignKey('transaction.transaction_id')) - transaction_effective_date = Column(DateTime, comment="Transaction effective date") - # compliance_period = Column(Integer, ) - transfer_status_id = Column(Integer, ForeignKey('transfer_status.transfer_status_id')) - transfer_category_id = Column(Integer, ForeignKey('category.category_id')) - - transfer = relationship('Transfer', back_populates='transfer_history_records') - transaction = relationship('Transaction', back_populates='transfer_history_record') - transfer_category = relationship("Category") - transfer_status = relationship('TransferStatus') - - from_organization = relationship( - 'Organization', - foreign_keys=[from_organization_id] - ) - to_organization = relationship( - 'Organization', - foreign_keys=[to_organization_id] - ) - - diff --git a/backend/lcfs/db/models/TransferStatus.py b/backend/lcfs/db/models/TransferStatus.py deleted file mode 100644 index 42f7eb417..000000000 --- a/backend/lcfs/db/models/TransferStatus.py +++ /dev/null @@ -1,22 +0,0 @@ -from sqlalchemy import Column, Integer, Enum -from lcfs.db.base import BaseModel, Auditable, DisplayOrder -import enum - -class TransferStatusEnum(enum.Enum): - draft = "Draft" # Created by Org - deleted = "Deleted" # Deleted by Org - sent = "Sent" # Sent to Org - submitted = "Submitted" # Submitted to gov by organization.. - recommended = "Recommended" # Analyst makes recommendation to record or refuse the transfer - recorded = "Recorded" # Recorded - Approved by Director - refused = "Refused" # Refused - Declined by director - declined = "Declined" # Declined - declined by Organization - rescinded = "Rescinded" # Rescinded - Cancelled by Organization - -class TransferStatus(BaseModel, Auditable, DisplayOrder): - - __tablename__ = 'transfer_status' - __table_args__ = {'comment': "Represents a Transfer Status"} - - transfer_status_id = Column(Integer, primary_key=True, autoincrement=True) - status = Column(Enum(TransferStatusEnum, name="transfer_type_enum", create_type=True), comment="Transfer Status") diff --git a/backend/lcfs/db/models/UserLoginHistory.py b/backend/lcfs/db/models/UserLoginHistory.py deleted file mode 100644 index 560774fbd..000000000 --- a/backend/lcfs/db/models/UserLoginHistory.py +++ /dev/null @@ -1,13 +0,0 @@ -from sqlalchemy import Column, String, Boolean, Integer -from lcfs.db.base import BaseModel - -class UserLoginHistory(BaseModel): - __tablename__ = 'user_login_history' - __table_args__ = {'comment': 'Keeps track of all user login attempts'} - - user_login_history_id = Column(Integer, primary_key=True) - keycloak_email = Column(String, nullable=False, comment="Keycloak email address to associate on first login.") - external_username = Column(String(150), nullable=True, comment="BCeID or IDIR username") - keycloak_user_id = Column(String(150), nullable=True, comment="This is the unique id returned from Keycloak and is the main mapping key between the LCFS user and the Keycloak user. The identity provider type will be appended as a suffix after an @ symbol. For ex. 
asdf1234@bceidbasic or asdf1234@idir") - is_login_successful = Column(Boolean, default=False, comment="True if this login attempt was successful, false on failure.") - login_error_message = Column(String(500), nullable=True, comment='Error text on unsuccessful login attempt.') diff --git a/backend/lcfs/db/models/UserProfile.py b/backend/lcfs/db/models/UserProfile.py deleted file mode 100644 index 0cedae90b..000000000 --- a/backend/lcfs/db/models/UserProfile.py +++ /dev/null @@ -1,43 +0,0 @@ -from lcfs.db.base import Auditable, BaseModel -from sqlalchemy import Column, Integer, String, ForeignKey, UniqueConstraint -from sqlalchemy.orm import relationship -from lcfs.db.models.NotificationMessage import NotificationMessage - -class UserProfile(BaseModel, Auditable): - __tablename__ = 'user_profile' - __table_args__ = ( - UniqueConstraint('username'), - {'comment': 'Users who may access the application'} - ) - - user_profile_id = Column(Integer, primary_key=True, autoincrement=True) - - keycloak_user_id = Column(String(150), nullable=True, comment='Unique id returned from Keycloak') - keycloak_email = Column(String(255), nullable=True, comment='keycloak email address') - keycloak_username = Column(String(150), unique=True, nullable=False, - comment='keycloak Username') - email = Column(String(255), nullable=True, comment='Primary email address') - username = Column(String(150), unique=True, nullable=False, - comment='Login Username') - display_name = Column('display_name', String(500), nullable=True, - comment='Displayed name for user') - title = Column(String(100), nullable=True, comment='Professional Title') - phone = Column(String(50), nullable=True, comment='Primary phone number') - mobile_phone = Column(String(50), nullable=True, comment='Mobile phone number') - organization_id = Column(Integer, ForeignKey('organization.organization_id')) - - organization = relationship('Organization', back_populates='user_profiles') - user_roles = relationship('UserRole', back_populates='user_profile') - - notification_channel_subscriptions = relationship('NotificationChannelSubscription', back_populates='user_profile') - - originated_notifications = relationship( - 'NotificationMessage', - foreign_keys=[NotificationMessage.origin_user_profile_id], - back_populates='origin_user_profile' - ) - notification_messages = relationship( - 'NotificationMessage', - foreign_keys=[NotificationMessage.related_user_profile_id], - back_populates='related_user_profile' - ) diff --git a/backend/lcfs/db/models/UserRole.py b/backend/lcfs/db/models/UserRole.py deleted file mode 100644 index 99ec9e2a6..000000000 --- a/backend/lcfs/db/models/UserRole.py +++ /dev/null @@ -1,19 +0,0 @@ -from sqlalchemy import Column, ForeignKey, Integer, text, UniqueConstraint -from sqlalchemy.orm import relationship -from sqlalchemy.dialects.postgresql import UUID - -from lcfs.db.base import Auditable, BaseModel - -class UserRole(BaseModel, Auditable): - __tablename__ = 'user_role' - __table_args__ = ( - UniqueConstraint('user_profile_id', 'role_id', name='user_role_unique_constraint'), - {'comment': 'Contains the user and role relationships'} - ) - # Columns - user_role_id = Column(Integer, primary_key=True, autoincrement=True, comment='Unique ID for the user role') - user_profile_id = Column(Integer, ForeignKey('user_profile.user_profile_id'), comment='Foreign key to user_profile') - role_id = Column(Integer, ForeignKey('role.role_id'), comment='Foreign key to role') - # Relationships - user_profile = relationship('UserProfile', 
back_populates='user_roles') - role = relationship('Role', back_populates='user_roles') diff --git a/backend/lcfs/db/models/__init__.py b/backend/lcfs/db/models/__init__.py index b5c6a77df..e52da7ec7 100644 --- a/backend/lcfs/db/models/__init__.py +++ b/backend/lcfs/db/models/__init__.py @@ -1,16 +1,18 @@ """lcfs models.""" + from pathlib import Path import pkgutil + def load_all_models() -> None: """Load all models from this folder, loading specific models first.""" # Models that need to be loaded first priority_models = [ - 'OrganizationStatus', - 'Organization', - 'UserProfile', - 'Role' + "organization.OrganizationStatus", + "organization.Organization", + "user.UserProfile", + "user.Role", ] # Load priority models @@ -29,27 +31,15 @@ def load_all_models() -> None: __import__(module.name) # noqa: WPS421 -from . import OrganizationType -from . import OrganizationStatus -from . import OrganizationAddress -from . import OrganizationAttorneyAddress -from . import OrganizationStatus -from . import OrganizationBalance -from . import Organization -from . import Role -from . import UserProfile -from . import UserRole -from . import UserLoginHistory -from . import Category -from . import Comment -from . import NotificationChannel -from . import NotificationType -from . import NotificationChannelSubscription -from . import NotificationMessage -from . import Transaction -from . import TransactionType -from . import Issuance -from . import IssuanceHistory -from . import Transfer -from . import TransferHistory -from . import TransferStatus \ No newline at end of file +# Explicit model imports from subdirectories +from .admin_adjustment import * +from .comment import * +from .compliance import * +from .document import * +from .fuel import * +from .initiative_agreement import * +from .notification import * +from .organization import * +from .transaction import * +from .transfer import * +from .user import * diff --git a/backend/lcfs/db/models/admin_adjustment/AdminAdjustment.py b/backend/lcfs/db/models/admin_adjustment/AdminAdjustment.py new file mode 100644 index 000000000..49443d9d2 --- /dev/null +++ b/backend/lcfs/db/models/admin_adjustment/AdminAdjustment.py @@ -0,0 +1,42 @@ +from sqlalchemy import Column, Integer, BigInteger, ForeignKey, DateTime, String +from sqlalchemy.orm import relationship +from sqlalchemy import UniqueConstraint +from lcfs.db.base import BaseModel, Auditable, EffectiveDates + + +class AdminAdjustment(BaseModel, Auditable, EffectiveDates): + __tablename__ = "admin_adjustment" + __table_args__ = ( + UniqueConstraint("admin_adjustment_id"), + {"comment": "Government to organization compliance units admin_adjustment"}, + ) + + admin_adjustment_id = Column( + Integer, + primary_key=True, + autoincrement=True, + comment="Unique identifier for the admin_adjustment", + ) + compliance_units = Column(BigInteger, comment="Compliance Units") + transaction_effective_date = Column( + DateTime, nullable=True, comment="Transaction effective date" + ) + gov_comment = Column( + String(1500), comment="Comment from the government to organization" + ) + to_organization_id = Column(Integer, ForeignKey("organization.organization_id")) + transaction_id = Column(Integer, ForeignKey("transaction.transaction_id")) + current_status_id = Column( + Integer, ForeignKey("admin_adjustment_status.admin_adjustment_status_id") + ) + + to_organization = relationship("Organization", back_populates="admin_adjustments") + transaction = relationship("Transaction") + history = relationship("AdminAdjustmentHistory",
back_populates="admin_adjustment") + current_status = relationship("AdminAdjustmentStatus") + admin_adjustment_internal_comments = relationship( + "AdminAdjustmentInternalComment", back_populates="admin_adjustment" + ) + + def __repr__(self): + return str(self.compliance_units) diff --git a/backend/lcfs/db/models/admin_adjustment/AdminAdjustmentHistory.py b/backend/lcfs/db/models/admin_adjustment/AdminAdjustmentHistory.py new file mode 100644 index 000000000..36fbf0d6f --- /dev/null +++ b/backend/lcfs/db/models/admin_adjustment/AdminAdjustmentHistory.py @@ -0,0 +1,34 @@ +from sqlalchemy import Column, Integer, BigInteger, ForeignKey, DateTime +from sqlalchemy.orm import relationship +from sqlalchemy import UniqueConstraint +from lcfs.db.base import BaseModel, Auditable, EffectiveDates + + +class AdminAdjustmentHistory(BaseModel, Auditable, EffectiveDates): + __tablename__ = "admin_adjustment_history" + __table_args__ = ( + UniqueConstraint("admin_adjustment_history_id"), + {"comment": "History record for admin_adjustment status change."}, + ) + + admin_adjustment_history_id = Column( + Integer, + primary_key=True, + autoincrement=True, + comment="Unique identifier for the admin_adjustment history record", + ) + admin_adjustment_id = Column( + Integer, ForeignKey("admin_adjustment.admin_adjustment_id") + ) + admin_adjustment_status_id = Column( + Integer, ForeignKey("admin_adjustment_status.admin_adjustment_status_id") + ) + user_profile_id = Column( + Integer, + ForeignKey("user_profile.user_profile_id"), + comment="Foreign key to user_profile", + ) + + admin_adjustment = relationship("AdminAdjustment", back_populates="history") + admin_adjustment_status = relationship("AdminAdjustmentStatus") + user_profile = relationship("UserProfile") diff --git a/backend/lcfs/db/models/admin_adjustment/AdminAdjustmentStatus.py b/backend/lcfs/db/models/admin_adjustment/AdminAdjustmentStatus.py new file mode 100644 index 000000000..09bc786b5 --- /dev/null +++ b/backend/lcfs/db/models/admin_adjustment/AdminAdjustmentStatus.py @@ -0,0 +1,26 @@ +from sqlalchemy import Column, Integer, Enum +from lcfs.db.base import BaseModel, Auditable, DisplayOrder +import enum + + +class AdminAdjustmentStatusEnum(enum.Enum): + Draft = "Draft" # Draft created by analyst + Recommended = "Recommended" # Recommended by analyst + Approved = "Approved" # Approved by director + Deleted = "Deleted" # Deleted by analyst + + +class AdminAdjustmentStatus(BaseModel, Auditable, DisplayOrder): + + __tablename__ = "admin_adjustment_status" + __table_args__ = {"comment": "Represents an Admin Adjustment Status"} + + admin_adjustment_status_id = Column(Integer, primary_key=True, autoincrement=True) + status = Column( + Enum( + AdminAdjustmentStatusEnum, + name="admin_adjustment_type_enum", + create_type=True, + ), + comment="Admin Adjustment Status", + ) diff --git a/backend/lcfs/db/models/admin_adjustment/__init__.py b/backend/lcfs/db/models/admin_adjustment/__init__.py new file mode 100644 index 000000000..01fed9b43 --- /dev/null +++ b/backend/lcfs/db/models/admin_adjustment/__init__.py @@ -0,0 +1,9 @@ +from .AdminAdjustment import AdminAdjustment +from .AdminAdjustmentStatus import AdminAdjustmentStatus +from .AdminAdjustmentHistory import AdminAdjustmentHistory + +__all__ = [ + "AdminAdjustment", + "AdminAdjustmentStatus", + "AdminAdjustmentHistory", +] diff --git a/backend/lcfs/db/models/audit/AuditLog.py b/backend/lcfs/db/models/audit/AuditLog.py new file mode 100644 index 000000000..f428940cb --- /dev/null +++
b/backend/lcfs/db/models/audit/AuditLog.py @@ -0,0 +1,71 @@ +from lcfs.db.base import Auditable, BaseModel +from sqlalchemy import ( + Integer, + Column, + Text, + Index, +) +from sqlalchemy.dialects.postgresql import JSONB + + +class AuditLog(BaseModel, Auditable): + """ + Audit log capturing changes to database tables. + + As the table grows, consider implementing automatic archiving (e.g., moving older logs to an archive table) + and purging (e.g., deleting logs after a retention period) using tools like `pg_cron` or external schedulers. + + Archiving: + - Create an `audit_log_archive` table with the same structure as `audit_log`. + - Use a scheduled job (e.g., with `pg_cron`) to move records older than a certain threshold (e.g., 1 month) from `audit_log` to `audit_log_archive`. + - Alternatively, consider creating date-based archive tables (e.g., audit_log_archive_2025_01) to organize logs by time periods. + + Purging: + - Use a scheduled job (e.g., with `pg_cron`) to delete records older than a defined retention period (e.g., 1 year) from `audit_log_archive`. + """ + + __tablename__ = "audit_log" + __table_args__ = ( + Index("idx_audit_log_operation", "operation"), + Index("idx_audit_log_create_date", "create_date"), + Index("idx_audit_log_create_user", "create_user"), + Index("idx_audit_log_delta", "delta", postgresql_using="gin"), + {"comment": "Track changes in defined tables."}, + ) + + audit_log_id = Column( + Integer, + primary_key=True, + autoincrement=True, + comment="Unique identifier for each audit log entry.", + ) + table_name = Column( + Text, + nullable=False, + comment="Name of the table where the action occurred.", + ) + operation = Column( + Text, + nullable=False, + comment="Type of operation: 'INSERT', 'UPDATE', or 'DELETE'.", + ) + row_id = Column( + JSONB, + nullable=False, + comment="Primary key of the affected row, stored as JSONB to support composite keys.", + ) + old_values = Column( + JSONB, + nullable=True, + comment="Previous values before the operation.", + ) + new_values = Column( + JSONB, + nullable=True, + comment="New values after the operation.", + ) + delta = Column( + JSONB, + nullable=True, + comment="JSONB delta of the changes.", + ) diff --git a/backend/lcfs/db/models/audit/__init__.py b/backend/lcfs/db/models/audit/__init__.py new file mode 100644 index 000000000..21bbbbb57 --- /dev/null +++ b/backend/lcfs/db/models/audit/__init__.py @@ -0,0 +1,5 @@ +from .AuditLog import AuditLog + +__all__ = [ + "AuditLog", +] \ No newline at end of file diff --git a/backend/lcfs/db/models/comment/AdminAdjustmentInternalComment.py b/backend/lcfs/db/models/comment/AdminAdjustmentInternalComment.py new file mode 100644 index 000000000..7a8044e3d --- /dev/null +++ b/backend/lcfs/db/models/comment/AdminAdjustmentInternalComment.py @@ -0,0 +1,30 @@ +from sqlalchemy import Column, Integer, ForeignKey +from sqlalchemy.orm import relationship +from lcfs.db.base import BaseModel + + +class AdminAdjustmentInternalComment(BaseModel): + __tablename__ = "admin_adjustment_internal_comment" + __table_args__ = {"comment": "Associates internal comments with admin adjustments."} + + # Columns + admin_adjustment_id = Column( + Integer, + ForeignKey("admin_adjustment.admin_adjustment_id"), + primary_key=True, + comment="Foreign key to admin_adjustment, part of the composite primary key.", + ) + internal_comment_id = Column( + Integer, + ForeignKey("internal_comment.internal_comment_id"), + primary_key=True, + comment="Foreign key to internal_comment, part of the composite primary 
key.", + ) + + # Relationships + admin_adjustment = relationship( + "AdminAdjustment", back_populates="admin_adjustment_internal_comments" + ) + internal_comment = relationship( + "InternalComment", back_populates="admin_adjustment_internal_comments" + ) diff --git a/backend/lcfs/db/models/comment/ComplianceReportInternalComment.py b/backend/lcfs/db/models/comment/ComplianceReportInternalComment.py new file mode 100644 index 000000000..f47f27bc5 --- /dev/null +++ b/backend/lcfs/db/models/comment/ComplianceReportInternalComment.py @@ -0,0 +1,30 @@ +from sqlalchemy import Column, Integer, ForeignKey +from sqlalchemy.orm import relationship +from lcfs.db.base import BaseModel + + +class ComplianceReportInternalComment(BaseModel): + __tablename__ = "compliance_report_internal_comment" + __table_args__ = {"comment": "Associates internal comments with compliance report."} + + # Columns + compliance_report_id = Column( + Integer, + ForeignKey("compliance_report.compliance_report_id"), + primary_key=True, + comment="Foreign key to compliance_report, part of the composite primary key.", + ) + internal_comment_id = Column( + Integer, + ForeignKey("internal_comment.internal_comment_id"), + primary_key=True, + comment="Foreign key to internal_comment, part of the composite primary key.", + ) + + # Relationships + compliance_report = relationship( + "ComplianceReport", back_populates="compliance_report_internal_comments" + ) + internal_comment = relationship( + "InternalComment", back_populates="compliance_report_internal_comments" + ) diff --git a/backend/lcfs/db/models/comment/InitiativeAgreementInternalComment.py b/backend/lcfs/db/models/comment/InitiativeAgreementInternalComment.py new file mode 100644 index 000000000..20b00542c --- /dev/null +++ b/backend/lcfs/db/models/comment/InitiativeAgreementInternalComment.py @@ -0,0 +1,32 @@ +from sqlalchemy import Column, Integer, ForeignKey +from sqlalchemy.orm import relationship +from lcfs.db.base import BaseModel + + +class InitiativeAgreementInternalComment(BaseModel): + __tablename__ = "initiative_agreement_internal_comment" + __table_args__ = { + "comment": "Associates internal comments with initiative agreements." 
+ } + + # Columns + initiative_agreement_id = Column( + Integer, + ForeignKey("initiative_agreement.initiative_agreement_id"), + primary_key=True, + comment="Foreign key to initiative_agreement, part of the composite primary key.", + ) + internal_comment_id = Column( + Integer, + ForeignKey("internal_comment.internal_comment_id"), + primary_key=True, + comment="Foreign key to internal_comment, part of the composite primary key.", + ) + + # Relationships + initiative_agreement = relationship( + "InitiativeAgreement", back_populates="initiative_agreement_internal_comments" + ) + internal_comment = relationship( + "InternalComment", back_populates="initiative_agreement_internal_comments" + ) diff --git a/backend/lcfs/db/models/comment/InternalComment.py b/backend/lcfs/db/models/comment/InternalComment.py new file mode 100644 index 000000000..56dcecbc9 --- /dev/null +++ b/backend/lcfs/db/models/comment/InternalComment.py @@ -0,0 +1,45 @@ +from sqlalchemy import Column, Integer, Text, UniqueConstraint +from sqlalchemy.orm import relationship +from sqlalchemy.dialects.postgresql import ENUM +from lcfs.db.base import BaseModel, Auditable + +# ENUM for audience scope +audience_scope_enum = ENUM( + "Director", "Analyst", "Compliance Manager", name="audience_scope", create_type=False +) + + +class InternalComment(BaseModel, Auditable): + __tablename__ = "internal_comment" + __table_args__ = ( + UniqueConstraint("internal_comment_id"), + {"comment": "Stores internal comments with scope and related metadata."}, + ) + + # Columns + internal_comment_id = Column( + Integer, + primary_key=True, + autoincrement=True, + comment="Primary key, unique identifier for each internal comment.", + ) + comment = Column(Text, nullable=True, comment="Text of the comment.") + audience_scope = Column( + audience_scope_enum, + nullable=False, + comment="Defines the audience scope for the comment, e.g., Director, Analyst, Compliance Manager", + ) + + # Relationships + transfer_internal_comments = relationship( + "TransferInternalComment", back_populates="internal_comment" + ) + initiative_agreement_internal_comments = relationship( + "InitiativeAgreementInternalComment", back_populates="internal_comment" + ) + admin_adjustment_internal_comments = relationship( + "AdminAdjustmentInternalComment", back_populates="internal_comment" + ) + compliance_report_internal_comments = relationship( + "ComplianceReportInternalComment", back_populates="internal_comment" + ) \ No newline at end of file diff --git a/backend/lcfs/db/models/comment/TransferInternalComment.py b/backend/lcfs/db/models/comment/TransferInternalComment.py new file mode 100644 index 000000000..8c19a342c --- /dev/null +++ b/backend/lcfs/db/models/comment/TransferInternalComment.py @@ -0,0 +1,28 @@ +from sqlalchemy import Column, Integer, ForeignKey +from sqlalchemy.orm import relationship +from lcfs.db.base import BaseModel + + +class TransferInternalComment(BaseModel): + __tablename__ = "transfer_internal_comment" + __table_args__ = {"comment": "Associates internal comments with transfers."} + + # Columns + transfer_id = Column( + Integer, + ForeignKey("transfer.transfer_id"), + primary_key=True, + comment="Foreign key to transfer, part of the composite primary key.", + ) + internal_comment_id = Column( + Integer, + ForeignKey("internal_comment.internal_comment_id"), + primary_key=True, + comment="Foreign key to internal_comment, part of the composite primary key.", + ) + + # Relationships + transfer = relationship("Transfer", 
back_populates="transfer_internal_comments") + internal_comment = relationship( + "InternalComment", back_populates="transfer_internal_comments" + ) diff --git a/backend/lcfs/db/models/comment/__init__.py b/backend/lcfs/db/models/comment/__init__.py new file mode 100644 index 000000000..16e133e30 --- /dev/null +++ b/backend/lcfs/db/models/comment/__init__.py @@ -0,0 +1,11 @@ +from .AdminAdjustmentInternalComment import AdminAdjustmentInternalComment +from .InitiativeAgreementInternalComment import InitiativeAgreementInternalComment +from .InternalComment import InternalComment +from .ComplianceReportInternalComment import ComplianceReportInternalComment + +__all__ = [ + "AdminAdjustmentInternalComment", + "InitiativeAgreementInternalComment", + "InternalComment", + "ComplianceReportInternalComment", +] diff --git a/backend/lcfs/db/models/compliance/AllocationAgreement.py b/backend/lcfs/db/models/compliance/AllocationAgreement.py new file mode 100644 index 000000000..cc46b52be --- /dev/null +++ b/backend/lcfs/db/models/compliance/AllocationAgreement.py @@ -0,0 +1,93 @@ +from sqlalchemy import Column, Integer, String, ForeignKey, Numeric +from sqlalchemy.orm import relationship + +from lcfs.db.base import BaseModel, Auditable, DisplayOrder + + +class AllocationAgreement(BaseModel, Auditable, DisplayOrder): + __tablename__ = "allocation_agreement" + __table_args__ = { + "comment": "Records allocation agreements where the reporting obligation is passed from one party to another. Each party must report their end of the transaction." + } + + allocation_agreement_id = Column( + Integer, + primary_key=True, + autoincrement=True, + comment="Unique identifier for the allocation agreement", + ) + transaction_partner = Column( + String, nullable=False, comment="Partner involved in the transaction" + ) + postal_address = Column( + String, nullable=False, comment="Postal address of the transaction partner" + ) + transaction_partner_email = Column( + String, nullable=False, comment="Transaction Partner email" + ) + transaction_partner_phone = Column( + String, nullable=False, comment="Transaction Partner phone number" + ) + ci_of_fuel = Column( + Numeric(10, 2), nullable=False, comment="The Carbon intesity of fuel" + ) + quantity = Column( + Integer, nullable=False, comment="Quantity of fuel involved in the transaction" + ) + units = Column( + String, + nullable=False, + comment="Units of the fuel quantity. 
Auto-selected, locked field.", + ) + fuel_type_other = Column( + String(1000), nullable=True, comment="Other fuel type if one provided" + ) + + allocation_transaction_type_id = Column( + Integer, + ForeignKey("allocation_transaction_type.allocation_transaction_type_id"), + nullable=False, + comment="Foreign key to the transaction type", + ) + fuel_type_id = Column( + Integer, + ForeignKey("fuel_type.fuel_type_id"), + nullable=False, + comment="Foreign key to the fuel type", + ) + fuel_category_id = Column( + Integer, + ForeignKey("fuel_category.fuel_category_id"), + nullable=False, + comment="Foreign key to the fuel category", + ) + provision_of_the_act_id = Column( + Integer, + ForeignKey("provision_of_the_act.provision_of_the_act_id"), + nullable=False, + comment="Foreign key to the provision of the act", + ) + fuel_code_id = Column( + Integer, + ForeignKey("fuel_code.fuel_code_id"), + nullable=True, + comment="Foreign key to the fuel code", + ) + compliance_report_id = Column( + Integer, + ForeignKey("compliance_report.compliance_report_id"), + nullable=False, + comment="Foreign key to the compliance report", + ) + + allocation_transaction_type = relationship("AllocationTransactionType") + fuel_type = relationship("FuelType") + fuel_category = relationship("FuelCategory") + provision_of_the_act = relationship("ProvisionOfTheAct") + fuel_code = relationship("FuelCode") + compliance_report = relationship( + "ComplianceReport", back_populates="allocation_agreements" + ) + + def __repr__(self): + return f"" diff --git a/backend/lcfs/db/models/compliance/AllocationTransactionType.py b/backend/lcfs/db/models/compliance/AllocationTransactionType.py new file mode 100644 index 000000000..5379f47ab --- /dev/null +++ b/backend/lcfs/db/models/compliance/AllocationTransactionType.py @@ -0,0 +1,22 @@ +from sqlalchemy import Column, Integer, String, Date +from sqlalchemy.orm import relationship +from lcfs.db.base import BaseModel, EffectiveDates, DisplayOrder + + +class AllocationTransactionType(BaseModel, EffectiveDates, DisplayOrder): + __tablename__ = "allocation_transaction_type" + __table_args__ = {"comment": "Lookup table for allocation transaction types."} + + allocation_transaction_type_id = Column( + Integer, + primary_key=True, + autoincrement=True, + comment="Unique identifier for the allocation transaction type", + ) + type = Column(String, nullable=False, comment="Type of the allocation transaction") + description = Column( + String, nullable=True, comment="Description of the allocation transaction type" + ) + + def __repr__(self): + return f"" diff --git a/backend/lcfs/db/models/compliance/CompliancePeriod.py b/backend/lcfs/db/models/compliance/CompliancePeriod.py new file mode 100644 index 000000000..187f7ee67 --- /dev/null +++ b/backend/lcfs/db/models/compliance/CompliancePeriod.py @@ -0,0 +1,30 @@ +from sqlalchemy import Column, Integer, String, Date +from sqlalchemy.orm import relationship +from lcfs.db.base import BaseModel, EffectiveDates + + +class CompliancePeriod(BaseModel, EffectiveDates): + __tablename__ = "compliance_period" + __table_args__ = { + "comment": "The compliance year associated with compliance reports and other related tables. The description field should be the year." 
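+ # As the table comment above notes, description carries the compliance year
+ # itself. A hypothetical seed sketch (values illustrative only):
+ #
+ #   db.add(CompliancePeriod(description="2024", display_order=15))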
+ } + + compliance_period_id = Column( + Integer, + primary_key=True, + autoincrement=True, + comment="Unique identifier for the compliance period", + ) + description = Column( + String, nullable=False, comment="Year description for the compliance period" + ) + display_order = Column( + Integer, nullable=True, comment="Display order for the compliance period" + ) + + compliance_reports = relationship( + "ComplianceReport", back_populates="compliance_period" + ) + + def __repr__(self): + return f"" diff --git a/backend/lcfs/db/models/compliance/ComplianceReport.py b/backend/lcfs/db/models/compliance/ComplianceReport.py new file mode 100644 index 000000000..6656cc6ec --- /dev/null +++ b/backend/lcfs/db/models/compliance/ComplianceReport.py @@ -0,0 +1,179 @@ +import uuid +import enum + +from pydantic import computed_field +from sqlalchemy import ( + Column, + Integer, + String, + ForeignKey, + Enum, + Table, +) +from sqlalchemy.orm import relationship, backref +from lcfs.db.base import BaseModel, Auditable + + +class ReportingFrequency(enum.Enum): + ANNUAL = "Annual" + QUARTERLY = "Quarterly" + + +class SupplementalInitiatorType(enum.Enum): + SUPPLIER_SUPPLEMENTAL = "Supplier Supplemental" + GOVERNMENT_REASSESSMENT = "Government Reassessment" + + +class Quarter(enum.Enum): + Q1 = "Q1" + Q2 = "Q2" + Q3 = "Q3" + Q4 = "Q4" + + +class QuantityUnitsEnum(enum.Enum): + Litres = "L" + Kilograms = "kg" + Kilowatt_hour = "kWh" + Cubic_metres = "m³" + + +# Association table for compliance report documents +compliance_report_document_association = Table( + "compliance_report_document_association", + BaseModel.metadata, + Column( + "compliance_report_id", + Integer, + ForeignKey("compliance_report.compliance_report_id", ondelete="CASCADE"), + primary_key=True, + ), + Column( + "document_id", + Integer, + ForeignKey("document.document_id"), + primary_key=True, + ), +) + + +class ComplianceReport(BaseModel, Auditable): + __tablename__ = "compliance_report" + __table_args__ = { + "comment": "Main tracking table for all the sub-tables associated with a supplier's compliance report" + } + + compliance_report_id = Column( + Integer, + primary_key=True, + autoincrement=True, + comment="Unique identifier for the compliance report version", + ) + compliance_period_id = Column( + Integer, + ForeignKey("compliance_period.compliance_period_id"), + nullable=False, + comment="Foreign key to the compliance period", + ) + organization_id = Column( + Integer, + ForeignKey("organization.organization_id"), + nullable=False, + comment="Identifier for the organization", + ) + current_status_id = Column( + Integer, + ForeignKey("compliance_report_status.compliance_report_status_id"), + nullable=True, + comment="Identifier for the current compliance report status", + ) + transaction_id = Column( + Integer, + ForeignKey("transaction.transaction_id"), + nullable=True, + comment="Identifier for the transaction", + ) + compliance_report_group_uuid = Column( + String(36), + nullable=False, + default=lambda: str(uuid.uuid4()), + comment="UUID that groups all versions of a compliance report", + ) + legacy_id = Column( + Integer, + nullable=True, + comment="ID from TFRS if this is a transferred application, NULL otherwise", + ) + version = Column( + Integer, + nullable=False, + default=0, + comment="Version number of the compliance report", + ) + supplemental_initiator = Column( + Enum(SupplementalInitiatorType), + nullable=True, + comment="Indicates whether supplier or government initiated the supplemental", + ) + reporting_frequency = Column( +
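+ # Version-chain sketch (illustrative, not taken from this change): an original
+ # report and its supplemental share compliance_report_group_uuid and differ
+ # only by version:
+ #
+ #   supplemental = ComplianceReport(
+ #       compliance_report_group_uuid=original.compliance_report_group_uuid,
+ #       version=original.version + 1,
+ #       supplemental_initiator=SupplementalInitiatorType.SUPPLIER_SUPPLEMENTAL,
+ #   )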
Enum(ReportingFrequency), + nullable=False, + default=ReportingFrequency.ANNUAL, + comment="Reporting frequency", + ) + nickname = Column( + String, + nullable=True, + comment="Nickname for the compliance report", + ) + supplemental_note = Column( + String, + nullable=True, + comment="Supplemental note for the compliance report", + ) + + # Relationships + compliance_period = relationship( + "CompliancePeriod", back_populates="compliance_reports" + ) + organization = relationship("Organization", back_populates="compliance_reports") + current_status = relationship("ComplianceReportStatus") + transaction = relationship("Transaction") + + # Tracking relationships + summary = relationship( + "ComplianceReportSummary", back_populates="compliance_report", uselist=False + ) + history = relationship( + "ComplianceReportHistory", back_populates="compliance_report" + ) + compliance_report_internal_comments = relationship( + "ComplianceReportInternalComment", back_populates="compliance_report" + ) + documents = relationship( + "Document", + secondary=compliance_report_document_association, + back_populates="compliance_reports", + ) + + # Schedule relationships + notional_transfers = relationship( + "NotionalTransfer", back_populates="compliance_report" + ) + fuel_supplies = relationship("FuelSupply", back_populates="compliance_report") + fuel_exports = relationship("FuelExport", back_populates="compliance_report") + allocation_agreements = relationship( + "AllocationAgreement", back_populates="compliance_report" + ) + other_uses = relationship("OtherUses", back_populates="compliance_report") + final_supply_equipment = relationship( + "FinalSupplyEquipment", back_populates="compliance_report" + ) + + def __repr__(self): + return f"" + + @computed_field + @property + def has_supplemental(self): + return self.version > 0 diff --git a/backend/lcfs/db/models/compliance/ComplianceReportHistory.py b/backend/lcfs/db/models/compliance/ComplianceReportHistory.py new file mode 100644 index 000000000..bb73bec07 --- /dev/null +++ b/backend/lcfs/db/models/compliance/ComplianceReportHistory.py @@ -0,0 +1,39 @@ +from sqlalchemy import Column, Integer, ForeignKey +from sqlalchemy.orm import relationship +from lcfs.db.base import BaseModel, Auditable + + +class ComplianceReportHistory(BaseModel, Auditable): + __tablename__ = "compliance_report_history" + __table_args__ = {"comment": "Tracks status changes of compliance reports"} + + compliance_report_history_id = Column( + Integer, + primary_key=True, + autoincrement=True, + comment="Unique identifier for the compliance report history", + ) + compliance_report_id = Column( + Integer, + ForeignKey("compliance_report.compliance_report_id"), + nullable=False, + comment="Foreign key to the compliance report", + ) + status_id = Column( + Integer, + ForeignKey("compliance_report_status.compliance_report_status_id"), + nullable=False, + comment="Foreign key to the compliance report status", + ) + user_profile_id = Column( + Integer, + ForeignKey("user_profile.user_profile_id"), + comment="Identifier for the user associated with the status change", + ) + + compliance_report = relationship("ComplianceReport", back_populates="history") + status = relationship("ComplianceReportStatus") + user_profile = relationship("UserProfile") + + def __repr__(self): + return f"" diff --git a/backend/lcfs/db/models/compliance/ComplianceReportStatus.py b/backend/lcfs/db/models/compliance/ComplianceReportStatus.py new file mode 100644 index 000000000..ee366c8a1 --- /dev/null +++ 
b/backend/lcfs/db/models/compliance/ComplianceReportStatus.py @@ -0,0 +1,41 @@ +from sqlalchemy import Column, Integer, String, Date, Enum +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import relationship +import enum +from lcfs.db.base import BaseModel, EffectiveDates + + +class ComplianceReportStatusEnum(enum.Enum): + Draft = "Draft" + Submitted = "Submitted" + Recommended_by_analyst = "Recommended by analyst" + Recommended_by_manager = "Recommended by manager" + Assessed = "Assessed" + ReAssessed = "ReAssessed" + + +class ComplianceReportStatus(BaseModel, EffectiveDates): + __tablename__ = "compliance_report_status" + __table_args__ = {"comment": "Lookup table for compliance reports status"} + + compliance_report_status_id = Column( + Integer, + primary_key=True, + autoincrement=True, + comment="Unique identifier for the compliance report status", + ) + display_order = Column( + Integer, nullable=True, comment="Display order for the compliance report status" + ) + status = Column( + Enum(ComplianceReportStatusEnum), + nullable=False, + comment="Status of the compliance report", + ) + + compliance_reports = relationship( + "ComplianceReport", back_populates="current_status" + ) + + def __repr__(self): + return f"" diff --git a/backend/lcfs/db/models/compliance/ComplianceReportSummary.py b/backend/lcfs/db/models/compliance/ComplianceReportSummary.py new file mode 100644 index 000000000..99b30b40d --- /dev/null +++ b/backend/lcfs/db/models/compliance/ComplianceReportSummary.py @@ -0,0 +1,124 @@ +from sqlalchemy import Column, Integer, Float, ForeignKey, Boolean, CheckConstraint +from sqlalchemy.exc import InvalidRequestError +from sqlalchemy.orm import relationship +from lcfs.db.base import BaseModel, Auditable +from datetime import datetime + + +class ComplianceReportSummary(BaseModel, Auditable): + __tablename__ = "compliance_report_summary" + __table_args__ = { + "comment": "Summary of all compliance calculations displaying the compliance units over a compliance period" + } + + summary_id = Column(Integer, primary_key=True, autoincrement=True) + compliance_report_id = Column( + Integer, ForeignKey("compliance_report.compliance_report_id"), nullable=True + ) + quarter = Column(Integer, nullable=True) # Null for annual reports + is_locked = Column(Boolean, default=False) + + # Renewable fuel target summary + line_1_fossil_derived_base_fuel_gasoline = Column(Float, nullable=False, default=0) + line_1_fossil_derived_base_fuel_diesel = Column(Float, nullable=False, default=0) + line_1_fossil_derived_base_fuel_jet_fuel = Column(Float, nullable=False, default=0) + + line_2_eligible_renewable_fuel_supplied_gasoline = Column( + Float, nullable=False, default=0 + ) + line_2_eligible_renewable_fuel_supplied_diesel = Column( + Float, nullable=False, default=0 + ) + line_2_eligible_renewable_fuel_supplied_jet_fuel = Column( + Float, nullable=False, default=0 + ) + + line_3_total_tracked_fuel_supplied_gasoline = Column( + Float, nullable=False, default=0 + ) + line_3_total_tracked_fuel_supplied_diesel = Column(Float, nullable=False, default=0) + line_3_total_tracked_fuel_supplied_jet_fuel = Column( + Float, nullable=False, default=0 + ) + + line_4_eligible_renewable_fuel_required_gasoline = Column( + Float, nullable=False, default=0 + ) + line_4_eligible_renewable_fuel_required_diesel = Column( + Float, nullable=False, default=0 + ) + line_4_eligible_renewable_fuel_required_jet_fuel = Column( + Float, nullable=False, default=0 + ) + + 
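+ # The line_<n>_<label>_<fuel-class> naming mirrors the summary form line
+ # numbers, so cells can be read generically by attribute name. Hypothetical
+ # helper sketch:
+ #
+ #   def summary_cell(summary, n: int, label: str, fuel: str) -> float:
+ #       return getattr(summary, f"line_{n}_{label}_{fuel}")
+ #
+ #   summary_cell(summary, 2, "eligible_renewable_fuel_supplied", "diesel")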
line_5_net_notionally_transferred_gasoline = Column( + Float, nullable=False, default=0 + ) + line_5_net_notionally_transferred_diesel = Column(Float, nullable=False, default=0) + line_5_net_notionally_transferred_jet_fuel = Column( + Float, nullable=False, default=0 + ) + + line_6_renewable_fuel_retained_gasoline = Column(Float, nullable=False, default=0) + line_6_renewable_fuel_retained_diesel = Column(Float, nullable=False, default=0) + line_6_renewable_fuel_retained_jet_fuel = Column(Float, nullable=False, default=0) + + line_7_previously_retained_gasoline = Column(Float, nullable=False, default=0) + line_7_previously_retained_diesel = Column(Float, nullable=False, default=0) + line_7_previously_retained_jet_fuel = Column(Float, nullable=False, default=0) + + line_8_obligation_deferred_gasoline = Column(Float, nullable=False, default=0) + line_8_obligation_deferred_diesel = Column(Float, nullable=False, default=0) + line_8_obligation_deferred_jet_fuel = Column(Float, nullable=False, default=0) + + line_9_obligation_added_gasoline = Column(Float, nullable=False, default=0) + line_9_obligation_added_diesel = Column(Float, nullable=False, default=0) + line_9_obligation_added_jet_fuel = Column(Float, nullable=False, default=0) + + line_10_net_renewable_fuel_supplied_gasoline = Column( + Float, nullable=False, default=0 + ) + line_10_net_renewable_fuel_supplied_diesel = Column( + Float, nullable=False, default=0 + ) + line_10_net_renewable_fuel_supplied_jet_fuel = Column( + Float, nullable=False, default=0 + ) + + line_11_non_compliance_penalty_gasoline = Column(Float, nullable=True, default=0) + line_11_non_compliance_penalty_diesel = Column(Float, nullable=True, default=0) + line_11_non_compliance_penalty_jet_fuel = Column(Float, nullable=True, default=0) + + # Low carbon fuel target summary columns + line_12_low_carbon_fuel_required = Column(Float, nullable=False, default=0) + line_13_low_carbon_fuel_supplied = Column(Float, nullable=False, default=0) + line_14_low_carbon_fuel_surplus = Column(Float, nullable=False, default=0) + line_15_banked_units_used = Column(Float, nullable=False, default=0) + line_16_banked_units_remaining = Column(Float, nullable=False, default=0) + line_17_non_banked_units_used = Column(Float, nullable=False, default=0) + line_18_units_to_be_banked = Column(Float, nullable=False, default=0) + line_19_units_to_be_exported = Column(Float, nullable=False, default=0) + line_20_surplus_deficit_units = Column(Float, nullable=False, default=0) + line_21_surplus_deficit_ratio = Column(Float, nullable=False, default=0) + line_22_compliance_units_issued = Column(Float, nullable=False, default=0) + + # Non-compliance penalty summary columns + line_11_fossil_derived_base_fuel_gasoline = Column(Float, nullable=False, default=0) + line_11_fossil_derived_base_fuel_diesel = Column(Float, nullable=False, default=0) + line_11_fossil_derived_base_fuel_jet_fuel = Column(Float, nullable=False, default=0) + line_11_fossil_derived_base_fuel_total = Column(Float, nullable=False, default=0) + line_21_non_compliance_penalty_payable = Column(Float, nullable=False, default=0) + total_non_compliance_penalty_payable = Column(Float, nullable=False, default=0) + + compliance_report = relationship("ComplianceReport", back_populates="summary") + + def lock_summary(self): + if not self.is_locked: + self.is_locked = True + else: + raise InvalidRequestError("ComplianceReportSummary is already locked") + + def __repr__(self): + return f"" + + # method to annualize a report once all four quarters are 
approved? \ No newline at end of file diff --git a/backend/lcfs/db/models/compliance/EndUserType.py b/backend/lcfs/db/models/compliance/EndUserType.py new file mode 100644 index 000000000..d575e6616 --- /dev/null +++ b/backend/lcfs/db/models/compliance/EndUserType.py @@ -0,0 +1,28 @@ +from sqlalchemy import Boolean, Column, Integer, String +from sqlalchemy.orm import relationship +from lcfs.db.base import BaseModel + +class EndUserType(BaseModel): + """ + Model representing the end user type for the final supply equipment. + """ + __tablename__ = "end_user_type" + __table_args__ = {"comment": "Types of intended users for supply equipment"} + + end_user_type_id = Column( + Integer, + primary_key=True, + autoincrement=True, + comment="The unique identifier for the end user type.", + ) + type_name = Column( + String, nullable=False, unique=True, comment="The name of the end user type." + ) + intended_use = Column(Boolean, nullable=False, default=True) + + # Establish bidirectional relationship with FinalSupplyEquipment + final_supply_equipments = relationship( + "FinalSupplyEquipment", + secondary="final_supply_intended_user_association", + back_populates="intended_user_types" + ) diff --git a/backend/lcfs/db/models/compliance/FinalSupplyEquipment.py b/backend/lcfs/db/models/compliance/FinalSupplyEquipment.py new file mode 100644 index 000000000..90bd37b27 --- /dev/null +++ b/backend/lcfs/db/models/compliance/FinalSupplyEquipment.py @@ -0,0 +1,147 @@ +from lcfs.db.base import Auditable, BaseModel +from sqlalchemy import Column, Date, Double, Enum, Integer, String, Table, Text +from sqlalchemy import ForeignKey +from sqlalchemy.orm import relationship + +# Association table for many-to-many relationship +final_supply_intended_use_association = Table( + "final_supply_intended_use_association", + BaseModel.metadata, + Column( + "final_supply_equipment_id", + Integer, + ForeignKey( + "final_supply_equipment.final_supply_equipment_id", ondelete="CASCADE" + ), + primary_key=True, + ), + Column( + "end_use_type_id", + Integer, + ForeignKey("end_use_type.end_use_type_id"), + primary_key=True, + ), +) + +# Association table for FinalSupplyEquipment and EndUserType +final_supply_intended_user_association = Table( + "final_supply_intended_user_association", + BaseModel.metadata, + Column( + "final_supply_equipment_id", + Integer, + ForeignKey("final_supply_equipment.final_supply_equipment_id", ondelete="CASCADE"), + primary_key=True, + ), + Column( + "end_user_type_id", + Integer, + ForeignKey("end_user_type.end_user_type_id"), + primary_key=True, + ), +) + + +class FinalSupplyEquipment(BaseModel, Auditable): + """ + Model representing a final supply equipment. + """ + + __tablename__ = "final_supply_equipment" + __table_args__ = {"comment": "Final Supply Equipment"} + + final_supply_equipment_id = Column( + Integer, + primary_key=True, + autoincrement=True, + comment="The unique identifier for the final supply equipment.", + ) + compliance_report_id = Column( + Integer, + ForeignKey("compliance_report.compliance_report_id"), + nullable=False, + comment="The foreign key referencing the compliance report.", + index=True, + ) + supply_from_date = Column( + Date, nullable=False, comment="The date from which the equipment is supplied." + ) + supply_to_date = Column( + Date, nullable=False, comment="The date until which the equipment is supplied." 
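+ # intended_user_types (defined further below) is a plain many-to-many through
+ # final_supply_intended_user_association; appending to the collection emits
+ # the association row on flush. Sketch (hypothetical objects):
+ #
+ #   fse.intended_user_types.append(end_user)  # row written on db.flush()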
+ ) + kwh_usage = Column( + Double(precision=2), + nullable=True, + comment="Optional kWh usage with up to 2 decimal places.", + ) + registration_nbr = Column( + String, + index=True, + comment="Unique registration number in format ORGCODE-POSTAL-SEQ (e.g., AB55-V3B0G2-001)", + ) + serial_nbr = Column( + String, nullable=False, comment="The serial number of the equipment." + ) + manufacturer = Column( + String, nullable=False, comment="The manufacturer of the equipment." + ) + model = Column( + String, + nullable=True, + comment="Optional model of the equipment, following 'Make' field.", + ) + level_of_equipment_id = Column( + Integer, + ForeignKey("level_of_equipment.level_of_equipment_id"), + nullable=False, + comment="The foreign key referencing the level of equipment.", + index=True, + ) + ports = Column( + Enum("Single port", "Dual port", name="ports_enum"), + nullable=True, + comment="Port type with options 'Single port' and 'Dual port.'" + ) + fuel_measurement_type_id = Column( + Integer, + ForeignKey("fuel_measurement_type.fuel_measurement_type_id"), + nullable=False, + comment="The foreign key referencing the fuel measurement type.", + index=True, + ) + street_address = Column( + String, nullable=False, comment="The street address of the equipment location." + ) + city = Column(String, nullable=False, comment="The city of the equipment location.") + postal_code = Column( + String, nullable=False, comment="The postcode of the equipment location." + ) + latitude = Column( + Double, nullable=False, comment="The latitude of the equipment location." + ) + longitude = Column( + Double, nullable=False, comment="The longitude of the equipment location." + ) + notes = Column(Text, comment="Any additional notes related to the equipment.") + organization_name = Column(Text, comment="External organization name.") + + # relationships + compliance_report = relationship( + "ComplianceReport", back_populates="final_supply_equipment" + ) + level_of_equipment = relationship( + "LevelOfEquipment", back_populates="final_supply_equipment" + ) + fuel_measurement_type = relationship( + "FuelMeasurementType", back_populates="final_supply_equipment" + ) + intended_use_types = relationship( + "EndUseType", + secondary=final_supply_intended_use_association, + back_populates="final_supply_equipments", + ) + intended_user_types = relationship( + "EndUserType", + secondary=final_supply_intended_user_association, + back_populates="final_supply_equipments", + ) diff --git a/backend/lcfs/db/models/compliance/FinalSupplyEquipmentRegNumber.py b/backend/lcfs/db/models/compliance/FinalSupplyEquipmentRegNumber.py new file mode 100644 index 000000000..47736366f --- /dev/null +++ b/backend/lcfs/db/models/compliance/FinalSupplyEquipmentRegNumber.py @@ -0,0 +1,33 @@ +from lcfs.db.base import BaseModel +from sqlalchemy import Column, Integer, String, PrimaryKeyConstraint + + +class FinalSupplyEquipmentRegNumber(BaseModel): + """ + Model representing the highest sequence number for a given postal code + to generate unique registration numbers for final supply equipment. + """ + + __tablename__ = "final_supply_equipment_reg_number" + __table_args__ = ( + PrimaryKeyConstraint("organization_code", "postal_code"), + { + "comment": "Tracks the highest sequence numbers for final supply equipment registration numbers by postal code and organization code." 
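+ # Sketch of how this counter feeds the ORGCODE-POSTAL-SEQ registration format
+ # noted on registration_nbr above (an assumption, with zero-padded sequence):
+ #
+ #   seq = reg_row.current_sequence_number + 1
+ #   registration_nbr = f"{org_code}-{postal_code}-{seq:03}"  # e.g. AB55-V3B0G2-001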
+ }, + ) + + organization_code = Column( + String, + nullable=False, + comment="The organization code for the final supply equipment.", + ) + postal_code = Column( + String, + nullable=False, + comment="The postal code for the final supply equipment.", + ) + current_sequence_number = Column( + Integer, + nullable=False, + comment="Current sequence number used for the postal code.", + ) diff --git a/backend/lcfs/db/models/compliance/FinalSupplyEquipmentSupplier.py b/backend/lcfs/db/models/compliance/FinalSupplyEquipmentSupplier.py new file mode 100644 index 000000000..97cd58d0b --- /dev/null +++ b/backend/lcfs/db/models/compliance/FinalSupplyEquipmentSupplier.py @@ -0,0 +1,56 @@ +from sqlalchemy import Column, Integer, String +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.future import select +from sqlalchemy.ext.asyncio import AsyncSession + +Base = declarative_base() + + +class FinalSupplyEquipmentSupplier(Base): + __tablename__ = "final_supply_equipment_supplier" + + supplier_id = Column(Integer, primary_key=True) + postal_code = Column(String, nullable=False) + registration_number = Column(String, nullable=False, unique=True) + + def __init__( + self, + supplier_id: int = None, + postal_code: str = None, + ): + self.supplier_id = supplier_id + self.postal_code = postal_code + # registration_number must be generated asynchronously by the caller + # via generate_registration_number() before the row is persisted. + + @staticmethod + async def generate_registration_number( + db: AsyncSession, supplier_id: int, postal_code: str + ) -> str: + # Force supplier_id to 5 digits + supplier_id_str = f"{supplier_id:05}" + + # Get the last 3 characters of the postal code + postal_code_suffix = postal_code[-3:] + + # Get the latest registration number for the given postal code + result = await db.execute( + select(FinalSupplyEquipmentSupplier) + .where( + FinalSupplyEquipmentSupplier.postal_code.endswith(postal_code_suffix) + ) + .order_by(FinalSupplyEquipmentSupplier.registration_number.desc()) + ) + last_supplier = result.scalars().first() + + if last_supplier is None: + sequential_number = "001" + else: + last_sequential_number = int(last_supplier.registration_number[-3:]) + sequential_number = f"{(last_sequential_number + 1):03}" + + # Construct the registration number + registration_number = ( + f"{supplier_id_str}{postal_code_suffix}{sequential_number}" + ) + return registration_number diff --git a/backend/lcfs/db/models/compliance/FuelExport.py b/backend/lcfs/db/models/compliance/FuelExport.py new file mode 100644 index 000000000..4b11067d7 --- /dev/null +++ b/backend/lcfs/db/models/compliance/FuelExport.py @@ -0,0 +1,109 @@ +from sqlalchemy import Column, Date, Integer, ForeignKey, Enum, String, Numeric +from sqlalchemy.orm import relationship +from sqlalchemy.sql import text + +from lcfs.db.base import BaseModel, Auditable, Versioning +from lcfs.db.models.compliance.ComplianceReport import ( + Quarter, + QuantityUnitsEnum, +) + + +class FuelExport(BaseModel, Auditable, Versioning): + __tablename__ = "fuel_export" + __table_args__ = { + "comment": "Records the export of fuel for compliance purposes, including changes in supplemental reports" + } + + fuel_export_id = Column( + Integer, + primary_key=True, + autoincrement=True, + comment="Unique identifier for the fuel export", + ) + compliance_report_id = Column( + Integer, + ForeignKey("compliance_report.compliance_report_id"),
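+ # The derived columns below (target_ci, eer, ci_of_fuel, uci, energy,
+ # compliance_units) line up with the usual low-carbon-fuel credit formula,
+ # sketched here as an assumption rather than taken from this change:
+ #
+ #   compliance_units = (target_ci * eer - (ci_of_fuel + uci)) * energy / 1_000_000
+ #   # CI in gCO2e/MJ, energy in MJ, result in tonnes of CO2e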
nullable=False, + comment="Foreign key to the compliance report", + ) + + # data columns + export_date = Column(Date, nullable=False, comment="Date of fuel export") + quarter = Column( + Enum(Quarter), nullable=True, comment="Quarter for quarterly reports" + ) + quantity = Column(Integer, nullable=False, comment="Quantity of fuel exported") + units = Column( + Enum(QuantityUnitsEnum), nullable=False, comment="Units of fuel quantity" + ) + compliance_units = Column( + Integer, nullable=True, comment="Compliance units for the fuel export" + ) + target_ci = Column( + Numeric(10, 2), nullable=True, comment="CI limit for the fuel export" + ) + ci_of_fuel = Column( + Numeric(10, 2), nullable=True, comment="CI of fuel for the fuel export" + ) + uci = Column(Numeric(10, 2), nullable=True, comment="Additional Carbon Intensity") + energy_density = Column( + Numeric(10, 2), + nullable=True, + comment="Energy density of the fuel exported", + ) + eer = Column( + Numeric(10, 2), + nullable=True, + comment="Energy effectiveness ratio of the fuel exported", + ) + energy = Column( + Numeric(10, 2), + nullable=True, + comment="Energy content of the fuel exported", + ) + + # relational columns + fuel_category_id = Column( + Integer, + ForeignKey("fuel_category.fuel_category_id"), + nullable=False, + comment="Foreign key to the fuel category", + ) + fuel_code_id = Column( + Integer, + ForeignKey("fuel_code.fuel_code_id"), + nullable=True, + comment="Foreign key to the fuel code", + ) + fuel_type_id = Column( + Integer, + ForeignKey("fuel_type.fuel_type_id"), + nullable=False, + comment="Foreign key to the fuel type", + ) + provision_of_the_act_id = Column( + Integer, + ForeignKey("provision_of_the_act.provision_of_the_act_id"), + nullable=False, + comment="Foreign key to the provision of the act", + ) + end_use_id = Column( + Integer, + ForeignKey("end_use_type.end_use_type_id"), + nullable=True, + comment="Foreign key to the end use type", + ) + fuel_type_other = Column( + String(1000), nullable=True, comment="Other fuel type if one provided" + ) + + compliance_report = relationship("ComplianceReport", back_populates="fuel_exports") + fuel_category = relationship("FuelCategory") + fuel_code = relationship("FuelCode") + fuel_type = relationship("FuelType") + provision_of_the_act = relationship("ProvisionOfTheAct") + end_use_type = relationship("EndUseType") + + def __repr__(self): + return f"" diff --git a/backend/lcfs/db/models/compliance/FuelMeasurementType.py b/backend/lcfs/db/models/compliance/FuelMeasurementType.py new file mode 100644 index 000000000..d3233298e --- /dev/null +++ b/backend/lcfs/db/models/compliance/FuelMeasurementType.py @@ -0,0 +1,19 @@ +from lcfs.db.base import Auditable, BaseModel, DisplayOrder +from sqlalchemy import Column, Integer, String, Text +from sqlalchemy.orm import relationship + + +class FuelMeasurementType(BaseModel, Auditable, DisplayOrder): + __tablename__ = "fuel_measurement_type" + __table_args__ = {"comment": "Fuel measurement type"} + + fuel_measurement_type_id = Column( + Integer, + primary_key=True, + autoincrement=True, + comment="Unique identifier for the fuel measurement type", + ) + type = Column(String, nullable=False, comment="Name of the fuel measurement type") + description = Column(Text, comment="Description of the fuel measurement type") + # relationship + final_supply_equipment = relationship("FinalSupplyEquipment") diff --git a/backend/lcfs/db/models/compliance/FuelSupply.py b/backend/lcfs/db/models/compliance/FuelSupply.py new file mode 100644 index
000000000..de9859892 --- /dev/null +++ b/backend/lcfs/db/models/compliance/FuelSupply.py @@ -0,0 +1,83 @@ +from sqlalchemy import Column, Integer, ForeignKey, Enum, String, Numeric, BigInteger +from sqlalchemy.orm import relationship +from lcfs.db.base import BaseModel, Auditable, Versioning +from lcfs.db.models.compliance.ComplianceReport import QuantityUnitsEnum + + +class FuelSupply(BaseModel, Auditable, Versioning): + __tablename__ = "fuel_supply" + __table_args__ = { + "comment": "Records the supply of fuel for compliance purposes, including changes in supplemental reports" + } + + fuel_supply_id = Column( + Integer, + primary_key=True, + autoincrement=True, + comment="Unique identifier for the fuel supply version", + ) + compliance_report_id = Column( + Integer, + ForeignKey("compliance_report.compliance_report_id"), + nullable=False, + comment="Foreign key to the compliance report", + ) + + # Data columns + quantity = Column(Integer, nullable=False, comment="Quantity of fuel supplied") + units = Column( + Enum(QuantityUnitsEnum), nullable=False, comment="Units of fuel quantity" + ) + compliance_units = Column(Integer, nullable=True, comment="Compliance units") + target_ci = Column(Numeric(10, 2), nullable=True, comment="Target Carbon Intensity") + ci_of_fuel = Column(Numeric(10, 2), nullable=True, comment="CI of the fuel") + energy_density = Column(Numeric(10, 2), nullable=True, comment="Energy density") + eer = Column(Numeric(10, 2), nullable=True, comment="Energy Effectiveness Ratio") + uci = Column(Numeric(10, 2), nullable=True, comment="Additional Carbon Intensity") + energy = Column(BigInteger, nullable=True, comment="Energy content") + fuel_type_other = Column( + String(1000), nullable=True, comment="Other fuel type if one provided" + ) + + # Relational columns + fuel_category_id = Column( + Integer, + ForeignKey("fuel_category.fuel_category_id"), + nullable=False, + comment="Foreign key to the fuel category", + ) + fuel_code_id = Column( + Integer, + ForeignKey("fuel_code.fuel_code_id"), + nullable=True, + comment="Foreign key to the fuel code", + ) + fuel_type_id = Column( + Integer, + ForeignKey("fuel_type.fuel_type_id"), + nullable=False, + comment="Foreign key to the fuel type", + ) + provision_of_the_act_id = Column( + Integer, + ForeignKey("provision_of_the_act.provision_of_the_act_id"), + nullable=False, + comment="Foreign key to the provision of the act", + ) + end_use_id = Column( + Integer, + ForeignKey("end_use_type.end_use_type_id"), + nullable=True, + comment="Foreign key to the end use type", + ) + + # Relationships + compliance_report = relationship("ComplianceReport", back_populates="fuel_supplies") + fuel_category = relationship("FuelCategory") + fuel_code = relationship("FuelCode") + fuel_type = relationship("FuelType") + provision_of_the_act = relationship("ProvisionOfTheAct") + end_use_type = relationship("EndUseType") + + def __repr__(self): + return f"" diff --git a/backend/lcfs/db/models/compliance/LevelOfEquipment.py b/backend/lcfs/db/models/compliance/LevelOfEquipment.py new file mode 100644 index 000000000..bcc440e38 --- /dev/null +++ b/backend/lcfs/db/models/compliance/LevelOfEquipment.py @@ -0,0 +1,18 @@ +from lcfs.db.base import Auditable, BaseModel, DisplayOrder +from sqlalchemy import Column, Integer, Text +from sqlalchemy import String +from sqlalchemy.orm import relationship + + +class LevelOfEquipment(BaseModel, Auditable, DisplayOrder): + __tablename__ = "level_of_equipment" + __table_args__ = { + "comment": "Represents a level of equipment 
for fuel supply equipments" + } + + level_of_equipment_id = Column(Integer, primary_key=True, autoincrement=True) + name = Column(String(500), nullable=False) + description = Column(Text) + + # relationship + final_supply_equipment = relationship("FinalSupplyEquipment") diff --git a/backend/lcfs/db/models/compliance/NotionalTransfer.py b/backend/lcfs/db/models/compliance/NotionalTransfer.py new file mode 100644 index 000000000..d6f2af930 --- /dev/null +++ b/backend/lcfs/db/models/compliance/NotionalTransfer.py @@ -0,0 +1,57 @@ +from sqlalchemy import Column, Integer, String, ForeignKey, Enum +from sqlalchemy.orm import relationship +from lcfs.db.base import BaseModel, Auditable, Versioning +import enum + + +class ReceivedOrTransferredEnum(enum.Enum): + Received = "Received" + Transferred = "Transferred" + + +class NotionalTransfer(BaseModel, Auditable, Versioning): + __tablename__ = "notional_transfer" + __table_args__ = {"comment": "Records notional transfers for compliance reports."} + + notional_transfer_id = Column( + Integer, + primary_key=True, + autoincrement=True, + comment="Unique identifier for the notional transfer", + ) + compliance_report_id = Column( + Integer, + ForeignKey("compliance_report.compliance_report_id"), + nullable=False, + comment="Foreign key to the compliance report", + ) + quantity = Column( + Integer, + nullable=False, + comment="Quantity of fuel being notionally transferred. Cannot be negative.", + ) + legal_name = Column( + String, nullable=False, comment="Legal name of the trading partner" + ) + address_for_service = Column( + String, nullable=False, comment="Address for service of the trading partner" + ) + fuel_category_id = Column( + Integer, + ForeignKey("fuel_category.fuel_category_id"), + nullable=False, + comment="Foreign key to the fuel category", + ) + received_or_transferred = Column( + Enum(ReceivedOrTransferredEnum), + nullable=False, + comment="Indicates whether the transfer is Received or Transferred", + ) + + compliance_report = relationship( + "ComplianceReport", back_populates="notional_transfers" + ) + fuel_category = relationship("FuelCategory") + + def __repr__(self): + return f"" diff --git a/backend/lcfs/db/models/compliance/OrgComplianceReportCountView.py b/backend/lcfs/db/models/compliance/OrgComplianceReportCountView.py new file mode 100644 index 000000000..e9d81735d --- /dev/null +++ b/backend/lcfs/db/models/compliance/OrgComplianceReportCountView.py @@ -0,0 +1,18 @@ +from sqlalchemy import Column, Integer +from lcfs.db.base import BaseModel + + +class OrgComplianceReportCountView(BaseModel): + __tablename__ = "mv_org_compliance_report_count" + __table_args__ = { + "extend_existing": True, + "comment": "Materialized view for counting organization compliance reports for the OrgComplianceReports card on the dashboard", + } + + organization_id = Column(Integer, primary_key=True, comment="Organization ID") + count_in_progress = Column( + Integer, comment="Count of in-progress compliance reports" + ) + count_awaiting_gov_review = Column( + Integer, comment="Count of compliance reports awaiting government review" + ) diff --git a/backend/lcfs/db/models/compliance/OtherUses.py b/backend/lcfs/db/models/compliance/OtherUses.py new file mode 100644 index 000000000..6271cb219 --- /dev/null +++ b/backend/lcfs/db/models/compliance/OtherUses.py @@ -0,0 +1,79 @@ +from sqlalchemy import Column, Integer, String, ForeignKey, Numeric +from sqlalchemy.orm import relationship +from lcfs.db.base import BaseModel, Auditable, Versioning + + +class 
OtherUses(BaseModel, Auditable, Versioning): + __tablename__ = "other_uses" + __table_args__ = { + "comment": "Records other uses of fuels that are subject to renewable requirements but do not earn credits." + } + + other_uses_id = Column( + Integer, + primary_key=True, + autoincrement=True, + comment="Unique identifier for the other uses record", + ) + compliance_report_id = Column( + Integer, + ForeignKey("compliance_report.compliance_report_id"), + nullable=False, + comment="Foreign key to the compliance report", + ) + fuel_type_id = Column( + Integer, + ForeignKey("fuel_type.fuel_type_id"), + nullable=False, + comment="Foreign key to the fuel type", + ) + fuel_category_id = Column( + Integer, + ForeignKey("fuel_category.fuel_category_id"), + nullable=False, + comment="Foreign key to the fuel category", + ) + provision_of_the_act_id = Column( + Integer, + ForeignKey("provision_of_the_act.provision_of_the_act_id"), + nullable=False, + comment="Foreign key to the provision of the act", + ) + fuel_code_id = Column( + Integer, + ForeignKey("fuel_code.fuel_code_id"), + nullable=True, + comment="Foreign key to the fuel code", + ) + ci_of_fuel = Column( + Numeric(10, 2), nullable=False, comment="The carbon intensity of fuel" + ) + quantity_supplied = Column( + Integer, nullable=False, comment="Quantity of fuel used. Cannot be negative." + ) + units = Column( + String, + nullable=False, + comment="Units of the fuel quantity. Auto-selected, locked field.", + ) + expected_use_id = Column( + Integer, + ForeignKey("expected_use_type.expected_use_type_id"), + nullable=False, + comment="Foreign key to the expected use type", + ) + rationale = Column( + String, + nullable=True, + comment="Rationale for the use of the fuel, required if 'Other' is selected as expected use", + ) + + compliance_report = relationship("ComplianceReport", back_populates="other_uses") + fuel_type = relationship("FuelType") + fuel_category = relationship("FuelCategory") + expected_use = relationship("ExpectedUseType") + provision_of_the_act = relationship("ProvisionOfTheAct") + fuel_code = relationship("FuelCode") + + def __repr__(self): + return f"" diff --git a/backend/lcfs/db/models/compliance/__init__.py b/backend/lcfs/db/models/compliance/__init__.py new file mode 100644 index 000000000..56244e076 --- /dev/null +++ b/backend/lcfs/db/models/compliance/__init__.py @@ -0,0 +1,35 @@ +from .AllocationAgreement import AllocationAgreement +from .AllocationTransactionType import AllocationTransactionType +from .CompliancePeriod import CompliancePeriod +from .ComplianceReport import ComplianceReport +from .ComplianceReportHistory import ComplianceReportHistory +from .ComplianceReportStatus import ComplianceReportStatus +from .ComplianceReportSummary import ComplianceReportSummary +from .FinalSupplyEquipment import FinalSupplyEquipment +from .FuelMeasurementType import FuelMeasurementType +from .FuelSupply import FuelSupply +from .FuelExport import FuelExport +from .LevelOfEquipment import LevelOfEquipment +from .NotionalTransfer import NotionalTransfer +from .OtherUses import OtherUses +from .EndUserType import EndUserType +from .
import listeners + +__all__ = [ + "AllocationAgreement", + "AllocationTransactionType", + "CompliancePeriod", + "ComplianceReport", + "ComplianceReportHistory", + "ComplianceReportStatus", + "ComplianceReportSummary", + "FinalSupplyEquipment", + "FuelMeasurementType", + "FuelSupply", + "FuelExport", + "LevelOfEquipment", + "NotionalTransfer", + "OtherUses", + "listeners", + "EndUserType", +] diff --git a/backend/lcfs/db/models/compliance/listeners.py b/backend/lcfs/db/models/compliance/listeners.py new file mode 100644 index 000000000..49ab43c34 --- /dev/null +++ b/backend/lcfs/db/models/compliance/listeners.py @@ -0,0 +1,17 @@ +from sqlalchemy import event +from sqlalchemy.exc import InvalidRequestError +from lcfs.db.models.compliance.ComplianceReportSummary import ComplianceReportSummary + + +@event.listens_for(ComplianceReportSummary, "before_update") +def prevent_update_if_locked(mapper, connection, target): + if target.is_locked: + raise InvalidRequestError("Cannot update a locked ComplianceReportSummary") + + +@event.listens_for(ComplianceReportSummary.is_locked, "set") +def prevent_unlock(target, value, oldvalue, initiator): + if oldvalue and not value: + raise InvalidRequestError( + "Cannot unlock a ComplianceReportSummary once it's locked" + ) diff --git a/backend/lcfs/db/models/document/Document.py b/backend/lcfs/db/models/document/Document.py new file mode 100644 index 000000000..865df76ee --- /dev/null +++ b/backend/lcfs/db/models/document/Document.py @@ -0,0 +1,34 @@ +from sqlalchemy import Column, Integer, String, ForeignKey +from sqlalchemy import UniqueConstraint +from sqlalchemy.orm import relationship + +from lcfs.db.base import BaseModel, Auditable +from lcfs.db.models.compliance.ComplianceReport import ( + compliance_report_document_association, +) + + +class Document(BaseModel, Auditable): + __tablename__ = "document" + __table_args__ = ( + UniqueConstraint("document_id"), + {"comment": "Main document table for storing base document information"}, + ) + + document_id = Column( + Integer, + primary_key=True, + autoincrement=True, + comment="Unique identifier for the document", + ) + + file_key = Column(String, nullable=False) + file_name = Column(String, nullable=False) + file_size = Column(Integer, nullable=False) + mime_type = Column(String, nullable=False) + + compliance_reports = relationship( + "ComplianceReport", + secondary=compliance_report_document_association, + back_populates="documents", + ) diff --git a/backend/lcfs/db/models/document/__init__.py b/backend/lcfs/db/models/document/__init__.py new file mode 100644 index 000000000..230bc2889 --- /dev/null +++ b/backend/lcfs/db/models/document/__init__.py @@ -0,0 +1,3 @@ +from .Document import Document + +__all__ = ["Document"] diff --git a/backend/lcfs/db/models/fuel/AdditionalCarbonIntensity.py b/backend/lcfs/db/models/fuel/AdditionalCarbonIntensity.py new file mode 100644 index 000000000..fb9dcbaa8 --- /dev/null +++ b/backend/lcfs/db/models/fuel/AdditionalCarbonIntensity.py @@ -0,0 +1,26 @@ +from sqlalchemy import Column, Integer, Numeric +from lcfs.db.base import BaseModel, Auditable, DisplayOrder +from sqlalchemy.orm import relationship +from sqlalchemy import ForeignKey + + +class AdditionalCarbonIntensity(BaseModel, Auditable, DisplayOrder): + + __tablename__ = "additional_carbon_intensity" + __table_args__ = { + "comment": "Additional carbon intensity attributable to the use of fuel. 
UCIs are added to the recorded carbon intensity of the fuel to account for additional carbon intensity attributed to the use of the fuel." + } + # if both fuel type and end use type id's are null, then this is a default uci + additional_uci_id = Column(Integer, primary_key=True, autoincrement=True) + fuel_type_id = Column(Integer, ForeignKey("fuel_type.fuel_type_id"), nullable=True) + end_use_type_id = Column( + Integer, ForeignKey("end_use_type.end_use_type_id"), nullable=True + ) + uom_id = Column(Integer, ForeignKey("unit_of_measure.uom_id"), nullable=False) + intensity = Column(Numeric(10, 2), nullable=False) + + fuel_type = relationship("FuelType", back_populates="additional_carbon_intensity") + end_use_type = relationship( + "EndUseType", back_populates="additional_carbon_intensity" + ) + uom = relationship("UnitOfMeasure", back_populates="additional_carbon_intensity") diff --git a/backend/lcfs/db/models/fuel/EndUseType.py b/backend/lcfs/db/models/fuel/EndUseType.py new file mode 100644 index 000000000..c1a28c81f --- /dev/null +++ b/backend/lcfs/db/models/fuel/EndUseType.py @@ -0,0 +1,31 @@ +from sqlalchemy import Column, Integer, Text, Boolean +from lcfs.db.base import BaseModel, Auditable, DisplayOrder +from sqlalchemy.orm import relationship +from lcfs.db.models.compliance.FinalSupplyEquipment import ( + final_supply_intended_use_association, +) + + +class EndUseType(BaseModel, Auditable, DisplayOrder): + + __tablename__ = "end_use_type" + __table_args__ = { + "comment": "Represents end use types for various fuel types and categories" + } + + end_use_type_id = Column(Integer, primary_key=True) + type = Column(Text, nullable=False) + sub_type = Column(Text) + intended_use = Column(Boolean, nullable=False, default=False) + + energy_effectiveness_ratio = relationship( + "EnergyEffectivenessRatio", back_populates="end_use_type" + ) + additional_carbon_intensity = relationship( + "AdditionalCarbonIntensity", back_populates="end_use_type" + ) + final_supply_equipments = relationship( + "FinalSupplyEquipment", + secondary=final_supply_intended_use_association, + back_populates="intended_use_types", + ) diff --git a/backend/lcfs/db/models/fuel/EnergyDensity.py b/backend/lcfs/db/models/fuel/EnergyDensity.py new file mode 100644 index 000000000..1d933c9d1 --- /dev/null +++ b/backend/lcfs/db/models/fuel/EnergyDensity.py @@ -0,0 +1,18 @@ +from sqlalchemy import Column, Integer, Numeric +from lcfs.db.base import BaseModel, Auditable, DisplayOrder +from sqlalchemy.orm import relationship +from sqlalchemy import ForeignKey + + +class EnergyDensity(BaseModel, Auditable, DisplayOrder): + + __tablename__ = "energy_density" + __table_args__ = {"comment": "Represents Energy Density data table"} + + energy_density_id = Column(Integer, primary_key=True, autoincrement=True) + fuel_type_id = Column(Integer, ForeignKey("fuel_type.fuel_type_id"), nullable=False) + density = Column(Numeric(10, 2), nullable=False) + uom_id = Column(Integer, ForeignKey("unit_of_measure.uom_id"), nullable=False) + + fuel_type = relationship("FuelType", back_populates="energy_density", uselist=False) + uom = relationship("UnitOfMeasure", back_populates="energy_density") diff --git a/backend/lcfs/db/models/fuel/EnergyEffectivenessRatio.py b/backend/lcfs/db/models/fuel/EnergyEffectivenessRatio.py new file mode 100644 index 000000000..6abec4dc8 --- /dev/null +++ b/backend/lcfs/db/models/fuel/EnergyEffectivenessRatio.py @@ -0,0 +1,41 @@ +from sqlalchemy import Column, Integer, Float +from lcfs.db.base import BaseModel, Auditable,
DisplayOrder, EffectiveDates +from sqlalchemy.orm import relationship +from sqlalchemy import ForeignKey + + +class EnergyEffectivenessRatio(BaseModel, Auditable, DisplayOrder, EffectiveDates): + __tablename__ = "energy_effectiveness_ratio" + __table_args__ = {"comment": "Energy effectiveness ratio (EERs)"} + + # if both fuel type & category and end use type id's are null, then this is a default eer + eer_id = Column(Integer, primary_key=True, autoincrement=True) + fuel_category_id = Column( + Integer, + ForeignKey("fuel_category.fuel_category_id"), + nullable=False, + comment="Fuel category", + ) + fuel_type_id = Column( + Integer, + ForeignKey("fuel_type.fuel_type_id"), + nullable=True, + comment="Fuel type", + ) + end_use_type_id = Column( + Integer, + ForeignKey("end_use_type.end_use_type_id"), + nullable=True, + comment="End use type", + ) + ratio = Column( + Float(3, False, 2), nullable=False, comment="Energy effectiveness ratio constant" + ) + + fuel_category = relationship( + "FuelCategory", back_populates="energy_effectiveness_ratio" + ) + fuel_type = relationship("FuelType", back_populates="energy_effectiveness_ratio") + end_use_type = relationship( + "EndUseType", back_populates="energy_effectiveness_ratio" + ) diff --git a/backend/lcfs/db/models/fuel/ExpectedUseType.py b/backend/lcfs/db/models/fuel/ExpectedUseType.py new file mode 100644 index 000000000..c6f445363 --- /dev/null +++ b/backend/lcfs/db/models/fuel/ExpectedUseType.py @@ -0,0 +1,24 @@ +from sqlalchemy import Column, Integer, Text +from sqlalchemy.orm import relationship +from lcfs.db.base import BaseModel, Auditable, DisplayOrder, EffectiveDates + + +class ExpectedUseType(BaseModel, Auditable, DisplayOrder, EffectiveDates): + __tablename__ = "expected_use_type" + __table_args__ = {"comment": "Represents an expected use type for other fuels"} + + expected_use_type_id = Column( + Integer, + primary_key=True, + autoincrement=True, + comment="Unique identifier for the expected use type", + ) + name = Column(Text, nullable=False, comment="Name of the expected use type") + description = Column( + Text, nullable=True, comment="Description of the expected use type" + ) + + other_uses = relationship("OtherUses", back_populates="expected_use") + + def __repr__(self): + return f"" diff --git a/backend/lcfs/db/models/fuel/FeedstockFuelTransportMode.py b/backend/lcfs/db/models/fuel/FeedstockFuelTransportMode.py new file mode 100644 index 000000000..4ae9dae34 --- /dev/null +++ b/backend/lcfs/db/models/fuel/FeedstockFuelTransportMode.py @@ -0,0 +1,39 @@ +from sqlalchemy import Column, Integer, ForeignKey, UniqueConstraint +from lcfs.db.base import BaseModel, Auditable +from sqlalchemy.orm import relationship + + +class FeedstockFuelTransportMode(BaseModel, Auditable): + __tablename__ = "feedstock_fuel_transport_mode" + __table_args__ = { + "comment": "Contains a list of transport modes associated with feedstock fuel" + } + + feedstock_fuel_transport_mode_id = Column( + Integer, primary_key=True, autoincrement=True, comment="Unique identifier" + ) + + fuel_code_id = Column( + Integer, + ForeignKey("fuel_code.fuel_code_id", ondelete="CASCADE"), + comment="Fuel code identifier", + ) + + transport_mode_id = Column( + Integer, + ForeignKey("transport_mode.transport_mode_id", ondelete="CASCADE"), + comment="Transport mode identifier", + ) + # Define relationships + feedstock_fuel_code = relationship( + "FuelCode", back_populates="feedstock_fuel_transport_modes" + ) + feedstock_fuel_transport_mode = relationship( + "TransportMode", 
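+ # On EnergyEffectivenessRatio above, fuel_type_id and end_use_type_id are
+ # nullable, so a null row acts as the default EER. A lookup can fall back to
+ # it explicitly; hypothetical sketch (assumes select/or_ from sqlalchemy and
+ # an AsyncSession `db`):
+ #
+ #   stmt = (
+ #       select(EnergyEffectivenessRatio)
+ #       .where(EnergyEffectivenessRatio.fuel_category_id == category_id)
+ #       .where(or_(EnergyEffectivenessRatio.fuel_type_id == fuel_type_id,
+ #                  EnergyEffectivenessRatio.fuel_type_id.is_(None)))
+ #       .order_by(EnergyEffectivenessRatio.fuel_type_id.desc().nullslast())
+ #   )
+ #   eer_row = (await db.execute(stmt)).scalars().first()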
back_populates="feedstock_fuel_transport_modes" + ) + + # Add unique constraint on fuel_code_id and transport_mode_id + __table_args__ = ( + UniqueConstraint("fuel_code_id", "transport_mode_id"), + __table_args__, + ) diff --git a/backend/lcfs/db/models/fuel/FinishedFuelTransportMode.py b/backend/lcfs/db/models/fuel/FinishedFuelTransportMode.py new file mode 100644 index 000000000..fd5632048 --- /dev/null +++ b/backend/lcfs/db/models/fuel/FinishedFuelTransportMode.py @@ -0,0 +1,39 @@ +from sqlalchemy import Column, Integer, ForeignKey, UniqueConstraint +from lcfs.db.base import BaseModel, Auditable +from sqlalchemy.orm import relationship + + +class FinishedFuelTransportMode(BaseModel, Auditable): + __tablename__ = "finished_fuel_transport_mode" + __table_args__ = { + "comment": "Contains a list of transport modes associated with finished fuel" + } + + finished_fuel_transport_mode_id = Column( + Integer, primary_key=True, autoincrement=True, comment="Unique identifier" + ) + + fuel_code_id = Column( + Integer, + ForeignKey("fuel_code.fuel_code_id", ondelete="CASCADE"), + comment="Fuel code identifier", + ) + + transport_mode_id = Column( + Integer, + ForeignKey("transport_mode.transport_mode_id", ondelete="CASCADE"), + comment="Transport mode identifier", + ) + # Define relationships + finished_fuel_code = relationship( + "FuelCode", back_populates="finished_fuel_transport_modes" + ) + finished_fuel_transport_mode = relationship( + "TransportMode", back_populates="finished_fuel_transport_modes" + ) + + # Add unique constraint on fuel_code_id and transport_mode_id + __table_args__ = ( + UniqueConstraint("fuel_code_id", "transport_mode_id"), + __table_args__, + ) diff --git a/backend/lcfs/db/models/fuel/FuelCategory.py b/backend/lcfs/db/models/fuel/FuelCategory.py new file mode 100644 index 000000000..f4d3f0791 --- /dev/null +++ b/backend/lcfs/db/models/fuel/FuelCategory.py @@ -0,0 +1,41 @@ +from sqlalchemy import Column, Integer, Text, Enum, Float, Numeric +from lcfs.db.base import BaseModel, Auditable, DisplayOrder, EffectiveDates +from sqlalchemy.orm import relationship + + +class FuelCategory(BaseModel, Auditable, DisplayOrder, EffectiveDates): + + __tablename__ = "fuel_category" + __table_args__ = {"comment": "Represents a static table for fuel categories"} + + fuel_category_id = Column( + Integer, + primary_key=True, + autoincrement=True, + comment="Unique identifier for the fuel category", + ) + category = Column( + Enum( + "Gasoline", + "Diesel", + "Jet fuel", + name="fuel_category_enum", + create_type=True, + ), + nullable=False, + comment="Name of the fuel category", + ) + description = Column( + Text, nullable=True, comment="Description of the fuel category" + ) + default_carbon_intensity = Column( + Numeric(10, 2), + nullable=False, + comment="Default carbon intensity of the fuel category", + ) + + energy_effectiveness_ratio = relationship("EnergyEffectivenessRatio") + target_carbon_intensities = relationship( + "TargetCarbonIntensity", back_populates="fuel_category" + ) + fuel_instances = relationship("FuelInstance", back_populates="fuel_category") diff --git a/backend/lcfs/db/models/fuel/FuelCode.py b/backend/lcfs/db/models/fuel/FuelCode.py new file mode 100644 index 000000000..aa3a28ed0 --- /dev/null +++ b/backend/lcfs/db/models/fuel/FuelCode.py @@ -0,0 +1,117 @@ +from sqlalchemy import ( + Column, + Integer, + String, + ForeignKey, + Numeric, + Date, + DateTime, + func, + Enum, +) +from lcfs.db.base import BaseModel, Auditable, EffectiveDates +from sqlalchemy.orm import 
relationship +from .FuelType import QuantityUnitsEnum + +class FuelCode(BaseModel, Auditable, EffectiveDates): + __tablename__ = "fuel_code" + __table_args__ = {"comment": "Contains a list of all fuel codes"} + + fuel_code_id = Column( + Integer, + primary_key=True, + autoincrement=True, + comment="Unique identifier for the fuel code", + ) + fuel_status_id = Column( + Integer, + ForeignKey("fuel_code_status.fuel_code_status_id"), + comment="Fuel code status", + ) + prefix_id = Column( + Integer, + ForeignKey("fuel_code_prefix.fuel_code_prefix_id"), + nullable=False, + comment="Prefix ID", + ) + fuel_suffix = Column(String(20), nullable=False, comment="Fuel suffix") + + company = Column(String(500), nullable=False, comment="Company name") + contact_name = Column(String(500), nullable=True, comment="Contact name") + contact_email = Column(String(500), nullable=True, comment="Contact email") + carbon_intensity = Column( + Numeric(precision=10, scale=2, asdecimal=True), nullable=False + ) + edrms = Column(String(255), nullable=False, comment="EDRMS #") + last_updated = Column( + DateTime(timezone=True), + server_default=func.now(), + onupdate=func.now(), + nullable=False, + comment="Date at which the record was last updated.", + ) + application_date = Column( + Date, nullable=False, comment="Application recorded date." + ) + approval_date = Column( + Date, nullable=True, comment="Date at which the record was approved." + ) + fuel_type_id = Column( + Integer, + ForeignKey("fuel_type.fuel_type_id"), + nullable=False, + comment="Fuel type ID", + ) + feedstock = Column(String(255), nullable=False, comment="Feedstock") + feedstock_location = Column( + String(1000), nullable=False, comment="Feedstock location" + ) + feedstock_misc = Column(String(500), nullable=True, comment="Feedstock misc") + + fuel_production_facility_city = Column( + String(1000), nullable=True, comment="City of the fuel production" + ) + fuel_production_facility_province_state = Column( + String(1000), nullable=True, comment="Province or state of the fuel production" + ) + fuel_production_facility_country = Column( + String(1000), nullable=True, comment="Country of the fuel production" + ) + + facility_nameplate_capacity = Column( + Integer, nullable=True, comment="Nameplate capacity" + ) + facility_nameplate_capacity_unit = Column( + Enum(QuantityUnitsEnum), nullable=True, comment="Units of fuel quantity" + ) + former_company = Column(String(500), nullable=True, comment="Former company") + notes = Column(String(1000), nullable=True, comment="Notes") + + # Define the relationships + fuel_code_status = relationship( + "FuelCodeStatus", back_populates="fuel_codes", lazy="joined" + ) + fuel_code_prefix = relationship( + "FuelCodePrefix", back_populates="fuel_codes", lazy="joined" + ) + fuel_type = relationship( + "FuelType", back_populates="fuel_codes", lazy="joined" + ) + + feedstock_fuel_transport_modes = relationship( + "FeedstockFuelTransportMode", + back_populates="feedstock_fuel_code", + primaryjoin="FuelCode.fuel_code_id == FeedstockFuelTransportMode.fuel_code_id", + cascade="all, delete, delete-orphan", + ) + + finished_fuel_transport_modes = relationship( + "FinishedFuelTransportMode", + back_populates="finished_fuel_code", + primaryjoin="FuelCode.fuel_code_id == FinishedFuelTransportMode.fuel_code_id", + cascade="all, delete, delete-orphan", + ) + + @property + def fuel_code(self): + return self.fuel_code_prefix.prefix + self.fuel_suffix diff --git a/backend/lcfs/db/models/fuel/FuelCodePrefix.py
b/backend/lcfs/db/models/fuel/FuelCodePrefix.py new file mode 100644 index 000000000..b69b414bf --- /dev/null +++ b/backend/lcfs/db/models/fuel/FuelCodePrefix.py @@ -0,0 +1,14 @@ +from sqlalchemy import Column, Integer, Text +from lcfs.db.base import BaseModel, Auditable, DisplayOrder +from sqlalchemy.orm import relationship + + +class FuelCodePrefix(BaseModel, Auditable, DisplayOrder): + + __tablename__ = "fuel_code_prefix" + __table_args__ = {"comment": "Represents a Fuel code prefix"} + + fuel_code_prefix_id = Column(Integer, primary_key=True, autoincrement=True) + prefix = Column(Text, nullable=False) + + fuel_codes = relationship("FuelCode", back_populates="fuel_code_prefix") diff --git a/backend/lcfs/db/models/fuel/FuelCodeStatus.py b/backend/lcfs/db/models/fuel/FuelCodeStatus.py new file mode 100644 index 000000000..308c67419 --- /dev/null +++ b/backend/lcfs/db/models/fuel/FuelCodeStatus.py @@ -0,0 +1,25 @@ +from sqlalchemy import Column, Integer, Enum, String +from sqlalchemy.orm import relationship +from lcfs.db.base import BaseModel, Auditable, DisplayOrder +import enum + + +class FuelCodeStatusEnum(enum.Enum): + Draft: str = "Draft" + Approved: str = "Approved" + Deleted: str = "Deleted" + + +class FuelCodeStatus(BaseModel, Auditable, DisplayOrder): + + __tablename__ = "fuel_code_status" + __table_args__ = {"comment": "Represents fuel code status"} + + fuel_code_status_id = Column(Integer, primary_key=True, autoincrement=True) + status = Column( + Enum(FuelCodeStatusEnum, name="fuel_code_status_enum", create_type=True), + comment="Fuel code status", + ) + description = Column(String(500), nullable=True, comment="Fuel code status description") + + fuel_codes = relationship("FuelCode", back_populates="fuel_code_status") diff --git a/backend/lcfs/db/models/fuel/FuelInstance.py b/backend/lcfs/db/models/fuel/FuelInstance.py new file mode 100644 index 000000000..746a12c1f --- /dev/null +++ b/backend/lcfs/db/models/fuel/FuelInstance.py @@ -0,0 +1,49 @@ +import sqlalchemy as sa +from sqlalchemy import Column, Integer, String, TIMESTAMP, ForeignKey +from lcfs.db.base import BaseModel, Auditable +from sqlalchemy.orm import relationship + + +class FuelInstance(BaseModel, Auditable): + + __tablename__ = "fuel_instance" + __table_args__ = {"comment": "Table linking fuel types and fuel categories"} + + fuel_instance_id = Column( + Integer, primary_key=True, autoincrement=True, nullable=False + ) + fuel_type_id = Column( + Integer, + ForeignKey("fuel_type.fuel_type_id"), + nullable=False, + comment="ID of the fuel type", + ) + fuel_category_id = Column( + Integer, + ForeignKey("fuel_category.fuel_category_id"), + nullable=False, + comment="ID of the fuel category", + ) + create_user = Column( + String, nullable=True, comment="User who created this record in the database" + ) + update_user = Column( + String, + nullable=True, + comment="User who last updated this record in the database", + ) + create_date = Column( + TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the record was created", + ) + update_date = Column( + TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=True, + comment="Date and time (UTC) when the record was last updated", + ) + + fuel_type = relationship("FuelType", back_populates="fuel_instances") + fuel_category = relationship("FuelCategory", back_populates="fuel_instances") diff --git a/backend/lcfs/db/models/fuel/FuelType.py b/backend/lcfs/db/models/fuel/FuelType.py new file mode 100644 index
000000000..5f652b738 --- /dev/null +++ b/backend/lcfs/db/models/fuel/FuelType.py @@ -0,0 +1,75 @@ +from sqlalchemy import Column, Integer, Text, Boolean, Float, Enum, Numeric +from lcfs.db.base import BaseModel, Auditable, DisplayOrder +from sqlalchemy.orm import relationship +from sqlalchemy import ForeignKey +import enum + + +# Enum for fuel quantity units +class QuantityUnitsEnum(enum.Enum): + Litres = "L" + Kilograms = "kg" + Kilowatt_hour = "kWh" + Cubic_metres = "m³" + + +class FuelType(BaseModel, Auditable, DisplayOrder): + + __tablename__ = "fuel_type" + __table_args__ = {"comment": "Represents a Fuel Type"} + + fuel_type_id = Column(Integer, primary_key=True, autoincrement=True) + fuel_type = Column(Text, nullable=False) + fossil_derived = Column( + Boolean, default=False, comment="Indicates whether the fuel is fossil-derived" + ) + other_uses_fossil_derived = Column( + Boolean, + default=False, + comment="Indicates whether the fuel is fossil-derived for other uses", + ) + provision_1_id = Column( + Integer, + ForeignKey("provision_of_the_act.provision_of_the_act_id"), + nullable=True, + ) + provision_2_id = Column( + Integer, + ForeignKey("provision_of_the_act.provision_of_the_act_id"), + nullable=True, + ) + default_carbon_intensity = Column( + Numeric(10, 2), + nullable=True, + comment="Carbon intensities: default & prescribed (gCO2e/MJ)", + ) + units = Column( + Enum(QuantityUnitsEnum), nullable=False, comment="Units of fuel quantity" + ) + unrecognized = Column( + Boolean, + default=False, + nullable=False, + comment="Indicates if the fuel type is unrecognized", + ) + + # Relationships + fuel_codes = relationship("FuelCode", back_populates="fuel_type") + energy_density = relationship( + "EnergyDensity", back_populates="fuel_type", uselist=False + ) # One energy density per fuel type + energy_effectiveness_ratio = relationship( + "EnergyEffectivenessRatio", back_populates="fuel_type" + ) + additional_carbon_intensity = relationship("AdditionalCarbonIntensity") + provision_1 = relationship( + "ProvisionOfTheAct", + foreign_keys=[provision_1_id], + back_populates="fuel_type_provision_1", + ) + provision_2 = relationship( + "ProvisionOfTheAct", + foreign_keys=[provision_2_id], + back_populates="fuel_type_provision_2", + ) + fuel_instances = relationship("FuelInstance", back_populates="fuel_type") diff --git a/backend/lcfs/db/models/fuel/ProvisionOfTheAct.py b/backend/lcfs/db/models/fuel/ProvisionOfTheAct.py new file mode 100644 index 000000000..e10178ba6 --- /dev/null +++ b/backend/lcfs/db/models/fuel/ProvisionOfTheAct.py @@ -0,0 +1,45 @@ +from sqlalchemy import Column, Integer, String, Boolean +from sqlalchemy.orm import relationship +from lcfs.db.base import BaseModel, Auditable, DisplayOrder, EffectiveDates + + +class ProvisionOfTheAct(BaseModel, Auditable, DisplayOrder, EffectiveDates): + __tablename__ = "provision_of_the_act" + __table_args__ = { + "comment": """List of provisions within Greenhouse Gas Reduction + (Renewable and Low Carbon Fuel Requirement) Act. e.g. Section 19 (a). + Used in determining carbon intensity needed for compliance reporting calculation.""" + } + + provision_of_the_act_id = Column( + Integer, + primary_key=True, + autoincrement=True, + comment="Unique identifier for the provision of the act", + ) + name = Column( + String(100), + unique=True, + nullable=False, + comment="Name of the Provision. e.g. Section 19 (a)", + ) + description = Column( + String(1000), + nullable=False, + comment="Description of the provision. This is the displayed name.
e.g. Prescribed Carbon Intensity, Approved Fuel Code.", + ) + + # relationships + fuel_type_provision_1 = relationship( + "FuelType", + foreign_keys="[FuelType.provision_1_id]", + back_populates="provision_1", + ) + fuel_type_provision_2 = relationship( + "FuelType", + foreign_keys="[FuelType.provision_2_id]", + back_populates="provision_2", + ) + + def __repr__(self): + return f"<ProvisionOfTheAct(name={self.name})>" diff --git a/backend/lcfs/db/models/fuel/TargetCarbonIntensity.py b/backend/lcfs/db/models/fuel/TargetCarbonIntensity.py new file mode 100644 index 000000000..edf7c5ef8 --- /dev/null +++ b/backend/lcfs/db/models/fuel/TargetCarbonIntensity.py @@ -0,0 +1,35 @@ +from sqlalchemy import Column, Integer, Float, ForeignKey, Numeric +from lcfs.db.base import BaseModel, Auditable, EffectiveDates +from sqlalchemy.orm import relationship + + +class TargetCarbonIntensity(BaseModel, Auditable, EffectiveDates): + __tablename__ = "target_carbon_intensity" + __table_args__ = { + "comment": "Target carbon intensity values for various fuel categories" + } + + target_carbon_intensity_id = Column(Integer, primary_key=True, autoincrement=True) + compliance_period_id = Column( + Integer, + ForeignKey("compliance_period.compliance_period_id"), + nullable=False, + comment="Compliance period ID", + ) + fuel_category_id = Column( + Integer, + ForeignKey("fuel_category.fuel_category_id"), + nullable=False, + comment="Fuel category ID", + ) + target_carbon_intensity = Column( + Numeric(10, 2), nullable=False, comment="Target Carbon Intensity (gCO2e/MJ)" + ) + reduction_target_percentage = Column( + Float, nullable=False, comment="Reduction target percentage" + ) + + fuel_category = relationship( + "FuelCategory", back_populates="target_carbon_intensities" + ) + compliance_period = relationship("CompliancePeriod") diff --git a/backend/lcfs/db/models/fuel/TransportMode.py b/backend/lcfs/db/models/fuel/TransportMode.py new file mode 100644 index 000000000..37e5b5161 --- /dev/null +++ b/backend/lcfs/db/models/fuel/TransportMode.py @@ -0,0 +1,25 @@ +from sqlalchemy import Column, Integer, Text +from lcfs.db.base import BaseModel, Auditable, DisplayOrder +from sqlalchemy.orm import relationship + + +class TransportMode(BaseModel, Auditable, DisplayOrder): + + __tablename__ = "transport_mode" + __table_args__ = {"comment": "Represents a Transport Mode Type"} + + transport_mode_id = Column(Integer, primary_key=True, autoincrement=True) + transport_mode = Column(Text, nullable=False) + + # Define relationships + feedstock_fuel_transport_modes = relationship( + "FeedstockFuelTransportMode", + back_populates="feedstock_fuel_transport_mode", + primaryjoin="TransportMode.transport_mode_id == FeedstockFuelTransportMode.transport_mode_id", + ) + + finished_fuel_transport_modes = relationship( + "FinishedFuelTransportMode", + back_populates="finished_fuel_transport_mode", + primaryjoin="TransportMode.transport_mode_id == FinishedFuelTransportMode.transport_mode_id", + ) diff --git a/backend/lcfs/db/models/fuel/UnitOfMeasure.py b/backend/lcfs/db/models/fuel/UnitOfMeasure.py new file mode 100644 index 000000000..27a9e8db9 --- /dev/null +++ b/backend/lcfs/db/models/fuel/UnitOfMeasure.py @@ -0,0 +1,18 @@ +from sqlalchemy import Column, Integer, Text +from lcfs.db.base import BaseModel, Auditable, DisplayOrder +from sqlalchemy.orm import relationship + + +class UnitOfMeasure(BaseModel, Auditable, DisplayOrder): + + __tablename__ = "unit_of_measure" + __table_args__ = {"comment": "Units used to measure energy densities"} + + uom_id = Column(Integer,
primary_key=True, autoincrement=True) + name = Column(Text, nullable=False) + description = Column(Text) + + energy_density = relationship("EnergyDensity", back_populates="uom") + additional_carbon_intensity = relationship( + "AdditionalCarbonIntensity", back_populates="uom" + ) diff --git a/backend/lcfs/db/models/fuel/__init__.py b/backend/lcfs/db/models/fuel/__init__.py new file mode 100644 index 000000000..c34b9a895 --- /dev/null +++ b/backend/lcfs/db/models/fuel/__init__.py @@ -0,0 +1,38 @@ +from .AdditionalCarbonIntensity import AdditionalCarbonIntensity +from .EndUseType import EndUseType +from .EnergyDensity import EnergyDensity +from .EnergyEffectivenessRatio import EnergyEffectivenessRatio +from .ExpectedUseType import ExpectedUseType +from .FeedstockFuelTransportMode import FeedstockFuelTransportMode +from .FinishedFuelTransportMode import FinishedFuelTransportMode +from .FuelCategory import FuelCategory +from .FuelCode import FuelCode +from .FuelCodePrefix import FuelCodePrefix +from .FuelCodeStatus import FuelCodeStatus +from .FuelType import FuelType +from .ProvisionOfTheAct import ProvisionOfTheAct +from .TransportMode import TransportMode +from .UnitOfMeasure import UnitOfMeasure +from .TargetCarbonIntensity import TargetCarbonIntensity +from .FuelInstance import FuelInstance + +__all__ = [ + "AdditionalCarbonIntensity", + "EndUseType", + "EnergyDensity", + "EnergyEffectivenessRatio", + "ExpectedUseType", + "FeedstockFuelTransportMode", + "FinishedFuelTransportMode", + "FuelCategory", + "FuelCode", + "FuelCodePrefix", + "FuelCodeStatus", + "FuelType", + "ProvisionOfTheAct", + "TransportMode", + "UnitOfMeasure", + "TargetCarbonIntensity", + "FuelInstance", +] diff --git a/backend/lcfs/db/models/initiative_agreement/InitiativeAgreement.py b/backend/lcfs/db/models/initiative_agreement/InitiativeAgreement.py new file mode 100644 index 000000000..63d5a9817 --- /dev/null +++ b/backend/lcfs/db/models/initiative_agreement/InitiativeAgreement.py @@ -0,0 +1,47 @@ +from sqlalchemy import Column, Integer, BigInteger, ForeignKey, DateTime, String +from sqlalchemy.orm import relationship +from sqlalchemy import UniqueConstraint +from lcfs.db.base import BaseModel, Auditable, EffectiveDates + + +class InitiativeAgreement(BaseModel, Auditable, EffectiveDates): + __tablename__ = "initiative_agreement" + __table_args__ = ( + UniqueConstraint("initiative_agreement_id"), + {"comment": "Government to organization compliance units initiative agreement"}, + ) + + initiative_agreement_id = Column( + Integer, + primary_key=True, + autoincrement=True, + comment="Unique identifier for the initiative_agreement", + ) + compliance_units = Column(BigInteger, comment="Compliance Units") + transaction_effective_date = Column( + DateTime, nullable=True, comment="Transaction effective date" + ) + gov_comment = Column( + String(1500), comment="Comment from the government to organization" + ) + to_organization_id = Column(Integer, ForeignKey("organization.organization_id")) + transaction_id = Column(Integer, ForeignKey("transaction.transaction_id")) + current_status_id = Column( + Integer, + ForeignKey("initiative_agreement_status.initiative_agreement_status_id"), + ) + + to_organization = relationship( + "Organization", back_populates="initiative_agreements" + ) + transaction = relationship("Transaction") + history = relationship( + "InitiativeAgreementHistory", back_populates="initiative_agreement" + ) + current_status = relationship("InitiativeAgreementStatus") +
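+    # NOTE: a minimal usage sketch (hypothetical `session` and ids, not part of
+    # this diff) of how the columns and relationships above fit together:
+    #
+    #     ia = InitiativeAgreement(
+    #         compliance_units=100,
+    #         to_organization_id=1,
+    #         current_status_id=1,  # e.g. the Draft status row
+    #         gov_comment="Issued under an initiative agreement",
+    #     )
+    #     session.add(ia)
+    #     session.commit()
+    #     print(ia.to_organization.name, ia.current_status.status)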
initiative_agreement_internal_comments = relationship( + "InitiativeAgreementInternalComment", back_populates="initiative_agreement" + ) + + def __repr__(self): + return str(self.compliance_units) diff --git a/backend/lcfs/db/models/initiative_agreement/InitiativeAgreementHistory.py b/backend/lcfs/db/models/initiative_agreement/InitiativeAgreementHistory.py new file mode 100644 index 000000000..868c96f8d --- /dev/null +++ b/backend/lcfs/db/models/initiative_agreement/InitiativeAgreementHistory.py @@ -0,0 +1,35 @@ +from sqlalchemy import Column, Integer, BigInteger, ForeignKey, DateTime +from sqlalchemy.orm import relationship +from sqlalchemy import UniqueConstraint +from lcfs.db.base import BaseModel, Auditable, EffectiveDates + + +class InitiativeAgreementHistory(BaseModel, Auditable, EffectiveDates): + __tablename__ = "initiative_agreement_history" + __table_args__ = ( + UniqueConstraint("initiative_agreement_history_id"), + {"comment": "History record for initiative agreement status change."}, + ) + + initiative_agreement_history_id = Column( + Integer, + primary_key=True, + autoincrement=True, + comment="Unique identifier for the initiative agreement history record", + ) + initiative_agreement_id = Column( + Integer, ForeignKey("initiative_agreement.initiative_agreement_id") + ) + initiative_agreement_status_id = Column( + Integer, + ForeignKey("initiative_agreement_status.initiative_agreement_status_id"), + ) + user_profile_id = Column( + Integer, + ForeignKey("user_profile.user_profile_id"), + comment="Foreign key to user_profile", + ) + + initiative_agreement = relationship("InitiativeAgreement", back_populates="history") + initiative_agreement_status = relationship("InitiativeAgreementStatus") + user_profile = relationship("UserProfile") diff --git a/backend/lcfs/db/models/initiative_agreement/InitiativeAgreementStatus.py b/backend/lcfs/db/models/initiative_agreement/InitiativeAgreementStatus.py new file mode 100644 index 000000000..71575aaff --- /dev/null +++ b/backend/lcfs/db/models/initiative_agreement/InitiativeAgreementStatus.py @@ -0,0 +1,28 @@ +from sqlalchemy import Column, Integer, Enum +from lcfs.db.base import BaseModel, Auditable, DisplayOrder +import enum + + +class InitiativeAgreementStatusEnum(enum.Enum): + Draft = "Draft" # Draft created by analyst + Recommended = "Recommended" # Recommended by analyst + Approved = "Approved" # Approved by director + Deleted = "Deleted" # Deleted by analyst + + +class InitiativeAgreementStatus(BaseModel, Auditable, DisplayOrder): + + __tablename__ = "initiative_agreement_status" + __table_args__ = {"comment": "Represents an InitiativeAgreement Status"} + + initiative_agreement_status_id = Column( + Integer, primary_key=True, autoincrement=True + ) + status = Column( + Enum( + InitiativeAgreementStatusEnum, + name="initiative_agreement_type_enum", + create_type=True, + ), + comment="Initiative Agreement Status", + ) diff --git a/backend/lcfs/db/models/initiative_agreement/__init__.py b/backend/lcfs/db/models/initiative_agreement/__init__.py new file mode 100644 index 000000000..efc213bc4 --- /dev/null +++ b/backend/lcfs/db/models/initiative_agreement/__init__.py @@ -0,0 +1,9 @@ +from .InitiativeAgreement import InitiativeAgreement +from .InitiativeAgreementHistory import InitiativeAgreementHistory +from .InitiativeAgreementStatus import InitiativeAgreementStatus + +__all__ = [ + "InitiativeAgreement", + "InitiativeAgreementHistory", + "InitiativeAgreementStatus", +] diff --git a/backend/lcfs/db/models/notification/NotificationChannel.py
b/backend/lcfs/db/models/notification/NotificationChannel.py new file mode 100644 index 000000000..2d1103b51 --- /dev/null +++ b/backend/lcfs/db/models/notification/NotificationChannel.py @@ -0,0 +1,27 @@ +import enum +from lcfs.db.base import BaseModel, Auditable +from sqlalchemy import Column, Integer, Enum, Boolean +from sqlalchemy.orm import relationship + + +class ChannelEnum(enum.Enum): + EMAIL = "EMAIL" + IN_APP = "IN_APP" + + +class NotificationChannel(BaseModel, Auditable): + __tablename__ = "notification_channel" + __table_args__ = { + "comment": "Tracks the state and defaults for communication channels" + } + + notification_channel_id = Column(Integer, primary_key=True, autoincrement=True) + channel_name = Column( + Enum(ChannelEnum, name="channel_enum", create_type=True), nullable=False + ) + enabled = Column(Boolean, default=False) + subscribe_by_default = Column(Boolean, default=False) + + notification_channel_subscriptions = relationship( + "NotificationChannelSubscription", back_populates="notification_channel" + ) diff --git a/backend/lcfs/db/models/notification/NotificationChannelSubscription.py b/backend/lcfs/db/models/notification/NotificationChannelSubscription.py new file mode 100644 index 000000000..0e8ed4bde --- /dev/null +++ b/backend/lcfs/db/models/notification/NotificationChannelSubscription.py @@ -0,0 +1,32 @@ +from lcfs.db.base import BaseModel, Auditable +from sqlalchemy import Column, Integer, ForeignKey, Boolean, UniqueConstraint +from sqlalchemy.orm import relationship + + +class NotificationChannelSubscription(BaseModel, Auditable): + __tablename__ = "notification_channel_subscription" + __table_args__ = ( + UniqueConstraint( + "user_profile_id", "notification_channel_id", "notification_type_id", + name="uq_user_channel_type" + ), + {"comment": "Represents a user's subscription to notification events"}, + ) + + notification_channel_subscription_id = Column( + Integer, primary_key=True, autoincrement=True + ) + + is_enabled = Column(Boolean, default=False) + + user_profile_id = Column(Integer, ForeignKey("user_profile.user_profile_id")) + notification_type_id = Column( + Integer, ForeignKey("notification_type.notification_type_id") + ) + notification_channel_id = Column( + Integer, ForeignKey("notification_channel.notification_channel_id") + ) + + notification_type = relationship("NotificationType", back_populates="subscriptions") + user_profile = relationship("UserProfile", back_populates="notification_channel_subscriptions") + notification_channel = relationship("NotificationChannel", back_populates="notification_channel_subscriptions") diff --git a/backend/lcfs/db/models/notification/NotificationMessage.py b/backend/lcfs/db/models/notification/NotificationMessage.py new file mode 100644 index 000000000..fddc1a961 --- /dev/null +++ b/backend/lcfs/db/models/notification/NotificationMessage.py @@ -0,0 +1,52 @@ +from sqlalchemy import Column, Integer, ForeignKey, Boolean, Text +from sqlalchemy.orm import relationship +from lcfs.db.base import BaseModel, Auditable + +# from lcfs.db.models.NotificationType import NotificationType +from lcfs.db.models.organization.Organization import ( + Organization, +) # Adjust according to your project structure + + +class NotificationMessage(BaseModel, Auditable): + __tablename__ = "notification_message" + __table_args__ = { + "comment": "Represents a notification message sent to an application user" + } + + notification_message_id = Column(Integer, primary_key=True, autoincrement=True) + + is_read = Column(Boolean, 
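+    # NOTE: a minimal sketch (hypothetical `session`/ids, not part of this diff)
+    # of creating one of the NotificationChannelSubscription rows defined above;
+    # the uq_user_channel_type constraint makes a second insert of the same
+    # (user, channel, type) triple fail with sqlalchemy.exc.IntegrityError.
+    #
+    #     sub = NotificationChannelSubscription(
+    #         user_profile_id=1,
+    #         notification_channel_id=1,  # e.g. the EMAIL channel row
+    #         notification_type_id=1,
+    #         is_enabled=True,
+    #     )
+    #     session.add(sub)
+    #     session.commit()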
default=False) + is_warning = Column(Boolean, default=False) + is_error = Column(Boolean, default=False) + is_archived = Column(Boolean, default=False) + type = Column(Text, nullable=False) + message = Column(Text, nullable=False) + + related_organization_id = Column( + Integer, ForeignKey("organization.organization_id") + ) + origin_user_profile_id = Column(Integer, ForeignKey("user_profile.user_profile_id")) + related_user_profile_id = Column( + Integer, ForeignKey("user_profile.user_profile_id") + ) + notification_type_id = Column( + Integer, ForeignKey("notification_type.notification_type_id") + ) + related_transaction_id = Column(Text, nullable=False) + + # Relationships + related_organization = relationship( + "Organization", back_populates="notification_messages" + ) + notification_type = relationship("NotificationType") + origin_user_profile = relationship( + "UserProfile", + foreign_keys=[origin_user_profile_id], + back_populates="originated_notifications", + ) + related_user_profile = relationship( + "UserProfile", + foreign_keys=[related_user_profile_id], + back_populates="notification_messages", + ) diff --git a/backend/lcfs/db/models/notification/NotificationType.py b/backend/lcfs/db/models/notification/NotificationType.py new file mode 100644 index 000000000..9c8f31914 --- /dev/null +++ b/backend/lcfs/db/models/notification/NotificationType.py @@ -0,0 +1,17 @@ +from lcfs.db.base import BaseModel, Auditable +from sqlalchemy import Column, Integer, Text, String +from sqlalchemy.orm import relationship + + +class NotificationType(BaseModel, Auditable): + __tablename__ = "notification_type" + __table_args__ = {"comment": "Represents a Notification type"} + + notification_type_id = Column(Integer, primary_key=True, autoincrement=True) + name = Column(String(255), nullable=False) + description = Column(Text, nullable=True) + email_content = Column(Text, nullable=True) + + subscriptions = relationship( + "NotificationChannelSubscription", back_populates="notification_type" + ) diff --git a/backend/lcfs/db/models/notification/__init__.py b/backend/lcfs/db/models/notification/__init__.py new file mode 100644 index 000000000..e16003e7e --- /dev/null +++ b/backend/lcfs/db/models/notification/__init__.py @@ -0,0 +1,13 @@ +from .NotificationChannel import NotificationChannel +from .NotificationChannel import ChannelEnum +from .NotificationChannelSubscription import NotificationChannelSubscription +from .NotificationMessage import NotificationMessage +from .NotificationType import NotificationType + +__all__ = [ + "NotificationChannel", + "ChannelEnum", + "NotificationChannelSubscription", + "NotificationMessage", + "NotificationType", +] diff --git a/backend/lcfs/db/models/organization/Organization.py b/backend/lcfs/db/models/organization/Organization.py new file mode 100644 index 000000000..80d8dbbff --- /dev/null +++ b/backend/lcfs/db/models/organization/Organization.py @@ -0,0 +1,120 @@ +import string +import random +from sqlalchemy import Column, BigInteger, Integer, String, ForeignKey, event, select +from sqlalchemy.orm import relationship, Session +from lcfs.db.base import BaseModel, Auditable, EffectiveDates + + +def generate_unique_code(session): + """Generates a unique 4-character alphanumeric code.""" + characters = string.ascii_uppercase + string.digits + while True: + code = "".join(random.choices(characters, k=4)) + result = session.execute( + select(Organization).filter_by(organization_code=code).limit(1) + ) + if not result.scalar(): + return code + + +class 
Organization(BaseModel, Auditable, EffectiveDates): + __tablename__ = "organization" + __table_args__ = { + "comment": "Contains a list of all of the recognized Part 3 " + "fuel suppliers, both past and present, as well as " + "an entry for the government which is also " + "considered an organization." + } + + organization_id = Column( + Integer, + primary_key=True, + autoincrement=True, + comment="Unique identifier for the organization", + ) + organization_code = Column( + String(4), + unique=True, + nullable=False, + comment="Unique 4-character alphanumeric ID", + ) + name = Column(String(500), comment="Organization's legal name") + operating_name = Column(String(500), comment="Organization's Operating name") + email = Column(String(255), comment="Organization's email address") + phone = Column(String(50), comment="Organization's phone number") + edrms_record = Column(String(100), comment="Organization's EDRMS record number") + total_balance = Column( + BigInteger, + server_default="0", + nullable=False, + comment="The total balance of compliance units for the specified organization.", + ) + reserved_balance = Column( + BigInteger, + server_default="0", + nullable=False, + comment="The reserved balance of compliance units for the specified organization.", + ) + count_transfers_in_progress = Column( + Integer, + server_default="0", + nullable=False, + comment="The count of transfers in progress for the specified organization.", + ) + + organization_status_id = Column( + Integer, ForeignKey("organization_status.organization_status_id") + ) + organization_type_id = Column( + Integer, + ForeignKey("organization_type.organization_type_id"), + comment="Organization's type", + ) + organization_address_id = Column( + Integer, ForeignKey("organization_address.organization_address_id") + ) + organization_attorney_address_id = Column( + Integer, + ForeignKey("organization_attorney_address.organization_attorney_address_id"), + ) + + org_type = relationship( + "OrganizationType", back_populates="organizations", lazy="joined" + ) + org_status = relationship( + "OrganizationStatus", back_populates="organizations", lazy="joined" + ) + org_address = relationship("OrganizationAddress", back_populates="organization") + org_attorney_address = relationship( + "OrganizationAttorneyAddress", back_populates="organization" + ) + user_profiles = relationship("UserProfile", back_populates="organization") + transactions = relationship("Transaction", back_populates="organization") + + admin_adjustments = relationship( + "AdminAdjustment", back_populates="to_organization" + ) + initiative_agreements = relationship( + "InitiativeAgreement", back_populates="to_organization" + ) + transfers_sent = relationship( + "Transfer", + foreign_keys="[Transfer.from_organization_id]", + back_populates="from_organization", + ) + transfers_received = relationship( + "Transfer", + foreign_keys="[Transfer.to_organization_id]", + back_populates="to_organization", + ) + compliance_reports = relationship("ComplianceReport", back_populates="organization") + notification_messages = relationship( + "NotificationMessage", back_populates="related_organization" + ) + + +@event.listens_for(Organization, "before_insert") +def receive_before_insert(mapper, connection, target): + session = Session(bind=connection) + target.organization_code = generate_unique_code(session) + session.close() diff --git a/backend/lcfs/db/models/OrganizationAddress.py b/backend/lcfs/db/models/organization/OrganizationAddress.py similarity index 80% rename from 
backend/lcfs/db/models/OrganizationAddress.py rename to backend/lcfs/db/models/organization/OrganizationAddress.py index 38e1c466b..1d96914b8 100644 --- a/backend/lcfs/db/models/OrganizationAddress.py +++ b/backend/lcfs/db/models/organization/OrganizationAddress.py @@ -25,17 +25,18 @@ from sqlalchemy.orm import relationship from lcfs.db.base import BaseModel, Auditable, EffectiveDates + class OrganizationAddress(BaseModel, Auditable, EffectiveDates): - __tablename__ = 'organization_address' - __table_args__ = {'comment': "Represents an organization's address."} + __tablename__ = "organization_address" + __table_args__ = {"comment": "Represents an organization's address."} - organization_address_id = Column(Integer, primary_key=True) - name = Column(String(500), nullable=True, comment='Organization name') + organization_address_id = Column(Integer, primary_key=True, autoincrement=True) + name = Column(String(500), nullable=True, comment="Organization name") street_address = Column(String(500), nullable=True) address_other = Column(String(100), nullable=True) city = Column(String(100), nullable=True) province_state = Column(String(50), nullable=True) country = Column(String(100), nullable=True) postalCode_zipCode = Column(String(10), nullable=True) - - organization = relationship('Organization', back_populates='org_address') + + organization = relationship("Organization", back_populates="org_address") diff --git a/backend/lcfs/db/models/OrganizationAttorneyAddress.py b/backend/lcfs/db/models/organization/OrganizationAttorneyAddress.py similarity index 83% rename from backend/lcfs/db/models/OrganizationAttorneyAddress.py rename to backend/lcfs/db/models/organization/OrganizationAttorneyAddress.py index fae3b9776..b6ef1df7c 100644 --- a/backend/lcfs/db/models/OrganizationAttorneyAddress.py +++ b/backend/lcfs/db/models/organization/OrganizationAttorneyAddress.py @@ -25,11 +25,14 @@ from sqlalchemy.orm import relationship from lcfs.db.base import BaseModel, Auditable, EffectiveDates + class OrganizationAttorneyAddress(BaseModel, Auditable, EffectiveDates): - __tablename__ = 'organization_attorney_address' - __table_args__ = {'comment': "Represents an organization attorney's address."} + __tablename__ = "organization_attorney_address" + __table_args__ = {"comment": "Represents an organization attorney's address."} - organization_attorney_address_id = Column(Integer, primary_key=True) + organization_attorney_address_id = Column( + Integer, primary_key=True, autoincrement=True + ) name = Column(String(500), nullable=True, comment="Attorney's Organization name") street_address = Column(String(500), nullable=True) address_other = Column(String(100), nullable=True) @@ -38,4 +41,4 @@ class OrganizationAttorneyAddress(BaseModel, Auditable, EffectiveDates): country = Column(String(100), nullable=True) postalCode_zipCode = Column(String(10), nullable=True) - organization = relationship('Organization', back_populates='org_attorney_address') + organization = relationship("Organization", back_populates="org_attorney_address") diff --git a/backend/lcfs/db/models/OrganizationBalance.py b/backend/lcfs/db/models/organization/OrganizationBalance.py similarity index 50% rename from backend/lcfs/db/models/OrganizationBalance.py rename to backend/lcfs/db/models/organization/OrganizationBalance.py index af22b442d..5b1e5ca71 100644 --- a/backend/lcfs/db/models/OrganizationBalance.py +++ b/backend/lcfs/db/models/organization/OrganizationBalance.py @@ -5,19 +5,29 @@ metadata = MetaData() # Define the SQLAlchemy view for 
OrganizationBalance -organization_balance_view = Table('organization_balance_view', metadata, - Column('organization_id', Integer, primary_key=True), - Column('compliance_units', BigInteger, comment='The actual balance of validated Low Carbon Fuel credits held by a fuel supplier between the effective_date and the expiration_date. If expiration_date is NULL then we assume that it is the current balance.'), +organization_balance_view = Table( + "organization_balance_view", + metadata, + Column("organization_id", Integer, primary_key=True), + Column( + "compliance_units", + BigInteger, + comment="The actual balance of validated Low Carbon Fuel credits held by a fuel supplier between the effective_date and the expiration_date. If expiration_date is NULL then we assume that it is the current balance.", + ), ) + # Create a class to map to the view class OrganizationBalanceView: def __init__(self, organization_id, compliance_units): self.organization_id = organization_id self.compliance_units = compliance_units + # Create a registry registry = registry() # Map the class to the view using map_imperatively -organization_balance_mapper = registry.map_imperatively(OrganizationBalanceView, organization_balance_view) +organization_balance_mapper = registry.map_imperatively( + OrganizationBalanceView, organization_balance_view +) diff --git a/backend/lcfs/db/models/organization/OrganizationStatus.py b/backend/lcfs/db/models/organization/OrganizationStatus.py new file mode 100644 index 000000000..19fce56d5 --- /dev/null +++ b/backend/lcfs/db/models/organization/OrganizationStatus.py @@ -0,0 +1,31 @@ +from sqlalchemy import Column, Integer, String, Enum +from sqlalchemy.orm import relationship +import enum +from lcfs.db.base import BaseModel, Auditable, DisplayOrder + + +class OrgStatusEnum(enum.Enum): + Unregistered = "Unregistered" + Registered = "Registered" + Suspended = "Suspended" + Canceled = "Canceled" + + +class OrganizationStatus(BaseModel, Auditable, DisplayOrder): + __tablename__ = "organization_status" + __table_args__ = {"comment": "Contains a list of organization statuses"} + + organization_status_id = Column( + Integer, + primary_key=True, + autoincrement=True, + comment="Unique identifier for the organization status", + ) + status = Column( + Enum(OrgStatusEnum, name="org_status_enum", create_type=True), + default=OrgStatusEnum.Unregistered, + comment="Organization's status", + ) + description = Column(String(500), nullable=True, comment="Organization status description") + + organizations = relationship("Organization", back_populates="org_status") diff --git a/backend/lcfs/db/models/organization/OrganizationType.py b/backend/lcfs/db/models/organization/OrganizationType.py new file mode 100644 index 000000000..4a65fd1a7 --- /dev/null +++ b/backend/lcfs/db/models/organization/OrganizationType.py @@ -0,0 +1,32 @@ +from sqlalchemy import Column, Integer, String, Enum +from sqlalchemy.orm import relationship +from lcfs.db.base import BaseModel, Auditable, DisplayOrder +import enum + + +class OrgTypeEnum(enum.Enum): + fuel_supplier = "Fuel Supplier" + electricity_supplier = "Electricity Supplier" + broker = "Broker" + utilities = "Utilities (local or public)" + + +class OrganizationType(BaseModel, Auditable, DisplayOrder): + + __tablename__ = "organization_type" + __table_args__ = {"comment": "Represents an organization type"} + + organization_type_id = Column( + Integer, + primary_key=True, + autoincrement=True, + nullable=False, + comment="Unique identifier for the organization_type", + ) + org_type = Column( +
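+    # NOTE: OrganizationBalanceView above is mapped imperatively onto a database
+    # view instead of declaratively onto a table; a minimal query sketch
+    # (hypothetical `session`, not part of this diff) under that assumption:
+    #
+    #     from sqlalchemy import select
+    #
+    #     stmt = select(OrganizationBalanceView).where(
+    #         OrganizationBalanceView.organization_id == 1
+    #     )
+    #     row = session.scalars(stmt).first()
+    #     units = row.compliance_units if row else 0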
Enum(OrgTypeEnum, name="org_type_enum", create_type=True), + default=OrgTypeEnum.fuel_supplier, + comment="Organization's type", + ) + description = Column(String(500), nullable=True, comment="Organization type description") + organizations = relationship("Organization", back_populates="org_type") diff --git a/backend/lcfs/db/models/organization/__init__.py b/backend/lcfs/db/models/organization/__init__.py new file mode 100644 index 000000000..ba0462ff8 --- /dev/null +++ b/backend/lcfs/db/models/organization/__init__.py @@ -0,0 +1,15 @@ +from .Organization import Organization +from .OrganizationAddress import OrganizationAddress +from .OrganizationAttorneyAddress import OrganizationAttorneyAddress +from .OrganizationBalance import OrganizationBalanceView +from .OrganizationStatus import OrganizationStatus +from .OrganizationType import OrganizationType + +__all__ = [ + "Organization", + "OrganizationAddress", + "OrganizationAttorneyAddress", + "OrganizationBalanceView", + "OrganizationStatus", + "OrganizationType", +] diff --git a/backend/lcfs/db/models/transaction/DirectorReviewTransactionCountView.py b/backend/lcfs/db/models/transaction/DirectorReviewTransactionCountView.py new file mode 100644 index 000000000..5ebe58e22 --- /dev/null +++ b/backend/lcfs/db/models/transaction/DirectorReviewTransactionCountView.py @@ -0,0 +1,13 @@ +from sqlalchemy import Column, Integer, String +from lcfs.db.base import BaseModel + + +class DirectorReviewTransactionCountView(BaseModel): + __tablename__ = "mv_director_review_transaction_count" + __table_args__ = { + "extend_existing": True, + "comment": "Materialized view for counting transactions and compliance reports for the DirectorReview card on the dashboard", + } + + transaction_type = Column(String, primary_key=True, comment="Type of transaction") + count_for_review = Column(Integer, comment="Count of transactions for review") diff --git a/backend/lcfs/db/models/transaction/Transaction.py b/backend/lcfs/db/models/transaction/Transaction.py new file mode 100644 index 000000000..10042ab68 --- /dev/null +++ b/backend/lcfs/db/models/transaction/Transaction.py @@ -0,0 +1,41 @@ +import enum + +from sqlalchemy import Column, Integer, BigInteger, ForeignKey, Enum +from sqlalchemy import UniqueConstraint +from sqlalchemy.orm import relationship + +from lcfs.db.base import BaseModel, Auditable, EffectiveDates + + +class TransactionActionEnum(enum.Enum): + Adjustment = "Adjustment" + Reserved = "Reserved" + Released = "Released" + + +class Transaction(BaseModel, Auditable, EffectiveDates): + __tablename__ = "transaction" + __table_args__ = ( + UniqueConstraint("transaction_id"), + { + "comment": "Contains a list of all of the government to organization and organization to organization transactions."
+ }, + ) + + transaction_id = Column( + Integer, + primary_key=True, + autoincrement=True, + comment="Unique identifier for the transactions", + ) + compliance_units = Column(BigInteger, comment="Compliance Units") + organization_id = Column(Integer, ForeignKey("organization.organization_id")) + transaction_action = Column( + Enum(TransactionActionEnum, name="transaction_action_enum", create_type=True), + comment="Action type for the transaction, e.g., Adjustment, Reserved, or Released.", + ) + + organization = relationship("Organization", back_populates="transactions") + + def __repr__(self): + return f"<Transaction(transaction_id={self.transaction_id}, action={self.transaction_action})>" diff --git a/backend/lcfs/db/models/transaction/TransactionCountView.py b/backend/lcfs/db/models/transaction/TransactionCountView.py new file mode 100644 index 000000000..ac01c0602 --- /dev/null +++ b/backend/lcfs/db/models/transaction/TransactionCountView.py @@ -0,0 +1,13 @@ +from sqlalchemy import Column, Integer, String +from lcfs.db.base import BaseModel + + +class TransactionCountView(BaseModel): + __tablename__ = "mv_transaction_count" + __table_args__ = { + "extend_existing": True, + "comment": "Materialized view for counting transactions in progress for IDIR users", + } + + transaction_type = Column(String, primary_key=True, comment="Type of transaction") + count_in_progress = Column(Integer, comment="Count of transactions in progress") diff --git a/backend/lcfs/db/models/transaction/TransactionStatusView.py b/backend/lcfs/db/models/transaction/TransactionStatusView.py new file mode 100644 index 000000000..f924be9f7 --- /dev/null +++ b/backend/lcfs/db/models/transaction/TransactionStatusView.py @@ -0,0 +1,12 @@ +from sqlalchemy import Column, String +from lcfs.db.base import BaseModel + + +# A database view that aggregates all possible transaction statuses
# so we can return them as a full list +class TransactionStatusView(BaseModel): + __tablename__ = "transaction_status_view" + __table_args__ = {"extend_existing": True} + status = Column(String, primary_key=True) + create_date = Column(String) + update_date = Column(String) diff --git a/backend/lcfs/db/models/transaction/TransactionView.py b/backend/lcfs/db/models/transaction/TransactionView.py new file mode 100644 index 000000000..3f15f7222 --- /dev/null +++ b/backend/lcfs/db/models/transaction/TransactionView.py @@ -0,0 +1,43 @@ +import enum +from sqlalchemy import Column, Integer, String, Float, DateTime, Boolean +from lcfs.db.base import BaseModel + + +class TransactionViewTypeEnum(enum.Enum): + Transfer = "Transfer" + InitiativeAgreement = "InitiativeAgreement" + AdminAdjustment = "AdminAdjustment" + + +# This class represents a database view for transactions. It is intended to consolidate +# and simplify access to various transaction-related data by providing a unified interface +# to query against. The view combines data from multiple underlying tables, making it +# easier to retrieve transaction details such as type, organizations involved, quantities, +# and statuses without requiring complex joins in every query. +class TransactionView(BaseModel): + # Table name for the SQLAlchemy model, matching the database view name + __tablename__ = "mv_transaction_aggregate" + + # Allows modification of the class definition without requiring a restart, + # useful for dynamic schema changes or when working with database views. + __table_args__ = {"extend_existing": True} + + # Columns definitions mapping to the database fields + # id and type columns are defined as a composite primary key.
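+    # NOTE: a minimal query sketch (hypothetical `session`, not part of this
+    # diff); the view backs read-only reporting, so rows are only ever selected:
+    #
+    #     from sqlalchemy import select
+    #
+    #     stmt = (
+    #         select(TransactionView)
+    #         .where(TransactionView.status == "Recorded")
+    #         .order_by(TransactionView.create_date.desc())
+    #     )
+    #     recorded = session.scalars(stmt).all()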
+ transaction_id = Column(Integer, primary_key=True) + transaction_type = Column(String, primary_key=True) + from_organization_id = Column(Integer) + from_organization = Column(String) + to_organization_id = Column(Integer) + to_organization = Column(String) + quantity = Column(Integer) + price_per_unit = Column(Float) + status = Column(String) + compliance_period = Column(String) + comment = Column(String) + category = Column(String) + transaction_effective_date = Column(DateTime) + recorded_date = Column(DateTime) + approved_date = Column(DateTime) + create_date = Column(DateTime) + update_date = Column(DateTime) diff --git a/backend/lcfs/db/models/transaction/__init__.py b/backend/lcfs/db/models/transaction/__init__.py new file mode 100644 index 000000000..2c05b038d --- /dev/null +++ b/backend/lcfs/db/models/transaction/__init__.py @@ -0,0 +1,9 @@ +from .Transaction import Transaction +from .TransactionStatusView import TransactionStatusView +from .TransactionView import TransactionView + +__all__ = [ + "Transaction", + "TransactionStatusView", + "TransactionView", +] diff --git a/backend/lcfs/db/models/transfer/Transfer.py b/backend/lcfs/db/models/transfer/Transfer.py new file mode 100644 index 000000000..5869dce0b --- /dev/null +++ b/backend/lcfs/db/models/transfer/Transfer.py @@ -0,0 +1,79 @@ +import enum +from sqlalchemy import Column, Integer, ForeignKey, DateTime, Enum, String, Numeric +from sqlalchemy.orm import relationship +from sqlalchemy import UniqueConstraint +from lcfs.db.base import BaseModel, Auditable, EffectiveDates + + +class TransferRecommendationEnum(enum.Enum): + Record = "Record" + Refuse = "Refuse" + + +class Transfer(BaseModel, Auditable, EffectiveDates): + __tablename__ = "transfer" + __table_args__ = ( + UniqueConstraint("transfer_id"), + {"comment": "Records of transfer from Organization to Organization"}, + ) + + transfer_id = Column( + Integer, + primary_key=True, + autoincrement=True, + comment="Unique identifier for the org to org transfer record", + ) + from_organization_id = Column(Integer, ForeignKey("organization.organization_id")) + to_organization_id = Column(Integer, ForeignKey("organization.organization_id")) + from_transaction_id = Column(Integer, ForeignKey("transaction.transaction_id")) + to_transaction_id = Column(Integer, ForeignKey("transaction.transaction_id")) + agreement_date = Column(DateTime, comment="Agreement date of the transfer") + transaction_effective_date = Column(DateTime, comment="transaction effective date") + price_per_unit = Column( + Numeric(10, 2), + comment="Price per unit with two decimal places", + ) + quantity = Column(Integer, comment="Quantity of units") + # compliance_period = Column(Integer, ) + from_org_comment = Column( + String(1000), comment="Comment from the from-organization" + ) + to_org_comment = Column(String(1000), comment="Comment from the to-organization") + gov_comment = Column( + String(1500), comment="Comment from the government to organizations" + ) + transfer_category_id = Column( + Integer, ForeignKey("transfer_category.transfer_category_id") + ) + current_status_id = Column( + Integer, ForeignKey("transfer_status.transfer_status_id") + ) + recommendation = Column( + Enum( + TransferRecommendationEnum, + name="transfer_recommendation_enum", + create_type=True, + ), + comment="Analyst recommendation for the transfer.", + ) + + # relationships + transfer_category = relationship("TransferCategory") + transfer_history = relationship("TransferHistory", back_populates="transfer") + current_status =
relationship("TransferStatus") + transfer_internal_comments = relationship( + "TransferInternalComment", back_populates="transfer" + ) + + from_transaction = relationship("Transaction", foreign_keys=[from_transaction_id]) + to_transaction = relationship("Transaction", foreign_keys=[to_transaction_id]) + from_organization = relationship( + "Organization", + foreign_keys=[from_organization_id], + back_populates="transfers_sent", + ) + to_organization = relationship( + "Organization", + foreign_keys=[to_organization_id], + back_populates="transfers_received", + ) diff --git a/backend/lcfs/db/models/transfer/TransferCategory.py b/backend/lcfs/db/models/transfer/TransferCategory.py new file mode 100644 index 000000000..8bab13e73 --- /dev/null +++ b/backend/lcfs/db/models/transfer/TransferCategory.py @@ -0,0 +1,33 @@ +from sqlalchemy import Column, Integer, String, Enum +from sqlalchemy.orm import relationship +from sqlalchemy import UniqueConstraint +from lcfs.db.base import BaseModel, Auditable, EffectiveDates +import enum + + +class TransferCategoryEnum(enum.Enum): + A = "A" + B = "B" + C = "C" + D = "D" + + +class TransferCategory(BaseModel, Auditable, EffectiveDates): + __tablename__ = "transfer_category" + __table_args__ = ( + UniqueConstraint("transfer_category_id"), + {"comment": "Transfer Category"}, + ) + + transfer_category_id = Column( + Integer, + primary_key=True, + autoincrement=True, + comment="Unique identifier for the transfer category", + ) + category = Column( + Enum(TransferCategoryEnum, create_type=True), comment="Transfer category" + ) + + def __repr__(self): + return self.category diff --git a/backend/lcfs/db/models/transfer/TransferHistory.py b/backend/lcfs/db/models/transfer/TransferHistory.py new file mode 100644 index 000000000..1cdd702b4 --- /dev/null +++ b/backend/lcfs/db/models/transfer/TransferHistory.py @@ -0,0 +1,28 @@ +from sqlalchemy import Column, Integer, ForeignKey, DateTime +from sqlalchemy.orm import relationship +from lcfs.db.base import BaseModel, Auditable, EffectiveDates + + +class TransferHistory(BaseModel, Auditable, EffectiveDates): + __tablename__ = "transfer_history" + __table_args__ = {"comment": "Records the status changes of a transfer."} + + transfer_history_id = Column( + Integer, + primary_key=True, + autoincrement=True, + comment="Unique identifier for the transfer history record", + ) + transfer_id = Column(Integer, ForeignKey("transfer.transfer_id")) + transfer_status_id = Column( + Integer, ForeignKey("transfer_status.transfer_status_id") + ) + user_profile_id = Column( + Integer, + ForeignKey("user_profile.user_profile_id"), + comment="Foreign key to user_profile", + ) + + transfer = relationship("Transfer", back_populates="transfer_history") + transfer_status = relationship("TransferStatus") + user_profile = relationship("UserProfile") diff --git a/backend/lcfs/db/models/transfer/TransferStatus.py b/backend/lcfs/db/models/transfer/TransferStatus.py new file mode 100644 index 000000000..ba77f6775 --- /dev/null +++ b/backend/lcfs/db/models/transfer/TransferStatus.py @@ -0,0 +1,38 @@ +from sqlalchemy import Column, Integer, Enum, Boolean +from lcfs.db.base import BaseModel, Auditable, DisplayOrder +import enum + + +class TransferStatusEnum(enum.Enum): + Draft = "Draft" # Created by Org + Deleted = "Deleted" # Deleted by Org + Sent = "Sent" # Sent to Org + Submitted = "Submitted" # Submitted to gov by organization.. 
+ Recommended = ( + "Recommended" # Analyst makes recommendation to record or refuse the transfer + ) + Recorded = "Recorded" # Recorded - Approved by Director + Refused = "Refused" # Refused - Declined by director + Declined = "Declined" # Declined - declined by Organization + Rescinded = "Rescinded" # Rescinded - Cancelled by Organization + + +class TransferStatus(BaseModel, Auditable, DisplayOrder): + + __tablename__ = "transfer_status" + __table_args__ = {"comment": "Represents a Transfer Status"} + + transfer_status_id = Column(Integer, primary_key=True, autoincrement=True) + status = Column( + Enum(TransferStatusEnum, name="transfer_type_enum", create_type=True), + comment="Transfer Status", + ) + visible_to_transferor = Column( + Boolean, default=False, comment="Visibility for transferor entities" + ) + visible_to_transferee = Column( + Boolean, default=False, comment="Visibility for transferee entities" + ) + visible_to_government = Column( + Boolean, default=False, comment="Visibility for government entities" + ) diff --git a/backend/lcfs/db/models/transfer/__init__.py b/backend/lcfs/db/models/transfer/__init__.py new file mode 100644 index 000000000..2da018c71 --- /dev/null +++ b/backend/lcfs/db/models/transfer/__init__.py @@ -0,0 +1,13 @@ +from .Transfer import Transfer +from .TransferCategory import TransferCategory +from .TransferHistory import TransferHistory +from lcfs.db.models.comment.TransferInternalComment import TransferInternalComment +from .TransferStatus import TransferStatus + +__all__ = [ + "Transfer", + "TransferCategory", + "TransferHistory", + "TransferInternalComment", + "TransferStatus", +] diff --git a/backend/lcfs/db/models/user/Role.py b/backend/lcfs/db/models/user/Role.py new file mode 100644 index 000000000..579dfe05a --- /dev/null +++ b/backend/lcfs/db/models/user/Role.py @@ -0,0 +1,52 @@ +from sqlalchemy import Column, Integer, String, Boolean, UniqueConstraint, Enum +from sqlalchemy.orm import relationship +from lcfs.db.base import Auditable, BaseModel +import enum + + +class RoleEnum(enum.Enum): + GOVERNMENT = "Government" + ADMINISTRATOR = "Administrator" + ANALYST = "Analyst" + COMPLIANCE_MANAGER = "Compliance Manager" + DIRECTOR = "Director" + + SUPPLIER = "Supplier" + MANAGE_USERS = "Manage Users" + TRANSFER = "Transfer" + COMPLIANCE_REPORTING = "Compliance Reporting" + SIGNING_AUTHORITY = "Signing Authority" + READ_ONLY = "Read Only" + + +class Role(BaseModel, Auditable): + __tablename__ = "role" + __table_args__ = ( + UniqueConstraint("name"), + {"comment": "To hold all the available roles and their descriptions."}, + ) + role_id = Column(Integer, primary_key=True, autoincrement=True) + name = Column( + Enum(RoleEnum, name="role_enum", create_type=True), + unique=True, + nullable=False, + comment="Role code. Natural key. Used internally. eg Admin, GovUser, GovDirector, etc", + ) + description = Column( + String(1000), + comment="Descriptive text explaining this role. This is what's shown to the user.", + ) + is_government_role = Column( + Boolean, + default=False, + comment="Flag. True if this is a government role (eg. 
Analyst, Administrator)", + ) + display_order = Column(Integer, comment="Relative rank in display sorting order") + + user_roles = relationship("UserRole", back_populates="role") + + def __repr__(self): + return "<Role %s>" % self.name + + def __str__(self): + return self.name diff --git a/backend/lcfs/db/models/user/UserLoginHistory.py b/backend/lcfs/db/models/user/UserLoginHistory.py new file mode 100644 index 000000000..560b80f4d --- /dev/null +++ b/backend/lcfs/db/models/user/UserLoginHistory.py @@ -0,0 +1,30 @@ +from sqlalchemy import Column, String, Boolean, Integer +from lcfs.db.base import BaseModel + + +class UserLoginHistory(BaseModel): + __tablename__ = "user_login_history" + __table_args__ = {"comment": "Keeps track of all user login attempts"} + + user_login_history_id = Column(Integer, primary_key=True) + keycloak_email = Column( + String, + nullable=False, + comment="Keycloak email address to associate on first login.", + ) + external_username = Column( + String(150), nullable=True, comment="BCeID or IDIR username" + ) + keycloak_user_id = Column( + String(150), + nullable=True, + comment="This is the unique id returned from Keycloak and is the main mapping key between the LCFS user and the Keycloak user. The identity provider type will be appended as a suffix after an @ symbol. For ex. asdf1234@bceidbasic or asdf1234@idir", + ) + is_login_successful = Column( + Boolean, + default=False, + comment="True if this login attempt was successful, false on failure.", + ) + login_error_message = Column( + String(500), nullable=True, comment="Error text on unsuccessful login attempt." + ) diff --git a/backend/lcfs/db/models/user/UserProfile.py b/backend/lcfs/db/models/user/UserProfile.py new file mode 100644 index 000000000..7fecca2f8 --- /dev/null +++ b/backend/lcfs/db/models/user/UserProfile.py @@ -0,0 +1,89 @@ +from typing import Optional +from lcfs.web.api.organizations.schema import OrganizationSummaryResponseSchema +from lcfs.db.base import Auditable, BaseModel, UserTypeEnum + +from sqlalchemy import Column, Integer, String, Boolean, ForeignKey, UniqueConstraint +from sqlalchemy.orm import relationship +from lcfs.db.models.notification.NotificationMessage import NotificationMessage +from lcfs.db.models.user.Role import RoleEnum + + +class UserProfile(BaseModel, Auditable): + __tablename__ = "user_profile" + __table_args__ = ( + UniqueConstraint("keycloak_username"), + {"comment": "Users who may access the application"}, + ) + + user_profile_id = Column(Integer, primary_key=True, autoincrement=True) + keycloak_user_id = Column( + String(150), nullable=True, comment="Unique id returned from Keycloak" + ) + keycloak_email = Column( + String(255), nullable=True, comment="keycloak email address" + ) + keycloak_username = Column( + String(150), unique=True, nullable=False, comment="keycloak Username" + ) + email = Column(String(255), nullable=True, comment="Primary email address") + title = Column(String(100), nullable=True, comment="Professional Title") + phone = Column(String(50), nullable=True, comment="Primary phone number") + mobile_phone = Column(String(50), nullable=True, comment="Mobile phone number") + first_name = Column(String(100), nullable=True, comment="First name") + last_name = Column(String(100), nullable=True, comment="Last name") + is_active = Column( + Boolean, nullable=False, default=True, comment="Is the user active?"
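+    # NOTE: a minimal sketch (hypothetical `session`/id, not part of this diff)
+    # of the role helpers defined further down on this class:
+    #
+    #     user = session.get(UserProfile, 1)
+    #     print(user.role_names)   # e.g. [RoleEnum.GOVERNMENT, RoleEnum.ANALYST]
+    #     if user.is_government:   # True when any role is RoleEnum.GOVERNMENT
+    #         ...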
+ ) + organization_id = Column(Integer, ForeignKey("organization.organization_id")) + + organization = relationship("Organization", back_populates="user_profiles") + user_roles = relationship("UserRole", back_populates="user_profile") + + notification_channel_subscriptions = relationship( + "NotificationChannelSubscription", back_populates="user_profile" + ) + + originated_notifications = relationship( + "NotificationMessage", + foreign_keys=[NotificationMessage.origin_user_profile_id], + back_populates="origin_user_profile", + ) + notification_messages = relationship( + "NotificationMessage", + foreign_keys=[NotificationMessage.related_user_profile_id], + back_populates="related_user_profile", + ) + + @classmethod + def form_user_profile(cls, user_profile, user_data): + """ + Update the given UserProfile in place with fields from a UserCreate payload. + """ + if user_data.get("organization"): + organization_data = OrganizationSummaryResponseSchema( + **user_data.pop("organization", {}) + ) + user_data["organization_id"] = organization_data.organization_id + + # Mutates the passed-in instance rather than constructing a new one. + for field in user_data: + setattr(user_profile, field, user_data[field]) + + return user_profile + + @property + def role_names(self): + return [role.role.name for role in self.user_roles] + + @property + def is_government(self) -> bool: + """Returns True if the user has the 'GOVERNMENT' role.""" + return any(role == RoleEnum.GOVERNMENT for role in self.role_names) + + @property + def user_type(self) -> Optional[UserTypeEnum]: + if any(role == RoleEnum.GOVERNMENT for role in self.role_names): + return UserTypeEnum.GOVERNMENT + elif any(role == RoleEnum.SUPPLIER for role in self.role_names): + return UserTypeEnum.SUPPLIER + return None diff --git a/backend/lcfs/db/models/user/UserRole.py b/backend/lcfs/db/models/user/UserRole.py new file mode 100644 index 000000000..886efe790 --- /dev/null +++ b/backend/lcfs/db/models/user/UserRole.py @@ -0,0 +1,49 @@ +from sqlalchemy import Column, ForeignKey, Integer, UniqueConstraint +from sqlalchemy.orm import relationship + +from lcfs.db.base import Auditable, BaseModel +from lcfs.db.models.user import Role, UserProfile # noqa: F401 (ensures mapper registration) + + +class UserRole(BaseModel, Auditable): + __tablename__ = "user_role" + __table_args__ = ( + UniqueConstraint( + "user_profile_id", "role_id", name="user_role_unique_constraint" + ), + {"comment": "Contains the user and role relationships"}, + ) + # Columns + user_role_id = Column( + Integer, + primary_key=True, + autoincrement=True, + comment="Unique ID for the user role", + ) + user_profile_id = Column( + Integer, + ForeignKey("user_profile.user_profile_id"), + comment="Foreign key to user_profile", + ) + role_id = Column(Integer, ForeignKey("role.role_id"), comment="Foreign key to role") + # Relationships + user_profile = relationship("UserProfile", back_populates="user_roles") + role = relationship("Role", back_populates="user_roles") + + @classmethod + def get_user_roles(cls, user_profile): + return [ + UserRole( + user_profile_id=user_profile.user_profile_id, role_id=role["role_id"] + ) + for role in user_profile.roles + ] + + def to_dict(self): + return { + "role_id": self.role.role_id, + "name": self.role.name, + "description": self.role.description, + "display_order": self.role.display_order, + "is_government_role": self.role.is_government_role, + } diff --git a/backend/lcfs/db/models/user/__init__.py b/backend/lcfs/db/models/user/__init__.py new file mode 100644 index 000000000..dd75ae0cd --- 
/dev/null +++ b/backend/lcfs/db/models/user/__init__.py @@ -0,0 +1,11 @@ +from .Role import Role +from .UserLoginHistory import UserLoginHistory +from .UserProfile import UserProfile +from .UserRole import UserRole + +__all__ = [ + "Role", + "UserLoginHistory", + "UserProfile", + "UserRole", +] diff --git a/frontend/src/components/Table/BCTable/index.jsx b/backend/lcfs/db/seeders/__init__.py similarity index 100% rename from frontend/src/components/Table/BCTable/index.jsx rename to backend/lcfs/db/seeders/__init__.py diff --git a/backend/lcfs/db/seeders/common/admin_adjustment_status_seeder.py b/backend/lcfs/db/seeders/common/admin_adjustment_status_seeder.py new file mode 100644 index 000000000..2e847b80e --- /dev/null +++ b/backend/lcfs/db/seeders/common/admin_adjustment_status_seeder.py @@ -0,0 +1,58 @@ +import structlog +from sqlalchemy import select +from lcfs.db.models.admin_adjustment.AdminAdjustmentStatus import ( + AdminAdjustmentStatus, + AdminAdjustmentStatusEnum, +) + +logger = structlog.get_logger(__name__) + + +async def seed_admin_adjustment_statuses(session): + """ + Seeds the admin adjustment statuses into the database, if they do not already exist. + + Args: + session: The database session for committing the new records. + """ + admin_adjustment_statuses_to_seed = [ + { + "admin_adjustment_status_id": 1, + "status": AdminAdjustmentStatusEnum.Draft, + }, + { + "admin_adjustment_status_id": 2, + "status": AdminAdjustmentStatusEnum.Recommended, + }, + { + "admin_adjustment_status_id": 3, + "status": AdminAdjustmentStatusEnum.Approved, + }, + { + "admin_adjustment_status_id": 4, + "status": AdminAdjustmentStatusEnum.Deleted, + }, + ] + + try: + for status_data in admin_adjustment_statuses_to_seed: + exists = await session.execute( + select(AdminAdjustmentStatus).where( + AdminAdjustmentStatus.status == status_data["status"] + ) + ) + if not exists.scalars().first(): + status = AdminAdjustmentStatus(**status_data) + session.add(status) + + except Exception as e: + context = { + "function": "seed_admin_adjustment_statuses", + } + logger.error( + "Error occurred while seeding admin adjustment statuses", + error=str(e), + exc_info=e, + **context, + ) + raise diff --git a/backend/lcfs/db/seeders/common/allocation_agreement_seeder.py b/backend/lcfs/db/seeders/common/allocation_agreement_seeder.py new file mode 100644 index 000000000..e48a57227 --- /dev/null +++ b/backend/lcfs/db/seeders/common/allocation_agreement_seeder.py @@ -0,0 +1,56 @@ +import structlog +from sqlalchemy import select +from datetime import datetime +from lcfs.db.models.compliance.AllocationTransactionType import ( + AllocationTransactionType, +) + +logger = structlog.get_logger(__name__) + + +async def seed_allocation_transaction_types(session): + """ + Seeds the allocation transaction types into the database, if they do not already exist. + + Args: + session: The database session for committing the new records. 
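+
+ Example (illustrative sketch only; the AsyncSession is assumed to be
+ created, committed, and closed by the calling seed pipeline):
+
+ await seed_allocation_transaction_types(session)
+ await session.commit() # commit is the caller's responsibility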
+ """ + allocation_transaction_types_to_seed = [ + { + "allocation_transaction_type_id": 1, + "type": "Allocated from", + "description": "Fuel allocated from another supplier under an allocation agreement", + "display_order": 1, + "effective_date": datetime.strptime("2012-01-01", "%Y-%m-%d").date(), + }, + { + "allocation_transaction_type_id": 2, + "type": "Allocated to", + "description": "Fuel allocated to another supplier under an allocation agreement", + "display_order": 2, + "effective_date": datetime.strptime("2012-01-01", "%Y-%m-%d").date(), + }, + ] + + try: + for type_data in allocation_transaction_types_to_seed: + exists = await session.execute( + select(AllocationTransactionType).where( + AllocationTransactionType.type == type_data["type"] + ) + ) + if not exists.scalars().first(): + transaction_type = AllocationTransactionType(**type_data) + session.add(transaction_type) + + except Exception as e: + context = { + "function": "seed_allocation_transaction_types", + } + logger.error( + "Error occurred while seeding allocation transaction types", + error=str(e), + exc_info=e, + **context, + ) + raise diff --git a/backend/lcfs/db/seeders/common/compliance_period_seeder.py b/backend/lcfs/db/seeders/common/compliance_period_seeder.py new file mode 100644 index 000000000..6c0f36487 --- /dev/null +++ b/backend/lcfs/db/seeders/common/compliance_period_seeder.py @@ -0,0 +1,190 @@ +import structlog +from sqlalchemy import select +from datetime import datetime +from lcfs.db.models.compliance.CompliancePeriod import CompliancePeriod + +logger = structlog.get_logger(__name__) + + +async def seed_compliance_periods(session): + """ + Seeds the compliance periods into the database, if they do not already exist. + + Args: + session: The database session for committing the new records. 
+ """ + + compliance_periods_to_seed = [ + { + "compliance_period_id": 1, + "description": "2010", + "display_order": 1, + "effective_date": datetime.strptime("2010-01-01", "%Y-%m-%d").date(), + "expiration_date": datetime.strptime("2010-12-31", "%Y-%m-%d").date(), + }, + { + "compliance_period_id": 2, + "description": "2011", + "display_order": 2, + "effective_date": datetime.strptime("2011-01-01", "%Y-%m-%d").date(), + "expiration_date": datetime.strptime("2011-12-31", "%Y-%m-%d").date(), + }, + { + "compliance_period_id": 3, + "description": "2012", + "display_order": 3, + "effective_date": datetime.strptime("2012-01-01", "%Y-%m-%d").date(), + "expiration_date": datetime.strptime("2012-12-31", "%Y-%m-%d").date(), + }, + { + "compliance_period_id": 4, + "description": "2013", + "display_order": 4, + "effective_date": datetime.strptime("2013-01-01", "%Y-%m-%d").date(), + "expiration_date": datetime.strptime("2013-12-31", "%Y-%m-%d").date(), + }, + { + "compliance_period_id": 5, + "description": "2014", + "display_order": 5, + "effective_date": datetime.strptime("2014-01-01", "%Y-%m-%d").date(), + "expiration_date": datetime.strptime("2014-12-31", "%Y-%m-%d").date(), + }, + { + "compliance_period_id": 6, + "description": "2015", + "display_order": 6, + "effective_date": datetime.strptime("2015-01-01", "%Y-%m-%d").date(), + "expiration_date": datetime.strptime("2015-12-31", "%Y-%m-%d").date(), + }, + { + "compliance_period_id": 7, + "description": "2016", + "display_order": 7, + "effective_date": datetime.strptime("2016-01-01", "%Y-%m-%d").date(), + "expiration_date": datetime.strptime("2016-12-31", "%Y-%m-%d").date(), + }, + { + "compliance_period_id": 8, + "description": "2017", + "display_order": 8, + "effective_date": datetime.strptime("2017-01-01", "%Y-%m-%d").date(), + "expiration_date": datetime.strptime("2017-12-31", "%Y-%m-%d").date(), + }, + { + "compliance_period_id": 9, + "description": "2018", + "display_order": 9, + "effective_date": datetime.strptime("2018-01-01", "%Y-%m-%d").date(), + "expiration_date": datetime.strptime("2018-12-31", "%Y-%m-%d").date(), + }, + { + "compliance_period_id": 10, + "description": "2019", + "display_order": 10, + "effective_date": datetime.strptime("2019-01-01", "%Y-%m-%d").date(), + "expiration_date": datetime.strptime("2019-12-31", "%Y-%m-%d").date(), + }, + { + "compliance_period_id": 11, + "description": "2020", + "display_order": 11, + "effective_date": datetime.strptime("2020-01-01", "%Y-%m-%d").date(), + "expiration_date": datetime.strptime("2020-12-31", "%Y-%m-%d").date(), + }, + { + "compliance_period_id": 12, + "description": "2021", + "display_order": 12, + "effective_date": datetime.strptime("2021-01-01", "%Y-%m-%d").date(), + "expiration_date": datetime.strptime("2021-12-31", "%Y-%m-%d").date(), + }, + { + "compliance_period_id": 13, + "description": "2022", + "display_order": 13, + "effective_date": datetime.strptime("2022-01-01", "%Y-%m-%d").date(), + "expiration_date": datetime.strptime("2022-12-31", "%Y-%m-%d").date(), + }, + { + "compliance_period_id": 14, + "description": "2023", + "display_order": 14, + "effective_date": datetime.strptime("2023-01-01", "%Y-%m-%d").date(), + "expiration_date": datetime.strptime("2023-12-31", "%Y-%m-%d").date(), + }, + { + "compliance_period_id": 15, + "description": "2024", + "display_order": 15, + "effective_date": datetime.strptime("2024-01-01", "%Y-%m-%d").date(), + "expiration_date": datetime.strptime("2024-12-31", "%Y-%m-%d").date(), + }, + { + "compliance_period_id": 16, + "description": 
"2025", + "display_order": 16, + "effective_date": datetime.strptime("2025-01-01", "%Y-%m-%d").date(), + "expiration_date": datetime.strptime("2025-12-31", "%Y-%m-%d").date(), + }, + { + "compliance_period_id": 17, + "description": "2026", + "display_order": 17, + "effective_date": datetime.strptime("2026-01-01", "%Y-%m-%d").date(), + "expiration_date": datetime.strptime("2026-12-31", "%Y-%m-%d").date(), + }, + { + "compliance_period_id": 18, + "description": "2027", + "display_order": 18, + "effective_date": datetime.strptime("2027-01-01", "%Y-%m-%d").date(), + "expiration_date": datetime.strptime("2027-12-31", "%Y-%m-%d").date(), + }, + { + "compliance_period_id": 19, + "description": "2028", + "display_order": 19, + "effective_date": datetime.strptime("2028-01-01", "%Y-%m-%d").date(), + "expiration_date": datetime.strptime("2028-12-31", "%Y-%m-%d").date(), + }, + { + "compliance_period_id": 20, + "description": "2029", + "display_order": 20, + "effective_date": datetime.strptime("2029-01-01", "%Y-%m-%d").date(), + "expiration_date": datetime.strptime("2029-12-31", "%Y-%m-%d").date(), + }, + { + "compliance_period_id": 21, + "description": "2030", + "display_order": 21, + "effective_date": datetime.strptime("2030-01-01", "%Y-%m-%d").date(), + "expiration_date": datetime.strptime("2030-12-31", "%Y-%m-%d").date(), + }, + ] + + try: + for compliance_period_data in compliance_periods_to_seed: + # Check if the CompliancePeriod already exists based on the description + exists = await session.execute( + select(CompliancePeriod).where( + CompliancePeriod.description + == compliance_period_data["description"] + ) + ) + if not exists.scalars().first(): + compliance_period = CompliancePeriod(**compliance_period_data) + session.add(compliance_period) + + except Exception as e: + context = { + "function": "seed_compliance_periods", + } + logger.error( + "Error occurred while seeding compliance periods", + error=str(e), + exc_info=e, + **context, + ) + raise diff --git a/backend/lcfs/db/seeders/common/compliance_report_status_seeder.py b/backend/lcfs/db/seeders/common/compliance_report_status_seeder.py new file mode 100644 index 000000000..cf9a7a568 --- /dev/null +++ b/backend/lcfs/db/seeders/common/compliance_report_status_seeder.py @@ -0,0 +1,66 @@ +import structlog +from sqlalchemy import select +from lcfs.db.models.compliance.ComplianceReportStatus import ( + ComplianceReportStatus, + ComplianceReportStatusEnum, +) + +logger = structlog.get_logger(__name__) + + +async def seed_compliance_report_statuses(session): + """ + Seeds the compliance report statuses into the database, if they do not already exist. + + Args: + session: The database session for committing the new records. 
+ """ + compliance_report_statuses_to_seed = [ + { + "compliance_report_status_id": 1, + "status": ComplianceReportStatusEnum.Draft, + }, + { + "compliance_report_status_id": 2, + "status": ComplianceReportStatusEnum.Submitted, + }, + { + "compliance_report_status_id": 3, + "status": ComplianceReportStatusEnum.Recommended_by_analyst, + }, + { + "compliance_report_status_id": 4, + "status": ComplianceReportStatusEnum.Recommended_by_manager, + }, + { + "compliance_report_status_id": 5, + "status": ComplianceReportStatusEnum.Assessed, + }, + { + "compliance_report_status_id": 6, + "status": ComplianceReportStatusEnum.ReAssessed, + }, + ] + + try: + for status_data in compliance_report_statuses_to_seed: + exists = await session.execute( + select(ComplianceReportStatus).where( + ComplianceReportStatus.status == status_data["status"] + ) + ) + if not exists.scalars().first(): + status = ComplianceReportStatus(**status_data) + session.add(status) + + except Exception as e: + context = { + "function": "seed_compliance_report_statuses", + } + logger.error( + "Error occurred while seeding compliance report statuses", + error=str(e), + exc_info=e, + **context, + ) + raise diff --git a/backend/lcfs/db/seeders/common/end_user_type_seeder.py b/backend/lcfs/db/seeders/common/end_user_type_seeder.py new file mode 100644 index 000000000..d0790f40e --- /dev/null +++ b/backend/lcfs/db/seeders/common/end_user_type_seeder.py @@ -0,0 +1,52 @@ +import structlog +from sqlalchemy import select +from sqlalchemy.exc import IntegrityError +from lcfs.db.models.compliance.EndUserType import EndUserType + +logger = structlog.get_logger(__name__) + + +async def seed_end_user_types(session): + """ + Seeds the end_user_type table with predefined values if they do not already exist. + + Args: + session: The database session for committing the new records. 
+ """ + + end_user_types_to_seed = [ + {"type_name": "Multi-unit residential building"}, + {"type_name": "Fleet"}, + {"type_name": "Public"}, + {"type_name": "Employee"}, + ] + + try: + for end_user_type_data in end_user_types_to_seed: + # Check if the EndUserType already exists + exists = await session.execute( + select(EndUserType).where( + EndUserType.type_name == end_user_type_data["type_name"] + ) + ) + if not exists.scalars().first(): + end_user_type = EndUserType(**end_user_type_data) + session.add(end_user_type) + + except IntegrityError as ie: + logger.warning( + "Integrity error while seeding end_user_type", + error=str(ie), + exc_info=ie, + ) + except Exception as e: + context = { + "function": "seed_end_user_types", + } + logger.error( + "Error occurred while seeding end_user_type", + error=str(e), + exc_info=e, + **context, + ) + raise diff --git a/backend/lcfs/db/seeders/common/fuel_data_seeder.py b/backend/lcfs/db/seeders/common/fuel_data_seeder.py new file mode 100644 index 000000000..cbb1ce003 --- /dev/null +++ b/backend/lcfs/db/seeders/common/fuel_data_seeder.py @@ -0,0 +1,113 @@ +import structlog +import json +from pathlib import Path +from sqlalchemy import select + +# database models +from lcfs.db.models.fuel.EnergyDensity import EnergyDensity +from lcfs.db.models.fuel.AdditionalCarbonIntensity import AdditionalCarbonIntensity +from lcfs.db.models.fuel.EnergyEffectivenessRatio import EnergyEffectivenessRatio +from lcfs.db.models.fuel.EndUseType import EndUseType +from lcfs.db.models.fuel.FuelCategory import FuelCategory +from lcfs.db.models.fuel.FuelCodePrefix import FuelCodePrefix +from lcfs.db.models.fuel.FuelCodeStatus import FuelCodeStatus +from lcfs.db.models.fuel.ProvisionOfTheAct import ProvisionOfTheAct +from lcfs.db.models.fuel.TransportMode import TransportMode +from lcfs.db.models.fuel.UnitOfMeasure import UnitOfMeasure +from lcfs.db.models.fuel.FuelType import FuelType, QuantityUnitsEnum +from lcfs.db.models.fuel.TargetCarbonIntensity import TargetCarbonIntensity +from lcfs.db.models.fuel.FuelInstance import FuelInstance +from lcfs.db.models.compliance import FuelMeasurementType, LevelOfEquipment + +logger = structlog.get_logger(__name__) + +UNITS_MAPPING = { + "L": QuantityUnitsEnum.Litres, + "kg": QuantityUnitsEnum.Kilograms, + "kWh": QuantityUnitsEnum.Kilowatt_hour, + "m³": QuantityUnitsEnum.Cubic_metres, +} + + +async def seed_static_fuel_data(session): + """ + Seeds the static fuel data into the database, if they do not already exist. + + Args: + session: The database session for committing the new records. 
+ """ + try: + with open(Path(__file__).parent / "seed_fuel_data.json") as f_data: + data = json.load(f_data) + + async def add_if_not_exists(model, unique_field, records): + for record in records: + exists = await session.execute( + select(model).where( + getattr(model, unique_field) == record[unique_field] + ) + ) + if not exists.scalars().first(): + session.add(model(**record)) + + # Ensure fuel_type units are correctly formatted + for fuel_type in data["fuel_types"]: + fuel_type["units"] = QuantityUnitsEnum(fuel_type["units"]) + + await add_if_not_exists( + TransportMode, "transport_mode_id", data["transport_modes"] + ) + await add_if_not_exists( + ProvisionOfTheAct, "provision_of_the_act_id", data["provision_acts"] + ) + await add_if_not_exists(FuelType, "fuel_type_id", data["fuel_types"]) + await add_if_not_exists( + FuelCodePrefix, "fuel_code_prefix_id", data["fuel_code_prefixes"] + ) + await add_if_not_exists( + FuelCodeStatus, "fuel_code_status_id", data["fuel_code_statuses"] + ) + await add_if_not_exists( + FuelCategory, "fuel_category_id", data["fuel_categories"] + ) + await add_if_not_exists( + EndUseType, "end_use_type_id", data["end_use_types"] + ) + await add_if_not_exists(UnitOfMeasure, "uom_id", data["unit_of_measures"]) + await add_if_not_exists( + AdditionalCarbonIntensity, "additional_uci_id", data["ucis"] + ) + await add_if_not_exists(EnergyEffectivenessRatio, "eer_id", data["eers"]) + await add_if_not_exists( + EnergyDensity, "energy_density_id", data["energy_densities"] + ) + await add_if_not_exists( + TargetCarbonIntensity, + "target_carbon_intensity_id", + data["target_carbon_intensities"], + ) + await add_if_not_exists( + FuelInstance, "fuel_instance_id", data["fuel_instances"] + ) + await add_if_not_exists( + FuelMeasurementType, + "fuel_measurement_type_id", + data["fuel_measurement_types"], + ) + await add_if_not_exists( + LevelOfEquipment, "level_of_equipment_id", data["levels_of_equipment"] + ) + + f_data.close() + + except Exception as e: + context = { + "function": "seed_static_fuel_data", + } + logger.error( + "Error occurred while seeding static fuel data", + error=str(e), + exc_info=e, + **context, + ) + raise diff --git a/backend/lcfs/db/seeders/common/initiative_agreement_status_seeder.py b/backend/lcfs/db/seeders/common/initiative_agreement_status_seeder.py new file mode 100644 index 000000000..a44518054 --- /dev/null +++ b/backend/lcfs/db/seeders/common/initiative_agreement_status_seeder.py @@ -0,0 +1,58 @@ +import structlog +from sqlalchemy import select +from lcfs.db.models.initiative_agreement.InitiativeAgreementStatus import ( + InitiativeAgreementStatus, + InitiativeAgreementStatusEnum, +) + +logger = structlog.get_logger(__name__) + + +async def seed_initiative_agreement_statuses(session): + """ + Seeds the initiative agreement statuses into the database, if they do not already exist. + + Args: + session: The database session for committing the new records. 
+ """ + initiative_agreement_statuses_to_seed = [ + { + "initiative_agreement_status_id": 1, + "status": InitiativeAgreementStatusEnum.Draft, + }, + { + "initiative_agreement_status_id": 2, + "status": InitiativeAgreementStatusEnum.Recommended, + }, + { + "initiative_agreement_status_id": 3, + "status": InitiativeAgreementStatusEnum.Approved, + }, + { + "initiative_agreement_status_id": 4, + "status": InitiativeAgreementStatusEnum.Deleted, + }, + ] + + try: + for status_data in initiative_agreement_statuses_to_seed: + exists = await session.execute( + select(InitiativeAgreementStatus).where( + InitiativeAgreementStatus.status == status_data["status"] + ) + ) + if not exists.scalars().first(): + status = InitiativeAgreementStatus(**status_data) + session.add(status) + + except Exception as e: + context = { + "function": "seed_initiative_agreement_statuses", + } + logger.error( + "Error occurred while seeding initiative agreement statuses", + error=str(e), + exc_info=e, + **context, + ) + raise diff --git a/backend/lcfs/db/seeders/common/notifications_seeder.py b/backend/lcfs/db/seeders/common/notifications_seeder.py new file mode 100644 index 000000000..363ae6c98 --- /dev/null +++ b/backend/lcfs/db/seeders/common/notifications_seeder.py @@ -0,0 +1,208 @@ +import structlog +from sqlalchemy import select +from sqlalchemy.exc import IntegrityError +from lcfs.db.models.notification.NotificationChannel import NotificationChannel +from lcfs.db.models.notification.NotificationType import NotificationType + +logger = structlog.get_logger(__name__) + + +async def seed_notification_channels(session): + """ + Seeds the notification_channel table with predefined values if they do not already exist. + + Args: + session: The database session for committing the new records. + """ + channels_to_seed = [ + {"channel_name": "EMAIL", "enabled": True, "subscribe_by_default": True}, + {"channel_name": "IN_APP", "enabled": True, "subscribe_by_default": False}, + ] + + try: + for channel_data in channels_to_seed: + # Check if the NotificationChannel already exists + exists = await session.execute( + select(NotificationChannel).where( + NotificationChannel.channel_name == channel_data["channel_name"] + ) + ) + if not exists.scalars().first(): + channel = NotificationChannel(**channel_data) + session.add(channel) + except IntegrityError as ie: + logger.warning( + "Integrity error while seeding notification_channel", + error=str(ie), + exc_info=ie, + ) + except Exception as e: + context = { + "function": "seed_notification_channels", + } + logger.error( + "Error occurred while seeding notification_channel", + error=str(e), + exc_info=e, + **context, + ) + raise + + +async def seed_notification_types(session): + """ + Seeds the notification_type table with predefined values if they do not already exist. + + Args: + session: The database session for committing the new records. 
+ """ + types_to_seed = [ + { + "name": "BCEID__COMPLIANCE_REPORT__DIRECTOR_ASSESSMENT", + "description": "Director assessed a compliance report or supplemental report.", + "email_content": "Email content", + "create_user": "system", + "update_user": "system", + }, + { + "name": "BCEID__INITIATIVE_AGREEMENT__DIRECTOR_APPROVAL", + "description": "Director approved the initiative agreement or transaction", + "email_content": "Email content", + "create_user": "system", + "update_user": "system", + }, + { + "name": "BCEID__TRANSFER__DIRECTOR_DECISION", + "description": "Director recorded or refused a transfer request", + "email_content": "Email content", + "create_user": "system", + "update_user": "system", + }, + { + "name": "BCEID__TRANSFER__PARTNER_ACTIONS", + "description": "A transfer partner took action (proposed, declined, rescinded, or signed & submitted) on a transfer request", + "email_content": "Email content", + "create_user": "system", + "update_user": "system", + }, + { + "name": "IDIR_ANALYST__COMPLIANCE_REPORT__DIRECTOR_DECISION", + "description": "Director assessed compliance report", + "email_content": "Email content", + "create_user": "system", + "update_user": "system", + }, + { + "name": "IDIR_ANALYST__COMPLIANCE_REPORT__MANAGER_RECOMMENDATION", + "description": "Compliance manager recommended action on the compliance report.", + "email_content": "Email content", + "create_user": "system", + "update_user": "system", + }, + { + "name": "IDIR_ANALYST__COMPLIANCE_REPORT__SUBMITTED_FOR_REVIEW", + "description": "Compliance report submitted for government analyst review or returned by compliance manager", + "email_content": "Email content", + "create_user": "system", + "update_user": "system", + }, + { + "name": "IDIR_ANALYST__INITIATIVE_AGREEMENT__RETURNED_TO_ANALYST", + "description": "Director approved/returned the initiative agreement to the analyst", + "email_content": "Email content", + "create_user": "system", + "update_user": "system", + }, + { + "name": "IDIR_ANALYST__TRANSFER__DIRECTOR_RECORDED", + "description": "Director recorded or refused a transfer request", + "email_content": "Email content", + "create_user": "system", + "update_user": "system", + }, + { + "name": "IDIR_ANALYST__TRANSFER__RESCINDED_ACTION", + "description": "A transfer request was rescinded by a transfer partner", + "email_content": "Email content", + "create_user": "system", + "update_user": "system", + }, + { + "name": "IDIR_ANALYST__TRANSFER__SUBMITTED_FOR_REVIEW", + "description": "Transfer request submitted for government analyst review", + "email_content": "Email content", + "create_user": "system", + "update_user": "system", + }, + { + "name": "IDIR_COMPLIANCE_MANAGER__COMPLIANCE_REPORT__ANALYST_RECOMMENDATION", + "description": "Analyst recommendation on the compliance report or returned by the director", + "email_content": "Email content", + "create_user": "system", + "update_user": "system", + }, + { + "name": "IDIR_COMPLIANCE_MANAGER__COMPLIANCE_REPORT__DIRECTOR_ASSESSMENT", + "description": "Director assessed a compliance report", + "email_content": "Email content", + "create_user": "system", + "update_user": "system", + }, + { + "name": "IDIR_COMPLIANCE_MANAGER__COMPLIANCE_REPORT__SUBMITTED_FOR_REVIEW", + "description": "Compliance report submitted for government analyst review", + "email_content": "Email content", + "create_user": "system", + "update_user": "system", + }, + { + "name": "IDIR_DIRECTOR__COMPLIANCE_REPORT__MANAGER_RECOMMENDATION", + "description": "Compliance manager 
recommended action on the compliance report", + "email_content": "Email content", + "create_user": "system", + "update_user": "system", + }, + { + "name": "IDIR_DIRECTOR__INITIATIVE_AGREEMENT__ANALYST_RECOMMENDATION", + "description": "Analyst recommendation provided for the initiative agreement", + "email_content": "Email content", + "create_user": "system", + "update_user": "system", + }, + { + "name": "IDIR_DIRECTOR__TRANSFER__ANALYST_RECOMMENDATION", + "description": "Analyst recommendation provided for the transfer request", + "email_content": "Email content", + "create_user": "system", + "update_user": "system", + }, + ] + + try: + for notification_type_data in types_to_seed: + # Check if the NotificationType already exists + exists = await session.execute( + select(NotificationType).where( + NotificationType.name == notification_type_data["name"] + ) + ) + if not exists.scalars().first(): + notification_type = NotificationType(**notification_type_data) + session.add(notification_type) + except IntegrityError as ie: + logger.warning( + "Integrity error while seeding notification_type", + error=str(ie), + exc_info=ie, + ) + except Exception as e: + context = { + "function": "seed_notification_types", + } + logger.error( + "Error occurred while seeding notification_type", + error=str(e), + exc_info=e, + **context, + ) + raise diff --git a/backend/lcfs/db/seeders/common/organization_seeder.py b/backend/lcfs/db/seeders/common/organization_seeder.py deleted file mode 100644 index 598ef0048..000000000 --- a/backend/lcfs/db/seeders/common/organization_seeder.py +++ /dev/null @@ -1,36 +0,0 @@ -import logging -from sqlalchemy import select -from lcfs.db.models.Organization import Organization - -logger = logging.getLogger(__name__) - -async def seed_organizations(session): - """ - Seeds the organizations into the database, if they do not already exist. - - Args: - session: The database session for committing the new records. 
- """ - - organizations_to_seed = [ - { - "name": "BC Government", - "organization_status_id": 1, - "organization_type_id": 1 - }, - ] - - try: - for org_data in organizations_to_seed: - # Check if the Organization already exists based on its name - exists = await session.execute( - select(Organization).where(Organization.name == org_data["name"]) - ) - if not exists.scalars().first(): - organization = Organization(**org_data) - session.add(organization) - - await session.commit() - except Exception as e: - logger.error("Error occurred while seeding organizations: %s", e) - raise diff --git a/backend/lcfs/db/seeders/common/organization_status_seeder.py b/backend/lcfs/db/seeders/common/organization_status_seeder.py index 5ec5104e6..e56e03504 100644 --- a/backend/lcfs/db/seeders/common/organization_status_seeder.py +++ b/backend/lcfs/db/seeders/common/organization_status_seeder.py @@ -1,8 +1,12 @@ -import logging +import structlog from sqlalchemy import select -from lcfs.db.models.OrganizationStatus import OrganizationStatus, OrgStatusEnum +from lcfs.db.models.organization.OrganizationStatus import ( + OrganizationStatus, + OrgStatusEnum, +) + +logger = structlog.get_logger(__name__) -logger = logging.getLogger(__name__) async def seed_organization_statuses(session): """ @@ -13,23 +17,48 @@ async def seed_organization_statuses(session): """ org_statuses_to_seed = [ - {"status": OrgStatusEnum.Unregistered, "description": "Unregistered"}, - {"status": OrgStatusEnum.Registered, "description": "Registered"}, - {"status": OrgStatusEnum.Suspended, "description": "Suspended"}, - {"status": OrgStatusEnum.Canceled, "description": "Canceled"} + { + "organization_status_id": 1, + "status": OrgStatusEnum.Unregistered, + "description": "Unregistered", + }, + { + "organization_status_id": 2, + "status": OrgStatusEnum.Registered, + "description": "Registered", + }, + { + "organization_status_id": 3, + "status": OrgStatusEnum.Suspended, + "description": "Suspended", + }, + { + "organization_status_id": 4, + "status": OrgStatusEnum.Canceled, + "description": "Canceled", + }, ] try: for org_status_data in org_statuses_to_seed: # Check if the OrganizationStatus already exists based on status exists = await session.execute( - select(OrganizationStatus).where(OrganizationStatus.status == org_status_data["status"]) + select(OrganizationStatus).where( + OrganizationStatus.status == org_status_data["status"] + ) ) if not exists.scalars().first(): org_status = OrganizationStatus(**org_status_data) session.add(org_status) - await session.commit() except Exception as e: - logger.error("Error occurred while seeding organization statuses: %s", e) + context = { + "function": "seed_organization_statuses", + } + logger.error( + "Error occurred while seeding organization statuses", + error=str(e), + exc_info=e, + **context, + ) raise diff --git a/backend/lcfs/db/seeders/common/organization_type_seeder.py b/backend/lcfs/db/seeders/common/organization_type_seeder.py index aef27ddc0..d4d8e78a5 100644 --- a/backend/lcfs/db/seeders/common/organization_type_seeder.py +++ b/backend/lcfs/db/seeders/common/organization_type_seeder.py @@ -1,8 +1,9 @@ -import logging +import structlog from sqlalchemy import select -from lcfs.db.models.OrganizationType import OrganizationType, OrgTypeEnum +from lcfs.db.models.organization.OrganizationType import OrganizationType, OrgTypeEnum + +logger = structlog.get_logger(__name__) -logger = logging.getLogger(__name__) async def seed_organization_types(session): """ @@ -13,23 +14,48 @@ async def 
seed_organization_types(session): """ org_types_to_seed = [ - {"org_type": OrgTypeEnum.fuel_supplier, "description": "Fuel Supplier"}, - {"org_type": OrgTypeEnum.electricity_supplier, "description": "Electricity Supplier"}, - {"org_type": OrgTypeEnum.broker, "description": "Broker"}, - {"org_type": OrgTypeEnum.utilities, "description": "Utilities (local or public)"} + { + "organization_type_id": 1, + "org_type": OrgTypeEnum.fuel_supplier, + "description": "Fuel Supplier", + }, + { + "organization_type_id": 2, + "org_type": OrgTypeEnum.electricity_supplier, + "description": "Electricity Supplier", + }, + { + "organization_type_id": 3, + "org_type": OrgTypeEnum.broker, + "description": "Broker", + }, + { + "organization_type_id": 4, + "org_type": OrgTypeEnum.utilities, + "description": "Utilities (local or public)", + }, ] try: for org_type_data in org_types_to_seed: # Check if the OrganizationType already exists based on org_type exists = await session.execute( - select(OrganizationType).where(OrganizationType.org_type == org_type_data["org_type"]) + select(OrganizationType).where( + OrganizationType.org_type == org_type_data["org_type"] + ) ) if not exists.scalars().first(): org_type = OrganizationType(**org_type_data) session.add(org_type) - await session.commit() except Exception as e: - logger.error("Error occurred while seeding organization types: %s", e) + context = { + "function": "seed_organization_types", + } + logger.error( + "Error occurred while seeding organization types", + error=str(e), + exc_info=e, + **context, + ) raise diff --git a/backend/lcfs/db/seeders/common/role_seeder.py b/backend/lcfs/db/seeders/common/role_seeder.py index 5292dfb95..4afc8ca3c 100644 --- a/backend/lcfs/db/seeders/common/role_seeder.py +++ b/backend/lcfs/db/seeders/common/role_seeder.py @@ -1,8 +1,9 @@ -import logging +import structlog from sqlalchemy import select -from lcfs.db.models.Role import Role, RoleEnum +from lcfs.db.models.user.Role import Role, RoleEnum + +logger = structlog.get_logger(__name__) -logger = logging.getLogger(__name__) async def seed_roles(session): """ @@ -14,59 +15,82 @@ async def seed_roles(session): roles_to_seed = [ { + "role_id": 1, + "name": RoleEnum.GOVERNMENT, + "description": "Identifies a government user in the system.", + "is_government_role": True, + "display_order": 1, + }, + { + "role_id": 2, + "name": RoleEnum.SUPPLIER, + "description": "Identifies a supplier user in the system.", + "is_government_role": False, + "display_order": 2, + }, + { + "role_id": 3, "name": RoleEnum.ADMINISTRATOR, - "description": 'Can add/edit IDIR users and assign roles, add/edit organizations, BCeID users, and assign roles', + "description": "Can add/edit IDIR users and assign roles, add/edit organizations, BCeID users, and assign roles", "is_government_role": True, - "display_order": 1 + "display_order": 3, }, { + "role_id": 4, "name": RoleEnum.ANALYST, - "description": 'Can make recommendations on transfers, transactions, and compliance reports, manage file submissions, and add/edit fuel codes', + "description": "Can make recommendations on transfers, transactions, and compliance reports, manage file submissions, and add/edit fuel codes", "is_government_role": True, - "display_order": 2 + "display_order": 4, }, { + "role_id": 5, "name": RoleEnum.COMPLIANCE_MANAGER, - "description": 'Can make recommendations on compliance reports', + "description": "Can make recommendations on compliance reports", "is_government_role": True, - "display_order": 3 + "display_order": 5, }, { + 
"role_id": 6, "name": RoleEnum.DIRECTOR, - "description": 'Can assess compliance reports and approve transactions', + "description": "Can assess compliance reports and approve transactions", "is_government_role": True, - "display_order": 4 + "display_order": 6, }, { + "role_id": 7, "name": RoleEnum.MANAGE_USERS, - "description": 'Can add/edit BCeID users and assign roles', + "description": "Can add/edit BCeID users and assign roles", "is_government_role": False, - "display_order": 5 + "display_order": 7, }, { + "role_id": 8, "name": RoleEnum.TRANSFER, - "description": 'Can create/save transfers and submit files', + "description": "Can create/save transfers and submit files", "is_government_role": False, - "display_order": 6 + "display_order": 8, }, { + "role_id": 9, "name": RoleEnum.COMPLIANCE_REPORTING, - "description": 'Can create/save compliance reports and submit files', + "description": "Can create/save compliance reports and submit files", "is_government_role": False, - "display_order": 7 + "display_order": 9, }, { + "role_id": 10, "name": RoleEnum.SIGNING_AUTHORITY, - "description": 'Can sign and submit compliance reports to government and transfers to trade partners/government', + "description": "Can sign and submit compliance reports to government and transfers to trade partners/government", "is_government_role": False, - "display_order": 8 + "display_order": 10, }, { + "role_id": 11, "name": RoleEnum.READ_ONLY, - "description": 'Can view transactions, compliance reports, and files', + "description": "Can view transactions, compliance reports, and files", "is_government_role": False, - "display_order": 9 - } + "display_order": 11, + }, ] try: @@ -79,7 +103,14 @@ async def seed_roles(session): role = Role(**role_data) session.add(role) - await session.commit() except Exception as e: - logger.error("Error occurred while seeding roles: %s", e) + context = { + "function": "seed_roles", + } + logger.error( + "Error occurred while seeding roles", + error=str(e), + exc_info=e, + **context, + ) raise diff --git a/backend/lcfs/db/seeders/common/seed_fuel_data.json b/backend/lcfs/db/seeders/common/seed_fuel_data.json new file mode 100644 index 000000000..57b1178c4 --- /dev/null +++ b/backend/lcfs/db/seeders/common/seed_fuel_data.json @@ -0,0 +1,1098 @@ +{ + "provision_acts": [ + { + "provision_of_the_act_id": 1, + "name": "Prescribed carbon intensity - section 19 (a)", + "description": "Prescribed carbon intensity - section 19 (a)", + "effective_status": true + }, + { + "provision_of_the_act_id": 2, + "name": "Fuel code - section 19 (b) (i)", + "description": "Fuel code - section 19 (b) (i)", + "effective_status": true + }, + { + "provision_of_the_act_id": 3, + "name": "Default carbon intensity - section 19 (b) (ii)", + "description": "Default carbon intensity - section 19 (b) (ii)", + "effective_status": true + } + ], + "fuel_types": [ + { + "fuel_type_id": 1, + "fuel_type": "Biodiesel", + "fossil_derived": false, + "other_uses_fossil_derived": false, + "provision_1_id": 2, + "provision_2_id": 3, + "default_carbon_intensity": 100.21, + "units": "L", + "unrecognized": false + }, + { + "fuel_type_id": 2, + "fuel_type": "CNG", + "fossil_derived": false, + "other_uses_fossil_derived": true, + "provision_1_id": 2, + "provision_2_id": 3, + "default_carbon_intensity": 63.91, + "units": "m³", + "unrecognized": false + }, + { + "fuel_type_id": 3, + "fuel_type": "Electricity", + "fossil_derived": false, + "other_uses_fossil_derived": true, + "provision_1_id": 2, + "provision_2_id": 3, + 
"default_carbon_intensity": 12.14, + "units": "kWh", + "unrecognized": false + }, + { + "fuel_type_id": 4, + "fuel_type": "Ethanol", + "fossil_derived": false, + "other_uses_fossil_derived": false, + "provision_1_id": 2, + "provision_2_id": 3, + "default_carbon_intensity": 93.67, + "units": "L", + "unrecognized": false + }, + { + "fuel_type_id": 5, + "fuel_type": "HDRD", + "fossil_derived": false, + "other_uses_fossil_derived": false, + "provision_1_id": 2, + "provision_2_id": 3, + "default_carbon_intensity": 100.21, + "units": "L", + "unrecognized": false + }, + { + "fuel_type_id": 6, + "fuel_type": "Hydrogen", + "fossil_derived": false, + "other_uses_fossil_derived": true, + "provision_1_id": 2, + "provision_2_id": 3, + "default_carbon_intensity": 123.96, + "units": "kg", + "unrecognized": false + }, + { + "fuel_type_id": 7, + "fuel_type": "LNG", + "fossil_derived": false, + "other_uses_fossil_derived": true, + "provision_1_id": 2, + "provision_2_id": 3, + "default_carbon_intensity": 90.11, + "units": "kg", + "unrecognized": false + }, + { + "fuel_type_id": 11, + "fuel_type": "Alternative jet fuel", + "fossil_derived": false, + "other_uses_fossil_derived": false, + "provision_1_id": 2, + "provision_2_id": 3, + "default_carbon_intensity": 88.83, + "units": "L", + "unrecognized": false + }, + { + "fuel_type_id": 13, + "fuel_type": "Propane", + "fossil_derived": false, + "other_uses_fossil_derived": true, + "provision_1_id": 2, + "provision_2_id": 3, + "default_carbon_intensity": 79.87, + "units": "L", + "unrecognized": false + }, + { + "fuel_type_id": 14, + "fuel_type": "Renewable gasoline", + "fossil_derived": false, + "other_uses_fossil_derived": false, + "provision_1_id": 2, + "provision_2_id": 3, + "default_carbon_intensity": 93.67, + "units": "L", + "unrecognized": false + }, + { + "fuel_type_id": 15, + "fuel_type": "Renewable naphtha", + "fossil_derived": false, + "other_uses_fossil_derived": false, + "provision_1_id": 2, + "provision_2_id": 3, + "default_carbon_intensity": 93.67, + "units": "L", + "unrecognized": false + }, + { + "fuel_type_id": 16, + "fuel_type": "Fossil-derived diesel", + "fossil_derived": true, + "other_uses_fossil_derived": true, + "provision_1_id": 1, + "default_carbon_intensity": 94.38, + "units": "L", + "unrecognized": false + }, + { + "fuel_type_id": 17, + "fuel_type": "Fossil-derived gasoline", + "fossil_derived": true, + "other_uses_fossil_derived": true, + "provision_1_id": 1, + "default_carbon_intensity": 93.67, + "units": "L", + "unrecognized": false + }, + { + "fuel_type_id": 18, + "fuel_type": "Fossil-derived jet fuel", + "fossil_derived": true, + "other_uses_fossil_derived": true, + "provision_1_id": 1, + "default_carbon_intensity": 88.83, + "units": "L", + "unrecognized": false + }, + { + "fuel_type_id": 19, + "fuel_type": "Other", + "fossil_derived": false, + "other_uses_fossil_derived": false, + "provision_1_id": 2, + "provision_2_id": 3, + "default_carbon_intensity": 0, + "units": "L", + "unrecognized": true + }, + { + "fuel_type_id": 20, + "fuel_type": "Other diesel", + "fossil_derived": false, + "other_uses_fossil_derived": false, + "provision_1_id": 1, + "default_carbon_intensity": 100.21, + "units": "L", + "unrecognized": false + } + ], + "fuel_code_prefixes": [ + { + "fuel_code_prefix_id": 1, + "prefix": "BCLCF" + }, + { + "fuel_code_prefix_id": 2, + "prefix": "PROXY" + } + ], + "fuel_code_statuses": [ + { + "fuel_code_status_id": 1, + "status": "Draft", + "description": "Initial state of the fuel code", + "display_order": 1 + }, + { + 
"fuel_code_status_id": 2, + "status": "Approved", + "description": "Fuel code has been approved", + "display_order": 2 + }, + { + "fuel_code_status_id": 3, + "status": "Deleted", + "description": "Fuel code has been deleted", + "display_order": 3 + } + ], + "transport_modes": [ + { + "transport_mode_id": 1, + "transport_mode": "Truck" + }, + { + "transport_mode_id": 2, + "transport_mode": "Rail" + }, + { + "transport_mode_id": 3, + "transport_mode": "Marine-domestic" + }, + { + "transport_mode_id": 4, + "transport_mode": "Adjacent" + }, + { + "transport_mode_id": 5, + "transport_mode": "Pipeline" + } + ], + "fuel_categories": [ + { + "fuel_category_id": 1, + "category": "Gasoline", + "description": "Gasoline", + "default_carbon_intensity": 93.67 + }, + { + "fuel_category_id": 2, + "category": "Diesel", + "description": "Diesel", + "default_carbon_intensity": 100.21 + }, + { + "fuel_category_id": 3, + "category": "Jet fuel", + "description": "Jet fuel", + "default_carbon_intensity": 88.83 + } + ], + "end_use_types": [ + { + "end_use_type_id": 1, + "type": "Light duty motor vehicles", + "intended_use": true + }, + { + "end_use_type_id": 2, + "type": "Other or unknown" + }, + { + "end_use_type_id": 3, + "type": "Fuel cell vehicle" + }, + { + "end_use_type_id": 4, + "type": "Battery bus", + "intended_use": true + }, + { + "end_use_type_id": 5, + "type": "Battery truck", + "intended_use": true + }, + { + "end_use_type_id": 6, + "type": "Cargo handling equipment", + "intended_use": true + }, + { + "end_use_type_id": 7, + "type": "Fixed guiderail", + "intended_use": true + }, + { + "end_use_type_id": 8, + "type": "Ground support equipment", + "intended_use": true + }, + { + "end_use_type_id": 9, + "type": "Heavy forklift", + "intended_use": true + }, + { + "end_use_type_id": 10, + "type": "Shore power", + "intended_use": true + }, + { + "end_use_type_id": 11, + "type": "Trolley bus", + "intended_use": true + }, + { + "end_use_type_id": 12, + "type": "Compression-ignition engine" + }, + { + "end_use_type_id": 13, + "type": "Other", + "intended_use": true + }, + { + "end_use_type_id": 14, + "type": "Aircraft", + "intended_use": true + }, + { + "end_use_type_id": 15, + "type": "Compression-ignition engine- Marine, general", + "intended_use": true + }, + { + "end_use_type_id": 16, + "type": "Compression-ignition engine- Marine, operated within 51 to 75% of load range", + "intended_use": true + }, + { + "end_use_type_id": 17, + "type": "Compression-ignition engine- Marine, operated within 76 to 100% of load range", + "intended_use": true + }, + { + "end_use_type_id": 18, + "type": "Compression-ignition engine- Marine, with methane slip reduction kit- General", + "intended_use": true + }, + { + "end_use_type_id": 19, + "type": "Compression-ignition engine- Marine, with methane slip reduction kit- Operated within 26 to 75% of load range", + "intended_use": true + }, + { + "end_use_type_id": 20, + "type": "Compression-ignition engine- Marine, with methane slip reduction kit- Operated within 76 to 100% of load range", + "intended_use": true + }, + { + "end_use_type_id": 21, + "type": "Compression-ignition engine- Marine, unknown whether kit is installed or average operating load range", + "intended_use": true + }, + { + "end_use_type_id": 22, + "type": "Unknown engine type", + "intended_use": true + }, + { + "end_use_type_id": 23, + "type": "Other (i.e. 
road transportation)", + "intended_use": true + } + ], + "unit_of_measures": [ + { + "uom_id": 1, + "name": "MJ/L", + "description": "Megajoules per litre" + }, + { + "uom_id": 2, + "name": "MJ/kWh", + "description": "Megajoules per kilowatt hour" + }, + { + "uom_id": 3, + "name": "MJ/m³", + "description": "Megajoules per cubic metre" + }, + { + "uom_id": 4, + "name": "MJ/kg", + "description": "Megajoules per kilogram" + }, + { + "uom_id": 5, + "name": "gCO₂e/MJ", + "description": "Grams of carbon dioxide equivalent per megajoule" + } + ], + "ucis": [ + { + "additional_uci_id": 1, + "fuel_type_id": 7, + "uom_id": 5, + "intensity": 0 + }, + { + "additional_uci_id": 2, + "uom_id": 5, + "intensity": 0 + }, + { + "additional_uci_id": 3, + "fuel_type_id": 7, + "uom_id": 5, + "end_use_type_id": 15, + "intensity": 27.3 + }, + { + "additional_uci_id": 4, + "fuel_type_id": 7, + "uom_id": 5, + "end_use_type_id": 16, + "intensity": 17.8 + }, + { + "additional_uci_id": 5, + "fuel_type_id": 7, + "uom_id": 5, + "end_use_type_id": 17, + "intensity": 12.2 + }, + { + "additional_uci_id": 6, + "fuel_type_id": 7, + "uom_id": 5, + "end_use_type_id": 18, + "intensity": 10.6 + }, + { + "additional_uci_id": 7, + "fuel_type_id": 7, + "uom_id": 5, + "end_use_type_id": 19, + "intensity": 8.4 + }, + { + "additional_uci_id": 8, + "fuel_type_id": 7, + "uom_id": 5, + "end_use_type_id": 20, + "intensity": 8.0 + }, + { + "additional_uci_id": 9, + "fuel_type_id": 7, + "uom_id": 5, + "end_use_type_id": 21, + "intensity": 27.3 + }, + { + "additional_uci_id": 10, + "fuel_type_id": 7, + "uom_id": 5, + "end_use_type_id": 22, + "intensity": 27.3 + }, + { + "additional_uci_id": 11, + "fuel_type_id": 7, + "uom_id": 5, + "end_use_type_id": 23, + "intensity": 0 + } + ], + "eers": [ + { + "eer_id": 1, + "fuel_category_id": 1, + "fuel_type_id": 2, + "ratio": 0.9 + }, + { + "eer_id": 2, + "fuel_category_id": 1, + "fuel_type_id": 3, + "end_use_type_id": 1, + "ratio": 3.5 + }, + { + "eer_id": 3, + "fuel_category_id": 1, + "fuel_type_id": 3, + "end_use_type_id": 2, + "ratio": 1.0 + }, + { + "eer_id": 4, + "fuel_category_id": 1, + "fuel_type_id": 6, + "end_use_type_id": 3, + "ratio": 2.4 + }, + { + "eer_id": 5, + "fuel_category_id": 1, + "fuel_type_id": 6, + "end_use_type_id": 2, + "ratio": 0.9 + }, + { + "eer_id": 6, + "fuel_category_id": 1, + "fuel_type_id": 13, + "ratio": 0.9 + }, + { + "eer_id": 7, + "fuel_category_id": 2, + "fuel_type_id": 2, + "ratio": 0.9 + }, + { + "eer_id": 8, + "fuel_category_id": 2, + "fuel_type_id": 3, + "end_use_type_id": 4, + "ratio": 3.8 + }, + { + "eer_id": 9, + "fuel_category_id": 2, + "fuel_type_id": 3, + "end_use_type_id": 5, + "ratio": 3.2 + }, + { + "eer_id": 10, + "fuel_category_id": 2, + "fuel_type_id": 3, + "end_use_type_id": 6, + "ratio": 2.5 + }, + { + "eer_id": 11, + "fuel_category_id": 2, + "fuel_type_id": 3, + "end_use_type_id": 7, + "ratio": 2.9 + }, + { + "eer_id": 12, + "fuel_category_id": 2, + "fuel_type_id": 3, + "end_use_type_id": 8, + "ratio": 2.5 + }, + { + "eer_id": 13, + "fuel_category_id": 2, + "fuel_type_id": 3, + "end_use_type_id": 9, + "ratio": 3.9 + }, + { + "eer_id": 14, + "fuel_category_id": 2, + "fuel_type_id": 3, + "end_use_type_id": 10, + "ratio": 2.8 + }, + { + "eer_id": 15, + "fuel_category_id": 2, + "fuel_type_id": 3, + "end_use_type_id": 11, + "ratio": 2.4 + }, + { + "eer_id": 16, + "fuel_category_id": 2, + "fuel_type_id": 3, + "end_use_type_id": 2, + "ratio": 1.0 + }, + { + "eer_id": 17, + "fuel_category_id": 2, + "fuel_type_id": 6, + "end_use_type_id": 3, + "ratio":
1.8
+    },
+    {
+      "eer_id": 18,
+      "fuel_category_id": 2,
+      "fuel_type_id": 6,
+      "end_use_type_id": 2,
+      "ratio": 0.9
+    },
+    {
+      "eer_id": 19,
+      "fuel_category_id": 2,
+      "fuel_type_id": 13,
+      "ratio": 0.9
+    },
+    {
+      "eer_id": 20,
+      "fuel_category_id": 3,
+      "fuel_type_id": 3,
+      "ratio": 2.5
+    },
+    {
+      "eer_id": 21,
+      "fuel_category_id": 3,
+      "fuel_type_id": 11,
+      "ratio": 1.0
+    },
+    {
+      "eer_id": 22,
+      "fuel_category_id": 2,
+      "fuel_type_id": 7,
+      "end_use_type_id": 15,
+      "ratio": 1.0
+    },
+    {
+      "eer_id": 23,
+      "fuel_category_id": 2,
+      "fuel_type_id": 7,
+      "end_use_type_id": 16,
+      "ratio": 1.0
+    },
+    {
+      "eer_id": 24,
+      "fuel_category_id": 2,
+      "fuel_type_id": 7,
+      "end_use_type_id": 17,
+      "ratio": 1.0
+    },
+    {
+      "eer_id": 25,
+      "fuel_category_id": 2,
+      "fuel_type_id": 7,
+      "end_use_type_id": 18,
+      "ratio": 1.0
+    },
+    {
+      "eer_id": 26,
+      "fuel_category_id": 2,
+      "fuel_type_id": 7,
+      "end_use_type_id": 19,
+      "ratio": 1.0
+    },
+    {
+      "eer_id": 27,
+      "fuel_category_id": 2,
+      "fuel_type_id": 7,
+      "end_use_type_id": 20,
+      "ratio": 1.0
+    },
+    {
+      "eer_id": 28,
+      "fuel_category_id": 2,
+      "fuel_type_id": 7,
+      "end_use_type_id": 21,
+      "ratio": 1.0
+    },
+    {
+      "eer_id": 29,
+      "fuel_category_id": 2,
+      "fuel_type_id": 7,
+      "end_use_type_id": 22,
+      "ratio": 0.9
+    },
+    {
+      "eer_id": 30,
+      "fuel_category_id": 2,
+      "fuel_type_id": 7,
+      "end_use_type_id": 23,
+      "ratio": 0.9
+    }
+  ],
+  "energy_densities": [
+    {
+      "energy_density_id": 1,
+      "fuel_type_id": 17,
+      "uom_id": 1,
+      "density": 34.69
+    },
+    {
+      "energy_density_id": 2,
+      "fuel_type_id": 4,
+      "uom_id": 1,
+      "density": 23.58
+    },
+    {
+      "energy_density_id": 3,
+      "fuel_type_id": 14,
+      "uom_id": 1,
+      "density": 34.69
+    },
+    {
+      "energy_density_id": 4,
+      "fuel_type_id": 15,
+      "uom_id": 1,
+      "density": 34.51
+    },
+    {
+      "energy_density_id": 5,
+      "fuel_type_id": 16,
+      "uom_id": 1,
+      "density": 38.65
+    },
+    {
+      "energy_density_id": 6,
+      "fuel_type_id": 1,
+      "uom_id": 1,
+      "density": 35.40
+    },
+    {
+      "energy_density_id": 7,
+      "fuel_type_id": 5,
+      "uom_id": 1,
+      "density": 37.89
+    },
+    {
+      "energy_density_id": 9,
+      "fuel_type_id": 18,
+      "uom_id": 1,
+      "density": 37.40
+    },
+    {
+      "energy_density_id": 10,
+      "fuel_type_id": 11,
+      "uom_id": 1,
+      "density": 36.00
+    },
+    {
+      "energy_density_id": 11,
+      "fuel_type_id": 3,
+      "uom_id": 2,
+      "density": 3.60
+    },
+    {
+      "energy_density_id": 12,
+      "fuel_type_id": 6,
+      "uom_id": 2,
+      "density": 141.76
+    },
+    {
+      "energy_density_id": 13,
+      "fuel_type_id": 13,
+      "uom_id": 1,
+      "density": 25.62
+    },
+    {
+      "energy_density_id": 14,
+      "fuel_type_id": 2,
+      "uom_id": 3,
+      "density": 38.27
+    },
+    {
+      "energy_density_id": 15,
+      "fuel_type_id": 7,
+      "uom_id": 4,
+      "density": 53.54
+    },
+    {
+      "energy_density_id": 16,
+      "fuel_type_id": 20,
+      "uom_id": 1,
+      "density": 36.51
+    }
+  ],
+  "target_carbon_intensities": [
+    {
+      "target_carbon_intensity_id": 1,
+      "compliance_period_id": 15,
+      "fuel_category_id": 1,
+      "target_carbon_intensity": 78.68,
+      "reduction_target_percentage": 16.0
+    },
+    {
+      "target_carbon_intensity_id": 2,
+      "compliance_period_id": 16,
+      "fuel_category_id": 1,
+      "target_carbon_intensity": 76.53,
+      "reduction_target_percentage": 18.3
+    },
+    {
+      "target_carbon_intensity_id": 3,
+      "compliance_period_id": 17,
+      "fuel_category_id": 1,
+      "target_carbon_intensity": 74.37,
+      "reduction_target_percentage": 20.6
+    },
+    {
+      "target_carbon_intensity_id": 4,
+      "compliance_period_id": 18,
+      "fuel_category_id": 1,
+      "target_carbon_intensity": 72.13,
+      "reduction_target_percentage": 23.0
+    },
+    {
+      "target_carbon_intensity_id": 5,
+      "compliance_period_id": 19,
+      "fuel_category_id": 1,
+      "target_carbon_intensity": 69.97,
+      "reduction_target_percentage": 25.3
+    },
+    {
+      "target_carbon_intensity_id": 6,
+      "compliance_period_id": 20,
+      "fuel_category_id": 1,
+      "target_carbon_intensity": 67.72,
+      "reduction_target_percentage": 27.7
+    },
+    {
+      "target_carbon_intensity_id": 7,
+      "compliance_period_id": 21,
+      "fuel_category_id": 1,
+      "target_carbon_intensity": 65.57,
+      "reduction_target_percentage": 30.0
+    },
+    {
+      "target_carbon_intensity_id": 8,
+      "compliance_period_id": 15,
+      "fuel_category_id": 2,
+      "target_carbon_intensity": 79.28,
+      "reduction_target_percentage": 16.0
+    },
+    {
+      "target_carbon_intensity_id": 9,
+      "compliance_period_id": 16,
+      "fuel_category_id": 2,
+      "target_carbon_intensity": 77.11,
+      "reduction_target_percentage": 18.3
+    },
+    {
+      "target_carbon_intensity_id": 10,
+      "compliance_period_id": 17,
+      "fuel_category_id": 2,
+      "target_carbon_intensity": 74.94,
+      "reduction_target_percentage": 20.6
+    },
+    {
+      "target_carbon_intensity_id": 11,
+      "compliance_period_id": 18,
+      "fuel_category_id": 2,
+      "target_carbon_intensity": 72.67,
+      "reduction_target_percentage": 23.0
+    },
+    {
+      "target_carbon_intensity_id": 12,
+      "compliance_period_id": 19,
+      "fuel_category_id": 2,
+      "target_carbon_intensity": 70.50,
+      "reduction_target_percentage": 25.3
+    },
+    {
+      "target_carbon_intensity_id": 13,
+      "compliance_period_id": 20,
+      "fuel_category_id": 2,
+      "target_carbon_intensity": 68.24,
+      "reduction_target_percentage": 27.7
+    },
+    {
+      "target_carbon_intensity_id": 14,
+      "compliance_period_id": 21,
+      "fuel_category_id": 2,
+      "target_carbon_intensity": 66.07,
+      "reduction_target_percentage": 30.0
+    },
+    {
+      "target_carbon_intensity_id": 15,
+      "compliance_period_id": 15,
+      "fuel_category_id": 3,
+      "target_carbon_intensity": 88.83,
+      "reduction_target_percentage": 0.0
+    },
+    {
+      "target_carbon_intensity_id": 16,
+      "compliance_period_id": 16,
+      "fuel_category_id": 3,
+      "target_carbon_intensity": 88.83,
+      "reduction_target_percentage": 0.0
+    },
+    {
+      "target_carbon_intensity_id": 17,
+      "compliance_period_id": 17,
+      "fuel_category_id": 3,
+      "target_carbon_intensity": 87.05,
+      "reduction_target_percentage": 2.0
+    },
+    {
+      "target_carbon_intensity_id": 18,
+      "compliance_period_id": 18,
+      "fuel_category_id": 3,
+      "target_carbon_intensity": 85.28,
+      "reduction_target_percentage": 4.0
+    },
+    {
+      "target_carbon_intensity_id": 19,
+      "compliance_period_id": 19,
+      "fuel_category_id": 3,
+      "target_carbon_intensity": 83.50,
+      "reduction_target_percentage": 6.0
+    },
+    {
+      "target_carbon_intensity_id": 20,
+      "compliance_period_id": 20,
+      "fuel_category_id": 3,
+      "target_carbon_intensity": 81.72,
+      "reduction_target_percentage": 8.0
+    },
+    {
+      "target_carbon_intensity_id": 21,
+      "compliance_period_id": 21,
+      "fuel_category_id": 3,
+      "target_carbon_intensity": 79.95,
+      "reduction_target_percentage": 10.0
+    }
+  ],
+  "fuel_instances": [
+    {
+      "fuel_instance_id": 1,
+      "fuel_type_id": 1,
+      "fuel_category_id": 2
+    },
+    {
+      "fuel_instance_id": 2,
+      "fuel_type_id": 2,
+      "fuel_category_id": 1
+    },
+    {
+      "fuel_instance_id": 3,
+      "fuel_type_id": 2,
+      "fuel_category_id": 2
+    },
+    {
+      "fuel_instance_id": 4,
+      "fuel_type_id": 3,
+      "fuel_category_id": 1
+    },
+    {
+      "fuel_instance_id": 5,
+      "fuel_type_id": 3,
+      "fuel_category_id": 2
+    },
+    {
+      "fuel_instance_id": 6,
+      "fuel_type_id": 3,
+      "fuel_category_id": 3
+    },
+    {
+      "fuel_instance_id": 7,
+      "fuel_type_id": 4,
+      "fuel_category_id": 1
+    },
+    {
+      "fuel_instance_id": 8,
+      "fuel_type_id": 5,
+      "fuel_category_id": 2
+    },
+    {
+      "fuel_instance_id": 9,
+      "fuel_type_id": 6,
+      "fuel_category_id": 1
+    },
+    {
+      "fuel_instance_id": 10,
+      "fuel_type_id": 6,
+      "fuel_category_id": 2
+    },
+    {
+      "fuel_instance_id": 11,
+      "fuel_type_id": 6,
+      "fuel_category_id": 3
+    },
+    {
+      "fuel_instance_id": 12,
+      "fuel_type_id": 7,
+      "fuel_category_id": 2
+    },
+    {
+      "fuel_instance_id": 16,
+      "fuel_type_id": 11,
+      "fuel_category_id": 3
+    },
+    {
+      "fuel_instance_id": 18,
+      "fuel_type_id": 13,
+      "fuel_category_id": 1
+    },
+    {
+      "fuel_instance_id": 19,
+      "fuel_type_id": 13,
+      "fuel_category_id": 2
+    },
+    {
+      "fuel_instance_id": 20,
+      "fuel_type_id": 14,
+      "fuel_category_id": 1
+    },
+    {
+      "fuel_instance_id": 21,
+      "fuel_type_id": 15,
+      "fuel_category_id": 1
+    },
+    {
+      "fuel_instance_id": 22,
+      "fuel_type_id": 16,
+      "fuel_category_id": 2
+    },
+    {
+      "fuel_instance_id": 23,
+      "fuel_type_id": 17,
+      "fuel_category_id": 1
+    },
+    {
+      "fuel_instance_id": 24,
+      "fuel_type_id": 18,
+      "fuel_category_id": 3
+    },
+    {
+      "fuel_instance_id": 25,
+      "fuel_type_id": 19,
+      "fuel_category_id": 1
+    },
+    {
+      "fuel_instance_id": 26,
+      "fuel_type_id": 19,
+      "fuel_category_id": 2
+    },
+    {
+      "fuel_instance_id": 27,
+      "fuel_type_id": 19,
+      "fuel_category_id": 3
+    },
+    {
+      "fuel_instance_id": 28,
+      "fuel_type_id": 20,
+      "fuel_category_id": 2
+    }
+  ],
+  "levels_of_equipment": [
+    {
+      "level_of_equipment_id": 1,
+      "name": "Level 3 - Direct current fast charging",
+      "display_order": 1
+    },
+    {
+      "level_of_equipment_id": 2,
+      "name": "Level 2 - High voltage, operating above level 1",
+      "display_order": 2
+    },
+    {
+      "level_of_equipment_id": 3,
+      "name": "Level 1 - Low voltage, operating at 120V AC or less",
+      "display_order": 3
+    },
+    {
+      "level_of_equipment_id": 4,
+      "name": "Other - Additional information provided in notes field",
+      "display_order": 4
+    }
+  ],
+  "fuel_measurement_types": [
+    {
+      "fuel_measurement_type_id": 1,
+      "type": "Separate utility meter",
+      "display_order": 1
+    },
+    {
+      "fuel_measurement_type_id": 2,
+      "type": "Equipment meter (remote access)",
+      "display_order": 2
+    },
+    {
+      "fuel_measurement_type_id": 3,
+      "type": "Equipment meter (physical access)",
+      "display_order": 3
+    },
+    {
+      "fuel_measurement_type_id": 4,
+      "type": "No meter or estimated",
+      "display_order": 4
+    }
+  ]
+}
\ No newline at end of file
+ """ + category_data = [ + {"transfer_category_id": 1, "category": TransferCategoryEnum.A}, + {"transfer_category_id": 2, "category": TransferCategoryEnum.B}, + {"transfer_category_id": 3, "category": TransferCategoryEnum.C}, + {"transfer_category_id": 4, "category": TransferCategoryEnum.D}, + ] + + try: + for data in category_data: + # Check if the Category already exists based on category + exists = await session.execute( + select(TransferCategory).where( + TransferCategory.category == data["category"] + ) + ) + if not exists.scalars().first(): + new_category = TransferCategory(**data) + session.add(new_category) + + except Exception as e: + context = { + "function": "seed_transfer_categories", + } + logger.error( + "Error occurred while seeding transfer categories", + error=str(e), + exc_info=e, + **context, + ) + raise diff --git a/backend/lcfs/db/seeders/common/transfer_status_seeder.py b/backend/lcfs/db/seeders/common/transfer_status_seeder.py new file mode 100644 index 000000000..fcf2bc4e9 --- /dev/null +++ b/backend/lcfs/db/seeders/common/transfer_status_seeder.py @@ -0,0 +1,104 @@ +import structlog +from sqlalchemy import select +from lcfs.db.models.transfer.TransferStatus import TransferStatus, TransferStatusEnum + +logger = structlog.get_logger(__name__) + + +async def seed_transfer_statuses(session): + """ + Seeds the transfer statuses into the database, if they do not already exist. + + Args: + session: The database session for committing the new records. + """ + + transfer_statuses_to_seed = [ + { + "transfer_status_id": 1, + "status": TransferStatusEnum.Draft, + "visible_to_transferor": True, + "visible_to_transferee": False, + "visible_to_government": False, + }, + { + "transfer_status_id": 2, + "status": TransferStatusEnum.Deleted, + "visible_to_transferor": False, + "visible_to_transferee": False, + "visible_to_government": False, + }, + { + "transfer_status_id": 3, + "status": TransferStatusEnum.Sent, + "visible_to_transferor": True, + "visible_to_transferee": True, + "visible_to_government": False, + }, + { + "transfer_status_id": 4, + "status": TransferStatusEnum.Submitted, + "visible_to_transferor": True, + "visible_to_transferee": True, + "visible_to_government": True, + }, + { + "transfer_status_id": 5, + "status": TransferStatusEnum.Recommended, + "visible_to_transferor": True, + "visible_to_transferee": True, + "visible_to_government": True, + }, + { + "transfer_status_id": 6, + "status": TransferStatusEnum.Recorded, + "visible_to_transferor": True, + "visible_to_transferee": True, + "visible_to_government": True, + }, + { + "transfer_status_id": 7, + "status": TransferStatusEnum.Refused, + "visible_to_transferor": True, + "visible_to_transferee": True, + "visible_to_government": True, + }, + { + "transfer_status_id": 8, + "status": TransferStatusEnum.Declined, + "visible_to_transferor": True, + "visible_to_transferee": True, + "visible_to_government": False, + }, + { + "transfer_status_id": 9, + "status": TransferStatusEnum.Rescinded, + "visible_to_transferor": True, + "visible_to_transferee": True, + "visible_to_government": True, + }, + ] + + try: + for transfer_status_data in transfer_statuses_to_seed: + # Check if the TransferStatus already exists based on status + exists = await session.execute( + select(TransferStatus).where( + TransferStatus.status == transfer_status_data["status"] + ) + ) + if not exists.scalars().first(): + transfer_status = TransferStatus(**transfer_status_data) + session.add(transfer_status) + + except Exception as e: + context = { + 
"function": "seed_transfer_statuses", + } + logger.error( + "Error occurred while seeding transfer statuses", + error=str(e), + exc_info=e, + **context, + ) + raise diff --git a/backend/lcfs/db/seeders/common_seeder.py b/backend/lcfs/db/seeders/common_seeder.py index f3ceafcbc..55c898920 100644 --- a/backend/lcfs/db/seeders/common_seeder.py +++ b/backend/lcfs/db/seeders/common_seeder.py @@ -1,33 +1,102 @@ -import logging +import structlog import asyncio -from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession -from sqlalchemy.orm import sessionmaker -from lcfs.settings import settings +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import text +from lcfs.db.seeders.common.compliance_period_seeder import seed_compliance_periods from lcfs.db.seeders.common.organization_type_seeder import seed_organization_types from lcfs.db.seeders.common.organization_status_seeder import seed_organization_statuses -from lcfs.db.seeders.common.organization_seeder import seed_organizations from lcfs.db.seeders.common.role_seeder import seed_roles +from lcfs.db.seeders.common.transfer_status_seeder import seed_transfer_statuses +from lcfs.db.seeders.common.transfer_categories_seeder import seed_transfer_categories +from lcfs.db.seeders.common.admin_adjustment_status_seeder import ( + seed_admin_adjustment_statuses, +) +from lcfs.db.seeders.common.initiative_agreement_status_seeder import ( + seed_initiative_agreement_statuses, +) +from lcfs.db.seeders.common.fuel_data_seeder import seed_static_fuel_data +from lcfs.db.seeders.common.compliance_report_status_seeder import ( + seed_compliance_report_statuses, +) +from lcfs.db.seeders.common.allocation_agreement_seeder import ( + seed_allocation_transaction_types, +) +from lcfs.db.seeders.common.end_user_type_seeder import ( + seed_end_user_types, +) +from lcfs.db.seeders.common.notifications_seeder import ( + seed_notification_types, + seed_notification_channels, +) -logger = logging.getLogger(__name__) +logger = structlog.get_logger(__name__) -async def seed_common(): + +async def update_sequences(session): + """ + Function to update sequences for all tables after seeding. 
+ """ + sequences = { + "fuel_code": "fuel_code_id", + "transport_mode": "transport_mode_id", + "provision_of_the_act": "provision_of_the_act_id", + "fuel_type": "fuel_type_id", + "fuel_code_prefix": "fuel_code_prefix_id", + "fuel_code_status": "fuel_code_status_id", + "fuel_category": "fuel_category_id", + "end_use_type": "end_use_type_id", + "unit_of_measure": "uom_id", + "additional_carbon_intensity": "additional_uci_id", + "energy_effectiveness_ratio": "eer_id", + "energy_density": "energy_density_id", + "target_carbon_intensity": "target_carbon_intensity_id", + "compliance_report_status": "compliance_report_status_id", + "admin_adjustment_status": "admin_adjustment_status_id", + "compliance_period": "compliance_period_id", + "initiative_agreement_status": "initiative_agreement_status_id", + "organization_status": "organization_status_id", + "organization_type": "organization_type_id", + "transfer_category": "transfer_category_id", + "transfer_status": "transfer_status_id", + "role": "role_id", + "end_user_type": "end_user_type_id", + "notification_type": "notification_type_id", + "notification_channel": "notification_channel_id", + } + + for table, column in sequences.items(): + sequence_name = f"{table}_{column}_seq" + max_value_query = text( + f"SELECT setval('{sequence_name}', COALESCE((SELECT MAX({column}) + 1 FROM {table}), 1), false)" + ) + await session.execute(max_value_query) + + +async def seed_common(session: AsyncSession): """ Function to seed the database with common data. """ - engine = create_async_engine(str(settings.db_url)) - AsyncSessionLocal = sessionmaker(bind=engine, class_=AsyncSession) - - async with AsyncSessionLocal() as session: - try: - await seed_organization_types(session) - await seed_organization_statuses(session) - await seed_organizations(session) - await seed_roles(session) - logger.info("Database seeding completed successfully.") - except Exception as e: - logger.error(f"An error occurred during seeding: {e}") - await session.rollback() + await seed_compliance_periods(session) + await seed_organization_types(session) + await seed_organization_statuses(session) + await seed_roles(session) + await seed_transfer_statuses(session) + await seed_transfer_categories(session) + await seed_admin_adjustment_statuses(session) + await seed_initiative_agreement_statuses(session) + await seed_static_fuel_data(session) + await seed_compliance_report_statuses(session) + await seed_allocation_transaction_types(session) + await seed_end_user_types(session) + await seed_notification_types(session) + await seed_notification_channels(session) + + # Update sequences after all seeders have run + await update_sequences(session) + + logger.info("Common database seeding completed successfully.") + if __name__ == "__main__": asyncio.run(seed_common()) diff --git a/backend/lcfs/db/seeders/dev/admin_adjustment_history_seeder.py b/backend/lcfs/db/seeders/dev/admin_adjustment_history_seeder.py new file mode 100644 index 000000000..699ac4264 --- /dev/null +++ b/backend/lcfs/db/seeders/dev/admin_adjustment_history_seeder.py @@ -0,0 +1,52 @@ +import structlog +from sqlalchemy import select, and_ +from lcfs.db.models.admin_adjustment import AdminAdjustmentHistory + +logger = structlog.get_logger(__name__) + + +async def seed_admin_adjustment_history(session): + """ + Seeds initial admin adjustment hustory records into the database, if they do not already exist. + Args: + session: The database session for committing the new records. 
+ """ + + admin_adjustment_history_to_seed = [ + { + "admin_adjustment_id": i, + "admin_adjustment_status_id": status_id, # Recommended or Approved + "effective_status": True, + "user_profile_id": 18, # Al Ring + } + for i in range(1, 11) + for status_id in (2, 3) # 2: Recommended, 3: Approved + ] + + try: + for admin_adjustment_history_data in admin_adjustment_history_to_seed: + # Check if the admin adjustment history already exists with the same admin_adjustment_id and admin_adjustment_status_id + exists = await session.execute( + select(AdminAdjustmentHistory).where( + and_( + AdminAdjustmentHistory.admin_adjustment_id == admin_adjustment_history_data["admin_adjustment_id"], + AdminAdjustmentHistory.admin_adjustment_status_id == admin_adjustment_history_data["admin_adjustment_status_id"] + ) + ) + ) + + if not exists.scalars().first(): + admin_adjustment_history = AdminAdjustmentHistory(**admin_adjustment_history_data) + session.add(admin_adjustment_history) + + except Exception as e: + context = { + "function": "seed_admin_adjustment_history", + } + logger.error( + "Error occurred while seeding admin adjustments", + error=str(e), + exc_info=e, + **context, + ) + raise diff --git a/backend/lcfs/db/seeders/dev/admin_adjustment_seeder.py b/backend/lcfs/db/seeders/dev/admin_adjustment_seeder.py new file mode 100644 index 000000000..ffe75e540 --- /dev/null +++ b/backend/lcfs/db/seeders/dev/admin_adjustment_seeder.py @@ -0,0 +1,115 @@ +import structlog +from datetime import datetime +from sqlalchemy import select +from lcfs.db.models.admin_adjustment import AdminAdjustment + +logger = structlog.get_logger(__name__) + + +async def seed_admin_adjustments(session): + """ + Seeds initial admin adjustments into the database, if they do not already exist. + Args: + session: The database session for committing the new records. 
+ """ + + # Define a standard date for transaction_effective_date + transaction_effective_date = datetime(2023, 1, 1) + + admin_adjustments_to_seed = [ + { + "compliance_units": 50000, + "to_organization_id": 1, + "transaction_id": 1, + "current_status_id": 3, + "transaction_effective_date": transaction_effective_date, + }, + { + "compliance_units": 50000, + "to_organization_id": 2, + "transaction_id": 2, + "current_status_id": 3, + "transaction_effective_date": transaction_effective_date, + }, + { + "compliance_units": 50000, + "to_organization_id": 3, + "transaction_id": 3, + "current_status_id": 3, + "transaction_effective_date": transaction_effective_date, + }, + { + "compliance_units": 50000, + "to_organization_id": 4, + "transaction_id": 4, + "current_status_id": 3, + "transaction_effective_date": transaction_effective_date, + }, + { + "compliance_units": 50000, + "to_organization_id": 5, + "transaction_id": 5, + "current_status_id": 3, + "transaction_effective_date": transaction_effective_date, + }, + { + "compliance_units": 50000, + "to_organization_id": 6, + "transaction_id": 6, + "current_status_id": 3, + "transaction_effective_date": transaction_effective_date, + }, + { + "compliance_units": 50000, + "to_organization_id": 7, + "transaction_id": 7, + "current_status_id": 3, + "transaction_effective_date": transaction_effective_date, + }, + { + "compliance_units": 50000, + "to_organization_id": 8, + "transaction_id": 8, + "current_status_id": 3, + "transaction_effective_date": transaction_effective_date, + }, + { + "compliance_units": 50000, + "to_organization_id": 9, + "transaction_id": 9, + "current_status_id": 3, + "transaction_effective_date": transaction_effective_date, + }, + { + "compliance_units": 50000, + "to_organization_id": 10, + "transaction_id": 10, + "current_status_id": 3, + "transaction_effective_date": transaction_effective_date, + }, + ] + + try: + for admin_adjustment_data in admin_adjustments_to_seed: + # Check if the admin adjustment already exists + exists = await session.execute( + select(AdminAdjustment).where( + AdminAdjustment.transaction_id + == admin_adjustment_data["transaction_id"] + ) + ) + if not exists.scalars().first(): + admin_adjustment = AdminAdjustment(**admin_adjustment_data) + session.add(admin_adjustment) + + except Exception as e: + context = { + "function": "seed_admin_adjustments", + } + logger.error( + "Error occurred while seeding admin adjustments", + error=str(e), + exc_info=e, + **context, + ) + raise diff --git a/backend/lcfs/db/seeders/dev/expected_use_types_seeder.py b/backend/lcfs/db/seeders/dev/expected_use_types_seeder.py new file mode 100644 index 000000000..6914a3e57 --- /dev/null +++ b/backend/lcfs/db/seeders/dev/expected_use_types_seeder.py @@ -0,0 +1,42 @@ +import structlog +from sqlalchemy import select +from lcfs.db.models.fuel.ExpectedUseType import ExpectedUseType + +logger = structlog.get_logger(__name__) + + +async def seed_expected_use_types(session): + """ + Seeds initial expected use types into the database, if they do not already exist. + Args: + session: The database session for committing the new records. 
+ """ + + expected_use_types_to_seed = [ + {"name": "Heating oil", "description": "Fuel used for heating purposes"}, + {"name": "Other", "description": "Other type of fuel description"}, + ] + + try: + for expected_use_type_data in expected_use_types_to_seed: + # Check if the expected use type already exists + exists = await session.execute( + select(ExpectedUseType).where( + ExpectedUseType.name == expected_use_type_data["name"] + ) + ) + if not exists.scalars().first(): + expected_use_type = ExpectedUseType(**expected_use_type_data) + session.add(expected_use_type) + + except Exception as e: + context = { + "function": "seed_expected_use_types", + } + logger.error( + "Error occurred while seeding expected use types", + error=str(e), + exc_info=e, + **context, + ) + raise diff --git a/backend/lcfs/db/seeders/dev/feedstock_fuel_transfer_mode_seeder.py b/backend/lcfs/db/seeders/dev/feedstock_fuel_transfer_mode_seeder.py new file mode 100644 index 000000000..4778b4e55 --- /dev/null +++ b/backend/lcfs/db/seeders/dev/feedstock_fuel_transfer_mode_seeder.py @@ -0,0 +1,86 @@ +import structlog +from sqlalchemy import select, func +from lcfs.db.models.fuel.FeedstockFuelTransportMode import FeedstockFuelTransportMode + +logger = structlog.get_logger(__name__) + + +async def seed_feedstock_fuel_transfer_modes(session): + feedstock_fuel_transfer_modes_to_seed = [ + { + "fuel_code_id": 1, + "transport_mode_id": 5, + }, + { + "fuel_code_id": 1, + "transport_mode_id": 4, + }, + { + "fuel_code_id": 2, + "transport_mode_id": 3, + }, + { + "fuel_code_id": 2, + "transport_mode_id": 2, + }, + { + "fuel_code_id": 3, + "transport_mode_id": 1, + }, + { + "fuel_code_id": 3, + "transport_mode_id": 5, + }, + { + "fuel_code_id": 4, + "transport_mode_id": 4, + }, + { + "fuel_code_id": 4, + "transport_mode_id": 3, + }, + { + "fuel_code_id": 5, + "transport_mode_id": 2, + }, + { + "fuel_code_id": 5, + "transport_mode_id": 1, + }, + { + "fuel_code_id": 6, + "transport_mode_id": 5, + }, + { + "fuel_code_id": 6, + "transport_mode_id": 4, + }, + ] + + try: + for feedstock_fuel_transferm_mode_data in feedstock_fuel_transfer_modes_to_seed: + exists = await session.execute( + select(FeedstockFuelTransportMode).where( + FeedstockFuelTransportMode.fuel_code_id + == feedstock_fuel_transferm_mode_data["fuel_code_id"], + FeedstockFuelTransportMode.transport_mode_id + == feedstock_fuel_transferm_mode_data["transport_mode_id"], + ) + ) + if not exists.scalars().first(): + feedstock_fuel_transfer_mode = FeedstockFuelTransportMode( + **feedstock_fuel_transferm_mode_data + ) + session.add(feedstock_fuel_transfer_mode) + + except Exception as e: + context = { + "function": "seed_feedstock_fuel_transfer_modes", + } + logger.error( + "Error occurred while seeding feedstock fuel transfer modes", + error=str(e), + exc_info=e, + **context, + ) + raise diff --git a/backend/lcfs/db/seeders/dev/finished_fuel_transfer_mode_seeder.py b/backend/lcfs/db/seeders/dev/finished_fuel_transfer_mode_seeder.py new file mode 100644 index 000000000..b324e09ee --- /dev/null +++ b/backend/lcfs/db/seeders/dev/finished_fuel_transfer_mode_seeder.py @@ -0,0 +1,86 @@ +import structlog +from sqlalchemy import select +from lcfs.db.models.fuel.FinishedFuelTransportMode import FinishedFuelTransportMode + +logger = structlog.get_logger(__name__) + + +async def seed_finished_fuel_transfer_modes(session): + finished_fuel_transfer_modes_to_seed = [ + { + "fuel_code_id": 1, + "transport_mode_id": 1, + }, + { + "fuel_code_id": 1, + "transport_mode_id": 2, + }, + { + 
"fuel_code_id": 2, + "transport_mode_id": 3, + }, + { + "fuel_code_id": 2, + "transport_mode_id": 4, + }, + { + "fuel_code_id": 3, + "transport_mode_id": 5, + }, + { + "fuel_code_id": 3, + "transport_mode_id": 1, + }, + { + "fuel_code_id": 4, + "transport_mode_id": 2, + }, + { + "fuel_code_id": 4, + "transport_mode_id": 3, + }, + { + "fuel_code_id": 5, + "transport_mode_id": 4, + }, + { + "fuel_code_id": 5, + "transport_mode_id": 5, + }, + { + "fuel_code_id": 6, + "transport_mode_id": 1, + }, + { + "fuel_code_id": 6, + "transport_mode_id": 2, + }, + ] + + try: + for finished_fuel_transferm_mode_data in finished_fuel_transfer_modes_to_seed: + exists = await session.execute( + select(FinishedFuelTransportMode).where( + FinishedFuelTransportMode.fuel_code_id + == finished_fuel_transferm_mode_data["fuel_code_id"], + FinishedFuelTransportMode.transport_mode_id + == finished_fuel_transferm_mode_data["transport_mode_id"], + ) + ) + if not exists.scalars().first(): + finished_fuel_transfer_mode = FinishedFuelTransportMode( + **finished_fuel_transferm_mode_data + ) + session.add(finished_fuel_transfer_mode) + + except Exception as e: + context = { + "function": "seed_finished_fuel_transfer_modes", + } + logger.error( + "Error occurred while seeding finished fuel transfer modes", + error=str(e), + exc_info=e, + **context, + ) + raise diff --git a/backend/lcfs/db/seeders/dev/fuel_code_seeder.py b/backend/lcfs/db/seeders/dev/fuel_code_seeder.py new file mode 100644 index 000000000..059fda5ad --- /dev/null +++ b/backend/lcfs/db/seeders/dev/fuel_code_seeder.py @@ -0,0 +1,234 @@ +import structlog +from datetime import datetime + +from sqlalchemy import select, func +from lcfs.db.models.fuel.FuelCode import FuelCode + +logger = structlog.get_logger(__name__) + +# Base template for the common fields +base_fuel_data = { + "fuel_status_id": 2, + "prefix_id": 1, + "contact_name": "John Doe", + "contact_email": "john.doe@lcfs.com", + "edrms": "edrms", + "last_updated": func.now(), + "application_date": func.now(), + "feedstock": "feedstock", + "feedstock_location": "123 main street", + "feedstock_misc": "misc data", + "fuel_production_facility_city": "Vancouver", + "fuel_production_facility_province_state": "British Columbia", + "fuel_production_facility_country": "Canada", + "former_company": "ABC Company", + "notes": "notes", +} + + +# Function to create a fuel data entry by extending the base with specific values +def create_fuel_entry( + fuel_suffix, + company, + carbon_intensity, + effective_date, + expiration_date, + fuel_type_id, +): + return { + **base_fuel_data, # Extend with the base fields + "fuel_suffix": fuel_suffix, + "company": company, + "carbon_intensity": carbon_intensity, + "effective_date": datetime(*effective_date), + "expiration_date": datetime(*expiration_date), + "fuel_type_id": fuel_type_id, + } + + +async def seed_fuel_codes(session): + fuel_codes_to_seed = [ + create_fuel_entry( + fuel_suffix="102.5", + company="Neste Oil Singapore", + carbon_intensity=37.21, + effective_date=(2024, 5, 3), + expiration_date=(2027, 5, 2), + fuel_type_id=5, + ), + create_fuel_entry( + fuel_suffix="124.4", + company="Ag Processing Inc.", + carbon_intensity=3.62, + effective_date=(2024, 3, 20), + expiration_date=(2027, 3, 19), + fuel_type_id=1, + ), + create_fuel_entry( + fuel_suffix="125.4", + company="Archer Daniels Midland", + carbon_intensity=-2.14, + effective_date=(2024, 1, 1), + expiration_date=(2026, 12, 31), + fuel_type_id=1, + ), + create_fuel_entry( + fuel_suffix="138.5", + company="ADM 
+        create_fuel_entry(
+            fuel_suffix="138.5",
+            company="ADM Agri-Industries Company",
+            carbon_intensity=4.26,
+            effective_date=(2024, 1, 1),
+            expiration_date=(2026, 12, 31),
+            fuel_type_id=1,
+        ),
+        create_fuel_entry(
+            fuel_suffix="143.4",
+            company="Green Plains Otter Tail LLC",
+            carbon_intensity=44.06,
+            effective_date=(2024, 1, 1),
+            expiration_date=(2026, 12, 31),
+            fuel_type_id=4,
+        ),
+        create_fuel_entry(
+            fuel_suffix="251.2",
+            company="Incobrasa Industries, Ltd.",
+            carbon_intensity=0.35,
+            effective_date=(2023, 8, 7),
+            expiration_date=(2026, 8, 7),
+            fuel_type_id=1,
+        ),
+        create_fuel_entry(
+            fuel_suffix="266.2",
+            company="FortisBC Energy Inc.",
+            carbon_intensity=20.41,
+            effective_date=(2024, 1, 2),
+            expiration_date=(2026, 12, 31),
+            fuel_type_id=2,
+        ),
+        create_fuel_entry(
+            fuel_suffix="274.2",
+            company="Paseo Cargill Energy",
+            carbon_intensity=-15.03,
+            effective_date=(2024, 1, 25),
+            expiration_date=(2027, 1, 24),
+            fuel_type_id=1,
+        ),
+        create_fuel_entry(
+            fuel_suffix="286.2",
+            company="Bonanza Bioenergy",
+            carbon_intensity=35.17,
+            effective_date=(2023, 8, 1),
+            expiration_date=(2026, 7, 31),
+            fuel_type_id=4,
+        ),
+        create_fuel_entry(
+            fuel_suffix="316.1",
+            company="FortisBC Energy Inc.",
+            carbon_intensity=65.34,
+            effective_date=(2024, 1, 1),
+            expiration_date=(2026, 12, 31),
+            fuel_type_id=7,
+        ),
+        create_fuel_entry(
+            fuel_suffix="317.2",
+            company="REG Geismar, LLC",
+            carbon_intensity=17.36,
+            effective_date=(2024, 7, 1),
+            expiration_date=(2027, 6, 30),
+            fuel_type_id=5,
+        ),
+        create_fuel_entry(
+            fuel_suffix="339.1",
+            company="HTEC Hydrogen Technology & Energy Corp",
+            carbon_intensity=21.45,
+            effective_date=(2024, 3, 2),
+            expiration_date=(2025, 3, 1),
+            fuel_type_id=6,
+        ),
+        create_fuel_entry(
+            fuel_suffix="353.2",
+            company="Seaspan Ferries Corp",
+            carbon_intensity=67.18,
+            effective_date=(2024, 7, 1),
+            expiration_date=(2027, 6, 30),
+            fuel_type_id=7,
+        ),
+        create_fuel_entry(
+            fuel_suffix="357.2",
+            company="Superior Propane",
+            carbon_intensity=71.21,
+            effective_date=(2024, 3, 9),
+            expiration_date=(2025, 3, 8),
+            fuel_type_id=13,
+        ),
+        create_fuel_entry(
+            fuel_suffix="362.1",
+            company="ADM Agri-Industries Company",
+            carbon_intensity=-1.00,
+            effective_date=(2024, 1, 1),
+            expiration_date=(2026, 12, 31),
+            fuel_type_id=1,
+        ),
+        create_fuel_entry(
+            fuel_suffix="405.1",
+            company="Diamond Green Diesel LLC",
+            carbon_intensity=20.37,
+            effective_date=(2023, 10, 27),
+            expiration_date=(2024, 10, 26),
+            fuel_type_id=15,
+        ),
+        create_fuel_entry(
+            fuel_suffix="423.1",
+            company="Tidewater Renewables Ltd.",
+            carbon_intensity=15.47,
+            effective_date=(2024, 7, 1),
+            expiration_date=(2025, 6, 30),
+            fuel_type_id=5,
+        ),
+        create_fuel_entry(
+            fuel_suffix="568.0",
+            company="Montana Renewables LLC",
+            carbon_intensity=40.37,
+            effective_date=(2024, 3, 1),
+            expiration_date=(2025, 2, 28),
+            fuel_type_id=11,
+        ),
+        create_fuel_entry(
+            fuel_suffix="595.0",
+            company="Parkland Refining (BC)",
+            carbon_intensity=-9.52,
+            effective_date=(2024, 1, 1),
+            expiration_date=(2025, 6, 30),
+            fuel_type_id=14,
+        ),
+        create_fuel_entry(
+            fuel_suffix="999.0",
+            company="Wayne Enterprises Inc.",
+            carbon_intensity=20.30,
+            effective_date=(2024, 1, 1),
+            expiration_date=(2025, 6, 30),
+            fuel_type_id=3,
+        ),
+    ]
+
+    try:
+        for fuel_code_data in fuel_codes_to_seed:
+            exists = await session.execute(
+                select(FuelCode).where(
+                    FuelCode.fuel_suffix == fuel_code_data["fuel_suffix"],
+                )
+            )
+            if not exists.scalars().first():
+                fuel_code = FuelCode(**fuel_code_data)
+                session.add(fuel_code)
+
+    except Exception as e:
+        context = {
+            "function": "seed_fuel_codes",
+        }
+        logger.error(
+            "Error occurred while seeding fuel codes",
+            error=str(e),
+            exc_info=e,
+            **context,
+        )
+        raise
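create_fuel_entry merges the shared base_fuel_data template with the per-code fields and unpacks the date tuples into datetime objects, which keeps each call site to six lines. For instance:

    entry = create_fuel_entry(
        fuel_suffix="102.5",
        company="Neste Oil Singapore",
        carbon_intensity=37.21,
        effective_date=(2024, 5, 3),
        expiration_date=(2027, 5, 2),
        fuel_type_id=5,
    )
    assert entry["contact_name"] == "John Doe"              # inherited from base_fuel_data
    assert entry["effective_date"] == datetime(2024, 5, 3)  # tuple expanded via datetime(*...)

diff --git a/backend/lcfs/db/seeders/dev/organization_address_seeder.py b/backend/lcfs/db/seeders/dev/organization_address_seeder.py
new file mode 100644
index 000000000..e53536f5e
--- /dev/null
+++ b/backend/lcfs/db/seeders/dev/organization_address_seeder.py
@@ -0,0 +1,145 @@
+import structlog
+from sqlalchemy import select
+from lcfs.db.models.organization.OrganizationAddress import OrganizationAddress
+
+logger = structlog.get_logger(__name__)
+
+
+async def seed_organization_addresses(session):
+    """
+    Seeds the organization addresses into the database, if they do not already exist.
+
+    Args:
+        session: The database session for committing the new records.
+    """
+
+    organization_addresses_to_seed = [
+        {
+            "organization_address_id": 1,
+            "name": "LCFS1",
+            "street_address": "123 Quantum Street",
+            "address_other": "",
+            "city": "Futurica",
+            "province_state": "BC",
+            "country": "Canada",
+            "postalCode_zipCode": "V1X 2P3",
+        },
+        {
+            "organization_address_id": 2,
+            "name": "LCFS2",
+            "street_address": "789 Stellar Lane",
+            "address_other": "Floor 10",
+            "city": "Nebula Bay",
+            "province_state": "BC",
+            "country": "Canada",
+            "postalCode_zipCode": "V2O 3P6",
+        },
+        {
+            "organization_address_id": 3,
+            "name": "LCFS3",
+            "street_address": "345 Radiant Road",
+            "address_other": "",
+            "city": "Solartown",
+            "province_state": "BC",
+            "country": "Canada",
+            "postalCode_zipCode": "V6M 2W8",
+        },
+        {
+            "organization_address_id": 4,
+            "name": "LCFS4",
+            "street_address": "567 Skyward Way",
+            "address_other": "",
+            "city": "Aero City",
+            "province_state": "BC",
+            "country": "Canada",
+            "postalCode_zipCode": "T1W 8E9",
+        },
+        {
+            "organization_address_id": 5,
+            "name": "LCFS5",
+            "street_address": "890 Nature Lane",
+            "address_other": "Building 987",
+            "city": "BioVista",
+            "province_state": "BC",
+            "country": "Canada",
+            "postalCode_zipCode": "H1A 9B4",
+        },
+        {
+            "organization_address_id": 6,
+            "name": "LCFS6",
+            "street_address": "1234 Volt Street",
+            "address_other": "Floor 567",
+            "city": "Electropolis",
+            "province_state": "BC",
+            "country": "Canada",
+            "postalCode_zipCode": "V3S 8K6",
+        },
+        {
+            "organization_address_id": 7,
+            "name": "LCFS7",
+            "street_address": "678 Combustion Road",
+            "address_other": "",
+            "city": "EnergySphere",
+            "province_state": "BC",
+            "country": "Canada",
+            "postalCode_zipCode": "T2E 5G7",
+        },
+        {
+            "organization_address_id": 8,
+            "name": "LCFS8",
+            "street_address": "910 Greenwave Boulevard",
+            "address_other": "",
+            "city": "RenewaPeak",
+            "province_state": "BC",
+            "country": "Canada",
+            "postalCode_zipCode": "V6H 1Z1",
+        },
+        {
+            "organization_address_id": 9,
+            "name": "LCFS9",
+            "street_address": "1122 Fusion Avenue",
+            "address_other": "Suite 1213",
+            "city": "Astrumtown",
+            "province_state": "BC",
+            "country": "Canada",
+            "postalCode_zipCode": "M4L 3R9",
+        },
+        {
+            "organization_address_id": 10,
+            "name": "LCFS10",
+            "street_address": "1314 Power Lane",
+            "address_other": "Unit 1415",
+            "city": "Turbopolis",
+            "province_state": "BC",
+            "country": "Canada",
+            "postalCode_zipCode": "T3E 4X8",
+        },
+    ]
+
+    try:
+        for organization_address_data in organization_addresses_to_seed:
+            # Check if the organization address already exists based on name and address fields
+            exists = await session.execute(
+                select(OrganizationAddress).where(
+                    OrganizationAddress.name == organization_address_data["name"],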
+                    OrganizationAddress.street_address
+                    == organization_address_data["street_address"],
+                    OrganizationAddress.address_other
+                    == organization_address_data["address_other"],
+                )
+            )
+            if not exists.scalars().first():
+                organization_address = OrganizationAddress(**organization_address_data)
+                session.add(organization_address)
+
+    except Exception as e:
+        context = {
+            "function": "seed_organization_addresses",
+        }
+        logger.error(
+            "Error occurred while seeding organization addresses",
+            error=str(e),
+            exc_info=e,
+            **context,
+        )
+        raise
diff --git a/backend/lcfs/db/seeders/dev/organization_attorney_address_seeder.py b/backend/lcfs/db/seeders/dev/organization_attorney_address_seeder.py
new file mode 100644
index 000000000..98701531e
--- /dev/null
+++ b/backend/lcfs/db/seeders/dev/organization_attorney_address_seeder.py
@@ -0,0 +1,152 @@
+import structlog
+from sqlalchemy import select
+from lcfs.db.models.organization.OrganizationAttorneyAddress import (
+    OrganizationAttorneyAddress,
+)
+
+logger = structlog.get_logger(__name__)
+
+
+async def seed_organization_attorney_addresses(session):
+    """
+    Seeds the organization attorney addresses into the database, if they do not already exist.
+
+    Args:
+        session: The database session for committing the new records.
+    """
+
+    organization_attorney_addresses_to_seed = [
+        {
+            "organization_attorney_address_id": 1,
+            "name": "LCFS1",
+            "street_address": "456 Nexus Avenue",
+            "address_other": "Suite 101",
+            "city": "Cosmos City",
+            "province_state": "BC",
+            "country": "Canada",
+            "postalCode_zipCode": "V8X 1Y9",
+        },
+        {
+            "organization_attorney_address_id": 2,
+            "name": "LCFS2",
+            "street_address": "101 Celestial Road",
+            "address_other": "Floor 5, Unit B",
+            "city": "Astralville",
+            "province_state": "BC",
+            "country": "Canada",
+            "postalCode_zipCode": "V4X 4Z7",
+        },
+        {
+            "organization_attorney_address_id": 3,
+            "name": "LCFS3",
+            "street_address": "345 Radiant Road",
+            "address_other": "",
+            "city": "Solartown",
+            "province_state": "BC",
+            "country": "Canada",
+            "postalCode_zipCode": "V6M 2W8",
+        },
+        {
+            "organization_attorney_address_id": 4,
+            "name": "LCFS4",
+            "street_address": "678 Breeze Boulevard",
+            "address_other": "Unit 3A",
+            "city": "Windhaven",
+            "province_state": "BC",
+            "country": "Canada",
+            "postalCode_zipCode": "S7K 5T1",
+        },
+        {
+            "organization_attorney_address_id": 5,
+            "name": "LCFS5",
+            "street_address": "890 Nature Lane",
+            "address_other": "Building 987",
+            "city": "BioVista",
+            "province_state": "BC",
+            "country": "Canada",
+            "postalCode_zipCode": "H1A 9B4",
+        },
+        {
+            "organization_attorney_address_id": 6,
+            "name": "LCFS6",
+            "street_address": "1234 Volt Street",
+            "address_other": "Floor 567",
+            "city": "Electropolis",
+            "province_state": "BC",
+            "country": "Canada",
+            "postalCode_zipCode": "V3S 8K6",
+        },
+        {
+            "organization_attorney_address_id": 7,
+            "name": "LCFS7",
+            "street_address": "678 Combustion Road",
+            "address_other": "",
+            "city": "EnergySphere",
+            "province_state": "BC",
+            "country": "Canada",
+            "postalCode_zipCode": "T2E 5G7",
+        },
+        {
+            "organization_attorney_address_id": 8,
+            "name": "LCFS8",
+            "street_address": "1011 Eco Drive",
+            "address_other": "",
+            "city": "Greenfield",
+            "province_state": "BC",
+            "country": "Canada",
+            "postalCode_zipCode": "R1N 7L3",
+        },
+        {
+            "organization_attorney_address_id": 9,
+            "name": "LCFS9",
+            "street_address": "1122 Fusion Avenue",
+            "address_other": "Suite 1213",
+            "city": "Astrumtown",
+            "province_state": "BC",
+            "country": "Canada",
+            "postalCode_zipCode": "M4L 3R9",
+        },
+        {
+            "organization_attorney_address_id": 10,
+            "name": "LCFS10",
+            "street_address": "1314 Power Lane",
+            "address_other": "Unit 1415",
+            "city": "Turbopolis",
+            "province_state": "BC",
+            "country": "Canada",
+            "postalCode_zipCode": "T3E 4X8",
+        },
+    ]
+
+    try:
+        for (
+            organization_attorney_address_data
+        ) in organization_attorney_addresses_to_seed:
+            # Check if the organization attorney address already exists based on name and address fields
+            exists = await session.execute(
+                select(OrganizationAttorneyAddress).where(
+                    OrganizationAttorneyAddress.name
+                    == organization_attorney_address_data["name"],
+                    OrganizationAttorneyAddress.street_address
+                    == organization_attorney_address_data["street_address"],
+                    OrganizationAttorneyAddress.address_other
+                    == organization_attorney_address_data["address_other"],
+                )
+            )
+            if not exists.scalars().first():
+                organization_attorney_address = OrganizationAttorneyAddress(
+                    **organization_attorney_address_data
+                )
+                session.add(organization_attorney_address)
+
+    except Exception as e:
+        context = {
+            "function": "seed_organization_attorney_addresses",
+        }
+        logger.error(
+            "Error occurred while seeding organization attorney addresses",
+            error=str(e),
+            exc_info=e,
+            **context,
+        )
+        raise
diff --git a/backend/lcfs/db/seeders/dev/organization_seeder.py b/backend/lcfs/db/seeders/dev/organization_seeder.py
new file mode 100644
index 000000000..4b2e013cb
--- /dev/null
+++ b/backend/lcfs/db/seeders/dev/organization_seeder.py
@@ -0,0 +1,188 @@
+import structlog
+from sqlalchemy import select, text
+from lcfs.db.models.organization.Organization import Organization
+
+logger = structlog.get_logger(__name__)
+
+
+async def seed_organizations(session):
+    """
+    Seeds the organizations into the database, if they do not already exist.
+
+    Args:
+        session: The database session for committing the new records.
+    """
+
+    organizations_to_seed = [
+        {
+            "organization_id": 1,
+            "organization_code": "7QEV",
+            "name": "LCFS Org 1",
+            "operating_name": "LCFS Org 1",
+            "email": "tfrs@gov.bc.ca",
+            "phone": "000-555-1234",
+            "edrms_record": "12345",
+            "organization_status_id": 2,
+            "organization_type_id": 1,
+            "organization_address_id": 1,
+            "organization_attorney_address_id": 1,
+        },
+        {
+            "organization_id": 2,
+            "organization_code": "DI8C",
+            "name": "LCFS Org 2",
+            "operating_name": "LCFS Org 2",
+            "email": "tfrs@gov.bc.ca",
+            "phone": "000-555-5678",
+            "edrms_record": "12345",
+            "organization_status_id": 2,
+            "organization_type_id": 1,
+            "organization_address_id": 2,
+            "organization_attorney_address_id": 2,
+        },
+        {
+            "organization_id": 3,
+            "organization_code": "QBE4",
+            "name": "LCFS Org 3",
+            "operating_name": "LCFS Org 3",
+            "email": "tfrs@gov.bc.ca",
+            "phone": "000-555-9101",
+            "edrms_record": "12345",
+            "organization_status_id": 2,
+            "organization_type_id": 1,
+            "organization_address_id": 3,
+            "organization_attorney_address_id": 3,
+        },
+        {
+            "organization_id": 4,
+            "organization_code": "NC12",
+            "name": "LCFS Org 4",
+            "operating_name": "LCFS Org 4",
+            "email": "tfrs@gov.bc.ca",
+            "phone": "000-555-1122",
+            "edrms_record": "12345",
+            "organization_status_id": 2,
+            "organization_type_id": 1,
+            "organization_address_id": 4,
+            "organization_attorney_address_id": 4,
+        },
+        {
+            "organization_id": 5,
+            "organization_code": "PQ9T",
+            "name": "LCFS Org 5",
+            "operating_name": "LCFS Org 5",
+            "email": "tfrs@gov.bc.ca",
+            "phone": "000-555-1313",
+            "edrms_record": "12345",
+            "organization_status_id": 2,
+            "organization_type_id": 2,
+            "organization_address_id": 5,
+            "organization_attorney_address_id": 5,
+        },
+        {
+            "organization_id": 6,
+            "organization_code": "5LJR",
+            "name": "LCFS Org 6",
+            "operating_name": "LCFS Org 6",
+            "email": "tfrs@gov.bc.ca",
+            "phone": "000-555-1414",
+            "edrms_record": "12345",
+            "organization_status_id": 2,
+            "organization_type_id": 2,
+            "organization_address_id": 6,
+            "organization_attorney_address_id": 6,
+        },
+        {
+            "organization_id": 7,
+            "organization_code": "W3UI",
+            "name": "LCFS Org 7",
+            "operating_name": "LCFS Org 7",
+            "email": "tfrs@gov.bc.ca",
+            "phone": "000-555-1515",
+            "edrms_record": "12345",
+            "organization_status_id": 2,
+            "organization_type_id": 2,
+            "organization_address_id": 7,
+            "organization_attorney_address_id": 7,
+        },
+        {
+            "organization_id": 8,
+            "organization_code": "MLPR",
+            "name": "LCFS Org 8",
+            "operating_name": "LCFS Org 8",
+            "email": "tfrs@gov.bc.ca",
+            "phone": "000-555-1616",
+            "edrms_record": "12345",
+            "organization_status_id": 1,
+            "organization_type_id": 2,
+            "organization_address_id": 8,
+            "organization_attorney_address_id": 8,
+        },
+        {
+            "organization_id": 9,
+            "organization_code": "076Q",
+            "name": "LCFS Org 9",
+            "operating_name": "LCFS Org 9",
+            "email": "tfrs@gov.bc.ca",
+            "phone": "000-555-1717",
+            "edrms_record": "12345",
+            "organization_status_id": 3,
+            "organization_type_id": 3,
+            "organization_address_id": 9,
+            "organization_attorney_address_id": 9,
+        },
+        {
+            "organization_id": 10,
+            "organization_code": "GJI7",
+            "name": "LCFS Org 10",
+            "operating_name": "LCFS Org 10",
+            "email": "tfrs@gov.bc.ca",
+            "phone": "000-555-1818",
+            "edrms_record": "12345",
+            "organization_status_id": 4,
+            "organization_type_id": 4,
+            "organization_address_id": 10,
+            "organization_attorney_address_id": 10,
+        },
+    ]
+
+    try:
+        for organization_data in organizations_to_seed:
+            # Check if the Organization already exists based on name
+            exists = await session.execute(
+                select(Organization).where(
+                    Organization.name == organization_data["name"],
+                )
+            )
+            organization = exists.scalars().first()
+            if not organization:
+                organization = Organization(**organization_data)
+                session.add(organization)
+            else:
+                organization.email = organization_data["email"]
+                organization.phone = organization_data["phone"]
+                organization.edrms_record = organization_data["edrms_record"]
+                organization.organization_address_id = organization_data[
+                    "organization_address_id"
+                ]
+                organization.organization_attorney_address_id = organization_data[
+                    "organization_attorney_address_id"
+                ]
+                session.add(organization)
+        # reset the sequence to latest value
+        await session.execute(
+            text(
+                "SELECT setval('organization_attorney_address_organization_attorney_address_seq', (SELECT MAX(organization_attorney_address_id) FROM organization_attorney_address))"
+            )
+        )
+    except Exception as e:
+        context = {
+            "function": "seed_organizations",
+        }
+        logger.error(
+            "Error occurred while seeding organizations",
+            error=str(e),
+            exc_info=e,
+            **context,
+        )
+        raise
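Unlike the other seeders, seed_organizations updates rows that already exist rather than skipping them, refreshing the contact and address columns on every run. The same refresh could be driven by a field list to avoid repeating the attribute assignments (an editorial sketch):

    REFRESHED_FIELDS = (
        "email",
        "phone",
        "edrms_record",
        "organization_address_id",
        "organization_attorney_address_id",
    )
    for field in REFRESHED_FIELDS:
        setattr(organization, field, organization_data[field])
    session.add(organization)

diff --git a/backend/lcfs/db/seeders/dev/transaction_seeder.py b/backend/lcfs/db/seeders/dev/transaction_seeder.py
new file mode 100644
index 000000000..30743b111
--- /dev/null
+++ b/backend/lcfs/db/seeders/dev/transaction_seeder.py
@@ -0,0 +1,104 @@
+import structlog
+from sqlalchemy import select
+from lcfs.db.models.transaction.Transaction import Transaction, TransactionActionEnum
+
+logger = structlog.get_logger(__name__)
+
+
+async def seed_transactions(session):
+    """
+    Seeds initial transaction for organizations into the database, if they do not already exist.
+    Args:
+        session: The database session for committing the new records.
+    """
+
+    transactions_to_seed = [
+        {
+            "transaction_id": 1,
+            "compliance_units": 50000,
+            "transaction_action": TransactionActionEnum.Adjustment,
+            "organization_id": 1,
+        },
+        {
+            "transaction_id": 2,
+            "compliance_units": 50000,
+            "transaction_action": TransactionActionEnum.Adjustment,
+            "organization_id": 2,
+        },
+        {
+            "transaction_id": 3,
+            "compliance_units": 50000,
+            "transaction_action": TransactionActionEnum.Adjustment,
+            "organization_id": 3,
+        },
+        {
+            "transaction_id": 4,
+            "compliance_units": 50000,
+            "transaction_action": TransactionActionEnum.Adjustment,
+            "organization_id": 4,
+        },
+        {
+            "transaction_id": 5,
+            "compliance_units": 50000,
+            "transaction_action": TransactionActionEnum.Adjustment,
+            "organization_id": 5,
+        },
+        {
+            "transaction_id": 6,
+            "compliance_units": 50000,
+            "transaction_action": TransactionActionEnum.Adjustment,
+            "organization_id": 6,
+        },
+        {
+            "transaction_id": 7,
+            "compliance_units": 50000,
+            "transaction_action": TransactionActionEnum.Adjustment,
+            "organization_id": 7,
+        },
+        {
+            "transaction_id": 8,
+            "compliance_units": 50000,
+            "transaction_action": TransactionActionEnum.Adjustment,
+            "organization_id": 8,
+        },
+        {
+            "transaction_id": 9,
+            "compliance_units": 50000,
+            "transaction_action": TransactionActionEnum.Adjustment,
+            "organization_id": 9,
+        },
+        {
+            "transaction_id": 10,
+            "compliance_units": 50000,
+            "transaction_action": TransactionActionEnum.Adjustment,
+            "organization_id": 10,
+        },
+    ]
+
+    try:
+        for transaction_data in transactions_to_seed:
+            # Check if the transaction already exists
+            exists = await session.execute(
+                select(Transaction).where(
+                    Transaction.compliance_units
+                    == transaction_data["compliance_units"],
+                    Transaction.transaction_action
+                    == transaction_data["transaction_action"],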
Transaction.organization_id == transaction_data["organization_id"], + ) + ) + if not exists.scalars().first(): + transaction = Transaction(**transaction_data) + session.add(transaction) + + except Exception as e: + context = { + "function": "seed_transactions", + } + logger.error( + "Error occurred while seeding transactions", + error=str(e), + exc_info=e, + **context, + ) + raise diff --git a/backend/lcfs/db/seeders/dev/user_profile_seeder.py b/backend/lcfs/db/seeders/dev/user_profile_seeder.py index 0ba74b2b0..13ea43ea7 100644 --- a/backend/lcfs/db/seeders/dev/user_profile_seeder.py +++ b/backend/lcfs/db/seeders/dev/user_profile_seeder.py @@ -1,8 +1,9 @@ -import logging +import structlog from sqlalchemy import select -from lcfs.db.models.UserProfile import UserProfile +from lcfs.db.models.user.UserProfile import UserProfile + +logger = structlog.get_logger(__name__) -logger = logging.getLogger(__name__) async def seed_user_profiles(session): """ @@ -15,62 +16,326 @@ async def seed_user_profiles(session): # Define the user profiles to seed user_profiles_to_seed = [ { - "keycloak_email": "alex@thecruxstudios.com", + "user_profile_id": 1, + "keycloak_email": "alex.zorkin@gov.bc.ca", "keycloak_username": "ALZORKIN", - "email": "alex@thecruxstudios.com", - "username": "azorkin", - "display_name": "Alex Zorkin", + "email": "alex.zorkin@gov.bc.ca", "title": "Developer", "phone": "1234567890", "mobile_phone": "1234567890", - "organization_id": 1 + "organization_id": None, + "is_active": True, + "first_name": "Alex", + "last_name": "Zorkin", }, { + "user_profile_id": 2, "keycloak_email": "hamed.valiollahibayeki@gov.bc.ca", "keycloak_username": "HVALIOLL", "email": "hamed.valiollahibayeki@gov.bc.ca", - "username": "hvalioll", - "display_name": "Hamed Valiollahi Bayeki", "title": "Developer", "phone": "1234567890", "mobile_phone": "1234567890", - "organization_id": 1 + "organization_id": None, + "is_active": True, + "first_name": "Hamed", + "last_name": "Bayeki", + }, + { + "user_profile_id": 3, + "keycloak_email": "kevin.hashimoto@gov.bc.ca", + "keycloak_username": "KHASHIMO", + "email": "kevin.hashimoto@gov.bc.ca", + "title": "Developer", + "phone": "1234567890", + "mobile_phone": "1234567890", + "organization_id": None, + "is_active": True, + "first_name": "Kevin", + "last_name": "Hashimoto", + }, + { + "user_profile_id": 4, + "keycloak_email": "prashanth.venkateshappa@gov.bc.ca", + "keycloak_username": "PVENKATE", + "email": "prashanth.venkateshappa@gov.bc.ca", + "title": "Developer", + "phone": "1234567890", + "mobile_phone": "1234567890", + "organization_id": None, + "is_active": True, + "first_name": "Prashanth", + "last_name": "V", + }, + { + "user_profile_id": 5, + "keycloak_email": "justin.lepitzki@gov.bc.ca", + "keycloak_username": "JLEPITZ", + "email": "justin.lepitzki@gov.bc.ca", + "title": "Admin", + "phone": "1234567890", + "mobile_phone": "1234567890", + "organization_id": None, + "is_active": True, + "first_name": "Justin", + "last_name": "Lepitzki", }, { - "keycloak_email": 'kevin.hashimoto@gov.bc.ca', - "keycloak_username": 'KHASHIMO', - "email": 'kevin.hashimoto@gov.bc.ca', - "username": 'khashimo', - "display_name": 'Kevin Hashimoto', - "title": 'Developer', + "user_profile_id": 6, + "keycloak_email": "lindsy.grunert@gov.bc.ca", + "keycloak_username": "LGRUNERT", + "email": "lindsy.grunert@gov.bc.ca", + "title": "Admin", "phone": "1234567890", "mobile_phone": "1234567890", - "organization_id": 1 + "organization_id": None, + "is_active": True, + "first_name": "Lindsy", + 
"last_name": "Grunert", + }, + { + "user_profile_id": 7, + "keycloak_email": "tfrs@gov.bc.ca", + "keycloak_username": "lcfs1", + "email": "tfrs@gov.bc.ca", + "title": "Analyst", + "phone": "1112223333", + "mobile_phone": "1112223333", + "organization_id": 1, + "is_active": True, + "first_name": "Jane", + "last_name": "Doe", + }, + { + "user_profile_id": 8, + "keycloak_email": "tfrs@gov.bc.ca", + "keycloak_username": "LCFS2", + "email": "tfrs@gov.bc.ca", + "title": "Analyst", + "phone": "2223334444", + "mobile_phone": "2223334444", + "organization_id": 2, + "is_active": True, + "first_name": "John", + "last_name": "Smith", + }, + { + "user_profile_id": 9, + "keycloak_email": "tfrs@gov.bc.ca", + "keycloak_username": "LCFS3", + "email": "tfrs@gov.bc.ca", + "title": "Analyst", + "phone": "3334445555", + "mobile_phone": "3334445555", + "organization_id": 3, + "is_active": True, + "first_name": "Alice", + "last_name": "Woo", + }, + { + "user_profile_id": 10, + "keycloak_email": "tfrs@gov.bc.ca", + "keycloak_username": "LCFS4", + "email": "tfrs@gov.bc.ca", + "title": "Analyst", + "phone": "4445556666", + "mobile_phone": "4445556666", + "organization_id": 4, + "is_active": True, + "first_name": "Bob", + "last_name": "Lee", + }, + { + "user_profile_id": 11, + "keycloak_email": "tfrs@gov.bc.ca", + "keycloak_username": "LCFS5", + "email": "tfrs@gov.bc.ca", + "title": "Analyst", + "phone": "6667778888", + "mobile_phone": "6667778888", + "organization_id": 5, + "is_active": True, + "first_name": "David", + "last_name": "Clark", + }, + { + "user_profile_id": 12, + "keycloak_email": "tfrs@gov.bc.ca", + "keycloak_username": "LCFS6", + "email": "tfrs@gov.bc.ca", + "title": "Analyst", + "phone": "8889990000", + "mobile_phone": "8889990000", + "organization_id": 6, + "is_active": True, + "first_name": "Frank", + "last_name": "Wilson", + }, + { + "user_profile_id": 13, + "keycloak_email": "tfrs@gov.bc.ca", + "keycloak_username": "LCFS7", + "email": "tfrs@gov.bc.ca", + "title": "Analyst", + "phone": "0001112222", + "mobile_phone": "0001112222", + "organization_id": 7, + "is_active": True, + "first_name": "Bill", + "last_name": "Teller", + }, + { + "user_profile_id": 14, + "keycloak_email": "tfrs@gov.bc.ca", + "keycloak_username": "LCFS8", + "email": "tfrs@gov.bc.ca", + "title": "Analyst", + "phone": "2223334445", + "mobile_phone": "2223334445", + "organization_id": 8, + "is_active": True, + "first_name": "James", + "last_name": "Bell", }, { - "keycloak_email": 'protonater@live.com', - "keycloak_username": 'PVENKATE', - "email": 'protonater@live.com', - "username": 'pvenkate', - "display_name": 'Prashanth V', - "title": 'Developer', + "user_profile_id": 15, + "keycloak_email": "tfrs@gov.bc.ca", + "keycloak_username": "LCFS9", + "email": "tfrs@gov.bc.ca", + "title": "Analyst", + "phone": "4445556667", + "mobile_phone": "4445556667", + "organization_id": 9, + "is_active": True, + "first_name": "Leo", + "last_name": "Martin", + }, + { + "user_profile_id": 16, + "keycloak_email": "tfrs@gov.bc.ca", + "keycloak_username": "LCFS10", + "email": "tfrs@gov.bc.ca", + "title": "Analyst", + "phone": "6667778889", + "mobile_phone": "6667778889", + "organization_id": 10, + "is_active": True, + "first_name": "Noah", + "last_name": "Thomas", + }, + { + "user_profile_id": 17, + "keycloak_email": "kailee.douglas@gov.bc.ca", + "keycloak_username": "KADOUGLA", + "email": "kailee.douglas@gov.bc.ca", + "title": "Admin", + "phone": "1234567890", + "mobile_phone": "1234567890", + "organization_id": None, + "is_active": True, + 
"first_name": "Kailee", + "last_name": "Douglas", + }, + { + "user_profile_id": 18, + "keycloak_email": "alasdair.ring@gov.bc.ca", + "keycloak_username": "AIRING", + "email": "alasdair.ring@gov.bc.ca", + "title": "Product Owner", + "phone": "1234567890", + "mobile_phone": "1234567890", + "organization_id": None, + "is_active": True, + "first_name": "Al", + "last_name": "Ring", + }, + { + "user_profile_id": 19, + "keycloak_email": "rebekah.ford@gov.bc.ca", + "keycloak_username": "RRFORD", + "email": "rebekah.ford@gov.bc.ca", + "title": "Scrum Master", + "phone": "1234567890", + "mobile_phone": "1234567890", + "organization_id": None, + "is_active": True, + "first_name": "Rebekah", + "last_name": "Ford", + }, + { + "user_profile_id": 20, + "keycloak_email": "stuart.galloway@gov.bc.ca", + "keycloak_username": "SGALLOWA", + "email": "stuart.galloway@gov.bc.ca", + "title": "UX Practicioner", + "phone": "1234567890", + "mobile_phone": "1234567890", + "organization_id": None, + "is_active": True, + "first_name": "Stuart", + "last_name": "Galloway", + }, + { + "user_profile_id": 21, + "keycloak_email": "lcfstest@gov.bc.ca", + "keycloak_username": "lcfstest", + "email": "lcfstest@gov.bc.ca", + "title": "Test Account", + "phone": "1234567890", + "mobile_phone": "1234567890", + "organization_id": None, + "is_active": True, + "first_name": "LCFS_IDIR", + "last_name": "TESTER", + }, + { + "user_profile_id": 22, + "keycloak_email": "daniel.haselhan@gov.bc.ca", + "keycloak_username": "DHASELHA", + "email": "daniel.haselhan@gov.bc.ca", + "title": "Developer", + "phone": "1234567890", + "mobile_phone": "1234567890", + "organization_id": None, + "is_active": True, + "first_name": "Daniel", + "last_name": "Haselhan", + }, + { + "user_profile_id": 23, + "keycloak_email": "Arturo.Reyes-Lopez@gov.bc.ca", + "keycloak_username": "ARTREYES", + "email": "Arturo.Reyes-Lopez@gov.bc.ca", + "title": "Developer", "phone": "1234567890", "mobile_phone": "1234567890", - "organization_id": 1 + "organization_id": None, + "is_active": True, + "first_name": "Arturo", + "last_name": "Reyes", }, ] try: for user_data in user_profiles_to_seed: - # Check if the user already exists based on a unique attribute, e.g., username + # Check if the user already exists based on a unique attribute exists = await session.execute( - select(UserProfile).where(UserProfile.keycloak_email == user_data["keycloak_email"]) + select(UserProfile).where( + UserProfile.keycloak_username == user_data["keycloak_username"] + ) ) if not exists.scalars().first(): user_profile = UserProfile(**user_data) session.add(user_profile) - await session.commit() except Exception as e: - logger.error("Error occurred while seeding user profiles: %s", e) + context = { + "function": "seed_user_profiles", + } + logger.error( + "Error occurred while seeding user profiles", + error=str(e), + exc_info=e, + **context, + ) raise diff --git a/backend/lcfs/db/seeders/dev/user_role_seeder.py b/backend/lcfs/db/seeders/dev/user_role_seeder.py index 315edc785..681ace649 100644 --- a/backend/lcfs/db/seeders/dev/user_role_seeder.py +++ b/backend/lcfs/db/seeders/dev/user_role_seeder.py @@ -1,8 +1,9 @@ -import logging +import structlog from sqlalchemy import select -from lcfs.db.models.UserRole import UserRole +from lcfs.db.models.user.UserRole import UserRole + +logger = structlog.get_logger(__name__) -logger = logging.getLogger(__name__) async def seed_user_roles(session): """ @@ -13,10 +14,49 @@ async def seed_user_roles(session): """ user_roles_to_seed = [ - { - "user_profile_id": 1, 
- "role_id": 1 - }, + {"user_profile_id": 1, "role_id": 1}, + {"user_profile_id": 1, "role_id": 3}, + {"user_profile_id": 2, "role_id": 1}, + {"user_profile_id": 2, "role_id": 3}, + {"user_profile_id": 3, "role_id": 1}, + {"user_profile_id": 3, "role_id": 3}, + {"user_profile_id": 4, "role_id": 1}, + {"user_profile_id": 4, "role_id": 3}, + {"user_profile_id": 5, "role_id": 1}, + {"user_profile_id": 5, "role_id": 3}, + {"user_profile_id": 6, "role_id": 1}, + {"user_profile_id": 6, "role_id": 3}, + {"user_profile_id": 7, "role_id": 2}, + {"user_profile_id": 7, "role_id": 8}, + {"user_profile_id": 7, "role_id": 10}, + {"user_profile_id": 8, "role_id": 2}, + {"user_profile_id": 8, "role_id": 8}, + {"user_profile_id": 8, "role_id": 10}, + {"user_profile_id": 9, "role_id": 2}, + {"user_profile_id": 9, "role_id": 8}, + {"user_profile_id": 9, "role_id": 10}, + {"user_profile_id": 10, "role_id": 2}, + {"user_profile_id": 11, "role_id": 2}, + {"user_profile_id": 12, "role_id": 2}, + {"user_profile_id": 13, "role_id": 2}, + {"user_profile_id": 14, "role_id": 2}, + {"user_profile_id": 15, "role_id": 2}, + {"user_profile_id": 16, "role_id": 2}, + {"user_profile_id": 17, "role_id": 1}, + {"user_profile_id": 17, "role_id": 3}, + {"user_profile_id": 18, "role_id": 1}, + {"user_profile_id": 18, "role_id": 3}, + {"user_profile_id": 19, "role_id": 1}, + {"user_profile_id": 19, "role_id": 3}, + {"user_profile_id": 20, "role_id": 1}, + {"user_profile_id": 20, "role_id": 3}, + {"user_profile_id": 21, "role_id": 1}, + {"user_profile_id": 21, "role_id": 3}, + {"user_profile_id": 21, "role_id": 4}, + {"user_profile_id": 22, "role_id": 1}, + {"user_profile_id": 22, "role_id": 3}, + {"user_profile_id": 23, "role_id": 1}, + {"user_profile_id": 23, "role_id": 3}, ] try: @@ -25,14 +65,21 @@ async def seed_user_roles(session): exists = await session.execute( select(UserRole).where( UserRole.user_profile_id == user_role_data["user_profile_id"], - UserRole.role_id == user_role_data["role_id"] + UserRole.role_id == user_role_data["role_id"], ) ) if not exists.scalars().first(): user_role = UserRole(**user_role_data) session.add(user_role) - await session.commit() except Exception as e: - logger.error("Error occurred while seeding user roles: %s", e) + context = { + "function": "seed_user_roles", + } + logger.error( + "Error occurred while seeding user roles", + error=str(e), + exc_info=e, + **context, + ) raise diff --git a/backend/lcfs/db/seeders/dev_seeder.py b/backend/lcfs/db/seeders/dev_seeder.py index 39769c272..993e58ab0 100644 --- a/backend/lcfs/db/seeders/dev_seeder.py +++ b/backend/lcfs/db/seeders/dev_seeder.py @@ -1,29 +1,78 @@ -import logging +import structlog import asyncio -from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession -from sqlalchemy.orm import sessionmaker -from lcfs.settings import settings +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import text from lcfs.db.seeders.dev.user_profile_seeder import seed_user_profiles from lcfs.db.seeders.dev.user_role_seeder import seed_user_roles +from lcfs.db.seeders.dev.organization_address_seeder import seed_organization_addresses +from lcfs.db.seeders.dev.organization_attorney_address_seeder import ( + seed_organization_attorney_addresses, +) +from lcfs.db.seeders.dev.organization_seeder import seed_organizations +from lcfs.db.seeders.dev.transaction_seeder import seed_transactions +from lcfs.db.seeders.dev.admin_adjustment_seeder import seed_admin_adjustments +from lcfs.db.seeders.dev.admin_adjustment_history_seeder import ( 
+ seed_admin_adjustment_history, +) +from lcfs.db.seeders.dev.fuel_code_seeder import seed_fuel_codes +from lcfs.db.seeders.dev.finished_fuel_transfer_mode_seeder import ( + seed_finished_fuel_transfer_modes, +) +from lcfs.db.seeders.dev.feedstock_fuel_transfer_mode_seeder import ( + seed_feedstock_fuel_transfer_modes, +) +from lcfs.db.seeders.dev.expected_use_types_seeder import seed_expected_use_types -logger = logging.getLogger(__name__) +logger = structlog.get_logger(__name__) -async def seed_dev(): + +async def update_sequences(session): + """ + Function to update sequences for all tables after seeding. + """ + sequences = { + "organization_address": "organization_address_id", + "organization": "organization_id", + "transaction": "transaction_id", + "admin_adjustment": "admin_adjustment_id", + "user_profile": "user_profile_id", + "user_role": "user_role_id", + "fuel_code": "fuel_code_id", + } + + for table, column in sequences.items(): + sequence_name = f"{table}_{column}_seq" + max_value_query = text( + f"SELECT setval('{sequence_name}', COALESCE((SELECT MAX({column}) + 1 FROM {table}), 1), false)" + ) + await session.execute(max_value_query) + + +async def seed_dev(session: AsyncSession): """ Function to seed the database with dev data. """ - engine = create_async_engine(str(settings.db_url)) - AsyncSessionLocal = sessionmaker(bind=engine, class_=AsyncSession) - - async with AsyncSessionLocal() as session: - try: - await seed_user_profiles(session) - await seed_user_roles(session) - logger.info("Database seeding completed successfully.") - except Exception as e: - logger.error(f"An error occurred during seeding: {e}") - await session.rollback() + await seed_organization_addresses(session) + await seed_organization_attorney_addresses(session) + await seed_organizations(session) + await seed_transactions(session) + await seed_user_profiles(session) + await seed_user_roles(session) + await seed_admin_adjustments(session) + await seed_fuel_codes(session) + await seed_finished_fuel_transfer_modes(session) + await seed_feedstock_fuel_transfer_modes(session) + await seed_expected_use_types(session) + + # Update sequences after all seeders have run + await update_sequences(session) + + # TODO not working with incorrect foreign keys, needs debugging + # await seed_admin_adjustment_history(session) + + logger.info("Dev database seeding completed successfully.") + if __name__ == "__main__": asyncio.run(seed_dev()) diff --git a/backend/lcfs/db/seeders/prod_seeder.py b/backend/lcfs/db/seeders/prod_seeder.py index b6cc9d040..675d1bdc8 100644 --- a/backend/lcfs/db/seeders/prod_seeder.py +++ b/backend/lcfs/db/seeders/prod_seeder.py @@ -1,24 +1,16 @@ -import logging +import structlog import asyncio -from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession -from sqlalchemy.orm import sessionmaker -from lcfs.settings import settings +from sqlalchemy.ext.asyncio import AsyncSession -logger = logging.getLogger(__name__) +logger = structlog.get_logger(__name__) -async def seed_prod(): + +async def seed_prod(session: AsyncSession): """ Function to seed the database with prod data. 
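
The update_sequences helper added above exists because the seeders insert rows with explicit primary keys, which leaves each table's Postgres sequence pointing below MAX(id); setval(..., false) makes the next nextval() return MAX(id) + 1 so later inserts do not collide. A minimal standalone sketch of the same pattern (the table, column, and URL here are illustrative, not tied to the LCFS schema):

import asyncio

from sqlalchemy import text
from sqlalchemy.ext.asyncio import create_async_engine


async def reset_sequence(db_url: str, table: str, pk: str) -> None:
    engine = create_async_engine(db_url)
    async with engine.begin() as conn:
        # setval(..., false): the next nextval() returns exactly this value,
        # i.e. MAX(pk) + 1 after seeding rows with explicit primary keys.
        await conn.execute(
            text(
                f"SELECT setval('{table}_{pk}_seq', "
                f"COALESCE((SELECT MAX({pk}) + 1 FROM {table}), 1), false)"
            )
        )
    await engine.dispose()


if __name__ == "__main__":
    asyncio.run(
        reset_sequence(
            "postgresql+asyncpg://user:pass@localhost/lcfs",  # illustrative URL
            "user_profile",
            "user_profile_id",
        )
    )
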
""" - engine = create_async_engine(str(settings.db_url)) - AsyncSessionLocal = sessionmaker(bind=engine, class_=AsyncSession) + pass - async with AsyncSessionLocal() as session: - try: - logger.info("Database seeding completed successfully.") - except Exception as e: - logger.error(f"An error occurred during seeding: {e}") - await session.rollback() if __name__ == "__main__": asyncio.run(seed_prod()) diff --git a/backend/lcfs/db/seeders/seed_database.py b/backend/lcfs/db/seeders/seed_database.py index f23daf1a5..c840d5cf6 100644 --- a/backend/lcfs/db/seeders/seed_database.py +++ b/backend/lcfs/db/seeders/seed_database.py @@ -1,20 +1,55 @@ import sys import asyncio +import structlog + +from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession +from sqlalchemy.orm import sessionmaker from lcfs.db.seeders.common_seeder import seed_common from lcfs.db.seeders.dev_seeder import seed_dev from lcfs.db.seeders.prod_seeder import seed_prod +from lcfs.db.seeders.test_seeder import seed_test +from lcfs.settings import settings + +logger = structlog.get_logger(__name__) + async def seed_database(environment): - if environment == 'dev': - await seed_common() - await seed_dev() - elif environment == 'prod': - await seed_common() - await seed_prod() - else: - raise ValueError("Unknown environment") + engine = create_async_engine(str(settings.db_url)) + AsyncSessionLocal = sessionmaker(bind=engine, class_=AsyncSession) + + async with AsyncSessionLocal() as session: + async with session.begin(): + try: + logger.info("Database seeding started.") + await seed_common(session) + + if environment == "dev": + await seed_dev(session) + elif environment == "prod": + await seed_prod(session) + elif environment == "test": + await seed_test(session) + else: + raise ValueError("Unknown environment") + + # commit all seeders + await session.commit() + + except Exception as e: + context = { + "function": "seed_database", + } + logger.error( + "An error occurred during seeding", + error=str(e), + exc_info=e, + **context, + ) + await session.rollback() # Ensure to rollback in case of an error + raise + if __name__ == "__main__": - env = sys.argv[1] if len(sys.argv) > 1 else 'dev' + env = sys.argv[1] if len(sys.argv) > 1 else "dev" asyncio.run(seed_database(env)) diff --git a/frontend/src/utils/.gitKeep b/backend/lcfs/db/seeders/test/__init__.py similarity index 100% rename from frontend/src/utils/.gitKeep rename to backend/lcfs/db/seeders/test/__init__.py diff --git a/backend/lcfs/db/seeders/test/test_admin_adjustment_seeder.py b/backend/lcfs/db/seeders/test/test_admin_adjustment_seeder.py new file mode 100644 index 000000000..c7d695697 --- /dev/null +++ b/backend/lcfs/db/seeders/test/test_admin_adjustment_seeder.py @@ -0,0 +1,59 @@ +import structlog +from datetime import datetime +from sqlalchemy import select +from lcfs.db.models.admin_adjustment import AdminAdjustment + +logger = structlog.get_logger(__name__) + + +async def seed_test_admin_adjustments(session): + """ + Seeds initial admin adjustments into the database, if they do not already exist. + Args: + session: The database session for committing the new records. 
+ """ + + # Define a standard date for transaction_effective_date + transaction_effective_date = datetime(2024, 1, 1) + + admin_adjustments_to_seed = [ + { + "compliance_units": 50000, + "to_organization_id": 1, + "transaction_id": 1, + "current_status_id": 1, + "transaction_effective_date": transaction_effective_date, + }, + { + "compliance_units": 50000, + "to_organization_id": 1, + "transaction_id": 2, + "current_status_id": 3, + "transaction_effective_date": transaction_effective_date, + }, + ] + + try: + for admin_adjustment_data in admin_adjustments_to_seed: + # Check if the admin adjustment already exists + exists = await session.execute( + select(AdminAdjustment).where( + AdminAdjustment.transaction_id + == admin_adjustment_data["transaction_id"] + ) + ) + if not exists.scalars().first(): + admin_adjustment = AdminAdjustment(**admin_adjustment_data) + session.add(admin_adjustment) + + except Exception as e: + context = { + "function": "seed_test_admin_adjustments", + } + logger.error( + "Error occurred while seeding admin adjustments", + error=str(e), + exc_info=e, + **context, + ) + raise diff --git a/backend/lcfs/db/seeders/test/test_organization_seeder.py b/backend/lcfs/db/seeders/test/test_organization_seeder.py new file mode 100644 index 000000000..84c781937 --- /dev/null +++ b/backend/lcfs/db/seeders/test/test_organization_seeder.py @@ -0,0 +1,62 @@ +import structlog +from sqlalchemy import select, text +from lcfs.db.models.organization.Organization import Organization + +logger = structlog.get_logger(__name__) + + +async def seed_test_organizations(session): + """ + Seeds the organizations into the database, if they do not already exist. + + Args: + session: The database session for committing the new records. + """ + + organizations_to_seed = [ + { + "name": "GreenLeaf Dynamics", + "operating_name": "GreenLeaf Dynamics", + "organization_status_id": 2, + "organization_type_id": 1, + }, + { + "name": "PureEarth Ventures", + "operating_name": "PureEarth Ventures", + "organization_status_id": 2, + "organization_type_id": 1, + }, + { + "name": "TerraNova Industries", + "operating_name": "TerraNova Industries", + "organization_status_id": 2, + "organization_type_id": 1, + }, + ] + + try: + for org_data in organizations_to_seed: + # Check if the Organization already exists based on its name + exists = await session.execute( + select(Organization).where(Organization.name == org_data["name"]) + ) + if not exists.scalars().first(): + organization = Organization(**org_data) + session.add(organization) + await session.execute( + text( + "SELECT setval('organization_attorney_address_organization_attorney_address_seq', (SELECT MAX(organization_attorney_address_id) FROM organization_attorney_address))" + ) + ) + + except Exception as e: + context = { + "function": "seed_test_organizations", + } + logger.error( + "Error occurred while seeding organizations", + error=str(e), + exc_info=e, + **context, + ) + raise diff --git a/backend/lcfs/db/seeders/test/test_transaction_seeder.py b/backend/lcfs/db/seeders/test/test_transaction_seeder.py new file mode 100644 index 000000000..4ec016ff0 --- /dev/null +++ b/backend/lcfs/db/seeders/test/test_transaction_seeder.py @@ -0,0 +1,56 @@ +import structlog +from sqlalchemy import select +from lcfs.db.models.transaction.Transaction import Transaction, TransactionActionEnum + +logger = structlog.get_logger(__name__) + + +async def seed_test_transactions(session): + """ + Seeds initial transaction for organizations into the database, if they do not already 
exist. + Args: + session: The database session for committing the new records. + """ + + transactions_to_seed = [ + { + "transaction_id": 1, + "compliance_units": 50000, + "transaction_action": TransactionActionEnum.Adjustment, + "organization_id": 1, + }, + { + "transaction_id": 2, + "compliance_units": 1000, + "transaction_action": TransactionActionEnum.Adjustment, + "organization_id": 1, + }, + ] + + try: + for transaction_data in transactions_to_seed: + # Check if the transaction already exists + exists = await session.execute( + select(Transaction).where( + Transaction.compliance_units + == transaction_data["compliance_units"], + Transaction.transaction_action + == transaction_data["transaction_action"], + Transaction.organization_id == transaction_data["organization_id"], + ) + ) + if not exists.scalars().first(): + transaction = Transaction(**transaction_data) + session.add(transaction) + + except Exception as e: + context = { + "function": "seed_test_transactions", + } + logger.error( + "Error occurred while seeding transactions", + error=str(e), + exc_info=e, + **context, + ) + raise diff --git a/backend/lcfs/db/seeders/test/test_transfer_seeder.py b/backend/lcfs/db/seeders/test/test_transfer_seeder.py new file mode 100644 index 000000000..2cc5a0ed6 --- /dev/null +++ b/backend/lcfs/db/seeders/test/test_transfer_seeder.py @@ -0,0 +1,108 @@ +import structlog +from datetime import datetime +from sqlalchemy import select, and_, text +from lcfs.db.models.transfer.Transfer import Transfer +from lcfs.db.models.organization.Organization import Organization + +logger = structlog.get_logger(__name__) + + +async def seed_test_transfers(session): + """ + Seeds the transfers into the test database, if they do not already exist. + Args: + session: The database session for committing the new records. 
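
Each of these test seeders repeats the same idempotent check-then-add shape: select on a natural key, insert only when nothing matches, and leave the commit to the session owner. A generic sketch of that pattern (add_if_missing, match, and row are placeholders for illustration, not part of this PR):

from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession


async def add_if_missing(session: AsyncSession, model, match: dict, row: dict) -> bool:
    """Stage model(**row) on the session unless a row matching `match` exists."""
    stmt = select(model).where(
        *(getattr(model, column) == value for column, value in match.items())
    )
    result = await session.execute(stmt)
    if result.scalars().first() is not None:
        return False  # already seeded; keeps the seeder safely re-runnable
    session.add(model(**row))
    return True


# Example:
# await add_if_missing(
#     session, UserRole,
#     match={"user_profile_id": 1, "role_id": 1},
#     row={"user_profile_id": 1, "role_id": 1},
# )
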
+ """ + transfers_to_seed = [ + { + "from_organization_id": 1, + "to_organization_id": 2, + "current_status_id": 2, + "transfer_category_id": 1, + "agreement_date": datetime.strptime("2023-01-01", "%Y-%m-%d").date(), + "quantity": 100, + "price_per_unit": 10.0, + }, + { + "from_organization_id": 2, + "to_organization_id": 1, + "current_status_id": 2, + "transfer_category_id": 1, + "agreement_date": datetime.strptime("2023-01-02", "%Y-%m-%d").date(), + "quantity": 50, + "price_per_unit": 5.0, + }, + { + "from_organization_id": 2, + "to_organization_id": 1, + "current_status_id": 3, + "transfer_category_id": 1, + "from_transaction_id": 2, + "agreement_date": datetime.strptime("2023-01-02", "%Y-%m-%d").date(), + "quantity": 50, + "price_per_unit": 5.0, + }, + ] + + for transfer_data in transfers_to_seed: + from_org_exists = await session.get( + Organization, transfer_data["from_organization_id"] + ) + to_org_exists = await session.get( + Organization, transfer_data["to_organization_id"] + ) + + if not from_org_exists or not to_org_exists: + context = { + 'transfer_data': transfer_data, + 'from_org_exists': from_org_exists, + 'to_org_exists': to_org_exists, + } + logger.error( + "Referenced organizations for transfer do not exist.", + **context, + ) + continue + + try: + exists = await session.execute( + select(Transfer).where( + and_( + Transfer.from_organization_id + == transfer_data["from_organization_id"], + Transfer.to_organization_id + == transfer_data["to_organization_id"], + Transfer.current_status_id + == transfer_data["current_status_id"], + Transfer.transfer_category_id + == transfer_data["transfer_category_id"], + Transfer.agreement_date == transfer_data["agreement_date"], + Transfer.quantity == transfer_data["quantity"], + Transfer.price_per_unit == transfer_data["price_per_unit"], + ) + ) + ) + transfer = exists.scalars().first() + if not transfer: + transfer = Transfer(**transfer_data) + session.add(transfer) + else: + # Update existing transfer if needed + pass + + except Exception as e: + context = { + "function": "seed_test_transfers", + } + logger.error( + "Error occurred while seeding transfers", + error=str(e), + exc_info=e, + **context, + ) + raise + + # Refresh the materialized view to include the new/updated transfers + await session.execute( + text("REFRESH MATERIALIZED VIEW CONCURRENTLY mv_transaction_aggregate") + ) diff --git a/backend/lcfs/db/seeders/test/test_user_profile_seeder.py b/backend/lcfs/db/seeders/test/test_user_profile_seeder.py new file mode 100644 index 000000000..d367948fd --- /dev/null +++ b/backend/lcfs/db/seeders/test/test_user_profile_seeder.py @@ -0,0 +1,210 @@ +import structlog +from sqlalchemy import select +from lcfs.db.models.user.UserProfile import UserProfile + +logger = structlog.get_logger(__name__) + + +async def seed_test_user_profiles(session): + """ + Seeds the user profiles into the database, if they do not already exist. + + Args: + session: The database session for committing the new records. + """ + + # Define the user profiles to seed + user_profiles_to_seed = [ + { + "keycloak_email": "stuart.galloway@gov.bc.ca", + "keycloak_username": "SGALLOWA", + "email": "stuart.galloway@gov.bc.ca", + "title": "Sr. 
UX Practitioner", + "phone": "1234567890", + "mobile_phone": "1234567890", + "first_name": "Stuart", + "last_name": "Galloway", + "organization_id": None, + "is_active": True, + }, + { + "keycloak_email": "daniel.haselhan@gov.bc.ca", + "keycloak_username": "DHASELHA", + "email": "daniel.haselhan@gov.bc.ca", + "title": "Dev", + "phone": "1234567890", + "mobile_phone": "1234567890", + "first_name": "Daniel", + "last_name": "Haselhan", + "organization_id": None, + "is_active": True, + }, + { + "keycloak_email": "Haris.Ishaq@gov.bc.ca", + "keycloak_username": "HISHAQ", + "email": "Haris.Ishaq@gov.bc.ca", + "title": "Chief Engineer", + "phone": None, + "mobile_phone": None, + "first_name": "Haris", + "last_name": "Ishaq", + "organization_id": None, + "is_active": True, + }, + { + "keycloak_email": "shannon.payne@gov.bc.ca", + "keycloak_username": "shpayne", + "email": "shannon.payne@gov.bc.ca", + "title": "Chief Engineer", + "phone": None, + "mobile_phone": None, + "first_name": "Shannon", + "last_name": "Payne", + "organization_id": None, + "is_active": True, + }, + { + "keycloak_email": "tfrs@gov.bc.ca", + "keycloak_username": "LCFS1_bat", + "email": "tfrs@gov.bc.ca", + "title": "CEO", + "phone": None, + "mobile_phone": None, + "first_name": "Donald", + "last_name": "Freeman", + "organization_id": 1, + "is_active": True, + }, + { + "keycloak_email": "alex.zorkin@gov.bc.ca", + "keycloak_username": "ALZORKIN", + "email": "alex.zorkin@gov.bc.ca", + "title": "Developer", + "phone": "1234567890", + "mobile_phone": "1234567890", + "organization_id": None, + "is_active": True, + "first_name": "Alex", + "last_name": "Zorkin", + }, + { + "keycloak_email": "hamed.valiollahibayeki@gov.bc.ca", + "keycloak_username": "HVALIOLL", + "email": "hamed.valiollahibayeki@gov.bc.ca", + "title": "Developer", + "phone": "1234567890", + "mobile_phone": "1234567890", + "organization_id": None, + "is_active": True, + "first_name": "Hamed", + "last_name": "Bayeki", + }, + { + "keycloak_email": "kevin.hashimoto@gov.bc.ca", + "keycloak_username": "KHASHIMO", + "email": "kevin.hashimoto@gov.bc.ca", + "title": "Developer", + "phone": "1234567890", + "mobile_phone": "1234567890", + "organization_id": None, + "is_active": True, + "first_name": "Kevin", + "last_name": "Hashimoto", + }, + { + "keycloak_email": "prashanth.venkateshappa@gov.bc.ca", + "keycloak_username": "PVENKATE", + "email": "prashanth.venkateshappa@gov.bc.ca", + "title": "Developer", + "phone": "1234567890", + "mobile_phone": "1234567890", + "organization_id": None, + "is_active": True, + "first_name": "Prashanth", + "last_name": "V", + }, + { + "keycloak_email": "justin.lepitzki@gov.bc.ca", + "keycloak_username": "JLEPITZ", + "email": "justin.lepitzki@gov.bc.ca", + "title": "Admin", + "phone": "1234567890", + "mobile_phone": "1234567890", + "organization_id": None, + "is_active": True, + "first_name": "Justin", + "last_name": "Lepitzki", + }, + { + "keycloak_email": "lindsy.grunert@gov.bc.ca", + "keycloak_username": "LGRUNERT", + "email": "lindsy.grunert@gov.bc.ca", + "title": "Admin", + "phone": "1234567890", + "mobile_phone": "1234567890", + "organization_id": None, + "is_active": True, + "first_name": "Lindsy", + "last_name": "Grunert", + }, + { + "keycloak_email": "Arturo.Reyes-Lopez@gov.bc.ca", + "keycloak_username": "ARTREYES", + "email": "Arturo.Reyes-Lopez@gov.bc.ca", + "title": "Developer", + "phone": "1234567890", + "mobile_phone": "1234567890", + "organization_id": None, + "is_active": True, + "first_name": "Arturo", + "last_name": "Reyes", + }, + 
{ + "keycloak_email": "alasdair.ring@gov.bc.ca", + "keycloak_username": "AIRING", + "email": "alasdair.ring@gov.bc.ca", + "title": "Product Owner", + "phone": "1234567890", + "mobile_phone": "1234567890", + "organization_id": None, + "is_active": True, + "first_name": "Al", + "last_name": "Ring", + }, + { + "keycloak_email": "rebekah.ford@gov.bc.ca", + "keycloak_username": "RRFORD", + "email": "rebekah.ford@gov.bc.ca", + "title": "Scrum Master", + "phone": "1234567890", + "mobile_phone": "1234567890", + "organization_id": None, + "is_active": True, + "first_name": "Rebekah", + "last_name": "Ford", + }, + ] + + try: + for user_data in user_profiles_to_seed: + # Check if the user already exists based on a unique attribute + exists = await session.execute( + select(UserProfile).where( + UserProfile.keycloak_email == user_data["keycloak_email"] + ) + ) + if not exists.scalars().first(): + user_profile = UserProfile(**user_data) + session.add(user_profile) + + except Exception as e: + context = { + "function": "seed_test_user_profiles", + } + logger.error( + "Error occurred while seeding user profiles", + error=str(e), + exc_info=e, + **context, + ) + raise diff --git a/backend/lcfs/db/seeders/test/test_user_role_seeder.py b/backend/lcfs/db/seeders/test/test_user_role_seeder.py new file mode 100644 index 000000000..62d79e9df --- /dev/null +++ b/backend/lcfs/db/seeders/test/test_user_role_seeder.py @@ -0,0 +1,49 @@ +import structlog +from sqlalchemy import select +from lcfs.db.models.user.UserRole import UserRole + +logger = structlog.get_logger(__name__) + + +async def seed_test_user_roles(session): + """ + Seeds the user roles into the database, if they do not already exist. + + Args: + session: The database session for committing the new records. + """ + + user_roles_to_seed = [ + {"user_profile_id": 1, "role_id": 1}, + {"user_profile_id": 2, "role_id": 2}, + {"user_profile_id": 3, "role_id": 2}, + {"user_profile_id": 4, "role_id": 2}, + {"user_profile_id": 5, "role_id": 2}, + {"user_profile_id": 6, "role_id": 2}, + {"user_profile_id": 7, "role_id": 2}, + ] + + try: + for user_role_data in user_roles_to_seed: + # Check if the UserRole already exists based on user_profile_id and role_id + exists = await session.execute( + select(UserRole).where( + UserRole.user_profile_id == user_role_data["user_profile_id"], + UserRole.role_id == user_role_data["role_id"], + ) + ) + if not exists.scalars().first(): + user_role = UserRole(**user_role_data) + session.add(user_role) + + except Exception as e: + context = { + "function": "seed_test_user_roles", + } + logger.error( + "Error occurred while seeding user roles", + error=str(e), + exc_info=e, + **context, + ) + raise diff --git a/backend/lcfs/db/seeders/test_seeder.py b/backend/lcfs/db/seeders/test_seeder.py new file mode 100644 index 000000000..14c451bc9 --- /dev/null +++ b/backend/lcfs/db/seeders/test_seeder.py @@ -0,0 +1,56 @@ +import structlog +import asyncio +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import text +from lcfs.db.seeders.test.test_admin_adjustment_seeder import ( + seed_test_admin_adjustments, +) + +from lcfs.db.seeders.test.test_organization_seeder import seed_test_organizations +from lcfs.db.seeders.test.test_user_profile_seeder import seed_test_user_profiles +from lcfs.db.seeders.test.test_user_role_seeder import seed_test_user_roles +from lcfs.db.seeders.test.test_transaction_seeder import seed_test_transactions + +logger = structlog.get_logger(__name__) + + +async def update_sequences(session: 
AsyncSession): + """ + Function to update sequences for all tables after seeding. + """ + sequences = { + "organization": "organization_id", + "user_profile": "user_profile_id", + "user_role": "user_role_id", + "transaction": "transaction_id", + "admin_adjustment": "admin_adjustment_id", + # Add other tables and their primary key columns as needed + } + + for table, column in sequences.items(): + sequence_name = f"{table}_{column}_seq" + max_value_query = text( + f"SELECT setval('{sequence_name}', COALESCE((SELECT MAX({column}) + 1 FROM {table}), 1), false)" + ) + await session.execute(max_value_query) + + +async def seed_test(session: AsyncSession): + """ + Function to seed the database with test data. + """ + await seed_test_organizations(session) + await seed_test_user_profiles(session) + await seed_test_user_roles(session) + await seed_test_transactions(session) + await seed_test_admin_adjustments(session) + # await seed_test_transfers(session) + + # Update sequences after all seeders have run + await update_sequences(session) + + logger.info("Test database seeding completed successfully.") + + +if __name__ == "__main__": + asyncio.run(seed_test()) diff --git a/backend/lcfs/db/utils.py b/backend/lcfs/db/utils.py index 2ab956ef5..035c91d62 100644 --- a/backend/lcfs/db/utils.py +++ b/backend/lcfs/db/utils.py @@ -5,40 +5,40 @@ from lcfs.settings import settings -async def create_database() -> None: - """Create a database.""" - db_url = make_url(str(settings.db_url.with_path("/postgres"))) +async def create_test_database() -> None: + """Create a test database.""" + db_url = make_url(str(settings.db_test_url.with_path("/postgres"))) engine = create_async_engine(db_url, isolation_level="AUTOCOMMIT") async with engine.connect() as conn: database_existance = await conn.execute( text( - f"SELECT 1 FROM pg_database WHERE datname='{settings.db_base}'", # noqa: E501, S608 + f"SELECT 1 FROM pg_database WHERE datname='{settings.db_test}'", # noqa: E501, S608 ), ) database_exists = database_existance.scalar() == 1 if database_exists: - await drop_database() + await drop_test_database() async with engine.connect() as conn: # noqa: WPS440 await conn.execute( text( - f'CREATE DATABASE "{settings.db_base}" ENCODING "utf8" TEMPLATE template1', # noqa: E501 + f'CREATE DATABASE "{settings.db_test}" ENCODING "utf8" TEMPLATE template1', # noqa: E501 ), ) -async def drop_database() -> None: +async def drop_test_database() -> None: """Drop current database.""" - db_url = make_url(str(settings.db_url.with_path("/postgres"))) + db_url = make_url(str(settings.db_test_url.with_path("/postgres"))) engine = create_async_engine(db_url, isolation_level="AUTOCOMMIT") async with engine.connect() as conn: disc_users = ( "SELECT pg_terminate_backend(pg_stat_activity.pid) " # noqa: S608 "FROM pg_stat_activity " - f"WHERE pg_stat_activity.datname = '{settings.db_base}' " + f"WHERE pg_stat_activity.datname = '{settings.db_test}' " "AND pid <> pg_backend_pid();" ) await conn.execute(text(disc_users)) - await conn.execute(text(f'DROP DATABASE "{settings.db_base}"')) + await conn.execute(text(f'DROP DATABASE "{settings.db_test}"')) diff --git a/backend/lcfs/logging_config.py b/backend/lcfs/logging_config.py new file mode 100644 index 000000000..08bbe421c --- /dev/null +++ b/backend/lcfs/logging_config.py @@ -0,0 +1,163 @@ +import sys +import logging +import structlog +import contextvars +from io import StringIO +from rich.console import Console +from rich.pretty import Pretty +from rich.text import Text +from rich.traceback 
import Traceback +from lcfs.settings import settings + +# Context variables for correlation ID +correlation_id_var = contextvars.ContextVar("correlation_id", default=None) + + +def add_correlation_id(logger, method_name, event_dict): + """Add correlation ID to event dict.""" + correlation_id = correlation_id_var.get() + if correlation_id is not None: + event_dict["correlation_id"] = correlation_id + return event_dict + + +def censor_sensitive_data_processor(_, __, event_dict): + """Censor sensitive information in logs.""" + sensitive_keys = {"password", "token", "secret_key", "authorization", "api_key"} + + def censor(obj): + if isinstance(obj, dict): + return { + k: "***" if k.lower() in sensitive_keys else censor(v) + for k, v in obj.items() + } + elif isinstance(obj, list): + return [censor(item) for item in obj] + else: + return obj + + return censor(event_dict) + + +def custom_console_renderer(): + def renderer(logger, name, event_dict): + console_output = StringIO() + console = Console( + file=console_output, + force_terminal=True, # Force terminal output + color_system="auto", # Auto-detect color support + ) + + log_level = event_dict.get("level", "INFO").upper() + event = event_dict.pop("event", "") + + # Build the header without the timestamp + header = Text() + + # Customize log level colors + level_style = { + "CRITICAL": "bold bright_magenta", + "ERROR": "bold bright_red", + "WARNING": "bold bright_yellow", + "INFO": "bold bright_green", + "DEBUG": "bold bright_blue", + }.get(log_level, "bold white") + + header.append(f"[{log_level}] ", style=level_style) + header.append(f"{event}", style="bold") + + # Print the header to the console + console.print(header) + + # Print key-value pairs with custom colors + for key, value in event_dict.items(): + if key == "exc_info" and value: + # value is a tuple containing exception info + if isinstance(value, tuple): + exc_type, exc_value, exc_traceback = value + else: + # Fallback if value is not a tuple + exc_type, exc_value, exc_traceback = sys.exc_info() + # Create a Traceback object + rich_traceback = Traceback.from_exception( + exc_type, exc_value, exc_traceback, width=console.width + ) + console.print(rich_traceback) + else: + # Customize key color and style + console.print(f" {key}=", style="bold bright_cyan", end="") + # Print the value with Pretty + console.print(Pretty(value)) + + # Get the rendered message + rendered_message = console_output.getvalue() + return rendered_message + + return renderer + + +def setup_logging(level=logging.INFO): + """Set up structured logging configuration""" + # Clear existing handlers and add a StreamHandler + root_logger = logging.getLogger() + root_logger.handlers = [] + handler = logging.StreamHandler() + handler.setLevel(level) + + # Use a formatter that doesn't interfere with color codes + handler.setFormatter(logging.Formatter("%(message)s", validate=False)) + root_logger.addHandler(handler) + root_logger.setLevel(level) + + # Choose renderer based on environment + if settings.environment.lower() == "dev": + renderer = custom_console_renderer() + processors = [ + structlog.contextvars.merge_contextvars, + add_correlation_id, + structlog.processors.add_log_level, + structlog.processors.CallsiteParameterAdder( + [ + structlog.processors.CallsiteParameter.PATHNAME, + structlog.processors.CallsiteParameter.FUNC_NAME, + structlog.processors.CallsiteParameter.LINENO, + ] + ), + structlog.processors.StackInfoRenderer(), + exc_info_processor, + censor_sensitive_data_processor, + renderer, + ] + else: + 
renderer = structlog.processors.JSONRenderer() + processors = [ + structlog.contextvars.merge_contextvars, + add_correlation_id, + structlog.processors.add_log_level, + structlog.processors.CallsiteParameterAdder( + [ + structlog.processors.CallsiteParameter.PATHNAME, + structlog.processors.CallsiteParameter.FUNC_NAME, + structlog.processors.CallsiteParameter.LINENO, + ] + ), + structlog.processors.StackInfoRenderer(), + structlog.processors.format_exc_info, + censor_sensitive_data_processor, + renderer, + ] + + structlog.configure( + processors=processors, + logger_factory=structlog.stdlib.LoggerFactory(), + cache_logger_on_first_use=True, + ) + + +def exc_info_processor(logger, method_name, event_dict): + """Replace exc_info=True with the actual exception information.""" + exc_info = event_dict.get("exc_info") + if exc_info: + if exc_info is True: + event_dict["exc_info"] = sys.exc_info() + return event_dict diff --git a/backend/lcfs/prestart.sh b/backend/lcfs/prestart.sh index 952f220c6..3fe3f9cda 100755 --- a/backend/lcfs/prestart.sh +++ b/backend/lcfs/prestart.sh @@ -1,6 +1,13 @@ #!/usr/bin/env bash -echo "running prestart.sh from $(pwd)" +echo "Running prestart.sh from $(pwd)" + +# Check for Alembic head conflicts +HEAD_COUNT=$(poetry run alembic heads | wc -l) +if [ "$HEAD_COUNT" -gt 1 ]; then + echo "Alembic head conflict detected: Multiple migration heads present." + exit 1 +fi # Apply base database migrations echo "Applying base migrations." @@ -23,5 +30,4 @@ if [ $? -ne 0 ]; then fi echo "Migrations and seeding completed successfully." - -echo "done running prestart.sh from $(pwd)" +echo "Done running prestart.sh from $(pwd)" diff --git a/backend/lcfs/scripts/send_transaction.py b/backend/lcfs/scripts/send_transaction.py new file mode 100644 index 000000000..ef4bcd8e8 --- /dev/null +++ b/backend/lcfs/scripts/send_transaction.py @@ -0,0 +1,75 @@ +import pika +import json + +""" +RabbitMQ Transaction Message Sender Script + +This script is designed to send messages to the Transaction queue +It connects to a RabbitMQ instance with specified credentials, +and sends a JSON message with 'tfrs_id`, `organization_id`, and `compliance_units_amount`. + +### Variables: +- `rabbitmq_host`: Hostname of the RabbitMQ server (default: "localhost"). +- `rabbitmq_port`: Port for connecting to RabbitMQ (default: 5672). +- `rabbitmq_user`: Username for RabbitMQ authentication. +- `rabbitmq_pass`: Password for RabbitMQ authentication. +- `rabbitmq_vhost`: Virtual host to connect to on RabbitMQ. +- `rabbitmq_transaction_queue`: Name of the queue to send the message to. + +### Usage: +1. Define the message parameters (`tfrs_id`, `organization_id`, `compliance_units_amount`). +2. Execute the script with python +3. The message is sent as a JSON object to the specified queue in RabbitMQ. 
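
A short sketch of what the configured pipeline does at a call site, assuming the module is importable as lcfs.logging_config (the path added above): the censoring processor masks the password value, and add_correlation_id attaches whatever the context variable currently holds — in the app that would be set by request middleware rather than inline.

import logging

import structlog

from lcfs.logging_config import correlation_id_var, setup_logging

setup_logging(level=logging.INFO)
correlation_id_var.set("req-1234")  # in the app, request middleware sets this

logger = structlog.get_logger(__name__)
# "password" is in the sensitive-key set, so its value renders as "***";
# add_correlation_id attaches correlation_id="req-1234" automatically.
logger.info("user login attempt", username="alice", password="hunter2")
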
+""" + +# Variables for RabbitMQ +rabbitmq_host: str = "localhost" +rabbitmq_port: int = 5672 +rabbitmq_pass: str = "development_only" +rabbitmq_user: str = "lcfs" +rabbitmq_vhost: str = "lcfs" +rabbitmq_transaction_queue: str = "transaction_queue" + +def send_message(tfrs_id: int, organization_id: int, compliance_units_amount: int): + # Set up the credentials + credentials = pika.PlainCredentials(rabbitmq_user, rabbitmq_pass) + + # Establish the connection with the credentials + connection = pika.BlockingConnection( + pika.ConnectionParameters( + host=rabbitmq_host, + virtual_host=rabbitmq_vhost, + port=rabbitmq_port, + credentials=credentials, + ) + ) + channel = connection.channel() + + # Declare the queue if not already declared + channel.queue_declare(queue=rabbitmq_transaction_queue, durable=True) + + # Create the message body + message = { + "tfrs_id": tfrs_id, + "organization_id": organization_id, + "compliance_units_amount": compliance_units_amount, + } + + # Publish the message to the queue + channel.basic_publish( + exchange="", + routing_key="transaction_queue", + body=json.dumps(message), + properties=pika.BasicProperties( + delivery_mode=2, # Make message persistent + ), + ) + + print(f" [x] Sent message: {message}") + + # Close the connection + connection.close() + + +if __name__ == "__main__": + send_message(tfrs_id=1, organization_id=1, compliance_units_amount=1000) diff --git a/frontend/src/views/organizations/.gitKeep b/backend/lcfs/services/clamav/__init__.py similarity index 100% rename from frontend/src/views/organizations/.gitKeep rename to backend/lcfs/services/clamav/__init__.py diff --git a/backend/lcfs/services/clamav/client.py b/backend/lcfs/services/clamav/client.py new file mode 100644 index 000000000..d08b6988e --- /dev/null +++ b/backend/lcfs/services/clamav/client.py @@ -0,0 +1,52 @@ +import structlog +import socket + +from lcfs.settings import settings + +logger = structlog.get_logger(__name__) + + +class VirusScanException(Exception): + """Custom exception for virus detection.""" + + pass + + +class ClamAVService: + def __init__(self): + pass + + def scan_file(self, file): + # Create a socket connection to ClamAV + + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock: + sock.connect((settings.clamav_host, settings.clamav_port)) + sock.sendall(b"zINSTREAM\0") # Initiate the INSTREAM scan + + # Reset file pointer and send the file in chunks to ClamAV + file.file.seek(0) + while True: + chunk = file.file.read(1024) + if not chunk: + break + size = len(chunk).to_bytes(4, byteorder="big") + sock.sendall(size + chunk) + + sock.sendall(b"\0\0\0\0") # Send a zero-length chunk to signal end + + # Read ClamAV's response + response = sock.recv(1024) + result = response.decode("utf-8").strip() + + # Reset file pointer after scan + file.file.seek(0) + + # Check the response for virus detection + if "FOUND" in result: + logger.error( + "Virus detected", + result=result, + ) + raise VirusScanException(f"Virus detected: {result}") + + return result diff --git a/backend/lcfs/services/keycloak/authentication.py b/backend/lcfs/services/keycloak/authentication.py index 3b620f5b7..dd6d51f29 100644 --- a/backend/lcfs/services/keycloak/authentication.py +++ b/backend/lcfs/services/keycloak/authentication.py @@ -1,148 +1,208 @@ import json + import httpx import jwt -from redis.asyncio import Redis from fastapi import HTTPException -from sqlalchemy.ext.asyncio import AsyncSession -from sqlalchemy import and_ -from sqlalchemy.orm import joinedload -from 
sqlalchemy.future import select +from redis.asyncio import Redis, ConnectionPool +from sqlalchemy import func from sqlalchemy.exc import NoResultFound -from starlette.requests import Request -from starlette.authentication import ( - AuthenticationBackend, AuthCredentials -) +from sqlalchemy.ext.asyncio import async_sessionmaker +from sqlalchemy.future import select +from sqlalchemy.orm import joinedload +from starlette.authentication import AuthenticationBackend, AuthCredentials + +from lcfs.db.models import UserLoginHistory +from lcfs.db.models.user.UserProfile import UserProfile +from lcfs.db.models.user.UserRole import UserRole +from lcfs.services.keycloak.dependencies import parse_external_username from lcfs.settings import Settings -from lcfs.db.models.UserProfile import UserProfile -from lcfs.db.models.UserLoginHistory import UserLoginHistory -from lcfs.services.keycloak.dependencies import _parse_external_username +from lcfs.web.api.user.repo import UserRepository class UserAuthentication(AuthenticationBackend): """ Class to handle authentication when calling the lcfs api """ - def __init__(self, redis_pool: Redis, session: AsyncSession, settings: Settings): - self.async_session = session + + def __init__( + self, + redis_client: Redis, + session_factory: async_sessionmaker, + settings: Settings, + ): + self.session_factory = session_factory self.settings = settings - self.redis_pool = redis_pool + self.redis_client = redis_client self.jwks = None self.jwks_uri = None + self.test_keycloak_user = None async def refresh_jwk(self): - # Try to get the JWKS data from Redis cache - async with Redis(connection_pool=self.redis_pool) as redis: - jwks_data = await redis.get('jwks_data') - - if jwks_data: - jwks_data = json.loads(jwks_data) - self.jwks = jwks_data.get('jwks') - self.jwks_uri = jwks_data.get('jwks_uri') - return - - # If not in cache, retrieve from the well-known endpoint - async with httpx.AsyncClient() as client: - oidc_response = await client.get(self.settings.well_known_endpoint) - jwks_uri = oidc_response.json().get('jwks_uri') - certs_response = await client.get(jwks_uri) - jwks = certs_response.json() - - # Composite object containing both JWKS and JWKS URI - jwks_data = { - 'jwks': jwks, - 'jwks_uri': jwks_uri - } - - # Cache the composite JWKS data with a TTL of 1 day (86400 seconds) - async with Redis(connection_pool=self.redis_pool) as redis: - await redis.set('jwks_data', json.dumps(jwks_data), ex=86400) - - self.jwks = jwks - self.jwks_uri = jwks_uri + """ + Refreshes the JSON Web Key (JWK) used for token verification. + This method attempts to retrieve the JWK from Redis cache. + If not found, it fetches it from the well-known endpoint + and stores it in Redis for future use. + """ + try: + # Try to get the JWKS data from Redis cache + jwks_data = await self.redis_client.get("jwks_data") + + if jwks_data: + jwks_data = json.loads(jwks_data) + self.jwks = jwks_data.get("jwks") + self.jwks_uri = jwks_data.get("jwks_uri") + return + + # If not in cache, retrieve from the well-known endpoint + async with httpx.AsyncClient() as client: + oidc_response = await client.get(self.settings.well_known_endpoint) + oidc_response.raise_for_status() + jwks_uri = oidc_response.json().get("jwks_uri") + + if not jwks_uri: + raise ValueError( + "JWKS URI not found in the well-known endpoint response." 
+ ) + + certs_response = await client.get(jwks_uri) + certs_response.raise_for_status() + jwks = certs_response.json() + + # Composite object containing both JWKS and JWKS URI + jwks_data = {"jwks": jwks, "jwks_uri": jwks_uri} + + # Cache the composite JWKS data with a TTL of 1 day (86400 seconds) + await self.redis_client.set("jwks_data", json.dumps(jwks_data), ex=86400) + + self.jwks = jwks + self.jwks_uri = jwks_uri + + except Exception as e: + raise HTTPException( + status_code=500, detail=f"Error refreshing JWK: {str(e)}" + ) async def authenticate(self, request): # Extract the authorization header from the request - auth = request.headers.get('Authorization') + auth = request.headers.get("Authorization") if not auth: - raise HTTPException(status_code=401, detail='Authorization header is required') + raise HTTPException( + status_code=401, detail="Authorization header is required" + ) # Split the authorization header into scheme and token parts = auth.split() - if parts[0].lower() != 'bearer': - raise HTTPException(status_code=401, detail='Authorization header must start with Bearer') + if parts[0].lower() != "bearer": + raise HTTPException( + status_code=401, detail="Authorization header must start with Bearer" + ) elif len(parts) == 1: - raise HTTPException(status_code=401, detail='Token not found') + raise HTTPException(status_code=401, detail="Token not found") elif len(parts) > 2: - raise HTTPException(status_code=401, detail='Authorization header must be Bearer token') + raise HTTPException( + status_code=401, detail="Authorization header must be Bearer token" + ) token = parts[1] # Call the method to refresh the JWKs from the cache or the endpoint await self.refresh_jwk() - # Use PyJWKClient with the JWKS URI to get the signing key - jwks_client = jwt.PyJWKClient(self.jwks_uri) - signing_key = jwks_client.get_signing_key_from_jwt(token) + if not self.test_keycloak_user: + # Use PyJWKClient with the JWKS URI to get the signing key + jwks_client = jwt.PyJWKClient(self.jwks_uri) + signing_key = jwks_client.get_signing_key_from_jwt(token) - # Decode and validate the JWT token - try: - user_token = jwt.decode( - token, - signing_key.key, - algorithms=["RS256"], - audience=self.settings.keycloak_audience, - options={"verify_exp": True}, - ) - except jwt.ExpiredSignatureError as exc: - raise HTTPException(status_code=401, detail='Token has expired') - except (jwt.InvalidTokenError, jwt.DecodeError) as exc: - raise HTTPException(status_code=401, detail=str(exc)) + # Decode and validate the JWT token + try: + user_token = jwt.decode( + token, + signing_key.key, + algorithms=["RS256"], + audience=self.settings.keycloak_audience, + options={"verify_exp": True}, + leeway=5, # Allows for 5 seconds of clock skew + ) + except jwt.ExpiredSignatureError as exc: + raise HTTPException(status_code=401, detail="Token has expired") + except (jwt.InvalidTokenError, jwt.DecodeError) as exc: + raise HTTPException(status_code=401, detail=str(exc)) + else: + user_token = self.test_keycloak_user - if 'preferred_username' in user_token: + # Normalize the username and email to lowercase + preferred_username = user_token.get("preferred_username", "").lower() + email = user_token.get("email", "").lower() + + if preferred_username: try: - async with self.async_session() as session: + async with self.session_factory() as session: result = await session.execute( - select(UserProfile).options(joinedload(UserProfile.organization), joinedload(UserProfile.user_roles)).where( - UserProfile.keycloak_user_id == 
user_token['preferred_username']) + select(UserProfile) + .options( + joinedload(UserProfile.organization), + joinedload(UserProfile.user_roles).joinedload( + UserRole.role + ), + ) + .where( + func.lower(UserProfile.keycloak_user_id) + == preferred_username + ) ) - user = result.unique().scalar_one() + user = result.unique().scalar_one_or_none() + + if user is None: + # Handle the case where no user is found + pass + # Check if the user is active + elif not user.is_active: + error_text = "The account is currently inactive." + await self.create_login_history(user_token, False, error_text) + raise HTTPException(status_code=401, detail=error_text) + else: + return AuthCredentials(["authenticated"]), user - await self.create_login_history(user_token, True, None, request.url.path) - return AuthCredentials(["authenticated"]), user - except NoResultFound: pass - external_username = _parse_external_username(user_token) - - if 'email' in user_token: - # Construct the query to find the user - user_query = select(UserProfile).options(joinedload(UserProfile.organization), joinedload(UserProfile.user_roles)).where( - and_( - UserProfile.keycloak_email == user_token['email'], - UserProfile.keycloak_username == external_username + if email: + user_query = ( + select(UserProfile) + .options( + joinedload(UserProfile.organization), + joinedload(UserProfile.user_roles).joinedload(UserRole.role), + ) + .where( + func.lower(UserProfile.keycloak_email) == email, + func.lower(UserProfile.keycloak_username) + == parse_external_username(user_token), ) ) - - # TODO may need to not use org id == 1 if gov no longer is organization in lcfs - if user_token['identity_provider'] == 'idir': - user_query = user_query.where(UserProfile.organization_id == 1) - elif user_token['identity_provider'] == 'bceidbusiness': - user_query = user_query.where(UserProfile.organization_id != 1) + # Check for Government or Supplier affiliation + if user_token["identity_provider"] == "idir": + user_query = user_query.where(UserProfile.organization_id.is_(None)) + elif user_token["identity_provider"] == "bceidbusiness": + user_query = user_query.where(UserProfile.organization_id.isnot(None)) else: - error_text = 'Unknown identity provider.' - await self.create_login_history(user_token, False, error_text, request.url.path) + error_text = "Unknown identity provider." raise HTTPException(status_code=401, detail=error_text) - async with self.async_session() as session: + async with self.session_factory() as session: user_result = await session.execute(user_query) - user = user_result.unique().scalar_one() + user = user_result.unique().scalar_one_or_none() if user is None: - error_text = 'No User with that configuration exists.' - await self.create_login_history(user_token, False, error_text, request.url.path) + error_text = "No User with that configuration exists." + await self.create_login_history(user_token, False, error_text) + raise HTTPException(status_code=401, detail=error_text) + + # Check if the user is active + if not user.is_active: + error_text = "The account is currently inactive." + await self.create_login_history(user_token, False, error_text) raise HTTPException(status_code=401, detail=error_text) else: - error_text = 'preferred_username or email is required in JWT payload.' - await self.create_login_history(user_token, False, error_text, request.url.path) + error_text = "preferred_username or email is required in JWT payload." 
raise HTTPException(status_code=401, detail=error_text) await self.map_user_keycloak_id(user, user_token) @@ -154,36 +214,28 @@ async def map_user_keycloak_id(self, user_profile, user_token): Updates the user's keycloak_user_id and commits the changes to the database. """ # Map the keycloak user id to the user for future login caching - user_profile.keycloak_user_id = user_token['preferred_username'] - # TODO may want to map keycloak display name to user as well - # user.display_name = user_token['display_name'] + user_profile.keycloak_user_id = user_token["preferred_username"] # The merge method is used to merge the state of the given object into the current session # If the instance does not exist in the session, insert it. # If the instance already exists in the session, update it. - async with self.async_session() as session: + async with self.session_factory() as session: await session.merge(user_profile) await session.commit() return user_profile - async def create_login_history(self, user_token, success=False, error=None, path=''): - """ - Creates a user login history entry asynchronously. - """ - # We only want to create a user_login_history when the current user is fetched - if path == '/api/users/current': - email = user_token.get('email', '') - username = _parse_external_username(user_token) - preferred_username = user_token.get('preferred_username', '') - login_history = UserLoginHistory( - keycloak_email=email, - external_username=username, - keycloak_user_id=preferred_username, - is_login_successful=success, - login_error_message=error - ) - async with self.async_session() as session: - session.add(login_history) - await session.commit() - + async def create_login_history(self, user_token, is_success=True, error_msg=None): + email = user_token.get("email", "").lower() + username = parse_external_username(user_token) + preferred_username = user_token.get("preferred_username", "").lower() + login_history = UserLoginHistory( + keycloak_email=email, + external_username=username, + keycloak_user_id=preferred_username, + is_login_successful=is_success, + login_error_message=error_msg, + ) + async with self.session_factory() as session: + session.add(login_history) + await session.commit() diff --git a/backend/lcfs/services/keycloak/dependencies.py b/backend/lcfs/services/keycloak/dependencies.py index 43f5339da..30e384f2c 100644 --- a/backend/lcfs/services/keycloak/dependencies.py +++ b/backend/lcfs/services/keycloak/dependencies.py @@ -1,8 +1,10 @@ +from fastapi import HTTPException -def _parse_external_username(user_token): - identity_provider = user_token.get('identity_provider') - if identity_provider == 'idir': - return user_token.get('idir_username') - elif identity_provider == 'bceidbusiness': - return user_token.get('bceid_username') - raise Exception('Unknown or missing identity provider.') + +def parse_external_username(user_token): + identity_provider = user_token.get("identity_provider") + if identity_provider == "idir": + return user_token.get("idir_username", "").lower() + elif identity_provider == "bceidbusiness": + return user_token.get("bceid_username", "").lower() + raise HTTPException(status_code=401, detail="Unknown or missing identity provider.") diff --git a/backend/lcfs/services/minio/client.py b/backend/lcfs/services/minio/client.py deleted file mode 100644 index 0c9b105f0..000000000 --- a/backend/lcfs/services/minio/client.py +++ /dev/null @@ -1,71 +0,0 @@ -from os import path -from minio import Minio -from fastapi import HTTPException -from api.config 
import MINIO_ACCESS_KEY, MINIO_SECRET_KEY, MINIO_HOST_URL -from starlette.status import HTTP_500_INTERNAL_SERVER_ERROR - - -minio_client = Minio(MINIO_HOST_URL, - access_key=MINIO_ACCESS_KEY, - secret_key=MINIO_SECRET_KEY, - secure=False) - - -def s3_upload_file(destination_file_name: str, local_file_path: str, content_type: str, bucket_name: str): - """ - Uploads a file to s3 Minio storage and returns the file response object - """ - if not path.exists(local_file_path): - raise FileNotFoundError( - f'file_path: {local_file_path} is not a valid file') - - try: - result = minio_client.fput_object(bucket_name=bucket_name, - object_name=destination_file_name, - file_path=local_file_path, - content_type=content_type) - return result - except Exception as exc: - raise HTTPException(status_code=HTTP_500_INTERNAL_SERVER_ERROR, - detail=f'Unknown Error During Upload Process {exc}') - - -def s3_delete_file(bucket_name: str, object_name: str): - """ - Deletes a file from s3 minio storage - """ - try: - print('deleting object:', object_name) - result = minio_client.remove_object(bucket_name, object_name) - return result - except Exception as exc: - print('error deleting file: ', exc) - raise HTTPException(status_code=HTTP_500_INTERNAL_SERVER_ERROR, - detail=f'Unknown Error During Delete Process {exc}') - - -def s3_list_directory(bucket_name: str, folder_name: str): - """ - Lists all files in a s3 minio storage - """ - try: - print('listing files for directory:', folder_name) - files = minio_client.list_objects(bucket_name, prefix=folder_name, recursive=True) - return files - except Exception as exc: - print('error listing directory:', exc) - raise HTTPException(status_code=HTTP_500_INTERNAL_SERVER_ERROR, - detail=f'Unknown Error Listing Directory {exc}') - - -def s3_get_file(bucket_name: str, object_name: str): - """ - Gets a file from s3 minio storage - """ - try: - result = minio_client.get_object(bucket_name, object_name) - return result - except Exception as exc: - print('error getting file: ', exc) - raise HTTPException(status_code=HTTP_500_INTERNAL_SERVER_ERROR, - detail=f'Unknown Error Getting File {exc}') diff --git a/frontend/src/views/organizations/__tests__/.gitKeep b/backend/lcfs/services/rabbitmq/__init__.py similarity index 100% rename from frontend/src/views/organizations/__tests__/.gitKeep rename to backend/lcfs/services/rabbitmq/__init__.py diff --git a/backend/lcfs/services/rabbitmq/base_consumer.py b/backend/lcfs/services/rabbitmq/base_consumer.py new file mode 100644 index 000000000..9a80bf56a --- /dev/null +++ b/backend/lcfs/services/rabbitmq/base_consumer.py @@ -0,0 +1,58 @@ +import asyncio +import logging + +import aio_pika +from aio_pika.abc import AbstractChannel, AbstractQueue +from fastapi import FastAPI + +from lcfs.settings import settings + +logging.getLogger("aiormq").setLevel(logging.WARNING) +logging.getLogger("aio_pika").setLevel(logging.WARNING) +logger = logging.getLogger(__name__) + + +class BaseConsumer: + def __init__(self, app: FastAPI, queue_name: str): + self.connection = None + self.channel = None + self.queue = None + self.queue_name = queue_name + self.app = app + + async def connect(self): + """Connect to RabbitMQ and set up the consumer.""" + connection_url = f"amqp://{settings.rabbitmq_user}:{settings.rabbitmq_pass}@{settings.rabbitmq_host}:{settings.rabbitmq_port}/{settings.rabbitmq_vhost}" + + self.connection = await aio_pika.connect_robust(connection_url) + self.channel: AbstractChannel = await self.connection.channel() + self.queue: AbstractQueue = 
await self.channel.declare_queue( + name=self.queue_name, + auto_delete=False, + durable=True, + ) + + logger.debug(f"Queue '{self.queue_name}' declared and ready for consuming") + + async def start_consuming(self): + """Start consuming messages from the queue.""" + if not self.queue: + raise RuntimeError("Queue is not initialized. Call connect() first.") + + async with self.queue.iterator() as queue_iter: + async for message in queue_iter: + async with message.process(): + logger.debug(f"Received message: {message.body.decode()}") + await self.process_message(message.body) + + async def process_message(self, body: bytes): + """Process the incoming message. Override this method in subclasses.""" + logger.warning( + f"Base process_message called, you probably want to override process_message" + ) + + async def close_connection(self): + """Close the RabbitMQ connection.""" + if self.connection: + logger.debug("Closing RabbitMQ Connection") + await self.connection.close() diff --git a/backend/lcfs/services/rabbitmq/consumers.py b/backend/lcfs/services/rabbitmq/consumers.py new file mode 100644 index 000000000..17cfdf193 --- /dev/null +++ b/backend/lcfs/services/rabbitmq/consumers.py @@ -0,0 +1,14 @@ +import asyncio + +from lcfs.services.rabbitmq.report_consumer import ( + setup_report_consumer, + close_report_consumer, +) + + +async def start_consumers(app): + await setup_report_consumer(app) + + +async def stop_consumers(): + await close_report_consumer() diff --git a/backend/lcfs/services/rabbitmq/report_consumer.py b/backend/lcfs/services/rabbitmq/report_consumer.py new file mode 100644 index 000000000..551f3781b --- /dev/null +++ b/backend/lcfs/services/rabbitmq/report_consumer.py @@ -0,0 +1,295 @@ +import asyncio +import json +import logging +from typing import Optional + +from fastapi import FastAPI +from sqlalchemy.ext.asyncio import AsyncSession + +from lcfs.db.dependencies import async_engine +from lcfs.db.models.compliance.ComplianceReportStatus import ComplianceReportStatusEnum +from lcfs.db.models.transaction.Transaction import TransactionActionEnum +from lcfs.db.models.user import UserProfile +from lcfs.services.rabbitmq.base_consumer import BaseConsumer +from lcfs.services.tfrs.redis_balance import RedisBalanceService +from lcfs.settings import settings +from lcfs.web.api.compliance_report.repo import ComplianceReportRepository +from lcfs.web.api.compliance_report.schema import ComplianceReportCreateSchema +from lcfs.web.api.compliance_report.services import ComplianceReportServices +from lcfs.web.api.organizations.repo import OrganizationsRepository +from lcfs.web.api.organizations.services import OrganizationsService +from lcfs.web.api.transaction.repo import TransactionRepository +from lcfs.web.api.user.repo import UserRepository +from lcfs.web.exception.exceptions import ServiceException + +logger = logging.getLogger(__name__) + +consumer = None +consumer_task = None + +VALID_ACTIONS = {"Created", "Submitted", "Approved"} + + +async def setup_report_consumer(app: FastAPI): + """ + Set up the report consumer and start consuming messages. + """ + global consumer, consumer_task + consumer = ReportConsumer(app) + await consumer.connect() + consumer_task = asyncio.create_task(consumer.start_consuming()) + + +async def close_report_consumer(): + """ + Cancel the consumer task if it exists and close the consumer connection. 
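
The intended use of BaseConsumer is to subclass it and override process_message, exactly as ReportConsumer does below. A minimal hedged sketch of that lifecycle (the queue name and payload handling are illustrative; connect() still reads the rabbitmq_* settings):

import asyncio
import json

from fastapi import FastAPI

from lcfs.services.rabbitmq.base_consumer import BaseConsumer


class EchoConsumer(BaseConsumer):
    async def process_message(self, body: bytes) -> None:
        # start_consuming() wraps each delivery in message.process(), which
        # acks on success, so this override only has to handle the payload.
        payload = json.loads(body.decode())
        print(f"echo: {payload}")


async def run_for_a_while(app: FastAPI) -> None:
    consumer = EchoConsumer(app, queue_name="echo_queue")
    await consumer.connect()
    task = asyncio.create_task(consumer.start_consuming())
    try:
        await asyncio.sleep(60)
    finally:
        task.cancel()
        await consumer.close_connection()
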
+ """ + global consumer, consumer_task + + if consumer_task: + consumer_task.cancel() + + if consumer: + await consumer.close_connection() + + +class ReportConsumer(BaseConsumer): + """ + A consumer for handling TFRS compliance report messages from a RabbitMQ queue. + """ + + def __init__( + self, app: FastAPI, queue_name: str = settings.rabbitmq_transaction_queue + ): + super().__init__(app, queue_name) + + async def process_message(self, body: bytes): + """ + Process an incoming message from the queue. + + Expected message structure: + { + "tfrs_id": int, + "organization_id": int, + "compliance_period": str, + "nickname": str, + "action": "Created"|"Submitted"|"Approved", + "credits": int (optional), + "user_id": int + } + """ + message = self._parse_message(body) + if not message: + return # Invalid message already logged + + action = message["action"] + org_id = message["organization_id"] + + if action not in VALID_ACTIONS: + logger.error(f"Invalid action '{action}' in message.") + return + + logger.info(f"Received '{action}' action from TFRS for Org {org_id}") + + try: + await self.handle_message( + action=action, + compliance_period=message.get("compliance_period"), + compliance_units=message.get("credits"), + legacy_id=message["tfrs_id"], + nickname=message.get("nickname"), + org_id=org_id, + user_id=message["user_id"], + ) + except Exception: + logger.exception("Failed to handle message") + + def _parse_message(self, body: bytes) -> Optional[dict]: + """ + Parse the message body into a dictionary. + Log and return None if parsing fails or required fields are missing. + """ + try: + message_content = json.loads(body.decode()) + except json.JSONDecodeError: + logger.error("Failed to decode message body as JSON.") + return None + + required_fields = ["tfrs_id", "organization_id", "action", "user_id"] + if any(field not in message_content for field in required_fields): + logger.error("Message missing required fields.") + return None + + return message_content + + async def handle_message( + self, + action: str, + compliance_period: str, + compliance_units: Optional[int], + legacy_id: int, + nickname: Optional[str], + org_id: int, + user_id: int, + ): + """ + Handle a given message action by loading dependencies and calling the respective handler. 
+        """
+        redis_client = self.app.state.redis_client
+
+        async with AsyncSession(async_engine) as session:
+            async with session.begin():
+                # Initialize repositories and services
+                org_repo = OrganizationsRepository(db=session)
+                transaction_repo = TransactionRepository(db=session)
+                redis_balance_service = RedisBalanceService(
+                    transaction_repo=transaction_repo, redis_client=redis_client
+                )
+                org_service = OrganizationsService(
+                    repo=org_repo,
+                    transaction_repo=transaction_repo,
+                    redis_balance_service=redis_balance_service,
+                )
+                compliance_report_repo = ComplianceReportRepository(db=session)
+                compliance_report_service = ComplianceReportServices(
+                    repo=compliance_report_repo
+                )
+                user = await UserRepository(db=session).get_user_by_id(user_id)
+
+                if not user:
+                    logger.error(
+                        f"Cannot parse Report {legacy_id} from TFRS, no user with ID {user_id}"
+                    )
+                    return
+
+                if action == "Created":
+                    await self._handle_created(
+                        org_id,
+                        legacy_id,
+                        compliance_period,
+                        nickname,
+                        user,
+                        compliance_report_service,
+                    )
+                elif action == "Submitted":
+                    await self._handle_submitted(
+                        compliance_report_repo,
+                        compliance_units,
+                        legacy_id,
+                        org_id,
+                        org_service,
+                        session,
+                        user,
+                    )
+                elif action == "Approved":
+                    await self._handle_approved(
+                        legacy_id,
+                        compliance_report_repo,
+                        transaction_repo,
+                        user,
+                        session,
+                    )
+
+    async def _handle_created(
+        self,
+        org_id: int,
+        legacy_id: int,
+        compliance_period: str,
+        nickname: str,
+        user: UserProfile,
+        compliance_report_service: ComplianceReportServices,
+    ):
+        """
+        Handle the 'Created' action by creating a new compliance report draft.
+        """
+        lcfs_report = ComplianceReportCreateSchema(
+            legacy_id=legacy_id,
+            compliance_period=compliance_period,
+            organization_id=org_id,
+            nickname=nickname,
+            status=ComplianceReportStatusEnum.Draft.value,
+        )
+        await compliance_report_service.create_compliance_report(
+            org_id, lcfs_report, user
+        )
+
+    async def _handle_approved(
+        self,
+        legacy_id: int,
+        compliance_report_repo: ComplianceReportRepository,
+        transaction_repo: TransactionRepository,
+        user: UserProfile,
+        session: AsyncSession,
+    ):
+        """
+        Handle the 'Approved' action by updating the report status to 'Assessed'
+        and confirming the associated transaction.
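+
+        Preconditions enforced below: the report must already exist in LCFS
+        for the given legacy ID, and its transaction must still be in
+        'Reserved' status; otherwise a ServiceException is raised.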
+ """ + existing_report = ( + await compliance_report_repo.get_compliance_report_by_legacy_id(legacy_id) + ) + if not existing_report: + raise ServiceException( + f"No compliance report found for legacy ID {legacy_id}" + ) + + new_status = await compliance_report_repo.get_compliance_report_status_by_desc( + ComplianceReportStatusEnum.Assessed.value + ) + existing_report.current_status_id = new_status.compliance_report_status_id + session.add(existing_report) + await session.flush() + + await compliance_report_repo.add_compliance_report_history( + existing_report, user + ) + + existing_transaction = await transaction_repo.get_transaction_by_id( + existing_report.transaction_id + ) + if not existing_transaction: + raise ServiceException( + "Compliance Report does not have an associated transaction" + ) + + if existing_transaction.transaction_action != TransactionActionEnum.Reserved: + raise ServiceException( + f"Transaction {existing_transaction.transaction_id} is not in 'Reserved' status" + ) + + await transaction_repo.confirm_transaction(existing_transaction.transaction_id) + + async def _handle_submitted( + self, + compliance_report_repo: ComplianceReportRepository, + compliance_units: int, + legacy_id: int, + org_id: int, + org_service: OrganizationsService, + session: AsyncSession, + user: UserProfile, + ): + """ + Handle the 'Submitted' action by linking a reserved transaction + to the compliance report and updating its status. + """ + existing_report = ( + await compliance_report_repo.get_compliance_report_by_legacy_id(legacy_id) + ) + if not existing_report: + raise ServiceException( + f"No compliance report found for legacy ID {legacy_id}" + ) + + transaction = await org_service.adjust_balance( + TransactionActionEnum.Reserved, compliance_units, org_id + ) + existing_report.transaction_id = transaction.transaction_id + + new_status = await compliance_report_repo.get_compliance_report_status_by_desc( + ComplianceReportStatusEnum.Submitted.value + ) + existing_report.current_status_id = new_status.compliance_report_status_id + session.add(existing_report) + await session.flush() + + await compliance_report_repo.add_compliance_report_history( + existing_report, user + ) diff --git a/backend/lcfs/services/redis/dependency.py b/backend/lcfs/services/redis/dependency.py index 368994ffd..01cc689eb 100644 --- a/backend/lcfs/services/redis/dependency.py +++ b/backend/lcfs/services/redis/dependency.py @@ -1,26 +1,19 @@ -from typing import AsyncGenerator - from redis.asyncio import Redis from starlette.requests import Request -async def get_redis_pool( +# Redis Client Dependency +async def get_redis_client( request: Request, -) -> AsyncGenerator[Redis, None]: # pragma: no cover +) -> Redis: """ - Returns connection pool. - - You can use it like this: - - >>> from redis.asyncio import ConnectionPool, Redis - >>> - >>> async def handler(redis_pool: ConnectionPool = Depends(get_redis_pool)): - >>> async with Redis(connection_pool=redis_pool) as redis: - >>> await redis.get('key') + Returns the Redis client. - I use pools, so you don't acquire connection till the end of the handler. + Usage: + >>> async def handler(redis_client: Redis = Depends(get_redis_client)): + >>> value = await redis_client.get('key') - :param request: current request. - :returns: redis connection pool. + :param request: Current request object. + :returns: Redis client. 
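+
+    Note: this hands back the shared client created once at startup by
+    init_redis (stored on app.state), so handlers reuse one client and its
+    internal connection pool rather than opening a connection per request.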
""" - return request.app.state.redis_pool + return request.app.state.redis_client diff --git a/backend/lcfs/services/redis/lifetime.py b/backend/lcfs/services/redis/lifetime.py index b0c2177d8..3b7347ea2 100644 --- a/backend/lcfs/services/redis/lifetime.py +++ b/backend/lcfs/services/redis/lifetime.py @@ -1,24 +1,64 @@ +import logging from fastapi import FastAPI -from redis.asyncio import ConnectionPool - +from redis.asyncio import Redis +from redis.exceptions import RedisError, TimeoutError +import asyncio from lcfs.settings import settings +logger = logging.getLogger(__name__) + -def init_redis(app: FastAPI) -> None: # pragma: no cover +async def init_redis(app: FastAPI) -> None: """ - Creates connection pool for redis. + Initializes the Redis client and tests the connection. - :param app: current fastapi application. + :param app: current FastAPI application. """ - app.state.redis_pool = ConnectionPool.from_url( - str(settings.redis_url), - ) + retries = 5 # Retry logic in case Redis is unavailable initially + for i in range(retries): + try: + # Initialize Redis client + app.state.redis_client = Redis( + host=settings.redis_host, + port=settings.redis_port, + password=settings.redis_pass, + db=settings.redis_base or 0, + decode_responses=True, + max_connections=10, + socket_timeout=5, + socket_connect_timeout=5, + ) + + # Test the connection + await app.state.redis_client.ping() + logger.info("Redis client initialized and connection successful.") + break + except TimeoutError as e: + logger.error(f"Redis timeout during initialization attempt {i + 1}: {e}") + if i == retries - 1: + raise + await asyncio.sleep(2**i) # Exponential backoff + except RedisError as e: + logger.error(f"Redis error during initialization attempt {i + 1}: {e}") + if i == retries - 1: + raise + await asyncio.sleep(2**i) # Exponential backoff + except Exception as e: + logger.error(f"Unexpected error during Redis initialization: {e}") + raise -async def shutdown_redis(app: FastAPI) -> None: # pragma: no cover +async def shutdown_redis(app: FastAPI) -> None: """ - Closes redis connection pool. + Closes the Redis client during application shutdown. :param app: current FastAPI app. 
""" - await app.state.redis_pool.disconnect() + try: + if hasattr(app.state, "redis_client") and app.state.redis_client: + await app.state.redis_client.close() + logger.info("Redis client closed successfully.") + except RedisError as e: + logger.error(f"Redis error during shutdown: {e}") + except Exception as e: + logger.error(f"Unexpected error during Redis shutdown: {e}") diff --git a/frontend/src/views/users/.gitKeep b/backend/lcfs/services/s3/__init__.py similarity index 100% rename from frontend/src/views/users/.gitKeep rename to backend/lcfs/services/s3/__init__.py diff --git a/backend/lcfs/services/s3/client.py b/backend/lcfs/services/s3/client.py new file mode 100644 index 000000000..5d25ff689 --- /dev/null +++ b/backend/lcfs/services/s3/client.py @@ -0,0 +1,166 @@ +from fastapi import HTTPException +import os +import uuid +from lcfs.db.models.compliance.ComplianceReportStatus import ComplianceReportStatusEnum +from lcfs.db.models.user.Role import RoleEnum +from lcfs.web.api.compliance_report.services import ComplianceReportServices +from fastapi import Depends +from pydantic.v1 import ValidationError +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession +from lcfs.services.s3.dependency import get_s3_client +from lcfs.db.dependencies import get_async_db_session +from lcfs.db.models.compliance import ComplianceReport +from lcfs.db.models.compliance.ComplianceReport import ( + compliance_report_document_association, +) +from lcfs.db.models.document import Document +from lcfs.services.clamav.client import ClamAVService +from lcfs.settings import settings +from lcfs.web.core.decorators import repo_handler + +BUCKET_NAME = settings.s3_bucket +MAX_FILE_SIZE_MB = 50 +MAX_FILE_SIZE_BYTES = MAX_FILE_SIZE_MB * 1024 * 1024 # Convert MB to bytes + + +class DocumentService: + def __init__( + self, + db: AsyncSession = Depends(get_async_db_session), + clamav_service: ClamAVService = Depends(), + s3_client=Depends(get_s3_client), + compliance_report_service: ComplianceReportServices = Depends(), + ): + self.db = db + self.clamav_service = clamav_service + self.s3_client = s3_client + self.compliance_report_service = compliance_report_service + + @repo_handler + async def upload_file( + self, file, parent_id: str, parent_type="compliance_report", user=None + ): + compliance_report = ( + await self.compliance_report_service.get_compliance_report_by_id(parent_id) + ) + if not compliance_report: + raise HTTPException(status_code=404, detail="Compliance report not found") + + # Check if the user is a supplier and the compliance report status is different from Draft + if ( + RoleEnum.SUPPLIER in user.role_names + and compliance_report.current_status.status + != ComplianceReportStatusEnum.Draft.value + ): + raise HTTPException( + status_code=400, + detail="Suppliers can only upload files when the compliance report status is Draft", + ) + + file_id = uuid.uuid4() + file_key = f"{settings.s3_docs_path}/{parent_type}/{parent_id}/{file_id}" + + # Scan file size + file_size = os.fstat(file.file.fileno()).st_size + + if file_size > MAX_FILE_SIZE_BYTES: + raise ValidationError( + f"File size exceeds the maximum limit of {MAX_FILE_SIZE_MB} MB." 
+ ) + + if settings.clamav_enabled: + self.clamav_service.scan_file(file) + + # Upload file to S3 + self.s3_client.upload_fileobj( + Fileobj=file.file, + Bucket=BUCKET_NAME, + Key=file_key, + ExtraArgs={"ContentType": file.content_type}, + ) + + document = Document( + file_key=file_key, + file_name=file.filename, + file_size=file_size, + mime_type=file.content_type, + ) + + if parent_type == "compliance_report": + compliance_report = await self.db.get(ComplianceReport, parent_id) + if not compliance_report: + raise Exception("Compliance report not found") + + self.db.add(document) + await self.db.flush() + + # Insert the association + stmt = compliance_report_document_association.insert().values( + compliance_report_id=compliance_report.compliance_report_id, + document_id=document.document_id, + ) + await self.db.execute(stmt) + else: + raise ValidationError(f"Invalid Type {parent_type}") + + await self.db.flush() + await self.db.refresh(document) + + return document + + @repo_handler + async def generate_presigned_url(self, document_id: int): + document = await self.db.get_one(Document, document_id) + + if not document: + raise Exception("Document not found") + + presigned_url = self.s3_client.generate_presigned_url( + "get_object", + Params={"Bucket": BUCKET_NAME, "Key": document.file_key}, + ExpiresIn=60, # URL expiration in seconds + ) + return presigned_url + + @repo_handler + async def delete_file(self, document_id: int): + document = await self.db.get_one(Document, document_id) + + if not document: + raise Exception("Document not found") + + # Delete the file from S3 + self.s3_client.delete_object(Bucket=BUCKET_NAME, Key=document.file_key) + + # Delete the entry from the database + await self.db.delete(document) + await self.db.flush() + + @repo_handler + async def get_by_id_and_type(self, parent_id: int, parent_type="compliance_report"): + # Select documents that are associated with the given compliance report ID + if parent_type == "compliance_report": + stmt = ( + select(Document) + .join(compliance_report_document_association) + .where( + compliance_report_document_association.c.compliance_report_id + == parent_id + ) + ) + else: + raise ValidationError(f"Invalid Type for loading Documents {parent_type}") + result = await self.db.execute(stmt) + documents = result.scalars().all() + return documents + + @repo_handler + async def get_object(self, document_id: int): + document = await self.db.get_one(Document, document_id) + + if not document: + raise Exception("Document not found") + + response = self.s3_client.get_object(Bucket=BUCKET_NAME, Key=document.file_key) + return response, document diff --git a/backend/lcfs/services/s3/dependency.py b/backend/lcfs/services/s3/dependency.py new file mode 100644 index 000000000..fc38ed388 --- /dev/null +++ b/backend/lcfs/services/s3/dependency.py @@ -0,0 +1,32 @@ +import boto3 +from typing import Generator +from lcfs.settings import settings + + +def get_s3_client() -> Generator: + """ + Dependency function to provide a synchronous S3 client using boto3. + + This function creates a new S3 client session for each request that requires it. + The client is properly configured with the necessary AWS credentials and + endpoint settings. 
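+
+    Because this is a generator dependency, FastAPI resumes execution after
+    the yield once the request finishes, which is where any cleanup belongs.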
+ + Usage: + >>> def some_endpoint(s3_client = Depends(get_s3_client)): + >>> # Use the s3_client here + """ + # Initialize the S3 client with the required configurations + client = boto3.client( + "s3", + aws_access_key_id=settings.s3_access_key, # Your AWS access key + aws_secret_access_key=settings.s3_secret_key, # Your AWS secret key + endpoint_url=settings.s3_endpoint, # Custom S3 endpoint (if any) + region_name="us-east-1", # AWS region + ) + + try: + # Yield the S3 client to be used within the request scope + yield client + finally: + # boto3 clients do not require explicit closing, but this ensures cleanup if needed + pass diff --git a/backend/lcfs/services/s3/schema.py b/backend/lcfs/services/s3/schema.py new file mode 100644 index 000000000..41e040c49 --- /dev/null +++ b/backend/lcfs/services/s3/schema.py @@ -0,0 +1,15 @@ +from typing import Optional, List, Union +from datetime import datetime, date +from lcfs.web.api.fuel_code.schema import EndUseTypeSchema + +from lcfs.web.api.base import BaseSchema, FilterModel, SortOrder +from lcfs.web.api.base import PaginationResponseSchema +from pydantic import Field, Extra + +class FileResponseSchema(BaseSchema): + document_id: int + file_name: str + file_size: int + +class UrlResponseSchema(BaseSchema): + url: str diff --git a/frontend/src/views/users/__tests__/.gitKeep b/backend/lcfs/services/tfrs/__init__.py similarity index 100% rename from frontend/src/views/users/__tests__/.gitKeep rename to backend/lcfs/services/tfrs/__init__.py diff --git a/backend/lcfs/services/tfrs/redis_balance.py b/backend/lcfs/services/tfrs/redis_balance.py new file mode 100644 index 000000000..a1add9a9a --- /dev/null +++ b/backend/lcfs/services/tfrs/redis_balance.py @@ -0,0 +1,108 @@ +import logging +from datetime import datetime + +from fastapi import FastAPI, Depends +from redis.asyncio import Redis +from sqlalchemy.ext.asyncio import AsyncSession + +from lcfs.db.dependencies import async_engine +from lcfs.services.redis.dependency import get_redis_client +from lcfs.settings import settings +from lcfs.web.api.organizations.repo import OrganizationsRepository +from lcfs.web.api.transaction.repo import TransactionRepository +from lcfs.web.core.decorators import service_handler + +app = FastAPI() +logger = logging.getLogger(__name__) + + +async def init_org_balance_cache(app: FastAPI): + """ + Initialize the organization balance cache and populate it with data. + + :param app: FastAPI application instance. 
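+
+    Cache layout (see set_cache_value below): one Redis key per organization
+    per compliance year, named balance_<organization_id>_<period>.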
+ """ + # Get the Redis client from app state + redis: Redis = app.state.redis_client + + async with AsyncSession(async_engine) as session: + async with session.begin(): + organization_repo = OrganizationsRepository(db=session) + transaction_repo = TransactionRepository(db=session) + + # Get the oldest transaction year + oldest_year = await transaction_repo.get_transaction_start_year() or int( + 2019 + ) + + # Get the current year + current_year = datetime.now().year + logger.info(f"Starting balance cache population for {current_year}") + + # Fetch all organizations + all_orgs = await organization_repo.get_organizations() + + # Loop from the oldest year to the current year + for year in range(int(oldest_year), current_year + 1): + for org in all_orgs: + # Calculate the balance for each organization and year + balance = ( + await transaction_repo.calculate_available_balance_for_period( + org.organization_id, year + ) + ) + # Set the balance in Redis + await set_cache_value(org.organization_id, year, balance, redis) + logger.debug( + f"Set balance for organization {org.name} " + f"for {year} to {balance}" + ) + + logger.info(f"Cache populated with {len(all_orgs)} organizations") + + +class RedisBalanceService: + def __init__( + self, + transaction_repo=Depends(TransactionRepository), + redis_client: Redis = Depends(get_redis_client), + ): + self.transaction_repo = transaction_repo + self.redis_client = redis_client + + @service_handler + async def populate_organization_redis_balance( + self, + organization_id, + ): + # Get the current year + current_year = datetime.now().year + oldest_year = await self.transaction_repo.get_transaction_start_year() + + # Loop from the oldest year to the current year + for year in range(int(oldest_year), current_year + 1): + # Call the function to process transactions for each year + balance = ( + await self.transaction_repo.calculate_available_balance_for_period( + organization_id, year + ) + ) + + await set_cache_value(organization_id, year, balance, self.redis_client) + logger.debug( + f"Set balance for org {organization_id} for {year} to {balance}" + ) + + +async def set_cache_value( + organization_id: int, period: int, balance: int, redis: Redis +) -> None: + """ + Set a cache value in Redis for a specific organization and period. + + :param organization_id: ID of the organization. + :param period: The year or period for which the balance is being set. + :param balance: The balance value to set in the cache. + :param redis: Redis client instance. 
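+
+    Example (illustrative values; assumes the client from init_redis, which
+    sets decode_responses=True):
+
+    >>> await set_cache_value(1, 2024, 500, redis)
+    >>> await redis.get("balance_1_2024")
+    '500'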
+ """ + await redis.set(name=f"balance_{organization_id}_{period}", value=balance) diff --git a/backend/lcfs/settings.py b/backend/lcfs/settings.py index bc4d54807..199ba941d 100644 --- a/backend/lcfs/settings.py +++ b/backend/lcfs/settings.py @@ -30,10 +30,12 @@ class Settings(BaseSettings): host: str = "0.0.0.0" port: int = 8000 - # quantity of workers for uvicorn - workers_count: int = 1 - # Enable uvicorn reloading - reload: bool = True + # Number of Uvicorn workers + workers_count: int = 2 + # Enable Uvicorn reload (True for development, False for production) + reload: bool = False + # App timeout matching OpenShift's ROUTER_DEFAULT_SERVER_TIMEOUT + timeout_keep_alive: int = 30 # Current environment environment: str = "dev" @@ -46,18 +48,49 @@ class Settings(BaseSettings): db_user: str = "lcfs" db_pass: str = "development_only" db_base: str = "lcfs" + db_test: str = "lcfs_test" db_echo: bool = False # Variables for Redis redis_host: str = "localhost" redis_port: int = 6379 + redis_db: str = "lcfs" redis_user: Optional[str] = None - redis_pass: Optional[str] = None + redis_pass: Optional[str] = "development_only" redis_base: Optional[int] = None - # Variables for Keycloak - well_known_endpoint: str = "https://dev.loginproxy.gov.bc.ca/auth/realms/standard/.well-known/openid-configuration" - keycloak_audience: str = "tfrs-on-gold-4308" + # Variables for Keycloak + well_known_endpoint: str = ( + "https://dev.loginproxy.gov.bc.ca/auth/realms/standard/.well-known/openid-configuration" + ) + keycloak_audience: str = "low-carbon-fuel-standard-5147" + + # Variables for S3 + s3_endpoint: str = "http://minio:9000" + s3_bucket: str = "lcfs" + s3_access_key: str = "s3_access_key" + s3_secret_key: str = "development_only" + s3_docs_path: str = "lcfs-docs" + + # Variables for ClamAV + clamav_enabled: bool = False + clamav_host: str = "clamav" + clamav_port: int = 3310 + + # Variables for RabbitMQ + rabbitmq_host: str = "rabbitmq" + rabbitmq_port: int = 5672 + rabbitmq_pass: str = "development_only" + rabbitmq_user: str = "lcfs" + rabbitmq_vhost: str = "lcfs" + rabbitmq_transaction_queue: str = "transaction_queue" + + ches_auth_url: str = "" + ches_email_url: str = "" + ches_client_id: str = "" + ches_client_secret: str = "" + ches_sender_email: str = "noreply@gov.bc.ca" + ches_sender_name: str = "LCFS Notification System" @property def db_url(self) -> URL: @@ -75,6 +108,22 @@ def db_url(self) -> URL: path=f"/{self.db_base}", ) + @property + def db_test_url(self) -> URL: + """ + Assemble database URL from settings. + + :return: database URL. 
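+
+        With the defaults above (db_user="lcfs", db_test="lcfs_test") this
+        yields a URL of the form
+        postgresql+asyncpg://lcfs:<db_pass>@<db_host>:<db_port>/lcfs_test.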
+ """ + return URL.build( + scheme="postgresql+asyncpg", + host=self.db_host, + port=self.db_port, + user=self.db_user, + password=self.db_pass, + path=f"/{self.db_test}", + ) + @property def redis_url(self) -> URL: """ diff --git a/backend/lcfs/start.sh b/backend/lcfs/start.sh index cede867e5..7e19eb1ef 100755 --- a/backend/lcfs/start.sh +++ b/backend/lcfs/start.sh @@ -1,7 +1,10 @@ -#!/bin/bash +#!/usr/bin/env bash + +# Enable strict error handling +set -e # Wait for the database to be ready -./wait-for-it.sh db:5432 --timeout=30 +./wait-for-it.sh $LCFS_DB_HOST:5432 --timeout=30 # Load migrations or other prestart tasks ./lcfs/prestart.sh diff --git a/backend/lcfs/tests/admin_adjustment/test_admin_adjustment.py b/backend/lcfs/tests/admin_adjustment/test_admin_adjustment.py new file mode 100644 index 000000000..67ab867b4 --- /dev/null +++ b/backend/lcfs/tests/admin_adjustment/test_admin_adjustment.py @@ -0,0 +1,122 @@ +from unittest.mock import MagicMock, patch +import pytest +from httpx import AsyncClient +from fastapi import FastAPI, status +from lcfs.db.models.user.Role import RoleEnum + + +@pytest.mark.anyio +async def test_create_admin_adjustment( + client: AsyncClient, fastapi_app: FastAPI, set_mock_user +): + set_mock_user(fastapi_app, [RoleEnum.ANALYST]) + url = fastapi_app.url_path_for("create_admin_adjustment") + admin_adjustment_payload = { + "transactionEffectiveDate": "2024-08-06", + "txnType": "administrativeAdjustment", + "complianceUnits": 1000, + "toOrganizationId": "1", + "govComment": "Initial creation of admin adjustment.", + "internalComment": "

+ my comments
+
", + "currentStatus": "Recommended", + } + response = await client.post(url, json=admin_adjustment_payload) + assert response.status_code == status.HTTP_201_CREATED + + +@pytest.mark.anyio +async def test_get_admin_adjustment( + client: AsyncClient, fastapi_app: FastAPI, set_mock_user +): + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + admin_adjustment_id = 1 # This test assumes that there exists an admin adjustment with ID 1 in the test database. + url = fastapi_app.url_path_for( + "get_admin_adjustment", admin_adjustment_id=admin_adjustment_id + ) + response = await client.get(url) + assert response.status_code == status.HTTP_200_OK + + +@pytest.mark.anyio +async def test_get_admin_adjustment_unauthorized( + client: AsyncClient, fastapi_app: FastAPI, set_mock_user +): + """Test that a user without correct organization access cannot retrieve an admin adjustment.""" + # Set user with an unauthorized role or incorrect organization + set_mock_user(fastapi_app, [RoleEnum.MANAGE_USERS]) # A non-government role + admin_adjustment_id = 1 # Assuming an admin adjustment with ID 1 exists + + # Mock the service to return an admin adjustment belonging to a different organization + mock_admin_adjustment = MagicMock() + mock_admin_adjustment.to_organization.organization_id = 2 # Different organization + with patch( + "lcfs.web.api.admin_adjustment.services.AdminAdjustmentServices.get_admin_adjustment" + ) as mock_get_admin_adjustment: + mock_get_admin_adjustment.return_value = mock_admin_adjustment + + url = fastapi_app.url_path_for( + "get_admin_adjustment", admin_adjustment_id=admin_adjustment_id + ) + response = await client.get(url) + assert response.status_code == status.HTTP_403_FORBIDDEN + + +@pytest.mark.anyio +async def test_get_admin_adjustment_not_found( + client: AsyncClient, fastapi_app: FastAPI, set_mock_user +): + """Test that a 404 error is raised if the admin adjustment does not exist.""" + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) # Authorized role + admin_adjustment_id = 999 # Non-existing admin adjustment ID + + # Mock the service to return None, simulating a missing admin adjustment + with patch( + "lcfs.web.api.admin_adjustment.services.AdminAdjustmentServices.get_admin_adjustment", + ) as mock_get_admin_adjustment: + mock_get_admin_adjustment.return_value = None + + url = fastapi_app.url_path_for( + "get_admin_adjustment", admin_adjustment_id=admin_adjustment_id + ) + response = await client.get(url) + assert response.status_code == status.HTTP_404_NOT_FOUND + + +@pytest.mark.anyio +async def test_update_admin_adjustment( + client: AsyncClient, fastapi_app: FastAPI, set_mock_user +): + set_mock_user(fastapi_app, [RoleEnum.ANALYST, RoleEnum.GOVERNMENT]) + url = fastapi_app.url_path_for("update_admin_adjustment") + admin_adjustment_update_payload = { + "transactionEffectiveDate": "2024-08-06", + "txnType": "administrativeAdjustment", + "complianceUnits": 1500, + "toOrganizationId": "1", + "govComment": None, + "internalComment": "", + "currentStatus": "Draft", + "adminAdjustmentId": "1", + } + response = await client.put(url, json=admin_adjustment_update_payload) + assert response.status_code == status.HTTP_202_ACCEPTED + + +@pytest.mark.anyio +async def test_fail_update_processed_admin_adjustment( + client: AsyncClient, fastapi_app: FastAPI, set_mock_user +): + set_mock_user(fastapi_app, [RoleEnum.DIRECTOR, RoleEnum.GOVERNMENT]) + url = fastapi_app.url_path_for("update_admin_adjustment") + admin_adjustment_update_payload = { + "transactionEffectiveDate": 
"2024-08-06", + "txnType": "administrativeAdjustment", + "complianceUnits": 1500, + "toOrganizationId": "1", + "govComment": None, + "internalComment": "", + "currentStatus": "Draft", + "adminAdjustmentId": "2", + } + response = await client.put(url, json=admin_adjustment_update_payload) + assert response.status_code == status.HTTP_403_FORBIDDEN diff --git a/backend/lcfs/tests/audit_log/test_audit_log_repo.py b/backend/lcfs/tests/audit_log/test_audit_log_repo.py new file mode 100644 index 000000000..02cab9e6c --- /dev/null +++ b/backend/lcfs/tests/audit_log/test_audit_log_repo.py @@ -0,0 +1,89 @@ +import pytest +from unittest.mock import AsyncMock, MagicMock +from lcfs.web.api.audit_log.repo import AuditLogRepository +from lcfs.db.models.audit.AuditLog import AuditLog + + +@pytest.fixture +def mock_db(): + return AsyncMock() + + +@pytest.fixture +def audit_log_repo(mock_db): + repo = AuditLogRepository() + repo.db = mock_db + return repo + + +@pytest.mark.anyio +async def test_get_audit_logs_paginated_success(audit_log_repo, mock_db): + # Arrange + expected_audit_logs = [AuditLog(audit_log_id=1), AuditLog(audit_log_id=2)] + expected_total_count = 2 + + # Mock total_count_result for count query + mock_total_count_result = MagicMock() + mock_total_count_result.scalar_one.return_value = expected_total_count + + # Mock result for the data query + mock_result = MagicMock() + mock_scalars = MagicMock() + mock_scalars.all.return_value = expected_audit_logs + mock_result.scalars.return_value = mock_scalars + + # Mock execute to return the total count result and the data result + mock_db.execute.side_effect = [mock_total_count_result, mock_result] + + # Act + offset = 0 + limit = 10 + conditions = [] + sort_orders = [] + audit_logs, total_count = await audit_log_repo.get_audit_logs_paginated( + offset, limit, conditions, sort_orders + ) + + # Assert + assert audit_logs == expected_audit_logs + assert total_count == expected_total_count + assert ( + mock_db.execute.call_count == 2 + ) # One for the count query, one for the data query + + +@pytest.mark.anyio +async def test_get_audit_log_by_id_success(audit_log_repo, mock_db): + # Arrange + audit_log_id = 1 + expected_audit_log = AuditLog(audit_log_id=audit_log_id) + + # Mock result for the query + mock_result = MagicMock() + mock_result.scalar_one_or_none.return_value = expected_audit_log + mock_db.execute.return_value = mock_result + + # Act + result = await audit_log_repo.get_audit_log_by_id(audit_log_id) + + # Assert + assert result == expected_audit_log + mock_db.execute.assert_called_once() + + +@pytest.mark.anyio +async def test_get_audit_log_by_id_not_found(audit_log_repo, mock_db): + # Arrange + audit_log_id = 999 + + # Mock result for the query to return None + mock_result = MagicMock() + mock_result.scalar_one_or_none.return_value = None + mock_db.execute.return_value = mock_result + + # Act + result = await audit_log_repo.get_audit_log_by_id(audit_log_id) + + # Assert + assert result is None + mock_db.execute.assert_called_once() diff --git a/backend/lcfs/tests/audit_log/test_audit_log_services.py b/backend/lcfs/tests/audit_log/test_audit_log_services.py new file mode 100644 index 000000000..92dbee85a --- /dev/null +++ b/backend/lcfs/tests/audit_log/test_audit_log_services.py @@ -0,0 +1,141 @@ +import pytest +from unittest.mock import AsyncMock +from lcfs.web.api.audit_log.services import AuditLogService +from lcfs.web.api.audit_log.repo import AuditLogRepository +from lcfs.web.api.audit_log.schema import ( + AuditLogListSchema, + 
AuditLogSchema, +) +from lcfs.web.api.base import ( + PaginationRequestSchema, + FilterModel, +) +from lcfs.db.models.audit.AuditLog import AuditLog +from lcfs.web.exception.exceptions import DataNotFoundException + + +@pytest.fixture +def mock_repo(): + return AsyncMock(spec=AuditLogRepository) + + +@pytest.fixture +def audit_log_service(mock_repo): + service = AuditLogService() + service.repo = mock_repo + return service + + +@pytest.mark.anyio +async def test_get_audit_logs_paginated_success(audit_log_service, mock_repo): + # Arrange + pagination = PaginationRequestSchema(page=1, size=10, filters=[], sort_orders=[]) + expected_audit_logs = [ + AuditLog( + audit_log_id=1, + table_name="users", + operation="INSERT", + row_id=123, + delta={"name": "John Doe"}, + create_date="2023-11-01", + create_user="admin", + ), + AuditLog( + audit_log_id=2, + table_name="orders", + operation="UPDATE", + row_id=456, + delta={"status": "completed"}, + create_date="2023-11-02", + create_user="manager", + ), + ] + expected_total_count = 2 + mock_repo.get_audit_logs_paginated.return_value = ( + expected_audit_logs, + expected_total_count, + ) + + # Act + result = await audit_log_service.get_audit_logs_paginated(pagination) + + # Assert + assert isinstance(result, AuditLogListSchema) + assert len(result.audit_logs) == 2 + assert result.pagination.total == expected_total_count + mock_repo.get_audit_logs_paginated.assert_called_once() + + +@pytest.mark.anyio +async def test_get_audit_logs_paginated_no_data(audit_log_service, mock_repo): + # Arrange + pagination = PaginationRequestSchema(page=1, size=10, filters=[], sort_orders=[]) + mock_repo.get_audit_logs_paginated.return_value = ([], 0) + + # Act & Assert + with pytest.raises(DataNotFoundException): + await audit_log_service.get_audit_logs_paginated(pagination) + + +@pytest.mark.anyio +async def test_get_audit_log_by_id_success(audit_log_service, mock_repo): + # Arrange + audit_log_id = 1 + expected_audit_log = AuditLog( + audit_log_id=audit_log_id, + table_name="users", + operation="INSERT", + row_id=123, + delta={"name": "John Doe"}, + create_date="2023-11-01", + create_user="admin", + ) + mock_repo.get_audit_log_by_id.return_value = expected_audit_log + + # Act + result = await audit_log_service.get_audit_log_by_id(audit_log_id) + + # Assert + assert isinstance(result, AuditLogSchema) + assert result.audit_log_id == audit_log_id + assert result.table_name == "users" + mock_repo.get_audit_log_by_id.assert_called_once_with(audit_log_id) + + +@pytest.mark.anyio +async def test_get_audit_log_by_id_not_found(audit_log_service, mock_repo): + # Arrange + audit_log_id = 999 + mock_repo.get_audit_log_by_id.return_value = None + + # Act & Assert + with pytest.raises(DataNotFoundException): + await audit_log_service.get_audit_log_by_id(audit_log_id) + + +@pytest.mark.anyio +async def test_apply_audit_log_filters(audit_log_service): + # Arrange + pagination = PaginationRequestSchema( + page=1, + size=10, + filters=[ + FilterModel( + field="operation", filter_type="text", type="equals", filter="UPDATE" + ), + FilterModel( + field="createDate", + filter_type="date", + type="greaterThan", + date_from="2021-01-01", + ), + ], + sort_orders=[], + ) + conditions = [] + + # Act + audit_log_service.apply_audit_log_filters(pagination, conditions) + + # Assert + assert len(conditions) == 2 # Two filters applied diff --git a/backend/lcfs/tests/audit_log/test_audit_log_views.py b/backend/lcfs/tests/audit_log/test_audit_log_views.py new file mode 100644 index 000000000..0f38fde72 
--- /dev/null +++ b/backend/lcfs/tests/audit_log/test_audit_log_views.py @@ -0,0 +1,129 @@ +import pytest +from unittest.mock import patch +from httpx import AsyncClient +from fastapi import FastAPI + +from lcfs.web.api.audit_log.schema import ( + AuditLogListSchema, + AuditLogSchema, +) +from lcfs.db.models.user.Role import RoleEnum +from lcfs.web.exception.exceptions import DataNotFoundException + + +@pytest.mark.anyio +async def test_get_audit_logs_paginated_success( + client: AsyncClient, fastapi_app: FastAPI, set_mock_user +): + with patch( + "lcfs.web.api.audit_log.views.AuditLogService.get_audit_logs_paginated" + ) as mock_service: + # Arrange + mock_service.return_value = AuditLogListSchema( + audit_logs=[ + { + "audit_log_id": 1, + "table_name": "users", + "operation": "INSERT", + "row_id": 101, + "create_date": "2023-01-01T12:00:00", + "create_user": "admin", + }, + { + "audit_log_id": 2, + "table_name": "orders", + "operation": "UPDATE", + "row_id": 202, + "create_date": "2023-01-02T13:00:00", + "create_user": "manager", + }, + ], + pagination={ + "total": 2, + "page": 1, + "size": 10, + "total_pages": 1, + }, + ) + set_mock_user(fastapi_app, [RoleEnum.ADMINISTRATOR]) + + url = fastapi_app.url_path_for("get_audit_logs_paginated") + payload = {"page": 1, "size": 10, "filters": [], "sortOrders": []} + + # Act + response = await client.post(url, json=payload) + + # Assert + assert response.status_code == 200 + data = response.json() + assert data["pagination"]["total"] == 2 + assert len(data["auditLogs"]) == 2 + mock_service.assert_called_once() + + +@pytest.mark.anyio +async def test_get_audit_logs_paginated_forbidden( + client: AsyncClient, fastapi_app: FastAPI, set_mock_user +): + set_mock_user(fastapi_app, [RoleEnum.SUPPLIER]) # Insufficient permissions + + url = fastapi_app.url_path_for("get_audit_logs_paginated") + payload = {"page": 1, "size": 10, "filters": [], "sortOrders": []} + + response = await client.post(url, json=payload) + + assert response.status_code == 403 # Forbidden + + +@pytest.mark.anyio +async def test_get_audit_log_by_id_success( + client: AsyncClient, fastapi_app: FastAPI, set_mock_user +): + with patch( + "lcfs.web.api.audit_log.views.AuditLogService.get_audit_log_by_id" + ) as mock_service: + # Arrange + audit_log_id = 1 + mock_service.return_value = AuditLogSchema( + audit_log_id=audit_log_id, + table_name="users", + operation="UPDATE", + row_id=101, + create_date="2023-01-01T12:00:00", + create_user="admin", + ) + set_mock_user(fastapi_app, [RoleEnum.ADMINISTRATOR]) + + url = fastapi_app.url_path_for("get_audit_log_by_id", audit_log_id=audit_log_id) + + # Act + response = await client.get(url) + + # Assert + assert response.status_code == 200 + data = response.json() + assert data["auditLogId"] == audit_log_id + assert data["tableName"] == "users" + mock_service.assert_called_once_with(audit_log_id) + + +@pytest.mark.anyio +async def test_get_audit_log_by_id_not_found( + client: AsyncClient, fastapi_app: FastAPI, set_mock_user +): + with patch( + "lcfs.web.api.audit_log.views.AuditLogService.get_audit_log_by_id" + ) as mock_service: + # Arrange + audit_log_id = 999 + mock_service.side_effect = DataNotFoundException("Audit log not found") + set_mock_user(fastapi_app, [RoleEnum.ADMINISTRATOR]) + + url = fastapi_app.url_path_for("get_audit_log_by_id", audit_log_id=audit_log_id) + + # Act + response = await client.get(url) + + # Assert + assert response.status_code == 404 + mock_service.assert_called_once_with(audit_log_id) diff --git 
a/backend/lcfs/tests/compliance_report/conftest.py b/backend/lcfs/tests/compliance_report/conftest.py new file mode 100644 index 000000000..58fccd340 --- /dev/null +++ b/backend/lcfs/tests/compliance_report/conftest.py @@ -0,0 +1,312 @@ +import uuid +import pytest +from datetime import datetime +from unittest.mock import AsyncMock, MagicMock +from lcfs.web.api.compliance_report.repo import ComplianceReportRepository +from lcfs.web.api.compliance_report.services import ComplianceReportServices +from lcfs.web.api.compliance_report.summary_service import ( + ComplianceReportSummaryService, +) +from lcfs.web.api.compliance_report.update_service import ( + ComplianceReportUpdateService, +) + +from lcfs.web.api.compliance_report.schema import ( + ComplianceReportBaseSchema, + CompliancePeriodSchema, + ComplianceReportOrganizationSchema, + SummarySchema, + ComplianceReportStatusSchema, + ComplianceReportHistorySchema, + ComplianceReportUserSchema, + ComplianceReportSummarySchema, + ComplianceReportSummaryRowSchema, + ComplianceReportListSchema, + ComplianceReportCreateSchema, +) +from lcfs.web.api.notional_transfer.services import NotionalTransferServices +from lcfs.web.api.transaction.repo import TransactionRepository +from lcfs.web.api.base import PaginationResponseSchema +from lcfs.web.api.fuel_supply.repo import FuelSupplyRepository +from lcfs.web.api.fuel_export.repo import FuelExportRepository + + +@pytest.fixture +def compliance_period_schema(): + return CompliancePeriodSchema( + compliance_period_id=1, + description="2024", + effective_date=datetime(2024, 1, 1), + expiration_date=datetime(2024, 3, 31), + display_order=1, + ) + + +@pytest.fixture +def compliance_report_organization_schema(): + return ComplianceReportOrganizationSchema( + organization_id=1, name="Acme Corporation" + ) + + +@pytest.fixture +def compliance_report_status_schema(): + return ComplianceReportStatusSchema(compliance_report_status_id=1, status="Draft") + + +@pytest.fixture +def summary_schema(): + return SummarySchema(summary_id=1, is_locked=False) + + +@pytest.fixture +def compliance_report_user_schema(compliance_report_organization_schema): + return ComplianceReportUserSchema( + first_name="John", + last_name="Doe", + organization=compliance_report_organization_schema, + ) + + +@pytest.fixture +def compliance_report_history_schema( + compliance_report_status_schema, compliance_report_user_schema +): + return ComplianceReportHistorySchema( + compliance_report_history_id=1, + compliance_report_id=1, + status=compliance_report_status_schema, + user_profile=compliance_report_user_schema, + create_date=datetime(2024, 4, 1, 12, 0, 0), + ) + + +@pytest.fixture +def compliance_report_base_schema( + compliance_period_schema, + compliance_report_organization_schema, + summary_schema, + compliance_report_status_schema, + compliance_report_history_schema, +): + def _create_compliance_report_base_schema( + compliance_report_id: int = 1, + compliance_period_id: int = None, + compliance_period: CompliancePeriodSchema = None, + organization_id: int = None, + organization: ComplianceReportOrganizationSchema = None, + summary: SummarySchema = None, + current_status_id: int = None, + current_status: ComplianceReportStatusSchema = None, + transaction_id: int = None, + nickname: str = "Annual Compliance", + supplemental_note: str = "Initial submission.", + update_date: datetime = datetime(2024, 4, 1, 12, 0, 0), + history: list = None, + compliance_report_group_uuid: str = None, + version: int = 0, + supplemental_initiator: str = 
None, + has_supplemental: bool = False, + ): + # Assign default values from dependent fixtures if not overridden + compliance_period_id = ( + compliance_period_id or compliance_period_schema.compliance_period_id + ) + compliance_period = compliance_period or compliance_period_schema + organization_id = ( + organization_id or compliance_report_organization_schema.organization_id + ) + organization = organization or compliance_report_organization_schema + summary = summary or summary_schema + current_status_id = ( + current_status_id + or compliance_report_status_schema.compliance_report_status_id + ) + current_status = current_status or compliance_report_status_schema + history = history or [compliance_report_history_schema] + compliance_report_group_uuid = compliance_report_group_uuid or str(uuid.uuid4()) + supplemental_initiator = supplemental_initiator + + return ComplianceReportBaseSchema( + compliance_report_id=compliance_report_id, + compliance_period_id=compliance_period_id, + compliance_period=compliance_period, + organization_id=organization_id, + organization=organization, + summary=summary, + current_status_id=current_status_id, + current_status=current_status, + transaction_id=transaction_id, + nickname=nickname, + supplemental_note=supplemental_note, + update_date=update_date, + history=history, + compliance_report_group_uuid=compliance_report_group_uuid, + version=version, + supplemental_initiator=supplemental_initiator, + has_supplemental=has_supplemental, + ) + + return _create_compliance_report_base_schema + + +@pytest.fixture +def compliance_report_summary_row_schema(): + def _create_compliance_report_summary_row( + line="", + description="", + field="", + gasoline=0, + diesel=0, + jet_fuel=0, + value=0, + total_value=0, + format="", + ): + return ComplianceReportSummaryRowSchema( + line=line, + description=description, + field=field, + gasoline=gasoline, + diesel=diesel, + jet_fuel=jet_fuel, + value=value, + total_value=total_value, + format=format, + ) + + return _create_compliance_report_summary_row + + +@pytest.fixture +def compliance_report_summary_schema( + summary_schema, compliance_report_base_schema, compliance_report_summary_row_schema +): + mock_renewable_fuel_target_summary = [compliance_report_summary_row_schema()] + mock_low_carbon_fuel_target_summary = [compliance_report_summary_row_schema()] + mock_non_compliance_penalty_summary = [compliance_report_summary_row_schema()] + + def _create_compliance_report_summary( + renewable_fuel_target_summary=mock_renewable_fuel_target_summary, + low_carbon_fuel_target_summary=mock_low_carbon_fuel_target_summary, + non_compliance_penalty_summary=mock_non_compliance_penalty_summary, + summary_id=summary_schema.summary_id, + compliance_report_id=compliance_report_base_schema().compliance_report_id, + version=1, + is_locked=False, + quarter=None, + can_sign=False + ): + + return ComplianceReportSummarySchema( + renewable_fuel_target_summary=renewable_fuel_target_summary, + low_carbon_fuel_target_summary=low_carbon_fuel_target_summary, + non_compliance_penalty_summary=non_compliance_penalty_summary, + summary_id=summary_id, + compliance_report_id=compliance_report_id, + version=version, + is_locked=is_locked, + quarter=quarter, + can_sign=can_sign + ) + + return _create_compliance_report_summary + + +@pytest.fixture +def compliance_report_list_schema(compliance_report_base_schema): + return ComplianceReportListSchema( + pagination=PaginationResponseSchema(total=100, page=1, size=10, total_pages=10), + reports=[ + 
compliance_report_base_schema(), + # Add more ComplianceReportBaseSchema instances if needed + ], + ) + + +@pytest.fixture +def compliance_report_create_schema(): + return ComplianceReportCreateSchema( + compliance_period="2024", organization_id=1, status="Draft" + ) + + +@pytest.fixture +def mock_repo(): + repo = AsyncMock(spec=ComplianceReportRepository) + return repo + + +@pytest.fixture +def mock_trxn_repo(): + trxn_repo = AsyncMock(spec=TransactionRepository) + return trxn_repo + + +@pytest.fixture +def mock_fuel_supply_repo(): + mock_repo = AsyncMock(spec=FuelSupplyRepository) + mock_repo.get_effective_fuel_supplies = AsyncMock(return_value=[]) + return mock_repo + + +@pytest.fixture +def mock_fuel_export_repo(): + return AsyncMock(spec=FuelExportRepository) + + +@pytest.fixture +def compliance_report_summary_service( + mock_repo, + mock_trxn_repo, + mock_notional_transfer_service, + mock_fuel_supply_repo, + mock_fuel_export_repo, +): + service = ComplianceReportSummaryService() + service.repo = mock_repo + service.trxn_repo = mock_trxn_repo + service.notional_transfer_service = mock_notional_transfer_service + service.fuel_supply_repo = mock_fuel_supply_repo + service.fuel_export_repo = mock_fuel_export_repo + return service + + +@pytest.fixture +def compliance_report_update_service( + mock_repo, compliance_report_summary_service, mock_user_profile +): + service = ComplianceReportUpdateService() + service.repo = mock_repo + service.summary_service = compliance_report_summary_service + service.request = MagicMock() + service.request.user = mock_user_profile + return service + + +@pytest.fixture +def compliance_report_service(mock_user_profile, mock_repo): + service = ComplianceReportServices() + service.repo = mock_repo + service.request = MagicMock() + service.request.user = mock_user_profile + return service + + +@pytest.fixture +def mock_notional_transfer_service(): + return AsyncMock(spec=NotionalTransferServices) + + +@pytest.fixture +def compliance_report_repo(dbsession): + # Create a mock for fuel_supply_repo + fuel_supply_repo_mock = MagicMock() + fuel_supply_repo_mock.get_effective_fuel_supplies = AsyncMock() + + # Create an instance of ComplianceReportRepository with the mock + repo = ComplianceReportRepository( + db=dbsession, fuel_supply_repo=fuel_supply_repo_mock + ) + + return repo diff --git a/backend/lcfs/tests/compliance_report/test_compliance_report_repo.py b/backend/lcfs/tests/compliance_report/test_compliance_report_repo.py new file mode 100644 index 000000000..fdce6f9ee --- /dev/null +++ b/backend/lcfs/tests/compliance_report/test_compliance_report_repo.py @@ -0,0 +1,1001 @@ +from unittest.mock import MagicMock, AsyncMock + +import pytest +from datetime import datetime +import uuid +from lcfs.db.models.compliance import ( + CompliancePeriod, + ComplianceReport, + ComplianceReportStatus, + ComplianceReportHistory, + ComplianceReportSummary, + FuelSupply, +) +from lcfs.web.api.compliance_report.schema import ComplianceReportSummarySchema +from lcfs.web.api.base import ( + PaginationRequestSchema, +) +from lcfs.db.base import UserTypeEnum +from lcfs.db.models.organization import Organization +from lcfs.web.exception.exceptions import DatabaseException +from lcfs.web.api.compliance_report.schema import ComplianceReportBaseSchema +from lcfs.db.models.user import UserProfile +from lcfs.db.models.fuel import ( + FuelType, + FuelCategory, + ExpectedUseType, +) +from lcfs.db.models.transfer import Transfer +from lcfs.db.models.initiative_agreement import 
InitiativeAgreement +from lcfs.db.models.compliance.ComplianceReport import ReportingFrequency + + +# Fixtures +@pytest.fixture +async def expected_uses(dbsession): + expected_uses = [ + ExpectedUseType(expected_use_type_id=998, name="Heating Oil"), + ExpectedUseType(expected_use_type_id=999, name="Other"), + ] + + dbsession.add_all(expected_uses) + await dbsession.commit() + for expected_use in expected_uses: + await dbsession.refresh(expected_use) + return expected_uses + + +@pytest.fixture +async def fuel_categories(dbsession): + fuel_categories = [ + FuelCategory( + fuel_category_id=998, category="Gasoline", default_carbon_intensity=0 + ), + FuelCategory( + fuel_category_id=999, category="Diesel", default_carbon_intensity=0 + ), + ] + + dbsession.add_all(fuel_categories) + await dbsession.commit() + for fuel_category in fuel_categories: + await dbsession.refresh(fuel_category) + return fuel_categories + + +@pytest.fixture +async def fuel_types(dbsession): + fuel_types = [ + FuelType( + fuel_type_id=996, + fuel_type="Fossil-derived diesel", + units="Litres", + unrecognized=False, + fossil_derived=True, + other_uses_fossil_derived=True, + ), + FuelType( + fuel_type_id=997, + fuel_type="Fossil-derived gasoline", + units="Litres", + unrecognized=False, + fossil_derived=True, + other_uses_fossil_derived=True, + ), + FuelType( + fuel_type_id=998, + fuel_type="Biodiesel", + units="Litres", + unrecognized=False, + fossil_derived=False, + ), + FuelType( + fuel_type_id=999, + fuel_type="Electricity", + units="Litres", + unrecognized=False, + fossil_derived=False, + ), + ] + + dbsession.add_all(fuel_types) + await dbsession.commit() + for fuel_type in fuel_types: + await dbsession.refresh(fuel_type) + return fuel_types + + +@pytest.fixture +async def users(dbsession): + users = [ + UserProfile(user_profile_id=998, keycloak_username="user998", is_active=True), + UserProfile(user_profile_id=999, keycloak_username="user999", is_active=True), + ] + + dbsession.add_all(users) + await dbsession.commit() + for user in users: + await dbsession.refresh(user) + return users + + +@pytest.fixture +async def organizations(dbsession): + orgs = [ + Organization( + organization_id=998, + organization_code="o998", + total_balance=0, + reserved_balance=0, + count_transfers_in_progress=0, + name="org998", + ), + Organization( + organization_id=999, + organization_code="o999", + total_balance=0, + reserved_balance=0, + count_transfers_in_progress=0, + name="org999", + ), + ] + dbsession.add_all(orgs) + await dbsession.commit() + for org in orgs: + await dbsession.refresh(org) + return orgs + + +@pytest.fixture +async def compliance_periods(dbsession): + periods = [ + CompliancePeriod(compliance_period_id=998, description="998"), + CompliancePeriod(compliance_period_id=999, description="999"), + ] + dbsession.add_all(periods) + await dbsession.commit() + for period in periods: + await dbsession.refresh(period) + return periods + + +@pytest.fixture +async def compliance_report_statuses(dbsession): + statuses = [ + ComplianceReportStatus(compliance_report_status_id=997, status="Assessed"), + ComplianceReportStatus(compliance_report_status_id=998, status="Draft"), + ComplianceReportStatus( + compliance_report_status_id=999, status="Recommended_by_analyst" + ), + ] + dbsession.add_all(statuses) + await dbsession.commit() + for status in statuses: + await dbsession.refresh(status) + return statuses + + +@pytest.fixture +async def compliance_reports( + dbsession, organizations, compliance_periods, 
compliance_report_statuses +): + reports = [ + ComplianceReport( + compliance_report_id=994, + compliance_period_id=compliance_periods[0].compliance_period_id, + organization_id=organizations[0].organization_id, + reporting_frequency=ReportingFrequency.ANNUAL, + current_status_id=compliance_report_statuses[0].compliance_report_status_id, + compliance_report_group_uuid=str(uuid.uuid4()), + version=1, + ), + ComplianceReport( + compliance_report_id=995, + compliance_period_id=compliance_periods[1].compliance_period_id, + organization_id=organizations[1].organization_id, + reporting_frequency=ReportingFrequency.ANNUAL, + current_status_id=compliance_report_statuses[1].compliance_report_status_id, + compliance_report_group_uuid=str(uuid.uuid4()), + version=1, + ), + ] + dbsession.add_all(reports) + await dbsession.commit() + for report in reports: + await dbsession.refresh(report) + return reports + + +@pytest.fixture +async def supplemental_reports( + compliance_reports, + dbsession, + organizations, + compliance_report_statuses, + compliance_periods, +): + reports = [ + ComplianceReport( + compliance_report_id=996, + compliance_period_id=compliance_periods[0].compliance_period_id, + organization_id=organizations[0].organization_id, + current_status_id=compliance_report_statuses[0].compliance_report_status_id, + compliance_report_group_uuid=compliance_reports[ + 0 + ].compliance_report_group_uuid, + version=2, + reporting_frequency=ReportingFrequency.ANNUAL, + ), + ComplianceReport( + compliance_report_id=997, + compliance_period_id=compliance_periods[0].compliance_period_id, + organization_id=organizations[0].organization_id, + current_status_id=compliance_report_statuses[1].compliance_report_status_id, + compliance_report_group_uuid=compliance_reports[ + 0 + ].compliance_report_group_uuid, + version=3, + reporting_frequency=ReportingFrequency.ANNUAL, + ), + ComplianceReport( + compliance_report_id=998, + compliance_period_id=compliance_periods[1].compliance_period_id, + organization_id=organizations[1].organization_id, + current_status_id=compliance_report_statuses[0].compliance_report_status_id, + compliance_report_group_uuid=compliance_reports[ + 1 + ].compliance_report_group_uuid, + version=2, + reporting_frequency=ReportingFrequency.ANNUAL, + ), + ComplianceReport( + compliance_report_id=999, + compliance_period_id=compliance_periods[1].compliance_period_id, + organization_id=organizations[1].organization_id, + current_status_id=compliance_report_statuses[1].compliance_report_status_id, + compliance_report_group_uuid=compliance_reports[ + 1 + ].compliance_report_group_uuid, + version=3, + reporting_frequency=ReportingFrequency.ANNUAL, + ), + ] + dbsession.add_all(reports) + await dbsession.commit() + for report in reports: + await dbsession.refresh(report) + return reports + + +@pytest.fixture +async def compliance_report_summaries( + dbsession, compliance_reports, supplemental_reports +): + summaries = [ + ComplianceReportSummary( + summary_id=996, + compliance_report_id=compliance_reports[0].compliance_report_id, + ), + ComplianceReportSummary( + summary_id=997, + compliance_report_id=compliance_reports[1].compliance_report_id, + ), + ComplianceReportSummary( + summary_id=998, + compliance_report_id=supplemental_reports[0].compliance_report_id, + ), + ComplianceReportSummary( + summary_id=999, + compliance_report_id=supplemental_reports[1].compliance_report_id, + ), + ] + + dbsession.add_all(summaries) + await dbsession.commit() + for summary in summaries: + await 
dbsession.refresh(summary) + return summaries + + +date = datetime.strptime("2024-01-01", "%Y-%m-%d").date() + + +@pytest.fixture +async def transfers(dbsession, organizations): + + transfers = [ + Transfer( + transfer_id=992, + agreement_date=date, + from_organization_id=organizations[0].organization_id, + current_status_id=6, + quantity=1, + ), + Transfer( + transfer_id=993, + agreement_date=date, + from_organization_id=organizations[0].organization_id, + current_status_id=6, + quantity=1, + ), + Transfer( + transfer_id=994, + agreement_date=date, + from_organization_id=organizations[1].organization_id, + current_status_id=6, + quantity=1, + ), + Transfer( + transfer_id=995, + agreement_date=date, + from_organization_id=organizations[1].organization_id, + current_status_id=6, + quantity=1, + ), + Transfer( + transfer_id=996, + agreement_date=date, + to_organization_id=organizations[0].organization_id, + current_status_id=6, + quantity=1, + ), + Transfer( + transfer_id=997, + agreement_date=date, + to_organization_id=organizations[0].organization_id, + current_status_id=6, + quantity=1, + ), + Transfer( + transfer_id=998, + agreement_date=date, + to_organization_id=organizations[1].organization_id, + current_status_id=6, + quantity=1, + ), + Transfer( + transfer_id=999, + agreement_date=date, + to_organization_id=organizations[1].organization_id, + current_status_id=6, + quantity=1, + ), + ] + + dbsession.add_all(transfers) + await dbsession.commit() + for transfer in transfers: + await dbsession.refresh(transfer) + return transfers + + +@pytest.fixture +async def initiative_agreements(dbsession, organizations): + initiative_agreements = [ + InitiativeAgreement( + initiative_agreement_id=996, + compliance_units=1, + transaction_effective_date=date, + to_organization_id=organizations[0].organization_id, + current_status_id=3, + ), + InitiativeAgreement( + initiative_agreement_id=997, + compliance_units=1, + transaction_effective_date=date, + to_organization_id=organizations[0].organization_id, + current_status_id=3, + ), + InitiativeAgreement( + initiative_agreement_id=998, + compliance_units=1, + transaction_effective_date=date, + to_organization_id=organizations[1].organization_id, + current_status_id=3, + ), + InitiativeAgreement( + initiative_agreement_id=999, + compliance_units=1, + transaction_effective_date=date, + to_organization_id=organizations[1].organization_id, + current_status_id=3, + ), + ] + + dbsession.add_all(initiative_agreements) + await dbsession.commit() + for initiative_agreement in initiative_agreements: + await dbsession.refresh(initiative_agreement) + + return initiative_agreements + + +@pytest.fixture +async def fuel_supplies(dbsession, compliance_reports, fuel_categories, fuel_types): + fuel_supplies = [ + FuelSupply( + fuel_supply_id=996, + compliance_report_id=compliance_reports[0].compliance_report_id, + quantity=1, + units="Litres", + fuel_category_id=fuel_categories[0].fuel_category_id, + fuel_type_id=fuel_types[0].fuel_type_id, + provision_of_the_act_id=1, + user_type=UserTypeEnum.SUPPLIER, + ), + FuelSupply( + fuel_supply_id=997, + compliance_report_id=compliance_reports[0].compliance_report_id, + quantity=1, + units="Litres", + fuel_category_id=fuel_categories[1].fuel_category_id, + fuel_type_id=fuel_types[1].fuel_type_id, + provision_of_the_act_id=1, + user_type=UserTypeEnum.SUPPLIER, + ), + FuelSupply( + fuel_supply_id=998, + compliance_report_id=compliance_reports[0].compliance_report_id, + quantity=1, + units="Litres", + 
fuel_category_id=fuel_categories[0].fuel_category_id, + fuel_type_id=fuel_types[2].fuel_type_id, + provision_of_the_act_id=1, + user_type=UserTypeEnum.SUPPLIER, + ), + FuelSupply( + fuel_supply_id=999, + compliance_report_id=compliance_reports[0].compliance_report_id, + quantity=1, + units="Litres", + fuel_category_id=fuel_categories[1].fuel_category_id, + fuel_type_id=fuel_types[3].fuel_type_id, + provision_of_the_act_id=1, + user_type=UserTypeEnum.SUPPLIER, + ), + ] + + dbsession.add_all(fuel_supplies) + await dbsession.commit() + for fuel_supply in fuel_supplies: + await dbsession.refresh(fuel_supply) + return fuel_supplies + + +# Tests +@pytest.mark.anyio +async def test_get_all_compliance_periods(compliance_report_repo, compliance_periods): + + periods = await compliance_report_repo.get_all_compliance_periods() + assert ( + next( + ( + period + for period in periods + if period.description == compliance_periods[0].description + ), + False, + ) + == compliance_periods[0] + ) + + +@pytest.mark.anyio +async def test_get_compliance_period_success( + compliance_report_repo, compliance_periods +): + + period = await compliance_report_repo.get_compliance_period( + period=compliance_periods[0].description + ) + + assert isinstance(period, CompliancePeriod) + assert period == compliance_periods[0] + + +@pytest.mark.anyio +async def test_get_compliance_period_not_found(compliance_report_repo): + period = await compliance_report_repo.get_compliance_period(period="1000") + + assert period is None + + +@pytest.mark.anyio +async def test_get_compliance_report_success( + compliance_report_repo, compliance_reports +): + + report = await compliance_report_repo.get_compliance_report_by_id( + report_id=compliance_reports[0].compliance_report_id, is_model=True + ) + assert isinstance(report, ComplianceReport) + assert report.compliance_report_id == compliance_reports[0].compliance_report_id + + +@pytest.mark.anyio +async def test_get_compliance_report_not_found(compliance_report_repo): + + report = await compliance_report_repo.get_compliance_report_by_id( + report_id=1000, is_model=True + ) + + assert report is None + + +@pytest.mark.anyio +async def test_get_compliance_report_status_by_desc_success( + compliance_report_repo, compliance_report_statuses +): + status = await compliance_report_repo.get_compliance_report_status_by_desc( + status="Recommended by analyst" + ) + + assert isinstance(status, ComplianceReportStatus) + assert status.status == compliance_report_statuses[2].status + + +@pytest.mark.anyio +async def test_get_compliance_report_status_by_desc_unknown_status( + compliance_report_repo, +): + + with pytest.raises(DatabaseException): + await compliance_report_repo.get_compliance_report_status_by_desc( + status="Not a real status" + ) + + +@pytest.mark.anyio +async def test_add_compliance_report_success( + compliance_report_repo, + compliance_periods, + organizations, + compliance_report_statuses, +): + + new_report = ComplianceReport( + compliance_period_id=compliance_periods[0].compliance_period_id, + organization_id=organizations[0].organization_id, + reporting_frequency=ReportingFrequency.ANNUAL, + current_status_id=compliance_report_statuses[0].compliance_report_status_id, + compliance_report_group_uuid=str(uuid.uuid4()), + version=1, + ) + + report = await compliance_report_repo.create_compliance_report(report=new_report) + + assert isinstance(report, ComplianceReportBaseSchema) + assert report.compliance_period_id == compliance_periods[0].compliance_period_id + assert 
report.organization_id == organizations[0].organization_id + assert ( + report.current_status_id + == compliance_report_statuses[0].compliance_report_status_id + ) + + +@pytest.mark.anyio +async def test_add_compliance_report_exception( + compliance_report_repo, +): + + new_report = ComplianceReport() + + with pytest.raises(DatabaseException): + await compliance_report_repo.create_compliance_report(report=new_report) + + +@pytest.mark.anyio +async def test_add_compliance_report_history_success( + compliance_report_repo, + users, + compliance_reports, +): + + history = await compliance_report_repo.add_compliance_report_history( + report=compliance_reports[0], user=users[0] + ) + + assert isinstance(history, ComplianceReportHistory) + assert history.compliance_report_id == compliance_reports[0].compliance_report_id + assert history.user_profile_id == users[0].user_profile_id + assert history.status_id == compliance_reports[0].current_status_id + + +@pytest.mark.anyio +async def test_add_compliance_report_history_exception( + compliance_report_repo, +): + with pytest.raises(DatabaseException): + await compliance_report_repo.add_compliance_report_history( + report=None, user=None + ) + + +@pytest.mark.anyio +async def test_get_reports_paginated_success( + compliance_report_repo, + compliance_reports, +): + + pagination = PaginationRequestSchema( + page=1, + size=10, + sort_orders=[], + filters=[], + ) + + reports, total_count = await compliance_report_repo.get_reports_paginated( + pagination=pagination + ) + + assert isinstance(reports, list) + assert len(reports) > 0 + assert isinstance(reports[0], ComplianceReportBaseSchema) + assert total_count >= len(reports) + + +@pytest.mark.anyio +async def test_get_compliance_report_by_id_success_is_not_model( + compliance_report_repo, + compliance_reports, +): + + report = await compliance_report_repo.get_compliance_report_by_id( + report_id=compliance_reports[0].compliance_report_id, is_model=False + ) + + assert isinstance(report, ComplianceReportBaseSchema) + assert report.compliance_report_id == compliance_reports[0].compliance_report_id + + +@pytest.mark.anyio +async def test_get_compliance_report_by_id_success_is_model( + compliance_report_repo, + compliance_reports, +): + + report = await compliance_report_repo.get_compliance_report_by_id( + report_id=compliance_reports[0].compliance_report_id, is_model=True + ) + + assert isinstance(report, ComplianceReport) + assert report.compliance_report_id == compliance_reports[0].compliance_report_id + + +@pytest.mark.anyio +async def test_get_compliance_report_by_id_success_not_found( + compliance_report_repo, +): + + report = await compliance_report_repo.get_compliance_report_by_id( + report_id=1000, is_model=True + ) + + assert report is None + + +@pytest.mark.anyio +async def test_get_fuel_type_success(compliance_report_repo, fuel_types): + + fuel_type = await compliance_report_repo.get_fuel_type( + fuel_type_id=fuel_types[0].fuel_type_id + ) + + assert isinstance(fuel_type, FuelType) + assert fuel_type.fuel_type_id == fuel_types[0].fuel_type_id + + +@pytest.mark.anyio +async def test_get_fuel_type_not_found(compliance_report_repo): + + fuel_type = await compliance_report_repo.get_fuel_type(fuel_type_id=1000) + + assert fuel_type is None + + +@pytest.mark.anyio +async def test_get_fuel_category_success(compliance_report_repo, fuel_categories): + + fuel_category = await compliance_report_repo.get_fuel_category( + fuel_category_id=fuel_categories[0].fuel_category_id + ) + + assert 
isinstance(fuel_category, FuelCategory) + assert fuel_category.fuel_category_id == fuel_categories[0].fuel_category_id + + +@pytest.mark.anyio +async def test_get_fuel_category_not_found(compliance_report_repo): + + fuel_category = await compliance_report_repo.get_fuel_category( + fuel_category_id=1000 + ) + + assert fuel_category is None + + +@pytest.mark.anyio +async def test_get_expected_use_success(compliance_report_repo, expected_uses): + + expected_use = await compliance_report_repo.get_expected_use( + expected_use_type_id=expected_uses[0].expected_use_type_id + ) + + assert isinstance(expected_use, ExpectedUseType) + assert expected_use.expected_use_type_id == expected_uses[0].expected_use_type_id + + +@pytest.mark.anyio +async def test_get_expected_use_not_found(compliance_report_repo): + + expected_use = await compliance_report_repo.get_expected_use( + expected_use_type_id=1000 + ) + + assert expected_use is None + + +@pytest.mark.anyio +async def test_update_compliance_report_success( + compliance_report_repo, compliance_reports, compliance_periods +): + + compliance_reports[0].compliance_period_id = compliance_periods[ + 1 + ].compliance_period_id + + report = await compliance_report_repo.update_compliance_report( + report=compliance_reports[0] + ) + + assert isinstance(report, ComplianceReportBaseSchema) + assert report.compliance_period_id == compliance_periods[1].compliance_period_id + + +@pytest.mark.anyio +async def test_update_compliance_report_exception(compliance_report_repo): + + with pytest.raises(Exception): + await compliance_report_repo.update_compliance_report(report=None) + + +@pytest.mark.anyio +async def test_add_compliance_report_summary_success( + compliance_report_repo, compliance_reports +): + + summary = ComplianceReportSummary( + compliance_report_id=compliance_reports[0].compliance_report_id, + ) + + result = await compliance_report_repo.add_compliance_report_summary(summary=summary) + + assert isinstance(result, ComplianceReportSummary) + assert result.compliance_report_id == compliance_reports[0].compliance_report_id + + +@pytest.mark.anyio +async def test_add_compliance_report_summary_exception( + compliance_report_repo, +): + with pytest.raises(DatabaseException): + await compliance_report_repo.add_compliance_report_summary(summary=None) + + +@pytest.mark.anyio +async def test_save_compliance_report_summary_success( + compliance_report_repo, + compliance_reports, + compliance_report_summaries, +): + summary_schema = ComplianceReportSummarySchema( + compliance_report_id=compliance_reports[0].compliance_report_id, + renewable_fuel_target_summary=[], + low_carbon_fuel_target_summary=[], + non_compliance_penalty_summary=[], + ) + + result = await compliance_report_repo.save_compliance_report_summary( + summary=summary_schema + ) + + assert isinstance(result, ComplianceReportSummary) + assert result.compliance_report_id == compliance_reports[0].compliance_report_id + + +@pytest.mark.anyio +async def test_save_compliance_report_summary_exception( + compliance_report_repo, +): + summary_schema = ComplianceReportSummarySchema( + compliance_report_id=1000, # Non-existent report_id + renewable_fuel_target_summary=[], + low_carbon_fuel_target_summary=[], + non_compliance_penalty_summary=[], + ) + + with pytest.raises(DatabaseException): + await compliance_report_repo.save_compliance_report_summary( + summary=summary_schema + ) + + +@pytest.mark.anyio +async def test_get_summary_by_report_id_success( + compliance_report_repo, compliance_reports, 
compliance_report_summaries +): + + summary = await compliance_report_repo.get_summary_by_report_id( + report_id=compliance_reports[0].compliance_report_id + ) + + assert isinstance(summary, ComplianceReportSummary) + assert summary.compliance_report_id == compliance_reports[0].compliance_report_id + + +@pytest.mark.anyio +async def test_get_summary_by_report_id_success_supplemental( + compliance_report_repo, supplemental_reports, compliance_report_summaries +): + + summary = await compliance_report_repo.get_summary_by_report_id( + report_id=supplemental_reports[0].compliance_report_id + ) + + assert isinstance(summary, ComplianceReportSummary) + assert summary.compliance_report_id == supplemental_reports[0].compliance_report_id + + +@pytest.mark.anyio +async def test_get_summary_by_report_id_not_found(compliance_report_repo): + + summary = await compliance_report_repo.get_summary_by_report_id(report_id=1000) + + assert summary is None + + +@pytest.mark.anyio +async def test_get_transferred_out_compliance_units_success( + compliance_report_repo, organizations, transfers +): + + units = await compliance_report_repo.get_transferred_out_compliance_units( + compliance_period_start=datetime.strptime("2023-01-01", "%Y-%m-%d").date(), + compliance_period_end=datetime.strptime("2025-01-01", "%Y-%m-%d").date(), + organization_id=organizations[0].organization_id, + ) + + assert units == 2 + + +@pytest.mark.anyio +async def test_get_transferred_out_compliance_units_not_found(compliance_report_repo): + + units = await compliance_report_repo.get_transferred_out_compliance_units( + compliance_period_start=datetime.strptime("2025-01-01", "%Y-%m-%d").date(), + compliance_period_end=datetime.strptime("2026-01-01", "%Y-%m-%d").date(), + organization_id=None, + ) + + assert units == 0 + + +@pytest.mark.anyio +async def test_get_received_compliance_units_success( + compliance_report_repo, organizations, transfers +): + + units = await compliance_report_repo.get_received_compliance_units( + compliance_period_start=datetime.strptime("2023-01-01", "%Y-%m-%d").date(), + compliance_period_end=datetime.strptime("2025-01-01", "%Y-%m-%d").date(), + organization_id=organizations[0].organization_id, + ) + + assert units == 2 + + +@pytest.mark.anyio +async def test_get_received_compliance_units_not_found(compliance_report_repo): + + units = await compliance_report_repo.get_received_compliance_units( + compliance_period_start=datetime.strptime("2025-01-01", "%Y-%m-%d").date(), + compliance_period_end=datetime.strptime("2026-01-01", "%Y-%m-%d").date(), + organization_id=None, + ) + + assert units == 0 + + +@pytest.mark.anyio +async def test_get_issued_compliance_units_success( + compliance_report_repo, organizations, initiative_agreements +): + + units = await compliance_report_repo.get_issued_compliance_units( + compliance_period_start=datetime.strptime("2023-01-01", "%Y-%m-%d").date(), + compliance_period_end=datetime.strptime("2025-01-01", "%Y-%m-%d").date(), + organization_id=organizations[0].organization_id, + ) + + assert units == 2 + + +@pytest.mark.anyio +async def test_get_issued_compliance_units_not_found(compliance_report_repo): + + units = await compliance_report_repo.get_issued_compliance_units( + compliance_period_start=datetime.strptime("2025-01-01", "%Y-%m-%d").date(), + compliance_period_end=datetime.strptime("2026-01-01", "%Y-%m-%d").date(), + organization_id=None, + ) + + assert units == 0 + + +@pytest.mark.anyio +async def test_get_all_org_reported_years_success( + compliance_report_repo, 
compliance_reports, compliance_periods +): + periods = await compliance_report_repo.get_all_org_reported_years( + organization_id=compliance_reports[0].organization_id + ) + + assert len(periods) == 1 + assert isinstance(periods[0], CompliancePeriod) + assert periods[0] == compliance_periods[0] + + +@pytest.mark.anyio +async def test_get_all_org_reported_years_not_found( + compliance_report_repo, +): + periods = await compliance_report_repo.get_all_org_reported_years( + organization_id=1000 + ) + + assert len(periods) == 0 + + +@pytest.mark.parametrize( + "fossil_derived, expected", + [ + (True, {"diesel": 1.0, "gasoline": 1.0}), + (False, {"diesel": 1.0, "gasoline": 1.0}), + ], +) +def test_aggregate_fuel_supplies( + compliance_report_repo, fuel_supplies, fossil_derived, expected +): + result = compliance_report_repo.aggregate_fuel_supplies( + fuel_supplies, fossil_derived + ) + + assert result == expected + + +@pytest.mark.anyio +async def test_aggregate_other_uses(compliance_report_repo, dbsession): + mock_result = [ + MagicMock(category="Gasoline", quantity=50.0), + MagicMock(category="Ethanol", quantity=75.0), + ] + dbsession.execute = AsyncMock(return_value=mock_result) + + result = await compliance_report_repo.aggregate_other_uses(1, True) + + assert result == {"gasoline": 50.0, "ethanol": 75.0} + dbsession.execute.assert_awaited_once() + + +@pytest.mark.anyio +async def test_aggregate_allocation_agreements(compliance_report_repo, dbsession): + mock_result = [ + MagicMock(category="Biogas", quantity=25.0), + MagicMock(category="Biodiesel", quantity=100.0), + ] + dbsession.execute = AsyncMock(return_value=mock_result) + + result = await compliance_report_repo.aggregate_allocation_agreements(2) + + assert result == {"biogas": 25.0, "biodiesel": 100.0} + dbsession.execute.assert_awaited_once() diff --git a/backend/lcfs/tests/compliance_report/test_compliance_report_services.py b/backend/lcfs/tests/compliance_report/test_compliance_report_services.py new file mode 100644 index 000000000..9300d2918 --- /dev/null +++ b/backend/lcfs/tests/compliance_report/test_compliance_report_services.py @@ -0,0 +1,207 @@ +import pytest +from unittest.mock import MagicMock, AsyncMock +from lcfs.db.models.compliance.CompliancePeriod import CompliancePeriod +from lcfs.db.models.compliance.ComplianceReportStatus import ComplianceReportStatus +from lcfs.web.exception.exceptions import ServiceException, DataNotFoundException + + +# get_all_compliance_periods +@pytest.mark.anyio +async def test_get_all_compliance_periods_success(compliance_report_service, mock_repo): + mock_periods = [ + {"compliance_period_id": 1, "description": "2024 Compliance Period"}, + {"compliance_period_id": 2, "description": "2025 Compliance Period"}, + ] + mock_repo.get_all_compliance_periods.return_value = mock_periods + + result = await compliance_report_service.get_all_compliance_periods() + + assert len(result) == 2 + assert result[0].compliance_period_id == 1 + assert result[0].description == "2024 Compliance Period" + mock_repo.get_all_compliance_periods.assert_called_once() + + +@pytest.mark.anyio +async def test_get_all_compliance_periods_unexpected_error( + compliance_report_service, mock_repo +): + mock_repo.get_all_compliance_periods.side_effect = Exception("Unexpected error") + + with pytest.raises(ServiceException): + await compliance_report_service.get_all_compliance_periods() + + mock_repo.get_all_compliance_periods.assert_called_once() + + +# create_compliance_report +@pytest.mark.anyio +async def 
test_create_compliance_report_success( + compliance_report_service, + mock_repo, + compliance_report_base_schema, + compliance_report_create_schema, +): + mock_user = MagicMock() + + # Mock the compliance period + mock_compliance_period = CompliancePeriod( + compliance_period_id=1, + description="2024", + ) + mock_repo.get_compliance_period.return_value = mock_compliance_period + + # Mock the compliance report status + mock_draft_status = ComplianceReportStatus( + compliance_report_status_id=1, status="Draft" + ) + mock_repo.get_compliance_report_status_by_desc.return_value = mock_draft_status + + # Mock the added compliance report + mock_compliance_report = compliance_report_base_schema() + + mock_repo.create_compliance_report.return_value = mock_compliance_report + + result = await compliance_report_service.create_compliance_report( + 1, compliance_report_create_schema, mock_user + ) + + assert result == mock_compliance_report + mock_repo.get_compliance_period.assert_called_once_with( + compliance_report_create_schema.compliance_period + ) + mock_repo.get_compliance_report_status_by_desc.assert_called_once_with( + compliance_report_create_schema.status + ) + mock_repo.create_compliance_report.assert_called_once() + + +@pytest.mark.anyio +async def test_create_compliance_report_unexpected_error( + compliance_report_service, mock_repo, compliance_report_create_schema +): + mock_repo.create_compliance_report.side_effect = Exception( + "Unexpected error occurred" + ) + + with pytest.raises(ServiceException): + await compliance_report_service.create_compliance_report( + 1, compliance_report_create_schema, MagicMock() + ) + + +# get_compliance_reports_paginated +@pytest.mark.anyio +async def test_get_compliance_reports_paginated_success( + compliance_report_service, mock_repo, compliance_report_base_schema +): + pagination_mock = AsyncMock() + pagination_mock.page = 1 + pagination_mock.size = 10 + + mock_compliance_report = compliance_report_base_schema() + + mock_repo.get_reports_paginated.return_value = ([mock_compliance_report], 1) + + result = await compliance_report_service.get_compliance_reports_paginated( + pagination_mock + ) + + assert result.pagination.total == 1 + assert result.reports == [mock_compliance_report] + mock_repo.get_reports_paginated.assert_called_once() + + +@pytest.mark.anyio +async def test_get_compliance_reports_paginated_not_found( + compliance_report_service, mock_repo +): + pagination_mock = AsyncMock() + pagination_mock.page = 1 + pagination_mock.size = 10 + + mock_repo.get_reports_paginated.return_value = ([], 0) + + with pytest.raises(DataNotFoundException): + await compliance_report_service.get_compliance_reports_paginated( + pagination_mock + ) + + +@pytest.mark.anyio +async def test_get_compliance_reports_paginated_unexpected_error( + compliance_report_service, mock_repo +): + mock_repo.get_reports_paginated.side_effect = Exception("Unexpected error occurred") + + with pytest.raises(ServiceException): + await compliance_report_service.get_compliance_reports_paginated(AsyncMock()) + + +# get_compliance_report_by_id +@pytest.mark.anyio +async def test_get_compliance_report_by_id_success( + compliance_report_service, mock_repo, compliance_report_base_schema +): + mock_compliance_report = compliance_report_base_schema() + + compliance_report_service._mask_report_status_for_history = MagicMock( + return_value=mock_compliance_report + ) + + mock_repo.get_compliance_report_by_id.return_value = mock_compliance_report + + result = await compliance_report_service.get_compliance_report_by_id(1) + + 
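# A minimal, runnable sketch of the AsyncMock pattern these service tests rely on.
# The class and method names below are illustrative stand-ins, not the real LCFS
# services: an AsyncMock plays the repository, `return_value` drives the happy path,
# `side_effect` simulates the failure paths, and `assert_awaited_once_with` verifies
# that the service actually awaited the repo call with the expected arguments.
import asyncio
from unittest.mock import AsyncMock

class _DemoService:
    def __init__(self, repo):
        self.repo = repo

    async def get_report(self, report_id):
        # Awaiting an AsyncMock attribute returns its configured return_value.
        return await self.repo.get_compliance_report_by_id(report_id)

async def _demo():
    repo = AsyncMock()
    repo.get_compliance_report_by_id.return_value = {"compliance_report_id": 1}
    service = _DemoService(repo)
    assert await service.get_report(1) == {"compliance_report_id": 1}
    repo.get_compliance_report_by_id.assert_awaited_once_with(1)
    # Failure path: side_effect makes the next awaited call raise.
    repo.get_compliance_report_by_id.side_effect = RuntimeError("boom")
    try:
        await service.get_report(1)
    except RuntimeError:
        pass

asyncio.run(_demo())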
assert result == mock_compliance_report + mock_repo.get_compliance_report_by_id.assert_called_once_with(1) + compliance_report_service._mask_report_status_for_history.assert_called_once_with( + mock_compliance_report, False + ) + + +@pytest.mark.anyio +async def test_get_compliance_report_by_id_not_found( + compliance_report_service, mock_repo +): + mock_repo.get_compliance_report_by_id.return_value = None + + with pytest.raises(DataNotFoundException): + await compliance_report_service.get_compliance_report_by_id(999) + + +@pytest.mark.anyio +async def test_get_compliance_report_by_id_unexpected_error( + compliance_report_service, mock_repo +): + mock_repo.get_compliance_report_by_id.side_effect = Exception("Unexpected error") + + with pytest.raises(ServiceException): + await compliance_report_service.get_compliance_report_by_id(1) + + +# get_all_org_reported_years +@pytest.mark.anyio +async def test_get_all_org_reported_years_success( + compliance_report_service, mock_repo, compliance_period_schema +): + + mock_repo.get_all_org_reported_years.return_value = [compliance_period_schema] + + result = await compliance_report_service.get_all_org_reported_years(1) + + assert len(result) == 1 + assert result[0] == compliance_period_schema + mock_repo.get_all_org_reported_years.assert_called_once_with(1) + + +@pytest.mark.anyio +async def test_get_all_org_reported_years_unexpected_error( + compliance_report_service, mock_repo +): + mock_repo.get_all_org_reported_years.side_effect = Exception( + "Unexpected error occurred" + ) + + with pytest.raises(ServiceException): + await compliance_report_service.get_all_org_reported_years(1) diff --git a/backend/lcfs/tests/compliance_report/test_compliance_report_views.py b/backend/lcfs/tests/compliance_report/test_compliance_report_views.py new file mode 100644 index 000000000..8d7f1058b --- /dev/null +++ b/backend/lcfs/tests/compliance_report/test_compliance_report_views.py @@ -0,0 +1,792 @@ +import json +from lcfs.web.api.email.repo import CHESEmailRepository +import pytest + +from unittest.mock import patch, AsyncMock +from httpx import AsyncClient +from fastapi import FastAPI + +from lcfs.db.models.user.Role import RoleEnum + +from lcfs.web.exception.exceptions import DataNotFoundException +from lcfs.web.api.base import FilterModel +from lcfs.web.api.compliance_report.schema import ( + ComplianceReportUpdateSchema, + ComplianceReportSummaryUpdateSchema, + ChainedComplianceReportSchema, +) +from lcfs.services.s3.client import DocumentService + +@pytest.fixture +def mock_email_repo(): + return AsyncMock(spec=CHESEmailRepository) + +@pytest.fixture +def mock_environment_vars(): + with patch("lcfs.web.api.email.services.settings") as mock_settings: + mock_settings.ches_auth_url = "http://mock_auth_url" + mock_settings.ches_email_url = "http://mock_email_url" + mock_settings.ches_client_id = "mock_client_id" + mock_settings.ches_client_secret = "mock_client_secret" + mock_settings.ches_sender_email = "noreply@gov.bc.ca" + mock_settings.ches_sender_name = "Mock Notification System" + yield mock_settings + +# get_compliance_periods +@pytest.mark.anyio +async def test_get_compliance_periods_success( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, +): + with patch( + "lcfs.web.api.compliance_report.views.ComplianceReportServices.get_all_compliance_periods" + ) as mock_get_all_compliance_periods: + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) # Set a valid role + + # Mock response data + mock_get_all_compliance_periods.return_value = [ + { + 
"compliance_period_id": 1, + "description": "2024 Compliance Period", + "effective_date": "2024-01-01T00:00:00", + "expiration_date": "2024-12-31T23:59:59", + "display_order": 1, + }, + { + "compliance_period_id": 2, + "description": "2025 Compliance Period", + "effective_date": "2025-01-01T00:00:00", + "expiration_date": "2025-12-31T23:59:59", + "display_order": 2, + }, + ] + + url = fastapi_app.url_path_for("get_compliance_periods") + + response = await client.get(url) + + assert response.status_code == 200 + + expected_response = [ + { + "compliancePeriodId": 1, + "description": "2024 Compliance Period", + "effectiveDate": "2024-01-01T00:00:00", + "expirationDate": "2024-12-31T23:59:59", + "displayOrder": 1, + }, + { + "compliancePeriodId": 2, + "description": "2025 Compliance Period", + "effectiveDate": "2025-01-01T00:00:00", + "expirationDate": "2025-12-31T23:59:59", + "displayOrder": 2, + }, + ] + + assert response.json() == expected_response + mock_get_all_compliance_periods.assert_called_once() + + +@pytest.mark.anyio +async def test_get_compliance_periods_not_found( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, +): + with patch( + "lcfs.web.api.compliance_report.views.ComplianceReportServices.get_all_compliance_periods" + ) as mock_get_all_compliance_periods: + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) # Set a valid role + + # Simulate an empty list response indicating no compliance periods found + mock_get_all_compliance_periods.return_value = [] + + url = fastapi_app.url_path_for("get_compliance_periods") + + response = await client.get(url) + + assert response.status_code == 200 + assert response.json() == [] # Expected empty list + + +# get_compliance_reports +@pytest.mark.anyio +async def test_get_compliance_reports_success( + client: AsyncClient, + fastapi_app: FastAPI, + compliance_report_list_schema, + set_mock_user, + pagination_request_schema, +): + with patch( + "lcfs.web.api.compliance_report.views.ComplianceReportServices.get_compliance_reports_paginated" + ) as mock_get_compliance_reports_paginated: + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + + mock_get_compliance_reports_paginated.return_value = ( + compliance_report_list_schema + ) + + url = fastapi_app.url_path_for("get_compliance_reports") + + response = await client.post( + url, json=pagination_request_schema.dict(by_alias=True) + ) + + assert response.status_code == 200 + + expected_response = json.loads( + compliance_report_list_schema.json(by_alias=True) + ) + + assert response.json() == expected_response + + pagination_request_schema.filters.append( + FilterModel( + field="status", filter="Draft", filter_type="text", type="notEqual" + ) + ) + + mock_get_compliance_reports_paginated.assert_called_once_with( + pagination_request_schema + ) + + +@pytest.mark.anyio +async def test_get_compliance_reports_forbidden( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, + pagination_request_schema, +): + # Set a role that does not have access + set_mock_user(fastapi_app, [RoleEnum.ANALYST]) + + url = fastapi_app.url_path_for("get_compliance_reports") + + response = await client.post( + url, json=pagination_request_schema.dict(by_alias=True) + ) + + assert response.status_code == 403 # Forbidden + + +@pytest.mark.anyio +async def test_get_compliance_reports_invalid_payload( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, +): + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + + url = fastapi_app.url_path_for("get_compliance_reports") + + # Invalid 
payload with incorrect data type for `page` and missing `size` + invalid_payload = { + "page": "invalid_page", # Should be an integer + "filters": [ + { + "field": "exampleField", + "filter": "exampleValue", + "filter_type": "text", + "type": "equals", + } + ], + } + + response = await client.post(url, json=invalid_payload) + + assert response.status_code == 422 # Unprocessable Entity (Validation Error) + + +@pytest.mark.anyio +async def test_get_compliance_reports_not_found( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, + pagination_request_schema, +): + with patch( + "lcfs.web.api.compliance_report.views.ComplianceReportServices.get_compliance_reports_paginated" + ) as mock_get_compliance_reports_paginated: + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + + # Simulate DataNotFoundException for no reports found + mock_get_compliance_reports_paginated.side_effect = DataNotFoundException( + "No compliance reports found" + ) + + url = fastapi_app.url_path_for("get_compliance_reports") + + response = await client.post( + url, json=pagination_request_schema.dict(by_alias=True) + ) + + assert response.status_code == 404 # Not Found + + +# get_compliance_report_by_id +@pytest.mark.anyio +async def test_get_compliance_report_by_id_success( + client: AsyncClient, + fastapi_app: FastAPI, + compliance_report_base_schema, + set_mock_user, +): + with patch( + "lcfs.web.api.compliance_report.views.ComplianceReportServices.get_compliance_report_by_id" + ) as mock_get_compliance_report_by_id, patch( + "lcfs.web.api.compliance_report.views.ComplianceReportValidation.validate_organization_access" + ) as mock_validate_organization_access: + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + + mock_compliance_report = ChainedComplianceReportSchema( + report=compliance_report_base_schema(), chain=[] + ) + + mock_get_compliance_report_by_id.return_value = mock_compliance_report + mock_validate_organization_access.return_value = None + + url = fastapi_app.url_path_for("get_compliance_report_by_id", report_id=1) + + response = await client.get(url) + + assert response.status_code == 200 + + expected_response = json.loads(mock_compliance_report.json(by_alias=True)) + + assert response.json() == expected_response + mock_get_compliance_report_by_id.assert_called_once_with( + 1, False, get_chain=True + ) + mock_validate_organization_access.assert_called_once_with(1) + + +@pytest.mark.anyio +async def test_get_compliance_report_by_id_forbidden( + client: AsyncClient, fastapi_app: FastAPI, set_mock_user +): + set_mock_user(fastapi_app, [RoleEnum.ANALYST]) # User with the wrong role + + url = fastapi_app.url_path_for("get_compliance_report_by_id", report_id=1) + + response = await client.get(url) + + assert response.status_code == 403 + + +@pytest.mark.anyio +async def test_get_compliance_report_by_id_invalid_payload( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, +): + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + + url = fastapi_app.url_path_for("get_compliance_report_by_id", report_id="invalid") + + response = await client.get(url) + + assert response.status_code == 422 # Unprocessable Entity (Validation Error) + + +@pytest.mark.anyio +async def test_get_compliance_report_by_id_not_found( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, +): + with patch( + "lcfs.web.api.compliance_report.views.ComplianceReportServices.get_compliance_report_by_id" + ) as mock_get_compliance_report_by_id: + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + + # Simulate 
DataNotFoundException for a non-existent report + mock_get_compliance_report_by_id.side_effect = DataNotFoundException( + "Report not found" + ) + + url = fastapi_app.url_path_for("get_compliance_report_by_id", report_id=9999) + + response = await client.get(url) + + assert response.status_code == 404 # Not Found + + +# get_compliance_report_summary +@pytest.mark.anyio +async def test_get_compliance_report_summary_success( + client: AsyncClient, + fastapi_app: FastAPI, + compliance_report_summary_schema, + set_mock_user, +): + with patch( + "lcfs.web.api.compliance_report.views.ComplianceReportSummaryService.calculate_compliance_report_summary" + ) as mock_calculate_compliance_report_summary, patch( + "lcfs.web.api.compliance_report.views.ComplianceReportValidation.validate_organization_access" + ) as mock_validate_organization_access: + set_mock_user(fastapi_app, [RoleEnum.SUPPLIER]) + mock_compliance_report_summary = compliance_report_summary_schema() + + mock_calculate_compliance_report_summary.return_value = ( + mock_compliance_report_summary + ) + mock_validate_organization_access.return_value = None + + url = fastapi_app.url_path_for("get_compliance_report_summary", report_id=1) + + response = await client.get(url) + + assert response.status_code == 200 + + expected_response = json.loads( + mock_compliance_report_summary.json(by_alias=True) + ) + + assert response.json() == expected_response + mock_calculate_compliance_report_summary.assert_called_once_with(1) + mock_validate_organization_access.assert_called_once_with(1) + + +@pytest.mark.anyio +async def test_get_compliance_report_summary_invalid_payload( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, +): + set_mock_user(fastapi_app, [RoleEnum.SUPPLIER]) + + # Assuming 'abc' is an invalid report_id + url = fastapi_app.url_path_for("get_compliance_report_summary", report_id="abc") + + response = await client.get(url) + + assert response.status_code == 422 # Unprocessable Entity (Validation Error) + + +@pytest.mark.anyio +async def test_get_compliance_report_summary_not_found( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, +): + with patch( + "lcfs.web.api.compliance_report.views.ComplianceReportSummaryService.calculate_compliance_report_summary" + ) as mock_calculate_compliance_report_summary: + set_mock_user(fastapi_app, [RoleEnum.SUPPLIER]) + + # Simulate DataNotFoundException for a non-existent report + mock_calculate_compliance_report_summary.side_effect = DataNotFoundException( + "Summary not found" + ) + + url = fastapi_app.url_path_for( + "get_compliance_report_summary", report_id=9999 # Non-existent report ID + ) + + response = await client.get(url) + + assert response.status_code == 404 # Not Found + + +# update_compliance_report_summary +@pytest.mark.anyio +async def test_update_compliance_report_summary_success( + client: AsyncClient, + fastapi_app: FastAPI, + compliance_report_summary_schema, + set_mock_user, +): + with patch( + "lcfs.web.api.compliance_report.views.ComplianceReportSummaryService.update_compliance_report_summary" + ) as mock_update_compliance_report_summary, patch( + "lcfs.web.api.compliance_report.views.ComplianceReportValidation.validate_organization_access" + ) as mock_validate_organization_access: + set_mock_user(fastapi_app, [RoleEnum.SUPPLIER]) + + mock_compliance_report_summary = compliance_report_summary_schema() + request_schema = ComplianceReportSummaryUpdateSchema( + compliance_report_id=1, + 
renewable_fuel_target_summary=mock_compliance_report_summary.renewable_fuel_target_summary, + low_carbon_fuel_target_summary=mock_compliance_report_summary.low_carbon_fuel_target_summary, + non_compliance_penalty_summary=mock_compliance_report_summary.non_compliance_penalty_summary, + summary_id=mock_compliance_report_summary.summary_id, + ) + mock_validate_organization_access.return_value = None + mock_update_compliance_report_summary.return_value = ( + mock_compliance_report_summary + ) + + url = fastapi_app.url_path_for("update_compliance_report_summary", report_id=1) + + payload = request_schema.model_dump(by_alias=True) + + response = await client.put(url, json=payload) + + assert response.status_code == 200 + + expected_response = json.loads( + mock_compliance_report_summary.json(by_alias=True) + ) + + assert response.json() == expected_response + mock_update_compliance_report_summary.assert_called_once_with(1, request_schema) + mock_validate_organization_access.assert_called_once_with(1) + + +@pytest.mark.anyio +async def test_update_compliance_report_summary_forbidden( + client: AsyncClient, + fastapi_app: FastAPI, + compliance_report_summary_schema, + set_mock_user, +): + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) # User with the wrong role + + url = fastapi_app.url_path_for("update_compliance_report_summary", report_id=1) + payload = compliance_report_summary_schema().dict(by_alias=True) + + response = await client.put(url, json=payload) + + assert response.status_code == 403 # Forbidden + + +@pytest.mark.anyio +async def test_update_compliance_report_summary_invalid_payload( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, +): + set_mock_user(fastapi_app, [RoleEnum.SUPPLIER]) + + url = fastapi_app.url_path_for("update_compliance_report_summary", report_id=1) + payload = {"invalidField": "invalidValue"} # Invalid payload structure + + response = await client.put(url, json=payload) + + assert response.status_code == 422 # Unprocessable Entity (Validation Error) + + +@pytest.mark.anyio +async def test_update_compliance_report_summary_not_found( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, + compliance_report_summary_schema, +): + with patch( + "lcfs.web.api.compliance_report.summary_service.ComplianceReportSummaryService.update_compliance_report_summary" + ) as mock_update_compliance_report_summary: + set_mock_user(fastapi_app, [RoleEnum.SUPPLIER]) + + # Simulate DataNotFoundException for a non-existent summary + mock_update_compliance_report_summary.side_effect = DataNotFoundException( + "Summary not found" + ) + + url = fastapi_app.url_path_for("update_compliance_report_summary", report_id=1) + payload = compliance_report_summary_schema().dict(by_alias=True) + + response = await client.put(url, json=payload) + + assert response.status_code == 404 # Not Found + + +# update_compliance_report +@pytest.mark.anyio +async def test_update_compliance_report_success( + client: AsyncClient, + fastapi_app: FastAPI, + compliance_report_base_schema, + set_mock_user, +): + with patch( + "lcfs.web.api.compliance_report.views.ComplianceReportUpdateService.update_compliance_report" + ) as mock_update_compliance_report, patch( + "lcfs.web.api.compliance_report.views.ComplianceReportValidation.validate_organization_access" + ) as mock_validate_organization_access: + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + + mock_compliance_report = compliance_report_base_schema() + mock_validate_organization_access.return_value = None + 
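# Why the JSON payloads in these endpoint tests are camelCase while the schema fields
# are snake_case: the API schemas serialize through camelCase aliases, so
# `model_dump(by_alias=True)` / `.json(by_alias=True)` emit aliased keys. A minimal
# pydantic v2 sketch of that convention (this demo model is illustrative, not the
# real ComplianceReportSummaryUpdateSchema, whose definition is not shown here):
from pydantic import BaseModel, ConfigDict
from pydantic.alias_generators import to_camel

class _DemoUpdateSchema(BaseModel):
    # alias_generator derives camelCase aliases; populate_by_name also allows
    # constructing the model with the snake_case field names.
    model_config = ConfigDict(alias_generator=to_camel, populate_by_name=True)

    status: str
    supplemental_note: str

_payload = _DemoUpdateSchema(
    status="Draft", supplemental_note="new supplemental note"
).model_dump(by_alias=True)
assert _payload == {"status": "Draft", "supplementalNote": "new supplemental note"}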
mock_update_compliance_report.return_value = mock_compliance_report + + url = fastapi_app.url_path_for( + "update_compliance_report", + report_id=1, + ) + + payload = {"status": "Draft", "supplementalNote": "new supplemental note"} + + response = await client.put(url, json=payload) + + assert response.status_code == 200 + + expected_response = json.loads(mock_compliance_report.json(by_alias=True)) + + assert response.json() == expected_response + + mock_update_compliance_report.assert_called_once_with( + 1, ComplianceReportUpdateSchema(**payload) + ) + mock_validate_organization_access.assert_called_once_with(1) + + +@pytest.mark.anyio +async def test_update_compliance_report_forbidden( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, +): + set_mock_user(fastapi_app, [RoleEnum.ANALYST]) + + url = fastapi_app.url_path_for("update_compliance_report", report_id=1) + + payload = {"status": "Draft", "supplementalNote": "new supplemental note"} + + response = await client.put(url, json=payload) + + assert response.status_code == 403 + + +@pytest.mark.anyio +async def test_update_compliance_report_not_found( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, +): + with patch( + "lcfs.web.api.compliance_report.views.ComplianceReportUpdateService.update_compliance_report" + ) as mock_update_compliance_report: + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + + # Simulate that the report does not exist by raising an exception + mock_update_compliance_report.side_effect = DataNotFoundException( + "Compliance report not found" + ) + + url = fastapi_app.url_path_for( + "update_compliance_report", report_id=0 + ) # Non-existent ID + + payload = {"status": "Draft", "supplementalNote": "new supplemental note"} + + response = await client.put(url, json=payload) + + assert response.status_code == 404 + # assert response.json() == {"detail": "Compliance report not found"} + + +@pytest.mark.anyio +async def test_update_compliance_report_invalid_payload( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, +): + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + + url = fastapi_app.url_path_for("update_compliance_report", report_id=1) + payload = {"invalidField": "invalidValue"} # Invalid payload structure + + response = await client.put(url, json=payload) + + assert response.status_code == 422 # Unprocessable Entity (Validation Error) + + +@pytest.mark.anyio +async def test_update_compliance_report_draft_success( + client: AsyncClient, + fastapi_app: FastAPI, + compliance_report_base_schema, + set_mock_user, +): + with patch( + "lcfs.web.api.compliance_report.views.ComplianceReportUpdateService.update_compliance_report" + ) as mock_update_compliance_report, patch( + "lcfs.web.api.compliance_report.views.ComplianceReportValidation.validate_organization_access" + ) as mock_validate_organization_access: + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + + mock_compliance_report = compliance_report_base_schema() + mock_validate_organization_access.return_value = None + mock_update_compliance_report.return_value = mock_compliance_report + + url = fastapi_app.url_path_for( + "update_compliance_report", + report_id=1, + ) + + payload = {"status": "Draft", "supplementalNote": "Drafting a new report"} + + response = await client.put(url, json=payload) + + assert response.status_code == 200 + + expected_response = json.loads(mock_compliance_report.json(by_alias=True)) + + assert response.json() == expected_response + + mock_update_compliance_report.assert_called_once_with( 
+ 1, ComplianceReportUpdateSchema(**payload) + ) + + +@pytest.mark.anyio +async def test_update_compliance_report_submitted_success( + client: AsyncClient, + fastapi_app: FastAPI, + compliance_report_base_schema, + set_mock_user, +): + with patch( + "lcfs.web.api.compliance_report.views.ComplianceReportUpdateService.update_compliance_report" + ) as mock_update_compliance_report, patch( + "lcfs.web.api.compliance_report.views.ComplianceReportValidation.validate_organization_access" + ) as mock_validate_organization_access: + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + + mock_compliance_report = compliance_report_base_schema() + mock_validate_organization_access.return_value = None + mock_update_compliance_report.return_value = mock_compliance_report + + url = fastapi_app.url_path_for( + "update_compliance_report", + report_id=1, + ) + + payload = {"status": "Submitted", "supplementalNote": "Submitting the report"} + + response = await client.put(url, json=payload) + + assert response.status_code == 200 + + expected_response = json.loads(mock_compliance_report.json(by_alias=True)) + + assert response.json() == expected_response + + mock_update_compliance_report.assert_called_once_with( + 1, ComplianceReportUpdateSchema(**payload) + ) + + +@pytest.mark.anyio +async def test_update_compliance_report_recommended_by_analyst_success( + client: AsyncClient, + fastapi_app: FastAPI, + compliance_report_base_schema, + set_mock_user, +): + with patch( + "lcfs.web.api.compliance_report.views.ComplianceReportUpdateService.update_compliance_report" + ) as mock_update_compliance_report, patch( + "lcfs.web.api.compliance_report.views.ComplianceReportValidation.validate_organization_access" + ) as mock_validate_organization_access: + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + + mock_compliance_report = compliance_report_base_schema() + mock_validate_organization_access.return_value = None + mock_update_compliance_report.return_value = mock_compliance_report + + url = fastapi_app.url_path_for( + "update_compliance_report", + report_id=1, + ) + + payload = { + "status": "Recommended by analyst", + "supplementalNote": "Analyst recommendation", + } + + response = await client.put(url, json=payload) + + assert response.status_code == 200 + + expected_response = json.loads(mock_compliance_report.json(by_alias=True)) + + assert response.json() == expected_response + + mock_update_compliance_report.assert_called_once_with( + 1, ComplianceReportUpdateSchema(**payload) + ) + + +@pytest.mark.anyio +async def test_update_compliance_report_recommended_by_manager_success( + client: AsyncClient, + fastapi_app: FastAPI, + compliance_report_base_schema, + set_mock_user, +): + with patch( + "lcfs.web.api.compliance_report.views.ComplianceReportUpdateService.update_compliance_report" + ) as mock_update_compliance_report, patch( + "lcfs.web.api.compliance_report.views.ComplianceReportValidation.validate_organization_access" + ) as mock_validate_organization_access: + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + + mock_compliance_report = compliance_report_base_schema() + mock_validate_organization_access.return_value = None + mock_update_compliance_report.return_value = mock_compliance_report + + url = fastapi_app.url_path_for( + "update_compliance_report", + report_id=1, + ) + + payload = { + "status": "Recommended by manager", + "supplementalNote": "Manager recommendation", + } + + response = await client.put(url, json=payload) + + assert response.status_code == 200 + + expected_response = 
json.loads(mock_compliance_report.json(by_alias=True)) + + assert response.json() == expected_response + + mock_update_compliance_report.assert_called_once_with( + 1, ComplianceReportUpdateSchema(**payload) + ) + + +@pytest.mark.anyio +async def test_update_compliance_report_assessed_success( + client: AsyncClient, + fastapi_app: FastAPI, + compliance_report_base_schema, + set_mock_user, +): + with patch( + "lcfs.web.api.compliance_report.views.ComplianceReportUpdateService.update_compliance_report" + ) as mock_update_compliance_report, patch( + "lcfs.web.api.compliance_report.views.ComplianceReportValidation.validate_organization_access" + ) as mock_validate_organization_access: + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + + mock_compliance_report = compliance_report_base_schema() + mock_validate_organization_access.return_value = None + mock_update_compliance_report.return_value = mock_compliance_report + + url = fastapi_app.url_path_for( + "update_compliance_report", + report_id=1, + ) + + payload = {"status": "Assessed", "supplementalNote": "Report has been assessed"} + + response = await client.put(url, json=payload) + + assert response.status_code == 200 + + expected_response = json.loads(mock_compliance_report.json(by_alias=True)) + + assert response.json() == expected_response + + mock_update_compliance_report.assert_called_once_with( + 1, ComplianceReportUpdateSchema(**payload) + ) diff --git a/backend/lcfs/tests/compliance_report/test_summary_service.py b/backend/lcfs/tests/compliance_report/test_summary_service.py new file mode 100644 index 000000000..dce14fef2 --- /dev/null +++ b/backend/lcfs/tests/compliance_report/test_summary_service.py @@ -0,0 +1,840 @@ +from datetime import datetime +from typing import List +from unittest.mock import AsyncMock, MagicMock + +import pytest + +from lcfs.db.models import FuelSupply +from lcfs.db.models.compliance.ComplianceReportSummary import ComplianceReportSummary +from lcfs.web.api.compliance_report.schema import ( + ComplianceReportSummaryRowSchema, +) + + +@pytest.mark.anyio +async def test_calculate_low_carbon_fuel_target_summary( + compliance_report_summary_service, mock_trxn_repo, mock_repo +): + + # Mock input data + compliance_period_start = datetime(2024, 1, 1) + compliance_period_end = datetime(2024, 12, 31) + organization_id = 1 + report_id = 1 + + # Mock fuel supply records. The expected units noted below follow the + # compliance-unit formula: + # (target_ci * eer - ci_of_fuel) * quantity * energy_density / 1_000_000 + # e.g. (100 * 1.0 - 80) * 100000 * 10 / 1_000_000 = 20 + mock_fuel_supplies = [ + MagicMock( + target_ci=100, eer=1.0, ci_of_fuel=80, quantity=100000, energy_density=10 + ), # Expected units: 20 + MagicMock( + target_ci=90, eer=1.2, ci_of_fuel=70, quantity=200000, energy_density=8 + ), # Expected units: 60.8 + MagicMock( + target_ci=80, eer=0.5, ci_of_fuel=60, quantity=300000, energy_density=8 + ), # Expected units: -48 + ] + compliance_report_summary_service.get_effective_fuel_supplies = AsyncMock( + return_value=mock_fuel_supplies + ) + + # Mock repository method returns + mock_repo.get_transferred_out_compliance_units.return_value = 500 + mock_repo.get_received_compliance_units.return_value = 300 + mock_repo.get_issued_compliance_units.return_value = 200 + mock_trxn_repo.calculate_available_balance_for_period.return_value = 1000 + compliance_report_summary_service.calculate_fuel_supply_compliance_units = ( + AsyncMock(return_value=100) + ) + compliance_report_summary_service.calculate_fuel_export_compliance_units = ( + AsyncMock(return_value=100) + ) + + # Call the method + summary, penalty = ( + await compliance_report_summary_service.calculate_low_carbon_fuel_target_summary( 
compliance_period_start, compliance_period_end, organization_id, report_id + ) + ) + + # Assertions + assert isinstance(summary, list) + assert all(isinstance(item, ComplianceReportSummaryRowSchema) + for item in summary) + assert len(summary) == 11 # Ensure all 11 lines are present + + # Check specific line values + line_values = {item.line: item.value for item in summary} + assert line_values["12"] == 500 # Transferred out + assert line_values["13"] == 300 # Received + assert line_values["14"] == 200 # Issued + assert line_values["18"] == 100 + assert line_values["19"] == 100 + assert line_values["20"] == 200 + assert line_values["21"] == 0 # Not calculated yet + assert line_values["22"] == 1200 # Add all the above + + # Verify method calls + mock_repo.get_transferred_out_compliance_units.assert_called_once_with( + compliance_period_start, compliance_period_end, organization_id + ) + mock_repo.get_received_compliance_units.assert_called_once_with( + compliance_period_start, compliance_period_end, organization_id + ) + mock_repo.get_issued_compliance_units.assert_called_once_with( + compliance_period_start, compliance_period_end, organization_id + ) + mock_trxn_repo.calculate_available_balance_for_period.assert_called_once_with( + organization_id, compliance_period_start.year + ) + + +@pytest.mark.anyio +async def test_calculate_renewable_fuel_target_summary_2024( + compliance_report_summary_service, +): + + fossil_quantities = {"gasoline": 100, "diesel": 100, "jet_fuel": 100} + renewable_quantities = {"gasoline": 100, "diesel": 100, "jet_fuel": 100} + previous_retained = {"gasoline": 100, "diesel": 100, "jet_fuel": 100} + previous_obligation = {"gasoline": 100, "diesel": 100, "jet_fuel": 100} + notional_transfers_sum = {"gasoline": 100, "diesel": 100, "jet_fuel": 100} + compliance_period = 2024 + summary_model = ComplianceReportSummary( + line_6_renewable_fuel_retained_gasoline=100, + line_6_renewable_fuel_retained_diesel=200, + line_6_renewable_fuel_retained_jet_fuel=300, + line_8_obligation_deferred_gasoline=100, + line_8_obligation_deferred_diesel=100, + line_8_obligation_deferred_jet_fuel=100, + ) + + result = compliance_report_summary_service.calculate_renewable_fuel_target_summary( + fossil_quantities, + renewable_quantities, + previous_retained, + previous_obligation, + notional_transfers_sum, + compliance_period, + summary_model, + ) + + # Line 1: Volume of fossil-derived base fuel supplied + assert result[0].gasoline == 100.0 + assert result[0].diesel == 100.0 + assert result[0].jet_fuel == 100.0 + + # Line 2: Volume of eligible renewable fuel supplied + assert result[1].gasoline == 100.0 + assert result[1].diesel == 100.0 + assert result[1].jet_fuel == 100.0 + + # Line 3: Total volume of tracked fuel supplied (Line 1 + Line 2) + assert result[2].gasoline == 200.0 + assert result[2].diesel == 200.0 + assert result[2].jet_fuel == 200.0 + + # Line 4: Volume of eligible renewable fuel required + assert result[3].gasoline == 10.0 # 5% of 200 + assert result[3].diesel == 8.0 # 4% of 200 + assert result[3].jet_fuel == 0.0 # Jet fuel percentage is 0 in 2024 + + # Line 5: Net volume of eligible renewable fuel notionally transferred + assert result[4].gasoline == 100.0 + assert result[4].diesel == 100.0 + assert result[4].jet_fuel == 100.0 + + # Line 6: Volume of eligible renewable fuel retained + assert result[5].gasoline == 0.0 + assert result[5].diesel == 0.0 + assert result[5].jet_fuel == 0.0 + + # Line 7: Volume of eligible renewable fuel previously retained + assert 
result[6].gasoline == 100.0 + assert result[6].diesel == 100.0 + assert result[6].jet_fuel == 100.0 + + # Line 8: Volume of eligible renewable obligation deferred + assert result[7].gasoline == 0.0 + assert result[7].diesel == 0.0 + assert result[7].jet_fuel == 0.0 + + # Line 9: Volume of renewable obligation added + assert result[8].gasoline == 100.0 + assert result[8].diesel == 100.0 + assert result[8].jet_fuel == 100.0 + + # Line 10: Net volume of eligible renewable fuel supplied + assert result[9].gasoline == 200.0 + assert result[9].diesel == 200.0 + assert result[9].jet_fuel == 200.0 + + # Line 11: Non-compliance penalty payable + assert result[10].gasoline == 0.0 + assert result[10].diesel == 0.0 + assert result[10].jet_fuel == 0.0 + + +@pytest.mark.anyio +async def test_calculate_renewable_fuel_target_summary_2028( + compliance_report_summary_service, +): + + fossil_quantities = {"gasoline": 100, "diesel": 300, "jet_fuel": 500} + renewable_quantities = {"gasoline": 200, "diesel": 400, "jet_fuel": 600} + previous_retained = {"gasoline": 300, "diesel": 500, "jet_fuel": 100} + previous_obligation = {"gasoline": 400, "diesel": 600, "jet_fuel": 200} + notional_transfers_sum = {"gasoline": 500, "diesel": 100, "jet_fuel": 300} + compliance_period = 2028 + summary_model = ComplianceReportSummary( + line_6_renewable_fuel_retained_gasoline=100, + line_6_renewable_fuel_retained_diesel=200, + line_6_renewable_fuel_retained_jet_fuel=300, + line_8_obligation_deferred_gasoline=300, + line_8_obligation_deferred_diesel=200, + line_8_obligation_deferred_jet_fuel=100, + ) + + result = compliance_report_summary_service.calculate_renewable_fuel_target_summary( + fossil_quantities, + renewable_quantities, + previous_retained, + previous_obligation, + notional_transfers_sum, + compliance_period, + summary_model, + ) + + assert len(result) == 11 + assert isinstance(result[0], ComplianceReportSummaryRowSchema) + + # Line 1: Volume of fossil-derived base fuel supplied + assert result[0].gasoline == 100.0 + assert result[0].diesel == 300.0 + assert result[0].jet_fuel == 500.0 + + # Line 2: Volume of eligible renewable fuel supplied + assert result[1].gasoline == 200.0 + assert result[1].diesel == 400.0 + assert result[1].jet_fuel == 600.0 + + # Line 3: Total volume of tracked fuel supplied (Line 1 + Line 2) + assert result[2].gasoline == 300.0 + assert result[2].diesel == 700.0 + assert result[2].jet_fuel == 1100.0 + + # Line 4: Volume of eligible renewable fuel required + assert result[3].gasoline == 15.0 # 5% of 300 + assert result[3].diesel == 28.0 # 4% of 700 + assert result[3].jet_fuel == 11 # 1% of 1100 + + # Line 5: Net volume of eligible renewable fuel notionally transferred + assert result[4].gasoline == 500.0 + assert result[4].diesel == 100.0 + assert result[4].jet_fuel == 300.0 + + # Line 6: Volume of eligible renewable fuel retained + assert result[5].gasoline == 0.0 + assert result[5].diesel == 0.0 + assert result[5].jet_fuel == 0.0 + + # Line 7: Volume of eligible renewable fuel previously retained + assert result[6].gasoline == 300.0 + assert result[6].diesel == 500.0 + assert result[6].jet_fuel == 100.0 + + # Line 8: Volume of eligible renewable obligation deferred + assert result[7].gasoline == 0.0 + assert result[7].diesel == 0.0 + assert result[7].jet_fuel == 0.0 + + # Line 9: Volume of renewable obligation added + assert result[8].gasoline == 400.0 + assert result[8].diesel == 600.0 + assert result[8].jet_fuel == 200.0 + + # Line 10: Net volume of eligible renewable fuel supplied + 
assert result[9].gasoline == 600.0 + assert result[9].diesel == 400.0 + assert result[9].jet_fuel == 800.0 + + # Line 11: Non-compliance penalty payable + assert result[10].gasoline == 0.0 + assert result[10].diesel == 0.0 + assert result[10].jet_fuel == 0.0 + + +@pytest.mark.anyio +async def test_calculate_renewable_fuel_target_summary_2029( + compliance_report_summary_service, +): + + fossil_quantities = {"gasoline": 300, "diesel": 200, "jet_fuel": 100} + renewable_quantities = {"gasoline": 100, "diesel": 300, "jet_fuel": 200} + previous_retained = {"gasoline": 200, "diesel": 100, "jet_fuel": 300} + previous_obligation = {"gasoline": 300, "diesel": 200, "jet_fuel": 100} + notional_transfers_sum = {"gasoline": 100, "diesel": 300, "jet_fuel": 200} + compliance_period = 2029 + summary_model = ComplianceReportSummary( + line_6_renewable_fuel_retained_gasoline=100, + line_6_renewable_fuel_retained_diesel=200, + line_6_renewable_fuel_retained_jet_fuel=300, + line_8_obligation_deferred_gasoline=300, + line_8_obligation_deferred_diesel=200, + line_8_obligation_deferred_jet_fuel=100, + ) + + result = compliance_report_summary_service.calculate_renewable_fuel_target_summary( + fossil_quantities, + renewable_quantities, + previous_retained, + previous_obligation, + notional_transfers_sum, + compliance_period, + summary_model, + ) + + assert len(result) == 11 + assert isinstance(result[0], ComplianceReportSummaryRowSchema) + + # Line 1: Volume of fossil-derived base fuel supplied + assert result[0].gasoline == 300.0 + assert result[0].diesel == 200.0 + assert result[0].jet_fuel == 100.0 + + # Line 2: Volume of eligible renewable fuel supplied + assert result[1].gasoline == 100.0 + assert result[1].diesel == 300.0 + assert result[1].jet_fuel == 200.0 + + # Line 3: Total volume of tracked fuel supplied (Line 1 + Line 2) + assert result[2].gasoline == 400.0 + assert result[2].diesel == 500.0 + assert result[2].jet_fuel == 300.0 + + # Line 4: Volume of eligible renewable fuel required + assert result[3].gasoline == 20.0 # 5% of 400 + assert result[3].diesel == 20.0 # 4% of 500 + assert result[3].jet_fuel == 6.0 # 2% of 300 + + # Line 5: Net volume of eligible renewable fuel notionally transferred + assert result[4].gasoline == 100.0 + assert result[4].diesel == 300.0 + assert result[4].jet_fuel == 200.0 + + # Line 6: Volume of eligible renewable fuel retained + assert result[5].gasoline == 0.0 + assert result[5].diesel == 0.0 + assert result[5].jet_fuel == 0.0 + + # Line 7: Volume of eligible renewable fuel previously retained + assert result[6].gasoline == 200.0 + assert result[6].diesel == 100.0 + assert result[6].jet_fuel == 300.0 + + # Line 8: Volume of eligible renewable obligation deferred + assert result[7].gasoline == 0.0 + assert result[7].diesel == 0.0 + assert result[7].jet_fuel == 0.0 + + # Line 9: Volume of renewable obligation added + assert result[8].gasoline == 300.0 + assert result[8].diesel == 200.0 + assert result[8].jet_fuel == 100.0 + + # Line 10: Net volume of eligible renewable fuel supplied + assert result[9].gasoline == 100.0 + assert result[9].diesel == 500.0 + assert result[9].jet_fuel == 600.0 + + # Line 11: Non-compliance penalty payable + assert result[10].gasoline == 0.0 + assert result[10].diesel == 0.0 + assert result[10].jet_fuel == 0.0 + + +@pytest.mark.anyio +async def test_calculate_renewable_fuel_target_summary_2030( + compliance_report_summary_service, +): + + fossil_quantities = {"gasoline": 100, "diesel": 200, "jet_fuel": 300} + renewable_quantities = 
{"gasoline": 200, "diesel": 300, "jet_fuel": 100} + previous_retained = {"gasoline": 300, "diesel": 100, "jet_fuel": 200} + previous_obligation = {"gasoline": 100, "diesel": 200, "jet_fuel": 300} + notional_transfers_sum = {"gasoline": 200, "diesel": 300, "jet_fuel": 100} + compliance_period = 2030 + summary_model = ComplianceReportSummary( + line_6_renewable_fuel_retained_gasoline=300, + line_6_renewable_fuel_retained_diesel=200, + line_6_renewable_fuel_retained_jet_fuel=100, + line_8_obligation_deferred_gasoline=100, + line_8_obligation_deferred_diesel=100, + line_8_obligation_deferred_jet_fuel=100, + ) + + result = compliance_report_summary_service.calculate_renewable_fuel_target_summary( + fossil_quantities, + renewable_quantities, + previous_retained, + previous_obligation, + notional_transfers_sum, + compliance_period, + summary_model, + ) + + # Line 1: Volume of fossil-derived base fuel supplied + assert result[0].gasoline == 100.0 + assert result[0].diesel == 200.0 + assert result[0].jet_fuel == 300.0 + + # Line 2: Volume of eligible renewable fuel supplied + assert result[1].gasoline == 200.0 + assert result[1].diesel == 300.0 + assert result[1].jet_fuel == 100.0 + + # Line 3: Total volume of tracked fuel supplied (Line 1 + Line 2) + assert result[2].gasoline == 300.0 + assert result[2].diesel == 500.0 + assert result[2].jet_fuel == 400.0 + + # Line 4: Volume of eligible renewable fuel required + assert result[3].gasoline == 15.0 # 5% of 300 + assert result[3].diesel == 20.0 # 4% of 500 + assert result[3].jet_fuel == 12.0 + + # Line 5: Net volume of eligible renewable fuel notionally transferred + assert result[4].gasoline == 200.0 + assert result[4].diesel == 300.0 + assert result[4].jet_fuel == 100.0 + + # Line 6: Volume of eligible renewable fuel retained + assert result[5].gasoline == 0.0 + assert result[5].diesel == 0.0 + assert result[5].jet_fuel == 0.0 + + # Line 7: Volume of eligible renewable fuel previously retained + assert result[6].gasoline == 300.0 + assert result[6].diesel == 100.0 + assert result[6].jet_fuel == 200.0 + + # Line 8: Volume of eligible renewable obligation deferred + assert result[7].gasoline == 0.0 + assert result[7].diesel == 0.0 + assert result[7].jet_fuel == 0.0 + + # Line 9: Volume of renewable obligation added + assert result[8].gasoline == 100.0 + assert result[8].diesel == 200.0 + assert result[8].jet_fuel == 300.0 + + # Line 10: Net volume of eligible renewable fuel supplied + assert result[9].gasoline == 600.0 + assert result[9].diesel == 500.0 + assert result[9].jet_fuel == 100.0 + + # Line 11: Non-compliance penalty payable + assert result[10].gasoline == 0.0 + assert result[10].diesel == 0.0 + assert result[10].jet_fuel == 0.0 + + +@pytest.mark.anyio +async def test_calculate_non_compliance_penalty_summary_without_penalty_payable( + compliance_report_summary_service, compliance_report_summary_row_schema +): + mock_compliance_report_summary = [ + compliance_report_summary_row_schema( + line="11", gasoline=1000, diesel=2000, jet_fuel=3000, total_value=6000 + ) + ] + + result = compliance_report_summary_service.calculate_non_compliance_penalty_summary( + 0, mock_compliance_report_summary + ) + + assert len(result) == 3 + assert result[0].total_value == 6000 + assert result[1].total_value == 0 + assert result[2].total_value == 6000 + + +@pytest.mark.anyio +async def test_calculate_non_compliance_penalty_summary_with_penalty_payable( + compliance_report_summary_service, compliance_report_summary_row_schema +): + mock_compliance_report_summary 
= [ + compliance_report_summary_row_schema( + line="11", gasoline=1000, diesel=2000, jet_fuel=3000, total_value=6000 + ) + ] + + result = compliance_report_summary_service.calculate_non_compliance_penalty_summary( + -2, mock_compliance_report_summary + ) + + assert len(result) == 3 + assert result[0].total_value == 6000 + assert result[1].total_value == 1200 # 2-unit deficit x $600/unit + assert result[2].total_value == 7200 + + +@pytest.mark.anyio +async def test_calculate_renewable_fuel_target_summary_no_renewables( + compliance_report_summary_service, +): + # Test case where there are no renewable quantities + fossil_quantities = {"gasoline": 1000, "diesel": 2000, "jet_fuel": 3000} + renewable_quantities = {"gasoline": 0, "diesel": 0, "jet_fuel": 0} + previous_retained = {"gasoline": 0, "diesel": 0, "jet_fuel": 0} + previous_obligation = {"gasoline": 0, "diesel": 0, "jet_fuel": 0} + notional_transfers_sum = {"gasoline": 0, "diesel": 0, "jet_fuel": 0} + compliance_period = 2030 + summary_model = ComplianceReportSummary( + line_6_renewable_fuel_retained_gasoline=0, + line_6_renewable_fuel_retained_diesel=0, + line_6_renewable_fuel_retained_jet_fuel=0, + line_8_obligation_deferred_gasoline=0, + line_8_obligation_deferred_diesel=0, + line_8_obligation_deferred_jet_fuel=0, + ) + + result = compliance_report_summary_service.calculate_renewable_fuel_target_summary( + fossil_quantities, + renewable_quantities, + previous_retained, + previous_obligation, + notional_transfers_sum, + compliance_period, + summary_model, + ) + + assert len(result) == 11 + assert isinstance(result[0], ComplianceReportSummaryRowSchema) + # Penalty should be applied due to no renewables + assert result[10].gasoline == 15.0 # 50 (5% of 1000) x $0.30/L + assert result[10].diesel == 36.0 # 80 (4% of 2000) x $0.45/L + assert result[10].jet_fuel == 45.0 # 90 (3% of 3000) x $0.50/L + assert result[10].total_value == 96.0 # 15 + 36 + 45 + + +@pytest.mark.anyio +async def test_calculate_renewable_fuel_target_summary_high_renewables( + compliance_report_summary_service, +): + # Test case where renewable quantities exceed requirements + fossil_quantities = {"gasoline": 100, "diesel": 200, "jet_fuel": 300} + renewable_quantities = {"gasoline": 500, "diesel": 600, "jet_fuel": 700} + previous_retained = {"gasoline": 0, "diesel": 0, "jet_fuel": 0} + previous_obligation = {"gasoline": 0, "diesel": 0, "jet_fuel": 0} + notional_transfers_sum = {"gasoline": 0, "diesel": 0, "jet_fuel": 0} + compliance_period = 2030 + summary_model = ComplianceReportSummary( + line_6_renewable_fuel_retained_gasoline=0, + line_6_renewable_fuel_retained_diesel=0, + line_6_renewable_fuel_retained_jet_fuel=0, + line_8_obligation_deferred_gasoline=0, + line_8_obligation_deferred_diesel=0, + line_8_obligation_deferred_jet_fuel=0, + ) + + result = compliance_report_summary_service.calculate_renewable_fuel_target_summary( + fossil_quantities, + renewable_quantities, + previous_retained, + previous_obligation, + notional_transfers_sum, + compliance_period, + summary_model, + ) + + assert len(result) == 11 + assert isinstance(result[0], ComplianceReportSummaryRowSchema) + # No penalty since renewables exceed requirements + assert result[10].gasoline == 0 + assert result[10].diesel == 0 + assert result[10].jet_fuel == 0 + + +@pytest.mark.anyio +async def test_calculate_renewable_fuel_target_summary_copy_lines_6_and_8( + compliance_report_summary_service, +): + # Test case where required renewable quantities have not changed, so lines 6 and 8 should be copied + fossil_quantities = {"gasoline": 100, "diesel": 200, "jet_fuel": 300} + renewable_quantities = {"gasoline": 0, "diesel": 0, 
"jet_fuel": 0} + previous_retained = {"gasoline": 0, "diesel": 0, "jet_fuel": 0} + previous_obligation = {"gasoline": 0, "diesel": 0, "jet_fuel": 0} + notional_transfers_sum = {"gasoline": 0, "diesel": 0, "jet_fuel": 0} + compliance_period = 2030 + summary_model = ComplianceReportSummary( + line_6_renewable_fuel_retained_gasoline=10, + line_6_renewable_fuel_retained_diesel=20, + line_6_renewable_fuel_retained_jet_fuel=30, + line_8_obligation_deferred_gasoline=5, + line_8_obligation_deferred_diesel=10, + line_8_obligation_deferred_jet_fuel=15, + ) + + # Set the expected eligible renewable fuel required to match the summary model + expected_eligible_renewable_fuel_required = { + "gasoline": 5.0, # 100 * 0.05 + "diesel": 8.0, # 200 * 0.04 + "jet_fuel": 9.0, # 300 * 0.03 + } + + # Mock the summary model's line 4 values to match the expected required values + summary_model.line_4_eligible_renewable_fuel_required_gasoline = ( + expected_eligible_renewable_fuel_required["gasoline"] + ) + summary_model.line_4_eligible_renewable_fuel_required_diesel = ( + expected_eligible_renewable_fuel_required["diesel"] + ) + summary_model.line_4_eligible_renewable_fuel_required_jet_fuel = ( + expected_eligible_renewable_fuel_required["jet_fuel"] + ) + + result = compliance_report_summary_service.calculate_renewable_fuel_target_summary( + fossil_quantities, + renewable_quantities, + previous_retained, + previous_obligation, + notional_transfers_sum, + compliance_period, + summary_model, + ) + + assert len(result) == 11 + assert isinstance(result[0], ComplianceReportSummaryRowSchema) + + # Line 6: Volume of eligible renewable fuel retained + assert result[5].gasoline == 10.0 # Should be copied if conditions are met + assert result[5].diesel == 20.0 + assert result[5].jet_fuel == 30.0 + + # Line 8: Volume of eligible renewable obligation deferred + assert result[7].gasoline == 5.0 # Should be copied if conditions are met + assert result[7].diesel == 10.0 + assert result[7].jet_fuel == 15.0 + + +@pytest.mark.anyio +async def test_calculate_renewable_fuel_target_summary_no_copy_lines_6_and_8( + compliance_report_summary_service, +): + # Test case where required renewable quantities have changed, so lines 6 and 8 should not be copied + fossil_quantities = {"gasoline": 100, "diesel": 200, "jet_fuel": 300} + renewable_quantities = {"gasoline": 50, "diesel": 150, "jet_fuel": 50} + previous_retained = {"gasoline": 20, "diesel": 30, "jet_fuel": 40} + previous_obligation = {"gasoline": 10, "diesel": 20, "jet_fuel": 30} + notional_transfers_sum = {"gasoline": 5, "diesel": 10, "jet_fuel": 15} + compliance_period = 2030 + summary_model = ComplianceReportSummary( + line_6_renewable_fuel_retained_gasoline=10, + line_6_renewable_fuel_retained_diesel=20, + line_6_renewable_fuel_retained_jet_fuel=30, + line_8_obligation_deferred_gasoline=5, + line_8_obligation_deferred_diesel=10, + line_8_obligation_deferred_jet_fuel=15, + ) + + # Set the expected eligible renewable fuel required to differ from the summary model + expected_eligible_renewable_fuel_required = { + "gasoline": 10.0, # Different from summary model + "diesel": 16.0, # Different from summary model + "jet_fuel": 18.0, # Different from summary model + } + + # Mock the summary model's line 4 values to differ from the expected required values + summary_model.line_4_eligible_renewable_fuel_required_gasoline = ( + expected_eligible_renewable_fuel_required["gasoline"] + 1 + ) + summary_model.line_4_eligible_renewable_fuel_required_diesel = ( + 
expected_eligible_renewable_fuel_required["diesel"] + 1 + ) + summary_model.line_4_eligible_renewable_fuel_required_jet_fuel = ( + expected_eligible_renewable_fuel_required["jet_fuel"] + 1 + ) + + result = compliance_report_summary_service.calculate_renewable_fuel_target_summary( + fossil_quantities, + renewable_quantities, + previous_retained, + previous_obligation, + notional_transfers_sum, + compliance_period, + summary_model, + ) + + assert len(result) == 11 + assert isinstance(result[0], ComplianceReportSummaryRowSchema) + assert result[5].gasoline == 0 # Line 6 should not be copied + assert result[5].diesel == 0 + assert result[5].jet_fuel == 0 + assert result[7].gasoline == 0 # Line 8 should not be copied + assert result[7].diesel == 0 + assert result[7].jet_fuel == 0 + + +@pytest.mark.anyio +async def test_can_sign_flag_logic( + compliance_report_summary_service, mock_repo, mock_trxn_repo +): + # Scenario 1: All conditions met + mock_effective_fuel_supplies = [MagicMock()] + mock_notional_transfers = MagicMock(notional_transfers=[MagicMock()]) + mock_fuel_exports = [MagicMock()] + mock_allocation_agreements = [MagicMock()] + mock_compliance_report = MagicMock( + compliance_report_group_uuid="mock-group-uuid", + compliance_period=MagicMock(effective_date=MagicMock(year=2024)), + organization_id=1, + compliance_report_id=1, + summary=MagicMock(is_locked=False), + ) + + mock_trxn_repo.calculate_available_balance_for_period.return_value = 1000 + + # Mock previous retained and obligation dictionaries + previous_retained = {"gasoline": 10, "diesel": 20, "jet_fuel": 30} + previous_obligation = {"gasoline": 5, "diesel": 10, "jet_fuel": 15} + + # Mock repository methods + mock_repo.get_compliance_report_by_id = AsyncMock( + return_value=mock_compliance_report + ) + mock_repo.calculate_fuel_quantities = AsyncMock( + return_value={ + "gasoline": 100, + "diesel": 50, + "jet_fuel": 25, + } + ) + mock_repo.aggregate_other_uses = AsyncMock( + return_value={ + "gasoline": 50, + "diesel": 25, + "jet_fuel": 10, + } + ) + mock_repo.get_assessed_compliance_report_by_period = AsyncMock( + return_value=MagicMock( + summary=MagicMock( + line_6_renewable_fuel_retained_gasoline=previous_retained["gasoline"], + line_6_renewable_fuel_retained_diesel=previous_retained["diesel"], + line_6_renewable_fuel_retained_jet_fuel=previous_retained["jet_fuel"], + line_8_obligation_deferred_gasoline=previous_obligation["gasoline"], + line_8_obligation_deferred_diesel=previous_obligation["diesel"], + line_8_obligation_deferred_jet_fuel=previous_obligation["jet_fuel"], + ) + ) + ) + + compliance_report_summary_service.fuel_supply_repo.get_effective_fuel_supplies = ( + AsyncMock(return_value=mock_effective_fuel_supplies) + ) + compliance_report_summary_service.notional_transfer_service.calculate_notional_transfers = AsyncMock( + return_value=mock_notional_transfers + ) + compliance_report_summary_service.fuel_export_repo.get_effective_fuel_exports = ( + AsyncMock(return_value=mock_fuel_exports) + ) + compliance_report_summary_service.allocation_agreement_repo.get_allocation_agreements = AsyncMock( + return_value=mock_allocation_agreements + ) + + # Call the method + result = ( + await compliance_report_summary_service.calculate_compliance_report_summary(1) + ) + + # Assert that `can_sign` is True + assert result.can_sign is True + + # Scenario 2: No conditions met + compliance_report_summary_service.fuel_supply_repo.get_effective_fuel_supplies = ( + AsyncMock(return_value=[]) + ) + 
compliance_report_summary_service.notional_transfer_service.calculate_notional_transfers = AsyncMock( + return_value=MagicMock(notional_transfers=[]) + ) + compliance_report_summary_service.fuel_export_repo.get_effective_fuel_exports = ( + AsyncMock(return_value=[]) + ) + compliance_report_summary_service.allocation_agreement_repo.get_allocation_agreements = AsyncMock( + return_value=[] + ) + + # Call the method again + result = ( + await compliance_report_summary_service.calculate_compliance_report_summary(1) + ) + + # Assert that `can_sign` is False + assert result.can_sign is False + + +@pytest.mark.anyio +async def test_calculate_fuel_quantities_fossil_derived( + compliance_report_summary_service, + mock_repo, + mock_trxn_repo, + mock_fuel_supply_repo, +): + # Create a mock repository + mock_repo.aggregate_fuel_supplies.return_value = {"diesel": 100.0} + mock_repo.aggregate_other_uses.return_value = {"gasoline": 50.0} + + # Define test inputs + compliance_report_id = 1 + effective_fuel_supplies: List[FuelSupply] = ( + [] + ) # Add mock FuelSupply objects as needed + fossil_derived = True + + # Call the method under test + result = await compliance_report_summary_service.calculate_fuel_quantities( + compliance_report_id, effective_fuel_supplies, fossil_derived + ) + + # Assertions + assert result == {"diesel": 100.0, "gasoline": 50.0} + mock_repo.aggregate_fuel_supplies.assert_called_once_with( + effective_fuel_supplies, fossil_derived + ) + mock_repo.aggregate_other_uses.assert_awaited_once_with( + compliance_report_id, fossil_derived + ) + mock_repo.aggregate_allocation_agreements.assert_not_called() + + +@pytest.mark.anyio +async def test_calculate_fuel_quantities_renewable( + compliance_report_summary_service, + mock_repo, + mock_trxn_repo, + mock_fuel_supply_repo, +): + # Create a mock repository + mock_repo.aggregate_fuel_supplies.return_value = {"gasoline": 200.0} + mock_repo.aggregate_other_uses.return_value = { + "diesel": 75.0, "jet-fuel": 25.0} + + # Define test inputs + compliance_report_id = 2 + effective_fuel_supplies: List[FuelSupply] = [] + fossil_derived = False + + # Call the method under test + result = await compliance_report_summary_service.calculate_fuel_quantities( + compliance_report_id, effective_fuel_supplies, fossil_derived + ) + + # Assertions + mock_repo.aggregate_fuel_supplies.assert_called_once_with( + effective_fuel_supplies, fossil_derived + ) + mock_repo.aggregate_other_uses.assert_awaited_once_with( + compliance_report_id, fossil_derived + ) + assert result == {"gasoline": 200.0, "diesel": 75.0, "jet-fuel": 25.0} diff --git a/backend/lcfs/tests/compliance_report/test_update_service.py b/backend/lcfs/tests/compliance_report/test_update_service.py new file mode 100644 index 000000000..12532c4e0 --- /dev/null +++ b/backend/lcfs/tests/compliance_report/test_update_service.py @@ -0,0 +1,606 @@ +from fastapi import HTTPException +from lcfs.db.models.user.Role import RoleEnum +from lcfs.web.api.compliance_report.update_service import ComplianceReportUpdateService +from lcfs.web.api.notification.services import NotificationService +import pytest +from unittest.mock import AsyncMock, MagicMock, patch +from lcfs.db.models.compliance.ComplianceReport import ComplianceReport +from lcfs.db.models.compliance.ComplianceReportStatus import ( + ComplianceReportStatus, + ComplianceReportStatusEnum, +) +from lcfs.db.models.compliance.ComplianceReportSummary import ComplianceReportSummary +from lcfs.db.models.transaction.Transaction import TransactionActionEnum +from 
lcfs.web.api.compliance_report.schema import ( + ComplianceReportUpdateSchema, + ComplianceReportSummaryRowSchema, + ComplianceReportSummarySchema, +) +from lcfs.web.exception.exceptions import DataNotFoundException, ServiceException + + +# Mock for user_has_roles function +@pytest.fixture +def mock_user_has_roles(): + with patch("lcfs.web.api.compliance_report.update_service.user_has_roles") as mock: + yield mock + + +@pytest.fixture +def mock_notification_service(): + mock_service = AsyncMock(spec=NotificationService) + with patch( + "lcfs.web.api.compliance_report.update_service.Depends", + return_value=mock_service, + ): + yield mock_service + + +@pytest.fixture +def mock_environment_vars(): + with patch("lcfs.web.api.email.services.settings") as mock_settings: + mock_settings.ches_auth_url = "http://mock_auth_url" + mock_settings.ches_email_url = "http://mock_email_url" + mock_settings.ches_client_id = "mock_client_id" + mock_settings.ches_client_secret = "mock_client_secret" + mock_settings.ches_sender_email = "noreply@gov.bc.ca" + mock_settings.ches_sender_name = "Mock Notification System" + yield mock_settings + + +# Mock for adjust_balance method within the OrganizationsService +@pytest.fixture +def mock_org_service(): + mock_org_service = MagicMock() + mock_org_service.adjust_balance = AsyncMock() # Mock the adjust_balance method + return mock_org_service + + +# update_compliance_report +@pytest.mark.anyio +async def test_update_compliance_report_status_change( + compliance_report_update_service, mock_repo, mock_notification_service +): + # Mock data + report_id = 1 + mock_report = MagicMock(spec=ComplianceReport) + mock_report.compliance_report_id = report_id + mock_report.current_status = MagicMock(spec=ComplianceReportStatus) + mock_report.current_status.status = ComplianceReportStatusEnum.Draft + mock_report.compliance_period = MagicMock() + mock_report.compliance_period.description = "2024" + mock_report.transaction_id = 123 + + new_status = MagicMock(spec=ComplianceReportStatus) + new_status.status = ComplianceReportStatusEnum.Submitted + + report_data = ComplianceReportUpdateSchema( + status="Submitted", supplemental_note="Test note" + ) + + # Set up mocks + mock_repo.get_compliance_report_by_id.return_value = mock_report + mock_repo.get_compliance_report_status_by_desc.return_value = new_status + compliance_report_update_service.handle_status_change = AsyncMock() + mock_repo.update_compliance_report.return_value = mock_report + compliance_report_update_service._perform_notification_call = AsyncMock() + + # Call the method + updated_report = await compliance_report_update_service.update_compliance_report( + report_id, report_data + ) + + # Assertions + assert updated_report == mock_report + mock_repo.get_compliance_report_by_id.assert_called_once_with( + report_id, is_model=True + ) + mock_repo.get_compliance_report_status_by_desc.assert_called_once_with( + report_data.status + ) + compliance_report_update_service.handle_status_change.assert_called_once_with( + mock_report, new_status.status + ) + mock_repo.add_compliance_report_history.assert_called_once_with( + mock_report, compliance_report_update_service.request.user + ) + mock_repo.update_compliance_report.assert_called_once_with(mock_report) + compliance_report_update_service._perform_notification_call.assert_called_once_with( + mock_report, "Submitted" + ) + + +@pytest.mark.anyio +async def test_update_compliance_report_no_status_change( + compliance_report_update_service, mock_repo +): + # Mock data + report_id = 1 
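+ # Unlike the transition test above, the status in this variant stays
+ # "Draft": the test still expects the report to be persisted and the
+ # notification call to fire with the unchanged status string.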
+ mock_report = MagicMock(spec=ComplianceReport) + mock_report.compliance_report_id = report_id + mock_report.current_status = MagicMock(spec=ComplianceReportStatus) + mock_report.current_status.status = ComplianceReportStatusEnum.Draft + mock_report.compliance_period = MagicMock() + mock_report.compliance_period.description = "2024" + mock_report.transaction_id = 123 + + # Status does not change + report_data = ComplianceReportUpdateSchema( + status="Draft", supplemental_note="Test note" + ) + + # Set up mocks + mock_repo.get_compliance_report_by_id.return_value = mock_report + mock_repo.get_compliance_report_status_by_desc.return_value = ( + mock_report.current_status + ) + mock_repo.update_compliance_report.return_value = mock_report + compliance_report_update_service._perform_notification_call = AsyncMock() + + # Call the method + updated_report = await compliance_report_update_service.update_compliance_report( + report_id, report_data + ) + + # Assertions + assert updated_report == mock_report + compliance_report_update_service._perform_notification_call.assert_called_once_with( + mock_report, "Draft" + ) + mock_repo.update_compliance_report.assert_called_once_with(mock_report) + + +@pytest.mark.anyio +async def test_update_compliance_report_not_found( + compliance_report_update_service, mock_repo +): + # Mock data + report_id = 1 + report_data = ComplianceReportUpdateSchema( + status="Submitted", supplemental_note="Test note" + ) + + # Set up mocks + mock_repo.get_compliance_report_by_id.return_value = None + + # Call the method and check for exception + with pytest.raises(DataNotFoundException): + await compliance_report_update_service.update_compliance_report( + report_id, report_data + ) + + mock_repo.get_compliance_report_by_id.assert_called_once_with( + report_id, is_model=True + ) + + +@pytest.mark.anyio +async def test_handle_submitted_status_insufficient_permissions( + compliance_report_update_service, mock_user_has_roles +): + # Mock data + mock_report = MagicMock(spec=ComplianceReport) + + # Mock user roles (user doesn't have required roles) + mock_user_has_roles.return_value = False + compliance_report_update_service.request = MagicMock() + compliance_report_update_service.request.user = MagicMock() + + # Call the method and check for exception + with pytest.raises(HTTPException) as exc_info: + await compliance_report_update_service.handle_submitted_status(mock_report) + + assert exc_info.value.status_code == 403 + assert exc_info.value.detail == "Forbidden." 
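The submit-status tests that follow collectively pin down one flow. Below is a minimal sketch of that flow, reconstructed only from the assertions in this file rather than from the actual service code; the attribute names repo, summary_service, and org_service, the step ordering, and the exception message are all assumptions.

from fastapi import HTTPException
from lcfs.db.models.transaction.Transaction import TransactionActionEnum
from lcfs.db.models.user.Role import RoleEnum
from lcfs.web.api.compliance_report.update_service import user_has_roles
from lcfs.web.exception.exceptions import ServiceException


async def handle_submitted_status_sketch(service, report):
    # 1) Role guard: only suppliers with signing authority may submit
    #    (asserted via the 403 "Forbidden." test above).
    if not user_has_roles(
        service.request.user, [RoleEnum.SUPPLIER, RoleEnum.SIGNING_AUTHORITY]
    ):
        raise HTTPException(status_code=403, detail="Forbidden.")

    # 2) Fetch any existing (possibly user-edited) summary and recalculate.
    existing_summary = await service.repo.get_summary_by_report_id(
        report.compliance_report_id
    )
    calculated = await service.summary_service.calculate_compliance_report_summary(
        report.compliance_report_id
    )

    # 3) A report whose summary cannot be signed is rejected outright.
    if not calculated.can_sign:
        raise ServiceException("Report is not ready to be signed")  # message assumed

    # 4) User-edited values on lines 6-8 of existing_summary take precedence
    #    over the calculated rows; calculated values fill the gaps (the merge
    #    itself is elided here -- see the "partial_existing_values" test).

    # 5) Reserve the report's compliance units, lock the summary, persist.
    report.transaction = await service.org_service.adjust_balance(
        transaction_action=TransactionActionEnum.Reserved,
        compliance_units=report.summary.line_20_surplus_deficit_units,
        organization_id=report.organization_id,
    )
    calculated.is_locked = True
    await service.repo.save_compliance_report_summary(calculated)

On this reading, the with/without-existing-summary variants below differ mainly in whether save_compliance_report_summary or add_compliance_report_summary receives the final rows, and test_handle_submitted_no_sign exercises the early ServiceException exit.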
+ + +# SUBMIT STATUS TESTS + + +@pytest.mark.anyio +async def test_handle_submitted_status_with_existing_summary( + compliance_report_update_service, + mock_repo, + mock_user_has_roles, + mock_org_service, + compliance_report_summary_service, +): + # Mock data + report_id = 1 + mock_report = MagicMock(spec=ComplianceReport) + mock_report.compliance_report_id = report_id + mock_report.organization_id = 123 # Mock organization ID + mock_report.summary = MagicMock(spec=ComplianceReportSummary) + mock_report.summary.line_20_surplus_deficit_units = 100 # Mock compliance units + + # Mock existing summary with user-edited values + existing_summary = MagicMock(spec=ComplianceReportSummary) + existing_summary.line_6_renewable_fuel_retained_gasoline = 1000 + existing_summary.line_7_previously_retained_diesel = 2000 + existing_summary.line_8_obligation_deferred_jet_fuel = 3000 + + # Mock user roles (user has required roles) + mock_user_has_roles.return_value = True + compliance_report_update_service.request = MagicMock() + compliance_report_update_service.request.user = MagicMock() + + # Mock calculated summary + calculated_summary = ComplianceReportSummarySchema( + summary_id=100, + compliance_report_id=report_id, + renewable_fuel_target_summary=[ + ComplianceReportSummaryRowSchema( + line="6", + field="renewable_fuel_retained", + gasoline=0, + diesel=0, + jet_fuel=0, + ), + ComplianceReportSummaryRowSchema( + line="7", field="previously_retained", gasoline=0, diesel=0, jet_fuel=0 + ), + ComplianceReportSummaryRowSchema( + line="8", field="obligation_deferred", gasoline=0, diesel=0, jet_fuel=0 + ), + ], + low_carbon_fuel_target_summary=[ + ComplianceReportSummaryRowSchema( + line="12", field="low_carbon_fuel_required", value=0 + ), + ], + non_compliance_penalty_summary=[ + ComplianceReportSummaryRowSchema( + line="21", field="non_compliance_penalty_payable", value=0 + ), + ], + can_sign=True, + ) + + # Set up mocks + mock_repo.get_summary_by_report_id.return_value = existing_summary + compliance_report_summary_service.calculate_compliance_report_summary = AsyncMock( + return_value=calculated_summary + ) + + # Inject the mocked org_service into the service being tested + compliance_report_update_service.org_service = mock_org_service + + # Mock the adjust_balance method to return a mocked transaction result + mock_org_service.adjust_balance.return_value = MagicMock() + + # Call the method + await compliance_report_update_service.handle_submitted_status(mock_report) + + # Assertions + mock_user_has_roles.assert_called_once_with( + compliance_report_update_service.request.user, + [RoleEnum.SUPPLIER, RoleEnum.SIGNING_AUTHORITY], + ) + mock_repo.get_summary_by_report_id.assert_called_once_with(report_id) + compliance_report_summary_service.calculate_compliance_report_summary.assert_called_once_with( + report_id + ) + + # Ensure the adjust_balance method is called with the correct parameters + mock_org_service.adjust_balance.assert_called_once_with( + transaction_action=TransactionActionEnum.Reserved, + compliance_units=mock_report.summary.line_20_surplus_deficit_units, + organization_id=mock_report.organization_id, + ) + + # Check if the report was updated with the result of adjust_balance + assert mock_report.transaction == mock_org_service.adjust_balance.return_value + + # Check if the summary is locked + saved_summary = mock_repo.save_compliance_report_summary.call_args[0][0] + assert saved_summary.is_locked == True + + +@pytest.mark.anyio +async def 
test_handle_submitted_status_without_existing_summary( + compliance_report_update_service, + mock_repo, + mock_user_has_roles, + mock_org_service, + compliance_report_summary_service, +): + # Mock data + report_id = 1 + mock_report = MagicMock(spec=ComplianceReport) + mock_report.compliance_report_id = report_id + mock_report.summary = None + # Mock user roles (user has required roles) + mock_user_has_roles.return_value = True + compliance_report_update_service.request = MagicMock() + compliance_report_update_service.request.user = MagicMock() + # Mock calculated summary + calculated_summary = ComplianceReportSummarySchema( + compliance_report_id=report_id, + renewable_fuel_target_summary=[ + ComplianceReportSummaryRowSchema( + line="6", + field="renewable_fuel_retained", + gasoline=100, + diesel=200, + jet_fuel=300, + ), + ComplianceReportSummaryRowSchema( + line="7", + field="previously_retained", + gasoline=400, + diesel=500, + jet_fuel=600, + ), + ComplianceReportSummaryRowSchema( + line="8", + field="obligation_deferred", + gasoline=700, + diesel=800, + jet_fuel=900, + ), + ], + low_carbon_fuel_target_summary=[ + ComplianceReportSummaryRowSchema( + line="12", field="low_carbon_fuel_required", value=0 + ), + ], + non_compliance_penalty_summary=[ + ComplianceReportSummaryRowSchema( + line="21", field="non_compliance_penalty_payable", value=0 + ), + ], + can_sign=True, + ) + + # Set up mocks + mock_repo.get_summary_by_report_id.return_value = None + compliance_report_summary_service.calculate_compliance_report_summary = AsyncMock( + return_value=calculated_summary + ) + # Inject the mocked org_service into the service being tested + compliance_report_update_service.org_service = mock_org_service + + # Mock the adjust_balance method to return a mocked transaction result + mock_org_service.adjust_balance.return_value = MagicMock() + # Call the method + await compliance_report_update_service.handle_submitted_status(mock_report) + + # Assertions + mock_repo.get_summary_by_report_id.assert_called_once_with(report_id) + compliance_report_summary_service.calculate_compliance_report_summary.assert_called_once_with( + report_id + ) + + # Check if a new summary is created + mock_repo.add_compliance_report_summary.assert_called_once() + new_summary = mock_repo.add_compliance_report_summary.call_args[0][0] + + # Check if calculated values are used + assert new_summary.renewable_fuel_target_summary[0].gasoline == 100 # line 6 + assert new_summary.renewable_fuel_target_summary[1].diesel == 500 # line 7 + assert new_summary.renewable_fuel_target_summary[2].jet_fuel == 900 # line 8 + + # Check if summary is locked + assert new_summary.is_locked == True + + # Check if report is updated with new summary + mock_repo.update_compliance_report.assert_called_once_with(mock_report) + + +@pytest.mark.anyio +async def test_handle_submitted_status_partial_existing_values( + compliance_report_update_service, + mock_repo, + mock_user_has_roles, + mock_org_service, + compliance_report_summary_service, +): + # Mock data + report_id = 1 + mock_report = MagicMock(spec=ComplianceReport) + mock_report.compliance_report_id = report_id + mock_report.summary = MagicMock(spec=ComplianceReportSummary) + mock_report.summary.summary_id = 100 + # Mock user roles (user has required roles) + mock_user_has_roles.return_value = True + compliance_report_update_service.request = MagicMock() + compliance_report_update_service.request.user = MagicMock() + # Mock existing summary with some user-edited values + existing_summary = 
MagicMock(spec=ComplianceReportSummary) + existing_summary.line_6_renewable_fuel_retained_gasoline = 1000 + existing_summary.line_7_previously_retained_diesel = None # Not edited by user + existing_summary.line_8_obligation_deferred_jet_fuel = 3000 + + # Mock calculated summary + calculated_summary = ComplianceReportSummarySchema( + summary_id=100, + compliance_report_id=report_id, + renewable_fuel_target_summary=[ + ComplianceReportSummaryRowSchema( + line="6", + field="renewable_fuel_retained", + gasoline=0, + diesel=0, + jet_fuel=0, + ), + ComplianceReportSummaryRowSchema( + line="7", + field="previously_retained", + gasoline=0, + diesel=2000, + jet_fuel=0, + ), + ComplianceReportSummaryRowSchema( + line="8", field="obligation_deferred", gasoline=0, diesel=0, jet_fuel=0 + ), + ], + low_carbon_fuel_target_summary=[ + ComplianceReportSummaryRowSchema( + line="12", field="low_carbon_fuel_required", value=0 + ), + ], + non_compliance_penalty_summary=[ + ComplianceReportSummaryRowSchema( + line="21", field="non_compliance_penalty_payable", value=0 + ), + ], + can_sign=True, + ) + + # Set up mocks + mock_repo.get_summary_by_report_id.return_value = existing_summary + compliance_report_summary_service.calculate_compliance_report_summary = AsyncMock( + return_value=calculated_summary + ) + # Inject the mocked org_service into the service being tested + compliance_report_update_service.org_service = mock_org_service + + # Mock the adjust_balance method to return a mocked transaction result + mock_org_service.adjust_balance.return_value = MagicMock() + # Call the method + await compliance_report_update_service.handle_submitted_status(mock_report) + + # Assertions + saved_summary = mock_repo.save_compliance_report_summary.call_args[0][0] + assert ( + saved_summary.renewable_fuel_target_summary[0].gasoline == 1000 + ) # Preserved user-edited value + assert ( + saved_summary.renewable_fuel_target_summary[1].diesel == 2000 + ) # Used calculated value + assert ( + saved_summary.renewable_fuel_target_summary[2].jet_fuel == 3000 + ) # Preserved user-edited value + + +@pytest.mark.anyio +async def test_handle_submitted_status_no_user_edits( + compliance_report_update_service, + mock_repo, + mock_user_has_roles, + mock_org_service, + compliance_report_summary_service, +): + # Mock data + report_id = 1 + mock_report = MagicMock(spec=ComplianceReport) + mock_report.compliance_report_id = report_id + mock_report.summary = MagicMock(spec=ComplianceReportSummary) + mock_report.summary.summary_id = 100 + # Mock user roles (user has required roles) + mock_user_has_roles.return_value = True + compliance_report_update_service.request = MagicMock() + compliance_report_update_service.request.user = MagicMock() + # Mock existing summary with no user-edited values + existing_summary = MagicMock(spec=ComplianceReportSummary) + existing_summary.line_6_renewable_fuel_retained_gasoline = None + existing_summary.line_7_previously_retained_diesel = None + existing_summary.line_8_obligation_deferred_jet_fuel = None + + # Mock calculated summary + calculated_summary = ComplianceReportSummarySchema( + summary_id=100, + compliance_report_id=report_id, + renewable_fuel_target_summary=[ + ComplianceReportSummaryRowSchema( + line="6", + field="renewable_fuel_retained", + gasoline=100, + diesel=200, + jet_fuel=300, + ), + ComplianceReportSummaryRowSchema( + line="7", + field="previously_retained", + gasoline=400, + diesel=500, + jet_fuel=600, + ), + ComplianceReportSummaryRowSchema( + line="8", + field="obligation_deferred", + 
gasoline=700, + diesel=800, + jet_fuel=900, + ), + ], + low_carbon_fuel_target_summary=[ + ComplianceReportSummaryRowSchema( + line="12", field="low_carbon_fuel_required", value=0 + ), + ], + non_compliance_penalty_summary=[ + ComplianceReportSummaryRowSchema( + line="21", field="non_compliance_penalty_payable", value=0 + ), + ], + can_sign=True, + ) + + # Set up mocks + mock_repo.get_summary_by_report_id.return_value = existing_summary + compliance_report_summary_service.calculate_compliance_report_summary = AsyncMock( + return_value=calculated_summary + ) + # Inject the mocked org_service into the service being tested + compliance_report_update_service.org_service = mock_org_service + + # Mock the adjust_balance method to return a mocked transaction result + mock_org_service.adjust_balance.return_value = MagicMock() + # Call the method + await compliance_report_update_service.handle_submitted_status(mock_report) + + # Assertions + saved_summary = mock_repo.save_compliance_report_summary.call_args[0][0] + assert ( + saved_summary.renewable_fuel_target_summary[0].gasoline == 100 + ) # Used calculated value + assert ( + saved_summary.renewable_fuel_target_summary[1].diesel == 500 + ) # Used calculated value + assert ( + saved_summary.renewable_fuel_target_summary[2].jet_fuel == 900 + ) # Used calculated value + + +@pytest.mark.anyio +async def test_handle_submitted_no_sign( + compliance_report_update_service, + mock_repo, + mock_user_has_roles, + mock_org_service, + compliance_report_summary_service, +): + # Mock data + report_id = 1 + mock_report = MagicMock(spec=ComplianceReport) + mock_report.compliance_report_id = report_id + mock_report.summary = MagicMock(spec=ComplianceReportSummary) + mock_report.summary.summary_id = 100 + # Mock user roles (user has required roles) + mock_user_has_roles.return_value = True + compliance_report_update_service.request = MagicMock() + compliance_report_update_service.request.user = MagicMock() + # Mock existing summary with no user-edited values + existing_summary = MagicMock(spec=ComplianceReportSummary) + + # Mock calculated summary + calculated_summary = ComplianceReportSummarySchema( + summary_id=100, + compliance_report_id=report_id, + renewable_fuel_target_summary=[], + low_carbon_fuel_target_summary=[], + non_compliance_penalty_summary=[], + can_sign=False, + ) + + # Set up mocks + mock_repo.get_summary_by_report_id.return_value = existing_summary + compliance_report_summary_service.calculate_compliance_report_summary = AsyncMock( + return_value=calculated_summary + ) + # Inject the mocked org_service into the service being tested + compliance_report_update_service.org_service = mock_org_service + + with pytest.raises(ServiceException): + await compliance_report_update_service.handle_submitted_status(mock_report) diff --git a/backend/lcfs/tests/conftest.py b/backend/lcfs/tests/conftest.py new file mode 100644 index 000000000..5cdd52ce1 --- /dev/null +++ b/backend/lcfs/tests/conftest.py @@ -0,0 +1,69 @@ +import pytest + +from lcfs.db.models.user.Role import RoleEnum +from lcfs.web.api.base import PaginationRequestSchema, FilterModel, SortOrder +from fakeredis.aioredis import FakeRedis + + +@pytest.fixture +def pagination_request_schema(): + return PaginationRequestSchema( + page=1, + size=10, + sort_orders=[ + SortOrder(field="createdDate", direction="asc"), + SortOrder(field="status", direction="desc"), + ], + filters=[ + FilterModel( + filter_type="text", + type="contains", + filter="exampleValue", + field="exampleField", + 
date_from="2024-01-01", + date_to="2024-12-31", + ), + FilterModel( + filter_type="date", + type="range", + field="createdDate", + date_from="2024-01-01", + date_to="2024-12-31", + ), + ], + ) + + +@pytest.fixture +def mock_user_profile( + role=RoleEnum.GOVERNMENT, + organization_id=1, + email="john.doe@example.com", + user_profile_id=1, +): + class MockUserProfile: + def __init__(self): + self.user_profile_id = user_profile_id + self.first_name = "John" + self.last_name = "Doe" + self.keycloak_username = "johndoe" + self.organization_id = organization_id + self.email = email + self.role_names = [role] + + def role_names(self): + return self.role_names + + return MockUserProfile() + + +@pytest.fixture +async def redis_client(): + """ + Fixture to provide a fake Redis client for tests. + """ + client = FakeRedis() + try: + yield client + finally: + await client.close() diff --git a/backend/lcfs/tests/email/test_email_repo.py b/backend/lcfs/tests/email/test_email_repo.py new file mode 100644 index 000000000..2444caf1e --- /dev/null +++ b/backend/lcfs/tests/email/test_email_repo.py @@ -0,0 +1,83 @@ +import pytest +from unittest.mock import MagicMock, AsyncMock +from lcfs.web.api.email.repo import CHESEmailRepository + +@pytest.fixture +def mock_db(): + return AsyncMock() + +@pytest.mark.anyio +async def test_get_subscribed_user_emails_success(mock_db): + # Arrange + mock_db.execute.return_value = MagicMock( + fetchall=MagicMock( + return_value=[ + ("user1@example.com",), + ("user2@example.com",), + ] + ) + ) + + repo = CHESEmailRepository(db=mock_db) + notification_type = "INITIATIVE_APPROVED" + organization_id = 1 + + # Act + result = await repo.get_subscribed_user_emails(notification_type, organization_id) + + # Assert + assert result == ["user1@example.com", "user2@example.com"] + mock_db.execute.assert_called_once() + +@pytest.mark.anyio +async def test_get_subscribed_user_emails_no_recipients(mock_db): + # Arrange + mock_db.execute.return_value = MagicMock( + fetchall=MagicMock(return_value=[]) + ) + + repo = CHESEmailRepository(db=mock_db) + notification_type = "INITIATIVE_APPROVED" + organization_id = 1 + + # Act + result = await repo.get_subscribed_user_emails(notification_type, organization_id) + + # Assert + assert result == [] + mock_db.execute.assert_called_once() + +@pytest.mark.anyio +async def test_get_notification_template_existing_type(mock_db): + # Arrange + expected_template = "initiative_approved.html" + mock_db.execute.return_value = MagicMock( + scalar_one_or_none=MagicMock(return_value=expected_template) + ) + + repo = CHESEmailRepository(db=mock_db) + notification_type = "INITIATIVE_APPROVED" + + # Act + result = await repo.get_notification_template(notification_type) + + # Assert + assert result == expected_template + mock_db.execute.assert_called_once() + +@pytest.mark.anyio +async def test_get_notification_template_default(mock_db): + # Arrange + mock_db.execute.return_value = MagicMock( + scalar_one_or_none=MagicMock(return_value=None) + ) + + repo = CHESEmailRepository(db=mock_db) + notification_type = "INITIATIVE_APPROVED" + + # Act + result = await repo.get_notification_template(notification_type) + + # Assert + assert result == "default.html" + mock_db.execute.assert_called_once() \ No newline at end of file diff --git a/backend/lcfs/tests/email/test_email_service.py b/backend/lcfs/tests/email/test_email_service.py new file mode 100644 index 000000000..7b8196fc3 --- /dev/null +++ b/backend/lcfs/tests/email/test_email_service.py @@ -0,0 +1,136 @@ +from 
lcfs.web.api.base import NotificationTypeEnum +import pytest +from unittest.mock import AsyncMock, MagicMock, patch +from lcfs.web.api.email.repo import CHESEmailRepository +from lcfs.web.api.email.services import CHESEmailService +import os + + +@pytest.fixture +def mock_email_repo(): + return AsyncMock(spec=CHESEmailRepository) + + +@pytest.fixture +def mock_environment_vars(): + with patch("lcfs.web.api.email.services.settings") as mock_settings: + mock_settings.ches_auth_url = "http://mock_auth_url" + mock_settings.ches_email_url = "http://mock_email_url" + mock_settings.ches_client_id = "mock_client_id" + mock_settings.ches_client_secret = "mock_client_secret" + mock_settings.ches_sender_email = "noreply@gov.bc.ca" + mock_settings.ches_sender_name = "Mock Notification System" + yield mock_settings + + +@pytest.mark.anyio +async def test_send_notification_email_success(mock_email_repo, mock_environment_vars): + # Arrange + notification_type = NotificationTypeEnum.BCEID__COMPLIANCE_REPORT__DIRECTOR_ASSESSMENT + notification_context = { + "subject": "Test Notification", + "user_name": "John Doe", + "message_body": "Test message content", + } + organization_id = 1 + + # Setup mock repo and service + mock_email_repo.get_subscribed_user_emails.return_value = ["user@example.com"] + service = CHESEmailService(repo=mock_email_repo) + + # Mock internal methods + service._render_email_template = MagicMock(return_value="Rendered HTML Content") + service.send_email = AsyncMock(return_value=True) + + # Act + result = await service.send_notification_email( + notification_type, notification_context, organization_id + ) + + # Assert + assert result is True + mock_email_repo.get_subscribed_user_emails.assert_called_once_with( + notification_type.value, organization_id # Ensure value is passed + ) + service._render_email_template.assert_called_once_with( + notification_type.value, notification_context + ) + service.send_email.assert_called_once() + + +@pytest.mark.anyio +async def test_send_notification_email_no_recipients( + mock_email_repo, mock_environment_vars +): + # Arrange + notification_type = NotificationTypeEnum.BCEID__TRANSFER__PARTNER_ACTIONS + notification_context = { + "subject": "Test Notification", + "user_name": "John Doe", + "message_body": "Test message content", + } + organization_id = 1 + + # Setup mock repo and service + mock_email_repo.get_subscribed_user_emails.return_value = [] + service = CHESEmailService(repo=mock_email_repo) + + # Act + result = await service.send_notification_email( + notification_type, notification_context, organization_id + ) + + # Assert + assert result is False + mock_email_repo.get_subscribed_user_emails.assert_called_once_with( + notification_type.value, organization_id # Ensure value is passed + ) + + +@pytest.mark.anyio +async def test_get_ches_token_success(mock_environment_vars): + # Arrange + mock_token = "mock_access_token" + with patch("requests.post") as mock_post: + mock_response = MagicMock() + mock_response.json.return_value = { + "access_token": mock_token, + "expires_in": 3600, + } + mock_post.return_value = mock_response + + service = CHESEmailService() + + # Act + token = await service.get_ches_token() + + # Assert + assert token == mock_token + mock_post.assert_called_once() + + +@pytest.mark.anyio +async def test_get_ches_token_cached(mock_environment_vars): + # Arrange + with patch("requests.post") as mock_post: + mock_response = MagicMock() + mock_response.json.return_value = { + "access_token": "initial_token", + "expires_in": 3600, 
+ } + mock_post.return_value = mock_response + + service = CHESEmailService() + + # First call to get token + first_token = await service.get_ches_token() + + # Reset mock to ensure no second call is made + mock_post.reset_mock() + + # Act: Second call should return cached token + second_token = await service.get_ches_token() + + # Assert + assert first_token == second_token + mock_post.assert_not_called() diff --git a/backend/lcfs/tests/fuel_code/test_fuel_code_exporter.py b/backend/lcfs/tests/fuel_code/test_fuel_code_exporter.py new file mode 100644 index 000000000..7b7840be5 --- /dev/null +++ b/backend/lcfs/tests/fuel_code/test_fuel_code_exporter.py @@ -0,0 +1,90 @@ +from unittest.mock import AsyncMock, MagicMock + +import pytest +from starlette.responses import StreamingResponse + +from lcfs.db.models.fuel.FuelCodeStatus import FuelCodeStatusEnum +from lcfs.web.api.base import PaginationRequestSchema +from lcfs.web.api.fuel_code.export import FuelCodeExporter + + +@pytest.mark.anyio +async def test_export_success(): + repo_mock = AsyncMock() + exporter = FuelCodeExporter(repo=repo_mock) + export_format = "csv" + + mock_fuel_codes = [ + MagicMock( + fuel_code_status=MagicMock(status=FuelCodeStatusEnum.Draft), + fuel_code_prefix=MagicMock(prefix="BCLCF"), + fuel_suffix="001.0", + carbon_intensity=10.5, + edrms="EDRMS-123", + company="XYZ Corp", + contact_name="John Doe", + contact_email="john.doe@example.com", + application_date="2023-10-01", + approval_date="2023-11-01", + effective_date="2023-12-01", + expiration_date="2024-01-01", + fuel_type=MagicMock(fuel_type="Diesel"), + feedstock="Corn oil", + feedstock_location="Canada", + feedstock_misc=None, + fuel_production_facility_city="Victoria", + fuel_production_facility_province_state="BC", + fuel_production_facility_country="Canada", + facility_nameplate_capacity=1000, + facility_nameplate_capacity_unit=MagicMock(value="MW"), + feedstock_fuel_transport_modes=[ + MagicMock( + feedstock_fuel_transport_mode=MagicMock(transport_mode="Pipeline") + ) + ], + finished_fuel_transport_modes=[ + MagicMock( + finished_fuel_transport_mode=MagicMock(transport_mode="Truck") + ) + ], + former_company=None, + notes=None, + ) + ] + repo_mock.get_fuel_codes_paginated.return_value = (mock_fuel_codes, 1) + + response = await exporter.export(export_format) + + assert isinstance(response, StreamingResponse) + assert response.media_type == "text/csv" + assert "attachment; filename=" in response.headers["Content-Disposition"] + repo_mock.get_fuel_codes_paginated.assert_called_once_with( + pagination=PaginationRequestSchema(page=1, size=1000, filters=[], sortOrders=[]) + ) + + # Verify file content + headers = await response.body_iterator.__anext__() + file_content = await response.body_iterator.__anext__() + print("-", file_content) + assert ( + b"Status,Prefix,Fuel code,Carbon intensity,EDRMS#,Company,Contact name,Contact email,Application date,Approval date,Effective date,Expiry date,Fuel,Feedstock,Feedstock location,Misc,Fuel production facility city,Fuel production facility province/state,Fuel production facility country,Facility nameplate capacity,Unit,Feedstock transport mode,Finished fuel transport mode,Former company,Notes\n" + in headers + ) + assert ( + b"Draft,BCLCF,001.0,10.5,EDRMS-123,XYZ Corp,John Doe,john.doe@example.com,2023-10-01,2023-11-01,2023-12-01,2024-01-01,Diesel,Corn oil,Canada,,Victoria,BC,Canada,1000,MW,Pipeline,Truck,,\n" + in file_content + ) + + +@pytest.mark.anyio +async def test_export_invalid_format(): + # Arrange + repo_mock = 
AsyncMock() + service = FuelCodeExporter(repo=repo_mock) + invalid_format = "pdf" + + # Act & Assert + with pytest.raises(Exception) as exc_info: + await service.export(invalid_format) + + assert "Export format not supported" in str(exc_info.value) diff --git a/backend/lcfs/tests/fuel_code/test_fuel_code_repo.py b/backend/lcfs/tests/fuel_code/test_fuel_code_repo.py new file mode 100644 index 000000000..2fdfc689b --- /dev/null +++ b/backend/lcfs/tests/fuel_code/test_fuel_code_repo.py @@ -0,0 +1,754 @@ +from datetime import date +from unittest import mock + +import pytest +from unittest.mock import AsyncMock, MagicMock +from sqlalchemy.exc import NoResultFound +from sqlalchemy.orm import joinedload + +from lcfs.web.api.fuel_code.repo import FuelCodeRepository +from lcfs.db.models.fuel.TransportMode import TransportMode +from lcfs.db.models.fuel.FuelType import FuelType +from lcfs.db.models.fuel.FuelCategory import FuelCategory +from lcfs.db.models.fuel.UnitOfMeasure import UnitOfMeasure +from lcfs.db.models.fuel.ExpectedUseType import ExpectedUseType +from lcfs.db.models.fuel.FuelCode import FuelCode +from lcfs.db.models.fuel.FuelCodePrefix import FuelCodePrefix +from lcfs.db.models.fuel.FuelCodeStatus import FuelCodeStatus, FuelCodeStatusEnum +from lcfs.db.models.fuel.EnergyDensity import EnergyDensity +from lcfs.db.models.fuel.EnergyEffectivenessRatio import EnergyEffectivenessRatio +from lcfs.db.models.fuel.ProvisionOfTheAct import ProvisionOfTheAct +from lcfs.db.models.fuel.AdditionalCarbonIntensity import AdditionalCarbonIntensity +from lcfs.db.models.fuel.TargetCarbonIntensity import TargetCarbonIntensity +from lcfs.db.models.compliance.CompliancePeriod import CompliancePeriod +from lcfs.web.exception.exceptions import DatabaseException + + +@pytest.fixture +def mock_db(): + """Fixture for mocking the database session.""" + mock_session = AsyncMock() + mock_session.execute = AsyncMock() + mock_session.get_one = AsyncMock() + mock_session.add = MagicMock() + mock_session.flush = AsyncMock() + mock_session.refresh = AsyncMock() + mock_session.scalar = AsyncMock() + return mock_session + + +@pytest.fixture +def fuel_code_repo(mock_db): + """Fixture for creating a repository with a mocked database.""" + repo = FuelCodeRepository() + repo.db = mock_db + return repo + + +@pytest.fixture +def valid_fuel_code(): + """Fixture for creating a repository with a mocked database.""" + fc = FuelCode( + fuel_code_id=5, + fuel_suffix="105.0", + prefix_id=1, # Assuming prefix_id=1 exists + fuel_type_id=1, # Assuming fuel_type_id=1 exists + company="Test Company", + contact_name="Test Contact", + contact_email="test@example.com", + carbon_intensity=50.00, + edrms="EDRMS-001", + application_date=date.today(), + feedstock="Corn", + feedstock_location="USA", + feedstock_misc="", + fuel_production_facility_city="CityA", + fuel_production_facility_province_state="ProvinceA", + fuel_production_facility_country="CountryA", + last_updated=date.today(), + ) + return fc + + +@pytest.mark.anyio +async def test_get_fuel_types(fuel_code_repo, mock_db): + mock_fuel_type = FuelType(fuel_type_id=1, fuel_type="Diesel") + mock_result = MagicMock() + mock_result.scalars.return_value.all.return_value = [mock_fuel_type] + + mock_db.execute.return_value = mock_result + result = await fuel_code_repo.get_fuel_types() + assert len(result) == 1 + assert result[0] == mock_fuel_type + mock_db.execute.assert_called_once() + + +@pytest.mark.anyio +async def test_get_formatted_fuel_types(fuel_code_repo, mock_db): + # Setup mock data + 
mock_fuel_type = FuelType( + fuel_type_id=1, + fuel_type="Diesel", + default_carbon_intensity=80.0, + units="gCO2e/MJ", + unrecognized=False, + ) + mock_result = MagicMock() + mock_result.unique.return_value.scalars.return_value.all.return_value = [ + mock_fuel_type + ] + mock_db.execute.return_value = mock_result + + result = await fuel_code_repo.get_formatted_fuel_types() + assert len(result) == 1 + assert result[0]["fuel_type"] == "Diesel" + mock_db.execute.assert_called_once() + + +@pytest.mark.anyio +async def test_get_fuel_type_by_name_found(fuel_code_repo, mock_db): + mock_fuel_type = FuelType(fuel_type_id=2, fuel_type="Gasoline") + mock_result = MagicMock() + mock_result.scalars.return_value.first.return_value = mock_fuel_type + mock_db.execute.return_value = mock_result + + result = await fuel_code_repo.get_fuel_type_by_name("Gasoline") + assert result == mock_fuel_type + + +@pytest.mark.anyio +async def test_get_fuel_type_by_name_not_found(fuel_code_repo, mock_db): + mock_result = MagicMock() + mock_result.scalars.return_value.first.return_value = None + mock_db.execute.return_value = mock_result + + with pytest.raises(DatabaseException): + await fuel_code_repo.get_fuel_type_by_name("Nonexistent") + + +@pytest.mark.anyio +async def test_get_fuel_type_by_id_found(fuel_code_repo, mock_db): + mock_fuel_type = FuelType(fuel_type_id=3, fuel_type="Biofuel") + mock_db.get_one.return_value = mock_fuel_type + + result = await fuel_code_repo.get_fuel_type_by_id(3) + assert result == mock_fuel_type + mock_db.get_one.assert_called_once() + + +@pytest.mark.anyio +async def test_get_fuel_type_by_id_not_found(fuel_code_repo, mock_db): + mock_db.get_one.return_value = None + with pytest.raises(DatabaseException): + await fuel_code_repo.get_fuel_type_by_id(999) + + +@pytest.mark.anyio +async def test_get_fuel_categories(fuel_code_repo, mock_db): + mock_fc = FuelCategory( + fuel_category_id=1, category="Renewable", default_carbon_intensity=0 + ) + mock_result = MagicMock() + mock_result.scalars.return_value.all.return_value = [mock_fc] + mock_db.execute.return_value = mock_result + + result = await fuel_code_repo.get_fuel_categories() + assert len(result) == 1 + assert result[0] == mock_fc + + +@pytest.mark.anyio +async def test_get_fuel_category_by(fuel_code_repo, mock_db): + mock_fc = FuelCategory( + fuel_category_id=2, category="Fossil", default_carbon_intensity=0 + ) + mock_result = MagicMock() + mock_result.scalar_one_or_none.return_value = mock_fc + mock_db.execute.return_value = mock_result + + result = await fuel_code_repo.get_fuel_category_by(category="Fossil") + assert result == mock_fc + + +@pytest.mark.anyio +async def test_get_transport_modes(fuel_code_repo, mock_db): + mock_tm = TransportMode(transport_mode_id=1, transport_mode="Truck") + mock_result = MagicMock() + mock_result.scalars.return_value.all.return_value = [mock_tm] + mock_db.execute.return_value = mock_result + + result = await fuel_code_repo.get_transport_modes() + assert len(result) == 1 + assert result[0] == mock_tm + + +@pytest.mark.anyio +async def test_get_transport_mode(fuel_code_repo, mock_db): + mock_tm = TransportMode(transport_mode_id=10, transport_mode="Ship") + mock_db.scalar.return_value = mock_tm + + result = await fuel_code_repo.get_transport_mode(10) + assert result == mock_tm + mock_db.scalar.assert_called_once() + + +@pytest.mark.anyio +async def test_get_transport_mode_by_name_found(fuel_code_repo, mock_db): + mock_tm = TransportMode(transport_mode_id=1, transport_mode="Truck") + mock_result = 
MagicMock() + mock_result.scalar_one.return_value = mock_tm + mock_db.execute.return_value = mock_result + + result = await fuel_code_repo.get_transport_mode_by_name("Truck") + assert result == mock_tm + + +@pytest.mark.anyio +async def test_get_transport_mode_by_name_not_found(fuel_code_repo, mock_db): + mock_result = MagicMock() + mock_result.scalar_one.side_effect = NoResultFound + mock_db.execute.return_value = mock_result + + with pytest.raises(DatabaseException): + await fuel_code_repo.get_transport_mode_by_name("NonexistentMode") + + +@pytest.mark.anyio +async def test_get_fuel_code_prefixes(fuel_code_repo, mock_db): + mock_prefix = FuelCodePrefix(fuel_code_prefix_id=1, prefix="BC") + mock_result = MagicMock() + mock_result.scalars.return_value.all.return_value = [mock_prefix] + mock_db.execute.return_value = mock_result + + result = await fuel_code_repo.get_fuel_code_prefixes() + assert len(result) == 1 + assert result[0] == mock_prefix + + +@pytest.mark.anyio +async def test_get_fuel_code_prefix(fuel_code_repo, mock_db): + mock_prefix = FuelCodePrefix(fuel_code_prefix_id=2, prefix="AB") + mock_db.get_one.return_value = mock_prefix + + result = await fuel_code_repo.get_fuel_code_prefix(2) + assert result == mock_prefix + + +@pytest.mark.anyio +async def test_get_fuel_status_by_status(fuel_code_repo, mock_db): + mock_status = FuelCodeStatus( + fuel_code_status_id=1, status=FuelCodeStatusEnum.Approved + ) + mock_result = MagicMock() + mock_result.scalar.return_value = mock_status + mock_db.execute.return_value = mock_result + + result = await fuel_code_repo.get_fuel_status_by_status(FuelCodeStatusEnum.Approved) + assert result == mock_status + + +@pytest.mark.anyio +async def test_get_energy_densities(fuel_code_repo, mock_db): + ed = EnergyDensity(energy_density_id=1, density=35.0) + mock_result = MagicMock() + mock_result.scalars.return_value.all.return_value = [ed] + mock_db.execute.return_value = mock_result + + result = await fuel_code_repo.get_energy_densities() + assert len(result) == 1 + assert result[0] == ed + + +@pytest.mark.anyio +async def test_get_energy_density(fuel_code_repo, mock_db): + ed = EnergyDensity(energy_density_id=2, density=40.0) + mock_result = MagicMock() + mock_result.scalars.return_value.first.return_value = ed + mock_db.execute.return_value = mock_result + + result = await fuel_code_repo.get_energy_density(10) + assert result == ed + + +@pytest.mark.anyio +async def test_get_energy_effectiveness_ratios(fuel_code_repo, mock_db): + eer = EnergyEffectivenessRatio(eer_id=1, ratio=2.0) + mock_result = MagicMock() + mock_result.scalars.return_value.all.return_value = [eer] + mock_db.execute.return_value = mock_result + + result = await fuel_code_repo.get_energy_effectiveness_ratios() + assert len(result) == 1 + assert result[0] == eer + + +@pytest.mark.anyio +async def test_get_units_of_measure(fuel_code_repo, mock_db): + uom = UnitOfMeasure(uom_id=1, name="gCO2e/MJ") + mock_result = MagicMock() + mock_result.scalars.return_value.all.return_value = [uom] + mock_db.execute.return_value = mock_result + + result = await fuel_code_repo.get_units_of_measure() + assert len(result) == 1 + assert result[0] == uom + + +@pytest.mark.anyio +async def test_get_expected_use_types(fuel_code_repo, mock_db): + eut = ExpectedUseType(expected_use_type_id=1, name="Vehicle") + mock_result = MagicMock() + mock_result.scalars.return_value.all.return_value = [eut] + mock_db.execute.return_value = mock_result + + result = await fuel_code_repo.get_expected_use_types() + assert 
len(result) == 1 + assert result[0] == eut + + +@pytest.mark.anyio +async def test_get_expected_use_type_by_name(fuel_code_repo, mock_db): + eut = ExpectedUseType(expected_use_type_id=2, name="Heating") + mock_result = MagicMock() + mock_result.scalar_one_or_none.return_value = eut + mock_db.execute.return_value = mock_result + + result = await fuel_code_repo.get_expected_use_type_by_name("Heating") + assert result == eut + + +@pytest.mark.anyio +async def test_get_fuel_codes_paginated(fuel_code_repo, mock_db): + fc = FuelCode(fuel_code_id=1, fuel_suffix="101.0") + mock_db.execute.side_effect = [ + MagicMock(scalar=MagicMock(return_value=FuelCodeStatus())), + MagicMock(scalar=MagicMock(return_value=1)), + MagicMock( + unique=MagicMock( + return_value=MagicMock( + scalars=MagicMock( + return_value=MagicMock( + all=MagicMock(return_value=[fc])) + ) + ) + ) + ), + ] + pagination = MagicMock(page=1, size=10, filters=[], sort_orders=[]) + result, count = await fuel_code_repo.get_fuel_codes_paginated(pagination) + assert len(result) == 1 + assert result[0] == fc + assert count == 1 + + +@pytest.mark.anyio +async def test_get_fuel_code_statuses(fuel_code_repo, mock_db): + fcs = FuelCodeStatus(fuel_code_status_id=1, + status=FuelCodeStatusEnum.Approved) + mock_result = MagicMock() + mock_result.scalars.return_value.all.return_value = [fcs] + mock_db.execute.return_value = mock_result + + result = await fuel_code_repo.get_fuel_code_statuses() + assert len(result) == 1 + assert result[0] == fcs + + +@pytest.mark.anyio +async def test_create_fuel_code(fuel_code_repo, mock_db, valid_fuel_code): + mock_db.flush = AsyncMock() + mock_db.scalar.return_value = valid_fuel_code + + result = await fuel_code_repo.create_fuel_code(valid_fuel_code) + assert result == valid_fuel_code + mock_db.add.assert_called_once_with(valid_fuel_code) + + +@pytest.mark.anyio +async def test_get_fuel_code(fuel_code_repo, mock_db, valid_fuel_code): + mock_db.scalar.return_value = valid_fuel_code + result = await fuel_code_repo.get_fuel_code(1) + assert result == valid_fuel_code + + +@pytest.mark.anyio +async def test_get_fuel_code_status_enum(fuel_code_repo, mock_db): + fcs = FuelCodeStatus(fuel_code_status_id=2, + status=FuelCodeStatusEnum.Deleted) + mock_db.scalar.return_value = fcs + result = await fuel_code_repo.get_fuel_code_status(FuelCodeStatusEnum.Deleted) + assert result == fcs + + +@pytest.mark.anyio +async def test_update_fuel_code(fuel_code_repo, mock_db, valid_fuel_code): + mock_db.flush = AsyncMock() + mock_db.refresh = AsyncMock() + updated = await fuel_code_repo.update_fuel_code(valid_fuel_code) + assert updated.fuel_code_id == 5 + + +@pytest.mark.anyio +async def test_delete_fuel_code(fuel_code_repo, mock_db): + mock_delete_status = FuelCodeStatus( + fuel_code_status_id=3, status=FuelCodeStatusEnum.Deleted + ) + mock_execute_result = MagicMock() + mock_execute_result.scalar.return_value = mock_delete_status + mock_db.execute.return_value = mock_execute_result + + mock_db.flush = AsyncMock() + + await fuel_code_repo.delete_fuel_code(10) + mock_db.execute.assert_awaited() # Check that execute was awaited + + +@pytest.mark.anyio +async def test_get_distinct_company_names(fuel_code_repo, mock_db): + mock_result = MagicMock() + mock_result.scalars.return_value.all.return_value = [ + "CompanyA", "CompanyB"] + mock_db.execute.return_value = mock_result + + result = await fuel_code_repo.get_distinct_company_names("Com") + assert len(result) == 2 + + +@pytest.mark.anyio +async def 
test_get_contact_names_by_company(fuel_code_repo, mock_db): + mock_result = MagicMock() + mock_result.scalars.return_value.all.return_value = [ + "John Doe", "Jane Doe"] + mock_db.execute.return_value = mock_result + + result = await fuel_code_repo.get_contact_names_by_company("CompanyA", "J") + assert len(result) == 2 + + +@pytest.mark.anyio +async def test_get_contact_email_by_company_and_name(fuel_code_repo, mock_db): + mock_result = MagicMock() + mock_result.scalars.return_value.all.return_value = ["john@example.com"] + mock_db.execute.return_value = mock_result + + result = await fuel_code_repo.get_contact_email_by_company_and_name( + "CompanyA", "John Doe", "john@" + ) + assert len(result) == 1 + + +@pytest.mark.anyio +async def test_get_distinct_fuel_codes_by_code(fuel_code_repo, mock_db): + mock_result = MagicMock() + mock_result.scalars.return_value.all.return_value = ["101.0", "101.1"] + mock_db.execute.return_value = mock_result + + result = await fuel_code_repo.get_distinct_fuel_codes_by_code("101", "BC") + assert len(result) == 2 + + +@pytest.mark.anyio +async def test_get_fuel_code_by_code_prefix(fuel_code_repo, mock_db): + fc = FuelCode(fuel_code_id=10, fuel_suffix="200.0") + mock_result = MagicMock() + mock_result.unique.return_value.scalars.return_value.all.return_value = [ + fc] + mock_db.execute.return_value = mock_result + + # Mock the next available suffix + fuel_code_repo.get_next_available_sub_version_fuel_code_by_prefix = AsyncMock( + return_value="200.1" + ) + + result = await fuel_code_repo.get_fuel_code_by_code_prefix("200.0", "BC") + assert len(result) == 1 + assert result[0].fuel_suffix == "200.1" + + +@pytest.mark.anyio +async def test_validate_fuel_code(fuel_code_repo, mock_db): + # Mock no existing code + mock_result = MagicMock() + mock_result.scalar_one_or_none.return_value = None + mock_db.execute.return_value = mock_result + + result = await fuel_code_repo.validate_fuel_code("300.0", 1) + assert result == "300.0" + + # Mock existing code + mock_result.scalar_one_or_none.return_value = FuelCode( + fuel_code_id=5, fuel_suffix="300.0" + ) + mock_db.execute.return_value = mock_result + fuel_code_repo.get_next_available_sub_version_fuel_code_by_prefix = AsyncMock( + return_value="300.1" + ) + result = await fuel_code_repo.validate_fuel_code("300.0", 1) + assert result == "300.1" + + +@pytest.mark.anyio +async def test_get_next_available_fuel_code_by_prefix(fuel_code_repo, mock_db): + mock_execute_result = MagicMock() + mock_execute_result.scalar_one_or_none.return_value = "102.0" + mock_db.execute.return_value = mock_execute_result + + result = await fuel_code_repo.get_next_available_fuel_code_by_prefix("BC") + assert result == "102.0" + + +@pytest.mark.anyio +async def test_get_next_available_sub_version_fuel_code_by_prefix( + fuel_code_repo, mock_db +): + mock_execute_result = MagicMock() + mock_execute_result.scalar_one_or_none.return_value = "200.1" + mock_db.execute.return_value = mock_execute_result + + result = await fuel_code_repo.get_next_available_sub_version_fuel_code_by_prefix( + "200", 1 + ) + assert result == "200.1" + + +@pytest.mark.anyio +async def test_get_latest_fuel_codes(fuel_code_repo, mock_db, valid_fuel_code): + prefix = FuelCodePrefix(fuel_code_prefix_id=1, prefix="BC") + valid_fuel_code.fuel_code_prefix = prefix + + mock_result = MagicMock() + mock_result.unique.return_value.scalars.return_value.all.return_value = [ + valid_fuel_code + ] + mock_db.execute.return_value = mock_result + + result = await 
fuel_code_repo.get_latest_fuel_codes() + assert len(result) == 1 + # The code increments the version, e.g. "BC101.0" -> "BC101.1" + # Assuming suffix "105.0": + assert result[0]["fuel_code"].endswith(".1") + + +@pytest.mark.anyio +async def test_get_fuel_code_field_options(fuel_code_repo, mock_db): + mock_execute_result = MagicMock() + mock_execute_result.all.return_value = [ + ("CompanyA", "Corn", "USA", None, None, "John Doe", "john@example.com") + ] + mock_db.execute.return_value = mock_execute_result + + result = await fuel_code_repo.get_fuel_code_field_options() + assert len(result) == 1 + + +@pytest.mark.anyio +async def test_get_fp_locations(fuel_code_repo, mock_db): + mock_execute_result = MagicMock() + mock_execute_result.all.return_value = [("CityA", "ProvinceA", "CountryA")] + mock_db.execute.return_value = mock_execute_result + + result = await fuel_code_repo.get_fp_locations() + assert len(result) == 1 + + +@pytest.mark.anyio +async def test_get_fuel_code_by_name(fuel_code_repo, mock_db): + fc = FuelCode(fuel_code_id=50, fuel_suffix="150.0") + mock_result = MagicMock() + mock_result.scalar_one_or_none.return_value = fc + mock_db.execute.return_value = mock_result + + result = await fuel_code_repo.get_fuel_code_by_name("BC150.0") + assert result == fc + + +@pytest.mark.anyio +async def test_get_provision_of_the_act_by_name(fuel_code_repo, mock_db): + poa = ProvisionOfTheAct(provision_of_the_act_id=1, name="Act Name") + mock_result = MagicMock() + mock_result.scalar_one_or_none.return_value = poa + mock_db.execute.return_value = mock_result + + result = await fuel_code_repo.get_provision_of_the_act_by_name("Act Name") + assert result == poa + + +@pytest.mark.anyio +async def test_get_energy_effectiveness_ratio(fuel_code_repo, mock_db): + eer = EnergyEffectivenessRatio(eer_id=1, ratio=1.5) + mock_result = MagicMock() + mock_result.scalars.return_value.first.return_value = eer + mock_db.execute.return_value = mock_result + + result = await fuel_code_repo.get_energy_effectiveness_ratio(1, 2, 3) + assert result == eer + + +@pytest.mark.anyio +async def test_get_target_carbon_intensities(fuel_code_repo, mock_db): + tci = TargetCarbonIntensity( + target_carbon_intensity_id=1, target_carbon_intensity=50.0 + ) + mock_result = MagicMock() + mock_result.scalars.return_value.all.return_value = [tci] + mock_db.execute.return_value = mock_result + + result = await fuel_code_repo.get_target_carbon_intensities(1, "2024") + assert len(result) == 1 + assert result[0] == tci + + +@pytest.mark.anyio +async def test_get_standardized_fuel_data(fuel_code_repo, mock_db): + # Mock dependencies + mock_fuel_type = FuelType( + fuel_type_id=1, fuel_type="Diesel", default_carbon_intensity=80.0 + ) + mock_db.get_one.return_value = mock_fuel_type + mock_db.execute.side_effect = [ + # energy density + MagicMock( + scalars=MagicMock( + return_value=MagicMock( + first=MagicMock(return_value=EnergyDensity(density=35.0)) + ) + ) + ), + # eer + MagicMock( + scalars=MagicMock( + return_value=MagicMock( + first=MagicMock( + return_value=EnergyEffectivenessRatio(ratio=2.0)) + ) + ) + ), + # target carbon intensities + MagicMock( + scalars=MagicMock( + return_value=MagicMock( + all=MagicMock( + return_value=[ + TargetCarbonIntensity(target_carbon_intensity=50.0) + ] + ) + ) + ) + ), + # additional carbon intensity + MagicMock( + scalars=MagicMock( + return_value=MagicMock( + one_or_none=MagicMock( + return_value=AdditionalCarbonIntensity(intensity=5.0) + ) + ) + ) + ), + ] + + result = await 
fuel_code_repo.get_standardized_fuel_data( + fuel_type_id=1, fuel_category_id=2, end_use_id=3, compliance_period="2024" + ) + assert result.effective_carbon_intensity == 80.0 + assert result.target_ci == 50.0 + assert result.eer == 2.0 + assert result.energy_density == 35.0 + assert result.uci == 5.0 + + +@pytest.mark.anyio +async def test_get_standardized_fuel_data_unrecognized(fuel_code_repo, mock_db): + # Mock an unrecognized fuel type + mock_fuel_type = FuelType( + fuel_type_id=1, + fuel_type="UnknownFuel", + default_carbon_intensity=None, + unrecognized=True, + ) + + # Mock a fuel category with a default CI + mock_fuel_category = FuelCategory( + fuel_category_id=2, category="SomeCategory", default_carbon_intensity=93.67 + ) + + # The repo uses get_one to get the fuel type. + mock_db.get_one.return_value = mock_fuel_type + + # Mock the repo method to get the fuel category + fuel_code_repo.get_fuel_category_by = AsyncMock( + return_value=mock_fuel_category) + + # Setup side effects for subsequent queries: + # Energy Density + energy_density_result = MagicMock( + scalars=MagicMock( + return_value=MagicMock( + first=MagicMock(return_value=EnergyDensity(density=35.0)) + ) + ) + ) + # EER + eer_result = MagicMock( + scalars=MagicMock( + return_value=MagicMock( + first=MagicMock( + return_value=EnergyEffectivenessRatio(ratio=2.0)) + ) + ) + ) + # Target Carbon Intensities + tci_result = MagicMock( + scalars=MagicMock( + return_value=MagicMock( + all=MagicMock( + return_value=[TargetCarbonIntensity( + target_carbon_intensity=50.0)] + ) + ) + ) + ) + # Additional Carbon Intensity + aci_result = MagicMock( + scalars=MagicMock( + return_value=MagicMock( + one_or_none=MagicMock( + return_value=AdditionalCarbonIntensity(intensity=5.0) + ) + ) + ) + ) + + # Set the side_effect for the mock_db.execute calls in the order they're invoked + mock_db.execute.side_effect = [ + energy_density_result, + eer_result, + tci_result, + aci_result, + ] + + result = await fuel_code_repo.get_standardized_fuel_data( + fuel_type_id=1, fuel_category_id=2, end_use_id=3, compliance_period="2024" + ) + + # Since fuel_type is unrecognized, it should use the FuelCategory's default CI + assert result.effective_carbon_intensity == 93.67 + assert result.target_ci == 50.0 + assert result.eer == 2.0 + assert result.energy_density == 35.0 + assert result.uci == 5.0 + + # Ensure get_fuel_category_by was called once with the correct parameter + fuel_code_repo.get_fuel_category_by.assert_awaited_once_with( + fuel_category_id=2) + + +@pytest.mark.anyio +async def test_get_additional_carbon_intensity(fuel_code_repo, mock_db): + aci = AdditionalCarbonIntensity(additional_uci_id=1, intensity=10.0) + mock_result = MagicMock() + mock_result.scalars.return_value.one_or_none.return_value = aci + mock_db.execute.return_value = mock_result + + result = await fuel_code_repo.get_additional_carbon_intensity(1, 2) + assert result == aci diff --git a/backend/lcfs/tests/fuel_code/test_fuel_code_service.py b/backend/lcfs/tests/fuel_code/test_fuel_code_service.py new file mode 100644 index 000000000..5dce49e52 --- /dev/null +++ b/backend/lcfs/tests/fuel_code/test_fuel_code_service.py @@ -0,0 +1,235 @@ +from unittest.mock import AsyncMock + +import pytest + +from lcfs.db.models import FuelCode +from lcfs.db.models.fuel.FuelCodeStatus import FuelCodeStatusEnum +from lcfs.web.api.base import PaginationRequestSchema +from lcfs.web.api.fuel_code.schema import ( + FuelCodeCreateUpdateSchema, + FuelCodeSchema, + PaginationResponseSchema, +) +from 
lcfs.web.api.fuel_code.services import FuelCodeServices +from lcfs.web.exception.exceptions import ServiceException + + +@pytest.mark.anyio +async def test_get_fuel_codes_success(): + # Arrange + repo_mock = AsyncMock() + service = FuelCodeServices(repo=repo_mock) + + mock_fuel_codes = [ + FuelCodeSchema( + fuel_code_id=1, + company="XYZ Corp", + prefix_id=1, + fuel_suffix="001.0", + carbon_intensity=10.5, + status="Draft", + application_date="2023-10-01", + last_updated="2023-10-01", + fuel="Diesel", + fuel_type_id=1, + prefix="BCLCF", + edrms="EDRMS-123", + feedstock="Corn oil", + feedstock_location="Canada", + ) + ] + repo_mock.get_fuel_codes_paginated.return_value = (mock_fuel_codes, 1) + + pagination = PaginationRequestSchema(page=1, size=10) + + # Act + result = await service.search_fuel_codes(pagination) + + # Assert + assert isinstance(result.pagination, PaginationResponseSchema) + assert len(result.fuel_codes) == 1 + assert result.fuel_codes[0].company == "XYZ Corp" + repo_mock.get_fuel_codes_paginated.assert_called_once_with(pagination) + + +@pytest.mark.anyio +async def test_create_fuel_code_success(): + # Arrange + repo_mock = AsyncMock() + service = FuelCodeServices(repo=repo_mock) + + input_data = FuelCodeCreateUpdateSchema( + fuel_code_id=1, + fuel_type_id=1, + status="Draft", + prefix="ABC", + prefix_id=1001, + fuel_suffix="001", + carbon_intensity=20.5, + company="XYZ Corp", + application_date="2023-10-01", + approval_date="2023-10-02", + effective_date="2023-10-03", + expiration_date="2024-10-01", + edrms="EDRMS-123", + feedstock="Corn oil", + feedstock_location="Canada", + fuel_production_facility_city="Victoria", + fuel_production_facility_country="Canada", + fuel_production_facility_province_state="BC", + ) + mock_fuel_code_data = FuelCodeSchema( + fuel_code_id=1, + status="Draft", + prefix="ABC", + prefix_id=1001, + fuel_suffix="001", + carbon_intensity=20.5, + company="XYZ Corp", + fuel="Diesel", + fuel_type_id=1, + application_date="2023-10-01", + edrms="EDRMS-123", + feedstock="Corn oil", + feedstock_location="Canada", + fuel_production_facility_city="Victoria", + fuel_production_facility_country="Canada", + fuel_production_facility_province_state="BC", + last_updated="2023-10-01", + feedstock_fuel_transport_modes=[], + finished_fuel_transport_modes=[], + ) + + repo_mock.create_fuel_code.return_value = mock_fuel_code_data + + # Act + result = await service.create_fuel_code(input_data) + + # Assert + assert result.fuel_code_id == 1 + assert result.company == "XYZ Corp" + repo_mock.create_fuel_code.assert_called_once() + + +@pytest.mark.anyio +async def test_update_fuel_code_success(): + repo_mock = AsyncMock() + service = FuelCodeServices(repo=repo_mock) + + fuel_code_id = 1 + mock_fuel_code = FuelCodeCreateUpdateSchema( + fuel_code_id=1, + fuel_type_id=1, + status="Draft", + prefix="ABC", + prefix_id=1001, + fuel_suffix="001", + carbon_intensity=20.5, + company="XYZ Corp", + application_date="2023-10-01", + approval_date="2023-10-02", + effective_date="2023-10-03", + expiration_date="2024-10-01", + edrms="EDRMS-123", + feedstock="Corn oil", + feedstock_location="Canada", + fuel_production_facility_city="Victoria", + fuel_production_facility_country="Canada", + fuel_production_facility_province_state="BC", + feedstock_fuel_transport_modes=[], + finished_fuel_transport_modes=[], + ) + + repo_mock.get_fuel_code.return_value = mock_fuel_code + repo_mock.update_fuel_code.return_value = mock_fuel_code + + # Act + result = await service.update_fuel_code(mock_fuel_code) + 
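# Note (added comment): the service is expected to fetch the existing record before + # persisting changes, which is why both repo calls are asserted below. +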
+ # Assert + assert result.fuel_code_id == 1 + assert result.company == "XYZ Corp" + repo_mock.get_fuel_code.assert_called_once_with(fuel_code_id) + repo_mock.update_fuel_code.assert_called_once_with(mock_fuel_code) + + +@pytest.mark.anyio +async def test_delete_fuel_code_success(): + # Arrange + repo_mock = AsyncMock() + service = FuelCodeServices(repo=repo_mock) + + fuel_code_id = 1 + repo_mock.delete_fuel_code.return_value = True + + # Act + result = await service.delete_fuel_code(fuel_code_id) + + # Assert + assert result is True + repo_mock.delete_fuel_code.assert_called_once_with(fuel_code_id) + + +@pytest.mark.anyio +async def test_approve_fuel_code_success(): + """Test successful approval of a fuel code.""" + # Arrange + repo_mock = AsyncMock() + service = FuelCodeServices(repo=repo_mock) + + fuel_code_id = 1 + # Mock a fuel code in Draft status + mock_fuel_code = FuelCode(fuel_code_status=AsyncMock()) + mock_fuel_code.fuel_code_status.status = FuelCodeStatusEnum.Draft + repo_mock.get_fuel_code.return_value = mock_fuel_code + + # Mock the Approved status and update operation + mock_approved_status = AsyncMock() + mock_approved_status.status = FuelCodeStatusEnum.Approved + repo_mock.get_fuel_code_status.return_value = mock_approved_status + repo_mock.update_fuel_code.return_value = mock_fuel_code + + # Act + result = await service.approve_fuel_code(fuel_code_id) + + # Assert + assert result == mock_fuel_code + repo_mock.get_fuel_code.assert_called_once_with(fuel_code_id) + repo_mock.get_fuel_code_status.assert_called_once_with(FuelCodeStatusEnum.Approved) + repo_mock.update_fuel_code.assert_called_once_with(mock_fuel_code) + + +@pytest.mark.anyio +async def test_approve_fuel_code_not_found(): + """Test approving a non-existent fuel code.""" + # Arrange + repo_mock = AsyncMock() + service = FuelCodeServices(repo=repo_mock) + + fuel_code_id = 9999 + repo_mock.get_fuel_code.return_value = None + + # Act & Assert + with pytest.raises(ValueError, match="Fuel code not found"): + await service.approve_fuel_code(fuel_code_id) + repo_mock.get_fuel_code.assert_called_once_with(fuel_code_id) + + +@pytest.mark.anyio +async def test_approve_fuel_code_invalid_status(): + """Test approving a fuel code that is not in Draft status.""" + # Arrange + repo_mock = AsyncMock() + service = FuelCodeServices(repo=repo_mock) + + fuel_code_id = 1 + + mock_fuel_code = FuelCode(fuel_code_status=AsyncMock()) + mock_fuel_code.fuel_code_status.status = FuelCodeStatusEnum.Approved + repo_mock.get_fuel_code.return_value = mock_fuel_code + + # Act & Assert + with pytest.raises(ValueError, match="Fuel code is not in Draft"): + await service.approve_fuel_code(fuel_code_id) + + repo_mock.get_fuel_code.assert_called_once_with(fuel_code_id) diff --git a/backend/lcfs/tests/fuel_code/test_fuel_code_views.py b/backend/lcfs/tests/fuel_code/test_fuel_code_views.py new file mode 100644 index 000000000..4bfa75bfc --- /dev/null +++ b/backend/lcfs/tests/fuel_code/test_fuel_code_views.py @@ -0,0 +1,638 @@ +from datetime import date +from unittest.mock import patch + +import pytest +from fastapi import FastAPI +from fastapi.exceptions import RequestValidationError +from httpx import AsyncClient +from starlette import status + +from lcfs.db.models.fuel.FuelCodeStatus import FuelCodeStatusEnum +from lcfs.db.models.user.Role import RoleEnum +from lcfs.web.api.fuel_code.schema import ( + FuelCodeCreateUpdateSchema, + TransportModeSchema, + FuelCodeStatusSchema, +) +from lcfs.web.exception.exceptions import 
DataNotFoundException + + +# Fixtures for mock data +@pytest.fixture +def mock_table_options(): + return { + "fuelTypes": ["Diesel", "Electric"], + "transportModes": ["Road", "Rail"], + "latestFuelCodes": ["FC-2021-001", "FC-2021-002"], + "facilityNameplateCapacityUnits": ["kW", "MW"], + } + + +@pytest.fixture +def mock_fuel_code_data(): + return { + "fuel_code_id": 1, + "company": "ABC Corp", + "fuel_suffix": "001.0", + "prefixId": 1, + "carbonIntensity": 25.0, + "edrms": "EDRMS-123", + "lastUpdated": "2023-10-01", + "applicationDate": "2023-09-15", + "fuelTypeId": 2, + "feedstock": "Corn", + "feedstockLocation": "Canada", + } + + +@pytest.fixture +def updated_fuel_code_data(): + return FuelCodeCreateUpdateSchema( + fuel_code_id=1, + prefix_id=1001, + fuel_suffix="001", + carbon_intensity=20.5, + edrms="EDRMS-123", + company="XYZ Energy Corp", + contact_name="John Doe", + contact_email="john.doe@example.com", + application_date=date(2023, 9, 15), + approval_date=date(2023, 10, 1), + effective_date=date(2023, 10, 15), + expiration_date=date(2024, 10, 15), + fuel_type_id=2, + feedstock="Corn", + feedstock_location="USA", + fuel_production_facility_city="Vancouver", + fuel_production_facility_province_state="BC", + fuel_production_facility_country="Canada", + facility_nameplate_capacity=1000, + facility_nameplate_capacity_unit="L", + feedstock_fuel_transport_mode=["Truck", "Ship"], + finished_fuel_transport_mode=["Truck"], + is_valid=True, + validation_msg="Validated successfully", + deleted=False, + ) + + +@pytest.fixture +def request_fuel_code_data(): + return { + "status": "Draft", + "prefix": "ABC", + "prefixId": 1001, + "fuelSuffix": "001", + "carbonIntensity": 20.5, + "edrms": "EDRMS-123", + "company": "XYZ Energy Corp", + "lastUpdated": "2023-11-05T10:30:00", + "contactName": "John Doe", + "contactEmail": "john.doe@example.com", + "applicationDate": "2023-09-15", + "approvalDate": "2023-10-01", + "effectiveDate": "2023-10-15", + "expirationDate": "2024-10-15", + "fuel": "Diesel", + "fuelTypeId": 2, + "feedstock": "Corn", + "feedstockLocation": "USA", + "fuelProductionFacilityCity": "Vancouver", + "fuelProductionFacilityProvinceState": "BC", + "fuelProductionFacilityCountry": "Canada", + "facilityNameplateCapacity": 1000, + "facilityNameplateCapacityUnit": "L", + "feedstockFuelTransportMode": ["Truck", "Ship"], + "finishedFuelTransportMode": ["Truck"], + "isValid": True, + "validationMsg": "Validated successfully", + "deleted": False, + } + + +@pytest.fixture +def valid_fuel_code_data(): + return { + "fuel_code_id": 1, + "prefix_id": 1, + "fuel_suffix": "A", + "carbon_intensity": 1.23, + "edrms": "12345", + "company": "Test Company", + "contact_name": "John Doe", + "contact_email": "johndoe@example.com", + "application_date": date(2023, 1, 1), + "approval_date": date(2023, 2, 1), + "effective_date": date(2023, 3, 1), + "expiration_date": date(2023, 12, 31), + "fuel_type_id": 1, + "feedstock": "Test Feedstock", + "feedstock_location": "Location", + "fuel_production_facility_city": "City", + "fuel_production_facility_province_state": "Province", + "fuel_production_facility_country": "Country", + } + + +# Fixture to set user role +@pytest.fixture +def set_user_role(fastapi_app, set_mock_user): + def _set_user_role(role): + set_mock_user(fastapi_app, [role]) + + return _set_user_role + + +# get_table_options +@pytest.mark.anyio +async def test_get_table_options_success( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, +): + mock_table_options = { + "fuelTypes": ["Diesel", 
"Electric"], + "transportModes": ["Road", "Rail"], + "latestFuelCodes": ["FC-2021-001", "FC-2021-002"], + "facilityNameplateCapacityUnits": ["kW", "MW"], + } + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + url = "/api/fuel-codes/table-options" + with patch( + "lcfs.web.api.fuel_code.services.FuelCodeServices.get_table_options" + ) as mock_get_table_options: + mock_get_table_options.return_value.status_code = status.HTTP_200_OK + mock_get_table_options.return_value.json.return_value = mock_table_options + response = await client.get(url) + + # Perform assertions + assert response.status_code == status.HTTP_200_OK + result = response.json() + assert isinstance(result, dict) + assert "fuelTypes" in result + assert "transportModes" in result + assert "latestFuelCodes" in result + assert "facilityNameplateCapacityUnits" in result + + +@pytest.mark.anyio +async def test_get_table_options_forbidden( + client: AsyncClient, + fastapi_app: FastAPI, + set_user_role, +): + set_user_role(RoleEnum.SUPPLIER) # Incorrect role + url = "/api/fuel-codes/table-options" + response = await client.get(url) + + assert response.status_code == status.HTTP_403_FORBIDDEN + + +# search_table_options_strings +@pytest.mark.anyio +async def test_search_table_options_success( + client: AsyncClient, + fastapi_app: FastAPI, + set_user_role, +): + with patch( + "lcfs.web.api.fuel_code.services.FuelCodeServices.search_fuel_code" + ) as mock_search_fuel_code: + set_user_role(RoleEnum.GOVERNMENT) + + mock_search_fuel_code.return_value = {"fuel_codes": ["AB001"]} + + url = "/api/fuel-codes/search" + params = {"fuelCode": "AB001"} + response = await client.get(url, params=params) + + assert response.status_code == 200 + result = response.json() + assert isinstance(result, dict) + assert "fuelCodes" in result + assert result["fuelCodes"] == ["AB001"] + mock_search_fuel_code.assert_called_once_with("AB001", None, False) + + +@pytest.mark.anyio +async def test_search_table_options_invalid_params( + client: AsyncClient, + fastapi_app: FastAPI, + set_user_role, +): + set_user_role(RoleEnum.GOVERNMENT) + url = "/api/fuel-codes/search" + params = {"invalidParam": "invalid"} + response = await client.get(url, params=params) + + assert response.status_code == status.HTTP_400_BAD_REQUEST + + +# get_fuel_codes (POST /list) +@pytest.mark.anyio +async def test_get_fuel_codes_success( + client: AsyncClient, + fastapi_app: FastAPI, + set_user_role, + pagination_request_schema, +): + with patch( + "lcfs.web.api.fuel_code.services.FuelCodeServices.search_fuel_codes" + ) as mock_get_fuel_codes: + set_user_role(RoleEnum.GOVERNMENT) + + mock_get_fuel_codes.return_value = { + "fuel_codes": [ + { + "fuel_code_id": 1, + "company": "ABC Corp", + "fuel_suffix": "001.0", + "prefixId": 1, + "carbonIntensity": 25.0, + "edrms": "EDRMS-123", + "lastUpdated": "2023-10-01", + "applicationDate": "2023-09-15", + "fuelTypeId": 2, + "feedstock": "Corn", + "feedstockLocation": "Canada", + } + ], + "pagination": { + "total": 1, + "page": 1, + "size": 10, + "total_pages": 1, + }, + } + + url = "/api/fuel-codes/list" + response = await client.post( + url, json=pagination_request_schema.dict(by_alias=True) + ) + + assert response.status_code == status.HTTP_200_OK + result = response.json() + assert isinstance(result, dict) + assert "pagination" in result + assert "fuelCodes" in result + mock_get_fuel_codes.assert_called_once_with(pagination_request_schema) + + +# get_fuel_code by ID +@pytest.mark.anyio +async def test_get_fuel_code_success( + client: AsyncClient, 
+ fastapi_app: FastAPI, + set_user_role, + mock_fuel_code_data, +): + with patch( + "lcfs.web.api.fuel_code.services.FuelCodeServices.get_fuel_code" + ) as mock_get_fuel_code: + set_user_role(RoleEnum.GOVERNMENT) + + mock_get_fuel_code.return_value = mock_fuel_code_data + url = "/api/fuel-codes/1" + response = await client.get(url) + + assert response.status_code == status.HTTP_200_OK + result = response.json() + assert isinstance(result, dict) + assert result["fuelCodeId"] == 1 + assert result["company"] == "ABC Corp" + mock_get_fuel_code.assert_called_once_with(1) + + +@pytest.mark.anyio +async def test_get_fuel_code_not_found( + client: AsyncClient, + fastapi_app: FastAPI, + set_user_role, +): + with patch( + "lcfs.web.api.fuel_code.services.FuelCodeServices.get_fuel_code" + ) as mock_get_fuel_code: + set_user_role(RoleEnum.GOVERNMENT) + + mock_get_fuel_code.side_effect = DataNotFoundException("Fuel code not found") + url = "/api/fuel-codes/9999" + response = await client.get(url) + + assert response.status_code == status.HTTP_404_NOT_FOUND + + +@pytest.mark.anyio +async def test_update_fuel_code_success( + client: AsyncClient, + fastapi_app: FastAPI, + set_user_role, + updated_fuel_code_data, +): + with patch( + "lcfs.web.api.fuel_code.services.FuelCodeServices.update_fuel_code" + ) as mock_update_fuel_code: + set_user_role(RoleEnum.ANALYST) + + mock_update_fuel_code.return_value = updated_fuel_code_data.model_dump( + by_alias=True, mode="json" + ) + url = "/api/fuel-codes" + + # Send the PUT request with the mock updated data + response = await client.post( + url, json=updated_fuel_code_data.model_dump(by_alias=True, mode="json") + ) + # Assert the response status and data + assert response.status_code == status.HTTP_200_OK + result = response.json() + assert isinstance(result, dict) + assert result["fuelCodeId"] == 1 + assert result["company"] == "XYZ Energy Corp" + + +@pytest.mark.anyio +async def test_delete_fuel_code_success( + client: AsyncClient, fastapi_app: FastAPI, set_user_role +): + """Test successful deletion of a fuel code.""" + set_user_role(RoleEnum.ANALYST) + url = "/api/fuel-codes/1" + mock_response = {"message": "Fuel code deleted successfully"} + + with patch( + "lcfs.web.api.fuel_code.services.FuelCodeServices.delete_fuel_code" + ) as mock_delete_fuel_code: + mock_delete_fuel_code.return_value = mock_response + response = await client.delete(url) + + assert response.status_code == status.HTTP_200_OK + assert response.json() == mock_response + mock_delete_fuel_code.assert_called_once_with(1) + + +@pytest.mark.anyio +async def test_delete_fuel_code_not_found( + client: AsyncClient, fastapi_app: FastAPI, set_user_role +): + """Test deletion of a non-existent fuel code.""" + set_user_role(RoleEnum.ANALYST) + url = "/api/fuel-codes/9999" + + with patch( + "lcfs.web.api.fuel_code.services.FuelCodeServices.delete_fuel_code" + ) as mock_delete_fuel_code: + mock_delete_fuel_code.side_effect = DataNotFoundException("Fuel code not found") + response = await client.delete(url) + + assert response.status_code == status.HTTP_404_NOT_FOUND + assert response.json()["detail"] == "Not Found" + + +@pytest.mark.anyio +async def test_delete_fuel_code_unauthorized( + client: AsyncClient, fastapi_app: FastAPI, set_user_role +): + """Test deletion of a fuel code with unauthorized user role.""" + set_user_role(RoleEnum.SUPPLIER) # Unauthorized role + url = "/api/fuel-codes/1" + response = await client.delete(url) + + assert response.status_code == status.HTTP_403_FORBIDDEN + + 
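+# Illustrative sketch, not part of the original suite: the role-based delete checks +# above could also be collapsed into one parametrized test. It reuses only the +# fixtures and imports already present in this module and assumes the endpoint +# behaves as in the individual tests. +@pytest.mark.anyio +@pytest.mark.parametrize( + "role, expected_status", + [ + (RoleEnum.ANALYST, status.HTTP_200_OK), + (RoleEnum.SUPPLIER, status.HTTP_403_FORBIDDEN), + ], +) +async def test_delete_fuel_code_by_role( + client: AsyncClient, fastapi_app: FastAPI, set_user_role, role, expected_status +): + set_user_role(role) + with patch( + "lcfs.web.api.fuel_code.services.FuelCodeServices.delete_fuel_code" + ) as mock_delete_fuel_code: + mock_delete_fuel_code.return_value = {"message": "Fuel code deleted successfully"} + response = await client.delete("/api/fuel-codes/1") + + assert response.status_code == expected_status + +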
+@pytest.mark.anyio +async def test_create_fuel_code_success( + client: AsyncClient, + fastapi_app: FastAPI, + set_user_role, + request_fuel_code_data, +): + """Test successful creation of a fuel code.""" + set_user_role(RoleEnum.ANALYST) + url = "/api/fuel-codes" + + with patch( + "lcfs.web.api.fuel_code.services.FuelCodeServices.create_fuel_code" + ) as mock_create_fuel_code: + mock_create_fuel_code.return_value = request_fuel_code_data + response = await client.post(url, json=request_fuel_code_data) + + assert response.status_code == status.HTTP_200_OK + result = response.json() + assert result["company"] == request_fuel_code_data["company"] + assert result["fuelTypeId"] == request_fuel_code_data["fuelTypeId"] + mock_create_fuel_code.assert_called_once_with( + FuelCodeCreateUpdateSchema(**request_fuel_code_data) + ) + + +@pytest.mark.anyio +async def test_create_fuel_code_invalid_data( + client: AsyncClient, fastapi_app: FastAPI, set_user_role +): + """Test creation of a fuel code with invalid data.""" + set_user_role(RoleEnum.ANALYST) + url = "/api/fuel-codes" + invalid_data = {"invalidField": "Invalid"} + + response = await client.post(url, json=invalid_data) + + assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY + + +@pytest.mark.anyio +async def test_create_fuel_code_unauthorized( + client: AsyncClient, fastapi_app: FastAPI, set_user_role, request_fuel_code_data +): + """Test creation of a fuel code with unauthorized user role.""" + set_user_role(RoleEnum.SUPPLIER) # Unauthorized role + url = "/api/fuel-codes" + + response = await client.post(url, json=request_fuel_code_data) + + assert response.status_code == status.HTTP_403_FORBIDDEN + + +@pytest.mark.anyio +async def test_approve_fuel_code_success( + client: AsyncClient, fastapi_app: FastAPI, set_user_role +): + """Test successful approval of a fuel code.""" + set_user_role(RoleEnum.GOVERNMENT) + url = "/api/fuel-codes/1/approve" + mock_response = {"message": "Fuel code approved successfully"} + + with patch( + "lcfs.web.api.fuel_code.services.FuelCodeServices.approve_fuel_code" + ) as mock_approve_fuel_code: + mock_approve_fuel_code.return_value = mock_response + response = await client.post(url) + + assert response.status_code == status.HTTP_200_OK + assert response.json() == mock_response + mock_approve_fuel_code.assert_called_once_with(1) + + +@pytest.mark.anyio +async def test_approve_fuel_code_not_found( + client: AsyncClient, fastapi_app: FastAPI, set_user_role +): + """Test approval of a non-existent fuel code.""" + set_user_role(RoleEnum.GOVERNMENT) + url = "/api/fuel-codes/9999/approve" + + with patch( + "lcfs.web.api.fuel_code.services.FuelCodeServices.approve_fuel_code" + ) as mock_approve_fuel_code: + mock_approve_fuel_code.side_effect = DataNotFoundException( + "Fuel code not found" + ) + response = await client.post(url) + + assert response.status_code == status.HTTP_404_NOT_FOUND + assert response.json()["detail"] == "Not Found" + + +@pytest.mark.anyio +async def test_approve_fuel_code_invalid_request( + client: AsyncClient, fastapi_app: FastAPI, set_user_role +): + """Test approval of a fuel code with invalid data.""" + set_user_role(RoleEnum.GOVERNMENT) + url = "/api/fuel-codes/invalid_id/approve" + + response = await client.post(url) + + assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY + + +@pytest.mark.anyio +async def test_approve_fuel_code_unauthorized( + client: AsyncClient, fastapi_app: FastAPI, set_user_role +): + set_user_role(RoleEnum.SUPPLIER) + url = 
"/api/fuel-codes/1/approve" + response = await client.post(url) + + assert response.status_code == status.HTTP_403_FORBIDDEN + + +# Tests for FuelCodeCreateUpdateSchema validation +@pytest.mark.anyio +def test_valid_dates(valid_fuel_code_data): + # Valid input + model = FuelCodeCreateUpdateSchema(**valid_fuel_code_data) + assert model.application_date == date(2023, 1, 1) + + +@pytest.mark.anyio +def test_invalid_application_date_before_approval_date(valid_fuel_code_data): + # Application Date is after Approval Date + invalid_data = valid_fuel_code_data.copy() + invalid_data["application_date"] = date(2023, 2, 2) + invalid_data["approval_date"] = date(2023, 2, 1) + with pytest.raises(RequestValidationError) as excinfo: + FuelCodeCreateUpdateSchema(**invalid_data) + assert "applicationDate" in str(excinfo.value) + + +@pytest.mark.anyio +def test_invalid_effective_date_before_application_date(valid_fuel_code_data): + # Effective Date is before Application Date + invalid_data = valid_fuel_code_data.copy() + invalid_data["effective_date"] = date(2022, 12, 31) + with pytest.raises(RequestValidationError) as excinfo: + FuelCodeCreateUpdateSchema(**invalid_data) + assert "effectiveDate" in str(excinfo.value) + + +@pytest.mark.anyio +def test_invalid_expiration_date_before_effective_date(valid_fuel_code_data): + # Expiration Date is before Effective Date + invalid_data = valid_fuel_code_data.copy() + invalid_data["expiration_date"] = date(2023, 2, 28) + with pytest.raises(RequestValidationError) as excinfo: + FuelCodeCreateUpdateSchema(**invalid_data) + assert "expirationDate" in str(excinfo.value) + + +@pytest.mark.anyio +def test_missing_capacity_unit(valid_fuel_code_data): + # Facility capacity is provided but no unit + invalid_data = valid_fuel_code_data.copy() + invalid_data["facility_nameplate_capacity"] = 100 + invalid_data["facility_nameplate_capacity_unit"] = None + with pytest.raises(RequestValidationError) as excinfo: + FuelCodeCreateUpdateSchema(**invalid_data) + assert "facilityNameplateCapacityUnit" in str(excinfo.value) + + +@pytest.mark.anyio +def test_valid_capacity_unit(valid_fuel_code_data): + # Valid capacity and unit + valid_data = valid_fuel_code_data.copy() + valid_data["facility_nameplate_capacity"] = 100 + valid_data["facility_nameplate_capacity_unit"] = "Gallons" + model = FuelCodeCreateUpdateSchema(**valid_data) + assert model.facility_nameplate_capacity == 100 + + +@pytest.mark.anyio +async def test_get_fuel_code_statuses_success( + client: AsyncClient, fastapi_app: FastAPI +): + with patch( + "lcfs.web.api.fuel_code.services.FuelCodeServices.get_fuel_code_statuses" + ) as mock_get_statuses: + # Mock the return value of the service + mock_get_statuses.return_value = [ + FuelCodeStatusSchema( + fuel_code_status_id=1, status=FuelCodeStatusEnum.Draft + ), + FuelCodeStatusSchema( + fuel_code_status_id=2, status=FuelCodeStatusEnum.Approved + ), + ] + + # Send GET request to the endpoint + url = "/api/fuel-codes/statuses" + response = await client.get(url) + + # Assertions + assert response.status_code == status.HTTP_200_OK + result = response.json() + assert isinstance(result, list) + assert len(result) == 2 + print(result[0]) + assert result[0]["fuelCodeStatusId"] == 1 + assert result[0]["status"] == "Draft" + assert result[1]["fuelCodeStatusId"] == 2 + assert result[1]["status"] == "Approved" + + +@pytest.mark.anyio +async def test_get_transport_modes_success(client: AsyncClient, fastapi_app: FastAPI): + with patch( + 
"lcfs.web.api.fuel_code.services.FuelCodeServices.get_transport_modes" + ) as mock_get_modes: + # Mock the return value of the service + mock_get_modes.return_value = [ + TransportModeSchema(transport_mode_id=1, transport_mode="Truck"), + TransportModeSchema(transport_mode_id=2, transport_mode="Ship"), + ] + + # Send GET request to the endpoint + url = "/api/fuel-codes/transport-modes" + response = await client.get(url) + + # Assertions + assert response.status_code == status.HTTP_200_OK + result = response.json() + assert isinstance(result, list) + assert len(result) == 2 + assert result[0]["transportModeId"] == 1 + assert result[0]["transportMode"] == "Truck" + assert result[1]["transportModeId"] == 2 + assert result[1]["transportMode"] == "Ship" diff --git a/backend/lcfs/tests/fuel_export/conftest.py b/backend/lcfs/tests/fuel_export/conftest.py new file mode 100644 index 000000000..fdb99e06d --- /dev/null +++ b/backend/lcfs/tests/fuel_export/conftest.py @@ -0,0 +1,84 @@ +import pytest +from unittest.mock import AsyncMock, MagicMock +from lcfs.web.api.fuel_export.repo import FuelExportRepository +from lcfs.web.api.fuel_code.repo import FuelCodeRepository +from lcfs.web.api.fuel_export.services import FuelExportServices +from lcfs.web.api.fuel_export.actions_service import FuelExportActionService +from fastapi_cache import FastAPICache +from fastapi_cache.backends.inmemory import InMemoryBackend +from sqlalchemy.ext.asyncio import AsyncSession +from lcfs.web.api.compliance_report.repo import ComplianceReportRepository + + +@pytest.fixture(scope="function", autouse=True) +async def init_cache(): + """Initialize the cache for testing.""" + FastAPICache.init(InMemoryBackend(), prefix="test-cache") + + +@pytest.fixture +def mock_db(): + """Mock the AsyncSession for database interactions.""" + return AsyncMock(spec=AsyncSession) + + +@pytest.fixture +def mock_repo(mock_db): + """Mock FuelExportRepository.""" + repo = FuelExportRepository(db=mock_db) + repo.get_fuel_export_table_options = AsyncMock() + repo.get_fuel_export_list = AsyncMock() + repo.get_fuel_exports_paginated = AsyncMock() + repo.get_fuel_export_by_id = AsyncMock() + repo.create_fuel_export = AsyncMock() + repo.update_fuel_export = AsyncMock() + repo.delete_fuel_export = AsyncMock() + repo.get_effective_fuel_exports = AsyncMock() + repo.get_fuel_export_version_by_user = AsyncMock() + repo.get_latest_fuel_export_by_group_uuid = AsyncMock() + return repo + + +@pytest.fixture +def mock_compliance_report_repo(): + """Mock ComplianceReportRepository.""" + repo = AsyncMock(spec=ComplianceReportRepository) + return repo + + +@pytest.fixture +def mock_fuel_code_repo(): + """Mock FuelCodeRepository.""" + repo = AsyncMock(spec=FuelCodeRepository) + repo.get_standardized_fuel_data = AsyncMock() + return repo + + +@pytest.fixture +def fuel_export_repo(mock_db): + repo = FuelExportRepository() + repo.db = mock_db + return repo + + +@pytest.fixture +def fuel_export_service(mock_repo, mock_compliance_report_repo): + """Mock FuelExportServices.""" + service = FuelExportServices( + repo=mock_repo, + compliance_report_repo=mock_compliance_report_repo, + ) + return service + + +@pytest.fixture +def fuel_export_action_service(mock_repo, mock_fuel_code_repo): + """Mock FuelExportActionService.""" + service = FuelExportActionService(repo=mock_repo, fuel_repo=mock_fuel_code_repo) + return service + + +@pytest.fixture +def mock_user_profile(): + """Mock user profile with minimal required attributes.""" + return MagicMock(id=1, organization_id=1, 
user_type="SUPPLIER") diff --git a/backend/lcfs/tests/fuel_export/test_fuel_exports_actions_service.py b/backend/lcfs/tests/fuel_export/test_fuel_exports_actions_service.py new file mode 100644 index 000000000..048259c8b --- /dev/null +++ b/backend/lcfs/tests/fuel_export/test_fuel_exports_actions_service.py @@ -0,0 +1,522 @@ +import pytest +from unittest.mock import AsyncMock, MagicMock +from uuid import uuid4 +from datetime import datetime +from lcfs.db.models.compliance.FuelExport import FuelExport +from lcfs.db.models.compliance.ComplianceReport import QuantityUnitsEnum +from lcfs.web.api.fuel_code.repo import CarbonIntensityResult +from lcfs.web.api.fuel_export.actions_service import FuelExportActionService +from lcfs.web.api.fuel_export.schema import ( + FuelExportCreateUpdateSchema, + FuelExportSchema, + DeleteFuelExportResponseSchema, +) +from lcfs.db.base import ActionTypeEnum, UserTypeEnum +from lcfs.web.exception.exceptions import DatabaseException + +from fastapi import HTTPException + +FUEL_EXPORT_EXCLUDE_FIELDS = { + "id", + "fuel_export_id", + "compliance_period", + "deleted", + "group_uuid", + "user_type", + "version", + "action_type", + "units", +} + +# Example test cases from the dataset +test_cases = [ + { + "description": "Jet fuel, default carbon intensity, liters", + "input": { + "quantity": 100000, + "units": "L", + "target_ci": 88.83, + "ci_of_fuel": 88.83, + "energy_density": 36, + "eer": 1, + }, + "expected_compliance_units": 0, + }, + { + "description": "Diesel, prescribed carbon intensity, liters", + "input": { + "quantity": 100000, + "units": "L", + "target_ci": 79.28, + "ci_of_fuel": 94.38, + "energy_density": 38.65, + "eer": 1, + }, + "expected_compliance_units": 0, + }, + { + "description": "Gasoline, default carbon intensity, liters", + "input": { + "quantity": 100000, + "units": "L", + "target_ci": 78.68, + "ci_of_fuel": 93.67, + "energy_density": 34.69, + "eer": 1, + }, + "expected_compliance_units": 0, + }, + { + "description": "Diesel, fuel code, kWh", + "input": { + "quantity": 100000, + "units": "kWh", + "target_ci": 79.28, + "ci_of_fuel": 12.14, + "energy_density": 3.6, + "eer": 2.5, + }, + "expected_compliance_units": -67, + }, + { + "description": "Gasoline, default carbon intensity, m³", + "input": { + "quantity": 100000, + "units": "m³", + "target_ci": 78.68, + "ci_of_fuel": 63.91, + "energy_density": 38.27, + "eer": 0.9, + }, + "expected_compliance_units": -26, + }, +] + + +# Adjusted create_sample_fe_data function (if necessary) +def create_sample_fe_data(): + return FuelExportCreateUpdateSchema( + compliance_report_id=1, + fuel_type_id=1, + fuel_category_id=1, + end_use_id=1, + fuel_code_id=1, + compliance_period="2024", # Schema-only field + quantity=1000.0, + units="L", + energy_density=35.0, + group_uuid=str(uuid4()), + version=0, + provisionOfTheActId=123, + exportDate=datetime.now().date(), + ) + + +# Adjusted tests +@pytest.mark.anyio +async def test_create_fuel_export_success( + fuel_export_action_service, mock_repo, mock_fuel_code_repo +): + fe_data = create_sample_fe_data() + user_type = UserTypeEnum.SUPPLIER + + # Mock the response from get_standardized_fuel_data + mock_fuel_code_repo.get_standardized_fuel_data.return_value = CarbonIntensityResult( + effective_carbon_intensity=50.0, + target_ci=80.0, + eer=1.0, + energy_density=35.0, + uci=None, + ) + + fe_data.fuel_type_id = 3 + fe_data.fuel_category_id = 2 + fe_data.provision_of_the_act_id = 3 + + # Exclude invalid fields and set related objects + fe_data_dict = 
fe_data.model_dump(exclude=FUEL_EXPORT_EXCLUDE_FIELDS) + + # Mock the created fuel export + created_export = FuelExport( + **fe_data_dict, + ) + created_export.compliance_units = -100 + created_export.fuel_type = { + "fuel_type_id": 3, + "fuel_type": "Electricity", + "units": "kWh", + } + created_export.fuel_category = {"category": "Diesel"} + created_export.units = "Litres" + mock_repo.create_fuel_export.return_value = created_export + + # Call the method under test + result = await fuel_export_action_service.create_fuel_export(fe_data, user_type) + + # Assertions + assert result == FuelExportSchema.model_validate(created_export) + mock_fuel_code_repo.get_standardized_fuel_data.assert_awaited_once_with( + fuel_type_id=fe_data.fuel_type_id, + fuel_category_id=fe_data.fuel_category_id, + end_use_id=fe_data.end_use_id, + fuel_code_id=fe_data.fuel_code_id, + compliance_period=fe_data.compliance_period, + ) + mock_repo.create_fuel_export.assert_awaited_once() + # Ensure compliance units were calculated correctly + assert result.compliance_units < 0 + + +@pytest.mark.anyio +async def test_update_fuel_export_success_existing_report( + fuel_export_action_service, mock_repo, mock_fuel_code_repo +): + fe_data = create_sample_fe_data() + user_type = UserTypeEnum.SUPPLIER + + # Exclude invalid fields + fe_data_dict = fe_data.model_dump(exclude=FUEL_EXPORT_EXCLUDE_FIELDS) + + # Mock existing export with matching compliance_report_id + existing_export = FuelExport( + **fe_data_dict, + fuel_export_id=1, + ) + existing_export.compliance_units = -100 + existing_export.fuel_type = { + "fuel_type_id": 3, + "fuel_type": "Electricity", + "units": "kWh", + } + existing_export.fuel_category = {"category": "Diesel"} + existing_export.units = "Litres" + mock_repo.get_fuel_export_version_by_user.return_value = existing_export + + # Mock the response from get_standardized_fuel_data + mock_fuel_code_repo.get_standardized_fuel_data.return_value = CarbonIntensityResult( + effective_carbon_intensity=55.0, + target_ci=85.0, + eer=1.2, + energy_density=36.0, + uci=None, + ) + + # Mock the updated fuel export + updated_export = FuelExport( + **fe_data_dict, + fuel_export_id=1, + ) + updated_export.compliance_units = -150 + updated_export.fuel_type = { + "fuel_type_id": 3, + "fuel_type": "Electricity", + "units": "kWh", + } + updated_export.fuel_category = {"category": "Diesel"} + updated_export.units = "Litres" + mock_repo.update_fuel_export.return_value = updated_export + + # Call the method under test + result = await fuel_export_action_service.update_fuel_export(fe_data, user_type) + + # Assertions + assert result == FuelExportSchema.model_validate(updated_export) + mock_repo.get_fuel_export_version_by_user.assert_awaited_once_with( + fe_data.group_uuid, fe_data.version, user_type + ) + mock_repo.update_fuel_export.assert_awaited_once() + mock_fuel_code_repo.get_standardized_fuel_data.assert_awaited_once() + # Ensure compliance units were updated correctly + assert result.compliance_units == -150 + + +@pytest.mark.anyio +async def test_update_fuel_export_create_new_version( + fuel_export_action_service, mock_repo, mock_fuel_code_repo +): + fe_data = create_sample_fe_data() + fe_data.compliance_report_id = 2 # Different compliance report ID + user_type = UserTypeEnum.SUPPLIER + + # Exclude invalid fields + fe_data_dict = fe_data.model_dump(exclude=FUEL_EXPORT_EXCLUDE_FIELDS) + + # Mock existing export with different compliance_report_id + existing_export = FuelExport( + **fe_data_dict, + fuel_export_id=1, + ) + 
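# Added note: the original row belongs to compliance report 1 while this update + # arrives from report 2, so the service should create a new version rather than + # mutate the row in place. +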
existing_export.compliance_report_id = 1 # Original compliance_report_id + existing_export.compliance_units = -100 + existing_export.version = 0 + existing_export.fuel_type = { + "fuel_type_id": 3, + "fuel_type": "Electricity", + "units": "kWh", + } + existing_export.fuel_category = {"category": "Diesel"} + existing_export.units = "Litres" + mock_repo.get_fuel_export_version_by_user.return_value = existing_export + + # Mock the response from get_standardized_fuel_data + mock_fuel_code_repo.get_standardized_fuel_data.return_value = CarbonIntensityResult( + effective_carbon_intensity=60.0, + target_ci=90.0, + eer=1.5, + energy_density=37.0, + uci=None, + ) + + # Mock the newly created export (new version) + new_export = FuelExport( + **fe_data_dict, + fuel_export_id=2, + version=existing_export.version + 1, + ) + new_export.compliance_units = -150 + new_export.fuel_type = { + "fuel_type_id": 3, + "fuel_type": "Electricity", + "units": "kWh", + } + new_export.fuel_category = {"category": "Diesel"} + new_export.units = "Litres" + mock_repo.create_fuel_export.return_value = new_export + + # Call the method under test + result = await fuel_export_action_service.update_fuel_export(fe_data, user_type) + + # Assertions + assert result == FuelExportSchema.model_validate(new_export) + mock_repo.get_fuel_export_version_by_user.assert_awaited_once_with( + fe_data.group_uuid, fe_data.version, user_type + ) + mock_repo.create_fuel_export.assert_awaited_once() + mock_fuel_code_repo.get_standardized_fuel_data.assert_awaited_once() + # Ensure compliance units were calculated correctly + assert result.compliance_units == -150 + + +@pytest.mark.anyio +async def test_delete_fuel_export_success( + fuel_export_action_service, mock_repo, mock_fuel_code_repo +): + fe_data = create_sample_fe_data() + user_type = UserTypeEnum.SUPPLIER + + # Exclude invalid fields + fe_data_dict = fe_data.model_dump(exclude=FUEL_EXPORT_EXCLUDE_FIELDS) + + # Mock existing export + existing_export = FuelExport( + **fe_data_dict, + fuel_export_id=1, + version=0, + action_type=ActionTypeEnum.CREATE, + ) + existing_export.compliance_units = -100 + existing_export.fuel_type = { + "fuel_type_id": 3, + "fuel_type": "Electricity", + "units": "kWh", + } + existing_export.fuel_category = {"category": "Diesel"} + existing_export.units = "Litres" + mock_repo.get_latest_fuel_export_by_group_uuid.return_value = existing_export + + # Mock the deletion export + deleted_export = FuelExport( + **fe_data_dict, + fuel_export_id=2, + version=1, + action_type=ActionTypeEnum.DELETE, + ) + deleted_export.compliance_units = 0 + deleted_export.fuel_type = { + "fuel_type_id": 3, + "fuel_type": "Electricity", + "units": "kWh", + } + deleted_export.fuel_category = {"category": "Diesel"} + deleted_export.units = "Litres" + mock_repo.create_fuel_export.return_value = deleted_export + + # Call the method under test + result = await fuel_export_action_service.delete_fuel_export(fe_data, user_type) + + # Assertions + assert isinstance(result, DeleteFuelExportResponseSchema) + assert result.success is True + assert result.message == "Fuel export record marked as deleted." 
+ mock_repo.get_latest_fuel_export_by_group_uuid.assert_awaited_once_with( + fe_data.group_uuid + ) + mock_repo.create_fuel_export.assert_awaited_once() + + +@pytest.mark.anyio +async def test_delete_fuel_export_already_deleted( + fuel_export_action_service, mock_repo, mock_fuel_code_repo +): + fe_data = create_sample_fe_data() + user_type = UserTypeEnum.SUPPLIER + + # Exclude invalid fields + fe_data_dict = fe_data.model_dump(exclude=FUEL_EXPORT_EXCLUDE_FIELDS) + + # Mock existing export already marked as deleted + existing_export = FuelExport( + **fe_data_dict, + fuel_export_id=1, + version=1, + action_type=ActionTypeEnum.DELETE, + ) + existing_export.compliance_units = 0 + existing_export.fuel_type = { + "fuel_type_id": 3, + "fuel_type": "Electricity", + "units": "kWh", + } + existing_export.fuel_category = {"category": "Diesel"} + existing_export.units = "Litres" + mock_repo.get_latest_fuel_export_by_group_uuid.return_value = existing_export + + # Call the method under test + result = await fuel_export_action_service.delete_fuel_export(fe_data, user_type) + + # Assertions + assert isinstance(result, DeleteFuelExportResponseSchema) + assert result.success is True + assert result.message == "Fuel export record already deleted." + mock_repo.get_latest_fuel_export_by_group_uuid.assert_awaited_once_with( + fe_data.group_uuid + ) + mock_repo.create_fuel_export.assert_not_awaited() + + +@pytest.mark.anyio +async def test_populate_fuel_export_fields( + fuel_export_action_service, mock_fuel_code_repo +): + fe_data = create_sample_fe_data() + + # Exclude invalid fields + fe_data_dict = fe_data.model_dump(exclude=FUEL_EXPORT_EXCLUDE_FIELDS) + + # Create a FuelExport instance without populated fields + fuel_export = FuelExport(**fe_data_dict) + + # Mock standardized fuel data + mock_fuel_code_repo.get_standardized_fuel_data.return_value = CarbonIntensityResult( + effective_carbon_intensity=50.0, + target_ci=80.0, + eer=1.0, + energy_density=fe_data.energy_density, + uci=None, + ) + + # Call the method under test + populated_export = await fuel_export_action_service._populate_fuel_export_fields( + fuel_export, fe_data + ) + + # Assertions + assert populated_export.units == QuantityUnitsEnum(fe_data.units) + assert populated_export.ci_of_fuel == 50.0 + assert populated_export.target_ci == 80.0 + assert populated_export.eer == 1 # Default EER + assert populated_export.energy_density == fe_data.energy_density + # Energy calculation + assert populated_export.energy == round(fe_data.energy_density * fe_data.quantity) + # Compliance units calculation (should be negative) + assert populated_export.compliance_units < 0 + + mock_fuel_code_repo.get_standardized_fuel_data.assert_awaited_once_with( + fuel_type_id=fuel_export.fuel_type_id, + fuel_category_id=fuel_export.fuel_category_id, + end_use_id=fuel_export.end_use_id, + fuel_code_id=fuel_export.fuel_code_id, + compliance_period=fe_data.compliance_period, + ) + + +@pytest.mark.anyio +@pytest.mark.parametrize("case", test_cases) +async def test_compliance_units_calculation( + case, fuel_export_action_service, mock_repo, mock_fuel_code_repo +): + fe_data = FuelExportCreateUpdateSchema( + compliance_report_id=1, + fuel_type_id=3, + fuel_category_id=2, + end_use_id=1, + fuel_code_id=1, + compliance_period="2024", # Schema-only field + quantity=case["input"]["quantity"], + units=case["input"]["units"], + energy_density=case["input"]["energy_density"], + group_uuid=str(uuid4()), + version=0, + provisionOfTheActId=123, + exportDate=datetime.now().date(), + ) + + 
# Mock standardized fuel data + mock_fuel_code_repo.get_standardized_fuel_data.return_value = CarbonIntensityResult( + effective_carbon_intensity=case["input"]["ci_of_fuel"], + target_ci=case["input"]["target_ci"], + eer=case["input"]["eer"], + energy_density=case["input"]["energy_density"], + uci=None, + ) + + # Define the side_effect function to simulate repository behavior + async def create_fuel_export_side_effect(fuel_export: FuelExport): + """ + Simulate the behavior of the repository's create_fuel_export method. + This function populates necessary fields that the service would normally handle. + """ + # Assign a mock fuel_export_id + fuel_export.fuel_export_id = 1 # or any unique identifier as needed + + # Simulate populating related objects + fuel_export.fuel_type = { + "fuel_type_id": fuel_export.fuel_type_id, + "fuel_type": "Electricity", + "units": "kWh", + } + fuel_export.fuel_category = {"category": "Diesel"} + fuel_export.units = "Litres" + + # The service should have already calculated compliance_units + return fuel_export + + # Set the side_effect for create_fuel_export + mock_repo.create_fuel_export.side_effect = create_fuel_export_side_effect + + # Call the service to create the fuel export + result = await fuel_export_action_service.create_fuel_export( + fe_data, UserTypeEnum.SUPPLIER + ) + + # Assign mocked related objects for schema validation + result.fuel_type = { + "fuel_type_id": fe_data.fuel_type_id, + "fuel_type": "Electricity", + "units": "kWh", + } + result.fuel_category = {"category": "Diesel"} + result.units = fe_data.units + + # Assertions + assert ( + result.compliance_units == case["expected_compliance_units"] + ), f"Failed {case['description']}. Expected {case['expected_compliance_units']}, got {result.compliance_units}" + mock_fuel_code_repo.get_standardized_fuel_data.assert_awaited_once_with( + fuel_type_id=fe_data.fuel_type_id, + fuel_category_id=fe_data.fuel_category_id, + end_use_id=fe_data.end_use_id, + fuel_code_id=fe_data.fuel_code_id, + compliance_period=fe_data.compliance_period, + ) + mock_repo.create_fuel_export.assert_awaited_once() diff --git a/backend/lcfs/tests/fuel_export/test_fuel_exports_repo.py b/backend/lcfs/tests/fuel_export/test_fuel_exports_repo.py new file mode 100644 index 000000000..539bb834a --- /dev/null +++ b/backend/lcfs/tests/fuel_export/test_fuel_exports_repo.py @@ -0,0 +1,248 @@ +import pytest +from unittest.mock import MagicMock, AsyncMock +from sqlalchemy.exc import SQLAlchemyError +from sqlalchemy import select, and_, delete + +from lcfs.db.models.compliance import FuelExport, ComplianceReport +from lcfs.web.exception.exceptions import DatabaseException +from lcfs.db.base import UserTypeEnum, ActionTypeEnum + + +# get_fuel_export_table_options +@pytest.mark.anyio +async def test_get_fuel_export_table_options_success(mock_db, fuel_export_repo): + mock_result = MagicMock() + expected_data = [] + mock_result.all.return_value = expected_data + + mock_db.execute.return_value = mock_result + + result = await fuel_export_repo.get_fuel_export_table_options("2024") + + mock_db.execute.assert_called_once() + mock_result.all.assert_called_once() + assert result == expected_data + + +# get_fuel_export_list +@pytest.mark.anyio +async def test_get_fuel_export_list_success(fuel_export_repo, mock_db): + compliance_report_id = 1 + expected_result = [FuelExport(fuel_export_id=1), FuelExport(fuel_export_id=2)] + + # Mock the group UUID query + group_uuid_result = MagicMock() + group_uuid_result.scalar.return_value = "test-uuid" + 
mock_db.execute.return_value = group_uuid_result + + # Mock the effective fuel exports query + async def mock_get_effective_exports(*args, **kwargs): + return expected_result + + fuel_export_repo.get_effective_fuel_exports = mock_get_effective_exports + + result = await fuel_export_repo.get_fuel_export_list(compliance_report_id) + + assert result == expected_result + assert mock_db.execute.call_count == 1 + + +@pytest.mark.anyio +async def test_get_fuel_export_list_no_results(fuel_export_repo, mock_db): + compliance_report_id = 999 + + # Mock the group UUID query returning None + group_uuid_result = MagicMock() + group_uuid_result.scalar.return_value = None + mock_db.execute.return_value = group_uuid_result + + result = await fuel_export_repo.get_fuel_export_list(compliance_report_id) + + assert result == [] + assert mock_db.execute.call_count == 1 + + +@pytest.mark.anyio +async def test_get_fuel_export_list_db_exception(fuel_export_repo, mock_db): + compliance_report_id = 1 + mock_db.execute.side_effect = SQLAlchemyError("DB error") + + with pytest.raises(DatabaseException): + await fuel_export_repo.get_fuel_export_list(compliance_report_id) + + mock_db.execute.assert_called_once() + + +# get_fuel_exports_paginated +@pytest.mark.anyio +async def test_get_fuel_exports_paginated_success(fuel_export_repo, mock_db): + from lcfs.web.api.base import PaginationRequestSchema + + compliance_report_id = 1 + expected_exports = [FuelExport(fuel_export_id=1), FuelExport(fuel_export_id=2)] + + # Mock the group UUID query + group_uuid_result = MagicMock() + group_uuid_result.scalar.return_value = "test-uuid" + mock_db.execute.return_value = group_uuid_result + + # Mock the effective fuel exports query + async def mock_get_effective_exports(*args, **kwargs): + return expected_exports + + fuel_export_repo.get_effective_fuel_exports = mock_get_effective_exports + + pagination = PaginationRequestSchema(page=1, size=10) + result, total = await fuel_export_repo.get_fuel_exports_paginated( + pagination, compliance_report_id + ) + + assert result == expected_exports[:10] + assert total == len(expected_exports) + assert mock_db.execute.call_count == 1 + + +# get_fuel_export_by_id +@pytest.mark.anyio +async def test_get_fuel_export_by_id_success(fuel_export_repo, mock_db): + fuel_export_id = 1 + expected_fuel_export = FuelExport(fuel_export_id=fuel_export_id) + + mock_result = MagicMock() + mock_result.unique.return_value.scalar_one_or_none.return_value = ( + expected_fuel_export + ) + mock_db.execute.return_value = mock_result + + result = await fuel_export_repo.get_fuel_export_by_id(fuel_export_id) + + mock_db.execute.assert_called_once() + mock_result.unique.assert_called_once() + mock_result.unique.return_value.scalar_one_or_none.assert_called_once() + assert result == expected_fuel_export + + +# Version control related tests +@pytest.mark.anyio +async def test_get_fuel_export_version_by_user_success(fuel_export_repo, mock_db): + group_uuid = "test-uuid" + version = 0 + user_type = UserTypeEnum.SUPPLIER + expected_export = FuelExport( + group_uuid=group_uuid, version=version, user_type=user_type + ) + + mock_result = MagicMock() + mock_result.scalars.return_value.first.return_value = expected_export + mock_db.execute.return_value = mock_result + + result = await fuel_export_repo.get_fuel_export_version_by_user( + group_uuid, version, user_type + ) + + assert result == expected_export + mock_db.execute.assert_called_once() + + +@pytest.mark.anyio +async def 
test_get_latest_fuel_export_by_group_uuid_success(fuel_export_repo, mock_db): + group_uuid = "test-uuid" + expected_export = FuelExport( + group_uuid=group_uuid, version=1, user_type=UserTypeEnum.SUPPLIER + ) + + mock_result = MagicMock() + mock_result.scalars.return_value.first.return_value = expected_export + mock_db.execute.return_value = mock_result + + result = await fuel_export_repo.get_latest_fuel_export_by_group_uuid(group_uuid) + + assert result == expected_export + mock_db.execute.assert_called_once() + + +@pytest.mark.anyio +async def test_get_effective_fuel_exports_success(fuel_export_repo, mock_db): + group_uuid = "test-uuid" + expected_exports = [ + FuelExport(group_uuid=group_uuid, version=1, user_type=UserTypeEnum.SUPPLIER) + ] + + mock_result = MagicMock() + mock_result.unique.return_value.scalars.return_value.all.return_value = ( + expected_exports + ) + mock_db.execute.return_value = mock_result + + result = await fuel_export_repo.get_effective_fuel_exports(group_uuid) + + assert result == expected_exports + mock_db.execute.assert_called_once() + + +# CRUD operations +@pytest.mark.anyio +async def test_create_fuel_export_success(fuel_export_repo, mock_db): + fuel_export = FuelExport( + compliance_report_id=1, + group_uuid="test-uuid", + version=0, + user_type=UserTypeEnum.SUPPLIER, + action_type=ActionTypeEnum.CREATE, + ) + + mock_db.add = MagicMock() + mock_db.flush = AsyncMock() + mock_db.refresh = AsyncMock() + + result = await fuel_export_repo.create_fuel_export(fuel_export) + + assert result == fuel_export + mock_db.add.assert_called_once_with(fuel_export) + assert mock_db.flush.await_count == 1 + assert mock_db.refresh.await_count == 1 + + +@pytest.mark.anyio +async def test_update_fuel_export_success(fuel_export_repo, mock_db): + fuel_export = FuelExport( + fuel_export_id=1, + group_uuid="test-uuid", + version=1, + user_type=UserTypeEnum.SUPPLIER, + action_type=ActionTypeEnum.UPDATE, + ) + updated_fuel_export = FuelExport(fuel_export_id=1) + + mock_db.merge = AsyncMock(return_value=updated_fuel_export) + mock_db.flush = AsyncMock() + mock_db.refresh = AsyncMock() + + result = await fuel_export_repo.update_fuel_export(fuel_export) + + assert result == updated_fuel_export + mock_db.merge.assert_called_once_with(fuel_export) + mock_db.flush.assert_awaited_once() + mock_db.refresh.assert_awaited_once_with( + updated_fuel_export, + [ + "fuel_category", + "fuel_type", + "provision_of_the_act", + "end_use_type", + ], + ) + + +@pytest.mark.anyio +async def test_delete_fuel_export_success(fuel_export_repo, mock_db): + fuel_export_id = 1 + + # Mock the execute method for deletion + mock_db.execute = AsyncMock() + mock_db.flush = AsyncMock() + + await fuel_export_repo.delete_fuel_export(fuel_export_id) + + mock_db.execute.assert_called_once() + mock_db.flush.assert_called_once() diff --git a/backend/lcfs/tests/fuel_export/test_fuel_exports_services.py b/backend/lcfs/tests/fuel_export/test_fuel_exports_services.py new file mode 100644 index 000000000..92d35bd4d --- /dev/null +++ b/backend/lcfs/tests/fuel_export/test_fuel_exports_services.py @@ -0,0 +1,235 @@ +import pytest +from lcfs.web.api.fuel_export.schema import ( + FuelExportSchema, + FuelExportCreateUpdateSchema, + FuelTypeSchema, + FuelCategoryResponseSchema, + FuelExportsSchema, + FuelTypeOptionsResponse, + DeleteFuelExportResponseSchema, +) +from datetime import date +from unittest.mock import MagicMock, patch, AsyncMock +from lcfs.web.api.fuel_export.services import FuelExportServices +from 
lcfs.web.api.fuel_export.actions_service import FuelExportActionService +from lcfs.web.api.fuel_export.repo import FuelExportRepository +from lcfs.db.models.compliance.FuelExport import FuelExport +from lcfs.db.base import ActionTypeEnum, UserTypeEnum + +# Mock common data for reuse +mock_fuel_type = FuelTypeSchema( + fuel_type_id=1, + fuel_type="Diesel", + fossil_derived=True, + provision_1_id=None, + provision_2_id=None, + default_carbon_intensity=10.5, + units="L", +) + +mock_fuel_category = FuelCategoryResponseSchema( + fuel_category_id=1, + category="Diesel", +) + + +# FuelExportServices Tests +@pytest.mark.anyio +async def test_get_fuel_export_options_success(fuel_export_service, mock_repo): + mock_repo.get_fuel_export_table_options.return_value = [] + result = await fuel_export_service.get_fuel_export_options("2024") + assert isinstance(result, FuelTypeOptionsResponse) + mock_repo.get_fuel_export_table_options.assert_called_once_with("2024") + + +@pytest.mark.anyio +async def test_get_fuel_export_list_success(fuel_export_service, mock_repo): + # Create a mock FuelExport with all required fields + mock_export = FuelExport( + fuel_export_id=1, + compliance_report_id=1, + fuel_type_id=1, + fuel_type=mock_fuel_type, + fuel_category_id=1, + fuel_category=mock_fuel_category, + quantity=100, + units="L", + export_date=date.today(), + group_uuid="test-uuid", + provision_of_the_act_id=1, + version=0, + user_type=UserTypeEnum.SUPPLIER, + action_type=ActionTypeEnum.CREATE, + ) + mock_repo.get_fuel_export_list.return_value = [mock_export] + + result = await fuel_export_service.get_fuel_export_list(1) + + assert isinstance(result, FuelExportsSchema) + mock_repo.get_fuel_export_list.assert_called_once_with(1) + + +@pytest.mark.anyio +async def test_get_fuel_exports_paginated_success(fuel_export_service, mock_repo): + mock_export = FuelExport( + fuel_export_id=1, + compliance_report_id=1, + fuel_type_id=1, + fuel_type=mock_fuel_type, + fuel_category_id=1, + fuel_category=mock_fuel_category, + quantity=100, + units="L", + export_date=date.today(), + ) + mock_repo.get_fuel_exports_paginated.return_value = ([mock_export], 1) + + pagination_mock = MagicMock() + pagination_mock.page = 1 + pagination_mock.size = 10 + + result = await fuel_export_service.get_fuel_exports_paginated(pagination_mock, 1) + + assert isinstance(result, FuelExportsSchema) + assert result.pagination.total == 1 + assert result.pagination.page == 1 + assert result.pagination.size == 10 + mock_repo.get_fuel_exports_paginated.assert_called_once_with(pagination_mock, 1) + + +# FuelExportActionService Tests +@pytest.mark.anyio +async def test_action_create_fuel_export_success(fuel_export_action_service, mock_repo): + input_data = FuelExportCreateUpdateSchema( + compliance_report_id=1, + fuel_type_id=1, + fuel_type=mock_fuel_type.dict(), + fuel_category_id=1, + fuel_category=mock_fuel_category.dict(), + provisionOfTheActId=1, + provisionOfTheAct={"provision_of_the_act_id": 1, "name": "Act Provision"}, + quantity=100, + units="L", + export_date=date.today(), + compliance_period="2024", + ) + + mock_created_export = FuelExport( + fuel_export_id=1, + compliance_report_id=1, + group_uuid="test-uuid", + version=0, + user_type=UserTypeEnum.SUPPLIER, + action_type=ActionTypeEnum.CREATE, + provision_of_the_act_id=1, + fuel_type_id=1, + fuel_category_id=1, + quantity=100, + units="L", + export_date=date.today(), + fuel_type=mock_fuel_type.dict(), + fuel_category=mock_fuel_category.dict(), + ) + mock_repo.create_fuel_export.return_value = 
mock_created_export + + result = await fuel_export_action_service.create_fuel_export( + input_data, UserTypeEnum.SUPPLIER + ) + assert isinstance(result, FuelExportSchema) + mock_repo.create_fuel_export.assert_called_once() + + +@pytest.mark.anyio +async def test_action_update_fuel_export_success(fuel_export_action_service, mock_repo): + input_data = FuelExportCreateUpdateSchema( + fuel_export_id=1, + compliance_report_id=1, + group_uuid="test-uuid", + version=0, + fuel_type_id=1, + fuel_type=mock_fuel_type.dict(), + fuel_category_id=1, + fuel_category=mock_fuel_category.dict(), + quantity=100, + provisionOfTheActId=1, + provisionOfTheAct={"provision_of_the_act_id": 1, "name": "Act Provision"}, + units="L", + export_date=date.today(), + compliance_period="2024", + ) + + mock_existing_export = FuelExport( + fuel_export_id=1, + compliance_report_id=1, + group_uuid="test-uuid", + version=0, + user_type=UserTypeEnum.SUPPLIER, + action_type=ActionTypeEnum.CREATE, + fuel_type_id=1, + fuel_category_id=1, + provision_of_the_act_id=1, + quantity=100, + units="L", + export_date=date.today(), + fuel_type=mock_fuel_type.dict(), + fuel_category=mock_fuel_category.dict(), + ) + mock_repo.get_fuel_export_version_by_user.return_value = mock_existing_export + mock_repo.update_fuel_export.return_value = mock_existing_export + + result = await fuel_export_action_service.update_fuel_export( + input_data, UserTypeEnum.SUPPLIER + ) + assert isinstance(result, FuelExportSchema) + mock_repo.get_fuel_export_version_by_user.assert_called_once() + mock_repo.update_fuel_export.assert_called_once() + + +@pytest.mark.anyio +async def test_action_delete_fuel_export_success(fuel_export_action_service, mock_repo): + input_data = FuelExportCreateUpdateSchema( + fuel_export_id=1, + compliance_report_id=1, + group_uuid="test-uuid", + version=0, + fuel_type_id=1, + fuel_type=mock_fuel_type.dict(), + fuel_category_id=1, + provisionOfTheActId=1, + provisionOfTheAct={"provisionOfTheActId": 1, "name": "Act Provision"}, + fuel_category=mock_fuel_category.dict(), + quantity=100, + units="L", + export_date=date.today(), + compliance_period="2024", + ) + + mock_latest_export = FuelExport( + fuel_export_id=1, + compliance_report_id=1, + group_uuid="test-uuid", + version=0, + user_type=UserTypeEnum.SUPPLIER, + action_type=ActionTypeEnum.CREATE, + fuel_type_id=1, + fuel_category_id=1, + provision_of_the_act_id=1, + quantity=100, + units="L", + export_date=date.today(), + fuel_type=mock_fuel_type.dict(), + fuel_category=mock_fuel_category.dict(), + ) + + mock_repo.get_latest_fuel_export_by_group_uuid.return_value = mock_latest_export + mock_repo.create_fuel_export.return_value = None + + result = await fuel_export_action_service.delete_fuel_export( + input_data, UserTypeEnum.SUPPLIER + ) + + assert isinstance(result, DeleteFuelExportResponseSchema) + assert result.success is True + assert result.message == "Fuel export record marked as deleted." 
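+    # Deletion is versioned, not destructive: the action service fetches the
+    # latest row for the group UUID and appends a new DELETE-action row, which
+    # is why create_fuel_export (not a delete call) is asserted below.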
+    mock_repo.get_latest_fuel_export_by_group_uuid.assert_called_once()
+    mock_repo.create_fuel_export.assert_called_once()
diff --git a/backend/lcfs/tests/fuel_export/test_fuel_exports_views.py b/backend/lcfs/tests/fuel_export/test_fuel_exports_views.py
new file mode 100644
index 000000000..9afa2e68d
--- /dev/null
+++ b/backend/lcfs/tests/fuel_export/test_fuel_exports_views.py
@@ -0,0 +1,316 @@
+import pytest
+import json
+from unittest.mock import patch
+from lcfs.db.models.user.Role import RoleEnum
+from httpx import AsyncClient
+from fastapi import FastAPI
+from fastapi.encoders import jsonable_encoder
+
+from lcfs.web.api.fuel_export.schema import (
+    FuelExportSchema,
+    FuelExportCreateUpdateSchema,
+    FuelTypeOptionsResponse,
+    CommonPaginatedReportRequestSchema,
+    FuelCategoryResponseSchema,
+    FuelTypeSchema,
+    ProvisionOfTheActSchema,
+    FuelExportsSchema,
+    DeleteFuelExportResponseSchema,
+)
+
+
+# get_fuel_export_table_options
+@pytest.mark.anyio
+async def test_get_fuel_export_table_options_success(
+    client: AsyncClient, fastapi_app: FastAPI, set_mock_user
+):
+    with patch(
+        "lcfs.web.api.fuel_export.views.FuelExportServices.get_fuel_export_options"
+    ) as mock_get_fuel_export_options:
+
+        mock_fuel_export_options = FuelTypeOptionsResponse(fuel_types=[])
+
+        mock_get_fuel_export_options.return_value = mock_fuel_export_options
+
+        set_mock_user(fastapi_app, [RoleEnum.ANALYST])
+
+        url = fastapi_app.url_path_for("get_fuel_export_table_options")
+
+        response = await client.get(url, params={"compliancePeriod": "2024"})
+
+        assert response.status_code == 200
+
+        expected_response = json.loads(mock_fuel_export_options.json(by_alias=True))
+
+        assert response.json() == expected_response
+
+
+# get_fuel_exports
+@pytest.mark.anyio
+async def test_get_fuel_exports_invalid_payload(
+    client: AsyncClient, fastapi_app: FastAPI, set_mock_user
+):
+    set_mock_user(fastapi_app, [RoleEnum.ANALYST])
+
+    url = fastapi_app.url_path_for("get_fuel_exports")
+
+    payload = {}
+
+    response = await client.post(
+        url,
+        json=payload,
+    )
+
+    assert response.status_code == 422
+
+
+@pytest.mark.anyio
+async def test_get_fuel_exports_paginated_success(
+    client: AsyncClient, fastapi_app: FastAPI, set_mock_user
+):
+    with patch(
+        "lcfs.web.api.fuel_export.views.FuelExportServices.get_fuel_exports_paginated"
+    ) as mock_get_fuel_exports_paginated, patch(
+        "lcfs.web.api.fuel_export.views.ComplianceReportValidation.validate_organization_access"
+    ) as mock_validate_organization_access:
+
+        mock_get_fuel_exports_paginated.return_value = FuelExportsSchema(
+            fuel_exports=[]
+        )
+        mock_validate_organization_access.return_value = True
+        set_mock_user(fastapi_app, [RoleEnum.ANALYST])
+
+        url = fastapi_app.url_path_for("get_fuel_exports")
+
+        payload = CommonPaginatedReportRequestSchema(
+            compliance_report_id=1, page=1, size=1, sort_orders=[], filters=[]
+        ).dict()
+
+        response = await client.post(
+            url,
+            json=payload,
+        )
+
+        assert response.status_code == 200
+
+
+@pytest.mark.anyio
+async def test_get_fuel_exports_list_success(
+    client: AsyncClient, fastapi_app: FastAPI, set_mock_user
+):
+    with patch(
+        "lcfs.web.api.fuel_export.views.FuelExportServices.get_fuel_export_list"
+    ) as mock_get_fuel_export_list, patch(
+        "lcfs.web.api.fuel_export.views.ComplianceReportValidation.validate_organization_access"
+    ) as mock_validate_organization_access:
+
+        mock_get_fuel_export_list.return_value = FuelExportsSchema(fuel_exports=[])
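+        # The list and paginated fetches share one POST endpoint; the view
+        # presumably branches on whether pagination fields (page/size) are
+        # present in the request body.
+        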
mock_validate_organization_access.return_value = True + set_mock_user(fastapi_app, [RoleEnum.ANALYST]) + + url = fastapi_app.url_path_for("get_fuel_exports") + + payload = CommonPaginatedReportRequestSchema(compliance_report_id=1).dict() + + response = await client.post( + url, + json=payload, + ) + + assert response.status_code == 200 + + +# save_fuel_export_row +@pytest.mark.anyio +async def test_save_fuel_export_row_forbidden( + client: AsyncClient, fastapi_app: FastAPI, set_mock_user +): + + set_mock_user(fastapi_app, [RoleEnum.ANALYST]) + + url = fastapi_app.url_path_for("save_fuel_export_row") + + payload = { + "compliance_report_id": 1, + "fuel_type": "", + "fuel_type_id": 1, + "fuel_category": "", + "fuel_category_id": 1, + "provision_of_the_act": "", + "provision_of_the_act_id": 1, + "quantity": 1, + "units": "", + "export_date": "2024-01-01", + } + + response = await client.post( + url, + json=payload, + ) + assert response.status_code == 403 # Forbidden + + +@pytest.mark.anyio +async def test_save_fuel_export_row_invalid_payload( + client: AsyncClient, fastapi_app: FastAPI +): + + url = fastapi_app.url_path_for("save_fuel_export_row") + + payload = {} + + response = await client.post( + url, + json=payload, + ) + assert response.status_code == 422 + + +@pytest.mark.anyio +async def test_save_fuel_export_row_delete_success(client, fastapi_app, set_mock_user): + with patch( + "lcfs.web.api.fuel_export.actions_service.FuelExportActionService.delete_fuel_export" + ) as mock_delete_fuel_export, patch( + "lcfs.web.api.fuel_export.views.ComplianceReportValidation.validate_organization_access" + ) as mock_validate_organization_access: + + mock_delete_response = DeleteFuelExportResponseSchema( + success=True, message="fuel export row deleted successfully" + ) + + mock_validate_organization_access.return_value = None + mock_delete_fuel_export.return_value = mock_delete_response + + set_mock_user(fastapi_app, [RoleEnum.SUPPLIER]) + + url = fastapi_app.url_path_for("save_fuel_export_row") + + # Create payload using the schema with all required fields + payload = FuelExportCreateUpdateSchema( + fuel_export_id=1, + compliance_report_id=1, + fuel_type="Diesel", + fuel_type_id=1, + fuel_category="Diesel", + fuel_category_id=1, + provision_of_the_act="Section 6", + provision_of_the_act_id=1, + quantity=100, + units="L", + export_date="2024-01-01", + deleted=True, + ).dict(exclude_none=True) + + response = await client.post(url, json=jsonable_encoder(payload)) + + assert response.status_code == 201 + assert response.json() == mock_delete_response.dict() + mock_delete_fuel_export.assert_called_once() + + +@pytest.mark.anyio +async def test_save_fuel_export_row_update_success(client, fastapi_app, set_mock_user): + with patch( + "lcfs.web.api.fuel_export.actions_service.FuelExportActionService.update_fuel_export" + ) as mock_update_fuel_export, patch( + "lcfs.web.api.fuel_export.views.ComplianceReportValidation.validate_organization_access" + ) as mock_validate_organization_access: + + mock_fuel_export = FuelExportSchema( + fuel_export_id=1, + compliance_report_id=1, + fuel_type_id=1, + fuel_type=FuelTypeSchema( + fuel_type_id=1, + fuel_type="Diesel", + units="L", + default_carbon_intensity=1, + ), + quantity=1, + units="L", + export_date="2024-01-01", + fuel_category_id=1, + fuel_category=FuelCategoryResponseSchema(category="Diesel"), + ) + + # Create update payload with all required fields + update_payload = FuelExportCreateUpdateSchema( + fuel_export_id=1, + compliance_report_id=1, + 
fuel_type="Diesel", + fuel_type_id=1, + fuel_category="Diesel", + fuel_category_id=1, + provision_of_the_act="Section 6", + provision_of_the_act_id=1, + quantity=1, + units="L", + export_date="2024-01-01", + ) + + mock_validate_organization_access.return_value = None + mock_update_fuel_export.return_value = mock_fuel_export + + set_mock_user(fastapi_app, [RoleEnum.SUPPLIER]) + + url = fastapi_app.url_path_for("save_fuel_export_row") + + response = await client.post( + url, json=jsonable_encoder(update_payload.dict(exclude_none=True)) + ) + + assert response.status_code == 201 + mock_update_fuel_export.assert_called_once() + + +@pytest.mark.anyio +async def test_save_fuel_export_row_create_success(client, fastapi_app, set_mock_user): + with patch( + "lcfs.web.api.fuel_export.actions_service.FuelExportActionService.create_fuel_export" + ) as mock_create_fuel_export, patch( + "lcfs.web.api.fuel_export.views.ComplianceReportValidation.validate_organization_access" + ) as mock_validate_organization_access: + + # Create payload with all required fields + create_payload = FuelExportCreateUpdateSchema( + compliance_report_id=1, + fuel_type="Diesel", + fuel_type_id=1, + fuel_category="Diesel", + fuel_category_id=1, + provision_of_the_act="Section 6", + provision_of_the_act_id=1, + quantity=1, + units="L", + export_date="2024-01-01", + ) + + mock_fuel_export = FuelExportSchema( + compliance_report_id=1, + fuel_type_id=1, + fuel_type=FuelTypeSchema( + fuel_type_id=1, + fuel_type="Diesel", + units="L", + default_carbon_intensity=1, + ), + quantity=1, + units="L", + export_date="2024-01-01", + fuel_category_id=1, + fuel_category=FuelCategoryResponseSchema(category="Diesel"), + ) + + mock_validate_organization_access.return_value = None + mock_create_fuel_export.return_value = mock_fuel_export + + set_mock_user(fastapi_app, [RoleEnum.SUPPLIER]) + + url = fastapi_app.url_path_for("save_fuel_export_row") + + response = await client.post( + url, json=jsonable_encoder(create_payload.dict(exclude_none=True)) + ) + + assert response.status_code == 201 + mock_create_fuel_export.assert_called_once() diff --git a/backend/lcfs/tests/fuel_supply/test_fuel_supplies_actions_service.py b/backend/lcfs/tests/fuel_supply/test_fuel_supplies_actions_service.py new file mode 100644 index 000000000..df6d6eee0 --- /dev/null +++ b/backend/lcfs/tests/fuel_supply/test_fuel_supplies_actions_service.py @@ -0,0 +1,622 @@ +# test_fuel_supply.py + +from datetime import datetime +from unittest.mock import AsyncMock +from uuid import uuid4 + +import pytest + +from lcfs.db.base import ActionTypeEnum, UserTypeEnum +from lcfs.db.models.compliance.ComplianceReport import QuantityUnitsEnum +from lcfs.db.models.compliance.FuelSupply import FuelSupply +from lcfs.web.api.fuel_code.repo import CarbonIntensityResult +from lcfs.web.api.fuel_supply.actions_service import FuelSupplyActionService +from lcfs.web.api.fuel_supply.schema import ( + FuelSupplyCreateUpdateSchema, + DeleteFuelSupplyResponseSchema, +) +from lcfs.web.utils.calculations import calculate_compliance_units + +# Constants defining which fields to exclude during model operations +FUEL_SUPPLY_EXCLUDE_FIELDS = { + "id", + "fuel_supply_id", + "compliance_period", + "deleted", + "group_uuid", + "user_type", + "version", + "action_type", + "units", +} + +# Example test cases from the dataset +test_cases = [ + { + "description": "Jet fuel, default carbon intensity, liters", + "input": { + "quantity": 100000, + "units": "L", + "target_ci": 88.83, + "ci_of_fuel": 88.83, + 
"energy_density": 36, + "eer": 1, + }, + "expected_compliance_units": 0, + }, + { + "description": "Diesel, prescribed carbon intensity, liters", + "input": { + "quantity": 100000, + "units": "L", + "target_ci": 79.28, + "ci_of_fuel": 94.38, + "energy_density": 38.65, + "eer": 1, + }, + "expected_compliance_units": -58, + }, + { + "description": "Gasoline, default carbon intensity, liters", + "input": { + "quantity": 100000, + "units": "L", + "target_ci": 78.68, + "ci_of_fuel": 93.67, + "energy_density": 34.69, + "eer": 1, + }, + "expected_compliance_units": -52, + }, + { + "description": "Diesel, fuel code, kWh", + "input": { + "quantity": 100000, + "units": "kWh", + "target_ci": 79.28, + "ci_of_fuel": 12.14, + "energy_density": 3.6, + "eer": 2.5, + }, + "expected_compliance_units": 67, + }, + { + "description": "Gasoline, default carbon intensity, m³", + "input": { + "quantity": 100000, + "units": "m³", + "target_ci": 78.68, + "ci_of_fuel": 63.91, + "energy_density": 38.27, + "eer": 0.9, + }, + "expected_compliance_units": 26, + }, +] + + +# Fixtures for mocks +@pytest.fixture +def mock_repo(): + """Mock FuelSupplyRepository.""" + repo = AsyncMock() + repo.create_fuel_supply = AsyncMock() + repo.update_fuel_supply = AsyncMock() + repo.get_fuel_supply_version_by_user = AsyncMock() + repo.get_latest_fuel_supply_by_group_uuid = AsyncMock() + return repo + + +@pytest.fixture +def mock_fuel_code_repo(): + """Mock FuelCodeRepository.""" + fuel_code_repo = AsyncMock() + fuel_code_repo.get_standardized_fuel_data = AsyncMock() + return fuel_code_repo + + +@pytest.fixture +def fuel_supply_action_service(mock_repo, mock_fuel_code_repo): + """Instantiate the FuelSupplyActionService with mocked dependencies.""" + return FuelSupplyActionService(repo=mock_repo, fuel_repo=mock_fuel_code_repo) + + +# Helper function to create sample FuelSupplyCreateUpdateSchema +def create_sample_fs_data(): + return FuelSupplyCreateUpdateSchema( + compliance_report_id=1, + fuel_type_id=1, + fuel_category_id=1, + end_use_id=1, + fuel_code_id=1, + compliance_period="2024", # Schema-only field + quantity=1000.0, + units="L", + energy_density=35.0, + group_uuid=str(uuid4()), + version=0, + provisionOfTheActId=123, + exportDate=datetime.now().date(), + ) + + +# Adjusted tests +@pytest.mark.anyio +async def test_create_fuel_supply_success( + fuel_supply_action_service, mock_repo, mock_fuel_code_repo +): + fe_data = create_sample_fs_data() + user_type = UserTypeEnum.SUPPLIER + + # Mock the response from get_standardized_fuel_data + mock_fuel_code_repo.get_standardized_fuel_data.return_value = CarbonIntensityResult( + effective_carbon_intensity=50.0, + target_ci=80.0, + eer=1.0, + energy_density=35.0, + uci=None, + ) + + fe_data.fuel_type_id = 3 + fe_data.fuel_category_id = 2 + fe_data.provision_of_the_act_id = 3 + + # Exclude invalid fields and set related objects + fe_data_dict = fe_data.model_dump(exclude=FUEL_SUPPLY_EXCLUDE_FIELDS) + + # Mock the created fuel supply + created_supply = FuelSupply( + **fe_data_dict, + fuel_supply_id=1, + version=0, + group_uuid="test_uuid", + user_type=UserTypeEnum.SUPPLIER, + action_type=ActionTypeEnum.UPDATE, + ) + created_supply.compliance_units = -100 + created_supply.fuel_type = { + "fuel_type_id": 3, + "fuel_type": "Electricity", + "units": "kWh", + } + created_supply.fuel_category = {"category": "Diesel"} + created_supply.units = "Litres" + mock_repo.create_fuel_supply.return_value = created_supply + + # Call the method under test + result = await 
fuel_supply_action_service.create_fuel_supply(fe_data, user_type) + + # Assign mocked related objects for schema validation + result.fuel_type = { + "fuel_type_id": fe_data.fuel_type_id, + "fuel_type": "Electricity", + "units": "kWh", + } + result.fuel_category = {"category": "Diesel"} + result.units = fe_data.units + + # Assertions + mock_fuel_code_repo.get_standardized_fuel_data.assert_awaited_once_with( + fuel_type_id=fe_data.fuel_type_id, + fuel_category_id=fe_data.fuel_category_id, + end_use_id=fe_data.end_use_id, + fuel_code_id=fe_data.fuel_code_id, + compliance_period=fe_data.compliance_period, + ) + mock_repo.create_fuel_supply.assert_awaited_once() + # Ensure compliance units were calculated correctly + assert result.compliance_units < 0 + + +@pytest.mark.anyio +async def test_update_fuel_supply_success_existing_report( + fuel_supply_action_service, mock_repo, mock_fuel_code_repo +): + fe_data = create_sample_fs_data() + user_type = UserTypeEnum.SUPPLIER + + # Exclude invalid fields + fe_data_dict = fe_data.model_dump(exclude=FUEL_SUPPLY_EXCLUDE_FIELDS) + + # Mock existing supply with matching compliance_report_id + existing_supply = FuelSupply( + **fe_data_dict, + fuel_supply_id=1, + version=0, + group_uuid="test_uuid", + user_type=UserTypeEnum.SUPPLIER, + action_type=ActionTypeEnum.UPDATE, + ) + existing_supply.compliance_units = -100 + existing_supply.fuel_type = { + "fuel_type_id": 3, + "fuel_type": "Electricity", + "units": "kWh", + } + existing_supply.fuel_category = {"category": "Diesel"} + existing_supply.units = "Litres" + mock_repo.get_fuel_supply_version_by_user.return_value = existing_supply + + # Mock the response from get_standardized_fuel_data + mock_fuel_code_repo.get_standardized_fuel_data.return_value = CarbonIntensityResult( + effective_carbon_intensity=55.0, + target_ci=85.0, + eer=1.2, + energy_density=36.0, + uci=None, + ) + + # Mock the updated fuel supply + updated_supply = FuelSupply( + **fe_data_dict, + fuel_supply_id=1, + version=1, + group_uuid="test_uuid", + user_type=UserTypeEnum.SUPPLIER, + action_type=ActionTypeEnum.UPDATE, + ) + updated_supply.compliance_units = -150 + updated_supply.fuel_type = { + "fuel_type_id": 3, + "fuel_type": "Electricity", + "units": "kWh", + } + updated_supply.fuel_category = {"category": "Diesel"} + updated_supply.units = "Litres" + mock_repo.update_fuel_supply.return_value = updated_supply + + # Call the method under test + result = await fuel_supply_action_service.update_fuel_supply(fe_data, user_type) + + # Assign mocked related objects for schema validation + result.fuel_type = { + "fuel_type_id": fe_data.fuel_type_id, + "fuel_type": "Electricity", + "units": "kWh", + } + result.fuel_category = {"category": "Diesel"} + result.units = fe_data.units + + # Assertions + mock_repo.get_fuel_supply_version_by_user.assert_awaited_once_with( + fe_data.group_uuid, fe_data.version, user_type + ) + mock_repo.update_fuel_supply.assert_awaited_once() + mock_fuel_code_repo.get_standardized_fuel_data.assert_awaited_once() + # Ensure compliance units were updated correctly + assert result.compliance_units == -150 + + +@pytest.mark.anyio +async def test_update_fuel_supply_create_new_version( + fuel_supply_action_service, mock_repo, mock_fuel_code_repo +): + fe_data = create_sample_fs_data() + fe_data.compliance_report_id = 2 # Different compliance report ID + user_type = UserTypeEnum.SUPPLIER + + # Exclude invalid fields + fe_data_dict = fe_data.model_dump(exclude=FUEL_SUPPLY_EXCLUDE_FIELDS) + + # Mock existing supply with different 
compliance_report_id + existing_supply = FuelSupply( + **fe_data_dict, + fuel_supply_id=1, + version=0, + group_uuid="test_uuid", + user_type=UserTypeEnum.SUPPLIER, + action_type=ActionTypeEnum.CREATE, + ) + existing_supply.compliance_report_id = 1 # Original compliance_report_id + existing_supply.compliance_units = -100 + existing_supply.version = 0 + existing_supply.fuel_type = { + "fuel_type_id": 3, + "fuel_type": "Electricity", + "units": "kWh", + } + existing_supply.fuel_category = {"category": "Diesel"} + existing_supply.units = "Litres" + mock_repo.get_fuel_supply_version_by_user.return_value = existing_supply + + # Mock the response from get_standardized_fuel_data + mock_fuel_code_repo.get_standardized_fuel_data.return_value = CarbonIntensityResult( + effective_carbon_intensity=60.0, + target_ci=90.0, + eer=1.5, + energy_density=37.0, + uci=None, + ) + + # Mock the newly created supply (new version) + new_supply = FuelSupply( + **fe_data_dict, + fuel_supply_id=2, + version=existing_supply.version + 1, + group_uuid="test_uuid", + user_type=UserTypeEnum.SUPPLIER, + action_type=ActionTypeEnum.UPDATE, + ) + new_supply.compliance_units = -150 + new_supply.fuel_type = { + "fuel_type_id": 3, + "fuel_type": "Electricity", + "units": "kWh", + } + new_supply.fuel_category = {"category": "Diesel"} + new_supply.units = "Litres" + mock_repo.create_fuel_supply.return_value = new_supply + + # Call the method under test + result = await fuel_supply_action_service.update_fuel_supply(fe_data, user_type) + + # Assign mocked related objects for schema validation + result.fuel_type = { + "fuel_type_id": fe_data.fuel_type_id, + "fuel_type": "Electricity", + "units": "kWh", + } + result.fuel_category = {"category": "Diesel"} + result.units = fe_data.units + + # Assertions + mock_repo.get_fuel_supply_version_by_user.assert_awaited_once_with( + fe_data.group_uuid, fe_data.version, user_type + ) + mock_repo.create_fuel_supply.assert_awaited_once() + mock_fuel_code_repo.get_standardized_fuel_data.assert_awaited_once() + # Ensure compliance units were calculated correctly + assert result.compliance_units == -150 + + +@pytest.mark.anyio +async def test_delete_fuel_supply_success( + fuel_supply_action_service, mock_repo, mock_fuel_code_repo +): + fe_data = create_sample_fs_data() + user_type = UserTypeEnum.SUPPLIER + + # Exclude invalid fields + fe_data_dict = fe_data.model_dump(exclude=FUEL_SUPPLY_EXCLUDE_FIELDS) + + # Mock existing supply + existing_supply = FuelSupply( + **fe_data_dict, + fuel_supply_id=1, + version=0, + action_type=ActionTypeEnum.CREATE, + ) + existing_supply.compliance_units = -100 + existing_supply.fuel_type = { + "fuel_type_id": 3, + "fuel_type": "Electricity", + "units": "kWh", + } + existing_supply.fuel_category = {"category": "Diesel"} + existing_supply.units = "Litres" + mock_repo.get_latest_fuel_supply_by_group_uuid.return_value = existing_supply + + # Mock the deletion supply + deleted_supply = FuelSupply( + **fe_data_dict, + fuel_supply_id=2, + version=1, + action_type=ActionTypeEnum.DELETE, + ) + deleted_supply.compliance_units = 0 + deleted_supply.fuel_type = { + "fuel_type_id": 3, + "fuel_type": "Electricity", + "units": "kWh", + } + deleted_supply.fuel_category = {"category": "Diesel"} + deleted_supply.units = "Litres" + mock_repo.create_fuel_supply.return_value = deleted_supply + + # Call the method under test + result = await fuel_supply_action_service.delete_fuel_supply(fe_data, user_type) + + # Assertions + assert isinstance(result, DeleteFuelSupplyResponseSchema) + assert 
result.success is True + assert result.message == "Marked as deleted." + mock_repo.get_latest_fuel_supply_by_group_uuid.assert_awaited_once_with( + fe_data.group_uuid + ) + mock_repo.create_fuel_supply.assert_awaited_once() + + +@pytest.mark.anyio +async def test_delete_fuel_supply_already_deleted( + fuel_supply_action_service, mock_repo, mock_fuel_code_repo +): + fe_data = create_sample_fs_data() + user_type = UserTypeEnum.SUPPLIER + + # Exclude invalid fields + fe_data_dict = fe_data.model_dump(exclude=FUEL_SUPPLY_EXCLUDE_FIELDS) + + # Mock existing supply already marked as deleted + existing_supply = FuelSupply( + **fe_data_dict, + fuel_supply_id=1, + version=1, + action_type=ActionTypeEnum.DELETE, + ) + existing_supply.compliance_units = 0 + existing_supply.fuel_type = { + "fuel_type_id": 3, + "fuel_type": "Electricity", + "units": "kWh", + } + existing_supply.fuel_category = {"category": "Diesel"} + existing_supply.units = "Litres" + mock_repo.get_latest_fuel_supply_by_group_uuid.return_value = existing_supply + + # Call the method under test + result = await fuel_supply_action_service.delete_fuel_supply(fe_data, user_type) + + # Assertions + assert isinstance(result, DeleteFuelSupplyResponseSchema) + assert result.success is True + assert result.message == "Already deleted." + mock_repo.get_latest_fuel_supply_by_group_uuid.assert_awaited_once_with( + fe_data.group_uuid + ) + mock_repo.create_fuel_supply.assert_not_awaited() + + +@pytest.mark.anyio +async def test_populate_fuel_supply_fields( + fuel_supply_action_service, mock_fuel_code_repo +): + fe_data = create_sample_fs_data() + + # Exclude invalid fields + fe_data_dict = fe_data.model_dump(exclude=FUEL_SUPPLY_EXCLUDE_FIELDS) + + # Create a FuelSupply instance without populated fields + fuel_supply = FuelSupply(**fe_data_dict) + + # Mock standardized fuel data + mock_fuel_code_repo.get_standardized_fuel_data.return_value = CarbonIntensityResult( + effective_carbon_intensity=50.0, + target_ci=80.0, + eer=1.0, + energy_density=None, + uci=None, + ) + + # Call the method under test + populated_supply = await fuel_supply_action_service._populate_fuel_supply_fields( + fuel_supply, fe_data + ) + + # Assertions + assert populated_supply.units == QuantityUnitsEnum(fe_data.units) + assert populated_supply.ci_of_fuel == 50.0 + assert populated_supply.target_ci == 80.0 + assert populated_supply.eer == 1 # Default EER + assert populated_supply.energy_density == fe_data.energy_density + # Energy calculation + assert populated_supply.energy == round(fe_data.energy_density * fe_data.quantity) + assert populated_supply.compliance_units > 0 + + mock_fuel_code_repo.get_standardized_fuel_data.assert_awaited_once_with( + fuel_type_id=fuel_supply.fuel_type_id, + fuel_category_id=fuel_supply.fuel_category_id, + end_use_id=fuel_supply.end_use_id, + fuel_code_id=fuel_supply.fuel_code_id, + compliance_period=fe_data.compliance_period, + ) + + +@pytest.mark.anyio +@pytest.mark.parametrize("case", test_cases) +async def test_compliance_units_calculation( + case, fuel_supply_action_service, mock_repo, mock_fuel_code_repo +): + fe_data = FuelSupplyCreateUpdateSchema( + compliance_report_id=1, + fuel_type_id=3, # Adjusted to match the mock fuel_type + fuel_category_id=2, # Adjusted to match the mock fuel_category + end_use_id=1, + fuel_code_id=1, + compliance_period="2024", # Schema-only field + quantity=case["input"]["quantity"], + units=case["input"]["units"], + energy_density=case["input"]["energy_density"], + group_uuid=str(uuid4()), + version=0, + 
provisionOfTheActId=123, + exportDate=datetime.now().date(), + ) + + # Mock standardized fuel data + mock_fuel_code_repo.get_standardized_fuel_data.return_value = CarbonIntensityResult( + effective_carbon_intensity=case["input"]["ci_of_fuel"], + target_ci=case["input"]["target_ci"], + eer=case["input"]["eer"], + energy_density=case["input"]["energy_density"], + uci=None, + ) + + # Exclude invalid fields and set related objects + fe_data_dict = fe_data.model_dump(exclude=FUEL_SUPPLY_EXCLUDE_FIELDS) + + # Mock the create_fuel_supply method to perform actual calculation + async def create_fuel_supply_side_effect(fuel_supply): + fuel_supply.fuel_supply_id = 1 + # Simulate the _populate_fuel_supply_fields logic + fuel_supply.fuel_type = { + "fuel_type_id": fuel_supply.fuel_type_id, + "fuel_type": "Electricity", + "units": "kWh", + } + fuel_supply.fuel_category = {"category": "Diesel"} + fuel_supply.units = "Litres" + return fuel_supply + + mock_repo.create_fuel_supply.side_effect = create_fuel_supply_side_effect + + # Call the service to create the fuel supply + result = await fuel_supply_action_service.create_fuel_supply( + fe_data, UserTypeEnum.SUPPLIER + ) + + # Assign mocked related objects for schema validation + result.fuel_type = { + "fuel_type_id": fe_data.fuel_type_id, + "fuel_type": "Electricity", + "units": "kWh", + } + result.fuel_category = {"category": "Diesel"} + result.units = fe_data.units + + # Assertions + assert ( + result.compliance_units == case["expected_compliance_units"] + ), f"Failed {case['description']}. Expected {case['expected_compliance_units']}, got {result.compliance_units}" + mock_fuel_code_repo.get_standardized_fuel_data.assert_awaited_once_with( + fuel_type_id=fe_data.fuel_type_id, + fuel_category_id=fe_data.fuel_category_id, + end_use_id=fe_data.end_use_id, + fuel_code_id=fe_data.fuel_code_id, + compliance_period=fe_data.compliance_period, + ) + mock_repo.create_fuel_supply.assert_awaited_once() + + +@pytest.mark.parametrize("case", test_cases) +def test_calculate_compliance_units(case): + """ + Test the calculate_compliance_units function with various input scenarios. + + Args: + case (dict): A dictionary containing the test case description, input parameters, and expected output. + """ + # Extract input parameters + quantity = case["input"]["quantity"] + units = case["input"]["units"] # Not used in calculation but included for context + target_ci = case["input"]["target_ci"] + ci_of_fuel = case["input"]["ci_of_fuel"] + energy_density = case["input"]["energy_density"] + eer = case["input"]["eer"] + + # Constants not provided in test_cases + UCI = 0 # Assuming Additional Carbon Intensity Attributable to Use is zero + + # Call the function under test + result = calculate_compliance_units( + TCI=target_ci, + EER=eer, + RCI=ci_of_fuel, + UCI=UCI, + Q=quantity, + ED=energy_density, + ) + + # Assert that the result matches the expected compliance units + assert ( + result == case["expected_compliance_units"] + ), f"Failed {case['description']}. 
Expected {case['expected_compliance_units']}, got {result}" diff --git a/backend/lcfs/tests/fuel_supply/test_fuel_supplies_repo.py b/backend/lcfs/tests/fuel_supply/test_fuel_supplies_repo.py new file mode 100644 index 000000000..e87050cfd --- /dev/null +++ b/backend/lcfs/tests/fuel_supply/test_fuel_supplies_repo.py @@ -0,0 +1,96 @@ +import pytest +from unittest.mock import MagicMock, AsyncMock +from sqlalchemy.ext.asyncio import AsyncSession + +from lcfs.db.models.compliance import FuelSupply +from lcfs.web.api.fuel_supply.repo import FuelSupplyRepository +from lcfs.web.api.fuel_supply.schema import FuelSupplyCreateUpdateSchema + + +@pytest.fixture +def mock_db_session(): + session = AsyncMock(spec=AsyncSession) + + # Create a mock that properly mimics SQLAlchemy's async result chain + async def mock_execute(*args, **kwargs): + mock_result = ( + MagicMock() + ) # Changed to MagicMock since the chained methods are sync + mock_result.scalars = MagicMock(return_value=mock_result) + mock_result.unique = MagicMock(return_value=mock_result) + mock_result.all = MagicMock(return_value=[MagicMock(spec=FuelSupply)]) + mock_result.first = MagicMock(return_value=MagicMock(spec=FuelSupply)) + return mock_result + + session.execute = mock_execute + session.add = MagicMock() # add is synchronous + session.flush = AsyncMock() + session.refresh = AsyncMock() + + return session + + +@pytest.fixture +def fuel_supply_repo(mock_db_session): + return FuelSupplyRepository(db=mock_db_session) + + +@pytest.mark.anyio +async def test_get_fuel_supply_list(fuel_supply_repo, mock_db_session): + compliance_report_id = 1 + mock_result = [MagicMock(spec=FuelSupply)] + + # Set up the mock to return our desired result + mock_result_chain = MagicMock() + mock_result_chain.scalars = MagicMock(return_value=mock_result_chain) + mock_result_chain.unique = MagicMock(return_value=mock_result_chain) + mock_result_chain.all = MagicMock(return_value=mock_result) + + async def mock_execute(*args, **kwargs): + return mock_result_chain + + mock_db_session.execute = mock_execute + + result = await fuel_supply_repo.get_fuel_supply_list(compliance_report_id) + + assert result == mock_result + + +@pytest.mark.anyio +async def test_create_fuel_supply(fuel_supply_repo, mock_db_session): + new_fuel_supply = MagicMock(spec=FuelSupply) + + result = await fuel_supply_repo.create_fuel_supply(new_fuel_supply) + + assert result == new_fuel_supply + mock_db_session.add.assert_called_once_with(new_fuel_supply) + assert mock_db_session.flush.await_count == 1 + assert mock_db_session.refresh.await_count == 1 + + +@pytest.mark.anyio +async def test_check_duplicate(fuel_supply_repo, mock_db_session): + fuel_supply_data = FuelSupplyCreateUpdateSchema( + compliance_report_id=1, + fuel_type_id=1, + fuel_category_id=1, + provision_of_the_act_id=1, + quantity=1000, + units="L", + ) + + # Set up the mock chain using regular MagicMock since the chained methods are sync + mock_result_chain = MagicMock() + mock_result_chain.scalars = MagicMock(return_value=mock_result_chain) + mock_result_chain.first = MagicMock(return_value=MagicMock(spec=FuelSupply)) + + # Define an async execute function that returns our mock chain + async def mock_execute(*args, **kwargs): + return mock_result_chain + + # Replace the session's execute with our new mock + mock_db_session.execute = mock_execute + + result = await fuel_supply_repo.check_duplicate(fuel_supply_data) + + assert result is not None diff --git a/backend/lcfs/tests/fuel_supply/test_fuel_supplies_services.py 
b/backend/lcfs/tests/fuel_supply/test_fuel_supplies_services.py new file mode 100644 index 000000000..bfe03cfb2 --- /dev/null +++ b/backend/lcfs/tests/fuel_supply/test_fuel_supplies_services.py @@ -0,0 +1,358 @@ +import uuid +import pytest +from unittest.mock import MagicMock, AsyncMock, patch +from fastapi import HTTPException + +from lcfs.db.models import ( + FuelType, + EnergyEffectivenessRatio, + EnergyDensity, + FuelCategory, +) +from lcfs.db.base import UserTypeEnum, ActionTypeEnum +from lcfs.web.api.fuel_supply.actions_service import FuelSupplyActionService +from lcfs.web.api.fuel_code.repo import FuelCodeRepository +from lcfs.web.api.fuel_supply.repo import FuelSupplyRepository +from lcfs.web.api.fuel_supply.schema import ( + FuelSupplyCreateUpdateSchema, + FuelTypeOptionsResponse, + FuelSuppliesSchema, + FuelSupplyResponseSchema, + FuelTypeSchema, + FuelCategoryResponseSchema, +) +from lcfs.db.models.compliance.FuelSupply import FuelSupply +from lcfs.web.api.fuel_supply.services import FuelSupplyServices + + +# Fixture to set up the FuelSupplyServices with mocked dependencies +# Mock common fuel type and fuel category for reuse +fuel_type = FuelTypeSchema( + fuel_type_id=1, + fuel_type="Diesel", + fossil_derived=True, + provision_1_id=None, + provision_2_id=None, + default_carbon_intensity=10.5, + units="L", +) + +fuel_category = FuelCategoryResponseSchema( + fuel_category_id=1, + category="Diesel", +) + + +@pytest.fixture +def fuel_supply_action_service(): + mock_repo = MagicMock(spec=FuelSupplyRepository) + mock_fuel_code_repo = MagicMock(spec=FuelCodeRepository) + service = FuelSupplyActionService( + repo=mock_repo, + fuel_repo=mock_fuel_code_repo, + ) + return service, mock_repo, mock_fuel_code_repo + + +@pytest.fixture +def fuel_supply_service(): + mock_repo = MagicMock(spec=FuelSupplyRepository) + mock_fuel_code_repo = MagicMock(spec=FuelCodeRepository) + service = FuelSupplyServices( + repo=mock_repo, + fuel_repo=mock_fuel_code_repo, + ) + return service, mock_repo, mock_fuel_code_repo + + +# Asynchronous test for get_fuel_supply_options +@pytest.mark.anyio +async def test_get_fuel_supply_options(fuel_supply_service): + service, mock_repo, mock_fuel_code_repo = fuel_supply_service + mock_repo.get_fuel_supply_table_options = AsyncMock(return_value={"fuel_types": []}) + compliance_period = "2023" + + response = await service.get_fuel_supply_options(compliance_period) + + assert isinstance(response, FuelTypeOptionsResponse) + mock_repo.get_fuel_supply_table_options.assert_awaited_once_with(compliance_period) + + +# Asynchronous test for get_fuel_supply_list +@pytest.mark.anyio +async def test_get_fuel_supply_list(fuel_supply_service): + service, mock_repo, _ = fuel_supply_service + mock_repo.get_fuel_supply_list = AsyncMock( + return_value=[ + # Mocked list of FuelSupply models + ] + ) + compliance_report_id = 1 + + response = await service.get_fuel_supply_list(compliance_report_id) + + assert isinstance(response, FuelSuppliesSchema) + mock_repo.get_fuel_supply_list.assert_awaited_once_with(compliance_report_id) + + +@pytest.mark.anyio +async def test_update_fuel_supply_not_found(fuel_supply_action_service): + service, mock_repo, mock_fuel_code_repo = fuel_supply_action_service + mock_repo.get_fuel_supply_version_by_user = AsyncMock(return_value=None) + + fs_data = FuelSupplyCreateUpdateSchema( + compliance_report_id=1, + fuel_type_id=1, + fuel_category_id=1, + provision_of_the_act_id=1, + quantity=2000, + units="L", + group_uuid="some-uuid", + version=0, + ) + user_type = 
UserTypeEnum.SUPPLIER + + with pytest.raises(HTTPException) as exc_info: + await service.update_fuel_supply(fs_data, user_type) + + assert exc_info.value.status_code == 404 + assert exc_info.value.detail == "Fuel supply record not found." + + +@pytest.mark.anyio +async def test_update_fuel_supply_success(fuel_supply_action_service): + service, mock_repo, mock_fuel_code_repo = fuel_supply_action_service + + # Setup existing fuel supply record + existing_fuel_supply = FuelSupply( + fuel_supply_id=1, + compliance_report_id=1, + fuel_type_id=1, + fuel_category_id=1, + provision_of_the_act_id=1, + end_use_id=1, + quantity=1000, + units="L", + fuel_type_other=None, + ci_of_fuel=10.5, + energy_density=30.0, + eer=1.0, + energy=30000, + compliance_units=100, + group_uuid="some-uuid", + version=0, + user_type=UserTypeEnum.SUPPLIER, + action_type=ActionTypeEnum.CREATE, + ) + + # Mock the repository method to return the existing fuel supply + mock_repo.get_fuel_supply_version_by_user = AsyncMock( + return_value=existing_fuel_supply + ) + + # Mock the FuelCodeRepository methods + # get_fuel_type_by_id + mock_fuel_code_repo.get_fuel_type_by_id = AsyncMock( + return_value=FuelType( + fuel_type_id=1, + fuel_type="Diesel", + unrecognized=False, + default_carbon_intensity=10.5, + units="L", + fossil_derived=True, + provision_1_id=None, + provision_2_id=None, + ) + ) + + # get_energy_effectiveness_ratio + mock_fuel_code_repo.get_energy_effectiveness_ratio = AsyncMock( + return_value=EnergyEffectivenessRatio( + fuel_type_id=1, + fuel_category_id=1, + end_use_type_id=1, + ratio=1.0, + ) + ) + + # get_energy_density + mock_fuel_code_repo.get_energy_density = AsyncMock( + return_value=EnergyDensity( + fuel_type_id=1, + density=30.0, + ) + ) + + # Prepare the updated fuel supply that the update_fuel_supply method should return + updated_fuel_supply = FuelSupply( + fuel_supply_id=1, + compliance_report_id=1, + fuel_type_id=1, + fuel_category_id=1, + provision_of_the_act_id=1, + end_use_id=1, + quantity=2000, # Updated quantity + units="L", + fuel_type_other=None, + ci_of_fuel=10.5, + energy_density=30.0, + eer=1.0, + energy=60000, # Updated energy + compliance_units=200, + group_uuid="some-uuid", + version=0, + user_type=UserTypeEnum.SUPPLIER, + action_type=ActionTypeEnum.CREATE, + fuel_type=fuel_type, + fuel_category=fuel_category, + ) + + # Mock the update_fuel_supply method to return the updated fuel supply + mock_repo.update_fuel_supply = AsyncMock(return_value=updated_fuel_supply) + + # Prepare the input data for updating the fuel supply + fs_data = FuelSupplyCreateUpdateSchema( + fuel_supply_id=1, + compliance_report_id=1, + fuel_type_id=1, + fuel_category_id=1, + provision_of_the_act_id=1, + end_use_id=1, + quantity=2000, # Updated quantity + units="L", + group_uuid="some-uuid", + version=0, + ) + user_type = UserTypeEnum.SUPPLIER + + # Call the service method + response = await service.update_fuel_supply(fs_data, user_type) + + # Assertions + assert isinstance(response, FuelSupplyResponseSchema) + assert response.fuel_supply_id == updated_fuel_supply.fuel_supply_id + assert response.quantity == updated_fuel_supply.quantity + assert response.energy == updated_fuel_supply.energy + assert response.compliance_units == updated_fuel_supply.compliance_units + assert response.group_uuid == updated_fuel_supply.group_uuid + + # Ensure that the appropriate methods were called with correct arguments + mock_repo.get_fuel_supply_version_by_user.assert_awaited_once_with( + fs_data.group_uuid, fs_data.version, user_type 
+ ) + mock_fuel_code_repo.get_standardized_fuel_data.assert_awaited_once_with( + fuel_type_id=fs_data.fuel_type_id, + fuel_category_id=fs_data.fuel_category_id, + end_use_id=fs_data.end_use_id, + fuel_code_id=fs_data.fuel_code_id, + compliance_period=fs_data.compliance_period, + ) + mock_repo.update_fuel_supply.assert_awaited_once_with(existing_fuel_supply) + + +@pytest.mark.anyio +async def test_create_fuel_supply(fuel_supply_action_service): + service, mock_repo, mock_fuel_code_repo = fuel_supply_action_service + fs_data = FuelSupplyCreateUpdateSchema( + compliance_report_id=1, + fuel_type_id=1, + fuel_category_id=1, + end_use_id=1, + provision_of_the_act_id=1, + quantity=2000, + fuel_type_other=None, + units="L", + ) + new_fuel_supply = FuelSupply( + compliance_report_id=1, + fuel_type_id=1, + fuel_category_id=1, + provision_of_the_act_id=1, + quantity=2000, + units="L", + fuel_type_other=None, + group_uuid=str(uuid.uuid4()), + version=0, + user_type=UserTypeEnum.SUPPLIER, + action_type=ActionTypeEnum.CREATE, + ) + mock_repo.create_fuel_supply = AsyncMock( + return_value=MagicMock( + fuel_supply_id=1, + groupUuid="new-uuid", + userType="SUPPLIER", + actionType="CREATE", + fuelTypeOther=None, + fuelType={"fuel_type_id": 1, "fuelType": "Diesel", "units": "L"}, + fuelCategory={"fuel_category_id": 1, "category": "Diesel"}, + fuelCode={ + "fuelStatus": {"status": "Approved"}, + "fuelCode": "FUEL123", + "carbonIntensity": 15.0, + }, + provisionOfTheAct={"provisionOfTheActId": 1, "name": "Act Provision"}, + endUseType={"endUseTypeId": 1, "type": "Transport", "subType": "Personal"}, + units="L", + compliancePeriod="2024", + ) + ) + mock_fuel_code_repo.get_fuel_type_by_id = AsyncMock( + return_value=MagicMock( + spec=FuelType, unrecognized=False, default_carbon_intensity=10.5 + ) + ) + mock_fuel_code_repo.get_energy_effectiveness_ratio = AsyncMock( + return_value=MagicMock(spec=EnergyEffectivenessRatio, ratio=1.0) + ) + mock_density = MagicMock(spec=EnergyDensity) + mock_density.density = 30.0 + mock_fuel_code_repo.get_energy_density = AsyncMock(return_value=mock_density) + + user_type = UserTypeEnum.SUPPLIER + + response = await service.create_fuel_supply(fs_data, user_type) + + assert isinstance(response, FuelSupplyResponseSchema) + mock_repo.create_fuel_supply.assert_awaited_once() + mock_fuel_code_repo.get_standardized_fuel_data.assert_awaited_once_with( + fuel_type_id=fs_data.fuel_type_id, + fuel_category_id=fs_data.fuel_category_id, + end_use_id=fs_data.end_use_id, + fuel_code_id=fs_data.fuel_code_id, + compliance_period=fs_data.compliance_period, + ) + + +@pytest.mark.anyio +async def test_delete_fuel_supply(fuel_supply_action_service): + service, mock_repo, mock_fuel_code_repo = fuel_supply_action_service + fs_data = FuelSupplyCreateUpdateSchema( + compliance_report_id=1, + group_uuid="some-uuid", + fuel_type_id=1, + fuel_category_id=1, + provision_of_the_act_id=1, + quantity=1000, + units="L", + ) + existing_fuel_supply = FuelSupply( + compliance_report_id=1, + fuel_supply_id=1, + group_uuid="some-uuid", + version=0, + action_type=ActionTypeEnum.CREATE, + user_type=UserTypeEnum.SUPPLIER, + ) + mock_repo.get_latest_fuel_supply_by_group_uuid = AsyncMock( + return_value=existing_fuel_supply + ) + mock_repo.create_fuel_supply = AsyncMock() + + user_type = UserTypeEnum.SUPPLIER + + response = await service.delete_fuel_supply(fs_data, user_type) + + assert response.success is True + assert response.message == "Marked as deleted." 
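+    # Mirrors the fuel-export behaviour: the latest version for the group UUID
+    # is fetched and a new DELETE-action row is appended via create_fuel_supply,
+    # so no destructive delete is expected here.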
+ mock_repo.get_latest_fuel_supply_by_group_uuid.assert_awaited_once_with("some-uuid") + mock_repo.create_fuel_supply.assert_awaited_once() diff --git a/backend/lcfs/tests/fuel_supply/test_fuel_supplies_validation.py b/backend/lcfs/tests/fuel_supply/test_fuel_supplies_validation.py new file mode 100644 index 000000000..eefa25c91 --- /dev/null +++ b/backend/lcfs/tests/fuel_supply/test_fuel_supplies_validation.py @@ -0,0 +1,115 @@ +from unittest.mock import MagicMock, AsyncMock +import pytest +from fastapi.exceptions import RequestValidationError + +from lcfs.web.api.fuel_supply.repo import FuelSupplyRepository +from lcfs.web.api.fuel_code.repo import FuelCodeRepository +from lcfs.web.api.fuel_supply.schema import FuelSupplyCreateUpdateSchema +from lcfs.web.api.fuel_supply.validation import FuelSupplyValidation + + +@pytest.fixture +def fuel_supply_validation(): + # Mock repositories + mock_fs_repo = MagicMock(spec=FuelSupplyRepository) + mock_fc_repo = MagicMock(spec=FuelCodeRepository) + + # Create the validation instance with mocked repositories + validation = FuelSupplyValidation( + fs_repo=mock_fs_repo, + fc_repo=mock_fc_repo, + ) + return validation, mock_fs_repo, mock_fc_repo + + +@pytest.mark.anyio +async def test_check_duplicate(fuel_supply_validation): + validation, mock_fs_repo, _ = fuel_supply_validation + fuel_supply_data = FuelSupplyCreateUpdateSchema( + compliance_report_id=1, + fuel_type_id=1, + fuel_category_id=1, + provision_of_the_act_id=1, + quantity=2000, + units="L", + ) + + mock_fs_repo.check_duplicate = AsyncMock(return_value=True) + + result = await validation.check_duplicate(fuel_supply_data) + + assert result is True + mock_fs_repo.check_duplicate.assert_awaited_once_with(fuel_supply_data) + + +@pytest.mark.anyio +async def test_validate_other_recognized_type(fuel_supply_validation): + validation, _, mock_fc_repo = fuel_supply_validation + # Mock a recognized fuel type (unrecognized = False) + mock_fc_repo.get_fuel_type_by_id = AsyncMock( + return_value=MagicMock(unrecognized=False) + ) + + fuel_supply_data = FuelSupplyCreateUpdateSchema( + compliance_report_id=1, + fuel_type_id=1, # Some recognized type ID + fuel_category_id=1, + provision_of_the_act_id=1, + quantity=2000, + units="L", + ) + + # Should not raise any error as fuel_type_other is not needed for recognized type + await validation.validate_other(fuel_supply_data) + + +@pytest.mark.anyio +async def test_validate_other_unrecognized_type_with_other(fuel_supply_validation): + validation, _, mock_fc_repo = fuel_supply_validation + # Mock an unrecognized fuel type + mock_fc_repo.get_fuel_type_by_id = AsyncMock( + return_value=MagicMock(unrecognized=True) + ) + + # Provide fuel_type_other since it's unrecognized + fuel_supply_data = FuelSupplyCreateUpdateSchema( + compliance_report_id=1, + fuel_type_id=99, # Assume 99 is unrecognized "Other" type + fuel_category_id=1, + provision_of_the_act_id=1, + quantity=2000, + units="L", + fuel_type_other="Some other fuel", + ) + + # Should not raise an error since fuel_type_other is provided + await validation.validate_other(fuel_supply_data) + + +@pytest.mark.anyio +async def test_validate_other_unrecognized_type_missing_other(fuel_supply_validation): + validation, _, mock_fc_repo = fuel_supply_validation + # Mock an unrecognized fuel type + mock_fc_repo.get_fuel_type_by_id = AsyncMock( + return_value=MagicMock(unrecognized=True) + ) + + # Missing fuel_type_other + fuel_supply_data = FuelSupplyCreateUpdateSchema( + compliance_report_id=1, + fuel_type_id=99, # Assume 99 
is unrecognized "Other" type + fuel_category_id=1, + provision_of_the_act_id=1, + quantity=2000, + units="L", + ) + + # Should raise RequestValidationError since fuel_type_other is required + with pytest.raises(RequestValidationError) as exc: + await validation.validate_other(fuel_supply_data) + + # Assert that the error message is as expected + errors = exc.value.errors() + assert len(errors) == 1 + assert errors[0]["loc"] == ("fuelTypeOther",) + assert "required when using Other" in errors[0]["msg"] diff --git a/backend/lcfs/tests/fuel_supply/test_fuel_supplies_view.py b/backend/lcfs/tests/fuel_supply/test_fuel_supplies_view.py new file mode 100644 index 000000000..81fa58b7a --- /dev/null +++ b/backend/lcfs/tests/fuel_supply/test_fuel_supplies_view.py @@ -0,0 +1,228 @@ +import pytest +from fastapi import FastAPI, status +from httpx import AsyncClient +from unittest.mock import MagicMock, AsyncMock, patch + +from lcfs.db.models.user.Role import RoleEnum +from lcfs.web.api.compliance_report.validation import ComplianceReportValidation +from lcfs.web.api.fuel_supply.validation import FuelSupplyValidation +from lcfs.web.api.fuel_supply.actions_service import FuelSupplyActionService + + +@pytest.fixture +def mock_fuel_supply_action_service(): + return MagicMock(spec=FuelSupplyActionService) + + +@pytest.fixture +def mock_compliance_report_validation(): + validation = MagicMock(spec=ComplianceReportValidation) + mock_repo = MagicMock() + mock_repo.get_compliance_report.return_value = MagicMock(organization_id=1) + validation.repo = mock_repo + validation.request = MagicMock() + validation.request.user.organization.organization_id = 1 + return validation + + +@pytest.fixture +def mock_fuel_supply_validation(): + validation = MagicMock(spec=FuelSupplyValidation) + validation.check_duplicate = AsyncMock(return_value=None) + return validation + + +@pytest.mark.anyio +async def test_save_fuel_supply_row_create( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, + mock_fuel_supply_action_service, + mock_compliance_report_validation, + mock_fuel_supply_validation, +): + set_mock_user(fastapi_app, [RoleEnum.SUPPLIER]) + url = fastapi_app.url_path_for("save_fuel_supply_row") + payload = { + "compliance_report_id": 1, + "fuel_type_id": 1, + "fuel_category_id": 1, + "provision_of_the_act_id": 1, + "quantity": 1000, + "units": "L", + } + + # Mock the create method with all required fields + mock_fuel_supply_action_service.create_fuel_supply.return_value = { + "fuelSupplyId": 1, + "complianceReportId": 1, + "fuelTypeId": 1, + "fuelType": {"fuelType": "Gasoline", "fuelTypeId": 1, "units": "L"}, + "units": "liters", + "fuelCategory": {"category": "category"}, + "quantity": 1000, + "groupUuid": "some-uuid", + "version": 1, + "userType": "SUPPLIER", + "actionType": "CREATE", + } + + # Override dependencies + fastapi_app.dependency_overrides[ComplianceReportValidation] = ( + lambda: mock_compliance_report_validation + ) + fastapi_app.dependency_overrides[FuelSupplyValidation] = ( + lambda: mock_fuel_supply_validation + ) + fastapi_app.dependency_overrides[FuelSupplyActionService] = ( + lambda: mock_fuel_supply_action_service + ) + + response = await client.post(url, json=payload) + + assert response.status_code == status.HTTP_201_CREATED + data = response.json() + assert "fuelSupplyId" in data + assert data["fuelType"]["fuelType"] == "Gasoline" + assert data["quantity"] == 1000 + + +@pytest.mark.anyio +async def test_save_fuel_supply_row_update( + client: AsyncClient, + fastapi_app: FastAPI, + 
set_mock_user, + mock_fuel_supply_action_service, + mock_compliance_report_validation, + mock_fuel_supply_validation, +): + set_mock_user(fastapi_app, [RoleEnum.SUPPLIER]) + url = fastapi_app.url_path_for("save_fuel_supply_row") + + payload = { + "compliance_report_id": 1, + "fuel_supply_id": 123, + "fuel_type_id": 1, + "fuel_category_id": 1, + "provision_of_the_act_id": 1, + "quantity": 1000, + "units": "L", + } + + # Mock the update method with all required fields + mock_fuel_supply_action_service.update_fuel_supply.return_value = { + "fuelSupplyId": 1, + "complianceReportId": 1, + "fuelTypeId": 1, + "fuelType": {"fuelType": "Diesel", "fuelTypeId": 1, "units": "L"}, + "units": "liters", + "fuelCategory": {"category": "category"}, + "quantity": 2000, + "groupUuid": "some-uuid", + "version": 1, + "userType": "SUPPLIER", + "actionType": "UPDATE", + } + + # Override dependencies + fastapi_app.dependency_overrides[ComplianceReportValidation] = ( + lambda: mock_compliance_report_validation + ) + fastapi_app.dependency_overrides[FuelSupplyValidation] = ( + lambda: mock_fuel_supply_validation + ) + fastapi_app.dependency_overrides[FuelSupplyActionService] = ( + lambda: mock_fuel_supply_action_service + ) + + response = await client.post(url, json=payload) + + assert response.status_code == status.HTTP_201_CREATED + data = response.json() + assert "fuelSupplyId" in data + assert data["fuelType"]["fuelType"] == "Diesel" + assert data["quantity"] == 2000 + + +@pytest.mark.anyio +async def test_save_fuel_supply_row_delete( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, + mock_fuel_supply_action_service, + mock_compliance_report_validation, + mock_fuel_supply_validation, +): + set_mock_user(fastapi_app, [RoleEnum.SUPPLIER]) + url = fastapi_app.url_path_for("save_fuel_supply_row") + payload = { + "compliance_report_id": 1, + "fuel_supply_id": 123, + "fuel_type_id": 1, + "fuel_category_id": 1, + "provision_of_the_act_id": 1, + "quantity": 1000, + "units": "L", + "deleted": True, + } + + # Mock the delete method with all required fields + mock_fuel_supply_action_service.delete_fuel_supply.return_value = { + "success": True, + "message": "Fuel supply row deleted successfully", + "groupUuid": "some-uuid", + "version": 1, + "userType": "SUPPLIER", + "actionType": "DELETE", + } + + # Override dependencies + fastapi_app.dependency_overrides[ComplianceReportValidation] = ( + lambda: mock_compliance_report_validation + ) + fastapi_app.dependency_overrides[FuelSupplyValidation] = ( + lambda: mock_fuel_supply_validation + ) + fastapi_app.dependency_overrides[FuelSupplyActionService] = ( + lambda: mock_fuel_supply_action_service + ) + + response = await client.post(url, json=payload) + + assert response.status_code == status.HTTP_201_CREATED + data = response.json() + assert data == {"success": True, "message": "Fuel supply row deleted successfully"} + + +@pytest.mark.anyio +async def test_save_fuel_supply_row_duplicate( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, + mock_fuel_supply_action_service, + mock_fuel_supply_validation, +): + set_mock_user(fastapi_app, [RoleEnum.SUPPLIER]) + url = fastapi_app.url_path_for("save_fuel_supply_row") + payload = { + "compliance_report_id": 1, + "fuel_supply_id": None, + "deleted": False, + } + + # Mock the validation method to simulate a duplicate + mock_fuel_supply_validation.check_duplicate = AsyncMock(return_value=True) + + # Override dependencies + fastapi_app.dependency_overrides[FuelSupplyActionService] = ( + lambda: 
mock_fuel_supply_action_service + ) + fastapi_app.dependency_overrides[FuelSupplyValidation] = ( + lambda: mock_fuel_supply_validation + ) + + response = await client.post(url, json=payload) + + assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY + data = response.json() + assert "Validation failed" in data["message"] diff --git a/backend/lcfs/tests/initiative_agreement/test_initiative_agreement_repo.py b/backend/lcfs/tests/initiative_agreement/test_initiative_agreement_repo.py new file mode 100644 index 000000000..834badc90 --- /dev/null +++ b/backend/lcfs/tests/initiative_agreement/test_initiative_agreement_repo.py @@ -0,0 +1,137 @@ +from unittest.mock import MagicMock, AsyncMock, Mock + +import pytest +from sqlalchemy.ext.asyncio import AsyncSession + +from lcfs.db.models.initiative_agreement.InitiativeAgreement import InitiativeAgreement +from lcfs.db.models.initiative_agreement.InitiativeAgreementHistory import ( + InitiativeAgreementHistory, +) +from lcfs.db.models.initiative_agreement.InitiativeAgreementStatus import ( + InitiativeAgreementStatus, +) +from lcfs.web.api.initiative_agreement.repo import InitiativeAgreementRepository +from lcfs.web.exception.exceptions import DataNotFoundException + + +@pytest.fixture +def mock_db_session(): + return MagicMock(spec=AsyncSession) + + +@pytest.fixture +def repository(mock_db_session): + return InitiativeAgreementRepository(db=mock_db_session) + + +@pytest.mark.anyio +async def test_get_initiative_agreement_by_id(repository, mock_db_session): + mock_agreement = InitiativeAgreement(initiative_agreement_id=1) + + mock_scalars = Mock() + mock_scalars.first.return_value = mock_agreement + mock_result = Mock() + mock_result.scalars.return_value = mock_scalars + mock_query = AsyncMock() + mock_query.return_value = mock_result + mock_db_session.execute = mock_query + + result = await repository.get_initiative_agreement_by_id(1) + # print(result) + assert result == mock_agreement + + +@pytest.mark.anyio +async def test_create_initiative_agreement(repository, mock_db_session): + mock_agreement = InitiativeAgreement(initiative_agreement_id=1) + + result = await repository.create_initiative_agreement(mock_agreement) + + assert result == mock_agreement + mock_db_session.add.assert_called_once_with(mock_agreement) + mock_db_session.flush.assert_called_once() + mock_db_session.refresh.assert_called_once_with( + mock_agreement, ["to_organization", "current_status", "history"] + ) + + +@pytest.mark.anyio +async def test_update_initiative_agreement(repository, mock_db_session): + mock_agreement = InitiativeAgreement(initiative_agreement_id=1) + mock_db_session.merge.return_value = mock_agreement + + result = await repository.update_initiative_agreement(mock_agreement) + + assert result == mock_agreement + mock_db_session.merge.assert_called_once_with(mock_agreement) + mock_db_session.flush.assert_called_once() + + +@pytest.mark.anyio +async def test_get_initiative_agreement_status_by_name(repository, mock_db_session): + mock_status = InitiativeAgreementStatus(status="Pending") + + mock_scalars = Mock() + mock_scalars.first.return_value = mock_status + mock_result = Mock() + mock_result.scalars.return_value = mock_scalars + mock_query = AsyncMock() + mock_query.return_value = mock_result + mock_db_session.execute = mock_query + + result = await repository.get_initiative_agreement_status_by_name("Pending") + + assert result == mock_status + mock_db_session.execute.assert_called_once() + + +@pytest.mark.anyio +async def 
test_get_initiative_agreement_status_by_name_not_found( + repository, mock_db_session +): + # mock_first = MagicMock(return_value=False) + + mock_scalars = Mock() + mock_scalars.first.return_value = None + mock_result = Mock() + mock_result.scalars.return_value = mock_scalars + mock_query = AsyncMock() + mock_query.return_value = mock_result + mock_db_session.execute = mock_query + + with pytest.raises(DataNotFoundException): + await repository.get_initiative_agreement_status_by_name("NonExistent") + + +@pytest.mark.anyio +async def test_add_initiative_agreement_history(repository, mock_db_session): + result = await repository.add_initiative_agreement_history(1, 1, 1) + + assert isinstance(result, InitiativeAgreementHistory) + mock_db_session.add.assert_called_once() + mock_db_session.flush.assert_called_once() + + +@pytest.mark.anyio +async def test_update_initiative_agreement_history(repository, mock_db_session): + mock_history = InitiativeAgreementHistory( + initiative_agreement_id=1, initiative_agreement_status_id=1, user_profile_id=1 + ) + mock_db_session.scalar.return_value = mock_history + + result = await repository.update_initiative_agreement_history(1, 1, 2) + + assert result.user_profile_id == 2 + mock_db_session.add.assert_called_once_with(mock_history) + mock_db_session.flush.assert_called_once() + + +@pytest.mark.anyio +async def test_refresh_initiative_agreement(repository, mock_db_session): + mock_agreement = InitiativeAgreement(initiative_agreement_id=1) + + result = await repository.refresh_initiative_agreement(mock_agreement) + + assert result == mock_agreement + mock_db_session.flush.assert_called_once() + mock_db_session.refresh.assert_called_once_with(mock_agreement) diff --git a/backend/lcfs/tests/initiative_agreement/test_initiative_agreement_services.py b/backend/lcfs/tests/initiative_agreement/test_initiative_agreement_services.py new file mode 100644 index 000000000..cb0ee6994 --- /dev/null +++ b/backend/lcfs/tests/initiative_agreement/test_initiative_agreement_services.py @@ -0,0 +1,204 @@ +from datetime import datetime +from unittest.mock import AsyncMock, MagicMock + +import pytest +from fastapi import HTTPException + +from lcfs.db.models import Organization +from lcfs.db.models.initiative_agreement.InitiativeAgreement import InitiativeAgreement +from lcfs.db.models.initiative_agreement.InitiativeAgreementStatus import ( + InitiativeAgreementStatusEnum, + InitiativeAgreementStatus, +) +from lcfs.db.models.user.Role import RoleEnum +from lcfs.web.api.initiative_agreement.schema import ( + InitiativeAgreementCreateSchema, + InitiativeAgreementSchema, + InitiativeAgreementUpdateSchema, +) +from lcfs.web.api.initiative_agreement.services import ( + InitiativeAgreementServices, +) +from lcfs.web.api.internal_comment.services import InternalCommentService + + +@pytest.fixture +def mock_repo(): + repo = MagicMock() + repo.get_initiative_agreement_by_id = AsyncMock() + repo.get_initiative_agreement_status_by_name = AsyncMock() + repo.create_initiative_agreement = AsyncMock() + repo.update_initiative_agreement = AsyncMock() + repo.add_initiative_agreement_history = AsyncMock() + repo.update_initiative_agreement_history = AsyncMock() + repo.refresh_initiative_agreement = AsyncMock() + return repo + + +@pytest.fixture +def mock_org_service(): + service = MagicMock() + service.adjust_balance = AsyncMock() + return service + + +@pytest.fixture +def mock_internal_comment_service(): + service = MagicMock(spec=InternalCommentService) + service.create_internal_comment = 
AsyncMock() + return service + + +@pytest.fixture +def mock_request(mock_user_profile): + mock_user_profile.role_names = [RoleEnum.DIRECTOR] + request = MagicMock() + request.user = mock_user_profile + return request + + +@pytest.fixture +def service(mock_repo, mock_org_service, mock_internal_comment_service, mock_request): + return InitiativeAgreementServices( + repo=mock_repo, + org_service=mock_org_service, + internal_comment_service=mock_internal_comment_service, + request=mock_request, + ) + + +@pytest.mark.anyio +async def test_get_initiative_agreement(service, mock_repo): + mock_agreement = InitiativeAgreement( + initiative_agreement_id=1, + compliance_units=150, + to_organization_id=3, + to_organization=Organization(name="name", organization_id=3), + current_status=InitiativeAgreementStatus( + status="Status", initiative_agreement_status_id=1 + ), + create_date=datetime.now(), + ) + mock_repo.get_initiative_agreement_by_id.return_value = mock_agreement + + result = await service.get_initiative_agreement(1) + + assert isinstance(result, InitiativeAgreementSchema) + mock_repo.get_initiative_agreement_by_id.assert_called_once_with(1) + + +@pytest.mark.anyio +async def test_create_initiative_agreement(service, mock_repo, mock_request): + # Mock status for the initiative agreement + mock_status = MagicMock(status=InitiativeAgreementStatusEnum.Recommended) + mock_repo.get_initiative_agreement_status_by_name.return_value = mock_status + + # Create a mock initiative agreement with serializable fields + mock_initiative_agreement = MagicMock(spec=InitiativeAgreement) + mock_initiative_agreement.initiative_agreement_id = 1 + mock_initiative_agreement.current_status.status = "Recommended" + mock_initiative_agreement.to_organization_id = 3 + + # Mock return value of create_initiative_agreement + mock_repo.create_initiative_agreement.return_value = mock_initiative_agreement + + # Create input data + create_data = InitiativeAgreementCreateSchema( + compliance_units=150, + current_status="Recommended", + transaction_effective_date=datetime.now().date(), + to_organization_id=3, + gov_comment="Initial setup of the initiative agreement.", + internal_comment=None, + ) + + # Mock _perform_notification_call to isolate it + service._perform_notification_call = AsyncMock() + + # Call the service method + result = await service.create_initiative_agreement(create_data) + + # Assertions + assert result == mock_initiative_agreement + mock_repo.create_initiative_agreement.assert_called_once() + service._perform_notification_call.assert_called_once_with( + mock_initiative_agreement + ) + + +@pytest.mark.anyio +async def test_update_initiative_agreement(service, mock_repo, mock_request): + mock_agreement = InitiativeAgreement( + initiative_agreement_id=1, + compliance_units=150, + to_organization_id=3, + to_organization=Organization(name="name", organization_id=3), + current_status=InitiativeAgreementStatus( + status="Status", initiative_agreement_status_id=1 + ), + create_date=datetime.now(), + ) + mock_status = MagicMock(status=InitiativeAgreementStatusEnum.Approved) + mock_repo.get_initiative_agreement_by_id.return_value = mock_agreement + mock_repo.get_initiative_agreement_status_by_name.return_value = mock_status + mock_repo.update_initiative_agreement.return_value = mock_agreement + service.notfn_service = AsyncMock() + + update_data = InitiativeAgreementUpdateSchema( + initiative_agreement_id=1, + compliance_units=150, + current_status="Approved", +
transaction_effective_date=datetime.now().date(), + to_organization_id=3, + gov_comment="Updated initiative agreement.", + ) + + result = await service.update_initiative_agreement(update_data) + + assert isinstance(result, InitiativeAgreementSchema) + mock_repo.update_initiative_agreement.assert_called_once() + + +@pytest.mark.anyio +async def test_director_approve_initiative_agreement( + service, mock_repo, mock_org_service, mock_request +): + mock_agreement = InitiativeAgreement( + initiative_agreement_id=1, + compliance_units=150, + to_organization_id=3, + to_organization=Organization(name="name", organization_id=3), + current_status=InitiativeAgreementStatus( + status="Status", initiative_agreement_status_id=1 + ), + create_date=datetime.now(), + ) + + await service.director_approve_initiative_agreement(mock_agreement) + + mock_org_service.adjust_balance.assert_called_once() + mock_repo.refresh_initiative_agreement.assert_called_once_with(mock_agreement) + + +@pytest.mark.anyio +async def test_non_director_approve_initiative_agreement( + service, mock_repo, mock_org_service, mock_request +): + service.request.user.role_names = [RoleEnum.SUPPLIER] + + mock_agreement = InitiativeAgreement( + initiative_agreement_id=1, + compliance_units=150, + to_organization_id=3, + to_organization=Organization(name="name", organization_id=3), + current_status=InitiativeAgreementStatus( + status="Status", initiative_agreement_status_id=1 + ), + create_date=datetime.now(), + ) + + mock_request.user.user_roles = [] + + with pytest.raises(HTTPException): + await service.director_approve_initiative_agreement(mock_agreement) diff --git a/backend/lcfs/tests/initiative_agreement/test_initiative_agreement_view.py b/backend/lcfs/tests/initiative_agreement/test_initiative_agreement_view.py new file mode 100644 index 000000000..ca98c827a --- /dev/null +++ b/backend/lcfs/tests/initiative_agreement/test_initiative_agreement_view.py @@ -0,0 +1,168 @@ +import datetime + +import pytest +from fastapi import FastAPI, status +from httpx import AsyncClient +from unittest.mock import MagicMock + +from lcfs.db.models.user.Role import RoleEnum +from lcfs.web.api.initiative_agreement.schema import ( + InitiativeAgreementCreateSchema, + InitiativeAgreementSchema, + InitiativeAgreementUpdateSchema, +) +from lcfs.web.api.initiative_agreement.services import InitiativeAgreementServices +from lcfs.web.api.initiative_agreement.validation import InitiativeAgreementValidation +from unittest.mock import patch + + +@pytest.fixture +def mock_initiative_agreement_services(): + return MagicMock(spec=InitiativeAgreementServices) + + +@pytest.fixture +def mock_initiative_agreement_validation(): + validation = MagicMock(spec=InitiativeAgreementValidation) + + validation.validate_initiative_agreement_update.return_value = None + validation.validate_initiative_agreement_create.return_value = None + + return validation + + +@pytest.mark.anyio +async def test_get_initiative_agreement( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, + mock_initiative_agreement_services, +): + with patch( + "lcfs.web.api.initiative_agreement.services.InitiativeAgreementServices", + mock_initiative_agreement_services, + ): + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + initiative_agreement_id = 1 + + # Mock the service method + mock_initiative_agreement_services.get_initiative_agreement.return_value = ( + InitiativeAgreementSchema( + initiative_agreement_id=initiative_agreement_id, + to_organization={"name": "name", "organizationId": 3}, + 
create_date=datetime.datetime.today().date().isoformat(), + compliance_units=150, + current_status={ + "initiative_agreement_status_id": 1, + "status": "Updated", + }, + to_organization_id=3, + gov_comment="Updated initiative agreement.", + history=[], + ) + ) + + # Use dependency override + fastapi_app.dependency_overrides[InitiativeAgreementServices] = ( + lambda: mock_initiative_agreement_services + ) + + url = fastapi_app.url_path_for( + "get_initiative_agreement", initiative_agreement_id=initiative_agreement_id + ) + response = await client.get(url) + assert response.status_code == status.HTTP_200_OK + data = response.json() + assert data["initiativeAgreementId"] == initiative_agreement_id + + +@pytest.mark.anyio +async def test_create_initiative_agreement( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, + mock_initiative_agreement_services, + mock_initiative_agreement_validation, +): + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + url = fastapi_app.url_path_for("create_initiative_agreement") + initiative_agreement_payload = InitiativeAgreementCreateSchema( + compliance_units=150, + current_status="Updated", + to_organization_id=3, + ) + + # Mock the service method + mock_initiative_agreement_services.create_initiative_agreement.return_value = ( + InitiativeAgreementSchema( + initiative_agreement_id=1, + to_organization={"name": "name", "organizationId": 3}, + create_date=datetime.datetime.today().date().isoformat(), + compliance_units=150, + current_status={"initiative_agreement_status_id": 1, "status": "Updated"}, + to_organization_id=3, + history=[], + ) + ) + + # Use dependency override + fastapi_app.dependency_overrides[InitiativeAgreementServices] = ( + lambda: mock_initiative_agreement_services + ) + fastapi_app.dependency_overrides[InitiativeAgreementValidation] = ( + lambda: mock_initiative_agreement_validation + ) + + payload = initiative_agreement_payload.model_dump(by_alias=True) + response = await client.post(url, json=payload) + assert response.status_code == status.HTTP_201_CREATED + # Assert on the response body, not the request payload + data = response.json() + assert data["complianceUnits"] == 150 + + +@pytest.mark.anyio +async def test_update_initiative_agreement( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, + mock_initiative_agreement_services, + mock_initiative_agreement_validation, +): + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + initiative_agreement_id = 1 + url = fastapi_app.url_path_for("update_initiative_agreement") + initiative_agreement_update_payload = InitiativeAgreementUpdateSchema( + initiative_agreement_id=initiative_agreement_id, + compliance_units=150, + current_status="Updated", + to_organization_id=3, + ) + + # Mock the service method + mock_initiative_agreement_services.update_initiative_agreement.return_value = ( + InitiativeAgreementSchema( + initiative_agreement_id=initiative_agreement_id, + to_organization={"name": "name", "organizationId": 3}, + create_date=datetime.datetime.today().date().isoformat(), + compliance_units=150, + current_status={"initiative_agreement_status_id": 1, "status": "Updated"}, + to_organization_id=3, + history=[], + ) + ) + + # Use dependency override + fastapi_app.dependency_overrides[InitiativeAgreementServices] = ( + lambda: mock_initiative_agreement_services + ) + fastapi_app.dependency_overrides[InitiativeAgreementValidation] = ( + lambda: mock_initiative_agreement_validation + ) + + response = await client.put( + url, json=initiative_agreement_update_payload.model_dump(by_alias=True) + ) + + data = response.json() + + assert response.status_code ==
status.HTTP_200_OK + assert data["complianceUnits"] == 150 diff --git a/backend/lcfs/tests/internal_comment/test_internal_comment.py b/backend/lcfs/tests/internal_comment/test_internal_comment.py new file mode 100644 index 000000000..046327621 --- /dev/null +++ b/backend/lcfs/tests/internal_comment/test_internal_comment.py @@ -0,0 +1,474 @@ +import pytest +from unittest.mock import AsyncMock, patch +from fastapi import FastAPI, status +from httpx import AsyncClient +from datetime import datetime + +from lcfs.db.models import UserProfile +from lcfs.db.models.transfer.Transfer import Transfer, TransferRecommendationEnum +from lcfs.db.models.initiative_agreement.InitiativeAgreement import InitiativeAgreement +from lcfs.db.models.admin_adjustment.AdminAdjustment import AdminAdjustment +from lcfs.db.models.comment.InternalComment import InternalComment +from lcfs.db.models.comment.TransferInternalComment import TransferInternalComment +from lcfs.db.models.user.Role import RoleEnum +from lcfs.web.api.internal_comment.schema import EntityTypeEnum, AudienceScopeEnum + + +@pytest.mark.anyio +async def test_create_internal_comment_with_transfer( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, + add_models, +): + """ + Test creating an internal comment associated with a Transfer entity. + """ + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + + transfer = Transfer( + transfer_id=1, + from_organization_id=1, + to_organization_id=2, + agreement_date=datetime.now(), + transaction_effective_date=datetime.now(), + price_per_unit=1.0, + quantity=100, + transfer_category_id=1, + current_status_id=1, + recommendation=TransferRecommendationEnum.Record, + effective_status=True, + ) + await add_models([transfer]) + + payload = { + "entity_type": EntityTypeEnum.TRANSFER.value, + "entity_id": transfer.transfer_id, + "comment": "Transfer comment", + "audience_scope": AudienceScopeEnum.ANALYST.value, + } + + with patch( + "lcfs.web.api.internal_comment.repo.UserRepository.get_full_name", + new_callable=AsyncMock, + ) as mock_get_full_name: + mock_get_full_name.return_value = "Mocked Full Name" + + url = fastapi_app.url_path_for("create_comment") + response = await client.post(url, json=payload) + assert response.status_code == status.HTTP_201_CREATED + data = response.json() + assert data["comment"] == "Transfer comment" + assert data["audienceScope"] == AudienceScopeEnum.ANALYST.value + assert data["createUser"] == "mockuser" + assert data["fullName"] == "Mocked Full Name" + + +@pytest.mark.anyio +async def test_create_internal_comment_with_initiative_agreement( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, + add_models, +): + """ + Test creating an internal comment associated with an Initiative Agreement entity. 
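+
+    The view maps entity_type to the matching association table before
+    persisting, so the comment created here should end up linked to the
+    initiative agreement through an association row. A minimal sketch of
+    that linkage (model name assumed by analogy with
+    TransferInternalComment; the real wiring lives in the repo layer):
+
+        association = InitiativeAgreementInternalComment(  # hypothetical name
+            initiative_agreement_id=initiative_agreement.initiative_agreement_id,
+            internal_comment_id=created_comment.internal_comment_id,
+        )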
+ """ + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + + initiative_agreement = InitiativeAgreement( + initiative_agreement_id=1, + compliance_units=1000, + transaction_effective_date=datetime.now(), + gov_comment="Test Initiative Agreement", + to_organization_id=1, + current_status_id=1, + ) + await add_models([initiative_agreement]) + + payload = { + "entity_type": EntityTypeEnum.INITIATIVE_AGREEMENT.value, + "entity_id": initiative_agreement.initiative_agreement_id, + "comment": "Initiative Agreement comment", + "audience_scope": AudienceScopeEnum.DIRECTOR.value, + } + + with patch( + "lcfs.web.api.internal_comment.repo.UserRepository.get_full_name", + new_callable=AsyncMock, + ) as mock_get_full_name: + mock_get_full_name.return_value = "Mocked Full Name" + + url = fastapi_app.url_path_for("create_comment") + response = await client.post(url, json=payload) + assert response.status_code == status.HTTP_201_CREATED + data = response.json() + assert data["comment"] == "Initiative Agreement comment" + assert data["audienceScope"] == AudienceScopeEnum.DIRECTOR.value + assert data["createUser"] == "mockuser" + assert data["fullName"] == "Mocked Full Name" + + +@pytest.mark.anyio +async def test_create_internal_comment_with_admin_adjustment( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, + add_models, +): + """ + Test creating an internal comment associated with an Admin Adjustment entity. + """ + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + + admin_adjustment = AdminAdjustment( + admin_adjustment_id=100, + compliance_units=500, + transaction_effective_date=datetime.now(), + gov_comment="Test Admin Adjustment", + to_organization_id=1, + current_status_id=1, + ) + await add_models([admin_adjustment]) + + payload = { + "entity_type": EntityTypeEnum.ADMIN_ADJUSTMENT.value, + "entity_id": admin_adjustment.admin_adjustment_id, + "comment": "Admin Adjustment comment", + "audience_scope": AudienceScopeEnum.COMPLIANCE_MANAGER.value, + } + + with patch( + "lcfs.web.api.internal_comment.repo.UserRepository.get_full_name", + new_callable=AsyncMock, + ) as mock_get_full_name: + mock_get_full_name.return_value = "Mocked Full Name" + + url = fastapi_app.url_path_for("create_comment") + response = await client.post(url, json=payload) + assert response.status_code == status.HTTP_201_CREATED + data = response.json() + assert data["comment"] == "Admin Adjustment comment" + assert data["audienceScope"] == AudienceScopeEnum.COMPLIANCE_MANAGER.value + assert data["createUser"] == "mockuser" + assert data["fullName"] == "Mocked Full Name" + + +@pytest.mark.anyio +async def test_create_internal_comment_invalid_entity_type( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, +): + """ + Test creating an internal comment with an invalid entity type. + """ + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + + payload = { + "entity_type": "InvalidType", + "entity_id": 1, + "comment": "Invalid entity type comment", + "audience_scope": AudienceScopeEnum.ANALYST.value, + } + + url = fastapi_app.url_path_for("create_comment") + response = await client.post(url, json=payload) + assert ( + response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY + ) # Pydantic validation error + data = response.json() + assert "details" in data + + +@pytest.mark.anyio +async def test_create_internal_comment_missing_fields( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, +): + """ + Test creating an internal comment with missing required fields. 
+ """ + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + + payload = { + # Missing 'entity_type', 'entity_id', 'comment', 'audience_scope' + } + + url = fastapi_app.url_path_for("create_comment") + response = await client.post(url, json=payload) + assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY + data = response.json() + assert "details" in data + + +@pytest.mark.anyio +async def test_create_internal_comment_invalid_audience_scope( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, + add_models, +): + """ + Test creating an internal comment with an invalid audience scope. + """ + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + + transfer = Transfer( + transfer_id=2, + from_organization_id=1, + to_organization_id=2, + agreement_date=datetime.now(), + transaction_effective_date=datetime.now(), + price_per_unit=1.0, + quantity=100, + transfer_category_id=1, + current_status_id=1, + recommendation=TransferRecommendationEnum.Record, + effective_status=True, + ) + await add_models([transfer]) + + payload = { + "entity_type": EntityTypeEnum.TRANSFER.value, + "entity_id": transfer.transfer_id, + "comment": "Invalid audience scope comment", + "audience_scope": "InvalidScope", + } + + url = fastapi_app.url_path_for("create_comment") + response = await client.post(url, json=payload) + assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY + data = response.json() + assert "details" in data + + +@pytest.mark.anyio +async def test_get_internal_comments_no_comments( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, + add_models, +): + """ + Test retrieving internal comments when none exist for the entity. + """ + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + + transfer = Transfer( + transfer_id=3, + from_organization_id=1, + to_organization_id=2, + agreement_date=datetime.now(), + transaction_effective_date=datetime.now(), + price_per_unit=1.0, + quantity=100, + transfer_category_id=1, + current_status_id=1, + recommendation=TransferRecommendationEnum.Record, + effective_status=True, + ) + await add_models([transfer]) + + entity_type = EntityTypeEnum.TRANSFER.value + entity_id = transfer.transfer_id + url = fastapi_app.url_path_for( + "get_comments", entity_type=entity_type, entity_id=entity_id + ) + response = await client.get(url) + assert response.status_code == status.HTTP_200_OK + data = response.json() + assert isinstance(data, list) + assert len(data) == 0 # No comments + + +@pytest.mark.anyio +async def test_create_internal_comment_without_government_role( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, +): + """ + Test that a user without the GOVERNMENT role cannot create an internal comment. + """ + set_mock_user(fastapi_app, [RoleEnum.SUPPLIER]) + + payload = { + "entity_type": EntityTypeEnum.TRANSFER.value, + "entity_id": 1, + "comment": "Attempted comment by supplier", + "audience_scope": AudienceScopeEnum.ANALYST.value, + } + + url = fastapi_app.url_path_for("create_comment") + response = await client.post(url, json=payload) + assert response.status_code == status.HTTP_403_FORBIDDEN + + +@pytest.mark.anyio +async def test_get_internal_comments_multiple_comments( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, + add_models, +): + """ + Test retrieving multiple internal comments for an entity. 
+ """ + set_mock_user( + fastapi_app, [RoleEnum.GOVERNMENT], user_details={"username": "IDIRUSER"} + ) + + transfer = Transfer( + transfer_id=5, + from_organization_id=1, + to_organization_id=2, + agreement_date=datetime.now(), + transaction_effective_date=datetime.now(), + price_per_unit=1.0, + quantity=100, + transfer_category_id=1, + current_status_id=1, + recommendation=TransferRecommendationEnum.Record, + effective_status=True, + ) + await add_models([transfer]) + + user = UserProfile( + keycloak_username="IDIRUSER", + first_name="Test", + last_name="User", + ) + await add_models([user]) + + comments = [] + for i in range(3): + internal_comment = InternalComment( + internal_comment_id=i, + comment=f"Comment {i}", + audience_scope=AudienceScopeEnum.ANALYST.value, + create_user="IDIRUSER", + ) + await add_models([internal_comment]) + association = TransferInternalComment( + transfer_id=transfer.transfer_id, + internal_comment_id=internal_comment.internal_comment_id, + ) + await add_models([association]) + comments.append(internal_comment) + + entity_type = EntityTypeEnum.TRANSFER.value + entity_id = transfer.transfer_id + url = fastapi_app.url_path_for( + "get_comments", entity_type=entity_type, entity_id=entity_id + ) + response = await client.get(url) + + assert response.status_code == status.HTTP_200_OK + data = response.json() + + assert isinstance(data, list) + assert len(data) == 3 + + for i in range(3): + assert data[i]["comment"] == f"Comment {2 - i}" + + +@pytest.mark.anyio +async def test_update_internal_comment_success( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, + add_models, +): + """ + Test updating an internal comment successfully. + """ + set_mock_user( + fastapi_app, + [RoleEnum.GOVERNMENT], + user_details={"keycloak_username": "IDIRUSER"}, + ) + + # Create a transfer and an internal comment + transfer = Transfer( + transfer_id=1, + from_organization_id=1, + to_organization_id=2, + agreement_date=datetime.now(), + transaction_effective_date=datetime.now(), + price_per_unit=1.0, + quantity=100, + transfer_category_id=1, + current_status_id=1, + recommendation=TransferRecommendationEnum.Record, + effective_status=True, + ) + await add_models([transfer]) + + internal_comment = InternalComment( + internal_comment_id=1, + comment="Original Comment", + audience_scope=AudienceScopeEnum.ANALYST.value, + create_user="IDIRUSER", + ) + await add_models([internal_comment]) + + # Associate the internal comment with the transfer + association = TransferInternalComment( + transfer_id=transfer.transfer_id, + internal_comment_id=internal_comment.internal_comment_id, + ) + await add_models([association]) + + # Prepare payload for the update + update_payload = {"comment": "Updated Comment"} + + url = fastapi_app.url_path_for( + "update_comment", internal_comment_id=internal_comment.internal_comment_id + ) + response = await client.put(url, json=update_payload) + + assert response.status_code == status.HTTP_200_OK + data = response.json() + + assert data["comment"] == "Updated Comment" + assert data["createUser"] == "IDIRUSER" + + +@pytest.mark.anyio +async def test_update_internal_comment_unauthorized( + client: AsyncClient, fastapi_app: FastAPI, set_mock_user +): + """ + Test trying to update an internal comment the user is not authorized to update. 
+ """ + set_mock_user(fastapi_app, [RoleEnum.SUPPLIER]) + + # Prepare payload for the update attempt + update_payload = {"comment": "Updated Comment"} + + url = fastapi_app.url_path_for("update_comment", internal_comment_id=1) + response = await client.put(url, json=update_payload) + + assert response.status_code == status.HTTP_403_FORBIDDEN + + +@pytest.mark.anyio +async def test_update_internal_comment_nonexistent( + client: AsyncClient, fastapi_app: FastAPI, set_mock_user +): + """ + Test trying to update an internal comment that does not exist. + """ + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + + # Prepare payload for the update attempt + update_payload = {"comment": "Updated Comment"} + + # Try to update a comment that does not exist (e.g., internal_comment_id=999) + url = fastapi_app.url_path_for("update_comment", internal_comment_id=999) + response = await client.put(url, json=update_payload) + + assert response.status_code == status.HTTP_404_NOT_FOUND diff --git a/backend/lcfs/tests/notification/test_notification_repo.py b/backend/lcfs/tests/notification/test_notification_repo.py new file mode 100644 index 000000000..bbc4ee80f --- /dev/null +++ b/backend/lcfs/tests/notification/test_notification_repo.py @@ -0,0 +1,348 @@ +from lcfs.db.models.notification.NotificationChannel import ChannelEnum +from lcfs.web.api.base import NotificationTypeEnum, PaginationRequestSchema +import pytest +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import delete +from lcfs.web.api.notification.repo import NotificationRepository +from lcfs.db.models.notification import ( + NotificationMessage, + NotificationChannelSubscription, +) +from lcfs.web.exception.exceptions import DataNotFoundException +from unittest.mock import AsyncMock, MagicMock + + +@pytest.fixture +def mock_db_session(): + session = AsyncMock(spec=AsyncSession) + + async def mock_execute(*args, **kwargs): + mock_result = ( + MagicMock() + ) # Changed to MagicMock since the chained methods are sync + mock_result.scalars = MagicMock(return_value=mock_result) + mock_result.unique = MagicMock(return_value=mock_result) + mock_result.all = MagicMock(return_value=[MagicMock(spec=NotificationMessage)]) + mock_result.first = MagicMock(return_value=MagicMock(spec=NotificationMessage)) + return mock_result + + session.execute = mock_execute + session.add = MagicMock() + session.flush = AsyncMock() + session.refresh = AsyncMock() + + return session + + +@pytest.fixture +def notification_repo(mock_db_session): + return NotificationRepository(db=mock_db_session) + + +@pytest.mark.anyio +async def test_create_notification_message(notification_repo, mock_db_session): + new_notification_message = MagicMock(spec=NotificationMessage) + + result = await notification_repo.create_notification_message( + new_notification_message + ) + + assert result == new_notification_message + mock_db_session.add.assert_called_once_with(new_notification_message) + assert mock_db_session.flush.await_count == 1 + assert mock_db_session.refresh.await_count == 1 + + +@pytest.mark.anyio +async def test_get_notification_message_by_id_found(notification_repo, mock_db_session): + notification_id = 1 + mock_result = MagicMock(spec=NotificationMessage) + + mock_result_chain = MagicMock() + mock_result_chain.scalars = MagicMock(return_value=mock_result_chain) + mock_result_chain.scalar_one_or_none = MagicMock(return_value=mock_result) + + async def mock_execute(*args, **kwargs): + return mock_result_chain + + mock_db_session.execute = 
MagicMock(side_effect=mock_execute) + + result = await notification_repo.get_notification_message_by_id(notification_id) + + assert result == mock_result + mock_db_session.execute.assert_called_once() + mock_result_chain.scalar_one_or_none.assert_called_once() + + +@pytest.mark.anyio +async def test_get_notification_message_by_id_not_found( + notification_repo, mock_db_session +): + mock_result_chain = MagicMock() + mock_result_chain.scalars = MagicMock(return_value=mock_result_chain) + mock_result_chain.scalar_one_or_none.side_effect = DataNotFoundException + + async def mock_execute(*args, **kwargs): + return mock_result_chain + + mock_db_session.execute = MagicMock(side_effect=mock_execute) + + with pytest.raises(DataNotFoundException): + await notification_repo.get_notification_message_by_id(999) + + +@pytest.mark.anyio +async def test_get_notification_messages_by_user(notification_repo, mock_db_session): + mock_notification1 = MagicMock(spec=NotificationMessage) + mock_notification2 = MagicMock(spec=NotificationMessage) + + mock_result = MagicMock() + mock_result.unique.return_value.scalars.return_value.all.return_value = [ + mock_notification1, + mock_notification2, + ] + + mock_db_session.execute = AsyncMock(return_value=mock_result) + + result = await notification_repo.get_notification_messages_by_user(1) + + assert len(result) == 2 + assert result == [mock_notification1, mock_notification2] + mock_db_session.execute.assert_called_once() + + +@pytest.mark.anyio +async def test_get_unread_notification_message_count_by_user_id( + notification_repo, mock_db_session +): + user_id = 1 + expected_unread_count = 5 + + mock_result = MagicMock() + mock_result.scalar_one.return_value = expected_unread_count + + mock_db_session.execute = AsyncMock(return_value=mock_result) + + result = await notification_repo.get_unread_notification_message_count_by_user_id( + user_id + ) + + assert result == expected_unread_count + mock_db_session.execute.assert_called_once() + + +@pytest.mark.anyio +async def test_delete_notification_message(notification_repo, mock_db_session): + notification_id = 123 + + mock_db_session.execute = AsyncMock() + mock_db_session.flush = AsyncMock() + + await notification_repo.delete_notification_message(notification_id) + + assert mock_db_session.execute.call_count == 1 + executed_query = mock_db_session.execute.call_args[0][0] + + expected_query = delete(NotificationMessage).where( + NotificationMessage.notification_message_id == notification_id + ) + assert str(executed_query) == str(expected_query) + mock_db_session.execute.assert_called_once() + mock_db_session.flush.assert_called_once() + + +@pytest.mark.anyio +async def test_update_notification_message(notification_repo, mock_db_session): + mock_notification = MagicMock(spec=NotificationMessage) + mock_notification.notification_message_id = 1 + mock_notification.is_read = False + mock_notification.message = "Original message" + + updated_notification = MagicMock(spec=NotificationMessage) + updated_notification.notification_message_id = 1 + updated_notification.is_read = True + updated_notification.message = "Updated message" + + mock_db_session.merge.return_value = updated_notification + mock_db_session.flush = AsyncMock() + + notification_repo.db = mock_db_session + + result = await notification_repo.update_notification_message(mock_notification) + + mock_db_session.merge.assert_called_once_with(mock_notification) + mock_db_session.flush.assert_called_once() + assert result == updated_notification + assert 
result.is_read == True + assert result.message == "Updated message" + + +@pytest.mark.anyio +async def test_mark_notification_as_read(notification_repo, mock_db_session): + notification_id = 123 + + mock_notification = MagicMock(spec=NotificationMessage) + mock_notification.notification_message_id = notification_id + mock_notification.is_read = False # Initially, the notification is unread + + mock_result = MagicMock() + mock_result.scalar_one_or_none.return_value = mock_notification + + async def mock_execute(*args, **kwargs): + return mock_result + + mock_db_session.execute = mock_execute + mock_db_session.commit = AsyncMock() + mock_db_session.refresh = AsyncMock() + + result = await notification_repo.mark_notification_as_read(notification_id) + + assert result.is_read is True # Verify that is_read was set to True + mock_db_session.commit.assert_called_once() # Ensure commit was called + mock_db_session.refresh.assert_called_once_with( + mock_notification + ) # Check refresh was called + + +@pytest.mark.anyio +async def test_create_notification_channel_subscription( + notification_repo, mock_db_session +): + new_subscription = MagicMock(spec=NotificationChannelSubscription) + + result = await notification_repo.create_notification_channel_subscription( + new_subscription + ) + + assert result == new_subscription + mock_db_session.add.assert_called_once_with(new_subscription) + assert mock_db_session.flush.await_count == 1 + assert mock_db_session.refresh.await_count == 1 + + +@pytest.mark.anyio +async def test_get_notification_channel_subscriptions_by_user( + notification_repo, mock_db_session +): + mock_subscription = MagicMock(spec=NotificationChannelSubscription) + mock_subscription.user_profile_id = 1 + mock_subscription.notification_channel_subscription_id = 1 + mock_subscription.notification_type_id = 1 + mock_subscription.notification_channel_id = 1 + + mock_result_chain = MagicMock() + mock_result_chain.scalars.return_value.all.return_value = [mock_subscription] + + async def mock_execute(*args, **kwargs): + return mock_result_chain + + mock_db_session.execute = mock_execute + + result = await notification_repo.get_notification_channel_subscriptions_by_user(1) + + assert len(result) == 1 + assert result[0].user_profile_id == 1 + + +@pytest.mark.anyio +async def test_get_notification_channel_subscriptions_by_id( + notification_repo, mock_db_session +): + subscription_id = 1 + mock_subscription = MagicMock(spec=NotificationChannelSubscription) + mock_subscription.notification_channel_subscription_id = subscription_id + + mock_result_chain = MagicMock() + mock_result_chain.scalar_one.return_value = mock_subscription + + async def mock_execute(*args, **kwargs): + return mock_result_chain + + mock_db_session.execute = mock_execute + + result = await notification_repo.get_notification_channel_subscription_by_id( + subscription_id + ) + + assert result is not None + assert result.notification_channel_subscription_id == subscription_id + + +@pytest.mark.anyio +async def test_create_notification_messages(notification_repo, mock_db_session): + messages = [ + MagicMock(spec=NotificationMessage), + MagicMock(spec=NotificationMessage), + ] + + await notification_repo.create_notification_messages(messages) + + mock_db_session.add_all.assert_called_once_with(messages) + mock_db_session.flush.assert_called_once() + + +@pytest.mark.anyio +async def test_mark_notifications_as_read(notification_repo, mock_db_session): + user_id = 1 + notification_ids = [1, 2, 3] + + mock_db_session.execute = 
AsyncMock() + mock_db_session.flush = AsyncMock() + + result = await notification_repo.mark_notifications_as_read( + user_id, notification_ids + ) + + assert result == notification_ids + mock_db_session.execute.assert_called_once() + mock_db_session.flush.assert_called_once() + + +@pytest.mark.anyio +async def test_get_notification_type_by_name(notification_repo, mock_db_session): + # Create a mock result that properly simulates the SQLAlchemy result + mock_result = MagicMock() + mock_scalars = MagicMock() + mock_scalars.first.return_value = 123 + mock_result.scalars.return_value = mock_scalars + + mock_db_session.execute = AsyncMock(return_value=mock_result) + + result = await notification_repo.get_notification_type_by_name("TestNotification") + + assert result == 123 + mock_db_session.execute.assert_called_once() + + +@pytest.mark.anyio +async def test_get_notification_channel_by_name(notification_repo, mock_db_session): + # Similar setup to the previous test + mock_result = MagicMock() + mock_scalars = MagicMock() + mock_scalars.first.return_value = 456 + mock_result.scalars.return_value = mock_scalars + + mock_db_session.execute = AsyncMock(return_value=mock_result) + + result = await notification_repo.get_notification_channel_by_name(ChannelEnum.EMAIL) + + assert result == 456 + mock_db_session.execute.assert_called_once() + + +@pytest.mark.anyio +async def test_get_subscribed_users_by_channel(notification_repo, mock_db_session): + # Similar setup, but using .all() instead of .first() + mock_result = MagicMock() + mock_scalars = MagicMock() + mock_scalars.all.return_value = [1, 2, 3] + mock_result.scalars.return_value = mock_scalars + + mock_db_session.execute = AsyncMock(return_value=mock_result) + + result = await notification_repo.get_subscribed_users_by_channel( + NotificationTypeEnum.BCEID__TRANSFER__PARTNER_ACTIONS, ChannelEnum.EMAIL + ) + + assert result == [1, 2, 3] + mock_db_session.execute.assert_called_once() diff --git a/backend/lcfs/tests/notification/test_notification_services.py b/backend/lcfs/tests/notification/test_notification_services.py new file mode 100644 index 000000000..a23b49388 --- /dev/null +++ b/backend/lcfs/tests/notification/test_notification_services.py @@ -0,0 +1,400 @@ +from lcfs.web.api.notification.schema import ( + SubscriptionSchema, + NotificationMessageSchema, +) +import pytest +from unittest.mock import AsyncMock, MagicMock +from lcfs.web.api.notification.services import NotificationService +from lcfs.db.models.notification import ( + NotificationChannelSubscription, + NotificationMessage, +) +from lcfs.web.api.notification.repo import NotificationRepository + +# Mock common data for reuse +mock_notification_message = NotificationMessage( + notification_message_id=1, + message="Test message", + is_read=False, + origin_user_profile_id=1, + related_user_profile_id=2, + notification_type_id=1, +) + +mock_notification_channel_subscription = NotificationChannelSubscription( + notification_channel_subscription_id=1, + is_enabled=True, + user_profile_id=1, + notification_type_id=1, + notification_channel_id=1, +) + + +@pytest.fixture +def notification_service(): + mock_repo = MagicMock(spec=NotificationRepository) + service = NotificationService(repo=mock_repo) + return service, mock_repo + + +@pytest.mark.anyio +async def test_get_notifications_by_user_id(notification_service): + service, mock_repo = notification_service + user_id = 1 + + mock_repo.get_notification_messages_by_user = AsyncMock( + return_value=[mock_notification_message] + ) + + result 
= await service.get_notification_messages_by_user_id(user_id) + + # Assertions + assert isinstance(result, list) + assert len(result) == 1 + assert isinstance(result[0], NotificationMessageSchema) + assert ( + result[0].notification_message_id + == mock_notification_message.notification_message_id + ) + + # Verify that the mock repository method was called with `user_profile_id` and `is_read=None` + mock_repo.get_notification_messages_by_user.assert_called_once_with( + user_profile_id=1, is_read=None + ) + + +@pytest.mark.anyio +async def test_get_notification_by_id(notification_service): + service, mock_repo = notification_service + notification_id = 1 + + mock_notification = NotificationMessage( + notification_message_id=notification_id, + message="Test message", + is_read=False, + origin_user_profile_id=1, + related_user_profile_id=2, + notification_type_id=1, + ) + + mock_repo.get_notification_message_by_id = AsyncMock(return_value=mock_notification) + + result = await service.get_notification_message_by_id(notification_id) + + assert isinstance(result, NotificationMessageSchema) + assert result.notification_message_id == mock_notification.notification_message_id + assert result.message == mock_notification.message + assert result.is_read == mock_notification.is_read + assert result.origin_user_profile_id == mock_notification.origin_user_profile_id + assert result.related_user_profile_id == mock_notification.related_user_profile_id + assert result.notification_type_id == mock_notification.notification_type_id + + mock_repo.get_notification_message_by_id.assert_called_once_with(notification_id) + + +@pytest.mark.anyio +async def test_count_unread_notifications_by_user_id(notification_service): + service, mock_repo = notification_service + user_id = 1 + expected_unread_count = 5 + + mock_repo.get_unread_notification_message_count_by_user_id = AsyncMock( + return_value=expected_unread_count + ) + + result = await service.count_unread_notifications_by_user_id(user_id) + + assert isinstance(result, int) # Ensure the return type is an integer + assert result == expected_unread_count # Check that the count is as expected + + mock_repo.get_unread_notification_message_count_by_user_id.assert_awaited_once_with( + user_id=user_id + ) + + +@pytest.mark.anyio +async def test_mark_notification_as_read(notification_service): + service, mock_repo = notification_service + notification_id = 1 + + mock_notification = NotificationMessage( + notification_message_id=notification_id, + message="Test message", + is_read=False, + origin_user_profile_id=1, + related_user_profile_id=2, + notification_type_id=1, + ) + + async def mock_mark_as_read(notification_id): + mock_notification.is_read = True + return mock_notification + + mock_repo.mark_notification_as_read = AsyncMock(side_effect=mock_mark_as_read) + + result = await service.mark_notification_as_read(notification_id) + + assert result is mock_notification + assert result.is_read is True + + mock_repo.mark_notification_as_read.assert_awaited_once_with(notification_id) + + +@pytest.mark.anyio +async def test_create_notification_message(notification_service): + service, mock_repo = notification_service + + notification_data = NotificationMessageSchema( + message="Test notification", + is_read=False, + origin_user_profile_id=1, + related_user_profile_id=2, + notification_type_id=1, + ) + + created_notification = NotificationMessage( + notification_message_id=1, # Simulate the auto-generated ID + message="Test notification", + is_read=False, + 
origin_user_profile_id=1, + related_user_profile_id=2, + notification_type_id=1, + ) + + mock_repo.create_notification_message = AsyncMock(return_value=created_notification) + + result = await service.create_notification_message(notification_data) + + assert result == created_notification # Ensure the created notification is returned + + # Extract the argument passed to `create_notification_message` + called_args, _ = mock_repo.create_notification_message.await_args + passed_notification = called_args[0] + + assert passed_notification.message == created_notification.message + assert passed_notification.is_read == created_notification.is_read + assert ( + passed_notification.origin_user_profile_id + == created_notification.origin_user_profile_id + ) + assert ( + passed_notification.related_user_profile_id + == created_notification.related_user_profile_id + ) + assert ( + passed_notification.notification_type_id + == created_notification.notification_type_id + ) + + +@pytest.mark.anyio +async def test_update_notification_message(notification_service): + service, mock_repo = notification_service + + updated_data = NotificationMessageSchema( + notification_message_id=1, + message="Updated message", + is_read=True, + origin_user_profile_id=1, + related_user_profile_id=2, + notification_type_id=1, + ) + + updated_notification = NotificationMessage( + notification_message_id=1, + message="Updated message", + is_read=True, + origin_user_profile_id=1, + related_user_profile_id=2, + notification_type_id=1, + ) + + mock_repo.update_notification_message = AsyncMock(return_value=updated_notification) + + result = await service.update_notification(updated_data) + + assert result == updated_notification # Ensure the updated notification is returned + + # Extract the argument passed to `update_notification_message` + called_args, _ = mock_repo.update_notification_message.await_args + passed_notification = called_args[0] + + assert ( + passed_notification.notification_message_id + == updated_data.notification_message_id + ) + assert passed_notification.message == updated_data.message + assert passed_notification.is_read == updated_data.is_read + assert ( + passed_notification.origin_user_profile_id + == updated_data.origin_user_profile_id + ) + assert ( + passed_notification.related_user_profile_id + == updated_data.related_user_profile_id + ) + assert passed_notification.notification_type_id == updated_data.notification_type_id + + mock_repo.update_notification_message.assert_awaited_once_with(passed_notification) + + +@pytest.mark.anyio +async def test_delete_notification_message(notification_service): + service, mock_repo = notification_service + + user_id = 1 + notification_id = 123 + + # Mock the repository's get and delete methods + mock_notification_data = NotificationMessage( + notification_message_id=notification_id, + message="Test notification", + is_read=False, + origin_user_profile_id=user_id, + related_user_profile_id=2, + notification_type_id=1, + ) + + mock_repo.get_notification_message_by_id = AsyncMock( + return_value=mock_notification_data + ) + mock_repo.delete_notification_message = AsyncMock() + + await service.delete_notification_message(notification_id) + + mock_repo.get_notification_message_by_id.assert_awaited_once_with(notification_id) + mock_repo.delete_notification_message.assert_awaited_once_with(notification_id) + + +@pytest.mark.anyio +async def test_create_notification_channel_subscription(notification_service): + service, mock_repo = notification_service + + 
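+    # The service resolves the schema's channel/type keys to database ids
+    # before building the subscription row, roughly (method names taken from
+    # the mocks set up below; call arguments are assumed):
+    #     channel_id = await self.get_notification_channel_id_by_key("EMAIL")  # mocked to 3
+    #     type_id = await self.get_notification_type_id_by_key(
+    #         "BCEID__COMPLIANCE_REPORT__DIRECTOR_ASSESSMENT"
+    #     )  # mocked to 2
+    # Only the subscription wiring is exercised here; both lookups are mocked.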
+    subscription_data = SubscriptionSchema(
+        is_enabled=True,
+        notification_channel_name="EMAIL",
+        notification_type_name="BCEID__COMPLIANCE_REPORT__DIRECTOR_ASSESSMENT",
+    )
+    user_profile_id = 1
+
+    # Mock the methods that fetch IDs from keys
+    service.get_notification_channel_id_by_key = AsyncMock(return_value=3)
+    service.get_notification_type_id_by_key = AsyncMock(return_value=2)
+
+    # Mock the repo method
+    created_subscription = NotificationChannelSubscription(
+        notification_channel_subscription_id=123,
+        is_enabled=True,
+        user_profile_id=user_profile_id,
+        notification_type_id=2,
+        notification_channel_id=3,
+    )
+    mock_repo.create_notification_channel_subscription = AsyncMock(
+        return_value=created_subscription
+    )
+
+    result = await service.create_notification_channel_subscription(
+        subscription_data, user_profile_id
+    )
+
+    # Assertions
+    assert isinstance(result, SubscriptionSchema)
+    assert result.notification_channel_subscription_id == 123
+    assert result.is_enabled is True
+
+    # Verify that the repo method was called with the correct subscription
+    called_args, _ = mock_repo.create_notification_channel_subscription.await_args
+    passed_subscription = called_args[0]
+    assert passed_subscription.is_enabled is True
+    assert passed_subscription.user_profile_id == user_profile_id
+
+
+@pytest.mark.anyio
+async def test_get_notification_channel_subscriptions_by_user_id(notification_service):
+    service, mock_repo = notification_service
+
+    user_id = 1
+    # Mock subscription data
+    mock_subscription = MagicMock(spec=NotificationChannelSubscription)
+    mock_subscription.notification_channel_subscription_id = 123
+    mock_subscription.user_profile_id = user_id
+    mock_subscription.is_enabled = True
+
+    # Mock associated channel and type
+    mock_channel = MagicMock()
+    mock_channel.channel_name.name = "email"
+    mock_subscription.notification_channel = mock_channel
+
+    mock_type = MagicMock()
+    mock_type.name = "new_message"
+    mock_subscription.notification_type = mock_type
+
+    mock_repo.get_notification_channel_subscriptions_by_user = AsyncMock(
+        return_value=[mock_subscription]
+    )
+
+    result = await service.get_notification_channel_subscriptions_by_user_id(user_id)
+
+    assert len(result) == 1
+    subscription = result[0]
+    assert subscription["notification_channel_subscription_id"] == 123
+    assert subscription["notification_channel_name"] == "email"
+    assert subscription["notification_type_name"] == "new_message"
+
+    mock_repo.get_notification_channel_subscriptions_by_user.assert_awaited_once_with(
+        user_id
+    )
+
+
+@pytest.mark.anyio
+async def test_get_notification_channel_subscription_by_id(notification_service):
+    service, mock_repo = notification_service
+
+    subscription_id = 123
+    expected_subscription = NotificationChannelSubscription(
+        notification_channel_subscription_id=subscription_id,
+        is_enabled=True,
+        user_profile_id=1,
+        notification_type_id=2,
+        notification_channel_id=3,
+    )
+
+    mock_repo.get_notification_channel_subscription_by_id = AsyncMock(
+        return_value=expected_subscription
+    )
+
+    result = await service.get_notification_channel_subscription_by_id(subscription_id)
+
+    assert result == expected_subscription
+    mock_repo.get_notification_channel_subscription_by_id.assert_awaited_once_with(
+        subscription_id
+    )
+
+
+@pytest.mark.anyio
+async def test_delete_notification_channel_subscription(notification_service):
+    service, mock_repo = notification_service
+
+    user_profile_id = 1
+    subscription_id = 456
+
+    mock_subscription_data = NotificationChannelSubscription(
+ notification_channel_subscription_id=subscription_id, + user_profile_id=user_profile_id, + ) + + mock_repo.get_notification_channel_subscription_by_id = AsyncMock( + return_value=mock_subscription_data + ) + mock_repo.delete_notification_channel_subscription = AsyncMock() + + await service.delete_notification_channel_subscription( + subscription_id, user_profile_id + ) + + mock_repo.get_notification_channel_subscription_by_id.assert_awaited_once_with( + subscription_id + ) + mock_repo.delete_notification_channel_subscription.assert_awaited_once_with( + subscription_id + ) diff --git a/backend/lcfs/tests/notification/test_notification_views.py b/backend/lcfs/tests/notification/test_notification_views.py new file mode 100644 index 000000000..7a7e2fa93 --- /dev/null +++ b/backend/lcfs/tests/notification/test_notification_views.py @@ -0,0 +1,255 @@ +from lcfs.web.api.notification.schema import ( + SubscriptionSchema, + NotificationMessageSchema, +) +from lcfs.web.api.notification.services import NotificationService +import pytest +from fastapi import FastAPI +from httpx import AsyncClient +from unittest.mock import MagicMock +from unittest.mock import patch + +from lcfs.db.models.user.Role import RoleEnum + +# Mock data for reuse +mock_notification = NotificationMessageSchema( + notification_message_id=1, + message="Test message", + is_read=False, + origin_user_profile_id=1, + related_user_profile_id=2, + notification_type_id=1, +) + +# Mock data for subscription +mock_subscription = SubscriptionSchema( + notification_channel_subscription_id=1, + is_enabled=True, + notification_channel_id=1, + user_profile_id=1, + notification_type_id=1, +) + + +@pytest.fixture +def mock_notification_service(): + return MagicMock(spec=NotificationService) + + +@pytest.mark.anyio +async def test_mark_notification_as_read(client, fastapi_app, set_mock_user): + with patch( + "lcfs.web.api.notification.views.NotificationService.update_notification" + ) as mock_update_notification: + # Set the mock response data to include all required fields in camelCase + mock_update_notification.return_value = { + "notificationMessageId": 1, + "isRead": True, + "message": "Mark as read test", + "originUserProfileId": 1, + "relatedUserProfileId": 1, + "notificationTypeId": 1, + } + + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + + url = fastapi_app.url_path_for("save_notification") + + payload = { + "notification_message_id": 1, + "is_read": True, + "message": "Mark as read test", + "origin_user_profile_id": 1, + "related_user_profile_id": 1, + "notification_type_id": 1, + } + + response = await client.post(url, json=payload) + + assert response.status_code == 200 + response_data = response.json() + + assert response_data["notificationMessageId"] == 1 + assert response_data["isRead"] is True + assert response_data["message"] == "Mark as read test" + assert response_data["originUserProfileId"] == 1 + assert response_data["relatedUserProfileId"] == 1 + assert response_data["notificationTypeId"] == 1 + + # Verify that the update_notification method was called with expected payload + called_schema = NotificationMessageSchema(**payload) + mock_update_notification.assert_called_once_with(called_schema) + + +@pytest.mark.anyio +async def test_create_notification(client, fastapi_app, set_mock_user): + with patch( + "lcfs.web.api.notification.views.NotificationService.create_notification_message" + ) as mock_create_notification: + notification_data = { + "message": "New notification", + "notification_type_id": 1, + 
"related_user_profile_id": 1, + "related_organization_id": 1, + } + + mock_create_notification.return_value = notification_data + + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + + url = fastapi_app.url_path_for("save_notification") + + response = await client.post(url, json=notification_data) + + assert response.status_code == 200 + assert response.json()["message"] == "New notification" + mock_create_notification.assert_called_once() + + +@pytest.mark.anyio +async def test_delete_notification(client, fastapi_app, set_mock_user): + with patch( + "lcfs.web.api.notification.views.NotificationService.delete_notification_message", + return_value=None, + ) as mock_delete_notification: + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + + mock_delete_notification.return_value = { + "message": "Notification Message deleted successfully" + } + + url = fastapi_app.url_path_for("save_notification") # No notification_id here + + response = await client.post( + url, json={"notification_message_id": 1, "deleted": True} + ) + + assert response.status_code == 200 + assert response.json()["message"] == "Notification Message deleted successfully" + mock_delete_notification.assert_called_once() + + +@pytest.mark.anyio +async def test_get_notifications_by_id( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, +): + with patch( + "lcfs.web.api.notification.views.NotificationService.get_notification_message_by_id" + ) as mock_get_notifications: + mock_notification_data = mock_notification.model_copy() + mock_notification_data.related_user_profile_id = 1 + mock_get_notifications.return_value = mock_notification_data + + set_mock_user( + fastapi_app, + [RoleEnum.GOVERNMENT], + user_details={"user_profile_id": 1}, + ) + + url = fastapi_app.url_path_for( + "get_notification_message_by_id", notification_id=1 + ) + print("Resolved URL:", url) + + response = await client.get(url) + + assert response.status_code == 200 + assert response.json() == mock_notification_data.model_dump(by_alias=True) + mock_get_notifications.assert_called_once_with(notification_id=1) + + +@pytest.mark.anyio +async def test_get_notification_channel_subscription_by_id( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, +): + with patch( + "lcfs.web.api.notification.views.NotificationService.get_notification_channel_subscription_by_id" + ) as mock_get_subscription: + mock_subscription = SubscriptionSchema( + notification_channel_subscription_id=1, + user_profile_id=1, # Match mock user + notification_channel_name="EMAIL", + notification_type_name="BCEID__COMPLIANCE_REPORT__DIRECTOR_ASSESSMENT", + is_enabled=True, + ) + mock_get_subscription.return_value = mock_subscription + + set_mock_user( + fastapi_app, + [RoleEnum.GOVERNMENT], + user_details={"user_profile_id": 1}, + ) + + url = fastapi_app.url_path_for( + "get_notification_channel_subscription_by_id", + notification_channel_subscription_id=1, + ) + + response = await client.get(url) + + assert response.status_code == 200 + assert response.json() == mock_subscription.model_dump(by_alias=True) + mock_get_subscription.assert_called_once_with( + notification_channel_subscription_id=1 + ) + + +@pytest.mark.anyio +async def test_create_subscription(client, fastapi_app, set_mock_user): + with patch( + "lcfs.web.api.notification.views.NotificationService.create_notification_channel_subscription" + ) as mock_create_subscription: + subscription_data = { + "is_enabled": True, + "notification_channel_id": 1, + "user_profile_id": 1, + "notification_type_id": 1, + } + 
+ mock_create_subscription.return_value = subscription_data + + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + + url = fastapi_app.url_path_for("save_subscription") + + response = await client.post(url, json=subscription_data) + + assert response.status_code == 200 + + assert response.json()["isEnabled"] is True + mock_create_subscription.assert_called_once() + + +@pytest.mark.anyio +async def test_delete_subscription(client, fastapi_app, set_mock_user): + with patch( + "lcfs.web.api.notification.views.NotificationService.delete_notification_channel_subscription", + return_value=None, + ) as mock_delete_subscription: + set_mock_user( + fastapi_app, + [RoleEnum.GOVERNMENT], + user_details={"user_profile_id": 1}, + ) + + url = fastapi_app.url_path_for("save_subscription") + + subscription_data = { + "notification_channel_subscription_id": 1, + "deleted": True, + } + + response = await client.post(url, json=subscription_data) + + assert response.status_code == 200 + assert ( + response.json()["message"] + == "Notification Subscription deleted successfully" + ) + mock_delete_subscription.assert_called_once_with( + 1, 1 + ) # subscription_id, user_profile_id diff --git a/backend/lcfs/tests/notional_transfer/__init__.py b/backend/lcfs/tests/notional_transfer/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/lcfs/tests/notional_transfer/conftest.py b/backend/lcfs/tests/notional_transfer/conftest.py new file mode 100644 index 000000000..388cefcfc --- /dev/null +++ b/backend/lcfs/tests/notional_transfer/conftest.py @@ -0,0 +1,47 @@ +from unittest.mock import MagicMock + +from lcfs.db.base import UserTypeEnum, ActionTypeEnum +from lcfs.db.models.compliance import NotionalTransfer +from lcfs.web.api.notional_transfer.schema import NotionalTransferCreateSchema + + +def create_mock_entity(overrides: dict): + mock_entity = MagicMock(spec=NotionalTransfer) + mock_entity.notional_transfer_id = 1 + mock_entity.compliance_report_id = 1 + mock_entity.fuel_category.category = "Gasoline" + mock_entity.legal_name = "Test Legal Name" + mock_entity.address_for_service = "Test Address" + mock_entity.quantity = 1000 + mock_entity.received_or_transferred = "Received" + mock_entity.group_uuid = "test-group-uuid" + mock_entity.version = 1 + mock_entity.action_type = ActionTypeEnum.CREATE + mock_entity.user_type = UserTypeEnum.SUPPLIER + + # Apply overrides + if overrides: + for key, value in overrides.items(): + setattr(mock_entity, key, value) + + return mock_entity + + +def create_mock_schema(overrides: dict): + mock_schema = NotionalTransferCreateSchema( + compliance_report_id=1, + fuel_category="Gasoline", + legal_name="Test Legal Name", + address_for_service="Test Address", + quantity=1000, + received_or_transferred="Received", + group_uuid="test-group-uuid", + version=1, + ) + + # Apply overrides + if overrides: + for key, value in overrides.items(): + setattr(mock_schema, key, value) + + return mock_schema diff --git a/backend/lcfs/tests/notional_transfer/test_notional_transfer_repo.py b/backend/lcfs/tests/notional_transfer/test_notional_transfer_repo.py new file mode 100644 index 000000000..72fcaab10 --- /dev/null +++ b/backend/lcfs/tests/notional_transfer/test_notional_transfer_repo.py @@ -0,0 +1,142 @@ +import pytest +from unittest.mock import MagicMock, AsyncMock +from sqlalchemy.ext.asyncio import AsyncSession + +from lcfs.db.base import UserTypeEnum +from lcfs.db.models.compliance import NotionalTransfer +from lcfs.web.api.notional_transfer.repo import 
NotionalTransferRepository +from lcfs.web.api.notional_transfer.schema import NotionalTransferSchema +from lcfs.tests.notional_transfer.conftest import create_mock_entity + + +@pytest.fixture +def mock_db_session(): + session = MagicMock(spec=AsyncSession) + execute_result = AsyncMock() + execute_result.unique = MagicMock(return_value=execute_result) + execute_result.scalars = MagicMock(return_value=execute_result) + execute_result.all = MagicMock(return_value=[MagicMock(spec=NotionalTransfer)]) + execute_result.first = MagicMock(return_value=MagicMock(spec=NotionalTransfer)) + session.execute.return_value = execute_result + return session + + +@pytest.fixture +def notional_transfer_repo(mock_db_session): + repo = NotionalTransferRepository(db=mock_db_session) + repo.fuel_code_repo = MagicMock() + repo.fuel_code_repo.get_fuel_categories = AsyncMock(return_value=[]) + return repo + + +@pytest.mark.anyio +async def test_get_table_options(notional_transfer_repo): + result = await notional_transfer_repo.get_table_options() + + assert isinstance(result, dict) + assert "fuel_categories" in result + assert "received_or_transferred" in result + + +@pytest.mark.anyio +async def test_get_notional_transfers(notional_transfer_repo, mock_db_session): + compliance_report_id = 1 + mock_notional_transfer = create_mock_entity({}) + mock_result_notional_transfers = [mock_notional_transfer] + mock_compliance_report_uuid = "test_group_uuid" + + # Mock the first db.execute call for fetching compliance report group UUID + mock_first_execute = MagicMock() + mock_first_execute.scalar.return_value = mock_compliance_report_uuid + + # Mock the second db.execute call for fetching notional transfers + mock_second_execute = MagicMock() + mock_second_execute.unique.return_value.scalars.return_value.all.return_value = ( + mock_result_notional_transfers + ) + + # Assign side effects to return these mocked execute calls in sequence + mock_db_session.execute = AsyncMock( + side_effect=[mock_first_execute, mock_second_execute] + ) + + result = await notional_transfer_repo.get_notional_transfers(compliance_report_id) + + assert isinstance(result, list) + assert len(result) == 1 + assert isinstance(result[0], NotionalTransferSchema) + assert result[0].fuel_category == "Gasoline" + assert result[0].legal_name == "Test Legal Name" + + +@pytest.mark.anyio +async def test_get_latest_notional_transfer_by_group_uuid( + notional_transfer_repo, mock_db_session +): + group_uuid = "test-group-uuid" + mock_notional_transfer_gov = MagicMock(spec=NotionalTransfer) + mock_notional_transfer_gov.user_type = UserTypeEnum.GOVERNMENT + mock_notional_transfer_gov.version = 2 + + mock_notional_transfer_supplier = MagicMock(spec=NotionalTransfer) + mock_notional_transfer_supplier.user_type = UserTypeEnum.SUPPLIER + mock_notional_transfer_supplier.version = 3 + + # Mock response with both government and supplier versions + mock_db_session.execute.return_value.scalars.return_value.first.side_effect = [ + mock_notional_transfer_gov, + mock_notional_transfer_supplier, + ] + + result = await notional_transfer_repo.get_latest_notional_transfer_by_group_uuid( + group_uuid + ) + + assert result.user_type == UserTypeEnum.GOVERNMENT + assert result.version == 2 + + +@pytest.mark.anyio +async def test_get_notional_transfer_version_by_user( + notional_transfer_repo, mock_db_session +): + group_uuid = "test-group-uuid" + version = 2 + user_type = UserTypeEnum.SUPPLIER + + mock_notional_transfer = MagicMock(spec=NotionalTransfer) + 
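+    # spec=NotionalTransfer keeps attribute reads honest: accessing a field that
+    # does not exist on the model raises AttributeError instead of returning a mock.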
mock_notional_transfer.group_uuid = group_uuid + mock_notional_transfer.version = version + mock_notional_transfer.user_type = user_type + + mock_db_session.execute.return_value.scalars.return_value.first.return_value = ( + mock_notional_transfer + ) + + result = await notional_transfer_repo.get_notional_transfer_version_by_user( + group_uuid, version, user_type + ) + + assert result.group_uuid == group_uuid + assert result.version == version + assert result.user_type == user_type + + +@pytest.mark.anyio +async def test_update_notional_transfer(notional_transfer_repo, mock_db_session): + updated_notional_transfer = create_mock_entity({}) + updated_notional_transfer.quantity = 2000 + updated_notional_transfer.legal_name = "Updated Legal Name" + updated_notional_transfer.address_for_service = "Updated Address" + + mock_db_session.flush = AsyncMock() + mock_db_session.refresh = AsyncMock() + mock_db_session.merge.return_value = updated_notional_transfer + + result = await notional_transfer_repo.update_notional_transfer( + updated_notional_transfer + ) + + # Assertions + assert isinstance(result, NotionalTransfer) + assert mock_db_session.flush.call_count == 1 diff --git a/backend/lcfs/tests/notional_transfer/test_notional_transfer_services.py b/backend/lcfs/tests/notional_transfer/test_notional_transfer_services.py new file mode 100644 index 000000000..cf8345153 --- /dev/null +++ b/backend/lcfs/tests/notional_transfer/test_notional_transfer_services.py @@ -0,0 +1,188 @@ +from unittest.mock import MagicMock, AsyncMock + +import pytest + +from lcfs.db.base import ActionTypeEnum, UserTypeEnum +from lcfs.db.models.compliance.NotionalTransfer import ( + NotionalTransfer, + ReceivedOrTransferredEnum, +) +from lcfs.web.api.notional_transfer.repo import NotionalTransferRepository +from lcfs.web.api.notional_transfer.schema import ( + NotionalTransferSchema, + NotionalTransferTableOptionsSchema, +) +from lcfs.web.api.notional_transfer.services import NotionalTransferServices +from lcfs.web.exception.exceptions import ServiceException +from lcfs.tests.notional_transfer.conftest import create_mock_schema, create_mock_entity + + +@pytest.fixture +def notional_transfer_service(): + mock_repo = MagicMock(spec=NotionalTransferRepository) + mock_fuel_repo = MagicMock() + service = NotionalTransferServices( + repo=mock_repo, fuel_repo=mock_fuel_repo) + return service, mock_repo, mock_fuel_repo + + +@pytest.mark.anyio +async def test_get_table_options(notional_transfer_service): + service, mock_repo, _ = notional_transfer_service + # Update the mocked return value to have the correct key + mock_repo.get_table_options = AsyncMock( + return_value={ + "fuel_categories": [], + "received_or_transferred": [], + } + ) + + response = await service.get_table_options() + + assert isinstance(response, NotionalTransferTableOptionsSchema) + assert response.fuel_categories == [] + assert response.received_or_transferred == [] + + +@pytest.mark.anyio +async def test_create_notional_transfer(notional_transfer_service): + service, mock_repo, mock_fuel_repo = notional_transfer_service + notional_transfer_data = create_mock_schema({}) + mock_fuel_repo.get_fuel_category_by = AsyncMock( + return_value=MagicMock(fuel_category_id=1) + ) + + mock_created_transfer = create_mock_entity({}) + mock_repo.create_notional_transfer = AsyncMock( + return_value=mock_created_transfer) + + response = await service.create_notional_transfer( + notional_transfer_data, UserTypeEnum.SUPPLIER + ) + + assert isinstance(response, NotionalTransferSchema) 
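+    # Field values should survive the ORM-entity-to-Pydantic-schema mapping intact.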
+ assert response.fuel_category == "Gasoline" + assert response.legal_name == "Test Legal Name" + + mock_repo.create_notional_transfer.assert_awaited_once() + + +@pytest.mark.anyio +async def test_update_notional_transfer(notional_transfer_service): + service, mock_repo, mock_fuel_repo = notional_transfer_service + + # Create test data with NotionalTransferCreateSchema + notional_transfer_data = create_mock_schema( + {"quantity": 2000, "legal_name": "Updated Legal Name"} + ) + + # Create a proper NotionalTransfer instance for the existing transfer + mock_existing_transfer = NotionalTransfer( + notional_transfer_id=1, + compliance_report_id=1, + quantity=1000, + legal_name="Existing Legal Name", + address_for_service="Existing Address", + fuel_category=MagicMock(category="Gasoline"), + received_or_transferred=ReceivedOrTransferredEnum.Received, + group_uuid="test-group-uuid", + version=1, + user_type=UserTypeEnum.SUPPLIER, + action_type=ActionTypeEnum.UPDATE, + ) + + # Configure repository methods to return these mocked objects + mock_repo.get_latest_notional_transfer_by_group_uuid = AsyncMock( + return_value=mock_existing_transfer + ) + mock_fuel_repo.get_fuel_category_by = AsyncMock( + return_value=MagicMock(category="Gasoline") + ) + + # Create a proper NotionalTransfer instance for the updated transfer + mock_updated_transfer = NotionalTransfer( + notional_transfer_id=1, + compliance_report_id=1, + quantity=2000, + legal_name="Updated Legal Name", + address_for_service="Updated Address", + fuel_category=MagicMock(category="Gasoline"), + received_or_transferred=ReceivedOrTransferredEnum.Received, + group_uuid="test-group-uuid", + version=2, + user_type=UserTypeEnum.SUPPLIER, + action_type=ActionTypeEnum.UPDATE, + ) + # Set the return value for update_notional_transfer + mock_repo.update_notional_transfer = AsyncMock( + return_value=mock_updated_transfer) + + # Execute the update function and capture the response + response = await service.update_notional_transfer( + notional_transfer_data, UserTypeEnum.SUPPLIER + ) + + # Assert that the response is a NotionalTransferSchema instance + assert response.notional_transfer_id == 1 + assert response.quantity == 2000 + assert response.legal_name == "Updated Legal Name" + assert response.action_type == ActionTypeEnum.UPDATE.value + + +@pytest.mark.anyio +async def test_update_notional_transfer_not_found(notional_transfer_service): + service, mock_repo, _ = notional_transfer_service + notional_transfer_data = create_mock_schema({}) + + mock_repo.get_notional_transfer_version_by_user = AsyncMock( + return_value=None) + + with pytest.raises(ServiceException): + await service.update_notional_transfer( + notional_transfer_data, UserTypeEnum.SUPPLIER + ) + + +@pytest.mark.anyio +async def test_delete_notional_transfer(notional_transfer_service): + service, mock_repo, _ = notional_transfer_service + notional_transfer_data = create_mock_schema({}) + + # Mock the existing notional transfer with a "CREATE" action type + mock_existing_transfer = MagicMock() + mock_existing_transfer.group_uuid = "test-group-uuid" + mock_existing_transfer.version = 1 + mock_existing_transfer.action_type = ActionTypeEnum.CREATE + + # Set up the mock __table__.columns.keys() to return field names as a list + mock_existing_transfer.__table__ = MagicMock() + mock_existing_transfer.__table__.columns.keys.return_value = [ + "notional_transfer_id", + "compliance_report_id", + "quantity", + "fuel_category", + "legal_name", + "address_for_service", + "received_or_transferred", + 
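+        # and the soft-delete flag plus versioning metadata used by the delete flow: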
"deleted", + "group_uuid", + "user_type", + "version", + "action_type", + ] + + # Mock repository methods + mock_repo.get_latest_notional_transfer_by_group_uuid = AsyncMock( + return_value=mock_existing_transfer + ) + mock_repo.create_notional_transfer = AsyncMock( + return_value=mock_existing_transfer) + + # Call the delete service + response = await service.delete_notional_transfer( + notional_transfer_data, UserTypeEnum.SUPPLIER + ) + + # Assertions + assert response.message == "Marked as deleted." + mock_repo.create_notional_transfer.assert_awaited_once() diff --git a/backend/lcfs/tests/notional_transfer/test_notional_transfer_validation.py b/backend/lcfs/tests/notional_transfer/test_notional_transfer_validation.py new file mode 100644 index 000000000..105835496 --- /dev/null +++ b/backend/lcfs/tests/notional_transfer/test_notional_transfer_validation.py @@ -0,0 +1,57 @@ +import pytest +from unittest.mock import MagicMock +from fastapi import HTTPException, Request + +from lcfs.web.api.notional_transfer.schema import NotionalTransferCreateSchema +from lcfs.web.api.notional_transfer.validation import NotionalTransferValidation + + +@pytest.fixture +def notional_transfer_validation(): + request = MagicMock(spec=Request) + validation = NotionalTransferValidation(request=request) + return validation + + +@pytest.mark.anyio +async def test_validate_compliance_report_id_success(notional_transfer_validation): + validation = notional_transfer_validation + compliance_report_id = 1 + notional_transfer_data = [ + NotionalTransferCreateSchema( + compliance_report_id=compliance_report_id, + fuel_category="Gasoline", + legal_name="Test Legal Name", + address_for_service="Test Address", + quantity=1000, + received_or_transferred="Received", + ) + ] + + await validation.validate_compliance_report_id( + compliance_report_id, notional_transfer_data + ) + + +@pytest.mark.anyio +async def test_validate_compliance_report_id_failure(notional_transfer_validation): + validation = notional_transfer_validation + compliance_report_id = 1 + notional_transfer_data = [ + NotionalTransferCreateSchema( + compliance_report_id=2, # Different from the passed compliance_report_id + fuel_category="Gasoline", + legal_name="Test Legal Name", + address_for_service="Test Address", + quantity=1000, + received_or_transferred="Received", + ) + ] + + with pytest.raises(HTTPException) as exc_info: + await validation.validate_compliance_report_id( + compliance_report_id, notional_transfer_data + ) + + assert exc_info.value.status_code == 400 + assert "Mismatch compliance_report_id" in str(exc_info.value.detail) diff --git a/backend/lcfs/tests/notional_transfer/test_notional_transfer_view.py b/backend/lcfs/tests/notional_transfer/test_notional_transfer_view.py new file mode 100644 index 000000000..0628782b0 --- /dev/null +++ b/backend/lcfs/tests/notional_transfer/test_notional_transfer_view.py @@ -0,0 +1,254 @@ +import pytest +from fastapi import FastAPI +from httpx import AsyncClient +from unittest.mock import MagicMock, AsyncMock, patch + +from lcfs.db.base import UserTypeEnum, ActionTypeEnum +from lcfs.db.models.user.Role import RoleEnum +from lcfs.web.api.base import ComplianceReportRequestSchema +from lcfs.web.api.notional_transfer.schema import ( + PaginatedNotionalTransferRequestSchema, + DeleteNotionalTransferResponseSchema, + NotionalTransferTableOptionsSchema, +) +from lcfs.web.api.notional_transfer.schema import NotionalTransferTableOptionsSchema +from lcfs.web.api.notional_transfer.services import 
NotionalTransferServices +from lcfs.web.api.notional_transfer.validation import NotionalTransferValidation +from lcfs.tests.notional_transfer.conftest import create_mock_schema + + +@pytest.fixture +def mock_notional_transfer_service(): + return MagicMock(spec=NotionalTransferServices) + + +@pytest.fixture +def mock_notional_transfer_validation(): + validation = MagicMock(spec=NotionalTransferValidation) + validation.validate_organization_access = AsyncMock() + validation.validate_compliance_report_id = AsyncMock() + return validation + + +@pytest.mark.anyio +async def test_get_table_options( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, + mock_notional_transfer_service, +): + set_mock_user(fastapi_app, [RoleEnum.SUPPLIER]) + url = "/api/notional-transfers/table-options" + + # Mock the service's get_table_options method to return the expected schema instance + mock_notional_transfer_service.get_table_options.return_value = ( + NotionalTransferTableOptionsSchema( + fuel_categories=[], + received_or_transferred=[], + ) + ) + + # Override the dependency + fastapi_app.dependency_overrides[NotionalTransferServices] = ( + lambda: mock_notional_transfer_service + ) + + response = await client.get(url) + + assert response.status_code == 200 + data = response.json() + assert "fuelCategories" in data + assert "receivedOrTransferred" in data + + +@pytest.mark.anyio +async def test_get_notional_transfers( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, + mock_notional_transfer_service, +): + with patch( + "lcfs.web.api.notional_transfer.views.ComplianceReportValidation.validate_organization_access" + ) as mock_validate_organization_access: + set_mock_user(fastapi_app, [RoleEnum.SUPPLIER]) + url = fastapi_app.url_path_for("get_notional_transfers") + payload = ComplianceReportRequestSchema(compliance_report_id=1).model_dump() + + mock_validate_organization_access.return_value = True + mock_notional_transfer_service.get_notional_transfers.return_value = { + "notionalTransfers": [] + } + + fastapi_app.dependency_overrides[NotionalTransferServices] = ( + lambda: mock_notional_transfer_service + ) + + response = await client.post(url, json=payload) + + assert response.status_code == 200 + data = response.json() + assert "notionalTransfers" in data + + +@pytest.mark.anyio +async def test_get_notional_transfers_paginated( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, + mock_notional_transfer_service, +): + with patch( + "lcfs.web.api.notional_transfer.views.ComplianceReportValidation.validate_organization_access" + ) as mock_validate_organization_access: + set_mock_user(fastapi_app, [RoleEnum.SUPPLIER]) + url = fastapi_app.url_path_for("get_notional_transfers_paginated") + payload = PaginatedNotionalTransferRequestSchema( + compliance_report_id=1, + page=1, + size=10, + sort_orders=[], + filters=[], + ).model_dump() + + mock_validate_organization_access.return_value = True + mock_notional_transfer_service.get_notional_transfers_paginated.return_value = { + "pagination": {"total": 0, "page": 1, "size": 10, "totalPages": 1}, + "notionalTransfers": [], + } + + fastapi_app.dependency_overrides[NotionalTransferServices] = ( + lambda: mock_notional_transfer_service + ) + + response = await client.post(url, json=payload) + + assert response.status_code == 200 + data = response.json() + assert "pagination" in data + assert "notionalTransfers" in data + + +@pytest.mark.anyio +async def test_save_notional_transfer_row_create( + client: AsyncClient, + fastapi_app: 
FastAPI, + set_mock_user, + mock_notional_transfer_service, + mock_notional_transfer_validation, +): + with patch( + "lcfs.web.api.notional_transfer.views.ComplianceReportValidation.validate_organization_access" + ) as mock_validate_organization_access: + set_mock_user(fastapi_app, [RoleEnum.SUPPLIER]) + url = fastapi_app.url_path_for("save_notional_transfer_row") + payload = create_mock_schema({}).model_dump() + + mock_notional_transfer_service.create_notional_transfer.return_value = payload + mock_validate_organization_access.return_value = True + + fastapi_app.dependency_overrides[NotionalTransferServices] = ( + lambda: mock_notional_transfer_service + ) + fastapi_app.dependency_overrides[NotionalTransferValidation] = ( + lambda: mock_notional_transfer_validation + ) + + response = await client.post(url, json=payload) + + assert response.status_code == 200 + data = response.json() + assert "notionalTransferId" in data + assert data["quantity"] == 1000 + assert data["fuelCategory"] == "Gasoline" + + +@pytest.mark.anyio +async def test_save_notional_transfer_row_update( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, + mock_notional_transfer_service, + mock_notional_transfer_validation, +): + with patch( + "lcfs.web.api.notional_transfer.views.ComplianceReportValidation.validate_organization_access" + ) as mock_validate_organization_access: + set_mock_user(fastapi_app, [RoleEnum.SUPPLIER]) + url = fastapi_app.url_path_for("save_notional_transfer_row") + payload = create_mock_schema( + { + "notional_transfer_id": 1, + "quantity": 2000, + "legal_name": "Updated Legal Name", + } + ).model_dump() + + mock_notional_transfer_service.update_notional_transfer.return_value = payload + mock_validate_organization_access.return_value = True + + fastapi_app.dependency_overrides[NotionalTransferServices] = ( + lambda: mock_notional_transfer_service + ) + fastapi_app.dependency_overrides[NotionalTransferValidation] = ( + lambda: mock_notional_transfer_validation + ) + + response = await client.post(url, json=payload) + + assert response.status_code == 200 + data = response.json() + assert data["notionalTransferId"] == 1 + assert data["quantity"] == 2000 + assert data["fuelCategory"] == "Gasoline" + + +@pytest.mark.anyio +async def test_save_notional_transfer_row_delete( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, + mock_notional_transfer_service, + mock_notional_transfer_validation, +): + with patch( + "lcfs.web.api.notional_transfer.views.ComplianceReportValidation.validate_organization_access" + ) as mock_validate_organization_access: + set_mock_user(fastapi_app, [RoleEnum.SUPPLIER]) + url = "/api/notional-transfers/save" + mock_schema = create_mock_schema( + { + "notional_transfer_id": 1, + "deleted": True, + "compliance_report_id": 1, + } + ) + + # Return an appropriate response object instead of None + mock_notional_transfer_service.delete_notional_transfer.return_value = ( + DeleteNotionalTransferResponseSchema( + message="Notional transfer deleted successfully" + ) + ) + mock_validate_organization_access.return_value = True + mock_notional_transfer_validation.validate_compliance_report_id.return_value = ( + None + ) + + fastapi_app.dependency_overrides[NotionalTransferServices] = ( + lambda: mock_notional_transfer_service + ) + fastapi_app.dependency_overrides[NotionalTransferValidation] = ( + lambda: mock_notional_transfer_validation + ) + + response = await client.post(url, json=mock_schema.model_dump()) + + assert response.status_code == 200 + data = 
response.json() + assert data == {"message": "Notional transfer deleted successfully"} + + # Adjusted to use UserTypeEnum.SUPPLIER + mock_notional_transfer_service.delete_notional_transfer.assert_called_once_with( + mock_schema, UserTypeEnum.SUPPLIER + ) diff --git a/backend/lcfs/tests/organization/conftest.py b/backend/lcfs/tests/organization/conftest.py new file mode 100644 index 000000000..cfe562047 --- /dev/null +++ b/backend/lcfs/tests/organization/conftest.py @@ -0,0 +1,89 @@ +import pytest +from unittest.mock import MagicMock, AsyncMock +from lcfs.web.api.organization.services import OrganizationService +from lcfs.web.api.organization.validation import OrganizationValidation +from lcfs.web.api.organizations.repo import OrganizationsRepository +from lcfs.web.api.user.services import UserServices +from lcfs.web.api.transaction.services import TransactionsService +from lcfs.web.api.transfer.services import TransferServices +from lcfs.web.api.compliance_report.services import ComplianceReportServices +from lcfs.web.api.compliance_report.repo import ComplianceReportRepository +from lcfs.web.api.user.repo import UserRepository +from lcfs.web.api.transaction.repo import TransactionRepository + + +@pytest.fixture +def mock_organization_services(): + return MagicMock(spec=OrganizationService) + + +@pytest.fixture +def mock_user_services(): + return MagicMock(spec=UserServices) + + +@pytest.fixture +def mock_transactions_services(): + return MagicMock(spec=TransactionsService) + + +@pytest.fixture +def mock_transfer_services(): + return MagicMock(spec=TransferServices) + + +@pytest.fixture +def mock_organization_validation(): + return MagicMock(spec=OrganizationValidation) + + +@pytest.fixture +def mock_compliance_report_services(): + return MagicMock(spec=ComplianceReportServices) + + +@pytest.fixture +def mock_user_repo(): + return AsyncMock(spec=UserRepository) + + +@pytest.fixture +def mock_transaction_repo(): + return AsyncMock(spec=TransactionRepository) + + +@pytest.fixture +def mock_orgs_repo(): + return AsyncMock(spec=OrganizationsRepository) + + +@pytest.fixture +def mock_report_repo(): + return AsyncMock(spec=ComplianceReportRepository) + + +@pytest.fixture +def mock_request(mock_user_profile): + request = MagicMock() + request.user = mock_user_profile + return request + + +@pytest.fixture +def organization_service(mock_user_repo, mock_transaction_repo): + service = OrganizationService() + service.transaction_repo = mock_transaction_repo + service.user_repo = mock_user_repo + return service + + +@pytest.fixture +def organization_validation( + mock_orgs_repo, mock_transaction_repo, mock_report_repo, mock_request +): + validation = OrganizationValidation() + validation.org_repo = mock_orgs_repo + validation.transaction_repo = mock_transaction_repo + validation.report_repo = mock_report_repo + validation.request = mock_request + return validation diff --git a/backend/lcfs/tests/organization/test_organization_services.py b/backend/lcfs/tests/organization/test_organization_services.py new file mode 100644 index 000000000..f356e5804 --- /dev/null +++ b/backend/lcfs/tests/organization/test_organization_services.py @@ -0,0 +1,51 @@ +import pytest +from typing import List +from datetime import date +from lcfs.web.api.user.schema import UsersSchema +from unittest.mock import MagicMock +from lcfs.web.api.transaction.schema import TransactionListSchema, TransactionViewSchema + + +@pytest.mark.anyio +async def test_get_organization_users_list_success( + organization_service, mock_user_repo +): 
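+    # An empty page from the repo should still yield a valid (empty) UsersSchema.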
+    mock_user_repo.get_users_paginated.return_value = ([], 0)
+
+    result = await organization_service.get_organization_users_list(
+        1,
+        "status",
+        MagicMock(),
+    )
+
+    assert isinstance(result, UsersSchema)
+    mock_user_repo.get_users_paginated.assert_called_once()
+
+
+@pytest.mark.anyio
+async def test_get_transactions_paginated_success(
+    organization_service, mock_transaction_repo
+):
+    mock_transaction_repo.get_transactions_paginated.return_value = (
+        [
+            {
+                "transaction_id": 1,
+                "transaction_type": "str",
+                "to_organization": "str",
+                "quantity": 1,
+                "status": "str",
+                "create_date": date.today(),
+                "update_date": date.today(),
+            }
+        ],
+        0,
+    )
+
+    pagination_mock = MagicMock()
+    pagination_mock.page = 1
+    pagination_mock.size = 10
+
+    result = await organization_service.get_transactions_paginated(pagination_mock, 1)
+
+    assert isinstance(result["transactions"][0], TransactionViewSchema)
+    mock_transaction_repo.get_transactions_paginated.assert_called_once()
diff --git a/backend/lcfs/tests/organization/test_organization_validation.py b/backend/lcfs/tests/organization/test_organization_validation.py
new file mode 100644
index 000000000..8a1cf792f
--- /dev/null
+++ b/backend/lcfs/tests/organization/test_organization_validation.py
@@ -0,0 +1,79 @@
+import pytest
+from unittest.mock import MagicMock
+
+from lcfs.db.models.transfer.TransferStatus import TransferStatusEnum
+from lcfs.db.models.user.Role import RoleEnum
+from lcfs.web.api.transfer.schema import TransferCreateSchema
+from lcfs.web.api.compliance_report.schema import ComplianceReportCreateSchema
+
+
+@pytest.mark.anyio
+async def test_check_available_balance_success(
+    organization_validation, mock_transaction_repo
+):
+    mock_transaction_repo.calculate_available_balance.return_value = 100
+
+    await organization_validation.check_available_balance(1, 1)
+
+    mock_transaction_repo.calculate_available_balance.assert_called_once()
+
+
+@pytest.mark.anyio
+async def test_create_transfer_success(
+    organization_validation, mock_orgs_repo, mock_request, mock_transaction_repo
+):
+    mock_orgs_repo.is_registered_for_transfer.return_value = True
+    mock_request.user = MagicMock()
+    mock_request.user.organization.org_status.organization_status_id = 2
+    mock_transaction_repo.calculate_available_balance.return_value = 100
+
+    await organization_validation.create_transfer(
+        1,
+        TransferCreateSchema(from_organization_id=1, to_organization_id=1, quantity=1),
+    )
+
+    mock_orgs_repo.is_registered_for_transfer.assert_called_once()
+    mock_transaction_repo.calculate_available_balance.assert_called_once()
+
+
+@pytest.mark.anyio
+async def test_update_transfer_success(organization_validation, mock_transaction_repo):
+    mock_transaction_repo.calculate_available_balance.return_value = 100
+    await organization_validation.update_transfer(
+        1,
+        TransferCreateSchema(
+            from_organization_id=1,
+            to_organization_id=2,
+            quantity=1,
+            current_status=TransferStatusEnum.Draft,
+        ),
+    )
+
+
+@pytest.mark.anyio
+async def test_create_compliance_report_success(
+    organization_validation, mock_report_repo, set_mock_user, fastapi_app
+):
+    # Mock user setup
+    set_mock_user(fastapi_app, [RoleEnum.SUPPLIER])
+
+    # Mock the request object and its attributes
+    mock_request = MagicMock()
+    mock_request.user.organization.organization_id = 1
+    organization_validation.request = mock_request
+
+    # Mock the report repository methods
+    mock_report_repo.get_compliance_period.return_value = True
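+    # The period exists (truthy) but no report has been filed for it yet (falsy),
+    # so validation should allow creating the 2024 report.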
mock_report_repo.get_compliance_report_by_period.return_value = False + + # Call the method under test + await organization_validation.create_compliance_report( + 1, + ComplianceReportCreateSchema( + compliance_period="2024", organization_id=1, status="status" + ), + ) + + # Assertions + mock_report_repo.get_compliance_period.assert_called_once_with("2024") + mock_report_repo.get_compliance_report_by_period.assert_called_once_with(1, "2024") diff --git a/backend/lcfs/tests/organization/test_organization_views.py b/backend/lcfs/tests/organization/test_organization_views.py new file mode 100644 index 000000000..3f8cacafc --- /dev/null +++ b/backend/lcfs/tests/organization/test_organization_views.py @@ -0,0 +1,428 @@ +from unittest.mock import AsyncMock + +import pytest +from datetime import date + +from lcfs.db.models.compliance.ComplianceReport import SupplementalInitiatorType +from lcfs.db.models.user.Role import RoleEnum +from httpx import AsyncClient +from fastapi import FastAPI + +from lcfs.web.api.compliance_report.validation import ComplianceReportValidation +from lcfs.web.api.organization.services import OrganizationService +from lcfs.web.api.user.services import UserServices +from lcfs.web.api.transaction.services import TransactionsService +from lcfs.web.api.transfer.services import TransferServices +from lcfs.web.api.organization.validation import OrganizationValidation + +from lcfs.web.api.compliance_report.services import ComplianceReportServices +from lcfs.web.api.compliance_report.schema import ChainedComplianceReportSchema + + +@pytest.mark.anyio +async def test_get_org_users_success( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, + mock_organization_services, +): + set_mock_user(fastapi_app, [RoleEnum.SUPPLIER]) + + mock_organization_services.get_organization_users_list.return_value = { + "pagination": {"total": 0, "page": 1, "size": 5, "totalPages": 1}, + "users": [], + } + + fastapi_app.dependency_overrides[OrganizationService] = ( + lambda: mock_organization_services + ) + + params = {"status": "Active"} + payload = {"page": 1, "size": 10, "sort_orders": [], "filters": []} + url = fastapi_app.url_path_for("get_org_users", organization_id=1) + + response = await client.post( + url, + params=params, + json=payload, + ) + + assert response.status_code == 200 + + data = response.json() + + assert "users" in data + assert isinstance(data["users"], list) + + +@pytest.mark.anyio +async def test_get_user_by_id_success( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, + mock_user_services, +): + set_mock_user(fastapi_app, [RoleEnum.SUPPLIER]) + + user_id = 1 + organization_id = 1 + url = fastapi_app.url_path_for( + "get_user_by_id", organization_id=organization_id, user_id=user_id + ) + + mock_user_services.get_user_by_id.return_value = { + "user_profile_id": user_id, + "keycloak_username": "testuser", + "keycloak_email": "testuser@example.com", + "is_active": True, + } + + fastapi_app.dependency_overrides[UserServices] = lambda: mock_user_services + + response = await client.get(url) + + assert response.status_code == 200 + + data = response.json() + + assert "userProfileId" in data + assert data["userProfileId"] == user_id + assert data["keycloakUsername"] == "testuser" + assert data["keycloakEmail"] == "testuser@example.com" + + +@pytest.mark.anyio +async def test_create_user_success( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, + mock_user_services, +): + set_mock_user(fastapi_app, [RoleEnum.SUPPLIER]) + + url = 
fastapi_app.url_path_for("create_user", organization_id=1) + + payload = { + "title": "testuser", + "keycloak_username": "testuser", + "keycloak_email": "testuser@example.com", + "first_name": "test", + "last_name": "test", + "is_active": True, + } + + mock_user_services.create_user.return_value = "User created successfully" + + fastapi_app.dependency_overrides[UserServices] = lambda: mock_user_services + + response = await client.post( + url, + json=payload, + ) + + assert response.status_code == 201 + + +@pytest.mark.anyio +async def test_get_transactions_paginated_for_org_success( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, + mock_organization_services, +): + set_mock_user(fastapi_app, [RoleEnum.SUPPLIER]) + + mock_organization_services.get_transactions_paginated.return_value = { + "transactions": [], + "pagination": {"total": 0, "page": 1, "size": 5, "totalPages": 1}, + } + + fastapi_app.dependency_overrides[OrganizationService] = ( + lambda: mock_organization_services + ) + + payload = {"page": 1, "size": 10, "sort_orders": [], "filters": []} + url = fastapi_app.url_path_for("get_transactions_paginated_for_org") + + response = await client.post( + url, + json=payload, + ) + + assert response.status_code == 200 + + +@pytest.mark.anyio +async def test_export_transactions_for_org_success( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, + mock_transactions_services, +): + set_mock_user(fastapi_app, [RoleEnum.SUPPLIER]) + + mock_transactions_services.export_transactions.return_value = { + "streaming": True} + + fastapi_app.dependency_overrides[TransactionsService] = ( + lambda: mock_transactions_services + ) + + params = {"format": "xls"} + url = fastapi_app.url_path_for("export_transactions_for_org") + + response = await client.get( + url, + params=params, + ) + + assert response.status_code == 200 + + +@pytest.mark.anyio +async def test_create_transfer_success( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, + mock_transfer_services, + mock_organization_validation, +): + set_mock_user(fastapi_app, [RoleEnum.SUPPLIER]) + + organization_id = 1 + url = fastapi_app.url_path_for( + "create_transfer", organization_id=organization_id) + + payload = {"from_organization_id": 1, "to_organization_id": 2} + + mock_organization_validation.create_transfer.return_value = None + mock_transfer_services.create_transfer.return_value = { + "transfer_id": 1, + "from_organization": {"organization_id": 1, "name": "org1"}, + "to_organization": {"organization_id": 2, "name": "org2"}, + "agreement_date": date.today(), + "quantity": 1, + "price_per_unit": 1, + "current_status": {"transfer_status_id": 1, "status": "status"}, + } + + fastapi_app.dependency_overrides[OrganizationValidation] = ( + lambda: mock_organization_validation + ) + fastapi_app.dependency_overrides[TransferServices] = lambda: mock_transfer_services + + response = await client.post( + url, + json=payload, + ) + + assert response.status_code == 201 + + +@pytest.mark.anyio +async def test_update_transfer_success( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, + mock_transfer_services, + mock_organization_validation, +): + set_mock_user(fastapi_app, [RoleEnum.SUPPLIER]) + + url = fastapi_app.url_path_for( + "update_transfer", organization_id=1, transfer_id=1) + + payload = {"from_organization_id": 1, "to_organization_id": 2} + + mock_organization_validation.update_transfer.return_value = None + mock_transfer_services.update_transfer.return_value = { + "transfer_id": 1, + 
"from_organization": {"organization_id": 1, "name": "org1"}, + "to_organization": {"organization_id": 2, "name": "org2"}, + "agreement_date": date.today(), + "quantity": 1, + "price_per_unit": 1, + "current_status": {"transfer_status_id": 1, "status": "status"}, + } + + fastapi_app.dependency_overrides[OrganizationValidation] = ( + lambda: mock_organization_validation + ) + fastapi_app.dependency_overrides[TransferServices] = lambda: mock_transfer_services + + response = await client.put( + url, + json=payload, + ) + + assert response.status_code == 201 + + data = response.json() + + assert "transferId" in data + assert data["transferId"] == 1 + + +@pytest.mark.anyio +async def test_create_compliance_report_success( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, + mock_compliance_report_services, + mock_organization_validation, +): + set_mock_user(fastapi_app, [RoleEnum.SUPPLIER]) + + organization_id = 1 + url = fastapi_app.url_path_for( + "create_compliance_report", organization_id=organization_id + ) + + payload = {"compliance_period": "2024", + "organization_id": 1, "status": "status"} + + mock_organization_validation.create_compliance_report.return_value = None + mock_compliance_report_services.create_compliance_report.return_value = { + "compliance_report_id": 1, + "compliance_period_id": 1, + "compliance_period": {"compliance_period_id": 1, "description": "2024"}, + "organization_id": 1, + "organization": {"organization_id": 1, "name": "org1"}, + "current_status_id": 1, + "current_status": {"compliance_report_status_id": 1, "status": "status"}, + "summary": {"summary_id": 1, "is_locked": False}, + "compliance_report_group_uuid": "uuid", + "version": 0, + "supplemental_initiator": SupplementalInitiatorType.SUPPLIER_SUPPLEMENTAL, + "has_supplemental": False, + } + + fastapi_app.dependency_overrides[OrganizationValidation] = ( + lambda: mock_organization_validation + ) + fastapi_app.dependency_overrides[ComplianceReportServices] = ( + lambda: mock_compliance_report_services + ) + + response = await client.post( + url, + json=payload, + ) + + assert response.status_code == 201 + + +@pytest.mark.anyio +async def test_get_compliance_reports_success( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, + mock_compliance_report_services, +): + set_mock_user(fastapi_app, [RoleEnum.SUPPLIER]) + + url = fastapi_app.url_path_for("get_compliance_reports", organization_id=1) + + mock_compliance_report_services.get_compliance_reports_paginated.return_value = { + "pagination": {"total": 1, "page": 1, "size": 10, "totalPages": 1}, + "reports": [], + } + + fastapi_app.dependency_overrides[ComplianceReportServices] = ( + lambda: mock_compliance_report_services + ) + + payload = {"page": 1, "size": 10, "sort_orders": [], "filters": []} + + response = await client.post( + url, + json=payload, + ) + + assert response.status_code == 200 + + +@pytest.mark.anyio +async def test_get_all_org_reported_years_success( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, + mock_compliance_report_services, +): + set_mock_user(fastapi_app, [RoleEnum.SUPPLIER]) + + url = fastapi_app.url_path_for( + "get_all_org_reported_years", organization_id=1) + + mock_compliance_report_services.get_all_org_reported_years.return_value = [ + {"compliance_period_id": 1, "description": "2024"} + ] + + fastapi_app.dependency_overrides[ComplianceReportServices] = ( + lambda: mock_compliance_report_services + ) + + response = await client.get(url) + + assert response.status_code == 200 + + 
+@pytest.mark.anyio +async def test_get_compliance_report_by_id_success( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, + mock_compliance_report_services, +): + # Mock user setup + set_mock_user(fastapi_app, [RoleEnum.SUPPLIER]) + + # Define the URL for the endpoint + url = fastapi_app.url_path_for( + "get_compliance_report_by_id", + organization_id=1, + report_id=1, + ) + + # Mock the compliance report service's method + mock_compliance_report_services.get_compliance_report_by_id.return_value = ChainedComplianceReportSchema( + report={ + "compliance_report_id": 1, + "compliance_period_id": 1, + "compliance_period": {"compliance_period_id": 1, "description": "2024"}, + "organization_id": 1, + "organization": {"organization_id": 1, "name": "org1"}, + "current_status_id": 1, + "current_status": {"compliance_report_status_id": 1, "status": "status"}, + "summary": {"summary_id": 1, "is_locked": False}, + "compliance_report_group_uuid": "uuid", + "version": 0, + "supplemental_initiator": SupplementalInitiatorType.SUPPLIER_SUPPLEMENTAL, + "has_supplemental": False, + }, + chain=[] + ) + + # Create a mock for the validation service + mock_compliance_report_validation = AsyncMock() + mock_compliance_report_validation.validate_organization_access.return_value = None + + # Override dependencies in the FastAPI app + fastapi_app.dependency_overrides[ComplianceReportServices] = ( + lambda: mock_compliance_report_services + ) + fastapi_app.dependency_overrides[ComplianceReportValidation] = ( + lambda: mock_compliance_report_validation + ) + + # Make the request + response = await client.get(url) + + # Assertions + assert response.status_code == 200 + mock_compliance_report_services.get_compliance_report_by_id.assert_awaited_once_with( + 1, apply_masking=True, get_chain=True + ) + mock_compliance_report_validation.validate_organization_access.assert_awaited_once_with( + 1 + ) diff --git a/backend/lcfs/tests/organizations/__init__.py b/backend/lcfs/tests/organizations/__init__.py new file mode 100644 index 000000000..e3294c3e7 --- /dev/null +++ b/backend/lcfs/tests/organizations/__init__.py @@ -0,0 +1 @@ +"""Tests for Organizations""" diff --git a/backend/lcfs/tests/organizations/organizations_payloads.py b/backend/lcfs/tests/organizations/organizations_payloads.py new file mode 100644 index 000000000..e860b0654 --- /dev/null +++ b/backend/lcfs/tests/organizations/organizations_payloads.py @@ -0,0 +1,20 @@ +from lcfs.db.models.transaction.Transaction import TransactionActionEnum, Transaction + +# Transactions ORM Models +adjustment_transaction_orm_model = Transaction( + compliance_units=100, + organization_id=1, + transaction_action=TransactionActionEnum.Adjustment, +) + +reserved_transaction_orm_model = Transaction( + compliance_units=10, + organization_id=1, + transaction_action=TransactionActionEnum.Reserved, +) + +reserved_transaction_orm_model_2 = Transaction( + compliance_units=20, + organization_id=1, + transaction_action=TransactionActionEnum.Reserved, +) diff --git a/backend/lcfs/tests/organizations/test_organizations_repo.py b/backend/lcfs/tests/organizations/test_organizations_repo.py new file mode 100644 index 000000000..d40217c99 --- /dev/null +++ b/backend/lcfs/tests/organizations/test_organizations_repo.py @@ -0,0 +1,245 @@ +import pytest +from lcfs.web.api.base import PaginationRequestSchema, SortOrder +from lcfs.tests.organizations.organizations_payloads import * +from lcfs.web.api.organizations.repo import OrganizationsRepository +from 
lcfs.db.models.transaction.Transaction import TransactionActionEnum
+from lcfs.db.models.organization.Organization import Organization
+from lcfs.db.models.organization.OrganizationAddress import OrganizationAddress
+
+
+@pytest.fixture
+def organizations_repo(dbsession):
+    return OrganizationsRepository(db=dbsession)
+
+
+BASE_TOTAL_BALANCE = 51000
+
+
+@pytest.mark.anyio
+async def test_get_organizations_paginated_balances_with_no_transaction(
+    organizations_repo,
+):
+    # Fetch the paginated list of all organizations
+    pagination = PaginationRequestSchema(
+        sort_orders=[SortOrder(field="organization_id", direction="asc")]
+    )
+    organizations, total_count = await organizations_repo.get_organizations_paginated(
+        0, 10, [], pagination
+    )
+
+    # Search for the organization with ID = 1
+    org = next((org for org in organizations if org.organization_id == 1), None)
+
+    # Assert that the organization was found
+    assert org is not None, "Organization with ID = 1 not found"
+
+    # Assert the balances are as expected
+    assert (
+        org.total_balance == BASE_TOTAL_BALANCE
+    ), f"Expected total balance to be {BASE_TOTAL_BALANCE}, got {org.total_balance}"
+    assert (
+        org.reserved_balance == 0
+    ), f"Expected reserved balance to be 0, got {org.reserved_balance}"
+
+
+@pytest.mark.anyio
+async def test_get_organizations_paginated_balances_with_adjustment_transactions_only(
+    organizations_repo, add_models
+):
+    # Add transactions
+    await add_models([adjustment_transaction_orm_model])
+
+    # Fetch the paginated list of all organizations
+    pagination = PaginationRequestSchema(
+        sort_orders=[SortOrder(field="organization_id", direction="asc")]
+    )
+    organizations, total_count = await organizations_repo.get_organizations_paginated(
+        0, 10, [], pagination
+    )
+
+    # Search for the organization with ID = 1
+    org = next((org for org in organizations if org.organization_id == 1), None)
+
+    # Assert that the organization was found
+    assert org is not None, "Organization with ID = 1 not found"
+
+    # Assert the balances are as expected
+    assert org.total_balance == (
+        BASE_TOTAL_BALANCE + 100
+    ), f"Expected total balance to be {BASE_TOTAL_BALANCE + 100}, got {org.total_balance}"
+    assert (
+        org.reserved_balance == 0
+    ), f"Expected reserved balance to be 0, got {org.reserved_balance}"
+
+
+@pytest.mark.anyio
+async def test_get_organizations_paginated_balances_with_reserved_transactions(
+    organizations_repo, add_models
+):
+    # Add transactions
+    await add_models(
+        [
+            adjustment_transaction_orm_model,
+            reserved_transaction_orm_model,
+            reserved_transaction_orm_model_2,
+        ]
+    )
+
+    # Fetch the paginated list of all organizations
+    pagination = PaginationRequestSchema(
+        sort_orders=[SortOrder(field="organization_id", direction="asc")]
+    )
+    organizations, total_count = await organizations_repo.get_organizations_paginated(
+        0, 10, [], pagination
+    )
+
+    # Search for the organization with ID = 1
+    org = next((org for org in organizations if org.organization_id == 1), None)
+
+    # Assert that the organization was found
+    assert org is not None, "Organization with ID = 1 not found"
+
+    # Assert the balances are as expected
+    assert org.total_balance == (
+        BASE_TOTAL_BALANCE + 100
+    ), f"Expected total balance to be {BASE_TOTAL_BALANCE + 100}, got {org.total_balance}"
+    assert (
+        org.reserved_balance == 30
+    ), f"Expected reserved balance to be 30, got {org.reserved_balance}"
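Taken together, these balance tests encode a simple rule: Adjustment units add to an organization's total balance, while Reserved units accumulate in the reserved balance until released, without ever touching the total. A quick arithmetic check of the expected values, using the fixture amounts from organizations_payloads.py (BASE_TOTAL_BALANCE reflects the seeded test data for organization 1):

```python
BASE_TOTAL_BALANCE = 51000  # balance seeded for organization 1 before these fixtures

adjustments = [100]  # adjustment_transaction_orm_model
reserved = [10, 20]  # reserved_transaction_orm_model and _2

total_balance = BASE_TOTAL_BALANCE + sum(adjustments)
reserved_balance = sum(reserved)
assert (total_balance, reserved_balance) == (51100, 30)

# Releasing the 20-unit transaction removes it from the reserved balance
# without changing the total:
reserved.remove(20)
assert (total_balance, sum(reserved)) == (51100, 10)
```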
+
+
+@pytest.mark.anyio
+async def test_get_organizations_paginated_balances_with_released_transactions(
+    organizations_repo, add_models, update_model
+):
+    # Add transactions, then update one to 'Released'
+    await add_models(
+        [
+            adjustment_transaction_orm_model,
+            reserved_transaction_orm_model,
+            reserved_transaction_orm_model_2,
+        ]
+    )
+    reserved_transaction_orm_model_2.transaction_action = TransactionActionEnum.Released
+    await update_model(reserved_transaction_orm_model_2)
+
+    # Fetch the paginated list of all organizations
+    pagination = PaginationRequestSchema(
+        sort_orders=[SortOrder(field="organization_id", direction="asc")]
+    )
+    organizations, total_count = await organizations_repo.get_organizations_paginated(
+        0, 10, [], pagination
+    )
+
+    # Search for the organization with ID = 1
+    org = next((org for org in organizations if org.organization_id == 1), None)
+
+    # Assert that the organization was found
+    assert org is not None, "Organization with ID = 1 not found"
+
+    # Assert the balances are as expected
+    assert (
+        org.total_balance == 51100
+    ), f"Expected total balance to be 51100, got {org.total_balance}"
+    assert (
+        org.reserved_balance == 10
+    ), f"Expected reserved balance to be 10, got {org.reserved_balance}"
+
+
+@pytest.mark.anyio
+async def test_search_organizations_with_valid_query(organizations_repo, add_models):
+    # Add a test organization with address
+    test_org = Organization(
+        organization_id=100,
+        name="Test Company",
+        operating_name="Test Co.",
+        org_address=OrganizationAddress(
+            street_address="123 Test St",
+            city="Test City",
+            province_state="Test Province",
+            country="Test Country",
+            postalCode_zipCode="T3ST 1Z3",
+        ),
+    )
+    await add_models([test_org])
+
+    # Perform the search
+    results = await organizations_repo.search_organizations_by_name("Test")
+
+    # Assert that the results are as expected
+    assert len(results) == 1, f"Expected 1 result, got {len(results)}"
+    assert (
+        results[0].organization_id == 100
+    ), f"Expected organization_id 100, got {results[0].organization_id}"
+    assert (
+        results[0].name == "Test Company"
+    ), f"Expected name 'Test Company', got {results[0].name}"
+    assert results[0].org_address is not None, "Expected org_address to be present"
+
+
+@pytest.mark.anyio
+async def test_search_organizations_with_empty_query(organizations_repo):
+    # Perform the search with an empty query
+    results = await organizations_repo.search_organizations_by_name("")
+
+    # Assert that all results are returned
+    assert len(results) == 3, f"Expected 3 results for empty query, got {len(results)}"
+
+
+@pytest.mark.anyio
+async def test_search_organizations_no_results(organizations_repo):
+    # Perform the search with a query that should not match any organizations
+    results = await organizations_repo.search_organizations_by_name(
+        "NonexistentCompany"
+    )
+
+    # Assert that no results are returned
+    assert len(results) == 0, f"Expected 0 results, got {len(results)}"
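+
+# The search behaviour these tests assume is a case-insensitive substring
+# match, e.g. (a sketch, not necessarily the repository's actual query):
+#
+#     from sqlalchemy import select
+#     stmt = select(Organization).where(Organization.name.ilike(f"%{query}%"))
+#
+# An empty query degenerates to "%%", which matches every row; that is why the
+# empty-query test expects all three seeded organizations, and why "camelcase"
+# and "CAMELCASE" both match "CamelCase Company" in the tests below.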
+
+
+@pytest.mark.anyio
+async def test_search_organizations_multiple_results(organizations_repo, add_models):
+    # Add multiple test organizations
+    test_orgs = [
+        Organization(
+            organization_id=101, name="Test Company A", operating_name="Test Co. A"
+        ),
+        Organization(
+            organization_id=102, name="Test Company B", operating_name="Test Co. B"
+        ),
+        Organization(
+            organization_id=103, name="Another Company", operating_name="Another Co."
+        ),
+    ]
+    await add_models(test_orgs)
+
+    # Perform the search
+    results = await organizations_repo.search_organizations_by_name("Test")
+
+    # Assert that the correct number of results are returned
+    assert len(results) == 2, f"Expected 2 results, got {len(results)}"
+    assert set(org.name for org in results) == {
+        "Test Company A",
+        "Test Company B",
+    }, "Unexpected organization names in results"
+
+
+@pytest.mark.anyio
+async def test_search_organizations_case_insensitive(organizations_repo, add_models):
+    # Add a test organization
+    test_org = Organization(
+        organization_id=104, name="CamelCase Company", operating_name="CamelCo"
+    )
+    await add_models([test_org])
+
+    # Perform case-insensitive searches
+    results_lower = await organizations_repo.search_organizations_by_name("camelcase")
+    results_upper = await organizations_repo.search_organizations_by_name("CAMELCASE")
+
+    # Assert that both searches return the same result
+    assert (
+        len(results_lower) == 1 and len(results_upper) == 1
+    ), "Expected 1 result for both case variations"
+    assert (
+        results_lower[0].organization_id == results_upper[0].organization_id == 104
+    ), "Expected matching organization_id for both case variations"
diff --git a/backend/lcfs/tests/other_uses/__init__.py b/backend/lcfs/tests/other_uses/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/backend/lcfs/tests/other_uses/conftest.py b/backend/lcfs/tests/other_uses/conftest.py
new file mode 100644
index 000000000..cc418a7d0
--- /dev/null
+++ b/backend/lcfs/tests/other_uses/conftest.py
@@ -0,0 +1,53 @@
+from unittest.mock import MagicMock
+
+from lcfs.db.base import UserTypeEnum, ActionTypeEnum
+from lcfs.db.models import OtherUses
+from lcfs.web.api.other_uses.schema import OtherUsesCreateSchema
+
+
+def create_mock_entity(overrides: dict):
+    mock_entity = MagicMock(spec=OtherUses)
+    mock_entity.other_uses_id = 1
+    mock_entity.compliance_report_id = 1
+    mock_entity.fuel_type.fuel_type = "Gasoline"
+    mock_entity.fuel_category.category = "Petroleum-based"
+    mock_entity.expected_use.name = "Transportation"
+    mock_entity.units = "L"
+    mock_entity.rationale = "Test rationale"
+    mock_entity.group_uuid = "test-group-uuid"
+    mock_entity.version = 1
+    mock_entity.quantity_supplied = 1000
+    mock_entity.action_type = ActionTypeEnum.CREATE
+    mock_entity.user_type = UserTypeEnum.SUPPLIER
+
+    # Apply overrides
+    if overrides:
+        for key, value in overrides.items():
+            setattr(mock_entity, key, value)
+
+    return mock_entity
+
+
+def create_mock_schema(overrides: dict):
+    mock_schema = OtherUsesCreateSchema(
+        compliance_report_id=1,
+        quantity_supplied=1000,
+        fuel_type="Gasoline",
+        fuel_category="Petroleum-based",
+        expected_use="Transportation",
+        units="L",
+        rationale="Test rationale",
+        provision_of_the_act="Provision A",
+        fuel_code="FuelCode123",
+        group_uuid="test-group-uuid",
+        version=1,
+        user_type="Supplier",
+        action_type="Create",
+    )
+
+    # Apply overrides
+    if overrides:
+        for key, value in overrides.items():
+            setattr(mock_schema, key, value)
+
+    return mock_schema
diff --git a/backend/lcfs/tests/other_uses/test_other_uses_repo.py b/backend/lcfs/tests/other_uses/test_other_uses_repo.py
new file mode 100644
index 000000000..67ea7d1d5
--- /dev/null
+++ b/backend/lcfs/tests/other_uses/test_other_uses_repo.py
@@ -0,0 +1,269 @@
+import pytest
+from unittest.mock import AsyncMock, MagicMock, Mock
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from lcfs.db.base import UserTypeEnum
+from lcfs.db.models.compliance import OtherUses
+from lcfs.db.models.fuel import ProvisionOfTheAct, FuelCode +from lcfs.web.api.other_uses.repo import OtherUsesRepository +from lcfs.web.api.other_uses.schema import OtherUsesSchema +from lcfs.tests.other_uses.conftest import create_mock_entity + + +@pytest.fixture +def mock_query_result(): + # Setup mock for database query result chain + mock_result = AsyncMock() + mock_result.unique = MagicMock(return_value=mock_result) + mock_result.scalars = MagicMock(return_value=mock_result) + mock_result.all = MagicMock(return_value=[MagicMock(spec=OtherUses)]) + return mock_result + + +@pytest.fixture +def mock_db_session(mock_query_result): + session = MagicMock(spec=AsyncSession) + session.execute = AsyncMock(return_value=mock_query_result) + return session + + +@pytest.fixture +def other_uses_repo(mock_db_session): + repo = OtherUsesRepository(db=mock_db_session) + repo.fuel_code_repo = MagicMock() + repo.fuel_code_repo.get_fuel_categories = AsyncMock(return_value=[]) + repo.fuel_code_repo.get_fuel_types = AsyncMock(return_value=[]) + repo.fuel_code_repo.get_expected_use_types = AsyncMock(return_value=[]) + + # Mock for local get_formatted_fuel_types method + async def mock_get_formatted_fuel_types(): + mock_result = await mock_db_session.execute(AsyncMock()) + return mock_result.unique().scalars().all() + + repo.get_formatted_fuel_types = AsyncMock(side_effect=mock_get_formatted_fuel_types) + + return repo + + +@pytest.mark.anyio +async def test_get_table_options(other_uses_repo): + # Mock the database session + mock_db_session = AsyncMock() + + # Mock the return value of the `execute` calls + mock_fuel_categories = [ + MagicMock(fuel_category_id=1, category="Petroleum-based"), + ] + mock_fuel_types = [ + MagicMock( + fuel_type_id=1, + fuel_type="Gasoline", + default_carbon_intensity=12.34, + units="L", + unrecognized=False, + fuel_instances=[ + MagicMock( + fuel_category=MagicMock( + fuel_category_id=1, category="Petroleum-based" + ) + ) + ], + fuel_codes=[ + MagicMock(fuel_code_id=1, fuel_code="FC123", carbon_intensity=10.5) + ], + provision_1=MagicMock(provision_of_the_act_id=1, name="Provision A"), + provision_2=None, + ) + ] + mock_expected_uses = [MagicMock(expected_use_id=1, name="Transportation")] + mock_provisions_of_the_act = [ + MagicMock(provision_of_the_act_id=1, name="Provision A") + ] + mock_fuel_codes = [MagicMock(fuel_code_id=1, fuel_code="FuelCode123")] + + def mock_execute_side_effect(*args, **kwargs): + query = args[0] # Extract the query object + + # Match the ProvisionOfTheAct table + if ProvisionOfTheAct.__tablename__ in str(query): + return AsyncMock(scalars=MagicMock(all=lambda: mock_provisions_of_the_act)) + + # Match the FuelCode table + elif FuelCode.__tablename__ in str(query): + return AsyncMock(scalars=MagicMock(all=lambda: mock_fuel_codes)) + + # If no match, raise an informative error + raise ValueError(f"Unexpected query: {query}") + + mock_db_session.execute.side_effect = mock_execute_side_effect + + # Create async mock for the fuel_code_repo's methods + mock_fuel_code_repo = AsyncMock() + mock_fuel_code_repo.get_fuel_categories = AsyncMock( + return_value=mock_fuel_categories + ) + mock_fuel_code_repo.get_formatted_fuel_types = AsyncMock( + return_value=mock_fuel_types + ) + mock_fuel_code_repo.get_expected_use_types = AsyncMock( + return_value=mock_expected_uses + ) + + # Mock the repository's fuel_code_repo dependency + other_uses_repo.fuel_code_repo = mock_fuel_code_repo + other_uses_repo.db = ( + mock_db_session # Assign the mock database session to the 
repository + ) + + # Execute the method under test + result = await other_uses_repo.get_table_options() + + # Assertions + assert isinstance(result, dict) + assert "fuel_categories" in result + assert "fuel_types" in result + assert "units_of_measure" in result + assert "expected_uses" in result + assert "provisions_of_the_act" in result + assert "fuel_codes" in result + + +@pytest.mark.anyio +async def test_get_other_uses(other_uses_repo, mock_db_session): + compliance_report_id = 1 + mock_compliance_report_uuid = "mock_group_uuid" + + # Mock the first db.execute call for fetching compliance report group UUID + mock_first_execute = MagicMock() + mock_first_execute.scalar.return_value = mock_compliance_report_uuid + + # Mock related entities for other uses + mock_fuel_type = MagicMock() + mock_fuel_type.fuel_type = "Gasoline" + + mock_fuel_category = MagicMock() + mock_fuel_category.category = "Petroleum-based" + + mock_expected_use = MagicMock() + mock_expected_use.name = "Transportation" + + mock_provision_of_the_act = MagicMock() + mock_provision_of_the_act.name = "Provision A" + + mock_fuel_code = MagicMock() + mock_fuel_code.fuel_code = "FuelCode123" + + # Mock an instance of OtherUses + mock_other_use = MagicMock() + mock_other_use.other_uses_id = 1 + mock_other_use.compliance_report_id = compliance_report_id + mock_other_use.quantity_supplied = 1000 + mock_other_use.fuel_type = mock_fuel_type + mock_other_use.fuel_category = mock_fuel_category + mock_other_use.expected_use = mock_expected_use + mock_other_use.provision_of_the_act = mock_provision_of_the_act + mock_other_use.fuel_code = mock_fuel_code + mock_other_use.units = "L" + mock_other_use.ci_of_fuel = 10.5 + mock_other_use.rationale = "Test rationale" + mock_other_use.group_uuid = mock_compliance_report_uuid + mock_other_use.version = 1 + mock_other_use.user_type = "Supplier" + mock_other_use.action_type = "Create" + + mock_result_other_uses = [mock_other_use] + + # Mock the second db.execute call for fetching other uses + mock_second_execute = MagicMock() + mock_second_execute.unique.return_value.scalars.return_value.all.return_value = ( + mock_result_other_uses + ) + + # Assign side effects to return these mocked execute calls in sequence + mock_db_session.execute = AsyncMock( + side_effect=[mock_first_execute, mock_second_execute] + ) + + # Call the repository method + result = await other_uses_repo.get_other_uses(compliance_report_id) + + # Assertions + assert isinstance(result, list) + assert len(result) == 1 + assert result[0].fuel_type == "Gasoline" + assert result[0].fuel_category == "Petroleum-based" + assert result[0].expected_use == "Transportation" + assert result[0].provision_of_the_act == "Provision A" + assert result[0].fuel_code == "FuelCode123" + assert result[0].units == "L" + assert result[0].ci_of_fuel == 10.5 + assert result[0].rationale == "Test rationale" + + +@pytest.mark.anyio +async def test_get_latest_other_uses_by_group_uuid(other_uses_repo, mock_db_session): + group_uuid = "test-group-uuid" + mock_other_use_gov = MagicMock(spec=OtherUses) + mock_other_use_gov.user_type = UserTypeEnum.GOVERNMENT + mock_other_use_gov.version = 2 + + # Setup mock result chain + mock_result = AsyncMock() + mock_result.unique = MagicMock(return_value=mock_result) + mock_result.scalars = MagicMock(return_value=mock_result) + mock_result.first = MagicMock(return_value=mock_other_use_gov) + + # Configure mock db session + mock_db_session.execute = AsyncMock(return_value=mock_result) + other_uses_repo.db = mock_db_session 
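+    # Note on the mock chain above: the async `execute()` call is awaited, so it
+    # is mocked with an AsyncMock, but the Result object it returns exposes plain
+    # synchronous methods (`unique()`, `scalars()`, `first()`). That is why those
+    # are MagicMocks returning the same object, keeping the fluent chain intact.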
+ + result = await other_uses_repo.get_latest_other_uses_by_group_uuid(group_uuid) + + assert result.user_type == UserTypeEnum.GOVERNMENT + assert result.version == 2 + +@pytest.mark.anyio +async def test_get_other_use_version_by_user(other_uses_repo, mock_db_session): + group_uuid = "test-group-uuid" + version = 2 + user_type = UserTypeEnum.SUPPLIER + + mock_other_use = MagicMock(spec=OtherUses) + mock_other_use.group_uuid = group_uuid + mock_other_use.version = version + mock_other_use.user_type = user_type + + # Set up mock result chain + mock_result = AsyncMock() + mock_result.scalars = MagicMock(return_value=mock_result) + mock_result.first = MagicMock(return_value=mock_other_use) + + # Configure mock db session + mock_db_session.execute = AsyncMock(return_value=mock_result) + other_uses_repo.db = mock_db_session + + result = await other_uses_repo.get_other_use_version_by_user( + group_uuid, version, user_type + ) + + assert result.group_uuid == group_uuid + assert result.version == version + assert result.user_type == user_type + + +@pytest.mark.anyio +async def test_update_other_use(other_uses_repo, mock_db_session): + updated_other_use = create_mock_entity({}) + updated_other_use.quantity_supplied = 2000 + updated_other_use.fuel_type.fuel_type = "Diesel" + updated_other_use.rationale = "Updated rationale" + + mock_db_session.flush = AsyncMock() + mock_db_session.refresh = AsyncMock() + mock_db_session.merge.return_value = updated_other_use + + result = await other_uses_repo.update_other_use(updated_other_use) + + # Assertions + assert isinstance(result, OtherUses) + assert mock_db_session.flush.call_count == 1 diff --git a/backend/lcfs/tests/other_uses/test_other_uses_services.py b/backend/lcfs/tests/other_uses/test_other_uses_services.py new file mode 100644 index 000000000..86794426a --- /dev/null +++ b/backend/lcfs/tests/other_uses/test_other_uses_services.py @@ -0,0 +1,245 @@ +from unittest.mock import MagicMock, AsyncMock + +import pytest + +from lcfs.db.base import ActionTypeEnum +from lcfs.db.base import UserTypeEnum +from lcfs.web.api.other_uses.repo import OtherUsesRepository +from lcfs.web.api.other_uses.schema import OtherUsesSchema +from lcfs.web.api.other_uses.schema import ( + OtherUsesTableOptionsSchema, +) +from lcfs.web.api.other_uses.services import OtherUsesServices +from lcfs.web.exception.exceptions import ServiceException +from lcfs.tests.other_uses.conftest import create_mock_schema, create_mock_entity + + +@pytest.fixture +def other_uses_service(): + mock_repo = MagicMock(spec=OtherUsesRepository) + mock_fuel_repo = MagicMock() + service = OtherUsesServices(repo=mock_repo, fuel_repo=mock_fuel_repo) + return service, mock_repo, mock_fuel_repo + + +@pytest.mark.anyio +async def test_get_table_options(other_uses_service): + service, mock_repo, _ = other_uses_service + mock_repo.get_table_options = AsyncMock( + return_value={ + "fuel_categories": [], + "fuel_types": [], + "units_of_measure": [], + "expected_uses": [], + "provisions_of_the_act": [], + "fuel_codes": [], + } + ) + + response = await service.get_table_options() + + assert isinstance(response, OtherUsesTableOptionsSchema) + mock_repo.get_table_options.assert_awaited_once() + + +@pytest.mark.anyio +async def test_create_other_use(other_uses_service): + service, mock_repo, mock_fuel_repo = other_uses_service + + # Mock the schema data + other_use_data = create_mock_schema({}) + + # Mock related entities for other uses with actual string attributes + mock_fuel_type = create_mock_entity({}) + 
mock_fuel_type.fuel_type = "Gasoline" + + mock_fuel_category = MagicMock() + mock_fuel_category.category = "Petroleum-based" + + mock_expected_use = MagicMock() + mock_expected_use.name = "Transportation" + + mock_provision_of_the_act = MagicMock() + mock_provision_of_the_act.name = "Provision A" + + mock_fuel_code = MagicMock() + mock_fuel_code.fuel_code = "FuelCode123" + + # Mock fuel repository methods + mock_fuel_repo.get_fuel_category_by = AsyncMock( + return_value=mock_fuel_category) + mock_fuel_repo.get_fuel_type_by_name = AsyncMock( + return_value=mock_fuel_type) + mock_fuel_repo.get_expected_use_type_by_name = AsyncMock( + return_value=mock_expected_use) + mock_fuel_repo.get_provision_of_the_act_by_name = AsyncMock( + return_value=mock_provision_of_the_act) + mock_fuel_repo.get_fuel_code_by_name = AsyncMock( + return_value=mock_fuel_code) + + # Create a mock for the created other use + mock_created_use = create_mock_entity({}) + mock_created_use.other_uses_id = 1 + mock_created_use.compliance_report_id = 1 + mock_created_use.quantity_supplied = 1000 + mock_created_use.fuel_type = mock_fuel_type + mock_created_use.fuel_category = mock_fuel_category + mock_created_use.expected_use = mock_expected_use + mock_created_use.provision_of_the_act = mock_provision_of_the_act + mock_created_use.fuel_code = mock_fuel_code + mock_created_use.units = "L" + mock_created_use.ci_of_fuel = 10.5 + mock_created_use.rationale = "Test rationale" + mock_created_use.group_uuid = "test-group-uuid" + mock_created_use.version = 1 + mock_created_use.user_type = "Supplier" + mock_created_use.action_type = "Create" + + mock_repo.create_other_use = AsyncMock(return_value=mock_created_use) + + response = await service.create_other_use(other_use_data, UserTypeEnum.SUPPLIER) + + # Assertions + assert isinstance(response, OtherUsesSchema) + assert response.fuel_type == "Gasoline" + assert response.fuel_category == "Petroleum-based" + assert response.expected_use == "Transportation" + assert response.fuel_code == "FuelCode123" + + mock_repo.create_other_use.assert_awaited_once() + + +@pytest.mark.anyio +async def test_update_other_use(other_uses_service): + service, mock_repo, mock_fuel_repo = other_uses_service + + # Create test data with OtherUsesCreateSchema + other_use_data = create_mock_schema( + {"quantity_supplied": 4444, "rationale": "Updated rationale"} + ) + mock_existing_use = create_mock_entity({}) + + # Configure repository methods to return these mocked objects + mock_repo.get_other_use_version_by_user = AsyncMock( + return_value=mock_existing_use) + + # Mock related entities with proper string attributes + mock_fuel_type = MagicMock() + mock_fuel_type.fuel_type = "Diesel" + + mock_fuel_category = MagicMock() + mock_fuel_category.category = "Petroleum-based" + + mock_expected_use = MagicMock() + mock_expected_use.name = "Transportation" + + mock_provision_of_the_act = MagicMock() + mock_provision_of_the_act.name = "Provision B" + + mock_fuel_code = MagicMock() + mock_fuel_code.fuel_code = "NewFuelCode" + + # Mock fuel repository methods + mock_fuel_repo.get_fuel_type_by_name = AsyncMock( + return_value=mock_fuel_type) + mock_fuel_repo.get_fuel_category_by = AsyncMock( + return_value=mock_fuel_category) + mock_fuel_repo.get_expected_use_type_by_name = AsyncMock( + return_value=mock_expected_use) + mock_fuel_repo.get_provision_of_the_act_by_name = AsyncMock( + return_value=mock_provision_of_the_act + ) + mock_fuel_repo.get_fuel_code_by_name = AsyncMock( + return_value=mock_fuel_code) + + # Mock the 
updated use returned after the update + mock_updated_use = MagicMock() + mock_updated_use.other_uses_id = 1 + mock_updated_use.compliance_report_id = 1 + mock_updated_use.quantity_supplied = 2222 + mock_updated_use.rationale = "Updated rationale" + mock_updated_use.units = "L" + mock_updated_use.fuel_type = mock_fuel_type + mock_updated_use.fuel_category = mock_fuel_category + mock_updated_use.expected_use = mock_expected_use + mock_updated_use.provision_of_the_act = mock_provision_of_the_act + mock_updated_use.fuel_code = mock_fuel_code + mock_updated_use.ci_of_fuel = 15.5 + mock_updated_use.group_uuid = "test-group-uuid" + mock_updated_use.version = 2 + mock_updated_use.user_type = "Supplier" + mock_updated_use.action_type = ActionTypeEnum.UPDATE + + # Set the return value for update_other_use + mock_repo.update_other_use = AsyncMock(return_value=mock_updated_use) + + # Execute the update function and capture the response + response = await service.update_other_use(other_use_data, UserTypeEnum.SUPPLIER) + + # Assertions + assert isinstance(response, OtherUsesSchema) + assert response.action_type == ActionTypeEnum.UPDATE.value + assert response.quantity_supplied == 2222 + assert response.rationale == "Updated rationale" + assert response.fuel_type == "Diesel" + assert response.fuel_category == "Petroleum-based" + assert response.expected_use == "Transportation" + assert response.provision_of_the_act == "Provision B" + assert response.fuel_code == "NewFuelCode" + + # Check that the update method was called + mock_repo.update_other_use.assert_awaited_once() + + +@pytest.mark.anyio +async def test_update_other_use_not_found(other_uses_service): + service, mock_repo, _ = other_uses_service + other_use_data = create_mock_schema({}) + + mock_repo.get_other_use_version_by_user = AsyncMock(return_value=None) + + with pytest.raises(ValueError, match="Other use not found"): + await service.update_other_use(other_use_data, UserTypeEnum.SUPPLIER) + + +@pytest.mark.anyio +async def test_delete_other_use(other_uses_service): + service, mock_repo, _ = other_uses_service + other_use_data = create_mock_schema({}) + + # Mock the existing other use with a "CREATE" action type + mock_existing_use = MagicMock() + mock_existing_use.group_uuid = "test-group-uuid" + mock_existing_use.version = 1 + mock_existing_use.action_type = ActionTypeEnum.CREATE + + # Set up the mock __table__.columns.keys() to return field names as a list + mock_existing_use.__table__ = MagicMock() + mock_existing_use.__table__.columns.keys.return_value = [ + "other_uses_id", + "compliance_report_id", + "quantity_supplied", + "fuel_type", + "fuel_category", + "expected_use", + "units", + "rationale", + "deleted", + "group_uuid", + "user_type", + "version", + "action_type", + ] + + # Mock repository methods + mock_repo.get_latest_other_uses_by_group_uuid = AsyncMock( + return_value=mock_existing_use + ) + mock_repo.create_other_use = AsyncMock(return_value=mock_existing_use) + + # Call the delete service + response = await service.delete_other_use(other_use_data, UserTypeEnum.SUPPLIER) + + # Assertions + assert response.message == "Marked as deleted." 
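+    # The delete path is expected to be non-destructive: rather than removing the
+    # row, the service appends a new version flagged as deleted via
+    # create_other_use, which is why the assertion below checks the create method
+    # instead of a repository delete call.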
+ mock_repo.create_other_use.assert_awaited_once() diff --git a/backend/lcfs/tests/other_uses/test_other_uses_validation.py b/backend/lcfs/tests/other_uses/test_other_uses_validation.py new file mode 100644 index 000000000..9be8327d9 --- /dev/null +++ b/backend/lcfs/tests/other_uses/test_other_uses_validation.py @@ -0,0 +1,54 @@ +import pytest +from unittest.mock import MagicMock +from fastapi import HTTPException, Request + +from lcfs.web.api.other_uses.schema import OtherUsesCreateSchema +from lcfs.web.api.other_uses.validation import OtherUsesValidation + +@pytest.fixture +def other_uses_validation(): + request = MagicMock(spec=Request) + validation = OtherUsesValidation(request=request) + return validation + +@pytest.mark.anyio +async def test_validate_compliance_report_id_success(other_uses_validation): + validation = other_uses_validation + compliance_report_id = 1 + other_uses_data = [ + OtherUsesCreateSchema( + compliance_report_id=compliance_report_id, + quantity_supplied=1000, + fuel_type="Gasoline", + fuel_category="Petroleum-based", + expected_use="Transportation", + units="L", + rationale="Test rationale", + provision_of_the_act="Provision A", + ) + ] + + await validation.validate_compliance_report_id(compliance_report_id, other_uses_data) + +@pytest.mark.anyio +async def test_validate_compliance_report_id_failure(other_uses_validation): + validation = other_uses_validation + compliance_report_id = 1 + other_uses_data = [ + OtherUsesCreateSchema( + compliance_report_id=2, # Different from the passed compliance_report_id + quantity_supplied=1000, + fuel_type="Gasoline", + fuel_category="Petroleum-based", + expected_use="Transportation", + units="L", + rationale="Test rationale", + provision_of_the_act="Provision A", + ) + ] + + with pytest.raises(HTTPException) as exc_info: + await validation.validate_compliance_report_id(compliance_report_id, other_uses_data) + + assert exc_info.value.status_code == 400 + assert "Mismatch compliance_report_id" in str(exc_info.value.detail) \ No newline at end of file diff --git a/backend/lcfs/tests/other_uses/test_other_uses_view.py b/backend/lcfs/tests/other_uses/test_other_uses_view.py new file mode 100644 index 000000000..3de83dee8 --- /dev/null +++ b/backend/lcfs/tests/other_uses/test_other_uses_view.py @@ -0,0 +1,245 @@ +import pytest +from fastapi import FastAPI +from httpx import AsyncClient +from unittest.mock import MagicMock, AsyncMock, patch + +from lcfs.db.base import UserTypeEnum, ActionTypeEnum +from lcfs.db.models.user.Role import RoleEnum +from lcfs.web.api.base import ComplianceReportRequestSchema +from lcfs.web.api.other_uses.schema import PaginatedOtherUsesRequestSchema, OtherUsesSchema +from lcfs.web.api.other_uses.services import OtherUsesServices +from lcfs.web.api.other_uses.validation import OtherUsesValidation +from lcfs.tests.other_uses.conftest import create_mock_schema, create_mock_entity + + +@pytest.fixture +def mock_other_uses_service(): + return MagicMock(spec=OtherUsesServices) + + +@pytest.fixture +def mock_other_uses_validation(): + validation = MagicMock(spec=OtherUsesValidation) + validation.validate_organization_access = AsyncMock() + validation.validate_compliance_report_id = AsyncMock() + return validation + + +@pytest.mark.anyio +async def test_get_table_options( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, + mock_other_uses_service, +): + set_mock_user(fastapi_app, [RoleEnum.SUPPLIER]) + url = fastapi_app.url_path_for("get_table_options") + + 
mock_other_uses_service.get_table_options.return_value = { + "allocationTransactionTypes": [], + "fuelCategories": [], + "fuelCodes": [], + "fuelTypes": [], + "unitsOfMeasure": [], + } + + fastapi_app.dependency_overrides[OtherUsesServices] = ( + lambda: mock_other_uses_service + ) + + response = await client.get(url) + + assert response.status_code == 200 + data = response.json() + assert "allocationTransactionTypes" in data + assert "fuelCodes" in data + assert "fuelTypes" in data + assert "unitsOfMeasure" in data + + +@pytest.mark.anyio +async def test_get_other_uses( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, + mock_other_uses_service, +): + with patch( + "lcfs.web.api.other_uses.views.ComplianceReportValidation.validate_organization_access" + ) as mock_validate_organization_access: + set_mock_user(fastapi_app, [RoleEnum.SUPPLIER]) + url = fastapi_app.url_path_for("get_other_uses") + payload = ComplianceReportRequestSchema(compliance_report_id=1).model_dump() + + mock_validate_organization_access.return_value = True + mock_other_uses_service.get_other_uses.return_value = {"otherUses": []} + + fastapi_app.dependency_overrides[OtherUsesServices] = ( + lambda: mock_other_uses_service + ) + + response = await client.post(url, json=payload) + + assert response.status_code == 200 + data = response.json() + assert "otherUses" in data + + +@pytest.mark.anyio +async def test_get_other_uses_paginated( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, + mock_other_uses_service, +): + with patch( + "lcfs.web.api.other_uses.views.ComplianceReportValidation.validate_organization_access" + ) as mock_validate_organization_access: + set_mock_user(fastapi_app, [RoleEnum.SUPPLIER]) + url = fastapi_app.url_path_for("get_other_uses_paginated") + payload = PaginatedOtherUsesRequestSchema( + compliance_report_id=1, + page=1, + size=10, + sort_orders=[], + filters=[], + ).model_dump() + + mock_validate_organization_access.return_value = True + mock_other_uses_service.get_other_uses_paginated.return_value = { + "pagination": {"total": 0, "page": 1, "size": 10, "totalPages": 1}, + "otherUses": [], + } + + fastapi_app.dependency_overrides[OtherUsesServices] = ( + lambda: mock_other_uses_service + ) + + response = await client.post(url, json=payload) + + assert response.status_code == 200 + data = response.json() + assert "pagination" in data + assert "otherUses" in data + + +@pytest.mark.anyio +async def test_save_other_uses_row_create( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, + mock_other_uses_service, + mock_other_uses_validation, +): + with patch( + "lcfs.web.api.other_uses.views.ComplianceReportValidation.validate_organization_access" + ) as mock_validate_organization_access: + set_mock_user(fastapi_app, [RoleEnum.SUPPLIER]) + url = fastapi_app.url_path_for("save_other_uses_row") + payload = create_mock_schema({}).model_dump() + + # Mock the service method to return a valid schema object + mock_other_uses_service.create_other_use.return_value = OtherUsesSchema( + **payload + ) + + mock_validate_organization_access.return_value = True + + fastapi_app.dependency_overrides[OtherUsesServices] = ( + lambda: mock_other_uses_service + ) + fastapi_app.dependency_overrides[OtherUsesValidation] = ( + lambda: mock_other_uses_validation + ) + + response = await client.post(url, json=payload) + + assert response.status_code == 200 + data = response.json() + assert "otherUsesId" in data + assert data["quantitySupplied"] == 1000 + assert data["fuelType"] == 
"Gasoline" + + +@pytest.mark.anyio +async def test_save_other_uses_row_update( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, + mock_other_uses_service, + mock_other_uses_validation, +): + with patch( + "lcfs.web.api.other_uses.views.ComplianceReportValidation.validate_organization_access" + ) as mock_validate_organization_access: + set_mock_user(fastapi_app, [RoleEnum.SUPPLIER]) + url = fastapi_app.url_path_for("save_other_uses_row") + payload = create_mock_schema( + { + "other_uses_id": 1, + "quantity_supplied": 2000, + "rationale": "Updated rationale", + } + ).model_dump() + + mock_other_uses_service.update_other_use.return_value = payload + mock_validate_organization_access.return_value = True + + fastapi_app.dependency_overrides[OtherUsesServices] = ( + lambda: mock_other_uses_service + ) + fastapi_app.dependency_overrides[OtherUsesValidation] = ( + lambda: mock_other_uses_validation + ) + + response = await client.post(url, json=payload) + + assert response.status_code == 200 + data = response.json() + assert data["otherUsesId"] == 1 + assert data["quantitySupplied"] == 2000 + assert data["fuelType"] == "Gasoline" + + +@pytest.mark.anyio +async def test_save_other_uses_row_delete( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, + mock_other_uses_service, + mock_other_uses_validation, +): + with patch( + "lcfs.web.api.other_uses.views.ComplianceReportValidation.validate_organization_access" + ) as mock_validate_organization_access: + set_mock_user(fastapi_app, [RoleEnum.SUPPLIER]) + url = fastapi_app.url_path_for("save_other_uses_row") + mock_schema = create_mock_schema( + { + "other_uses_id": 1, + "deleted": True, + } + ) + + mock_other_uses_service.delete_other_use.return_value = None + mock_validate_organization_access.return_value = True + mock_other_uses_validation.validate_compliance_report_id.return_value = None + + fastapi_app.dependency_overrides[OtherUsesServices] = ( + lambda: mock_other_uses_service + ) + fastapi_app.dependency_overrides[OtherUsesValidation] = ( + lambda: mock_other_uses_validation + ) + + response = await client.post(url, json=mock_schema.model_dump()) + + assert response.status_code == 200 + data = response.json() + assert data == {"message": "Other use deleted successfully"} + + mock_other_uses_service.delete_other_use.assert_called_once_with( + mock_schema, UserTypeEnum.SUPPLIER + ) + mock_validate_organization_access.assert_called_once_with(1) + mock_other_uses_validation.validate_compliance_report_id.assert_called_once() diff --git a/backend/lcfs/tests/services/rabbitmq/test_report_consumer.py b/backend/lcfs/tests/services/rabbitmq/test_report_consumer.py new file mode 100644 index 000000000..838c9fe0c --- /dev/null +++ b/backend/lcfs/tests/services/rabbitmq/test_report_consumer.py @@ -0,0 +1,218 @@ +import json +from contextlib import ExitStack +from unittest.mock import AsyncMock, patch, MagicMock + +import pytest +from pandas.io.formats.format import return_docstring + +from lcfs.db.models.transaction.Transaction import TransactionActionEnum, Transaction +from lcfs.services.rabbitmq.report_consumer import ( + ReportConsumer, +) +from lcfs.tests.fuel_export.conftest import mock_compliance_report_repo +from lcfs.web.api.compliance_report.schema import ComplianceReportCreateSchema + + +@pytest.fixture +def mock_app(): + """Fixture to provide a mocked FastAPI app.""" + return MagicMock() + + +@pytest.fixture +def mock_redis(): + """Fixture to mock Redis client.""" + return AsyncMock() + + +@pytest.fixture +def 
mock_session(): + # Create a mock session that behaves like an async context manager. + # Specifying `spec=AsyncSession` helps ensure it behaves like the real class. + from sqlalchemy.ext.asyncio import AsyncSession + + mock_session = AsyncMock(spec=AsyncSession) + + # `async with mock_session:` should work, so we define what happens on enter/exit + mock_session.__aenter__.return_value = mock_session + mock_session.__aexit__.return_value = None + + # Now mock the transaction context manager returned by `session.begin()` + mock_transaction = AsyncMock() + mock_transaction.__aenter__.return_value = mock_transaction + mock_transaction.__aexit__.return_value = None + mock_session.begin.return_value = mock_transaction + + return mock_session + + +@pytest.fixture +def mock_repositories(): + """Fixture to mock all repositories and services.""" + + mock_compliance_report_repo = MagicMock() + mock_compliance_report_repo.get_compliance_report_by_legacy_id = AsyncMock( + return_value=MagicMock() + ) + mock_compliance_report_repo.get_compliance_report_status_by_desc = AsyncMock( + return_value=MagicMock() + ) + mock_compliance_report_repo.add_compliance_report_history = AsyncMock() + + org_service = MagicMock() + org_service.adjust_balance = AsyncMock() + + mock_transaction_repo = MagicMock() + mock_transaction_repo.get_transaction_by_id = AsyncMock( + return_value=MagicMock( + spec=Transaction, transaction_action=TransactionActionEnum.Reserved + ) + ) + + return { + "compliance_report_repo": mock_compliance_report_repo, + "transaction_repo": mock_transaction_repo, + "user_repo": AsyncMock(), + "org_service": org_service, + "compliance_service": AsyncMock(), + } + + +@pytest.fixture +def setup_patches(mock_redis, mock_session, mock_repositories): + """Fixture to apply patches for dependencies.""" + with ExitStack() as stack: + stack.enter_context( + patch("redis.asyncio.Redis.from_url", return_value=mock_redis) + ) + + stack.enter_context( + patch( + "lcfs.services.rabbitmq.report_consumer.AsyncSession", + return_value=mock_session, + ) + ) + stack.enter_context( + patch("lcfs.services.rabbitmq.report_consumer.async_engine", MagicMock()) + ) + + stack.enter_context( + patch( + "lcfs.services.rabbitmq.report_consumer.ComplianceReportRepository", + return_value=mock_repositories["compliance_report_repo"], + ) + ) + stack.enter_context( + patch( + "lcfs.services.rabbitmq.report_consumer.TransactionRepository", + return_value=mock_repositories["transaction_repo"], + ) + ) + stack.enter_context( + patch( + "lcfs.services.rabbitmq.report_consumer.UserRepository", + return_value=mock_repositories["user_repo"], + ) + ) + stack.enter_context( + patch( + "lcfs.services.rabbitmq.report_consumer.OrganizationsService", + return_value=mock_repositories["org_service"], + ) + ) + stack.enter_context( + patch( + "lcfs.services.rabbitmq.report_consumer.ComplianceReportServices", + return_value=mock_repositories["compliance_service"], + ) + ) + yield stack + + +@pytest.mark.anyio +async def test_process_message_created(mock_app, setup_patches, mock_repositories): + consumer = ReportConsumer(mock_app) + + # Prepare a sample message for "Created" action + message = { + "tfrs_id": 123, + "organization_id": 1, + "compliance_period": "2023", + "nickname": "Test Report", + "action": "Created", + "user_id": 42, + } + body = json.dumps(message).encode() + + # Ensure correct mock setup + mock_user = MagicMock() + mock_repositories["user_repo"].get_user_by_id.return_value = mock_user + + await consumer.process_message(body) + + # 
Assertions for "Created" action + mock_repositories[ + "compliance_service" + ].create_compliance_report.assert_called_once_with( + 1, # org_id + ComplianceReportCreateSchema( + legacy_id=123, + compliance_period="2023", + organization_id=1, + nickname="Test Report", + status="Draft", + ), + mock_user, + ) + + +@pytest.mark.anyio +async def test_process_message_submitted(mock_app, setup_patches, mock_repositories): + consumer = ReportConsumer(mock_app) + + # Prepare a sample message for "Submitted" action + message = { + "tfrs_id": 123, + "organization_id": 1, + "compliance_period": "2023", + "nickname": "Test Report", + "action": "Submitted", + "credits": 50, + "user_id": 42, + } + body = json.dumps(message).encode() + + await consumer.process_message(body) + + # Assertions for "Submitted" action + mock_repositories[ + "compliance_report_repo" + ].get_compliance_report_by_legacy_id.assert_called_once_with(123) + mock_repositories["org_service"].adjust_balance.assert_called_once_with( + TransactionActionEnum.Reserved, 50, 1 + ) + mock_repositories[ + "compliance_report_repo" + ].add_compliance_report_history.assert_called_once() + + +@pytest.mark.anyio +async def test_process_message_approved(mock_app, setup_patches, mock_repositories): + consumer = ReportConsumer(mock_app) + + # Prepare a sample message for "Approved" action + message = { + "tfrs_id": 123, + "organization_id": 1, + "action": "Approved", + "user_id": 42, + } + body = json.dumps(message).encode() + + await consumer.process_message(body) + + # Assertions for "Approved" action + mock_repositories[ + "compliance_report_repo" + ].get_compliance_report_by_legacy_id.assert_called_once_with(123) + mock_repositories["transaction_repo"].confirm_transaction.assert_called_once() diff --git a/backend/lcfs/tests/services/redis/test_redis.py b/backend/lcfs/tests/services/redis/test_redis.py new file mode 100644 index 000000000..af520e7b4 --- /dev/null +++ b/backend/lcfs/tests/services/redis/test_redis.py @@ -0,0 +1,64 @@ +import pytest +from unittest.mock import AsyncMock, patch +from fastapi import FastAPI +from redis.exceptions import RedisError +from lcfs.services.redis.lifetime import init_redis, shutdown_redis + + +@pytest.mark.anyio +async def test_init_redis_success(): + """ + Test Redis initialization succeeds and pings the client. + """ + app = FastAPI() + mock_redis = AsyncMock() + + with patch("lcfs.services.redis.lifetime.Redis", return_value=mock_redis): + # Mock Redis ping to simulate successful connection + mock_redis.ping.return_value = True + + await init_redis(app) + + assert app.state.redis_client is mock_redis + mock_redis.ping.assert_called_once() + mock_redis.close.assert_not_called() + + +@pytest.mark.anyio +async def test_init_redis_failure(): + """ + Test Redis initialization fails during connection. + """ + app = FastAPI() + + with patch( + "lcfs.services.redis.lifetime.Redis", + side_effect=RedisError("Connection failed"), + ): + with pytest.raises(RedisError, match="Connection failed"): + await init_redis(app) + + assert not hasattr(app.state, "redis_client") + + +@pytest.mark.anyio +async def test_shutdown_redis_success(): + """ + Test Redis client shutdown succeeds. + """ + app = FastAPI() + mock_redis = AsyncMock() + app.state.redis_client = mock_redis + + await shutdown_redis(app) + + mock_redis.close.assert_called_once() + + +@pytest.mark.anyio +async def test_shutdown_redis_no_client(): + """ + Test Redis shutdown when no client exists. 
+ """ + app = FastAPI() + await shutdown_redis(app) # Should not raise any exceptions diff --git a/backend/lcfs/tests/services/tfrs/test_redis_balance.py b/backend/lcfs/tests/services/tfrs/test_redis_balance.py new file mode 100644 index 000000000..39504a5f1 --- /dev/null +++ b/backend/lcfs/tests/services/tfrs/test_redis_balance.py @@ -0,0 +1,105 @@ +import pytest +from unittest.mock import AsyncMock, patch, MagicMock, call +from datetime import datetime + +from redis.asyncio import Redis + +from lcfs.services.tfrs.redis_balance import ( + init_org_balance_cache, + set_cache_value, + RedisBalanceService, +) + + +@pytest.mark.anyio +async def test_init_org_balance_cache(): + # Mock the Redis client + mock_redis = AsyncMock() + mock_redis.set = AsyncMock() + + # Patch Redis client creation + with patch("lcfs.services.tfrs.redis_balance.Redis", return_value=mock_redis): + # Mock the app object + mock_app = MagicMock() + mock_app.state.redis_client = mock_redis + + current_year = datetime.now().year + last_year = current_year - 1 + + # Mock repository methods + with patch( + "lcfs.web.api.organizations.repo.OrganizationsRepository.get_organizations", + return_value=[ + MagicMock(organization_id=1, name="Org1"), + MagicMock(organization_id=2, name="Org2"), + ], + ), patch( + "lcfs.web.api.transaction.repo.TransactionRepository.get_transaction_start_year", + return_value=last_year, + ), patch( + "lcfs.web.api.transaction.repo.TransactionRepository.calculate_available_balance_for_period", + side_effect=[100, 200, 150, 250], + ): + # Execute the function with the mocked app + await init_org_balance_cache(mock_app) + + # Define expected calls to Redis `set` + expected_calls = [ + call(name=f"balance_1_{last_year}", value=100), + call(name=f"balance_2_{last_year}", value=200), + call(name=f"balance_1_{current_year}", value=150), + call(name=f"balance_2_{current_year}", value=250), + ] + + # Assert that Redis `set` method was called with the expected arguments + mock_redis.set.assert_has_calls(expected_calls, any_order=True) + + # Ensure the number of calls matches the expected count + assert mock_redis.set.call_count == len(expected_calls) + + +@pytest.mark.anyio +async def test_populate_organization_redis_balance(redis_client: Redis): + # Mock the transaction repository + current_year = datetime.now().year + last_year = current_year - 1 + + mock_transaction_repo = AsyncMock() + mock_transaction_repo.get_transaction_start_year.return_value = last_year + mock_transaction_repo.calculate_available_balance_for_period.side_effect = [ + 100, + 200, + 150, + ] + + # Create an instance of the service with mocked dependencies + service = RedisBalanceService( + transaction_repo=mock_transaction_repo, redis_client=redis_client + ) + + await service.populate_organization_redis_balance(organization_id=1) + + # Assert that the transaction repository methods were called correctly + mock_transaction_repo.get_transaction_start_year.assert_called_once() + mock_transaction_repo.calculate_available_balance_for_period.assert_any_call( + 1, last_year + ) + mock_transaction_repo.calculate_available_balance_for_period.assert_any_call( + 1, current_year + ) + + # Assert that the Redis set method was called with the correct parameters + assert int(await redis_client.get(f"balance_1_{last_year}")) == 100 + assert int(await redis_client.get(f"balance_1_{current_year}")) == 200 + + +@pytest.mark.anyio +async def test_set_cache_value(): + # Mock the Redis client + mock_redis = AsyncMock() + + # Call the function + await 
set_cache_value(1, 2023, 100, mock_redis) + + # Assert that the Redis set method was called with the correct parameters + mock_redis.set.assert_called_once_with(name="balance_1_2023", value=100) diff --git a/backend/lcfs/tests/test_access_control.py b/backend/lcfs/tests/test_access_control.py new file mode 100644 index 000000000..087e9b021 --- /dev/null +++ b/backend/lcfs/tests/test_access_control.py @@ -0,0 +1,39 @@ +import pytest +from fastapi import FastAPI, Depends +from httpx import AsyncClient +from starlette import status + +from lcfs.db.models.user.Role import RoleEnum + + +@pytest.mark.anyio +async def test_endpoint_success( + client: AsyncClient, fastapi_app: FastAPI, set_mock_user +) -> None: + url = fastapi_app.url_path_for("get_current_user") + response = await client.get(url) + assert response.status_code == status.HTTP_200_OK + response_data = response.json() + assert response_data["keycloakUsername"] == "mockuser" + + +@pytest.mark.anyio +async def test_endpoint_access_denied( + client: AsyncClient, fastapi_app: FastAPI, set_mock_user +) -> None: + set_mock_user(fastapi_app, [RoleEnum.ANALYST]) + url = fastapi_app.url_path_for("get_organizations") + pagination = {"page": 1, "size": 10, "filters": [], "sortOrders": []} + response = await client.post(url, json=pagination) + assert response.status_code == status.HTTP_403_FORBIDDEN + + +@pytest.mark.anyio +async def test_endpoint_access_success( + client: AsyncClient, fastapi_app: FastAPI, set_mock_user +) -> None: + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + url = fastapi_app.url_path_for("get_organizations") + pagination = {"page": 1, "size": 10, "filters": [], "sortOrders": []} + response = await client.post(url, json=pagination) + assert response.status_code == status.HTTP_200_OK diff --git a/backend/lcfs/tests/test_audit_events.py b/backend/lcfs/tests/test_audit_events.py new file mode 100644 index 000000000..dd9c4dc4d --- /dev/null +++ b/backend/lcfs/tests/test_audit_events.py @@ -0,0 +1,77 @@ +from unittest.mock import Mock + +import pytest + +from lcfs.db.base import current_user_var +from lcfs.web.api.transfer.repo import TransferRepository +from lcfs.db.models.transfer.Transfer import Transfer +from datetime import datetime +from lcfs.db.models.user.UserProfile import UserProfile + +agreement_date = datetime.strptime("2024-02-02", "%Y-%m-%d").date() +test_transfer = { + "from_organization_id": 1, + "to_organization_id": 2, + "current_status_id": 1, + "transfer_category_id": 1, + "agreement_date": agreement_date, + "quantity": 100, + "price_per_unit": 10.0, +} + + +@pytest.fixture +def transfer_repo(dbsession): + return TransferRepository(db=dbsession) + + +@pytest.fixture +def mock_user(): + # Mock a user object with necessary attributes + mock_user = Mock() + mock_user.keycloak_username = "test_user" + + current_user_var.set(mock_user) + return mock_user + + +@pytest.mark.anyio +async def test_create_transfer_sets_auditable_fields( + dbsession, transfer_repo, mock_user +): + new_transfer = Transfer(**test_transfer) + await transfer_repo.create_transfer(new_transfer) + + added_transfer = await transfer_repo.get_transfer_by_id(new_transfer.transfer_id) + assert added_transfer is not None + assert added_transfer.create_user == mock_user.keycloak_username + assert added_transfer.update_user == mock_user.keycloak_username + assert isinstance(added_transfer.create_date, datetime) + assert isinstance(added_transfer.update_date, datetime) + + +@pytest.mark.anyio +async def 
test_update_transfer_updates_update_user(dbsession, transfer_repo, mock_user): + # Create a transfer with one user + new_transfer = Transfer(**test_transfer) + await transfer_repo.create_transfer(new_transfer) + + # Change the user in the session info to simulate a different updating user + second_user = UserProfile( + user_profile_id=2, keycloak_username="test_user_2" + ) # Different user ID + current_user_var.set(second_user) + + # Update the transfer + transfer_to_update = await transfer_repo.get_transfer_by_id( + new_transfer.transfer_id + ) + transfer_to_update.current_status_id = 2 + await transfer_repo.update_transfer(transfer_to_update) + + # Fetch the updated transfer + updated_transfer = await transfer_repo.get_transfer_by_id(new_transfer.transfer_id) + + # Assert the update_user is set to the new user + assert updated_transfer.update_user == "test_user_2" + assert updated_transfer.create_user != updated_transfer.update_user diff --git a/backend/lcfs/tests/test_auth_middleware.py b/backend/lcfs/tests/test_auth_middleware.py new file mode 100644 index 000000000..146ccaf0c --- /dev/null +++ b/backend/lcfs/tests/test_auth_middleware.py @@ -0,0 +1,138 @@ +from unittest.mock import AsyncMock, patch, MagicMock, Mock + +import pytest +import json +import redis +from starlette.exceptions import HTTPException +from starlette.requests import Request +from redis.asyncio import Redis, ConnectionPool + +from lcfs.db.models import UserProfile +from lcfs.services.keycloak.authentication import UserAuthentication +from lcfs.settings import Settings + + +@pytest.fixture +def redis_client(): + return AsyncMock() + + +@pytest.fixture +def session_generator(): + async_context = MagicMock() + return async_context + + +@pytest.fixture +def settings(): + return Settings( + well_known_endpoint="https://example.com/.well-known/openid-configuration", + keycloak_audience="your-audience", + ) + + +@pytest.fixture +def auth_backend(redis_client, session_generator, settings): + return UserAuthentication(redis_client, session_generator[0], settings) + + +@pytest.mark.anyio +async def test_load_jwk_from_redis(auth_backend): + # Mock auth_backend.redis_client.get to return a JSON string directly + mock_redis = AsyncMock() + mock_redis.get = AsyncMock( + return_value='{"jwks": "jwks_data", "jwks_uri": "jwks_uri_data"}' + ) + + # Patch Redis client in the auth backend + with patch.object(auth_backend, "redis_client", mock_redis): + await auth_backend.refresh_jwk() + + # Assertions to verify JWKS data was loaded correctly + assert auth_backend.jwks == "jwks_data" + assert auth_backend.jwks_uri == "jwks_uri_data" + + # Verify that Redis `get` was called with the correct key + mock_redis.get.assert_awaited_once_with("jwks_data") + + +@pytest.mark.anyio +@patch("httpx.AsyncClient.get") +async def test_refresh_jwk_sets_new_keys_in_redis(mock_httpx_get, redis_client): + # Mock responses for the well-known endpoint and JWKS URI + mock_oidc_response = MagicMock() + mock_oidc_response.json.return_value = {"jwks_uri": "https://example.com/jwks"} + mock_oidc_response.raise_for_status = MagicMock() + + mock_certs_response = MagicMock() + mock_certs_response.json.return_value = { + "keys": [{"kty": "RSA", "kid": "key2", "use": "sig", "n": "def", "e": "AQAB"}] + } + mock_certs_response.raise_for_status = MagicMock() + + # Configure the mock to return the above responses in order + mock_httpx_get.side_effect = [mock_oidc_response, mock_certs_response] + + # Mock Redis client behavior + redis_client.get = 
AsyncMock(return_value=None) # JWKS data not in cache + redis_client.set = AsyncMock() + + # Create auth_backend with the mocked Redis client + auth_backend = UserAuthentication( + redis_client=redis_client, + session_factory=AsyncMock(), + settings=MagicMock( + well_known_endpoint="https://example.com/.well-known/openid-configuration" + ), + ) + + # Call refresh_jwk + await auth_backend.refresh_jwk() + + # Assertions to verify JWKS data was fetched and set correctly + expected_jwks = { + "keys": [{"kty": "RSA", "kid": "key2", "use": "sig", "n": "def", "e": "AQAB"}] + } + assert auth_backend.jwks == expected_jwks + assert auth_backend.jwks_uri == "https://example.com/jwks" + + # Verify that Redis `get` was called with "jwks_data" + redis_client.get.assert_awaited_once_with("jwks_data") + + # Verify that the well-known endpoint was called twice + assert mock_httpx_get.call_count == 2 + mock_httpx_get.assert_any_call( + "https://example.com/.well-known/openid-configuration" + ) + mock_httpx_get.assert_any_call("https://example.com/jwks") + + # Verify that Redis `set` was called with the correct parameters + expected_jwks_data = { + "jwks": expected_jwks, + "jwks_uri": "https://example.com/jwks", + } + redis_client.set.assert_awaited_once_with( + "jwks_data", json.dumps(expected_jwks_data), ex=86400 + ) + + +@pytest.mark.anyio +async def test_authenticate_no_auth_header(auth_backend): + request = MagicMock(spec=Request) + request.headers = {} + + with pytest.raises(HTTPException) as exc_info: + await auth_backend.authenticate(request) + + assert exc_info.value.status_code == 401 + assert exc_info.value.detail == "Authorization header is required" + + +@pytest.mark.anyio +async def test_map_user_keycloak_id(auth_backend, session_generator): + user_profile = UserProfile() + user_token = {"preferred_username": "testuser"} + + await auth_backend.map_user_keycloak_id(user_profile, user_token) + + assert user_profile.keycloak_user_id == "testuser" diff --git a/backend/lcfs/tests/test_organization.py b/backend/lcfs/tests/test_organization.py new file mode 100644 index 000000000..3e8046be4 --- /dev/null +++ b/backend/lcfs/tests/test_organization.py @@ -0,0 +1,352 @@ +import pytest +from fastapi import FastAPI +from httpx import AsyncClient +from starlette import status + +from lcfs.db.models.user.Role import RoleEnum +from lcfs.web.api.organizations.schema import ( + OrganizationBalanceResponseSchema, + OrganizationListSchema, + OrganizationStatusEnum, + OrganizationSummaryResponseSchema, + OrganizationTypeEnum, +) + + +@pytest.mark.anyio +async def test_export_success( + client: AsyncClient, fastapi_app: FastAPI, set_mock_user +) -> None: + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + url = fastapi_app.url_path_for("export_organizations") + response = await client.get(url) + + assert response.status_code == status.HTTP_200_OK + assert response.headers["Content-Type"] == "application/vnd.ms-excel" + + +@pytest.mark.anyio +async def test_export_unauthorized_access( + client: AsyncClient, fastapi_app: FastAPI, set_mock_user +) -> None: + set_mock_user(fastapi_app, [RoleEnum.ANALYST]) + url = fastapi_app.url_path_for("export_organizations") + response = await client.get(url) + + assert response.status_code == status.HTTP_403_FORBIDDEN + + +@pytest.mark.anyio +async def test_get_organization_by_id_idir_user( + client: AsyncClient, fastapi_app: FastAPI, set_mock_user +) -> None: + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + url = fastapi_app.url_path_for("get_organization", 
organization_id=1) + response = await client.get(url) + + assert response.status_code == status.HTTP_200_OK + + +@pytest.mark.anyio +async def test_get_organization_not_found( + client: AsyncClient, fastapi_app: FastAPI, set_mock_user +) -> None: + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + url = fastapi_app.url_path_for("get_organization", organization_id=100) + response = await client.get(url) + + assert response.status_code == status.HTTP_404_NOT_FOUND + + +@pytest.mark.anyio +async def test_get_organization_by_id_bceid_user( + client: AsyncClient, fastapi_app: FastAPI, set_mock_user +) -> None: + set_mock_user(fastapi_app, [RoleEnum.SUPPLIER]) + url = fastapi_app.url_path_for("get_organization", organization_id=1) + response = await client.get(url) + + assert response.status_code == status.HTTP_200_OK + + +@pytest.mark.anyio +async def test_create_organization_success( + client: AsyncClient, fastapi_app: FastAPI, set_mock_user +) -> None: + # Set mock user role for organization creation + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + payload = { + "name": "Test Organizationa", + "operatingName": "Test Operating name", + "email": "test@gov.bc.ca", + "phone": "0000000000", + "edrmsRecord": "EDRMS123", + "organizationStatusId": 2, + "organizationTypeId": 1, + "address": { + "name": "Test Operating name", + "streetAddress": "123 Test Street", + "addressOther": "", + "city": "Victoria", + "provinceState": "BC", + "country": "Canada", + "postalcodeZipcode": "V8W 2C3", + }, + "attorneyAddress": { + "name": "Test Operating name", + "streetAddress": "123 Test Street", + "addressOther": "", + "city": "Victoria", + "provinceState": "BC", + "country": "Canada", + "postalcodeZipcode": "V8W 2C3", + }, + } + + response = await create_organization(client, fastapi_app, payload) + + assert response.status_code == status.HTTP_201_CREATED + + +@pytest.mark.anyio +async def test_update_organization_success( + client: AsyncClient, fastapi_app: FastAPI, set_mock_user +) -> None: + # Set mock user role for organization update. 
+ set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + payload = { + "name": "Test Organization", + "operatingName": "Test Operating name", + "email": "organization@gov.bc.ca", + "phone": "1111111111", + "edrmsRecord": "EDRMS123", + "organizationStatusId": 2, + "organizationTypeId": 1, + "address": { + "name": "Test Operating name", + "streetAddress": "123 Test Street", + "addressOther": "", + "city": "Victoria", + "provinceState": "BC", + "country": "Canada", + "postalcodeZipcode": "V8W 2C3", + }, + "attorneyAddress": { + "name": "Test Operating name", + "streetAddress": "123 Test Street", + "addressOther": "", + "city": "Victoria", + "provinceState": "BC", + "country": "Canada", + "postalcodeZipcode": "V8W 2C3", + }, + } + + response = await update_organization(client, fastapi_app, 1, payload) + + assert response.status_code == status.HTTP_200_OK + + +@pytest.mark.anyio +async def test_update_organization_failure( + client: AsyncClient, fastapi_app: FastAPI, set_mock_user +) -> None: + # Set mock user role for organization update + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + payload = { + "name": "Test Organizationa", + "operatingName": "Test Operating name", + "email": "test@gov.bc.ca", + "phone": "0000000000", + "edrmsRecord": "EDRMS123", + "organizationStatusId": 2, + "organizationTypeId": 1, + "address": { + "name": "Test Operating name", + "streetAddress": "123 Test Street", + "addressOther": "", + "city": "Victoria", + "provinceState": "BC", + "country": "Canada", + "postalcodeZipcode": "V8W 2C3", + }, + "attorneyAddress": { + "name": "Test Operating name", + "streetAddress": "123 Test Street", + "addressOther": "", + "city": "Victoria", + "provinceState": "BC", + "country": "Canada", + "postalcodeZipcode": "V8W 2C3", + }, + } + response = await update_organization(client, fastapi_app, 100, payload) + + assert response.status_code == status.HTTP_404_NOT_FOUND + + +@pytest.mark.anyio +async def test_get_organizations_list( + client: AsyncClient, fastapi_app: FastAPI, set_mock_user +) -> None: + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + url = fastapi_app.url_path_for("get_organizations") + response = await client.post( + url, json={"page": 1, "size": 5, "sortOrders": [], "filters": []} + ) + + data = OrganizationListSchema(**response.json()) + assert data.pagination.size == 5 + assert data.pagination.page == 1 + assert len(data.organizations) > 0 + assert data.organizations[0].org_type.org_type == OrganizationTypeEnum.FUEL_SUPPLIER + assert response.status_code == status.HTTP_200_OK + + +@pytest.mark.anyio +async def test_get_organization_statuses_list( + client: AsyncClient, fastapi_app: FastAPI, set_mock_user +) -> None: + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + url = fastapi_app.url_path_for("get_organization_statuses") + response = await client.get(url) + assert response.status_code == status.HTTP_200_OK + + +@pytest.mark.anyio +async def test_get_organization_types_list( + client: AsyncClient, fastapi_app: FastAPI, set_mock_user +) -> None: + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + url = fastapi_app.url_path_for("get_organization_types") + response = await client.get(url) + assert response.status_code == status.HTTP_200_OK + + +@pytest.mark.anyio +async def test_get_organization_names( + client: AsyncClient, fastapi_app: FastAPI, set_mock_user +) -> None: + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + url = fastapi_app.url_path_for("get_organization_names") + response = await client.get(url) + assert response.status_code == 
status.HTTP_200_OK + data = response.json() + assert isinstance(data, list) + assert len(data) > 0 + assert all("name" in org for org in data) + + +@pytest.mark.anyio +async def test_get_externally_registered_organizations( + client: AsyncClient, fastapi_app: FastAPI, set_mock_user +) -> None: + set_mock_user(fastapi_app, [RoleEnum.SUPPLIER]) + url = fastapi_app.url_path_for("get_externally_registered_organizations") + response = await client.get(url) + assert response.status_code == status.HTTP_200_OK + data = response.json() + for org in data: + org_model = OrganizationSummaryResponseSchema(**org) + assert org_model.organization_id is not None + assert org_model.org_status.status == OrganizationStatusEnum.REGISTERED + + +@pytest.mark.anyio +async def test_get_balances( + client: AsyncClient, fastapi_app: FastAPI, set_mock_user +) -> None: + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + organization_id = 1 # Assuming this organization exists + url = fastapi_app.url_path_for("get_balances", organization_id=organization_id) + response = await client.get(url) + assert response.status_code == status.HTTP_200_OK + data = OrganizationBalanceResponseSchema(**response.json()) + assert data.organization_id == organization_id + assert data.name is not None + assert data.total_balance >= 0 + assert data.reserved_balance >= 0 + + +@pytest.mark.anyio +async def test_get_current_balances( + client: AsyncClient, fastapi_app: FastAPI, set_mock_user +) -> None: + set_mock_user(fastapi_app, [RoleEnum.SUPPLIER]) + url = fastapi_app.url_path_for("get_balances") + response = await client.get(url) + assert response.status_code == status.HTTP_200_OK + data = OrganizationBalanceResponseSchema(**response.json()) + assert data.name is not None + assert data.total_balance >= 0 + assert data.reserved_balance >= 0 + + +@pytest.mark.anyio +async def test_create_organization_unauthorized( + client: AsyncClient, fastapi_app: FastAPI, set_mock_user +) -> None: + set_mock_user(fastapi_app, [RoleEnum.SUPPLIER]) + url = fastapi_app.url_path_for("create_organization") + payload = { + "name": "Test Organization", + "operatingName": "Test Operating name", + "email": "test@example.com", + "phone": "1234567890", + "edrmsRecord": "EDRMS123", + "organizationStatusId": 1, + "organizationTypeId": 1, + "address": { + "name": "Test Address", + "streetAddress": "123 Test St", + "city": "Test City", + "provinceState": "Test Province", + "country": "Test Country", + "postalcodeZipcode": "12345", + }, + "attorneyAddress": { + "name": "Test Attorney Address", + "streetAddress": "456 Attorney St", + "city": "Attorney City", + "provinceState": "Attorney Province", + "country": "Attorney Country", + "postalcodeZipcode": "67890", + }, + } + response = await client.post(url, json=payload) + assert response.status_code == status.HTTP_403_FORBIDDEN + + +@pytest.mark.anyio +async def test_get_balances_unauthorized( + client: AsyncClient, fastapi_app: FastAPI, set_mock_user +) -> None: + set_mock_user(fastapi_app, [RoleEnum.SUPPLIER]) + organization_id = 1 + url = fastapi_app.url_path_for("get_balances", organization_id=organization_id) + response = await client.get(url) + assert response.status_code == status.HTTP_403_FORBIDDEN + + +async def create_organization( + client: AsyncClient, + fastapi_app: FastAPI, + payload: dict, + # role: RoleEnum = RoleEnum.GOVERNMENT +) -> object: + """Helper function to create an organization and return the response.""" + # set_mock_user([role]) + url = fastapi_app.url_path_for("create_organization") + 
response = await client.post(url, json=payload) + return response + + +async def update_organization( + client: AsyncClient, fastapi_app: FastAPI, organization_id: int, payload: dict +) -> object: + """Helper function to update an organization and return the response.""" + url = fastapi_app.url_path_for( + "update_organization", organization_id=organization_id + ) + response = await client.put(url, json=payload) + return response diff --git a/backend/lcfs/tests/test_redis.py b/backend/lcfs/tests/test_redis.py deleted file mode 100644 index e10906b8d..000000000 --- a/backend/lcfs/tests/test_redis.py +++ /dev/null @@ -1,63 +0,0 @@ -import uuid - -import pytest -from fastapi import FastAPI -from httpx import AsyncClient -from redis.asyncio import ConnectionPool, Redis -from starlette import status - - -@pytest.mark.anyio -async def test_setting_value( - fastapi_app: FastAPI, - fake_redis_pool: ConnectionPool, - client: AsyncClient, -) -> None: - """ - Tests that you can set value in redis. - - :param fastapi_app: current application fixture. - :param fake_redis_pool: fake redis pool. - :param client: client fixture. - """ - url = fastapi_app.url_path_for("set_redis_value") - - test_key = uuid.uuid4().hex - test_val = uuid.uuid4().hex - response = await client.put( - url, - json={ - "key": test_key, - "value": test_val, - }, - ) - - assert response.status_code == status.HTTP_200_OK - async with Redis(connection_pool=fake_redis_pool) as redis: - actual_value = await redis.get(test_key) - assert actual_value.decode() == test_val - - -@pytest.mark.anyio -async def test_getting_value( - fastapi_app: FastAPI, - fake_redis_pool: ConnectionPool, - client: AsyncClient, -) -> None: - """ - Tests that you can get value from redis by key. - - :param fastapi_app: current application fixture. - :param fake_redis_pool: fake redis pool. - :param client: client fixture. 
- """ - test_key = uuid.uuid4().hex - test_val = uuid.uuid4().hex - async with Redis(connection_pool=fake_redis_pool) as redis: - await redis.set(test_key, test_val) - url = fastapi_app.url_path_for("get_redis_value") - response = await client.get(url, params={"key": test_key}) - - assert response.status_code == status.HTTP_200_OK - assert response.json()["key"] == test_key - assert response.json()["value"] == test_val diff --git a/backend/lcfs/tests/test_users.py b/backend/lcfs/tests/test_users.py new file mode 100644 index 000000000..0b286e642 --- /dev/null +++ b/backend/lcfs/tests/test_users.py @@ -0,0 +1,175 @@ +import pandas as pd +import numpy as np +from io import BytesIO +import pytest +from fastapi import FastAPI +from httpx import AsyncClient +from starlette import status + +from lcfs.db.models.user.Role import RoleEnum +from lcfs.web.api.user.schema import UsersSchema + + +@pytest.mark.anyio +async def test_export_success( + client: AsyncClient, fastapi_app: FastAPI, set_mock_user +) -> None: + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + url = fastapi_app.url_path_for("export_users") + response = await client.get(url) + + assert response.status_code == status.HTTP_200_OK + assert response.headers["Content-Type"] == "application/vnd.ms-excel" + + # Read the Excel content into a DataFrame + excel_data = pd.read_excel(BytesIO(response.content), engine="xlrd") + + # Define expected column names + expected_column_names = [ + "Last name", + "First name", + "Email", + "BCeID User ID", + "Title", + "Phone", + "Mobile", + "Status", + "Role(s)", + "Organization name", + ] + + # Check that the column names match the expected values + for column in expected_column_names: + assert ( + column in excel_data.columns + ), f"Column {column} not found in exported data." + + +@pytest.mark.anyio +async def test_export_unauthorized_access( + client: AsyncClient, fastapi_app: FastAPI, set_mock_user +) -> None: + set_mock_user(fastapi_app, [RoleEnum.ANALYST]) + url = fastapi_app.url_path_for("export_users") + response = await client.get(url) + + assert response.status_code == status.HTTP_403_FORBIDDEN + + +@pytest.mark.anyio +async def test_get_users_with_pagination( + client: AsyncClient, fastapi_app: FastAPI, set_mock_user +) -> None: + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + url = fastapi_app.url_path_for("get_users") + request_data = {"page": 1, "size": 5, "sortOrders": [], "filters": []} + response = await client.post(url, json=request_data) + + # Check the status code + assert response.status_code == status.HTTP_200_OK + # check if pagination is working as expected. + content = UsersSchema(**response.json()) + assert len(content.users) <= 5 + assert content.pagination.page == 1 + + +@pytest.mark.anyio +async def test_get_users_with_sort_order( + client: AsyncClient, fastapi_app: FastAPI, set_mock_user +) -> None: + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + url = fastapi_app.url_path_for("get_users") + request_data = { + "page": 1, + "size": 10, + "sortOrders": [{"field": "email", "direction": "desc"}], + "filters": [], + } + response = await client.post(url, json=request_data) + + # Check the status code + assert response.status_code == status.HTTP_200_OK + + content = UsersSchema(**response.json()) + emails = [user.email for user in content.users] + # check if emails are sorted in descending order. 
+    assert np.all(np.asarray(emails[:-1]) >= np.asarray(emails[1:]))
+
+
+@pytest.mark.anyio
+async def test_get_users_with_filter(
+    client: AsyncClient, fastapi_app: FastAPI, set_mock_user
+) -> None:
+    set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT])
+    url = fastapi_app.url_path_for("get_users")
+    request_data = {
+        "page": 1,
+        "size": 10,
+        "sortOrders": [],
+        "filters": [
+            {
+                "filterType": "number",
+                "type": "equals",
+                "filter": 1,
+                "field": "user_profile_id",
+            }
+        ],
+    }
+    response = await client.post(url, json=request_data)
+
+    # Check the status code
+    assert response.status_code == status.HTTP_200_OK
+    # check if filtering is working as expected.
+
+    content = UsersSchema(**response.json())
+    ids = [user.user_profile_id for user in content.users]
+    # check if only one user element exists with user_profile_id 1.
+    assert len(ids) == 1
+    assert ids[0] == 1
+
+
+@pytest.mark.anyio
+async def test_get_user_by_id(
+    client: AsyncClient, fastapi_app: FastAPI, set_mock_user
+) -> None:
+    set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT])
+    url = fastapi_app.url_path_for("get_user_by_id", user_id=1)
+    response = await client.get(url)
+
+    # Check the status code
+    assert response.status_code == status.HTTP_200_OK
+    # check the user_profile_id
+    assert response.json()["userProfileId"] == 1
+
+
+@pytest.mark.anyio
+async def test_create_user_success(
+    client: AsyncClient, fastapi_app: FastAPI, set_mock_user
+) -> None:
+    # Setup mock roles for authorization
+    set_mock_user(fastapi_app, [RoleEnum.ADMINISTRATOR])
+
+    # Define the URL for the create user endpoint
+    url = fastapi_app.url_path_for("create_user")
+
+    # Define the payload for creating a new user
+    user_data = {
+        "title": "Mr.",
+        "keycloak_username": "testuser",
+        "keycloak_email": "testuser@example.com",
+        "email": "testuser@example.com",  # Optional, but provided for clarity
+        "phone": "1234567890",
+        "mobile_phone": "0987654321",
+        "first_name": "Test",
+        "last_name": "User",
+        "is_active": True,
+        "organization_id": None,
+        "roles": [RoleEnum.SUPPLIER.value],
+    }
+
+    # Make the POST request to create a new user
+    response = await client.post(url, json=user_data)
+
+    # Check that the user creation was successful
+    assert response.status_code == status.HTTP_201_CREATED
+    assert response.json() == "User created successfully"
diff --git a/backend/lcfs/tests/transaction/__init__.py b/backend/lcfs/tests/transaction/__init__.py
new file mode 100644
index 000000000..f402076cd
--- /dev/null
+++ b/backend/lcfs/tests/transaction/__init__.py
@@ -0,0 +1 @@
+"""Tests for Transactions"""
diff --git a/backend/lcfs/tests/transaction/test_transaction_repo.py b/backend/lcfs/tests/transaction/test_transaction_repo.py
new file mode 100644
index 000000000..8f8564bbd
--- /dev/null
+++ b/backend/lcfs/tests/transaction/test_transaction_repo.py
@@ -0,0 +1,196 @@
+import pytest
+
+from lcfs.db.models.transfer.TransferStatus import TransferStatusEnum
+from lcfs.tests.transaction.transaction_payloads import *
+from lcfs.web.api.base import SortOrder
+from lcfs.web.api.transaction.repo import TransactionRepository, EntityType
+from lcfs.web.exception.exceptions import DatabaseException
+
+
+@pytest.fixture
+def transaction_repo(dbsession):
+    return TransactionRepository(db=dbsession)
+
+
+@pytest.fixture
+async def mock_transactions(dbsession):
+    transactions = [
+        test_org,
+        test_org_2,
+        deleted_transfer_orm,
+        draft_transfer_orm,
+        submitted_transfer_orm,
+        sent_transfer_orm,
+        recorded_transfer_orm,
+        recommended_transfer_orm,
+        refused_transfer_orm,
+        declined_transfer_orm,
+        rescinded_transfer_orm,
+        initiative_agreement_orm,
+        admin_adjustment_orm,
+        adjustment_transaction_orm,
+        reserved_transaction_orm,
+    ]
+    dbsession.add_all(transactions)
+    await dbsession.flush()
+
+    return transactions
+
+
+@pytest.mark.anyio
+async def test_calculate_total_balance(dbsession, transaction_repo, mock_transactions):
+    total_balance = await transaction_repo.calculate_total_balance(test_org_id)
+    assert total_balance == 100
+
+
+@pytest.mark.anyio
+async def test_calculate_reserved_balance(
+    dbsession, transaction_repo, mock_transactions
+):
+    reserved_balance = await transaction_repo.calculate_reserved_balance(test_org_id)
+    assert reserved_balance == 100
+
+
+@pytest.mark.anyio
+async def test_calculate_available_balance(
+    dbsession, transaction_repo, mock_transactions
+):
+    available_balance = await transaction_repo.calculate_available_balance(test_org_id)
+    assert available_balance == 0
+
+
+@pytest.mark.anyio
+async def test_create_transaction(dbsession, transaction_repo):
+    dbsession.add_all([test_org])
+    await dbsession.flush()
+
+    new_transaction = await transaction_repo.create_transaction(
+        TransactionActionEnum.Adjustment, 100, test_org_id
+    )
+    assert new_transaction.transaction_action == TransactionActionEnum.Adjustment
+    assert new_transaction.compliance_units == 100
+    assert new_transaction.organization_id == test_org_id
+
+
+@pytest.mark.anyio
+async def test_reserve_transaction(dbsession, transaction_repo, mock_transactions):
+    success = await transaction_repo.reserve_transaction(4)
+
+    assert success is True
+    updated_transaction = await dbsession.get(Transaction, 4)
+    assert updated_transaction.transaction_action is TransactionActionEnum.Reserved
+
+
+@pytest.mark.anyio
+async def test_release_transaction(dbsession, transaction_repo, mock_transactions):
+    success = await transaction_repo.release_transaction(4)
+    assert success
+
+    updated_transaction = await dbsession.get(Transaction, 4)
+    assert updated_transaction.transaction_action == TransactionActionEnum.Released
+
+
+@pytest.mark.anyio
+async def test_confirm_transaction(dbsession, transaction_repo, mock_transactions):
+    success = await transaction_repo.confirm_transaction(4)
+    assert success
+
+    updated_transaction = await dbsession.get(Transaction, 4)
+    assert updated_transaction.transaction_action == TransactionActionEnum.Adjustment
+
+
+@pytest.mark.anyio
+async def test_transactions_have_correct_visibilities(
+    dbsession, transaction_repo, mock_transactions
+):
+    sort_orders = [SortOrder(field="transaction_id", direction="asc")]
+
+    transactions_transferor, total_count_transferor = (
+        await transaction_repo.get_transactions_paginated(
+            0, 10, [], sort_orders, test_org_id
+        )
+    )
+    transactions_transferee, total_count_transferee = (
+        await transaction_repo.get_transactions_paginated(
+            0, 10, [], sort_orders, test_org_2_id
+        )
+    )
+    transactions_gov, total_count_gov = (
+        await transaction_repo.get_transactions_paginated(0, 10, [], sort_orders)
+    )
+
+    assert len(transactions_transferor) == 8
+    assert total_count_transferor == 8
+
+    assert len(transactions_transferee) == 7
+    assert total_count_transferee == 7
+
+    assert len(transactions_gov) == 9
+    assert total_count_gov == 9
+
+
+@pytest.mark.anyio
+async def test_get_visible_statuses_invalid_entity_type(transaction_repo):
+    with pytest.raises(DatabaseException):
+        await transaction_repo.get_visible_statuses("InvalidEntity")
+
+
+@pytest.mark.anyio
+async def test_get_visible_statuses_for_transferor(dbsession,
transaction_repo): + visible_statuses = await transaction_repo.get_visible_statuses( + EntityType.Transferor + ) + expected_statuses = [ + TransferStatusEnum.Draft, + TransferStatusEnum.Sent, + TransferStatusEnum.Submitted, + TransferStatusEnum.Recommended, + TransferStatusEnum.Recorded, + TransferStatusEnum.Refused, + TransferStatusEnum.Declined, + TransferStatusEnum.Rescinded, + ] + + # Verify that only the expected statuses are returned for a transferor + assert set(visible_statuses) == set( + expected_statuses + ), "Unexpected statuses returned for transferor" + + +@pytest.mark.anyio +async def test_get_visible_statuses_for_transferee(dbsession, transaction_repo): + visible_statuses = await transaction_repo.get_visible_statuses( + EntityType.Transferee + ) + expected_statuses = [ + TransferStatusEnum.Sent, + TransferStatusEnum.Submitted, + TransferStatusEnum.Recommended, + TransferStatusEnum.Recorded, + TransferStatusEnum.Refused, + TransferStatusEnum.Declined, + TransferStatusEnum.Rescinded, + ] + # Verify that only the expected statuses are returned for a transferee + assert set(visible_statuses) == set( + expected_statuses + ), "Unexpected statuses returned for transferee" + + +@pytest.mark.anyio +async def test_get_visible_statuses_for_government(dbsession, transaction_repo): + visible_statuses = await transaction_repo.get_visible_statuses( + EntityType.Government + ) + expected_statuses = [ + TransferStatusEnum.Submitted, + TransferStatusEnum.Recommended, + TransferStatusEnum.Recorded, + TransferStatusEnum.Refused, + TransferStatusEnum.Rescinded, + ] + # Verify that only the expected statuses are returned for the government + assert set(visible_statuses) == set( + expected_statuses + ), "Unexpected statuses returned for government" diff --git a/backend/lcfs/tests/transaction/test_transaction_services.py b/backend/lcfs/tests/transaction/test_transaction_services.py new file mode 100644 index 000000000..037e541fb --- /dev/null +++ b/backend/lcfs/tests/transaction/test_transaction_services.py @@ -0,0 +1,120 @@ +from datetime import datetime +from math import ceil +from unittest.mock import AsyncMock, MagicMock + +import pytest +from starlette.responses import StreamingResponse + +from lcfs.web.api.base import PaginationRequestSchema +from lcfs.web.api.transaction.schema import ( + TransactionStatusSchema, + TransactionViewSchema, +) +from lcfs.web.api.transaction.services import TransactionsService + + +@pytest.fixture +def mock_repo(): + repo = MagicMock() + repo.get_transactions_paginated = AsyncMock(return_value=([], 0)) + repo.get_transaction_statuses = AsyncMock(return_value=[]) + return repo + + +@pytest.fixture +def transactions_service(mock_repo): + return TransactionsService(repo=mock_repo) + + +# Test retrieving transactions with filters, sorting, and pagination +@pytest.mark.anyio +async def test_get_transactions(transactions_service): + pagination_request = PaginationRequestSchema( + page=1, size=10, filters=[], sort_orders=[] + ) + + mock_transactions = [ + MagicMock(spec=TransactionViewSchema), + MagicMock(spec=TransactionViewSchema), + MagicMock(spec=TransactionViewSchema), + ] + transactions_service.repo.get_transactions_paginated.return_value = ( + mock_transactions, + 3, + ) + + transactions_data = await transactions_service.get_transactions_paginated( + pagination=pagination_request + ) + + assert transactions_data["pagination"].total == 3 + assert len(transactions_data["transactions"]) == 3 + assert transactions_data["pagination"].total_pages == ceil(3 / 10) + + 
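+# A note on the pagination assertion above: the service is expected to derive
+# total_pages with ceiling division, so 3 records at a page size of 10 give
+# ceil(3 / 10) == 1 page.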
+# Test retrieving transaction statuses +@pytest.mark.anyio +async def test_get_transaction_statuses(transactions_service): + # Mock data returned by the repository + mock_statuses = [ + TransactionStatusSchema(status="Declined"), + TransactionStatusSchema(status="Deleted"), + ] + transactions_service.repo.get_transaction_statuses.return_value = mock_statuses + + statuses = await transactions_service.get_transaction_statuses() + + assert len(statuses) == 2 + assert isinstance(statuses[0], TransactionStatusSchema) + assert statuses[0].status == "Declined" + assert statuses[1].status == "Deleted" + + +# Test exporting transactions +@pytest.mark.anyio +async def test_export_transactions(transactions_service): + # Mock data returned by the repository + mock_transactions = [ + MagicMock( + transaction_type="Transfer", + transaction_id=1, + compliance_period="2023", + from_organization="Org A", + to_organization="Org B", + quantity=100, + price_per_unit=10, + category="Category A", + status="Approved", + transaction_effective_date=datetime.now(), + recorded_date=datetime.now(), + approved_date=datetime.now(), + comment="Test Comment", + ) + ] + transactions_service.repo.get_transactions_paginated.return_value = ( + mock_transactions, + 1, + ) + + response = await transactions_service.export_transactions(export_format="csv") + + assert isinstance(response, StreamingResponse) + assert response.headers["Content-Disposition"].startswith('attachment; filename="') + + # Collect the streamed content + content = b"" + async for chunk in response.body_iterator: + content += chunk + + # Convert bytes to string for easier assertion (assuming CSV format) + content_str = content.decode("utf-8") + + # Check if the content contains expected data + assert "CT1" in content_str # Check for transaction ID with prefix + assert "Org A" in content_str + assert "Org B" in content_str + assert "100" in content_str + assert "10" in content_str + assert "Category A" in content_str + assert "Approved" in content_str + assert "Test Comment" in content_str diff --git a/backend/lcfs/tests/transaction/test_transaction_views.py b/backend/lcfs/tests/transaction/test_transaction_views.py new file mode 100644 index 000000000..3e64a3476 --- /dev/null +++ b/backend/lcfs/tests/transaction/test_transaction_views.py @@ -0,0 +1,79 @@ +import pytest +from httpx import AsyncClient +from fastapi import FastAPI, status + +from lcfs.db.models.user.Role import RoleEnum + + +@pytest.mark.anyio +async def test_get_transactions_paginated_by_org( + client: AsyncClient, fastapi_app: FastAPI, set_mock_user +): + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + organization_id = 1 + url = fastapi_app.url_path_for( + "get_transactions_paginated_by_org", organization_id=organization_id + ) + pagination = {"page": 1, "size": 10, "filters": [], "sortOrders": []} + response = await client.post(url, json=pagination) + assert response.status_code == status.HTTP_200_OK + data = response.json() + assert "transactions" in data + assert "pagination" in data + assert data["pagination"]["page"] == 1 + assert len(data["transactions"]) == 2 + + +@pytest.mark.anyio +async def test_export_transactions_by_org( + client: AsyncClient, fastapi_app: FastAPI, set_mock_user +): + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + organization_id = 1 + url = fastapi_app.url_path_for( + "export_transactions_by_org", organization_id=organization_id + ) + response = await client.get(url, params={"format": "csv"}) + assert response.status_code == status.HTTP_200_OK + 
assert response.headers["content-type"] == "text/csv; charset=utf-8" + assert "attachment; filename=" in response.headers["content-disposition"] + + +@pytest.mark.anyio +async def test_get_transactions_paginated( + client: AsyncClient, fastapi_app: FastAPI, set_mock_user +): + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + url = fastapi_app.url_path_for("get_transactions_paginated") + pagination = {"page": 1, "size": 10, "filters": [], "sortOrders": []} + response = await client.post(url, json=pagination) + assert response.status_code == status.HTTP_200_OK + data = response.json() + assert "transactions" in data + assert "pagination" in data + assert data["pagination"]["page"] == 1 + assert len(data["transactions"]) == 2 + + +@pytest.mark.anyio +async def test_export_transactions( + client: AsyncClient, fastapi_app: FastAPI, set_mock_user +): + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + url = fastapi_app.url_path_for("export_transactions") + response = await client.get(url, params={"format": "csv"}) + assert response.status_code == status.HTTP_200_OK + assert response.headers["content-type"] == "text/csv; charset=utf-8" + assert "attachment; filename=" in response.headers["content-disposition"] + + +@pytest.mark.anyio +async def test_get_transaction_statuses(client: AsyncClient, fastapi_app: FastAPI): + url = fastapi_app.url_path_for("get_transaction_statuses") + response = await client.get(url) + assert response.status_code == status.HTTP_200_OK + statuses = response.json() + assert isinstance(statuses, list) + if statuses: + assert isinstance(statuses[0], dict) + assert "status" in statuses[0] diff --git a/backend/lcfs/tests/transaction/transaction_payloads.py b/backend/lcfs/tests/transaction/transaction_payloads.py new file mode 100644 index 000000000..ef7bc2c3f --- /dev/null +++ b/backend/lcfs/tests/transaction/transaction_payloads.py @@ -0,0 +1,197 @@ +from datetime import datetime + +from lcfs.db.models import ( + Transaction, + Organization, + OrganizationAddress, + InitiativeAgreement, + AdminAdjustment, + InitiativeAgreementStatus, +) +from lcfs.db.models.initiative_agreement.InitiativeAgreementStatus import ( + InitiativeAgreementStatusEnum, +) +from lcfs.db.models.transaction.Transaction import TransactionActionEnum +from lcfs.db.models.transfer.Transfer import Transfer, TransferRecommendationEnum + + +# Utility function to format datetime for consistency +def formatted_date(): + return datetime.strptime("2023-01-01", "%Y-%m-%d").date() + + +test_org_id = 111 +test_org_2_id = 112 + +# Transfer ORM Models +test_org = Organization( + organization_id=test_org_id, + name="Test Company", + operating_name="Test Co.", + org_address=OrganizationAddress( + street_address="123 Test St", + city="Test City", + province_state="Test Province", + country="Test Country", + postalCode_zipCode="T3ST 1Z3", + ), +) + +test_org_2 = Organization( + organization_id=112, + name="Test Company 2", + operating_name="Test Co.", + org_address=OrganizationAddress( + street_address="123 Test St", + city="Test City", + province_state="Test Province", + country="Test Country", + postalCode_zipCode="T3ST 1Z3", + ), +) + +draft_transfer_orm = Transfer( + from_organization_id=test_org_id, + to_organization_id=test_org_2_id, + agreement_date=formatted_date(), + transaction_effective_date=formatted_date(), + price_per_unit=2.0, + quantity=20, + transfer_category_id=1, + current_status_id=1, + recommendation=TransferRecommendationEnum.Record, +) + +deleted_transfer_orm = Transfer( + 
from_organization_id=test_org_id, + to_organization_id=test_org_2_id, + agreement_date=formatted_date(), + transaction_effective_date=formatted_date(), + price_per_unit=2.0, + quantity=20, + transfer_category_id=1, + current_status_id=2, + recommendation=TransferRecommendationEnum.Record, +) + +sent_transfer_orm = Transfer( + from_organization_id=test_org_id, + to_organization_id=test_org_2_id, + agreement_date=formatted_date(), + transaction_effective_date=formatted_date(), + price_per_unit=2.0, + quantity=20, + transfer_category_id=1, + current_status_id=3, + recommendation=TransferRecommendationEnum.Record, +) + +submitted_transfer_orm = Transfer( + from_organization_id=test_org_id, + to_organization_id=test_org_2_id, + agreement_date=formatted_date(), + transaction_effective_date=formatted_date(), + price_per_unit=2.0, + quantity=20, + transfer_category_id=1, + current_status_id=4, + recommendation=TransferRecommendationEnum.Record, +) + + +recommended_transfer_orm = Transfer( + from_organization_id=test_org_id, + to_organization_id=test_org_2_id, + agreement_date=formatted_date(), + transaction_effective_date=formatted_date(), + price_per_unit=2.0, + quantity=20, + transfer_category_id=1, + current_status_id=5, + recommendation=TransferRecommendationEnum.Record, +) + +recorded_transfer_orm = Transfer( + from_organization_id=test_org_id, + to_organization_id=test_org_2_id, + agreement_date=formatted_date(), + transaction_effective_date=formatted_date(), + price_per_unit=2.0, + quantity=20, + transfer_category_id=1, + current_status_id=6, + recommendation=TransferRecommendationEnum.Record, +) + +refused_transfer_orm = Transfer( + from_organization_id=test_org_id, + to_organization_id=test_org_2_id, + agreement_date=formatted_date(), + transaction_effective_date=formatted_date(), + price_per_unit=2.0, + quantity=20, + transfer_category_id=1, + current_status_id=7, + recommendation=TransferRecommendationEnum.Record, +) + +declined_transfer_orm = Transfer( + from_organization_id=test_org_id, + to_organization_id=test_org_2_id, + agreement_date=formatted_date(), + transaction_effective_date=formatted_date(), + price_per_unit=2.0, + quantity=20, + transfer_category_id=1, + current_status_id=8, + recommendation=TransferRecommendationEnum.Record, +) + +rescinded_transfer_orm = Transfer( + from_organization_id=test_org_id, + to_organization_id=test_org_2_id, + agreement_date=formatted_date(), + transaction_effective_date=formatted_date(), + price_per_unit=2.0, + quantity=20, + transfer_category_id=1, + current_status_id=9, + recommendation=TransferRecommendationEnum.Record, +) + +# InitiativeAgreementStatus ORM Models +initiative_agreement_status_orm = InitiativeAgreementStatus( + initiative_agreement_status_id=1, status=InitiativeAgreementStatusEnum.Draft +) + +# InitiativeAgreement ORM Models +initiative_agreement_orm = InitiativeAgreement( + initiative_agreement_id=1, + compliance_units=10, + to_organization_id=test_org_id, + current_status_id=1, +) + +# AdminAdjustment ORM Models +admin_adjustment_orm = AdminAdjustment( + admin_adjustment_id=200, + compliance_units=20, + to_organization_id=test_org_id, + current_status_id=1, +) + +reserved_transaction_orm = Transaction( + transaction_id=4, + transaction_action=TransactionActionEnum.Reserved, + compliance_units=100, + organization_id=test_org_id, + create_date=datetime.now(), +) + +adjustment_transaction_orm = Transaction( + transaction_id=5, + transaction_action=TransactionActionEnum.Adjustment, + compliance_units=100, + 
organization_id=test_org_id, + create_date=datetime.now(), +) diff --git a/backend/lcfs/tests/transfer/__init__.py b/backend/lcfs/tests/transfer/__init__.py new file mode 100644 index 000000000..bbbea40c9 --- /dev/null +++ b/backend/lcfs/tests/transfer/__init__.py @@ -0,0 +1 @@ +"""Tests for Transfers""" diff --git a/backend/lcfs/tests/transfer/conftest.py b/backend/lcfs/tests/transfer/conftest.py new file mode 100644 index 000000000..c4c57ddb5 --- /dev/null +++ b/backend/lcfs/tests/transfer/conftest.py @@ -0,0 +1,79 @@ +import pytest +from unittest.mock import MagicMock, AsyncMock +from lcfs.web.api.transfer.services import TransferServices +from lcfs.web.api.transfer.validation import TransferValidation +from lcfs.web.api.transfer.repo import TransferRepository +from lcfs.web.api.organizations.repo import OrganizationsRepository +from lcfs.web.api.organizations.services import OrganizationsService +from lcfs.web.api.transaction.repo import TransactionRepository +from sqlalchemy.ext.asyncio import AsyncSession + + +@pytest.fixture +def mock_db(): + return AsyncMock(spec=AsyncSession) + + +@pytest.fixture +def mock_transfer_services(): + return MagicMock(spec=TransferServices) + + +@pytest.fixture +def mock_transfer_validation(): + return MagicMock(spec=TransferValidation) + + +@pytest.fixture +def mock_transfer_repo(): + return MagicMock(spec=TransferRepository) + + +@pytest.fixture +def mock_request(mock_user_profile): + request = MagicMock() + request.user = mock_user_profile + request.user.organization = 1 + request.user.user_profile_id = 1 + return request + + +@pytest.fixture +def mock_orgs_repo(): + return MagicMock(spec=OrganizationsRepository) + + +@pytest.fixture +def mock_orgs_service(): + return MagicMock(spec=OrganizationsService) + + +@pytest.fixture +def mock_transaction_repo(): + return MagicMock(spec=TransactionRepository) + + +@pytest.fixture +def transfer_service( + mock_request, + mock_transfer_validation, + mock_transfer_repo, + mock_orgs_repo, + mock_orgs_service, + mock_transaction_repo, +): + service = TransferServices() + service.validate = mock_transfer_validation + service.repo = mock_transfer_repo + service.request = mock_request + service.org_repo = mock_orgs_repo + service.org_service = mock_orgs_service + service.transaction_repo = mock_transaction_repo + return service + + +@pytest.fixture +def transfer_repo(mock_db): + repo = TransferRepository() + repo.db = mock_db + return repo diff --git a/backend/lcfs/tests/transfer/test_transfer_repo.py b/backend/lcfs/tests/transfer/test_transfer_repo.py new file mode 100644 index 000000000..81d504028 --- /dev/null +++ b/backend/lcfs/tests/transfer/test_transfer_repo.py @@ -0,0 +1,163 @@ +import pytest + +from lcfs.db.models.transfer.Transfer import Transfer +from lcfs.db.models.transfer.TransferHistory import TransferHistory + +from unittest.mock import MagicMock, AsyncMock +from lcfs.web.api.transfer.schema import TransferSchema +from datetime import date +from lcfs.db.models.organization import Organization +from lcfs.db.models.transfer import TransferStatus, TransferCategory + + +@pytest.mark.anyio +async def test_get_all_transfers_success(transfer_repo, mock_db): + expected_data = [] + mock_result = MagicMock() + mock_result.scalars.return_value.all.return_value = expected_data + + mock_db.execute.return_value = mock_result + + result = await transfer_repo.get_all_transfers() + + mock_db.execute.assert_called_once() + mock_result.scalars.return_value.all.assert_called_once() + assert result == expected_data + + 
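+# The repository tests below share one pattern: stub the AsyncSession's
+# execute()/scalar() results with MagicMock or AsyncMock, call the repository
+# method, then assert that the returned ORM model or schema matches the stub.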
+@pytest.mark.anyio +async def test_get_transfer_by_id_success(transfer_repo, mock_db): + transfer_id = 1 + expected_data = Transfer(transfer_id=transfer_id) + mock_result = MagicMock() + mock_result.scalars.return_value.first.return_value = expected_data + + mock_db.execute.return_value = mock_result + + result = await transfer_repo.get_transfer_by_id(transfer_id) + + mock_db.execute.assert_called_once() + mock_result.scalars.return_value.first.assert_called_once() + + assert result == expected_data + assert isinstance(result, Transfer) + assert result.transfer_id == transfer_id + + +@pytest.mark.anyio +async def test_create_transfer_success(transfer_repo): + transfer_id = 1 + expected_data = Transfer( + transfer_id=transfer_id, + from_organization=Organization( + organization_id=1, + name="org1", + ), + to_organization=Organization( + organization_id=2, + name="org2", + ), + agreement_date=date.today(), + quantity=1, + price_per_unit=1.99, + current_status=TransferStatus(transfer_status_id=1, status="status"), + ) + + result = await transfer_repo.create_transfer(expected_data) + + assert result == TransferSchema.from_orm(expected_data) + assert isinstance(result, TransferSchema) + assert result.transfer_id == transfer_id + + +@pytest.mark.anyio +async def test_get_transfer_status_by_name_success(transfer_repo, mock_db): + status = "Draft" + mock_db.scalar.return_value = TransferStatus(status=status) + result = await transfer_repo.get_transfer_status_by_name(status) + + assert isinstance(result, TransferStatus) + assert result.status == status + + +@pytest.mark.anyio +async def test_get_transfer_category_by_name_success(transfer_repo, mock_db): + category = "A" + mock_db.scalar.return_value = TransferCategory(category=category) + result = await transfer_repo.get_transfer_category_by_name(category) + + assert isinstance(result, TransferCategory) + assert result.category == category + + +@pytest.mark.anyio +async def test_update_transfer_success(transfer_repo): + transfer_id = 1 + expected_data = Transfer( + transfer_id=transfer_id, + from_organization=Organization( + organization_id=1, + name="org1", + ), + to_organization=Organization( + organization_id=2, + name="org2", + ), + agreement_date=date.today(), + quantity=1, + price_per_unit=2.75, + current_status=TransferStatus(transfer_status_id=1, status="status"), + ) + + result = await transfer_repo.update_transfer(expected_data) + + assert result == TransferSchema.model_validate(expected_data) + assert result.transfer_id == transfer_id + + +@pytest.mark.anyio +async def test_add_transfer_history_success(transfer_repo): + transfer_id = 1 + transfer_status_id = 1 + user_profile_id = 1 + + result = await transfer_repo.add_transfer_history( + transfer_id, transfer_status_id, user_profile_id + ) + + assert isinstance(result, TransferHistory) + assert result.transfer_id == transfer_id + assert result.transfer_status_id == transfer_status_id + assert result.user_profile_id == user_profile_id + + +@pytest.mark.anyio +async def test_update_transfer_history_success(transfer_repo, mock_db): + transfer_id = 1 + transfer_status_id = 1 + user_profile_id = 1 + expected_data = TransferHistory( + transfer_id=transfer_id, + transfer_status_id=transfer_status_id, + user_profile_id=user_profile_id, + ) + + mock_db.scalar.return_value = expected_data + + result = await transfer_repo.update_transfer_history( + transfer_id, transfer_status_id, user_profile_id + ) + + assert isinstance(result, TransferHistory) + assert result.transfer_id == transfer_id + assert 
result.transfer_status_id == transfer_status_id + assert result.user_profile_id == user_profile_id + + +@pytest.mark.anyio +async def test_refresh_transfer_success(transfer_repo, mock_db): + expected_data = Transfer(transfer_id=1) + + result = await transfer_repo.refresh_transfer(expected_data) + + assert result == expected_data diff --git a/backend/lcfs/tests/transfer/test_transfer_services.py b/backend/lcfs/tests/transfer/test_transfer_services.py new file mode 100644 index 000000000..f82ef70c7 --- /dev/null +++ b/backend/lcfs/tests/transfer/test_transfer_services.py @@ -0,0 +1,168 @@ +import pytest +from unittest.mock import AsyncMock, patch +from lcfs.web.api.transfer.schema import TransferSchema +from datetime import date +from lcfs.db.models.transfer import Transfer +from lcfs.db.models.organization import Organization +from lcfs.db.models.transfer import TransferStatus +from lcfs.web.api.transfer.schema import TransferCreateSchema + + +@pytest.mark.anyio +async def test_get_all_transfers_success(transfer_service, mock_transfer_repo): + + mock_transfer_repo.get_all_transfers.return_value = [ + { + "transfer_id": 1, + "from_organization": {"organization_id": 1, "name": "org1"}, + "to_organization": {"organization_id": 2, "name": "org2"}, + "agreement_date": date.today(), + "quantity": 1, + "price_per_unit": 1, + "current_status": {"transfer_status_id": 1, "status": "status"}, + } + ] + + result = await transfer_service.get_all_transfers() + + assert isinstance(result[0], TransferSchema) + mock_transfer_repo.get_all_transfers.assert_called_once() + + +@pytest.mark.anyio +async def test_get_transfer_success(transfer_service, mock_transfer_repo): + transfer_id = 1 + mock_transfer_repo.get_transfer_by_id = AsyncMock( + return_value=Transfer( + transfer_id=transfer_id, + from_organization=Organization( + organization_id=1, + name="org1", + ), + to_organization=Organization( + organization_id=2, + name="org2", + ), + agreement_date=date.today(), + quantity=1, + price_per_unit=5.75, + from_org_comment="comment", + to_org_comment="comment", + gov_comment="comment", + current_status=TransferStatus(transfer_status_id=1, status="status"), + ) + ) + + result = await transfer_service.get_transfer(transfer_id) + + assert result.transfer_id == transfer_id + assert isinstance(result, TransferSchema) + mock_transfer_repo.get_transfer_by_id.assert_called_once() + + +@pytest.mark.anyio +async def test_create_transfer_success(transfer_service, mock_transfer_repo): + mock_transfer_repo.get_transfer_status_by_name.return_value = TransferStatus( + transfer_status_id=1, status="Sent" + ) + mock_transfer_repo.add_transfer_history.return_value = True + + transfer_id = 1 + transfer_data = TransferCreateSchema( + transfer_id=transfer_id, + from_organization_id=1, + to_organization_id=2, + price_per_unit=5.75, + ) + mock_transfer_repo.create_transfer.return_value = transfer_data + + # Patch the _perform_notification_call method + with patch.object(transfer_service, "_perform_notification_call", AsyncMock()): + result = await transfer_service.create_transfer(transfer_data) + + assert result.transfer_id == transfer_id + assert isinstance(result, TransferCreateSchema) + transfer_service._perform_notification_call.assert_called_once() + + +@pytest.mark.anyio +async def test_update_transfer_success( + transfer_service, mock_transfer_repo, mock_request +): + transfer_status = TransferStatus(transfer_status_id=1, status="status") + transfer_id = 1 + # Create valid nested organization objects + from_org = 
Organization(organization_id=1, name="org1") + to_org = Organization(organization_id=2, name="org2") + + # Create a Transfer object with the necessary attributes + transfer = Transfer( + transfer_id=transfer_id, + from_organization=from_org, + to_organization=to_org, + from_organization_id=1, + to_organization_id=2, + from_transaction_id=1, + to_transaction_id=2, + agreement_date=date.today(), + transaction_effective_date=date.today(), + price_per_unit=7.99, + quantity=1, + from_org_comment="comment", + to_org_comment="comment", + gov_comment="comment", + transfer_category_id=1, + current_status_id=1, + recommendation="Recommended", + current_status=transfer_status, + ) + + mock_transfer_repo.get_transfer_status_by_name.return_value = transfer_status + mock_transfer_repo.get_transfer_by_id.return_value = transfer + mock_transfer_repo.update_transfer.return_value = transfer + + # Replace _perform_notification_call with an AsyncMock + transfer_service._perform_notification_call = AsyncMock() + + result = await transfer_service.update_transfer(transfer) + + # Assertions + assert result.transfer_id == transfer_id + assert isinstance(result, Transfer) + + # Verify mocks + mock_transfer_repo.get_transfer_by_id.assert_called_once_with(transfer_id) + mock_transfer_repo.update_transfer.assert_called_once_with(transfer) + transfer_service._perform_notification_call.assert_awaited_once_with( + transfer, status="Return to analyst" + ) + + +@pytest.mark.anyio +async def test_update_category_success(transfer_service, mock_transfer_repo): + transfer_status = TransferStatus(transfer_status_id=1, status="status") + transfer_id = 1 + transfer = Transfer( + transfer_id=transfer_id, + from_organization_id=1, + to_organization_id=2, + from_transaction_id=1, + to_transaction_id=2, + agreement_date=date.today(), + transaction_effective_date=date.today(), + price_per_unit=1, + quantity=1, + from_org_comment="comment", + to_org_comment="comment", + gov_comment="comment", + transfer_category_id=1, + current_status_id=1, + recommendation="Recommended", + current_status=transfer_status, + ) + + mock_transfer_repo.get_transfer_by_id.return_value = transfer + + result = await transfer_service.update_category(transfer_id, None) + assert result.transfer_id == transfer_id + assert isinstance(result, Transfer) diff --git a/backend/lcfs/tests/transfer/test_transfer_views.py b/backend/lcfs/tests/transfer/test_transfer_views.py new file mode 100644 index 000000000..1c5ed6303 --- /dev/null +++ b/backend/lcfs/tests/transfer/test_transfer_views.py @@ -0,0 +1,137 @@ +from datetime import date + +import pytest +from fastapi import FastAPI +from httpx import AsyncClient + +from lcfs.db.models.user.Role import RoleEnum +from lcfs.web.api.transfer.schema import TransferSchema +from lcfs.web.api.transfer.services import TransferServices +from lcfs.web.api.transfer.validation import TransferValidation + + +@pytest.mark.anyio +async def test_get_all_transfers_success( + client: AsyncClient, + fastapi_app: FastAPI, + mock_transfer_services, +): + url = fastapi_app.url_path_for("get_all_transfers") + + mock_transfer_services.get_all_transfers.return_value = [] + + fastapi_app.dependency_overrides[TransferServices] = lambda: mock_transfer_services + + response = await client.get(url) + + assert response.status_code == 200 + + +@pytest.mark.anyio +async def test_get_transfer_success( + client: AsyncClient, + fastapi_app: FastAPI, + mock_transfer_services, +): + transfer_id = 1 + url = fastapi_app.url_path_for("get_transfer", 
transfer_id=transfer_id) + + mock_transfer_services.get_transfer.return_value = TransferSchema( + transfer_id=transfer_id, + from_organization={"organization_id": 1, "name": "org1"}, + to_organization={"organization_id": 2, "name": "org2"}, + agreement_date=date.today(), + quantity=1, + price_per_unit=6.85, + current_status={"transfer_status_id": 1, "status": "status"}, + ) + fastapi_app.dependency_overrides[TransferServices] = lambda: mock_transfer_services + + response = await client.get(url) + + assert response.status_code == 200 + + data = response.json() + + assert data["transferId"] == transfer_id + assert data["pricePerUnit"] == 6.85 + + +@pytest.mark.anyio +async def test_government_update_transfer_success( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, + mock_transfer_services, + mock_transfer_validation, +): + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + + transfer_id = 1 + url = fastapi_app.url_path_for( + "government_update_transfer", transfer_id=transfer_id + ) + + mock_transfer_validation.government_update_transfer.return_value = None + mock_transfer_services.update_transfer.return_value = TransferSchema( + transfer_id=transfer_id, + from_organization={"organization_id": 1, "name": "org1"}, + to_organization={"organization_id": 2, "name": "org2"}, + agreement_date=date.today(), + quantity=1, + price_per_unit=7.25, + current_status={"transfer_status_id": 1, "status": "status"}, + ) + fastapi_app.dependency_overrides[TransferServices] = lambda: mock_transfer_services + fastapi_app.dependency_overrides[TransferValidation] = ( + lambda: mock_transfer_validation + ) + + payload = { + "transfer_id": transfer_id, + "from_organization_id": 1, + "to_organization_id": 2, + "price_per_unit": 7.25, + } + + response = await client.put(url, json=payload) + + assert response.status_code == 200 + + data = response.json() + + assert data["transferId"] == transfer_id + assert data["pricePerUnit"] == 7.25 + + +@pytest.mark.anyio +async def test_update_category_success( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, + mock_transfer_services, +): + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + + transfer_id = 1 + url = fastapi_app.url_path_for("update_category", transfer_id=transfer_id) + + mock_transfer_services.update_category.return_value = { + "transfer_id": transfer_id, + "from_organization": {"organization_id": 1, "name": "org1"}, + "to_organization": {"organization_id": 2, "name": "org2"}, + "agreement_date": date.today(), + "quantity": 1, + "price_per_unit": 1, + "current_status": {"transfer_status_id": 1, "status": "status"}, + } + + fastapi_app.dependency_overrides[TransferServices] = lambda: mock_transfer_services + + response = await client.put(url) + + assert response.status_code == 200 + + data = response.json() + + assert data["transferId"] == transfer_id diff --git a/backend/lcfs/tests/user/__init__.py b/backend/lcfs/tests/user/__init__.py new file mode 100644 index 000000000..894f1b9ba --- /dev/null +++ b/backend/lcfs/tests/user/__init__.py @@ -0,0 +1 @@ +"""Tests for Users""" diff --git a/backend/lcfs/tests/user/test_user_repo.py b/backend/lcfs/tests/user/test_user_repo.py new file mode 100644 index 000000000..a96788933 --- /dev/null +++ b/backend/lcfs/tests/user/test_user_repo.py @@ -0,0 +1,77 @@ +from unittest.mock import Mock + +import pytest + +from lcfs.db.models import UserProfile, UserLoginHistory +from lcfs.web.api.user.repo import UserRepository +from lcfs.tests.user.user_payloads import user_orm_model + + +@pytest.fixture 
+def user_repo(dbsession): + return UserRepository(db=dbsession) + + +# Tests for get_full_name +@pytest.mark.anyio +async def test_get_full_name_success(dbsession, user_repo): + dbsession.add(user_orm_model) + await dbsession.commit() + full_name = await user_repo.get_full_name(user_orm_model.keycloak_username) + expected_full_name = f"{user_orm_model.first_name} {user_orm_model.last_name}" + assert ( + full_name == expected_full_name + ), "The fetched full name did not match the expected value." + + +@pytest.mark.anyio +async def test_create_login_history(dbsession, user_repo): + """Test creating a user login history entry.""" + # Mock user profile + user = UserProfile( + email="testuser@example.com", + keycloak_username="testuser", + keycloak_user_id="12345", + ) + dbsession.add = Mock() + + # Act + await user_repo.create_login_history(user) + + # Assert + + added_object = dbsession.add.call_args[0][0] + assert isinstance(added_object, UserLoginHistory) + assert added_object.keycloak_email == user.email + assert added_object.external_username == user.keycloak_username + assert added_object.keycloak_user_id == user.keycloak_user_id + assert added_object.is_login_successful is True + + +@pytest.mark.anyio +async def test_update_email_success(dbsession, user_repo): + # Arrange: Create a user in the database + user = UserProfile( + keycloak_user_id="user_id_1", + keycloak_email="user1@domain.com", + keycloak_username="username1", + email="user1@domain.com", + title="Developer", + phone="1234567890", + mobile_phone="0987654321", + first_name="John", + last_name="Doe", + is_active=True, + organization_id=1, + ) + dbsession.add(user) + await dbsession.commit() + await dbsession.refresh(user) + + # Act: Update the email + updated_user = await user_repo.update_email( + user_profile_id=1, email="new_email@domain.com" + ) + + # Assert: Check if the email was updated + assert updated_user.email == "new_email@domain.com" diff --git a/backend/lcfs/tests/user/test_user_views.py b/backend/lcfs/tests/user/test_user_views.py new file mode 100644 index 000000000..24c9e303f --- /dev/null +++ b/backend/lcfs/tests/user/test_user_views.py @@ -0,0 +1,486 @@ +from unittest.mock import patch, AsyncMock + +from lcfs.db.models import UserProfile +from lcfs.db.models.user import UserLoginHistory +import pytest +from fastapi import FastAPI, status +from httpx import AsyncClient +from datetime import datetime + +from lcfs.db.models.transfer.Transfer import Transfer, TransferRecommendationEnum +from lcfs.db.models.transfer.TransferHistory import TransferHistory +from lcfs.db.models.initiative_agreement.InitiativeAgreement import InitiativeAgreement +from lcfs.db.models.initiative_agreement.InitiativeAgreementHistory import ( + InitiativeAgreementHistory, +) +from lcfs.db.models.admin_adjustment.AdminAdjustment import AdminAdjustment +from lcfs.db.models.admin_adjustment.AdminAdjustmentHistory import ( + AdminAdjustmentHistory, +) +from lcfs.db.models.user.Role import RoleEnum +from lcfs.web.api.user.schema import ( + UserActivitiesResponseSchema, + UserLoginHistoryResponseSchema, +) +from lcfs.web.exception.exceptions import DataNotFoundException + + +@pytest.mark.anyio +async def test_get_user_activities_as_administrator( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, + add_models, +): + # Mock the current user as an ADMINISTRATOR + set_mock_user(fastapi_app, [RoleEnum.ADMINISTRATOR]) + + # Assuming user with user_profile_id=7 exists in the database + target_user_id = 7 # LCFS 1 + + # Create activity 
history records for the target user + transfer = Transfer( + transfer_id=1, + from_organization_id=1, + to_organization_id=2, + agreement_date=datetime.strptime("2024-01-01", "%Y-%m-%d"), + transaction_effective_date=datetime.strptime("2024-01-01", "%Y-%m-%d"), + price_per_unit=1.0, + quantity=10, + transfer_category_id=1, + current_status_id=1, + recommendation=TransferRecommendationEnum.Record, + effective_status=True, + ) + transfer_history = TransferHistory( + transfer_history_id=1, + transfer_id=transfer.transfer_id, + transfer_status_id=3, # Sent + user_profile_id=target_user_id, + ) + + initiative = InitiativeAgreement( + initiative_agreement_id=1, + compliance_units=100, + transaction_effective_date=datetime.strptime("2024-01-01", "%Y-%m-%d"), + gov_comment="Test Initiative", + to_organization_id=1, + current_status_id=1, + ) + initiative_history = InitiativeAgreementHistory( + initiative_agreement_history_id=1, + initiative_agreement_id=initiative.initiative_agreement_id, + initiative_agreement_status_id=2, # Approved + user_profile_id=target_user_id, + ) + + admin_adjustment = AdminAdjustment( + admin_adjustment_id=11, + compliance_units=50, + transaction_effective_date=datetime.strptime("2024-01-01", "%Y-%m-%d"), + gov_comment="Test Adjustment", + to_organization_id=1, + current_status_id=1, + ) + admin_adjustment_history = AdminAdjustmentHistory( + admin_adjustment_history_id=1, + admin_adjustment_id=admin_adjustment.admin_adjustment_id, + admin_adjustment_status_id=2, # Approved + user_profile_id=target_user_id, + ) + + await add_models( + [ + transfer, + transfer_history, + initiative, + initiative_history, + admin_adjustment, + admin_adjustment_history, + ] + ) + + # Prepare request data + pagination = { + "page": 1, + "size": 10, + "filters": [], + "sortOrders": [], + } + + # Send request to get user activities + url = fastapi_app.url_path_for("get_user_activities", user_id=target_user_id) + response = await client.post(url, json=pagination) + + assert response.status_code == status.HTTP_200_OK + content = UserActivitiesResponseSchema(**response.json()) + assert len(content.activities) == 3 # Should have 3 activity records + + +@pytest.mark.anyio +async def test_get_user_activities_as_manage_users_same_org( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, + add_models, +): + # Mock the current user as a user with MANAGE_USERS + set_mock_user(fastapi_app, [RoleEnum.ADMINISTRATOR, RoleEnum.MANAGE_USERS]) + + # Assuming target user with user_profile_id=3 exists and is in organization_id=1 + target_user_id = 1 + + # Create activity history records for the target user + transfer = Transfer( + transfer_id=1, + from_organization_id=1, + to_organization_id=2, + agreement_date=datetime.strptime("2024-01-01", "%Y-%m-%d"), + transaction_effective_date=datetime.strptime("2024-01-01", "%Y-%m-%d"), + price_per_unit=1.0, + quantity=10, + transfer_category_id=1, + current_status_id=1, + recommendation=TransferRecommendationEnum.Record, + effective_status=True, + ) + transfer_history = TransferHistory( + transfer_history_id=1, + transfer_id=transfer.transfer_id, + transfer_status_id=3, # Sent + user_profile_id=target_user_id, + ) + + await add_models( + [ + transfer, + transfer_history, + ] + ) + + # Prepare request data + pagination = { + "page": 1, + "size": 10, + "filters": [], + "sortOrders": [], + } + + # Send request to get user activities + url = fastapi_app.url_path_for("get_user_activities", user_id=target_user_id) + response = await client.post(url, json=pagination) 
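+    # MANAGE_USERS is assumed to permit viewing activity for users in the
+    # caller's own organization, so this request should succeed.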
+ + assert response.status_code == status.HTTP_200_OK + content = UserActivitiesResponseSchema(**response.json()) + assert len(content.activities) == 1 # Should have 1 activity record + + +@pytest.mark.anyio +async def test_get_user_activities_permission_denied( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, +): + # Mock the current user as a user with MANAGE_USERS role in organization_id=1 + set_mock_user(fastapi_app, [RoleEnum.MANAGE_USERS]) + + # Assuming target user with user_profile_id=7 exists and is in organization_id=3 + target_user_id = 7 + + # Prepare request data + pagination = { + "page": 1, + "size": 10, + "filters": [], + "sortOrders": [], + } + + # Send request to get user activities + url = fastapi_app.url_path_for("get_user_activities", user_id=target_user_id) + response = await client.post(url, json=pagination) + + assert response.status_code == status.HTTP_403_FORBIDDEN + + +@pytest.mark.anyio +async def test_get_all_user_activities_as_administrator( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, + add_models, +): + # Mock the current user as an ADMINISTRATOR + set_mock_user(fastapi_app, [RoleEnum.ADMINISTRATOR]) + + # Create activity history records for multiple users + transfer = Transfer( + transfer_id=1, + from_organization_id=1, + to_organization_id=2, + agreement_date=datetime.strptime("2024-01-01", "%Y-%m-%d"), + transaction_effective_date=datetime.strptime("2024-01-01", "%Y-%m-%d"), + price_per_unit=1.0, + quantity=10, + transfer_category_id=1, + current_status_id=1, + recommendation=TransferRecommendationEnum.Record, + effective_status=True, + ) + transfer_history = TransferHistory( + transfer_history_id=1, + transfer_id=transfer.transfer_id, + transfer_status_id=3, # Sent + user_profile_id=7, # LCFS 1 + ) + + initiative = InitiativeAgreement( + initiative_agreement_id=1, + compliance_units=100, + transaction_effective_date=datetime.strptime("2024-01-01", "%Y-%m-%d"), + gov_comment="Test Initiative", + to_organization_id=1, + current_status_id=1, + ) + initiative_history = InitiativeAgreementHistory( + initiative_agreement_history_id=1, + initiative_agreement_id=initiative.initiative_agreement_id, + initiative_agreement_status_id=2, # Approved + user_profile_id=7, # LCFS 1 + ) + + await add_models( + [ + transfer, + transfer_history, + initiative, + initiative_history, + ] + ) + + # Prepare request data + pagination = { + "page": 1, + "size": 10, + "filters": [], + "sortOrders": [], + } + + # Send request to get all user activities + url = fastapi_app.url_path_for("get_all_user_activities") + response = await client.post(url, json=pagination) + + assert response.status_code == status.HTTP_200_OK + content = UserActivitiesResponseSchema(**response.json()) + assert len(content.activities) == 2 # Should have 2 activity records + + +@pytest.mark.anyio +async def test_get_all_user_activities_permission_denied( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, +): + # Mock the current user as a user without ADMINISTRATOR role + set_mock_user(fastapi_app, [RoleEnum.ANALYST]) + + # Prepare request data + pagination = { + "page": 1, + "size": 10, + "filters": [], + "sortOrders": [], + } + + # Send request to get all user activities + url = fastapi_app.url_path_for("get_all_user_activities") + response = await client.post(url, json=pagination) + + assert response.status_code == status.HTTP_403_FORBIDDEN + + +@pytest.mark.anyio +async def test_get_user_activities_user_not_found( + client: AsyncClient, + fastapi_app: FastAPI, 
+ set_mock_user, +): + # Mock the current user as an ADMINISTRATOR + set_mock_user(fastapi_app, [RoleEnum.ADMINISTRATOR]) + + # Non-existent user_id + non_existent_user_id = 9999 + + # Prepare request data + pagination = { + "page": 1, + "size": 10, + "filters": [], + "sortOrders": [], + } + + # Send request to get user activities for a non-existent user + url = fastapi_app.url_path_for("get_user_activities", user_id=non_existent_user_id) + response = await client.post(url, json=pagination) + + assert response.status_code == status.HTTP_404_NOT_FOUND + + +@pytest.mark.anyio +async def test_get_all_user_login_history_as_administrator( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, + add_models, +): + # Mock the current user as an ADMINISTRATOR + set_mock_user(fastapi_app, [RoleEnum.ADMINISTRATOR]) + + # Prepare login history records for multiple users + login_history_1 = UserLoginHistory( + user_login_history_id=1, + keycloak_email="admin1@example.com", + external_username="admin_user1", + keycloak_user_id="user1", + is_login_successful=True, + create_date=datetime.strptime("2024-01-01", "%Y-%m-%d"), + ) + login_history_2 = UserLoginHistory( + user_login_history_id=2, + keycloak_email="admin2@example.com", + external_username="admin_user2", + keycloak_user_id="user2", + is_login_successful=False, + login_error_message="Invalid password", + create_date=datetime.strptime("2024-01-02", "%Y-%m-%d"), + ) + + await add_models([login_history_1, login_history_2]) + + # Prepare request data for pagination + pagination = { + "page": 1, + "size": 10, + "filters": [], + "sortOrders": [], + } + + # Send request to get all user login history + url = fastapi_app.url_path_for("get_all_user_login_history") + response = await client.post(url, json=pagination) + + assert response.status_code == status.HTTP_200_OK + content = UserLoginHistoryResponseSchema(**response.json()) + assert len(content.histories) == 2 # Should retrieve 2 records + + +@pytest.mark.anyio +async def test_get_all_user_login_history_permission_denied( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, +): + # Mock the current user without ADMINISTRATOR role + set_mock_user(fastapi_app, [RoleEnum.ANALYST]) + + # Prepare request data for pagination + pagination = { + "page": 1, + "size": 10, + "filters": [], + "sortOrders": [], + } + + # Send request to get all user login history + url = fastapi_app.url_path_for("get_all_user_login_history") + response = await client.post(url, json=pagination) + + assert response.status_code == status.HTTP_403_FORBIDDEN + + +@pytest.mark.anyio +async def test_get_all_user_login_history_with_pagination( + client: AsyncClient, + fastapi_app: FastAPI, + set_mock_user, + add_models, +): + # Mock the current user as an ADMINISTRATOR + set_mock_user(fastapi_app, [RoleEnum.ADMINISTRATOR]) + + # Prepare login history records for testing pagination + login_history_records = [ + UserLoginHistory( + user_login_history_id=i, + keycloak_email=f"user{i}@example.com", + external_username=f"user_{i}", + keycloak_user_id=f"user_id_{i}", + is_login_successful=(i % 2 == 0), + create_date=datetime.strptime("2024-01-01", "%Y-%m-%d"), + ) + for i in range(1, 21) # Assuming 20 records + ] + await add_models(login_history_records) + + # Request data for the first page with size 5 + pagination = { + "page": 1, + "size": 5, + "filters": [], + "sortOrders": [], + } + + # Send request to get the first page of login history + url = fastapi_app.url_path_for("get_all_user_login_history") + response = await 
client.post(url, json=pagination) + + assert response.status_code == status.HTTP_200_OK + content = UserLoginHistoryResponseSchema(**response.json()) + assert len(content.histories) == 5 + assert content.pagination.total == 20 + assert content.pagination.page == 1 + assert content.pagination.size == 5 + assert content.pagination.total_pages == 4 + + +@pytest.mark.anyio +async def test_track_logged_in_success(client: AsyncClient, fastapi_app, set_mock_user): + """Test successful tracking of user login.""" + # Arrange + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) # Mock user with valid role + url = "/api/users/logged-in" # Adjust prefix if needed + + # Mock the UserServices method + with patch( + "lcfs.web.api.user.services.UserServices.track_user_login" + ) as mock_track_user_login: + mock_track_user_login.return_value = AsyncMock() + + # Act + response = await client.post(url) + + # Assert + assert response.status_code == status.HTTP_200_OK + assert response.text == '"Tracked"' # FastAPI returns JSON-compatible strings + + # Extract the first argument of the first call + user_profile = mock_track_user_login.call_args[0][0] + assert isinstance(user_profile, UserProfile) + + +@pytest.mark.anyio +async def test_update_email_success( + client: AsyncClient, + fastapi_app, + set_mock_user, +): + set_mock_user(fastapi_app, [RoleEnum.GOVERNMENT]) + + # Prepare request data + request_data = {"email": "new_email@domain.com"} + + # Act: Send POST request to the endpoint + url = fastapi_app.url_path_for("update_email") + response = await client.post(url, json=request_data) + + # Assert: Check response status and content + assert response.status_code == status.HTTP_200_OK diff --git a/backend/lcfs/tests/user/user_payloads.py b/backend/lcfs/tests/user/user_payloads.py new file mode 100644 index 000000000..38b9c5acc --- /dev/null +++ b/backend/lcfs/tests/user/user_payloads.py @@ -0,0 +1,16 @@ +from lcfs.db.models.user.UserProfile import UserProfile + +# User ORM Model +user_orm_model = UserProfile( + keycloak_user_id="user_id", + keycloak_email="email@domain.com", + keycloak_username="username", + email="email@domain.com", + title="Developer", + phone="1234567890", + mobile_phone="0987654321", + first_name="John", + last_name="Smith", + is_active=True, + organization_id=1, +) diff --git a/backend/lcfs/utils/__init__.py b/backend/lcfs/utils/__init__.py new file mode 100644 index 000000000..e8282a5e6 --- /dev/null +++ b/backend/lcfs/utils/__init__.py @@ -0,0 +1 @@ +"""Utils for lcfs.""" diff --git a/backend/lcfs/utils/constants.py b/backend/lcfs/utils/constants.py new file mode 100644 index 000000000..793fe2a4e --- /dev/null +++ b/backend/lcfs/utils/constants.py @@ -0,0 +1,97 @@ +from enum import Enum +from lcfs.db.models.transfer.TransferStatus import TransferStatusEnum + + +class LCFS_Constants: + EXCEL_MEDIA_TYPE = "application/vnd.ms-excel" + USERS_EXPORT_FILENAME = "BC-LCFS-BCeID-Users" + USERS_EXPORT_SHEETNAME = "BCeID Users" + USERS_EXPORT_COLUMNS = [ + "Last name", + "First name", + "Email", + "BCeID User ID", + "Title", + "Phone", + "Mobile", + "Status", + "Role(s)", + "Organization name", + ] + FROM_ORG_TRANSFER_STATUSES = [ + TransferStatusEnum.Draft, + TransferStatusEnum.Sent, + TransferStatusEnum.Rescinded, + TransferStatusEnum.Deleted, + ] + TO_ORG_TRANSFER_STATUSES = [ + TransferStatusEnum.Submitted, + TransferStatusEnum.Declined, + TransferStatusEnum.Rescinded, + ] + GOV_TRANSFER_STATUSES = [ + TransferStatusEnum.Submitted, # To handle the save comment feature + 
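        # Note: these audience-scoped status lists presumably gate which
        # transfer-history rows each viewer may see; a hedged filtering
        # sketch (the attribute name `transfer_status` is an assumption):
        #   visible = [h for h in history
        #              if h.transfer_status in LCFS_Constants.GOV_TRANSFER_STATUSES]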
TransferStatusEnum.Recommended, + TransferStatusEnum.Refused, + TransferStatusEnum.Recorded, + ] + + # Export transactions + TRANSACTIONS_EXPORT_MEDIA_TYPE = "application/vnd.ms-excel" + TRANSACTIONS_EXPORT_COLUMNS = [ + "ID", + "Compliance period", + "Type", + "Compliance units from", + "Compliance units to", + "Number of units", + "Value per unit", + "Category", + "Status", + "Effective Date", + "Recorded", + "Approved", + "Comments (external)", + ] + TRANSACTIONS_EXPORT_SHEETNAME = "Transactions" + TRANSACTIONS_EXPORT_FILENAME = "BC-LCFS-transactions" + + +class FILE_MEDIA_TYPE(Enum): + PDF = "application/pdf" + DOC = "application/msword" + DOCX = "application/vnd.openxmlformats-officedocument.wordprocessingml.document" + XLS = "application/vnd.ms-excel" + XLSX = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" + CSV = "text/csv" + TXT = "text/plain" + JSON = "application/json" + XML = "application/xml" + ZIP = "application/zip" + + +transaction_type_to_id_prefix_map = { + "Transfer": "CT", + "AdminAdjustment": "AA", + "InitiativeAgreement": "IA", +} + +id_prefix_to_transaction_type_map = { + "CT": "Transfer", + "AA": "AdminAdjustment", + "IA": "InitiativeAgreement", +} + +default_ci = {"Gasoline": 93.67, "Diesel": 100.21, "Jet fuel": 88.83} + + +RENEWABLE_FUEL_TYPES = [ + "Renewable gasoline", + "Ethanol", + "Renewable naphtha", + "Biodiesel", + "HDRD", + "Other diesel", + "Alternative jet fuel", + "Other" +] diff --git a/backend/lcfs/utils/performance_profiler.py b/backend/lcfs/utils/performance_profiler.py new file mode 100644 index 000000000..763d1ac35 --- /dev/null +++ b/backend/lcfs/utils/performance_profiler.py @@ -0,0 +1,18 @@ +import cProfile +import contextlib + +import pstats + + +### with profile_performance(): +### result = await self.db.execute(query) +### transactions = result.scalars().all() +@contextlib.contextmanager +def profile_performance(): + """Developer Util for Profiling functions""" + pr = cProfile.Profile() + pr.enable() + yield + pr.disable() + ps = pstats.Stats(pr).sort_stats("cumulative") + ps.print_stats() diff --git a/backend/lcfs/utils/query_analyzer.py b/backend/lcfs/utils/query_analyzer.py new file mode 100644 index 000000000..007bbd939 --- /dev/null +++ b/backend/lcfs/utils/query_analyzer.py @@ -0,0 +1,26 @@ +import time +from sqlalchemy import event +from sqlalchemy.engine import Engine +import structlog +from lcfs.logging_config import correlation_id_var + +logger = structlog.get_logger('sqlalchemy') + +SLOW_QUERY_TIME = 1.3 # Seconds + +@event.listens_for(Engine, "before_cursor_execute") +def before_cursor_execute(conn, cursor, statement, parameters, context, executemany): + conn.info.setdefault("query_start_time", []).append(time.time()) + +@event.listens_for(Engine, "after_cursor_execute") +def after_cursor_execute(conn, cursor, statement, parameters, context, executemany): + total = time.time() - conn.info["query_start_time"].pop(-1) + if total > SLOW_QUERY_TIME: + logger.warning( + "Slow query detected", + query=statement, + execution_time=total, + parameters=parameters, + transaction_id=id(conn), + correlation_id=correlation_id_var.get(), + ) diff --git a/backend/lcfs/utils/spreadsheet_builder.py b/backend/lcfs/utils/spreadsheet_builder.py new file mode 100644 index 000000000..a944c6c17 --- /dev/null +++ b/backend/lcfs/utils/spreadsheet_builder.py @@ -0,0 +1,196 @@ +from io import BytesIO +import structlog +import pandas as pd +from openpyxl import styles +from openpyxl.utils import get_column_letter +import xlwt + + +logger 
= structlog.get_logger(__name__) + + +class SpreadsheetBuilder: + """ + A class to build spreadsheets in xlsx, xls, or csv format. + Allows adding multiple sheets with custom styling and exporting them as a byte stream. + + Note: Only the first sheet data is used for the CSV format, + as CSV files do not support multiple sheets. + + Example: + -------- + builder = SpreadsheetBuilder('xlsx') # xlsx, xls, csv + + # First sheet + builder.add_sheet( + 'Employees', + ['Name', 'Dept'], # Columns + [['Alex', 'HR'], ['Mike', 'IT']], # Rows + styles={'bold_headers': True} + ) + + # Second sheet + builder.add_sheet( ... ) + + file_content = builder.build_spreadsheet() + """ + + def __init__(self, file_format: str = "xls"): + if file_format not in ["xlsx", "xls", "csv"]: + raise ValueError(f"Unsupported file format: {file_format}") + + self.file_format = file_format + self.sheets_data = [] + + def add_sheet( + self, sheet_name: str, columns: list, rows: list, styles: dict = None + ): + self.sheets_data.append( + { + "sheet_name": sheet_name, + "columns": columns, + "rows": rows, + "styles": styles or {}, + } + ) + + def build_spreadsheet(self) -> bytes: + try: + output = BytesIO() + if self.file_format == "xlsx": + self._write_xlsx(output) + elif self.file_format == "xls": + self._write_xls(output) + elif self.file_format == "csv": + self._write_csv(output) + + output.seek(0) + return output.getvalue() + + except Exception as e: + logger.error("Failed to build spreadsheet", error=str(e), exc_info=e) + raise + + def _write_xlsx(self, output): + with pd.ExcelWriter(output, engine="openpyxl") as writer: + for sheet in self.sheets_data: + self._write_sheet_to_excel(writer, sheet) + + # Auto-adjusting column widths + self._auto_adjust_column_width_xlsx(writer, sheet) + + def _write_sheet_to_excel(self, writer, sheet): + df = pd.DataFrame(sheet["rows"], columns=sheet["columns"]) + df.to_excel(writer, index=False, sheet_name=sheet["sheet_name"]) + + worksheet = writer.sheets[sheet["sheet_name"]] + + # Apply number formatting for numeric columns + for col_idx, column in enumerate(df.columns): + if df[column].dtype in ["float", "int"]: + for row in worksheet.iter_rows( + min_row=2, + max_row=worksheet.max_row, + min_col=col_idx + 1, + max_col=col_idx + 1, + ): + for cell in row: + cell.number_format = "#,##0" + cell.alignment = styles.Alignment(horizontal="left") + + self._apply_excel_styling(writer, sheet) + + def _apply_excel_styling(self, writer, sheet): + worksheet = writer.sheets[sheet["sheet_name"]] + + # Apply no border, non-bold font, and left alignment to header cells + for cell in worksheet[1]: + cell.border = styles.Border( + left=styles.Side(style=None), + right=styles.Side(style=None), + top=styles.Side(style=None), + bottom=styles.Side(style=None), + ) + cell.font = styles.Font(bold=False) + cell.alignment = styles.Alignment(horizontal="left") + + if sheet["styles"].get("bold_headers"): + for cell in worksheet[1]: + cell.font = styles.Font(bold=True) + + def _auto_adjust_column_width_xlsx(self, writer, sheet_data): + worksheet = writer.sheets[sheet_data["sheet_name"]] + for column in worksheet.columns: + max_length = 0 + column_letter = get_column_letter(column[0].column) + + for cell in column: + try: + cell_length = len(str(cell.value)) + if cell_length > max_length: + max_length = cell_length + except: + pass + + adjusted_width = max_length + 2 # Adding 2 for a little extra space + worksheet.column_dimensions[column_letter].width = adjusted_width + + def _write_xls(self, output): + book = 
xlwt.Workbook() + + for sheet_data in self.sheets_data: + self._write_sheet_to_xls(book, sheet_data) + book.save(output) + + def _write_sheet_to_xls(self, book, sheet_data): + sheet = book.add_sheet(sheet_data["sheet_name"]) + + # Styles for headers + bold_style = xlwt.XFStyle() + font = xlwt.Font() + font.bold = True + bold_style.font = font + default_style = xlwt.XFStyle() + + # Style for left-aligned numbers + left_aligned_num_style = xlwt.XFStyle() + left_aligned_num_style.num_format_str = "#,##0" + alignment = xlwt.Alignment() + alignment.horz = xlwt.Alignment.HORZ_LEFT + left_aligned_num_style.alignment = alignment + + # Apply bold style for headers if required + header_style = ( + bold_style if sheet_data["styles"].get("bold_headers") else default_style + ) + + for col_index, column in enumerate(sheet_data["columns"]): + sheet.write(0, col_index, column, header_style) + + # Auto-adjusting column widths + self._auto_adjust_column_width_xls(sheet, sheet_data) + + for row_index, row in enumerate(sheet_data["rows"], start=1): + for col_index, value in enumerate(row): + # Apply left-aligned style for numbers, default style for others + cell_style = ( + left_aligned_num_style + if isinstance(value, (int, float)) + else xlwt.XFStyle() + ) + sheet.write(row_index, col_index, value, cell_style) + + def _auto_adjust_column_width_xls(self, sheet, sheet_data): + column_widths = [ + max(len(str(cell)) for cell in column) + 2 + for column in zip(*sheet_data["rows"], sheet_data["columns"]) + ] + for i, width in enumerate(column_widths): + sheet.col(i).width = 256 * width + + def _write_csv(self, output): + if self.sheets_data: + df = pd.DataFrame( + self.sheets_data[0]["rows"], columns=self.sheets_data[0]["columns"] + ) + df.to_csv(output, index=False) diff --git a/backend/lcfs/web/api/admin_adjustment/__init__.py b/backend/lcfs/web/api/admin_adjustment/__init__.py new file mode 100644 index 000000000..c7a443ce5 --- /dev/null +++ b/backend/lcfs/web/api/admin_adjustment/__init__.py @@ -0,0 +1,5 @@ +"""User API.""" + +from lcfs.web.api.admin_adjustment.views import router + +__all__ = ["router"] diff --git a/backend/lcfs/web/api/admin_adjustment/repo.py b/backend/lcfs/web/api/admin_adjustment/repo.py new file mode 100644 index 000000000..b00d81430 --- /dev/null +++ b/backend/lcfs/web/api/admin_adjustment/repo.py @@ -0,0 +1,160 @@ +from fastapi import Depends +from typing import Optional +from datetime import datetime +from sqlalchemy import select, and_ +from sqlalchemy.orm import selectinload +from sqlalchemy.ext.asyncio import AsyncSession +from lcfs.web.exception.exceptions import DataNotFoundException +from lcfs.db.models.admin_adjustment import AdminAdjustment +from lcfs.db.models.admin_adjustment.AdminAdjustmentStatus import AdminAdjustmentStatus +from lcfs.db.models.admin_adjustment.AdminAdjustmentHistory import ( + AdminAdjustmentHistory, +) +from lcfs.web.api.admin_adjustment.schema import AdminAdjustmentSchema + +from lcfs.db.dependencies import get_async_db_session +from lcfs.web.core.decorators import repo_handler + + +class AdminAdjustmentRepository: + def __init__(self, db: AsyncSession = Depends(get_async_db_session)): + self.db = db + + @repo_handler + async def get_admin_adjustment_by_id( + self, admin_adjustment_id: int + ) -> Optional[AdminAdjustment]: + query = ( + select(AdminAdjustment) + .options( + selectinload(AdminAdjustment.to_organization), + selectinload(AdminAdjustment.current_status), + selectinload(AdminAdjustment.history).selectinload( + 
AdminAdjustmentHistory.user_profile + ), + selectinload(AdminAdjustment.history).selectinload( + AdminAdjustmentHistory.admin_adjustment_status + ), + ) + .where(AdminAdjustment.admin_adjustment_id == admin_adjustment_id) + ) + result = await self.db.execute(query) + return result.scalars().first() + + @repo_handler + async def create_admin_adjustment( + self, admin_adjustment: AdminAdjustment + ) -> AdminAdjustment: + self.db.add(admin_adjustment) + await self.db.flush() + await self.db.refresh( + admin_adjustment, + [ + "to_organization", + "current_status", + "history", + ], + ) # Ensures that all specified relations are up-to-date + return admin_adjustment + + @repo_handler + async def update_admin_adjustment( + self, admin_adjustment: AdminAdjustment + ) -> AdminAdjustment: + merged_admin_adjustment = await self.db.merge(admin_adjustment) + await self.db.flush() + return merged_admin_adjustment + + @repo_handler + async def get_admin_adjustment_status_by_name( + self, status_name: str + ) -> AdminAdjustmentStatus: + """ + Fetches the Admin Adjustment status by its name. + """ + query = await self.db.execute( + select(AdminAdjustmentStatus).where( + AdminAdjustmentStatus.status == status_name + ) + ) + status = query.scalars().first() + + if not status: + raise DataNotFoundException( + f"Admin Adjustment status '{status_name}' not found" + ) + + return status + + @repo_handler + async def add_admin_adjustment_history( + self, + admin_adjustment_id: int, + admin_adjustment_status_id: int, + user_profile_id: int, + ) -> AdminAdjustmentHistory: + """ + Adds a new record to the admin adjustment history in the database. + + Args: + admin_adjustment_id (int): The ID of the admin adjustment to which this history record relates. + admin_adjustment_status_id (int): The status ID that describes the current state of the admin adjustment. + user_profile_id (int): The ID of the user who made the change. + + Returns: + AdminAdjustmentHistory: The newly created admin adjustment history record. + """ + new_history_record = AdminAdjustmentHistory( + admin_adjustment_id=admin_adjustment_id, + admin_adjustment_status_id=admin_adjustment_status_id, + user_profile_id=user_profile_id, + ) + self.db.add(new_history_record) + await self.db.flush() + return new_history_record + + @repo_handler + async def update_admin_adjustment_history( + self, + admin_adjustment_id: int, + admin_adjustment_status_id: int, + user_profile_id: int, + ) -> AdminAdjustmentHistory: + """ + Updates an admin adjustment history record in the database. + + Args: + admin_adjustment_id (int): The ID of the admin adjustment to which this history record relates. + admin_adjustment_status_id (int): The status ID that describes the current state of the admin adjustment. + user_profile_id (int): The ID of the user who made the change. + + Returns: + AdminAdjustmentHistory: The updated admin adjustment history record. 
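        Example (illustrative sketch; IDs assumed, not taken from real data):
            # A re-recommended adjustment updates the existing Recommended
            # history row rather than appending a duplicate:
            history = await repo.update_admin_adjustment_history(
                admin_adjustment_id=11,
                admin_adjustment_status_id=1,  # Recommended (id assumed)
                user_profile_id=7,
            )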
+ """ + existing_history = await self.db.scalar( + select(AdminAdjustmentHistory).where( + and_( + AdminAdjustmentHistory.admin_adjustment_id == admin_adjustment_id, + AdminAdjustmentHistory.admin_adjustment_status_id + == admin_adjustment_status_id, + ) + ) + ) + existing_history.create_date = datetime.now() + existing_history.update_date = datetime.now() + existing_history.user_profile_id = user_profile_id + self.db.add(existing_history) + await self.db.flush() + return existing_history + + @repo_handler + async def refresh_admin_adjustment( + self, admin_adjustment: AdminAdjustment + ) -> AdminAdjustment: + """ + Commits and refreshes an administrative adjustment object in db session + + """ + await self.db.flush() + await self.db.refresh(admin_adjustment) + return admin_adjustment diff --git a/backend/lcfs/web/api/admin_adjustment/schema.py b/backend/lcfs/web/api/admin_adjustment/schema.py new file mode 100644 index 000000000..4784f423f --- /dev/null +++ b/backend/lcfs/web/api/admin_adjustment/schema.py @@ -0,0 +1,65 @@ +from lcfs.web.api.base import BaseSchema +from typing import Optional, List +from datetime import date, datetime + + +class AdminAdjustmentStatusSchema(BaseSchema): + admin_adjustment_status_id: int + status: str + + class Config: + from_attributes = True + + +class HistoryUserSchema(BaseSchema): + first_name: str + last_name: str + + class Config: + from_attributes = True + + +class OrganizationSchema(BaseSchema): + organization_id: int + name: str + + class Config: + from_attributes = True + + +class AdminAdjustmentHistorySchema(BaseSchema): + create_date: datetime + admin_adjustment_status: AdminAdjustmentStatusSchema + user_profile: HistoryUserSchema + + class Config: + from_attributes = True + + +class AdminAdjustmentBaseSchema(BaseSchema): + compliance_units: int + current_status: AdminAdjustmentStatusSchema + transaction_effective_date: Optional[date] = None + to_organization_id: int + gov_comment: Optional[str] = None + internal_comment: Optional[str] = None + + class Config: + from_attributes = True + + +class AdminAdjustmentSchema(AdminAdjustmentBaseSchema): + admin_adjustment_id: int + to_organization: OrganizationSchema + history: Optional[List[AdminAdjustmentHistorySchema]] + returned: Optional[bool] = False + create_date: datetime + + +class AdminAdjustmentCreateSchema(AdminAdjustmentBaseSchema): + current_status: str + + +class AdminAdjustmentUpdateSchema(AdminAdjustmentBaseSchema): + admin_adjustment_id: int + current_status: str diff --git a/backend/lcfs/web/api/admin_adjustment/services.py b/backend/lcfs/web/api/admin_adjustment/services.py new file mode 100644 index 000000000..1b9bbfc68 --- /dev/null +++ b/backend/lcfs/web/api/admin_adjustment/services.py @@ -0,0 +1,199 @@ +from datetime import datetime +from fastapi import Depends, Request, HTTPException +from lcfs.db.models.admin_adjustment import AdminAdjustment +from lcfs.db.models.admin_adjustment.AdminAdjustmentStatus import ( + AdminAdjustmentStatusEnum, +) +from lcfs.db.models.user.Role import RoleEnum +from lcfs.web.api.admin_adjustment.schema import ( + AdminAdjustmentCreateSchema, + AdminAdjustmentSchema, + AdminAdjustmentUpdateSchema, +) +from lcfs.web.api.admin_adjustment.repo import AdminAdjustmentRepository +from lcfs.web.exception.exceptions import DataNotFoundException +from lcfs.web.core.decorators import service_handler +from lcfs.web.api.role.schema import user_has_roles +from lcfs.db.models.transaction.Transaction import TransactionActionEnum +from 
lcfs.web.api.organizations.services import OrganizationsService +from lcfs.web.api.internal_comment.services import InternalCommentService +from lcfs.web.api.internal_comment.schema import ( + InternalCommentCreateSchema, + AudienceScopeEnum, + EntityTypeEnum, +) + + +class AdminAdjustmentServices: + def __init__( + self, + repo: AdminAdjustmentRepository = Depends(AdminAdjustmentRepository), + org_service: OrganizationsService = Depends(OrganizationsService), + internal_comment_service: InternalCommentService = Depends( + InternalCommentService + ), + request: Request = None, + ) -> None: + self.repo = repo + self.org_service = org_service + self.internal_comment_service = internal_comment_service + self.request = request + + @service_handler + async def get_admin_adjustment( + self, admin_adjustment_id: int + ) -> AdminAdjustmentSchema: + """Fetch an admin adjustment by its ID.""" + admin_adjustment = await self.repo.get_admin_adjustment_by_id( + admin_adjustment_id + ) + return AdminAdjustmentSchema.from_orm(admin_adjustment) + + @service_handler + async def update_admin_adjustment( + self, admin_adjustment_data: AdminAdjustmentUpdateSchema + ) -> AdminAdjustmentSchema: + """Update an existing admin adjustment.""" + admin_adjustment = await self.repo.get_admin_adjustment_by_id( + admin_adjustment_data.admin_adjustment_id + ) + if not admin_adjustment: + raise DataNotFoundException( + f"Admin Adjustment with ID {admin_adjustment_data.admin_adjustment_id} not found." + ) + + new_status = await self.repo.get_admin_adjustment_status_by_name( + admin_adjustment_data.current_status + ) + status_has_changed = admin_adjustment.current_status != new_status + + # Update the fields except for 'current_status' + # FIXME: 1. if the transaction is not in Draft status then don't update the effective date, compliance units and organization details + # FIXME: 2. a draft transaction can be approved directly by the Director via backend api. + # FIXME: 3. should the Director/Compliance Manager be allowed to change the draft transaction to recommend? A validation may be required? 
NOTE: please confirm + for field, value in admin_adjustment_data.dict(exclude_unset=True).items(): + if field != "current_status": + setattr(admin_adjustment, field, value) + + # Initialize status flags + returned, re_recommended = False, False + + if status_has_changed: + admin_adjustment.current_status = new_status + + # Issue compliance units by Director if status is approved + if new_status.status == AdminAdjustmentStatusEnum.Approved: + await self.director_approve_admin_adjustment(admin_adjustment) + + # Check previous recommended status + previous_recommended = any( + history.admin_adjustment_status.status + == AdminAdjustmentStatusEnum.Recommended + for history in admin_adjustment.history + ) + + if previous_recommended: + if new_status.status == AdminAdjustmentStatusEnum.Draft: + returned = True + elif new_status.status == AdminAdjustmentStatusEnum.Recommended: + re_recommended = True + + # Update or add history record based on status flags + history_method = ( + self.repo.update_admin_adjustment_history + if re_recommended + else self.repo.add_admin_adjustment_history + ) + # We only track history changes on Recommended and Approved, not Draft + if new_status.status != AdminAdjustmentStatusEnum.Draft: + await history_method( + admin_adjustment.admin_adjustment_id, + new_status.admin_adjustment_status_id, + self.request.user.user_profile_id, + ) + + # Save the updated admin adjustment + updated_admin_adjustment = await self.repo.update_admin_adjustment( + admin_adjustment + ) + + # Return the updated admin adjustment schema with the returned status flag + aa_schema = AdminAdjustmentSchema.from_orm(updated_admin_adjustment) + aa_schema.returned = returned + + return aa_schema + + @service_handler + async def create_admin_adjustment( + self, admin_adjustment_data: AdminAdjustmentCreateSchema + ) -> AdminAdjustmentSchema: + """ + Handles creating an admin adjustment, including creating a comment (if provided). 
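        Example payload (a sketch; field values assumed):
            AdminAdjustmentCreateSchema(
                compliance_units=50,
                current_status="Recommended",
                to_organization_id=1,
                internal_comment="Initial recommendation",
            )
        When current_status is "Recommended", a history row is also written,
        and any internal comment is stored via InternalCommentService.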
+ """ + # Fetch the status for the admin adjustment + current_status = await self.repo.get_admin_adjustment_status_by_name( + admin_adjustment_data.current_status + ) + + # Create the admin adjustment + admin_adjustment = AdminAdjustment( + **admin_adjustment_data.model_dump( + exclude={"current_status", "internal_comment"} + ) + ) + + admin_adjustment.current_status = current_status + + # Save the admin adjustment + admin_adjustment = await self.repo.create_admin_adjustment(admin_adjustment) + + if current_status.status == AdminAdjustmentStatusEnum.Recommended: + await self.repo.add_admin_adjustment_history( + admin_adjustment.admin_adjustment_id, + current_status.admin_adjustment_status_id, + self.request.user.user_profile_id, + ) + + # Create internal comment if provided + if admin_adjustment_data.internal_comment: + internal_comment_data = InternalCommentCreateSchema( + entity_type=EntityTypeEnum.ADMIN_ADJUSTMENT, + entity_id=admin_adjustment.admin_adjustment_id, + comment=admin_adjustment_data.internal_comment, + audience_scope=AudienceScopeEnum.ANALYST, + ) + await self.internal_comment_service.create_internal_comment( + internal_comment_data + ) + + return AdminAdjustmentSchema.from_orm(admin_adjustment) + + async def director_approve_admin_adjustment( + self, admin_adjustment: AdminAdjustment + ): + """Create ledger transaction for approved admin adjustment""" + + user = self.request.user + has_director_role = user_has_roles( + user, [RoleEnum.GOVERNMENT, RoleEnum.DIRECTOR] + ) + + if not has_director_role: + raise HTTPException(status_code=403, detail="Forbidden.") + + if admin_adjustment.transaction != None: + raise HTTPException(status_code=403, detail="Transaction already exists.") + + # Create new transaction for receiving organization + to_transaction = await self.org_service.adjust_balance( + transaction_action=TransactionActionEnum.Adjustment, + compliance_units=admin_adjustment.compliance_units, + organization_id=admin_adjustment.to_organization_id, + ) + admin_adjustment.transaction = to_transaction + + # Set effective date to today if the analyst left it blank + if admin_adjustment.transaction_effective_date is None: + admin_adjustment.transaction_effective_date = datetime.now().date() + + await self.repo.refresh_admin_adjustment(admin_adjustment) diff --git a/backend/lcfs/web/api/admin_adjustment/validation.py b/backend/lcfs/web/api/admin_adjustment/validation.py new file mode 100644 index 000000000..94028a532 --- /dev/null +++ b/backend/lcfs/web/api/admin_adjustment/validation.py @@ -0,0 +1,69 @@ +from fastapi import Depends, HTTPException, Request +from lcfs.db.models.user.Role import RoleEnum +from lcfs.web.api.role.schema import user_has_roles +from starlette import status + +from lcfs.web.api.admin_adjustment.schema import ( + AdminAdjustmentCreateSchema, + AdminAdjustmentSchema, +) +from lcfs.web.api.admin_adjustment.services import AdminAdjustmentServices +from lcfs.db.models.admin_adjustment.AdminAdjustmentStatus import ( + AdminAdjustmentStatusEnum, +) + + +class AdminAdjustmentValidation: + def __init__( + self, + request: Request = None, + service: AdminAdjustmentServices = Depends(AdminAdjustmentServices), + ) -> None: + self.request = request + self.service = service + + async def validate_admin_adjustment_create( + self, request, adjustment_data: AdminAdjustmentCreateSchema + ): + pass + + async def validate_admin_adjustment_update( + self, request, adjustment_data: AdminAdjustmentSchema + ): + # Retrieve the current admin adjustment data from the database 
+ admin_adjustment = await self.service.get_admin_adjustment( + adjustment_data.admin_adjustment_id + ) + + if ( + admin_adjustment.current_status.status + == AdminAdjustmentStatusEnum.Approved.name + ): + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="Editing a processed admin adjustment is not allowed.", + ) + + async def validate_organization_access(self, admin_adjustment_id: int): + admin_adjustment = await self.service.get_admin_adjustment(admin_adjustment_id) + if not admin_adjustment: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Transaction not found.", + ) + + organization_id = admin_adjustment.to_organization.organization_id + user_organization_id = ( + self.request.user.organization.organization_id + if self.request.user.organization + else None + ) + + if ( + not user_has_roles(self.request.user, [RoleEnum.GOVERNMENT]) + and organization_id != user_organization_id + ): + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="User does not have access to this transaction.", + ) diff --git a/backend/lcfs/web/api/admin_adjustment/views.py b/backend/lcfs/web/api/admin_adjustment/views.py new file mode 100644 index 000000000..2aba3f879 --- /dev/null +++ b/backend/lcfs/web/api/admin_adjustment/views.py @@ -0,0 +1,57 @@ +from fastapi import APIRouter, Depends, Request, status +from lcfs.db import dependencies +from lcfs.web.api.admin_adjustment.services import AdminAdjustmentServices +from lcfs.web.api.admin_adjustment.schema import ( + AdminAdjustmentCreateSchema, + AdminAdjustmentSchema, + AdminAdjustmentUpdateSchema, +) +from lcfs.web.api.admin_adjustment.validation import AdminAdjustmentValidation +from lcfs.web.core.decorators import view_handler +from lcfs.db.models.user.Role import RoleEnum + +router = APIRouter() +get_async_db = dependencies.get_async_db_session + + +@router.get("/{admin_adjustment_id}", response_model=AdminAdjustmentSchema) +@view_handler(["*"]) +async def get_admin_adjustment( + request: Request, + admin_adjustment_id: int, + service: AdminAdjustmentServices = Depends(), + validate: AdminAdjustmentValidation = Depends(), +): + """Endpoint to fetch an admin adjustment by its ID.""" + await validate.validate_organization_access(admin_adjustment_id) + return await service.get_admin_adjustment(admin_adjustment_id) + + +@router.put( + "/", response_model=AdminAdjustmentSchema, status_code=status.HTTP_202_ACCEPTED +) +@view_handler([RoleEnum.GOVERNMENT]) +async def update_admin_adjustment( + request: Request, + admin_adjustment_data: AdminAdjustmentUpdateSchema = ..., + service: AdminAdjustmentServices = Depends(), + validate: AdminAdjustmentValidation = Depends(), +): + """Endpoint to update an existing admin adjustment.""" + await validate.validate_admin_adjustment_update(request, admin_adjustment_data) + return await service.update_admin_adjustment(admin_adjustment_data) + + +@router.post( + "/", response_model=AdminAdjustmentSchema, status_code=status.HTTP_201_CREATED +) +@view_handler([RoleEnum.ANALYST]) +async def create_admin_adjustment( + request: Request, + admin_adjustment_create: AdminAdjustmentCreateSchema = ..., + service: AdminAdjustmentServices = Depends(), + validate: AdminAdjustmentValidation = Depends(), +): + """Endpoint to create a new admin adjustment.""" + await validate.validate_admin_adjustment_create(request, admin_adjustment_create) + return await service.create_admin_adjustment(admin_adjustment_create) diff --git a/backend/lcfs/web/api/allocation_agreement/__init__.py 
b/backend/lcfs/web/api/allocation_agreement/__init__.py new file mode 100644 index 000000000..17c028cd2 --- /dev/null +++ b/backend/lcfs/web/api/allocation_agreement/__init__.py @@ -0,0 +1,5 @@ +"""Allocation Agreement API.""" + +from lcfs.web.api.allocation_agreement.views import router + +__all__ = ["router"] diff --git a/backend/lcfs/web/api/allocation_agreement/repo.py b/backend/lcfs/web/api/allocation_agreement/repo.py new file mode 100644 index 000000000..374f7e6fa --- /dev/null +++ b/backend/lcfs/web/api/allocation_agreement/repo.py @@ -0,0 +1,224 @@ +import structlog +from typing import List + +from fastapi import Depends +from lcfs.db.dependencies import get_async_db_session + +from sqlalchemy import select, delete, func, distinct +from sqlalchemy.orm import joinedload +from sqlalchemy.ext.asyncio import AsyncSession + +from lcfs.db.models.compliance.AllocationAgreement import AllocationAgreement +from lcfs.db.models.compliance.AllocationTransactionType import ( + AllocationTransactionType, +) +from lcfs.db.models.fuel.ProvisionOfTheAct import ProvisionOfTheAct +from lcfs.db.models.fuel.FuelCode import FuelCode + +from lcfs.db.models.fuel.FuelType import QuantityUnitsEnum +from lcfs.web.api.fuel_code.repo import FuelCodeRepository +from lcfs.web.api.allocation_agreement.schema import AllocationAgreementSchema +from lcfs.web.api.base import PaginationRequestSchema +from lcfs.web.core.decorators import repo_handler + +logger = structlog.get_logger(__name__) + + +class AllocationAgreementRepository: + def __init__( + self, + db: AsyncSession = Depends(get_async_db_session), + fuel_repo: FuelCodeRepository = Depends(), + ): + self.db = db + self.fuel_code_repo = fuel_repo + + @repo_handler + async def get_table_options(self) -> dict: + """Get all table options""" + fuel_categories = await self.fuel_code_repo.get_fuel_categories() + fuel_types = await self.fuel_code_repo.get_formatted_fuel_types() + units_of_measure = [unit.value for unit in QuantityUnitsEnum] + allocation_transaction_types = ( + (await self.db.execute(select(AllocationTransactionType))).scalars().all() + ) + provisions_of_the_act = ( + (await self.db.execute(select(ProvisionOfTheAct))).scalars().all() + ) + fuel_codes = (await self.db.execute(select(FuelCode))).scalars().all() + + return { + "allocation_transaction_types": allocation_transaction_types, + "fuel_types": fuel_types, + "fuel_categories": fuel_categories, + "provisions_of_the_act": provisions_of_the_act, + "fuel_codes": fuel_codes, + "units_of_measure": units_of_measure, + } + + @repo_handler + async def get_allocation_agreements( + self, compliance_report_id: int + ) -> List[AllocationAgreementSchema]: + """ + Queries allocation agreements from the database for a specific compliance report. 
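        Relationships (transaction type, fuel type, category, provision,
        code) are eager-loaded via joinedload so the schema mapping below
        reads them without extra queries. Example (values illustrative):
            rows = await repo.get_allocation_agreements(compliance_report_id=1)
            rows[0].fuel_type  # e.g. "Biodiesel"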
+ """ + query = ( + select(AllocationAgreement) + .options( + joinedload(AllocationAgreement.allocation_transaction_type), + joinedload(AllocationAgreement.fuel_type), + joinedload(AllocationAgreement.fuel_category), + joinedload(AllocationAgreement.provision_of_the_act), + joinedload(AllocationAgreement.fuel_code), + joinedload(AllocationAgreement.compliance_report), + ) + .where(AllocationAgreement.compliance_report_id == compliance_report_id) + .order_by(AllocationAgreement.allocation_agreement_id) + ) + result = await self.db.execute(query) + allocation_agreements = result.unique().scalars().all() + + return [ + AllocationAgreementSchema( + allocation_agreement_id=allocation_agreement.allocation_agreement_id, + transaction_partner=allocation_agreement.transaction_partner, + transaction_partner_email=allocation_agreement.transaction_partner_email, + transaction_partner_phone=allocation_agreement.transaction_partner_phone, + postal_address=allocation_agreement.postal_address, + ci_of_fuel=allocation_agreement.ci_of_fuel, + quantity=allocation_agreement.quantity, + units=allocation_agreement.units, + compliance_report_id=allocation_agreement.compliance_report_id, + allocation_transaction_type=allocation_agreement.allocation_transaction_type.type, + fuel_type=allocation_agreement.fuel_type.fuel_type, + fuel_type_other=allocation_agreement.fuel_type_other, + fuel_category=allocation_agreement.fuel_category.category, + provision_of_the_act=allocation_agreement.provision_of_the_act.name, + # Set fuel_code only if it exists + fuel_code=( + allocation_agreement.fuel_code.fuel_code + if allocation_agreement.fuel_code + else None + ), + ) + for allocation_agreement in allocation_agreements + ] + + @repo_handler + async def get_allocation_agreements_paginated( + self, pagination: PaginationRequestSchema, compliance_report_id: int + ) -> List[AllocationAgreementSchema]: + conditions = [AllocationAgreement.compliance_report_id == compliance_report_id] + offset = 0 if pagination.page < 1 else (pagination.page - 1) * pagination.size + limit = pagination.size + + query = ( + select(AllocationAgreement) + .options( + joinedload(AllocationAgreement.allocation_transaction_type), + joinedload(AllocationAgreement.fuel_type), + joinedload(AllocationAgreement.fuel_category), + joinedload(AllocationAgreement.provision_of_the_act), + joinedload(AllocationAgreement.fuel_code), + joinedload(AllocationAgreement.compliance_report), + ) + .where(*conditions) + ) + + count_query = query.with_only_columns(func.count()).order_by(None) + total_count = (await self.db.execute(count_query)).scalar() + + result = await self.db.execute( + query.offset(offset) + .limit(limit) + .order_by(AllocationAgreement.create_date.desc()) + ) + allocation_agreements = result.unique().scalars().all() + + return allocation_agreements, total_count + + @repo_handler + async def get_allocation_agreement( + self, allocation_agreement_id: int + ) -> AllocationAgreement: + """ + Get a specific allocation agreement by id. 
+ """ + return await self.db.scalar( + select(AllocationAgreement) + .options( + joinedload(AllocationAgreement.allocation_transaction_type), + joinedload(AllocationAgreement.fuel_type), + joinedload(AllocationAgreement.fuel_category), + joinedload(AllocationAgreement.provision_of_the_act), + joinedload(AllocationAgreement.fuel_code), + joinedload(AllocationAgreement.compliance_report), + ) + .where( + AllocationAgreement.allocation_agreement_id == allocation_agreement_id + ) + ) + + @repo_handler + async def update_allocation_agreement( + self, allocation_agreement: AllocationAgreement + ) -> AllocationAgreement: + """ + Update an existing allocation agreement in the database. + """ + updated_allocation_agreement = await self.db.merge(allocation_agreement) + await self.db.flush() + await self.db.refresh( + allocation_agreement, + [ + "fuel_category", + "fuel_type", + "allocation_transaction_type", + "provision_of_the_act", + "fuel_code", + ], + ) + return updated_allocation_agreement + + @repo_handler + async def create_allocation_agreement( + self, allocation_agreement: AllocationAgreement + ) -> AllocationAgreement: + """ + Create a new allocation agreement in the database. + """ + self.db.add(allocation_agreement) + await self.db.flush() + await self.db.refresh( + allocation_agreement, + [ + "fuel_category", + "fuel_type", + "allocation_transaction_type", + "provision_of_the_act", + "fuel_code", + ], + ) + return allocation_agreement + + @repo_handler + async def delete_allocation_agreement(self, allocation_agreement_id: int): + """Delete an allocation agreement from the database""" + await self.db.execute( + delete(AllocationAgreement).where( + AllocationAgreement.allocation_agreement_id == allocation_agreement_id + ) + ) + await self.db.flush() + + @repo_handler + async def get_allocation_transaction_type_by_name( + self, type: str + ) -> AllocationTransactionType: + result = await self.db.execute( + select(AllocationTransactionType).where( + AllocationTransactionType.type == type + ) + ) + return result.scalar_one_or_none() diff --git a/backend/lcfs/web/api/allocation_agreement/schema.py b/backend/lcfs/web/api/allocation_agreement/schema.py new file mode 100644 index 000000000..c2079fe2a --- /dev/null +++ b/backend/lcfs/web/api/allocation_agreement/schema.py @@ -0,0 +1,114 @@ +from typing import Optional, List +from pydantic import Field, field_validator +from lcfs.web.api.base import ( + BaseSchema, + FilterModel, + SortOrder, + PaginationRequestSchema, + PaginationResponseSchema, +) +from enum import Enum + + +class AllocationTransactionTypeSchema(BaseSchema): + allocation_transaction_type_id: int + type: str + + +class FuelCategorySchema(BaseSchema): + fuel_category_id: int + category: str + default_and_prescribed_ci: Optional[float] = None + + +class FuelCodeSchema(BaseSchema): + fuel_code_id: int + fuel_code: str + carbon_intensity: float + + +class ProvisionOfTheActSchema(BaseSchema): + provision_of_the_act_id: int + name: str + +class FuelTypeSchema(BaseSchema): + fuel_type_id: int + fuel_type: str + default_carbon_intensity: float + units: str + unrecognized: bool + fuel_categories: List[FuelCategorySchema] + fuel_codes: Optional[List[FuelCodeSchema]] = [] + provision_of_the_act: Optional[List[ProvisionOfTheActSchema]] = [] + + +class ProvisionOfTheActSchema(BaseSchema): + provision_of_the_act_id: int + name: str + + +class AllocationAgreementTableOptionsSchema(BaseSchema): + allocation_transaction_types: List[AllocationTransactionTypeSchema] + fuel_types: 
List[FuelTypeSchema] + provisions_of_the_act: List[ProvisionOfTheActSchema] + fuel_codes: List[FuelCodeSchema] + units_of_measure: List[str] + + +class AllocationAgreementCreateSchema(BaseSchema): + compliance_report_id: int + allocation_agreement_id: Optional[int] = None + allocation_transaction_type: str + transaction_partner: str + postal_address: str + transaction_partner_email: str + transaction_partner_phone: str + fuel_type: str + fuel_type_other: Optional[str] = None + ci_of_fuel: float + provision_of_the_act: str + quantity: int = Field( + ..., gt=0, description="Quantity must be greater than 0" + ) + units: str + fuel_category: str + fuel_code: Optional[str] = None + deleted: Optional[bool] = None + + +class AllocationAgreementSchema(AllocationAgreementCreateSchema): + pass + + +class AllocationAgreementAllSchema(BaseSchema): + allocation_agreements: List[AllocationAgreementSchema] + pagination: Optional[PaginationResponseSchema] = {} + + +class AllocationAgreementListSchema(BaseSchema): + allocation_agreements: List[AllocationAgreementSchema] + pagination: PaginationResponseSchema + + +class PaginatedAllocationAgreementRequestSchema(BaseSchema): + compliance_report_id: int = Field(..., alias="complianceReportId") + filters: List[FilterModel] + page: int + size: int + sort_orders: List[SortOrder] + + +class DeleteAllocationAgreementsSchema(BaseSchema): + allocation_agreement_id: int + compliance_report_id: int + + +class DeleteAllocationAgreementResponseSchema(BaseSchema): + message: str + + +class OrganizationDetailsSchema(BaseSchema): + name: str + address: Optional[str] + email: Optional[str] + phone: Optional[str] diff --git a/backend/lcfs/web/api/allocation_agreement/services.py b/backend/lcfs/web/api/allocation_agreement/services.py new file mode 100644 index 000000000..59ada9e41 --- /dev/null +++ b/backend/lcfs/web/api/allocation_agreement/services.py @@ -0,0 +1,350 @@ +import math +import structlog +from typing import List +from fastapi import Depends +from datetime import datetime + +from lcfs.web.api.allocation_agreement.repo import AllocationAgreementRepository +from lcfs.web.core.decorators import service_handler +from lcfs.db.models.compliance.AllocationAgreement import AllocationAgreement +from lcfs.web.api.base import PaginationRequestSchema, PaginationResponseSchema +from lcfs.web.api.allocation_agreement.schema import ( + AllocationAgreementCreateSchema, + AllocationAgreementSchema, + AllocationAgreementListSchema, + AllocationAgreementTableOptionsSchema, + # AllocationAgreementFuelCategorySchema, + AllocationAgreementAllSchema, + FuelTypeSchema, + FuelCategorySchema, + AllocationTransactionTypeSchema, + ProvisionOfTheActSchema, + FuelCodeSchema, + # UnitOfMeasureSchema, + # ExpectedUseTypeSchema +) +from lcfs.web.api.fuel_code.repo import FuelCodeRepository +from lcfs.utils.constants import default_ci + +logger = structlog.get_logger(__name__) + + +class AllocationAgreementServices: + def __init__( + self, + repo: AllocationAgreementRepository = Depends(AllocationAgreementRepository), + fuel_repo: FuelCodeRepository = Depends(), + ) -> None: + self.repo = repo + self.fuel_repo = fuel_repo + + async def convert_to_model( + self, allocation_agreement: AllocationAgreementCreateSchema + ) -> AllocationAgreement: + """ + Converts data from AllocationAgreementCreateSchema to AllocationAgreement data model to store into the database. 
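        Display names on the schema are resolved to foreign keys through the
        repositories. A sketch of the resolution (values assumed):
            model = await service.convert_to_model(allocation_agreement_data)
            model.fuel_type_id             # looked up from fuel_type="Biodiesel"
            model.provision_of_the_act_id  # looked up by provision display name
            model.fuel_code_id             # None when no fuel_code was supplied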
+ """ + allocation_transaction_type = ( + await self.repo.get_allocation_transaction_type_by_name( + allocation_agreement.allocation_transaction_type + ) + ) + fuel_category = await self.fuel_repo.get_fuel_category_by( + category=allocation_agreement.fuel_category + ) + fuel_type = await self.fuel_repo.get_fuel_type_by_name( + allocation_agreement.fuel_type + ) + provision_of_the_act = await self.fuel_repo.get_provision_of_the_act_by_name( + allocation_agreement.provision_of_the_act + ) + + # Fetch fuel code only if provided, else set it to None + fuel_code_id = None + if allocation_agreement.fuel_code: + fuel_code = await self.fuel_repo.get_fuel_code_by_name( + allocation_agreement.fuel_code + ) + fuel_code_id = fuel_code.fuel_code_id + + return AllocationAgreement( + **allocation_agreement.model_dump( + exclude={ + "allocation_agreement_id", + "allocation_transaction_type", + "fuel_category", + "fuel_type", + "provision_of_the_act", + "fuel_code", + "deleted", + } + ), + allocation_transaction_type_id=allocation_transaction_type.allocation_transaction_type_id, + fuel_category_id=fuel_category.fuel_category_id, + fuel_type_id=fuel_type.fuel_type_id, + provision_of_the_act_id=provision_of_the_act.provision_of_the_act_id, + fuel_code_id=fuel_code_id + ) + + @service_handler + async def get_table_options(self) -> AllocationAgreementTableOptionsSchema: + """ + Gets the list of table options related to allocation agreements. + """ + table_options = await self.repo.get_table_options() + fuel_types = [ + { + **fuel_type, + "fuel_categories": [ + { + **fuel_category, + "default_and_prescribed_ci": ( + round(fuel_type["default_carbon_intensity"], 2) + if fuel_type["fuel_type"] != "Other" + else default_ci.get(fuel_category["category"], 0) + ), # + } + for fuel_category in fuel_type["fuel_categories"] + ], + } + for fuel_type in table_options["fuel_types"] + ] + + return AllocationAgreementTableOptionsSchema( + allocation_transaction_types=[ + AllocationTransactionTypeSchema.model_validate( + allocation_transaction_type + ) + for allocation_transaction_type in table_options[ + "allocation_transaction_types" + ] + ], + fuel_types=fuel_types, + provisions_of_the_act=[ + ProvisionOfTheActSchema.model_validate(provision) + for provision in table_options["provisions_of_the_act"] + ], + fuel_codes=[ + FuelCodeSchema.model_validate(fuel_code) + for fuel_code in table_options["fuel_codes"] + ], + units_of_measure=table_options["units_of_measure"], + ) + + @service_handler + async def get_allocation_agreements( + self, compliance_report_id: int + ) -> AllocationAgreementListSchema: + """ + Gets the list of allocation agreements for a specific compliance report. 
+ """ + allocation_agreements = await self.repo.get_allocation_agreements( + compliance_report_id + ) + return AllocationAgreementAllSchema( + allocation_agreements=[ + AllocationAgreementSchema.model_validate(allocation_agreement) + for allocation_agreement in allocation_agreements + ] + ) + + @service_handler + async def get_allocation_agreements_paginated( + self, pagination: PaginationRequestSchema, compliance_report_id: int + ) -> AllocationAgreementListSchema: + allocation_agreements, total_count = ( + await self.repo.get_allocation_agreements_paginated( + pagination, compliance_report_id + ) + ) + return AllocationAgreementListSchema( + pagination=PaginationResponseSchema( + total=total_count, + page=pagination.page, + size=pagination.size, + total_pages=math.ceil(total_count / pagination.size), + ), + allocation_agreements=[ + AllocationAgreementSchema( + allocation_agreement_id=allocation_agreement.allocation_agreement_id, + transaction_partner=allocation_agreement.transaction_partner, + transaction_partner_email=allocation_agreement.transaction_partner_email, + transaction_partner_phone=allocation_agreement.transaction_partner_phone, + postal_address=allocation_agreement.postal_address, + ci_of_fuel=allocation_agreement.ci_of_fuel, + quantity=allocation_agreement.quantity, + units=allocation_agreement.units, + allocation_transaction_type=allocation_agreement.allocation_transaction_type.type, + fuel_type=allocation_agreement.fuel_type.fuel_type, + fuel_category=allocation_agreement.fuel_category.category, + provision_of_the_act=allocation_agreement.provision_of_the_act.name, + # Set fuel_code only if it exists + fuel_code=( + allocation_agreement.fuel_code.fuel_code + if allocation_agreement.fuel_code + else None + ), + compliance_report_id=allocation_agreement.compliance_report_id, + ) + for allocation_agreement in allocation_agreements + ], + ) + + @service_handler + async def update_allocation_agreement( + self, allocation_agreement_data: AllocationAgreementCreateSchema + ) -> AllocationAgreementSchema: + """Update an existing Allocation agreement""" + existing_allocation_agreement = await self.repo.get_allocation_agreement( + allocation_agreement_data.allocation_agreement_id + ) + if not existing_allocation_agreement: + raise ValueError("Allocation agreement not found") + + if ( + existing_allocation_agreement.allocation_transaction_type.type + != allocation_agreement_data.allocation_transaction_type + ): + existing_allocation_agreement.allocation_transaction_type = ( + await self.repo.get_allocation_transaction_type_by_name( + allocation_agreement_data.allocation_transaction_type + ) + ) + + if ( + existing_allocation_agreement.fuel_type.fuel_type + != allocation_agreement_data.fuel_type + ): + existing_allocation_agreement.fuel_type = ( + await self.fuel_repo.get_fuel_type_by_name( + allocation_agreement_data.fuel_type + ) + ) + + if ( + existing_allocation_agreement.fuel_category.category + != allocation_agreement_data.fuel_category + ): + existing_allocation_agreement.fuel_category = ( + await self.fuel_repo.get_fuel_category_by( + category=allocation_agreement_data.fuel_category + ) + ) + + if ( + existing_allocation_agreement.provision_of_the_act.name + != allocation_agreement_data.provision_of_the_act + ): + existing_allocation_agreement.provision_of_the_act = ( + await self.fuel_repo.get_provision_of_the_act_by_name( + allocation_agreement_data.provision_of_the_act + ) + ) + + if ( + existing_allocation_agreement.fuel_code is None + or 
allocation_agreement_data.fuel_code is None + or existing_allocation_agreement.fuel_code.fuel_code + != allocation_agreement_data.fuel_code + ): + + existing_allocation_agreement.fuel_code = ( + await self.fuel_repo.get_fuel_code_by_name( + allocation_agreement_data.fuel_code + ) + ) + + existing_allocation_agreement.transaction_partner = ( + allocation_agreement_data.transaction_partner + ) + existing_allocation_agreement.transaction_partner_email = ( + allocation_agreement_data.transaction_partner_email + ) + existing_allocation_agreement.transaction_partner_phone = ( + allocation_agreement_data.transaction_partner_phone + ) + existing_allocation_agreement.postal_address = ( + allocation_agreement_data.postal_address + ) + existing_allocation_agreement.ci_of_fuel = allocation_agreement_data.ci_of_fuel + existing_allocation_agreement.quantity = allocation_agreement_data.quantity + existing_allocation_agreement.units = allocation_agreement_data.units + existing_allocation_agreement.fuel_type_other = ( + allocation_agreement_data.fuel_type_other + ) + + updated_allocation_agreement = await self.repo.update_allocation_agreement( + existing_allocation_agreement + ) + + return AllocationAgreementSchema( + allocation_agreement_id=updated_allocation_agreement.allocation_agreement_id, + transaction_partner=updated_allocation_agreement.transaction_partner, + transaction_partner_email=updated_allocation_agreement.transaction_partner_email, + transaction_partner_phone=updated_allocation_agreement.transaction_partner_phone, + postal_address=updated_allocation_agreement.postal_address, + ci_of_fuel=updated_allocation_agreement.ci_of_fuel, + quantity=updated_allocation_agreement.quantity, + units=updated_allocation_agreement.units, + compliance_report_id=updated_allocation_agreement.compliance_report_id, + allocation_transaction_type=updated_allocation_agreement.allocation_transaction_type.type, + fuel_type=updated_allocation_agreement.fuel_type.fuel_type, + fuel_type_other=updated_allocation_agreement.fuel_type_other, + fuel_category=updated_allocation_agreement.fuel_category.category, + provision_of_the_act=updated_allocation_agreement.provision_of_the_act.name, + fuel_code=( + updated_allocation_agreement.fuel_code.fuel_code + if updated_allocation_agreement.fuel_code + else None + ), + ) + + @service_handler + async def create_allocation_agreement( + self, allocation_agreement_data: AllocationAgreementCreateSchema + ) -> AllocationAgreementSchema: + """Create a new Allocation agreement""" + allocation_agreement = await self.convert_to_model(allocation_agreement_data) + + created_allocation_agreement = await self.repo.create_allocation_agreement( + allocation_agreement + ) + + allocation_transaction_type_value = ( + created_allocation_agreement.allocation_transaction_type.type + ) + fuel_type_value = created_allocation_agreement.fuel_type.fuel_type + fuel_category_value = created_allocation_agreement.fuel_category.category + provision_of_the_act_value = ( + created_allocation_agreement.provision_of_the_act.name + ) + + # Set fuel_code_value only if the fuel_code is present + fuel_code_value = ( + created_allocation_agreement.fuel_code.fuel_code + if created_allocation_agreement.fuel_code + else None + ) + + return AllocationAgreementSchema( + allocation_agreement_id=created_allocation_agreement.allocation_agreement_id, + transaction_partner=created_allocation_agreement.transaction_partner, + transaction_partner_email=created_allocation_agreement.transaction_partner_email, + 
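+            # Relationship-backed values (transaction type, fuel type/category,
+            # provision, fuel code) were flattened to plain strings above so the
+            # response schema carries simple scalars instead of ORM objects.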
transaction_partner_phone=created_allocation_agreement.transaction_partner_phone, + postal_address=created_allocation_agreement.postal_address, + ci_of_fuel=created_allocation_agreement.ci_of_fuel, + quantity=created_allocation_agreement.quantity, + units=created_allocation_agreement.units, + compliance_report_id=created_allocation_agreement.compliance_report_id, + allocation_transaction_type=allocation_transaction_type_value, + fuel_type=fuel_type_value, + fuel_type_other=created_allocation_agreement.fuel_type_other, + fuel_category=fuel_category_value, + provision_of_the_act=provision_of_the_act_value, + fuel_code=fuel_code_value, + ) + + @service_handler + async def delete_allocation_agreement(self, allocation_agreement_id: int) -> str: + """Delete an Allocation agreement""" + return await self.repo.delete_allocation_agreement(allocation_agreement_id) diff --git a/backend/lcfs/web/api/allocation_agreement/validation.py b/backend/lcfs/web/api/allocation_agreement/validation.py new file mode 100644 index 000000000..e8a8a7353 --- /dev/null +++ b/backend/lcfs/web/api/allocation_agreement/validation.py @@ -0,0 +1,33 @@ +from typing import List +from fastapi import Depends, HTTPException, Request +from starlette import status + +from lcfs.web.api.organizations.repo import OrganizationsRepository +from lcfs.web.api.compliance_report.repo import ComplianceReportRepository +from lcfs.web.api.allocation_agreement.schema import ( + AllocationAgreementCreateSchema, +) + + +class AllocationAgreementValidation: + def __init__( + self, + request: Request = None, + org_repo: OrganizationsRepository = Depends(OrganizationsRepository), + report_repo: ComplianceReportRepository = Depends(ComplianceReportRepository), + ): + self.org_repo = org_repo + self.request = request + self.report_repo = report_repo + + async def validate_compliance_report_id( + self, + compliance_report_id: int, + allocation_agreements: List[AllocationAgreementCreateSchema], + ): + for allocation_agreement in allocation_agreements: + if allocation_agreement.compliance_report_id != compliance_report_id: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f"Mismatch compliance_report_id in allocation agreement: {allocation_agreement}", + ) diff --git a/backend/lcfs/web/api/allocation_agreement/views.py b/backend/lcfs/web/api/allocation_agreement/views.py new file mode 100644 index 000000000..bb61a4505 --- /dev/null +++ b/backend/lcfs/web/api/allocation_agreement/views.py @@ -0,0 +1,161 @@ +""" +allocation agreements endpoints +""" + +import structlog +from typing import List, Optional, Union + +from fastapi import ( + APIRouter, + Body, + status, + Request, + Response, + Depends, + Query, +) +from fastapi_cache.decorator import cache + +from lcfs.db import dependencies +from lcfs.web.api.compliance_report.validation import ComplianceReportValidation +from lcfs.web.core.decorators import view_handler +from lcfs.web.api.organizations.services import OrganizationsService +from lcfs.web.api.allocation_agreement.services import AllocationAgreementServices +from lcfs.web.api.allocation_agreement.schema import ( + AllocationAgreementCreateSchema, + AllocationAgreementSchema, + AllocationAgreementListSchema, + AllocationAgreementTableOptionsSchema, + DeleteAllocationAgreementResponseSchema, + PaginatedAllocationAgreementRequestSchema, + AllocationAgreementAllSchema, + OrganizationDetailsSchema, +) +from lcfs.web.api.base import ComplianceReportRequestSchema, PaginationRequestSchema +from 
lcfs.web.api.allocation_agreement.validation import AllocationAgreementValidation +from lcfs.db.models.user.Role import RoleEnum + +router = APIRouter() +logger = structlog.get_logger(__name__) +get_async_db = dependencies.get_async_db_session + + +@router.get( + "/table-options", + response_model=AllocationAgreementTableOptionsSchema, + status_code=status.HTTP_200_OK, +) +@view_handler(["*"]) +async def get_table_options( + request: Request, + service: AllocationAgreementServices = Depends(), +): + """Endpoint to retrieve table options related to allocation agreements""" + return await service.get_table_options() + + +@router.post( + "/list-all", + response_model=AllocationAgreementAllSchema, + status_code=status.HTTP_200_OK, +) +@view_handler(["*"]) +async def get_allocation_agreements( + request: Request, + request_data: ComplianceReportRequestSchema = Body(...), + response: Response = None, + service: AllocationAgreementServices = Depends(), + report_validate: ComplianceReportValidation = Depends(), +): + """Endpoint to get list of allocation agreements for a compliance report""" + await report_validate.validate_organization_access( + request_data.compliance_report_id + ) + return await service.get_allocation_agreements(request_data.compliance_report_id) + + +@router.post( + "/list", + response_model=AllocationAgreementListSchema, + status_code=status.HTTP_200_OK, +) +@view_handler(["*"]) +async def get_allocation_agreements_paginated( + request: Request, + request_data: PaginatedAllocationAgreementRequestSchema = Body(...), + service: AllocationAgreementServices = Depends(), + report_validate: ComplianceReportValidation = Depends(), +) -> AllocationAgreementListSchema: + pagination = PaginationRequestSchema( + page=request_data.page, + size=request_data.size, + sort_orders=request_data.sort_orders, + filters=request_data.filters, + ) + compliance_report_id = request_data.compliance_report_id + await report_validate.validate_organization_access(compliance_report_id) + return await service.get_allocation_agreements_paginated( + pagination, compliance_report_id + ) + + +@router.post( + "/save", + response_model=Union[ + AllocationAgreementSchema, DeleteAllocationAgreementResponseSchema + ], + status_code=status.HTTP_200_OK, +) +@view_handler([RoleEnum.SUPPLIER]) +async def save_allocation_agreements_row( + request: Request, + request_data: AllocationAgreementCreateSchema = Body(...), + service: AllocationAgreementServices = Depends(), + report_validate: ComplianceReportValidation = Depends(), + validate: AllocationAgreementValidation = Depends(), +): + """Endpoint to save a single allocation agreements row""" + compliance_report_id = request_data.compliance_report_id + allocation_agreement_id: Optional[int] = request_data.allocation_agreement_id + + await report_validate.validate_organization_access(compliance_report_id) + + if request_data.deleted: + # Delete existing Allocation agreement + await validate.validate_compliance_report_id( + compliance_report_id, [request_data] + ) + await service.delete_allocation_agreement(allocation_agreement_id) + return DeleteAllocationAgreementResponseSchema( + message="Allocation agreement deleted successfully" + ) + elif allocation_agreement_id: + # Update existing Allocation agreement + await validate.validate_compliance_report_id( + compliance_report_id, [request_data] + ) + return await service.update_allocation_agreement(request_data) + else: + # Create new Allocation agreement + await validate.validate_compliance_report_id( + 
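+            # Rejects any row whose compliance_report_id differs from the report
+            # being edited (raises HTTP 400), preventing cross-report writes.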
compliance_report_id, [request_data] + ) + return await service.create_allocation_agreement(request_data) + + +@router.get("/search", response_model=List[OrganizationDetailsSchema], status_code=200) +@view_handler(["*"]) +async def search_table_options_strings( + request: Request, + transaction_partner: Optional[str] = Query( + None, + alias="transactionPartner", + description="Trading partner (company) for filtering options", + ), + service: OrganizationsService = Depends(), +): + """Endpoint to search allocation agreement options based on a query string""" + if transaction_partner: + return await service.search_organization_details(transaction_partner) + else: + return [] diff --git a/backend/lcfs/web/api/audit_log/__init__.py b/backend/lcfs/web/api/audit_log/__init__.py new file mode 100644 index 000000000..10a4366cc --- /dev/null +++ b/backend/lcfs/web/api/audit_log/__init__.py @@ -0,0 +1,5 @@ +"""Audit Log API.""" + +from lcfs.web.api.audit_log.views import router + +__all__ = ["router"] diff --git a/backend/lcfs/web/api/audit_log/repo.py b/backend/lcfs/web/api/audit_log/repo.py new file mode 100644 index 000000000..cc75dfb21 --- /dev/null +++ b/backend/lcfs/web/api/audit_log/repo.py @@ -0,0 +1,64 @@ +from typing import Optional, List +from fastapi import Depends +from sqlalchemy import select, desc, asc, and_, func +from sqlalchemy.ext.asyncio import AsyncSession + +from lcfs.db.dependencies import get_async_db_session +from lcfs.db.models.audit.AuditLog import AuditLog +from lcfs.web.core.decorators import repo_handler +from lcfs.web.api.base import ( + get_field_for_filter, + SortOrder, +) + + +class AuditLogRepository: + def __init__(self, db: AsyncSession = Depends(get_async_db_session)): + self.db = db + + @repo_handler + async def get_audit_logs_paginated( + self, + offset: int, + limit: Optional[int], + conditions: List = [], + sort_orders: List[SortOrder] = [], + ): + """ + Fetches paginated, filtered, and sorted audit logs. + """ + query = select(AuditLog).where(and_(*conditions)) + + # Apply sorting + if sort_orders: + for order in sort_orders: + direction = asc if order.direction == "asc" else desc + field = get_field_for_filter(AuditLog, order.field) + if field is not None: + query = query.order_by(direction(field)) + else: + # Default sorting by create_date descending + query = query.order_by(desc(AuditLog.create_date)) + + # Get total count for pagination + count_query = select(func.count()).select_from(query.subquery()) + total_count_result = await self.db.execute(count_query) + total_count = total_count_result.scalar_one() + + # Apply pagination + query = query.offset(offset).limit(limit) + + # Execute the query + result = await self.db.execute(query) + audit_logs = result.scalars().all() + + return audit_logs, total_count + + @repo_handler + async def get_audit_log_by_id(self, audit_log_id: int) -> Optional[AuditLog]: + """ + Retrieves an audit log entry by its ID. 
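+
+        Returns None when no matching row exists (scalar_one_or_none semantics).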
+ """ + query = select(AuditLog).where(AuditLog.audit_log_id == audit_log_id) + result = await self.db.execute(query) + return result.scalar_one_or_none() diff --git a/backend/lcfs/web/api/audit_log/schema.py b/backend/lcfs/web/api/audit_log/schema.py new file mode 100644 index 000000000..957bf3f74 --- /dev/null +++ b/backend/lcfs/web/api/audit_log/schema.py @@ -0,0 +1,50 @@ +from typing import Optional, List +from datetime import datetime +from enum import Enum + +from lcfs.web.api.base import BaseSchema, PaginationResponseSchema + + +# Operation Enum +class AuditLogOperationEnum(str, Enum): + INSERT = "INSERT" + UPDATE = "UPDATE" + DELETE = "DELETE" + + +# AuditLog Schema +class AuditLogSchema(BaseSchema): + audit_log_id: int + table_name: str + operation: AuditLogOperationEnum + row_id: int + old_values: Optional[dict] = None + new_values: Optional[dict] = None + delta: Optional[dict] = None + create_date: Optional[datetime] = None + create_user: Optional[str] = None + update_date: Optional[datetime] = None + update_user: Optional[str] = None + + class Config: + from_attributes = True + + +# Simplified AuditLog Schema for list items +class AuditLogListItemSchema(BaseSchema): + audit_log_id: int + table_name: str + operation: AuditLogOperationEnum + row_id: int + changed_fields: Optional[str] = None + create_date: Optional[datetime] = None + create_user: Optional[str] = None + + class Config: + from_attributes = True + + +# AuditLog List Schema +class AuditLogListSchema(BaseSchema): + pagination: PaginationResponseSchema + audit_logs: List[AuditLogListItemSchema] diff --git a/backend/lcfs/web/api/audit_log/services.py b/backend/lcfs/web/api/audit_log/services.py new file mode 100644 index 000000000..8a1a81529 --- /dev/null +++ b/backend/lcfs/web/api/audit_log/services.py @@ -0,0 +1,117 @@ +from typing import List +from math import ceil + +from fastapi import Depends + +from .repo import AuditLogRepository +from lcfs.web.api.audit_log.schema import ( + AuditLogSchema, + AuditLogListItemSchema, + AuditLogListSchema, +) +from lcfs.web.api.base import ( + PaginationRequestSchema, + PaginationResponseSchema, + apply_filter_conditions, + get_field_for_filter, + validate_pagination, +) +from lcfs.web.core.decorators import service_handler +from lcfs.web.exception.exceptions import DataNotFoundException +from lcfs.db.models.audit.AuditLog import AuditLog + + +class AuditLogService: + def __init__(self, repo: AuditLogRepository = Depends(AuditLogRepository)): + self.repo = repo + + def apply_audit_log_filters( + self, pagination: PaginationRequestSchema, conditions: List + ): + """ + Apply filters to the audit logs query. 
+ """ + for filter in pagination.filters: + filter_value = filter.filter + filter_option = filter.type + filter_type = filter.filter_type + + # Handle date filters + if filter.filter_type == "date": + filter_value = [] + if filter.date_from: + filter_value.append(filter.date_from) + if filter.date_to: + filter_value.append(filter.date_to) + if not filter_value: + continue # Skip if no valid date is provided + + # Retrieve the correct field based on the filter field name + field = get_field_for_filter(AuditLog, filter.field) + + if field is not None: + condition = apply_filter_conditions( + field, filter_value, filter_option, filter_type + ) + if condition is not None: + conditions.append(condition) + + @service_handler + async def get_audit_logs_paginated( + self, pagination: PaginationRequestSchema + ) -> AuditLogListSchema: + """ + Fetch audit logs with filters, sorting, and pagination. + """ + conditions = [] + pagination = validate_pagination(pagination) + + if pagination.filters: + self.apply_audit_log_filters(pagination, conditions) + + offset = (pagination.page - 1) * pagination.size + limit = pagination.size + + audit_logs, total_count = await self.repo.get_audit_logs_paginated( + offset, limit, conditions, pagination.sort_orders + ) + + if not audit_logs: + raise DataNotFoundException("No audit logs found") + + processed_audit_logs = [] + for audit_log in audit_logs: + # Extract the changed_fields as a comma-separated string + if audit_log.delta: + changed_fields = ", ".join(audit_log.delta.keys()) + else: + changed_fields = None + + processed_log = AuditLogListItemSchema( + audit_log_id=audit_log.audit_log_id, + table_name=audit_log.table_name, + operation=audit_log.operation, + row_id=audit_log.row_id, + changed_fields=changed_fields, + create_date=audit_log.create_date, + create_user=audit_log.create_user, + ) + processed_audit_logs.append(processed_log) + + return AuditLogListSchema( + audit_logs=processed_audit_logs, + pagination=PaginationResponseSchema( + total=total_count, + page=pagination.page, + size=pagination.size, + total_pages=ceil(total_count / pagination.size), + ), + ) + + @service_handler + async def get_audit_log_by_id(self, audit_log_id: int) -> AuditLogSchema: + """Fetch a single audit log by ID.""" + audit_log = await self.repo.get_audit_log_by_id(audit_log_id) + if not audit_log: + raise DataNotFoundException("Audit log not found") + return AuditLogSchema.model_validate(audit_log) diff --git a/backend/lcfs/web/api/audit_log/views.py b/backend/lcfs/web/api/audit_log/views.py new file mode 100644 index 000000000..44f177ef7 --- /dev/null +++ b/backend/lcfs/web/api/audit_log/views.py @@ -0,0 +1,46 @@ +import structlog +from fastapi import APIRouter, Depends, status, Request, Body + +from lcfs.web.api.base import PaginationRequestSchema +from lcfs.web.core.decorators import view_handler +from lcfs.web.api.audit_log.services import AuditLogService +from lcfs.web.api.audit_log.schema import AuditLogListSchema, AuditLogSchema +from lcfs.db.models.user.Role import RoleEnum + +logger = structlog.get_logger(__name__) + +router = APIRouter() + + +@router.post( + "/list", + response_model=AuditLogListSchema, + status_code=status.HTTP_200_OK, +) +@view_handler([RoleEnum.GOVERNMENT, RoleEnum.ADMINISTRATOR]) +async def get_audit_logs_paginated( + request: Request, + pagination: PaginationRequestSchema = Body(..., embed=False), + service: AuditLogService = Depends(), +): + """ + Fetches a list of audit logs with pagination and filtering. 
+ """ + return await service.get_audit_logs_paginated(pagination) + + +@router.get( + "/{audit_log_id}", + response_model=AuditLogSchema, + status_code=status.HTTP_200_OK, +) +@view_handler([RoleEnum.GOVERNMENT, RoleEnum.ADMINISTRATOR]) +async def get_audit_log_by_id( + request: Request, + audit_log_id: int, + service: AuditLogService = Depends(), +): + """ + Retrieve an audit log entry by ID. + """ + return await service.get_audit_log_by_id(audit_log_id) diff --git a/backend/lcfs/web/api/base.py b/backend/lcfs/web/api/base.py index 364376774..288f38923 100644 --- a/backend/lcfs/web/api/base.py +++ b/backend/lcfs/web/api/base.py @@ -1,6 +1,29 @@ -from typing import Any +from typing import Any, List, Optional +from enum import Enum +from typing_extensions import deprecated +from sqlalchemy import and_, cast, Date, func, String +from sqlalchemy.orm.attributes import InstrumentedAttribute +from fastapi import HTTPException, Query, Request, Response +from fastapi_cache import FastAPICache -from pydantic import BaseModel +from pydantic import BaseModel, Field, ConfigDict, field_validator +from pydantic.alias_generators import to_camel +import structlog +import re + +logger = structlog.get_logger(__name__) + + +class BaseSchema(BaseModel): + model_config = ConfigDict( + alias_generator=to_camel, + populate_by_name=True, + from_attributes=True, + ) + + +class ComplianceReportRequestSchema(BaseSchema): + compliance_report_id: int def row_to_dict(row, schema): @@ -13,34 +36,389 @@ def row_to_dict(row, schema): return d +class SortOrder(BaseSchema): + field: str + direction: str + + @classmethod + def validate_field(cls, value): + # Convert CamelCase to snake_case + return camel_to_snake(value) + + @field_validator("field") + def convert_field_to_snake(cls, value): + return cls.validate_field(value) + + +class FilterModel(BaseSchema): + filter_type: str = Field(Query(default="text", alias="filterType")) + type: str = Field(Query(default="contains", alias="type")) + filter: Optional[Any] = None + field: str = Field(Query(default="", alias="field")) + date_from: Optional[str] = Field(Query(default="", alias="dateFrom")) + date_to: Optional[str] = Field(Query(default="", alias="dateTo")) + + @classmethod + def validate_field(cls, value): + # Convert CamelCase to snake_case + return camel_to_snake(value) + + @field_validator("field") + def convert_field_to_snake(cls, value): + return cls.validate_field(value) + + +class PaginationRequestSchema(BaseSchema): + page: int = Field(Query(default=0, alias="page")) + size: int = Field(Query(default=20, alias="size")) + sort_orders: List[SortOrder] = Field(Query(default=[], alias="sortOrders")) + filters: List[FilterModel] = Field(Query(default=[], alias="filters")) + model_config = ConfigDict(from_attributes=True, arbitrary_types_allowed=True) + + +class PaginationResponseSchema(BaseSchema): + total: int + page: int + size: int + total_pages: int + model_config = ConfigDict(from_attributes=True, arbitrary_types_allowed=True) + + +@deprecated("Use Pagination Request and Response schemas instead") class EntityResponse(BaseModel): status: int message: str error: dict = {} total: int = 0 - limit: int = 1 - offset: int = 0 + size: int = 10 + page: int = 1 total_pages: int = 1 - current_page: int = 1 data: Any = {} - class Config: - from_attributes = True - populate_by_name = True - arbitrary_types_allowed = True - json_encoders = { - set: lambda v: list(v) - } - json_schema_extra = { + model_config = ConfigDict( + from_attributes=True, + populate_by_name=True, + 
arbitrary_types_allowed=True, + json_schema_extra={ "example": { "status": 200, "message": "Success", "error": {}, "total": 0, - "limit": 1, - "offset": 0, + "page": 1, + "size": 10, "total_pages": 1, - "current_page": 1, - "data": [] + "data": [], } + }, + ) + + +def validate_pagination(pagination: PaginationRequestSchema): + """ + Validate the pagination object. + + Args: + pagination (PaginationRequestSchema): The pagination object to validate. + """ + if not pagination.page or pagination.page < 1: + pagination.page = 1 + if not pagination.size or pagination.size < 1: + pagination.size = 10 + if not pagination.sort_orders: + pagination.sort_orders = [] + if not pagination.filters: + pagination.filters = [] + return pagination + + +def get_field_for_filter(model, field): + """ + Get the field from the model based on the field name. + + Args: + model: The model to get the field from + field: The field name to get + """ + try: + if hasattr(model, field): + field = getattr(model, field) + if isinstance(field, InstrumentedAttribute): + return field.property.columns[0] + else: + return field + return model[field] + except Exception as e: + logger.error( + "Not able to get the required field", + error=str(e), + field=field, + model=model, + exc_info=e, + ) + raise HTTPException( + status_code=500, + detail=f"Failed to apply filter conditions", + ) + + +def get_enum_value(enum_class, filter_value): + # Normalize both the filter value and enum names to lowercase + normalized_filter_value = filter_value.strip().replace(" ", "_").lower() + for enum_member in enum_class: + if enum_member.name.lower() == normalized_filter_value: + return enum_member + raise AttributeError(f"{filter_value} not found in {enum_class}") + + +def apply_text_filter_conditions(field, filter_value, filter_option): + """ + Apply text filtering conditions based on the filter option. + + Args: + field: The field to filter on + filter_value: The value to filter by + filter_option: The filtering operation (equals, contains, etc) + """ + # Apply text filtering with case and space insensitivity + lower_no_space_field = func.lower(func.replace(cast(field, String), " ", "")) + lower_no_space_filter_value = ( + filter_value.name.replace(" ", "").lower() + if isinstance(filter_value, Enum) + else str(filter_value).replace(" ", "").lower() + ) + + text_filter_mapping = { + "true": field.is_(True), + "false": field.is_(False), + "contains": lower_no_space_field.like(f"%{lower_no_space_filter_value}%"), + "notContains": lower_no_space_field.notlike(f"%{lower_no_space_filter_value}%"), + "equals": lower_no_space_field == lower_no_space_filter_value, + "notEqual": lower_no_space_field != lower_no_space_filter_value, + "startsWith": lower_no_space_field.like(f"{lower_no_space_filter_value}%"), + "endsWith": lower_no_space_field.like(f"%{lower_no_space_filter_value}"), + } + + return text_filter_mapping.get(filter_option) + + +def apply_number_filter_conditions(field, filter_value, filter_option): + """ + Apply number filtering conditions based on the filter option. 
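+
+    (Illustrative: a two-element list is treated as an inclusive range, so
+    filter_value=[10, 20] yields `field >= 10 AND field <= 20`; scalar values
+    fall through to the operator mapping.)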
+ + Args: + field: The field to filter on + filter_value: The value to filter by + filter_option: The filtering operation (equals, greater than, etc) + """ + if isinstance(filter_value, list): + return and_(field >= filter_value[0], field <= filter_value[1]) + else: + number_filter_mapping = { + "equals": field == filter_value, + "notEqual": field != filter_value, + "greaterThan": field > filter_value, + "greaterThanOrEqual": field >= filter_value, + "lessThan": field < filter_value, + "lessThanOrEqual": field <= filter_value, + "startsWith": cast(field, String).like(f"{filter_value}%"), } + return number_filter_mapping.get(filter_option) + + +def apply_date_filter_conditions(field, filter_value, filter_option): + """ + Apply date filtering conditions based on the filter option. + + Args: + field: The field to filter on + filter_value: The value to filter by + filter_option: The filtering operation + """ + if isinstance(filter_value, list) and len(filter_value) <= 1: + filter_value = filter_value[0] + date_filter_mapping = { + "equals": cast(field, Date) == func.date(filter_value), + "notEqual": cast(field, Date) != func.date(filter_value), + "greaterThan": cast(field, Date) > func.date(filter_value), + "lessThan": cast(field, Date) < func.date(filter_value), + "inRange": and_( + cast(field, Date) >= func.date(filter_value[0]), + cast(field, Date) <= func.date(filter_value[1]), + ), + } + + return date_filter_mapping.get(filter_option) + + +def apply_set_filter_conditions(field, filter_values): + """ + Apply set filtering conditions based on the filter values. + + Args: + field: The field to filter on + filter_values: The set of values to filter by + """ + return field.in_(filter_values) + + +def apply_generic_filter_conditions(field, filter_value, filter_option): + """ + Apply generic filtering conditions based on the filter option. + + Args: + field: The field to filter on + filter_value: The value to filter by + filter_option: The filtering operation (blank, notBlank, empty) + """ + generic_filter_mapping = { + "blank": field.is_(None), + "notBlank": field.isnot(None), + "empty": field.is_(""), + } + + return generic_filter_mapping.get(filter_option) + + +def apply_filter_conditions(field, filter_value, filter_option, filter_type): + """ + Apply filtering conditions based on the filter option and filter type. 
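+
+    (Illustrative dispatch, column assumed: a text column with filter_type="text"
+    and filter_option="contains" is delegated to apply_text_filter_conditions,
+    producing a case- and space-insensitive LIKE clause.)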
+ + Args: + field: The field to filter on + filter_value: The value to filter by + filter_option: The filtering operation + filter_type: The type of the field (text, number, date, set) + """ + try: + # Handle generic filter options (blank, notBlank, empty) + if filter_option in ["blank", "notBlank", "empty"]: + return apply_generic_filter_conditions(field, filter_value, filter_option) + + # Handle various filter types + if filter_type == "text": + return apply_text_filter_conditions(field, filter_value, filter_option) + elif filter_type == "number": + return apply_number_filter_conditions(field, filter_value, filter_option) + elif filter_type == "date": + return apply_date_filter_conditions(field, filter_value, filter_option) + elif filter_type == "set": + return apply_set_filter_conditions(field, filter_value) + else: + raise HTTPException( + status_code=400, + detail=f"Invalid filter type: {filter_type}", + ) + except Exception as e: + logger.error( + "Failed to apply filter conditions", + error=str(e), + filter_type=filter_type, + filter_option=filter_option, + filter_value=filter_value, + field=field, + exc_info=e, + ) + raise HTTPException( + status_code=500, + detail=f"Failed to apply filter conditions", + ) + + +def camel_to_snake(name): + """Convert a camel case string to snake case.""" + s1 = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", name) + return re.sub("([a-z0-9])([A-Z])", r"\1_\2", s1).lower() + + +async def lcfs_cache_key_builder( + func, + namespace: Optional[str] = "", + request: Request = None, + response: Response = None, + *args, + **kwargs, +): + """ + Build a cache key for a function using the request and response objects. + + Args: + func: The function to build the cache key for + namespace: The namespace to use for the cache key + request: The request object + response: The response object + args: Positional arguments for the function + kwargs: Keyword arguments for the function + + Returns: + The cache key for the function + """ + # Get the FastAPICache prefix + prefix = FastAPICache.get_prefix() + request_key = "" + for key, value in kwargs.items(): + if key == "args": + for v in value: + if "object at" not in str(v): + request_key += f"{key}:{v}" + elif "object at" not in str(value): + request_key += f"{key}:{value}" + # Build the cache key + cache_key = f"{prefix}:{namespace}:{func.__name__}:{request_key}" + logger.info("Cache key generated", cache_key=cache_key) + + # Return the cache key + return cache_key + + +class NotificationTypeEnum(Enum): + BCEID__COMPLIANCE_REPORT__DIRECTOR_ASSESSMENT = ( + "BCEID__COMPLIANCE_REPORT__DIRECTOR_ASSESSMENT" + ) + BCEID__INITIATIVE_AGREEMENT__DIRECTOR_APPROVAL = ( + "BCEID__INITIATIVE_AGREEMENT__DIRECTOR_APPROVAL" + ) + BCEID__TRANSFER__DIRECTOR_DECISION = "BCEID__TRANSFER__DIRECTOR_DECISION" + BCEID__TRANSFER__PARTNER_ACTIONS = "BCEID__TRANSFER__PARTNER_ACTIONS" + IDIR_ANALYST__COMPLIANCE_REPORT__DIRECTOR_DECISION = ( + "IDIR_ANALYST__COMPLIANCE_REPORT__DIRECTOR_DECISION" + ) + IDIR_ANALYST__COMPLIANCE_REPORT__MANAGER_RECOMMENDATION = ( + "IDIR_ANALYST__COMPLIANCE_REPORT__MANAGER_RECOMMENDATION" + ) + IDIR_ANALYST__COMPLIANCE_REPORT__SUBMITTED_FOR_REVIEW = ( + "IDIR_ANALYST__COMPLIANCE_REPORT__SUBMITTED_FOR_REVIEW" + ) + IDIR_ANALYST__INITIATIVE_AGREEMENT__RETURNED_TO_ANALYST = ( + "IDIR_ANALYST__INITIATIVE_AGREEMENT__RETURNED_TO_ANALYST" + ) + IDIR_ANALYST__TRANSFER__DIRECTOR_RECORDED = ( + "IDIR_ANALYST__TRANSFER__DIRECTOR_RECORDED" + ) + IDIR_ANALYST__TRANSFER__RESCINDED_ACTION = ( + "IDIR_ANALYST__TRANSFER__RESCINDED_ACTION" 
+    )
+    IDIR_ANALYST__TRANSFER__SUBMITTED_FOR_REVIEW = (
+        "IDIR_ANALYST__TRANSFER__SUBMITTED_FOR_REVIEW"
+    )
+    IDIR_COMPLIANCE_MANAGER__COMPLIANCE_REPORT__ANALYST_RECOMMENDATION = (
+        "IDIR_COMPLIANCE_MANAGER__COMPLIANCE_REPORT__ANALYST_RECOMMENDATION"
+    )
+    IDIR_COMPLIANCE_MANAGER__COMPLIANCE_REPORT__DIRECTOR_ASSESSMENT = (
+        "IDIR_COMPLIANCE_MANAGER__COMPLIANCE_REPORT__DIRECTOR_ASSESSMENT"
+    )
+    IDIR_COMPLIANCE_MANAGER__COMPLIANCE_REPORT__SUBMITTED_FOR_REVIEW = (
+        "IDIR_COMPLIANCE_MANAGER__COMPLIANCE_REPORT__SUBMITTED_FOR_REVIEW"
+    )
+    IDIR_DIRECTOR__COMPLIANCE_REPORT__MANAGER_RECOMMENDATION = (
+        "IDIR_DIRECTOR__COMPLIANCE_REPORT__MANAGER_RECOMMENDATION"
+    )
+    IDIR_DIRECTOR__INITIATIVE_AGREEMENT__ANALYST_RECOMMENDATION = (
+        "IDIR_DIRECTOR__INITIATIVE_AGREEMENT__ANALYST_RECOMMENDATION"
+    )
+    IDIR_DIRECTOR__TRANSFER__ANALYST_RECOMMENDATION = (
+        "IDIR_DIRECTOR__TRANSFER__ANALYST_RECOMMENDATION"
+    )
+
+    def __str__(self):
+        return self.value
diff --git a/backend/lcfs/web/api/compliance_report/__init__.py b/backend/lcfs/web/api/compliance_report/__init__.py
new file mode 100644
index 000000000..1db303468
--- /dev/null
+++ b/backend/lcfs/web/api/compliance_report/__init__.py
@@ -0,0 +1,5 @@
+"""Compliance report API."""
+
+from lcfs.web.api.compliance_report.views import router
+
+__all__ = ["router"]
diff --git a/backend/lcfs/web/api/compliance_report/constants.py b/backend/lcfs/web/api/compliance_report/constants.py
new file mode 100644
index 000000000..39e38e5e1
--- /dev/null
+++ b/backend/lcfs/web/api/compliance_report/constants.py
@@ -0,0 +1,117 @@
+import enum
+
+RENEWABLE_FUEL_TARGET_DESCRIPTIONS = {
+    "1": {
+        "description": "Volume of fossil-derived base fuel supplied",
+        "field": "fossil_derived_base_fuel",
+    },
+    "2": {
+        "description": "Volume of eligible renewable fuel supplied",
+        "field": "eligible_renewable_fuel_supplied",
+    },
+    "3": {
+        "description": "Total volume of tracked fuel supplied (Line 1 + Line 2)",
+        "field": "total_tracked_fuel_supplied",
+    },
+    "4": {
+        "description": "Volume of eligible renewable fuel required",
+        "field": "eligible_renewable_fuel_required",
+    },
+    "5": {
+        "description": "Net volume of eligible renewable fuel notionally transferred",
+        "field": "net_notionally_transferred",
+    },
+    "6": {
+        "description": "Volume of eligible renewable fuel retained (up to 5% of Line 4 - gasoline={}; diesel={}; jet fuel={})",
+        "field": "renewable_fuel_retained",
+    },
+    "7": {
+        "description": "Volume of eligible renewable fuel previously retained (from Line 6 of previous compliance period)",
+        "field": "previously_retained",
+    },
+    "8": {
+        "description": "Volume of eligible renewable obligation deferred (up to 5% of Line 4 - gasoline={}; diesel={}; jet fuel={})",
+        "field": "obligation_deferred",
+    },
+    "9": {
+        "description": "Volume of renewable obligation added (from Line 8 of previous compliance period)",
+        "field": "obligation_added",
+    },
+    "10": {
+        "description": "Net volume of eligible renewable fuel supplied (Total of Line 2 + Line 5 - Line 6 + Line 7 + Line 8 - Line 9)",
+        "field": "net_renewable_fuel_supplied",
+    },
+    "11": {
+        "description": "Non-compliance penalty payable [(Line 4 - Line 10) x prescribed penalty rate]",
+        "field": "non_compliance_penalty",
+    },
+}
+
+LOW_CARBON_FUEL_TARGET_DESCRIPTIONS = {
+    "12": {
+        "description": "Compliance units transferred away",
+        "field": "low_carbon_fuel_required",
+    },
+    "13": {
+        "description": "Compliance units received through transfers",
+        "field": "low_carbon_fuel_supplied",
+    },
+    "14": {
"description": "Compliance units issued under initiative agreements", + "field": "low_carbon_fuel_surplus", + }, + "15": { + "description": "Compliance units previously issued for the supply of fuel in the compliance period", + "field": "banked_units_used", + }, + "16": { + "description": "Compliance units previously issued for the export of fuel for the compliance period", + "field": "banked_units_remaining", + }, + "17": { + "description": "Available compliance unit balance for the compliance period", + "field": "non_banked_units_used", + }, + "18": { + "description": "Compliance units being issued for the supply of fuel in the compliance period", + "field": "units_to_be_banked", + }, + "19": { + "description": "Compliance units being issued for the export of fuel for the compliance period", + "field": "units_to_be_exported", + }, + "20": { + "description": "Compliance unit balance change from assessment", + "field": "surplus_deficit_units", + }, + "21": { + "description": "Non-compliance penalty payable ({} units * $600 CAD per unit)", + "field": "surplus_deficit_ratio", + }, + "22": { + "description": "Available compliance unit balance after assessment", + "field": "compliance_units_issued", + }, +} + +NON_COMPLIANCE_PENALTY_SUMMARY_DESCRIPTIONS = { + "11": { + "description": "Renewable fuel target non-compliance penalty total (Line 11, Gasoline + Diesel + Jet fuel)", + "field": "fossil_derived_base_fuel", + }, + "21": { + "description": "Low carbon fuel target non-compliance penalty total (Line 21)", + "field": "line_21_non_compliance_penalty_payable", + }, + "": { + "description": "Total non-compliance penalty payable ", + "field": "total_non_compliance_penalty_payable", + }, +} + +PRESCRIBED_PENALTY_RATE = {"gasoline": 0.3, "diesel": 0.45, "jet_fuel": 0.5} + + +class FORMATS(enum.Enum): + CURRENCY = "currency" + NUMBER = "number" diff --git a/backend/lcfs/web/api/compliance_report/repo.py b/backend/lcfs/web/api/compliance_report/repo.py new file mode 100644 index 000000000..59696d6ab --- /dev/null +++ b/backend/lcfs/web/api/compliance_report/repo.py @@ -0,0 +1,854 @@ +import structlog +from typing import List, Optional, Dict +from collections import defaultdict +from datetime import datetime + +from lcfs.db.models import UserProfile +from lcfs.db.models.organization.Organization import Organization +from lcfs.db.models.user.UserProfile import UserProfile +from lcfs.db.models.fuel.FuelType import FuelType +from lcfs.db.models.fuel.FuelCategory import FuelCategory +from lcfs.db.models.fuel.ExpectedUseType import ExpectedUseType +from sqlalchemy import func, select, and_, asc, desc +from sqlalchemy.orm import joinedload +from sqlalchemy.ext.asyncio import AsyncSession +from fastapi import Depends + +from lcfs.web.api.base import ( + PaginationRequestSchema, + apply_filter_conditions, + get_field_for_filter, +) +from lcfs.db.models.compliance import CompliancePeriod +from lcfs.db.models.compliance.ComplianceReport import ( + ComplianceReport, + ReportingFrequency, +) +from lcfs.db.models.compliance.ComplianceReportSummary import ComplianceReportSummary +from lcfs.db.models.compliance.ComplianceReportStatus import ( + ComplianceReportStatus, + ComplianceReportStatusEnum, +) +from lcfs.web.api.compliance_report.schema import ( + ComplianceReportBaseSchema, + ComplianceReportSummaryUpdateSchema, +) +from lcfs.db.models.compliance.ComplianceReportHistory import ComplianceReportHistory +from lcfs.web.core.decorators import repo_handler +from lcfs.db.dependencies import get_async_db_session 
+from lcfs.db.models.compliance.OtherUses import OtherUses +from lcfs.db.models.transfer.Transfer import Transfer +from lcfs.db.models.initiative_agreement.InitiativeAgreement import InitiativeAgreement +from lcfs.db.models.compliance.AllocationAgreement import AllocationAgreement +from lcfs.db.models.compliance.FuelSupply import FuelSupply +from lcfs.web.api.fuel_supply.repo import FuelSupplyRepository + +logger = structlog.get_logger(__name__) + + +class ComplianceReportRepository: + def __init__( + self, + db: AsyncSession = Depends(get_async_db_session), + fuel_supply_repo: FuelSupplyRepository = Depends(), + ): + self.db = db + self.fuel_supply_repo = fuel_supply_repo + + def apply_filters(self, pagination, conditions): + for filter in pagination.filters: + filter_value = filter.filter + # check if the date string is selected for filter + if filter.filter is None: + filter_value = [ + datetime.strptime(filter.date_from, "%Y-%m-%d %H:%M:%S").strftime( + "%Y-%m-%d" + ) + ] + if filter.date_to: + filter_value.append( + datetime.strptime(filter.date_to, "%Y-%m-%d %H:%M:%S").strftime( + "%Y-%m-%d" + ) + ) + filter_option = filter.type + filter_type = filter.filter_type + if filter.field == "status": + field = get_field_for_filter(ComplianceReportStatus, "status") + normalized_value = filter_value.lower() + enum_value_map = { + enum_val.value.lower(): enum_val + for enum_val in ComplianceReportStatusEnum + } + filter_value = enum_value_map.get(normalized_value) + elif filter.field == "organization": + field = get_field_for_filter(Organization, "name") + elif filter.field == "type": + field = get_field_for_filter(ComplianceReport, "reporting_frequency") + filter_value = ( + ReportingFrequency.ANNUAL.value + if filter_value.lower().startswith("c") + else ReportingFrequency.QUARTERLY.value + ) + elif filter.field == "compliance_period": + field = get_field_for_filter(CompliancePeriod, "description") + else: + field = get_field_for_filter(ComplianceReport, filter.field) + + conditions.append( + apply_filter_conditions(field, filter_value, filter_option, filter_type) + ) + + @repo_handler + async def get_all_compliance_periods(self) -> List[CompliancePeriod]: + # Retrieve all compliance periods from the database + periods = ( + ( + await self.db.execute( + select(CompliancePeriod) + .where(CompliancePeriod.effective_status == True) + .order_by(CompliancePeriod.display_order.desc()) + ) + ) + .scalars() + .all() + ) + current_year = str(datetime.now().year) + + # Check if the current year is already in the list of periods + if not any(period.description == current_year for period in periods): + # Get the current maximum display_order value + max_display_order = await self.db.execute( + select(func.max(CompliancePeriod.display_order)) + ) + max_display_order_value = max_display_order.scalar() or 0 + # Insert the current year if it doesn't exist + new_period = CompliancePeriod( + description=current_year, + effective_date=datetime.now().date(), + display_order=max_display_order_value + 1, + ) + self.db.add(new_period) + + # Retrieve the updated list of compliance periods + periods = ( + ( + await self.db.execute( + select(CompliancePeriod) + .where(CompliancePeriod.effective_status == True) + .order_by(CompliancePeriod.display_order.desc()) + ) + ) + .scalars() + .all() + ) + + return periods + + @repo_handler + async def get_compliance_period(self, period: str) -> CompliancePeriod: + """ + Retrieve a compliance period from the database + """ + result = await self.db.scalar( + 
            select(CompliancePeriod).where(CompliancePeriod.description == period)
+        )
+        return result
+
+    @repo_handler
+    async def check_compliance_report(
+        self, compliance_report_id: int
+    ) -> Optional[ComplianceReport]:
+        """
+        Identify and retrieve the compliance report by id, including its related objects.
+        """
+        return await self.db.scalar(
+            select(ComplianceReport)
+            .options(
+                joinedload(ComplianceReport.organization),
+                joinedload(ComplianceReport.compliance_period),
+                joinedload(ComplianceReport.current_status),
+                joinedload(ComplianceReport.summary),
+                joinedload(ComplianceReport.fuel_supplies),
+                joinedload(ComplianceReport.other_uses),
+                joinedload(ComplianceReport.history).joinedload(
+                    ComplianceReportHistory.status
+                ),
+                joinedload(ComplianceReport.history).joinedload(
+                    ComplianceReportHistory.user_profile
+                ),
+                joinedload(ComplianceReport.transaction),
+            )
+            .where(ComplianceReport.compliance_report_id == compliance_report_id)
+        )
+
+    @repo_handler
+    async def get_compliance_report_status_by_desc(
+        self, status: str
+    ) -> ComplianceReportStatus:
+        """
+        Retrieve the compliance report status ID from the database based on the description.
+        Replaces spaces with underscores in the status description.
+        """
+        status_enum = status.replace(" ", "_")  # frontend sends status with spaces
+        result = await self.db.execute(
+            select(ComplianceReportStatus).where(
+                ComplianceReportStatus.status
+                == getattr(ComplianceReportStatusEnum, status_enum)
+            )
+        )
+        return result.scalars().first()
+
+    @repo_handler
+    async def get_compliance_report_by_period(self, organization_id: int, period: str):
+        """
+        Check whether a compliance report exists for the organization in the given
+        compliance period; returns True/False rather than the report itself.
+        """
+        result = await self.db.scalar(
+            select(ComplianceReport.compliance_report_id).where(
+                and_(
+                    ComplianceReport.organization_id == organization_id,
+                    CompliancePeriod.description == period,
+                    ComplianceReport.compliance_period_id
+                    == CompliancePeriod.compliance_period_id,
+                )
+            )
+        )
+        return result is not None
+
+    @repo_handler
+    async def get_assessed_compliance_report_by_period(
+        self, organization_id: int, period: int
+    ):
+        """
+        Identify and retrieve the assessed compliance report of an organization for the given compliance period
+        """
+        result = (
+            (
+                await self.db.execute(
+                    select(ComplianceReport)
+                    .options(
+                        joinedload(ComplianceReport.organization),
+                        joinedload(ComplianceReport.compliance_period),
+                        joinedload(ComplianceReport.current_status),
+                        joinedload(ComplianceReport.summary),
+                    )
+                    .join(
+                        CompliancePeriod,
+                        ComplianceReport.compliance_period_id
+                        == CompliancePeriod.compliance_period_id,
+                    )
+                    .join(
+                        Organization,
+                        ComplianceReport.organization_id
+                        == Organization.organization_id,
+                    )
+                    .join(
+                        ComplianceReportStatus,
+                        ComplianceReport.current_status_id
+                        == ComplianceReportStatus.compliance_report_status_id,
+                    )
+                    .outerjoin(
+                        ComplianceReportSummary,
+                        ComplianceReport.compliance_report_id
+                        == ComplianceReportSummary.compliance_report_id,
+                    )
+                    .where(
+                        and_(
+                            ComplianceReport.organization_id == organization_id,
+                            CompliancePeriod.description == str(period),
+                            ComplianceReportStatus.status
+                            == ComplianceReportStatusEnum.Assessed,
+                        )
+                    )
+                )
+            )
+            .unique()
+            .scalars()
+            .first()
+        )
+        return result
+
+    @repo_handler
+    async def create_compliance_report(self, report: ComplianceReport):
+        """
+        Add a new compliance report to the database
+        """
+        self.db.add(report)
+        await self.db.flush()
+        await self.db.refresh(
+            report,
+            [
+                "compliance_period",
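+                # Refreshing these relationship names up front lets
+                # model_validate() below serialize them without further
+                # lazy loads on the async session.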
"fuel_supplies", + "fuel_exports", + "history", + "notional_transfers", + "organization", + "other_uses", + "current_status", + "summary", + ], + ) + return ComplianceReportBaseSchema.model_validate(report) + + @repo_handler + async def get_compliance_report_history(self, report: ComplianceReport): + history = await self.db.execute( + select(ComplianceReportHistory) + .where( + and_( + ComplianceReportHistory.compliance_report_id + == report.compliance_report_id, + ComplianceReportHistory.status_id == report.current_status_id, + ) + ) + .order_by(ComplianceReportHistory.create_date.desc()) + ) + return history.scalar_one_or_none() + + @repo_handler + async def add_compliance_report_history( + self, report: ComplianceReport, user: UserProfile + ): + """ + Add a new compliance report history record to the database + """ + history = await self.get_compliance_report_history(report) + if history: + history.update_date = datetime.now() + history.create_date = datetime.now() + history.status_id = report.current_status_id + history.user_profile_id = user.user_profile_id + else: + history = ComplianceReportHistory( + compliance_report_id=report.compliance_report_id, + status_id=report.current_status_id, + user_profile_id=user.user_profile_id, + ) + self.db.add(history) + await self.db.flush() + return history + + @repo_handler + async def get_reports_paginated( + self, pagination: PaginationRequestSchema, organization_id: int = None + ): + """ + Retrieve a paginated list of the latest compliance reports from each compliance_report_group_uuid. + Supports pagination, filtering, and sorting. + """ + # Base query conditions + conditions = [] + if organization_id: + conditions.append(ComplianceReport.organization_id == organization_id) + + if pagination.filters and len(pagination.filters) > 0: + self.apply_filters(pagination, conditions) + + # Pagination and offset setup + offset = 0 if (pagination.page < 1) else (pagination.page - 1) * pagination.size + limit = pagination.size + + # Build the main query + subquery = ( + select( + ComplianceReport.compliance_report_group_uuid, + func.max(ComplianceReport.version).label("latest_version"), + ) + .where(and_(*conditions)) + .group_by(ComplianceReport.compliance_report_group_uuid) + ) + + if not organization_id: + subquery = subquery.join( + ComplianceReportStatus, + ComplianceReport.current_status_id + == ComplianceReportStatus.compliance_report_status_id, + ) + + subquery = subquery.subquery() + # Join the main ComplianceReport table with the subquery to get the latest version per group + query = ( + select(ComplianceReport) + .join( + subquery, + and_( + ComplianceReport.compliance_report_group_uuid + == subquery.c.compliance_report_group_uuid, + ComplianceReport.version == subquery.c.latest_version, + ), + ) + .options( + joinedload(ComplianceReport.organization), + joinedload(ComplianceReport.compliance_period), + joinedload(ComplianceReport.current_status), + joinedload(ComplianceReport.summary), + joinedload(ComplianceReport.history).joinedload( + ComplianceReportHistory.status + ), + joinedload(ComplianceReport.history) + .joinedload(ComplianceReportHistory.user_profile) + .joinedload(UserProfile.organization), + ) + ) + + # Apply sorting from pagination + for order in pagination.sort_orders: + sort_method = asc if order.direction == "asc" else desc + if order.field == "status": + order.field = get_field_for_filter(ComplianceReportStatus, "status") + query = query.join( + ComplianceReportStatus, + ComplianceReport.current_status_id + == 
                    ComplianceReportStatus.compliance_report_status_id,
+                )
+            elif order.field == "compliance_period":
+                order.field = get_field_for_filter(CompliancePeriod, "description")
+                query = query.join(
+                    CompliancePeriod,
+                    ComplianceReport.compliance_period_id
+                    == CompliancePeriod.compliance_period_id,
+                )
+            elif order.field == "organization":
+                order.field = get_field_for_filter(Organization, "name")
+                query = query.join(
+                    Organization,
+                    ComplianceReport.organization_id == Organization.organization_id,
+                )
+            else:
+                order.field = get_field_for_filter(ComplianceReport, order.field)
+            query = query.order_by(sort_method(order.field))
+
+        # Count all latest-version reports (one row per group), not just this page
+        total_count = (
+            await self.db.execute(select(func.count()).select_from(subquery))
+        ).scalar_one()
+
+        # Execute query with offset and limit for pagination
+        query_result = (
+            (await self.db.execute(query.offset(offset).limit(limit)))
+            .unique()
+            .scalars()
+            .all()
+        )
+
+        # Transform results into Pydantic schemas
+        reports = [
+            ComplianceReportBaseSchema.model_validate(report) for report in query_result
+        ]
+        return reports, total_count
+
+    @repo_handler
+    async def get_compliance_report_by_id(self, report_id: int, is_model: bool = False):
+        """
+        Retrieve a compliance report from the database by ID
+        """
+        result = await self.db.execute(
+            select(ComplianceReport)
+            .options(
+                joinedload(ComplianceReport.organization),
+                joinedload(ComplianceReport.compliance_period),
+                joinedload(ComplianceReport.current_status),
+                joinedload(ComplianceReport.summary),
+                joinedload(ComplianceReport.history).joinedload(
+                    ComplianceReportHistory.status
+                ),
+                joinedload(ComplianceReport.history).joinedload(
+                    ComplianceReportHistory.user_profile
+                ),
+                joinedload(ComplianceReport.transaction),
+            )
+            .where(ComplianceReport.compliance_report_id == report_id)
+        )
+
+        compliance_report = result.scalars().unique().first()
+
+        if not compliance_report:
+            return None
+
+        if is_model:
+            return compliance_report
+
+        return ComplianceReportBaseSchema.model_validate(compliance_report)
+
+    @repo_handler
+    async def get_compliance_report_chain(self, group_uuid: str):
+        result = await self.db.execute(
+            select(ComplianceReport)
+            .options(
+                joinedload(ComplianceReport.organization),
+                joinedload(ComplianceReport.compliance_period),
+                joinedload(ComplianceReport.current_status),
+                joinedload(ComplianceReport.summary),
+                joinedload(ComplianceReport.history).joinedload(
+                    ComplianceReportHistory.status
+                ),
+                joinedload(ComplianceReport.history).joinedload(
+                    ComplianceReportHistory.user_profile
+                ),
+                joinedload(ComplianceReport.transaction),
+            )
+            .where(ComplianceReport.compliance_report_group_uuid == group_uuid)
+            .order_by(ComplianceReport.version.desc())  # Ensure ordering by version
+        )
+
+        compliance_reports = result.scalars().unique().all()
+
+        return [
+            ComplianceReportBaseSchema.model_validate(report)
+            for report in compliance_reports
+        ]
+
+    @repo_handler
+    async def get_fuel_type(self, fuel_type_id: int) -> FuelType:
+        return await self.db.scalar(
+            select(FuelType).where(FuelType.fuel_type_id == fuel_type_id)
+        )
+
+    @repo_handler
+    async def get_fuel_category(self, fuel_category_id: int) -> FuelCategory:
+        return await self.db.scalar(
+            select(FuelCategory).where(
+                FuelCategory.fuel_category_id == fuel_category_id
+            )
+        )
+
+    @repo_handler
+    async def get_expected_use(self, expected_use_type_id: int) -> ExpectedUseType:
+        return await self.db.scalar(
+            select(ExpectedUseType).where(
+                ExpectedUseType.expected_use_type_id == expected_use_type_id
+            )
+        )
+
+    @repo_handler
+    async def update_compliance_report(
+        self, report: ComplianceReport
+ ) -> ComplianceReportBaseSchema: + """Persists the changes made to the ComplianceReport object to the database.""" + try: + await self.db.flush() + + # Reload the report with all necessary relationships + refreshed_report = await self.db.scalar( + select(ComplianceReport) + .options( + joinedload(ComplianceReport.compliance_period), + joinedload(ComplianceReport.organization), + joinedload(ComplianceReport.current_status), + joinedload(ComplianceReport.summary), + joinedload(ComplianceReport.history).joinedload( + ComplianceReportHistory.status + ), + joinedload(ComplianceReport.history).joinedload( + ComplianceReportHistory.user_profile + ), + ) + .where( + ComplianceReport.compliance_report_id == report.compliance_report_id + ) + ) + + if not refreshed_report: + raise ValueError( + f"Could not reload report {report.compliance_report_id}" + ) + + return ComplianceReportBaseSchema.model_validate(refreshed_report) + + except Exception as e: + await self.db.rollback() + logger.error(f"Error updating compliance report: {e}") + raise + + @repo_handler + async def add_compliance_report_summary( + self, summary: ComplianceReportSummary + ) -> ComplianceReportSummary: + """ + Adds a new compliance report summary to the database. + """ + self.db.add(summary) + await self.db.flush() + await self.db.refresh(summary) + return summary + + @repo_handler + async def save_compliance_report_summary( + self, summary: ComplianceReportSummaryUpdateSchema + ): + """ + Save the compliance report summary to the database. + + :param summary: The generated summary data + """ + existing_summary = await self.get_summary_by_report_id( + summary.compliance_report_id + ) + + if existing_summary: + summary_obj = existing_summary + else: + raise ValueError( + f"No summary found with report ID {summary.compliance_report_id}" + ) + + # Update renewable fuel target summary + for row in summary.renewable_fuel_target_summary: + line_number = row.line + for fuel_type in ["gasoline", "diesel", "jet_fuel"]: + column_name = f"line_{line_number}_{row.field.lower()}_{fuel_type}" + setattr(summary_obj, column_name, int(getattr(row, fuel_type))) + + # Update low carbon fuel target summary + for row in summary.low_carbon_fuel_target_summary: + column_name = f"line_{row.line}_{row.field}" + setattr( + summary_obj, + column_name, + int(row.value), + ) + + # Update non-compliance penalty summary + non_compliance_summary = summary.non_compliance_penalty_summary + for row in non_compliance_summary: + if row.line == "11": + summary_obj.line_11_fossil_derived_base_fuel_total = row.total_value + elif row.line == "21": + summary_obj.line_21_non_compliance_penalty_payable = row.total_value + elif row.line == "": # Total row + summary_obj.total_non_compliance_penalty_payable = row.total_value + + self.db.add(summary_obj) + await self.db.flush() + await self.db.refresh(summary_obj) + return summary_obj + + @repo_handler + async def get_summary_by_report_id(self, report_id: int) -> ComplianceReportSummary: + query = select(ComplianceReportSummary).where( + ComplianceReportSummary.compliance_report_id == report_id + ) + + result = await self.db.execute(query) + return result.scalars().first() + + # @repo_handler + # async def get_summary_versions(self, report_id: int): + # query = ( + # select( + # ComplianceReportSummary.summary_id, + # ComplianceReportSummary.version, + # case( + # ( + # ComplianceReportSummary.supplemental_report_id.is_(None), + # "Original", + # ), + # else_="Supplemental", + # ).label("type"), + # ) + # 
.where(ComplianceReportSummary.compliance_report_id == report_id) + # .order_by(ComplianceReportSummary.version) + # ) + + # result = await self.db.execute(query) + # return result.all() + + @repo_handler + async def get_transferred_out_compliance_units( + self, + compliance_period_start: datetime, + compliance_period_end: datetime, + organization_id: int, + ) -> int: + result = await self.db.scalar( + select(func.sum(Transfer.quantity)).where( + Transfer.agreement_date.between( + compliance_period_start, compliance_period_end + ), + Transfer.from_organization_id == organization_id, + Transfer.current_status_id == 6, # Recorded + ) + ) + return result or 0 + + @repo_handler + async def get_received_compliance_units( + self, + compliance_period_start: datetime, + compliance_period_end: datetime, + organization_id: int, + ) -> int: + result = await self.db.scalar( + select(func.sum(Transfer.quantity)).where( + Transfer.agreement_date.between( + compliance_period_start, compliance_period_end + ), + Transfer.to_organization_id == organization_id, + Transfer.current_status_id == 6, # Recorded + ) + ) + return result or 0 + + @repo_handler + async def get_issued_compliance_units( + self, + compliance_period_start: datetime, + compliance_period_end: datetime, + organization_id: int, + ) -> int: + result = await self.db.scalar( + select(func.sum(InitiativeAgreement.compliance_units)).where( + InitiativeAgreement.transaction_effective_date.between( + compliance_period_start, compliance_period_end + ), + InitiativeAgreement.to_organization_id == organization_id, + InitiativeAgreement.current_status_id == 3, # Approved + ) + ) + return result or 0 + + def aggregate_fuel_supplies( + self, fuel_supplies: List[FuelSupply], fossil_derived: bool + ) -> Dict[str, float]: + """Aggregate quantities from fuel supplies based on fossil_derived flag.""" + fuel_quantities = defaultdict(float) + + # Use a list comprehension to filter and iterate in one step + for fs in ( + fs for fs in fuel_supplies if fs.fuel_type.fossil_derived == fossil_derived + ): + fuel_category = self._format_category(fs.fuel_category.category) + fuel_quantities[fuel_category] += fs.quantity + + return dict(fuel_quantities) + + @repo_handler + async def aggregate_other_uses( + self, compliance_report_id: int, fossil_derived: bool + ) -> Dict[str, float]: + """Aggregate quantities from other uses.""" + query = ( + select( + FuelCategory.category, + func.coalesce(func.sum(OtherUses.quantity_supplied), 0).label( + "quantity" + ), + ) + .select_from(OtherUses) + .join(FuelType, OtherUses.fuel_type_id == FuelType.fuel_type_id) + .join( + FuelCategory, + OtherUses.fuel_category_id == FuelCategory.fuel_category_id, + ) + .where( + OtherUses.compliance_report_id == compliance_report_id, + FuelType.fossil_derived.is_(fossil_derived), + FuelType.other_uses_fossil_derived.is_(fossil_derived), + ) + .group_by(FuelCategory.category) + ) + + result = await self.db.execute(query) + return {self._format_category(row.category): row.quantity for row in result} + + @repo_handler + async def aggregate_allocation_agreements( + self, compliance_report_id: int + ) -> Dict[str, float]: + """Aggregate quantities from allocation agreements for renewable fuels.""" + query = ( + select( + FuelCategory.category, + func.coalesce(func.sum(AllocationAgreement.quantity), 0).label( + "quantity" + ), + ) + .select_from(AllocationAgreement) + .join(FuelType, AllocationAgreement.fuel_type_id == FuelType.fuel_type_id) + .join( + FuelCategory, + 
AllocationAgreement.fuel_category_id == FuelCategory.fuel_category_id, + ) + .where( + AllocationAgreement.compliance_report_id == compliance_report_id, + FuelType.fossil_derived.is_(False), + FuelType.other_uses_fossil_derived.is_(False), + ) + .group_by(FuelCategory.category) + ) + + result = await self.db.execute(query) + return {self._format_category(row.category): row.quantity for row in result} + + @staticmethod + def _format_category(category: str) -> str: + """Format the fuel category string.""" + return category.lower().replace(" ", "_") + + @repo_handler + async def get_all_org_reported_years(self, organization_id: int): + + return ( + ( + await self.db.execute( + select(CompliancePeriod) + .join( + ComplianceReport, + ComplianceReport.compliance_period_id + == CompliancePeriod.compliance_period_id, + ) + .where(ComplianceReport.organization_id == organization_id) + ) + ) + .scalars() + .all() + ) + + @repo_handler + async def get_latest_report_by_group_uuid( + self, group_uuid: str + ) -> Optional[ComplianceReport]: + """ + Retrieve the latest compliance report for a given group_uuid. + This returns the report with the highest version number within the group. + """ + result = await self.db.execute( + select(ComplianceReport) + .options( + joinedload(ComplianceReport.organization), + joinedload(ComplianceReport.compliance_period), + joinedload(ComplianceReport.current_status), + joinedload(ComplianceReport.summary), + joinedload(ComplianceReport.history).joinedload( + ComplianceReportHistory.status + ), + joinedload(ComplianceReport.history).joinedload( + ComplianceReportHistory.user_profile + ), + ) + .where(ComplianceReport.compliance_report_group_uuid == group_uuid) + .order_by(ComplianceReport.version.desc()) + .limit(1) + ) + return result.scalars().first() + + async def get_compliance_report_by_legacy_id(self, legacy_id): + """ + Retrieve a compliance report from the database by its legacy (TFRS) ID + """ + result = await self.db.execute( + select(ComplianceReport) + .options( + joinedload(ComplianceReport.organization), + joinedload(ComplianceReport.compliance_period), + joinedload(ComplianceReport.current_status), + joinedload(ComplianceReport.summary), + joinedload(ComplianceReport.history).joinedload( + ComplianceReportHistory.status + ), + joinedload(ComplianceReport.history).joinedload( + ComplianceReportHistory.user_profile + ), + joinedload(ComplianceReport.transaction), + ) + .where(ComplianceReport.legacy_id == legacy_id) + ) + return result.scalars().unique().first() diff --git a/backend/lcfs/web/api/compliance_report/schema.py b/backend/lcfs/web/api/compliance_report/schema.py new file mode 100644 index 000000000..e839bee11 --- /dev/null +++ b/backend/lcfs/web/api/compliance_report/schema.py @@ -0,0 +1,219 @@ +from enum import Enum +from typing import ClassVar, Optional, List, Union +from datetime import datetime, date +from lcfs.web.api.fuel_code.schema import EndUseTypeSchema, EndUserTypeSchema + +from lcfs.web.api.base import BaseSchema, FilterModel, PaginationResponseSchema, SortOrder +from pydantic import Field, Extra + +""" +Base - all shared attributes of a resource +Create - attributes required to create a new resource - used in POST requests +Update - attributes that can be updated - used in PUT requests +InDB - attributes present on any resource coming out of the database +Public - attributes present on public facing resources being returned from GET, POST, and PUT requests +""" + + +class 
SupplementalInitiatorType(str, Enum): + SUPPLIER_SUPPLEMENTAL = "Supplier Supplemental" + GOVERNMENT_REASSESSMENT = "Government Reassessment" + + +class ReportingFrequency(str, Enum): + ANNUAL = "Annual" + QUARTERLY = "Quarterly" + + +class PortsEnum(str, Enum): + SINGLE = "Single port" + DUAL = "Dual port" + + +class CompliancePeriodSchema(BaseSchema): + compliance_period_id: int + description: str + effective_date: Optional[datetime] = None + expiration_date: Optional[datetime] = None + display_order: Optional[int] = None + + +class SummarySchema(BaseSchema): + summary_id: int + is_locked: bool + + class Config: + extra = Extra.allow + + +class ComplianceReportStatusSchema(BaseSchema): + compliance_report_status_id: int + status: str + + +class SnapshotSchema(BaseSchema): + pass + + +class ComplianceReportOrganizationSchema(BaseSchema): + organization_id: int + name: str + + +class ComplianceReportUserSchema(BaseSchema): + first_name: str + last_name: str + organization: Optional[ComplianceReportOrganizationSchema] = None + + +class ComplianceReportHistorySchema(BaseSchema): + compliance_report_history_id: int + compliance_report_id: int + status: ComplianceReportStatusSchema + user_profile: ComplianceReportUserSchema + create_date: datetime + + +class NotionalTransfersSchema(BaseSchema): + pass + + +class FuelSuppliesSchema(BaseSchema): + pass + + +class AllocationAgreementSchema(BaseSchema): + pass + + +class FuelMeasurementTypeSchema(BaseSchema): + fuel_measurement_type_id: int + type: str + description: Optional[str] = None + display_order: int + + +class LevelOfEquipmentSchema(BaseSchema): + level_of_equipment_id: int + name: str + description: Optional[str] = None + display_order: int + + +class FSEOptionsSchema(BaseSchema): + intended_use_types: List[EndUseTypeSchema] + intended_user_types: List[EndUserTypeSchema] + fuel_measurement_types: List[FuelMeasurementTypeSchema] + levels_of_equipment: List[LevelOfEquipmentSchema] + ports: ClassVar[List[str]] = [port.value for port in PortsEnum] + + +class FinalSupplyEquipmentSchema(BaseSchema): + final_supply_equipment_id: int + compliance_report_id: int + organization_name: str + supply_from_date: date + supply_to_date: date + registration_nbr: str + kwh_usage: Optional[float] = None + serial_nbr: str + manufacturer: str + model: Optional[str] = None + level_of_equipment: LevelOfEquipmentSchema + fuel_measurement_type: FuelMeasurementTypeSchema + ports: Optional[PortsEnum] = None + intended_use_types: List[EndUseTypeSchema] + intended_user_types: List[EndUserTypeSchema] + street_address: str + city: str + postal_code: str + latitude: float + longitude: float + notes: Optional[str] = None + + +class ComplianceReportBaseSchema(BaseSchema): + compliance_report_id: int + compliance_report_group_uuid: Optional[str] + version: Optional[int] + supplemental_initiator: Optional[SupplementalInitiatorType] + compliance_period_id: int + compliance_period: CompliancePeriodSchema + organization_id: int + organization: ComplianceReportOrganizationSchema + summary: Optional[SummarySchema] + current_status_id: int + current_status: ComplianceReportStatusSchema + transaction_id: Optional[int] = None + nickname: Optional[str] = None + supplemental_note: Optional[str] = None + reporting_frequency: Optional[ReportingFrequency] = None + update_date: Optional[datetime] = None + history: Optional[List[ComplianceReportHistorySchema]] = None + has_supplemental: bool + legacy_id: Optional[int] = None + + +class ChainedComplianceReportSchema(BaseSchema): + report: 
ComplianceReportBaseSchema + chain: Optional[List[ComplianceReportBaseSchema]] = [] + + +class ComplianceReportCreateSchema(BaseSchema): + compliance_period: str + organization_id: int + status: str + legacy_id: Optional[int] = None + nickname: Optional[str] = None + + +class ComplianceReportListSchema(BaseSchema): + pagination: PaginationResponseSchema + reports: List[ComplianceReportBaseSchema] + + +class ComplianceReportSummaryRowSchema(BaseSchema): + line: Optional[str] = "" + description: Optional[str] = "" + field: Optional[str] = "" + gasoline: Optional[float] = 0 + diesel: Optional[float] = 0 + jet_fuel: Optional[float] = 0 + value: Optional[float] = 0 + total_value: Optional[float] = 0 + format: Optional[str] = "" + + +class ComplianceReportSummarySchema(BaseSchema): + renewable_fuel_target_summary: List[ComplianceReportSummaryRowSchema] + low_carbon_fuel_target_summary: List[ComplianceReportSummaryRowSchema] + non_compliance_penalty_summary: List[ComplianceReportSummaryRowSchema] + can_sign: bool = False + summary_id: Optional[int] = None + compliance_report_id: Optional[int] = None + version: Optional[int] = None + is_locked: Optional[bool] = False + quarter: Optional[int] = None + + +class ComplianceReportSummaryUpdateSchema(BaseSchema): + compliance_report_id: int + renewable_fuel_target_summary: List[ComplianceReportSummaryRowSchema] + low_carbon_fuel_target_summary: List[ComplianceReportSummaryRowSchema] + non_compliance_penalty_summary: List[ComplianceReportSummaryRowSchema] + summary_id: int + + +class CommonPaginatedReportRequestSchema(BaseSchema): + compliance_report_id: int = Field(..., alias="complianceReportId") + filters: Optional[List[FilterModel]] = None + page: Optional[int] = None + size: Optional[int] = None + sort_orders: Optional[List[SortOrder]] = None + + +class ComplianceReportUpdateSchema(BaseSchema): + status: str + supplemental_note: Optional[str] = None diff --git a/backend/lcfs/web/api/compliance_report/services.py b/backend/lcfs/web/api/compliance_report/services.py new file mode 100644 index 000000000..dac78edd9 --- /dev/null +++ b/backend/lcfs/web/api/compliance_report/services.py @@ -0,0 +1,279 @@ +import structlog +import math +import uuid +from typing import List +from fastapi import Depends, Request + +from lcfs.db.models.compliance.ComplianceReport import ( + ComplianceReport, + SupplementalInitiatorType, + ReportingFrequency, +) +from lcfs.db.models.compliance.ComplianceReportStatus import ComplianceReportStatusEnum +from lcfs.db.models.compliance.ComplianceReportSummary import ComplianceReportSummary +from lcfs.db.models.user import UserProfile +from lcfs.web.api.base import PaginationResponseSchema +from lcfs.web.api.compliance_report.repo import ComplianceReportRepository +from lcfs.web.api.compliance_report.schema import ( + CompliancePeriodSchema, + ComplianceReportBaseSchema, + ComplianceReportCreateSchema, + ComplianceReportListSchema, +) +from lcfs.web.core.decorators import service_handler +from lcfs.web.exception.exceptions import DataNotFoundException, ServiceException + +logger = structlog.get_logger(__name__) + + +class ComplianceReportServices: + # the request is injected here so create_supplemental_report can read the acting user; + # without it, self.request below would be undefined + def __init__( + self, + repo: ComplianceReportRepository = Depends(), + request: Request = None, + ) -> None: + self.repo = repo + self.request = request + + @service_handler + async def get_all_compliance_periods(self) -> List[CompliancePeriodSchema]: + """Fetches all compliance periods and converts them to Pydantic models.""" + periods = await self.repo.get_all_compliance_periods() + return [CompliancePeriodSchema.model_validate(period) for 
period in periods] + + @service_handler + async def create_compliance_report( + self, + organization_id: int, + report_data: ComplianceReportCreateSchema, + user: UserProfile, + ) -> ComplianceReportBaseSchema: + """Creates a new compliance report.""" + period = await self.repo.get_compliance_period(report_data.compliance_period) + if not period: + raise DataNotFoundException("Compliance period not found.") + + draft_status = await self.repo.get_compliance_report_status_by_desc( + report_data.status + ) + if not draft_status: + raise DataNotFoundException(f"Status '{report_data.status}' not found.") + + # Generate a new group_uuid for the new report series + group_uuid = str(uuid.uuid4()) + + report = ComplianceReport( + compliance_period_id=period.compliance_period_id, + organization_id=organization_id, + current_status_id=draft_status.compliance_report_status_id, + reporting_frequency=ReportingFrequency.ANNUAL, + compliance_report_group_uuid=group_uuid, # New group_uuid for the series + version=0, # Start with version 0 + nickname=report_data.nickname or "Original Report", + summary=ComplianceReportSummary(), # Create an empty summary object + legacy_id=report_data.legacy_id, + create_user=user.keycloak_username, + ) + + # Add the new compliance report + report = await self.repo.create_compliance_report(report) + + # Create the history record + await self.repo.add_compliance_report_history(report, user) + + return ComplianceReportBaseSchema.model_validate(report) + + @service_handler + async def create_supplemental_report( + self, report_id: int + ) -> ComplianceReportBaseSchema: + """ + Creates a new supplemental compliance report. + The report_id can be any report in the series (original or supplemental). + Supplemental reports are only allowed if the status of the current report is 'Assessed'. + """ + + user: UserProfile = self.request.user + + # Fetch the current report using the provided report_id + current_report = await self.repo.get_compliance_report_by_id( + report_id, is_model=True + ) + if not current_report: + raise DataNotFoundException("Compliance report not found.") + + # Validate that the user has permission to create a supplemental report + if user.organization_id != current_report.organization_id: + raise ServiceException( + "You do not have permission to create a supplemental report for this organization." + ) + + # Validate that the status of the current report is 'Assessed' + if current_report.current_status.status != ComplianceReportStatusEnum.Assessed: + raise ServiceException( + "A supplemental report can only be created if the current report's status is 'Assessed'." 
+ ) + + # Get the group_uuid from the current report + group_uuid = current_report.compliance_report_group_uuid + + # Fetch the latest version number for the given group_uuid + latest_report = await self.repo.get_latest_report_by_group_uuid(group_uuid) + if not latest_report: + raise DataNotFoundException("Latest compliance report not found.") + + new_version = latest_report.version + 1 + + # Get the 'Draft' status + draft_status = await self.repo.get_compliance_report_status_by_desc("Draft") + if not draft_status: + raise DataNotFoundException("Draft status not found.") + + # Create the new supplemental compliance report + new_report = ComplianceReport( + compliance_period_id=current_report.compliance_period_id, + organization_id=current_report.organization_id, + current_status_id=draft_status.compliance_report_status_id, + reporting_frequency=current_report.reporting_frequency, + compliance_report_group_uuid=group_uuid, # Use the same group_uuid + version=new_version, # Increment the version + supplemental_initiator=SupplementalInitiatorType.SUPPLIER_SUPPLEMENTAL, + nickname=f"Supplemental report {new_version}", + summary=ComplianceReportSummary(), # Create an empty summary object + ) + + # Add the new supplemental report + new_report = await self.repo.create_compliance_report(new_report) + + # Create the history record for the new supplemental report + await self.repo.add_compliance_report_history(new_report, user) + + return ComplianceReportBaseSchema.model_validate(new_report) + + @service_handler + async def get_compliance_reports_paginated( + self, pagination, organization_id: int = None, bceid_user: bool = False + ): + """Fetches all compliance reports""" + if bceid_user: + for filter in pagination.filters: + if ( + filter.field == "status" + and filter.filter == ComplianceReportStatusEnum.Submitted.value + ): + filter.filter_type = "set" + filter.filter = [ + ComplianceReportStatusEnum.Recommended_by_analyst, + ComplianceReportStatusEnum.Recommended_by_manager, + ComplianceReportStatusEnum.Submitted, + ] + reports, total_count = await self.repo.get_reports_paginated( + pagination, organization_id + ) + + if not reports: + raise DataNotFoundException("No compliance reports found.") + + if bceid_user: + reports = self._mask_report_status(reports) + + return ComplianceReportListSchema( + pagination=PaginationResponseSchema( + total=total_count, + page=pagination.page, + size=pagination.size, + total_pages=math.ceil(total_count / pagination.size), + ), + reports=reports, + ) + + def _mask_report_status(self, reports: List) -> List: + recommended_statuses = { + ComplianceReportStatusEnum.Recommended_by_analyst.value, + ComplianceReportStatusEnum.Recommended_by_manager.value, + } + + masked_reports = [] + for report in reports: + if report.current_status.status in recommended_statuses: + report.current_status.status = ( + ComplianceReportStatusEnum.Submitted.value + ) + report.current_status.compliance_report_status_id = None + + masked_reports.append(report) + else: + masked_reports.append(report) + + return masked_reports + + @service_handler + async def get_compliance_report_by_id( + self, report_id: int, apply_masking: bool = False, get_chain: bool = False + ): + """Fetches a specific compliance report by ID.""" + report = await self.repo.get_compliance_report_by_id(report_id) + if report is None: + raise DataNotFoundException("Compliance report not found.") + + validated_report = ComplianceReportBaseSchema.model_validate(report) + masked_report = ( + 
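+            # for BCeID users, collapse "Recommended by analyst/manager" statuses to Submitted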
self._mask_report_status([validated_report])[0] + if apply_masking + else validated_report + ) + + history_masked_report = self._mask_report_status_for_history( + masked_report, apply_masking + ) + + if get_chain: + compliance_report_chain = await self.repo.get_compliance_report_chain( + report.compliance_report_group_uuid + ) + + if apply_masking: + # Apply masking to each report in the chain + masked_chain = self._mask_report_status(compliance_report_chain) + # Apply history masking to each report in the chain + masked_chain = [ + self._mask_report_status_for_history(report, apply_masking) + for report in masked_chain + ] + compliance_report_chain = masked_chain + + return { + "report": history_masked_report, + "chain": compliance_report_chain, + } + + return history_masked_report + + def _mask_report_status_for_history( + self, report: ComplianceReportBaseSchema, apply_masking: bool = False + ) -> ComplianceReportBaseSchema: + recommended_statuses = { + ComplianceReportStatusEnum.Recommended_by_analyst.value, + ComplianceReportStatusEnum.Recommended_by_manager.value, + } + if ( + apply_masking + or report.current_status.status + == ComplianceReportStatusEnum.Submitted.value + ): + report.history = [ + h for h in report.history if h.status.status not in recommended_statuses + ] + elif ( + report.current_status.status + == ComplianceReportStatusEnum.Recommended_by_analyst.value + ): + report.history = [ + h + for h in report.history + if h.status.status + != ComplianceReportStatusEnum.Recommended_by_manager.value + ] + + return report + + @service_handler + async def get_all_org_reported_years( + self, organization_id: int + ) -> List[CompliancePeriodSchema]: + return await self.repo.get_all_org_reported_years(organization_id) diff --git a/backend/lcfs/web/api/compliance_report/summary_service.py b/backend/lcfs/web/api/compliance_report/summary_service.py new file mode 100644 index 000000000..c4a737c72 --- /dev/null +++ b/backend/lcfs/web/api/compliance_report/summary_service.py @@ -0,0 +1,883 @@ +import logging +import re +from datetime import datetime +from decimal import Decimal +from typing import List, Tuple, Dict + +from fastapi import Depends +from sqlalchemy import inspect + +from lcfs.db.models import FuelSupply +from lcfs.db.models.compliance.ComplianceReport import ComplianceReport +from lcfs.db.models.compliance.ComplianceReportSummary import ComplianceReportSummary +from lcfs.web.api.allocation_agreement.repo import AllocationAgreementRepository +from lcfs.web.api.compliance_report.constants import ( + RENEWABLE_FUEL_TARGET_DESCRIPTIONS, + LOW_CARBON_FUEL_TARGET_DESCRIPTIONS, + NON_COMPLIANCE_PENALTY_SUMMARY_DESCRIPTIONS, + PRESCRIBED_PENALTY_RATE, + FORMATS, +) +from lcfs.web.api.compliance_report.repo import ComplianceReportRepository +from lcfs.web.api.compliance_report.schema import ( + ComplianceReportSummaryRowSchema, + ComplianceReportSummarySchema, + ComplianceReportSummaryUpdateSchema, +) +from lcfs.web.api.fuel_export.repo import FuelExportRepository +from lcfs.web.api.fuel_supply.repo import FuelSupplyRepository +from lcfs.web.api.notional_transfer.services import NotionalTransferServices +from lcfs.web.api.transaction.repo import TransactionRepository +from lcfs.web.core.decorators import service_handler +from lcfs.web.exception.exceptions import DataNotFoundException +from lcfs.web.utils.calculations import calculate_compliance_units + +from lcfs.utils.constants import RENEWABLE_FUEL_TYPES + +logger = logging.getLogger(__name__) + + +class 
ComplianceReportSummaryService: + def __init__( + self, + repo: ComplianceReportRepository = Depends(), + trxn_repo: TransactionRepository = Depends(), + notional_transfer_service: NotionalTransferServices = Depends( + NotionalTransferServices + ), + fuel_supply_repo: FuelSupplyRepository = Depends(FuelSupplyRepository), + fuel_export_repo: FuelExportRepository = Depends(FuelExportRepository), + allocation_agreement_repo: AllocationAgreementRepository = Depends( + AllocationAgreementRepository + ), + ): + self.repo = repo + self.notional_transfer_service = notional_transfer_service + self.trxn_repo = trxn_repo + self.fuel_supply_repo = fuel_supply_repo + self.fuel_export_repo = fuel_export_repo + self.allocation_agreement_repo = allocation_agreement_repo + + def convert_summary_to_dict( + self, summary_obj: ComplianceReportSummary + ) -> ComplianceReportSummarySchema: + """ + Convert a ComplianceReportSummary object to a dictionary representation. + """ + inspector = inspect(summary_obj) + summary = ComplianceReportSummarySchema( + summary_id=summary_obj.summary_id, + compliance_report_id=summary_obj.compliance_report_id, + is_locked=summary_obj.is_locked, + quarter=summary_obj.quarter, + total_non_compliance_penalty_payable=summary_obj.total_non_compliance_penalty_payable, + renewable_fuel_target_summary=[], + low_carbon_fuel_target_summary=[], + non_compliance_penalty_summary=[], + can_sign=False, + ) + for column in inspector.mapper.column_attrs: + match = re.search(r"line_(\d+)_", column.key) + line = int(match.group(1)) if match else None + if line in range(1, 12) and line != 11: + existing_element = next( + ( + existing + for existing in summary.renewable_fuel_target_summary + if existing.line == str(line) + ), + None, + ) + if not existing_element: + existing_element = ComplianceReportSummaryRowSchema( + line=str(line), + format=FORMATS.NUMBER, + description=( + RENEWABLE_FUEL_TARGET_DESCRIPTIONS[str(line)][ + "description" + ].format( + "{:,}".format( + int( + summary_obj.line_4_eligible_renewable_fuel_required_gasoline + * 0.05 + ) + ), + "{:,}".format( + int( + summary_obj.line_4_eligible_renewable_fuel_required_diesel + * 0.05 + ) + ), + "{:,}".format( + int( + summary_obj.line_4_eligible_renewable_fuel_required_jet_fuel + * 0.05 + ) + ), + ) + if (str(line) in ["6", "8"]) + else RENEWABLE_FUEL_TARGET_DESCRIPTIONS[str(line)][ + "description" + ] + ), + field=RENEWABLE_FUEL_TARGET_DESCRIPTIONS[str( + line)]["field"], + ) + summary.renewable_fuel_target_summary.append( + existing_element) + value = int(getattr(summary_obj, column.key) or 0) + if column.key.endswith("_gasoline"): + existing_element.gasoline = value + elif column.key.endswith("_diesel"): + existing_element.diesel = value + elif column.key.endswith("_jet_fuel"): + existing_element.jet_fuel = value + + elif line in range(12, 23) and line != 21: + summary.low_carbon_fuel_target_summary.append( + ComplianceReportSummaryRowSchema( + line=str(line), + format=FORMATS.NUMBER, + description=( + LOW_CARBON_FUEL_TARGET_DESCRIPTIONS[str(line)][ + "description" + ].format( + "{:,}".format( + int( + summary_obj.line_21_non_compliance_penalty_payable + / 600 + ) + ) + ) + if (str(line) == "21") + else LOW_CARBON_FUEL_TARGET_DESCRIPTIONS[str(line)][ + "description" + ] + ), + field=LOW_CARBON_FUEL_TARGET_DESCRIPTIONS[str( + line)]["field"], + value=int(getattr(summary_obj, column.key) or 0), + ) + ) + elif line in [11, 21]: + line = "" if line is None else line + existing_element = next( + ( + existing + for existing in 
summary.non_compliance_penalty_summary + if existing.line == str(line) or existing.line == "" + ), + None, + ) + if not existing_element: + existing_element = ComplianceReportSummaryRowSchema( + line=str(line), + format=FORMATS.CURRENCY, + description=( + NON_COMPLIANCE_PENALTY_SUMMARY_DESCRIPTIONS[str(line)][ + "description" + ].format( + "{:,}".format( + int( + summary_obj.line_21_non_compliance_penalty_payable + / 600 + ) + ) + ) + if line == 21 + else NON_COMPLIANCE_PENALTY_SUMMARY_DESCRIPTIONS[str(line)][ + "description" + ] + ), + field=NON_COMPLIANCE_PENALTY_SUMMARY_DESCRIPTIONS[str(line)][ + "field" + ], + ) + summary.non_compliance_penalty_summary.append( + existing_element) + value = int(getattr(summary_obj, column.key) or 0) + if column.key.endswith("_gasoline"): + existing_element.gasoline = value + elif column.key.endswith("_diesel"): + existing_element.diesel = value + elif column.key.endswith("_jet_fuel"): + existing_element.jet_fuel = value + elif column.key.endswith("_total"): + existing_element.total_value = value + else: + existing_element.value = value + return summary + + # @service_handler + # async def get_summary_versions(self, report_id: int) -> List[Tuple[int, int, str]]: + # """ + # Get a list of all summary versions for a given report, including the original and all supplementals. + + # :param report_id: The ID of the original compliance report + # :return: A list of tuples containing (summary_id, version, type) + # """ + # return await self.repo.get_summary_versions(report_id) + + # @service_handler + # async def get_compliance_report_summary( + # self, report_id: int + # ) -> ComplianceReportSummarySchema: + # """ + # Get a specific compliance report summary by its ID. + # """ + # report = await self.repo.get_summary_by_report_id(report_id) + # return self.map_to_schema(report) + + @service_handler + async def update_compliance_report_summary( + self, + report_id: int, + summary_data: ComplianceReportSummaryUpdateSchema, + ) -> ComplianceReportSummarySchema: + """ + Autosave compliance report summary details for a specific summary by ID. 
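+        The edited rows are persisted first, then the summary is re-calculated so derived lines reflect the saved values.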
+ """ + + await self.repo.save_compliance_report_summary(summary_data) + summary_data = await self.calculate_compliance_report_summary(report_id) + + return summary_data + + def filter_renewable_fuel_supplies(self, fuel_supplies: List[FuelSupply]) -> List[FuelSupply]: + return [ + supply for supply in fuel_supplies + if supply.fuel_type and supply.fuel_type.fuel_type in RENEWABLE_FUEL_TYPES + ] + + @service_handler + async def calculate_compliance_report_summary( + self, report_id: int + ) -> ComplianceReportSummarySchema: + """Several fields on Report Summary are Transient until locked, this function will re-calculate fields as necessary""" + # TODO this method will have to be updated to handle supplemental reports + + # Fetch the compliance report details + compliance_report = await self.repo.get_compliance_report_by_id( + report_id, is_model=True + ) + if not compliance_report: + raise DataNotFoundException("Compliance report not found.") + + prev_compliance_report = ( + await self.repo.get_assessed_compliance_report_by_period( + compliance_report.organization_id, + int(compliance_report.compliance_period.description) - 1, + ) + ) + + summary_model = compliance_report.summary + + # After the report has been submitted, the summary becomes locked + # so we can return the existing summary rather than re-calculating + if summary_model.is_locked: + return self.convert_summary_to_dict(compliance_report.summary) + + compliance_period_start = compliance_report.compliance_period.effective_date + compliance_period_end = compliance_report.compliance_period.expiration_date + organization_id = compliance_report.organization_id + + # Placeholder values for demonstration purposes + # need to get these values from the db after fuel supply is implemented + + # If report for previous period copy carryover amounts + if prev_compliance_report: + # TODO: if previous report exists then ensure in the UI we're disabling the line 7 & 9 for editing + previous_retained = { + "gasoline": prev_compliance_report.summary.line_6_renewable_fuel_retained_gasoline, + "diesel": prev_compliance_report.summary.line_6_renewable_fuel_retained_diesel, + "jet_fuel": prev_compliance_report.summary.line_6_renewable_fuel_retained_jet_fuel, + } + previous_obligation = { + "gasoline": prev_compliance_report.summary.line_8_obligation_deferred_gasoline, + "diesel": prev_compliance_report.summary.line_8_obligation_deferred_diesel, + "jet_fuel": prev_compliance_report.summary.line_8_obligation_deferred_jet_fuel, + } + else: + previous_retained = { + "gasoline": summary_model.line_7_previously_retained_gasoline, + "diesel": summary_model.line_7_previously_retained_diesel, + "jet_fuel": summary_model.line_7_previously_retained_jet_fuel, + } + previous_obligation = { + "gasoline": summary_model.line_9_obligation_added_gasoline, + "diesel": summary_model.line_9_obligation_added_diesel, + "jet_fuel": summary_model.line_9_obligation_added_jet_fuel, + } + + notional_transfers = ( + await self.notional_transfer_service.get_notional_transfers( + compliance_report_id=report_id + ) + ) + + notional_transfers_sums = {"gasoline": 0, "diesel": 0, "jet_fuel": 0} + + for transfer in notional_transfers.notional_transfers: + # Normalize the fuel category key + normalized_category = transfer.fuel_category.replace( + " ", "_").lower() + + # Update the corresponding category sum + if transfer.received_or_transferred.lower() == "received": + notional_transfers_sums[normalized_category] += transfer.quantity + elif transfer.received_or_transferred.lower() == 
"transferred": + notional_transfers_sums[normalized_category] -= transfer.quantity + + # Get effective fuel supplies using the updated logic + effective_fuel_supplies = await self.fuel_supply_repo.get_effective_fuel_supplies( + compliance_report_group_uuid=compliance_report.compliance_report_group_uuid + ) + + # Fetch fuel quantities + # line 1 + fossil_quantities = await self.calculate_fuel_quantities( + compliance_report.compliance_report_id, + effective_fuel_supplies, + fossil_derived=True + ) + # line 2 + filtered_renewable_fuel_supplies = self.filter_renewable_fuel_supplies( + effective_fuel_supplies) + + renewable_quantities = await self.calculate_fuel_quantities( + compliance_report.compliance_report_id, + filtered_renewable_fuel_supplies, + fossil_derived=False + ) + + renewable_fuel_target_summary = self.calculate_renewable_fuel_target_summary( + fossil_quantities, + renewable_quantities, + previous_retained, + previous_obligation, + notional_transfers_sums, + compliance_period=compliance_period_start.year, + prev_summary=compliance_report.summary, + ) + low_carbon_fuel_target_summary, non_compliance_penalty_payable_units = ( + await self.calculate_low_carbon_fuel_target_summary( + compliance_period_start, + compliance_period_end, + organization_id, + compliance_report, + ) + ) + non_compliance_penalty_summary = self.calculate_non_compliance_penalty_summary( + non_compliance_penalty_payable_units, renewable_fuel_target_summary + ) + + existing_summary = self.convert_summary_to_dict(summary_model) + + fuel_export_records = await self.fuel_export_repo.get_effective_fuel_exports( + compliance_report.compliance_report_group_uuid + ) + + allocation_agreements = ( + await self.allocation_agreement_repo.get_allocation_agreements( + compliance_report_id=compliance_report.compliance_report_id + ) + ) + + can_sign = ( + len(effective_fuel_supplies) > 0 + or len(notional_transfers.notional_transfers) > 0 + or len(fuel_export_records) > 0 + or len(allocation_agreements) > 0 + ) + + summary = self.map_to_schema( + compliance_report, + renewable_fuel_target_summary, + low_carbon_fuel_target_summary, + non_compliance_penalty_summary, + summary_model, + can_sign, + ) + + # Only save if summary has changed + if existing_summary.model_dump(mode="json") != summary.model_dump(mode="json"): + logger.debug("Report has changed, updating summary") + await self.repo.save_compliance_report_summary(summary) + return summary + + return existing_summary + + def map_to_schema( + self, + compliance_report, + renewable_fuel_target_summary, + low_carbon_fuel_target_summary, + non_compliance_penalty_summary, + summary_model, + can_sign, + ): + summary = ComplianceReportSummarySchema( + summary_id=summary_model.summary_id, + compliance_report_id=compliance_report.compliance_report_id, + is_locked=summary_model.is_locked, + quarter=summary_model.quarter, + total_non_compliance_penalty_payable=summary_model.total_non_compliance_penalty_payable, + renewable_fuel_target_summary=renewable_fuel_target_summary, + low_carbon_fuel_target_summary=low_carbon_fuel_target_summary, + non_compliance_penalty_summary=non_compliance_penalty_summary, + can_sign=can_sign, + ) + return summary + + def calculate_renewable_fuel_target_summary( + self, + fossil_quantities: dict, + renewable_quantities: dict, + previous_retained: dict, + previous_obligation: dict, + notional_transfers_sums: dict, + compliance_period: int, + prev_summary: ComplianceReportSummary, + ) -> List[ComplianceReportSummaryRowSchema]: + # line 3 + tracked_totals = { 
+ category: fossil_quantities.get(category, 0) + + renewable_quantities.get(category, 0) + for category in ["gasoline", "diesel", "jet_fuel"] + } + + # line 4 + if 2024 <= compliance_period <= 2027: + jet_fuel_percentage = 0 + elif compliance_period == 2028: + jet_fuel_percentage = 1 / 100 + elif compliance_period == 2029: + jet_fuel_percentage = 2 / 100 + else: + jet_fuel_percentage = 3 / 100 + + eligible_renewable_fuel_required = { + "gasoline": tracked_totals["gasoline"] * 0.05, + "diesel": tracked_totals["diesel"] * 0.04, + "jet_fuel": tracked_totals["jet_fuel"] * jet_fuel_percentage, + } + + # line 6 + retained_renewables = {"gasoline": 0.0, "diesel": 0.0, "jet_fuel": 0.0} + # line 8 + deferred_renewables = {"gasoline": 0.0, "diesel": 0.0, "jet_fuel": 0.0} + + for category in ["gasoline", "diesel", "jet_fuel"]: + required_renewable_quantity = eligible_renewable_fuel_required.get( + category) + previous_required_renewable_quantity = getattr( + prev_summary, f"""line_4_eligible_renewable_fuel_required_{ + category}""" + ) + + # only carry over line 6,8 if required quantities have not changed + if previous_required_renewable_quantity == required_renewable_quantity: + retained_renewables[category] = getattr( + prev_summary, f"""line_6_renewable_fuel_retained_{ + category}""" + ) + deferred_renewables[category] = getattr( + prev_summary, f"""line_8_obligation_deferred_{category}""" + ) + + # line 10 + net_renewable_supplied = { + category: + # line 2 + renewable_quantities.get(category, 0) + + # line 5 + notional_transfers_sums.get(category, 0) - + # line 6 + retained_renewables.get(category, 0) + + # line 7 + previous_retained.get(category, 0) + + # line 8 + deferred_renewables.get(category, 0) - + # line 9 + previous_obligation.get(category, 0) + for category in ["gasoline", "diesel", "jet_fuel"] + } + + # line 11 + non_compliance_penalties = { + category: round( + max( + 0, + eligible_renewable_fuel_required.get(category, 0) + - net_renewable_supplied.get(category, 0), + ) + * PRESCRIBED_PENALTY_RATE[category], + 2, + ) + for category in ["gasoline", "diesel", "jet_fuel"] + } + + summary_lines = { + "1": { + "gasoline": fossil_quantities.get("gasoline", 0), + "diesel": fossil_quantities.get("diesel", 0), + "jet_fuel": fossil_quantities.get("jet_fuel", 0), + }, + "2": { + "gasoline": renewable_quantities.get("gasoline", 0), + "diesel": renewable_quantities.get("diesel", 0), + "jet_fuel": renewable_quantities.get("jet_fuel", 0), + }, + "3": { + "gasoline": tracked_totals.get("gasoline", 0), + "diesel": tracked_totals.get("diesel", 0), + "jet_fuel": tracked_totals.get("jet_fuel", 0), + }, + "4": { + "gasoline": eligible_renewable_fuel_required.get("gasoline", 0), + "diesel": eligible_renewable_fuel_required.get("diesel", 0), + "jet_fuel": eligible_renewable_fuel_required.get("jet_fuel", 0), + }, + # Notionally transferred value + "5": notional_transfers_sums, + "6": { + "gasoline": retained_renewables.get("gasoline", 0), + "diesel": retained_renewables.get("diesel", 0), + "jet_fuel": retained_renewables.get("jet_fuel", 0), + }, + "7": { + "gasoline": previous_retained.get("gasoline", 0), + "diesel": previous_retained.get("diesel", 0), + "jet_fuel": previous_retained.get("jet_fuel", 0), + }, + "8": { + "gasoline": deferred_renewables.get("gasoline", 0), + "diesel": deferred_renewables.get("diesel", 0), + "jet_fuel": deferred_renewables.get("jet_fuel", 0), + }, + # Renewable obligation added from previous period + "9": { + "gasoline": previous_obligation.get("gasoline", 0), + "diesel": 
previous_obligation.get("diesel", 0), + "jet_fuel": previous_obligation.get("jet_fuel", 0), + }, + "10": { + "gasoline": net_renewable_supplied.get("gasoline", 0), + "diesel": net_renewable_supplied.get("diesel", 0), + "jet_fuel": net_renewable_supplied.get("jet_fuel", 0), + }, + "11": { + "gasoline": non_compliance_penalties.get("gasoline", 0), + "diesel": non_compliance_penalties.get("diesel", 0), + "jet_fuel": non_compliance_penalties.get("jet_fuel", 0), + }, + } + + summary = [ + ComplianceReportSummaryRowSchema( + line=line, + description=( + RENEWABLE_FUEL_TARGET_DESCRIPTIONS[line]["description"].format( + "{:,}".format( + int(summary_lines["4"]["gasoline"] * 0.05)), + "{:,}".format( + int(summary_lines["4"]["diesel"] * 0.05)), + "{:,}".format( + int(summary_lines["4"]["jet_fuel"] * 0.05)), + ) + if (line in ["6", "8"]) + else RENEWABLE_FUEL_TARGET_DESCRIPTIONS[line]["description"] + ), + field=RENEWABLE_FUEL_TARGET_DESCRIPTIONS[line]["field"], + gasoline=values.get("gasoline", 0), + diesel=values.get("diesel", 0), + jet_fuel=values.get("jet_fuel", 0), + total_value=values.get("gasoline", 0) + + values.get("diesel", 0) + + values.get("jet_fuel", 0), + format=(FORMATS.CURRENCY if ( + str(line) == "11") else FORMATS.NUMBER), + ) + for line, values in summary_lines.items() + ] + + return summary + + async def calculate_low_carbon_fuel_target_summary( + self, + compliance_period_start: datetime, + compliance_period_end: datetime, + organization_id: int, + compliance_report: ComplianceReport, + ) -> Tuple[List[ComplianceReportSummaryRowSchema], int]: + compliance_units_transferred_out = ( + await self.repo.get_transferred_out_compliance_units( + compliance_period_start, compliance_period_end, organization_id + ) + ) # line 12 + compliance_units_received = await self.repo.get_received_compliance_units( + compliance_period_start, compliance_period_end, organization_id + ) # line 13 + compliance_units_issued = await self.repo.get_issued_compliance_units( + compliance_period_start, compliance_period_end, organization_id + ) # line 14 + # TODO - add the logic as required + compliance_units_prev_issued_for_fuel_supply = 0 # line 15 + compliance_units_prev_issued_for_fuel_export = 0 # line 16 - always 0 + available_balance_for_period = await self.trxn_repo.calculate_available_balance_for_period( + organization_id, compliance_period_start.year + ) # line 17 - Available compliance unit balance on March 31, + compliance_units_curr_issued_for_fuel_supply = ( + await self.calculate_fuel_supply_compliance_units(compliance_report) + ) # line 18 fuel supply compliance units total + compliance_units_curr_issued_for_fuel_export = ( + await self.calculate_fuel_export_compliance_units(compliance_report) + ) # line 19 + compliance_unit_balance_change_from_assessment = ( + compliance_units_curr_issued_for_fuel_supply + + compliance_units_curr_issued_for_fuel_export + ) # line 20 + + calculated_penalty_units = int( + available_balance_for_period + + compliance_units_curr_issued_for_fuel_supply + + compliance_units_curr_issued_for_fuel_export + ) + non_compliance_penalty_payable_units = ( + calculated_penalty_units if (calculated_penalty_units < 0) else 0 + ) + non_compliance_penalty_payable = ( + int((non_compliance_penalty_payable_units * Decimal(-600.0)).max(0)) + if non_compliance_penalty_payable_units < 0 + else 0 + ) # line 21 + + available_balance_for_period_after_assessment = ( # line 22 = line 17 + line 20 + max( + available_balance_for_period + + compliance_unit_balance_change_from_assessment, + 0, + 
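+            # floored at zero: a deficit is reflected in the line 21 penalty rather than a negative balance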
) + ) + low_carbon_summary_lines = { + "12": {"value": compliance_units_transferred_out}, + "13": {"value": compliance_units_received}, + "14": {"value": compliance_units_issued}, + "15": {"value": compliance_units_prev_issued_for_fuel_supply}, + "16": {"value": compliance_units_prev_issued_for_fuel_export}, + "17": {"value": available_balance_for_period}, + "18": {"value": compliance_units_curr_issued_for_fuel_supply}, + "19": {"value": compliance_units_curr_issued_for_fuel_export}, + "20": {"value": compliance_unit_balance_change_from_assessment}, + "21": {"value": non_compliance_penalty_payable}, + "22": {"value": available_balance_for_period_after_assessment}, + } + + low_carbon_fuel_target_summary = [ + ComplianceReportSummaryRowSchema( + line=line, + description=( + LOW_CARBON_FUEL_TARGET_DESCRIPTIONS[str(line)][ + "description" + ].format("{:,}".format(non_compliance_penalty_payable_units * -1)) + if (str(line) == "21") + else LOW_CARBON_FUEL_TARGET_DESCRIPTIONS[str(line)]["description"] + ), + field=LOW_CARBON_FUEL_TARGET_DESCRIPTIONS[line]["field"], + value=values.get("value", 0), + format=(FORMATS.CURRENCY if ( + str(line) == "21") else FORMATS.NUMBER), + ) + for line, values in low_carbon_summary_lines.items() + ] + + return low_carbon_fuel_target_summary, non_compliance_penalty_payable_units + + def calculate_non_compliance_penalty_summary( + self, + non_compliance_penalty_payable_units: int, + renewable_fuel_target_summary: List[ComplianceReportSummaryRowSchema], + ) -> List[ComplianceReportSummaryRowSchema]: + non_compliance_penalty_payable = int( + (non_compliance_penalty_payable_units * Decimal(-600.0)).max(0) + ) + line_11 = next( + row for row in renewable_fuel_target_summary if row.line == "11") + + non_compliance_summary_lines = { + "11": {"total_value": line_11.total_value}, + "21": {"total_value": non_compliance_penalty_payable}, + "": {"total_value": non_compliance_penalty_payable + line_11.total_value}, + } + + non_compliance_penalty_summary = [ + ComplianceReportSummaryRowSchema( + line=line, + description=( + NON_COMPLIANCE_PENALTY_SUMMARY_DESCRIPTIONS[line][ + "description" + ].format(non_compliance_penalty_payable_units * -1) + ), + field=NON_COMPLIANCE_PENALTY_SUMMARY_DESCRIPTIONS[line]["field"], + gasoline=values.get("gasoline", 0), + diesel=values.get("diesel", 0), + jet_fuel=values.get("jet_fuel", 0), + total_value=values.get("total_value", 0), + format=FORMATS.CURRENCY, + ) + for line, values in non_compliance_summary_lines.items() + ] + + return non_compliance_penalty_summary + + @service_handler + async def calculate_fuel_quantities( + self, + compliance_report_id: int, + effective_fuel_supplies: List[FuelSupply], + fossil_derived: bool, + ) -> Dict[str, float]: + """ + Calculate the total quantities of fuels, separated by fuel category and fossil_derived flag. + """ + fuel_quantities = self.repo.aggregate_fuel_supplies( + effective_fuel_supplies, fossil_derived + ) + + other_uses = await self.repo.aggregate_other_uses( + compliance_report_id, fossil_derived + ) + + for key, value in other_uses.items(): + fuel_quantities[key] = fuel_quantities.get(key, 0) + value + + return dict(fuel_quantities) + + @service_handler + async def calculate_fuel_supply_compliance_units( + self, report: ComplianceReport + ) -> int: + """ + Calculate the total compliance units for Line 18 based on effective fuel supplies. 
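+        Each record contributes calculate_compliance_units(TCI, EER, RCI, UCI, Q, ED); see lcfs.web.utils.calculations for the formula.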
+ """ + # Fetch fuel supply records + fuel_supply_records = await self.fuel_supply_repo.get_effective_fuel_supplies( + report.compliance_report_group_uuid + ) + + # Initialize compliance units sum + compliance_units_sum = 0 + + # Calculate compliance units for each fuel supply record + for fuel_supply in fuel_supply_records: + TCI = fuel_supply.target_ci or 0 # Target Carbon Intensity + EER = fuel_supply.eer or 0 # Energy Effectiveness Ratio + RCI = fuel_supply.ci_of_fuel or 0 # Recorded Carbon Intensity + UCI = 0 # Assuming additional carbon intensity attributable to use is not available + Q = fuel_supply.quantity or 0 # Quantity of Fuel Supplied + ED = fuel_supply.energy_density or 0 # Energy Density + + # Apply the compliance units formula + compliance_units = calculate_compliance_units( + TCI, EER, RCI, UCI, Q, ED) + compliance_units_sum += compliance_units + + return int(compliance_units_sum) + + @service_handler + async def calculate_fuel_export_compliance_units( + self, report: ComplianceReport + ) -> int: + """ + Calculate the total compliance units for Line 19 based on effective fuel supplies. + """ + # Fetch fuel export records + fuel_export_records = await self.fuel_export_repo.get_effective_fuel_exports( + report.compliance_report_group_uuid + ) + + # Initialize compliance units sum + compliance_units_sum = 0 + # Calculate compliance units for each fuel export record + for fuel_export in fuel_export_records: + TCI = fuel_export.target_ci or 0 # Target Carbon Intensity + EER = fuel_export.eer or 0 # Energy Effectiveness Ratio + RCI = fuel_export.ci_of_fuel or 0 # Recorded Carbon Intensity + UCI = 0 # Assuming additional carbon intensity attributable to use is not available + Q = fuel_export.quantity or 0 # Quantity of Fuel Supplied + ED = fuel_export.energy_density or 0 # Energy Density + + # Apply the compliance units formula + compliance_units = calculate_compliance_units( + TCI, EER, RCI, UCI, Q, ED) + compliance_units = -compliance_units + compliance_units = round( + compliance_units) if compliance_units < 0 else 0 + + compliance_units_sum += compliance_units + + return int(compliance_units_sum) + + +# async def are_identical( +# self, +# report_id: int, +# summary_1: ComplianceReportSummarySchema, +# summary_2: ComplianceReportSummarySchema, +# ) -> bool: +# comparison = self.compare_summaries(report_id, summary_1, summary_2) + +# # Check if all deltas are zero +# for field, values in comparison.items(): +# if values["delta"] != 0: +# return False + +# return True + +# async def compare_summaries( +# self, +# report_id: int, +# summary_1: ComplianceReportSummarySchema, +# summary_2: ComplianceReportSummarySchema, +# ) -> Dict[str, Dict[str, Any]]: +# """ +# Compare two compliance report summaries and return the values and delta for each field. 
+ +# :param report_id: The ID of the original compliance report +# :param summary_1_id: The ID of the first summary to compare +# :param summary_2_id: The ID of the second summary to compare +# :return: A dictionary containing the values and delta for each field +# """ +# if not summary_1 or not summary_2: +# raise ValueError( +# f"""One or both summaries not found: { +# summary_1.summary_id}, {summary_2.summary_id}""" +# ) + +# if ( +# summary_1.compliance_report_id != report_id +# or summary_2.compliance_report_id != report_id +# ): +# raise ValueError( +# f"""Summaries do not belong to the specified report: {report_id}""" +# ) + +# comparison = {} + +# # Compare all float fields +# float_columns = [ +# c.name +# for c in ComplianceReportSummary.__table__.columns +# if isinstance(c.type, Float) +# ] +# for column in float_columns: +# value_1 = getattr(summary_1, column) +# value_2 = getattr(summary_2, column) +# delta = value_2 - value_1 +# comparison[column] = { +# "summary_1_value": value_1, +# "summary_2_value": value_2, +# "delta": delta, +# } + +# return comparison diff --git a/backend/lcfs/web/api/compliance_report/update_service.py b/backend/lcfs/web/api/compliance_report/update_service.py new file mode 100644 index 000000000..1a1d7d9c7 --- /dev/null +++ b/backend/lcfs/web/api/compliance_report/update_service.py @@ -0,0 +1,277 @@ +import json +from fastapi import Depends, HTTPException, Request +from lcfs.web.api.notification.schema import ( + COMPLIANCE_REPORT_STATUS_NOTIFICATION_MAPPER, + NotificationMessageSchema, + NotificationRequestSchema, +) +from lcfs.web.api.notification.services import NotificationService + +from lcfs.db.models.compliance.ComplianceReport import ComplianceReport +from lcfs.db.models.compliance.ComplianceReportStatus import ComplianceReportStatusEnum +from lcfs.db.models.transaction.Transaction import TransactionActionEnum +from lcfs.db.models.user.Role import RoleEnum +from lcfs.web.api.compliance_report.repo import ComplianceReportRepository +from lcfs.web.api.compliance_report.schema import ComplianceReportUpdateSchema +from lcfs.web.api.compliance_report.summary_service import ( + ComplianceReportSummaryService, +) +from lcfs.web.api.organizations.services import OrganizationsService +from lcfs.web.api.role.schema import user_has_roles +from lcfs.web.api.transaction.services import TransactionsService +from lcfs.web.exception.exceptions import DataNotFoundException, ServiceException + + +class ComplianceReportUpdateService: + def __init__( + self, + repo: ComplianceReportRepository = Depends(), + request: Request = None, + summary_service: ComplianceReportSummaryService = Depends(), + org_service: OrganizationsService = Depends(OrganizationsService), + trx_service: TransactionsService = Depends(TransactionsService), + notfn_service: NotificationService = Depends(NotificationService), + ): + self.repo = repo + self.request = request + self.summary_service = summary_service + self.org_service = org_service + self.trx_service = trx_service + self.notfn_service = notfn_service + + async def update_compliance_report( + self, report_id: int, report_data: ComplianceReportUpdateSchema + ) -> ComplianceReport: + """Updates an existing compliance report.""" + RETURN_STATUSES = ["Return to analyst", "Return to manager"] + report = await self.repo.get_compliance_report_by_id(report_id, is_model=True) + if not report: + raise DataNotFoundException( + f"Compliance report with ID {report_id} not found" + ) + current_status = report_data.status + # if we're just 
returning the compliance report back to either compliance manager or analyst, + # then neither a history record nor any summary updates are required. + if report_data.status in RETURN_STATUSES: + status_has_changed = False + # the notification types are resolved later in _perform_notification_call + # from the original status, so no lookup is needed here + if report_data.status == "Return to analyst": + report_data.status = ComplianceReportStatusEnum.Submitted.value + else: + report_data.status = ( + ComplianceReportStatusEnum.Recommended_by_analyst.value + ) + else: + status_has_changed = report.current_status.status != getattr( + ComplianceReportStatusEnum, report_data.status.replace(" ", "_") + ) + new_status = await self.repo.get_compliance_report_status_by_desc( + report_data.status + ) + # Update fields + report.current_status = new_status + report.supplemental_note = report_data.supplemental_note + + updated_report = await self.repo.update_compliance_report(report) + if status_has_changed: + await self.handle_status_change(report, new_status.status) + # Add history record + await self.repo.add_compliance_report_history(report, self.request.user) + + await self._perform_notification_call(report, current_status) + return updated_report + + async def _perform_notification_call(self, report, status): + """Send notifications based on the current status of the compliance report.""" + status_mapper = status.replace(" ", "_") + notifications = COMPLIANCE_REPORT_STATUS_NOTIFICATION_MAPPER.get( + ( + ComplianceReportStatusEnum[status_mapper] + if status_mapper in ComplianceReportStatusEnum.__members__ + else status + ), + None, + ) + message_data = { + "service": "ComplianceReport", + "id": report.compliance_report_id, + "transactionId": report.transaction_id, + "compliancePeriod": report.compliance_period.description, + "status": status.lower(), + } + notification_data = NotificationMessageSchema( + type=f"Compliance report {status.lower()}", + related_transaction_id=f"CR{report.compliance_report_id}", + message=json.dumps(message_data), + related_organization_id=report.organization_id, + origin_user_profile_id=self.request.user.user_profile_id, + ) + if notifications and isinstance(notifications, list): + await self.notfn_service.send_notification( + NotificationRequestSchema( + notification_types=notifications, + notification_data=notification_data, + ) + ) + + async def handle_status_change( + self, report: ComplianceReport, new_status: ComplianceReportStatusEnum + ): + """Handle status-specific actions based on the new status.""" + status_handlers = { + ComplianceReportStatusEnum.Draft: self.handle_draft_status, + ComplianceReportStatusEnum.Submitted: self.handle_submitted_status, + ComplianceReportStatusEnum.Recommended_by_analyst: self.handle_recommended_by_analyst_status, + ComplianceReportStatusEnum.Recommended_by_manager: self.handle_recommended_by_manager_status, + ComplianceReportStatusEnum.Assessed: self.handle_assessed_status, + ComplianceReportStatusEnum.ReAssessed: self.handle_reassessed_status, + } + + handler = status_handlers.get(new_status) + if handler: + await handler(report) + else: + raise ServiceException(f"Unsupported status change to {new_status}") + + async def handle_draft_status(self, report: ComplianceReport): + """Handle actions when a report is set to Draft status.""" + # Implement logic for Draft status + user = self.request.user + has_supplier_role = user_has_roles(user, [RoleEnum.SUPPLIER]) + if not has_supplier_role: + raise HTTPException(status_code=403, detail="Forbidden.") + + async def 
+ async def handle_submitted_status(self, report: ComplianceReport): + """Handle actions when a report is submitted.""" + user = self.request.user + has_supplier_roles = user_has_roles( + user, [RoleEnum.SUPPLIER, RoleEnum.SIGNING_AUTHORITY] + ) + if not has_supplier_roles: + raise HTTPException(status_code=403, detail="Forbidden.") + # Fetch the existing summary from the database, if any + existing_summary = await self.repo.get_summary_by_report_id( + report.compliance_report_id + ) + + # Calculate a new summary based on the current report data + calculated_summary = ( + await self.summary_service.calculate_compliance_report_summary( + report.compliance_report_id + ) + ) + + if not calculated_summary.can_sign: + raise ServiceException("ComplianceReportSummary cannot be signed") + + # If there's an existing summary, preserve user-edited values + if existing_summary: + for row in calculated_summary.renewable_fuel_target_summary: + if row.line == "6": + # Preserve line 6 values (renewable fuel retained) + row.gasoline = ( + existing_summary.line_6_renewable_fuel_retained_gasoline + or row.gasoline + ) + row.diesel = ( + existing_summary.line_6_renewable_fuel_retained_diesel + or row.diesel + ) + row.jet_fuel = ( + existing_summary.line_6_renewable_fuel_retained_jet_fuel + or row.jet_fuel + ) + elif row.line == "7": + # Preserve line 7 values (previously retained) + row.gasoline = ( + existing_summary.line_7_previously_retained_gasoline + or row.gasoline + ) + row.diesel = ( + existing_summary.line_7_previously_retained_diesel or row.diesel + ) + row.jet_fuel = ( + existing_summary.line_7_previously_retained_jet_fuel + or row.jet_fuel + ) + elif row.line == "8": + # Preserve line 8 values (obligation deferred) + row.gasoline = ( + existing_summary.line_8_obligation_deferred_gasoline + or row.gasoline + ) + row.diesel = ( + existing_summary.line_8_obligation_deferred_diesel or row.diesel + ) + row.jet_fuel = ( + existing_summary.line_8_obligation_deferred_jet_fuel + or row.jet_fuel + ) + + # Lock the summary to prevent further edits + calculated_summary.is_locked = True + + # Save the summary + if report.summary: + # Update existing summary + report.summary = await self.repo.save_compliance_report_summary( + calculated_summary + ) + else: + # Create new summary if it doesn't exist + new_summary = await self.repo.add_compliance_report_summary( + calculated_summary + ) + # Update the report with the new summary + report.summary = new_summary + + if report.summary.line_20_surplus_deficit_units != 0: + # Reserve the surplus or deficit units against the organization's balance + report.transaction = await self.org_service.adjust_balance( + transaction_action=TransactionActionEnum.Reserved, + compliance_units=report.summary.line_20_surplus_deficit_units, + organization_id=report.organization_id, + ) + await self.repo.update_compliance_report(report) + + return calculated_summary + + async def handle_recommended_by_analyst_status(self, report: ComplianceReport): + """Handle actions when a report is Recommended by analyst.""" + user = self.request.user + has_analyst_role = user_has_roles(user, [RoleEnum.GOVERNMENT, RoleEnum.ANALYST]) + if not has_analyst_role: + raise HTTPException(status_code=403, detail="Forbidden.") +
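The merge above keeps a supplier's edited line 6-8 values with an `or` fallback to the freshly calculated figures; because it uses `or`, a stored 0 (or None) falls back to the calculated value. A standalone sketch of that rule (names here are illustrative):

```python
def preserve(user_value, calculated_value):
    """Mirror of the row merge above: keep the user's edit unless it is falsy."""
    return user_value or calculated_value


assert preserve(150.0, 200.0) == 150.0  # user edit wins
assert preserve(None, 200.0) == 200.0   # no edit -> calculated value
assert preserve(0.0, 200.0) == 200.0    # zero counts as "not set" under `or`
```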
+ async def handle_recommended_by_manager_status(self, report: ComplianceReport): + """Handle actions when a report is Recommended by manager.""" + user = self.request.user + has_compliance_manager_role = user_has_roles( + user, [RoleEnum.GOVERNMENT, RoleEnum.COMPLIANCE_MANAGER] + ) + if not has_compliance_manager_role: + raise HTTPException(status_code=403, detail="Forbidden.") + + async def handle_assessed_status(self, report: ComplianceReport): + """Handle actions when a report is Assessed.""" + user = self.request.user + has_director_role = user_has_roles( + user, [RoleEnum.GOVERNMENT, RoleEnum.DIRECTOR] + ) + if not has_director_role: + raise HTTPException(status_code=403, detail="Forbidden.") + + if report.transaction: + # Update the transaction to assessed + report.transaction.transaction_action = TransactionActionEnum.Adjustment + report.transaction.update_user = user.keycloak_username + await self.repo.update_compliance_report(report) + + async def handle_reassessed_status(self, report: ComplianceReport): + """Handle actions when a report is ReAssessed.""" + # Implement logic for ReAssessed status + pass diff --git a/backend/lcfs/web/api/compliance_report/validation.py b/backend/lcfs/web/api/compliance_report/validation.py new file mode 100644 index 000000000..4f11e4a1c --- /dev/null +++ b/backend/lcfs/web/api/compliance_report/validation.py @@ -0,0 +1,34 @@ +from fastapi import Depends, HTTPException, Request +from lcfs.db.models.user.Role import RoleEnum +from lcfs.web.api.compliance_report.repo import ComplianceReportRepository +from fastapi import status +from lcfs.web.api.role.schema import user_has_roles + + +class ComplianceReportValidation: + def __init__( + self, + request: Request = None, + repo: ComplianceReportRepository = Depends(ComplianceReportRepository), + ) -> None: + self.request = request + self.repo = repo + + async def validate_organization_access(self, compliance_report_id: int): + compliance_report = await self.repo.check_compliance_report( + compliance_report_id + ) + if not compliance_report: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Compliance report not found.", + ) + + organization_id = compliance_report.organization_id + user_organization_id = ( + self.request.user.organization.organization_id + if self.request.user.organization + else None + ) + + if ( + not user_has_roles(self.request.user, [RoleEnum.GOVERNMENT]) + and organization_id != user_organization_id + ): + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="User does not have access to this compliance report.", + )
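validate_organization_access admits government users to any report and otherwise requires the requester's organization to match the report's. A hedged unit-test sketch, assuming pytest-asyncio and stubbing user_has_roles rather than guessing its role-lookup internals:

```python
import pytest
from unittest.mock import AsyncMock, MagicMock
from fastapi import HTTPException

from lcfs.web.api.compliance_report.validation import ComplianceReportValidation


@pytest.mark.asyncio
async def test_supplier_cannot_read_another_orgs_report(monkeypatch):
    monkeypatch.setattr(
        "lcfs.web.api.compliance_report.validation.user_has_roles",
        lambda user, roles: False,  # requester is not government
    )
    validation = ComplianceReportValidation.__new__(ComplianceReportValidation)
    validation.repo = MagicMock()
    validation.repo.check_compliance_report = AsyncMock(
        return_value=MagicMock(organization_id=1)  # report belongs to org 1
    )
    validation.request = MagicMock()
    validation.request.user.organization.organization_id = 2  # requester is org 2

    with pytest.raises(HTTPException) as exc:
        await validation.validate_organization_access(compliance_report_id=99)
    assert exc.value.status_code == 403
```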
diff --git a/backend/lcfs/web/api/compliance_report/views.py b/backend/lcfs/web/api/compliance_report/views.py new file mode 100644 index 000000000..de43c0c26 --- /dev/null +++ b/backend/lcfs/web/api/compliance_report/views.py @@ -0,0 +1,171 @@ +""" +Compliance reports endpoints +GET: /reports/compliance-periods - retrieve the list of compliance periods +POST: /reports/list - retrieve the list of compliance reports (supports sorting, filtering and pagination) +GET: /reports/fse-options - retrieve the options that assist the user in filling in the Final Supply Equipment rows. +GET: /reports/{report_id} - retrieve a compliance report by ID +""" + +import structlog +from typing import List + +from fastapi import APIRouter, Body, status, Request, Depends, HTTPException + +from lcfs.db.models.user.Role import RoleEnum +from lcfs.services.s3.client import DocumentService +from lcfs.web.api.base import FilterModel, PaginationRequestSchema +from lcfs.web.api.compliance_report.schema import ( + CompliancePeriodSchema, + ComplianceReportBaseSchema, + ComplianceReportListSchema, + ComplianceReportSummarySchema, + ChainedComplianceReportSchema, + ComplianceReportUpdateSchema, + ComplianceReportSummaryUpdateSchema, +) +from lcfs.web.api.compliance_report.services import ComplianceReportServices +from lcfs.web.api.compliance_report.summary_service import ( + ComplianceReportSummaryService, +) +from lcfs.web.api.compliance_report.update_service import ComplianceReportUpdateService +from lcfs.web.api.compliance_report.validation import ComplianceReportValidation +from lcfs.web.exception.exceptions import DataNotFoundException + +from lcfs.web.api.role.schema import user_has_roles +from lcfs.web.core.decorators import view_handler + +router = APIRouter() +logger = structlog.get_logger(__name__) + + +@router.get( + "/compliance-periods", + response_model=List[CompliancePeriodSchema], + status_code=status.HTTP_200_OK, +) +@view_handler(["*"]) +async def get_compliance_periods( + request: Request, service: ComplianceReportServices = Depends() +) -> list[CompliancePeriodSchema]: + """ + Get a list of compliance periods + """ + return await service.get_all_compliance_periods() + + +@router.post( + "/list", + response_model=ComplianceReportListSchema, + status_code=status.HTTP_200_OK, +) +@view_handler([RoleEnum.GOVERNMENT]) +async def get_compliance_reports( + request: Request, + pagination: PaginationRequestSchema = Body(..., embed=False), + service: ComplianceReportServices = Depends(), +) -> ComplianceReportListSchema: + # Add a status filter so that IDIR users cannot see draft reports + pagination.filters.append( + FilterModel(field="status", filter="Draft", filter_type="text", type="notEqual") + ) + return await service.get_compliance_reports_paginated(pagination) + + +@router.get( + "/{report_id}", + response_model=ChainedComplianceReportSchema, + status_code=status.HTTP_200_OK, +) +@view_handler([RoleEnum.GOVERNMENT]) +async def get_compliance_report_by_id( + request: Request, + report_id: int, + service: ComplianceReportServices = Depends(), + validate: ComplianceReportValidation = Depends(), +) -> ChainedComplianceReportSchema: + await validate.validate_organization_access(report_id) + + mask_statuses = not user_has_roles(request.user, [RoleEnum.GOVERNMENT]) + + result = await service.get_compliance_report_by_id( + report_id, mask_statuses, get_chain=True + ) + + return result + + +@router.get( + "/{report_id}/summary", + response_model=ComplianceReportSummarySchema, + status_code=status.HTTP_200_OK, +) +@view_handler(["*"]) +async def get_compliance_report_summary( + request: Request, + report_id: int, + summary_service: ComplianceReportSummaryService = Depends(), + validate: ComplianceReportValidation = Depends(), +) -> ComplianceReportSummarySchema: + """ + Retrieve the comprehensive compliance report summary for a specific report by ID. + """ + await validate.validate_organization_access(report_id) + return await summary_service.calculate_compliance_report_summary(report_id)
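Because the Draft guard is appended server-side, a client cannot opt back into Draft reports by omitting or overriding filters. A sketch of the payload shapes involved; the compliancePeriod filter is illustrative, and page/size/filters are assumed to be the only fields needed here:

```python
from lcfs.web.api.base import FilterModel, PaginationRequestSchema

# What a client might POST to /reports/list (illustrative filter):
pagination = PaginationRequestSchema(
    page=1,
    size=10,
    filters=[
        FilterModel(
            field="compliancePeriod", filter="2024", filter_type="text", type="equals"
        )
    ],
)

# The view then appends its own guard before querying:
pagination.filters.append(
    FilterModel(field="status", filter="Draft", filter_type="text", type="notEqual")
)
```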
+ """ + await validate.validate_organization_access(report_id) + return await summary_service.calculate_compliance_report_summary(report_id) + + +@router.put( + "/{report_id}/summary", + response_model=ComplianceReportSummarySchema, + status_code=status.HTTP_200_OK, +) +@view_handler([RoleEnum.SUPPLIER]) +async def update_compliance_report_summary( + request: Request, + report_id: int, + summary_data: ComplianceReportSummaryUpdateSchema, + summary_service: ComplianceReportSummaryService = Depends(), + validate: ComplianceReportValidation = Depends(), +) -> ComplianceReportSummarySchema: + """ + Autosave compliance report summary details for a specific summary by ID. + """ + await validate.validate_organization_access(report_id) + return await summary_service.update_compliance_report_summary( + report_id, summary_data + ) + + +@view_handler(["*"]) +@router.put( + "/{report_id}", + response_model=ComplianceReportBaseSchema, + status_code=status.HTTP_200_OK, +) +@view_handler([RoleEnum.GOVERNMENT, RoleEnum.SUPPLIER]) +async def update_compliance_report( + request: Request, + report_id: int, + report_data: ComplianceReportUpdateSchema, + update_service: ComplianceReportUpdateService = Depends(), + validate: ComplianceReportValidation = Depends(), +) -> ComplianceReportBaseSchema: + """Update an existing compliance report.""" + await validate.validate_organization_access(report_id) + return await update_service.update_compliance_report(report_id, report_data) + + +@router.post( + "/{report_id}/supplemental", + response_model=ComplianceReportBaseSchema, + status_code=status.HTTP_201_CREATED, +) +@view_handler([RoleEnum.SUPPLIER]) +async def create_supplemental_report( + request: Request, + report_id: int, + service: ComplianceReportServices = Depends(), +) -> ComplianceReportBaseSchema: + """ + Create a supplemental compliance report. 
+ """ + return await service.create_supplemental_report(report_id) diff --git a/backend/lcfs/web/api/dashboard/__init__.py b/backend/lcfs/web/api/dashboard/__init__.py new file mode 100644 index 000000000..49abdad5d --- /dev/null +++ b/backend/lcfs/web/api/dashboard/__init__.py @@ -0,0 +1,5 @@ +"""API for dashboard cards.""" + +from lcfs.web.api.dashboard.views import router + +__all__ = ["router"] diff --git a/backend/lcfs/web/api/dashboard/repo.py b/backend/lcfs/web/api/dashboard/repo.py new file mode 100644 index 000000000..3d7ab51c0 --- /dev/null +++ b/backend/lcfs/web/api/dashboard/repo.py @@ -0,0 +1,72 @@ +import structlog +from fastapi import Depends +from sqlalchemy import select, func, union_all, join, literal +from sqlalchemy.ext.asyncio import AsyncSession +from lcfs.db.dependencies import get_async_db_session +from lcfs.web.core.decorators import repo_handler +from lcfs.db.models.transaction.DirectorReviewTransactionCountView import ( + DirectorReviewTransactionCountView, +) +from lcfs.db.models.transaction.TransactionCountView import TransactionCountView +from lcfs.db.models.organization.Organization import Organization +from lcfs.db.models.compliance.OrgComplianceReportCountView import ( + OrgComplianceReportCountView, +) + +logger = structlog.get_logger(__name__) + + +class DashboardRepository: + def __init__(self, db: AsyncSession = Depends(get_async_db_session)): + self.db = db + + @repo_handler + async def get_director_review_counts(self): + query = select( + DirectorReviewTransactionCountView.transaction_type, + DirectorReviewTransactionCountView.count_for_review, + ) + + result = await self.db.execute(query) + counts = { + row.transaction_type: row.count_for_review for row in result.fetchall() + } + return counts + + @repo_handler + async def get_transaction_counts(self): + query = select( + TransactionCountView.transaction_type, + TransactionCountView.count_in_progress, + ) + + result = await self.db.execute(query) + counts = { + row.transaction_type: row.count_in_progress for row in result.fetchall() + } + return counts + + @repo_handler + async def get_org_transaction_counts(self, organization_id): + query = select(Organization.count_transfers_in_progress).where( + Organization.organization_id == organization_id + ) + + result = await self.db.execute(query) + counts = {"transfers": result.scalar_one()} + return counts + + @repo_handler + async def get_org_compliance_report_counts(self, organization_id: int): + query = select( + OrgComplianceReportCountView.count_in_progress, + OrgComplianceReportCountView.count_awaiting_gov_review, + ).where(OrgComplianceReportCountView.organization_id == organization_id) + + result = await self.db.execute(query) + counts = result.fetchone() + + return { + "in_progress": getattr(counts, "count_in_progress", 0), + "awaiting_gov_review": getattr(counts, "count_awaiting_gov_review", 0), + } diff --git a/backend/lcfs/web/api/dashboard/schema.py b/backend/lcfs/web/api/dashboard/schema.py new file mode 100644 index 000000000..f57217603 --- /dev/null +++ b/backend/lcfs/web/api/dashboard/schema.py @@ -0,0 +1,23 @@ +from lcfs.web.api.base import BaseSchema + + +class DirectorReviewCountsSchema(BaseSchema): + transfers: int + compliance_reports: int + initiative_agreements: int + admin_adjustments: int + + +class TransactionCountsSchema(BaseSchema): + transfers: int + initiative_agreements: int + admin_adjustments: int + + +class OrganizarionTransactionCountsSchema(BaseSchema): + transfers: int + + +class 
OrgComplianceReportCountsSchema(BaseSchema): + in_progress: int + awaiting_gov_review: int diff --git a/backend/lcfs/web/api/dashboard/services.py b/backend/lcfs/web/api/dashboard/services.py new file mode 100644 index 000000000..112b709a1 --- /dev/null +++ b/backend/lcfs/web/api/dashboard/services.py @@ -0,0 +1,57 @@ +import structlog +from fastapi import Depends +from lcfs.web.core.decorators import service_handler +from lcfs.web.api.dashboard.repo import DashboardRepository +from lcfs.web.api.dashboard.schema import ( + DirectorReviewCountsSchema, + TransactionCountsSchema, + OrganizarionTransactionCountsSchema, + OrgComplianceReportCountsSchema, +) + +logger = structlog.get_logger(__name__) + + +class DashboardServices: + def __init__(self, repo: DashboardRepository = Depends(DashboardRepository)): + self.repo = repo + + @service_handler + async def get_director_review_counts(self) -> DirectorReviewCountsSchema: + counts = await self.repo.get_director_review_counts() + + return DirectorReviewCountsSchema( + transfers=counts.get("transfers", 0), + compliance_reports=counts.get("compliance_reports", 0), + initiative_agreements=counts.get("initiative_agreements", 0), + admin_adjustments=counts.get("admin_adjustments", 0), + ) + + @service_handler + async def get_transaction_counts(self) -> TransactionCountsSchema: + counts = await self.repo.get_transaction_counts() + + return TransactionCountsSchema( + transfers=counts.get("transfers", 0), + initiative_agreements=counts.get("initiative_agreements", 0), + admin_adjustments=counts.get("admin_adjustments", 0), + ) + + @service_handler + async def get_org_transaction_counts( + self, organization_id + ) -> OrganizarionTransactionCountsSchema: + counts = await self.repo.get_org_transaction_counts(organization_id) + + return OrganizarionTransactionCountsSchema(transfers=counts.get("transfers", 0)) + + @service_handler + async def get_org_compliance_report_counts( + self, organization_id: int + ) -> OrgComplianceReportCountsSchema: + counts = await self.repo.get_org_compliance_report_counts(organization_id) + + return OrgComplianceReportCountsSchema( + in_progress=counts.get("in_progress", 0), + awaiting_gov_review=counts.get("awaiting_gov_review", 0), + ) diff --git a/backend/lcfs/web/api/dashboard/views.py b/backend/lcfs/web/api/dashboard/views.py new file mode 100644 index 000000000..df8f12c01 --- /dev/null +++ b/backend/lcfs/web/api/dashboard/views.py @@ -0,0 +1,61 @@ +import structlog +from lcfs.db.models.user.Role import RoleEnum +from fastapi import APIRouter, Depends, Request +from lcfs.web.core.decorators import view_handler +from lcfs.web.api.dashboard.services import DashboardServices +from lcfs.web.api.dashboard.schema import ( + DirectorReviewCountsSchema, + TransactionCountsSchema, + OrganizarionTransactionCountsSchema, + OrgComplianceReportCountsSchema, +) +from lcfs.db.models.user.Role import RoleEnum + +router = APIRouter() +logger = structlog.get_logger(__name__) + + +@router.get("/director-review-counts", response_model=DirectorReviewCountsSchema) +@view_handler([RoleEnum.DIRECTOR]) +async def get_director_review_counts( + request: Request, + service: DashboardServices = Depends(), +): + """Endpoint to retrieve counts for director review items""" + return await service.get_director_review_counts() + + +@router.get("/transaction-counts", response_model=TransactionCountsSchema) +@view_handler([RoleEnum.ANALYST, RoleEnum.COMPLIANCE_MANAGER]) +async def get_transaction_counts( + request: Request, + service: DashboardServices = 
Depends(), +): + """Endpoint to retrieve counts for transaction items""" + return await service.get_transaction_counts() + + +@router.get( + "/org-transaction-counts", response_model=OrganizarionTransactionCountsSchema +) +@view_handler([RoleEnum.TRANSFER]) +async def get_org_transaction_counts( + request: Request, + service: DashboardServices = Depends(), +): + """Endpoint to retrieve counts for organization transaction items""" + organization_id = request.user.organization.organization_id + return await service.get_org_transaction_counts(organization_id) + + +@router.get( + "/org-compliance-report-counts", response_model=OrgComplianceReportCountsSchema +) +@view_handler([RoleEnum.COMPLIANCE_REPORTING, RoleEnum.SIGNING_AUTHORITY]) +async def get_org_compliance_report_counts( + request: Request, + service: DashboardServices = Depends(), +): + """Endpoint to retrieve counts for organization compliance report items""" + organization_id = request.user.organization.organization_id + return await service.get_org_compliance_report_counts(organization_id)
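get_org_compliance_report_counts reads the view row defensively with getattr defaults, so an organization with no row in the view yields zeros instead of an error. A hedged sketch of that behaviour through the service layer, assuming the @service_handler decorator passes results through on success:

```python
import asyncio
from unittest.mock import AsyncMock

from lcfs.web.api.dashboard.services import DashboardServices


async def main():
    repo = AsyncMock()
    # Mirrors the repo's getattr(..., 0) defaults when the view has no row.
    repo.get_org_compliance_report_counts.return_value = {
        "in_progress": 0,
        "awaiting_gov_review": 0,
    }
    service = DashboardServices(repo=repo)
    counts = await service.get_org_compliance_report_counts(organization_id=42)
    assert counts.in_progress == 0 and counts.awaiting_gov_review == 0


asyncio.run(main())
```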
- """ - sort_function = desc if sort_direction == "desc" else asc - return { - "page": page, - "per_page": per_page, - "sort_field": sort_field, - "sort_function": sort_function - } diff --git a/backend/lcfs/web/api/document/__init__.py b/backend/lcfs/web/api/document/__init__.py new file mode 100644 index 000000000..702296c76 --- /dev/null +++ b/backend/lcfs/web/api/document/__init__.py @@ -0,0 +1,5 @@ +"""API for dashboard cards.""" + +from lcfs.web.api.document.views import router + +__all__ = ["router"] diff --git a/backend/lcfs/web/api/document/views.py b/backend/lcfs/web/api/document/views.py new file mode 100644 index 000000000..9f1cc026a --- /dev/null +++ b/backend/lcfs/web/api/document/views.py @@ -0,0 +1,96 @@ +from http.client import HTTPException +from lcfs.db.models.compliance.ComplianceReportStatus import ComplianceReportStatusEnum +from lcfs.web.api.compliance_report.services import ComplianceReportServices +import structlog +from typing import List + +from fastapi import APIRouter, Depends, Request, UploadFile +from fastapi.params import File +from starlette import status +from starlette.responses import StreamingResponse + +from lcfs.db.models.user.Role import RoleEnum +from lcfs.services.s3.client import DocumentService +from lcfs.services.s3.schema import FileResponseSchema +from lcfs.web.core.decorators import view_handler + +router = APIRouter() +logger = structlog.get_logger(__name__) + + +@router.get( + "/{parent_type}/{parent_id}", + response_model=List[FileResponseSchema], + status_code=status.HTTP_200_OK, +) +async def get_all_documents( + request: Request, + parent_id: int, + parent_type: str, + document_service: DocumentService = Depends(), +) -> List[FileResponseSchema]: + documents = await document_service.get_by_id_and_type(parent_id, parent_type) + + file_responses = [ + (FileResponseSchema.model_validate(document)) for document in documents + ] + + return file_responses + + +@router.post( + "/{parent_type}/{parent_id}", + response_model=FileResponseSchema, + status_code=status.HTTP_201_CREATED, +) +@view_handler([RoleEnum.SUPPLIER, RoleEnum.ANALYST]) +async def upload_file( + request: Request, + parent_id: int, + parent_type: str, + file: UploadFile = File(...), + document_service: DocumentService = Depends(), +) -> FileResponseSchema: + document = await document_service.upload_file( + file, parent_id, parent_type, request.user + ) + return FileResponseSchema.model_validate(document) + + +@router.get( + "/{parent_type}/{parent_id}/{document_id}", + status_code=status.HTTP_200_OK, +) +async def stream_document( + request: Request, + parent_id: int, + parent_type: str, + document_id: int, + document_service: DocumentService = Depends(), +): + # TODO: Use parent ID and parent type to check permissions / security + file, document = await document_service.get_object(document_id) + + headers = { + "Content-Disposition": f'inline; filename="{document.file_name}"', + "content-length": str(file["ContentLength"]), + } + + return StreamingResponse( + content=file["Body"], media_type=file["ContentType"], headers=headers + ) + + +@router.delete( + "/{parent_type}/{parent_id}/{document_id}", +) +async def delete_file( + request: Request, + parent_type: str, + parent_id: int, + document_id: int, + document_service: DocumentService = Depends(), +): + # TODO: Use parent ID and parent type to check permissions / security + await document_service.delete_file(document_id) + return {"message": "File and metadata deleted successfully"} diff --git 
diff --git a/backend/lcfs/web/api/echo/__init__.py b/backend/lcfs/web/api/echo/__init__.py index b02688103..b4404ae49 100644 --- a/backend/lcfs/web/api/echo/__init__.py +++ b/backend/lcfs/web/api/echo/__init__.py @@ -1,4 +1,5 @@ """Echo API.""" + from lcfs.web.api.echo.views import router __all__ = ["router"] diff --git a/backend/lcfs/web/api/email/__init__.py b/backend/lcfs/web/api/email/__init__.py new file mode 100644 index 000000000..9e5e0025c --- /dev/null +++ b/backend/lcfs/web/api/email/__init__.py @@ -0,0 +1,5 @@ +"""API for email notifications.""" + +from lcfs.web.api.email.views import router + +__all__ = ["router"] diff --git a/backend/lcfs/web/api/email/repo.py b/backend/lcfs/web/api/email/repo.py new file mode 100644 index 000000000..7d5b010b3 --- /dev/null +++ b/backend/lcfs/web/api/email/repo.py @@ -0,0 +1,68 @@ +from lcfs.db.models.notification import NotificationChannelSubscription +from lcfs.db.models.notification import NotificationType +from lcfs.db.models.notification.NotificationChannel import ( + ChannelEnum, + NotificationChannel, +) +from lcfs.web.core.decorators import repo_handler +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import func, select, or_ +from typing import List +from lcfs.db.models.user import UserProfile +from lcfs.db.dependencies import get_async_db_session +from fastapi import Depends + + +class CHESEmailRepository: + def __init__(self, db: AsyncSession = Depends(get_async_db_session)): + self.db = db + + @repo_handler + async def get_subscribed_user_emails( + self, notification_type: str, organization_id: int + ) -> List[str]: + """ + Retrieve emails of users subscribed to a specific notification type and for the Email channel. + Ignores In-Application notification types. + """ + query = ( + select(UserProfile.email) + .join(NotificationChannelSubscription) + .join(NotificationChannelSubscription.notification_channel) + .filter( + NotificationChannelSubscription.notification_type.has( + name=notification_type + ), + NotificationChannelSubscription.is_enabled == True, + NotificationChannel.channel_name + == ChannelEnum.EMAIL, # Only Email channel + or_( + UserProfile.organization_id == organization_id, + UserProfile.organization_id.is_(None), # Include government users + ), + ) + ) + result = await self.db.execute(query) + return [row[0] for row in result.fetchall()] + + @repo_handler + async def get_notification_template(self, notification_type: str) -> str: + """ + Retrieve the email template for a specific notification type.
+ """ + # Query to fetch NotificationType.email_content via explicit join + query = ( + select(NotificationType.email_content) + .join( + NotificationChannelSubscription, + NotificationChannelSubscription.notification_type_id + == NotificationType.notification_type_id, + ) + .filter(NotificationType.name == notification_type) + .limit(1) # Fetch only one record + ) + + # Execute the query + result = await self.db.execute(query) + template = result.scalar_one_or_none() + return template or "default.html" diff --git a/backend/lcfs/web/api/email/schema.py b/backend/lcfs/web/api/email/schema.py new file mode 100644 index 000000000..cd7239f39 --- /dev/null +++ b/backend/lcfs/web/api/email/schema.py @@ -0,0 +1,27 @@ +from typing import Optional +from lcfs.web.api.base import NotificationTypeEnum +from pydantic import BaseModel, Field + +class EmailNotificationRequest(BaseModel): + notification_type: NotificationTypeEnum = Field(..., description="Type of notification") + organization_id: Optional[int] = Field(None, description="Organization ID associated with the notification") + +TEMPLATE_MAPPING = { + "BCEID__COMPLIANCE_REPORT__DIRECTOR_ASSESSMENT": "bceid__compliance_report__director_assessment.html", + "BCEID__INITIATIVE_AGREEMENT__DIRECTOR_APPROVAL": "bceid__initiative_agreement__director_approval.html", + "BCEID__TRANSFER__DIRECTOR_DECISION": "bceid__transfer__director_decision.html", + "BCEID__TRANSFER__PARTNER_ACTIONS": "bceid__transfer__partner_actions.html", + "IDIR_ANALYST__COMPLIANCE_REPORT__DIRECTOR_DECISION": "idir_analyst__compliance_report__director_decision.html", + "IDIR_ANALYST__COMPLIANCE_REPORT__MANAGER_RECOMMENDATION": "idir_analyst__compliance_report__manager_recommendation.html", + "IDIR_ANALYST__COMPLIANCE_REPORT__SUBMITTED_FOR_REVIEW": "idir_analyst__compliance_report__submitted_for_review.html", + "IDIR_ANALYST__INITIATIVE_AGREEMENT__RETURNED_TO_ANALYST": "idir_analyst__initiative_agreement__returned_to_analyst.html", + "IDIR_ANALYST__TRANSFER__DIRECTOR_RECORDED": "idir_analyst__transfer__director_recorded.html", + "IDIR_ANALYST__TRANSFER__RESCINDED_ACTION": "idir_analyst__transfer__rescinded_action.html", + "IDIR_ANALYST__TRANSFER__SUBMITTED_FOR_REVIEW": "idir_analyst__transfer__submitted_for_review.html", + "IDIR_COMPLIANCE_MANAGER__COMPLIANCE_REPORT__ANALYST_RECOMMENDATION": "idir_compliance_manager__compliance_report__analyst_recommendation.html", + "IDIR_COMPLIANCE_MANAGER__COMPLIANCE_REPORT__DIRECTOR_ASSESSMENT": "idir_compliance_manager__compliance_report__director_assessment.html", + "IDIR_COMPLIANCE_MANAGER__COMPLIANCE_REPORT__SUBMITTED_FOR_REVIEW": "idir_compliance_manager__compliance_report__submitted_for_review.html", + "IDIR_DIRECTOR__COMPLIANCE_REPORT__MANAGER_RECOMMENDATION": "idir_director__compliance_report__manager_recommendation.html", + "IDIR_DIRECTOR__INITIATIVE_AGREEMENT__ANALYST_RECOMMENDATION": "idir_director__initiative_agreement__analyst_recommendation.html", + "IDIR_DIRECTOR__TRANSFER__ANALYST_RECOMMENDATION": "idir_director__transfer__analyst_recommendation.html", +} diff --git a/backend/lcfs/web/api/email/services.py b/backend/lcfs/web/api/email/services.py new file mode 100644 index 000000000..066a7a664 --- /dev/null +++ b/backend/lcfs/web/api/email/services.py @@ -0,0 +1,174 @@ +import os +from lcfs.web.api.base import NotificationTypeEnum +import requests +import structlog +from fastapi import Depends +from jinja2 import Environment, FileSystemLoader +from typing import Dict, List, Any +from datetime import datetime + +from lcfs.settings 
import settings +from lcfs.web.api.email.repo import CHESEmailRepository +from lcfs.web.core.decorators import service_handler +from lcfs.web.api.email.schema import TEMPLATE_MAPPING + +logger = structlog.get_logger(__name__) + + +class CHESEmailService: + """ + Service layer for sending email notifications via CHES. + Handles CHES-specific logic and coordinates with the repository. + """ + + def __init__(self, repo: CHESEmailRepository = Depends()): + self.repo = repo + self._access_token = None + self._token_expiry = None + + # Templates are loaded from this package's templates/ directory + template_dir = os.path.join(os.path.dirname(__file__), "templates") + self.template_env = Environment( + loader=FileSystemLoader(template_dir), + autoescape=True # Enable autoescaping for security + ) + + def _validate_configuration(self): + """ + Validate the CHES configuration to ensure all necessary environment variables are set. + """ + missing_configs = [] + + # Check each required CHES configuration setting + if not settings.ches_auth_url: + missing_configs.append("ches_auth_url") + if not settings.ches_email_url: + missing_configs.append("ches_email_url") + if not settings.ches_client_id: + missing_configs.append("ches_client_id") + if not settings.ches_client_secret: + missing_configs.append("ches_client_secret") + if not settings.ches_sender_email: + missing_configs.append("ches_sender_email") + if not settings.ches_sender_name: + missing_configs.append("ches_sender_name") + + if missing_configs: + raise ValueError(f"Missing CHES configuration: {', '.join(missing_configs)}") + + @service_handler + async def send_notification_email( + self, + notification_type: NotificationTypeEnum, + notification_context: Dict[str, Any], + organization_id: int, + ) -> bool: + """ + Send an email notification to users subscribed to the specified notification type. + """ + # Validate configuration before performing any operations + self._validate_configuration() + + # Retrieve subscribed user emails + recipient_emails = await self.repo.get_subscribed_user_emails( + notification_type.value, organization_id + ) + if not recipient_emails: + logger.info(f"No subscribers for notification type: {notification_type.value}") + return False + + # Render the email content + email_body = self._render_email_template( + notification_type.value, notification_context + ) + + # Build email payload + email_payload = self._build_email_payload( + recipient_emails, notification_context, email_body + ) + + # Send email + return await self.send_email(email_payload) + + def _render_email_template( + self, template_name: str, context: Dict[str, Any] + ) -> str: + """ + Render an email template using a predefined mapping of template names to file paths. + Raises an exception if the template is not found. + """ + try: + template_file = TEMPLATE_MAPPING[template_name] + template = self.template_env.get_template(template_file) + return template.render(**context) + except Exception as e: + logger.error(f"Template rendering error: {str(e)}") + raise ValueError( + f"Failed to render email template for {template_name}" + ) from e + + def _build_email_payload( + self, recipients: List[str], context: Dict[str, Any], body: str + ) -> Dict[str, Any]: + """ + Build the payload for sending an email via CHES. 
+ """ + return { + "bcc": recipients, + "to": ["Undisclosed recipients"], + "from": f"{settings.ches_sender_name} <{settings.ches_sender_email}>", + "delayTS": 0, + "encoding": "utf-8", + "priority": "normal", + "subject": context.get("subject", "LCFS Notification"), + "body": body, + "tag": "lcfs_email", + "bodyType": "html", + } + + @service_handler + async def send_email(self, payload: Dict[str, Any]) -> bool: + """ + Send an email using CHES. + """ + # Validate configuration before performing any operations + self._validate_configuration() + + token = await self.get_ches_token() + response = requests.post( + settings.ches_email_url, + json=payload, + headers={ + "Authorization": f"Bearer {token}", + "Content-Type": "application/json", + }, + timeout=15, + ) + response.raise_for_status() + logger.info("Email sent successfully.") + return True + + async def get_ches_token(self) -> str: + """ + Retrieve and cache the CHES access token. + """ + # Validate configuration before performing any operations + self._validate_configuration() + + if self._access_token and datetime.now().timestamp() < self._token_expiry: + return self._access_token + response = requests.post( + settings.ches_auth_url, + data={"grant_type": "client_credentials"}, + auth=(settings.ches_client_id, settings.ches_client_secret), + timeout=10, + ) + response.raise_for_status() + + token_data = response.json() + self._access_token = token_data.get("access_token") + self._token_expiry = datetime.now().timestamp() + token_data.get( + "expires_in", 3600 + ) + logger.info("Retrieved new CHES token.") + return self._access_token \ No newline at end of file diff --git a/backend/lcfs/web/api/email/templates/bceid__compliance_report__director_assessment.html b/backend/lcfs/web/api/email/templates/bceid__compliance_report__director_assessment.html new file mode 100644 index 000000000..56009ef1b --- /dev/null +++ b/backend/lcfs/web/api/email/templates/bceid__compliance_report__director_assessment.html @@ -0,0 +1,3 @@ +{% extends 'notification_base.html' %} +{% set notification_type = 'Credit Record' %} +{% set url_slug = 'credit-records' %} \ No newline at end of file diff --git a/backend/lcfs/web/api/email/templates/bceid__initiative_agreement__director_approval.html b/backend/lcfs/web/api/email/templates/bceid__initiative_agreement__director_approval.html new file mode 100644 index 000000000..35e2912d7 --- /dev/null +++ b/backend/lcfs/web/api/email/templates/bceid__initiative_agreement__director_approval.html @@ -0,0 +1,3 @@ +{% extends 'notification_base.html' %} +{% set notification_type = 'Initiative Agreement' %} +{% set url_slug = 'initiative-agreement' %} \ No newline at end of file diff --git a/backend/lcfs/web/api/email/templates/bceid__transfer__director_decision.html b/backend/lcfs/web/api/email/templates/bceid__transfer__director_decision.html new file mode 100644 index 000000000..18063fc72 --- /dev/null +++ b/backend/lcfs/web/api/email/templates/bceid__transfer__director_decision.html @@ -0,0 +1,3 @@ +{% extends 'notification_base.html' %} +{% set notification_type = 'Transfer' %} +{% set url_slug = 'transfers' %} \ No newline at end of file diff --git a/backend/lcfs/web/api/email/templates/bceid__transfer__partner_actions.html b/backend/lcfs/web/api/email/templates/bceid__transfer__partner_actions.html new file mode 100644 index 000000000..18063fc72 --- /dev/null +++ b/backend/lcfs/web/api/email/templates/bceid__transfer__partner_actions.html @@ -0,0 +1,3 @@ +{% extends 'notification_base.html' %} +{% set 
notification_type = 'Transfer' %} +{% set url_slug = 'transfers' %} \ No newline at end of file diff --git a/backend/lcfs/web/api/email/templates/idir_analyst__compliance_report__director_decision.html b/backend/lcfs/web/api/email/templates/idir_analyst__compliance_report__director_decision.html new file mode 100644 index 000000000..56009ef1b --- /dev/null +++ b/backend/lcfs/web/api/email/templates/idir_analyst__compliance_report__director_decision.html @@ -0,0 +1,3 @@ +{% extends 'notification_base.html' %} +{% set notification_type = 'Credit Record' %} +{% set url_slug = 'credit-records' %} \ No newline at end of file diff --git a/backend/lcfs/web/api/email/templates/idir_analyst__compliance_report__manager_recommendation.html b/backend/lcfs/web/api/email/templates/idir_analyst__compliance_report__manager_recommendation.html new file mode 100644 index 000000000..56009ef1b --- /dev/null +++ b/backend/lcfs/web/api/email/templates/idir_analyst__compliance_report__manager_recommendation.html @@ -0,0 +1,3 @@ +{% extends 'notification_base.html' %} +{% set notification_type = 'Credit Record' %} +{% set url_slug = 'credit-records' %} \ No newline at end of file diff --git a/backend/lcfs/web/api/email/templates/idir_analyst__compliance_report__submitted_for_review.html b/backend/lcfs/web/api/email/templates/idir_analyst__compliance_report__submitted_for_review.html new file mode 100644 index 000000000..56009ef1b --- /dev/null +++ b/backend/lcfs/web/api/email/templates/idir_analyst__compliance_report__submitted_for_review.html @@ -0,0 +1,3 @@ +{% extends 'notification_base.html' %} +{% set notification_type = 'Credit Record' %} +{% set url_slug = 'credit-records' %} \ No newline at end of file diff --git a/backend/lcfs/web/api/email/templates/idir_analyst__initiative_agreement__returned_to_analyst.html b/backend/lcfs/web/api/email/templates/idir_analyst__initiative_agreement__returned_to_analyst.html new file mode 100644 index 000000000..43a49a9d1 --- /dev/null +++ b/backend/lcfs/web/api/email/templates/idir_analyst__initiative_agreement__returned_to_analyst.html @@ -0,0 +1,3 @@ +{% extends 'notification_base.html' %} +{% set notification_type = 'Initiative Agreement' %} +{% set url_slug = 'initiative-agreements' %} \ No newline at end of file diff --git a/backend/lcfs/web/api/email/templates/idir_analyst__transfer__director_recorded.html b/backend/lcfs/web/api/email/templates/idir_analyst__transfer__director_recorded.html new file mode 100644 index 000000000..9ddb47ee8 --- /dev/null +++ b/backend/lcfs/web/api/email/templates/idir_analyst__transfer__director_recorded.html @@ -0,0 +1,3 @@ +{% extends 'notification_base.html' %} +{% set notification_type = 'Transfer' %} +{% set url_slug = 'transfer-requests' %} \ No newline at end of file diff --git a/backend/lcfs/web/api/email/templates/idir_analyst__transfer__rescinded_action.html b/backend/lcfs/web/api/email/templates/idir_analyst__transfer__rescinded_action.html new file mode 100644 index 000000000..9ddb47ee8 --- /dev/null +++ b/backend/lcfs/web/api/email/templates/idir_analyst__transfer__rescinded_action.html @@ -0,0 +1,3 @@ +{% extends 'notification_base.html' %} +{% set notification_type = 'Transfer' %} +{% set url_slug = 'transfer-requests' %} \ No newline at end of file diff --git a/backend/lcfs/web/api/email/templates/idir_analyst__transfer__submitted_for_review.html b/backend/lcfs/web/api/email/templates/idir_analyst__transfer__submitted_for_review.html new file mode 100644 index 000000000..9ddb47ee8 --- /dev/null +++ 
b/backend/lcfs/web/api/email/templates/idir_analyst__transfer__submitted_for_review.html @@ -0,0 +1,3 @@ +{% extends 'notification_base.html' %} +{% set notification_type = 'Transfer' %} +{% set url_slug = 'transfer-requests' %} \ No newline at end of file diff --git a/backend/lcfs/web/api/email/templates/idir_compliance_manager__compliance_report__analyst_recommendation.html b/backend/lcfs/web/api/email/templates/idir_compliance_manager__compliance_report__analyst_recommendation.html new file mode 100644 index 000000000..56009ef1b --- /dev/null +++ b/backend/lcfs/web/api/email/templates/idir_compliance_manager__compliance_report__analyst_recommendation.html @@ -0,0 +1,3 @@ +{% extends 'notification_base.html' %} +{% set notification_type = 'Credit Record' %} +{% set url_slug = 'credit-records' %} \ No newline at end of file diff --git a/backend/lcfs/web/api/email/templates/idir_compliance_manager__compliance_report__director_assessment.html b/backend/lcfs/web/api/email/templates/idir_compliance_manager__compliance_report__director_assessment.html new file mode 100644 index 000000000..56009ef1b --- /dev/null +++ b/backend/lcfs/web/api/email/templates/idir_compliance_manager__compliance_report__director_assessment.html @@ -0,0 +1,3 @@ +{% extends 'notification_base.html' %} +{% set notification_type = 'Credit Record' %} +{% set url_slug = 'credit-records' %} \ No newline at end of file diff --git a/backend/lcfs/web/api/email/templates/idir_compliance_manager__compliance_report__submitted_for_review.html b/backend/lcfs/web/api/email/templates/idir_compliance_manager__compliance_report__submitted_for_review.html new file mode 100644 index 000000000..56009ef1b --- /dev/null +++ b/backend/lcfs/web/api/email/templates/idir_compliance_manager__compliance_report__submitted_for_review.html @@ -0,0 +1,3 @@ +{% extends 'notification_base.html' %} +{% set notification_type = 'Credit Record' %} +{% set url_slug = 'credit-records' %} \ No newline at end of file diff --git a/backend/lcfs/web/api/email/templates/idir_director__compliance_report__manager_recommendation.html b/backend/lcfs/web/api/email/templates/idir_director__compliance_report__manager_recommendation.html new file mode 100644 index 000000000..56009ef1b --- /dev/null +++ b/backend/lcfs/web/api/email/templates/idir_director__compliance_report__manager_recommendation.html @@ -0,0 +1,3 @@ +{% extends 'notification_base.html' %} +{% set notification_type = 'Credit Record' %} +{% set url_slug = 'credit-records' %} \ No newline at end of file diff --git a/backend/lcfs/web/api/email/templates/idir_director__initiative_agreement__analyst_recommendation.html b/backend/lcfs/web/api/email/templates/idir_director__initiative_agreement__analyst_recommendation.html new file mode 100644 index 000000000..43a49a9d1 --- /dev/null +++ b/backend/lcfs/web/api/email/templates/idir_director__initiative_agreement__analyst_recommendation.html @@ -0,0 +1,3 @@ +{% extends 'notification_base.html' %} +{% set notification_type = 'Initiative Agreement' %} +{% set url_slug = 'initiative-agreements' %} \ No newline at end of file diff --git a/backend/lcfs/web/api/email/templates/idir_director__transfer__analyst_recommendation.html b/backend/lcfs/web/api/email/templates/idir_director__transfer__analyst_recommendation.html new file mode 100644 index 000000000..9ddb47ee8 --- /dev/null +++ b/backend/lcfs/web/api/email/templates/idir_director__transfer__analyst_recommendation.html @@ -0,0 +1,3 @@ +{% extends 'notification_base.html' %} +{% set notification_type = 'Transfer' 
%} +{% set url_slug = 'transfer-requests' %} \ No newline at end of file diff --git a/backend/lcfs/web/api/email/templates/notification_base.html b/backend/lcfs/web/api/email/templates/notification_base.html new file mode 100644 index 000000000..560b93243 --- /dev/null +++ b/backend/lcfs/web/api/email/templates/notification_base.html @@ -0,0 +1,109 @@
+<!-- 109-line HTML table layout; the table markup was lost in extraction. Recoverable text content: -->
+Low Carbon Fuels Standard Reporting System
+This email was generated by the Government of British Columbia, Low Carbon Fuel Standard portal.
+A {{ notification_type }} update has occurred within the Low Carbon Fuel Standard portal.
+For more details, please go to: https://lowcarbonfuels.gov.bc.ca/{{ url_slug }}
+You received this email because you subscribed at the site above; to stop receiving these emails, log in to your account here: https://lowcarbonfuels.gov.bc.ca/notifications
\ No newline at end of file diff --git a/backend/lcfs/web/api/email/views.py b/backend/lcfs/web/api/email/views.py new file mode 100644 index 000000000..4f448e96a --- /dev/null +++ b/backend/lcfs/web/api/email/views.py @@ -0,0 +1,50 @@ +from fastapi import APIRouter, Depends, HTTPException, status, Request +from typing import Dict +from lcfs.web.api.email.schema import EmailNotificationRequest +from lcfs.web.api.email.services import CHESEmailService +from lcfs.web.core.decorators import view_handler + +router = APIRouter() + + +@router.post( + "/test-email-notification", + status_code=status.HTTP_200_OK, + summary="Test Default Email Notification", + response_model=Dict[str, str], +) +@view_handler(["*"]) +async def test_email_notification( + request: Request, + payload: EmailNotificationRequest, + service: CHESEmailService = Depends(), +): + """ + Endpoint to test sending a default email notification. + This is primarily for validating the email notification setup. + """ + # Define a test notification context + notification_context = { + "subject": "Test Notification", + "user_name": "Test User", + "message_body": "This is a test notification email from LCFS Notification System.", + } + + # Extract notification type and organization ID from the request payload + notification_type = payload.notification_type + organization_id = payload.organization_id + + # Trigger the email sending process + success = await service.send_notification_email( + notification_type=notification_type, + notification_context=notification_context, + organization_id=organization_id, + ) + + if success: + return {"status": "success", "message": "Test email sent successfully."} + else: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="Failed to send test email." + )
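A smoke-test sketch for the endpoint above; the host, route prefix, bearer token, and the enum value (taken from TEMPLATE_MAPPING's keys) are all assumptions about the deployed app, not facts from this diff:

```python
import requests

resp = requests.post(
    "http://localhost:8000/api/email/test-email-notification",  # assumed mount point
    json={
        "notification_type": "BCEID__TRANSFER__PARTNER_ACTIONS",  # assumed valid enum value
        "organization_id": 1,
    },
    headers={"Authorization": "Bearer <token>"},  # placeholder credentials
    timeout=15,
)
print(resp.status_code, resp.json())
```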
diff --git a/backend/lcfs/web/api/final_supply_equipment/__init__.py b/backend/lcfs/web/api/final_supply_equipment/__init__.py new file mode 100644 index 000000000..45af7ca11 --- /dev/null +++ b/backend/lcfs/web/api/final_supply_equipment/__init__.py @@ -0,0 +1,5 @@ +"""API for final supply equipment.""" + +from lcfs.web.api.final_supply_equipment.views import router + +__all__ = ["router"] diff --git a/backend/lcfs/web/api/final_supply_equipment/repo.py b/backend/lcfs/web/api/final_supply_equipment/repo.py new file mode 100644 index 000000000..f2353b624 --- /dev/null +++ b/backend/lcfs/web/api/final_supply_equipment/repo.py @@ -0,0 +1,423 @@ +import structlog +from typing import List, Tuple +from lcfs.db.models.compliance import EndUserType, FinalSupplyEquipment, ComplianceReport +from lcfs.db.models.compliance.FinalSupplyEquipmentRegNumber import ( + FinalSupplyEquipmentRegNumber, +) +from lcfs.db.models.compliance.FuelMeasurementType import FuelMeasurementType +from lcfs.db.models.compliance.LevelOfEquipment import LevelOfEquipment +from lcfs.db.models.fuel.EndUseType import EndUseType +from lcfs.web.api.base import PaginationRequestSchema +from lcfs.web.api.final_supply_equipment.schema import FinalSupplyEquipmentCreateSchema, PortsEnum +from sqlalchemy import and_, delete, distinct, exists, select, update +from sqlalchemy.orm import joinedload, selectinload +from sqlalchemy.ext.asyncio import AsyncSession +from fastapi import Depends + +from lcfs.web.core.decorators import repo_handler +from lcfs.db.dependencies import get_async_db_session +from sqlalchemy import func + +logger = structlog.get_logger(__name__) + + +class FinalSupplyEquipmentRepository: + def __init__(self, db: AsyncSession = Depends(get_async_db_session)): + self.db = db + + @repo_handler + async def get_fse_options( + self, organization + ) -> Tuple[ + List[EndUseType], + List[LevelOfEquipment], + List[FuelMeasurementType], + List[EndUserType], + List[PortsEnum], + List[str], + ]: + """ + Retrieve all FSE options in a single database transaction + """ + async with self.db.begin_nested(): + intended_use_types = await self.get_intended_use_types() + levels_of_equipment = await self.get_levels_of_equipment() + fuel_measurement_types = await self.get_fuel_measurement_types() + intended_user_types = await self.get_intended_user_types() + organization_names = await self.get_organization_names(organization) + ports = list(PortsEnum) + return ( + intended_use_types, + levels_of_equipment, + fuel_measurement_types, + intended_user_types, + ports, + organization_names, + ) + + async def get_intended_use_types(self) -> List[EndUseType]: + """ + Retrieve a list of intended use types from the database + """ + return ( + ( + await self.db.execute( + select(EndUseType).where(EndUseType.intended_use == True) + ) + ) + .scalars() + .all() + ) + + @repo_handler + async def get_intended_use_by_name(self, intended_use: str) -> EndUseType: + """ + Retrieve intended use type by name from the database + """ + return ( + ( + await self.db.execute( + select(EndUseType).where( + and_( + EndUseType.type == intended_use, + EndUseType.intended_use == True, + ) + ) + ) + ) + .unique() + .scalar_one_or_none() + ) + + async def get_intended_user_types(self) -> List[EndUserType]: + """ + Retrieve a list of intended user types from the database + """ + return ( + ( + await self.db.execute( + select(EndUserType).where(EndUserType.intended_use == True) + ) + ) + .scalars() + .all() + )
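get_fse_options returns a positional six-tuple; callers must unpack it in exactly the order it is built. A sketch of a consumer, with the dict keys mirroring the FSEOptionsSchema fields that appear later in this diff:

```python
async def map_fse_options(repo, organization):
    # Unpack in the same order the repository returns the options.
    (
        intended_use_types,
        levels_of_equipment,
        fuel_measurement_types,
        intended_user_types,
        ports,
        organization_names,
    ) = await repo.get_fse_options(organization)
    return {
        "intended_use_types": intended_use_types,
        "levels_of_equipment": levels_of_equipment,
        "fuel_measurement_types": fuel_measurement_types,
        "intended_user_types": intended_user_types,
        "ports": ports,
        "organization_names": organization_names,
    }
```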
+ async def get_organization_names(self, organization) -> List[str]: + """ + Retrieve unique organization names for Final Supply Equipment records + associated with the given organization via ComplianceReport. + + Args: + organization: The organization whose compliance reports are searched. + + Returns: + List[str]: A list of unique organization names. + """ + organization_names = ( + await self.db.execute( + select(distinct(FinalSupplyEquipment.organization_name)) + .join(ComplianceReport, FinalSupplyEquipment.compliance_report_id == ComplianceReport.compliance_report_id) + .filter(ComplianceReport.organization_id == organization.organization_id) + .filter(FinalSupplyEquipment.organization_name.isnot(None)) + ) + ).all() + + # Extract strings from the list of tuples + return [name[0] for name in organization_names] + + @repo_handler + async def get_intended_user_by_name(self, intended_user: str) -> EndUserType: + """ + Retrieve an intended user type by name from the database + """ + return ( + ( + await self.db.execute( + select(EndUserType).where( + and_( + EndUserType.type_name == intended_user, + EndUserType.intended_use == True, + ) + ) + ) + ) + .unique() + .scalar_one_or_none() + ) + + async def get_levels_of_equipment(self) -> List[LevelOfEquipment]: + """ + Retrieve a list of levels of equipment from the database + """ + return (await self.db.execute(select(LevelOfEquipment))).scalars().all() + + @repo_handler + async def get_level_of_equipment_by_name(self, name: str) -> LevelOfEquipment: + """ + Get the level of equipment by name + """ + return ( + ( + await self.db.execute( + select(LevelOfEquipment).where(LevelOfEquipment.name == name) + ) + ) + .unique() + .scalar_one_or_none() + ) + + async def get_fuel_measurement_types(self) -> List[FuelMeasurementType]: + """ + Retrieve a list of fuel measurement types from the database + """ + return (await self.db.execute(select(FuelMeasurementType))).scalars().all() + + @repo_handler + async def get_fuel_measurement_type_by_type(self, type: str) -> FuelMeasurementType: + """ + Get the fuel measurement type by type + """ + return ( + ( + await self.db.execute( + select(FuelMeasurementType).where(FuelMeasurementType.type == type) + ) + ) + .unique() + .scalar_one_or_none() + ) + + @repo_handler + async def get_fse_list(self, report_id: int) -> List[FinalSupplyEquipment]: + """ + Retrieve a list of final supply equipment from the database + """ + result = await self.db.execute( + select(FinalSupplyEquipment) + .options( + joinedload(FinalSupplyEquipment.fuel_measurement_type), + joinedload(FinalSupplyEquipment.intended_use_types), + joinedload(FinalSupplyEquipment.intended_user_types), + joinedload(FinalSupplyEquipment.level_of_equipment), + ) + .where(FinalSupplyEquipment.compliance_report_id == report_id) + .order_by(FinalSupplyEquipment.create_date.asc()) + ) + return result.unique().scalars().all() + + @repo_handler + async def get_fse_paginated( + self, pagination: PaginationRequestSchema, compliance_report_id: int + ) -> Tuple[List[FinalSupplyEquipment], int]: + """ + Retrieve a paginated list of final supply equipment and the total count + """ + conditions = [FinalSupplyEquipment.compliance_report_id == compliance_report_id] + offset = 0 if pagination.page < 1 else (pagination.page - 1) * pagination.size + limit = pagination.size + query = ( + select(FinalSupplyEquipment) + .options( + joinedload(FinalSupplyEquipment.fuel_measurement_type), + joinedload(FinalSupplyEquipment.intended_use_types), + joinedload(FinalSupplyEquipment.intended_user_types), + joinedload(FinalSupplyEquipment.level_of_equipment), + ) + .where(*conditions) + ) + count_query = 
query.with_only_columns( + func.count(FinalSupplyEquipment.final_supply_equipment_id) + ).order_by(None) + total_count = (await self.db.execute(count_query)).scalar_one() + result = await self.db.execute( + query.offset(offset) + .limit(limit) + .order_by(FinalSupplyEquipment.create_date.asc()) + ) + final_supply_equipments = result.unique().scalars().all() + return final_supply_equipments, total_count + + @repo_handler + async def get_final_supply_equipment_by_id( + self, final_supply_equipment_id: int + ) -> FinalSupplyEquipment: + """ + Retrieve a final supply equipment from the database + """ + result = await self.db.execute( + select(FinalSupplyEquipment) + .options( + joinedload(FinalSupplyEquipment.fuel_measurement_type), + joinedload(FinalSupplyEquipment.intended_use_types), + joinedload(FinalSupplyEquipment.intended_user_types), + joinedload(FinalSupplyEquipment.level_of_equipment), + ) + .where( + FinalSupplyEquipment.final_supply_equipment_id + == final_supply_equipment_id + ) + ) + return result.unique().scalar_one_or_none() + + @repo_handler + async def update_final_supply_equipment( + self, final_supply_equipment: FinalSupplyEquipment + ) -> FinalSupplyEquipment: + """ + Update an existing final supply equipment in the database. + """ + updated_final_supply_equipment = await self.db.merge(final_supply_equipment) + await self.db.flush() + await self.db.refresh( + final_supply_equipment, + ["fuel_measurement_type", "level_of_equipment", "intended_use_types","intended_user_types"], + ) + return updated_final_supply_equipment + + @repo_handler + async def create_final_supply_equipment( + self, final_supply_equipment: FinalSupplyEquipment + ) -> FinalSupplyEquipment: + """ + Create a new final supply equipment in the database. + """ + self.db.add(final_supply_equipment) + await self.db.flush() + await self.db.refresh( + final_supply_equipment, + ["fuel_measurement_type", "level_of_equipment", "intended_use_types"], + ) + return final_supply_equipment + + @repo_handler + async def delete_final_supply_equipment(self, final_supply_equipment_id: int): + """Delete a final supply equipment from the database""" + await self.db.execute( + delete(FinalSupplyEquipment).where( + FinalSupplyEquipment.final_supply_equipment_id + == final_supply_equipment_id + ) + ) + await self.db.flush() + + @repo_handler + async def get_current_seq_by_org_and_postal_code( + self, organization_code: str, postal_code: str + ) -> int: + """ + Retrieve the current sequence number for a given organization code and postal code. + """ + result = await self.db.execute( + select(FinalSupplyEquipmentRegNumber.current_sequence_number).where( + and_( + FinalSupplyEquipmentRegNumber.organization_code + == organization_code, + FinalSupplyEquipmentRegNumber.postal_code == postal_code, + ) + ) + ) + current_sequence_number = result.scalar() + return current_sequence_number if current_sequence_number is not None else 0 + + @repo_handler + async def increment_seq_by_org_and_postal_code( + self, organization_code: str, postal_code: str + ) -> int: + """ + Increment and return the next sequence number for a given organization code and postal code. 
+ """ + # Try to update the existing sequence + result = await self.db.execute( + update(FinalSupplyEquipmentRegNumber) + .where( + and_( + FinalSupplyEquipmentRegNumber.organization_code + == organization_code, + FinalSupplyEquipmentRegNumber.postal_code == postal_code, + ) + ) + .values( + current_sequence_number=FinalSupplyEquipmentRegNumber.current_sequence_number + + 1 + ) + .returning(FinalSupplyEquipmentRegNumber.current_sequence_number) + ) + sequence_number = result.scalar() + + if sequence_number is None: + # If no existing sequence, insert a new one + new_record = FinalSupplyEquipmentRegNumber( + organization_code=organization_code, + postal_code=postal_code, + current_sequence_number=1, + ) + self.db.add(new_record) + await self.db.flush() + sequence_number = 1 + + return sequence_number + + @repo_handler + async def check_uniques_of_fse_row(self, row: FinalSupplyEquipmentCreateSchema) -> bool: + """ + Check if a duplicate final supply equipment row exists in the database based on the provided data. + Returns True if a duplicate is found, False otherwise. + """ + conditions = [ + FinalSupplyEquipment.supply_from_date == row.supply_from_date, + FinalSupplyEquipment.supply_to_date == row.supply_to_date, + FinalSupplyEquipment.serial_nbr == row.serial_nbr, + FinalSupplyEquipment.postal_code == row.postal_code, + FinalSupplyEquipment.latitude == row.latitude, + FinalSupplyEquipment.longitude == row.longitude, + ] + + if row.final_supply_equipment_id is not None: + conditions.append( + FinalSupplyEquipment.final_supply_equipment_id + != row.final_supply_equipment_id + ) + + query = select(exists().where(*conditions)) + result = await self.db.execute(query) + + return result.scalar() + + @repo_handler + async def check_overlap_of_fse_row( + self, row: FinalSupplyEquipmentCreateSchema + ) -> bool: + """ + Check if there's an overlapping final supply equipment row in the database based on the provided data. + Returns True if an overlap is found, False otherwise. + """ + conditions = [ + and_( + FinalSupplyEquipment.supply_from_date <= row.supply_to_date, + FinalSupplyEquipment.supply_to_date >= row.supply_from_date, + ), + FinalSupplyEquipment.serial_nbr == row.serial_nbr, + ] + + if row.final_supply_equipment_id is not None: + conditions.append( + FinalSupplyEquipment.final_supply_equipment_id + != row.final_supply_equipment_id + ) + + query = select(exists().where(*conditions)) + result = await self.db.execute(query) + + return result.scalar() + + @repo_handler + async def search_manufacturers(self, query: str) -> list[str]: + """ + Search for manufacturers based on the provided query. 
+ """ + result = await self.db.execute( + select(distinct(FinalSupplyEquipment.manufacturer)).where( + FinalSupplyEquipment.manufacturer.ilike(f"%{query}%") + ) + ) + return result.scalars().all() diff --git a/backend/lcfs/web/api/final_supply_equipment/schema.py b/backend/lcfs/web/api/final_supply_equipment/schema.py new file mode 100644 index 000000000..2dc81e8f3 --- /dev/null +++ b/backend/lcfs/web/api/final_supply_equipment/schema.py @@ -0,0 +1,69 @@ +from typing import ClassVar, Optional, List +from datetime import date + +from lcfs.web.api.base import BaseSchema, PaginationResponseSchema +from lcfs.web.api.compliance_report.schema import FinalSupplyEquipmentSchema +from lcfs.web.api.fuel_code.schema import EndUseTypeSchema, EndUserTypeSchema +from pydantic import Field +from enum import Enum + + +class FuelMeasurementTypeSchema(BaseSchema): + fuel_measurement_type_id: int + type: str + description: Optional[str] = None + display_order: int + + +class LevelOfEquipmentSchema(BaseSchema): + level_of_equipment_id: int + name: str + description: Optional[str] = None + display_order: int + + +class PortsEnum(str, Enum): + SINGLE = "Single port" + DUAL = "Dual port" + + +class FSEOptionsSchema(BaseSchema): + intended_use_types: List[EndUseTypeSchema] + fuel_measurement_types: List[FuelMeasurementTypeSchema] + levels_of_equipment: List[LevelOfEquipmentSchema] + intended_user_types: List[EndUserTypeSchema] + ports: List[PortsEnum] + organization_names: List[str] + + +class FinalSupplyEquipmentCreateSchema(BaseSchema): + final_supply_equipment_id: Optional[int] = None + compliance_report_id: Optional[int] = None + organization_name: str + supply_from_date: date + supply_to_date: date + kwh_usage: float + serial_nbr: str + manufacturer: str + model: Optional[str] = None + level_of_equipment: str + ports: Optional[PortsEnum] = None + fuel_measurement_type: str + intended_uses: List[str] + intended_users: List[str] + street_address: str + city: str + postal_code: str = Field(pattern=r"^[A-Za-z]\d[A-Za-z] \d[A-Za-z]\d$") + latitude: float + longitude: float + notes: Optional[str] = None + deleted: Optional[bool] = None + + +class DeleteFinalSupplyEquipmentResponseSchema(BaseSchema): + message: str + + +class FinalSupplyEquipmentsSchema(BaseSchema): + final_supply_equipments: Optional[List[FinalSupplyEquipmentSchema]] = [] + pagination: Optional[PaginationResponseSchema] = {} diff --git a/backend/lcfs/web/api/final_supply_equipment/services.py b/backend/lcfs/web/api/final_supply_equipment/services.py new file mode 100644 index 000000000..a70b1ce4b --- /dev/null +++ b/backend/lcfs/web/api/final_supply_equipment/services.py @@ -0,0 +1,298 @@ +import structlog +import math +import re +from fastapi import Depends, Request + +from lcfs.db.models.compliance import FinalSupplyEquipment +from lcfs.web.api.base import PaginationRequestSchema, PaginationResponseSchema +from lcfs.web.api.compliance_report.schema import FinalSupplyEquipmentSchema +from lcfs.web.api.final_supply_equipment.schema import ( + FinalSupplyEquipmentCreateSchema, + FinalSupplyEquipmentsSchema, + FuelMeasurementTypeSchema, + LevelOfEquipmentSchema, +) +from lcfs.web.api.final_supply_equipment.repo import FinalSupplyEquipmentRepository +from lcfs.web.api.fuel_code.schema import EndUseTypeSchema, EndUserTypeSchema +from lcfs.web.core.decorators import service_handler + +logger = structlog.get_logger(__name__) + + +class FinalSupplyEquipmentServices: + def __init__( + self, request: Request = None, repo: FinalSupplyEquipmentRepository = 
Depends() + ) -> None: + self.request = request + self.repo = repo + + @service_handler + async def get_fse_options(self): + """Fetches all FSE options concurrently.""" + organization = self.request.user.organization + ( + intended_use_types, + levels_of_equipment, + fuel_measurement_types, + intended_user_types, + ports, + organization_names, + ) = await self.repo.get_fse_options(organization) + + return { + "intended_use_types": [ + EndUseTypeSchema.model_validate(t) for t in intended_use_types + ], + "levels_of_equipment": [ + LevelOfEquipmentSchema.model_validate(l) for l in levels_of_equipment + ], + "fuel_measurement_types": [ + FuelMeasurementTypeSchema.model_validate(f) + for f in fuel_measurement_types + ], + "intended_user_types": [ + EndUserTypeSchema.model_validate(u) for u in intended_user_types + ], + "ports": [port.value for port in ports], + "organization_names": organization_names, + } + + async def convert_to_fse_model(self, fse: FinalSupplyEquipmentCreateSchema): + fse_model = FinalSupplyEquipment( + **fse.model_dump( + exclude={ + "id", + "level_of_equipment", + "fuel_measurement_type", + "intended_uses", + "intended_users", + "deleted", + } + ) + ) + fse_model.level_of_equipment = await self.repo.get_level_of_equipment_by_name( + fse.level_of_equipment + ) + fse_model.fuel_measurement_type = ( + await self.repo.get_fuel_measurement_type_by_type(fse.fuel_measurement_type) + ) + for intended_use in fse.intended_uses: + fse_model.intended_use_types.append( + await self.repo.get_intended_use_by_name(intended_use) + ) + for intended_user in fse.intended_users: + fse_model.intended_user_types.append( + await self.repo.get_intended_user_by_name(intended_user) + ) + return fse_model + + @service_handler + async def get_fse_list( + self, compliance_report_id: int + ) -> FinalSupplyEquipmentsSchema: + """ + Get the list of FSEs for a given report. + """ + logger.info( + "Getting FSE list for report", + compliance_report_id=compliance_report_id, + ) + fse_models = await self.repo.get_fse_list(compliance_report_id) + fse_list = [ + FinalSupplyEquipmentSchema.model_validate(fse) for fse in fse_models + ] + return FinalSupplyEquipmentsSchema(final_supply_equipments=fse_list) + + @service_handler + async def get_final_supply_equipments_paginated( + self, pagination: PaginationRequestSchema, compliance_report_id: int + ) -> FinalSupplyEquipmentsSchema: + """ + Get the list of FSEs for a given report. 
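+
+        The response pagination echoes the requested page and size and reports
+        total_pages as ceil(total / size); e.g. 45 matching rows with size=10
+        yield 5 pages (hypothetical numbers).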
+ """ + logger.info( + "Getting paginated FSE list for report", + compliance_report_id=compliance_report_id, + page=pagination.page, + size=pagination.size, + ) + final_supply_equipments, total_count = await self.repo.get_fse_paginated( + pagination, compliance_report_id + ) + return FinalSupplyEquipmentsSchema( + pagination=PaginationResponseSchema( + page=pagination.page, + size=pagination.size, + total=total_count, + total_pages=math.ceil(total_count / pagination.size), + ), + final_supply_equipments=[ + FinalSupplyEquipmentSchema.model_validate(fse) + for fse in final_supply_equipments + ], + ) + + @service_handler + async def update_final_supply_equipment( + self, fse_data: FinalSupplyEquipmentCreateSchema + ) -> FinalSupplyEquipmentSchema: + """Update an existing final supply equipment""" + + existing_fse = await self.repo.get_final_supply_equipment_by_id( + fse_data.final_supply_equipment_id + ) + if not existing_fse: + raise ValueError("final supply equipment not found") + + existing_fse.organization_name = fse_data.organization_name + existing_fse.kwh_usage = fse_data.kwh_usage + existing_fse.serial_nbr = fse_data.serial_nbr + existing_fse.manufacturer = fse_data.manufacturer + existing_fse.model = fse_data.model + if existing_fse.fuel_measurement_type.type != fse_data.fuel_measurement_type: + fuel_measurement_type = await self.repo.get_fuel_measurement_type_by_type( + fse_data.fuel_measurement_type + ) + existing_fse.fuel_measurement_type = fuel_measurement_type + if existing_fse.level_of_equipment.name != fse_data.level_of_equipment: + level_of_equipment = await self.repo.get_level_of_equipment_by_name( + fse_data.level_of_equipment + ) + existing_fse.level_of_equipment = level_of_equipment + existing_fse.ports = fse_data.ports + intended_use_types = [] + for intended_use in fse_data.intended_uses: + if intended_use not in [ + intended_use_type.type + for intended_use_type in existing_fse.intended_use_types + ]: + intended_use_type = await self.repo.get_intended_use_by_name( + intended_use + ) + intended_use_types.append(intended_use_type) + else: + intended_use_types.append( + next( + ( + intended_use_type + for intended_use_type in existing_fse.intended_use_types + if intended_use_type.type == intended_use + ), + None, + ) + ) + intended_user_types = [] + for intended_user in fse_data.intended_users: + # Check if this intended use is already in the existing list + existing_user_type = next( + ( + existing_user + for existing_user in existing_fse.intended_user_types + if existing_user.type_name == intended_user + ), + None, + ) + + if existing_user_type: + intended_user_types.append(existing_user_type) + else: + # Otherwise, fetch the intended user type by name and add it to the list + new_user_type = await self.repo.get_intended_user_by_name(intended_user) + intended_user_types.append(new_user_type) + + existing_fse.supply_from_date = fse_data.supply_from_date + existing_fse.supply_to_date = fse_data.supply_to_date + existing_fse.intended_use_types = intended_use_types + existing_fse.intended_user_types = intended_user_types + existing_fse.street_address = fse_data.street_address + existing_fse.city = fse_data.city + existing_fse.postal_code = fse_data.postal_code + existing_fse.latitude = fse_data.latitude + existing_fse.longitude = fse_data.longitude + existing_fse.notes = fse_data.notes + + updated_transfer = await self.repo.update_final_supply_equipment(existing_fse) + return FinalSupplyEquipmentSchema.model_validate(updated_transfer) + + @service_handler + async def 
create_final_supply_equipment(
+        self, fse_data: FinalSupplyEquipmentCreateSchema
+    ) -> FinalSupplyEquipmentSchema:
+        """Create a new final supply equipment"""
+        # Generate the registration number
+        registration_nbr = await self.generate_registration_number(fse_data.postal_code)
+
+        final_supply_equipment = await self.convert_to_fse_model(fse_data)
+        final_supply_equipment.registration_nbr = registration_nbr
+        created_equipment = await self.repo.create_final_supply_equipment(
+            final_supply_equipment
+        )
+
+        # Increment the sequence number for the postal code if creation was successful
+        if created_equipment:
+            org_code = self.request.user.organization.organization_code
+            await self.repo.increment_seq_by_org_and_postal_code(
+                org_code, fse_data.postal_code
+            )
+
+        return FinalSupplyEquipmentSchema.model_validate(created_equipment)
+
+    @service_handler
+    async def delete_final_supply_equipment(
+        self, final_supply_equipment_id: int
+    ) -> None:
+        """Delete a final supply equipment"""
+        return await self.repo.delete_final_supply_equipment(final_supply_equipment_id)
+
+    @service_handler
+    async def generate_registration_number(self, postal_code: str) -> str:
+        """
+        Generate a unique registration number for a Final Supply Equipment (FSE).
+
+        The registration number is composed of the organization code, the postal code
+        with its space removed, and a three-digit sequential number, joined by hyphens.
+        The sequential number is tracked separately for each organization code and
+        postal code combination.
+
+        Args:
+            postal_code (str): The postal code of the FSE.
+
+        Returns:
+            str: The generated unique registration number.
+
+        Raises:
+            ValueError: If the postal code is not a valid Canadian postal code, if the organization code is not available,
+            or if the maximum of 999 registration numbers for the given postal code is exceeded.
+        """
+        # Validate the postal code format
+        postal_code_pattern = re.compile(r"^[A-Za-z]\d[A-Za-z] \d[A-Za-z]\d$")
+        if not postal_code_pattern.match(postal_code):
+            raise ValueError("Invalid Canadian postal code format")
+
+        # Retrieve the organization code from the requesting user
+        org_code = self.request.user.organization.organization_code
+        if not org_code:
+            raise ValueError("Organization code is not available")
+
+        # Retrieve the current sequence number for the organization code and postal code
+        current_number = await self.repo.get_current_seq_by_org_and_postal_code(
+            org_code, postal_code
+        )
+        next_number = current_number + 1
+
+        # Ensure the sequential number is within the 001-999 range
+        if next_number > 999:
+            raise ValueError(
+                "Exceeded maximum registration numbers for the given postal code"
+            )
+
+        formatted_next_number = f"{next_number:03d}"
+
+        # Remove the space in the postal code
+        postal_code_no_space = postal_code.replace(" ", "")
+
+        # Concatenate to form the registration number and return it
+        return f"{org_code}-{postal_code_no_space}-{formatted_next_number}"
+
+    @service_handler
+    async def search_manufacturers(self, query: str) -> list[str]:
+        """Search for manufacturers based on the provided query."""
+        return await self.repo.search_manufacturers(query)
diff --git a/backend/lcfs/web/api/final_supply_equipment/validation.py b/backend/lcfs/web/api/final_supply_equipment/validation.py
new file mode 100644
index 000000000..55e88a5ce
--- /dev/null
+++ b/backend/lcfs/web/api/final_supply_equipment/validation.py
@@ -0,0 +1,54 @@
+from typing import List, Optional
+from fastapi import Depends, HTTPException, Request
+from lcfs.db.models.compliance import FinalSupplyEquipment
+from lcfs.web.api.compliance_report.schema import FinalSupplyEquipmentSchema
+from lcfs.web.api.final_supply_equipment.repo import FinalSupplyEquipmentRepository
+from lcfs.web.api.final_supply_equipment.schema import FinalSupplyEquipmentCreateSchema
+from starlette import status
+
+from lcfs.web.api.compliance_report.repo import ComplianceReportRepository
+from lcfs.utils.constants import LCFS_Constants
+
+
+class FinalSupplyEquipmentValidation:
+    def __init__(
+        self,
+        request: Request = None,
+        fse_repo: FinalSupplyEquipmentRepository = Depends(
+            FinalSupplyEquipmentRepository
+        ),
+        report_repo: ComplianceReportRepository = Depends(ComplianceReportRepository),
+    ):
+        self.fse_repo = fse_repo
+        self.request = request
+        self.report_repo = report_repo
+
+    async def validate_fse_record(
+        self,
+        compliance_report_id: int,
+        final_supply_equipments: List[FinalSupplyEquipmentCreateSchema],
+    ):
+        for final_supply_equipment in final_supply_equipments:
+            if final_supply_equipment.compliance_report_id != compliance_report_id:
+                raise HTTPException(
+                    status_code=status.HTTP_400_BAD_REQUEST,
+                    detail=f"Mismatched compliance_report_id in final supply equipment: {final_supply_equipment}",
+                )
+        # TODO: validate each field from the UI
+
+    async def check_equipment_uniqueness_and_overlap(self, data: FinalSupplyEquipmentCreateSchema):
+        # Check for exact duplicates
+        is_duplicate = await self.fse_repo.check_uniques_of_fse_row(data)
+        if is_duplicate:
+            raise ValueError(
+                "Duplicate equipment found. Each equipment must be unique based on serial number, supply date range and location."
+            )
+
+        # Check for date range overlap
+        is_overlapping = await self.fse_repo.check_overlap_of_fse_row(data)
+        if is_overlapping:
+            raise ValueError(
+                f"Date range overlap found for equipment with serial number {data.serial_nbr} at the same or different location."
+ ) + + return True # If no duplicates or overlaps found diff --git a/backend/lcfs/web/api/final_supply_equipment/views.py b/backend/lcfs/web/api/final_supply_equipment/views.py new file mode 100644 index 000000000..b061922df --- /dev/null +++ b/backend/lcfs/web/api/final_supply_equipment/views.py @@ -0,0 +1,129 @@ +import structlog +from typing import List, Optional, Union + +from fastapi import ( + APIRouter, + Body, + Query, + status, + Request, + Response, + Depends, +) + +from lcfs.db import dependencies +from lcfs.web.api.base import PaginationRequestSchema +from lcfs.web.api.compliance_report.schema import ( + CommonPaginatedReportRequestSchema, + FinalSupplyEquipmentSchema, +) +from lcfs.web.api.compliance_report.validation import ComplianceReportValidation +from lcfs.web.api.final_supply_equipment.schema import ( + DeleteFinalSupplyEquipmentResponseSchema, + FSEOptionsSchema, + FinalSupplyEquipmentCreateSchema, + FinalSupplyEquipmentsSchema, +) +from lcfs.web.api.final_supply_equipment.services import FinalSupplyEquipmentServices +from lcfs.web.api.final_supply_equipment.validation import ( + FinalSupplyEquipmentValidation, +) +from lcfs.web.core.decorators import view_handler +from lcfs.db.models.user.Role import RoleEnum + +router = APIRouter() +logger = structlog.get_logger(__name__) +get_async_db = dependencies.get_async_db_session + + +@router.get( + "/table-options", response_model=FSEOptionsSchema, status_code=status.HTTP_200_OK +) +@view_handler(["*"]) +async def get_fse_options( + request: Request, service: FinalSupplyEquipmentServices = Depends() +) -> FSEOptionsSchema: + return await service.get_fse_options() + + +@router.post( + "/list-all", + response_model=FinalSupplyEquipmentsSchema, + status_code=status.HTTP_200_OK, +) +@view_handler(["*"]) +async def get_final_supply_equipments( + request: Request, + request_data: CommonPaginatedReportRequestSchema = Body(...), + response: Response = None, + service: FinalSupplyEquipmentServices = Depends(), + report_validate: ComplianceReportValidation = Depends(), +) -> FinalSupplyEquipmentsSchema: + """Endpoint to get list of final supply equipments for a compliance report""" + compliance_report_id = request_data.compliance_report_id + await report_validate.validate_organization_access(compliance_report_id) + if hasattr(request_data, "page") and request_data.page is not None: + # handle pagination. 
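+        # A "page" attribute that is present and non-None selects the paginated
+        # path; otherwise the full unpaginated list for the report is returned
+        # below.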
+        pagination = PaginationRequestSchema(
+            page=request_data.page,
+            size=request_data.size,
+            sort_orders=request_data.sort_orders,
+            filters=request_data.filters,
+        )
+        return await service.get_final_supply_equipments_paginated(
+            pagination, compliance_report_id
+        )
+    else:
+        return await service.get_fse_list(compliance_report_id)
+
+
+@router.post(
+    "/save",
+    response_model=Union[
+        FinalSupplyEquipmentSchema, DeleteFinalSupplyEquipmentResponseSchema
+    ],
+    status_code=status.HTTP_201_CREATED,
+)
+@view_handler([RoleEnum.SUPPLIER])
+async def save_final_supply_equipment_row(
+    request: Request,
+    request_data: FinalSupplyEquipmentCreateSchema = Body(...),
+    fse_service: FinalSupplyEquipmentServices = Depends(),
+    report_validate: ComplianceReportValidation = Depends(),
+    fse_validate: FinalSupplyEquipmentValidation = Depends(),
+):
+    """Endpoint to save a single final supply equipment row"""
+    compliance_report_id = request_data.compliance_report_id
+    fse_id: Optional[int] = request_data.final_supply_equipment_id
+
+    await report_validate.validate_organization_access(compliance_report_id)
+
+    if request_data.deleted:
+        # Delete existing final supply equipment row
+        await fse_service.delete_final_supply_equipment(fse_id)
+        return DeleteFinalSupplyEquipmentResponseSchema(
+            message="Final supply equipment row deleted successfully"
+        )
+    elif fse_id:
+        await fse_validate.check_equipment_uniqueness_and_overlap(data=request_data)
+        # Update existing final supply equipment row
+        return await fse_service.update_final_supply_equipment(request_data)
+    else:
+        await fse_validate.check_equipment_uniqueness_and_overlap(data=request_data)
+        # Create new final supply equipment row
+        return await fse_service.create_final_supply_equipment(request_data)
+
+
+@router.get("/search", response_model=List[str], status_code=status.HTTP_200_OK)
+@view_handler([RoleEnum.SUPPLIER])
+async def search_table_options(
+    request: Request,
+    manufacturer: Optional[str] = Query(
+        None, alias="manufacturer", description="Manufacturer for filtering options"
+    ),
+    service: FinalSupplyEquipmentServices = Depends(),
+) -> List[str]:
+    """Endpoint to search table options strings"""
+    if manufacturer:
+        return await service.search_manufacturers(manufacturer)
+    return []
diff --git a/backend/lcfs/web/api/fuel_code/__init__.py b/backend/lcfs/web/api/fuel_code/__init__.py
new file mode 100644
index 000000000..45fe85147
--- /dev/null
+++ b/backend/lcfs/web/api/fuel_code/__init__.py
@@ -0,0 +1,5 @@
+"""Fuel code API."""
+
+from lcfs.web.api.fuel_code.views import router
+
+__all__ = ["router"]
diff --git a/backend/lcfs/web/api/fuel_code/export.py b/backend/lcfs/web/api/fuel_code/export.py
new file mode 100644
index 000000000..e1f7c70ec
--- /dev/null
+++ b/backend/lcfs/web/api/fuel_code/export.py
@@ -0,0 +1,133 @@
+import io
+from datetime import datetime
+
+from fastapi import Depends
+from starlette.responses import StreamingResponse
+
+from lcfs.utils.constants import FILE_MEDIA_TYPE
+from lcfs.utils.spreadsheet_builder import SpreadsheetBuilder
+from lcfs.web.api.base import PaginationRequestSchema
+from lcfs.web.api.fuel_code.repo import FuelCodeRepository
+from lcfs.web.core.decorators import service_handler
+from lcfs.web.exception.exceptions import DataNotFoundException
+
+
+FUEL_CODE_EXPORT_FILENAME = "BC-LCFS-Fuel-Codes"
+FUEL_CODE_EXPORT_SHEETNAME = "Fuel Codes"
+FUEL_CODE_EXPORT_COLUMNS = [
+    "Status",
+    "Prefix",
+    "Fuel code",
+    "Carbon intensity",
+    "EDRMS#",
+    "Company",
+    "Contact name",
+    "Contact email",
"Application date", + "Approval date", + "Effective date", + "Expiry date", + "Fuel", + "Feedstock", + "Feedstock location", + "Misc", + "Fuel production facility city", + "Fuel production facility province/state", + "Fuel production facility country", + "Facility nameplate capacity", + "Unit", + "Feedstock transport mode", + "Finished fuel transport mode", + "Former company", + "Notes", +] + + +class FuelCodeExporter: + def __init__(self, repo: FuelCodeRepository = Depends(FuelCodeRepository)) -> None: + self.repo = repo + + @service_handler + async def export(self, export_format) -> StreamingResponse: + """ + Prepares a list of users in a file that is downloadable + """ + if not export_format in ["xls", "xlsx", "csv"]: + raise DataNotFoundException("Export format not supported") + + # Query database for the list of users. Exclude government users. + results = await self.repo.get_fuel_codes_paginated( + pagination=PaginationRequestSchema( + page=1, + size=1000, + filters=[], + sortOrders=[], + ) + ) + + # Prepare data for the spreadsheet + data = [] + for fuel_code in results[0]: + data.append( + [ + fuel_code.fuel_code_status.status.value, + fuel_code.fuel_code_prefix.prefix, + fuel_code.fuel_suffix, + fuel_code.carbon_intensity, + fuel_code.edrms, + fuel_code.company, + fuel_code.contact_name, + fuel_code.contact_email, + fuel_code.application_date, + fuel_code.approval_date, + fuel_code.effective_date, + fuel_code.expiration_date, + fuel_code.fuel_type.fuel_type, + fuel_code.feedstock, + fuel_code.feedstock_location, + fuel_code.feedstock_misc, + fuel_code.fuel_production_facility_city, + fuel_code.fuel_production_facility_province_state, + fuel_code.fuel_production_facility_country, + fuel_code.facility_nameplate_capacity, + ( + fuel_code.facility_nameplate_capacity_unit.value + if fuel_code.facility_nameplate_capacity_unit + else None + ), + ", ".join( + mode.feedstock_fuel_transport_mode.transport_mode + for mode in fuel_code.feedstock_fuel_transport_modes + ), + ", ".join( + mode.finished_fuel_transport_mode.transport_mode + for mode in fuel_code.finished_fuel_transport_modes + ), + fuel_code.former_company, + fuel_code.notes, + ] + ) + + # Create a spreadsheet + builder = SpreadsheetBuilder(file_format=export_format) + + builder.add_sheet( + sheet_name=FUEL_CODE_EXPORT_SHEETNAME, + columns=FUEL_CODE_EXPORT_COLUMNS, + rows=data, + styles={"bold_headers": True}, + ) + + file_content = builder.build_spreadsheet() + + # Get the current date in YYYY-MM-DD format + current_date = datetime.now().strftime("%Y-%m-%d") + + filename = f"{FUEL_CODE_EXPORT_FILENAME}-{current_date}.{export_format}" + headers = {"Content-Disposition": f'attachment; filename="{filename}"'} + + return StreamingResponse( + io.BytesIO(file_content), + media_type=FILE_MEDIA_TYPE[export_format.upper()].value, + headers=headers, + ) diff --git a/backend/lcfs/web/api/fuel_code/repo.py b/backend/lcfs/web/api/fuel_code/repo.py new file mode 100644 index 000000000..325c3579b --- /dev/null +++ b/backend/lcfs/web/api/fuel_code/repo.py @@ -0,0 +1,911 @@ +from dataclasses import dataclass +from datetime import date +from typing import List, Dict, Any, Union, Optional, Sequence + +import structlog +from fastapi import Depends +from sqlalchemy import and_, or_, select, func, text, update, distinct, desc, asc +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm import joinedload, contains_eager + +from lcfs.db.dependencies import get_async_db_session +from lcfs.db.models.compliance.CompliancePeriod import 
CompliancePeriod +from lcfs.db.models.fuel.AdditionalCarbonIntensity import AdditionalCarbonIntensity +from lcfs.db.models.fuel.EnergyDensity import EnergyDensity +from lcfs.db.models.fuel.EnergyEffectivenessRatio import EnergyEffectivenessRatio +from lcfs.db.models.fuel.ExpectedUseType import ExpectedUseType +from lcfs.db.models.fuel.FeedstockFuelTransportMode import FeedstockFuelTransportMode +from lcfs.db.models.fuel.FinishedFuelTransportMode import FinishedFuelTransportMode +from lcfs.db.models.fuel.FuelCategory import FuelCategory +from lcfs.db.models.fuel.FuelCode import FuelCode +from lcfs.db.models.fuel.FuelCodePrefix import FuelCodePrefix +from lcfs.db.models.fuel.FuelCodeStatus import FuelCodeStatus, FuelCodeStatusEnum +from lcfs.db.models.fuel.FuelInstance import FuelInstance +from lcfs.db.models.fuel.FuelType import FuelType +from lcfs.db.models.fuel.ProvisionOfTheAct import ProvisionOfTheAct +from lcfs.db.models.fuel.TargetCarbonIntensity import TargetCarbonIntensity +from lcfs.db.models.fuel.TransportMode import TransportMode +from lcfs.db.models.fuel.UnitOfMeasure import UnitOfMeasure +from lcfs.web.api.base import ( + PaginationRequestSchema, + get_field_for_filter, + apply_filter_conditions, +) +from lcfs.web.api.fuel_code.schema import FuelCodeCloneSchema, FuelCodeSchema +from lcfs.web.core.decorators import repo_handler + +logger = structlog.get_logger(__name__) + + +@dataclass +class CarbonIntensityResult: + effective_carbon_intensity: float + target_ci: float | None + eer: float + energy_density: float | None + uci: float | None + + +class FuelCodeRepository: + def __init__(self, db: AsyncSession = Depends(get_async_db_session)): + self.db = db + + @repo_handler + async def get_fuel_types(self) -> List[FuelType]: + """Get all fuel type options""" + return ( + ( + await self.db.execute( + select(FuelType).options( + joinedload(FuelType.provision_1), + joinedload(FuelType.provision_2), + ) + ) + ) + .scalars() + .all() + ) + + @repo_handler + async def get_formatted_fuel_types(self) -> List[Dict[str, Any]]: + """Get all fuel type options with their associated fuel categories and fuel codes""" + # Define the filtering conditions for fuel codes + current_date = date.today() + fuel_code_filters = or_( + FuelCode.effective_date == None, FuelCode.effective_date <= current_date + ) & or_( + FuelCode.expiration_date == None, FuelCode.expiration_date > current_date + ) + + # Build the query with filtered fuel_codes + query = ( + select(FuelType) + .outerjoin(FuelType.fuel_instances) + .outerjoin(FuelInstance.fuel_category) + .outerjoin(FuelType.fuel_codes) + .where(fuel_code_filters) + .options( + contains_eager(FuelType.fuel_instances).contains_eager( + FuelInstance.fuel_category + ), + contains_eager(FuelType.fuel_codes), + joinedload(FuelType.provision_1), + joinedload(FuelType.provision_2), + ) + ) + + result = await self.db.execute(query) + fuel_types = result.unique().scalars().all() + + # Prepare the data in the format matching your schema + formatted_fuel_types = [] + for fuel_type in fuel_types: + formatted_fuel_type = { + "fuel_type_id": fuel_type.fuel_type_id, + "fuel_type": fuel_type.fuel_type, + "default_carbon_intensity": fuel_type.default_carbon_intensity, + "units": fuel_type.units if fuel_type.units else None, + "unrecognized": fuel_type.unrecognized, + "fuel_categories": [ + { + "fuel_category_id": fc.fuel_category.fuel_category_id, + "category": fc.fuel_category.category, + } + for fc in fuel_type.fuel_instances + ], + "fuel_codes": [ + { + "fuel_code_id": 
fc.fuel_code_id, + "fuel_code": fc.fuel_code, + "carbon_intensity": fc.carbon_intensity, + } + for fc in fuel_type.fuel_codes + ], + "provision_of_the_act": [], + } + + if fuel_type.provision_1: + formatted_fuel_type["provision_of_the_act"].append( + { + "provision_of_the_act_id": fuel_type.provision_1_id, + "name": fuel_type.provision_1.name, + } + ) + + if fuel_type.provision_2: + formatted_fuel_type["provision_of_the_act"].append( + { + "provision_of_the_act_id": fuel_type.provision_2_id, + "name": fuel_type.provision_2.name, + } + ) + formatted_fuel_types.append(formatted_fuel_type) + + return formatted_fuel_types + + @repo_handler + async def get_fuel_type_by_name(self, fuel_type_name: str) -> FuelType: + """Get fuel type by name""" + stmt = select(FuelType).where(FuelType.fuel_type == fuel_type_name) + result = await self.db.execute(stmt) + fuel_type = result.scalars().first() + if not fuel_type: + raise ValueError(f"Fuel type '{fuel_type_name}' not found") + return fuel_type + + @repo_handler + async def get_fuel_type_by_id(self, fuel_type_id: int) -> FuelType: + """Get fuel type by ID""" + result = await self.db.get_one( + FuelType, + fuel_type_id, + options=[ + joinedload(FuelType.energy_density), + joinedload(FuelType.energy_effectiveness_ratio), + ], + ) + if not result: + raise ValueError(f"Fuel type with ID '{fuel_type_id}' not found") + return result + + @repo_handler + async def get_fuel_categories(self) -> List[FuelCategory]: + """Get all fuel category options""" + return (await self.db.execute(select(FuelCategory))).scalars().all() + + @repo_handler + async def get_fuel_category_by(self, **filters: Any) -> FuelCategory: + """Get a fuel category by any filters""" + result = await self.db.execute(select(FuelCategory).filter_by(**filters)) + return result.scalar_one_or_none() + + @repo_handler + async def get_transport_modes(self) -> List[TransportMode]: + """Get all transport mode options""" + return (await self.db.execute(select(TransportMode))).scalars().all() + + @repo_handler + async def get_transport_mode(self, transport_mode_id: int) -> TransportMode: + return await self.db.scalar( + select(TransportMode).where( + TransportMode.transport_mode_id == transport_mode_id + ) + ) + + @repo_handler + async def get_transport_mode_by_name(self, mode_name: str) -> TransportMode: + query = select(TransportMode).where(TransportMode.transport_mode == mode_name) + result = await self.db.execute(query) + transport_mode = result.scalar_one() + + return transport_mode + + @repo_handler + async def get_fuel_code_prefixes(self) -> List[FuelCodePrefix]: + """Get all fuel code prefix options""" + return (await self.db.execute(select(FuelCodePrefix))).scalars().all() + + @repo_handler + async def get_fuel_code_prefix(self, prefix_id: int) -> FuelCodePrefix: + """Get fuel code prefix""" + return await self.db.get_one(FuelCodePrefix, prefix_id) + + @repo_handler + async def get_fuel_status_by_status( + self, status: Union[str, FuelCodeStatusEnum] + ) -> FuelCodeStatus: + """Get fuel status by name""" + return ( + await self.db.execute(select(FuelCodeStatus).filter_by(status=status)) + ).scalar() + + @repo_handler + async def get_energy_densities(self) -> List[EnergyDensity]: + """Get all energy densities""" + return ( + ( + await self.db.execute( + select(EnergyDensity).options( + joinedload(EnergyDensity.fuel_type), + joinedload(EnergyDensity.uom), + ) + ) + ) + .scalars() + .all() + ) + + @repo_handler + async def get_energy_density(self, fuel_type_id) -> EnergyDensity: + """Get the energy 
density for the specified fuel_type_id"""
+
+        stmt = select(EnergyDensity).where(EnergyDensity.fuel_type_id == fuel_type_id)
+        result = await self.db.execute(stmt)
+        energy_density = result.scalars().first()
+
+        return energy_density
+
+    @repo_handler
+    async def get_energy_effectiveness_ratios(self) -> List[EnergyEffectivenessRatio]:
+        """Get all energy effectiveness ratios"""
+        return (
+            (
+                await self.db.execute(
+                    select(EnergyEffectivenessRatio).options(
+                        joinedload(EnergyEffectivenessRatio.fuel_category),
+                        joinedload(EnergyEffectivenessRatio.fuel_type),
+                        joinedload(EnergyEffectivenessRatio.end_use_type),
+                    )
+                )
+            )
+            .scalars()
+            .all()
+        )
+
+    @repo_handler
+    async def get_units_of_measure(self) -> List[UnitOfMeasure]:
+        """Get all unit of measure options"""
+        return (await self.db.execute(select(UnitOfMeasure))).scalars().all()
+
+    @repo_handler
+    async def get_expected_use_types(self) -> List[ExpectedUseType]:
+        """Get all expected use options"""
+        return (await self.db.execute(select(ExpectedUseType))).scalars().all()
+
+    @repo_handler
+    async def get_expected_use_type_by_name(self, name: str) -> ExpectedUseType:
+        """Get an expected use type by its name"""
+        result = await self.db.execute(select(ExpectedUseType).filter_by(name=name))
+        return result.scalar_one_or_none()
+
+    @repo_handler
+    async def get_fuel_codes_paginated(
+        self, pagination: PaginationRequestSchema
+    ) -> tuple[Sequence[FuelCode], int]:
+        """
+        Queries fuel codes from the database with optional filters. Supports pagination and sorting.
+
+        Args:
+            pagination (PaginationRequestSchema): Pagination, filtering, and sorting parameters.
+
+        Returns:
+            tuple[Sequence[FuelCode], int]: The page of fuel codes matching the query and the total record count.
+        """
+        delete_status = await self.get_fuel_status_by_status("Deleted")
+        conditions = [FuelCode.fuel_status_id != delete_status.fuel_code_status_id]
+
+        for filter in pagination.filters:
+
+            filter_value = filter.filter
+            if filter.filter_type == "date":
+                if filter.type == "inRange":
+                    filter_value = [filter.date_from, filter.date_to]
+                else:
+                    filter_value = filter.date_from
+
+            filter_option = filter.type
+            filter_type = filter.filter_type
+            if filter.field == "status":
+                field = get_field_for_filter(FuelCodeStatus, filter.field)
+            elif filter.field == "prefix":
+                field = get_field_for_filter(FuelCodePrefix, filter.field)
+            elif filter.field == "fuel_type":
+                field = get_field_for_filter(FuelType, filter.field)
+            elif (
+                filter.field == "feedstock_fuel_transport_mode"
+                or filter.field == "finished_fuel_transport_mode"
+            ):
+                transport_mode = await self.get_transport_mode_by_name(filter_value)
+
+                if filter.field == "feedstock_fuel_transport_mode":
+                    conditions.append(
+                        FeedstockFuelTransportMode.transport_mode_id
+                        == transport_mode.transport_mode_id
+                    )
+
+                if filter.field == "finished_fuel_transport_mode":
+                    conditions.append(
+                        FinishedFuelTransportMode.transport_mode_id
+                        == transport_mode.transport_mode_id
+                    )
+                continue
+            else:
+                field = get_field_for_filter(FuelCode, filter.field)
+
+            conditions.append(
+                apply_filter_conditions(field, filter_value, filter_option, filter_type)
+            )
+
+        # setup pagination
+        offset = 0 if (pagination.page < 1) else (pagination.page - 1) * pagination.size
+        limit = pagination.size
+        # Construct the select query with options for eager loading
+        query = (
+            select(FuelCode)
+            .options(
+                joinedload(FuelCode.fuel_code_status),
+                joinedload(FuelCode.fuel_code_prefix),
+                joinedload(FuelCode.fuel_type).joinedload(FuelType.provision_1),
+
joinedload(FuelCode.fuel_type).joinedload(FuelType.provision_2), + joinedload(FuelCode.feedstock_fuel_transport_modes).joinedload( + FeedstockFuelTransportMode.feedstock_fuel_transport_mode + ), + joinedload(FuelCode.finished_fuel_transport_modes).joinedload( + FinishedFuelTransportMode.finished_fuel_transport_mode + ), + ) + .where(and_(*conditions)) + ) + + # Apply sorting + for order in pagination.sort_orders: + direction = asc if order.direction == "asc" else desc + if order.field == "status": + field = getattr(FuelCodeStatus, "status") + elif order.field == "prefix": + field = getattr(FuelCodePrefix, "prefix") + else: + field = getattr(FuelCode, order.field) + query = query.order_by(direction(field)) + + # Execute the count query to get the total count + count_query = query.with_only_columns(func.count()).order_by(None) + total_count = (await self.db.execute(count_query)).scalar() + + # Execute the main query to retrieve all fuel codes + result = await self.db.execute( + query.offset(offset).limit(limit).order_by(FuelCode.create_date.desc()) + ) + fuel_codes = result.unique().scalars().all() + return fuel_codes, total_count + + @repo_handler + async def get_fuel_code_statuses(self): + query = select(FuelCodeStatus).order_by(asc(FuelCodeStatus.status)) + status_results = await self.db.execute(query) + return status_results.scalars().all() + + @repo_handler + async def create_fuel_code(self, fuel_code: FuelCode) -> FuelCode: + """ + Saves a new fuel code to the database. + + Args: + fuel_code (FuelCodeSchema): A fuel code to be saved. + """ + self.db.add(fuel_code) + await self.db.flush() + result = await self.get_fuel_code(fuel_code.fuel_code_id) + return result + + @repo_handler + async def get_fuel_code(self, fuel_code_id: int) -> FuelCode: + return await self.db.scalar( + select(FuelCode) + .options( + joinedload(FuelCode.feedstock_fuel_transport_modes).joinedload( + FeedstockFuelTransportMode.feedstock_fuel_transport_mode + ), + joinedload(FuelCode.finished_fuel_transport_modes).joinedload( + FinishedFuelTransportMode.finished_fuel_transport_mode + ), + joinedload(FuelCode.fuel_type).joinedload(FuelType.provision_1), + joinedload(FuelCode.fuel_type).joinedload(FuelType.provision_2), + ) + .where(FuelCode.fuel_code_id == fuel_code_id) + ) + + @repo_handler + async def get_fuel_code_status( + self, fuel_code_status: FuelCodeStatusEnum + ) -> FuelCodeStatus: + return await self.db.scalar( + select(FuelCodeStatus).where(FuelCodeStatus.status == fuel_code_status) + ) + + @repo_handler + async def update_fuel_code(self, fuel_code: FuelCode) -> FuelCodeSchema: + + await self.db.flush() + await self.db.refresh(fuel_code) + + return FuelCodeSchema.model_validate(fuel_code) + + @repo_handler + async def delete_fuel_code(self, fuel_code_id: int): + delete_status = await self.get_fuel_status_by_status(FuelCodeStatusEnum.Deleted) + await self.db.execute( + update(FuelCode) + .where(FuelCode.fuel_code_id == fuel_code_id) + .values(fuel_status_id=delete_status.fuel_code_status_id) + ) + + @repo_handler + async def get_distinct_company_names(self, company: str) -> List[str]: + query = ( + select(distinct(FuelCode.company)) + .where(func.lower(FuelCode.company).like(func.lower(company + "%"))) + .order_by(FuelCode.company) + .limit(10) + ) + return (await self.db.execute(query)).scalars().all() + + @repo_handler + async def get_contact_names_by_company( + self, company: str, contact_name: str + ) -> List[str]: + query = ( + select(distinct(FuelCode.contact_name)) + .where( + and_( + 
func.lower(FuelCode.company) == func.lower(company),
+                    func.lower(FuelCode.contact_name).like(
+                        func.lower(contact_name + "%")
+                    ),
+                )
+            )
+            .order_by(FuelCode.contact_name)
+            .limit(10)
+        )
+        return (await self.db.execute(query)).scalars().all()
+
+    @repo_handler
+    async def get_contact_email_by_company_and_name(
+        self, company: str, contact_name: str, contact_email: str
+    ) -> List[str]:
+        query = (
+            select(distinct(FuelCode.contact_email))
+            .where(
+                and_(
+                    func.lower(FuelCode.company) == func.lower(company),
+                    func.lower(FuelCode.contact_name) == func.lower(contact_name),
+                ),
+                func.lower(FuelCode.contact_email).like(
+                    func.lower(contact_email + "%")
+                ),
+            )
+            .order_by(FuelCode.contact_email)
+            .limit(10)
+        )
+        return (await self.db.execute(query)).scalars().all()
+
+    @repo_handler
+    async def get_distinct_fuel_codes_by_code(
+        self, fuel_code: str, prefix: str
+    ) -> List[str]:
+        query = (
+            select(distinct(FuelCode.fuel_suffix))
+            .join(
+                FuelCodePrefix, FuelCodePrefix.fuel_code_prefix_id == FuelCode.prefix_id
+            )
+            # Join the status table explicitly so the status filter below does
+            # not produce an implicit cross join
+            .join(
+                FuelCodeStatus,
+                FuelCode.fuel_status_id == FuelCodeStatus.fuel_code_status_id,
+            )
+            .where(
+                and_(
+                    FuelCode.fuel_suffix.like(fuel_code + "%"),
+                    func.lower(FuelCodePrefix.prefix) == func.lower(prefix),
+                    FuelCodeStatus.status != FuelCodeStatusEnum.Deleted,
+                )
+            )
+            .order_by(FuelCode.fuel_suffix)
+            .limit(10)
+        )
+
+        return (await self.db.execute(query)).scalars().all()
+
+    @repo_handler
+    async def get_fuel_code_by_code_prefix(
+        self, fuel_suffix: str, prefix: str
+    ) -> list[FuelCodeCloneSchema]:
+        query = (
+            select(FuelCode)
+            # Join the prefix and status tables explicitly; the joinedload
+            # options below are for eager loading only and cannot be filtered on
+            .join(FuelCode.fuel_code_prefix)
+            .join(FuelCode.fuel_code_status)
+            .options(
+                joinedload(FuelCode.fuel_code_status),
+                joinedload(FuelCode.fuel_code_prefix),
+                joinedload(FuelCode.fuel_type).joinedload(FuelType.provision_1),
+                joinedload(FuelCode.fuel_type).joinedload(FuelType.provision_2),
+                joinedload(FuelCode.feedstock_fuel_transport_modes).joinedload(
+                    FeedstockFuelTransportMode.feedstock_fuel_transport_mode
+                ),
+                joinedload(FuelCode.finished_fuel_transport_modes).joinedload(
+                    FinishedFuelTransportMode.finished_fuel_transport_mode
+                ),
+            )
+            .where(
+                and_(
+                    FuelCode.fuel_suffix == fuel_suffix,
+                    FuelCodePrefix.prefix == prefix,
+                    FuelCodeStatus.status != FuelCodeStatusEnum.Deleted,
+                )
+            )
+        )
+        fuel_code_main_version = fuel_suffix.split(".")[0]
+        results = (await self.db.execute(query)).unique().scalars().all()
+        next_suffix = await self.get_next_available_sub_version_fuel_code_by_prefix(
+            fuel_code_main_version, prefix
+        )
+        if results is None or len(results) < 1:
+            fc = FuelCodeCloneSchema(fuel_suffix=next_suffix, prefix=prefix)
+            return [fc]
+        else:
+            fuel_code_results = []
+            for fuel_code in results:
+                fc = FuelCodeCloneSchema.model_validate(fuel_code)
+                fc.fuel_suffix = next_suffix
+                fuel_code_results.append(fc)
+            return fuel_code_results
+
+    def format_decimal(self, value):
+        parts = str(value).split(".")
+        # Format the integer part to always have 3 digits
+        formatted_integer = f"{int(parts[0]):03d}"
+        if len(parts) > 1:
+            return f"{formatted_integer}.{parts[1]}"
+        else:
+            return formatted_integer
+
+    @repo_handler
+    async def validate_fuel_code(self, suffix: str, prefix_id: int) -> str:
+        # check if the fuel_code already exists
+        query = (
+            select(FuelCode)
+            .join(FuelCode.fuel_code_prefix)
+            .join(FuelCode.fuel_code_status)
+            .options(joinedload(FuelCode.fuel_code_prefix))
+            .where(
+                and_(
+                    FuelCode.fuel_suffix == suffix,
+                    FuelCodePrefix.fuel_code_prefix_id == prefix_id,
+                    FuelCodeStatus.status != FuelCodeStatusEnum.Deleted,
+                )
+            )
+        )
+        result = (await self.db.execute(query)).scalar_one_or_none()
+        if result:
+            fuel_code_main_version =
suffix.split(".")[0] + suffix = await self.get_next_available_sub_version_fuel_code_by_prefix( + fuel_code_main_version, prefix_id + ) + if int(suffix.split(".")[1]) > 9: + return await self.get_next_available_fuel_code_by_prefix( + result.fuel_code_prefix.prefix + ) + return suffix + else: + return suffix + + @repo_handler + async def get_next_available_fuel_code_by_prefix(self, prefix: str) -> str: + query = text( + """ + WITH parsed_codes AS ( + SELECT SPLIT_PART(fc.fuel_suffix, '.', 1)::INTEGER AS base_code + FROM fuel_code fc + JOIN fuel_code_prefix fcp ON fcp.fuel_code_prefix_id = fc.prefix_id + WHERE fcp.prefix = :prefix + ), + all_possible_codes AS ( + SELECT generate_series(101, COALESCE((SELECT MAX(base_code) FROM parsed_codes), 101) + 1) AS base_code + ), + available_codes AS ( + SELECT base_code + FROM all_possible_codes + WHERE base_code NOT IN (SELECT base_code FROM parsed_codes) + ), + next_code AS ( + SELECT MIN(base_code) AS next_base_code + FROM available_codes + ) + SELECT LPAD(next_base_code::TEXT, 3, '0') || '.0' AS next_fuel_code + FROM next_code; + """ + ) + result = (await self.db.execute(query, {"prefix": prefix})).scalar_one_or_none() + return self.format_decimal(result) + + async def get_next_available_sub_version_fuel_code_by_prefix( + self, input_version: str, prefix_id: int + ) -> str: + query = text( + """ + WITH split_versions AS ( + SELECT + fuel_suffix, + CAST(SPLIT_PART(fuel_suffix, '.', 1) AS INTEGER) AS main_version, + CAST(SPLIT_PART(fuel_suffix, '.', 2) AS INTEGER) AS sub_version + FROM fuel_code fc + JOIN fuel_code_prefix fcp ON fcp.fuel_code_prefix_id = fc.prefix_id + WHERE fcp.fuel_code_prefix_id = :prefix_id + ), + sub_versions AS ( + SELECT + main_version, + sub_version + FROM split_versions + WHERE main_version = :input_version + ), + all_sub_versions AS ( + SELECT generate_series(0, COALESCE((SELECT MAX(sub_version) FROM sub_versions), -1)) AS sub_version + ), + missing_sub_versions AS ( + SELECT a.sub_version + FROM all_sub_versions a + LEFT JOIN sub_versions s ON a.sub_version = s.sub_version + WHERE s.sub_version IS NULL + ORDER BY a.sub_version + LIMIT 1 + ) + SELECT + :input_version || '.' 
|| + COALESCE((SELECT sub_version FROM missing_sub_versions)::VARCHAR, + (SELECT COALESCE(MAX(sub_version), -1) + 1 FROM sub_versions)::VARCHAR) + AS next_available_version + """ + ) + result = ( + await self.db.execute( + query, {"input_version": int(input_version), "prefix_id": prefix_id} + ) + ).scalar_one_or_none() + return self.format_decimal(result) + + async def get_latest_fuel_codes(self) -> List[FuelCodeSchema]: + subquery = ( + select(func.max(FuelCode.fuel_suffix).label("latest_code")) + .group_by(func.split_part(FuelCode.fuel_suffix, ".", 1)) + .subquery() + ) + + query = ( + select(FuelCode) + .join(subquery, FuelCode.fuel_suffix == subquery.c.latest_code) + .options( + joinedload(FuelCode.feedstock_fuel_transport_modes).joinedload( + FeedstockFuelTransportMode.feedstock_fuel_transport_mode + ), + joinedload(FuelCode.finished_fuel_transport_modes).joinedload( + FinishedFuelTransportMode.finished_fuel_transport_mode + ), + joinedload(FuelCode.fuel_type).joinedload(FuelType.provision_1), + joinedload(FuelCode.fuel_type).joinedload(FuelType.provision_2), + ) + .filter(FuelCodeStatus.status != FuelCodeStatusEnum.Deleted) + ) + + result = await self.db.execute(query) + + fuel_codes = result.unique().scalars().all() + + next_fuel_codes = [] + + for fuel_code in fuel_codes: + base_code, version = fuel_code.fuel_code.rsplit(".", 1) + next_version = str(int(version) + 1) + next_code = f"{base_code}.{next_version}" + + fuel_code_pydantic = FuelCodeSchema.from_orm(fuel_code) + + fuel_code_dict = fuel_code_pydantic.dict() + + next_fuel_codes.append({**fuel_code_dict, "fuel_code": next_code}) + + return next_fuel_codes + + @repo_handler + async def get_fuel_code_field_options(self): + query = select( + FuelCode.company, + FuelCode.feedstock, + FuelCode.feedstock_location, + FuelCode.feedstock_misc, + FuelCode.former_company, + FuelCode.contact_name, + FuelCode.contact_email, + ) + + result = (await self.db.execute(query)).all() + + return result + + @repo_handler + async def get_fp_locations(self): + query = select( + FuelCode.fuel_production_facility_city, + FuelCode.fuel_production_facility_province_state, + FuelCode.fuel_production_facility_country, + ) + + result = (await self.db.execute(query)).all() + + return result + + @repo_handler + async def get_fuel_code_by_name(self, fuel_code: str) -> FuelCode: + result = await self.db.execute( + select(FuelCode) + .join(FuelCode.fuel_code_prefix) + .join( + FuelCodeStatus, + FuelCode.fuel_status_id == FuelCodeStatus.fuel_code_status_id, + ) + .outerjoin(FuelType, FuelCode.fuel_type_id == FuelType.fuel_type_id) + .options( + contains_eager(FuelCode.fuel_code_prefix), + joinedload(FuelCode.fuel_code_status), + joinedload(FuelCode.fuel_type), + ) + .where( + and_( + func.concat(FuelCodePrefix.prefix, FuelCode.fuel_suffix) + == fuel_code, + FuelCodeStatus.status != FuelCodeStatusEnum.Deleted, + ) + ) + ) + return result.scalar_one_or_none() + + @repo_handler + async def get_provision_of_the_act_by_name( + self, provision_of_the_act: str + ) -> ProvisionOfTheAct: + result = await self.db.execute( + select(ProvisionOfTheAct).where( + ProvisionOfTheAct.name == provision_of_the_act + ) + ) + return result.scalar_one_or_none() + + @repo_handler + async def get_energy_effectiveness_ratio( + self, fuel_type_id: int, fuel_category_id: int, end_use_type_id: Optional[int] + ) -> EnergyEffectivenessRatio: + """ + Retrieves the Energy Effectiveness Ratio based on fuel type, fuel category, + and optionally the end use type. 
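+        When end_use_type_id is None the end-use condition is simply omitted,
+        so the first ratio on record for the fuel type and category is
+        returned.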
+ + Args: + fuel_type_id (int): The ID of the fuel type. + fuel_category_id (int): The ID of the fuel category. + end_use_type_id (Optional[int]): The ID of the end use type (optional). + + Returns: + Optional[EnergyEffectivenessRatio]: The matching EnergyEffectivenessRatio record or None. + """ + conditions = [ + EnergyEffectivenessRatio.fuel_type_id == fuel_type_id, + EnergyEffectivenessRatio.fuel_category_id == fuel_category_id, + ] + + if end_use_type_id is not None: + conditions.append( + EnergyEffectivenessRatio.end_use_type_id == end_use_type_id + ) + + stmt = select(EnergyEffectivenessRatio).where(*conditions) + result = await self.db.execute(stmt) + energy_effectiveness_ratio = result.scalars().first() + + return energy_effectiveness_ratio + + @repo_handler + async def get_target_carbon_intensities( + self, fuel_category_id: int, compliance_period: str + ) -> List[TargetCarbonIntensity]: + + compliance_period_id_subquery = ( + select(CompliancePeriod.compliance_period_id) + .where(CompliancePeriod.description == compliance_period) + .scalar_subquery() + ) + + stmt = ( + select(TargetCarbonIntensity) + .where( + TargetCarbonIntensity.fuel_category_id == fuel_category_id, + TargetCarbonIntensity.compliance_period_id + == compliance_period_id_subquery, + ) + .options( + joinedload(TargetCarbonIntensity.fuel_category), + joinedload(TargetCarbonIntensity.compliance_period), + ) + ) + result = await self.db.execute(stmt) + + return result.scalars().all() + + @repo_handler + async def get_standardized_fuel_data( + self, + fuel_type_id: int, + fuel_category_id: int, + end_use_id: int, + compliance_period: str, + fuel_code_id: Optional[int] = None, + ) -> CarbonIntensityResult: + """ + Fetch and standardize fuel data values required for compliance calculations. 
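+
+        Carbon intensity is resolved in precedence order: the CI of an explicit
+        fuel code when fuel_code_id is given, else the fuel category default
+        for unrecognized ("Other") fuel types, else the fuel type default. The
+        EER falls back to 1.0 when no ratio is found, and energy density is
+        None for the "Other" fuel type.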
+ """ + # Fetch the fuel type details + fuel_type = await self.get_fuel_type_by_id(fuel_type_id) + if not fuel_type: + raise ValueError("Invalid fuel type ID") + + # Determine energy density + energy_density = ( + (await self.get_energy_density(fuel_type_id)).density + if fuel_type.fuel_type != "Other" + else None + ) + + # Set effective carbon intensity and target carbon intensity + if fuel_code_id: + fuel_code = await self.get_fuel_code(fuel_code_id) + effective_carbon_intensity = fuel_code.carbon_intensity + # Other Fuel uses the Default CI of the Category + elif fuel_type.unrecognized: + fuel_category = await self.get_fuel_category_by( + fuel_category_id=fuel_category_id + ) + effective_carbon_intensity = fuel_category.default_carbon_intensity + else: + effective_carbon_intensity = fuel_type.default_carbon_intensity + + # Get energy effectiveness ratio (EER) + energy_effectiveness = await self.get_energy_effectiveness_ratio( + fuel_type_id, fuel_category_id, end_use_id + ) + eer = energy_effectiveness.ratio if energy_effectiveness else 1.0 + + # Fetch target carbon intensity (TCI) + target_ci = None + target_carbon_intensities = await self.get_target_carbon_intensities( + fuel_category_id, compliance_period + ) + if target_carbon_intensities: + target_ci = next( + (tci.target_carbon_intensity for tci in target_carbon_intensities), + 0.0, + ) + + # Additional Carbon Intensity (UCI) + uci = await self.get_additional_carbon_intensity(fuel_type_id, end_use_id) + + return CarbonIntensityResult( + effective_carbon_intensity=effective_carbon_intensity, + target_ci=target_ci, + eer=eer, + energy_density=energy_density, + uci=uci.intensity if uci else None, + ) + + @repo_handler + async def get_additional_carbon_intensity( + self, fuel_type_id, end_use_type_id + ) -> Optional[AdditionalCarbonIntensity]: + """Get a single use of a carbon intensity (UCI), returns None if one does not apply""" + query = select(AdditionalCarbonIntensity).where( + AdditionalCarbonIntensity.end_use_type_id == end_use_type_id, + AdditionalCarbonIntensity.fuel_type_id == fuel_type_id, + ) + + result = await self.db.execute(query) + return result.scalars().one_or_none() diff --git a/backend/lcfs/web/api/fuel_code/schema.py b/backend/lcfs/web/api/fuel_code/schema.py new file mode 100644 index 000000000..061ed20a1 --- /dev/null +++ b/backend/lcfs/web/api/fuel_code/schema.py @@ -0,0 +1,393 @@ +from typing import Optional, List, Union + +from fastapi.exceptions import RequestValidationError + +from lcfs.web.api.base import BaseSchema, PaginationResponseSchema +from datetime import date, datetime +from pydantic import ( + Field, + ValidationError, + field_validator, + model_validator, +) +from enum import Enum + +from lcfs.web.api.fuel_type.schema import FuelTypeQuantityUnitsEnumSchema + + +class FuelCodeStatusEnumSchema(str, Enum): + Draft = "Draft" + Approved = "Approved" + Deleted = "Deleted" + + +class ProvisionOfTheActSchema(BaseSchema): + provision_of_the_act_id: int + name: str + + +class FuelTypeSchema(BaseSchema): + fuel_type_id: int + fuel_type: str + fossil_derived: Optional[bool] = None + provision_1_id: Optional[int] = None + provision_2_id: Optional[int] = None + default_carbon_intensity: Optional[float] = None + provision_1: Optional[ProvisionOfTheActSchema] = None + provision_2: Optional[ProvisionOfTheActSchema] = None + units: FuelTypeQuantityUnitsEnumSchema + + @field_validator("default_carbon_intensity") + def quantize_default_carbon_intensity(cls, value): + return round(value, 2) + + +class 
FuelCodeStatusSchema(BaseSchema): + fuel_code_status_id: Optional[int] = None + status: FuelCodeStatusEnumSchema + + +class FuelCodeResponseSchema(BaseSchema): + fuel_code_id: Optional[int] = None + fuel_status_id: Optional[int] = None + fuel_status: Optional[FuelCodeStatusSchema] = None + prefix_id: Optional[int] = None + fuel_code: str + carbon_intensity: float + + +class TransportModeSchema(BaseSchema): + transport_mode_id: int + transport_mode: str + + +class FeedstockFuelTransportModeSchema(BaseSchema): + feedstock_fuel_transport_mode_id: Optional[int] = None + fuel_code_id: Optional[int] = None + transport_mode_id: Optional[int] = None + feedstock_fuel_transport_mode: Optional[TransportModeSchema] = None + + +class FinishedFuelTransportModeSchema(BaseSchema): + finished_fuel_transport_mode_id: Optional[int] = None + fuel_code_id: Optional[int] = None + transport_mode_id: Optional[int] = None + finished_fuel_transport_mode: Optional[TransportModeSchema] = None + + +class FuelCodePrefixSchema(BaseSchema): + fuel_code_prefix_id: int + next_fuel_code: Optional[str] = None + prefix: str + + +class UOMSchema(BaseSchema): + uom_id: int + name: str + description: Optional[str] = None + + +class EndUseTypeSchema(BaseSchema): + end_use_type_id: int + type: str + sub_type: Optional[str] = None + + +class EndUserTypeSchema(BaseSchema): + end_user_type_id: int + type_name: str + + +class EnergyDensitySchema(BaseSchema): + energy_density_id: int + density: float = Field(..., pre=True, always=True) + + fuel_type_id: int + fuel_type: Optional[FuelTypeSchema] = None + uom_id: int + uom: Optional[UOMSchema] = None + + @field_validator("density") + def quantize_density(cls, value): + return round(value, 2) + + +class FuelCategorySchema(BaseSchema): + fuel_category_id: int + category: str + description: Optional[str] = None + + +class EnergyEffectivenessRatioSchema(BaseSchema): + eer_id: int + fuel_category_id: int + fuel_category: Optional[FuelCategorySchema] = None + fuel_type_id: int + fuel_type: Optional[FuelTypeSchema] = None + end_use_type_id: Optional[int] = None + end_use_type: Optional[EndUseTypeSchema] = None + ratio: float = Field(..., pre=True, always=True) + + @field_validator("ratio") + def quantize_ratio(cls, value): + return round(value, 2) + + +class AdditionalCarbonIntensitySchema(BaseSchema): + additional_uci_id: int + fuel_type_id: Optional[int] = None + fuel_type: Optional[FuelTypeSchema] = None + end_use_type_id: Optional[int] = None + end_use_type: Optional[EndUseTypeSchema] = None + uom_id: Optional[int] = None + uom: Optional[UOMSchema] = None + intensity: float = Field(..., pre=True, always=True) + + @field_validator("intensity") + def quantize_intensity(cls, value): + return round(value, 2) + + +class FuelCodeSchema(BaseSchema): + fuel_code_id: Optional[int] = None + fuel_status_id: Optional[int] = None + prefix_id: int + fuel_suffix: str + company: str + contact_name: Optional[str] = None + contact_email: Optional[str] = None + carbon_intensity: float + edrms: str + last_updated: datetime + application_date: date + approval_date: Optional[date] = None + effective_date: Optional[date] = None + expiration_date: Optional[date] = None + fuel_type_id: int + feedstock: str + feedstock_location: str + feedstock_misc: Optional[str] = None + fuel_production_facility_city: Optional[str] = None + fuel_production_facility_province_state: Optional[str] = None + fuel_production_facility_country: Optional[str] = None + facility_nameplate_capacity: Optional[int] = None + 
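+    # The unit accompanies facility_nameplate_capacity; the create/update
+    # schema below rejects a capacity supplied without its unit.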
facility_nameplate_capacity_unit: Optional[FuelTypeQuantityUnitsEnumSchema] = None + former_company: Optional[str] = None + notes: Optional[str] = None + fuel_code_status: Optional[FuelCodeStatusSchema] = None + fuel_code_prefix: Optional[FuelCodePrefixSchema] = None + fuel_type: Optional[FuelTypeSchema] = None + feedstock_fuel_transport_modes: Optional[List[FeedstockFuelTransportModeSchema]] = ( + None + ) + finished_fuel_transport_modes: Optional[List[FinishedFuelTransportModeSchema]] = ( + None + ) + + +class FuelCodeCloneSchema(BaseSchema): + fuel_code_id: Optional[int] = None + fuel_status_id: Optional[int] = None + prefix_id: Optional[int] = None + fuel_suffix: Optional[str] = None + company: Optional[str] = None + contact_name: Optional[str] = None + contact_email: Optional[str] = None + carbon_intensity: Optional[float] = None + edrms: Optional[str] = None + last_updated: Optional[datetime] = None + application_date: Optional[date] = None + approval_date: Optional[date] = None + effective_date: Optional[date] = None + expiration_date: Optional[date] = None + fuel_type_id: Optional[int] = None + feedstock: Optional[str] = None + feedstock_location: Optional[str] = None + feedstock_misc: Optional[str] = None + fuel_production_facility_city: Optional[str] = None + fuel_production_facility_province_state: Optional[str] = None + fuel_production_facility_country: Optional[str] = None + facility_nameplate_capacity: Optional[int] = None + facility_nameplate_capacity_unit: Optional[FuelTypeQuantityUnitsEnumSchema] = None + former_company: Optional[str] = None + notes: Optional[str] = None + fuel_code_status: Optional[FuelCodeStatusSchema] = None + fuel_code_prefix: Optional[FuelCodePrefixSchema] = None + fuel_type: Optional[FuelTypeSchema] = None + feedstock_fuel_transport_modes: Optional[List[FeedstockFuelTransportModeSchema]] = ( + None + ) + finished_fuel_transport_modes: Optional[List[FinishedFuelTransportModeSchema]] = ( + None + ) + + +class FieldOptions(BaseSchema): + company: List[str] + feedstock: List[str] + feedstock_location: List[str] + feedstock_misc: List[str] + former_company: List[str] + contact_name: List[str] + contact_email: List[str] + + +class FPLocationsSchema(BaseSchema): + fuel_production_facility_city: Optional[str] = None + fuel_production_facility_province_state: Optional[str] = None + fuel_production_facility_country: Optional[str] = None + + +class TableOptionsSchema(BaseSchema): + fuel_types: List[FuelTypeSchema] + transport_modes: List[TransportModeSchema] + fuel_code_prefixes: List[FuelCodePrefixSchema] + latest_fuel_codes: Optional[List[FuelCodeSchema]] + field_options: FieldOptions + fp_locations: List[FPLocationsSchema] + facility_nameplate_capacity_units: List[FuelTypeQuantityUnitsEnumSchema] + + +class SearchFuelCodeList(BaseSchema): + fuel_codes: Union[List[str], List[FuelCodeCloneSchema]] + + +class FuelCodesSchema(BaseSchema): + fuel_codes: List[FuelCodeSchema] + pagination: Optional[PaginationResponseSchema] = None + + +class FuelCodeCreateUpdateSchema(BaseSchema): + fuel_code_id: Optional[int] = None + prefix_id: int = None + fuel_suffix: str + carbon_intensity: float + edrms: str + company: str + contact_name: Optional[str] = None + contact_email: Optional[str] = None + application_date: date + approval_date: date + effective_date: date + expiration_date: date + fuel_type_id: int + feedstock: str + feedstock_location: str + feedstock_misc: Optional[str] = None + fuel_production_facility_city: str + fuel_production_facility_province_state: str + 
fuel_production_facility_country: str + + facility_nameplate_capacity: Optional[int] = None + facility_nameplate_capacity_unit: Optional[ + Union[FuelTypeQuantityUnitsEnumSchema, str] + ] = None + feedstock_fuel_transport_mode: Optional[List[str]] = None + finished_fuel_transport_mode: Optional[List[str]] = None + feedstock_fuel_transport_modes: Optional[List[FeedstockFuelTransportModeSchema]] = ( + None + ) + finished_fuel_transport_modes: Optional[List[FinishedFuelTransportModeSchema]] = ( + None + ) + former_company: Optional[str] = None + notes: Optional[str] = None + is_valid: Optional[bool] = False + validation_msg: Optional[str] = None + deleted: Optional[bool] = None + + @model_validator(mode="after") + def check_capacity_and_unit(self): + facility_nameplate_capacity = self.facility_nameplate_capacity + facility_nameplate_capacity_unit = self.facility_nameplate_capacity_unit + + if facility_nameplate_capacity is None: + self.facility_nameplate_capacity = None + elif ( + facility_nameplate_capacity is not None + and facility_nameplate_capacity_unit is None + ): + errors = [ + { + "loc": ("facilityNameplateCapacityUnit",), + "msg": "must be provided when the facility nameplate capacity is set", + "type": "value_error", + } + ] + raise RequestValidationError(errors) + return self + + @model_validator(mode="after") + def validate_dates(self): + application_date = self.application_date + approval_date = self.approval_date + effective_date = self.effective_date + expiration_date = self.expiration_date + + errors = [] + + # Application Date: Must be before Approval Date and Expiry Date + if application_date >= approval_date: + errors.append( + { + "loc": ("applicationDate",), + "msg": "must be before Approval Date.", + "type": "value_error", + } + ) + if application_date >= expiration_date: + errors.append( + { + "loc": ("applicationDate",), + "msg": "must be before Expiration Date.", + "type": "value_error", + } + ) + + # Approval Date: Must be after Application Date + if approval_date <= application_date: + errors.append( + { + "loc": ("approvalDate",), + "msg": "must be after Application Date.", + "type": "value_error", + } + ) + + # Effective Date: Must be on/after Application Date and before Expiry Date + if effective_date < application_date: + errors.append( + { + "loc": ("effectiveDate",), + "msg": "must be on or after Application Date.", + "type": "value_error", + } + ) + if expiration_date and effective_date >= expiration_date: + errors.append( + { + "loc": ("effectiveDate",), + "msg": "must be before Expiry Date.", + "type": "value_error", + } + ) + + # Expiry Date: Must be after Effective Date + if expiration_date <= effective_date: + errors.append( + { + "loc": ("expirationDate",), + "msg": "must be after Effective Date.", + "type": "value_error", + } + ) + + # Raise RequestValidationError if any errors exist + if errors: + raise RequestValidationError(errors) + + return self + + +class DeleteFuelCodeResponseSchema(BaseSchema): + message: str diff --git a/backend/lcfs/web/api/fuel_code/services.py b/backend/lcfs/web/api/fuel_code/services.py new file mode 100644 index 000000000..d40fde544 --- /dev/null +++ b/backend/lcfs/web/api/fuel_code/services.py @@ -0,0 +1,320 @@ +import math + +import structlog +from fastapi import Depends + +from lcfs.db.models.fuel.FeedstockFuelTransportMode import FeedstockFuelTransportMode +from lcfs.db.models.fuel.FinishedFuelTransportMode import FinishedFuelTransportMode +from lcfs.db.models.fuel.FuelCode import FuelCode +from 
lcfs.db.models.fuel.FuelCodeStatus import FuelCodeStatusEnum +from lcfs.db.models.fuel.FuelType import QuantityUnitsEnum +from lcfs.web.api.base import ( + PaginationRequestSchema, + PaginationResponseSchema, +) +from lcfs.web.api.fuel_code.repo import FuelCodeRepository +from lcfs.web.api.fuel_code.schema import ( + FuelCodeCreateUpdateSchema, + FuelCodeSchema, + FuelCodesSchema, + FuelTypeSchema, + SearchFuelCodeList, + TransportModeSchema, + FuelCodePrefixSchema, + TableOptionsSchema, + FuelCodeStatusSchema, +) +from lcfs.web.core.decorators import service_handler + +logger = structlog.get_logger(__name__) + + +class FuelCodeServices: + def __init__(self, repo: FuelCodeRepository = Depends(FuelCodeRepository)) -> None: + self.repo = repo + + @service_handler + async def search_fuel_code(self, fuel_code, prefix, distinct_search): + if distinct_search: + result = await self.repo.get_distinct_fuel_codes_by_code(fuel_code, prefix) + else: + result = await self.repo.get_fuel_code_by_code_prefix(fuel_code, prefix) + return SearchFuelCodeList(fuel_codes=result) + + @service_handler + async def search_company(self, company): + return await self.repo.get_distinct_company_names(company) + + @service_handler + async def search_contact_name(self, company, contact_name): + return await self.repo.get_contact_names_by_company(company, contact_name) + + @service_handler + async def search_contact_email(self, company, contact_name, contact_email): + return await self.repo.get_contact_email_by_company_and_name( + company, contact_name, contact_email + ) + + @service_handler + async def get_table_options(self) -> TableOptionsSchema: + """ + Gets the list of table options related to fuel codes. + """ + fuel_types = await self.repo.get_fuel_types() + transport_modes = await self.repo.get_transport_modes() + fuel_code_prefixes = await self.repo.get_fuel_code_prefixes() + latest_fuel_codes = await self.repo.get_latest_fuel_codes() + field_options_results = await self.repo.get_fuel_code_field_options() + fp_locations = await self.repo.get_fp_locations() + facility_nameplate_capacity_units = [unit.value for unit in QuantityUnitsEnum] + + field_options_results_dict = {} + for row in field_options_results: + for key, value in row._mapping.items(): + if value is None or value == "": # Skip empty strings or null values + continue + if key not in field_options_results_dict: + # Use a set to remove duplicates + field_options_results_dict[key] = set() + field_options_results_dict[key].add(value) + + field_options = { + key: sorted(list(values)) + for key, values in field_options_results_dict.items() + } + + # Get next available fuel code for each prefix + fuel_code_prefixes_with_next = [] + for prefix in fuel_code_prefixes: + next_code = await self.repo.get_next_available_fuel_code_by_prefix( + prefix.prefix + ) + schema = FuelCodePrefixSchema.model_validate(prefix) + schema.next_fuel_code = next_code + fuel_code_prefixes_with_next.append(schema) + + return TableOptionsSchema( + fuel_types=[ + FuelTypeSchema.model_validate(fuel_type) for fuel_type in fuel_types + ], + transport_modes=[ + TransportModeSchema.model_validate(transport_mode) + for transport_mode in transport_modes + ], + fuel_code_prefixes=fuel_code_prefixes_with_next, + latest_fuel_codes=latest_fuel_codes, + field_options=field_options, + fp_locations=list(set(fp_locations)), + facility_nameplate_capacity_units=facility_nameplate_capacity_units, + ) + + @service_handler + async def search_fuel_codes( + self, pagination: PaginationRequestSchema + ) -> 
FuelCodesSchema:
+        """
+        Gets the paginated list of fuel codes.
+        """
+        fuel_codes, total_count = await self.repo.get_fuel_codes_paginated(pagination)
+        return FuelCodesSchema(
+            pagination=PaginationResponseSchema(
+                total=total_count,
+                page=pagination.page,
+                size=pagination.size,
+                total_pages=math.ceil(total_count / pagination.size),
+            ),
+            fuel_codes=[
+                FuelCodeSchema.model_validate(fuel_code) for fuel_code in fuel_codes
+            ],
+        )
+
+    async def get_fuel_code_statuses(self):
+        """
+        Get all available statuses for fuel codes from the database.
+
+        Returns:
+            List[FuelCodeStatusSchema]: A list of fuel code status objects.
+        """
+        fuel_code_statuses = await self.repo.get_fuel_code_statuses()
+        return [
+            FuelCodeStatusSchema.model_validate(fuel_code_status)
+            for fuel_code_status in fuel_code_statuses
+        ]
+
+    async def get_transport_modes(self):
+        """
+        Get all available transport modes for fuel codes from the database.
+
+        Returns:
+            List[TransportModeSchema]: A list of TransportModeSchema.
+        """
+        transport_modes = await self.repo.get_transport_modes()
+        return [
+            TransportModeSchema.model_validate(transport_mode)
+            for transport_mode in transport_modes
+        ]
+
+    async def convert_to_model(
+        self, fuel_code_schema: FuelCodeCreateUpdateSchema, status: FuelCodeStatusEnum
+    ) -> FuelCode:
+        """
+        Converts data from FuelCodeCreateUpdateSchema to the FuelCode data model to store in the database.
+        """
+        prefix = await self.repo.get_fuel_code_prefix(fuel_code_schema.prefix_id)
+        fuel_type = await self.repo.get_fuel_type_by_id(fuel_code_schema.fuel_type_id)
+        facility_nameplate_capacity_units_enum = (
+            QuantityUnitsEnum(fuel_code_schema.facility_nameplate_capacity_unit)
+            if fuel_code_schema.facility_nameplate_capacity_unit is not None
+            else None
+        )
+        transport_modes = await self.repo.get_transport_modes()
+        fuel_status = await self.repo.get_fuel_status_by_status(status)
+        fuel_code = FuelCode(
+            **fuel_code_schema.model_dump(
+                exclude={
+                    "id",
+                    "prefix_id",
+                    "fuel_type_id",
+                    "feedstock_fuel_transport_mode",
+                    "finished_fuel_transport_mode",
+                    "feedstock_fuel_transport_modes",
+                    "finished_fuel_transport_modes",
+                    "status",
+                    "is_valid",
+                    "validation_msg",
+                    "fuel_suffix",
+                    "deleted",
+                    "facility_nameplate_capacity_unit",
+                }
+            ),
+            fuel_code_status=fuel_status,
+            fuel_suffix=str(fuel_code_schema.fuel_suffix),
+            prefix_id=prefix.fuel_code_prefix_id,
+            fuel_type_id=fuel_type.fuel_type_id,
+            facility_nameplate_capacity_unit=facility_nameplate_capacity_units_enum,
+        )
+
+        fuel_code.feedstock_fuel_transport_modes = []
+        fuel_code.finished_fuel_transport_modes = []
+        for transport_mode in fuel_code_schema.feedstock_fuel_transport_mode or []:
+            matching_transport_mode = next(
+                (tm for tm in transport_modes if tm.transport_mode == transport_mode),
+                None,
+            )
+            if matching_transport_mode:
+                fuel_code.feedstock_fuel_transport_modes.append(
+                    FeedstockFuelTransportMode(
+                        fuel_code_id=fuel_code.fuel_code_id,
+                        transport_mode_id=matching_transport_mode.transport_mode_id,
+                    )
+                )
+            else:
+                raise ValueError(f"Invalid transport mode: {transport_mode}")
+
+        for transport_mode in fuel_code_schema.finished_fuel_transport_mode or []:
+            matching_transport_mode = next(
+                (tm for tm in transport_modes if tm.transport_mode == transport_mode),
+                None,
+            )
+            if matching_transport_mode:
+                fuel_code.finished_fuel_transport_modes.append(
+                    FinishedFuelTransportMode(
+                        fuel_code_id=fuel_code.fuel_code_id,
+                        transport_mode_id=matching_transport_mode.transport_mode_id,
+                    )
+ ) + else: + raise ValueError(f"Invalid transport mode: {transport_mode}") + + return fuel_code + + @service_handler + async def create_fuel_code( + self, fuel_code: FuelCodeCreateUpdateSchema + ) -> FuelCodeSchema: + """ + Create a new fuel code. + """ + fuel_suffix_value = await self.repo.validate_fuel_code( + fuel_code.fuel_suffix, fuel_code.prefix_id + ) + fuel_code.fuel_suffix = fuel_suffix_value + fuel_code_model = await self.convert_to_model( + fuel_code, FuelCodeStatusEnum.Draft + ) + fuel_code_model = await self.repo.create_fuel_code(fuel_code_model) + result = FuelCodeSchema.model_validate(fuel_code_model) + return result + + @service_handler + async def get_fuel_code(self, fuel_code_id: int): + return await self.repo.get_fuel_code(fuel_code_id) + + @service_handler + async def approve_fuel_code(self, fuel_code_id: int): + fuel_code = await self.get_fuel_code(fuel_code_id) + if not fuel_code: + raise ValueError("Fuel code not found") + + if fuel_code.fuel_code_status.status != FuelCodeStatusEnum.Draft: + raise ValueError("Fuel code is not in Draft") + + fuel_code.fuel_code_status = await self.repo.get_fuel_code_status( + FuelCodeStatusEnum.Approved + ) + + return await self.repo.update_fuel_code(fuel_code) + + @service_handler + async def update_fuel_code(self, fuel_code_data: FuelCodeCreateUpdateSchema): + fuel_code = await self.get_fuel_code(fuel_code_data.fuel_code_id) + if not fuel_code: + raise ValueError("Fuel code not found") + + for field, value in fuel_code_data.model_dump( + exclude={ + "feedstock_fuel_transport_modes", + "feedstock_fuel_transport_mode", + "finished_fuel_transport_modes", + "facility_nameplate_capacity_unit", + } + ).items(): + setattr(fuel_code, field, value) + + fuel_code.feedstock_fuel_transport_modes.clear() + + if fuel_code_data.feedstock_fuel_transport_mode: + for mode_name in fuel_code_data.feedstock_fuel_transport_mode: + transport_mode = await self.repo.get_transport_mode_by_name(mode_name) + feedstock_mode = FeedstockFuelTransportMode( + fuel_code_id=fuel_code.fuel_code_id, + transport_mode_id=transport_mode.transport_mode_id, + ) + fuel_code.feedstock_fuel_transport_modes.append(feedstock_mode) + + fuel_code.finished_fuel_transport_modes.clear() + + if fuel_code_data.finished_fuel_transport_mode: + for mode_name in fuel_code_data.finished_fuel_transport_mode: + transport_mode = await self.repo.get_transport_mode_by_name(mode_name) + if transport_mode: + finished_mode = FinishedFuelTransportMode( + fuel_code_id=fuel_code.fuel_code_id, + transport_mode_id=transport_mode.transport_mode_id, + ) + fuel_code.finished_fuel_transport_modes.append(finished_mode) + + facility_nameplate_capacity_units_enum = ( + QuantityUnitsEnum(fuel_code_data.facility_nameplate_capacity_unit).name + if fuel_code_data.facility_nameplate_capacity_unit is not None + else None + ) + fuel_code.facility_nameplate_capacity_unit = ( + facility_nameplate_capacity_units_enum + ) + + return await self.repo.update_fuel_code(fuel_code) + + @service_handler + async def delete_fuel_code(self, fuel_code_id: int): + return await self.repo.delete_fuel_code(fuel_code_id) diff --git a/backend/lcfs/web/api/fuel_code/views.py b/backend/lcfs/web/api/fuel_code/views.py new file mode 100644 index 000000000..08d7c320f --- /dev/null +++ b/backend/lcfs/web/api/fuel_code/views.py @@ -0,0 +1,219 @@ +""" +Fuel codes endpoints +""" + +from typing import List, Union, Optional + +import structlog +from fastapi import ( + APIRouter, + Body, + status, + Request, + Response, + Depends, + Query, +) +from 
fastapi_cache.decorator import cache +from starlette.responses import StreamingResponse + +from lcfs.db import dependencies +from lcfs.db.models.user.Role import RoleEnum +from lcfs.web.api.base import PaginationRequestSchema +from lcfs.web.api.fuel_code.export import FuelCodeExporter +from lcfs.web.api.fuel_code.schema import ( + FuelCodeCreateUpdateSchema, + FuelCodesSchema, + SearchFuelCodeList, + TableOptionsSchema, + FuelCodeSchema, + FuelCodeStatusSchema, + TransportModeSchema, +) +from lcfs.web.api.fuel_code.services import FuelCodeServices +from lcfs.web.core.decorators import view_handler + +router = APIRouter() +logger = structlog.get_logger(__name__) +get_async_db = dependencies.get_async_db_session + + +@router.get( + "/table-options", response_model=TableOptionsSchema, status_code=status.HTTP_200_OK +) +@view_handler([RoleEnum.GOVERNMENT]) +async def get_table_options( + request: Request, + service: FuelCodeServices = Depends(), +): + """Endpoint to retrieve table options related to fuel codes""" + logger.info("Retrieving table options") + return await service.get_table_options() + + +@router.get( + "/search", + response_model=Union[SearchFuelCodeList, List[str]], + status_code=status.HTTP_200_OK, +) +@view_handler([RoleEnum.GOVERNMENT]) +async def search_table_options_strings( + request: Request, + company: Optional[str] = Query( + None, alias="company", description="Company for filtering options" + ), + contact_name: Optional[str] = Query( + None, alias="contactName", description="Contact name for filtering options" + ), + contact_email: Optional[str] = Query( + None, alias="contactEmail", description="Contact email for filtering options" + ), + fuel_code: Optional[str] = Query( + None, alias="fuelCode", description="Fuel code for filtering options" + ), + prefix: Optional[str] = Query( + None, alias="prefix", description="Prefix for filtering options" + ), + distinct_search: Optional[bool] = Query( + False, + alias="distinctSearch", + description="Based on flag retrieve entire row data or just the list of distinct values", + ), + service: FuelCodeServices = Depends(), +): + """Endpoint to search fuel codes based on a query string""" + if fuel_code: + logger.info("Searching fuel code", fuel_code=fuel_code, prefix=prefix) + return await service.search_fuel_code(fuel_code, prefix, distinct_search) + elif company: + logger.info("Searching company", company=company, prefix=prefix) + if contact_email and contact_name and company: + logger.info( + "Searching contact email under company", + contact_email=contact_email, + company=company, + ) + return await service.search_contact_email( + company, contact_name, contact_email + ) + elif contact_name and company: + logger.info( + "Searching contact name under company", + contact_name=contact_name, + company=company, + ) + return await service.search_contact_name(company, contact_name) + return await service.search_company(company) + else: + raise ValueError("Invalid parameters provided for search") + + +@router.post("/list", response_model=FuelCodesSchema, status_code=status.HTTP_200_OK) +@view_handler([RoleEnum.GOVERNMENT]) +async def get_fuel_codes( + request: Request, + pagination: PaginationRequestSchema = Body(..., embed=False), + response: Response = None, + service: FuelCodeServices = Depends(), +): + """Endpoint to get list of fuel codes with pagination options""" + return await service.search_fuel_codes(pagination) + + +@router.get("/export", response_class=StreamingResponse, status_code=status.HTTP_200_OK) 
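The export route declared above hands the `format` query parameter to FuelCodeExporter, which is not part of this diff. A minimal sketch of the format-to-MIME-type mapping it presumably applies, using the media types listed in the handler docstring below (the helper name is illustrative only):

```python
MEDIA_TYPES = {
    "xls": "application/vnd.ms-excel",
    "xlsx": "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
    "csv": "text/csv",
}


def media_type_for(export_format: str) -> str:
    # Fall back to "xls", the endpoint's default format.
    return MEDIA_TYPES.get(export_format, MEDIA_TYPES["xls"])
```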
+@view_handler([RoleEnum.GOVERNMENT]) +async def export_users( + request: Request, + format: str = Query(default="xls", description="File export format"), + exporter: FuelCodeExporter = Depends(), +): + """ + Endpoint to export information of all fuel codes + + This endpoint can support exporting data in different file formats (xls, xlsx, csv) + as specified by the 'format' and 'media_type' variables. + - 'format' specifies the file format: options are 'xls', 'xlsx', and 'csv'. + - 'media_type' sets the appropriate MIME type based on 'format': + 'application/vnd.ms-excel' for 'xls', + 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' for 'xlsx', + 'text/csv' for 'csv'. + + The SpreadsheetBuilder class is used for building the spreadsheet. + It allows adding multiple sheets with custom styling options and exports them as a byte stream. + Also, an example of how to use the SpreadsheetBuilder is provided in its class documentation. + + Note: Only the first sheet data is used for the CSV format, + as CSV files do not support multiple sheets. + """ + return await exporter.export(format) + + +@router.get( + "/statuses", + response_model=List[FuelCodeStatusSchema], + status_code=status.HTTP_200_OK, +) +@cache(expire=60 * 60 * 24) # cache for 24 hours +@view_handler(["*"]) +async def get_fuel_code_statuses( + request: Request, service: FuelCodeServices = Depends() +) -> List[FuelCodeStatusSchema]: + """Fetch all fuel code statuses""" + return await service.get_fuel_code_statuses() + + +@router.get( + "/transport-modes", + response_model=List[TransportModeSchema], + status_code=status.HTTP_200_OK, +) +@cache(expire=60 * 60 * 24) # cache for 24 hours +@view_handler(["*"]) +async def get_transport_modes( + request: Request, service: FuelCodeServices = Depends() +) -> List[TransportModeSchema]: + """Fetch all fuel code transport modes""" + return await service.get_transport_modes() + + +@router.get("/{fuel_code_id}", status_code=status.HTTP_200_OK) +@view_handler(["*"]) +async def get_fuel_code( + request: Request, + fuel_code_id: int, + service: FuelCodeServices = Depends(), +) -> FuelCodeSchema: + return await service.get_fuel_code(fuel_code_id) + + +@router.post("/{fuel_code_id}/approve", status_code=status.HTTP_200_OK) +@view_handler([RoleEnum.GOVERNMENT]) +async def approve_fuel_code( + request: Request, + fuel_code_id: int, + service: FuelCodeServices = Depends(), +): + # TODO: Add Logic that checks if the code has necessary fields for approval + return await service.approve_fuel_code(fuel_code_id) + + +@router.post("", status_code=status.HTTP_200_OK) +@view_handler([RoleEnum.ANALYST]) +async def save_fuel_code( + request: Request, + fuel_code_data: FuelCodeCreateUpdateSchema, + service: FuelCodeServices = Depends(), +): + """Endpoint to create or update a fuel code""" + if fuel_code_data.fuel_code_id: + return await service.update_fuel_code(fuel_code_data) + else: + return await service.create_fuel_code(fuel_code_data) + + +@router.delete("/{fuel_code_id}", status_code=status.HTTP_200_OK) +@view_handler([RoleEnum.ANALYST]) +async def delete_fuel_code( + request: Request, fuel_code_id: int, service: FuelCodeServices = Depends() +): + return await service.delete_fuel_code(fuel_code_id) diff --git a/backend/lcfs/web/api/fuel_export/__init__.py b/backend/lcfs/web/api/fuel_export/__init__.py new file mode 100644 index 000000000..dd5828d88 --- /dev/null +++ b/backend/lcfs/web/api/fuel_export/__init__.py @@ -0,0 +1,5 @@ +"""API for internal comments.""" + +from 
lcfs.web.api.fuel_export.views import router + +__all__ = ["router"] diff --git a/backend/lcfs/web/api/fuel_export/actions_service.py b/backend/lcfs/web/api/fuel_export/actions_service.py new file mode 100644 index 000000000..cd758ca85 --- /dev/null +++ b/backend/lcfs/web/api/fuel_export/actions_service.py @@ -0,0 +1,229 @@ +import uuid +from logging import getLogger +from typing import Optional +from fastapi import Depends, HTTPException + +from lcfs.db.base import ActionTypeEnum, UserTypeEnum +from lcfs.db.models.compliance.FuelExport import FuelExport +from lcfs.db.models.compliance.ComplianceReport import QuantityUnitsEnum +from lcfs.web.api.fuel_export.repo import FuelExportRepository +from lcfs.web.api.fuel_code.repo import FuelCodeRepository +from lcfs.web.api.fuel_export.schema import ( + DeleteFuelExportResponseSchema, + FuelExportCreateUpdateSchema, + FuelExportSchema, +) +from lcfs.web.core.decorators import service_handler +from lcfs.web.utils.calculations import calculate_compliance_units + +logger = getLogger(__name__) + +# Constants defining which fields to exclude during model operations +FUEL_EXPORT_EXCLUDE_FIELDS = { + "id", + "fuel_export_id", + "compliance_period", + "deleted", + "group_uuid", + "user_type", + "version", + "action_type", + "units", +} + + +class FuelExportActionService: + """ + Service layer handling CRUD operations and versioning for Fuel Export records. + This service manages the creation, update (versioned), and deletion of fuel exports + and populates calculated fields required for each record. + """ + + def __init__( + self, + repo: FuelExportRepository = Depends(), + fuel_repo: FuelCodeRepository = Depends(), + ) -> None: + self.repo = repo + self.fuel_repo = fuel_repo + + async def _populate_fuel_export_fields( + self, fuel_export: FuelExport, fe_data: FuelExportCreateUpdateSchema + ) -> FuelExport: + """ + Populate additional calculated and referenced fields for a FuelExport instance. + """ + # Fetch standardized fuel data + fuel_data = await self.fuel_repo.get_standardized_fuel_data( + fuel_type_id=fuel_export.fuel_type_id, + fuel_category_id=fuel_export.fuel_category_id, + end_use_id=fuel_export.end_use_id, + fuel_code_id=fuel_export.fuel_code_id, + compliance_period=fe_data.compliance_period, + ) + + fuel_export.units = QuantityUnitsEnum(fe_data.units) + fuel_export.ci_of_fuel = fuel_data.effective_carbon_intensity + fuel_export.target_ci = fuel_data.target_ci + fuel_export.eer = fuel_data.eer + fuel_export.energy_density = fuel_data.energy_density + fuel_export.uci = fuel_data.uci + + # Calculate total energy if energy density is available + fuel_export.energy = ( + round(fuel_export.energy_density * fuel_export.quantity) + if fuel_export.energy_density + else 0 + ) + + # Calculate compliance units using the direct utility function + compliance_units = calculate_compliance_units( + TCI=fuel_export.target_ci or 0, + EER=fuel_export.eer or 1, + RCI=fuel_export.ci_of_fuel or 0, + UCI=0, # Assuming Additional Carbon Intensity Attributable to Use is zero + Q=fuel_export.quantity or 0, + ED=fuel_export.energy_density or 0, + ) + + # Adjust compliance units to negative to represent exports + compliance_units = -round(compliance_units) + fuel_export.compliance_units = compliance_units if compliance_units < 0 else 0 + + return fuel_export + + @service_handler + async def create_fuel_export( + self, fe_data: FuelExportCreateUpdateSchema, user_type: UserTypeEnum + ) -> FuelExportSchema: + """ + Create a new fuel export record. 
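`_populate_fuel_export_fields` above delegates the arithmetic to `calculate_compliance_units` from `lcfs.web.utils.calculations`, whose body is not shown in this diff. A minimal sketch of what it presumably computes, inferred from the argument names at the call site (the formula and example values are assumptions, not copied from the source):

```python
def calculate_compliance_units(
    TCI: float, EER: float, RCI: float, UCI: float, Q: float, ED: float
) -> float:
    # units = (target CI x EER - (fuel CI + additional use CI)) x energy / 1e6,
    # where energy = quantity x energy density (MJ).
    return (TCI * EER - (RCI + UCI)) * ((Q * ED) / 1_000_000)


# Illustrative numbers: TCI=79.28, EER=1.0, RCI=40.0 (a low-CI fuel), UCI=0,
# Q=10_000, ED=38.65 -> (79.28 - 40.0) * 386_500 / 1_000_000 ~= +15.18.
# The export service then negates and rounds this to -15 and clamps the
# result to <= 0, so exporting low-CI fuel books a debit.
```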
+
+        - Assigns a unique group UUID and sets the initial version to 0.
+        - Uses `ActionTypeEnum.CREATE` to indicate a new record.
+        - Populates calculated fields and saves the new record.
+
+        Returns the newly created fuel export record as a response schema.
+        """
+        # Assign a unique group UUID for the new fuel export
+        new_group_uuid = str(uuid.uuid4())
+        fuel_export = FuelExport(
+            **fe_data.model_dump(exclude=FUEL_EXPORT_EXCLUDE_FIELDS),
+            group_uuid=new_group_uuid,
+            version=0,
+            user_type=user_type,
+            action_type=ActionTypeEnum.CREATE,
+        )
+
+        # Populate calculated and referenced fields
+        fuel_export = await self._populate_fuel_export_fields(fuel_export, fe_data)
+
+        # Save the populated fuel export record
+        created_export = await self.repo.create_fuel_export(fuel_export)
+        return FuelExportSchema.model_validate(created_export)
+
+    @service_handler
+    async def update_fuel_export(
+        self, fe_data: FuelExportCreateUpdateSchema, user_type: UserTypeEnum
+    ) -> FuelExportSchema:
+        """
+        Update an existing fuel export record or create a new version if necessary.
+
+        - Checks if a record exists for the given `group_uuid` and `version`.
+        - If `compliance_report_id` matches, updates the existing record.
+        - If `compliance_report_id` differs, creates a new version.
+        - If no existing record is found, raises an HTTPException.
+
+        Returns the updated or new version of the fuel export record.
+        """
+        existing_export = await self.repo.get_fuel_export_version_by_user(
+            fe_data.group_uuid, fe_data.version, user_type
+        )
+
+        if (
+            existing_export
+            and existing_export.compliance_report_id == fe_data.compliance_report_id
+        ):
+            # Update existing record if compliance report ID matches
+            for field, value in fe_data.model_dump(
+                exclude=FUEL_EXPORT_EXCLUDE_FIELDS
+            ).items():
+                setattr(existing_export, field, value)
+
+            # Populate calculated fields
+            existing_export = await self._populate_fuel_export_fields(
+                existing_export, fe_data
+            )
+
+            updated_export = await self.repo.update_fuel_export(existing_export)
+            return FuelExportSchema.model_validate(updated_export)
+
+        elif existing_export:
+            # Create a new version if compliance report ID differs
+            fuel_export = FuelExport(
+                compliance_report_id=fe_data.compliance_report_id,
+                group_uuid=fe_data.group_uuid,
+                version=existing_export.version + 1,
+                action_type=ActionTypeEnum.UPDATE,
+                user_type=user_type,
+            )
+
+            # Copy existing fields, then apply new data
+            for field in existing_export.__table__.columns.keys():
+                if field not in FUEL_EXPORT_EXCLUDE_FIELDS:
+                    setattr(fuel_export, field, getattr(existing_export, field))
+
+            for field, value in fe_data.model_dump(
+                exclude=FUEL_EXPORT_EXCLUDE_FIELDS
+            ).items():
+                setattr(fuel_export, field, value)
+
+            # Populate calculated fields
+            fuel_export = await self._populate_fuel_export_fields(fuel_export, fe_data)
+
+            # Save the new version
+            new_export = await self.repo.create_fuel_export(fuel_export)
+            return FuelExportSchema.model_validate(new_export)
+
+        raise HTTPException(status_code=404, detail="Fuel export record not found.")
+
+    @service_handler
+    async def delete_fuel_export(
+        self, fe_data: FuelExportCreateUpdateSchema, user_type: UserTypeEnum
+    ) -> DeleteFuelExportResponseSchema:
+        """
+        Delete a fuel export record by creating a new version marked as deleted.
+
+        Returns a response schema confirming deletion.
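The create/update/delete trio above forms an append-only version chain per `group_uuid`, which the repository later resolves into a single "effective" row. An illustrative history for one export row (values made up):

```python
# version  action_type  user_type   produced by
#    0      CREATE       SUPPLIER   create_fuel_export
#    1      UPDATE       SUPPLIER   update_fuel_export (new compliance report)
#    2      DELETE       SUPPLIER   delete_fuel_export (tombstone, fields copied)
#
# get_effective_fuel_exports (repo.py, further below) excludes any group whose
# chain contains a DELETE action and otherwise keeps the highest version.
```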
+ """ + existing_export = await self.repo.get_latest_fuel_export_by_group_uuid( + fe_data.group_uuid + ) + + if existing_export and existing_export.action_type == ActionTypeEnum.DELETE: + return DeleteFuelExportResponseSchema( + success=True, message="Fuel export record already deleted." + ) + + # Create a new version with action_type DELETE + delete_export = FuelExport( + compliance_report_id=fe_data.compliance_report_id, + group_uuid=fe_data.group_uuid, + version=(existing_export.version + 1) if existing_export else 0, + action_type=ActionTypeEnum.DELETE, + user_type=user_type, + ) + + # Copy over necessary fields from the latest version + if existing_export: + for field in existing_export.__table__.columns.keys(): + if field not in FUEL_EXPORT_EXCLUDE_FIELDS: + setattr(delete_export, field, getattr(existing_export, field)) + + delete_export.units = QuantityUnitsEnum(fe_data.units) + + await self.repo.create_fuel_export(delete_export) + return DeleteFuelExportResponseSchema( + success=True, message="Fuel export record marked as deleted." + ) diff --git a/backend/lcfs/web/api/fuel_export/repo.py b/backend/lcfs/web/api/fuel_export/repo.py new file mode 100644 index 000000000..510af73b6 --- /dev/null +++ b/backend/lcfs/web/api/fuel_export/repo.py @@ -0,0 +1,396 @@ +import structlog +from typing import List, Optional, Tuple +from lcfs.db.models.compliance import CompliancePeriod, FuelExport +from lcfs.db.models.fuel import ( + EnergyDensity, + EnergyEffectivenessRatio, + FuelCategory, + FuelInstance, + FuelCode, + FuelCodePrefix, + FuelCodeStatus, + FuelType, + ProvisionOfTheAct, + TargetCarbonIntensity, + UnitOfMeasure, + EndUseType, +) +from lcfs.db.base import UserTypeEnum, ActionTypeEnum +from lcfs.db.models.compliance.ComplianceReport import ComplianceReport +from lcfs.web.api.base import PaginationRequestSchema +from sqlalchemy import and_, or_, select, func, delete +from sqlalchemy.orm import joinedload, selectinload +from sqlalchemy.sql import case +from sqlalchemy.ext.asyncio import AsyncSession +from fastapi import Depends + +from lcfs.web.core.decorators import repo_handler +from lcfs.db.dependencies import get_async_db_session + +logger = structlog.get_logger(__name__) + + +class FuelExportRepository: + def __init__(self, db: AsyncSession = Depends(get_async_db_session)): + self.db = db + self.query = select(FuelExport).options( + joinedload(FuelExport.fuel_code).options( + joinedload(FuelCode.fuel_code_status), + joinedload(FuelCode.fuel_code_prefix), + ), + joinedload(FuelExport.fuel_category).options( + joinedload(FuelCategory.target_carbon_intensities), + joinedload(FuelCategory.energy_effectiveness_ratio), + ), + joinedload(FuelExport.fuel_type).options( + joinedload(FuelType.energy_density), + joinedload(FuelType.additional_carbon_intensity), + joinedload(FuelType.energy_effectiveness_ratio), + ), + joinedload(FuelExport.provision_of_the_act), + joinedload(FuelExport.end_use_type), + ) + + @repo_handler + async def get_fuel_export_table_options(self, compliancePeriod: str): + """ + Retrieve Fuel Type and other static data to use them while populating fuel supply form. 
+ """ + subquery_compliance_period_id = ( + select(CompliancePeriod.compliance_period_id) + .where(CompliancePeriod.description == compliancePeriod) + .scalar_subquery() + ) + + subquery_fuel_code_status_id = ( + select(FuelCodeStatus.fuel_code_status_id) + .where(FuelCodeStatus.status == "Approved") + .scalar_subquery() + ) + + subquery_provision_of_the_act_id = ( + select(ProvisionOfTheAct.provision_of_the_act_id) + .where(ProvisionOfTheAct.name == "Fuel code - section 19 (b) (i)") + .scalar_subquery() + ) + + query = ( + select( + FuelType.fuel_type_id, + FuelInstance.fuel_instance_id, + FuelInstance.fuel_category_id, + FuelType.fuel_type, + FuelType.fossil_derived, + FuelType.default_carbon_intensity, + FuelCategory.category, + ProvisionOfTheAct.provision_of_the_act_id, + ProvisionOfTheAct.name.label("provision_of_the_act"), + EnergyDensity.energy_density_id, + EnergyDensity.density.label("energy_density"), + FuelType.units.label("unit"), + EndUseType.end_use_type_id, + EndUseType.type.label("end_use_type"), + EndUseType.sub_type.label("end_use_sub_type"), + UnitOfMeasure.uom_id, + UnitOfMeasure.name, + EnergyEffectivenessRatio.eer_id, + func.coalesce(EnergyEffectivenessRatio.ratio, 1).label( + "energy_effectiveness_ratio" + ), + TargetCarbonIntensity.target_carbon_intensity_id, + TargetCarbonIntensity.target_carbon_intensity, + TargetCarbonIntensity.reduction_target_percentage, + FuelCode.fuel_code_id, + FuelCodePrefix.fuel_code_prefix_id, + func.concat(FuelCodePrefix.prefix, FuelCode.fuel_suffix).label( + "fuel_code" + ), + FuelCode.carbon_intensity.label("fuel_code_carbon_intensity"), + ) + .join(FuelInstance, FuelInstance.fuel_type_id == FuelType.fuel_type_id) + .join( + FuelCategory, + FuelCategory.fuel_category_id == FuelInstance.fuel_category_id, + ) + .outerjoin( + ProvisionOfTheAct, + or_( + and_( + FuelType.fossil_derived == True, + ProvisionOfTheAct.provision_of_the_act_id == 1, + ), + and_( + FuelType.fossil_derived == False, + ProvisionOfTheAct.provision_of_the_act_id != 1, + ), + ), + ) + .outerjoin( + EnergyDensity, EnergyDensity.fuel_type_id == FuelType.fuel_type_id + ) + .outerjoin(UnitOfMeasure, EnergyDensity.uom_id == UnitOfMeasure.uom_id) + .outerjoin( + EnergyEffectivenessRatio, + and_( + EnergyEffectivenessRatio.fuel_category_id + == FuelCategory.fuel_category_id, + EnergyEffectivenessRatio.fuel_type_id == FuelInstance.fuel_type_id, + ), + ) + .outerjoin( + EndUseType, + EndUseType.end_use_type_id == EnergyEffectivenessRatio.end_use_type_id, + ) + .outerjoin( + TargetCarbonIntensity, + and_( + TargetCarbonIntensity.fuel_category_id + == FuelCategory.fuel_category_id, + TargetCarbonIntensity.compliance_period_id + == subquery_compliance_period_id, + ), + ) + .outerjoin( + FuelCode, + and_( + FuelCode.fuel_type_id == FuelType.fuel_type_id, + FuelCode.fuel_status_id == subquery_fuel_code_status_id, + ProvisionOfTheAct.provision_of_the_act_id + == subquery_provision_of_the_act_id, + ), + ) + .outerjoin( + FuelCodePrefix, FuelCodePrefix.fuel_code_prefix_id == FuelCode.prefix_id + ) + ) + + results = (await self.db.execute(query)).all() + return results + + @repo_handler + async def get_fuel_export_list(self, compliance_report_id: int) -> List[FuelExport]: + """ + Retrieve the list of effective fuel exports for a given compliance report. 
+ """ + # Retrieve the compliance report's group UUID + report_group_query = await self.db.execute( + select(ComplianceReport.compliance_report_group_uuid).where( + ComplianceReport.compliance_report_id == compliance_report_id + ) + ) + group_uuid = report_group_query.scalar() + if not group_uuid: + return [] + + # Retrieve effective fuel exports using the group UUID + effective_fuel_exports = await self.get_effective_fuel_exports( + compliance_report_group_uuid=group_uuid + ) + + return effective_fuel_exports + + @repo_handler + async def get_fuel_exports_paginated( + self, pagination: PaginationRequestSchema, compliance_report_id: int + ) -> Tuple[List[FuelExport], int]: + """ + Retrieve a paginated list of effective fuel exports for a given compliance report. + """ + # Retrieve the compliance report's group UUID + report_group_query = await self.db.execute( + select(ComplianceReport.compliance_report_group_uuid).where( + ComplianceReport.compliance_report_id == compliance_report_id + ) + ) + group_uuid = report_group_query.scalar() + if not group_uuid: + return [], 0 + + # Retrieve effective fuel exports using the group UUID + effective_fuel_exports = await self.get_effective_fuel_exports( + compliance_report_group_uuid=group_uuid + ) + + # Manually apply pagination + total_count = len(effective_fuel_exports) + offset = 0 if pagination.page < 1 else (pagination.page - 1) * pagination.size + limit = pagination.size + paginated_exports = effective_fuel_exports[offset : offset + limit] + + return paginated_exports, total_count + + @repo_handler + async def get_fuel_export_by_id(self, fuel_export_id: int) -> FuelExport: + """ + Retrieve a fuel supply row from the database + """ + query = self.query.where(FuelExport.fuel_export_id == fuel_export_id) + result = await self.db.execute(query) + return result.unique().scalar_one_or_none() + + @repo_handler + async def update_fuel_export(self, fuel_export: FuelExport) -> FuelExport: + """ + Update an existing fuel supply row in the database. + """ + updated_fuel_export = await self.db.merge(fuel_export) + await self.db.flush() + await self.db.refresh( + updated_fuel_export, + [ + "fuel_category", + "fuel_type", + "provision_of_the_act", + "end_use_type", + ], + ) + return updated_fuel_export + + @repo_handler + async def create_fuel_export(self, fuel_export: FuelExport) -> FuelExport: + """ + Create a new fuel supply row in the database. + """ + self.db.add(fuel_export) + await self.db.flush() + await self.db.refresh( + fuel_export, + [ + "fuel_category", + "fuel_type", + "provision_of_the_act", + "end_use_type", + "fuel_code", + ], + ) + return fuel_export + + @repo_handler + async def delete_fuel_export(self, fuel_export_id: int): + """Delete a fuel supply row from the database""" + await self.db.execute( + delete(FuelExport).where(FuelExport.fuel_export_id == fuel_export_id) + ) + await self.db.flush() + + @repo_handler + async def get_fuel_export_version_by_user( + self, group_uuid: str, version: int, user_type: UserTypeEnum + ) -> Optional[FuelExport]: + """ + Retrieve a specific FuelExport record by group UUID, version, and user_type. 
+ """ + query = ( + select(FuelExport) + .where( + FuelExport.group_uuid == group_uuid, + FuelExport.version == version, + FuelExport.user_type == user_type, + ) + .options( + joinedload(FuelExport.fuel_code), + joinedload(FuelExport.fuel_category), + joinedload(FuelExport.fuel_type), + joinedload(FuelExport.provision_of_the_act), + joinedload(FuelExport.end_use_type), + ) + ) + + result = await self.db.execute(query) + return result.scalars().first() + + @repo_handler + async def get_latest_fuel_export_by_group_uuid( + self, group_uuid: str + ) -> Optional[FuelExport]: + """ + Retrieve the latest FuelExport record for a given group UUID. + Government records are prioritized over supplier records. + """ + query = ( + select(FuelExport) + .where(FuelExport.group_uuid == group_uuid) + .order_by( + # FuelExport.user_type == UserTypeEnum.SUPPLIER evaluates to False for GOVERNMENT, + # thus bringing GOVERNMENT records to the top in the ordered results. + FuelExport.user_type == UserTypeEnum.SUPPLIER, + FuelExport.version.desc(), + ) + ) + + result = await self.db.execute(query) + return result.scalars().first() + + @repo_handler + async def get_effective_fuel_exports( + self, compliance_report_group_uuid: str + ) -> List[FuelExport]: + """ + Retrieve effective FuelExport records associated with the given compliance_report_group_uuid. + Excludes groups with any DELETE action and selects the highest version and priority. + """ + # Step 1: Select to get all compliance_report_ids in the specified group + compliance_reports_select = select(ComplianceReport.compliance_report_id).where( + ComplianceReport.compliance_report_group_uuid + == compliance_report_group_uuid + ) + + # Step 2: Select to identify group_uuids that have any DELETE action + delete_group_select = ( + select(FuelExport.group_uuid) + .where( + FuelExport.compliance_report_id.in_(compliance_reports_select), + FuelExport.action_type == ActionTypeEnum.DELETE, + ) + .distinct() + ) + + # Step 3: Select to find the max version and priority per group_uuid, excluding DELETE groups + user_type_priority = case( + (FuelExport.user_type == UserTypeEnum.GOVERNMENT, 1), + (FuelExport.user_type == UserTypeEnum.SUPPLIER, 0), + else_=0, + ) + + valid_fuel_exports_select = ( + select( + FuelExport.group_uuid, + func.max(FuelExport.version).label("max_version"), + func.max(user_type_priority).label("max_role_priority"), + ) + .where( + FuelExport.compliance_report_id.in_(compliance_reports_select), + FuelExport.action_type != ActionTypeEnum.DELETE, + ~FuelExport.group_uuid.in_(delete_group_select), + ) + .group_by(FuelExport.group_uuid) + ) + + # Now create a subquery for use in the JOIN + valid_fuel_exports_subq = valid_fuel_exports_select.subquery() + + # Step 4: Main query to retrieve effective FuelExport records + query = ( + select(FuelExport) + .options( + # Load necessary related data + joinedload(FuelExport.fuel_code), + joinedload(FuelExport.fuel_category), + joinedload(FuelExport.fuel_type), + joinedload(FuelExport.provision_of_the_act), + selectinload(FuelExport.end_use_type), + ) + .join( + valid_fuel_exports_subq, + and_( + FuelExport.group_uuid == valid_fuel_exports_subq.c.group_uuid, + FuelExport.version == valid_fuel_exports_subq.c.max_version, + user_type_priority == valid_fuel_exports_subq.c.max_role_priority, + ), + ) + ) + + result = await self.db.execute(query) + fuel_exports = result.unique().scalars().all() + + return fuel_exports diff --git a/backend/lcfs/web/api/fuel_export/schema.py 
b/backend/lcfs/web/api/fuel_export/schema.py new file mode 100644 index 000000000..f3a5d7584 --- /dev/null +++ b/backend/lcfs/web/api/fuel_export/schema.py @@ -0,0 +1,188 @@ +from datetime import date +from enum import Enum +from typing import List, Optional, Union +from lcfs.web.api.base import ( + BaseSchema, + FilterModel, + PaginationResponseSchema, + SortOrder, +) +from pydantic import Field, field_validator, validator + +from lcfs.web.api.fuel_code.schema import FuelCodeResponseSchema +from lcfs.web.api.fuel_type.schema import FuelTypeQuantityUnitsEnumSchema + + +class CommonPaginatedReportRequestSchema(BaseSchema): + compliance_report_id: int = Field(..., alias="complianceReportId") + filters: Optional[List[FilterModel]] = None + page: Optional[int] = None + size: Optional[int] = None + sort_orders: Optional[List[SortOrder]] = None + + +class FuelCategorySchema(BaseSchema): + fuel_category_id: int + fuel_category: str + default_and_prescribed_ci: Optional[float] = None + + +class ProvisionOfTheActSchema(BaseSchema): + provision_of_the_act_id: int + name: str + + +class UnitOfMeasureSchema(BaseSchema): + uom_id: int + name: str + + +class EnergyDensitySchema(BaseSchema): + energy_density_id: int + energy_density: float + unit: UnitOfMeasureSchema + + +class EndUseTypeSchema(BaseSchema): + end_use_type_id: int + type: str + sub_type: Optional[str] = None + + +class EnergyEffectivenessRatioSchema(BaseSchema): + eer_id: Optional[int] + fuel_category: FuelCategorySchema + end_use_type: Optional[EndUseTypeSchema] + energy_effectiveness_ratio: float + + +class TargetCarbonIntensitySchema(BaseSchema): + target_carbon_intensity_id: int + target_carbon_intensity: float + reduction_target_percentage: float + fuel_category: FuelCategorySchema + compliance_period: str + + +class FuelCodeSchema(BaseSchema): + fuel_code_id: int + fuel_code_prefix_id: int + fuel_code: str + fuel_code_carbon_intensity: float + + +class FuelTypeOptionsSchema(BaseSchema): + fuel_type_id: int + fuel_type: str + fossil_derived: bool + default_carbon_intensity: float + unit: str + energy_density: Optional[EnergyDensitySchema] + provisions: List[ProvisionOfTheActSchema] + fuel_categories: List[FuelCategorySchema] + eer_ratios: List[EnergyEffectivenessRatioSchema] + target_carbon_intensities: List[TargetCarbonIntensitySchema] + fuel_codes: Optional[List[FuelCodeSchema]] = [] + + +class FuelTypeOptionsResponse(BaseSchema): + fuel_types: List[FuelTypeOptionsSchema] + + +class FuelTypeSchema(BaseSchema): + fuel_type_id: int + fuel_type: str + fossil_derived: Optional[bool] = None + provision_1_id: Optional[int] = None + provision_2_id: Optional[int] = None + default_carbon_intensity: Optional[float] = None + units: FuelTypeQuantityUnitsEnumSchema + + @field_validator("default_carbon_intensity") + def quantize_default_carbon_intensity(cls, value): + return round(value, 2) + + +class FuelCategoryResponseSchema(BaseSchema): + fuel_category_id: Optional[int] = None + category: str + + +class FuelExportSchema(BaseSchema): + fuel_export_id: Optional[int] = None + compliance_report_id: int + group_uuid: Optional[str] = None + version: Optional[int] = None + user_type: Optional[str] = None + action_type: Optional[str] = None + compliance_period: Optional[str] = None + fuel_type_id: int + fuel_type: FuelTypeSchema + fuel_category_id: int + fuel_category: FuelCategoryResponseSchema + end_use_id: Optional[int] = None + end_use_type: Optional[EndUseTypeSchema] = None + provision_of_the_act_id: Optional[int] = None + provision_of_the_act: 
Optional[ProvisionOfTheActSchema] = None + fuel_type_other: Optional[str] = None + quantity: int = Field(..., gt=0) + units: str + export_date: date + compliance_units: Optional[int] = 0 + target_ci: Optional[float] = None + ci_of_fuel: Optional[float] = None + uci: Optional[float] = None + energy_density: Optional[float] = None + eer: Optional[float] = None + energy: Optional[float] = None + fuel_code_id: Optional[int] = None + fuel_code: Optional[FuelCodeResponseSchema] = None + + + @validator("quantity") + def quantity_must_be_positive(cls, v): + if v <= 0: + raise ValueError("quantity must be greater than zero") + return v + + +class FuelExportCreateUpdateSchema(BaseSchema): + fuel_export_id: Optional[int] = None + compliance_report_id: int + group_uuid: Optional[str] = None + version: Optional[int] = None + user_type: Optional[str] = None + action_type: Optional[str] = None + compliance_period: Optional[str] = None + fuel_type_other: Optional[str] = None + fuel_type_id: int + fuel_category_id: int + end_use_id: Optional[int] = None + provision_of_the_act_id: int + fuel_code_id: Optional[int] = None + quantity: int = Field(..., gt=0) + units: str + export_date: date + compliance_units: Optional[int] = 0 + target_ci: Optional[float] = 0 + ci_of_fuel: Optional[float] = 0 + energy_density: Optional[float] = 0 + eer: Optional[float] = 0 + energy: Optional[float] = 0 + deleted: Optional[bool] = None + + @validator("quantity") + def quantity_must_be_positive(cls, v): + if v <= 0: + raise ValueError("quantity must be greater than zero") + return v + + +class DeleteFuelExportResponseSchema(BaseSchema): + success: bool + message: str + + +class FuelExportsSchema(BaseSchema): + fuel_exports: Optional[List[FuelExportSchema]] = [] + pagination: Optional[PaginationResponseSchema] = None diff --git a/backend/lcfs/web/api/fuel_export/services.py b/backend/lcfs/web/api/fuel_export/services.py new file mode 100644 index 000000000..1a3279b5c --- /dev/null +++ b/backend/lcfs/web/api/fuel_export/services.py @@ -0,0 +1,264 @@ +import structlog +import math +from fastapi import Depends, Request +from fastapi_cache.decorator import cache + +from lcfs.web.api.base import ( + PaginationRequestSchema, + PaginationResponseSchema, + lcfs_cache_key_builder, +) +from lcfs.web.api.fuel_export.schema import ( + EndUseTypeSchema, + EnergyDensitySchema, + EnergyEffectivenessRatioSchema, + FuelCategorySchema, + FuelCodeSchema, + FuelExportsSchema, + FuelExportSchema, + FuelTypeOptionsResponse, + FuelTypeOptionsSchema, + ProvisionOfTheActSchema, + TargetCarbonIntensitySchema, + UnitOfMeasureSchema, +) +from lcfs.web.api.fuel_export.repo import FuelExportRepository +from lcfs.db.models.compliance.ComplianceReport import QuantityUnitsEnum +from lcfs.web.api.compliance_report.repo import ComplianceReportRepository +from lcfs.web.api.fuel_export.validation import FuelExportValidation +from lcfs.web.core.decorators import service_handler +from lcfs.web.utils.calculations import calculate_compliance_units +from lcfs.utils.constants import default_ci + +logger = structlog.get_logger(__name__) + + +class FuelExportServices: + def __init__( + self, + request: Request = None, + repo: FuelExportRepository = Depends(), + validate: FuelExportValidation = Depends(), + compliance_report_repo: ComplianceReportRepository = Depends(), + ) -> None: + self.request = request + self.repo = repo + self.compliance_report_repo = compliance_report_repo + + def fuel_type_row_mapper(self, compliance_period, fuel_types, row): + column_names = 
row._fields + row_data = dict(zip(column_names, row)) + fuel_category = FuelCategorySchema( + fuel_category_id=row_data["fuel_category_id"], + fuel_category=row_data["category"], + default_and_prescribed_ci=( + round(row_data["default_carbon_intensity"], 2) + if row_data["fuel_type"] != "Other" + else default_ci.get(row_data["category"]) + ), + ) + provision = ProvisionOfTheActSchema( + provision_of_the_act_id=row_data["provision_of_the_act_id"], + name=row_data["provision_of_the_act"], + ) + end_use_type = ( + EndUseTypeSchema( + end_use_type_id=row_data["end_use_type_id"], + type=row_data["end_use_type"], + sub_type=row_data["end_use_sub_type"], + ) + if row_data["end_use_type_id"] + else None + ) + eer = EnergyEffectivenessRatioSchema( + eer_id=row_data["eer_id"], + energy_effectiveness_ratio=round(row_data["energy_effectiveness_ratio"], 2), + fuel_category=fuel_category, + end_use_type=end_use_type, + ) + tci = TargetCarbonIntensitySchema( + target_carbon_intensity_id=row_data["target_carbon_intensity_id"], + target_carbon_intensity=round(row_data["target_carbon_intensity"], 2), + reduction_target_percentage=round( + row_data["reduction_target_percentage"], 2 + ), + fuel_category=fuel_category, + compliance_period=compliance_period, + ) + fuel_code = ( + FuelCodeSchema( + fuel_code_id=row_data["fuel_code_id"], + fuel_code=row_data["fuel_code"], + fuel_code_prefix_id=row_data["fuel_code_prefix_id"], + fuel_code_carbon_intensity=round( + row_data["fuel_code_carbon_intensity"], 2 + ), + ) + if row_data["fuel_code_id"] + else None + ) + # Find the existing fuel type if it exists + existing_fuel_type = next( + (ft for ft in fuel_types if ft.fuel_type == row_data["fuel_type"]), None + ) + + if existing_fuel_type: + # Append to the existing fuel type's + ( + existing_fuel_type.fuel_categories.append(fuel_category) + if not next( + ( + fc + for fc in existing_fuel_type.fuel_categories + if fc.fuel_category == row_data["category"] + ), + None, + ) + else None + ) + # Only add provision if it's "Fuel code - section 19 (b) (i)" and fuel_code exists + if ( + row_data["provision_of_the_act"] == "Fuel code - section 19 (b) (i)" + and fuel_code + ) or row_data["provision_of_the_act"] != "Fuel code - section 19 (b) (i)": + ( + existing_fuel_type.provisions.append(provision) + if not next( + ( + p + for p in existing_fuel_type.provisions + if p.name == row_data["provision_of_the_act"] + ), + None, + ) + else None + ) + + ( + existing_fuel_type.eer_ratios.append(eer) + if not next( + ( + e + for e in existing_fuel_type.eer_ratios + if e.end_use_type == row_data["end_use_type"] + and e.fuel_category == fuel_category + ), + None, + ) + else None + ) + ( + existing_fuel_type.target_carbon_intensities.append(tci) + if not next( + ( + t + for t in existing_fuel_type.target_carbon_intensities + if t.fuel_category == fuel_category + and t.compliance_period == compliance_period + ), + None, + ) + else None + ) + ( + existing_fuel_type.fuel_codes.append(fuel_code) + if fuel_code + and not next( + ( + fc + for fc in existing_fuel_type.fuel_codes + if fc.fuel_code == row_data["fuel_code"] + ), + None, + ) + else None + ) + else: + if row_data["energy_density_id"]: + unit = UnitOfMeasureSchema( + uom_id=row_data["uom_id"], + name=row_data["name"], + ) + energy_density = EnergyDensitySchema( + energy_density_id=row_data["energy_density_id"], + energy_density=round(row_data["energy_density"], 2), + unit=unit, + ) + # Only include provision if it's "Fuel code - section 19 (b) (i)" and fuel_code exists + provisions = ( 
+ [provision] + if ( + row_data["provision_of_the_act"] == "Fuel code - section 19 (b) (i)" + and fuel_code + ) + or ( + row_data["provision_of_the_act"] != "Fuel code - section 19 (b) (i)" + ) + else [] + ) + # Create a new fuel type and append + fuel_type = FuelTypeOptionsSchema( + fuel_type_id=row_data["fuel_type_id"], + fuel_type=row_data["fuel_type"], + fossil_derived=row_data["fossil_derived"], + default_carbon_intensity=( + round(row_data["default_carbon_intensity"], 2) + if row_data["fuel_type"] != "Other" + else default_ci.get(row_data["category"]) + ), + unit=row_data["unit"].value, + energy_density=( + energy_density if row_data["energy_density_id"] else None + ), + fuel_categories=[fuel_category], + provisions=provisions, + eer_ratios=[eer], + target_carbon_intensities=[tci], + fuel_codes=[fuel_code] if fuel_code else [], + ) + fuel_types.append(fuel_type) + + @service_handler + # @cache( + # expire=3600 * 24, + # key_builder=lcfs_cache_key_builder, + # namespace="users", + # ) # seems to cause issues with fuel exports + async def get_fuel_export_options( + self, compliance_period: str + ) -> FuelTypeOptionsResponse: + """Get fuel supply table options""" + fs_options = await self.repo.get_fuel_export_table_options(compliance_period) + fuel_types = [] + for row in fs_options: + self.fuel_type_row_mapper(compliance_period, fuel_types, row) + return FuelTypeOptionsResponse(fuel_types=fuel_types) + + @service_handler + async def get_fuel_export_list( + self, compliance_report_id: int + ) -> FuelExportsSchema: + """Get fuel supply list for a compliance report""" + fuel_export_models = await self.repo.get_fuel_export_list(compliance_report_id) + fs_list = [FuelExportSchema.model_validate(fs) for fs in fuel_export_models] + return FuelExportsSchema(fuel_exports=fs_list if fs_list else []) + + @service_handler + async def get_fuel_exports_paginated( + self, pagination: PaginationRequestSchema, compliance_report_id: int + ): + """Get paginated fuel supply list for a compliance report""" + fuel_exports, total_count = await self.repo.get_fuel_exports_paginated( + pagination, compliance_report_id + ) + return FuelExportsSchema( + pagination=PaginationResponseSchema( + page=pagination.page, + size=pagination.size, + total=total_count, + total_pages=( + math.ceil(total_count / pagination.size) if total_count > 0 else 0 + ), + ), + fuel_exports=[FuelExportSchema.model_validate(fs) for fs in fuel_exports], + ) diff --git a/backend/lcfs/web/api/fuel_export/validation.py b/backend/lcfs/web/api/fuel_export/validation.py new file mode 100644 index 000000000..8b1c653f3 --- /dev/null +++ b/backend/lcfs/web/api/fuel_export/validation.py @@ -0,0 +1,12 @@ +from fastapi import Depends, Request +from lcfs.web.api.fuel_export.repo import FuelExportRepository + + +class FuelExportValidation: + def __init__( + self, + request: Request = None, + fs_repo: FuelExportRepository = Depends(FuelExportRepository), + ): + self.fse_repo = fs_repo + self.request = request diff --git a/backend/lcfs/web/api/fuel_export/views.py b/backend/lcfs/web/api/fuel_export/views.py new file mode 100644 index 000000000..292f61326 --- /dev/null +++ b/backend/lcfs/web/api/fuel_export/views.py @@ -0,0 +1,115 @@ +import structlog +from typing import Optional, Union +from fastapi import ( + APIRouter, + Body, + status, + Request, + Response, + Depends, + HTTPException, +) +from lcfs.db import dependencies + +from lcfs.web.api.base import PaginationRequestSchema +from lcfs.web.api.fuel_export.schema import ( + 
    DeleteFuelExportResponseSchema,
+    FuelExportsSchema,
+    FuelExportCreateUpdateSchema,
+    FuelTypeOptionsResponse,
+    CommonPaginatedReportRequestSchema,
+    FuelExportSchema,
+)
+from lcfs.web.api.fuel_export.services import FuelExportServices
+from lcfs.web.api.fuel_export.actions_service import (
+    FuelExportActionService,
+)
+from lcfs.web.api.fuel_export.validation import FuelExportValidation
+from lcfs.web.core.decorators import view_handler
+from lcfs.web.api.compliance_report.validation import ComplianceReportValidation
+from lcfs.db.models.user.Role import RoleEnum
+
+router = APIRouter()
+logger = structlog.get_logger(__name__)
+get_async_db = dependencies.get_async_db_session
+
+
+@router.get(
+    "/table-options",
+    response_model=FuelTypeOptionsResponse,
+    status_code=status.HTTP_200_OK,
+)
+@view_handler(["*"])
+async def get_fuel_export_table_options(
+    request: Request, compliancePeriod: str, service: FuelExportServices = Depends()
+) -> FuelTypeOptionsResponse:
+    return await service.get_fuel_export_options(compliancePeriod)
+
+
+@router.post(
+    "/list-all", response_model=FuelExportsSchema, status_code=status.HTTP_200_OK
+)
+@view_handler(["*"])
+async def get_fuel_exports(
+    request: Request,
+    request_data: CommonPaginatedReportRequestSchema = Body(...),
+    response: Response = None,
+    service: FuelExportServices = Depends(),
+    report_validate: ComplianceReportValidation = Depends(),
+) -> FuelExportsSchema:
+    """Endpoint to get the list of fuel exports for a compliance report"""
+    compliance_report_id = request_data.compliance_report_id
+    await report_validate.validate_organization_access(compliance_report_id)
+    if hasattr(request_data, "page") and request_data.page is not None:
+        # Handle pagination.
+        pagination = PaginationRequestSchema(
+            page=request_data.page,
+            size=request_data.size,
+            sort_orders=request_data.sort_orders,
+            filters=request_data.filters,
+        )
+        return await service.get_fuel_exports_paginated(
+            pagination, compliance_report_id
+        )
+    else:
+        return await service.get_fuel_export_list(compliance_report_id)
+
+
+@router.post(
+    "/save",
+    response_model=Union[FuelExportSchema, DeleteFuelExportResponseSchema],
+    status_code=status.HTTP_201_CREATED,
+)
+@view_handler([RoleEnum.SUPPLIER])
+async def save_fuel_export_row(
+    request: Request,
+    request_data: FuelExportCreateUpdateSchema = Body(...),
+    action_service: FuelExportActionService = Depends(),
+    report_validate: ComplianceReportValidation = Depends(),
+    fs_validate: FuelExportValidation = Depends(),
+):
+    """Endpoint to save a single fuel export row"""
+    compliance_report_id = request_data.compliance_report_id
+    await report_validate.validate_organization_access(compliance_report_id)
+
+    # Determine user type for record creation
+    current_user_type = request.user.user_type
+    if not current_user_type:
+        raise HTTPException(
+            status_code=403, detail="User does not have the required role."
+        )
+
+    if request_data.deleted:
+        # Use action service to handle delete logic
+        return await action_service.delete_fuel_export(request_data, current_user_type)
+    elif request_data.fuel_export_id:
+        # Use action service to handle update logic
+        return await action_service.update_fuel_export(request_data, current_user_type)
+    else:
+        # Use action service to handle create logic
+        return await action_service.create_fuel_export(request_data, current_user_type)
diff --git a/backend/lcfs/web/api/fuel_supply/__init__.py b/backend/lcfs/web/api/fuel_supply/__init__.py
new file mode 100644
index 000000000..1aa72b7ea
--- /dev/null
+++ b/backend/lcfs/web/api/fuel_supply/__init__.py
@@ -0,0 +1,5 @@
+"""API for fuel supply records."""
+
+from lcfs.web.api.fuel_supply.views import router
+
+__all__ = ["router"]
diff --git a/backend/lcfs/web/api/fuel_supply/actions_service.py b/backend/lcfs/web/api/fuel_supply/actions_service.py
new file mode 100644
index 000000000..642e97f7b
--- /dev/null
+++ b/backend/lcfs/web/api/fuel_supply/actions_service.py
@@ -0,0 +1,263 @@
+import uuid
+from logging import getLogger
+from typing import Optional
+
+from fastapi import Depends, HTTPException
+
+from lcfs.db.base import ActionTypeEnum, UserTypeEnum
+from lcfs.db.models.compliance.FuelSupply import FuelSupply
+from lcfs.db.models.compliance.ComplianceReport import QuantityUnitsEnum
+from lcfs.web.api.fuel_code.repo import FuelCodeRepository
+from lcfs.web.api.fuel_supply.repo import FuelSupplyRepository
+from lcfs.web.api.fuel_supply.schema import (
+    DeleteFuelSupplyResponseSchema,
+    FuelSupplyCreateUpdateSchema,
+    FuelSupplyResponseSchema,
+)
+from lcfs.web.core.decorators import service_handler
+from lcfs.web.utils.calculations import calculate_compliance_units
+
+logger = getLogger(__name__)
+
+# Constants defining which fields to exclude during model operations
+FUEL_SUPPLY_EXCLUDE_FIELDS = {
+    "id",
+    "fuel_supply_id",
+    "compliance_period",
+    "deleted",
+    "group_uuid",
+    "user_type",
+    "version",
+    "action_type",
+    "units",
+}
+
+
+class FuelSupplyActionService:
+    """
+    Service layer handling CRUD operations and versioning for Fuel Supply records.
+    This service manages the creation, update (versioned), and deletion of fuel supplies
+    and populates calculated fields required for each record.
+    """
+
+    def __init__(
+        self,
+        repo: FuelSupplyRepository = Depends(),
+        fuel_repo: FuelCodeRepository = Depends(),
+    ) -> None:
+        self.repo = repo
+        self.fuel_repo = fuel_repo
+
+    async def _populate_fuel_supply_fields(
+        self, fuel_supply: FuelSupply, fs_data: FuelSupplyCreateUpdateSchema
+    ) -> FuelSupply:
+        """
+        Populate additional calculated and referenced fields for a FuelSupply instance.
+
+        Args:
+            fuel_supply (FuelSupply): The FuelSupply instance to populate.
+            fs_data (FuelSupplyCreateUpdateSchema): The data provided for creation or update.
+
+        Returns:
+            FuelSupply: The populated FuelSupply instance.
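+
+        Note: calculate_compliance_units is assumed to implement the standard
+        LCFS credit formula (see that utility for the authoritative version),
+        sketched as
+
+            compliance_units = (TCI * EER - (RCI + UCI)) * Q * ED / 1_000_000
+
+        so a missing energy density zeroes both energy and compliance_units.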
+ """ + # Fetch standardized fuel data + fuel_data = await self.fuel_repo.get_standardized_fuel_data( + fuel_type_id=fuel_supply.fuel_type_id, + fuel_category_id=fuel_supply.fuel_category_id, + end_use_id=fuel_supply.end_use_id, + fuel_code_id=fuel_supply.fuel_code_id, + compliance_period=fs_data.compliance_period, + ) + + # Set units + fuel_supply.units = QuantityUnitsEnum(fs_data.units) + + # Set calculated fields based on standardized fuel data + fuel_supply.ci_of_fuel = fuel_data.effective_carbon_intensity + fuel_supply.target_ci = fuel_data.target_ci + fuel_supply.eer = fuel_data.eer + fuel_supply.uci = fuel_data.uci + fuel_supply.energy_density = ( + fuel_data.energy_density + if fuel_data.energy_density + else fs_data.energy_density + ) + + # Calculate total energy if energy density is available + fuel_supply.energy = ( + int(fuel_supply.energy_density * fuel_supply.quantity) + if fuel_supply.energy_density + else 0 + ) + + # Calculate compliance units using the direct utility function + fuel_supply.compliance_units = calculate_compliance_units( + TCI=fuel_supply.target_ci or 0, + EER=fuel_supply.eer or 1, + RCI=fuel_supply.ci_of_fuel or 0, + UCI=fuel_supply.uci or 0, + Q=fuel_supply.quantity or 0, + ED=fuel_supply.energy_density or 0, + ) + + return fuel_supply + + @service_handler + async def create_fuel_supply( + self, fs_data: FuelSupplyCreateUpdateSchema, user_type: UserTypeEnum + ) -> FuelSupplyResponseSchema: + """ + Create a new fuel supply record. + + - Assigns a unique group UUID and sets the initial version to 0. + - Uses `ActionTypeEnum.CREATE` to indicate a new record. + - Populates calculated fields and saves the new record. + + Args: + fs_data (FuelSupplyCreateUpdateSchema): The data for the new fuel supply. + user_type (UserTypeEnum): The type of user creating the record. + + Returns: + FuelSupplyResponseSchema: The newly created fuel supply record as a response schema. + """ + # Assign a unique group UUID for the new fuel supply + new_group_uuid = str(uuid.uuid4()) + fuel_supply = FuelSupply( + **fs_data.model_dump(exclude=FUEL_SUPPLY_EXCLUDE_FIELDS), + group_uuid=new_group_uuid, + version=0, + user_type=user_type, + action_type=ActionTypeEnum.CREATE, + ) + + # Populate calculated and referenced fields + fuel_supply = await self._populate_fuel_supply_fields(fuel_supply, fs_data) + + # Save the populated fuel supply record + created_supply = await self.repo.create_fuel_supply(fuel_supply) + return FuelSupplyResponseSchema.model_validate(created_supply) + + @service_handler + async def update_fuel_supply( + self, fs_data: FuelSupplyCreateUpdateSchema, user_type: UserTypeEnum + ) -> FuelSupplyResponseSchema: + """ + Update an existing fuel supply record or create a new version if necessary. + + - Checks if a record exists for the given `group_uuid` and `version`. + - If `compliance_report_id` matches, updates the existing record. + - If `compliance_report_id` differs, creates a new version. + - If no existing record is found, raises an HTTPException. + + Args: + fs_data (FuelSupplyCreateUpdateSchema): The data for the fuel supply update. + user_type (UserTypeEnum): The type of user performing the update. + + Returns: + FuelSupplyResponseSchema: The updated or new version of the fuel supply record. + + Raises: + HTTPException: If the fuel supply record is not found. 
+ """ + existing_fuel_supply = await self.repo.get_fuel_supply_version_by_user( + fs_data.group_uuid, fs_data.version, user_type + ) + + if ( + existing_fuel_supply + and existing_fuel_supply.compliance_report_id + == fs_data.compliance_report_id + ): + # Update existing record if compliance report ID matches + for field, value in fs_data.model_dump( + exclude=FUEL_SUPPLY_EXCLUDE_FIELDS + ).items(): + setattr(existing_fuel_supply, field, value) + + # Populate calculated fields + existing_fuel_supply = await self._populate_fuel_supply_fields( + existing_fuel_supply, fs_data + ) + + updated_supply = await self.repo.update_fuel_supply(existing_fuel_supply) + return FuelSupplyResponseSchema.model_validate(updated_supply) + + elif existing_fuel_supply: + # Create a new version if compliance report ID differs + fuel_supply = FuelSupply( + compliance_report_id=fs_data.compliance_report_id, + group_uuid=fs_data.group_uuid, + version=existing_fuel_supply.version + 1, + action_type=ActionTypeEnum.UPDATE, + user_type=user_type, + ) + + # Copy existing fields, then apply new data + for field in existing_fuel_supply.__table__.columns.keys(): + if field not in FUEL_SUPPLY_EXCLUDE_FIELDS: + setattr(fuel_supply, field, getattr(existing_fuel_supply, field)) + + for field, value in fs_data.model_dump( + exclude=FUEL_SUPPLY_EXCLUDE_FIELDS + ).items(): + setattr(fuel_supply, field, value) + + # Populate calculated fields + fuel_supply = await self._populate_fuel_supply_fields(fuel_supply, fs_data) + + # Save the new version + new_supply = await self.repo.create_fuel_supply(fuel_supply) + return FuelSupplyResponseSchema.model_validate(new_supply) + + # Raise an exception if no existing record is found + raise HTTPException(status_code=404, detail="Fuel supply record not found.") + + @service_handler + async def delete_fuel_supply( + self, fs_data: FuelSupplyCreateUpdateSchema, user_type: UserTypeEnum + ) -> DeleteFuelSupplyResponseSchema: + """ + Delete a fuel supply record by creating a new version marked as deleted. + + - Fetches the latest version of the record by `group_uuid`. + - If already deleted, returns success immediately. + - Otherwise, creates a new version with `ActionTypeEnum.DELETE`. + + Args: + fs_data (FuelSupplyCreateUpdateSchema): The data for the fuel supply deletion. + user_type (UserTypeEnum): The type of user performing the deletion. + + Returns: + DeleteFuelSupplyResponseSchema: A response schema confirming deletion. + """ + existing_fuel_supply = await self.repo.get_latest_fuel_supply_by_group_uuid( + fs_data.group_uuid + ) + + if existing_fuel_supply.action_type == ActionTypeEnum.DELETE: + return DeleteFuelSupplyResponseSchema( + success=True, message="Already deleted." + ) + + # Create a new version with action_type DELETE + delete_supply = FuelSupply( + compliance_report_id=fs_data.compliance_report_id, + group_uuid=fs_data.group_uuid, + version=existing_fuel_supply.version + 1, + action_type=ActionTypeEnum.DELETE, + user_type=user_type, + ) + + # Copy fields from the latest version for the deletion record + for field in existing_fuel_supply.__table__.columns.keys(): + if field not in FUEL_SUPPLY_EXCLUDE_FIELDS: + setattr(delete_supply, field, getattr(existing_fuel_supply, field)) + + delete_supply.units = QuantityUnitsEnum(fs_data.units) + + # Save the deletion record + await self.repo.create_fuel_supply(delete_supply) + return DeleteFuelSupplyResponseSchema( + success=True, message="Marked as deleted." 
+ ) diff --git a/backend/lcfs/web/api/fuel_supply/repo.py b/backend/lcfs/web/api/fuel_supply/repo.py new file mode 100644 index 000000000..dca195539 --- /dev/null +++ b/backend/lcfs/web/api/fuel_supply/repo.py @@ -0,0 +1,482 @@ +from datetime import datetime + +import structlog +from typing import List, Optional, Sequence +from fastapi import Depends +from sqlalchemy import and_, delete, or_, select, case +from sqlalchemy import func +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm import joinedload, selectinload + +from lcfs.db.base import UserTypeEnum, ActionTypeEnum +from lcfs.db.dependencies import get_async_db_session +from lcfs.db.models.compliance import CompliancePeriod, FuelSupply, ComplianceReport +from lcfs.db.models.fuel import ( + EnergyDensity, + EnergyEffectivenessRatio, + FuelCategory, + FuelInstance, + FuelCode, + FuelCodePrefix, + FuelCodeStatus, + FuelType, + ProvisionOfTheAct, + TargetCarbonIntensity, + UnitOfMeasure, + EndUseType, +) +from lcfs.web.api.base import PaginationRequestSchema +from lcfs.web.api.fuel_supply.schema import FuelSupplyCreateUpdateSchema +from lcfs.web.core.decorators import repo_handler + +logger = structlog.get_logger(__name__) + + +class FuelSupplyRepository: + def __init__(self, db: AsyncSession = Depends(get_async_db_session)): + self.db = db + self.query = select(FuelSupply).options( + joinedload(FuelSupply.fuel_code).options( + joinedload(FuelCode.fuel_code_status), + joinedload(FuelCode.fuel_code_prefix), + ), + joinedload(FuelSupply.fuel_category).options( + joinedload(FuelCategory.target_carbon_intensities), + joinedload(FuelCategory.energy_effectiveness_ratio), + ), + joinedload(FuelSupply.fuel_type).options( + joinedload(FuelType.energy_density), + joinedload(FuelType.additional_carbon_intensity), + joinedload(FuelType.energy_effectiveness_ratio), + ), + joinedload(FuelSupply.provision_of_the_act), + joinedload(FuelSupply.end_use_type), + ) + + @repo_handler + async def get_fuel_supply_table_options(self, compliance_period: str): + """ + Retrieve Fuel Type and other static data to use them while populating fuel supply form. + """ + + subquery_compliance_period_id = ( + select(CompliancePeriod.compliance_period_id) + .where(CompliancePeriod.description == compliance_period) + .scalar_subquery() + ) + + subquery_fuel_code_status_id = ( + select(FuelCodeStatus.fuel_code_status_id) + .where(FuelCodeStatus.status == "Approved") + .scalar_subquery() + ) + + subquery_provision_of_the_act_id = ( + select(ProvisionOfTheAct.provision_of_the_act_id) + .where(ProvisionOfTheAct.name == "Fuel code - section 19 (b) (i)") + .scalar_subquery() + ) + + try: + current_year = int(compliance_period) + except ValueError as e: + logger.error( + "Invalid compliance_period: not an integer", + compliance_period=compliance_period, + error=str(e), + ) + raise ValueError( + f"Invalid compliance_period: '{compliance_period}' must be an integer." 
+ ) from e + + start_of_this_year = datetime(current_year, 1, 1) + start_of_previous_year = datetime(current_year - 1, 1, 1) + + query = ( + select( + FuelType.fuel_type_id, + FuelInstance.fuel_instance_id, + FuelInstance.fuel_category_id, + FuelType.fuel_type, + FuelType.fossil_derived, + FuelType.default_carbon_intensity, + FuelCategory.category, + ProvisionOfTheAct.provision_of_the_act_id, + ProvisionOfTheAct.name.label("provision_of_the_act"), + EnergyDensity.energy_density_id, + EnergyDensity.density.label("energy_density"), + FuelType.units.label("unit"), + FuelType.unrecognized, + EndUseType.end_use_type_id, + EndUseType.type.label("end_use_type"), + EndUseType.sub_type.label("end_use_sub_type"), + UnitOfMeasure.uom_id, + UnitOfMeasure.name, + EnergyEffectivenessRatio.eer_id, + func.coalesce(EnergyEffectivenessRatio.ratio, 1).label( + "energy_effectiveness_ratio" + ), + TargetCarbonIntensity.target_carbon_intensity_id, + TargetCarbonIntensity.target_carbon_intensity, + TargetCarbonIntensity.reduction_target_percentage, + FuelCode.fuel_code_id, + FuelCode.fuel_suffix, + FuelCodePrefix.fuel_code_prefix_id, + FuelCodePrefix.prefix, + FuelCode.carbon_intensity.label("fuel_code_carbon_intensity"), + ) + .join(FuelInstance, FuelInstance.fuel_type_id == FuelType.fuel_type_id) + .join( + FuelCategory, + FuelCategory.fuel_category_id == FuelInstance.fuel_category_id, + ) + .outerjoin( + ProvisionOfTheAct, + or_( + and_( + FuelType.fossil_derived == True, + ProvisionOfTheAct.provision_of_the_act_id == 1, + ), + and_( + FuelType.fossil_derived == False, + ProvisionOfTheAct.provision_of_the_act_id != 1, + ), + ), + ) + .outerjoin( + EnergyDensity, EnergyDensity.fuel_type_id == FuelType.fuel_type_id + ) + .outerjoin(UnitOfMeasure, EnergyDensity.uom_id == UnitOfMeasure.uom_id) + .outerjoin( + EnergyEffectivenessRatio, + and_( + EnergyEffectivenessRatio.fuel_category_id + == FuelCategory.fuel_category_id, + EnergyEffectivenessRatio.fuel_type_id == FuelInstance.fuel_type_id, + ), + ) + .outerjoin( + EndUseType, + EndUseType.end_use_type_id == EnergyEffectivenessRatio.end_use_type_id, + ) + .outerjoin( + TargetCarbonIntensity, + and_( + TargetCarbonIntensity.fuel_category_id + == FuelCategory.fuel_category_id, + TargetCarbonIntensity.compliance_period_id + == subquery_compliance_period_id, + ), + ) + .outerjoin( + FuelCode, + and_( + FuelCode.fuel_type_id == FuelType.fuel_type_id, + FuelCode.fuel_status_id == subquery_fuel_code_status_id, + ProvisionOfTheAct.provision_of_the_act_id + == subquery_provision_of_the_act_id, + FuelCode.expiration_date > start_of_previous_year, + FuelCode.effective_date <= start_of_this_year, + ), + ) + .outerjoin( + FuelCodePrefix, FuelCodePrefix.fuel_code_prefix_id == FuelCode.prefix_id + ) + ) + + fuel_type_results = (await self.db.execute(query)).all() + + return { + "fuel_types": fuel_type_results, + } + + @repo_handler + async def get_fuel_supply_list(self, compliance_report_id: int) -> List[FuelSupply]: + """ + Retrieve the list of effective fuel supplies for a given compliance report. 
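+
+        The report's group UUID is resolved first, then the work is delegated
+        to get_effective_fuel_supplies so that versioned and deleted rows
+        collapse to their effective state.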
+ """ + # Retrieve the compliance report's group UUID + report_group_query = await self.db.execute( + select(ComplianceReport.compliance_report_group_uuid).where( + ComplianceReport.compliance_report_id == compliance_report_id + ) + ) + group_uuid = report_group_query.scalar() + if not group_uuid: + return [] + + # Retrieve effective fuel supplies using the group UUID + effective_fuel_supplies = await self.get_effective_fuel_supplies( + compliance_report_group_uuid=group_uuid + ) + + return effective_fuel_supplies + + @repo_handler + async def get_fuel_supplies_paginated( + self, pagination: PaginationRequestSchema, compliance_report_id: int + ) -> List[FuelSupply]: + """ + Retrieve a paginated list of effective fuel supplies for a given compliance report. + """ + # Retrieve the compliance report's group UUID + report_group_query = await self.db.execute( + select(ComplianceReport.compliance_report_group_uuid).where( + ComplianceReport.compliance_report_id == compliance_report_id + ) + ) + group_uuid = report_group_query.scalar() + if not group_uuid: + return [], 0 + + # Retrieve effective fuel supplies using the group UUID + effective_fuel_supplies = await self.get_effective_fuel_supplies( + compliance_report_group_uuid=group_uuid + ) + + # Manually apply pagination + total_count = len(effective_fuel_supplies) + offset = 0 if pagination.page < 1 else (pagination.page - 1) * pagination.size + limit = pagination.size + paginated_supplies = effective_fuel_supplies[offset : offset + limit] + + return paginated_supplies, total_count + + @repo_handler + async def get_fuel_supply_by_id(self, fuel_supply_id: int) -> FuelSupply: + """ + Retrieve a fuel supply row from the database + """ + query = self.query.where(FuelSupply.fuel_supply_id == fuel_supply_id) + result = await self.db.execute(query) + return result.unique().scalar_one_or_none() + + @repo_handler + async def update_fuel_supply(self, fuel_supply: FuelSupply) -> FuelSupply: + """ + Update an existing fuel supply row in the database. + """ + fuel_supply = await self.db.merge(fuel_supply) + await self.db.flush() + await self.db.refresh( + fuel_supply, + [ + "fuel_category", + "fuel_type", + "fuel_code", + "provision_of_the_act", + "end_use_type", + ], + ) + return fuel_supply + + @repo_handler + async def create_fuel_supply(self, fuel_supply: FuelSupply) -> FuelSupply: + """ + Create a new fuel supply row in the database. + """ + self.db.add(fuel_supply) + await self.db.flush() + await self.db.refresh( + fuel_supply, + [ + "fuel_category", + "fuel_type", + "fuel_code", + "provision_of_the_act", + "end_use_type", + ], + ) + return fuel_supply + + @repo_handler + async def delete_fuel_supply(self, fuel_supply_id: int): + """Delete a fuel supply row from the database""" + await self.db.execute( + delete(FuelSupply).where(FuelSupply.fuel_supply_id == fuel_supply_id) + ) + await self.db.flush() + + @repo_handler + async def get_fuel_supplies(self, report_id: int) -> List[FuelSupply]: + """ + Retrieve the list of fuel supplies for a given report (compliance or supplemental). 
+ """ + query = select(FuelSupply).options( + joinedload(FuelSupply.fuel_code), + joinedload(FuelSupply.fuel_category), + joinedload(FuelSupply.fuel_type), + joinedload(FuelSupply.provision_of_the_act), + joinedload(FuelSupply.end_use_type), + ) + + query = query.where(FuelSupply.compliance_report_id == report_id) + + result = await self.db.execute(query) + return result.scalars().all() + + @repo_handler + async def check_duplicate(self, fuel_supply: FuelSupplyCreateUpdateSchema): + """Check if this would duplicate an existing row""" + + delete_group_subquery = ( + select(FuelSupply.group_uuid) + .where( + FuelSupply.compliance_report_id == fuel_supply.compliance_report_id, + FuelSupply.action_type == ActionTypeEnum.DELETE, + ) + .distinct() + ) + + ### Type, Category, and Determine CI/Fuel codes are included + query = select(FuelSupply.fuel_supply_id).where( + FuelSupply.compliance_report_id == fuel_supply.compliance_report_id, + FuelSupply.fuel_type_id == fuel_supply.fuel_type_id, + FuelSupply.fuel_category_id == fuel_supply.fuel_category_id, + FuelSupply.provision_of_the_act_id == fuel_supply.provision_of_the_act_id, + FuelSupply.fuel_code_id == fuel_supply.fuel_code_id, + FuelSupply.group_uuid != fuel_supply.group_uuid, + FuelSupply.end_use_id == fuel_supply.end_use_id, + FuelSupply.action_type == ActionTypeEnum.CREATE, + ~FuelSupply.group_uuid.in_(delete_group_subquery), + ( + FuelSupply.fuel_supply_id != fuel_supply.fuel_supply_id + if fuel_supply.fuel_supply_id is not None + else True + ), + ) + + result = await self.db.execute(query) + return result.scalars().first() + + @repo_handler + async def get_fuel_supply_version_by_user( + self, group_uuid: str, version: int, user_type: UserTypeEnum + ) -> Optional[FuelSupply]: + """ + Retrieve a specific FuelSupply record by group UUID, version, and user_type. + This method explicitly requires user_type to avoid ambiguity. + """ + query = select(FuelSupply).where( + FuelSupply.group_uuid == group_uuid, + FuelSupply.version == version, + FuelSupply.user_type == user_type, + ) + + result = await self.db.execute(query) + return result.scalars().first() + + @repo_handler + async def get_latest_fuel_supply_by_group_uuid( + self, group_uuid: str + ) -> Optional[FuelSupply]: + """ + Retrieve the latest FuelSupply record for a given group UUID. + Government records are prioritized over supplier records by ordering first by `user_type` + (with GOVERNMENT records coming first) and then by `version` in descending order. + """ + query = ( + select(FuelSupply) + .where(FuelSupply.group_uuid == group_uuid) + .order_by( + # FuelSupply.user_type == UserTypeEnum.SUPPLIER evaluates to False for GOVERNMENT, + # thus bringing GOVERNMENT records to the top in the ordered results. + FuelSupply.user_type == UserTypeEnum.SUPPLIER, + FuelSupply.version.desc(), + ) + ) + + result = await self.db.execute(query) + return result.scalars().first() + + @repo_handler + async def get_effective_fuel_supplies( + self, compliance_report_group_uuid: str + ) -> Sequence[FuelSupply]: + """ + Retrieve effective FuelSupply records associated with the given compliance_report_group_uuid. + For each group_uuid: + - Exclude the entire group if any record in the group is marked as DELETE. + - From the remaining groups, select the record with the highest version and highest priority. 
+ """ + # Step 1: Subquery to get all compliance_report_ids in the specified group + compliance_reports_select = select(ComplianceReport.compliance_report_id).where( + ComplianceReport.compliance_report_group_uuid + == compliance_report_group_uuid + ) + + # Step 2: Subquery to identify record group_uuids that have any DELETE action + delete_group_select = ( + select(FuelSupply.group_uuid) + .where( + FuelSupply.compliance_report_id.in_(compliance_reports_select), + FuelSupply.action_type == ActionTypeEnum.DELETE, + ) + .distinct() + ) + + # Step 3: Subquery to find the maximum version and priority per group_uuid, + # excluding groups with any DELETE action + user_type_priority = case( + (FuelSupply.user_type == UserTypeEnum.GOVERNMENT, 1), + (FuelSupply.user_type == UserTypeEnum.SUPPLIER, 0), + else_=0, + ) + + valid_fuel_supplies_select = ( + select( + FuelSupply.group_uuid, + func.max(FuelSupply.version).label("max_version"), + func.max(user_type_priority).label("max_role_priority"), + ) + .where( + FuelSupply.compliance_report_id.in_(compliance_reports_select), + FuelSupply.action_type != ActionTypeEnum.DELETE, + ~FuelSupply.group_uuid.in_(delete_group_select), + ) + .group_by(FuelSupply.group_uuid) + ) + # Now create a subquery for use in the JOIN + valid_fuel_supplies_subq = valid_fuel_supplies_select.subquery() + + # Step 4: Main query to retrieve FuelSupply records with necessary eager relationships + query = ( + select(FuelSupply) + .options( + # Use selectinload for collections + selectinload(FuelSupply.fuel_code).options( + selectinload(FuelCode.fuel_code_status), + selectinload(FuelCode.fuel_code_prefix), + ), + # Use selectinload for one-to-many relationships + selectinload(FuelSupply.fuel_category).options( + selectinload(FuelCategory.target_carbon_intensities), + selectinload(FuelCategory.energy_effectiveness_ratio), + ), + # Use joinedload for many-to-one relationships + joinedload(FuelSupply.fuel_type).options( + joinedload(FuelType.energy_density), + joinedload(FuelType.additional_carbon_intensity), + joinedload(FuelType.energy_effectiveness_ratio), + ), + # Use joinedload for single relationships + joinedload(FuelSupply.provision_of_the_act), + selectinload(FuelSupply.end_use_type), + ) + .join( + valid_fuel_supplies_subq, + and_( + FuelSupply.group_uuid == valid_fuel_supplies_subq.c.group_uuid, + FuelSupply.version == valid_fuel_supplies_subq.c.max_version, + user_type_priority == valid_fuel_supplies_subq.c.max_role_priority, + ), + isouter=False # Explicit inner join + ) + .order_by(FuelSupply.create_date.asc()) + ) + + # Step 5: Execute the query and retrieve results using unique() + result = await self.db.execute(query) + fuel_supplies = result.unique().scalars().all() + + return fuel_supplies diff --git a/backend/lcfs/web/api/fuel_supply/schema.py b/backend/lcfs/web/api/fuel_supply/schema.py new file mode 100644 index 000000000..60592dffe --- /dev/null +++ b/backend/lcfs/web/api/fuel_supply/schema.py @@ -0,0 +1,174 @@ +from enum import Enum +from typing import List, Optional + +from pydantic import Field, field_validator + +from lcfs.web.api.base import ( + BaseSchema, + FilterModel, + PaginationResponseSchema, + SortOrder, +) +from lcfs.web.api.fuel_code.schema import FuelCodeResponseSchema +from lcfs.web.api.fuel_type.schema import FuelTypeQuantityUnitsEnumSchema + + +class CommonPaginatedReportRequestSchema(BaseSchema): + compliance_report_id: int = Field(..., alias="complianceReportId") + filters: Optional[List[FilterModel]] = None + page: Optional[int] = None + 
size: Optional[int] = None + sort_orders: Optional[List[SortOrder]] = None + + +class FuelCategorySchema(BaseSchema): + fuel_category_id: int + fuel_category: str + default_and_prescribed_ci: Optional[float] = None + + +class ProvisionOfTheActSchema(BaseSchema): + provision_of_the_act_id: int + name: str + + +class UnitOfMeasureSchema(BaseSchema): + uom_id: int + name: str + + +class EnergyDensitySchema(BaseSchema): + energy_density_id: int + energy_density: float + unit: UnitOfMeasureSchema + + +class EndUseTypeSchema(BaseSchema): + end_use_type_id: int + type: str + sub_type: Optional[str] = None + + +class EnergyEffectivenessRatioSchema(BaseSchema): + eer_id: Optional[int] + fuel_category: FuelCategorySchema + end_use_type: Optional[EndUseTypeSchema] + energy_effectiveness_ratio: float + + +class TargetCarbonIntensitySchema(BaseSchema): + target_carbon_intensity_id: int + target_carbon_intensity: float + reduction_target_percentage: float + fuel_category: FuelCategorySchema + compliance_period: str + + +class FuelCodeSchema(BaseSchema): + fuel_code_id: int + fuel_code_prefix_id: int + fuel_code: str + fuel_code_carbon_intensity: float + + +class FuelTypeOptionsSchema(BaseSchema): + fuel_type_id: int + fuel_type: str + fossil_derived: bool + default_carbon_intensity: Optional[float] = None + unit: str + unrecognized: bool + energy_density: Optional[EnergyDensitySchema] + provisions: List[ProvisionOfTheActSchema] + fuel_categories: List[FuelCategorySchema] + eer_ratios: List[EnergyEffectivenessRatioSchema] + target_carbon_intensities: List[TargetCarbonIntensitySchema] + fuel_codes: Optional[List[FuelCodeSchema]] = [] + + +class FuelTypeOptionsResponse(BaseSchema): + fuel_types: List[FuelTypeOptionsSchema] + + +class FuelTypeSchema(BaseSchema): + fuel_type_id: int + fuel_type: str + fossil_derived: Optional[bool] = None + provision_1_id: Optional[int] = None + provision_2_id: Optional[int] = None + default_carbon_intensity: Optional[float] = None + units: FuelTypeQuantityUnitsEnumSchema + + @field_validator("default_carbon_intensity") + def quantize_default_carbon_intensity(cls, value): + return round(value, 2) + + +class FuelCategoryResponseSchema(BaseSchema): + fuel_category_id: Optional[int] = None + category: str + + +class FuelSupplyCreateUpdateSchema(BaseSchema): + compliance_report_id: int + fuel_supply_id: Optional[int] = None + group_uuid: Optional[str] = None + version: Optional[int] = None + compliance_period: Optional[str] = None + fuel_type_id: int + fuel_category_id: int + end_use_id: Optional[int] = None + provision_of_the_act_id: int + quantity: int + units: str + fuel_type_other: Optional[str] = None + fuel_code_id: Optional[int] = None + target_ci: Optional[float] = None + ci_of_fuel: Optional[float] = None + energy_density: Optional[float] = None + eer: Optional[float] = None + energy: Optional[float] = None + deleted: Optional[bool] = None + + class Config: + use_enum_values = True + + +class FuelSupplyResponseSchema(BaseSchema): + fuel_supply_id: int + compliance_report_id: int + group_uuid: str + version: int + user_type: str + action_type: str + fuel_type_id: int + fuel_type: FuelTypeSchema + fuel_category_id: Optional[int] = None + fuel_category: FuelCategoryResponseSchema + end_use_id: Optional[int] = None + end_use_type: Optional[EndUseTypeSchema] = None + provision_of_the_act_id: Optional[int] = None + provision_of_the_act: Optional[ProvisionOfTheActSchema] = None + compliance_period: Optional[str] = None + quantity: int + units: str + compliance_units: 
Optional[int] = None + target_ci: Optional[float] = None + ci_of_fuel: Optional[float] = None + uci: Optional[float] = None + energy_density: Optional[float] = None + eer: Optional[float] = None + energy: Optional[float] = None + fuel_code_id: Optional[int] = None + fuel_code: Optional[FuelCodeResponseSchema] = None + fuel_type_other: Optional[str] = None + + +class DeleteFuelSupplyResponseSchema(BaseSchema): + success: bool + message: str + + +class FuelSuppliesSchema(BaseSchema): + fuel_supplies: Optional[List[FuelSupplyResponseSchema]] = [] + pagination: Optional[PaginationResponseSchema] = {} diff --git a/backend/lcfs/web/api/fuel_supply/services.py b/backend/lcfs/web/api/fuel_supply/services.py new file mode 100644 index 000000000..67a2b7efa --- /dev/null +++ b/backend/lcfs/web/api/fuel_supply/services.py @@ -0,0 +1,267 @@ +import structlog +import math +from fastapi import Depends, Request, HTTPException + +from lcfs.web.api.base import PaginationRequestSchema, PaginationResponseSchema +from lcfs.web.api.fuel_code.repo import FuelCodeRepository +from lcfs.web.api.fuel_supply.schema import ( + EndUseTypeSchema, + EnergyDensitySchema, + EnergyEffectivenessRatioSchema, + FuelCategorySchema, + FuelCodeSchema, + FuelSuppliesSchema, + FuelSupplyResponseSchema, + FuelTypeOptionsResponse, + FuelTypeOptionsSchema, + ProvisionOfTheActSchema, + TargetCarbonIntensitySchema, + UnitOfMeasureSchema, + FuelSupplyCreateUpdateSchema, +) +from lcfs.web.api.fuel_supply.repo import FuelSupplyRepository +from lcfs.web.core.decorators import service_handler +from lcfs.web.utils.calculations import calculate_compliance_units +from lcfs.utils.constants import default_ci + +logger = structlog.get_logger(__name__) + + +class FuelSupplyServices: + def __init__( + self, + request: Request = None, + repo: FuelSupplyRepository = Depends(), + fuel_repo: FuelCodeRepository = Depends(), + ) -> None: + self.request = request + self.repo = repo + self.fuel_repo = fuel_repo + + def fuel_type_row_mapper(self, compliance_period, fuel_types, row): + column_names = row._fields + + row_data = dict(zip(column_names, row)) + + fuel_category = FuelCategorySchema( + fuel_category_id=row_data["fuel_category_id"], + fuel_category=row_data["category"], + default_and_prescribed_ci=( + round(row_data["default_carbon_intensity"], 2) + if row_data["fuel_type"] != "Other" + else default_ci.get(row_data["category"]) + ), + ) + provision = ProvisionOfTheActSchema( + provision_of_the_act_id=row_data["provision_of_the_act_id"], + name=row_data["provision_of_the_act"], + ) + end_use_type = ( + EndUseTypeSchema( + end_use_type_id=row_data["end_use_type_id"], + type=row_data["end_use_type"], + sub_type=row_data["end_use_sub_type"], + ) + if row_data["end_use_type_id"] + else None + ) + eer = EnergyEffectivenessRatioSchema( + eer_id=row_data["eer_id"], + energy_effectiveness_ratio=round(row_data["energy_effectiveness_ratio"], 2), + fuel_category=fuel_category, + end_use_type=end_use_type, + ) + tci = TargetCarbonIntensitySchema( + target_carbon_intensity_id=row_data["target_carbon_intensity_id"], + target_carbon_intensity=round(row_data["target_carbon_intensity"], 2), + reduction_target_percentage=round( + row_data["reduction_target_percentage"], 2 + ), + fuel_category=fuel_category, + compliance_period=compliance_period, + ) + fuel_code = ( + FuelCodeSchema( + fuel_code_id=row_data["fuel_code_id"], + fuel_code=row_data["prefix"] + row_data["fuel_suffix"], + fuel_code_prefix_id=row_data["fuel_code_prefix_id"], + 
fuel_code_carbon_intensity=round( + row_data["fuel_code_carbon_intensity"], 2 + ), + ) + if row_data["fuel_code_id"] + else None + ) + # Find the existing fuel type if it exists + existing_fuel_type = next( + (ft for ft in fuel_types if ft.fuel_type == row_data["fuel_type"]), None + ) + + if existing_fuel_type: + # Append to the existing fuel type's + ( + existing_fuel_type.fuel_categories.append(fuel_category) + if not next( + ( + fc + for fc in existing_fuel_type.fuel_categories + if fc.fuel_category == row_data["category"] + ), + None, + ) + else None + ) + # Only add provision if it's "Fuel code - section 19 (b) (i)" and fuel_code exists + if ( + row_data["provision_of_the_act"] == "Fuel code - section 19 (b) (i)" + and fuel_code + ) or row_data["provision_of_the_act"] != "Fuel code - section 19 (b) (i)": + ( + existing_fuel_type.provisions.append(provision) + if not next( + ( + p + for p in existing_fuel_type.provisions + if p.name == row_data["provision_of_the_act"] + ), + None, + ) + else None + ) + + ( + existing_fuel_type.eer_ratios.append(eer) + if not next( + ( + e + for e in existing_fuel_type.eer_ratios + if e.end_use_type == row_data["end_use_type"] + and e.fuel_category == fuel_category + ), + None, + ) + else None + ) + ( + existing_fuel_type.target_carbon_intensities.append(tci) + if not next( + ( + t + for t in existing_fuel_type.target_carbon_intensities + if t.fuel_category == fuel_category + and t.compliance_period == compliance_period + ), + None, + ) + else None + ) + ( + existing_fuel_type.fuel_codes.append(fuel_code) + if fuel_code + and not next( + ( + fc + for fc in existing_fuel_type.fuel_codes + if fc.fuel_code + == (row_data["prefix"] + row_data["fuel_suffix"]) + ), + None, + ) + else None + ) + else: + if row_data["energy_density_id"]: + unit = UnitOfMeasureSchema( + uom_id=row_data["uom_id"], + name=row_data["name"], + ) + energy_density = EnergyDensitySchema( + energy_density_id=row_data["energy_density_id"], + energy_density=round(row_data["energy_density"], 2), + unit=unit, + ) + # Only include provision if it's "Fuel code - section 19 (b) (i)" and fuel_code exists + provisions = ( + [provision] + if ( + row_data["provision_of_the_act"] == "Fuel code - section 19 (b) (i)" + and fuel_code + ) + or ( + row_data["provision_of_the_act"] != "Fuel code - section 19 (b) (i)" + ) + else [] + ) + # Create a new fuel type and append + fuel_type = FuelTypeOptionsSchema( + fuel_type_id=row_data["fuel_type_id"], + fuel_type=row_data["fuel_type"], + fossil_derived=row_data["fossil_derived"], + default_carbon_intensity=( + round(row_data["default_carbon_intensity"], 2) + if row_data["fuel_type"] != "Other" + else default_ci.get(row_data["category"]) + ), + unit=row_data["unit"].value, + energy_density=( + energy_density if row_data["energy_density_id"] else None + ), + fuel_categories=[fuel_category], + provisions=provisions, + eer_ratios=[eer], + target_carbon_intensities=[tci], + fuel_codes=[fuel_code] if fuel_code else [], + unrecognized=row_data["unrecognized"], + ) + fuel_types.append(fuel_type) + + @service_handler + async def get_fuel_supply_options( + self, compliance_period: str + ) -> FuelTypeOptionsResponse: + """Get fuel supply table options""" + fs_options = await self.repo.get_fuel_supply_table_options(compliance_period) + fuel_types = [] + for row in fs_options["fuel_types"]: + self.fuel_type_row_mapper(compliance_period, fuel_types, row) + + return FuelTypeOptionsResponse(fuel_types=fuel_types) + + @service_handler + async def get_fuel_supply_list( + 
self, compliance_report_id: int + ) -> FuelSuppliesSchema: + """Get fuel supply list for a compliance report""" + fuel_supply_models = await self.repo.get_fuel_supply_list(compliance_report_id) + fs_list = [ + FuelSupplyResponseSchema.model_validate(fs) for fs in fuel_supply_models + ] + return FuelSuppliesSchema(fuel_supplies=fs_list if fs_list else []) + + @service_handler + async def get_fuel_supplies_paginated( + self, pagination: PaginationRequestSchema, compliance_report_id: int + ): + """Get paginated fuel supply list for a compliance report""" + logger.info( + "Getting paginated fuel supply list", + compliance_report_id=compliance_report_id, + page=pagination.page, + size=pagination.size, + ) + fuel_supplies, total_count = await self.repo.get_fuel_supplies_paginated( + pagination, compliance_report_id + ) + return FuelSuppliesSchema( + pagination=PaginationResponseSchema( + page=pagination.page, + size=pagination.size, + total=total_count, + total_pages=( + math.ceil(total_count / pagination.size) if total_count > 0 else 0 + ), + ), + fuel_supplies=[ + FuelSupplyResponseSchema.model_validate(fs) for fs in fuel_supplies + ], + ) diff --git a/backend/lcfs/web/api/fuel_supply/validation.py b/backend/lcfs/web/api/fuel_supply/validation.py new file mode 100644 index 000000000..dc065e06a --- /dev/null +++ b/backend/lcfs/web/api/fuel_supply/validation.py @@ -0,0 +1,34 @@ +from fastapi import Depends +from fastapi.exceptions import RequestValidationError + +from lcfs.web.api.fuel_code.repo import FuelCodeRepository +from lcfs.web.api.fuel_supply.repo import FuelSupplyRepository +from lcfs.web.api.fuel_supply.schema import FuelSupplyCreateUpdateSchema + + +class FuelSupplyValidation: + def __init__( + self, + fs_repo: FuelSupplyRepository = Depends(FuelSupplyRepository), + fc_repo: FuelCodeRepository = Depends(FuelCodeRepository), + ): + self.fs_repo = fs_repo + self.fc_repo = fc_repo + + async def check_duplicate(self, fuel_supply: FuelSupplyCreateUpdateSchema): + return await self.fs_repo.check_duplicate(fuel_supply) + + async def validate_other(self, fuel_supply: FuelSupplyCreateUpdateSchema): + fuel_type = await self.fc_repo.get_fuel_type_by_id(fuel_supply.fuel_type_id) + + if fuel_type.unrecognized: + if not fuel_supply.fuel_type_other: + raise RequestValidationError( + [ + { + "loc": ("fuelTypeOther",), + "msg": "required when using Other", + "type": "value_error", + } + ] + ) diff --git a/backend/lcfs/web/api/fuel_supply/views.py b/backend/lcfs/web/api/fuel_supply/views.py new file mode 100644 index 000000000..3f5674408 --- /dev/null +++ b/backend/lcfs/web/api/fuel_supply/views.py @@ -0,0 +1,143 @@ +import structlog +from typing import Optional, Union + +from fastapi import APIRouter, Body, Depends, Request, Response, status, HTTPException +from starlette.responses import JSONResponse + +from lcfs.db.models.user.Role import RoleEnum +from lcfs.web.api.base import PaginationRequestSchema +from lcfs.web.api.compliance_report.validation import ComplianceReportValidation +from lcfs.web.api.fuel_supply.schema import ( + DeleteFuelSupplyResponseSchema, + FuelSuppliesSchema, + FuelSupplyCreateUpdateSchema, + FuelSupplyResponseSchema, + FuelTypeOptionsResponse, + CommonPaginatedReportRequestSchema, +) +from lcfs.web.api.fuel_supply.services import FuelSupplyServices +from lcfs.web.api.fuel_supply.validation import FuelSupplyValidation +from lcfs.web.api.fuel_supply.actions_service import FuelSupplyActionService +from lcfs.web.core.decorators import view_handler + +router = APIRouter() 
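+# Illustrative /save request body (assuming BaseSchema exposes camelCase
+# aliases, as the explicit aliases in the schemas above suggest):
+#   {"complianceReportId": 1, "fuelTypeId": 3, "fuelCategoryId": 2,
+#    "provisionOfTheActId": 1, "quantity": 10000, "units": "L"}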
+logger = structlog.get_logger(__name__)
+
+
+@router.get(
+    "/table-options",
+    response_model=FuelTypeOptionsResponse,
+    status_code=status.HTTP_200_OK,
+)
+@view_handler(["*"])
+async def get_fs_table_options(
+    request: Request, compliancePeriod: str, service: FuelSupplyServices = Depends()
+) -> FuelTypeOptionsResponse:
+    return await service.get_fuel_supply_options(compliancePeriod)
+
+
+@router.post(
+    "/list-all", response_model=FuelSuppliesSchema, status_code=status.HTTP_200_OK
+)
+@view_handler(["*"])
+async def get_fuel_supply(
+    request: Request,
+    request_data: CommonPaginatedReportRequestSchema = Body(...),
+    response: Response = None,
+    service: FuelSupplyServices = Depends(),
+    report_validate: ComplianceReportValidation = Depends(),
+) -> FuelSuppliesSchema:
+    """Endpoint to get the list of fuel supplies for a compliance report"""
+    compliance_report_id = request_data.compliance_report_id
+    await report_validate.validate_organization_access(compliance_report_id)
+    if hasattr(request_data, "page") and request_data.page is not None:
+        # Handle pagination.
+        pagination = PaginationRequestSchema(
+            page=request_data.page,
+            size=request_data.size,
+            sort_orders=request_data.sort_orders,
+            filters=request_data.filters,
+        )
+        return await service.get_fuel_supplies_paginated(
+            pagination, compliance_report_id
+        )
+    else:
+        return await service.get_fuel_supply_list(compliance_report_id)
+
+
+@router.post(
+    "/save",
+    response_model=Union[FuelSupplyResponseSchema, DeleteFuelSupplyResponseSchema],
+    status_code=status.HTTP_201_CREATED,
+)
+@view_handler([RoleEnum.SUPPLIER, RoleEnum.GOVERNMENT])
+async def save_fuel_supply_row(
+    request: Request,
+    response: Response,
+    request_data: FuelSupplyCreateUpdateSchema = Body(...),
+    action_service: FuelSupplyActionService = Depends(),
+    report_validate: ComplianceReportValidation = Depends(),
+    fs_validate: FuelSupplyValidation = Depends(),
+):
+    """Endpoint to save a single fuel supply row"""
+    compliance_report_id = request_data.compliance_report_id
+    await report_validate.validate_organization_access(compliance_report_id)
+
+    # Determine user type for record creation
+    current_user_type = request.user.user_type
+    if not current_user_type:
+        raise HTTPException(
+            status_code=403, detail="User does not have the required role."
+        )
+
+    if request_data.deleted:
+        # Delete existing fuel supply row using actions service
+        return await action_service.delete_fuel_supply(request_data, current_user_type)
+    else:
+        duplicate_id = await fs_validate.check_duplicate(request_data)
+        await fs_validate.validate_other(request_data)
+        if duplicate_id is not None:
+            duplicate_response = format_duplicate_error(duplicate_id)
+            return duplicate_response
+        if request_data.fuel_supply_id:
+            # Update existing fuel supply row using actions service
+            return await action_service.update_fuel_supply(
+                request_data, current_user_type
+            )
+        else:
+            # Create new fuel supply row using actions service
+            return await action_service.create_fuel_supply(
+                request_data, current_user_type
+            )
+
+
+def format_duplicate_error(duplicate_id: int):
+    return JSONResponse(
+        status_code=422,
+        content={
+            "message": "Validation failed",
+            "errors": [
+                {
+                    "fields": [
+                        "fuelCode",
+                        "fuelType",
+                        "fuelCategory",
+                        "provisionOfTheAct",
+                    ],
+                    "message": "There are duplicate fuel entries, please combine the quantity into a single value on one row.",
+                }
+            ],
+            "warnings": [
+                {
+                    "id": duplicate_id,
+                    "fields": [
+                        "fuelCode",
+                        "fuelType",
+                        "fuelCategory",
+                        "provisionOfTheAct",
+                    ],
+                    "message": "There are duplicate fuel entries, please combine the quantity into a single value on one row.",
+                }
+            ],
+        },
+    )
diff --git a/backend/lcfs/web/api/fuel_type/__init__.py b/backend/lcfs/web/api/fuel_type/__init__.py
new file mode 100644
index 000000000..4b727fadb
--- /dev/null
+++ b/backend/lcfs/web/api/fuel_type/__init__.py
@@ -0,0 +1,5 @@
+"""API for fuel types."""
+
+from lcfs.web.api.fuel_type.views import router
+
+__all__ = ["router"]
diff --git a/backend/lcfs/web/api/fuel_type/repo.py b/backend/lcfs/web/api/fuel_type/repo.py
new file mode 100644
index 000000000..489dcd690
--- /dev/null
+++ b/backend/lcfs/web/api/fuel_type/repo.py
@@ -0,0 +1,42 @@
+from logging import getLogger
+from typing import List
+
+from fastapi import Depends
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy import select, union_all
+
+from lcfs.db.dependencies import get_async_db_session
+from lcfs.db.models.compliance import FuelSupply, FuelExport, AllocationAgreement
+from lcfs.web.core.decorators import repo_handler
+
+logger = getLogger("fuel_type_repo")
+
+
+class FuelTypeRepository:
+    def __init__(self, db: AsyncSession = Depends(get_async_db_session)):
+        self.db = db
+
+    @repo_handler
+    async def get_fuel_type_others(self) -> List[str]:
+        """
+        Retrieve the free-form "other" fuel type values recorded in the system.
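+
+        Values entered on fuel supplies, fuel exports, and allocation
+        agreements are combined with union_all and de-duplicated via unique().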
+ """ + fuel_supply_query = select(FuelSupply.fuel_type_other).where( + FuelSupply.fuel_type_other.isnot(None) + ) + fuel_export_query = select(FuelExport.fuel_type_other).where( + FuelExport.fuel_type_other.isnot(None) + ) + allocation_agreement_query = select(AllocationAgreement.fuel_type_other).where( + AllocationAgreement.fuel_type_other.isnot(None) + ) + + # Use union_all to combine the queries + combined_query = union_all( + fuel_supply_query, fuel_export_query, allocation_agreement_query + ) + + # Now, you can execute the combined_query to get all results in a single list + result = (await self.db.execute(combined_query)).unique().scalars().all() + + return result diff --git a/backend/lcfs/web/api/fuel_type/schema.py b/backend/lcfs/web/api/fuel_type/schema.py new file mode 100644 index 000000000..1689804c1 --- /dev/null +++ b/backend/lcfs/web/api/fuel_type/schema.py @@ -0,0 +1,8 @@ +from enum import Enum + + +class FuelTypeQuantityUnitsEnumSchema(str, Enum): + Litres = "L" + Kilograms = "kg" + Kilowatt_hour = "kWh" + Cubic_metres = "m³" diff --git a/backend/lcfs/web/api/fuel_type/services.py b/backend/lcfs/web/api/fuel_type/services.py new file mode 100644 index 000000000..2b58084b7 --- /dev/null +++ b/backend/lcfs/web/api/fuel_type/services.py @@ -0,0 +1,22 @@ +from logging import getLogger +from fastapi import Depends, Request +from typing import List +from lcfs.web.core.decorators import service_handler +from lcfs.web.api.fuel_type.repo import FuelTypeRepository + +logger = getLogger(__name__) + + +class FuelTypeServices: + def __init__( + self, + request: Request = None, + repo: FuelTypeRepository = Depends(), + ) -> None: + self.request = request + self.repo = repo + + @service_handler + async def get_fuel_type_others(self) -> List[str]: + """Get fuel type others""" + return await self.repo.get_fuel_type_others() diff --git a/backend/lcfs/web/api/fuel_type/views.py b/backend/lcfs/web/api/fuel_type/views.py new file mode 100644 index 000000000..5bb65501c --- /dev/null +++ b/backend/lcfs/web/api/fuel_type/views.py @@ -0,0 +1,27 @@ +from logging import getLogger +from typing import List + +from fastapi import ( + APIRouter, + status, + Request, + Depends, +) + +from lcfs.db import dependencies +from lcfs.web.core.decorators import view_handler +from lcfs.web.api.fuel_type.services import FuelTypeServices + +router = APIRouter() +logger = getLogger("fuel_type_view") +get_async_db = dependencies.get_async_db_session + + +@router.get("/others/list", response_model=List[str], status_code=status.HTTP_200_OK) +@view_handler(["*"]) +async def get_fuel_type_others( + request: Request, + service: FuelTypeServices = Depends(), +): + """Endpoint to get list of fuel type others""" + return await service.get_fuel_type_others() diff --git a/backend/lcfs/web/api/initiative_agreement/__init__.py b/backend/lcfs/web/api/initiative_agreement/__init__.py new file mode 100644 index 000000000..66db04cc6 --- /dev/null +++ b/backend/lcfs/web/api/initiative_agreement/__init__.py @@ -0,0 +1,5 @@ +"""User API.""" + +from lcfs.web.api.initiative_agreement.views import router + +__all__ = ["router"] diff --git a/backend/lcfs/web/api/initiative_agreement/repo.py b/backend/lcfs/web/api/initiative_agreement/repo.py new file mode 100644 index 000000000..26a5b3e76 --- /dev/null +++ b/backend/lcfs/web/api/initiative_agreement/repo.py @@ -0,0 +1,162 @@ +from fastapi import Depends +from datetime import datetime +from typing import Optional +from sqlalchemy import select, and_ +from sqlalchemy.orm import selectinload 
+from sqlalchemy.ext.asyncio import AsyncSession +from lcfs.web.exception.exceptions import DataNotFoundException +from lcfs.db.models.initiative_agreement.InitiativeAgreement import InitiativeAgreement +from lcfs.db.models.initiative_agreement.InitiativeAgreementStatus import ( + InitiativeAgreementStatus, +) +from lcfs.db.models.initiative_agreement.InitiativeAgreementHistory import ( + InitiativeAgreementHistory, +) +from lcfs.web.api.initiative_agreement.schema import InitiativeAgreementCreateSchema + +from lcfs.db.dependencies import get_async_db_session +from lcfs.web.core.decorators import repo_handler + + +class InitiativeAgreementRepository: + def __init__(self, db: AsyncSession = Depends(get_async_db_session)): + self.db = db + + @repo_handler + async def get_initiative_agreement_by_id( + self, initiative_agreement_id: int + ) -> Optional[InitiativeAgreement]: + query = ( + select(InitiativeAgreement) + .options( + selectinload(InitiativeAgreement.to_organization), + selectinload(InitiativeAgreement.current_status), + selectinload(InitiativeAgreement.history).selectinload( + InitiativeAgreementHistory.user_profile + ), + selectinload(InitiativeAgreement.history).selectinload( + InitiativeAgreementHistory.initiative_agreement_status + ), + ) + .where( + InitiativeAgreement.initiative_agreement_id == initiative_agreement_id + ) + ) + result = await self.db.execute(query) + return result.scalars().first() + + @repo_handler + async def create_initiative_agreement( + self, initiative_agreement: InitiativeAgreement + ) -> InitiativeAgreement: + self.db.add(initiative_agreement) + await self.db.flush() + await self.db.refresh( + initiative_agreement, + [ + "to_organization", + "current_status", + "history", + ], + ) # Ensures that all specified relations are up-to-date + return initiative_agreement + + @repo_handler + async def update_initiative_agreement( + self, initiative_agreement: InitiativeAgreement + ) -> InitiativeAgreement: + merged_initiative_agreement = await self.db.merge(initiative_agreement) + await self.db.flush() + return merged_initiative_agreement + + @repo_handler + async def get_initiative_agreement_status_by_name( + self, status_name: str + ) -> InitiativeAgreementStatus: + query = await self.db.execute( + select(InitiativeAgreementStatus).where( + InitiativeAgreementStatus.status == status_name + ) + ) + status = query.scalars().first() + + if not status: + raise DataNotFoundException( + f"Initiative Agreement status '{status_name}' not found" + ) + + return status + + @repo_handler + async def add_initiative_agreement_history( + self, + initiative_agreement_id: int, + initiative_agreement_status_id: int, + user_profile_id: int, + ) -> InitiativeAgreementHistory: + """ + Adds a new record to the initiative agreement history in the database. + + Args: + initiative_agreement_id (int): The ID of the initiative agreement to which this history record relates. + initiative_agreement_status_id (int): The status ID that describes the current state of the initiative agreement. + user_profile_id (int): The ID of the user who made the change. + + Returns: + InitiativeAgreementHistory: The newly created initiative agreement history record. 
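+
+        The record is flushed rather than committed; the surrounding unit of
+        work controls the transaction boundary.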
+ """ + new_history_record = InitiativeAgreementHistory( + initiative_agreement_id=initiative_agreement_id, + initiative_agreement_status_id=initiative_agreement_status_id, + user_profile_id=user_profile_id, + ) + self.db.add(new_history_record) + await self.db.flush() + return new_history_record + + @repo_handler + async def update_initiative_agreement_history( + self, + initiative_agreement_id: int, + initiative_agreement_status_id: int, + user_profile_id: int, + ) -> InitiativeAgreementHistory: + """ + Updates an initiative agreement history record in the database. + + Args: + initiative_agreement_id (int): The ID of the initiative agreement to which this history record relates. + initiative_agreement_status_id (int): The status ID that describes the current state of the initiative agreement. + user_profile_id (int): The ID of the user who made the change. + + Returns: + InitiativeAgreementHistory: The updated initiative agreement history record. + """ + existing_history = await self.db.scalar( + select(InitiativeAgreementHistory).where( + and_( + InitiativeAgreementHistory.initiative_agreement_id + == initiative_agreement_id, + InitiativeAgreementHistory.initiative_agreement_status_id + == initiative_agreement_status_id, + ) + ) + ) + existing_history.create_date = datetime.now() + existing_history.update_date = datetime.now() + existing_history.user_profile_id = user_profile_id + self.db.add(existing_history) + await self.db.flush() + return existing_history + + @repo_handler + async def refresh_initiative_agreement( + self, initiative_agreement: InitiativeAgreement + ) -> InitiativeAgreement: + """ + Commits and refreshes an initiative agreement object in db session + + """ + await self.db.flush() + await self.db.refresh(initiative_agreement) + return initiative_agreement diff --git a/backend/lcfs/web/api/initiative_agreement/schema.py b/backend/lcfs/web/api/initiative_agreement/schema.py new file mode 100644 index 000000000..c435b0806 --- /dev/null +++ b/backend/lcfs/web/api/initiative_agreement/schema.py @@ -0,0 +1,72 @@ +from lcfs.web.api.base import BaseSchema +from typing import Optional, List +from datetime import date, datetime +from pydantic import field_validator + + +class InitiativeAgreementStatusSchema(BaseSchema): + initiative_agreement_status_id: int + status: str + + class Config: + from_attributes = True + + +class HistoryUserSchema(BaseSchema): + first_name: str + last_name: str + + class Config: + from_attributes = True + + +class OrganizationSchema(BaseSchema): + organization_id: int + name: str + + class Config: + from_attributes = True + + +class InitiativeAgreementHistorySchema(BaseSchema): + create_date: datetime + initiative_agreement_status: InitiativeAgreementStatusSchema + user_profile: HistoryUserSchema + + class Config: + from_attributes = True + + +class InitiativeAgreementBaseSchema(BaseSchema): + compliance_units: int + current_status: InitiativeAgreementStatusSchema + transaction_effective_date: Optional[date] = None + to_organization_id: int + gov_comment: Optional[str] = None + internal_comment: Optional[str] = None + + @field_validator("compliance_units") + def validate_compliance_units(cls, v): + if v <= 0: + raise ValueError("compliance_units must be positive") + return v + + class Config: + from_attributes = True + + +class InitiativeAgreementSchema(InitiativeAgreementBaseSchema): + initiative_agreement_id: int + to_organization: OrganizationSchema + history: Optional[List[InitiativeAgreementHistorySchema]] + returned: Optional[bool] = False + 
+    create_date: datetime
+
+
+class InitiativeAgreementCreateSchema(InitiativeAgreementBaseSchema):
+    current_status: str
+
+
+class InitiativeAgreementUpdateSchema(InitiativeAgreementBaseSchema):
+    initiative_agreement_id: int
+    current_status: str
diff --git a/backend/lcfs/web/api/initiative_agreement/services.py b/backend/lcfs/web/api/initiative_agreement/services.py new file mode 100644 index 000000000..c9cb3c4de --- /dev/null +++ b/backend/lcfs/web/api/initiative_agreement/services.py @@ -0,0 +1,243 @@
+import json
+from lcfs.web.api.notification.schema import (
+    INITIATIVE_AGREEMENT_STATUS_NOTIFICATION_MAPPER,
+    NotificationMessageSchema,
+    NotificationRequestSchema,
+)
+from lcfs.web.api.notification.services import NotificationService
+import structlog
+from datetime import datetime
+from fastapi import Depends, Request, HTTPException
+from lcfs.db.models.initiative_agreement.InitiativeAgreement import InitiativeAgreement
+from lcfs.db.models.initiative_agreement.InitiativeAgreementStatus import (
+    InitiativeAgreementStatusEnum,
+)
+from lcfs.db.models.user.Role import RoleEnum
+from lcfs.web.api.initiative_agreement.schema import (
+    InitiativeAgreementCreateSchema,
+    InitiativeAgreementSchema,
+    InitiativeAgreementUpdateSchema,
+)
+from lcfs.web.api.initiative_agreement.repo import InitiativeAgreementRepository
+from lcfs.web.exception.exceptions import DataNotFoundException
+from lcfs.web.core.decorators import service_handler
+from lcfs.web.api.role.schema import user_has_roles
+from lcfs.db.models.transaction.Transaction import TransactionActionEnum
+from lcfs.web.api.organizations.services import OrganizationsService
+from lcfs.web.api.internal_comment.services import InternalCommentService
+from lcfs.web.api.internal_comment.schema import (
+    InternalCommentCreateSchema,
+    AudienceScopeEnum,
+    EntityTypeEnum,
+)
+
+logger = structlog.get_logger(__name__)
+
+
+class InitiativeAgreementServices:
+    def __init__(
+        self,
+        repo: InitiativeAgreementRepository = Depends(InitiativeAgreementRepository),
+        org_service: OrganizationsService = Depends(OrganizationsService),
+        internal_comment_service: InternalCommentService = Depends(
+            InternalCommentService
+        ),
+        notfn_service: NotificationService = Depends(NotificationService),
+        request: Request = None,
+    ) -> None:
+        self.repo = repo
+        self.org_service = org_service
+        self.internal_comment_service = internal_comment_service
+        self.request = request
+        self.notfn_service = notfn_service
+
+    @service_handler
+    async def get_initiative_agreement(
+        self, initiative_agreement_id: int
+    ) -> InitiativeAgreementSchema:
+        """Fetch an initiative agreement by its ID."""
+        initiative_agreement = await self.repo.get_initiative_agreement_by_id(
+            initiative_agreement_id
+        )
+        return InitiativeAgreementSchema.from_orm(initiative_agreement)
+
+    @service_handler
+    # Note: the route passes an InitiativeAgreementUpdateSchema, which carries
+    # the initiative_agreement_id accessed below, so annotate it as such.
+    async def update_initiative_agreement(
+        self, initiative_agreement_data: InitiativeAgreementUpdateSchema
+    ) -> InitiativeAgreementSchema:
+        """Update an existing initiative agreement."""
+        # Fetch the existing initiative agreement
+        initiative_agreement = await self.repo.get_initiative_agreement_by_id(
+            initiative_agreement_data.initiative_agreement_id
+        )
+        if not initiative_agreement:
+            raise DataNotFoundException(
+                f"Initiative Agreement with id {initiative_agreement_data.initiative_agreement_id} not found"
+            )
+
+        # Fetch the new status
+        new_status = await self.repo.get_initiative_agreement_status_by_name(
+            initiative_agreement_data.current_status
+        )
+        status_has_changed = initiative_agreement.current_status != new_status
+
+        # Update the fields except for 'current_status'
+        # (model_dump is the pydantic v2 replacement for the deprecated .dict())
+        for field, value in initiative_agreement_data.model_dump(
+            exclude_unset=True
+        ).items():
+            if field != "current_status":
+                setattr(initiative_agreement, field, value)
+
+        # Initialize status flags
+        returned, re_recommended = False, False
+
+        if status_has_changed:
+            initiative_agreement.current_status = new_status
+
+            # Issue compliance units by Director if status is approved
+            if new_status.status == InitiativeAgreementStatusEnum.Approved:
+                await self.director_approve_initiative_agreement(initiative_agreement)
+
+            # Check previous recommended status
+            previous_recommended = any(
+                history.initiative_agreement_status.status
+                == InitiativeAgreementStatusEnum.Recommended
+                for history in initiative_agreement.history
+            )
+
+            if previous_recommended:
+                if new_status.status == InitiativeAgreementStatusEnum.Draft:
+                    returned = True
+                elif new_status.status == InitiativeAgreementStatusEnum.Recommended:
+                    re_recommended = True
+
+            # Update or add history record based on status flags
+            history_method = (
+                self.repo.update_initiative_agreement_history
+                if re_recommended
+                else self.repo.add_initiative_agreement_history
+            )
+            # We only track history changes on Recommended and Approved, not Draft
+            if new_status.status != InitiativeAgreementStatusEnum.Draft:
+                await history_method(
+                    initiative_agreement.initiative_agreement_id,
+                    new_status.initiative_agreement_status_id,
+                    self.request.user.user_profile_id,
+                )
+
+        # Save the updated initiative agreement
+        updated_initiative_agreement = await self.repo.update_initiative_agreement(
+            initiative_agreement
+        )
+
+        # Return the updated initiative agreement schema with the returned status flag
+        ia_schema = InitiativeAgreementSchema.from_orm(updated_initiative_agreement)
+        ia_schema.returned = returned
+        await self._perform_notification_call(updated_initiative_agreement, returned)
+        return ia_schema
+
+    @service_handler
+    async def create_initiative_agreement(
+        self, initiative_agreement_data: InitiativeAgreementCreateSchema
+    ) -> InitiativeAgreementSchema:
+        """
+        Handles creating an initiative agreement, including creating a comment (if provided).
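+
+        A minimal usage sketch (field values below are hypothetical):
+            data = InitiativeAgreementCreateSchema(
+                compliance_units=100,
+                current_status="Recommended",
+                to_organization_id=1,
+            )
+            agreement = await service.create_initiative_agreement(data)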
+ """ + current_status = await self.repo.get_initiative_agreement_status_by_name( + initiative_agreement_data.current_status + ) + + initiative_agreement = InitiativeAgreement( + **initiative_agreement_data.model_dump( + exclude={"current_status", "internal_comment"} + ) + ) + + initiative_agreement.current_status = current_status + + initiative_agreement = await self.repo.create_initiative_agreement( + initiative_agreement + ) + + # If we skip draft on create and recommend then add history record + if current_status.status == InitiativeAgreementStatusEnum.Recommended: + await self.repo.add_initiative_agreement_history( + initiative_agreement.initiative_agreement_id, + current_status.initiative_agreement_status_id, + self.request.user.user_profile_id, + ) + + # Create internal comment if provided + if initiative_agreement_data.internal_comment: + internal_comment_data = InternalCommentCreateSchema( + entity_type=EntityTypeEnum.INITIATIVE_AGREEMENT, + entity_id=initiative_agreement.initiative_agreement_id, + comment=initiative_agreement_data.internal_comment, + audience_scope=AudienceScopeEnum.ANALYST, + ) + await self.internal_comment_service.create_internal_comment( + internal_comment_data + ) + await self._perform_notification_call(initiative_agreement) + return initiative_agreement + + async def director_approve_initiative_agreement( + self, initiative_agreement: InitiativeAgreement + ): + """Create ledger transaction for approved initiative agreement""" + + user = self.request.user + has_director_role = user_has_roles(user, [RoleEnum.DIRECTOR]) + + if not has_director_role: + logger.error( + "Non-Director tried to approve Agreement", + initiative_agreement_id=initiative_agreement.initiative_agreement_id, + ) + raise HTTPException(status_code=403, detail="Forbidden.") + + if initiative_agreement.transaction != None: + raise HTTPException(status_code=422, detail="Transaction already exists.") + + # Create new transaction for receiving organization + to_transaction = await self.org_service.adjust_balance( + transaction_action=TransactionActionEnum.Adjustment, + compliance_units=initiative_agreement.compliance_units, + organization_id=initiative_agreement.to_organization_id, + ) + initiative_agreement.transaction = to_transaction + + # Set effective date to today if the analyst left it blank + if initiative_agreement.transaction_effective_date is None: + initiative_agreement.transaction_effective_date = datetime.now().date() + + await self.repo.refresh_initiative_agreement(initiative_agreement) + + async def _perform_notification_call(self, ia, returned=False): + """Send notifications based on the current status of the transfer.""" + status = ia.current_status.status if not returned else "Return to analyst" + status_val = ( + status.value + if isinstance(status, InitiativeAgreementStatusEnum) + else status + ).lower() + notifications = INITIATIVE_AGREEMENT_STATUS_NOTIFICATION_MAPPER.get( + status, None + ) + message_data = { + "service": "InitiativeAgreement", + "id": ia.initiative_agreement_id, + "transactionId": ia.transaction_id, + "status": status_val, + } + notification_data = NotificationMessageSchema( + type=f"Initiative agreement {status_val}", + related_transaction_id=f"IA{ia.initiative_agreement_id}", + message=json.dumps(message_data), + related_organization_id=ia.to_organization_id, + origin_user_profile_id=self.request.user.user_profile_id, + ) + if notifications and isinstance(notifications, list): + await self.notfn_service.send_notification( + NotificationRequestSchema( + 
notification_types=notifications, + notification_data=notification_data, + ) + ) diff --git a/backend/lcfs/web/api/initiative_agreement/validation.py b/backend/lcfs/web/api/initiative_agreement/validation.py new file mode 100644 index 000000000..b8756ee24 --- /dev/null +++ b/backend/lcfs/web/api/initiative_agreement/validation.py @@ -0,0 +1,69 @@ +from fastapi import Depends, HTTPException, Request +from lcfs.db.models.user.Role import RoleEnum +from lcfs.web.api.initiative_agreement.schema import ( + InitiativeAgreementSchema, + InitiativeAgreementCreateSchema, +) +from lcfs.web.api.initiative_agreement.services import InitiativeAgreementServices +from lcfs.db.models.initiative_agreement.InitiativeAgreementStatus import ( + InitiativeAgreementStatusEnum, +) +from lcfs.web.api.role.schema import user_has_roles +from starlette import status + + +class InitiativeAgreementValidation: + def __init__( + self, + request: Request = None, + service: InitiativeAgreementServices = Depends(InitiativeAgreementServices), + ) -> None: + self.request = request + self.service = service + + async def validate_initiative_agreement_create( + self, request, initiative_agreement: InitiativeAgreementCreateSchema + ): + pass + + async def validate_initiative_agreement_update( + self, request, updated_data: InitiativeAgreementSchema + ): + # Retrieve the current initiative agreement from the database + initiative_agreement = await self.service.get_initiative_agreement( + updated_data.initiative_agreement_id + ) + + # Check if the current status is "Approved" + if ( + initiative_agreement.current_status.status + == InitiativeAgreementStatusEnum.Approved + ): + raise HTTPException( + status_code=403, + detail="Editing an approved initiative agreement is not allowed.", + ) + + async def validate_organization_access(self, initiative_agreement_id: int): + initiative_agreement = await self.service.get_initiative_agreement(initiative_agreement_id) + if not initiative_agreement: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Transaction not found.", + ) + + organization_id = initiative_agreement.to_organization.organization_id + user_organization_id = ( + self.request.user.organization.organization_id + if self.request.user.organization + else None + ) + + if ( + not user_has_roles(self.request.user, [RoleEnum.GOVERNMENT]) + and organization_id != user_organization_id + ): + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="User does not have access to this transaction.", + ) diff --git a/backend/lcfs/web/api/initiative_agreement/views.py b/backend/lcfs/web/api/initiative_agreement/views.py new file mode 100644 index 000000000..9fb4e384c --- /dev/null +++ b/backend/lcfs/web/api/initiative_agreement/views.py @@ -0,0 +1,59 @@ +from fastapi import APIRouter, Depends, Request, status +from lcfs.web.api.initiative_agreement.services import InitiativeAgreementServices +from lcfs.web.api.initiative_agreement.schema import ( + InitiativeAgreementCreateSchema, + InitiativeAgreementSchema, + InitiativeAgreementUpdateSchema, +) +from lcfs.web.api.initiative_agreement.validation import InitiativeAgreementValidation +from lcfs.web.core.decorators import view_handler +from lcfs.db.models.user.Role import RoleEnum + +router = APIRouter() + + +@router.get("/{initiative_agreement_id}", response_model=InitiativeAgreementSchema) +@view_handler(["*"]) +async def get_initiative_agreement( + request: Request, + initiative_agreement_id: int, + service: InitiativeAgreementServices = Depends(), + 
validate: InitiativeAgreementValidation = Depends(), +): + """Endpoint to fetch an initiative agreement by its ID.""" + await validate.validate_organization_access(initiative_agreement_id) + return await service.get_initiative_agreement(initiative_agreement_id) + + +@router.put( + "/", response_model=InitiativeAgreementSchema, status_code=status.HTTP_200_OK +) +@view_handler([RoleEnum.GOVERNMENT]) +async def update_initiative_agreement( + request: Request, + initiative_agreement_data: InitiativeAgreementUpdateSchema = ..., + service: InitiativeAgreementServices = Depends(), + validate: InitiativeAgreementValidation = Depends(), +): + """Endpoint to update an existing initiative agreement.""" + await validate.validate_initiative_agreement_update( + request, initiative_agreement_data + ) + return await service.update_initiative_agreement(initiative_agreement_data) + + +@router.post( + "/", response_model=InitiativeAgreementSchema, status_code=status.HTTP_201_CREATED +) +@view_handler([RoleEnum.GOVERNMENT]) +async def create_initiative_agreement( + request: Request, + initiative_agreement_create: InitiativeAgreementCreateSchema = ..., + service: InitiativeAgreementServices = Depends(), + validate: InitiativeAgreementValidation = Depends(), +): + """Endpoint to create a new initiative agreement.""" + await validate.validate_initiative_agreement_create( + request, initiative_agreement_create + ) + return await service.create_initiative_agreement(initiative_agreement_create) diff --git a/backend/lcfs/web/api/internal_comment/__init__.py b/backend/lcfs/web/api/internal_comment/__init__.py new file mode 100644 index 000000000..d3518b222 --- /dev/null +++ b/backend/lcfs/web/api/internal_comment/__init__.py @@ -0,0 +1,5 @@ +"""API for internal comments.""" + +from lcfs.web.api.internal_comment.views import router + +__all__ = ["router"] diff --git a/backend/lcfs/web/api/internal_comment/repo.py b/backend/lcfs/web/api/internal_comment/repo.py new file mode 100644 index 000000000..fc66c6863 --- /dev/null +++ b/backend/lcfs/web/api/internal_comment/repo.py @@ -0,0 +1,246 @@ +import structlog +from typing import List + +from fastapi import Depends +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import select, desc + +from lcfs.web.exception.exceptions import DataNotFoundException +from lcfs.db.dependencies import get_async_db_session +from lcfs.web.core.decorators import repo_handler + +from lcfs.web.api.user.repo import UserRepository +from lcfs.db.models.user.UserProfile import UserProfile +from lcfs.db.models.comment.InternalComment import InternalComment +from lcfs.web.api.internal_comment.schema import EntityTypeEnum +from lcfs.db.models.comment.TransferInternalComment import TransferInternalComment +from lcfs.db.models.comment.InitiativeAgreementInternalComment import InitiativeAgreementInternalComment +from lcfs.db.models.comment.AdminAdjustmentInternalComment import AdminAdjustmentInternalComment +from lcfs.db.models.comment.ComplianceReportInternalComment import ComplianceReportInternalComment + + +logger = structlog.get_logger(__name__) + + +class InternalCommentRepository: + """ + Repository class for internal comments + """ + + def __init__( + self, + db: AsyncSession = Depends(get_async_db_session), + user_repo: UserRepository = Depends(), + ): + """ + Initializes the repository with an asynchronous database session. + + Args: + db (AsyncSession): Injected database session for executing asynchronous queries. 
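+
+        Outside FastAPI's dependency injection the repository can be constructed
+        directly (a sketch; assumes an existing AsyncSession and UserRepository):
+            repo = InternalCommentRepository(db=session, user_repo=user_repo)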
+ """ + self.db = db + self.user_repo = user_repo + + @repo_handler + async def create_internal_comment( + self, + internal_comment: InternalComment, + entity_type: EntityTypeEnum, + entity_id: int, + ): + """ + Creates a new internal comment and associates it with a transfer, initiative agreement or admin adjustment entity. + + Args: + internal_comment (InternalComment): The internal comment object to be added to the database. + entity_type (str): The type of entity the comment is associated with. + entity_id (int): The ID of the entity the comment is associated with. + + Returns: + InternalComment: The newly created internal comment with additional information such as 'full_name'. + """ + self.db.add(internal_comment) + await self.db.flush() + await self.db.refresh(internal_comment) + + # Determine and create the appropriate entity association based on the provided entity type + if entity_type == EntityTypeEnum.TRANSFER: + association = TransferInternalComment( + transfer_id=entity_id, + internal_comment_id=internal_comment.internal_comment_id, + ) + elif entity_type == EntityTypeEnum.INITIATIVE_AGREEMENT: + association = InitiativeAgreementInternalComment( + initiative_agreement_id=entity_id, + internal_comment_id=internal_comment.internal_comment_id, + ) + elif entity_type == EntityTypeEnum.ADMIN_ADJUSTMENT: + association = AdminAdjustmentInternalComment( + admin_adjustment_id=entity_id, + internal_comment_id=internal_comment.internal_comment_id, + ) + elif entity_type == EntityTypeEnum.COMPLIANCE_REPORT: + association = ComplianceReportInternalComment( + compliance_report_id=entity_id, + internal_comment_id=internal_comment.internal_comment_id, + ) + + # Add the association to the session and commit + self.db.add(association) + + # Update the internal comment with the creator's full name. + internal_comment.full_name = await self.user_repo.get_full_name( + internal_comment.create_user + ) + + await self.db.flush() + await self.db.refresh(internal_comment) + return internal_comment + + @repo_handler + async def get_internal_comments( + self, entity_type: EntityTypeEnum, entity_id: int + ) -> List[dict]: + """ + Retrieves a list of internal comments associated with a specific entity, + along with the full name of the user who created each comment. + + Args: + entity_type (EntityTypeEnum): The type of entity to retrieve comments for. + entity_id (int): The ID of the entity to retrieve comments for. + + Returns: + List[Dict[str, any]]: A list of dictionaries containing internal comment + information and the full name of the comment creator. + + Raises: + DataNotFoundException: If no comments are found for the given entity. 
+ """ + # Mapping of entity types to their respective models and where conditions + entity_mapping = { + EntityTypeEnum.TRANSFER: ( + TransferInternalComment, + TransferInternalComment.transfer_id, + ), + EntityTypeEnum.INITIATIVE_AGREEMENT: ( + InitiativeAgreementInternalComment, + InitiativeAgreementInternalComment.initiative_agreement_id, + ), + EntityTypeEnum.ADMIN_ADJUSTMENT: ( + AdminAdjustmentInternalComment, + AdminAdjustmentInternalComment.admin_adjustment_id, + ), + EntityTypeEnum.COMPLIANCE_REPORT: ( + ComplianceReportInternalComment, + ComplianceReportInternalComment.compliance_report_id, + ), + } + + # Get the specific model and where condition for the given entity_type + entity_model, where_condition = entity_mapping[entity_type] + + # Construct the base query + base_query = ( + select( + InternalComment, + (UserProfile.first_name + " " + UserProfile.last_name).label( + "full_name" + ), + ) + .join( + entity_model, + entity_model.internal_comment_id == InternalComment.internal_comment_id, + ) + .join( + UserProfile, + UserProfile.keycloak_username == InternalComment.create_user, + ) + .where(where_condition == entity_id) + .order_by(desc(InternalComment.internal_comment_id)) + ) + + # Execute the query + results = await self.db.execute(base_query) + + # Compile and return the list of internal comments with user info + comments_with_user_info = [ + { + "internal_comment_id": internal_comment.internal_comment_id, + "comment": internal_comment.comment, + "audience_scope": internal_comment.audience_scope, + "create_user": internal_comment.create_user, + "create_date": internal_comment.create_date, + "update_date": internal_comment.update_date, + "full_name": full_name, + } + for internal_comment, full_name in results + ] + + return comments_with_user_info + + @repo_handler + async def get_internal_comment_by_id( + self, internal_comment_id: int + ) -> InternalComment: + """ + Fetches an internal comment by its ID. + + Args: + internal_comment_id (int): The ID of the internal comment. + + Returns: + InternalComment: The internal comment object if found. + + Raises: + DataNotFoundException: If no internal comment with the given ID is found. + """ + base_query = select(InternalComment).where( + InternalComment.internal_comment_id == internal_comment_id + ) + result = await self.db.execute(base_query) + internal_comment = result.scalars().first() + + if not internal_comment: + raise DataNotFoundException( + f"Internal comment with ID {internal_comment_id} not found." + ) + + return internal_comment + + @repo_handler + async def update_internal_comment( + self, internal_comment_id: int, new_comment_text: str + ) -> InternalComment: + """ + Updates the text of an existing internal comment. + + Args: + internal_comment_id (int): The ID of the internal comment to update. + new_comment_text (str): The new text for the comment. + + Returns: + InternalComment: The updated internal comment object. + + Raises: + DataNotFoundException: If no internal comment matching the criteria is found. + """ + statement = select(InternalComment).where( + InternalComment.internal_comment_id == internal_comment_id + ) + result = await self.db.execute(statement) + internal_comment = result.scalars().first() + + if not internal_comment: + raise DataNotFoundException( + f"Internal comment with ID {internal_comment_id} not found." + ) + + internal_comment.comment = new_comment_text + await self.db.flush() + await self.db.refresh(internal_comment) + + # Updated the internal comment with the creator's full name. 
+ internal_comment.full_name = await self.user_repo.get_full_name( + internal_comment.create_user + ) + return internal_comment diff --git a/backend/lcfs/web/api/internal_comment/schema.py b/backend/lcfs/web/api/internal_comment/schema.py new file mode 100644 index 000000000..caff8568f --- /dev/null +++ b/backend/lcfs/web/api/internal_comment/schema.py @@ -0,0 +1,49 @@ +from typing import Optional +from enum import Enum +from datetime import datetime +from lcfs.web.api.base import BaseSchema + + +# -------------------------------------- +# Base Configuration +# -------------------------------------- +class BaseConfig: + from_attributes = True + + +# -------------------------------------- +# Internal Comment +# -------------------------------------- +class EntityTypeEnum(str, Enum): + TRANSFER = "Transfer" + INITIATIVE_AGREEMENT = "initiativeAgreement" + ADMIN_ADJUSTMENT = "administrativeAdjustment" + ASSESSMENT = "Assessment" + COMPLIANCE_REPORT = "complianceReport" + + +class AudienceScopeEnum(str, Enum): + COMPLIANCE_MANAGER = "Compliance Manager" + DIRECTOR = "Director" + ANALYST = "Analyst" + + +class InternalCommentCreateSchema(BaseSchema): + entity_type: EntityTypeEnum + entity_id: int + comment: str + audience_scope: AudienceScopeEnum + + +class InternalCommentUpdateSchema(BaseSchema): + comment: Optional[str] = None + + +class InternalCommentResponseSchema(BaseSchema): + internal_comment_id: int + comment: str + audience_scope: AudienceScopeEnum + create_user: Optional[str] = None + create_date: Optional[datetime] = None + update_date: Optional[datetime] = None + full_name: Optional[str] = None diff --git a/backend/lcfs/web/api/internal_comment/services.py b/backend/lcfs/web/api/internal_comment/services.py new file mode 100644 index 000000000..c051f03cb --- /dev/null +++ b/backend/lcfs/web/api/internal_comment/services.py @@ -0,0 +1,115 @@ +import structlog +from typing import List + +from fastapi import Depends, Request + +from lcfs.web.core.decorators import service_handler +from lcfs.db.models.comment.InternalComment import InternalComment +from .repo import InternalCommentRepository +from .schema import InternalCommentCreateSchema, InternalCommentResponseSchema + + +logger = structlog.get_logger(__name__) + + +class InternalCommentService: + """ + Service class for handling internal comment-related operations. + """ + + def __init__( + self, + request: Request = None, + repo: InternalCommentRepository = Depends(InternalCommentRepository), + ) -> None: + """ + Initializes the InternalCommentService with a request object and an + instance of InternalCommentRepository. + + Args: + request (Request, optional): The current HTTP request. Defaults to None. + repo (InternalCommentRepository): The repository instance for internal comment operations. + """ + self.request = request + self.repo = repo + + @service_handler + async def create_internal_comment( + self, data: InternalCommentCreateSchema + ) -> InternalCommentResponseSchema: + """ + Creates a new internal comment based on the provided data schema. + + Args: + data (InternalCommentCreateSchema): The input data for creating a new internal comment. + + Returns: + InternalCommentResponseSchema: The created internal comment as a data transfer object. 
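+
+        A rough usage sketch (values are illustrative only):
+            data = InternalCommentCreateSchema(
+                entity_type=EntityTypeEnum.TRANSFER,
+                entity_id=42,
+                comment="Reviewed the supporting documents.",
+                audience_scope=AudienceScopeEnum.ANALYST,
+            )
+            created = await service.create_internal_comment(data)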
+ """ + username = self.request.user.keycloak_username + comment = InternalComment( + comment=data.comment, + audience_scope=data.audience_scope, + create_user=username, + ) + created_comment = await self.repo.create_internal_comment( + comment, data.entity_type, data.entity_id + ) + return InternalCommentResponseSchema.from_orm(created_comment) + + @service_handler + async def get_internal_comments( + self, entity_type: str, entity_id: int + ) -> List[InternalCommentResponseSchema]: + """ + Retrieves internal comments associated with a specific entity, identified by + its type and ID. + + Args: + entity_type (str): The type of the associated entity. + entity_id (int): The ID of the associated entity. + + Returns: + List[InternalCommentResponseSchema]: A list of internal comments as data transfer objects. + """ + comments = await self.repo.get_internal_comments(entity_type, entity_id) + return [ + InternalCommentResponseSchema.model_validate(comment) + for comment in comments + ] + + @service_handler + async def get_internal_comment_by_id( + self, internal_comment_id: int + ) -> InternalCommentResponseSchema: + """ + Retrieves a single internal comment by its ID. + + Args: + internal_comment_id (int): The ID of the internal comment to be retrieved. + + Returns: + InternalCommentResponseSchema: The internal comment as a data transfer object. + """ + comment = await self.repo.get_internal_comment_by_id(internal_comment_id) + return InternalCommentResponseSchema.from_orm(comment) + + @service_handler + async def update_internal_comment( + self, internal_comment_id: int, new_comment_text: str + ) -> InternalCommentResponseSchema: + """ + Updates the text of an existing internal comment, identified by its ID and the + username of the comment creator. + + Args: + internal_comment_id (int): The ID of the internal comment to be updated. + new_comment_text (str): The new text to update the comment with. + + Returns: + InternalCommentResponseSchema: The updated internal comment as a data transfer object. + """ + updated_comment = await self.repo.update_internal_comment( + internal_comment_id, new_comment_text + ) + return InternalCommentResponseSchema.model_validate(updated_comment) diff --git a/backend/lcfs/web/api/internal_comment/views.py b/backend/lcfs/web/api/internal_comment/views.py new file mode 100644 index 000000000..78cd9a3b0 --- /dev/null +++ b/backend/lcfs/web/api/internal_comment/views.py @@ -0,0 +1,110 @@ +import structlog +from typing import List + +from fastapi import APIRouter, Body, Depends, status, Request, HTTPException +from starlette import status + +from lcfs.db import dependencies +from lcfs.web.core.decorators import view_handler +from .services import InternalCommentService + +from .schema import ( + InternalCommentCreateSchema, + InternalCommentUpdateSchema, + InternalCommentResponseSchema, +) +from lcfs.db.models.user.Role import RoleEnum + + +logger = structlog.get_logger(__name__) +router = APIRouter() +get_async_db = dependencies.get_async_db_session + + +@router.post( + "/", + response_model=InternalCommentResponseSchema, + status_code=status.HTTP_201_CREATED, +) +@view_handler([RoleEnum.GOVERNMENT]) +async def create_comment( + request: Request, + comment_data: InternalCommentCreateSchema, + service: InternalCommentService = Depends(), +): + """ + API endpoint to create a new internal comment. Requires the user to have the 'Government' role. + + Args: + request (Request): The request object. 
+ comment_data (InternalCommentCreateSchema): The schema containing the data for the new comment. + service (InternalCommentService, optional): The service handling the internal comment logic. + + Returns: + InternalCommentResponseSchema: The created internal comment. + """ + return await service.create_internal_comment(comment_data) + + +@router.get( + "/{entity_type}/{entity_id}", + response_model=List[InternalCommentResponseSchema], + status_code=status.HTTP_200_OK, +) +@view_handler([RoleEnum.GOVERNMENT]) +async def get_comments( + request: Request, + entity_type: str, + entity_id: int, + service: InternalCommentService = Depends(), +): + """ + Retrieves all internal comments associated with a specified entity type and ID. Requires 'Government' role. + + Args: + request (Request): The request object. + entity_type (str): The type of the entity. + entity_id (int): The ID of the entity. + service (InternalCommentService, optional): The service handling the internal comment logic. + + Returns: + List[InternalCommentResponseSchema]: A list of internal comments associated with the entity. + """ + return await service.get_internal_comments(entity_type, entity_id) + + +@router.put("/{internal_comment_id}", response_model=InternalCommentResponseSchema) +@view_handler([RoleEnum.GOVERNMENT]) +async def update_comment( + request: Request, + internal_comment_id: int, + comment_data: InternalCommentUpdateSchema = Body(...), + service: InternalCommentService = Depends(), +): + """ + Updates the text of an existing internal comment. Requires the user to have the 'Government' role. + + Args: + request (Request): The request object. + internal_comment_id (int): The ID of the internal comment to be updated. + comment_data (InternalCommentUpdateSchema): The schema with the new comment text. + service (InternalCommentService, optional): The service handling the internal comment logic. + + Returns: + InternalCommentResponseSchema: The updated internal comment. + """ + # Fetch the existing comment to check the creator + existing_comment = await service.get_internal_comment_by_id(internal_comment_id) + + if not existing_comment: + raise HTTPException(status_code=404, detail="Internal comment not found.") + + current_username = request.user.keycloak_username + if existing_comment.create_user != current_username: + raise HTTPException( + status_code=403, detail="User is not the creator of the comment." + ) + + return await service.update_internal_comment( + internal_comment_id, comment_data.comment + ) diff --git a/backend/lcfs/web/api/monitoring/__init__.py b/backend/lcfs/web/api/monitoring/__init__.py index 4d7f9daac..672b2ee61 100644 --- a/backend/lcfs/web/api/monitoring/__init__.py +++ b/backend/lcfs/web/api/monitoring/__init__.py @@ -1,4 +1,5 @@ """API for checking project status.""" + from lcfs.web.api.monitoring.views import router __all__ = ["router"] diff --git a/backend/lcfs/web/api/monitoring/views.py b/backend/lcfs/web/api/monitoring/views.py index 1b36caf02..c599ef0b1 100644 --- a/backend/lcfs/web/api/monitoring/views.py +++ b/backend/lcfs/web/api/monitoring/views.py @@ -4,9 +4,10 @@ @router.get("/health") -def health_check() -> None: +def health_check() -> str: """ Checks the health of a project. It returns 200 if the project is healthy. 
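+
+    Example (host, port and route prefix depend on the deployment):
+        curl -s http://localhost:8000/health   # -> "healthy"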
""" + return "healthy" diff --git a/backend/lcfs/web/api/notification/__init__.py b/backend/lcfs/web/api/notification/__init__.py index edbb49894..1e59b2d9c 100644 --- a/backend/lcfs/web/api/notification/__init__.py +++ b/backend/lcfs/web/api/notification/__init__.py @@ -1,4 +1,5 @@ """User API.""" + from lcfs.web.api.notification.views import router __all__ = ["router"] diff --git a/backend/lcfs/web/api/notification/repo.py b/backend/lcfs/web/api/notification/repo.py new file mode 100644 index 000000000..bd9d874fa --- /dev/null +++ b/backend/lcfs/web/api/notification/repo.py @@ -0,0 +1,463 @@ +from lcfs.db.models.notification import ( + NotificationChannelSubscription, + NotificationMessage, + NotificationChannel, + NotificationType, + ChannelEnum, +) +from lcfs.db.models.organization import Organization +from lcfs.db.models.user import UserProfile +from lcfs.db.models.user.UserRole import UserRole +from lcfs.web.api.base import ( + NotificationTypeEnum, + PaginationRequestSchema, + apply_filter_conditions, + get_field_for_filter, + validate_pagination, +) +import structlog + +from typing import List, Optional, Sequence +from fastapi import Depends +from lcfs.db.dependencies import get_async_db_session +from lcfs.web.exception.exceptions import DataNotFoundException + +from sqlalchemy import asc, delete, desc, or_, select, func, update +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm import selectinload, joinedload + +from lcfs.web.core.decorators import repo_handler +from sqlalchemy import and_ + +logger = structlog.get_logger(__name__) + + +class NotificationRepository: + def __init__(self, db: AsyncSession = Depends(get_async_db_session)): + self.db = db + + @repo_handler + async def create_notification_message( + self, notification_message: NotificationMessage + ) -> NotificationMessage: + """ + Create a new notification message + """ + self.db.add(notification_message) + await self.db.flush() + await self.db.refresh( + notification_message, + [ + "related_organization", + "origin_user_profile_id", + "related_user_profile_id", + "notification_type", + ], + ) + # await self.db.refresh(notification_message) + return notification_message + + @repo_handler + async def create_notification_messages( + self, notification_messages: List[NotificationMessage] + ) -> None: + """ + Create bulk notification messages + """ + self.db.add_all(notification_messages) + await self.db.flush() + + @repo_handler + async def get_notification_messages_by_user( + self, user_profile_id: int, is_read: Optional[bool] = None + ) -> list[NotificationMessage]: + """ + Retrieve all notification messages for a user + """ + # Start building the query + query = ( + select(NotificationMessage) + .options( + joinedload(NotificationMessage.related_organization), + joinedload(NotificationMessage.origin_user_profile) + .joinedload(UserProfile.user_roles) + .joinedload(UserRole.role), + ) + .where(NotificationMessage.related_user_profile_id == user_profile_id) + ) + + # Apply additional filter for `is_read` if provided + if is_read is not None: + query = query.where(NotificationMessage.is_read == is_read) + + # Execute the query and retrieve the results + result = await self.db.execute(query) + return result.unique().scalars().all() + + def _apply_notification_filters( + self, pagination: PaginationRequestSchema, conditions: List + ): + for filter in pagination.filters: + filter_value = filter.filter + filter_option = filter.type + filter_type = filter.filter_type + + # Handle date filters + if 
filter.field == "date": + filter_value = filter.date_from + field = get_field_for_filter(NotificationMessage, "create_date") + conditions.append( + apply_filter_conditions( + field, filter_value, filter_option, filter_type + ) + ) + elif filter.field == "user": + conditions.append( + NotificationMessage.origin_user_profile.has( + UserProfile.first_name.like(f"%{filter_value}%") + ) + ) + elif filter.field == "organization": + conditions.append( + NotificationMessage.related_organization.has( + Organization.name.like(f"%{filter_value}%") + ) + ) + elif filter.field == "transaction_id": + field = get_field_for_filter(NotificationMessage, 'related_transaction_id') + conditions.append( + apply_filter_conditions( + field, filter_value, filter_option, filter_type + ) + ) + else: + field = get_field_for_filter(NotificationMessage, filter.field) + conditions.append( + apply_filter_conditions( + field, filter_value, filter_option, filter_type + ) + ) + + return conditions + + @repo_handler + async def get_paginated_notification_messages( + self, user_id, pagination: PaginationRequestSchema + ) -> tuple[Sequence[NotificationMessage], int]: + """ + Queries notification messages from the database with optional filters. Supports pagination and sorting. + + Args: + pagination (dict): Pagination and sorting parameters. + + Returns: + List[NotificationSchema]: A list of notification messages matching the query. + """ + conditions = [NotificationMessage.related_user_profile_id == user_id] + pagination = validate_pagination(pagination) + + if pagination.filters: + self._apply_notification_filters(pagination, conditions) + + offset = 0 if (pagination.page < 1) else (pagination.page - 1) * pagination.size + limit = pagination.size + # Start building the query + query = ( + select(NotificationMessage) + .options( + joinedload(NotificationMessage.related_organization), + joinedload(NotificationMessage.origin_user_profile) + .joinedload(UserProfile.user_roles) + .joinedload(UserRole.role), + ) + .where(and_(*conditions)) + ) + + # Apply sorting + order_clauses = [] + if not pagination.sort_orders: + order_clauses.append(desc(NotificationMessage.create_date)) + else: + for order in pagination.sort_orders: + direction = asc if order.direction == "asc" else desc + if order.field == "date": + field = NotificationMessage.create_date + elif order.field == "user": + field = UserProfile.first_name + elif order.field == "organization": + field = Organization.name + elif order.field == "transaction_id": + field = NotificationMessage.related_transaction_id + else: + field = getattr(NotificationMessage, order.field) + if field is not None: + order_clauses.append(direction(field)) + query = query.order_by(*order_clauses) + + # Execute the count query to get the total count + count_query = query.with_only_columns(func.count()).order_by(None) + total_count = (await self.db.execute(count_query)).scalar() + + # Execute the main query to retrieve all notification_messages + result = await self.db.execute(query.offset(offset).limit(limit)) + notification_messages = result.unique().scalars().all() + return notification_messages, total_count + + @repo_handler + async def get_notification_message_by_id( + self, notification_id: int + ) -> Optional[NotificationMessage]: + """ + Retrieve notification message by id + """ + query = select(NotificationMessage).where( + (NotificationMessage.notification_message_id == notification_id) + ) + result = await self.db.execute(query) + notification = result.scalar_one_or_none() + + if not 
notification: + raise DataNotFoundException( + f"notification id '{notification_id}' not found" + ) + + return notification + + @repo_handler + async def get_unread_notification_message_count_by_user_id( + self, user_id: int + ) -> int: + """ + Retrieve count of unread notification message by user id + """ + query = select(func.count(NotificationMessage.notification_message_id)).where( + NotificationMessage.related_user_profile_id == user_id, + NotificationMessage.is_read == False, + ) + + result = await self.db.execute(query) + count = result.scalar_one() + + return count + + @repo_handler + async def update_notification_message(self, notification) -> NotificationMessage: + """ + Update a notification message + """ + merged_notification = await self.db.merge(notification) + await self.db.flush() + + return merged_notification + + @repo_handler + async def delete_notification_message(self, notification_id: int): + """ + Delete a notification_message by id + """ + query = delete(NotificationMessage).where( + NotificationMessage.notification_message_id == notification_id + ) + await self.db.execute(query) + await self.db.flush() + + @repo_handler + async def delete_notification_messages(self, user_id, notification_ids: List[int]): + """ + Delete a notification_message by id + """ + query = delete(NotificationMessage).where( + and_( + NotificationMessage.notification_message_id.in_(notification_ids), + NotificationMessage.related_user_profile_id == user_id, + ) + ) + await self.db.execute(query) + await self.db.flush() + + @repo_handler + async def mark_notification_as_read( + self, notification_id + ) -> Optional[NotificationMessage]: + """ + Mark a notification message as read + """ + query = select(NotificationMessage).where( + NotificationMessage.notification_message_id == notification_id + ) + result = await self.db.execute(query) + notification = result.scalar_one_or_none() + + if notification: + notification.is_read = True + await self.db.commit() + await self.db.refresh(notification) + + return notification + + @repo_handler + async def mark_notifications_as_read( + self, user_id: int, notification_ids: List[int] + ): + """ + Mark notification messages as read for a user + """ + if not notification_ids: + return [] + + stmt = ( + update(NotificationMessage) + .where( + and_( + NotificationMessage.notification_message_id.in_(notification_ids), + NotificationMessage.related_user_profile_id == user_id, + ) + ) + .values(is_read=True) + ) + await self.db.execute(stmt) + await self.db.flush() + + return notification_ids + + @repo_handler + async def create_notification_channel_subscription( + self, notification_channel_subscription: NotificationChannelSubscription + ) -> NotificationChannelSubscription: + """ + Create a new notification channel subscription + """ + self.db.add(notification_channel_subscription) + await self.db.flush() + await self.db.refresh( + notification_channel_subscription, + ["notification_type", "user_profile", "notification_channel"], + ) + return notification_channel_subscription + + @repo_handler + async def get_notification_channel_subscriptions_by_user( + self, user_profile_id: int + ) -> List[NotificationChannelSubscription]: + """ + Retrieve channel subscriptions for a user, including channel name and notification type name. 
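+
+        Example (the user ID is hypothetical):
+            subs = await repo.get_notification_channel_subscriptions_by_user(7)
+            channels = [s.notification_channel.channel_name for s in subs]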
+ """ + query = ( + select(NotificationChannelSubscription) + .options( + selectinload(NotificationChannelSubscription.notification_channel), + selectinload(NotificationChannelSubscription.notification_type), + ) + .where(NotificationChannelSubscription.user_profile_id == user_profile_id) + ) + result = await self.db.execute(query) + subscriptions = result.scalars().all() + + if not subscriptions: + raise DataNotFoundException( + f"Channel subscriptions not found for user id: '{user_profile_id}'" + ) + + return subscriptions + + @repo_handler + async def get_notification_channel_subscription_by_id( + self, notification_channel_subscription_id: int + ) -> Optional[NotificationChannelSubscription]: + """ + Retrieve a channel subscription by id + """ + query = select(NotificationChannelSubscription).where( + ( + NotificationChannelSubscription.notification_channel_subscription_id + == notification_channel_subscription_id + ) + ) + result = await self.db.execute(query) + subscription = result.scalar_one() + + if not subscription: + raise DataNotFoundException( + f"Channel subscription with id '{notification_channel_subscription_id}'not found." + ) + + return subscription + + @repo_handler + async def delete_notification_channel_subscription( + self, notification_channel_subscription_id: int + ): + """ + Delete a channel subscription by id + """ + query = delete(NotificationChannelSubscription).where( + NotificationChannelSubscription.notification_channel_subscription_id + == notification_channel_subscription_id + ) + await self.db.execute(query) + await self.db.flush() + + @repo_handler + async def get_notification_type_by_name(self, name: str) -> Optional[int]: + """ + Retrieve a NotificationType by its name + """ + query = select(NotificationType.notification_type_id).where( + NotificationType.name == name + ) + result = await self.db.execute(query) + x = result.scalars().first() + return x + + @repo_handler + async def get_notification_channel_by_name( + self, name: ChannelEnum + ) -> Optional[int]: + """ + Retrieve a NotificationChannel by its name + """ + query = select(NotificationChannel.notification_channel_id).where( + NotificationChannel.channel_name == name.value + ) + result = await self.db.execute(query) + return result.scalars().first() + + @repo_handler + async def get_subscribed_users_by_channel( + self, + notification_type: NotificationTypeEnum, + channel: ChannelEnum, + organization_id: int = None, + ) -> List[int]: + """ + Retrieve a list of user ids subscribed to a notification type + """ + query = ( + select(NotificationChannelSubscription) + .join( + NotificationType, + NotificationType.notification_type_id + == NotificationChannelSubscription.notification_type_id, + ) + .join( + NotificationChannel, + NotificationChannel.notification_channel_id + == NotificationChannelSubscription.notification_channel_id, + ) + .join( + UserProfile, + UserProfile.user_profile_id + == NotificationChannelSubscription.user_profile_id, + ) + .filter( + NotificationType.name == notification_type.value, + NotificationChannelSubscription.is_enabled == True, + NotificationChannel.channel_name == channel.value, + or_( + UserProfile.organization_id == organization_id, + UserProfile.organization_id.is_(None), + ), + ) + ) + result = await self.db.execute(query) + return result.scalars().all() diff --git a/backend/lcfs/web/api/notification/schema.py b/backend/lcfs/web/api/notification/schema.py index 80e99fd5d..30466bfa8 100644 --- a/backend/lcfs/web/api/notification/schema.py +++ 
b/backend/lcfs/web/api/notification/schema.py @@ -1,14 +1,164 @@ +from datetime import datetime +from typing import Any, Dict, List, Optional -from typing import Optional +from lcfs.db.models.compliance.ComplianceReportStatus import ComplianceReportStatusEnum +from lcfs.db.models.initiative_agreement.InitiativeAgreementStatus import ( + InitiativeAgreementStatusEnum, +) +from lcfs.db.models.transfer.TransferStatus import TransferStatusEnum +from lcfs.web.api.base import BaseSchema, NotificationTypeEnum, PaginationResponseSchema +from pydantic import computed_field, model_validator -from pydantic import BaseModel +class NotificationOrganizationSchema(BaseSchema): + organization_id: int + name: str -class NotificationMessageRequest(BaseModel): - is_read: bool - is_archived: bool -class NotificationChannelSubscriptionRequest(BaseModel): - is_enabled: bool - channel_id: int - notification_type_id: int \ No newline at end of file +class NotificationUserProfileSchema(BaseSchema): + first_name: str + last_name: str + organization_id: Optional[int] = None + is_government: bool = False + + @computed_field + @property + def full_name(self) -> str: + if self.is_government: + return "Government of B.C." + return f"{self.first_name} {self.last_name}" + + +class NotificationMessageSchema(BaseSchema): + notification_message_id: Optional[int] = None + is_read: Optional[bool] = False + is_archived: Optional[bool] = False + is_warning: Optional[bool] = False + is_error: Optional[bool] = False + type: Optional[str] = None + message: Optional[str] = None + related_organization_id: Optional[int] = None + related_organization: Optional[NotificationOrganizationSchema] = None + related_transaction_id: Optional[str] = None + create_date: Optional[datetime] = None + origin_user_profile_id: Optional[int] = None + origin_user_profile: Optional[NotificationUserProfileSchema] = None + related_user_profile_id: Optional[int] = None + notification_type_id: Optional[int] = None + deleted: Optional[bool] = None + + +class NotificationCountSchema(BaseSchema): + count: int + + +class DeleteNotificationMessageSchema(BaseSchema): + notification_message_id: int + deleted: bool + + +class DeleteNotificationMessageResponseSchema(BaseSchema): + message: str + + +class SubscriptionSchema(BaseSchema): + notification_channel_subscription_id: Optional[int] = None + is_enabled: Optional[bool] = True + notification_channel_name: Optional[str] = None + user_profile_id: Optional[int] = None + notification_type_name: Optional[str] = None + deleted: Optional[bool] = None + + +class DeleteSubscriptionSchema(BaseSchema): + notification_channel_subscription_id: int + deleted: bool + + +class DeleteNotificationChannelSubscriptionResponseSchema(BaseSchema): + message: str + + +class NotificationsSchema(BaseSchema): + notifications: List[NotificationMessageSchema] = [] + pagination: PaginationResponseSchema = None + + +class NotificationRequestSchema(BaseSchema): + notification_types: List[NotificationTypeEnum] + notification_context: Optional[Dict[str, Any]] = {} + notification_data: Optional[NotificationMessageSchema] = None + + +COMPLIANCE_REPORT_STATUS_NOTIFICATION_MAPPER = { + ComplianceReportStatusEnum.Submitted: [ + NotificationTypeEnum.IDIR_ANALYST__COMPLIANCE_REPORT__SUBMITTED_FOR_REVIEW, + NotificationTypeEnum.IDIR_COMPLIANCE_MANAGER__COMPLIANCE_REPORT__SUBMITTED_FOR_REVIEW, + ], + ComplianceReportStatusEnum.Recommended_by_analyst: [ + NotificationTypeEnum.IDIR_COMPLIANCE_MANAGER__COMPLIANCE_REPORT__ANALYST_RECOMMENDATION + ], + 
ComplianceReportStatusEnum.Recommended_by_manager: [ + NotificationTypeEnum.IDIR_DIRECTOR__COMPLIANCE_REPORT__MANAGER_RECOMMENDATION + ], + ComplianceReportStatusEnum.Assessed: [ + NotificationTypeEnum.IDIR_ANALYST__COMPLIANCE_REPORT__DIRECTOR_DECISION, + NotificationTypeEnum.IDIR_COMPLIANCE_MANAGER__COMPLIANCE_REPORT__DIRECTOR_ASSESSMENT, + NotificationTypeEnum.BCEID__COMPLIANCE_REPORT__DIRECTOR_ASSESSMENT, + ], + ComplianceReportStatusEnum.ReAssessed: [ + NotificationTypeEnum.IDIR_ANALYST__COMPLIANCE_REPORT__DIRECTOR_DECISION, + NotificationTypeEnum.IDIR_COMPLIANCE_MANAGER__COMPLIANCE_REPORT__DIRECTOR_ASSESSMENT, + NotificationTypeEnum.BCEID__COMPLIANCE_REPORT__DIRECTOR_ASSESSMENT, + ], + "Return to analyst": [ + NotificationTypeEnum.IDIR_ANALYST__COMPLIANCE_REPORT__SUBMITTED_FOR_REVIEW + ], + "Return to manager": [ + NotificationTypeEnum.IDIR_COMPLIANCE_MANAGER__COMPLIANCE_REPORT__ANALYST_RECOMMENDATION + ], +} + + +TRANSFER_STATUS_NOTIFICATION_MAPPER = { + TransferStatusEnum.Sent: [ + NotificationTypeEnum.BCEID__TRANSFER__PARTNER_ACTIONS, + ], + TransferStatusEnum.Rescinded: [ + NotificationTypeEnum.BCEID__TRANSFER__PARTNER_ACTIONS, + ], + TransferStatusEnum.Declined: [ + NotificationTypeEnum.BCEID__TRANSFER__PARTNER_ACTIONS, + ], + TransferStatusEnum.Submitted: [ + NotificationTypeEnum.BCEID__TRANSFER__PARTNER_ACTIONS, + NotificationTypeEnum.IDIR_ANALYST__TRANSFER__SUBMITTED_FOR_REVIEW, + ], + TransferStatusEnum.Recommended: [ + NotificationTypeEnum.IDIR_DIRECTOR__TRANSFER__ANALYST_RECOMMENDATION + ], + TransferStatusEnum.Refused: [ + NotificationTypeEnum.IDIR_ANALYST__TRANSFER__RESCINDED_ACTION, + NotificationTypeEnum.BCEID__TRANSFER__DIRECTOR_DECISION, + ], + TransferStatusEnum.Recorded: [ + NotificationTypeEnum.BCEID__TRANSFER__DIRECTOR_DECISION, + NotificationTypeEnum.IDIR_ANALYST__TRANSFER__DIRECTOR_RECORDED, + ], + "Return to analyst": [ + NotificationTypeEnum.IDIR_ANALYST__TRANSFER__SUBMITTED_FOR_REVIEW + ], +} + +INITIATIVE_AGREEMENT_STATUS_NOTIFICATION_MAPPER = { + InitiativeAgreementStatusEnum.Recommended: [ + NotificationTypeEnum.IDIR_DIRECTOR__INITIATIVE_AGREEMENT__ANALYST_RECOMMENDATION + ], + InitiativeAgreementStatusEnum.Approved: [ + NotificationTypeEnum.BCEID__INITIATIVE_AGREEMENT__DIRECTOR_APPROVAL, + NotificationTypeEnum.IDIR_ANALYST__INITIATIVE_AGREEMENT__RETURNED_TO_ANALYST + ], + "Return to analyst": [ + NotificationTypeEnum.IDIR_ANALYST__INITIATIVE_AGREEMENT__RETURNED_TO_ANALYST + ], +} diff --git a/backend/lcfs/web/api/notification/services.py b/backend/lcfs/web/api/notification/services.py new file mode 100644 index 000000000..a36796af7 --- /dev/null +++ b/backend/lcfs/web/api/notification/services.py @@ -0,0 +1,285 @@ +import math +from typing import List, Optional +from lcfs.db.models.notification import ( + NotificationChannelSubscription, + NotificationMessage, + ChannelEnum, +) +from lcfs.web.api.base import ( + PaginationRequestSchema, + PaginationResponseSchema, +) +from lcfs.web.api.email.services import CHESEmailService +from lcfs.web.api.notification.schema import ( + NotificationRequestSchema, + NotificationsSchema, + SubscriptionSchema, + NotificationMessageSchema, +) +from lcfs.web.exception.exceptions import DataNotFoundException +import structlog +from fastapi import Depends +from lcfs.web.api.notification.repo import NotificationRepository +from lcfs.web.core.decorators import service_handler + +logger = structlog.get_logger(__name__) + + +class NotificationService: + + def __init__( + self, + repo: NotificationRepository = 
Depends(NotificationRepository), + email_service: CHESEmailService = Depends(CHESEmailService), + ) -> None: + self.repo = repo + self.email_service = email_service + + @service_handler + async def get_notification_messages_by_user_id( + self, user_id: int, is_read: Optional[bool] = None + ): + """ + Retrieve all notifications for a given user. + Optionally filter by read status. + """ + notification_models = await self.repo.get_notification_messages_by_user( + user_profile_id=user_id, is_read=is_read + ) + + return [ + NotificationMessageSchema.model_validate(message) + for message in notification_models + ] + + @service_handler + async def get_paginated_notification_messages( + self, user_id: int, pagination: PaginationRequestSchema + ) -> NotificationsSchema: + """ + Retrieve all notifications for a given user with pagination, filtering and sorting. + """ + notifications, total_count = ( + await self.repo.get_paginated_notification_messages(user_id, pagination) + ) + return NotificationsSchema( + pagination=PaginationResponseSchema( + total=total_count, + page=pagination.page, + size=pagination.size, + total_pages=math.ceil(total_count / pagination.size), + ), + notifications=[ + NotificationMessageSchema.model_validate(notification) + for notification in notifications + ], + ) + + @service_handler + async def update_notification_messages( + self, user_id: int, notification_ids: List[int] + ): + """ + Update multiple notifications (mark as read). + """ + await self.repo.mark_notifications_as_read(user_id, notification_ids) + + return notification_ids + + @service_handler + async def delete_notification_messages( + self, user_id: int, notification_ids: List[int] + ): + """ + Delete multiple notifications. + """ + await self.repo.delete_notification_messages(user_id, notification_ids) + + return notification_ids + + @service_handler + async def get_notification_message_by_id(self, notification_id: int): + """ + Retrieve a specific notification by ID. + """ + notification = await self.repo.get_notification_message_by_id(notification_id) + + return NotificationMessageSchema.model_validate(notification) + + @service_handler + async def count_unread_notifications_by_user_id(self, user_id: int): + """ + Retrieve notification count by user id + """ + return await self.repo.get_unread_notification_message_count_by_user_id( + user_id=user_id + ) + + @service_handler + async def mark_notification_as_read(self, notification_id: int): + """ + Mark a specific notification as read. + """ + notification = await self.repo.mark_notification_as_read(notification_id) + if not notification: + raise DataNotFoundException( + f"Notification with ID {notification_id} not found." + ) + return notification + + @service_handler + async def create_notification_message( + self, notification_data: NotificationMessageSchema + ): + """ + Create a new notification message. + """ + notification = NotificationMessage( + **notification_data.model_dump(exclude={"deleted"}) + ) + return await self.repo.create_notification_message(notification) + + @service_handler + async def update_notification(self, notification_data: NotificationMessageSchema): + """ + Update an existing notification. + """ + notification = NotificationMessage( + **notification_data.model_dump(exclude={"deleted"}) + ) + return await self.repo.update_notification_message(notification) + + @service_handler + async def delete_notification_message(self, notification_id: int): + """ + Delete a notification. 
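+
+        Example (the ID is hypothetical):
+            await service.delete_notification_message(123)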
+ """ + # Fetch the notification to confirm it exists + notification = await self.repo.get_notification_message_by_id(notification_id) + + if notification: + await self.repo.delete_notification_message(notification_id) + logger.info(f"Notification with ID {notification_id} has been deleted.") + else: + raise DataNotFoundException(f"Notification with ID {notification_id}.") + + @service_handler + async def get_notification_type_id_by_name(self, name: str) -> int: + notification_type = await self.repo.get_notification_type_by_name(name) + if not notification_type: + raise ValueError(f"Invalid notification type name: {name}") + return notification_type + + @service_handler + async def get_notification_channel_id_by_name(self, name: ChannelEnum) -> int: + notification_channel = await self.repo.get_notification_channel_by_name(name) + if not notification_channel: + raise ValueError(f"Invalid notification channel name: {name}") + return notification_channel + + @service_handler + async def create_notification_channel_subscription( + self, subscription_data: SubscriptionSchema, user_profile_id: int + ): + channel_enum_name = ChannelEnum(subscription_data.notification_channel_name) + notification_channel_id = await self.get_notification_channel_id_by_name( + channel_enum_name + ) + notification_type_id = await self.get_notification_type_id_by_name( + subscription_data.notification_type_name + ) + + subscription = NotificationChannelSubscription( + user_profile_id=user_profile_id, + notification_channel_id=notification_channel_id, + notification_type_id=notification_type_id, + is_enabled=subscription_data.is_enabled, + ) + created_subscription = await self.repo.create_notification_channel_subscription( + subscription + ) + x = 1 + return SubscriptionSchema.model_validate(created_subscription) + + @service_handler + async def get_notification_channel_subscriptions_by_user_id(self, user_id: int): + """ + Retrieve all notification channel subscriptions for a user. + """ + subscriptions = await self.repo.get_notification_channel_subscriptions_by_user( + user_id + ) + + subscriptions_with_names = [ + { + "notification_channel_subscription_id": subscription.notification_channel_subscription_id, + "notification_channel_name": subscription.notification_channel.channel_name.name, + "notification_type_name": subscription.notification_type.name, + } + for subscription in subscriptions + ] + + return subscriptions_with_names + + @service_handler + async def get_notification_channel_subscription_by_id( + self, notification_channel_subscription_id: int + ): + """ + Retrieve a specific notification channel subscription by ID. + """ + return await self.repo.get_notification_channel_subscription_by_id( + notification_channel_subscription_id + ) + + @service_handler + async def delete_notification_channel_subscription( + self, subscription_id: int, user_profile_id: int + ): + subscription = await self.repo.get_notification_channel_subscription_by_id( + subscription_id + ) + if not subscription or subscription.user_profile_id != user_profile_id: + raise DataNotFoundException( + "Subscription not found or you are not authorized to delete it." + ) + + await self.repo.delete_notification_channel_subscription(subscription_id) + logger.info(f"Deleted notification channel subscription {subscription_id}.") + + @service_handler + async def send_notification(self, notification: NotificationRequestSchema): + """ + Send subscribed notifications to users. 
+ """ + # Prepare context once, outside the loop + notification.notification_context.update( + {"organization_id": notification.notification_data.related_organization_id} + ) + + for notification_type in notification.notification_types: + in_app_subscribed_users = await self.repo.get_subscribed_users_by_channel( + notification_type, + ChannelEnum.IN_APP, + notification.notification_data.related_organization_id, + ) + + # Batch create in-app notifications + in_app_notifications = [ + NotificationMessage( + **notification.notification_data.model_dump( + exclude_unset=True, exclude={"deleted"} + ), + notification_type_id=subscription.notification_type_id, + related_user_profile_id=subscription.user_profile_id, + ) + for subscription in in_app_subscribed_users + ] + if in_app_notifications: + await self.repo.create_notification_messages(in_app_notifications) + + await self.email_service.send_notification_email( + notification_type, + notification.notification_context, + notification.notification_data.related_organization_id, + ) diff --git a/backend/lcfs/web/api/notification/views.py b/backend/lcfs/web/api/notification/views.py index f649921af..f5caa8695 100644 --- a/backend/lcfs/web/api/notification/views.py +++ b/backend/lcfs/web/api/notification/views.py @@ -1,75 +1,242 @@ -from typing import Annotated -from fastapi import APIRouter, Depends -from fastapi.responses import Response -from lcfs.db.dependencies import SessionLocal -from lcfs.web.api.base import EntityResponse -from lcfs.web.api.notification.schema import NotificationChannelSubscriptionRequest, NotificationMessageRequest +""" +Notification endpoints +""" + +from typing import Union, List +from lcfs.web.api.base import PaginationRequestSchema +from lcfs.web.exception.exceptions import DataNotFoundException +import structlog +from fastapi import APIRouter, Body, Depends, HTTPException, Request, Response +from lcfs.db.models.user.Role import RoleEnum +from lcfs.web.api.notification.schema import ( + DeleteNotificationChannelSubscriptionResponseSchema, + DeleteNotificationMessageResponseSchema, + DeleteSubscriptionSchema, + DeleteNotificationMessageSchema, + NotificationsSchema, + SubscriptionSchema, + NotificationMessageSchema, + NotificationCountSchema, +) +from lcfs.web.api.notification.services import NotificationService +from lcfs.web.core.decorators import view_handler from starlette import status -from sqlalchemy.ext.asyncio import AsyncSession -from lcfs.db.dependencies import get_async_db_session -from lcfs.db.models import NotificationMessage, UserProfile -from sqlalchemy import select, update router = APIRouter() +logger = structlog.get_logger(__name__) + + +@router.get( + "/", response_model=List[NotificationMessageSchema], status_code=status.HTTP_200_OK +) +@view_handler([RoleEnum.GOVERNMENT]) +async def get_notification_messages_by_user_id( + request: Request, + is_read: bool = None, + service: NotificationService = Depends(), +): + """ + Retrieve all notifications of a user + """ + + return await service.get_notification_messages_by_user_id( + user_id=request.user.user_profile_id, is_read=is_read + ) + + +@router.post( + "/list", response_model=NotificationsSchema, status_code=status.HTTP_200_OK +) +@view_handler(["*"]) +async def get_notification_messages_by_user_id( + request: Request, + pagination: PaginationRequestSchema = Body(..., embed=False), + response: Response = None, + service: NotificationService = Depends(), +): + """ + Retrieve all notifications of a user with pagination + """ + return await 
service.get_paginated_notification_messages( + user_id=request.user.user_profile_id, pagination=pagination + ) + + +@router.put("/", response_model=List[int], status_code=status.HTTP_200_OK) +@view_handler(["*"]) +async def update_notification_messages_to_read( + request: Request, + notification_ids: List[int] = Body(..., embed=False), + response: Response = None, + service: NotificationService = Depends(), +): + """ + Update notifications (mark the messages as read) + """ + return await service.update_notification_messages( + request.user.user_profile_id, notification_ids + ) + + +@router.delete("/", response_model=List[int], status_code=status.HTTP_200_OK) +@view_handler(["*"]) +async def delete_notification_messages( + request: Request, + notification_ids: List[int] = Body(..., embed=False), + response: Response = None, + service: NotificationService = Depends(), +): + """ + Delete notification messages + """ + return await service.delete_notification_messages( + request.user.user_profile_id, notification_ids + ) + + +@router.get( + "/count", + response_model=NotificationCountSchema, + status_code=status.HTTP_200_OK, +) +@view_handler(["*"]) +async def get_unread_notifications( + request: Request, service: NotificationService = Depends() +): + """ + Retrieve counter for unread notifications by user id + """ + # Count unread notifications + count = await service.count_unread_notifications_by_user_id( + user_id=request.user.user_profile_id + ) + return NotificationCountSchema(count=count) + + +@router.get( + "/subscriptions", + response_model=List[SubscriptionSchema], + status_code=status.HTTP_200_OK, +) +@view_handler(["*"]) +async def get_notifications_channel_subscriptions_by_user_id( + request: Request, service: NotificationService = Depends() +): + return await service.get_notification_channel_subscriptions_by_user_id( + user_id=request.user.user_profile_id + ) + + +@router.post( + "/save", + response_model=Union[ + NotificationMessageSchema, DeleteNotificationMessageResponseSchema + ], + status_code=status.HTTP_200_OK, +) +@view_handler([RoleEnum.GOVERNMENT]) +async def save_notification( + request: Request, + request_data: Union[ + NotificationMessageSchema, DeleteNotificationMessageSchema + ] = Body(...), + service: NotificationService = Depends(), +): + """ + Save a single notification + """ + notification_id = request_data.notification_message_id + if request_data.deleted: + try: + await service.delete_notification_message(notification_id) + return DeleteNotificationMessageResponseSchema( + message="Notification Message deleted successfully" + ) + except DataNotFoundException as e: + raise HTTPException(status_code=404, detail=str(e)) + elif notification_id: + return await service.update_notification(request_data) + else: + return await service.create_notification_message(request_data) + + +@router.post( + "/subscriptions/save", + response_model=Union[ + SubscriptionSchema, DeleteNotificationChannelSubscriptionResponseSchema + ], + status_code=status.HTTP_200_OK, +) +@view_handler(["*"]) +async def save_subscription( + request: Request, + request_data: Union[SubscriptionSchema, DeleteSubscriptionSchema] = Body(...), + service: NotificationService = Depends(), +): + user_profile_id = request.user.user_profile_id + subscription_id = request_data.notification_channel_subscription_id + + if request_data.deleted: + try: + await service.delete_notification_channel_subscription( + subscription_id, user_profile_id + ) + return DeleteNotificationChannelSubscriptionResponseSchema( + 
message="Notification Subscription deleted successfully" + ) + except DataNotFoundException as e: + raise HTTPException(status_code=404, detail=str(e)) + else: + return await service.create_notification_channel_subscription( + request_data, user_profile_id + ) + + +@router.get( + "/subscriptions/{notification_channel_subscription_id}", + status_code=status.HTTP_200_OK, +) +@view_handler([RoleEnum.GOVERNMENT]) +async def get_notification_channel_subscription_by_id( + request: Request, + notification_channel_subscription_id: int, + service: NotificationService = Depends(), +): + + subscription = await service.get_notification_channel_subscription_by_id( + notification_channel_subscription_id=notification_channel_subscription_id + ) + + if subscription.user_profile_id != request.user.user_profile_id: + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="You are not authorized to access this resource.", + ) + + return subscription + + +@router.get( + "/{notification_id}", + response_model=NotificationMessageSchema, + status_code=status.HTTP_200_OK, +) +@view_handler([RoleEnum.GOVERNMENT]) +async def get_notification_message_by_id( + request: Request, notification_id: int, service: NotificationService = Depends() +): + """ + Retrieve a single notification by ID + """ + notification = await service.get_notification_message_by_id( + notification_id=notification_id + ) + if notification.related_user_profile_id != request.user.user_profile_id: + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="You are not authorized to access this resource.", + ) -# all routes should have a User dependency - -@router.get('/', status_code=status.HTTP_200_OK) -async def get_notifications(user, db: Annotated[AsyncSession, Depends(get_async_db_session)], response_model=EntityResponse) -> EntityResponse: - # get all notifications of a user - # check for user auth. raise otherwise - # raise HTTPException() - # return db.execute(select(NotificationMessage).where(NotificationMessage.user_id == user.user_id)) - return - -@router.get('/{notification_id}', status_code=status.HTTP_200_OK) -async def get_notification(user, notification_id: int, db: Annotated[AsyncSession, Depends(get_async_db_session)], response_model=EntityResponse) -> EntityResponse: - # get a single notitification of a user - # check for user auth. raise otherwise - # raise HTTPException() - # get notification by id - # return db.execute(select(NotificationMessage).where(NotificationMessage.notification_id == notification_id)) - return - -@router.put('/{notification_id}', status_code=status.HTTP_204_NO_CONTENT) -async def update_notification(user, notification_id: int, db:Annotated[AsyncSession, Depends(get_async_db_session)], reqeust: NotificationMessageRequest,response_model=EntityResponse) -> Response: - # update a notification of a user - # check for user auth. raise otherwise - # raise HTTPException() - # return db.execute(update(NotificationMessage).where(NotificationMessage.notification_id == notification_id).values()) - return Response(content=None, status_code=status.HTTP_204_NO_CONTENT) - -@router.get('/', status_code=status.HTTP_200_OK) -async def get_notifications_channel_subscriptions(user, db: Annotated[AsyncSession, Depends(get_async_db_session)], response_model=EntityResponse) -> Response: - # get all notification subscriptions - # check for user auth. 
raise otherwise - # raise HTTPException() - # return db.execute(select(NotificationChannelSubscription).where(NotificationChannelSubscription.user_id == user.user_id)) - return - -@router.get('/{notification_channel_subscription_id}', status_code=status.HTTP_200_OK) -async def get_notification_channel_subscription(user, notification_channel_subscription_id: int, db:Annotated[AsyncSession, Depends(get_async_db_session)], response_model=EntityResponse) -> EntityResponse: - # get a single notification subscriptions - # check for user auth. raise otherwise - # raise HTTPException() - # get notification subscription by id - # return db.execute(select(NotificationChannelSubscription).where(NotificationChannelSubscription.notification_channel_subscription_id == notification_channel_subscription_id)) - return - -@router.put('/{notification_channel_subscription_id}', status_code=status.HTTP_204_NO_CONTENT) -async def update_notification_channel_subscription(user, notification_channel_subscription_id: int, db:Annotated[AsyncSession, Depends(get_async_db_session)], request: NotificationChannelSubscriptionRequest, response_model=EntityResponse ) -> Response: - # update a notification subscription of a user - # check for user auth. raise otherwise - # raise HTTPException() - # return db.execute(update(NotificationChannelSubscription).where(NotificationChannelSubscription.notification_channel_subscription_id == notification_channel_subscription_id).values()) - return Response(content=None, status_code=status.HTTP_204_NO_CONTENT) - -@router.delete('/{notification_channel_subscription_id}', status_code=status.HTTP_204_NO_CONTENT) -async def delete_notification_channel_subscription(user, notification_channel_subscription_id: int, db:Annotated[AsyncSession, Depends(get_async_db_session)], response_model=EntityResponse ) -> Response: - # delete a notification subscription of a user - # check for user auth. 
raise otherwise - # raise HTTPException() - # return db.execute(delete(NotificationChannelSubscription).where(NotificationChannelSubscription.notification_channel_subscription_id == notification_channel_subscription_id)) - return Response(content=None, status_code=status.HTTP_204_NO_CONTENT) \ No newline at end of file + return notification diff --git a/backend/lcfs/web/api/notional_transfer/__init__.py b/backend/lcfs/web/api/notional_transfer/__init__.py new file mode 100644 index 000000000..d5735d811 --- /dev/null +++ b/backend/lcfs/web/api/notional_transfer/__init__.py @@ -0,0 +1,5 @@ +"""Notional Transfer API.""" + +from lcfs.web.api.notional_transfer.views import router + +__all__ = ["router"] diff --git a/backend/lcfs/web/api/notional_transfer/repo.py b/backend/lcfs/web/api/notional_transfer/repo.py new file mode 100644 index 000000000..439b08a76 --- /dev/null +++ b/backend/lcfs/web/api/notional_transfer/repo.py @@ -0,0 +1,288 @@ +import structlog +from typing import List, Optional, Tuple, Any + +from fastapi import Depends + +from lcfs.db.base import ActionTypeEnum, UserTypeEnum +from lcfs.db.dependencies import get_async_db_session + +from sqlalchemy import select, delete, func, case, and_ +from sqlalchemy.orm import joinedload +from sqlalchemy.ext.asyncio import AsyncSession + +from lcfs.db.models.compliance.NotionalTransfer import ( + NotionalTransfer, + ReceivedOrTransferredEnum, +) +from lcfs.db.models.compliance import ComplianceReport +from lcfs.web.api.fuel_code.repo import FuelCodeRepository +from lcfs.web.api.notional_transfer.schema import NotionalTransferSchema +from lcfs.web.api.base import PaginationRequestSchema +from lcfs.web.core.decorators import repo_handler + +logger = structlog.get_logger(__name__) + + +class NotionalTransferRepository: + def __init__( + self, + db: AsyncSession = Depends(get_async_db_session), + fuel_repo: FuelCodeRepository = Depends(), + ): + self.db = db + self.fuel_code_repo = fuel_repo + + @repo_handler + async def get_table_options(self) -> dict: + """Get all table options""" + fuel_categories = await self.fuel_code_repo.get_fuel_categories() + received_or_transferred = [e.value for e in ReceivedOrTransferredEnum] + return { + "fuel_categories": fuel_categories, + "received_or_transferred": received_or_transferred, + } + + @repo_handler + async def get_notional_transfers( + self, compliance_report_id: int + ) -> List[NotionalTransferSchema]: + """ + Queries notional transfers from the database for a specific compliance report. + """ + # Retrieve the compliance report's group UUID + report_group_query = await self.db.execute( + select(ComplianceReport.compliance_report_group_uuid).where( + ComplianceReport.compliance_report_id == compliance_report_id + ) + ) + group_uuid = report_group_query.scalar() + if not group_uuid: + return [] + + result = await self.get_effective_notional_transfers(group_uuid) + return result + + async def get_effective_notional_transfers( + self, compliance_report_group_uuid: str + ) -> List[NotionalTransferSchema]: + """ + Retrieves effective notional transfers for a compliance report group UUID. 
+ """ + # Step 1: Get all compliance_report_ids in the specified group + compliance_reports_select = select(ComplianceReport.compliance_report_id).where( + ComplianceReport.compliance_report_group_uuid + == compliance_report_group_uuid + ) + + # Step 2: Identify group_uuids that have any DELETE action + delete_group_select = ( + select(NotionalTransfer.group_uuid) + .where( + NotionalTransfer.compliance_report_id.in_(compliance_reports_select), + NotionalTransfer.action_type == ActionTypeEnum.DELETE, + ) + .distinct() + ) + + # Step 3: Find the maximum version and priority per group_uuid, excluding deleted groups + user_type_priority = case( + (NotionalTransfer.user_type == UserTypeEnum.GOVERNMENT, 1), + (NotionalTransfer.user_type == UserTypeEnum.SUPPLIER, 0), + else_=0, + ) + + valid_notional_transfers_select = ( + select( + NotionalTransfer.group_uuid, + func.max(NotionalTransfer.version).label("max_version"), + func.max(user_type_priority).label("max_role_priority"), + ) + .where( + NotionalTransfer.compliance_report_id.in_(compliance_reports_select), + NotionalTransfer.action_type != ActionTypeEnum.DELETE, + ~NotionalTransfer.group_uuid.in_(delete_group_select), + ) + .group_by(NotionalTransfer.group_uuid) + ) + valid_notional_transfers_subq = valid_notional_transfers_select.subquery() + + notional_transfers_select = ( + select(NotionalTransfer) + .options(joinedload(NotionalTransfer.fuel_category)) + .join( + valid_notional_transfers_subq, + and_( + NotionalTransfer.group_uuid + == valid_notional_transfers_subq.c.group_uuid, + NotionalTransfer.version + == valid_notional_transfers_subq.c.max_version, + user_type_priority + == valid_notional_transfers_subq.c.max_role_priority, + ), + ) + .order_by(NotionalTransfer.notional_transfer_id) + ) + + result = await self.db.execute(notional_transfers_select) + notional_transfers = result.unique().scalars().all() + + return [ + NotionalTransferSchema( + notional_transfer_id=nt.notional_transfer_id, + compliance_report_id=nt.compliance_report_id, + quantity=nt.quantity, + legal_name=nt.legal_name, + address_for_service=nt.address_for_service, + fuel_category=nt.fuel_category.category, + received_or_transferred=nt.received_or_transferred, + group_uuid=nt.group_uuid, + version=nt.version, + user_type=nt.user_type, + action_type=nt.action_type, + ) + for nt in notional_transfers + ] + + async def get_notional_transfers_paginated( + self, pagination: PaginationRequestSchema, compliance_report_id: int + ) -> Tuple[List[NotionalTransferSchema], int]: + # Retrieve the compliance report's group UUID + report_group_query = await self.db.execute( + select(ComplianceReport.compliance_report_group_uuid).where( + ComplianceReport.compliance_report_id == compliance_report_id + ) + ) + group_uuid = report_group_query.scalar() + if not group_uuid: + return [], 0 + + # Retrieve effective notional transfers using the group UUID + notional_transfers = await self.get_effective_notional_transfers( + compliance_report_group_uuid=group_uuid + ) + + # Manually apply pagination + total_count = len(notional_transfers) + offset = 0 if pagination.page < 1 else (pagination.page - 1) * pagination.size + limit = pagination.size + paginated_notional_transfers = notional_transfers[offset : offset + limit] + + return paginated_notional_transfers, total_count + + @repo_handler + async def save_notional_transfers( + self, notional_transfers: List[NotionalTransfer] + ) -> str: + """ + Saves or updates notional transfers in the database. 
+ """ + for transfer in notional_transfers: + if transfer.notional_transfer_id: + existing_transfer = await self.db.get( + NotionalTransfer, transfer.notional_transfer_id + ) + if existing_transfer: + existing_transfer.compliance_report_id = ( + transfer.compliance_report_id + ) + existing_transfer.quantity = transfer.quantity + existing_transfer.legal_name = transfer.legal_name + existing_transfer.address_for_service = transfer.address_for_service + existing_transfer.fuel_category_id = transfer.fuel_category_id + existing_transfer.received_or_transferred = ( + transfer.received_or_transferred + ) + else: + self.db.add(transfer) + else: + self.db.add(transfer) + + await self.db.flush() + return "Notional transfers saved or updated successfully" + + @repo_handler + async def get_notional_transfer( + self, notional_transfer_id: int + ) -> Optional[NotionalTransfer]: + """ + Get a specific notional transfer by id. + """ + return await self.db.scalar( + select(NotionalTransfer) + .options(joinedload(NotionalTransfer.fuel_category)) + .where(NotionalTransfer.notional_transfer_id == notional_transfer_id) + ) + + @repo_handler + async def update_notional_transfer( + self, notional_transfer: NotionalTransfer + ) -> NotionalTransfer: + """ + Update an existing notional transfer in the database. + """ + updated_notional_transfer = await self.db.merge(notional_transfer) + await self.db.flush() + await self.db.refresh(updated_notional_transfer, ["fuel_category"]) + return updated_notional_transfer + + @repo_handler + async def create_notional_transfer( + self, notional_transfer: NotionalTransfer + ) -> NotionalTransfer: + """ + Create a new notional transfer in the database. + """ + self.db.add(notional_transfer) + await self.db.flush() + await self.db.refresh(notional_transfer, ["fuel_category"]) + return notional_transfer + + @repo_handler + async def delete_notional_transfer(self, notional_transfer_id: int): + """Delete a notional transfer from the database""" + await self.db.execute( + delete(NotionalTransfer).where( + NotionalTransfer.notional_transfer_id == notional_transfer_id + ) + ) + await self.db.flush() + + @repo_handler + async def get_latest_notional_transfer_by_group_uuid( + self, group_uuid: str + ) -> Optional[NotionalTransfer]: + """ + Retrieve the latest NotionalTransfer record for a given group UUID. + """ + query = ( + select(NotionalTransfer) + .options(joinedload(NotionalTransfer.fuel_category)) + .where(NotionalTransfer.group_uuid == group_uuid) + .order_by( + NotionalTransfer.user_type == UserTypeEnum.SUPPLIER, + NotionalTransfer.version.desc(), + ) + ) + + result = await self.db.execute(query) + return result.scalars().first() + + @repo_handler + async def get_notional_transfer_version_by_user( + self, group_uuid: str, version: int, user_type: UserTypeEnum + ) -> Optional[NotionalTransfer]: + """ + Retrieve a specific NotionalTransfer record by group UUID, version, and user_type. 
+ """ + query = ( + select(NotionalTransfer) + .where( + NotionalTransfer.group_uuid == group_uuid, + NotionalTransfer.version == version, + NotionalTransfer.user_type == user_type, + ) + .options(joinedload(NotionalTransfer.fuel_category)) + ) + + result = await self.db.execute(query) + return result.scalars().first() diff --git a/backend/lcfs/web/api/notional_transfer/schema.py b/backend/lcfs/web/api/notional_transfer/schema.py new file mode 100644 index 000000000..5f6571e57 --- /dev/null +++ b/backend/lcfs/web/api/notional_transfer/schema.py @@ -0,0 +1,76 @@ +from typing import Optional, List +from pydantic import Field +from lcfs.web.api.base import ( + BaseSchema, + FilterModel, + SortOrder, + PaginationRequestSchema, + PaginationResponseSchema, +) +from enum import Enum + + +class ReceivedOrTransferredEnumSchema(str, Enum): + Received = "Received" + Transferred = "Transferred" + + +class NotionalTransferCreateSchema(BaseSchema): + legal_name: str + address_for_service: str + fuel_category: str + received_or_transferred: ReceivedOrTransferredEnumSchema + quantity: int + notional_transfer_id: Optional[int] = None + compliance_report_id: int + deleted: Optional[bool] = None + group_uuid: Optional[str] = None + version: Optional[int] = None + user_type: Optional[str] = None + action_type: Optional[str] = None + + +class NotionalTransferSchema(NotionalTransferCreateSchema): + pass + + +class PaginatedNotionalTransferRequestSchema(BaseSchema): + compliance_report_id: int = Field(..., alias="complianceReportId") + filters: List[FilterModel] + page: int + size: int + sort_orders: List[SortOrder] + + +class NotionalTransfersSchema(BaseSchema): + notional_transfers: List[NotionalTransferSchema] + pagination: Optional[PaginationResponseSchema] = None + + +class NotionalTransfersAllSchema(BaseSchema): + notional_transfers: List[NotionalTransferSchema] + + +class NotionalTransferFuelCategorySchema(BaseSchema): + fuel_category_id: int + category: str + description: Optional[str] = None + + +class NotionalTransferTableOptionsSchema(BaseSchema): + fuel_categories: List[NotionalTransferFuelCategorySchema] + received_or_transferred: List[str] + + +class NotionalTransferListCreateSchema(BaseSchema): + compliance_report_id: int + notional_transfers: List[NotionalTransferSchema] + + +class DeleteNotionalTransferSchema(BaseSchema): + notional_transfer_id: int + compliance_report_id: int + + +class DeleteNotionalTransferResponseSchema(BaseSchema): + message: str diff --git a/backend/lcfs/web/api/notional_transfer/services.py b/backend/lcfs/web/api/notional_transfer/services.py new file mode 100644 index 000000000..207707bf6 --- /dev/null +++ b/backend/lcfs/web/api/notional_transfer/services.py @@ -0,0 +1,237 @@ +import math +import uuid +from typing import Optional + +import structlog +from fastapi import Depends + +from lcfs.db.base import UserTypeEnum, ActionTypeEnum +from lcfs.web.api.notional_transfer.repo import NotionalTransferRepository +from lcfs.web.core.decorators import service_handler +from lcfs.db.models.compliance.NotionalTransfer import NotionalTransfer +from lcfs.web.api.base import PaginationRequestSchema, PaginationResponseSchema +from lcfs.web.api.notional_transfer.schema import ( + NotionalTransferCreateSchema, + NotionalTransferSchema, + NotionalTransfersSchema, + NotionalTransferTableOptionsSchema, + NotionalTransferFuelCategorySchema, + NotionalTransfersAllSchema, + DeleteNotionalTransferResponseSchema, +) +from lcfs.web.api.fuel_code.repo import FuelCodeRepository + +logger = 
structlog.get_logger(__name__) + +# Constants defining which fields to exclude during model operations +NOTIONAL_TRANSFER_EXCLUDE_FIELDS = { + "id", + "notional_transfer_id", + "deleted", + "group_uuid", + "user_type", + "version", + "action_type", +} + + +class NotionalTransferServices: + def __init__( + self, + repo: NotionalTransferRepository = Depends(NotionalTransferRepository), + fuel_repo: FuelCodeRepository = Depends(), + ) -> None: + self.repo = repo + self.fuel_repo = fuel_repo + + async def convert_to_model( + self, notional_transfer_data: NotionalTransferCreateSchema + ) -> NotionalTransfer: + """ + Converts data from NotionalTransferCreateSchema to NotionalTransfer data model to store into the database. + """ + fuel_category = await self.fuel_repo.get_fuel_category_by( + category=notional_transfer_data.fuel_category + ) + return NotionalTransfer( + **notional_transfer_data.model_dump( + exclude=NOTIONAL_TRANSFER_EXCLUDE_FIELDS.union( + {"fuel_category"}) + ), + fuel_category_id=fuel_category.fuel_category_id, + ) + + def model_to_schema(self, model: NotionalTransfer) -> NotionalTransferSchema: + """ + Converts data from NotionalTransfer model to NotionalTransferSchema. + """ + return NotionalTransferSchema( + notional_transfer_id=model.notional_transfer_id, + compliance_report_id=model.compliance_report_id, + quantity=model.quantity, + legal_name=model.legal_name, + address_for_service=model.address_for_service, + fuel_category=model.fuel_category.category, + received_or_transferred=model.received_or_transferred, + group_uuid=model.group_uuid, + version=model.version, + user_type=model.user_type, + action_type=model.action_type, + ) + + @service_handler + async def get_table_options(self) -> NotionalTransferTableOptionsSchema: + """ + Gets the list of table options related to notional transfers. + """ + table_options = await self.repo.get_table_options() + return NotionalTransferTableOptionsSchema( + fuel_categories=[ + NotionalTransferFuelCategorySchema.model_validate(category) + for category in table_options["fuel_categories"] + ], + received_or_transferred=table_options["received_or_transferred"], + ) + + @service_handler + async def get_notional_transfer(self, notional_transfer_id: int): + notional_transfer = await self.repo.get_notional_transfer(notional_transfer_id) + if not notional_transfer: + return None + return self.model_to_schema(notional_transfer) + + @service_handler + async def get_notional_transfers( + self, compliance_report_id: int + ) -> NotionalTransfersAllSchema: + """ + Gets the list of notional transfers for a specific compliance report. 
+ """ + notional_transfers = await self.repo.get_notional_transfers( + compliance_report_id + ) + return NotionalTransfersAllSchema( + notional_transfers=[ + NotionalTransferSchema.model_validate(nt) for nt in notional_transfers + ] + ) + + @service_handler + async def get_notional_transfers_paginated( + self, pagination: PaginationRequestSchema, compliance_report_id: int + ) -> NotionalTransfersSchema: + notional_transfers, total_count = ( + await self.repo.get_notional_transfers_paginated( + pagination, compliance_report_id + ) + ) + return NotionalTransfersSchema( + pagination=PaginationResponseSchema( + total=total_count, + page=pagination.page, + size=pagination.size, + total_pages=math.ceil(total_count / pagination.size), + ), + notional_transfers=[ + NotionalTransferSchema.model_validate(nt) for nt in notional_transfers + ], + ) + + @service_handler + async def update_notional_transfer( + self, + notional_transfer_data: NotionalTransferCreateSchema, + user_type: UserTypeEnum, + ) -> NotionalTransferSchema: + """Update an existing notional transfer""" + existing_transfer = await self.repo.get_latest_notional_transfer_by_group_uuid( + notional_transfer_data.group_uuid + ) + if not existing_transfer: + raise ValueError("Notional transfer not found") + + if ( + existing_transfer.compliance_report_id + == notional_transfer_data.compliance_report_id + ): + # Update existing record if compliance report ID matches + for field, value in notional_transfer_data.model_dump( + exclude=NOTIONAL_TRANSFER_EXCLUDE_FIELDS.union( + {"fuel_category"}) + ).items(): + setattr(existing_transfer, field, value) + + if ( + existing_transfer.fuel_category.category + != notional_transfer_data.fuel_category + ): + existing_transfer.fuel_category = ( + await self.fuel_repo.get_fuel_category_by( + category=notional_transfer_data.fuel_category + ) + ) + + updated_transfer = await self.repo.update_notional_transfer( + existing_transfer + ) + return self.model_to_schema(updated_transfer) + else: + # Create a new version of the record + return await self.create_notional_transfer( + notional_transfer_data, user_type, existing_record=existing_transfer + ) + + @service_handler + async def create_notional_transfer( + self, + notional_transfer_data: NotionalTransferCreateSchema, + user_type: UserTypeEnum, + existing_record: Optional[NotionalTransfer] = None, + ) -> NotionalTransferSchema: + """Create a new notional transfer""" + notional_transfer = await self.convert_to_model(notional_transfer_data) + new_group_uuid = str(uuid.uuid4()) + notional_transfer.group_uuid = ( + new_group_uuid if not existing_record else existing_record.group_uuid + ) + notional_transfer.action_type = ( + ActionTypeEnum.CREATE if not existing_record else ActionTypeEnum.UPDATE + ) + notional_transfer.version = ( + 0 if not existing_record else existing_record.version + 1 + ) + notional_transfer.user_type = user_type + created_transfer = await self.repo.create_notional_transfer(notional_transfer) + + return self.model_to_schema(created_transfer) + + @service_handler + async def delete_notional_transfer( + self, + notional_transfer_data: NotionalTransferCreateSchema, + user_type: UserTypeEnum, + ) -> DeleteNotionalTransferResponseSchema: + """Delete a notional transfer""" + existing_transfer = await self.repo.get_latest_notional_transfer_by_group_uuid( + notional_transfer_data.group_uuid + ) + + if existing_transfer.action_type == ActionTypeEnum.DELETE: + return DeleteNotionalTransferResponseSchema(message="Already deleted.") + + deleted_entity = 
NotionalTransfer( + compliance_report_id=notional_transfer_data.compliance_report_id, + group_uuid=notional_transfer_data.group_uuid, + version=existing_transfer.version + 1, + action_type=ActionTypeEnum.DELETE, + user_type=user_type, + ) + + # Copy fields from the latest version for the deletion record + for field in existing_transfer.__table__.columns.keys(): + if field not in NOTIONAL_TRANSFER_EXCLUDE_FIELDS: + setattr(deleted_entity, field, getattr( + existing_transfer, field)) + + await self.repo.create_notional_transfer(deleted_entity) + return DeleteNotionalTransferResponseSchema(message="Marked as deleted.") diff --git a/backend/lcfs/web/api/notional_transfer/validation.py b/backend/lcfs/web/api/notional_transfer/validation.py new file mode 100644 index 000000000..63997a977 --- /dev/null +++ b/backend/lcfs/web/api/notional_transfer/validation.py @@ -0,0 +1,25 @@ +from typing import List +from fastapi import HTTPException, Request +from starlette import status + +from lcfs.web.api.notional_transfer.schema import NotionalTransferCreateSchema + + +class NotionalTransferValidation: + def __init__( + self, + request: Request = None, + ): + self.request = request + + async def validate_compliance_report_id( + self, + compliance_report_id: int, + notional_transfers: List[NotionalTransferCreateSchema], + ): + for notional_transfer in notional_transfers: + if notional_transfer.compliance_report_id != compliance_report_id: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f"Mismatch compliance_report_id in notional transfer: {notional_transfer}", + ) diff --git a/backend/lcfs/web/api/notional_transfer/views.py b/backend/lcfs/web/api/notional_transfer/views.py new file mode 100644 index 000000000..056cc07d5 --- /dev/null +++ b/backend/lcfs/web/api/notional_transfer/views.py @@ -0,0 +1,163 @@ +""" +Notional Transfers endpoints +""" + +import structlog +from typing import Optional, Union + +from fastapi import ( + APIRouter, + Body, + status, + Request, + Response, + Depends, + HTTPException, +) +from fastapi_cache.decorator import cache + +from lcfs.db import dependencies +from lcfs.web.api.compliance_report.validation import ComplianceReportValidation +from lcfs.web.core.decorators import view_handler +from lcfs.web.api.notional_transfer.services import NotionalTransferServices +from lcfs.web.api.notional_transfer.schema import ( + NotionalTransferCreateSchema, + NotionalTransferSchema, + NotionalTransfersSchema, + NotionalTransferTableOptionsSchema, + DeleteNotionalTransferResponseSchema, + PaginatedNotionalTransferRequestSchema, + NotionalTransfersAllSchema, +) +from lcfs.web.api.base import ComplianceReportRequestSchema, PaginationRequestSchema +from lcfs.web.api.notional_transfer.validation import NotionalTransferValidation +from lcfs.db.models.user.Role import RoleEnum + +router = APIRouter() +logger = structlog.get_logger(__name__) +get_async_db = dependencies.get_async_db_session + + +@router.get( + "/table-options", + response_model=NotionalTransferTableOptionsSchema, + status_code=status.HTTP_200_OK, +) +@view_handler(["*"]) +@cache(expire=60 * 60 * 24) # cache for 24 hours +async def get_table_options( + request: Request, + service: NotionalTransferServices = Depends(), +): + """Endpoint to retrieve table options related to notional transfers""" + return await service.get_table_options() + + +@router.post( + "/list-all", + response_model=NotionalTransfersAllSchema, + status_code=status.HTTP_200_OK, +) +@view_handler(["*"]) +async def get_notional_transfers( 
+ request: Request, + request_data: ComplianceReportRequestSchema = Body(...), + response: Response = None, + service: NotionalTransferServices = Depends(), + report_validate: ComplianceReportValidation = Depends(), +): + """Endpoint to get list of notional transfers for a compliance report""" + await report_validate.validate_organization_access( + request_data.compliance_report_id + ) + return await service.get_notional_transfers(request_data.compliance_report_id) + + +@router.post( + "/list", + response_model=NotionalTransfersSchema, + status_code=status.HTTP_200_OK, +) +@view_handler(["*"]) +async def get_notional_transfers_paginated( + request: Request, + request_data: PaginatedNotionalTransferRequestSchema = Body(...), + service: NotionalTransferServices = Depends(), + report_validate: ComplianceReportValidation = Depends(), +) -> NotionalTransfersSchema: + pagination = PaginationRequestSchema( + page=request_data.page, + size=request_data.size, + sort_orders=request_data.sort_orders, + filters=request_data.filters, + ) + await report_validate.validate_organization_access( + request_data.compliance_report_id + ) + compliance_report_id = request_data.compliance_report_id + return await service.get_notional_transfers_paginated( + pagination, compliance_report_id + ) + + +@router.get("/{notional_transfer_id}", status_code=status.HTTP_200_OK) +@view_handler(["*"]) +async def get_notional_transfer( + request: Request, + notional_transfer_id: int, + service: NotionalTransferServices = Depends(), + report_validate: ComplianceReportValidation = Depends(), +) -> NotionalTransferSchema: + notional_transfer = await service.get_notional_transfer(notional_transfer_id) + if not notional_transfer: + raise HTTPException(status_code=404, detail="Notional transfer not found") + await report_validate.validate_organization_access( + notional_transfer.compliance_report_id + ) + return notional_transfer + + +@router.post( + "/save", + response_model=Union[NotionalTransferSchema, DeleteNotionalTransferResponseSchema], + status_code=status.HTTP_200_OK, +) +@view_handler([RoleEnum.SUPPLIER]) +async def save_notional_transfer_row( + request: Request, + request_data: NotionalTransferCreateSchema = Body(...), + service: NotionalTransferServices = Depends(), + validate: NotionalTransferValidation = Depends(), + report_validate: ComplianceReportValidation = Depends(), +): + """Endpoint to save a single notional transfer row""" + compliance_report_id = request_data.compliance_report_id + notional_transfer_id: Optional[int] = request_data.notional_transfer_id + + await report_validate.validate_organization_access(compliance_report_id) + + # Determine user type for record creation + current_user_type = request.user.user_type + if not current_user_type: + raise HTTPException( + status_code=403, detail="User does not have the required role." 
+ ) + + if request_data.deleted: + # Delete existing notional transfer + await validate.validate_compliance_report_id( + compliance_report_id, [request_data] + ) + return await service.delete_notional_transfer(request_data, current_user_type) + elif notional_transfer_id: + # Update existing notional transfer + await validate.validate_compliance_report_id( + compliance_report_id, [request_data] + ) + return await service.update_notional_transfer(request_data, current_user_type) + else: + # Create new notional transfer + await validate.validate_compliance_report_id( + compliance_report_id, [request_data] + ) + return await service.create_notional_transfer(request_data, current_user_type) diff --git a/backend/lcfs/web/api/organization/__init__.py b/backend/lcfs/web/api/organization/__init__.py index da7a032bb..b8c7d865f 100644 --- a/backend/lcfs/web/api/organization/__init__.py +++ b/backend/lcfs/web/api/organization/__init__.py @@ -1,4 +1,5 @@ -"""User API.""" +"""Organization API.""" + from lcfs.web.api.organization.views import router __all__ = ["router"] diff --git a/backend/lcfs/web/api/organization/schema.py b/backend/lcfs/web/api/organization/schema.py deleted file mode 100644 index 6cc49c682..000000000 --- a/backend/lcfs/web/api/organization/schema.py +++ /dev/null @@ -1,70 +0,0 @@ -from typing import Optional, List -from pydantic import BaseModel, EmailStr - - -class OrganizationBase(BaseModel): - name: str - status: int - type: int - - - -class OrganizationAttorneyAddressBase(BaseModel): - name: str - street_address: str - address_other: str - city: str - province_state: str - country: str - postalCode_zipCode: str - -class OrganizationAddressBase(BaseModel): - name: str - street_address: str - address_other: str - city: str - province_state: str - country: str - postalCode_zipCode: str - -class OrganizationAddressCreate(OrganizationAddressBase): - pass - -class OrganizationAttorneyAddressCreate(OrganizationAttorneyAddressBase): - pass - -class OrganizationCreate(OrganizationBase): - address: OrganizationAddressCreate - attorney_address: OrganizationAttorneyAddressCreate - -class Organization(OrganizationBase): - organization_id: int - -class OrganizationSummary(BaseModel): - organization_id: int - name: str - - class Config: - from_attributes = True - -class OrganizationUpdate(BaseModel): - name: Optional[str] - status: Optional[int] - type: Optional[int] - address: Optional[OrganizationAddressCreate] - attorney_address: Optional[OrganizationAttorneyAddressCreate] - -class OrganizationAddress(OrganizationAddressBase): - organization_id: int - -class OrganizationAttorneyAddress(OrganizationAttorneyAddressBase): - organization_id: int - -class OrganizationUser(BaseModel): - username: str - email: EmailStr - display_name: str - title: Optional[str] = None - phone: Optional[str] = None - mobile_phone: Optional[str] = None - user_roles: Optional[List[object]] = None diff --git a/backend/lcfs/web/api/organization/services.py b/backend/lcfs/web/api/organization/services.py new file mode 100644 index 000000000..5ebf3573d --- /dev/null +++ b/backend/lcfs/web/api/organization/services.py @@ -0,0 +1,186 @@ +import math +import structlog +from typing import List + +from fastapi import Depends, Request +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import and_ + +from lcfs.utils.constants import id_prefix_to_transaction_type_map +from lcfs.web.api.user.repo import UserRepository +from lcfs.db.dependencies import get_async_db_session +from lcfs.web.core.decorators import 
service_handler
+from lcfs.web.api.base import (
+    FilterModel,
+    PaginationRequestSchema,
+    PaginationResponseSchema,
+    apply_filter_conditions,
+    get_field_for_filter,
+    validate_pagination,
+)
+from lcfs.web.exception.exceptions import DataNotFoundException
+from lcfs.db.models.transaction.TransactionView import TransactionView
+from lcfs.web.api.transaction.schema import TransactionViewSchema
+from lcfs.web.api.transaction.repo import TransactionRepository
+from lcfs.web.api.user.schema import UsersSchema
+
+logger = structlog.get_logger(__name__)
+
+
+class OrganizationService:
+    def __init__(
+        self,
+        request: Request = None,
+        user_repo: UserRepository = Depends(UserRepository),
+        transaction_repo: TransactionRepository = Depends(TransactionRepository),
+        session: AsyncSession = Depends(get_async_db_session),
+    ) -> None:
+        self.transaction_repo = transaction_repo
+        self.user_repo = user_repo
+        self.request = request
+        self.session = session
+
+    def apply_transaction_filters(self, pagination, conditions):
+        """
+        Apply filters to the transactions query.
+
+        Args:
+            pagination (PaginationRequestSchema): The pagination object containing page and size information.
+            conditions (List[Condition]): The list of conditions to apply.
+
+        Returns:
+            None: The conditions list is mutated in place.
+        """
+        for filter in pagination.filters:
+            if filter.field == "transaction_id":
+                filter_value = filter.filter.upper()
+                for (
+                    prefix,
+                    transaction_type,
+                ) in id_prefix_to_transaction_type_map.items():
+                    if filter_value.startswith(prefix):
+                        numeric_part = filter_value[len(prefix) :]
+                        if numeric_part:
+                            if numeric_part.isdigit():
+                                conditions.append(
+                                    and_(
+                                        TransactionView.transaction_type
+                                        == transaction_type,
+                                        TransactionView.transaction_id
+                                        == int(numeric_part),
+                                    )
+                                )
+                            else:
+                                # Invalid numeric part, add a condition that will never match
+                                conditions.append(False)
+                        else:
+                            # Only prefix provided, filter by transaction type only
+                            conditions.append(
+                                TransactionView.transaction_type == transaction_type
+                            )
+                        break
+                else:
+                    # If no prefix matches, treat the whole value as a potential transaction_id
+                    if filter_value.isdigit():
+                        conditions.append(
+                            TransactionView.transaction_id == int(filter_value)
+                        )
+                    else:
+                        # Invalid input, add a condition that will never match
+                        conditions.append(False)
+            else:
+                # Handle all other filters generically
+                field = get_field_for_filter(TransactionView, filter.field)
+                # Check if the filter is of type "date"
+                if filter.filter_type == "date":
+                    if filter.type == "inRange":
+                        filter_value = [filter.date_from, filter.date_to]
+                    else:
+                        filter_value = filter.date_from
+                else:
+                    # For non-date filters, use the standard filter value
+                    filter_value = filter.filter
+
+                if field.description == "transaction_type":
+                    filter_value = filter_value.replace(" ", "").lower()
+
+                filter_option = filter.type
+                filter_type = filter.filter_type
+                conditions.append(
+                    apply_filter_conditions(
+                        field, filter_value, filter_option, filter_type
+                    )
+                )
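For reference, a self-contained sketch of the `transaction_id` prefix parsing performed above. The prefix values ("CT", "IA") are assumptions for illustration; the real mapping is `id_prefix_to_transaction_type_map` from `lcfs.utils.constants`, whose contents are not shown in this diff.

```python
from typing import Optional, Tuple

ID_PREFIX_MAP = {"CT": "Transfer", "IA": "InitiativeAgreement"}  # assumed values


def parse_transaction_filter(value: str) -> Optional[Tuple[Optional[str], Optional[int]]]:
    """Return (transaction_type, transaction_id); None means no possible match."""
    value = value.upper()
    for prefix, txn_type in ID_PREFIX_MAP.items():
        if value.startswith(prefix):
            numeric_part = value[len(prefix):]
            if not numeric_part:
                return (txn_type, None)  # prefix only: filter by type alone
            if numeric_part.isdigit():
                return (txn_type, int(numeric_part))
            return None  # e.g. "CTabc": can never match
    if value.isdigit():
        return (None, int(value))  # bare numeric id: any transaction type
    return None


assert parse_transaction_filter("ct123") == ("Transfer", 123)
assert parse_transaction_filter("IA") == ("InitiativeAgreement", None)
assert parse_transaction_filter("xyz") is None
```

+    @service_handler
+    async def get_organization_users_list(
+        self, organization_id: int, status: str, pagination: PaginationRequestSchema
+    ) -> UsersSchema:
+        """
+        Get all users for the organization
+        """
+        # Add organization and status to the filters
+        if (pagination.filters is None) or (len(pagination.filters) == 0):
+            pagination.filters.append(
+                FilterModel(
+                    filter_type="text", field="is_active", type="equals", filter=status
+                )
+            )
+        pagination.filters.append(
+            FilterModel(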
filter_type="number", + field="organization_id", + type="equals", + filter=organization_id, + ) + ) + users, total_count = await self.user_repo.get_users_paginated( + pagination=pagination + ) + return UsersSchema( + pagination=PaginationResponseSchema( + total=total_count, + page=pagination.page, + size=pagination.size, + total_pages=math.ceil(total_count / pagination.size), + ), + users=users, + ) + + @service_handler + async def get_transactions_paginated( + self, pagination: PaginationRequestSchema = {}, organization_id: int = None + ) -> List[TransactionViewSchema]: + """ + Fetch transactions with filters, sorting, and pagination. + """ + conditions = [] + pagination = validate_pagination(pagination) + if pagination.filters and len(pagination.filters) > 0: + self.apply_transaction_filters(pagination, conditions) + + offset = (pagination.page - 1) * pagination.size if pagination.page > 0 else 0 + limit = pagination.size + + transactions, total_count = ( + await self.transaction_repo.get_transactions_paginated( + offset, limit, conditions, pagination.sort_orders, organization_id + ) + ) + + if not transactions: + raise DataNotFoundException("Transactions not found") + + return { + "transactions": [ + TransactionViewSchema.model_validate(transaction) + for transaction in transactions + ], + "pagination": PaginationResponseSchema( + total=total_count, + page=pagination.page, + size=pagination.size, + total_pages=math.ceil(total_count / pagination.size), + ), + } diff --git a/backend/lcfs/web/api/organization/validation.py b/backend/lcfs/web/api/organization/validation.py new file mode 100644 index 000000000..cc0e1b2bc --- /dev/null +++ b/backend/lcfs/web/api/organization/validation.py @@ -0,0 +1,112 @@ +from fastapi import Depends, HTTPException, Request +from starlette import status + +from lcfs.db.models.transfer.TransferStatus import TransferStatusEnum +from lcfs.web.api.organizations.repo import OrganizationsRepository +from lcfs.web.api.transaction.repo import TransactionRepository +from lcfs.web.api.transfer.schema import TransferCreateSchema +from lcfs.web.api.compliance_report.schema import ComplianceReportCreateSchema +from lcfs.web.api.compliance_report.repo import ComplianceReportRepository +from lcfs.utils.constants import LCFS_Constants + + +class OrganizationValidation: + def __init__( + self, + request: Request = None, + org_repo: OrganizationsRepository = Depends(OrganizationsRepository), + transaction_repo: TransactionRepository = Depends(TransactionRepository), + report_repo: ComplianceReportRepository = Depends(ComplianceReportRepository), + ): + self.org_repo = org_repo + self.request = request + self.transaction_repo = transaction_repo + self.report_repo = report_repo + + async def check_available_balance(self, organization_id, quantity): + available_balance = await self.transaction_repo.calculate_available_balance( + organization_id + ) + if available_balance < quantity: + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail=f"Organization does not have enough compliance units to create this transfer.", + ) + + async def create_transfer( + self, organization_id, transfer_create: TransferCreateSchema + ): + is_to_org_registered = await self.org_repo.is_registered_for_transfer( + transfer_create.to_organization_id + ) + if ( + ( + transfer_create.from_organization_id != organization_id + and transfer_create.current_status + not in LCFS_Constants.FROM_ORG_TRANSFER_STATUSES # ensure the allowed statuses for creating transfer + ) + or 
self.request.user.organization.org_status.organization_status_id != 2 + or not is_to_org_registered + ): # ensure the organizations are registered for transfer + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="Validation for authorization failed.", + ) + # Before creation, check for available balance + await self.check_available_balance(organization_id, transfer_create.quantity) + return + + async def update_transfer( + self, organization_id, transfer_create: TransferCreateSchema + ): + # Before updating, check for available balance + valid_status = ( + transfer_create.current_status in LCFS_Constants.FROM_ORG_TRANSFER_STATUSES + ) + + await self.check_available_balance(organization_id, transfer_create.quantity) + if ( + transfer_create.from_organization_id == organization_id and valid_status + ) or ( # status changes allowed for from-organization + transfer_create.to_organization_id == organization_id + and transfer_create.current_status + in LCFS_Constants.TO_ORG_TRANSFER_STATUSES + ): # status changes allowed for to-organization + return + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="Validation for authorization failed.", + ) + + async def create_compliance_report( + self, organization_id, report_data: ComplianceReportCreateSchema + ): + if self.request.user.organization.organization_id != organization_id: + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="Validation for authorization failed.", + ) + # Before creating ensure that there isn't any existing report for the given compliance period. + period = await self.report_repo.get_compliance_period( + report_data.compliance_period + ) + if not period: + raise HTTPException(status_code=404, detail="Compliance period not found") + is_report_present = await self.report_repo.get_compliance_report_by_period( + organization_id, report_data.compliance_period + ) + if is_report_present: + raise HTTPException( + status_code=status.HTTP_409_CONFLICT, + detail="Duplicate report for the compliance period", + ) + return + + # async def save_final_supply_equipment_rows( + # self, organization_id, report_id, fse_list + # ): + # report = await self.report_repo.get_compliance_report_by_id(report_id) + # if not report: + # raise HTTPException(status_code=404, detail="Report not found") + # # TODO: validate each row data + # return diff --git a/backend/lcfs/web/api/organization/views.py b/backend/lcfs/web/api/organization/views.py index 793a7af91..a33cdd984 100644 --- a/backend/lcfs/web/api/organization/views.py +++ b/backend/lcfs/web/api/organization/views.py @@ -1,75 +1,313 @@ -from logging import getLogger -from fastapi import APIRouter, Depends, HTTPException, status +import structlog + +from fastapi import ( + APIRouter, + Body, + Depends, + Query, + Response, + status, + Request, +) +from fastapi.responses import StreamingResponse +from lcfs.web.api.compliance_report.validation import ComplianceReportValidation +from starlette import status from typing import List -from sqlalchemy.ext.asyncio import AsyncSession -from starlette.responses import Response -from lcfs.db.models import UserProfile from lcfs.db import dependencies -from lcfs.web.api.organization.schema import Organization, OrganizationCreate, OrganizationUpdate, OrganizationUser +from lcfs.web.core.decorators import view_handler +from lcfs.web.api.base import PaginationRequestSchema +from lcfs.web.api.user.schema import UserBaseSchema, UserCreateSchema, UsersSchema +from lcfs.db.models.user.UserProfile import 
UserProfile +from lcfs.db.models.transfer.Transfer import Transfer +from lcfs.db.models.transfer.TransferStatus import TransferStatusEnum +from lcfs.web.api.transfer.schema import ( + TransferCreateSchema, + TransferSchema, +) +from lcfs.web.api.transaction.schema import TransactionListSchema +from lcfs.web.api.transaction.services import TransactionsService +from lcfs.web.api.user.services import UserServices +from lcfs.web.api.compliance_report.schema import ( + ComplianceReportBaseSchema, + ComplianceReportCreateSchema, + ComplianceReportListSchema, + CompliancePeriodSchema, + ChainedComplianceReportSchema, +) +from lcfs.web.api.compliance_report.services import ComplianceReportServices +from .services import OrganizationService +from .validation import OrganizationValidation +from lcfs.web.api.transfer.services import TransferServices +from lcfs.db.models.user.Role import RoleEnum -logger = getLogger("organization") +logger = structlog.get_logger(__name__) router = APIRouter() get_async_db = dependencies.get_async_db_session -@router.post("/createorganization/", response_model=OrganizationCreate, status_code=status.HTTP_201_CREATED) -async def create_organization(organization: OrganizationCreate, db: AsyncSession = Depends(get_async_db)): - try: - db.add(organization) - await db.commit() - await db.refresh(organization) - - return organization - except Exception as e: - logger.error(f"Internal Server Error: {str(e)}") - raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="Internal Server Error") - -@router.put("/organizations/{organization_id}", response_model=Organization) -async def update_organization(organization_id: int, organization_data: OrganizationUpdate, db: AsyncSession = Depends(get_async_db)): - try: - async with db.begin(): - organization = await db.execute(Organization).filter(Organization.organization_id == organization_id).first() - - if not organization: - raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Organization not found") - - # Update the organization fields with new data - for key, value in organization_data.dict().items(): - setattr(organization, key, value) - - await db.commit() - await db.refresh(organization) - - return organization - except Exception as e: - raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"Internal Server Error: {str(e)}") - -@router.get("/organizations/", response_model=list[Organization]) -async def list_organizations(db: AsyncSession = Depends(get_async_db), response: Response = None): - try: - async with db.begin(): - organizations = await db.execute(Organization).all() - if not organizations: - logger.error("Error getting organizations") - raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="No organizations found") - - return organizations - except Exception as e: - logger.error(f"Internal Server Error: {str(e)}") - raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="Internal Server Error") - - -@router.get("/organizations/{organization_id}/users/", response_model=List[OrganizationUser]) -async def get_users_for_organization(organization_id: int, db: AsyncSession = Depends(get_async_db)): - try: - async with db.begin(): - users = await db.execute(UserProfile).filter(UserProfile.organization_id == organization_id).all() - - if not users: - raise HTTPException(status_code=404, detail="No users found for the organization") - - return users - except Exception as e: - raise HTTPException(status_code=500, detail=f"Internal 
Server Error: {str(e)}") +@router.post( + "/{organization_id}/users/list", + response_model=UsersSchema, + status_code=status.HTTP_200_OK, +) +@view_handler([RoleEnum.SUPPLIER, RoleEnum.GOVERNMENT]) +async def get_org_users( + request: Request, + organization_id: int, + status: str = Query(default="Active", description="Active or Inactive users list"), + pagination: PaginationRequestSchema = Body(..., embed=False), + response: Response = None, + org_service: OrganizationService = Depends(), +): + """ + Enpoint to get information of all users related to organization for ag-grid in the UI + + Pagination Request Schema: + - page: offset/ page indicates the pagination of rows for the users list + - size: size indicates the number of rows per page for the users list + - sortOrders: sortOrders is an array of objects that specify the sorting criteria for the users list. + Each object has the following properties: + - field: the name of the field to sort by + - direction: the sorting direction ('asc' or 'desc') + - filterModel: filterModel is an array of objects that specifies the filtering criteria for the users list. + It has the following properties: + - filter_type: the type of filtering to perform ('text', 'number', 'date', 'boolean') + - type: the type of filter to apply ('equals', 'notEquals', 'contains', 'notContains', 'startsWith', 'endsWith') + - filter: the actual filter value + - field: Database Field that needs filtering. + """ + return await org_service.get_organization_users_list( + organization_id, status, pagination + ) + + +@router.get( + "/{organization_id}/users/{user_id}", + response_model=UserBaseSchema, + status_code=status.HTTP_200_OK, +) +@view_handler([RoleEnum.SUPPLIER]) +async def get_user_by_id( + request: Request, + organization_id: int, + response: Response = None, + user_id: int = None, + user_service: UserServices = Depends(), +) -> UserBaseSchema: + """ + Endpoint to get information of a user by ID + This endpoint returns the information of a user by ID, including their roles and organization. + """ + return await user_service.get_user_by_id(user_id) + + +@router.post( + "/{organization_id}/users", response_model=None, status_code=status.HTTP_201_CREATED +) +@view_handler([RoleEnum.SUPPLIER]) +async def create_user( + request: Request, + organization_id: int, + response: Response = None, + user_create: UserCreateSchema = ..., + user_service: UserServices = Depends(), +) -> UserProfile: + """ + Endpoint to create a new user + This endpoint creates a new user and returns the information of the created user. + """ + user_create.organization_id = organization_id + return await user_service.create_user(user_create) + + +# TODO Check if security concern exists on user creating user in different org +@router.put( + "/{organization_id}/users/{user_id}", + response_model=UserBaseSchema, + status_code=status.HTTP_200_OK, +) +@view_handler([RoleEnum.SUPPLIER]) +async def update_user( + request: Request, + organization_id: int, + response: Response = None, + user_id: int = None, + user_create: UserCreateSchema = ..., + user_service: UserServices = Depends(), +) -> UserBaseSchema: + """ + Endpoint to update a user + This endpoint updates a user and returns the information of the updated user. 
+ """ + user_create.organization_id = organization_id + await user_service.update_user(user_create, user_id) + return await user_service.get_user_by_id(user_id) + + +@router.post( + "/transactions", + response_model=TransactionListSchema, + status_code=status.HTTP_200_OK, +) +@view_handler([RoleEnum.SUPPLIER]) +async def get_transactions_paginated_for_org( + request: Request, + pagination: PaginationRequestSchema = Body(..., embed=False), + org_service: OrganizationService = Depends(), + response: Response = None, +): + """ + Fetches a combined list of Issuances and Transfers, sorted by create_date, with pagination. + """ + organization_id = request.user.organization.organization_id + paginated_transactions = await org_service.get_transactions_paginated( + pagination, organization_id + ) + # for Organizations hide Recommended status. + for transaction in paginated_transactions["transactions"]: + if transaction.status == TransferStatusEnum.Recommended.value: + transaction.status = TransferStatusEnum.Submitted.name + return paginated_transactions + + +@router.get( + "/transactions/export", + response_class=StreamingResponse, + status_code=status.HTTP_200_OK, +) +@view_handler([RoleEnum.SUPPLIER]) +async def export_transactions_for_org( + request: Request, + format: str = Query(default="xls", description="File export format"), + txn_service: TransactionsService = Depends(), +): + """ + Endpoint to export information of all transactions for a specific organization + """ + organization_id = request.user.organization.organization_id + return await txn_service.export_transactions(format, organization_id) + + +@router.post( + "/{organization_id}/transfers", + response_model=TransferSchema, + status_code=status.HTTP_201_CREATED, +) +@view_handler([RoleEnum.SUPPLIER]) +async def create_transfer( + request: Request, + organization_id: int, + transfer_create: TransferCreateSchema = ..., + transfer_service: TransferServices = Depends(), + validate: OrganizationValidation = Depends(), +): + """ + Endpoint to create a new transfer + This endpoint creates a new transfer and returns the information of the created transfer. + """ + await validate.create_transfer(organization_id, transfer_create) + return await transfer_service.create_transfer(transfer_create) + + +@router.put( + "/{organization_id}/transfers/{transfer_id}", + response_model=TransferSchema, + status_code=status.HTTP_201_CREATED, +) +@view_handler([RoleEnum.SUPPLIER]) +async def update_transfer( + request: Request, + organization_id: int, + transfer_id: int, + transfer_create: TransferCreateSchema = ..., + transfer_service: TransferServices = Depends(), + validate: OrganizationValidation = Depends(), +) -> Transfer: + """ + Endpoint to create a new transfer + This endpoint creates a new transfer and returns the information of the created transfer. 
+ """ + await validate.update_transfer(organization_id, transfer_create) + transfer_create.transfer_id = transfer_id + return await transfer_service.update_transfer(transfer_create) + + +@router.post( + "/{organization_id}/reports", + response_model=ComplianceReportBaseSchema, + status_code=status.HTTP_201_CREATED, +) +@view_handler([RoleEnum.SUPPLIER]) +async def create_compliance_report( + request: Request, + organization_id: int, + report_data: ComplianceReportCreateSchema = ..., + report_service: ComplianceReportServices = Depends(), + validate: OrganizationValidation = Depends(), +): + await validate.create_compliance_report(organization_id, report_data) + return await report_service.create_compliance_report( + organization_id, report_data, request.user + ) + + +@router.post( + "/{organization_id}/reports/list", + response_model=ComplianceReportListSchema, + status_code=status.HTTP_200_OK, +) +@view_handler([RoleEnum.SUPPLIER]) +async def get_compliance_reports( + request: Request, + organization_id: int, + pagination: PaginationRequestSchema = Body(..., embed=False), + report_service: ComplianceReportServices = Depends(), +) -> ComplianceReportListSchema: + organization_id = request.user.organization.organization_id + return await report_service.get_compliance_reports_paginated( + pagination, organization_id, bceid_user=True + ) + + +@router.get( + "/{organization_id}/reports/reported-years", + response_model=List[CompliancePeriodSchema], + status_code=status.HTTP_200_OK, +) +@view_handler([RoleEnum.SUPPLIER]) +async def get_all_org_reported_years( + request: Request, + organization_id: int, + report_service: ComplianceReportServices = Depends(), +) -> List[CompliancePeriodSchema]: + """ + Gets all compliance report years that an organization has reported + """ + + return await report_service.get_all_org_reported_years(organization_id) + + +@router.get( + "/{organization_id}/reports/{report_id}", + response_model=ChainedComplianceReportSchema, + status_code=status.HTTP_200_OK, +) +@view_handler([RoleEnum.SUPPLIER]) +async def get_compliance_report_by_id( + request: Request, + organization_id: int, + response: Response = None, + report_id: int = None, + report_service: ComplianceReportServices = Depends(), + report_validate: ComplianceReportValidation = Depends(), +) -> ChainedComplianceReportSchema: + """ + Endpoint to get information of a user by ID + This endpoint returns the information of a user by ID, including their roles and organization. 
+ """ + await report_validate.validate_organization_access(report_id) + return await report_service.get_compliance_report_by_id( + report_id, apply_masking=True, get_chain=True + ) diff --git a/backend/lcfs/web/api/organizations/__init__.py b/backend/lcfs/web/api/organizations/__init__.py new file mode 100644 index 000000000..c2070d7fb --- /dev/null +++ b/backend/lcfs/web/api/organizations/__init__.py @@ -0,0 +1,5 @@ +"""Organizations API.""" + +from lcfs.web.api.organizations.views import router + +__all__ = ["router"] diff --git a/backend/lcfs/web/api/organizations/repo.py b/backend/lcfs/web/api/organizations/repo.py new file mode 100644 index 000000000..f687f326e --- /dev/null +++ b/backend/lcfs/web/api/organizations/repo.py @@ -0,0 +1,297 @@ +import structlog +from typing import List + +from fastapi import Depends +from sqlalchemy.orm import joinedload +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import and_, func, select, asc, desc, distinct, or_ + +from lcfs.db.dependencies import get_async_db_session +from lcfs.web.core.decorators import repo_handler +from lcfs.db.models.organization.Organization import Organization +from lcfs.db.models.organization.OrganizationAddress import OrganizationAddress +from lcfs.db.models.organization.OrganizationAttorneyAddress import ( + OrganizationAttorneyAddress, +) +from lcfs.db.models.organization.OrganizationStatus import ( + OrgStatusEnum, + OrganizationStatus, +) +from lcfs.db.models.organization.OrganizationType import OrganizationType +from lcfs.web.exception.exceptions import DataNotFoundException + +from .schema import ( + OrganizationSchema, + OrganizationStatusSchema, + OrganizationTypeSchema, + OrganizationResponseSchema, +) + + +logger = structlog.get_logger(__name__) + + +class OrganizationsRepository: + def __init__(self, db: AsyncSession = Depends(get_async_db_session)): + self.db = db + + @repo_handler + async def get_organizations(self) -> List[Organization]: + """ + Fetch all organizations from the database + """ + result = await self.db.execute( + select(Organization) + .options(joinedload(Organization.org_status)) + .order_by(Organization.organization_id) + ) + return result.scalars().all() + + @repo_handler + async def create_organization(self, org_model: Organization): + """ + save an organization in the database + """ + + self.db.add(org_model) + await self.db.flush() + await self.db.refresh(org_model) + return OrganizationResponseSchema.model_validate(org_model) + + @repo_handler + async def get_organization(self, organization_id: int) -> Organization: + """ + Fetch a single organization by organization id from the database + """ + query = ( + select(Organization) + .options( + joinedload(Organization.org_status), + joinedload(Organization.org_address), + joinedload(Organization.org_attorney_address), + ) + .where(Organization.organization_id == organization_id) + ) + return await self.db.scalar(query) + + + @repo_handler + async def update_organization(self, organization: Organization) -> Organization: + """ + Update an existing organization in the database + """ + organization = await self.db.merge(organization) + await self.db.flush() + await self.db.refresh(organization) + + return OrganizationResponseSchema.model_validate(organization) + + + @repo_handler + async def get_organization_lite(self, organization_id: int) -> Organization: + """ + Fetch a single organization by organization id from the database without related tables + """ + return await self.db.scalar( + 
select(Organization).where(Organization.organization_id == organization_id) + ) + + @repo_handler + async def get_organizations_paginated(self, offset, limit, conditions, pagination): + """ + Fetch all organizations. returns pagination data + """ + query = ( + select(Organization) + .join( + OrganizationStatus, + Organization.organization_status_id + == OrganizationStatus.organization_status_id, + ) + .options( + joinedload(Organization.org_type), + joinedload(Organization.org_status), + ) + .where(and_(*conditions)) + ) + count_query = await self.db.execute( + select(func.count(distinct(Organization.organization_id))) + .select_from(Organization) + .join( + OrganizationStatus, + Organization.organization_status_id + == OrganizationStatus.organization_status_id, + ) + .where(and_(*conditions)) + ) + total_count = count_query.unique().scalar_one_or_none() + # Sort the query results + for order in pagination.sort_orders: + sort_method = asc if order.direction == "asc" else desc + query = query.order_by( + sort_method(order.field if order.field != "status" else "description") + ) + + results = await self.db.execute(query.offset(offset).limit(limit)) + organizations = results.scalars().all() + + return [ + OrganizationSchema.model_validate(organization) + for organization in organizations + ], total_count + + @repo_handler + async def get_organization_statuses(self) -> List[OrganizationStatusSchema]: + """ + Get all available statuses for organizations from the database. + + Returns: + List[OrganizationStatusSchema]: A list of OrganizationStatusSchema objects containing the basic organization status details. + """ + query = select(OrganizationStatus).distinct() + status_results = await self.db.execute(query) + results = status_results.scalars().all() + return [OrganizationStatusSchema.model_validate(status) for status in results] + + @repo_handler + async def get_organization_types(self) -> List[OrganizationTypeSchema]: + """ + Get all available types for organizations from the database. + """ + query = select(OrganizationType).distinct() + result = await self.db.execute(query) + + return result.scalars().all() + + @repo_handler + async def get_organization_names(self, conditions=None, order_by=("name", "asc")): + """ + Fetches organization names and details based on provided conditions and dynamic ordering. + + Parameters: + conditions (list): SQLAlchemy conditions to filter the query. + order_by (tuple): A tuple containing the field name as the first element and the sorting direction ('asc' or 'desc') + as the second element. Ensures the field exists on the model to prevent errors. + + Returns: + List of dictionaries with organization details including ID, names, balances, and status. 
+ """ + query = select(Organization).join(OrganizationStatus) + + if conditions: + query = query.filter(*conditions) + + # Apply dynamic ordering + if order_by: + field_name, direction = order_by + if hasattr(Organization, field_name): # Ensure the field is valid + order_function = asc if direction.lower() == "asc" else desc + query = query.order_by( + order_function(getattr(Organization, field_name)) + ) + else: + raise ValueError(f"Invalid ordering field specified: {field_name}") + + result = await self.db.execute(query) + organizations = result.scalars().all() + + return [ + { + "organization_id": org.organization_id, + "name": org.name, + "operating_name": org.operating_name, + "total_balance": org.total_balance, + "reserved_balance": org.reserved_balance, + } + for org in organizations + ] + + @repo_handler + async def get_externally_registered_organizations(self, conditions): + """ + Get all externally registered organizations from the database. + """ + + query = ( + select(Organization) + .where(and_(*conditions)) + .options( + joinedload(Organization.org_type), + joinedload(Organization.org_status), + ) + .order_by(Organization.name) + ) + + # Execute the query + results = await self.db.execute(query) + return results.scalars().all() + + @repo_handler + async def get_organization_address(self, organization_address_id: int): + return await self.db.scalar( + select(OrganizationAddress).where( + OrganizationAddress.organization_address_id == organization_address_id + ) + ) + + @repo_handler + async def get_organization_attorney_address( + self, organization_attorney_address_id: int + ): + return await self.db.scalar( + select(OrganizationAttorneyAddress).where( + OrganizationAttorneyAddress.organization_attorney_address_id + == organization_attorney_address_id + ) + ) + + @repo_handler + async def is_registered_for_transfer(self, organization_id: int): + """ + Check if an organization is registered in the database. + """ + result = await self.db.scalar( + select(Organization.organization_id).where( + and_( + Organization.organization_id == organization_id, + OrganizationStatus.status == OrgStatusEnum.Registered, + ) + ) + ) + return result is not None + + @repo_handler + async def search_organizations_by_name( + self, search_query: str + ) -> List[Organization]: + """ + Search for organizations based on a query string. + Return exact match first if found, followed by partial matches. 
+ """ + query = ( + select(Organization) + .options(joinedload(Organization.org_address)) + .filter( + or_( + Organization.name.ilike(f"%{search_query}%"), + Organization.operating_name.ilike(f"%{search_query}%"), + ) + ) + .order_by(Organization.name) + .limit(10) + ) + + result = await self.db.execute(query) + organizations = result.scalars().all() + + # Separate exact matches and partial matches + exact_matches = [ + org + for org in organizations + if org.name.lower() == search_query.lower() + or org.operating_name.lower() == search_query.lower() + ] + partial_matches = [org for org in organizations if org not in exact_matches] + + # Return exact matches first, followed by partial matches + return exact_matches + partial_matches diff --git a/backend/lcfs/web/api/organizations/schema.py b/backend/lcfs/web/api/organizations/schema.py new file mode 100644 index 000000000..fc7f64b47 --- /dev/null +++ b/backend/lcfs/web/api/organizations/schema.py @@ -0,0 +1,209 @@ +from enum import Enum +from typing import List, Optional + +from lcfs.web.api.base import BaseSchema + +from lcfs.web.api.base import PaginationResponseSchema + + +# -------------------------------------- +# Base Configuration +# -------------------------------------- +class BaseConfig: + from_attributes = True + + +# -------------------------------------- +# Organization Type +# -------------------------------------- + + +class OrganizationTypeEnum(str, Enum): + FUEL_SUPPLIER = "Fuel Supplier" + ELECTRICITY_SUPPLIER = "Electricity Supplier" + BROKER = "Broker" + UTILITIES = "Utilities (local or public)" + + +class OrganizationTypeBase(BaseSchema): + organization_type_id: int + org_type: OrganizationTypeEnum + description: Optional[str] = None + + +class OrganizationTypeSchema(OrganizationTypeBase): + pass + + +# -------------------------------------- +# Organization Status +# -------------------------------------- + + +class OrganizationStatusEnum(str, Enum): + UNREGISTERED = "Unregistered" + REGISTERED = "Registered" + SUSPENDED = "Suspended" + CANCELED = "Canceled" + + +class OrganizationStatusBase(BaseSchema): + organization_status_id: int + status: OrganizationStatusEnum + description: Optional[str] = None + + +class OrganizationStatusSchema(OrganizationStatusBase): + pass + + +# -------------------------------------- +# Address Base Model +# Unified Address Model for 'Organization Address' and 'Organization Attorney Address' +# -------------------------------------- + + +class AddressBase(BaseSchema): + name: str + street_address: str + address_other: Optional[str] = None + city: str + province_state: str + country: str + postalCode_zipCode: Optional[str] = None + + +# -------------------------------------- +# Organization Address +# -------------------------------------- + + +class OrganizationAddressBase(AddressBase): + + # TODO[pydantic]: The `Config` class inherits from another class, please create the `model_config` manually. + # Check https://docs.pydantic.dev/dev-v2/migration/#changes-to-config for more information. + class Config(BaseConfig): + pass + + +class OrganizationAddressSchema(OrganizationAddressBase): + organization_id: Optional[int] = None + + +class OrganizationAddressCreateSchema(OrganizationAddressBase): + pass + + +# -------------------------------------- +# Organization Attorney Address +# -------------------------------------- + + +class OrganizationAttorneyAddressBase(AddressBase): + + # TODO[pydantic]: The `Config` class inherits from another class, please create the `model_config` manually. 
+    # Check https://docs.pydantic.dev/dev-v2/migration/#changes-to-config for more information.
+    class Config(BaseConfig):
+        pass
+
+
+class OrganizationAttorneyAddressSchema(OrganizationAttorneyAddressBase):
+    organization_id: Optional[int] = None
+
+
+class OrganizationAttorneyAddressCreateSchema(OrganizationAddressBase):
+    pass
+
+
+# --------------------------------------
+# Organization
+# --------------------------------------
+
+
+class OrganizationBase(BaseSchema):
+    organization_id: Optional[int] = None
+    name: str
+    operating_name: str
+    email: Optional[str] = None
+    phone: Optional[str] = None
+    edrms_record: Optional[str] = None
+    total_balance: Optional[int] = None
+    reserved_balance: Optional[int] = None
+    organization_status_id: int
+    organization_type_id: int
+
+
+class OrganizationSchema(OrganizationBase):
+    organization_address_id: Optional[int] = None
+    organization_attorney_address_id: Optional[int] = None
+    org_type: Optional[OrganizationTypeSchema] = None
+    org_status: Optional[OrganizationStatusSchema] = None
+
+
+class OrganizationListSchema(BaseSchema):
+    pagination: PaginationResponseSchema
+    organizations: List[OrganizationSchema]
+
+
+class OrganizationCreateSchema(BaseSchema):
+    name: str
+    operating_name: str
+    email: Optional[str] = None
+    phone: Optional[str] = None
+    edrms_record: Optional[str] = None
+    organization_status_id: int
+    organization_type_id: int
+    address: OrganizationAddressCreateSchema
+    attorney_address: OrganizationAttorneyAddressCreateSchema
+
+
+class OrganizationUpdateSchema(BaseSchema):
+    name: Optional[str] = None
+    operating_name: Optional[str] = None
+    email: Optional[str] = None
+    phone: Optional[str] = None
+    edrms_record: Optional[str] = None
+    organization_status_id: Optional[int] = None
+    organization_type_id: Optional[int] = None
+    address: Optional[OrganizationAddressCreateSchema] = None
+    attorney_address: Optional[OrganizationAttorneyAddressCreateSchema] = None
+
+
+class OrganizationResponseSchema(BaseSchema):
+    organization_id: int
+    name: str
+    operating_name: str
+    email: Optional[str] = None
+    phone: Optional[str] = None
+    edrms_record: Optional[str] = None
+    org_status: Optional[OrganizationStatusSchema] = None
+    org_address: Optional[OrganizationAddressSchema] = None
+    org_attorney_address: Optional[OrganizationAttorneyAddressSchema] = None
+
+
+class OrganizationSummaryResponseSchema(BaseSchema):
+    organization_id: int
+    name: Optional[str] = None
+    operating_name: Optional[str] = None
+    total_balance: Optional[int] = None
+    reserved_balance: Optional[int] = None
+    org_status: Optional[OrganizationStatusSchema] = None
+
+
+class OrganizationCreateResponseSchema(BaseSchema):
+    organization_id: int
+
+
+class OrganizationBalanceResponseSchema(BaseSchema):
+    name: str
+    registered: bool
+    organization_id: int
+    total_balance: int
+    reserved_balance: int
+
+
+class OrganizationDetailsSchema(BaseSchema):
+    name: str
+    address: Optional[str]
+    email: Optional[str]
+    phone: Optional[str]
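As a quick illustration of the create payload these schemas describe, a request body matching OrganizationCreateSchema could look like this sketch (all values invented; the status and type IDs must reference existing rows — elsewhere in this PR, status ID 2 is treated as a registered organization):

```python
# Hypothetical body for the organization create endpoint,
# shaped to match OrganizationCreateSchema above.
example_org_create = {
    "name": "Acme Fuels Ltd.",
    "operating_name": "Acme Fuels",
    "email": "info@acmefuels.example",
    "phone": "250-555-0100",
    "edrms_record": "EDRMS-0001",   # invented record number
    "organization_status_id": 2,    # assumed: 2 = Registered
    "organization_type_id": 1,      # assumed: 1 = Fuel Supplier
    "address": {
        "name": "Acme Fuels Ltd.",
        "street_address": "123 Main St",
        "address_other": "",
        "city": "Victoria",
        "province_state": "BC",
        "country": "Canada",
        "postalCode_zipCode": "V8V 1A1",
    },
    "attorney_address": {
        "name": "Acme Fuels Ltd.",
        "street_address": "123 Main St",
        "address_other": "",
        "city": "Victoria",
        "province_state": "BC",
        "country": "Canada",
        "postalCode_zipCode": "V8V 1A1",
    },
}
```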
diff --git a/backend/lcfs/web/api/organizations/services.py b/backend/lcfs/web/api/organizations/services.py
new file mode 100644
index 000000000..35c2155a3
--- /dev/null
+++ b/backend/lcfs/web/api/organizations/services.py
@@ -0,0 +1,480 @@
+import io
+import math
+from datetime import datetime
+import structlog
+from typing import List
+
+from fastapi import Depends, Request
+from fastapi.responses import StreamingResponse
+
+from lcfs.db.models.organization.Organization import Organization
+from lcfs.db.models.organization.OrganizationAddress import OrganizationAddress
+from lcfs.db.models.organization.OrganizationAttorneyAddress import (
+    OrganizationAttorneyAddress,
+)
+from lcfs.db.models.organization.OrganizationStatus import (
+    OrganizationStatus,
+    OrgStatusEnum,
+)
+from lcfs.db.models.transaction import Transaction
+from lcfs.db.models.transaction.Transaction import TransactionActionEnum
+from lcfs.services.tfrs.redis_balance import (
+    RedisBalanceService,
+)
+from lcfs.utils.spreadsheet_builder import SpreadsheetBuilder
+from lcfs.web.api.base import (
+    PaginationRequestSchema,
+    PaginationResponseSchema,
+    apply_filter_conditions,
+    get_field_for_filter,
+    validate_pagination,
+)
+from lcfs.web.api.transaction.repo import TransactionRepository
+from lcfs.web.core.decorators import service_handler
+from lcfs.web.exception.exceptions import DataNotFoundException
+from .repo import OrganizationsRepository
+from .schema import (
+    OrganizationTypeSchema,
+    OrganizationSchema,
+    OrganizationListSchema,
+    OrganizationCreateSchema,
+    OrganizationSummaryResponseSchema,
+    OrganizationDetailsSchema,
+)
+
+
+logger = structlog.get_logger(__name__)
+
+
+class OrganizationsService:
+    def __init__(
+        self,
+        request: Request = None,
+        repo: OrganizationsRepository = Depends(OrganizationsRepository),
+        transaction_repo: TransactionRepository = Depends(TransactionRepository),
+        redis_balance_service: RedisBalanceService = Depends(RedisBalanceService),
+    ) -> None:
+        self.request = request
+        self.repo = repo
+        self.transaction_repo = transaction_repo
+        self.redis_balance_service = redis_balance_service
+
+    def apply_organization_filters(self, pagination, conditions):
+        """
+        Apply filters to the organizations query.
+
+        Args:
+            pagination (PaginationRequestSchema): The pagination object containing page and size information.
+            conditions (List[Condition]): The list of conditions to apply.
+
+        Returns:
+            None: the filter conditions are appended to the provided conditions list in place.
+        """
+        for filter in pagination.filters:
+            filter_value = filter.filter
+            filter_option = filter.type
+            filter_type = filter.filter_type
+            if filter.field == "status":
+                field = get_field_for_filter(OrganizationStatus, "status")
+            else:
+                field = get_field_for_filter(Organization, filter.field)
+
+            conditions.append(
+                apply_filter_conditions(field, filter_value, filter_option, filter_type)
+            )
+
+    @service_handler
+    async def export_organizations(self) -> StreamingResponse:
+        """
+        Prepares a downloadable .xls file listing all organizations.
+        """
+        organizations = await self.repo.get_organizations()
+
+        data = [
+            [
+                organization.organization_id,
+                organization.name,
+                # TODO: Update this section with actual data retrieval
+                # once the Compliance Units models are implemented.
+ 123456, + 123456, + organization.org_status.status.value, + ] + for organization in organizations + ] + + export_format = "xls" + + # Create a spreadsheet + builder = SpreadsheetBuilder(file_format=export_format) + + builder.add_sheet( + sheet_name="Organizations", + columns=[ + "ID", + "Organization Name", + "Compliance Units", + "In Reserve", + "Registered", + ], + rows=data, + styles={"bold_headers": True}, + ) + + file_content = builder.build_spreadsheet() + + # Get the current date in YYYY-MM-DD format + current_date = datetime.now().strftime("%Y-%m-%d") + + filename = f"BC-LCFS-organizations-{current_date}.{export_format}" + headers = {"Content-Disposition": f'attachment; filename="{filename}"'} + + return StreamingResponse( + io.BytesIO(file_content), + media_type="application/vnd.ms-excel", + headers=headers, + ) + + @service_handler + async def create_organization(self, organization_data: OrganizationCreateSchema): + """handles creating an organization""" + org_address = OrganizationAddress(**organization_data.address.dict()) + org_attorney_address = OrganizationAttorneyAddress( + **organization_data.attorney_address.dict() + ) + + # Create and add organization model to the database + org_model = Organization( + name=organization_data.name, + operating_name=organization_data.operating_name, + email=organization_data.email, + phone=organization_data.phone, + edrms_record=organization_data.edrms_record, + organization_status_id=organization_data.organization_status_id, + organization_type_id=organization_data.organization_type_id, + org_address=org_address, + org_attorney_address=org_attorney_address, + ) + + return await self.repo.create_organization(org_model) + + @service_handler + async def update_organization( + self, organization_id: int, organization_data: OrganizationCreateSchema + ): + """handles updating an organization""" + + organization = await self.repo.get_organization(organization_id) + + if not organization: + raise DataNotFoundException("Organization not found") + + for key, value in organization_data.dict().items(): + if hasattr(organization, key): + setattr(organization, key, value) + + if organization.organization_address_id: + org_address = await self.repo.get_organization_address( + organization.organization_address_id + ) + + if not org_address: + raise DataNotFoundException("Organization address not found") + + for key, value in organization_data.address.dict().items(): + if hasattr(org_address, key): + setattr(org_address, key, value) + + if organization.organization_attorney_address_id: + org_attorney_address = await self.repo.get_organization_attorney_address( + organization.organization_attorney_address_id + ) + + if not org_attorney_address: + raise DataNotFoundException("Organization attorney address not found") + + for key, value in organization_data.attorney_address.dict().items(): + if hasattr(org_attorney_address, key): + setattr(org_attorney_address, key, value) + + updated_organization = await self.repo.update_organization(organization) + return updated_organization + + @service_handler + async def get_organization(self, organization_id: int): + """handles fetching an organization""" + organization = await self.repo.get_organization(organization_id) + + if organization is None: + raise DataNotFoundException("org not found") + + return organization + + @service_handler + async def get_organizations( + self, pagination: PaginationRequestSchema = {} + ) -> List[OrganizationSchema]: + """handles fetching organizations and providing pagination 
data""" + """ + Get all organizations based on the provided filters and pagination. + This method returns a list of OrganizationSchema objects. + The OrganizationSchema objects contain the basic organization details, + including the organization type, organization status, and other relevant fields. + The pagination object is used to control the number of results returned + and the page number. + The filters object is used to filter the results based on specific criteria. + The OrganizationSchema objects are returned in the order specified by the sortOrders object. + The total_count field is used to return the total number of organizations that match the filters. + The OrganizationSchema objects are returned in the order specified by the sortOrders object. + + Args: + pagination (PaginationRequestSchema, optional): The pagination object containing page and size information. Defaults to {}. + + Returns: + List[OrganizationSchema]: A list of OrganizationSchema objects containing the basic organization details. + The total_count field is used to return the total number of organizations that match the filters. + The OrganizationSchema objects are returned in the order specified by the sortOrders object. + + Raises: + Exception: If any errors occur during the query execution. + ValueError: If the provided pagination object is invalid. + """ + # TODO: Implement Redis cache wherever required + # TODO: Implement Compliance Units and In Reserve fields once the transactions model is created + # Apply filters + conditions = [] + pagination = validate_pagination(pagination) + if pagination.filters and len(pagination.filters) > 0: + try: + self.apply_organization_filters(pagination, conditions) + except Exception: + raise ValueError(f"Invalid filter provided: {pagination.filters}.") + + # Apply pagination + offset = 0 if (pagination.page < 1) else (pagination.page - 1) * pagination.size + limit = pagination.size + + organizations, total_count = await self.repo.get_organizations_paginated( + offset, limit, conditions, pagination + ) + + if not organizations: + raise DataNotFoundException("Organizations not found") + + return OrganizationListSchema( + organizations=organizations, + pagination=PaginationResponseSchema( + total=total_count, + page=pagination.page, + size=pagination.size, + total_pages=math.ceil(total_count / pagination.size), + ), + ) + + @service_handler + async def get_organization_types(self) -> List[OrganizationTypeSchema]: + """handles fetching all organization types""" + result = await self.repo.get_organization_types() + + types = [OrganizationTypeSchema.model_validate(types) for types in result] + + if len(types) == 0: + raise DataNotFoundException("No organization types found") + + return types + + @service_handler + async def get_organization_names( + self, only_registered: bool = False, order_by=("name", "asc") + ) -> List[OrganizationSummaryResponseSchema]: + """ + Fetches all organization names and their detailed information, formatted as per OrganizationSummaryResponseSchema. + + Parameters: + only_registered (bool): If true, fetches only registered organizations. + order_by (tuple): Tuple containing the field name to sort by and the direction ('asc' or 'desc'). + + Returns: + List[OrganizationSummaryResponseSchema]: List of organizations with their summary information. 
+ """ + conditions = [] + if only_registered: + conditions.append(OrganizationStatus.status == OrgStatusEnum.Registered) + + # The order_by tuple directly specifies both the sort field and direction + organization_data = await self.repo.get_organization_names(conditions, order_by) + + return [ + OrganizationSummaryResponseSchema( + organization_id=org["organization_id"], + name=org["name"], + operating_name=org["operating_name"], + total_balance=org["total_balance"], + reserved_balance=org["reserved_balance"], + ) + for org in organization_data + ] + + @service_handler + async def get_externally_registered_organizations( + self, org_id: int + ) -> List[OrganizationSummaryResponseSchema]: + """handles getting a list of organizations excluding the current organization""" + conditions = [ + Organization.org_status.has(status="Registered"), + Organization.organization_id != org_id, + ] + results = await self.repo.get_externally_registered_organizations(conditions) + + # Map the results to OrganizationSummaryResponseSchema + organizations = [ + OrganizationSummaryResponseSchema.model_validate(organization) + for organization in results + ] + + if not organizations: + raise DataNotFoundException("No externally registered organizations found") + + return organizations + + @service_handler + async def get_organization_statuses(self): + """handles fetching all organization statuses""" + statuses = await self.repo.get_organization_statuses() + + if len(statuses) == 0: + raise DataNotFoundException("No organization statuses found") + + return statuses + + @service_handler + async def calculate_total_balance(self, organization_id: int) -> int: + """ + Calculates the total balance for a given organization. + + Args: + organization_id (int): The ID of the organization. + + Returns: + int: The total balance of the organization. + """ + total_balance = await self.transaction_repo.calculate_total_balance( + organization_id + ) + return total_balance + + @service_handler + async def calculate_reserved_balance(self, organization_id: int) -> int: + """ + Calculates the reserved balance for a given organization. + + Args: + organization_id (int): The ID of the organization. + + Returns: + int: The reserved balance of the organization. + """ + reserved_balance = await self.transaction_repo.calculate_reserved_balance( + organization_id + ) + return reserved_balance + + @service_handler + async def calculate_available_balance(self, organization_id: int) -> int: + """ + Calculates the available balance for a given organization by subtracting the reserved balance from the total balance. + + Args: + organization_id (int): The ID of the organization. + + Returns: + int: The available balance of the organization. + """ + available_balance = await self.transaction_repo.calculate_available_balance( + organization_id + ) + return available_balance + + @service_handler + async def adjust_balance( + self, + transaction_action: TransactionActionEnum, + compliance_units: int, + organization_id: int, + ) -> Transaction: + """ + Adjusts an organization's balance based on the transaction action. + + Validates the transaction against the organization's current balances before proceeding. + It raises an error if the requested action violates balance constraints (e.g., attempting to reserve more than the available balance). + + Args: + transaction_action (TransactionActionEnum): The type of balance adjustment (Adjustment, Reserved, Released). + compliance_units (int): The number of compliance units involved in the transaction. 
+            organization_id (int): The ID of the organization whose balance is being adjusted.
+
+        Raises:
+            ValueError: If the transaction violates balance constraints.
+        """
+        if compliance_units == 0:
+            raise ValueError("Compliance units cannot be zero.")
+
+        # Retrieve balances
+        available_balance = await self.calculate_available_balance(organization_id)
+        reserved_balance = await self.calculate_reserved_balance(organization_id)
+
+        # Check constraints based on transaction action
+        if transaction_action == TransactionActionEnum.Reserved:
+            if abs(compliance_units) > available_balance:
+                raise ValueError("Reserve amount cannot exceed available balance.")
+        elif transaction_action == TransactionActionEnum.Released:
+            if abs(compliance_units) > reserved_balance:
+                raise ValueError("Release amount cannot exceed reserved balance.")
+        elif (
+            transaction_action == TransactionActionEnum.Adjustment
+            and compliance_units < 0
+        ):
+            if abs(compliance_units) > available_balance:
+                raise ValueError("Cannot decrement available balance below zero.")
+
+        # Create a new transaction record in the database
+        new_transaction = await self.transaction_repo.create_transaction(
+            transaction_action, compliance_units, organization_id
+        )
+
+        await self.redis_balance_service.populate_organization_redis_balance(
+            organization_id
+        )
+
+        return new_transaction
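To make the constraints above concrete, here is an illustrative sketch (all numbers invented) of how adjust_balance is expected to behave for an organization with a total balance of 100 and a reserved balance of 30, i.e. an available balance of 70. Each line is considered independently against those starting balances:

```python
# Illustrative only; assumes we are inside an async function, with `svc` an
# OrganizationsService instance and `org_id` a real organization ID.
await svc.adjust_balance(TransactionActionEnum.Reserved, 50, org_id)     # OK: 50 <= 70 available
await svc.adjust_balance(TransactionActionEnum.Released, 20, org_id)     # OK: 20 <= 30 reserved
await svc.adjust_balance(TransactionActionEnum.Adjustment, -80, org_id)  # ValueError: exceeds available balance
await svc.adjust_balance(TransactionActionEnum.Reserved, 0, org_id)      # ValueError: units cannot be zero
```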
+    @service_handler
+    async def search_organization_details(
+        self, search_query: str
+    ) -> List[OrganizationDetailsSchema]:
+        """
+        Get organizations matching the transaction partner query.
+        The organization details include name, full address, email, and phone.
+        """
+        organizations = await self.repo.search_organizations_by_name(search_query)
+
+        return [
+            {
+                "name": org.name,
+                "address": " ".join(
+                    part
+                    for part in [
+                        org.org_address.street_address,
+                        org.org_address.address_other,
+                        org.org_address.city,
+                        org.org_address.province_state,
+                        org.org_address.country,
+                        org.org_address.postalCode_zipCode,
+                    ]
+                    if part
+                ),
+                "email": org.email,
+                "phone": org.phone,
+            }
+            for org in organizations
+        ]
diff --git a/backend/lcfs/web/api/organizations/views.py b/backend/lcfs/web/api/organizations/views.py
new file mode 100644
index 000000000..8ff755fc9
--- /dev/null
+++ b/backend/lcfs/web/api/organizations/views.py
@@ -0,0 +1,267 @@
+import structlog
+from typing import List, Optional
+
+from fastapi import APIRouter, Body, Depends, status, Request, Query
+from fastapi.responses import StreamingResponse
+from fastapi_cache.decorator import cache
+
+from lcfs.db import dependencies
+
+from lcfs.web.core.decorators import view_handler
+from lcfs.web.api.base import PaginationRequestSchema
+from lcfs.db.models.organization.OrganizationStatus import OrgStatusEnum
+
+from .services import OrganizationsService
+from .schema import (
+    OrganizationTypeSchema,
+    OrganizationStatusSchema,
+    OrganizationListSchema,
+    OrganizationCreateSchema,
+    OrganizationResponseSchema,
+    OrganizationSummaryResponseSchema,
+    OrganizationBalanceResponseSchema,
+    OrganizationUpdateSchema,
+    OrganizationDetailsSchema,
+)
+from lcfs.db.models.user.Role import RoleEnum
+
+
+logger = structlog.get_logger(__name__)
+router = APIRouter()
+get_async_db = dependencies.get_async_db_session
+
+
+@router.get("/export", response_class=StreamingResponse, status_code=status.HTTP_200_OK)
+@view_handler([RoleEnum.GOVERNMENT])
+async def export_organizations(
+    request: Request, service: OrganizationsService = Depends()
+):
+    """
+    Endpoint to export information of all organizations
+
+    This endpoint can support exporting data in different file formats (xls, xlsx, csv)
+    as specified by the 'export_format' and 'media_type' variables.
+    - 'export_format' specifies the file format: options are 'xls', 'xlsx', and 'csv'.
+    - 'media_type' sets the appropriate MIME type based on 'export_format':
+        'application/vnd.ms-excel' for 'xls',
+        'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' for 'xlsx',
+        'text/csv' for 'csv'.
+
+    The SpreadsheetBuilder class is used for building the spreadsheet.
+    It allows adding multiple sheets with custom styling options and exports them as a byte stream.
+    An example of how to use the SpreadsheetBuilder is provided in its class documentation.
+
+    Note: Only the first sheet's data is used for the CSV format,
+    as CSV files do not support multiple sheets.
+    """
+    return await service.export_organizations()
+
+
+@router.post(
+    "/create",
+    response_model=OrganizationResponseSchema,
+    status_code=status.HTTP_201_CREATED,
+)
+@view_handler([RoleEnum.GOVERNMENT, RoleEnum.ADMINISTRATOR])
+async def create_organization(
+    request: Request,
+    organization_data: OrganizationCreateSchema,
+    service: OrganizationsService = Depends(),
+):
+    """
+    Endpoint to create a new organization. This includes processing the provided
+    organization details along with associated addresses.
+    """
+    return await service.create_organization(organization_data)
+
+
+@router.get(
+    "/search",
+    response_model=List[OrganizationDetailsSchema],
+    status_code=status.HTTP_200_OK,
+)
+@view_handler(["*"])
+async def search_organizations(
+    request: Request,
+    org_name: str = Query(
+        ..., min_length=3, description="Company name or operating name"
+    ),
+    service: OrganizationsService = Depends(),
+):
+    """
+    Search for organizations based on a query string.
+    Returns a list of organizations with their names and formatted addresses.
+    """
+    return await service.search_organization_details(org_name)
+
+
+@router.get(
+    "/{organization_id}",
+    response_model=OrganizationResponseSchema,
+    status_code=status.HTTP_200_OK,
+)
+@view_handler(["*"])
+async def get_organization(
+    request: Request, organization_id: int, service: OrganizationsService = Depends()
+):
+    """Fetch a single organization by id"""
+    return await service.get_organization(organization_id)
+
+
+@router.put("/{organization_id}")
+# FIXME: should we be allowing any user to update the organization details?
+@view_handler(["*"]) +async def update_organization( + request: Request, + organization_id: int, + organization_data: OrganizationUpdateSchema, + service: OrganizationsService = Depends(), +): + """Update an organizations data by id""" + return await service.update_organization(organization_id, organization_data) + + +@router.post("/", response_model=OrganizationListSchema, status_code=status.HTTP_200_OK) +@view_handler([RoleEnum.GOVERNMENT]) +async def get_organizations( + request: Request, + pagination: PaginationRequestSchema = Body(..., embed=False), + service: OrganizationsService = Depends(), +): + """Fetch a list of organizations""" + + return await service.get_organizations(pagination) + + +@router.get( + "/statuses/", + response_model=List[OrganizationStatusSchema], + status_code=status.HTTP_200_OK, +) +@cache(expire=60 * 60 * 24) # cache for 24 hours +@view_handler(["*"]) +async def get_organization_statuses( + request: Request, service: OrganizationsService = Depends() +) -> List[OrganizationStatusSchema]: + """Fetch all organization statuses""" + return await service.get_organization_statuses() + + +@router.get( + "/types/", + response_model=List[OrganizationTypeSchema], + status_code=status.HTTP_200_OK, +) +@cache(expire=60 * 60 * 24) # cache for 24 hours +@view_handler(["*"]) +async def get_organization_types( + request: Request, service: OrganizationsService = Depends() +) -> List[OrganizationTypeSchema]: + """Fetch all organization types""" + return await service.get_organization_types() + + +# TODO review security of this endpoint around returning balances +# for all organizations +@router.get( + "/names/", + response_model=List[OrganizationSummaryResponseSchema], + status_code=status.HTTP_200_OK, +) +@cache(expire=1) # cache for 1 hour +@view_handler(["*"]) +async def get_organization_names( + request: Request, service: OrganizationsService = Depends() +): + """Fetch all organization names""" + + # Set the default sorting order + order_by = ("name", "asc") + + # Call the service with only_registered set to True to fetch only registered organizations + return await service.get_organization_names(True, order_by) + + +@router.get( + "/registered/external", + response_model=List[OrganizationSummaryResponseSchema], + status_code=status.HTTP_200_OK, +) +@view_handler(["*"]) +async def get_externally_registered_organizations( + request: Request, service: OrganizationsService = Depends() +): + """ + Retrieve a list of registered organizations, excluding the specified organization. + + Args: + org_id (int): The ID of the organization to be excluded from the list. + + Returns: + List[OrganizationSummaryResponseSchema]: A list of OrganizationSummaryResponseSchema objects + representing registered organizations, excluding the specified organization. + + Raises: + Exception: If an error occurs during the database query. + """ + return await service.get_externally_registered_organizations( + org_id=request.user.organization_id + ) + + +@router.get( + "/balances/{organization_id}", + response_model=OrganizationBalanceResponseSchema, + status_code=status.HTTP_200_OK, +) +@view_handler([RoleEnum.GOVERNMENT]) +async def get_balances( + request: Request, organization_id: int, service: OrganizationsService = Depends() +): + """ + Retrieve the total and reserved balances for a specific organization identified by its ID. 
+ """ + total_balance = await service.calculate_total_balance(organization_id) + reserved_balance = await service.calculate_reserved_balance(organization_id) + organization = await service.get_organization(organization_id) + + return OrganizationBalanceResponseSchema( + organization_id=organization_id, + name=organization.name, + registered=( + True + if organization.org_status.status == OrgStatusEnum.Registered + else False + ), + total_balance=total_balance, + reserved_balance=reserved_balance, + ) + + +@router.get( + "/current/balances", + response_model=OrganizationBalanceResponseSchema, + status_code=status.HTTP_200_OK, +) +@view_handler(["*"]) +async def get_balances(request: Request, service: OrganizationsService = Depends()): + """ + Retrieve the total and reserved balances for a specific organization identified by its ID. + """ + org_id = request.user.organization_id + total_balance = await service.calculate_total_balance(org_id) + reserved_balance = await service.calculate_reserved_balance(org_id) + organization = await service.get_organization(org_id) + + return OrganizationBalanceResponseSchema( + organization_id=org_id, + name=organization.name, + registered=( + True + if organization.org_status.status == OrgStatusEnum.Registered + else False + ), + total_balance=total_balance, + reserved_balance=reserved_balance, + ) diff --git a/backend/lcfs/web/api/other_uses/__init__.py b/backend/lcfs/web/api/other_uses/__init__.py new file mode 100644 index 000000000..5afa2c704 --- /dev/null +++ b/backend/lcfs/web/api/other_uses/__init__.py @@ -0,0 +1,5 @@ +"""Other Uses API.""" + +from lcfs.web.api.other_uses.views import router + +__all__ = ["router"] diff --git a/backend/lcfs/web/api/other_uses/repo.py b/backend/lcfs/web/api/other_uses/repo.py new file mode 100644 index 000000000..8e515b484 --- /dev/null +++ b/backend/lcfs/web/api/other_uses/repo.py @@ -0,0 +1,387 @@ +import structlog +from datetime import date +from typing import List, Optional, Tuple, Dict, Any + +from fastapi import Depends +from lcfs.db.base import ActionTypeEnum, UserTypeEnum +from lcfs.db.dependencies import get_async_db_session + +from sqlalchemy import select, delete, func, case, and_, or_ +from sqlalchemy.orm import joinedload, contains_eager +from sqlalchemy.ext.asyncio import AsyncSession + +from lcfs.db.models.compliance import ComplianceReport +from lcfs.db.models.compliance.OtherUses import OtherUses +from lcfs.db.models.fuel.ProvisionOfTheAct import ProvisionOfTheAct +from lcfs.db.models.fuel.FuelCode import FuelCode +from lcfs.db.models.fuel.FuelType import FuelType, QuantityUnitsEnum +from lcfs.db.models.fuel.FuelInstance import FuelInstance +from lcfs.web.api.fuel_code.repo import FuelCodeRepository +from lcfs.web.api.other_uses.schema import OtherUsesSchema +from lcfs.web.api.base import PaginationRequestSchema +from lcfs.web.core.decorators import repo_handler +from sqlalchemy.dialects import postgresql + +logger = structlog.get_logger(__name__) + + +class OtherUsesRepository: + def __init__( + self, + db: AsyncSession = Depends(get_async_db_session), + fuel_repo: FuelCodeRepository = Depends(), + ): + self.db = db + self.fuel_code_repo = fuel_repo + + @repo_handler + async def get_table_options(self) -> dict: + """Get all table options""" + fuel_categories = await self.fuel_code_repo.get_fuel_categories() + fuel_types = await self.get_formatted_fuel_types() + expected_uses = await self.fuel_code_repo.get_expected_use_types() + units_of_measure = [unit.value for unit in QuantityUnitsEnum] + 
diff --git a/backend/lcfs/web/api/other_uses/__init__.py b/backend/lcfs/web/api/other_uses/__init__.py
new file mode 100644
index 000000000..5afa2c704
--- /dev/null
+++ b/backend/lcfs/web/api/other_uses/__init__.py
@@ -0,0 +1,5 @@
+"""Other Uses API."""
+
+from lcfs.web.api.other_uses.views import router
+
+__all__ = ["router"]
diff --git a/backend/lcfs/web/api/other_uses/repo.py b/backend/lcfs/web/api/other_uses/repo.py
new file mode 100644
index 000000000..8e515b484
--- /dev/null
+++ b/backend/lcfs/web/api/other_uses/repo.py
@@ -0,0 +1,387 @@
+import structlog
+from datetime import date
+from typing import List, Optional, Tuple, Dict, Any
+
+from fastapi import Depends
+from lcfs.db.base import ActionTypeEnum, UserTypeEnum
+from lcfs.db.dependencies import get_async_db_session
+
+from sqlalchemy import select, delete, func, case, and_, or_
+from sqlalchemy.orm import joinedload, contains_eager
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from lcfs.db.models.compliance import ComplianceReport
+from lcfs.db.models.compliance.OtherUses import OtherUses
+from lcfs.db.models.fuel.ProvisionOfTheAct import ProvisionOfTheAct
+from lcfs.db.models.fuel.FuelCode import FuelCode
+from lcfs.db.models.fuel.FuelType import FuelType, QuantityUnitsEnum
+from lcfs.db.models.fuel.FuelInstance import FuelInstance
+from lcfs.web.api.fuel_code.repo import FuelCodeRepository
+from lcfs.web.api.other_uses.schema import OtherUsesSchema
+from lcfs.web.api.base import PaginationRequestSchema
+from lcfs.web.core.decorators import repo_handler
+from sqlalchemy.dialects import postgresql
+
+logger = structlog.get_logger(__name__)
+
+
+class OtherUsesRepository:
+    def __init__(
+        self,
+        db: AsyncSession = Depends(get_async_db_session),
+        fuel_repo: FuelCodeRepository = Depends(),
+    ):
+        self.db = db
+        self.fuel_code_repo = fuel_repo
+
+    @repo_handler
+    async def get_table_options(self) -> dict:
+        """Get all table options"""
+        fuel_categories = await self.fuel_code_repo.get_fuel_categories()
+        fuel_types = await self.get_formatted_fuel_types()
+        expected_uses = await self.fuel_code_repo.get_expected_use_types()
+        units_of_measure = [unit.value for unit in QuantityUnitsEnum]
+        provisions_of_the_act = (
+            (await self.db.execute(select(ProvisionOfTheAct))).scalars().all()
+        )
+        fuel_codes = (await self.db.execute(select(FuelCode))).scalars().all()
+
+        return {
+            "fuel_types": fuel_types,
+            "fuel_categories": fuel_categories,
+            "provisions_of_the_act": provisions_of_the_act,
+            "expected_uses": expected_uses,
+            "units_of_measure": units_of_measure,
+            "fuel_codes": fuel_codes,
+        }
+
+    @repo_handler
+    async def get_latest_other_uses_by_group_uuid(
+        self, group_uuid: str
+    ) -> Optional[OtherUses]:
+        """
+        Retrieve the latest OtherUses record for a given group UUID.
+        Government records are prioritized over supplier records by ordering first by `user_type`
+        (with GOVERNMENT records coming first) and then by `version` in descending order.
+        """
+        query = (
+            select(OtherUses)
+            .where(OtherUses.group_uuid == group_uuid)
+            .order_by(
+                # OtherUses.user_type == UserTypeEnum.SUPPLIER evaluates to False for GOVERNMENT,
+                # thus bringing GOVERNMENT records to the top in the ordered results.
+                OtherUses.user_type == UserTypeEnum.SUPPLIER,
+                OtherUses.version.desc(),
+            )
+        )
+
+        result = await self.db.execute(query)
+        return result.unique().scalars().first()
+
+    @repo_handler
+    async def get_other_uses(self, compliance_report_id: int) -> List[OtherUsesSchema]:
+        """
+        Queries other uses from the database for a specific compliance report.
+        """
+
+        # Retrieve the compliance report's group UUID
+        report_group_query = await self.db.execute(
+            select(ComplianceReport.compliance_report_group_uuid).where(
+                ComplianceReport.compliance_report_id == compliance_report_id
+            )
+        )
+        group_uuid = report_group_query.scalar()
+        if not group_uuid:
+            return []
+
+        result = await self.get_effective_other_uses(group_uuid)
+        return result
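The method below resolves which record in each group is "effective": groups containing any DELETE action are dropped entirely, and within the remaining groups the highest version wins, with government edits taking priority over supplier edits at the same version. As a sketch of the intended behaviour (values invented; keys mirror the model fields):

```python
# Illustrative only: resolving the effective record for one group_uuid.
rows = [
    {"group_uuid": "abc", "version": 0, "user_type": "SUPPLIER",   "action_type": "CREATE"},
    {"group_uuid": "abc", "version": 1, "user_type": "SUPPLIER",   "action_type": "UPDATE"},
    {"group_uuid": "abc", "version": 1, "user_type": "GOVERNMENT", "action_type": "UPDATE"},
]
# Effective record: version 1 / GOVERNMENT — the highest version, and at equal
# versions the government edit outranks the supplier edit.
# Had any row in the group carried action_type == "DELETE", the whole group
# would be excluded from the results.
```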
+ """ + + # Step 1: Subquery to get all compliance_report_ids in the specified group + compliance_reports_select = select(ComplianceReport.compliance_report_id).where( + ComplianceReport.compliance_report_group_uuid + == compliance_report_group_uuid + ) + + # Step 2: Subquery to identify record group_uuids that have any DELETE action + delete_group_select = ( + select(OtherUses.group_uuid) + .where( + OtherUses.compliance_report_id.in_(compliance_reports_select), + OtherUses.action_type == ActionTypeEnum.DELETE, + ) + .distinct() + ) + + # Step 3: Subquery to find the maximum version and priority per group_uuid, + # excluding groups with any DELETE action + user_type_priority = case( + (OtherUses.user_type == UserTypeEnum.GOVERNMENT, 1), + (OtherUses.user_type == UserTypeEnum.SUPPLIER, 0), + else_=0, + ) + + valid_other_uses_select = ( + select( + OtherUses.group_uuid, + func.max(OtherUses.version).label("max_version"), + func.max(user_type_priority).label("max_role_priority"), + ) + .where( + OtherUses.compliance_report_id.in_(compliance_reports_select), + OtherUses.action_type != ActionTypeEnum.DELETE, + ~OtherUses.group_uuid.in_(delete_group_select), + ) + .group_by(OtherUses.group_uuid) + ) + # Now create a subquery for use in the JOIN + valid_fuel_supplies_subq = valid_other_uses_select.subquery() + + other_uses_select = ( + select(OtherUses) + .options( + joinedload(OtherUses.fuel_category), + joinedload(OtherUses.fuel_type), + joinedload(OtherUses.expected_use), + joinedload(OtherUses.provision_of_the_act), + joinedload(OtherUses.fuel_code), + ) + .join( + valid_fuel_supplies_subq, + and_( + OtherUses.group_uuid == valid_fuel_supplies_subq.c.group_uuid, + OtherUses.version == valid_fuel_supplies_subq.c.max_version, + user_type_priority == valid_fuel_supplies_subq.c.max_role_priority, + ), + ) + .order_by(OtherUses.other_uses_id) + ) + + result = await self.db.execute(other_uses_select) + other_uses = result.unique().scalars().all() + + return [ + OtherUsesSchema( + other_uses_id=ou.other_uses_id, + compliance_report_id=ou.compliance_report_id, + quantity_supplied=ou.quantity_supplied, + fuel_type=ou.fuel_type.fuel_type, + fuel_category=ou.fuel_category.category, + ci_of_fuel=ou.ci_of_fuel, + provision_of_the_act=( + ou.provision_of_the_act.name if ou.provision_of_the_act else None + ), + fuel_code=(ou.fuel_code.fuel_code if ou.fuel_code else None), + expected_use=ou.expected_use.name, + units=ou.units, + rationale=ou.rationale, + group_uuid=ou.group_uuid, + version=ou.version, + user_type=ou.user_type, + action_type=ou.action_type, + ) + for ou in other_uses + ] + + async def get_other_uses_paginated( + self, pagination: PaginationRequestSchema, compliance_report_id: int + ) -> tuple[list[Any], int] | tuple[list[OtherUsesSchema], int]: + # Retrieve the compliance report's group UUID + report_group_query = await self.db.execute( + select(ComplianceReport.compliance_report_group_uuid).where( + ComplianceReport.compliance_report_id == compliance_report_id + ) + ) + group_uuid = report_group_query.scalar() + if not group_uuid: + return [], 0 + + # Retrieve effective fuel supplies using the group UUID + other_uses = await self.get_effective_other_uses( + compliance_report_group_uuid=group_uuid + ) + + # Manually apply pagination + total_count = len(other_uses) + offset = 0 if pagination.page < 1 else (pagination.page - 1) * pagination.size + limit = pagination.size + paginated_other_uses = other_uses[offset : offset + limit] + + return paginated_other_uses, total_count + + @repo_handler 
+ async def get_other_use(self, other_uses_id: int) -> OtherUses: + """ + Get a specific other use by id. + """ + return await self.db.scalar( + select(OtherUses) + .options( + joinedload(OtherUses.fuel_category), + joinedload(OtherUses.fuel_type), + joinedload(OtherUses.expected_use), + ) + .where(OtherUses.other_uses_id == other_uses_id) + ) + + @repo_handler + async def update_other_use(self, other_use: OtherUses) -> OtherUses: + """ + Update an existing other use in the database. + """ + updated_other_use = await self.db.merge(other_use) + await self.db.flush() + await self.db.refresh( + other_use, + [ + "fuel_category", + "fuel_type", + "expected_use", + "provision_of_the_act", + "fuel_code", + ], + ) + return updated_other_use + + @repo_handler + async def create_other_use(self, other_use: OtherUses) -> OtherUses: + """ + Create a new other use in the database. + """ + self.db.add(other_use) + await self.db.flush() + await self.db.refresh( + other_use, + [ + "fuel_category", + "fuel_type", + "expected_use", + "provision_of_the_act", + "fuel_code", + ], + ) + return other_use + + @repo_handler + async def delete_other_use(self, other_uses_id: int): + """Delete an other use from the database""" + await self.db.execute( + delete(OtherUses).where(OtherUses.other_uses_id == other_uses_id) + ) + await self.db.flush() + + @repo_handler + async def get_other_use_version_by_user( + self, group_uuid: str, version: int, user_type: UserTypeEnum + ) -> Optional[OtherUses]: + """ + Retrieve a specific OtherUses record by group UUID, version, and user_type. + This method explicitly requires user_type to avoid ambiguity. + """ + query = ( + select(OtherUses) + .where( + OtherUses.group_uuid == group_uuid, + OtherUses.version == version, + OtherUses.user_type == user_type, + ) + .options( + joinedload(OtherUses.fuel_category), + joinedload(OtherUses.fuel_type), + joinedload(OtherUses.expected_use), + joinedload(OtherUses.provision_of_the_act), + joinedload(OtherUses.fuel_code), + ) + ) + + result = await self.db.execute(query) + return result.scalars().first() + + @repo_handler + async def get_formatted_fuel_types(self) -> List[Dict[str, Any]]: + """Get all fuel type options with their associated fuel categories and fuel codes for other uses""" + # Define the filtering conditions for fuel codes + current_date = date.today() + fuel_code_filters = ( + or_( + FuelCode.effective_date == None, FuelCode.effective_date <= current_date + ) + & or_( + FuelCode.expiration_date == None, + FuelCode.expiration_date > current_date, + ) + & (FuelType.other_uses_fossil_derived == True) + ) + + # Build the query with filtered fuel_codes + query = ( + select(FuelType) + .outerjoin(FuelType.fuel_instances) + .outerjoin(FuelInstance.fuel_category) + .outerjoin(FuelType.fuel_codes) + .where(fuel_code_filters) + .options( + contains_eager(FuelType.fuel_instances).contains_eager( + FuelInstance.fuel_category + ), + contains_eager(FuelType.fuel_codes), + joinedload(FuelType.provision_1), + joinedload(FuelType.provision_2), + ) + ) + + result = await self.db.execute(query) + fuel_types = result.unique().scalars().all() + + # Prepare the data in the format matching your schema + formatted_fuel_types = [] + for fuel_type in fuel_types: + formatted_fuel_type = { + "fuel_type_id": fuel_type.fuel_type_id, + "fuel_type": fuel_type.fuel_type, + "default_carbon_intensity": fuel_type.default_carbon_intensity, + "units": fuel_type.units if fuel_type.units else None, + "unrecognized": fuel_type.unrecognized, + "fuel_categories": [ + { 
+ "fuel_category_id": fc.fuel_category.fuel_category_id, + "category": fc.fuel_category.category, + } + for fc in fuel_type.fuel_instances + ], + "fuel_codes": [ + { + "fuel_code_id": fc.fuel_code_id, + "fuel_code": fc.fuel_code, + "carbon_intensity": fc.carbon_intensity, + } + for fc in fuel_type.fuel_codes + ], + "provision_of_the_act": [], + } + + if fuel_type.provision_1: + formatted_fuel_type["provision_of_the_act"].append( + { + "provision_of_the_act_id": fuel_type.provision_1_id, + "name": fuel_type.provision_1.name, + } + ) + + if fuel_type.provision_2: + formatted_fuel_type["provision_of_the_act"].append( + { + "provision_of_the_act_id": fuel_type.provision_2_id, + "name": fuel_type.provision_2.name, + } + ) + formatted_fuel_types.append(formatted_fuel_type) + + return formatted_fuel_types diff --git a/backend/lcfs/web/api/other_uses/schema.py b/backend/lcfs/web/api/other_uses/schema.py new file mode 100644 index 000000000..db3e591be --- /dev/null +++ b/backend/lcfs/web/api/other_uses/schema.py @@ -0,0 +1,135 @@ +from typing import Optional, List +from pydantic import Field, field_validator +from lcfs.web.api.base import ( + BaseSchema, + FilterModel, + SortOrder, + PaginationRequestSchema, + PaginationResponseSchema, +) +from enum import Enum + +from lcfs.web.api.fuel_type.schema import FuelTypeQuantityUnitsEnumSchema + + +class FuelCodeStatusEnumSchema(str, Enum): + Draft = "Draft" + Approved = "Approved" + Deleted = "Deleted" + +class FuelCodeSchema(BaseSchema): + fuel_code_id: int + fuel_code: str + carbon_intensity: float + + +class ProvisionOfTheActSchema(BaseSchema): + provision_of_the_act_id: int + name: str + + +class UnitOfMeasureSchema(BaseSchema): + uom_id: int + name: str + description: Optional[str] = None + + +class ExpectedUseTypeSchema(BaseSchema): + expected_use_type_id: int + name: str + description: Optional[str] = None + + +class FuelCategorySchema(BaseSchema): + fuel_category_id: int + category: str + description: Optional[str] = None + + +class FuelTypeSchema(BaseSchema): + fuel_type_id: int + fuel_type: str + fossil_derived: Optional[bool] = None + provision_1_id: Optional[int] = None + provision_2_id: Optional[int] = None + fuel_categories: List[FuelCategorySchema] + default_carbon_intensity: Optional[float] = None + fuel_codes: Optional[List[FuelCodeSchema]] = [] + provision_of_the_act: Optional[List[ProvisionOfTheActSchema]] = [] + units: FuelTypeQuantityUnitsEnumSchema + + @field_validator("default_carbon_intensity") + def quantize_default_carbon_intensity(cls, value): + return round(value, 2) + + +class FuelCategorySchema(BaseSchema): + fuel_category_id: int + category: str + description: Optional[str] = None + + +class OtherUsesFuelCategorySchema(BaseSchema): + fuel_category_id: int + category: str + description: Optional[str] = None + + +class OtherUsesTableOptionsSchema(BaseSchema): + fuel_categories: List[OtherUsesFuelCategorySchema] + fuel_types: List[FuelTypeSchema] + units_of_measure: List[str] + provisions_of_the_act: List[ProvisionOfTheActSchema] + fuel_codes: List[FuelCodeSchema] + expected_uses: List[ExpectedUseTypeSchema] + + +class OtherUsesCreateSchema(BaseSchema): + other_uses_id: Optional[int] = None + compliance_report_id: int + fuel_type: str + fuel_category: str + provision_of_the_act: str + quantity_supplied: int + units: str + expected_use: str + fuel_code: Optional[str] = None + ci_of_fuel: Optional[float] = None + expected_use: str + other_uses_id: Optional[int] = None + rationale: Optional[str] = None + deleted: Optional[bool] = 
+class OtherUsesSchema(OtherUsesCreateSchema):
+    pass
+
+
+class PaginatedOtherUsesRequestSchema(BaseSchema):
+    compliance_report_id: int = Field(..., alias="complianceReportId")
+    filters: List[FilterModel]
+    page: int
+    size: int
+    sort_orders: List[SortOrder]
+
+
+class OtherUsesListSchema(BaseSchema):
+    other_uses: List[OtherUsesSchema]
+    pagination: PaginationResponseSchema
+
+
+class OtherUsesAllSchema(BaseSchema):
+    other_uses: List[OtherUsesSchema]
+
+
+class DeleteOtherUsesSchema(BaseSchema):
+    other_uses_id: int
+    compliance_report_id: int
+
+
+class DeleteOtherUsesResponseSchema(BaseSchema):
+    message: str
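For the paginated /list endpoint added in views.py further below, PaginatedOtherUsesRequestSchema implies request bodies of roughly this shape (the complianceReportId alias is explicit in the schema; the filter and sort keys are assumptions based on the shared FilterModel/SortOrder fields seen elsewhere in this diff):

    request_body = {
        "complianceReportId": 1,
        "page": 1,
        "size": 10,
        "filters": [
            {"field": "fuel_type", "filter": "CNG", "type": "equals", "filter_type": "text"}
        ],
        "sort_orders": [{"field": "create_date", "direction": "desc"}],
    }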
+ """ + fuel_category = await self.fuel_repo.get_fuel_category_by( + category=other_use.fuel_category + ) + fuel_type = await self.fuel_repo.get_fuel_type_by_name(other_use.fuel_type) + expected_use = await self.fuel_repo.get_expected_use_type_by_name( + other_use.expected_use + ) + provision_of_the_act = await self.fuel_repo.get_provision_of_the_act_by_name( + other_use.provision_of_the_act + ) + fuel_code_id = None + if other_use.fuel_code: + fuel_code = await self.fuel_repo.get_fuel_code_by_name(other_use.fuel_code) + fuel_code_id = fuel_code.fuel_code_id + + return OtherUses( + **other_use.model_dump( + exclude={ + "other_uses_id", + "id", + "fuel_category", + "fuel_type", + "provision_of_the_act", + "fuel_code", + "expected_use", + "deleted", + } + ), + fuel_category_id=fuel_category.fuel_category_id, + fuel_type_id=fuel_type.fuel_type_id, + provision_of_the_act_id=provision_of_the_act.provision_of_the_act_id, + fuel_code_id=fuel_code_id, + expected_use_id=expected_use.expected_use_type_id + ) + + def model_to_schema(self, model: OtherUses): + """ + Converts data from OtherUses to OtherUsesCreateSchema data model to store into the database. + """ + updated_schema = OtherUsesSchema( + other_uses_id=model.other_uses_id, + compliance_report_id=model.compliance_report_id, + quantity_supplied=model.quantity_supplied, + rationale=model.rationale, + units=model.units, + fuel_type=model.fuel_type.fuel_type, + fuel_category=model.fuel_category.category, + provision_of_the_act=model.provision_of_the_act.name, + fuel_code=model.fuel_code.fuel_code if model.fuel_code else None, + ci_of_fuel=model.ci_of_fuel, + expected_use=model.expected_use.name, + user_type=model.user_type, + group_uuid=model.group_uuid, + version=model.version, + action_type=model.action_type, + ) + return updated_schema + + @service_handler + async def get_table_options(self) -> OtherUsesTableOptionsSchema: + """ + Gets the list of table options related to other uses. + """ + table_options = await self.repo.get_table_options() + return OtherUsesTableOptionsSchema( + fuel_categories=[ + OtherUsesFuelCategorySchema.model_validate(category) + for category in table_options["fuel_categories"] + ], + fuel_types=[ + FuelTypeSchema.model_validate(fuel_type) + for fuel_type in table_options["fuel_types"] + ], + units_of_measure=table_options["units_of_measure"], + expected_uses=[ + ExpectedUseTypeSchema.model_validate(use) + for use in table_options["expected_uses"] + ], + provisions_of_the_act=[ + ProvisionOfTheActSchema.model_validate(provision) + for provision in table_options["provisions_of_the_act"] + ], + fuel_codes=[ + FuelCodeSchema.model_validate(fuel_code) + for fuel_code in table_options["fuel_codes"] + ], + ) + + @service_handler + async def get_other_uses(self, compliance_report_id: int) -> OtherUsesListSchema: + """ + Gets the list of other uses for a specific compliance report. 
+ """ + other_uses = await self.repo.get_other_uses(compliance_report_id) + return OtherUsesAllSchema( + other_uses=[OtherUsesSchema.model_validate( + ou) for ou in other_uses] + ) + + @service_handler + async def get_other_uses_paginated( + self, pagination: PaginationRequestSchema, compliance_report_id: int + ) -> OtherUsesListSchema: + other_uses, total_count = await self.repo.get_other_uses_paginated( + pagination, compliance_report_id + ) + return OtherUsesListSchema( + pagination=PaginationResponseSchema( + total=total_count, + page=pagination.page, + size=pagination.size, + total_pages=math.ceil(total_count / pagination.size), + ), + other_uses=other_uses, + ) + + @service_handler + async def update_other_use( + self, other_use_data: OtherUsesCreateSchema, user_type: UserTypeEnum + ) -> OtherUsesSchema: + """Update an existing other use""" + other_use = await self.repo.get_other_use_version_by_user( + other_use_data.group_uuid, other_use_data.version, user_type + ) + + if not other_use: + raise ValueError("Other use not found") + + if other_use.compliance_report_id == other_use_data.compliance_report_id: + # Update existing record if compliance report ID matches + for field, value in other_use_data.model_dump( + exclude={ + "id", + "deleted", + "fuel_type", + "fuel_category", + "expected_use", + "provision_of_the_act", + "fuel_code", + } + ).items(): + setattr(other_use, field, value) + + if other_use.fuel_type.fuel_type != other_use_data.fuel_type: + other_use.fuel_type = await self.fuel_repo.get_fuel_type_by_name( + other_use_data.fuel_type + ) + + if other_use.fuel_category.category != other_use_data.fuel_category: + other_use.fuel_category = ( + await self.fuel_repo.get_fuel_category_by( + category=other_use_data.fuel_category + ) + ) + + if other_use.expected_use.name != other_use_data.expected_use: + other_use.expected_use = ( + await self.fuel_repo.get_expected_use_type_by_name( + other_use_data.expected_use + ) + ) + + if ( + not other_use.provision_of_the_act + or other_use.provision_of_the_act.name + != other_use_data.provision_of_the_act + ): + other_use.provision_of_the_act = ( + await self.fuel_repo.get_provision_of_the_act_by_name( + other_use_data.provision_of_the_act + ) + ) + if ( + other_use.fuel_code is None + or other_use_data.fuel_code is None + or other_use.fuel_code.fuel_code != other_use_data.fuel_code + ): + other_use.fuel_code = await self.fuel_repo.get_fuel_code_by_name( + other_use_data.fuel_code + ) + other_use.ci_of_fuel = other_use_data.ci_of_fuel + + updated_use = await self.repo.update_other_use(other_use) + updated_schema = self.model_to_schema(updated_use) + return OtherUsesSchema.model_validate(updated_schema) + + else: + updated_use = await self.create_other_use( + other_use_data, user_type, existing_record=other_use + ) + return OtherUsesSchema.model_validate(updated_use) + + @service_handler + async def create_other_use( + self, + other_use_data: OtherUsesCreateSchema, + user_type: UserTypeEnum, + existing_record: Optional[OtherUses] = None, + ) -> OtherUsesSchema: + """Create a new other use""" + other_use = await self.schema_to_model(other_use_data) + new_group_uuid = str(uuid.uuid4()) + other_use.group_uuid = ( + new_group_uuid if not existing_record else existing_record.group_uuid + ) + other_use.action_type = ( + ActionTypeEnum.CREATE if not existing_record else ActionTypeEnum.UPDATE + ) + other_use.version = 0 if not existing_record else existing_record.version + 1 + other_use.user_type = user_type + created_use = await 
self.repo.create_other_use(other_use)
+
+        return self.model_to_schema(created_use)
+
+    @service_handler
+    async def delete_other_use(
+        self, other_use_data: OtherUsesCreateSchema, user_type: UserTypeEnum
+    ) -> DeleteOtherUsesResponseSchema:
+        """Delete an other use"""
+        existing_other_use = await self.repo.get_latest_other_uses_by_group_uuid(
+            other_use_data.group_uuid
+        )
+
+        if existing_other_use.action_type == ActionTypeEnum.DELETE:
+            return DeleteOtherUsesResponseSchema(message="Already deleted.")
+
+        deleted_entity = OtherUses(
+            compliance_report_id=other_use_data.compliance_report_id,
+            group_uuid=other_use_data.group_uuid,
+            version=existing_other_use.version + 1,
+            action_type=ActionTypeEnum.DELETE,
+            user_type=user_type,
+        )
+
+        # Copy fields from the latest version for the deletion record
+        for field in existing_other_use.__table__.columns.keys():
+            if field not in OTHER_USE_EXCLUDE_FIELDS:
+                setattr(deleted_entity, field, getattr(existing_other_use, field))
+
+        await self.repo.create_other_use(deleted_entity)
+        return DeleteOtherUsesResponseSchema(message="Marked as deleted.")
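The create/update/delete flow above never rewrites history across compliance reports: updating a row that belongs to a different report, or deleting one, appends a new version to the group_uuid chain instead of mutating rows in place. An illustrative chain (values made up):

    # version 0: action_type=CREATE, user_type=SUPPLIER  (first save; new group_uuid issued)
    # version 1: action_type=UPDATE, user_type=SUPPLIER  (edit made from a different compliance report)
    # version 2: action_type=DELETE, user_type=SUPPLIER  (tombstone copying non-excluded fields from version 1)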
other uses""" + return await service.get_table_options() + + +@router.post( + "/list-all", response_model=OtherUsesAllSchema, status_code=status.HTTP_200_OK +) +@view_handler(["*"]) +async def get_other_uses( + request: Request, + request_data: ComplianceReportRequestSchema = Body(...), + response: Response = None, + service: OtherUsesServices = Depends(), + report_validate: ComplianceReportValidation = Depends(), +): + """Endpoint to get list of other uses for a compliance report""" + await report_validate.validate_organization_access( + request_data.compliance_report_id + ) + return await service.get_other_uses(request_data.compliance_report_id) + + +@router.post( + "/list", + response_model=OtherUsesListSchema, + status_code=status.HTTP_200_OK, +) +@view_handler(["*"]) +async def get_other_uses_paginated( + request: Request, + request_data: PaginatedOtherUsesRequestSchema = Body(...), + service: OtherUsesServices = Depends(), + report_validate: ComplianceReportValidation = Depends(), +) -> OtherUsesListSchema: + pagination = PaginationRequestSchema( + page=request_data.page, + size=request_data.size, + sort_orders=request_data.sort_orders, + filters=request_data.filters, + ) + await report_validate.validate_organization_access( + request_data.compliance_report_id + ) + compliance_report_id = request_data.compliance_report_id + return await service.get_other_uses_paginated(pagination, compliance_report_id) + + +@router.post( + "/save", + response_model=Union[OtherUsesSchema, DeleteOtherUsesResponseSchema], + status_code=status.HTTP_200_OK, +) +@view_handler([RoleEnum.SUPPLIER]) +async def save_other_uses_row( + request: Request, + request_data: OtherUsesCreateSchema = Body(...), + service: OtherUsesServices = Depends(), + validate: OtherUsesValidation = Depends(), + report_validate: ComplianceReportValidation = Depends(), +): + """Endpoint to save a single other uses row""" + compliance_report_id = request_data.compliance_report_id + other_uses_id: Optional[int] = request_data.other_uses_id + + await report_validate.validate_organization_access(compliance_report_id) + + # Determine user type for record creation + current_user_type = request.user.user_type + if not current_user_type: + raise HTTPException( + status_code=403, detail="User does not have the required role." 
+ ) + + if request_data.deleted: + # Delete existing other use + await validate.validate_compliance_report_id( + compliance_report_id, [request_data] + ) + await service.delete_other_use(request_data, current_user_type) + return DeleteOtherUsesResponseSchema(message="Other use deleted successfully") + elif other_uses_id: + # Update existing other use + await validate.validate_compliance_report_id( + compliance_report_id, [request_data] + ) + return await service.update_other_use(request_data, current_user_type) + else: + # Create new other use + await validate.validate_compliance_report_id( + compliance_report_id, [request_data] + ) + return await service.create_other_use(request_data, current_user_type) diff --git a/backend/lcfs/web/api/redis/__init__.py b/backend/lcfs/web/api/redis/__init__.py deleted file mode 100644 index e4e0cbc2d..000000000 --- a/backend/lcfs/web/api/redis/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -"""Redis API.""" -from lcfs.web.api.redis.views import router - -__all__ = ["router"] diff --git a/backend/lcfs/web/api/redis/schema.py b/backend/lcfs/web/api/redis/schema.py deleted file mode 100644 index e7e025d5c..000000000 --- a/backend/lcfs/web/api/redis/schema.py +++ /dev/null @@ -1,10 +0,0 @@ -from typing import Optional - -from pydantic import BaseModel - - -class RedisValueDTO(BaseModel): - """DTO for redis values.""" - - key: str - value: Optional[str] # noqa: WPS110 diff --git a/backend/lcfs/web/api/redis/views.py b/backend/lcfs/web/api/redis/views.py deleted file mode 100644 index 5760d75dc..000000000 --- a/backend/lcfs/web/api/redis/views.py +++ /dev/null @@ -1,44 +0,0 @@ -from fastapi import APIRouter -from fastapi.param_functions import Depends -from redis.asyncio import ConnectionPool, Redis - -from lcfs.services.redis.dependency import get_redis_pool -from lcfs.web.api.redis.schema import RedisValueDTO - -router = APIRouter() - - -@router.get("/", response_model=RedisValueDTO) -async def get_redis_value( - key: str, - redis_pool: ConnectionPool = Depends(get_redis_pool), -) -> RedisValueDTO: - """ - Get value from redis. - - :param key: redis key, to get data from. - :param redis_pool: redis connection pool. - :returns: information from redis. - """ - async with Redis(connection_pool=redis_pool) as redis: - redis_value = await redis.get(key) - return RedisValueDTO( - key=key, - value=redis_value, - ) - - -@router.put("/") -async def set_redis_value( - redis_value: RedisValueDTO, - redis_pool: ConnectionPool = Depends(get_redis_pool), -) -> None: - """ - Set value in redis. - - :param redis_value: new value data. - :param redis_pool: redis connection pool. 
- """ - if redis_value.value is not None: - async with Redis(connection_pool=redis_pool) as redis: - await redis.set(name=redis_value.key, value=redis_value.value) diff --git a/backend/lcfs/web/api/role/__init__.py b/backend/lcfs/web/api/role/__init__.py index 5586bcfb6..904159331 100644 --- a/backend/lcfs/web/api/role/__init__.py +++ b/backend/lcfs/web/api/role/__init__.py @@ -1,4 +1,5 @@ """User API.""" + from lcfs.web.api.role.views import router __all__ = ["router"] diff --git a/backend/lcfs/web/api/role/repo.py b/backend/lcfs/web/api/role/repo.py new file mode 100644 index 000000000..dbe2678d2 --- /dev/null +++ b/backend/lcfs/web/api/role/repo.py @@ -0,0 +1,44 @@ +import structlog +from typing import List + +from fastapi import Depends +from lcfs.db.dependencies import get_async_db_session + +from sqlalchemy import and_, select +from sqlalchemy.ext.asyncio import AsyncSession +from starlette.requests import Request + +from lcfs.web.api.role.schema import RoleSchema +from lcfs.db.models.user.Role import Role, RoleEnum + +logger = structlog.get_logger(__name__) + + +class RoleRepository: + def __init__( + self, + session: AsyncSession = Depends(get_async_db_session), + request: Request = None, + ): + self.session = session + self.request = request + + async def get_roles(self, government_roles_only) -> List[RoleSchema]: + logger.info("Getting all roles from repository") + # exclude "Government & Supplier Roles" + exclude_roles = [RoleEnum.GOVERNMENT, RoleEnum.SUPPLIER] + conditions = [] + if government_roles_only is not None: + conditions.append(Role.name.not_in(exclude_roles)) + conditions.append(Role.is_government_role == government_roles_only) + + results = await self.session.execute( + select(Role) + .where(and_(*conditions)) + .order_by(Role.is_government_role.desc(), Role.display_order.asc()) + ) + # Convert Role instances to dictionaries + roles = results.scalars().unique().all() + role_dicts = [role.__dict__ for role in roles] + + return [RoleSchema.model_validate(role) for role in role_dicts] diff --git a/backend/lcfs/web/api/role/schema.py b/backend/lcfs/web/api/role/schema.py index ce4f6ca2f..e1b5e17ef 100644 --- a/backend/lcfs/web/api/role/schema.py +++ b/backend/lcfs/web/api/role/schema.py @@ -1,8 +1,32 @@ -from pydantic import BaseModel +from typing import Optional, List +from lcfs.db.models import UserProfile +from lcfs.db.models.user.Role import RoleEnum +from lcfs.web.api.base import BaseSchema -class RoleSchema(BaseModel): + +class RoleSchema(BaseSchema): role_id: int - name: str - description: str + name: Optional[str] = None + description: Optional[str] = None + display_order: Optional[int] = None is_government_role: bool + + +def user_has_roles(user: UserProfile, desired_role_names: List[RoleEnum]) -> bool: + """ + Checks if the user has all specified roles, ignoring invalid role names. + + Parameters: + - user: User object with a user_roles attribute containing role objects. + - desired_role_names: List of strings representing the desired role names. + + Returns: + - True if the user has all the desired roles, False otherwise. 
+ """ + user_role_names = user.role_names + desired_role_set = set(desired_role_names) + + # Check if all desired roles are in the user's roles + # This line checks if the desired roles are a subset of the user's roles + return desired_role_set.issubset(user_role_names) diff --git a/backend/lcfs/web/api/role/services.py b/backend/lcfs/web/api/role/services.py new file mode 100644 index 000000000..2c1f13665 --- /dev/null +++ b/backend/lcfs/web/api/role/services.py @@ -0,0 +1,25 @@ +from typing import List +import structlog +from fastapi import Depends + +from lcfs.web.api.role.repo import RoleRepository +from lcfs.web.api.role.schema import RoleSchema +from lcfs.web.core.decorators import service_handler +from lcfs.web.exception.exceptions import DataNotFoundException + +logger = structlog.get_logger(__name__) + + +class RoleServices: + def __init__(self, repo: RoleRepository = Depends(RoleRepository)) -> None: + self.repo = repo + + @service_handler + async def get_roles(self, government_roles_only) -> List[RoleSchema]: + """ + Gets the list of roles related to government user or org users. + """ + roles = await self.repo.get_roles(government_roles_only) + if len(roles) == 0: + raise DataNotFoundException("No roles found") + return roles diff --git a/backend/lcfs/web/api/role/session.py b/backend/lcfs/web/api/role/session.py deleted file mode 100644 index 1f2c021c6..000000000 --- a/backend/lcfs/web/api/role/session.py +++ /dev/null @@ -1,27 +0,0 @@ -from logging import getLogger -from typing import List - -from sqlalchemy import text -from sqlalchemy.ext.asyncio import AsyncSession -from starlette.requests import Request - -from lcfs.web.api.base import row_to_dict -from lcfs.web.api.role.schema import RoleSchema - -logger = getLogger("role") -role_stmt = "Select * from role_view" - - -class RoleRepository: - def __init__(self, session: AsyncSession, request: Request = None): - self.session = session - self.request = request - - async def get_all_roles(self, government_roles_only) -> List[RoleSchema]: - logger.info("Getting all roles from repository") - stmt = f"{role_stmt} where is_government_role = {government_roles_only}" - results = await self.session.execute(text(stmt)) - results = results.fetchall() - if results.__len__() == 0: - return [] - return [row_to_dict(role, schema=RoleSchema) for role in results] diff --git a/backend/lcfs/web/api/role/views.py b/backend/lcfs/web/api/role/views.py index 7fb6e4f85..198d73311 100644 --- a/backend/lcfs/web/api/role/views.py +++ b/backend/lcfs/web/api/role/views.py @@ -1,36 +1,38 @@ -from logging import getLogger +""" +Roles endpoints +GET: /roles?government_roles_only=true +""" -from fastapi import APIRouter, status, FastAPI, Depends -from sqlalchemy.ext.asyncio import AsyncSession +import structlog + +from fastapi import APIRouter, Request, status, FastAPI, Depends +from typing import List +from lcfs.web.api.role.services import RoleServices from starlette.responses import Response +from fastapi_cache.decorator import cache from lcfs.db import dependencies -from lcfs.web.api.base import EntityResponse -from lcfs.web.api.role.session import RoleRepository +from lcfs.web.api.role.schema import RoleSchema +from lcfs.web.core.decorators import view_handler +from lcfs.db.models.user.Role import RoleEnum router = APIRouter() -logger = getLogger("role") +logger = structlog.get_logger(__name__) get_async_db = dependencies.get_async_db_session -# get_db = dependencies.get_db_session app = FastAPI() -@router.get("", response_model=EntityResponse, 
status_code=status.HTTP_200_OK) -async def get_roles(government_roles_only: bool = False, db: AsyncSession = Depends(get_async_db), response: Response = None) -> EntityResponse: - try: - roles = await RoleRepository(db).get_all_roles(government_roles_only=government_roles_only) - if len(roles) == 0: - logger.error("Error getting roles") - response.status_code = status.HTTP_404_NOT_FOUND - return EntityResponse(status=status.HTTP_404_NOT_FOUND, - message="Failed", success=False, data={}, - error={"message": "No roles found"}) - return EntityResponse(status=status.HTTP_200_OK, data=roles, - total=len(roles), error={}, - success=True, message="Success") - except Exception as e: - response.status_code = status.HTTP_500_INTERNAL_SERVER_ERROR - logger.error("Error getting roles", str(e.args[0])) - return EntityResponse(status=status.HTTP_500_INTERNAL_SERVER_ERROR, - message="Failed", success=False, data={}, - error={"message": f"Technical error: {e.args[0]}"}) +@router.get("/", response_model=List[RoleSchema], status_code=status.HTTP_200_OK) +@view_handler([RoleEnum.GOVERNMENT, RoleEnum.SUPPLIER]) +@cache(expire=60 * 60 * 24) # cache for 24 hours +async def get_roles( + request: Request, + government_roles_only: bool = None, + service: RoleServices = Depends(), + response: Response = None, +) -> List[RoleSchema]: + logger.info( + "Retrieving roles: government_roles_only", + government_roles_only=government_roles_only + ) + return await service.get_roles(government_roles_only) diff --git a/backend/lcfs/web/api/router.py b/backend/lcfs/web/api/router.py index fa92acf7e..0ae1659cc 100644 --- a/backend/lcfs/web/api/router.py +++ b/backend/lcfs/web/api/router.py @@ -1,11 +1,88 @@ from fastapi.routing import APIRouter -from lcfs.web.api import echo, monitoring, redis, user, role, notification +from lcfs.web.api import ( + echo, + fuel_supply, + monitoring, + user, + role, + notification, + organization, + organizations, + transfer, + transaction, + internal_comment, + fuel_code, + fuel_export, + admin_adjustment, + initiative_agreement, + compliance_report, + notional_transfer, + other_uses, + final_supply_equipment, + dashboard, + allocation_agreement, + document, + fuel_type, + audit_log, + email, +) api_router = APIRouter() api_router.include_router(monitoring.router) +api_router.include_router( + allocation_agreement.router, + prefix="/allocation-agreement", + tags=["allocation-agreements"], +) +api_router.include_router( + transaction.router, prefix="/transactions", tags=["transactions"] +) +api_router.include_router(transfer.router, prefix="/transfers", tags=["transfers"]) api_router.include_router(echo.router, prefix="/echo", tags=["echo"]) -api_router.include_router(redis.router, prefix="/redis", tags=["redis"]) api_router.include_router(user.router, prefix="/users", tags=["users"]) api_router.include_router(role.router, prefix="/roles", tags=["roles"]) -api_router.include_router(notification.router, prefix='/notifications', tags=["notifications"]) \ No newline at end of file +api_router.include_router(dashboard.router, prefix="/dashboard", tags=["dashboard"]) +api_router.include_router( + notification.router, prefix="/notifications", tags=["notifications"] +) +api_router.include_router( + organizations.router, prefix="/organizations", tags=["organizations"] +) +api_router.include_router( + organization.router, prefix="/organization", tags=["organization"] +) +api_router.include_router( + internal_comment.router, prefix="/internal_comments", tags=["internal_comments"] +) 
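Since the user_has_roles helper added above is a plain subset test, its edge cases are easy to pin down. A sketch (GOVERNMENT and SUPPLIER are the only RoleEnum members confirmed in this diff; the role_names contents are assumed for illustration):

    # Assuming user.role_names == {RoleEnum.GOVERNMENT, RoleEnum.SUPPLIER}:
    user_has_roles(user, [RoleEnum.GOVERNMENT])                     # True  (subset)
    user_has_roles(user, [RoleEnum.GOVERNMENT, RoleEnum.SUPPLIER])  # True  (equal sets)
    user_has_roles(user, [])                                        # True  (empty set is a subset of any set)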
+api_router.include_router(fuel_code.router, prefix="/fuel-codes", tags=["fuel-codes"]) +api_router.include_router( + fuel_export.router, prefix="/fuel-exports", tags=["fuel-exports"] +) +api_router.include_router( + admin_adjustment.router, prefix="/admin-adjustments", tags=["admin_adjustments"] +) +api_router.include_router( + initiative_agreement.router, + prefix="/initiative-agreements", + tags=["initiative_agreements"], +) +api_router.include_router( + compliance_report.router, prefix="/reports", tags=["compliance_reports"] +) +api_router.include_router( + notional_transfer.router, prefix="/notional-transfers", tags=["notional_transfers"] +) +api_router.include_router(other_uses.router, prefix="/other-uses", tags=["other_uses"]) +api_router.include_router( + final_supply_equipment.router, + prefix="/final-supply-equipments", + tags=["final_supply_equipments"], +) +api_router.include_router( + fuel_supply.router, prefix="/fuel-supply", tags=["fuel_supplies"] +) +api_router.include_router(document.router, prefix="/documents", tags=["documents"]) +api_router.include_router(fuel_type.router, prefix="/fuel-type", tags=["fuel_type"]) +api_router.include_router(audit_log.router, prefix="/audit-log", tags=["audit_log"]) +api_router.include_router(email.router, prefix="/email", tags=["emails"]) diff --git a/backend/lcfs/web/api/transaction/__init__.py b/backend/lcfs/web/api/transaction/__init__.py new file mode 100644 index 000000000..7e241e355 --- /dev/null +++ b/backend/lcfs/web/api/transaction/__init__.py @@ -0,0 +1,5 @@ +"""User API.""" + +from lcfs.web.api.transaction.views import router + +__all__ = ["router"] diff --git a/backend/lcfs/web/api/transaction/repo.py b/backend/lcfs/web/api/transaction/repo.py new file mode 100644 index 000000000..861b1e32b --- /dev/null +++ b/backend/lcfs/web/api/transaction/repo.py @@ -0,0 +1,466 @@ +# transactions/repo.py + +from datetime import datetime +from enum import Enum +from typing import List, Optional + +from fastapi import Depends +from sqlalchemy import select, update, func, desc, asc, and_, case, or_, extract +from sqlalchemy.ext.asyncio import AsyncSession + +from lcfs.db.dependencies import get_async_db_session +from lcfs.db.models.transaction.Transaction import Transaction, TransactionActionEnum +from lcfs.db.models.transaction.TransactionStatusView import TransactionStatusView +from lcfs.db.models.transaction.TransactionView import TransactionView +from lcfs.db.models.transfer.TransferStatus import TransferStatus +from lcfs.web.core.decorators import repo_handler + + +class EntityType(Enum): + Government = "Government" + Transferor = "Transferor" + Transferee = "Transferee" + + +class TransactionRepository: + def __init__(self, db: AsyncSession = Depends(get_async_db_session)): + self.db = db + + @repo_handler + async def get_transactions_paginated( + self, + offset: int, + limit: Optional[int] = None, + conditions: list = [], + sort_orders: list = [], + organization_id: Optional[int] = None, + ): + """ + Fetches paginated, filtered, and sorted transactions. + Adjusts visibility based on the requester's role: transferor, transferee, or government. + + Args: + offset (int): Pagination offset. + limit (int): Pagination limit. + conditions (list): Filtering conditions. + sort_orders (list): Sorting orders. + organization_id (int, optional): ID of the requesting organization; determines visibility rules. Defaults to government view if None. + + Returns: + A tuple of (list of TransactionView instances, total count). 
+ """ + query_conditions = conditions + + # Base condition for transaction type "Transfer" + transfer_type_condition = TransactionView.transaction_type == "Transfer" + + if organization_id is not None: + # Fetch visible statuses for both roles for "Transfer" transactions + visible_statuses_transferor = await self.get_visible_statuses( + EntityType.Transferor + ) + visible_statuses_transferee = await self.get_visible_statuses( + EntityType.Transferee + ) + + # Construct role-specific conditions, applying them only to "Transfer" transactions + transferor_condition = and_( + transfer_type_condition, + TransactionView.from_organization_id == organization_id, + TransactionView.status.in_( + [status.value for status in visible_statuses_transferor] + ), + ) + transferee_condition = and_( + transfer_type_condition, + TransactionView.to_organization_id == organization_id, + TransactionView.status.in_( + [status.value for status in visible_statuses_transferee] + ), + ) + + # Conditions for non-transfer transactions, only "Approved" for suppliers + non_transfer_condition = and_( + TransactionView.transaction_type != "Transfer", + TransactionView.to_organization_id == organization_id, + TransactionView.status + == "Approved", # This status includes InitiativeAgreement and AdminAdjustment + ) + + # Combine conditions since an organization can be both transferor and transferee, or neither for non-"Transfer" transactions + combined_role_condition = or_( + transferor_condition, transferee_condition, non_transfer_condition + ) + query_conditions.append(combined_role_condition) + else: + # Government view should see all non-transfer transactions regardless of status + gov_transfer_condition = and_( + transfer_type_condition, + TransactionView.status.in_( + [ + status.value + for status in await self.get_visible_statuses( + EntityType.Government + ) + ] + ), + ) + gov_non_transfer_condition = TransactionView.transaction_type != "Transfer" + + query_conditions.append( + or_(gov_transfer_condition, gov_non_transfer_condition) + ) + + # Add additional conditions + if conditions: + query_conditions.extend(conditions) + + query = select(TransactionView).where(and_(*query_conditions)) + + # Apply sorting + for order in sort_orders: + direction = asc if order.direction == "asc" else desc + query = query.order_by(direction(getattr(TransactionView, order.field))) + + # Execute count query for total records matching the filter + count_query = select(func.count()).select_from(query.subquery()) + total_count_result = await self.db.execute(count_query) + total_count = total_count_result.scalar_one() + + # Apply pagination + query = query.offset(offset).limit(limit) + + # Execute the query + result = await self.db.execute(query) + transactions = result.scalars().all() + + return transactions, total_count + + @repo_handler + async def get_transaction_by_id(self, transaction_id: int) -> Transaction: + """ + Retrieves a transaction by its ID. + + Args: + transaction_id (int): The ID of the transaction to retrieve. + + Returns: + Transaction: The transaction view object. + """ + query = select(Transaction).where(Transaction.transaction_id == transaction_id) + result = await self.db.execute(query) + return result.scalar_one() + + @repo_handler + async def get_transaction_statuses(self) -> List[TransactionStatusView]: + """ + Get all available statuses for transactions from the database. + + Returns: + List[TransactionStatusView]: A list of TransactionStatusView objects containing the basic transaction status details. 
+ """ + query = ( + select(TransactionStatusView) + .distinct(TransactionStatusView.status) + .order_by(asc(TransactionStatusView.status)) + ) + status_results = await self.db.execute(query) + return status_results.scalars().all() + + @repo_handler + async def calculate_total_balance(self, organization_id: int): + """ + Calculate the total balance for a specific organization based on adjustments. + + This method computes the sum of compliance units for all transactions marked as adjustments for the given organization. + + Args: + organization_id (int): The ID of the organization for which to calculate the total balance. + + Returns: + int: The total balance of compliance units as adjustments for the specified organization. Returns 0 if no balance is calculated. + """ + total_balance = await self.db.scalar( + select( + func.sum( + case( + ( + Transaction.transaction_action + == TransactionActionEnum.Adjustment, + Transaction.compliance_units, + ), + else_=0, + ) + ).label("total_balance") + ).where(Transaction.organization_id == organization_id) + ) + return total_balance or 0 + + @repo_handler + async def calculate_reserved_balance(self, organization_id: int): + """ + Calculate the reserved balance for a specific organization. + + Args: + organization_id (int): The ID of the organization for which to calculate the reserved balance. + + Returns: + int: The reserved balance of compliance units for the specified organization. Returns 0 if no balance is calculated. + """ + reserved_balance = await self.db.scalar( + select( + func.sum( + case( + ( + Transaction.transaction_action + == TransactionActionEnum.Reserved, + Transaction.compliance_units, + ), + else_=0, + ) + ).label("reserved_balance") + ).where(Transaction.organization_id == organization_id) + ) + return reserved_balance or 0 + + @repo_handler + async def calculate_available_balance(self, organization_id: int): + """ + Calculate the available balance for a specific organization. + + Args: + organization_id (int): The ID of the organization for which to calculate the available balance. + + Returns: + int: The available balance of compliance units for the specified organization. Returns 0 if no balance is calculated. + """ + available_balance = await self.db.scalar( + select( + ( + func.sum( + case( + ( + Transaction.transaction_action + == TransactionActionEnum.Adjustment, + Transaction.compliance_units, + ), + else_=0, + ) + ) + - func.sum( + case( + ( + Transaction.transaction_action + == TransactionActionEnum.Reserved, + Transaction.compliance_units, + ), + else_=0, + ) + ) + ).label("available_balance") + ).where(Transaction.organization_id == organization_id) + ) + return available_balance or 0 + + @repo_handler + async def calculate_available_balance_for_period( + self, organization_id: int, compliance_period: int + ): + """ + Calculate the available balance for a specific organization available to a specific compliance period. + + Args: + organization_id (int): The ID of the organization for which to calculate the available balance. + compliance_period_end (datetime): The end date of the compliance period for which to calculate the available balance. + + Returns: + int: The available balance of compliance units for the specified organization and period. Returns 0 if no balance is calculated. 
+ """ + compliance_period_end = datetime.strptime( + f"{str(compliance_period + 1)}-03-31", "%Y-%m-%d" + ) + async with self.db.begin_nested(): + # Calculate the sum of all transactions up to the specified date + balance_to_date = await self.db.scalar( + select(func.coalesce(func.sum(Transaction.compliance_units), 0)).where( + and_( + Transaction.organization_id == organization_id, + Transaction.create_date <= compliance_period_end, + Transaction.transaction_action + != TransactionActionEnum.Released, + ) + ) + ) + + # Calculate the sum of future negative transactions + future_negative_transactions = await self.db.scalar( + select(func.coalesce(func.sum(Transaction.compliance_units), 0)).where( + and_( + Transaction.organization_id == organization_id, + Transaction.create_date > compliance_period_end, + Transaction.compliance_units < 0, + Transaction.transaction_action + != TransactionActionEnum.Released, + ) + ) + ) + + # Calculate the available balance, round to the nearest whole number, and if negative, set to zero + available_balance = max( + round(balance_to_date - abs(future_negative_transactions)), 0 + ) + + return available_balance + + @repo_handler + async def create_transaction( + self, + transaction_action: TransactionActionEnum, + compliance_units: int, + organization_id: int, + ) -> Transaction: + """ + Creates and saves a new transaction to the database. + + Args: + transaction_action (TransactionActionEnum): The type of action the transaction represents (e.g., Adjustment, Reserved, Released). + compliance_units (int): The number of compliance units involved in the transaction. + organization_id (int): The ID of the organization related to this transaction. + + Returns: + Transaction: The newly created and saved transaction with its unique ID. + """ + new_transaction = Transaction( + transaction_action=transaction_action, + compliance_units=compliance_units, + organization_id=organization_id, + ) + self.db.add(new_transaction) + await self.db.flush() + await self.db.refresh(new_transaction, ["organization"]) + return new_transaction + + @repo_handler + async def reserve_transaction(self, transaction_id: int) -> bool: + """ + Attempt to reserve a transaction by updating its transaction_action to Reserved. + + Args: + transaction_id (int): The ID of the transaction to reserve. + + Returns: + bool: True if the action was successfully applied, False otherwise. + """ + result = await self.db.execute( + update(Transaction) + .where(Transaction.transaction_id == transaction_id) + .values(transaction_action=TransactionActionEnum.Reserved) + ) + # Check if the update statement affected any rows + return result.rowcount > 0 + + @repo_handler + async def release_transaction(self, transaction_id: int) -> bool: + """ + Attempt to release a transaction by updating its transaction_action to Released. + + Args: + transaction_id (int): The ID of the transaction to release. + + Returns: + bool: True if the action was successfully applied, False otherwise. + """ + result = await self.db.execute( + update(Transaction) + .where(Transaction.transaction_id == transaction_id) + .values(transaction_action=TransactionActionEnum.Released) + ) + # Check if the update statement affected any rows + return result.rowcount > 0 + + @repo_handler + async def confirm_transaction(self, transaction_id: int) -> bool: + """ + Attempt to confirm a transaction by updating its transaction_action to Adjustment. + + Args: + transaction_id (int): The ID of the transaction to release. 
+ + Returns: + bool: True if the action was successfully applied, False otherwise. + """ + # Execute the update statement + result = await self.db.execute( + update(Transaction) + .where(Transaction.transaction_id == transaction_id) + .values(transaction_action=TransactionActionEnum.Adjustment) + ) + # Commit the update to make it permanent + # await self.db.commit() + + # Check if the update statement affected any rows + if result.rowcount > 0: + # Retrieve the updated transaction instance + query = select(Transaction).where( + Transaction.transaction_id == transaction_id + ) + updated_transaction = await self.db.scalar(query) + + # If the transaction is found, refresh it and return True + if updated_transaction: + await self.db.refresh(updated_transaction) + # await self.db.refresh(updated_transaction.organization) + return True + # If no rows were affected or transaction could not be retrieved, return False + return False + + @repo_handler + async def get_visible_statuses(self, entity_type: EntityType) -> List[str]: + """ + Fetches transaction statuses visible to the specified entity type. + + Args: + entity_type (EntityType): The entity type (transferor, transferee, government). + + Returns: + List[str]: Visible status strings for the entity. + + Raises: + ValueError: For invalid entity type. + """ + # Map entity types to their corresponding conditions + conditions = { + EntityType.Transferor: TransferStatus.visible_to_transferor.is_(True), + EntityType.Transferee: TransferStatus.visible_to_transferee.is_(True), + EntityType.Government: TransferStatus.visible_to_government.is_(True), + } + + # Fetch the condition for the given entity type + condition = conditions.get(entity_type) + + # Ensure a valid entity type was provided + if condition is None: + raise ValueError(f"Invalid entity type: {entity_type}") + + query = select(TransferStatus.status).where(condition) + result = await self.db.execute(query) + + # Directly fetching string representations of the statuses + return [status[0] for status in result.fetchall()] + + @repo_handler + async def get_transaction_start_year(self): + """ + Returns the year of the oldest transaction in the system + + Returns: + oldest_year (int): the year of the oldest transaction in the system + """ + + oldest_year = await self.db.scalar( + select(extract("year", Transaction.create_date).label("year")) + .order_by(Transaction.create_date.asc()) + .limit(1) + ) + + return oldest_year diff --git a/backend/lcfs/web/api/transaction/schema.py b/backend/lcfs/web/api/transaction/schema.py new file mode 100644 index 000000000..ad0d8411e --- /dev/null +++ b/backend/lcfs/web/api/transaction/schema.py @@ -0,0 +1,83 @@ +from typing import Optional, List + +from pydantic import ConfigDict, Field +from lcfs.web.api.base import BaseSchema +from datetime import datetime +from enum import Enum +from typing import List, Optional +from lcfs.web.api.organizations.schema import OrganizationSummaryResponseSchema +from lcfs.web.api.base import PaginationResponseSchema + + +# -------------------------------------- +# Base Configuration +# -------------------------------------- +class BaseConfig: + from_attributes = True + + +# -------------------------------------- +# Transaction Status +# -------------------------------------- + + +class TransactionStatusEnum(str, Enum): + DRAFT = "Draft" + RECOMMENDED = "Recommended" + SENT = "Sent" + SUBMITTED = "Submitted" + APPROVED = "Approved" + RECORDED = "Recorded" + REFUSED = "Refused" + DELETED = "Deleted" + DECLINED = "Declined" + 
RESCINDED = "Rescinded" + + +class TransactionStatusBase(BaseSchema): + status: TransactionStatusEnum + description: Optional[str] = None + + +class TransactionStatusSchema(TransactionStatusBase): + pass + + +class TransactionActionEnum(str, Enum): + Adjustment = "Adjustment" + Reserved = "Reserved" + Released = "Released" + + +class TransactionCreateSchema(BaseSchema): + transaction_action: TransactionActionEnum + compliance_units: int + organization_id: int + + +class TransactionBaseSchema(BaseSchema): + model_config = ConfigDict(from_attributes=True) + + transaction_id: int + compliance_units: int + organization_id: int + transaction_action: TransactionActionEnum + + organization: Optional[OrganizationSummaryResponseSchema] = None + + +class TransactionViewSchema(BaseSchema): + transaction_id: int + transaction_type: str + from_organization: Optional[str] = None + to_organization: str + quantity: int + price_per_unit: Optional[float] = None + status: str + create_date: datetime + update_date: datetime + + +class TransactionListSchema(BaseSchema): + pagination: PaginationResponseSchema + transactions: List[TransactionViewSchema] diff --git a/backend/lcfs/web/api/transaction/services.py b/backend/lcfs/web/api/transaction/services.py new file mode 100644 index 000000000..9c2bf0011 --- /dev/null +++ b/backend/lcfs/web/api/transaction/services.py @@ -0,0 +1,236 @@ +import io +from datetime import datetime +from typing import List, Dict, Union +from fastapi import Depends +from fastapi.responses import StreamingResponse +from math import ceil +from lcfs.db.models.transaction.Transaction import Transaction +from sqlalchemy import or_, and_, cast, String + +from .repo import TransactionRepository # Adjust import path as needed +from lcfs.web.core.decorators import service_handler +from lcfs.db.models.transaction.TransactionView import TransactionView +from lcfs.web.api.transaction.schema import ( + TransactionStatusSchema, + TransactionViewSchema, +) +from lcfs.web.exception.exceptions import DataNotFoundException +from lcfs.web.api.base import ( + FilterModel, + PaginationRequestSchema, + PaginationResponseSchema, + apply_filter_conditions, + get_field_for_filter, + validate_pagination, +) +from lcfs.utils.constants import ( + LCFS_Constants, + FILE_MEDIA_TYPE, + transaction_type_to_id_prefix_map, + id_prefix_to_transaction_type_map, +) +from lcfs.utils.spreadsheet_builder import SpreadsheetBuilder + + +class TransactionsService: + def __init__( + self, repo: TransactionRepository = Depends(TransactionRepository) + ) -> None: + self.repo = repo + + def apply_transaction_filters(self, pagination, conditions): + """ + Apply filters to the transactions query. + + Args: + pagination (PaginationRequestSchema): The pagination object containing page and size information. + conditions (List[Condition]): The list of conditions to apply. + + Returns: + List[Transactions]: The list of transactions after applying the filters. 
+ """ + + for filter in pagination.filters: + if filter.field == "transaction_id": + filter_value = filter.filter.upper() + for ( + prefix, + transaction_type, + ) in id_prefix_to_transaction_type_map.items(): + if filter_value.startswith(prefix): + numeric_part = filter_value[len(prefix) :] + if numeric_part: + if numeric_part.isdigit(): + conditions.append( + and_( + TransactionView.transaction_type + == transaction_type, + TransactionView.transaction_id + == int(numeric_part), + ) + ) + else: + # Only prefix provided, filter by transaction type only + conditions.append( + TransactionView.transaction_type == transaction_type + ) + break + else: + # If no prefix matches, treat the whole value as a potential transaction_id + if filter_value.isdigit(): + conditions.append( + TransactionView.transaction_id == int(filter_value) + ) + else: + # Handle other filters + field = get_field_for_filter(TransactionView, filter.field) + conditions.append( + apply_filter_conditions( + field, filter.filter, filter.type, filter.filter_type + ) + ) + + @service_handler + async def get_transactions_paginated( + self, pagination: PaginationRequestSchema = {}, organization_id: int = None + ) -> Dict[str, Union[List[TransactionViewSchema], PaginationResponseSchema]]: + """ + Fetch transactions with filters, sorting, and pagination. + """ + pagination.filters.append( + FilterModel( + field="status", filter="Deleted", type="notEqual", filter_type="text" + ) + ) + conditions = [] + pagination = validate_pagination(pagination) + if pagination.filters and len(pagination.filters) > 0: + self.apply_transaction_filters(pagination, conditions) + + offset = (pagination.page - 1) * pagination.size if pagination.page > 0 else 0 + limit = pagination.size + + if organization_id: + org_conditions = or_( + TransactionView.from_organization_id == organization_id, + TransactionView.to_organization_id == organization_id, + ) + conditions.append(org_conditions) + + transactions, total_count = await self.repo.get_transactions_paginated( + offset, limit, conditions, pagination.sort_orders, None + ) + + if not transactions: + raise DataNotFoundException("Transactions not found") + + return { + "transactions": [ + TransactionViewSchema.model_validate(transaction) + for transaction in transactions + ], + "pagination": PaginationResponseSchema( + total=total_count, + page=pagination.page, + size=pagination.size, + total_pages=ceil(total_count / pagination.size), + ), + } + + @service_handler + async def get_transaction_by_id(self, transaction_id: int) -> Transaction: + """handles fetching a single transaction""" + return await self.repo.get_transaction_by_id(transaction_id) + + @service_handler + async def get_transaction_statuses(self) -> List[TransactionStatusSchema]: + """handles fetching all transaction statuses""" + results = await self.repo.get_transaction_statuses() + statuses = [ + TransactionStatusSchema.model_validate(status) for status in results + ] + + if len(statuses) == 0: + raise DataNotFoundException("No transaction statuses found") + + return statuses + + @service_handler + async def export_transactions( + self, export_format, organization_id=None + ) -> StreamingResponse: + """ + Prepares a list of transactions in a file that is downloadable + """ + if not export_format in ["xls", "xlsx", "csv"]: + raise DataNotFoundException("Export format not supported") + + results = await self.repo.get_transactions_paginated( + 0, + None, + [ + or_( + TransactionView.status == "Approved", + TransactionView.status == "Recorded", + 
) + ], + [], + organization_id, + ) + + # Prepare data for the spreadsheet + data = [] + for result in results[0]: + data.append( + [ + f"{transaction_type_to_id_prefix_map[result.transaction_type]}{result.transaction_id}", + result.compliance_period, + result.transaction_type, + result.from_organization, + result.to_organization, + result.quantity, + result.price_per_unit, + result.category, + result.status, + ( + result.transaction_effective_date.strftime("%Y-%m-%d") + if result.transaction_effective_date + else None + ), + ( + result.recorded_date.strftime("%Y-%m-%d") + if result.recorded_date + else None + ), + ( + result.approved_date.strftime("%Y-%m-%d") + if result.approved_date + else None + ), + result.comment, + ] + ) + + # Create a spreadsheet + builder = SpreadsheetBuilder(file_format=export_format) + + builder.add_sheet( + sheet_name=LCFS_Constants.TRANSACTIONS_EXPORT_SHEETNAME, + columns=LCFS_Constants.TRANSACTIONS_EXPORT_COLUMNS, + rows=data, + styles={"bold_headers": True}, + ) + + file_content = builder.build_spreadsheet() + + # Get the current date in YYYY-MM-DD format + current_date = datetime.now().strftime("%Y-%m-%d") + + filename = f"{LCFS_Constants.TRANSACTIONS_EXPORT_FILENAME}-{current_date}.{export_format}" + headers = {"Content-Disposition": f'attachment; filename="{filename}"'} + + return StreamingResponse( + io.BytesIO(file_content), + media_type=FILE_MEDIA_TYPE[export_format.upper()].value, + headers=headers, + ) diff --git a/backend/lcfs/web/api/transaction/views.py b/backend/lcfs/web/api/transaction/views.py new file mode 100644 index 000000000..47bd64b24 --- /dev/null +++ b/backend/lcfs/web/api/transaction/views.py @@ -0,0 +1,93 @@ +from typing import List +import structlog +from fastapi import APIRouter, Depends, status, Request, Body, Query +from fastapi.responses import StreamingResponse +from fastapi_cache.decorator import cache +from lcfs.web.core.decorators import view_handler +from lcfs.web.api.transaction.services import TransactionsService +from lcfs.web.api.transaction.schema import ( + TransactionListSchema, + TransactionStatusSchema, +) +from lcfs.web.api.base import PaginationRequestSchema +from lcfs.db.models.user.Role import RoleEnum + +logger = structlog.get_logger(__name__) + +router = APIRouter() + + +@router.post( + "/{organization_id}", + response_model=TransactionListSchema, + status_code=status.HTTP_200_OK, +) +@view_handler([RoleEnum.GOVERNMENT]) +async def get_transactions_paginated_by_org( + request: Request, + organization_id: int = None, + pagination: PaginationRequestSchema = Body(..., embed=False), + service: TransactionsService = Depends(), +): + """ + Fetches a combined list of Issuances and Transfers for a specific organization, sorted by create_date, with pagination. 
+ """ + return await service.get_transactions_paginated(pagination, organization_id) + + +@router.get( + "/{organization_id}/export", + response_class=StreamingResponse, + status_code=status.HTTP_200_OK, +) +@view_handler([RoleEnum.GOVERNMENT]) +async def export_transactions_by_org( + request: Request, + organization_id: int = None, + format: str = Query(default="xls", description="File export format"), + service: TransactionsService = Depends(), +): + """ + Endpoint to export information of all transactions for a specific organization + """ + return await service.export_transactions(format, organization_id) + + +@router.post("/", response_model=TransactionListSchema, status_code=status.HTTP_200_OK) +@view_handler([RoleEnum.GOVERNMENT]) +async def get_transactions_paginated( + request: Request, + pagination: PaginationRequestSchema = Body(..., embed=False), + service: TransactionsService = Depends(), +): + """ + Fetches a combined list of Issuances and Transfers, sorted by create_date, with pagination. + """ + return await service.get_transactions_paginated(pagination) + + +@router.get("/export", response_class=StreamingResponse, status_code=status.HTTP_200_OK) +@view_handler([RoleEnum.GOVERNMENT]) +async def export_transactions( + request: Request, + format: str = Query(default="xls", description="File export format"), + service: TransactionsService = Depends(), +): + """ + Endpoint to export information of all transactions + """ + return await service.export_transactions(format) + + +@router.get( + "/statuses/", + response_model=List[TransactionStatusSchema], + status_code=status.HTTP_200_OK, +) +@cache(expire=60 * 60 * 24) # cache for 24 hours +@view_handler(["*"]) +async def get_transaction_statuses( + request: Request, service: TransactionsService = Depends() +) -> List[TransactionStatusSchema]: + """Fetch all transaction statuses""" + return await service.get_transaction_statuses() diff --git a/backend/lcfs/web/api/transfer/__init__.py b/backend/lcfs/web/api/transfer/__init__.py index da7a032bb..c62383a1f 100644 --- a/backend/lcfs/web/api/transfer/__init__.py +++ b/backend/lcfs/web/api/transfer/__init__.py @@ -1,4 +1,5 @@ """User API.""" -from lcfs.web.api.organization.views import router + +from lcfs.web.api.transfer.views import router __all__ = ["router"] diff --git a/backend/lcfs/web/api/transfer/repo.py b/backend/lcfs/web/api/transfer/repo.py new file mode 100644 index 000000000..b7274f31b --- /dev/null +++ b/backend/lcfs/web/api/transfer/repo.py @@ -0,0 +1,228 @@ +import structlog +from typing import List + +from fastapi import Depends +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import select, and_ +from sqlalchemy.orm import selectinload +from datetime import datetime + +from lcfs.db.dependencies import get_async_db_session +from lcfs.web.core.decorators import repo_handler +from lcfs.web.api.transfer.schema import TransferSchema + +from lcfs.db.models.transfer.Transfer import Transfer +from lcfs.db.models.transfer.TransferStatus import TransferStatus, TransferStatusEnum +from lcfs.db.models.transfer.TransferCategory import ( + TransferCategory, + TransferCategoryEnum, +) +from lcfs.db.models.transfer.TransferHistory import TransferHistory + +logger = structlog.get_logger(__name__) + + +class TransferRepository: + def __init__(self, db: AsyncSession = Depends(get_async_db_session)): + self.db = db + + @repo_handler + async def get_all_transfers(self) -> List[Transfer]: + """Queries the database for all transfer records.""" + query = 
select(Transfer).options( + selectinload(Transfer.from_organization), + selectinload(Transfer.to_organization), + selectinload(Transfer.current_status), + selectinload(Transfer.transfer_category), + selectinload(Transfer.transfer_history).selectinload( + TransferHistory.user_profile + ), + selectinload(Transfer.transfer_history).selectinload( + TransferHistory.transfer_status + ), + ) + result = await self.db.execute(query) + transfers = result.scalars().all() + return transfers + + # @repo_handler + # async def get_transfers_paginated(self, page: int, size: int) -> List[Transfer]: + # """ + # Fetches a paginated list of Transfer records from the database, ordered by their creation date. + # """ + # offset = (page - 1) * size + # query = ( + # select(Transfer) + # .order_by(Transfer.create_date.desc()) + # .offset(offset) + # .limit(size) + # ) + # results = await self.db.execute(query) + # transfers = results.scalars().all() + # return transfers + + @repo_handler + async def get_transfer_by_id(self, transfer_id: int) -> Transfer: + """ + Queries the database for a transfer by its ID and returns the ORM model. + Eagerly loads related entities to prevent lazy loading issues. + """ + query = ( + select(Transfer) + .options( + selectinload(Transfer.from_organization), + selectinload(Transfer.to_organization), + selectinload(Transfer.from_transaction), + selectinload(Transfer.to_transaction), + selectinload(Transfer.current_status), + selectinload(Transfer.transfer_category), + selectinload(Transfer.transfer_history).selectinload( + TransferHistory.user_profile + ), + selectinload(Transfer.transfer_history).selectinload( + TransferHistory.transfer_status + ), + ) + .where(Transfer.transfer_id == transfer_id) + ) + + result = await self.db.execute(query) + transfer = result.scalars().first() + return transfer + + @repo_handler + async def create_transfer(self, transfer: Transfer) -> TransferSchema: + """Save a transfer and its associated comment in the database.""" + self.db.add(transfer) + await self.db.flush() # Ensures IDs and relationships are populated + await self.db.refresh( + transfer, + [ + "from_organization", + "to_organization", + "current_status", + "transfer_category", + "transfer_history", + ], + ) # Ensures that all specified relations are up-to-date + + # Convert to schema + transfer_schema = TransferSchema.from_orm(transfer) + return transfer_schema + + # @repo_handler + # async def get_transfer_status_by_id( + # self, transfer_status_id: int + # ) -> TransferStatus: + # """Fetch a single transfer status by transfer status id from the database""" + # return await self.db.scalar( + # select(TransferStatus).where( + # TransferStatus.transfer_status_id == transfer_status_id + # ) + # ) + + # @repo_handler + # async def get_transfer_category(self, transfer_category_id: int) -> TransferStatus: + # """Fetch a single category by category id from the database""" + # return await self.db.scalar( + # select(TransferCategory).where( + # TransferCategory.transfer_category_id == transfer_category_id + # ) + # ) + + @repo_handler + async def get_transfer_status_by_name( + self, transfer_status_name: str + ) -> TransferStatus: + """Fetch a single transfer status by transfer status name from the database""" + return await self.db.scalar( + select(TransferStatus).where(TransferStatus.status == transfer_status_name) + ) + + @repo_handler + async def get_transfer_category_by_name( + self, transfer_category_name: str + ) -> TransferCategory: + return await self.db.scalar( + 
select(TransferCategory).where(
+                TransferCategory.category
+                == getattr(TransferCategoryEnum, transfer_category_name)
+            )
+        )
+
+    @repo_handler
+    async def update_transfer(self, transfer: Transfer) -> TransferSchema:
+        """Persists the changes made to the Transfer object to the database."""
+        await self.db.flush()
+        await self.db.refresh(
+            transfer,
+            [
+                "from_organization",
+                "to_organization",
+                "current_status",
+                "transfer_category",
+                "transfer_history",
+            ],
+        )
+        return TransferSchema.model_validate(transfer)
+
+    @repo_handler
+    async def add_transfer_history(
+        self, transfer_id: int, transfer_status_id: int, user_profile_id: int
+    ) -> TransferHistory:
+        """
+        Adds a new record to the transfer history in the database.
+
+        Args:
+            transfer_id (int): The ID of the transfer to which this history record relates.
+            transfer_status_id (int): The status ID that describes the current state of the transfer.
+
+        Returns:
+            TransferHistory: The newly created transfer history record.
+        """
+        new_history_record = TransferHistory(
+            transfer_id=transfer_id,
+            transfer_status_id=transfer_status_id,
+            user_profile_id=user_profile_id,
+        )
+        self.db.add(new_history_record)
+        await self.db.flush()
+        return new_history_record
+
+    @repo_handler
+    async def update_transfer_history(
+        self, transfer_id: int, transfer_status_id: int, user_profile_id: int
+    ) -> TransferHistory:
+        """
+        Updates a transfer history record in the database.
+
+        Args:
+            transfer_id (int): The ID of the transfer to which this history record relates.
+            transfer_status_id (int): The status ID that describes the current state of the transfer.
+
+        Returns:
+            TransferHistory: The updated transfer history record.
+        """
+        existing_history = await self.db.scalar(
+            select(TransferHistory).where(
+                and_(
+                    TransferHistory.transfer_id == transfer_id,
+                    TransferHistory.transfer_status_id == transfer_status_id,
+                )
+            )
+        )
+        existing_history.create_date = datetime.now()
+        existing_history.update_date = datetime.now()
+        existing_history.user_profile_id = user_profile_id
+        self.db.add(existing_history)
+        await self.db.flush()
+        return existing_history
+
+    @repo_handler
+    async def refresh_transfer(self, transfer: Transfer) -> Transfer:
+        """Refreshes a transfer object from the current database session."""
+        await self.db.refresh(transfer)
+        return transfer
diff --git a/backend/lcfs/web/api/transfer/schema.py b/backend/lcfs/web/api/transfer/schema.py
index f4d149db4..889437c8a 100644
--- a/backend/lcfs/web/api/transfer/schema.py
+++ b/backend/lcfs/web/api/transfer/schema.py
@@ -1,87 +1,90 @@
-from pydantic import BaseModel, constr
+from lcfs.db.models.transfer.TransferStatus import TransferStatusEnum
+from lcfs.web.api.base import BaseSchema
 from typing import Optional, List
-from datetime import datetime
+from datetime import date, datetime
 from enum import Enum
+from pydantic import ConfigDict, Field
 
-class TransactionTypeEnum(str, Enum):
-    administrative_adjustment = "Administrative Adjustment"
-    initiative_agreement = "Initiative Agreement"
-    assessment = "Assessment"
-    transfer = "Transfer"
-
-class TransferStatusEnum(str, Enum):
-    draft = "Draft"
-    deleted = "Deleted"
-    sent = "Sent"
-    submitted = "Submitted"
-    recommended = "Recommended"
-    recorded = "Recorded"
-    refused = "Refused"
-    declined = "Declined"
-    rescinded = "Rescinded"
-
-class TransferBase(BaseModel):
-    from_organization: int
-    to_organization: int
-    transaction_id: int
-    transaction_effective_date: Optional[datetime]
-    comment_id: Optional[int]
-    transfer_status: int
-
transfer_category: int - -class Transfer(TransferBase): - transfer_id: int - class Config: - orm_mode = True +class TransferRecommendationEnumSchema(str, Enum): + Record = "Record" + Refuse = "Refuse" -class TransferHistory(TransferBase): - transfer_history_id: int - transfer_id: int +class TransferStatusSchema(BaseSchema): + transfer_status_id: int + status: str - class Config: - orm_mode = True +class TransferCategorySchema(BaseSchema): + transfer_category_id: int + category: str -class IssuanceSchema(BaseModel): - issuance_id: int - compliance_units: int - organization_id: int - transaction_effective_date: datetime - transaction_id: int - comment_id: Optional[int] - -class IssuanceHistorySchema(BaseModel): - issuance_history_id: int - compliance_units: int - issuance_id: int + +class TransferOrganizationSchema(BaseSchema): organization_id: int - transaction_id: Optional[int] - transaction_effective_date: Optional[datetime] - comment_id: Optional[int] + name: str -class TransactionSchema(BaseModel): - transaction_id: int - compliance_units: int - issuance_id: int - transfer_id: int - transaction_type: TransactionTypeEnum - organization: int +class TransferHistoryUserSchema(BaseSchema): + first_name: str + last_name: str + organization: Optional[TransferOrganizationSchema] = None + -class TransactionTypeSchema(BaseModel): - transaction_typ_id: int - type: TransactionTypeEnum +class TransferHistorySchema(BaseSchema): + create_date: datetime + transfer_status: TransferStatusSchema + user_profile: TransferHistoryUserSchema -class TransferStatusSchema(BaseModel): - transaction_status_id: int - status: TransferStatusEnum -class CommentSchema(BaseModel): - comment_id: int - comment: str +class TransferCommentSchema(BaseSchema): + name: str + comment: Optional[str] = None -class CategorySchema(BaseModel): - category_id: int - category: str \ No newline at end of file + +class TransferSchema(BaseSchema): + transfer_id: int + from_organization: TransferOrganizationSchema + to_organization: TransferOrganizationSchema + agreement_date: date + quantity: int + price_per_unit: float + comments: Optional[List[TransferCommentSchema]] = None + from_org_comment: Optional[str] = None + to_org_comment: Optional[str] = None + gov_comment: Optional[str] = None + current_status: TransferStatusSchema + transfer_category: Optional[TransferCategorySchema] = None + transfer_history: Optional[List[TransferHistorySchema]] = None + recommendation: Optional[TransferRecommendationEnumSchema] = None + model_config = ConfigDict(extra="ignore", from_attributes=True) + + +class TransferCreateSchema(BaseSchema): + transfer_id: Optional[int] = None + from_organization_id: int + to_organization_id: int + from_transaction_id: Optional[int] = None + to_transaction_id: Optional[int] = None + agreement_date: Optional[date] = None + quantity: Optional[int] = None + price_per_unit: Optional[float] = None + from_org_comment: Optional[str] = None + to_org_comment: Optional[str] = None + gov_comment: Optional[str] = None + transfer_category_id: Optional[int] = None + current_status_id: Optional[int] = None + current_status: Optional[TransferStatusEnum] = None + recommendation: Optional[TransferRecommendationEnumSchema] = None + + +class TransferUpdate(BaseSchema): + current_status_id: int + comments: Optional[str] = None + recommendation: Optional[str] = None + + +# class TransferHistory(BaseSchema): +# transfer_history_id: int +# transfer_id: int diff --git a/backend/lcfs/web/api/transfer/services.py 
b/backend/lcfs/web/api/transfer/services.py
new file mode 100644
index 000000000..4b2aba0c9
--- /dev/null
+++ b/backend/lcfs/web/api/transfer/services.py
@@ -0,0 +1,442 @@
+import json
+from lcfs.web.api.notification.schema import (
+    TRANSFER_STATUS_NOTIFICATION_MAPPER,
+    NotificationMessageSchema,
+    NotificationRequestSchema,
+)
+from lcfs.web.api.notification.services import NotificationService
+import structlog
+from typing import List, Optional
+from fastapi import Depends, Request, HTTPException
+from datetime import datetime
+from dateutil.relativedelta import relativedelta
+
+from lcfs.db.models.user.Role import RoleEnum
+from lcfs.web.api.transfer.validation import TransferValidation
+from lcfs.web.core.decorators import service_handler
+from lcfs.web.exception.exceptions import DataNotFoundException, ServiceException
+
+# models
+from lcfs.db.models.transfer.Transfer import Transfer
+from lcfs.db.models.transfer.TransferStatus import TransferStatusEnum
+from lcfs.db.models.transfer.TransferCategory import TransferCategoryEnum
+from lcfs.db.models.transaction.Transaction import TransactionActionEnum
+
+# services
+from lcfs.web.api.organizations.services import OrganizationsService
+
+# schema
+from lcfs.web.api.role.schema import user_has_roles
+from lcfs.web.api.transfer.schema import (
+    TransferCommentSchema,
+    TransferSchema,
+    TransferCreateSchema,
+    TransferCategorySchema,
+)
+
+# repo
+from lcfs.web.api.organizations.repo import OrganizationsRepository
+from lcfs.web.api.transfer.repo import TransferRepository
+from lcfs.web.api.transaction.repo import TransactionRepository
+
+logger = structlog.get_logger(__name__)
+
+
+class TransferServices:
+    def __init__(
+        self,
+        request: Request = None,
+        validate: TransferValidation = Depends(TransferValidation),
+        repo: TransferRepository = Depends(TransferRepository),
+        org_repo: OrganizationsRepository = Depends(OrganizationsRepository),
+        org_service: OrganizationsService = Depends(OrganizationsService),
+        transaction_repo: TransactionRepository = Depends(TransactionRepository),
+        notfn_service: NotificationService = Depends(NotificationService),
+    ) -> None:
+        self.validate = validate
+        self.repo = repo
+        self.request = request
+        self.org_repo = org_repo
+        self.org_service = org_service
+        self.transaction_repo = transaction_repo
+        self.notfn_service = notfn_service
+
+    @service_handler
+    async def get_all_transfers(self) -> List[TransferSchema]:
+        """Fetches all transfer records and converts them to Pydantic models."""
+        transfers = await self.repo.get_all_transfers()
+        return [TransferSchema.model_validate(transfer) for transfer in transfers]
+
+    # @service_handler
+    # async def get_transfers_paginated(
+    #     self, page: int, size: int
+    # ) -> List[TransferSchema]:
+    #     transfers = await self.repo.get_transfers_paginated(page, size)
+    #     return [TransferSchema.model_validate(transfer) for transfer in transfers]
+
+    @service_handler
+    async def get_transfer(self, transfer_id: int) -> TransferSchema:
+        """Fetches a single transfer by its ID and converts it to a Pydantic model."""
+        transfer = await self.repo.get_transfer_by_id(transfer_id)
+        if not transfer:
+            raise DataNotFoundException(f"Transfer with ID {transfer_id} not found")
+
+        transfer_view = TransferSchema.model_validate(transfer)
+        comments: List[TransferCommentSchema] = []
+        if (
+            transfer.from_org_comment is not None
+            and transfer.from_org_comment != ""
+            and transfer.current_status.status != TransferStatusEnum.Draft
+        ):
+            comments.append(
+                TransferCommentSchema(
+
name=transfer.from_organization.name,
+                    comment=transfer.from_org_comment,
+                )
+            )
+        if transfer.to_org_comment is not None and transfer.to_org_comment != "":
+            comments.append(
+                TransferCommentSchema(
+                    name=transfer.to_organization.name,
+                    comment=transfer.to_org_comment,
+                )
+            )
+        if (
+            transfer.gov_comment is not None
+            and transfer.gov_comment != ""
+            and transfer.current_status.status
+            in [TransferStatusEnum.Recorded, TransferStatusEnum.Refused]
+        ):
+            comments.append(
+                TransferCommentSchema(
+                    name="Government of British Columbia",
+                    comment=transfer.gov_comment,
+                )
+            )
+        transfer_view.comments = comments
+        # Hide the Recommended status from organizations, and also when the
+        # director has returned the transfer to the analyst and it is back in
+        # Submitted status.
+        if (
+            self.request.user.organization is not None
+            or transfer_view.current_status.status == TransferStatusEnum.Submitted.value
+        ):
+            if (
+                transfer_view.current_status.status
+                == TransferStatusEnum.Recommended.value
+            ):
+                transfer_view.current_status = (
+                    await self.repo.get_transfer_status_by_name(
+                        TransferStatusEnum.Submitted.value
+                    )
+                )
+            transfer_view.transfer_history = list(
+                filter(
+                    lambda history: history.transfer_status.status
+                    != TransferStatusEnum.Recommended.value,
+                    transfer_view.transfer_history,
+                )
+            )
+        return transfer_view
+
+    @service_handler
+    async def create_transfer(
+        self, transfer_data: TransferCreateSchema
+    ) -> TransferSchema:
+        """
+        Handles creating a transfer, including creating a comment and any necessary
+        preprocessing. This method fetches organization instances and creates a new
+        transfer record along with a comment (if provided). If any part of the process
+        fails due to missing data or database issues, appropriate exceptions are raised
+        and handled by the @service_handler decorator.
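+
+        A minimal payload sketch (illustrative values only):
+
+            TransferCreateSchema(
+                from_organization_id=1,
+                to_organization_id=2,
+                quantity=100,
+                price_per_unit=25.0,
+                current_status="Draft",
+            )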
+ """ + transfer = Transfer(**transfer_data.model_dump(exclude={"current_status"})) + current_status = await self.repo.get_transfer_status_by_name( + transfer_data.current_status + ) + # TODO: Currenty by default category id is set to CATEGORY - A + # transfer.transfer_category_id = 1 + + transfer.current_status = current_status + if current_status.status == TransferStatusEnum.Sent: + await self.sign_and_send_from_supplier(transfer) + + transfer = await self.repo.create_transfer(transfer) + # Add a new transfer history record if the status has changed + await self.repo.add_transfer_history( + transfer.transfer_id, + current_status.transfer_status_id, + self.request.user.user_profile_id, + ) + await self._perform_notification_call(transfer, current_status.status) + return transfer + + @service_handler + async def update_transfer(self, transfer_data: TransferCreateSchema) -> Transfer: + """Updates an existing transfer record with new data.""" + new_status = await self.repo.get_transfer_status_by_name( + transfer_data.current_status + ) + transfer = await self.repo.get_transfer_by_id(transfer_data.transfer_id) + + if not transfer: + raise DataNotFoundException( + f"Transfer with id {transfer_data.transfer_id} not found" + ) + + # Check if the new status is different from the current status of the transfer + status_has_changed = transfer.current_status != new_status + re_recommended = ( + any( + history.transfer_status.status == TransferStatusEnum.Recommended + for history in transfer.transfer_history + ) + and new_status.status == TransferStatusEnum.Recommended + ) + # if the transfer status is Draft or Sent then update all the fields within the transfer + if transfer_data.current_status in [ + TransferStatusEnum.Draft.value, + TransferStatusEnum.Sent.value, + ]: + # Only update certain fields if the status is Draft or changing to Sent + if ( + status_has_changed + or transfer_data.current_status == TransferStatusEnum.Draft.value + ): + transfer.to_organization_id = transfer_data.to_organization_id + transfer.agreement_date = transfer_data.agreement_date + transfer.quantity = transfer_data.quantity + transfer.price_per_unit = transfer_data.price_per_unit + transfer.from_org_comment = transfer_data.from_org_comment + # update comments + elif status_has_changed and new_status.status in [ + TransferStatusEnum.Submitted, + TransferStatusEnum.Declined, + ]: + transfer.to_org_comment = transfer_data.to_org_comment + transfer.gov_comment = transfer_data.gov_comment + # if the transfer is returned back to analyst by the director then don't store the history. 
+ if ( + new_status.status == TransferStatusEnum.Submitted + and transfer.current_status.status == TransferStatusEnum.Recommended + ) or re_recommended: + status_has_changed = False + if ( + transfer_data.recommendation + and transfer_data.recommendation != transfer.recommendation + ): + transfer.recommendation = transfer_data.recommendation + # Update existing history record + if re_recommended: + transfer.current_status = await self.repo.update_transfer_history( + transfer_id=transfer.transfer_id, + transfer_status_id=new_status.transfer_status_id, + user_profile_id=self.request.user.user_profile_id, + ) + # Update transfer history and handle status-specific actions if the status has changed + if status_has_changed: + logger.debug( + f"Status change: \ + {transfer.current_status.status} -> {new_status.status}" + ) + + # Matching the current status with enums directly is safer if they are comparable + if new_status.status == TransferStatusEnum.Sent: + await self.sign_and_send_from_supplier(transfer) + elif new_status.status == TransferStatusEnum.Recorded: + + await self.director_record_transfer(transfer) + elif new_status.status in [ + TransferStatusEnum.Declined, + TransferStatusEnum.Rescinded, + TransferStatusEnum.Refused, + ]: + await self.cancel_transfer(transfer) + + new_status = await self.repo.get_transfer_status_by_name( + transfer_data.current_status + ) + # Add a new transfer history record to reflect the status change + await self.repo.add_transfer_history( + transfer_data.transfer_id, + new_status.transfer_status_id, + self.request.user.user_profile_id, + ) + + # Finally, update the transfer's status and save the changes + transfer.current_status = new_status + transfer_result = await self.repo.update_transfer(transfer) + await self._perform_notification_call( + transfer, + status=( + new_status.status + if status_has_changed or re_recommended + else "Return to analyst" + ), + ) + return transfer_result + + async def _perform_notification_call( + self, transfer: TransferSchema, status: TransferStatusEnum + ): + """Send notifications based on the current status of the transfer.""" + notifications = TRANSFER_STATUS_NOTIFICATION_MAPPER.get(status) + status_val = ( + status.value if isinstance(status, TransferStatusEnum) else status + ).lower() + organization_ids = [] + if status in [ + TransferStatusEnum.Submitted, + TransferStatusEnum.Recommended, + TransferStatusEnum.Declined, + ]: + organization_ids = [transfer.from_organization.organization_id] + elif status in [ + TransferStatusEnum.Sent, + TransferStatusEnum.Rescinded, + ]: + organization_ids = [transfer.to_organization.organization_id] + elif status in [ + TransferStatusEnum.Recorded, + TransferStatusEnum.Refused, + ]: + organization_ids = [ + transfer.to_organization.organization_id, + transfer.from_organization.organization_id, + ] + message_data = { + "service": "Transfer", + "id": transfer.transfer_id, + "transactionId": transfer.from_transaction.transaction_id if getattr(transfer, 'from_transaction', None) else None, + "status": status_val, + "fromOrganizationId": transfer.from_organization.organization_id, + "fromOrganization": transfer.from_organization.name, + "toOrganizationId": transfer.to_organization.organization_id, + "toOrganization": transfer.to_organization.name, + } + type = f"Transfer {status_val}" + if status_val == "sent": + type = "Transfer received" + elif status_val == "return to analyst": + type = "Transfer returned" + for org_id in organization_ids: + notification_data = NotificationMessageSchema( 
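+                # The schema below wraps the JSON-encoded message_data built
+                # above; related_transaction_id carries the "CT" display
+                # prefix (e.g. transfer 42 -> "CT42").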
+                type=type,
+                related_transaction_id=f"CT{transfer.transfer_id}",
+                message=json.dumps(message_data),
+                related_organization_id=org_id,
+                origin_user_profile_id=self.request.user.user_profile_id,
+            )
+            if notifications and isinstance(notifications, list):
+                await self.notfn_service.send_notification(
+                    NotificationRequestSchema(
+                        notification_types=notifications,
+                        notification_data=notification_data,
+                    )
+                )
+
+    async def sign_and_send_from_supplier(self, transfer):
+        """Create a reserved transaction to reserve compliance units for the sending organization."""
+        user = self.request.user
+        has_signing_role = user_has_roles(
+            user, [RoleEnum.SUPPLIER, RoleEnum.SIGNING_AUTHORITY]
+        )
+        if not has_signing_role:
+            raise HTTPException(status_code=403, detail="Forbidden.")
+
+        from_transaction = await self.org_service.adjust_balance(
+            transaction_action=TransactionActionEnum.Reserved,
+            compliance_units=-transfer.quantity,  # Negative quantity for sending org
+            organization_id=transfer.from_organization_id,
+        )
+        transfer.from_transaction = from_transaction
+
+    async def director_record_transfer(self, transfer: Transfer):
+        """Confirm the transaction for the sending organization and create a new transaction for the receiving org."""
+
+        user = self.request.user
+        has_director_role = user_has_roles(
+            user, [RoleEnum.GOVERNMENT, RoleEnum.DIRECTOR]
+        )
+
+        if not has_director_role:
+            raise HTTPException(status_code=403, detail="Forbidden.")
+
+        if transfer.from_transaction is None:
+            raise ServiceException(
+                f"From transaction not found for transfer "
+                f"{transfer.transfer_id}. Contact support."
+            )
+
+        # Confirm transaction of sending organization
+        confirm_result = await self.transaction_repo.confirm_transaction(
+            transfer.from_transaction_id
+        )
+
+        if not confirm_result:
+            raise ServiceException(
+                f"Failed to confirm transaction "
+                f"{transfer.from_transaction_id} for transfer {transfer.transfer_id}. Update cancelled."
+            )
+
+        await self.repo.refresh_transfer(transfer)
+
+        if not hasattr(transfer.transfer_category, "category"):
+            today = datetime.now()
+
+            diff = relativedelta(today, transfer.agreement_date)
+
+            category = "A"
+            if (diff.years == 0 and diff.months >= 6 and diff.days > 1) or (
+                diff.years == 1 and diff.months == 0 and diff.days == 1
+            ):
+                category = "B"
+            elif diff.years >= 1:
+                category = "C"
+
+            await self.update_category(transfer.transfer_id, category)
+
+            await self.repo.refresh_transfer(transfer)
+
+        # Create new transaction for receiving organization
+        to_transaction = await self.org_service.adjust_balance(
+            transaction_action=TransactionActionEnum.Adjustment,
+            compliance_units=transfer.quantity,  # Positive quantity for receiving org
+            organization_id=transfer.to_organization_id,
+        )
+        transfer.to_transaction = to_transaction
+
+        await self.repo.refresh_transfer(transfer)
+
+    async def cancel_transfer(self, transfer):
+        """
+        Handle the cancellation of a transfer by releasing reserved transactions.
+        This method is invoked when a transfer is declined, rescinded, or refused.
+        """
+        release_result = await self.transaction_repo.release_transaction(
+            transfer.from_transaction_id
+        )
+        if not release_result:
+            raise ServiceException(
+                f"Failed to release transaction "
+                f"{transfer.from_transaction_id} for transfer {transfer.transfer_id}. Update cancelled."
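+                # (Assumption: release_transaction returns a falsy result,
+                # rather than raising, when the reserved transaction cannot
+                # be released.)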
+            )
+
+    def is_valid_category(self, category: str) -> bool:
+        return category in (item.value for item in TransferCategoryEnum)
+
+    @service_handler
+    async def update_category(self, transfer_id: int, category: Optional[str] = None):
+        new_category = None
+
+        if category is not None:
+            valid_category = self.is_valid_category(category)
+
+            if not valid_category:
+                raise ServiceException(f"'{category}' is not a valid category")
+
+            new_category = await self.repo.get_transfer_category_by_name(category)
+
+        transfer = await self.repo.get_transfer_by_id(transfer_id)
+
+        transfer.transfer_category = new_category
+
+        return transfer
diff --git a/backend/lcfs/web/api/transfer/validation.py b/backend/lcfs/web/api/transfer/validation.py
new file mode 100644
index 000000000..b3b796ffb
--- /dev/null
+++ b/backend/lcfs/web/api/transfer/validation.py
@@ -0,0 +1,63 @@
+from fastapi import Depends, HTTPException, Request
+from lcfs.db.models.user.Role import RoleEnum
+from lcfs.web.api.role.schema import user_has_roles
+from starlette import status
+
+from lcfs.db.models.transfer.TransferStatus import TransferStatusEnum
+
+from lcfs.web.api.transfer.schema import TransferCreateSchema, TransferSchema
+from lcfs.web.api.organizations.repo import OrganizationsRepository
+
+from lcfs.utils.constants import LCFS_Constants
+
+
+class TransferValidation:
+    def __init__(
+        self,
+        request: Request = None,
+        org_repo: OrganizationsRepository = Depends(OrganizationsRepository),
+    ) -> None:
+        self.org_repo = org_repo
+        self.request = request
+
+    async def government_update_transfer(
+        self, request: Request, transfer_create: TransferCreateSchema
+    ):
+        # Ensure only the valid statuses are passed.
+        if transfer_create.current_status not in LCFS_Constants.GOV_TRANSFER_STATUSES:
+            raise HTTPException(
+                status_code=status.HTTP_403_FORBIDDEN,
+                detail="Validation for authorization failed.",
+            )
+        # Before recording the transfer, ensure both organizations are still registered for transfer.
+        if TransferStatusEnum.Recorded.value == transfer_create.current_status:
+            is_to_org_registered = await self.org_repo.is_registered_for_transfer(
+                transfer_create.to_organization_id
+            )
+            is_from_org_registered = await self.org_repo.is_registered_for_transfer(
+                transfer_create.from_organization_id
+            )
+            if not is_to_org_registered or not is_from_org_registered:
+                raise HTTPException(
+                    status_code=status.HTTP_403_FORBIDDEN,
+                    detail="Validation for authorization failed.",
+                )
+        # TODO: Ensure the logged-in user has the necessary permissions and roles.
+
+    async def get_transfer(self, transfer: TransferSchema):
+        if not transfer:
+            raise HTTPException(
+                status_code=status.HTTP_404_NOT_FOUND, detail="Transfer not found."
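+                # The existence check runs before the ownership/role check
+                # below, so an unknown ID yields 404 rather than 403.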
+ ) + org_id = None + if self.request.user.organization: + org_id = self.request.user.organization.organization_id + + if not user_has_roles(self.request.user, [RoleEnum.GOVERNMENT]) and not ( + transfer.from_organization.organization_id == org_id + or transfer.to_organization.organization_id == org_id + ): + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="Validation for authorization failed.", + ) diff --git a/backend/lcfs/web/api/transfer/views.py b/backend/lcfs/web/api/transfer/views.py index 6d676b44f..939acf776 100644 --- a/backend/lcfs/web/api/transfer/views.py +++ b/backend/lcfs/web/api/transfer/views.py @@ -1,222 +1,66 @@ -from fastapi import APIRouter, Depends, HTTPException -from sqlalchemy.ext.asyncio import AsyncSession -from typing import List +from fastapi import APIRouter, Depends, Request, status, Body +from typing import List, Annotated + +from lcfs.web.api.transfer.validation import TransferValidation from lcfs.db import dependencies -from lcfs.db.models import Transfer, Issuance, TransferHistory, IssuanceHistory -from api.transfer import schema +from lcfs.web.api.transfer.schema import ( + TransferCreateSchema, + TransferSchema, +) +from lcfs.web.api.transfer.services import TransferServices +from lcfs.web.core.decorators import view_handler +from lcfs.db.models.user.Role import RoleEnum router = APIRouter() get_async_db = dependencies.get_async_db_session -@router.post("/transfers/", response_model=schema.Transfer) -async def create_transfer(transfer: schema.TransferBase, db: AsyncSession = Depends(get_async_db)): - try: - new_transfer = Transfer(**transfer.dict()) - db.add(new_transfer) - await db.commit() - await db.refresh(new_transfer) - return new_transfer - except Exception as e: - raise HTTPException(status_code=500, detail=f"Internal Server Error: {str(e)}") - -@router.get("/transfers/", response_model=List[schema.Transfer]) -async def get_transfers(db: AsyncSession = Depends(get_async_db)): - try: - transfers = await db.execute(Transfer.query().all()) - return transfers - except Exception as e: - raise HTTPException(status_code=500, detail=f"Internal Server Error: {str(e)}") - - -@router.post("/transfer_histories/", response_model=schema.TransferHistory) -async def create_transfer_history(transfer_history: schema.TransferHistory, db: AsyncSession = Depends(get_async_db)): - try: - new_transfer_history = TransferHistory(**transfer_history.dict()) - db.add(new_transfer_history) - await db.commit() - await db.refresh(new_transfer_history) - return new_transfer_history - except Exception as e: - raise HTTPException(status_code=500, detail=f"Internal Server Error: {str(e)}") - -@router.get("/transfer_histories/", response_model=List[schema.TransferHistory]) -async def get_transfer_histories(db: AsyncSession = Depends(get_async_db)): - try: - transfer_histories = await db.execute(TransferHistory.query().all()) - return transfer_histories - except Exception as e: - raise HTTPException(status_code=500, detail=f"Internal Server Error: {str(e)}") +@router.get("/", response_model=List[TransferSchema]) +@view_handler(["*"]) +async def get_all_transfers(request: Request, service: TransferServices = Depends()): + """Endpoint to fetch all transfers.""" + return await service.get_all_transfers() -@router.get("/transfer_histories/{transfer_history_id}", response_model=schema.TransferHistory) -async def get_transfer_history(transfer_history_id: int, db: AsyncSession = Depends(get_async_db)): - try: - transfer_history = await 
db.execute(models.TransferHistory.query().get(transfer_history_id)) - if transfer_history is None: - raise HTTPException(status_code=404, detail="Transfer history not found") - return transfer_history - except Exception as e: - raise HTTPException(status_code=500, detail=f"Internal Server Error: {str(e)}") - -@router.put("/transfer_histories/{transfer_history_id}", response_model=schema.TransferHistory) -async def update_transfer_history( - transfer_history_id: int, transfer_history: schema.TransferHistory, db: AsyncSession = Depends(get_async_db) +@router.get("/{transfer_id}", response_model=TransferSchema) +@view_handler(["*"]) +async def get_transfer( + request: Request, transfer_id: int, service: TransferServices = Depends(), + validate: TransferValidation = Depends(), ): - try: - db_transfer_history = await db.execute(TransferHistory.query().get(transfer_history_id)) - if db_transfer_history is None: - raise HTTPException(status_code=404, detail="Transfer history not found") - - for field, value in transfer_history.dict().items(): - setattr(db_transfer_history, field, value) - - await db.commit() - await db.refresh(db_transfer_history) - return db_transfer_history - except Exception as e: - raise HTTPException(status_code=500, detail=f"Internal Server Error: {str(e)}") - - -@router.delete("/transfer_histories/{transfer_history_id}") -async def delete_transfer_history(transfer_history_id: int, db: AsyncSession = Depends(get_async_db)): - try: - transfer_history = await db.execute(TransferHistory.query().get(transfer_history_id)) - if transfer_history is None: - raise HTTPException(status_code=404, detail="Transfer history not found") - - db.delete(transfer_history) - await db.commit() - return {"message": "Transfer history deleted"} - except Exception as e: - raise HTTPException(status_code=500, detail=f"Internal Server Error: {str(e)}") - - - -@router.post("/issuances/", response_model=schema.IssuanceSchema) -async def create_issuance(issuance: schema.IssuanceSchema, db: AsyncSession = Depends(get_async_db)): - try: - new_issuance = Issuance(**issuance.dict()) - db.add(new_issuance) - await db.commit() - await db.refresh(new_issuance) - return new_issuance - except Exception as e: - raise HTTPException(status_code=500, detail=f"Internal Server Error: {str(e)}") - - -@router.get("/issuances/", response_model=List[schema.IssuanceSchema]) -async def get_issuances(db: AsyncSession = Depends(get_async_db)): - try: - issuances = await db.execute(Issuance.query().all()) - return issuances - except Exception as e: - raise HTTPException(status_code=500, detail=f"Internal Server Error: {str(e)}") - - -@router.get("/issuances/{issuance_id}", response_model=schema.IssuanceSchema) -async def get_issuance(issuance_id: int, db: AsyncSession = Depends(get_async_db)): - try: - issuance = await db.execute(Issuance.query().get(issuance_id)) - if issuance is None: - raise HTTPException(status_code=404, detail="Issuance not found") - return issuance - except Exception as e: - raise HTTPException(status_code=500, detail=f"Internal Server Error: {str(e)}") - - -@router.put("/issuances/{issuance_id}", response_model=schema.IssuanceSchema) -async def update_issuance( - issuance_id: int, issuance: schema.IssuanceSchema, db: AsyncSession = Depends(get_async_db) + """Endpoint to fetch a transfer by its ID.""" + response = await service.get_transfer(transfer_id) + await validate.get_transfer(response) + return response + + +@router.put( + "/{transfer_id}", response_model=TransferSchema, 
status_code=status.HTTP_200_OK
+)
+@view_handler([RoleEnum.GOVERNMENT])
+async def government_update_transfer(
+    request: Request,
+    transfer_id: int,
+    transfer_data: TransferCreateSchema,
+    service: TransferServices = Depends(),
+    validate: TransferValidation = Depends(),
 ):
-    try:
-        db_issuance = await db.execute(Issuance.query().get(issuance_id))
-        if db_issuance is None:
-            raise HTTPException(status_code=404, detail="Issuance not found")
-
-        for field, value in issuance.dict().items():
-            setattr(db_issuance, field, value)
-
-        await db.commit()
-        await db.refresh(db_issuance)
-        return db_issuance
-    except Exception as e:
-        raise HTTPException(status_code=500, detail=f"Internal Server Error: {str(e)}")
-
-
-@router.delete("/issuances/{issuance_id}")
-async def delete_issuance(issuance_id: int, db: AsyncSession = Depends(get_async_db)):
-    try:
-        issuance = await db.execute(Issuance.query().get(issuance_id))
-        if issuance is None:
-            raise HTTPException(status_code=404, detail="Issuance not found")
-
-        db.delete(issuance)
-        await db.commit()
-        return {"message": "Issuance deleted"}
-    except Exception as e:
-        raise HTTPException(status_code=500, detail=f"Internal Server Error: {str(e)}")
-
-@router.post("/issuance_histories/", response_model=schema.IssuanceHistorySchema)
-async def create_issuance_history(issuance_history: schema.IssuanceHistorySchema, db: AsyncSession = Depends(get_async_db)):
-    try:
-        new_issuance_history = IssuanceHistory(**issuance_history.dict())
-        db.add(new_issuance_history)
-        await db.commit()
-        await db.refresh(new_issuance_history)
-        return new_issuance_history
-    except Exception as e:
-        raise HTTPException(status_code=500, detail=f"Internal Server Error: {str(e)}")
-
-
-@router.get("/issuance_histories/", response_model=List[schema.IssuanceHistorySchema])
-async def get_issuance_histories(db: AsyncSession = Depends(get_async_db)):
-    try:
-        issuance_histories = await db.execute(IssuanceHistory.query().all())
-        return issuance_histories
-    except Exception as e:
-        raise HTTPException(status_code=500, detail=f"Internal Server Error: {str(e)}")
-
-
-@router.get("/issuance_histories/{issuance_history_id}", response_model=schema.IssuanceHistorySchema)
-async def get_issuance_history(issuance_history_id: int, db: AsyncSession = Depends(get_async_db)):
-    try:
-        issuance_history = await db.execute(IssuanceHistory.query().get(issuance_history_id))
-        if issuance_history is None:
-            raise HTTPException(status_code=404, detail="Issuance history not found")
-        return issuance_history
-    except Exception as e:
-        raise HTTPException(status_code=500, detail=f"Internal Server Error: {str(e)}")
-
-
-@router.put("/issuance_histories/{issuance_history_id}", response_model=schema.IssuanceHistorySchema)
-async def update_issuance_history(
-    issuance_history_id: int, issuance_history: schema.IssuanceHistorySchema, db: AsyncSession = Depends(get_async_db)
+    """Endpoint for government users to update an existing transfer."""
+    await validate.government_update_transfer(request, transfer_data)
+    transfer_data.transfer_id = transfer_id
+    return await service.update_transfer(transfer_data)
+
+
+@router.put(
+    "/{transfer_id}/category",
+    response_model=TransferSchema,
+    status_code=status.HTTP_200_OK,
+)
+@view_handler([RoleEnum.GOVERNMENT])
+async def update_category(
+    request: Request,
+    transfer_id: int,
+    category: Annotated[str, Body()] = None,
+    service: TransferServices = Depends(),
 ):
-    try:
-        db_issuance_history = await db.execute(IssuanceHistory.query().get(issuance_history_id))
-        if
db_issuance_history is None: - raise HTTPException(status_code=404, detail="Issuance history not found") - - for field, value in issuance_history.dict().items(): - setattr(db_issuance_history, field, value) - - await db.commit() - await db.refresh(db_issuance_history) - return db_issuance_history - except Exception as e: - raise HTTPException(status_code=500, detail=f"Internal Server Error: {str(e)}") - - -@router.delete("/issuance_histories/{issuance_history_id}") -async def delete_issuance_history(issuance_history_id: int, db: AsyncSession = Depends(get_async_db)): - try: - issuance_history = await db.execute(IssuanceHistory.query().get(issuance_history_id)) - if issuance_history is None: - raise HTTPException(status_code=404, detail="Issuance history not found") - - db.delete(issuance_history) - await db.commit() - return {"message": "Issuance history deleted"} - except Exception as e: - raise HTTPException(status_code=500, detail=f"Internal Server Error: {str(e)}") \ No newline at end of file + return await service.update_category(transfer_id, category) diff --git a/backend/lcfs/web/api/user/__init__.py b/backend/lcfs/web/api/user/__init__.py index 62b6f2c66..6a9ab9fe0 100644 --- a/backend/lcfs/web/api/user/__init__.py +++ b/backend/lcfs/web/api/user/__init__.py @@ -1,4 +1,5 @@ """User API.""" + from lcfs.web.api.user.views import router __all__ = ["router"] diff --git a/backend/lcfs/web/api/user/repo.py b/backend/lcfs/web/api/user/repo.py new file mode 100644 index 000000000..f55e0eab4 --- /dev/null +++ b/backend/lcfs/web/api/user/repo.py @@ -0,0 +1,688 @@ +import structlog +from typing import List + +from fastapi import Depends +from fastapi_cache.decorator import cache +from lcfs.db.models.user import UserLoginHistory +from sqlalchemy import ( + and_, + select, + asc, + desc, + union_all, + literal_column, + func, + cast, + String, + update, +) +from sqlalchemy.orm import joinedload +from sqlalchemy.ext.asyncio import AsyncSession +from lcfs.web.exception.exceptions import DataNotFoundException + +from lcfs.services.keycloak.dependencies import parse_external_username +from lcfs.web.core.decorators import repo_handler +from lcfs.db.dependencies import get_async_db_session +from lcfs.db.models.user.UserProfile import UserProfile +from lcfs.db.models.user.UserRole import UserRole +from lcfs.db.models.organization.Organization import Organization +from lcfs.db.models.user.Role import Role, RoleEnum +from lcfs.db.models.transfer.TransferHistory import TransferHistory +from lcfs.db.models.transfer.TransferStatus import TransferStatus +from lcfs.db.models.initiative_agreement.InitiativeAgreementHistory import ( + InitiativeAgreementHistory, +) +from lcfs.db.models.initiative_agreement.InitiativeAgreementStatus import ( + InitiativeAgreementStatus, +) +from lcfs.db.models.admin_adjustment.AdminAdjustmentHistory import ( + AdminAdjustmentHistory, +) +from lcfs.db.models.admin_adjustment.AdminAdjustmentStatus import AdminAdjustmentStatus +from lcfs.web.api.user.schema import ( + UserCreateSchema, + UserBaseSchema, + UserLoginHistorySchema, +) +from lcfs.web.api.base import ( + PaginationRequestSchema, + camel_to_snake, + apply_filter_conditions, + get_field_for_filter, +) + +logger = structlog.get_logger(__name__) + + +class UserRepository: + EXCLUDED_USER_ACTIVITY_STATUSES = ["Draft"] + + def __init__(self, db: AsyncSession = Depends(get_async_db_session)): + self.db = db + + def apply_filters(self, pagination, conditions): + role_filter_present = False + for filter in pagination.filters: + 
filter_value = filter.filter + filter_option = filter.type + filter_type = filter.filter_type + + if filter.field == "role": + field = get_field_for_filter(Role, "name") + role_filter_present = True + conditions.append( + Role.name.in_( + [RoleEnum(role.strip()) for role in filter_value.split(",")] + ) + ) + elif filter.field == "is_active": + filter_value = True if filter_value == "Active" else False + filter_option = "true" if filter_value else "false" + field = get_field_for_filter(UserProfile, "is_active") + conditions.append( + apply_filter_conditions( + field, + filter_value, + filter_option, + filter_type, + ) + ) + elif filter.field == "first_name": + txt = filter_value.split(" ") + field1 = get_field_for_filter(UserProfile, "first_name") + conditions.append( + apply_filter_conditions( + field1, + txt[0], + filter_option, + filter_type, + ) + ) + field2 = get_field_for_filter(UserProfile, "last_name") + conditions.append( + apply_filter_conditions( + field2, + txt[1] if len(txt) > 1 else "", + filter_option, + filter_type, + ) + ) + else: + field = get_field_for_filter(UserProfile, filter.field) + conditions.append( + apply_filter_conditions( + field, filter_value, filter_option, filter_type + ) + ) + return role_filter_present + + async def find_user_role(self, role_name): + role_result = await self.db.execute(select(Role).filter(Role.name == role_name)) + role = role_result.scalar_one_or_none() + if role: + db_user_role = UserRole(role=role) + return db_user_role + return None + + async def update_idir_roles(self, user, new_roles, existing_roles_set): + for new_role in new_roles: + if new_role in { + RoleEnum.ANALYST, + RoleEnum.COMPLIANCE_MANAGER, + RoleEnum.DIRECTOR, + }: + if new_role not in existing_roles_set: + # Remove existing roles + roles_to_keep = [ + user_role + for user_role in user.user_roles + if user_role.role.name + not in { + RoleEnum.ANALYST, + RoleEnum.COMPLIANCE_MANAGER, + RoleEnum.DIRECTOR, + } + ] + # Add new role + user.user_roles = roles_to_keep + user.user_roles.append(await self.find_user_role(new_role)) + elif ( + new_role == RoleEnum.ADMINISTRATOR + and RoleEnum.ADMINISTRATOR not in existing_roles_set + ): + # Add administrator role + user.user_roles.append(await self.find_user_role(new_role)) + elif ( + new_role == RoleEnum.GOVERNMENT + and RoleEnum.GOVERNMENT not in existing_roles_set + ): + # Add government role + user.user_roles.append(await self.find_user_role(new_role)) + + if ( + RoleEnum.ADMINISTRATOR not in new_roles + and RoleEnum.ADMINISTRATOR in existing_roles_set + ): + # Remove existing roles + roles_to_keep = [ + user_role + for user_role in user.user_roles + if user_role.role.name != RoleEnum.ADMINISTRATOR + ] + user.user_roles = roles_to_keep + + async def update_bceid_roles(self, user, new_roles, existing_roles_set): + for new_role in new_roles: + if ( + new_role == RoleEnum.READ_ONLY + and RoleEnum.READ_ONLY not in existing_roles_set + ): + roles_to_keep = [ + user_role + for user_role in user.user_roles + if user_role.role.name == RoleEnum.SUPPLIER + ] + # Add read_only role + user.user_roles = roles_to_keep + user.user_roles.append(await self.find_user_role(new_role)) + elif ( + new_role == RoleEnum.SUPPLIER + and RoleEnum.SUPPLIER not in existing_roles_set + ): + # Add supplier role + user.user_roles.append(await self.find_user_role(new_role)) + elif new_role in { + RoleEnum.COMPLIANCE_REPORTING, + RoleEnum.MANAGE_USERS, + RoleEnum.TRANSFER, + RoleEnum.SIGNING_AUTHORITY, + }: + if new_role not in existing_roles_set: + # Add 
missing role
+                    user.user_roles.append(await self.find_user_role(new_role))
+        user_roles_to_keep = [user_role for user_role in user.user_roles]
+        for user_role in user.user_roles:
+            if (
+                user_role.role.name
+                in {
+                    RoleEnum.COMPLIANCE_REPORTING,
+                    RoleEnum.MANAGE_USERS,
+                    RoleEnum.TRANSFER,
+                    RoleEnum.SIGNING_AUTHORITY,
+                    RoleEnum.READ_ONLY,
+                }
+                and user_role.role.name not in new_roles
+            ):
+                user_roles_to_keep.remove(user_role)
+        user.user_roles = user_roles_to_keep
+
+    @repo_handler
+    async def get_users_paginated(
+        self,
+        pagination: PaginationRequestSchema,
+    ) -> List[UserBaseSchema]:
+        """
+        Queries users from the database with optional filters for role, name,
+        organization, and active status, all derived from the supplied
+        pagination object. Supports pagination and sorting.
+
+        Args:
+            pagination (PaginationRequestSchema): Pagination, sorting, and
+                filter parameters.
+
+        Returns:
+            List[UserBaseSchema]: A list of user profiles matching the query.
+        """
+        # Build the base query statement
+        conditions = []
+        role_filter_present = False
+        if pagination.filters and len(pagination.filters) > 0:
+            try:
+                role_filter_present = self.apply_filters(pagination, conditions)
+            except Exception as e:
+                raise ValueError(f"Invalid filter provided: {pagination.filters}.") from e
+
+        # Apply pagination and sorting parameters
+        offset = 0 if (pagination.page < 1) else (pagination.page - 1) * pagination.size
+        limit = pagination.size
+        # get distinct profile ids from UserProfile
+        unique_ids_query = select(UserProfile).where(and_(*conditions))
+        if role_filter_present:
+            unique_ids_query = unique_ids_query.join(
+                UserRole, UserProfile.user_profile_id == UserRole.user_profile_id
+            ).join(Role, UserRole.role_id == Role.role_id)
+
+        # Applying pagination, sorting, and filters to the query
+        query = (
+            select(UserProfile)
+            .join(
+                UserRole,
+                UserProfile.user_profile_id == UserRole.user_profile_id,
+                isouter=True,
+            )
+            .join(Role, UserRole.role_id == Role.role_id, isouter=True)
+            .options(
+                joinedload(UserProfile.organization),
+                joinedload(UserProfile.user_roles).options(joinedload(UserRole.role)),
+            )
+        )
+
+        query_result = (
+            (await self.db.execute(unique_ids_query)).unique().scalars().all()
+        )
+        total_count = len(query_result)
+        # Sort the query results
+        for order in pagination.sort_orders:
+            sort_method = asc if order.direction == "asc" else desc
+            if order.field == "role":
+                order.field = get_field_for_filter(Role, "name")
+            unique_ids_query = unique_ids_query.order_by(sort_method(order.field))
+            query = query.order_by(sort_method(order.field))
+        unique_ids = (
+            (await self.db.execute(unique_ids_query.offset(offset).limit(limit)))
+            .unique()
+            .scalars()
+            .all()
+        )
+
+        profile_id_list = [user.user_profile_id for user in unique_ids]
+        if limit <= 0:
+            query = query.where(*conditions)
+        else:
+            query = query.where(
+                and_(UserProfile.user_profile_id.in_(profile_id_list), *conditions)
+            )
+        # Execute the query
+        user_results = await self.db.execute(query)
+        results = user_results.scalars().unique().all()
+
+        # Convert the results to UserBaseSchema schemas
+        return [UserBaseSchema.model_validate(user) for user in results], total_count
+
+    @repo_handler
+    async def get_user_by_id(self, user_id: int) -> UserProfile:
+        query
= ( + select(UserProfile) + .options( + joinedload(UserProfile.organization), + joinedload(UserProfile.user_roles).options(joinedload(UserRole.role)), + ) + .where(UserProfile.user_profile_id == user_id) + ) + + # Execute the query + user_result = await self.db.execute(query) + return user_result.unique().scalar_one_or_none() if user_result else None + + @repo_handler + async def create_user( + self, user_create: UserCreateSchema, user_id: int = None + ) -> UserProfile: + db_user_profile = UserProfile(**user_create.model_dump(exclude={"roles"})) + user_data = user_create.model_dump() + roles = user_data.pop("roles", {}) + # Find the RoleEnum member corresponding to each role + role_enum_members = [ + role_enum for role_enum in RoleEnum if role_enum.value.lower() in roles + ] + + if db_user_profile.is_active: + for role_name in role_enum_members: + role_result = await self.db.execute( + select(Role).filter(Role.name == role_name) + ) + role = role_result.scalar_one_or_none() + if role: + db_user_role = UserRole(role=role) + db_user_profile.user_roles.append(db_user_role) + if db_user_profile.organization_id: + org_result = await self.db.execute( + select(Organization).filter( + Organization.organization_id == db_user_profile.organization_id + ) + ) + org = org_result.scalar_one_or_none() + db_user_profile.organization = org + self.db.add(db_user_profile) + return db_user_profile + + @repo_handler + async def update_user( + self, user: UserProfile, user_update: UserCreateSchema + ) -> None: + user_data = user_update.model_dump() + updated_user_profile = UserProfile(**user_update.model_dump(exclude={"roles"})) + roles = user_data.pop("roles", {}) + # Find the RoleEnum member corresponding to each role + new_roles = [ + role_enum for role_enum in RoleEnum if role_enum.value.lower() in roles + ] + # Create a set for faster membership checks + existing_roles_set = set(user.role_names) + + # Update the user object with the new data + user.email = updated_user_profile.email + user.title = updated_user_profile.title + user.first_name = updated_user_profile.first_name + user.last_name = updated_user_profile.last_name + user.is_active = updated_user_profile.is_active + user.keycloak_email = updated_user_profile.keycloak_email + user.keycloak_username = updated_user_profile.keycloak_username + user.phone = updated_user_profile.phone + user.mobile_phone = updated_user_profile.mobile_phone + + if user.organization: + await self.update_bceid_roles(user, new_roles, existing_roles_set) + else: + await self.update_idir_roles(user, new_roles, existing_roles_set) + self.db.add(user) + return user + + @repo_handler + async def delete_user(self, user: UserProfile) -> None: + await self.db.delete(user) + logger.info("Deleted user", user_profile_id=user.user_profile_id) + return None + + @repo_handler + async def get_full_name(self, username: str) -> str: + """ + Fetches the full name of a user based on their username. + + Args: + username (str): Username of the user whose full name is to be fetched. + + Returns: + str: The full name of the user. + """ + full_name_result = await self.db.execute( + select( + (UserProfile.first_name + " " + UserProfile.last_name).label( + "full_name" + ) + ).where(UserProfile.keycloak_username == username) + ) + return full_name_result.scalars().first() + + def _build_activity_queries(self, user_id: int = None): + """ + Builds the activity queries for user-specific or all-user activity logs. + + Args: + user_id (Optional[int]): If provided, filters activities for a specific user. 
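+
+            Illustrative shape of the combined query (pseudo-SQL):
+
+                SELECT transfer_id, status, 'Transfer', create_date, user_id ...
+                UNION ALL
+                SELECT initiative_agreement_id, status, 'InitiativeAgreement', ...
+                UNION ALL
+                SELECT admin_adjustment_id, status, 'AdminAdjustment', ...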
+ + Returns: + Tuple: Combined query for user activities, list of relevant conditions. + """ + + # Note: If we encounter performance issues in the future, + # consider replacing the current UNION ALL with a materialized view to pre-compute the union of activity logs. + # We also need to add indexes on create_date and other sorting columns to improve performance across the system. + + # TransferHistory Query + transfer_query = ( + select( + cast(TransferHistory.transfer_id, String).label("transaction_id"), + cast(TransferStatus.status, String).label("action_taken"), + literal_column("'Transfer'").label("transaction_type"), + TransferHistory.create_date.label("create_date"), + TransferHistory.user_profile_id.label("user_id"), + ) + .select_from(TransferHistory) + .join( + TransferStatus, + TransferHistory.transfer_status_id == TransferStatus.transfer_status_id, + ) + .where( + cast(TransferStatus.status, String).notin_( + self.EXCLUDED_USER_ACTIVITY_STATUSES + ) + ) + ) + + # InitiativeAgreementHistory Query + initiative_query = ( + select( + cast(InitiativeAgreementHistory.initiative_agreement_id, String).label( + "transaction_id" + ), + cast(InitiativeAgreementStatus.status, String).label("action_taken"), + literal_column("'InitiativeAgreement'").label("transaction_type"), + InitiativeAgreementHistory.create_date.label("create_date"), + InitiativeAgreementHistory.user_profile_id.label("user_id"), + ) + .select_from(InitiativeAgreementHistory) + .join( + InitiativeAgreementStatus, + InitiativeAgreementHistory.initiative_agreement_status_id + == InitiativeAgreementStatus.initiative_agreement_status_id, + ) + .where( + cast(InitiativeAgreementStatus.status, String).notin_( + self.EXCLUDED_USER_ACTIVITY_STATUSES + ) + ) + ) + + # AdminAdjustmentHistory Query + admin_adjustment_query = ( + select( + cast(AdminAdjustmentHistory.admin_adjustment_id, String).label( + "transaction_id" + ), + cast(AdminAdjustmentStatus.status, String).label("action_taken"), + literal_column("'AdminAdjustment'").label("transaction_type"), + AdminAdjustmentHistory.create_date.label("create_date"), + AdminAdjustmentHistory.user_profile_id.label("user_id"), + ) + .select_from(AdminAdjustmentHistory) + .join( + AdminAdjustmentStatus, + AdminAdjustmentHistory.admin_adjustment_status_id + == AdminAdjustmentStatus.admin_adjustment_status_id, + ) + .where( + cast(AdminAdjustmentStatus.status, String).notin_( + self.EXCLUDED_USER_ACTIVITY_STATUSES + ) + ) + ) + + # Combine all queries using union_all + combined_query = union_all( + transfer_query, initiative_query, admin_adjustment_query + ).alias("activities") + + # If a specific user_id is provided, filter by that user_id + conditions = [] + if user_id is not None: + conditions.append(combined_query.c.user_id == user_id) + + return combined_query, conditions + + async def _get_paginated_user_activities( + self, combined_query, conditions, pagination + ): + """ + Handles pagination, filtering, and sorting for user activities. + + Args: + combined_query: The SQLAlchemy query for activities. + conditions: List of conditions to apply to the query. + pagination: PaginationRequestSchema for pagination and filtering. + + Returns: + Tuple: List of activities and total count. 
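+
+        Filter and sort fields arrive camelCased from the grid (for example
+        "actionTaken") and are converted with camel_to_snake before being
+        matched against the unioned columns.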
+        """
+        # Apply filters from pagination
+        if pagination.filters:
+            for filter in pagination.filters:
+                field_name = camel_to_snake(filter.field)
+                field = getattr(combined_query.c, field_name, None)
+                if field is not None:
+                    condition = apply_filter_conditions(
+                        field, filter.filter, filter.type, filter.filter_type
+                    )
+                    if condition is not None:
+                        conditions.append(condition)
+
+        # Apply ordering
+        order_by_clauses = []
+        if pagination.sort_orders:
+            for sort_order in pagination.sort_orders:
+                field_name = camel_to_snake(sort_order.field)
+                field = getattr(combined_query.c, field_name, None)
+                if field is not None:
+                    order = asc(field) if sort_order.direction == "asc" else desc(field)
+                    order_by_clauses.append(order)
+        else:
+            # Default ordering by timestamp descending
+            order_by_clauses.append(desc(combined_query.c.create_date))
+
+        # Build the final query with conditions, ordering, and pagination
+        final_query = (
+            select(combined_query)
+            .where(and_(*conditions))
+            .order_by(*order_by_clauses)
+            .offset((pagination.page - 1) * pagination.size)
+            .limit(pagination.size)
+        )
+
+        # Execute the query
+        result = await self.db.execute(final_query)
+        activities = result.fetchall()
+
+        # Get total count for pagination
+        count_query = (
+            select(func.count()).select_from(combined_query).where(and_(*conditions))
+        )
+        total_count_result = await self.db.execute(count_query)
+        total_count = total_count_result.scalar()
+
+        return activities, total_count
+
+    @repo_handler
+    async def get_user_activities_paginated(
+        self, user_id: int, pagination: PaginationRequestSchema
+    ) -> tuple:
+        """
+        Fetches major activities for a specific user with pagination and filters,
+        excluding specified statuses. Returns a tuple of (activity rows, total count).
+        """
+        combined_query, conditions = self._build_activity_queries(user_id=user_id)
+        return await self._get_paginated_user_activities(
+            combined_query, conditions, pagination
+        )
+
+    @repo_handler
+    async def get_all_user_activities_paginated(
+        self, pagination: PaginationRequestSchema
+    ) -> tuple:
+        """
+        Fetches major activities for all users with pagination and filters,
+        excluding specified statuses. Returns a tuple of (activity rows, total count).
+        """
+        combined_query, conditions = self._build_activity_queries()
+        return await self._get_paginated_user_activities(
+            combined_query, conditions, pagination
+        )
+
+    def _apply_login_history_filters(self, query, pagination):
+        """
+        Applies filters and sorting orders to the login history query based on the pagination parameters.
+
+        Args:
+            query: The SQLAlchemy query for login history.
+            pagination: PaginationRequestSchema for pagination and filtering.
+
+        Returns:
+            The query with filter conditions and ordering applied.
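+
+        Example (a sketch; the exact SQL depends on apply_filter_conditions):
+            # A pagination filter such as
+            #   {"field": "external_username", "filter_type": "text",
+            #    "type": "contains", "filter": "jdoe"}
+            # is translated into a WHERE condition on
+            # UserLoginHistory.external_username before ordering is applied.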
+        """
+        conditions = []
+        if pagination.filters and len(pagination.filters) > 0:
+            for filter in pagination.filters:
+                filter_value = filter.filter
+                filter_option = filter.type
+                filter_type = filter.filter_type
+                if filter.field is not None:
+                    field = get_field_for_filter(UserLoginHistory, filter.field)
+                    if field is not None:
+                        condition = apply_filter_conditions(
+                            field, filter_value, filter_option, filter_type
+                        )
+                        if condition is not None:
+                            conditions.append(condition)
+
+        query = query.where(and_(*conditions))
+        # Apply ordering
+        order_by_clauses = []
+        if pagination.sort_orders and len(pagination.sort_orders) > 0:
+            for sort_order in pagination.sort_orders:
+                field = get_field_for_filter(UserLoginHistory, sort_order.field)
+                if field is not None:
+                    order_clause = (
+                        asc(field) if sort_order.direction == "asc" else desc(field)
+                    )
+                    order_by_clauses.append(order_clause)
+        else:
+            # Default ordering by timestamp descending
+            order_by_clauses.append(desc(UserLoginHistory.create_date))
+        return query.order_by(*order_by_clauses)
+
+    @repo_handler
+    async def get_all_user_login_history_paginated(
+        self, pagination: PaginationRequestSchema
+    ) -> tuple:
+        """
+        Fetches login history entries for all users with pagination, filtering,
+        and sorting. Returns a tuple of (histories, total count).
+        """
+        query = self._apply_login_history_filters(select(UserLoginHistory), pagination)
+        # Note: the total is computed by materializing the filtered rows; a
+        # SELECT COUNT(*) query would be cheaper once this table grows large.
+        total_count = len((await self.db.execute(query)).scalars().unique().all())
+        offset = 0 if (pagination.page < 1) else (pagination.page - 1) * pagination.size
+        limit = pagination.size
+        result = (
+            (await self.db.execute(query.offset(offset).limit(limit)))
+            .scalars()
+            .unique()
+            .all()
+        )
+        return [
+            UserLoginHistorySchema.model_validate(history) for history in result
+        ], total_count
+
+    @repo_handler
+    async def create_login_history(self, user: UserProfile):
+        """
+        Creates a user login history entry asynchronously.
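+
+        Example (sketch): called from the service layer on each login event,
+        e.g. `await repo.create_login_history(request.user)`; the insert is
+        flushed and committed by the surrounding session lifecycle.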
+ """ + login_history = UserLoginHistory( + keycloak_email=user.email, + external_username=user.keycloak_username, + keycloak_user_id=user.keycloak_user_id, + is_login_successful=True, + ) + + self.db.add(login_history) + + @repo_handler + async def update_email( + self, user_profile_id: int, email: str + ) -> UserProfile: + # Fetch the user profile + query = select(UserProfile).where( + UserProfile.user_profile_id == user_profile_id + ) + result = await self.db.execute(query) + user_profile = result.scalar_one_or_none() + + user_profile.email = email + + # Flush and refresh without committing + await self.db.flush() + await self.db.refresh(user_profile) + + return user_profile diff --git a/backend/lcfs/web/api/user/schema.py b/backend/lcfs/web/api/user/schema.py index 716c445f8..e773133b1 100644 --- a/backend/lcfs/web/api/user/schema.py +++ b/backend/lcfs/web/api/user/schema.py @@ -1,9 +1,12 @@ -from typing import Optional, List +from typing import Optional, List, Union +from datetime import datetime -from pydantic import BaseModel, EmailStr +from pydantic import EmailStr +from lcfs.web.api.base import BaseSchema -from lcfs.web.api.organization.schema import OrganizationSummary +from lcfs.web.api.organizations.schema import OrganizationSummaryResponseSchema from lcfs.web.api.role.schema import RoleSchema +from lcfs.web.api.base import PaginationResponseSchema """ Base - all shared attributes of a resource @@ -12,23 +15,86 @@ InDB - attributes present on any resource coming out of the database Public - attributes present on public facing resources being returned from GET, POST, and PUT requests """ -class UserBase(BaseModel): + + +class UserCreateSchema(BaseSchema): + user_profile_id: Optional[int] = None + title: str + keycloak_username: str + keycloak_email: EmailStr + email: Optional[EmailStr] = "" + phone: Optional[str] = None + mobile_phone: Optional[str] = None + first_name: str + last_name: str + is_active: bool + organization_id: Optional[int] = None + organization: Optional[OrganizationSummaryResponseSchema] = None + roles: Optional[List[str]] = [] + + +class UserBaseSchema(BaseSchema): """DTO for user values.""" - username: str - email: EmailStr - display_name: str + + user_profile_id: int + keycloak_username: str + keycloak_email: EmailStr + email: Optional[EmailStr] = None title: Optional[str] = None phone: Optional[str] = None + first_name: Optional[str] = None + last_name: Optional[str] = None + is_active: Union[bool, str] mobile_phone: Optional[str] = None - organization: Optional[OrganizationSummary] = None - # user_roles: Optional[List[RoleSchema]] = None - - class Config: - from_attributes = True + organization: Optional[OrganizationSummaryResponseSchema] = None + roles: Optional[List[RoleSchema]] = None + is_government_user: Optional[bool] = None + @classmethod + def model_validate(cls, user_profile): + is_government_user = False + roles = [] + for role in user_profile.user_roles: + roles.append(role.to_dict()) + if role.role.is_government_role: + is_government_user = True + user_dict = user_profile.__dict__.copy() # make a copy of the dictionary + user_dict.pop("roles", None) # remove the roles key if it exists + return cls(roles=roles, is_government_user=is_government_user, **user_dict) -class UserCreate(UserBase): - keycloak_user_id: str + +class UsersSchema(BaseSchema): + pagination: PaginationResponseSchema + users: List[UserBaseSchema] + + +class UserActivitySchema(BaseSchema): + transaction_id: int + action_taken: str + transaction_type: str + create_date: 
datetime + user_id: Optional[int] = None # Present when fetching activities for all users + + +class UserActivitiesResponseSchema(BaseSchema): + activities: List[UserActivitySchema] + pagination: PaginationResponseSchema + + +class UserLoginHistorySchema(BaseSchema): + user_login_history_id: int keycloak_email: str - keycloak_username: str - pass + external_username: str + keycloak_user_id: str + is_login_successful: bool + login_error_message: Optional[str] = None + create_date: datetime + + +class UserLoginHistoryResponseSchema(BaseSchema): + histories: List[UserLoginHistorySchema] + pagination: PaginationResponseSchema + + +class UpdateEmailSchema(BaseSchema): + email: EmailStr diff --git a/backend/lcfs/web/api/user/services.py b/backend/lcfs/web/api/user/services.py new file mode 100644 index 000000000..f6d21fa08 --- /dev/null +++ b/backend/lcfs/web/api/user/services.py @@ -0,0 +1,345 @@ +import io +import math +from datetime import datetime +import structlog +from typing import List + +from fastapi import Depends, Request, HTTPException +from fastapi.responses import StreamingResponse +from sqlalchemy.ext.asyncio import AsyncSession + +from lcfs.web.api.role.schema import RoleSchema, user_has_roles +from lcfs.db.dependencies import get_async_db_session +from lcfs.utils.constants import LCFS_Constants, FILE_MEDIA_TYPE +from lcfs.web.core.decorators import service_handler +from lcfs.web.exception.exceptions import ( + DataNotFoundException, + PermissionDeniedException, +) +from lcfs.web.api.base import ( + FilterModel, + PaginationRequestSchema, + PaginationResponseSchema, + validate_pagination, +) +from lcfs.db.models import UserProfile +from lcfs.web.api.user.schema import ( + UserCreateSchema, + UserBaseSchema, + UserLoginHistoryResponseSchema, + UsersSchema, + UserActivitySchema, + UserActivitiesResponseSchema, +) +from lcfs.utils.spreadsheet_builder import SpreadsheetBuilder +from lcfs.web.api.user.repo import UserRepository +from fastapi_cache import FastAPICache +from lcfs.db.models.user.Role import RoleEnum + +logger = structlog.get_logger(__name__) + + +class UserServices: + def __init__( + self, + request: Request = None, + repo: UserRepository = Depends(UserRepository), + session: AsyncSession = Depends(get_async_db_session), + ) -> None: + self.repo = repo + self.request = request + self.session = session + + @service_handler + async def export_users(self, export_format) -> StreamingResponse: + """ + Prepares a list of users in a file that is downloadable + """ + if not export_format in ["xls", "xlsx", "csv"]: + raise DataNotFoundException("Export format not supported") + + exclude_government_users = FilterModel( + filter_type="number", + type="notBlank", + field="organization_id", + filter=None, + ) + # Query database for the list of users. Exclude government users. 
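+        # The repo call below is roughly equivalent to requesting (sketch;
+        # size=0 is assumed here to mean "no page limit", so a single call
+        # returns every non-government user for the export):
+        #   {"page": 1, "size": 0, "sortOrders": [],
+        #    "filters": [{"filter_type": "number", "type": "notBlank",
+        #                 "field": "organization_id"}]}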
+ results = await self.repo.get_users_paginated( + pagination=PaginationRequestSchema( + page=1, + size=0, + filters=[exclude_government_users], + sortOrders=[], + ) + ) + + # Prepare data for the spreadsheet + data = [] + for u in results[0]: + user = u if isinstance(u, dict) else u.dict() + data.append( + [ + user["last_name"], + user["first_name"], + user["email"], + user["keycloak_username"], + user["title"], + user["phone"], + user["mobile_phone"], + "Active" if user["is_active"] else "Inactive", + ", ".join(role["name"] for role in user["roles"]), + user["organization"]["name"], + ] + ) + + # Create a spreadsheet + builder = SpreadsheetBuilder(file_format=export_format) + + builder.add_sheet( + sheet_name=LCFS_Constants.USERS_EXPORT_SHEETNAME, + columns=LCFS_Constants.USERS_EXPORT_COLUMNS, + rows=data, + styles={"bold_headers": True}, + ) + + file_content = builder.build_spreadsheet() + + # Get the current date in YYYY-MM-DD format + current_date = datetime.now().strftime("%Y-%m-%d") + + filename = ( + f"{LCFS_Constants.USERS_EXPORT_FILENAME}-{current_date}.{export_format}" + ) + headers = {"Content-Disposition": f'attachment; filename="{filename}"'} + + return StreamingResponse( + io.BytesIO(file_content), + media_type=FILE_MEDIA_TYPE[export_format.upper()].value, + headers=headers, + ) + + @service_handler + async def get_all_users(self, pagination: PaginationRequestSchema) -> UsersSchema: + """ + Get all users + """ + users, total_count = await self.repo.get_users_paginated(pagination=pagination) + return UsersSchema( + pagination=PaginationResponseSchema( + total=total_count, + page=pagination.page, + size=pagination.size, + total_pages=( + math.ceil(total_count / pagination.size) if total_count > 0 else 0 + ), + ), + users=users, + ) + + @service_handler + async def get_user_by_id(self, user_id: int) -> UserBaseSchema: + """ + Get user info by ID + """ + user = await self.repo.get_user_by_id(user_id) + if not user: + raise DataNotFoundException("User not found") + if user_has_roles(self.request.user, [RoleEnum.GOVERNMENT]): + pass + elif ( + not user.organization + or self.request.user.organization.organization_id + != user.organization.organization_id + ): + raise HTTPException( + status_code=403, + detail="You do not have permission to view this user's information.", + ) + return UserBaseSchema.model_validate(user) + + @service_handler + async def create_user(self, user_create: UserCreateSchema) -> str: + """ + Create a new user + """ + user = await self.repo.create_user(user_create) + await FastAPICache.clear(namespace="users") + return "User created successfully" + + @service_handler + async def update_user( + self, user_create: UserCreateSchema, user_id: int + ) -> UserProfile: + """ + Update user info + """ + user = await self.repo.get_user_by_id(user_id) + if not user: + raise DataNotFoundException("User not found") + user = await self.repo.update_user(user, user_create) + await FastAPICache.clear(namespace="users") + return user + + @service_handler + async def delete_user(self, user_id: int) -> None: + """ + Delete only if the user has never logged in to the system. 
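+        If the user has any recorded history (i.e. has logged in before), the
+        call is a silent no-op; both paths return None to the caller.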
+ """ + user = await self.repo.get_user_by_id(user_id) + if not user: + raise DataNotFoundException("User not found") + history = await self.repo.get_user_history(user_id) + if len(history) <= 0: + await self.repo.delete_user(user) + await FastAPICache.clear(namespace="users") + return None + + @service_handler + async def get_user_roles(self, user_id: int) -> List[dict]: + """ + Get user roles + """ + user = await self.repo.get_user_by_id(user_id) + if not user: + raise DataNotFoundException("User not found") + return [RoleSchema.model_validate(role.to_dict()) for role in user.user_roles] + + @service_handler + async def get_user_activities( + self, user_id: int, current_user, pagination: PaginationRequestSchema + ) -> UserActivitiesResponseSchema: + """ + Retrieves activities for a specific user with proper permission checks. + """ + # Permission Checks + if not await self._has_access_to_user_activities(current_user, user_id): + raise HTTPException( + status_code=403, + detail="You do not have permission to view this user's activities.", + ) + + pagination = validate_pagination(pagination) + + activities, total_count = await self.repo.get_user_activities_paginated( + user_id, pagination + ) + activities_schema = [ + UserActivitySchema(**activity._asdict()) for activity in activities + ] + + return UserActivitiesResponseSchema( + activities=activities_schema, + pagination=PaginationResponseSchema( + total=total_count, + page=pagination.page, + size=pagination.size, + total_pages=math.ceil(total_count / pagination.size), + ), + ) + + @service_handler + async def get_all_user_activities( + self, current_user, pagination: PaginationRequestSchema + ) -> UserActivitiesResponseSchema: + """ + Retrieves activities for all users (Administrator role only). + """ + if not any( + role.role.name == RoleEnum.ADMINISTRATOR for role in current_user.user_roles + ): + raise HTTPException( + status_code=403, + detail="You do not have permission to view all user activities.", + ) + + pagination = validate_pagination(pagination) + + activities, total_count = await self.repo.get_all_user_activities_paginated( + pagination + ) + activities_schema = [ + UserActivitySchema(**activity._asdict()) for activity in activities + ] + + return UserActivitiesResponseSchema( + activities=activities_schema, + pagination=PaginationResponseSchema( + total=total_count, + page=pagination.page, + size=pagination.size, + total_pages=math.ceil(total_count / pagination.size), + ), + ) + + @service_handler + async def get_all_user_login_history( + self, current_user, pagination: PaginationRequestSchema + ) -> UserLoginHistoryResponseSchema: + """ + Retrieves login histories for all users (Administrator role only). + """ + if not any( + role.role.name == RoleEnum.ADMINISTRATOR for role in current_user.user_roles + ): + raise HTTPException( + status_code=403, + detail="You do not have permission to view all user login histories.", + ) + + pagination = validate_pagination(pagination) + login_history, total_count = ( + await self.repo.get_all_user_login_history_paginated(pagination) + ) + + return UserLoginHistoryResponseSchema( + histories=login_history, + pagination=PaginationResponseSchema( + total=total_count, + page=pagination.page, + size=pagination.size, + total_pages=math.ceil(total_count / pagination.size), + ), + ) + + async def _has_access_to_user_activities( + self, current_user: UserProfile, target_user_id: int + ) -> bool: + """ + Checks if the current user has access to the target user's activities. 
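+
+        Sketch of the rules implemented below:
+            - ADMINISTRATOR: may view any user's activities.
+            - MANAGE_USERS: may view activities only for target users in the
+              same organization as the current user.
+            - Everyone else: denied.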
+ """ + target_user = await self.repo.get_user_by_id(target_user_id) + if not target_user: + raise HTTPException( + status_code=404, + detail="User not found.", + ) + + current_user_roles = current_user.role_names + + # Administrator users can access any user's activities + if RoleEnum.ADMINISTRATOR in current_user_roles: + return True + + # Manage Users can access activities of users within the same organization + if ( + RoleEnum.MANAGE_USERS in current_user_roles + and current_user.organization_id == target_user.organization_id + ): + return True + + return False + + async def track_user_login(self, user: UserProfile): + await self.repo.create_login_history(user) + + @service_handler + async def update_email(self, user_id: int, email: str): + try: + return await self.repo.update_email(user_id, email) + except DataNotFoundException as e: + logger.error(f"User not found: {e}") + raise HTTPException(status_code=404, detail=str(e)) + except Exception as e: + logger.error(f"Error updating email: {e}") + raise HTTPException(status_code=500, detail="Internal Server Error") diff --git a/backend/lcfs/web/api/user/session.py b/backend/lcfs/web/api/user/session.py deleted file mode 100644 index f03b2e4a0..000000000 --- a/backend/lcfs/web/api/user/session.py +++ /dev/null @@ -1,144 +0,0 @@ -from logging import getLogger -from typing import List, Optional -from fastapi import Depends - -from sqlalchemy import text, and_, func, select, asc, desc -from sqlalchemy.ext.asyncio import AsyncSession -from starlette.requests import Request - -from lcfs.db.models.UserProfile import UserProfile -from lcfs.web.api.user.schema import UserCreate -from lcfs.web.api.base import row_to_dict -from lcfs.web.api.user.schema import UserBase -from lcfs.web.api.dependancies import pagination_query - -logger = getLogger("user_repo") -USER_VIEW_STMT = "select * from public.user_profile u where 1=1" -USER_COUNT_STMT = "select count(*) from public.user_profile u where 1=1" -# USER_VIEW_STMT = "select * from user_view u where 1=1" -# USER_COUNT_STMT = "select count(*) from user_view u where 1=1" - - -class UserRepository: - - def __init__(self, session: AsyncSession, request: Request = None): - self.session = session - self.request = request - - async def get_users_count(self) -> int: - """ - Retrieves the count of all active users in the database. - - Returns: - int: The number of active users. - """ - count_query = text(USER_COUNT_STMT) - count_result = await self.session.execute(count_query) - count = count_result.fetchone() - - # Extract the count from the first column of the first row. - # The [0] index accesses the first column in the row. - user_count = count[0] - - return user_count - - - async def query_users( - self, - username: Optional[str] = None, - organization: Optional[str] = None, - include_inactive: bool = False, - pagination: dict = Depends(pagination_query) - ) -> List[UserBase]: - """ - Queries users from the database with optional filters for username, surname, - organization, and active status. Supports pagination and sorting. - - Args: - username (Optional[str]): Filter for users with a username similar to the provided value. - organization (Optional[str]): Filter for users belonging to a specific organization. - surname (Optional[str]): Filter for users with a surname similar to the provided value. - include_inactive (bool): Whether to include inactive users in the results. - pagination (dict): Pagination and sorting parameters. - - Returns: - List[UserBase]: A list of user profiles matching the query. 
- """ - # Build the base query statement - conditions = [] - if username: - conditions.append(func.lower(UserProfile.username).like(f"%{username.lower()}%")) - if organization: - conditions.append(func.lower(UserProfile.organization.name).like(f"%{organization.lower()}%")) - # if not include_inactive: - # conditions.append(UserProfile.is_active.is_(True)) - - # Sorting direction - sort_function = desc if pagination["sort_direction"] == "desc" else asc - sort_field = pagination["sort_field"] - offset = pagination["page"] * pagination["per_page"] - limit = pagination["per_page"] - - # Applying pagination, sorting, and filters to the query - query = ( - select(UserProfile) - .where(and_(*conditions)) - .order_by(sort_function(getattr(UserProfile, sort_field))) - .offset(offset) - .limit(limit) - ) - - # Execute the query - user_results = await self.session.execute(query) - results = user_results.scalars().all() - - # Convert the results to UserBase schemas - return [UserBase.model_validate(user) for user in results] - - - async def get_user(self, user_id: int) -> UserBase: - """ - Gets a user by their ID in the database. - - This method queries the database for a user with a given ID and an active status. If found, it - converts the database row to a dictionary matching the UserBase Pydantic model. - - Args: - user_id (int): The unique identifier of the user to be found. - - Returns: - UserBase: The found user's data converted to a UserBase Pydantic model. If no user is found, returns None. - """ - stmt = f'{USER_VIEW_STMT} and u.is_active = true and u.id = :user_id' - - user_results = await self.session.execute(text(stmt), {'user_id': user_id}) - - user = user_results.fetchone() - return row_to_dict(user, UserBase) if user else None - - - async def create_user(self, user_create: UserCreate) -> UserProfile: - """ - Creates a new user entity in the database. - - This method takes a UserCreate Pydantic model, converts it into a dictionary, and then creates a new - User ORM model instance. If an organization is associated with the user, it ensures that the organization_id - is set appropriately. The new user is then added to the database session and saved to the database. - - Args: - user_create (UserCreate): The Pydantic model containing the data for the new user. - - Returns: - User: The ORM model instance of the newly created user. 
- """ - user_data = user_create.dict(exclude_unset=True, exclude_none=True) - - if 'organization' in user_data: - user_data['organization_id'] = user_data.pop('organization').id - - new_user = UserProfile(**user_data) - - self.session.add(new_user) - await self.session.commit() - - return new_user diff --git a/backend/lcfs/web/api/user/views.py b/backend/lcfs/web/api/user/views.py index efb5f8ac4..fab92a37f 100644 --- a/backend/lcfs/web/api/user/views.py +++ b/backend/lcfs/web/api/user/views.py @@ -1,141 +1,263 @@ -""" -User management endpoints -GET: /users/current -GET: /users/ -GET: /users -GET: /users/search?username=...&organization=...&surname=...&include_inactive=false -POST: /users/create -PATCH: /users/ -DELETE: /users/ (Delete only if the user has never logged in/mapped) -GET: /users//roles {List of Roles with IDs} -GET: /users//history -""" -import math -from logging import getLogger from typing import List -from fastapi import APIRouter, status, FastAPI, Depends, HTTPException, Request -from starlette.responses import Response -from sqlalchemy.ext.asyncio import AsyncSession +import structlog +from fastapi import APIRouter, Body, status, Request, Response, Depends, Query +from fastapi.responses import StreamingResponse + from lcfs.db import dependencies -from lcfs.web.api.base import EntityResponse -from lcfs.web.api.user.session import UserRepository -from lcfs.web.api.user.schema import UserCreate, UserBase +from lcfs.db.models.user.Role import RoleEnum +from lcfs.web.api.base import PaginationRequestSchema +from lcfs.web.api.role.schema import RoleSchema +from lcfs.web.api.user.schema import ( + UserCreateSchema, + UserBaseSchema, + UserLoginHistoryResponseSchema, + UsersSchema, + UserActivitiesResponseSchema, + UpdateEmailSchema, +) +from lcfs.web.api.user.services import UserServices +from lcfs.web.core.decorators import view_handler router = APIRouter() -logger = getLogger("users") +logger = structlog.get_logger(__name__) get_async_db = dependencies.get_async_db_session -user_repo = None # Define user_repo at the global level - -@router.on_event("startup") -async def startup_event(): - global user_repo - async for db in get_async_db(): # Iterate over the async_generator - user_repo = UserRepository(db) - break # Break after obtaining the database connection - - -@router.get("/current", response_model=UserBase, status_code=status.HTTP_200_OK) -async def get_current_user(request: Request, response: Response = None) -> UserBase: - try: - current_user = request.user - if not current_user: - err_msg = "Current user not found" - logger.error(err_msg) - response.status_code = status.HTTP_404_NOT_FOUND - return UserBase(status=status.HTTP_404_NOT_FOUND, - message="Not Found", success=False, data={}, - error={"message": err_msg}) - return UserBase.from_orm(current_user) - except Exception as e: - response.status_code = status.HTTP_500_INTERNAL_SERVER_ERROR - logger.error("Error getting current user", str(e.args[0])) - return UserBase(status=status.HTTP_500_INTERNAL_SERVER_ERROR, - message="Failed", success=False, data={}, - error={"message": f"Technical error: {e.args[0]}"}) - - - -@router.get("/", response_model=List[UserBase], status_code=status.HTTP_200_OK) -async def get_users(limit: int = 10, offset: int = 0, response: Response = None) -> List[UserBase]: - current_page = math.ceil(offset / limit) + 1 - try: - users = await user_repo.query_users( - pagination={ - "per_page": limit, - "page": offset, - "sort_field": "username", - "sort_direction": "asc" - } - ) - if len(users) 
== 0: - logger.error("Error getting users") - response.status_code = status.HTTP_404_NOT_FOUND - return [] - - # Convert the users to a list of UserBase objects - user_bases = [UserBase.from_orm(user) for user in users] - return user_bases # Return the list of UserBase objects - except Exception as e: - response.status_code = status.HTTP_500_INTERNAL_SERVER_ERROR - logger.error("Error getting users", str(e.args[0])) - return [] # Return an empty list if there's an error - - -@router.get("/search", response_model=EntityResponse, status_code=status.HTTP_200_OK) -async def get_user_search(username: str = None, organization: str = None, - surname: str = None, include_inactive: bool = False, - response: Response = None) -> EntityResponse: - # TODO: add sorting and pagination - try: - users = await user_repo.search_users(username, organization, - surname, include_inactive) - if users.__len__() == 0: - logger.error("Error getting users") - response.status_code = status.HTTP_404_NOT_FOUND - return EntityResponse(status=status.HTTP_404_NOT_FOUND, - message="Not Found", success=False, data={}, - error={"message": "No users found"}) - return EntityResponse(status=status.HTTP_200_OK, data=users, - total=len(users), error={}, - success=True, message="Success") - except Exception as e: - response.status_code = status.HTTP_500_INTERNAL_SERVER_ERROR - logger.error("Error getting users", str(e.args[0])) - return EntityResponse(status=status.HTTP_500_INTERNAL_SERVER_ERROR, - message="Failed", success=False, data={}, - error={"message": f"Technical error: {e.args[0]}"}) - - -@router.get("/{user_id}", response_model=EntityResponse, status_code=status.HTTP_200_OK) -async def get_user_by_id(user_id: int, response: Response = None) -> EntityResponse: - try: - user = await user_repo.get_user(user_id=user_id) - if user is None: - err_msg = f"User {user_id} not found" - logger.error(err_msg) - response.status_code = status.HTTP_404_NOT_FOUND - return EntityResponse(status=status.HTTP_404_NOT_FOUND, - message="Not Found", success=False, data={}, - error={"message": err_msg}) - return EntityResponse(status=status.HTTP_200_OK, data=user, - total=1, error={}, total_pages=1, limit=1, - success=True, message="Success") - except Exception as e: - response.status_code = status.HTTP_500_INTERNAL_SERVER_ERROR - logger.error(f"Error finding user {user_id}", str(e.args[0])) - return EntityResponse(status=status.HTTP_500_INTERNAL_SERVER_ERROR, - message="Failed", success=False, data={}, - error={"message": f"Technical error: {e.args[0]}"}) - - -@router.post("/create", response_model=EntityResponse, status_code=status.HTTP_201_CREATED) -async def create_user(user_data: UserCreate, db: AsyncSession = Depends(get_async_db)) -> EntityResponse: - try: - user_repo = UserRepository(db) - created_user = await user_repo.create_user(user_data) - return EntityResponse(status=status.HTTP_201_CREATED, data=created_user, error={}, success=True, message="User created successfully") - except Exception as e: - raise HTTPException(status_code=500, detail=f"Failed to create user: {str(e)}") +@router.get("/export", response_class=StreamingResponse, status_code=status.HTTP_200_OK) +@view_handler([RoleEnum.GOVERNMENT]) +async def export_users( + request: Request, + format: str = Query(default="xls", description="File export format"), + service: UserServices = Depends(), +): + """ + Endpoint to export information of all users + + This endpoint can support exporting data in different file formats (xls, xlsx, csv) + as specified by the 'format' and 
'media_type' variables. + - 'format' specifies the file format: options are 'xls', 'xlsx', and 'csv'. + - 'media_type' sets the appropriate MIME type based on 'format': + 'application/vnd.ms-excel' for 'xls', + 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' for 'xlsx', + 'text/csv' for 'csv'. + + The SpreadsheetBuilder class is used for building the spreadsheet. + It allows adding multiple sheets with custom styling options and exports them as a byte stream. + Also, an example of how to use the SpreadsheetBuilder is provided in its class documentation. + + Note: Only the first sheet data is used for the CSV format, + as CSV files do not support multiple sheets. + """ + return await service.export_users(format) + + +@router.post("/list", response_model=UsersSchema, status_code=status.HTTP_200_OK) +@view_handler([RoleEnum.GOVERNMENT]) +async def get_users( + request: Request, + pagination: PaginationRequestSchema = Body(..., embed=False), + response: Response = None, + service: UserServices = Depends(), +) -> UsersSchema: + """ + Endpoint to get information of all users for ag-grid in the UI + + Pagination Request Schema: + - page: offset/ page indicates the pagination of rows for the users list + - size: size indicates the number of rows per page for the users list + - sortOrders: sortOrders is an array of objects that specify the sorting criteria for the users list. + Each object has the following properties: + - field: the name of the field to sort by + - direction: the sorting direction ('asc' or 'desc') + - filterModel: filterModel is an array of objects that specifies the filtering criteria for the users list. + It has the following properties: + - filter_type: the type of filtering to perform ('text', 'number', 'date', 'boolean') + - type: the type of filter to apply ('equals', 'notEquals', 'contains', 'notContains', 'startsWith', 'endsWith') + - filter: the actual filter value + - field: Database Field that needs filtering. + """ + return await service.get_all_users(pagination) + + +@router.post("/logged-in", status_code=status.HTTP_200_OK) +@view_handler(["*"]) +async def track_logged_in( + request: Request, response: Response = None, service: UserServices = Depends() +) -> str: + """ + Endpoint to track when a user logs in + + This endpoint returns the information of the current user, including their roles and organization. + """ + + await service.track_user_login(request.user) + return "Tracked" + + +@router.get("/current", response_model=UserBaseSchema, status_code=status.HTTP_200_OK) +@view_handler(["*"]) +async def get_current_user( + request: Request, response: Response = None +) -> UserBaseSchema: + """ + Endpoint to get information of the current user + + This endpoint returns the information of the current user, including their roles and organization. + """ + return UserBaseSchema.model_validate(request.user) + + +@router.get("/{user_id}", response_model=UserBaseSchema, status_code=status.HTTP_200_OK) +@view_handler([RoleEnum.GOVERNMENT]) +async def get_user_by_id( + request: Request, + user_id: int, + service: UserServices = Depends(), +) -> UserBaseSchema: + """ + Endpoint to get information of a user by ID + This endpoint returns the information of a user by ID, including their roles and organization. 
+ """ + return await service.get_user_by_id(user_id) + + +@router.post("", response_model=None, status_code=status.HTTP_201_CREATED) +@view_handler([RoleEnum.ADMINISTRATOR]) +async def create_user( + request: Request, + response: Response = None, + user_create: UserCreateSchema = ..., + service: UserServices = Depends(), +) -> None: + """ + Endpoint to create a new user + This endpoint creates a new user and returns the information of the created user. + """ + return await service.create_user(user_create) + + +@router.put("/{user_id}", response_model=UserBaseSchema, status_code=status.HTTP_200_OK) +@view_handler([RoleEnum.ADMINISTRATOR]) +async def update_user( + request: Request, + response: Response = None, + user_id: int = None, + user_create: UserCreateSchema = ..., + service: UserServices = Depends(), +) -> UserBaseSchema: + """ + Endpoint to update a user + This endpoint updates a user and returns the information of the updated user. + """ + await service.update_user(user_create, user_id) + return await service.get_user_by_id(user_id) + + +@router.delete("/{user_id}", status_code=status.HTTP_200_OK) +@view_handler([RoleEnum.ADMINISTRATOR]) +async def delete_user( + request: Request, + response: Response = None, + user_id: int = None, + service: UserServices = Depends(), +) -> None: + """ + Endpoint to delete a user + This endpoint deletes a user, if the user had never logged in before. + """ + return await service.delete_user(user_id) + + +@router.get( + "/{user_id}/roles", response_model=List[RoleSchema], status_code=status.HTTP_200_OK +) +@view_handler([RoleEnum.GOVERNMENT]) +async def get_user_roles( + request: Request, + response: Response = None, + user_id: int = None, + service: UserServices = Depends(), +) -> List[RoleSchema]: + """ + Endpoint to get the roles of a user + """ + return await service.get_user_roles(user_id) + + +@router.post( + "/{user_id}/activity", + response_model=UserActivitiesResponseSchema, + status_code=status.HTTP_200_OK, +) +@view_handler([RoleEnum.ADMINISTRATOR, RoleEnum.MANAGE_USERS]) +async def get_user_activities( + request: Request, + user_id: int, + pagination: PaginationRequestSchema = Body(...), + service: UserServices = Depends(), +) -> UserActivitiesResponseSchema: + """ + Get activities of a specific user. + + Permissions: + - Government users with 'ADMINISTRATOR' role can access any user's activities. + - Supplier users with 'MANAGE_USERS' role can access activities of users within + their own organization. + """ + current_user = request.user + return await service.get_user_activities(user_id, current_user, pagination) + + +@router.post( + "/activities/all", + response_model=UserActivitiesResponseSchema, + status_code=status.HTTP_200_OK, +) +@view_handler([RoleEnum.ADMINISTRATOR]) +async def get_all_user_activities( + request: Request, + pagination: PaginationRequestSchema = Body(...), + service: UserServices = Depends(), +) -> UserActivitiesResponseSchema: + """ + Get activities of all users. + """ + current_user = request.user + return await service.get_all_user_activities(current_user, pagination) + + +@router.post( + "/login-history", + response_model=UserLoginHistoryResponseSchema, + status_code=status.HTTP_200_OK, +) +@view_handler([RoleEnum.ADMINISTRATOR]) +async def get_all_user_login_history( + request: Request, + pagination: PaginationRequestSchema = Body(...), + service: UserServices = Depends(), +) -> UserLoginHistoryResponseSchema: + """ + Get users login history. 
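+
+    Example request body (a sketch; field casing assumed to follow
+    PaginationRequestSchema as documented above):
+        {"page": 1, "size": 10, "filters": [],
+         "sortOrders": [{"field": "create_date", "direction": "desc"}]}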
+ """ + current_user = request.user + return await service.get_all_user_login_history(current_user, pagination) + + +@router.post( + "/update-email", + response_model=UpdateEmailSchema, + status_code=status.HTTP_200_OK, +) +@view_handler(["*"]) +async def update_email( + request: Request, + email_data: UpdateEmailSchema = Body(...), + service: UserServices = Depends(), +): + user_id = request.user.user_profile_id + email = email_data.email + + user = await service.update_email(user_id, email) + return UpdateEmailSchema(email=user.email) diff --git a/backend/lcfs/web/application.py b/backend/lcfs/web/application.py index 46a441c9b..f107d499f 100644 --- a/backend/lcfs/web/application.py +++ b/backend/lcfs/web/application.py @@ -1,40 +1,61 @@ -from importlib import metadata import logging - import os import debugpy -import colorlog -from fastapi import FastAPI -from fastapi.responses import UJSONResponse +import uuid + +import structlog +from fastapi import FastAPI, HTTPException +from fastapi.exceptions import RequestValidationError from fastapi.middleware.cors import CORSMiddleware +from fastapi.responses import UJSONResponse from prometheus_fastapi_instrumentator import Instrumentator +from starlette.authentication import ( + AuthenticationBackend, + AuthCredentials, + UnauthenticatedUser, +) from starlette.middleware.authentication import AuthenticationMiddleware -from starlette.authentication import AuthenticationBackend, AuthCredentials, UnauthenticatedUser +from starlette.middleware.base import BaseHTTPMiddleware +from starlette.requests import Request +from starlette.responses import JSONResponse -from lcfs.web.api.router import api_router +from lcfs.logging_config import setup_logging, correlation_id_var from lcfs.services.keycloak.authentication import UserAuthentication +from lcfs.settings import settings +from lcfs.web.api.router import api_router +from lcfs.web.exception.exception_handler import validation_exception_handler from lcfs.web.lifetime import register_shutdown_event, register_startup_event -# Create a colorized log formatter -log_formatter = colorlog.ColoredFormatter( - "%(log_color)s[%(levelname)s] [%(asctime)s] %(name)s.%(funcName)s - %(message)s", - log_colors={ - 'DEBUG': 'cyan', - 'INFO': 'green', - 'WARNING': 'yellow', - 'ERROR': 'red', - 'CRITICAL': 'bold_red' - } -) +origins = [ + "http://localhost", + "http://localhost:3000", + "https://lcfs-dev.apps.silver.devops.gov.bc.ca", + "https://lcfs-test.apps.silver.devops.gov.bc.ca", + "https://lcfs-prod.apps.silver.devops.gov.bc.ca", + "https://lcfs.apps.silver.devops.gov.bc.ca", +] + -# Create a colorized console handler with the formatter -console_handler = colorlog.StreamHandler() -console_handler.setFormatter(log_formatter) +class MiddlewareExceptionWrapper(BaseHTTPMiddleware): + """ + Catches HTTP exception from other middlewares, returns JSON responses, and adds CORS headers. 
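+
+    For example (sketch), an HTTPException(status_code=403, detail="Forbidden")
+    raised by a downstream middleware is returned to the client as
+    {"status": 403, "detail": "Forbidden"}, with Access-Control-Allow-Origin
+    added when the request's Origin header is in the allow-list above.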
+ """ -# Configure the root logger with the console handler -root_logger = logging.getLogger() -root_logger.addHandler(console_handler) -root_logger.setLevel(logging.DEBUG) + async def dispatch(self, request: Request, call_next): + try: + return await call_next(request) + except HTTPException as exc: + response = JSONResponse( + status_code=exc.status_code, + content={"status": exc.status_code, "detail": exc.detail}, + ) + + # Check if the request origin is in the allowed origins + request_origin = request.headers.get("origin") + if request_origin in origins: + response.headers["Access-Control-Allow-Origin"] = request_origin + + return response class LazyAuthenticationBackend(AuthenticationBackend): @@ -42,21 +63,46 @@ def __init__(self, app): self.app = app async def authenticate(self, request): - if request.scope['method'] == "OPTIONS": + if request.scope["method"] == "OPTIONS": + return AuthCredentials([]), UnauthenticatedUser() + + if request.url.path == "/api/health": # Skip for health check return AuthCredentials([]), UnauthenticatedUser() - + # Lazily retrieve Redis, session, and settings from app state - redis_pool = self.app.state.redis_pool - session = self.app.state.db_session_factory + redis_client = self.app.state.redis_client + session_factory = self.app.state.db_session_factory settings = self.app.state.settings # Now that we have the dependencies, we can instantiate the real backend - real_backend = UserAuthentication(redis_pool=redis_pool, session=session, settings=settings) - + real_backend = UserAuthentication( + redis_client=redis_client, + session_factory=session_factory, + settings=settings, + ) + # Call the authenticate method of the real backend return await real_backend.authenticate(request) +# Middleware to handle correlation IDs +class CorrelationIdMiddleware(BaseHTTPMiddleware): + async def dispatch(self, request: Request, call_next): + correlation_id = request.headers.get("X-Correlation-ID", str(uuid.uuid4())) + correlation_id_var.set(correlation_id) + response = await call_next(request) + response.headers["X-Correlation-ID"] = correlation_id + return response + + +# Middleware to set context variables +class ContextMiddleware(BaseHTTPMiddleware): + async def dispatch(self, request: Request, call_next): + request.state.correlation_id = correlation_id_var.get() + response = await call_next(request) + return response + + def get_app() -> FastAPI: """ Get FastAPI application. 
@@ -72,21 +118,20 @@ def get_app() -> FastAPI: # print("⏳ Waiting for debugger attach on port 5678...") # debugpy.wait_for_client() + # Map the string log level to the logging module's integer constants + log_level = getattr(logging, settings.log_level.value.upper(), logging.DEBUG) + setup_logging(level=log_level) + # Create the fastapi instance app = FastAPI( title="LCFS Backend API Development", - version=metadata.version("lcfs"), + version="0.1.0", docs_url="/api/docs", redoc_url="/api/redoc", openapi_url="/api/openapi.json", default_response_class=UJSONResponse, ) - origins = [ - "http://localhost", - "http://localhost:3000", - ] - # Set up CORS middleware options app.add_middleware( CORSMiddleware, @@ -94,10 +139,20 @@ def get_app() -> FastAPI: allow_credentials=True, allow_methods=["*"], # Allows all methods allow_headers=["*"], # Allows all headers + expose_headers=[ + "Content-Disposition" + ], # Expose Content-Disposition header to the frontend ) - # Apply custom authentication handler for user injection purposes + # Apply middlewares app.add_middleware(AuthenticationMiddleware, backend=LazyAuthenticationBackend(app)) + app.add_middleware(MiddlewareExceptionWrapper) + app.add_middleware(CorrelationIdMiddleware) + app.add_middleware(ContextMiddleware) + + # Register exception handlers + app.add_exception_handler(RequestValidationError, validation_exception_handler) + app.add_exception_handler(Exception, global_exception_handler) # Adds prometheus metrics instrumentation. Instrumentator().instrument(app).expose(app) @@ -110,3 +165,21 @@ def get_app() -> FastAPI: app.include_router(router=api_router, prefix="/api") return app + + +async def global_exception_handler(request: Request, exc: Exception): + """Handle uncaught exceptions.""" + logger = structlog.get_logger(__name__) + logger.error( + "Unhandled exception", + error=str(exc), + exc_info=True, + request_url=str(request.url), + method=request.method, + headers=dict(request.headers), + correlation_id=correlation_id_var.get(), + ) + return JSONResponse( + status_code=500, + content={"detail": "Internal Server Error"}, + ) diff --git a/backend/lcfs/web/core/__init__.py b/backend/lcfs/web/core/__init__.py new file mode 100644 index 000000000..2fe70a309 --- /dev/null +++ b/backend/lcfs/web/core/__init__.py @@ -0,0 +1 @@ +"""lcfs core package.""" diff --git a/backend/lcfs/web/core/decorators.py b/backend/lcfs/web/core/decorators.py new file mode 100644 index 000000000..0ccfc9a3b --- /dev/null +++ b/backend/lcfs/web/core/decorators.py @@ -0,0 +1,253 @@ +import structlog +import sys +import traceback +import inspect +from functools import wraps +from typing import List, Union, Literal +import warnings +import contextvars +import os + +from fastapi import HTTPException, Request +from fastapi.exceptions import RequestValidationError + +from lcfs.services.clamav.client import VirusScanException +from lcfs.web.exception.exceptions import ( + ServiceException, + DatabaseException, + DataNotFoundException, +) +from lcfs.db.models.user.Role import RoleEnum + +# Context variables +request_var = contextvars.ContextVar("request") +user_var = contextvars.ContextVar("user") +session_var = contextvars.ContextVar("session") + + +def extract_context(): + """Extract context for logging.""" + frame = sys._getframe(3) + local_vars = {k: repr(v) for k, v in frame.f_locals.items()} + request = request_var.get(None) + user = user_var.get(None) + session = session_var.get(None) + return { + "local_vars": local_vars, + "request": { + "url": str(request.url) 
if request else None, + "method": request.method if request else None, + "headers": dict(request.headers) if request else None, + }, + "user": { + "id": user.user_profile_id if user else None, + "roles": user.role_names if user else [], + }, + "session_state": repr(session) if session else "No session", + } + + +async def get_request_payload(request): + """Helper function to get the request payload.""" + request_payload = None + if request.method in ("POST", "PUT", "PATCH"): + try: + request_payload = await request.json() + except Exception: + pass + return request_payload + + +def get_source_info(func=None, e=None): + """Helper function to get source information for logging.""" + if func: + pathname = inspect.getfile(func) + linenumber = inspect.getsourcelines(func)[1] + function_name = func.__name__ + elif e: + tb = e.__traceback__ + frames = traceback.extract_tb(tb) + # Reverse frames to start from the deepest call + frames = frames[::-1] + # Skip frames from 'decorators.py' + for frame in frames: + if "decorators.py" not in frame.filename: + pathname = frame.filename + function_name = frame.name + linenumber = frame.lineno + break + else: + # If all frames are in 'decorators.py', use the last frame + frame = frames[-1] + pathname = frame.filename + function_name = frame.name + linenumber = frame.lineno + else: + return {} + return { + "pathname": pathname, + "func_name": function_name, + "lineno": linenumber, + } + + +def log_unhandled_exception(logger, e, context, layer_name, func=None): + """Helper function to log unhandled exceptions with correct traceback.""" + source_info = get_source_info(func=func, e=e) + logger.error( + f"Unhandled exception in {layer_name}", + error=str(e), + exc_info=True, + source_info=source_info, + **context, + ) + + +def view_handler(required_roles: List[Union[RoleEnum, Literal["*"]]]): + """Handles try except in the view layer""" + + # check if required roles argument was passed + if not required_roles or not isinstance(required_roles, list): + raise ValueError("required_roles must be a non-empty list") + + # check if a valid role was passed + for role in required_roles: + if role != "*" and not isinstance(role, RoleEnum): + raise ValueError(f"Invalid role: {role}. 
Must be RoleEnum or '*'")
+
+    def decorator(func):
+        @wraps(func)
+        async def wrapper(request: Request, *args, **kwargs):
+            logger = structlog.get_logger(func.__module__)
+            request_var.set(request)
+            user = getattr(request, "user", None)
+            user_var.set(user)
+            session = (
+                request.state.session if hasattr(request.state, "session") else None
+            )
+            session_var.set(session)
+
+            # check if user is authenticated
+            if not user:
+                raise HTTPException(status_code=401, detail="User not authenticated")
+
+            # check if the endpoint can be accessed
+            if "*" in required_roles:
+                warnings.warn("This endpoint is accessible by all roles")
+            else:
+                user_roles = user.role_names
+
+                # Check if the user has any of the required roles
+                if not any(
+                    required_role in user_roles for required_role in required_roles
+                ):
+                    raise HTTPException(
+                        status_code=403, detail="Insufficient permissions"
+                    )
+
+                # Suppliers may only act on resources belonging to their own organization
+                org_id = kwargs.get("organization_id", None)
+                if (
+                    RoleEnum.SUPPLIER in user_roles
+                    and org_id
+                    and int(org_id) != user.organization_id
+                ):
+                    raise HTTPException(
+                        status_code=403,
+                        detail="Insufficient permissions for this organization",
+                    )
+
+            # run through the view function
+            try:
+                return await func(request, *args, **kwargs)
+            except ValueError as e:
+                source_info = get_source_info(func=func)
+                logger.error(
+                    str(e),
+                    source_info=source_info,
+                    exc_info=e,
+                )
+                raise HTTPException(status_code=400, detail=str(e))
+            except (DatabaseException, ServiceException) as e:
+                source_info = get_source_info(func=func)
+                logger.error(
+                    str(e),
+                    source_info=source_info,
+                    exc_info=e,
+                )
+                raise HTTPException(status_code=500, detail="Internal Server Error")
+            except HTTPException as e:
+                source_info = get_source_info(func=func)
+                logger.error(
+                    str(e),
+                    source_info=source_info,
+                    exc_info=e,
+                )
+                if e.status_code == 403:
+                    raise HTTPException(status_code=403, detail="Forbidden resource")
+                raise
+            except DataNotFoundException:
+                raise HTTPException(status_code=404, detail="Not Found")
+            except VirusScanException:
+                raise HTTPException(
+                    status_code=422,
+                    detail="Viruses detected in file, please upload another",
+                )
+            except RequestValidationError:
+                raise
+            except Exception as e:
+                context = extract_context()
+                log_unhandled_exception(logger, e, context, "view", func=func)
+                # Raise HTTPException with original traceback
+                new_exception = HTTPException(
+                    status_code=500, detail="Internal Server Error"
+                )
+                raise new_exception.with_traceback(e.__traceback__)
+
+        return wrapper
+
+    return decorator
+
+
+def service_handler(func):
+    """Handles try except in the service layer"""
+
+    @wraps(func)
+    async def wrapper(*args, **kwargs):
+        logger = structlog.get_logger(func.__module__)
+        try:
+            return await func(*args, **kwargs)
+
+        # raise the error to the view layer
+        except (DatabaseException, HTTPException, DataNotFoundException, ValueError):
+            raise
+        # all other errors that occur in the service layer will log an error
+        except Exception as e:
+            context = extract_context()
+            log_unhandled_exception(logger, e, context, "service", func=func)
+            # Raise ServiceException with original traceback
+            new_exception = ServiceException()
+            raise new_exception.with_traceback(e.__traceback__) from e
+
+    return wrapper
+
+
+def repo_handler(func):
+    """Handles try except in the repo layer"""
+
+    @wraps(func)
+    async def wrapper(*args, **kwargs):
+        logger = structlog.get_logger(func.__module__)
+        try:
+            return await func(*args, **kwargs)
+        # raise the error to the service layer
+        except (HTTPException, DataNotFoundException, VirusScanException):
+            raise
+        # all other exceptions trigger a DatabaseException and cause a 500 response in the view layer
+        except Exception as e:
+            context = extract_context()
+            log_unhandled_exception(logger, e, context, "repository", func=func)
+            # Raise DatabaseException with original traceback
+            new_exception = DatabaseException()
+            raise new_exception.with_traceback(e.__traceback__) from e
+
+    return wrapper
diff --git a/backend/lcfs/web/exception/exception_handler.py b/backend/lcfs/web/exception/exception_handler.py
index fe884a78e..ddb1f278a 100644
--- a/backend/lcfs/web/exception/exception_handler.py
+++ b/backend/lcfs/web/exception/exception_handler.py
@@ -1,17 +1,20 @@
-from fastapi import HTTPException, FastAPI
-from starlette.exceptions import HTTPException as StarletteHTTPException
+from fastapi import HTTPException
+from fastapi.exceptions import RequestValidationError
 from starlette.requests import Request
 from starlette.responses import JSONResponse

-app = FastAPI()
+async def validation_exception_handler(request: Request, exc: RequestValidationError):
+    standard_errors = [
+        {"fields": [error["loc"][-1]], "message": error["msg"]}
+        for error in exc.errors()
+    ]

-@app.exception_handler(StarletteHTTPException)
-def exception_404_handler(request: Request, exc: HTTPException):
-    return JSONResponse(status_code=exc.status_code, content={
-        "error": {"status_code": exc.status_code, "message": exc.detail}})
-
-
-class CommonHTTPException(HTTPException):
-    def __init__(self, status_code: int, message: str):
-        super().__init__(status_code=status_code, detail=message)
+    return JSONResponse(
+        status_code=422,
+        content={
+            "message": "Validation failed",
+            "details": exc.errors(),  # The full pydantic validation errors
+            "errors": standard_errors,  # Simplified field/message pairs
+        },
+    )
diff --git a/backend/lcfs/web/exception/exceptions.py b/backend/lcfs/web/exception/exceptions.py
new file mode 100644
index 000000000..a89244e82
--- /dev/null
+++ b/backend/lcfs/web/exception/exceptions.py
@@ -0,0 +1,15 @@
+class DatabaseException(Exception):
+    """Exception raised for errors in the repository layer."""
+    pass
+
+class ServiceException(Exception):
+    """Exception raised for errors in the business logic (service layer)."""
+    pass
+
+class DataNotFoundException(Exception):
+    """Exception raised for errors where data is not found."""
+    pass
+
+class PermissionDeniedException(Exception):
+    """Exception raised when permission is denied for an action."""
+    pass
diff --git a/backend/lcfs/web/lifetime.py b/backend/lcfs/web/lifetime.py
index b43884513..fbe7b0b6e 100644
--- a/backend/lcfs/web/lifetime.py
+++ b/backend/lcfs/web/lifetime.py
@@ -1,9 +1,15 @@
 from typing import Awaitable, Callable

 from fastapi import FastAPI
+import boto3
+from fastapi_cache import FastAPICache
+from fastapi_cache.backends.redis import RedisBackend
+from redis.asyncio import Redis
 from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine

+from lcfs.services.rabbitmq.consumers import start_consumers, stop_consumers
 from lcfs.services.redis.lifetime import init_redis, shutdown_redis
+from lcfs.services.tfrs.redis_balance import init_org_balance_cache
 from lcfs.settings import settings

@@ -45,11 +51,18 @@ async def _startup() -> None:  # noqa: WPS430
         _setup_db(app)

         # Initialize Redis connection pool
-        init_redis(app)
+        await init_redis(app)

         # Assign settings to app state for global access
         app.state.settings = settings
-        pass  # noqa: WPS420
+
+        # Initialize FastAPI
cache with the Redis client + FastAPICache.init(RedisBackend(app.state.redis_client), prefix="lcfs") + + await init_org_balance_cache(app) + + # Setup RabbitMQ Listeners + await start_consumers(app) return _startup @@ -69,6 +82,6 @@ async def _shutdown() -> None: # noqa: WPS430 await app.state.db_engine.dispose() await shutdown_redis(app) - pass # noqa: WPS420 + await stop_consumers() return _shutdown diff --git a/backend/lcfs/web/utils/calculations.py b/backend/lcfs/web/utils/calculations.py new file mode 100644 index 000000000..965f1a244 --- /dev/null +++ b/backend/lcfs/web/utils/calculations.py @@ -0,0 +1,30 @@ +def calculate_compliance_units( + TCI: float, EER: float, RCI: float, UCI: float, Q: float, ED: float +) -> int: + """ + Calculate the compliance units using the fuel supply formula. + + Parameters: + - TCI: Target Carbon Intensity + - EER: Energy Efficiency Ratio + - RCI: Recorded Carbon Intensity + - UCI: Additional Carbon Intensity Attributable to Use + - Q: Quantity of Fuel Supplied + - ED: Energy Density + + Returns: + - The calculated compliance units as a rounded integer. + """ + # Ensure all inputs are floats + TCI = float(TCI) + EER = float(EER) + RCI = float(RCI) + UCI = float(UCI) + Q = float(Q) + ED = float(ED) + + # Perform the calculation + compliance_units = (TCI * EER - (RCI + UCI)) * ((Q * ED) / 1_000_000) + + # Return the rounded integer + return round(compliance_units) diff --git a/backend/migrate.sh b/backend/migrate.sh index 4d7099b88..a82b5b17a 100755 --- a/backend/migrate.sh +++ b/backend/migrate.sh @@ -1,11 +1,28 @@ -#!/bin/bash +#!/usr/bin/env bash SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )" export PYTHONPATH=$SCRIPT_DIR:$PYTHONPATH -# Installing dependencies using Poetry -echo "Installing dependencies with Poetry..." -poetry install +echo ' __ _____________ ___ ___ __ ____ __ _ ' +echo ' / / / ___/ __/ __/ / _ \/ _ ) / |/ (_)__ ________ _/ /_(_)__ ___ ' +echo ' / /__/ /__/ _/_\ \ / // / _ | / /|_/ / / _ `/ __/ _ `/ __/ / _ \/ _ \' +echo '/____/\___/_/ /___/ /____/____/ /_/ /_/_/\_, /_/ \_,_/\__/_/\___/_//_/' +echo ' /___/ ' +echo -e + +# Function to seed the database +seed_database() { + echo "🌱 Seeding the database..." + + # Note: Calling seed_database will default to using 'dev' seeds for the development environment. + if poetry run python /app/lcfs/db/seeders/seed_database.py; then + echo "✅ Database successfully seeded." + else + echo "❌ Seeding failed. Attempting to revert changes..." + downgrade_database + exit 1 + fi +} # Function for generating new migrations generate_migration() { @@ -13,19 +30,46 @@ generate_migration() { echo "Usage: $0 -g " exit 1 fi - poetry run alembic revision --autogenerate -m "$1" + echo "🛠 Generating new migration: $1" + if poetry run alembic revision --autogenerate -m "$1"; then + echo "✅ New migration created." + else + echo "❌ Failed to create migration." + exit 1 + fi } # Function for upgrading the database upgrade_database() { - revision=${1:-head} - poetry run alembic upgrade $revision + local revision=${1:-head} + echo "📈 Upgrading the database to revision: $revision" + if poetry run alembic upgrade $revision; then + echo "✅ Database upgraded to $revision." + else + echo "❌ Database upgrade failed." 
diff --git a/backend/migrate.sh b/backend/migrate.sh index 4d7099b88..a82b5b17a 100755 --- a/backend/migrate.sh +++ b/backend/migrate.sh @@ -1,11 +1,28 @@ -#!/bin/bash +#!/usr/bin/env bash SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )" export PYTHONPATH=$SCRIPT_DIR:$PYTHONPATH -# Installing dependencies using Poetry -echo "Installing dependencies with Poetry..." -poetry install +echo ' __ _____________ ___ ___ __ ____ __ _ ' +echo ' / / / ___/ __/ __/ / _ \/ _ ) / |/ (_)__ ________ _/ /_(_)__ ___ ' +echo ' / /__/ /__/ _/_\ \ / // / _ | / /|_/ / / _ `/ __/ _ `/ __/ / _ \/ _ \' +echo '/____/\___/_/ /___/ /____/____/ /_/ /_/_/\_, /_/ \_,_/\__/_/\___/_//_/' +echo ' /___/ ' +echo -e + +# Function to seed the database +seed_database() { + echo "🌱 Seeding the database..." + + # Note: Calling seed_database will default to using 'dev' seeds for the development environment. + if poetry run python /app/lcfs/db/seeders/seed_database.py; then + echo "✅ Database successfully seeded." + else + echo "❌ Seeding failed. Attempting to revert changes..." + downgrade_database + exit 1 + fi +} # Function for generating new migrations generate_migration() { @@ -13,19 +30,46 @@ generate_migration() { echo "Usage: $0 -g <description>" exit 1 } - poetry run alembic revision --autogenerate -m "$1" + echo "🛠 Generating new migration: $1" + if poetry run alembic revision --autogenerate -m "$1"; then + echo "✅ New migration created." + else + echo "❌ Failed to create migration." + exit 1 + fi } # Function for upgrading the database upgrade_database() { - revision=${1:-head} - poetry run alembic upgrade $revision + local revision=${1:-head} + echo "📈 Upgrading the database to revision: $revision" + if poetry run alembic upgrade $revision; then + echo "✅ Database upgraded to $revision." + else + echo "❌ Database upgrade failed." + exit 1 + fi } # Function for downgrading the database downgrade_database() { - revision=${1:-base} - poetry run alembic downgrade $revision + local revision=${1:-base} + echo "📉 Downgrading the database to revision: $revision" + if poetry run alembic downgrade $revision; then + echo "✅ Database downgraded to $revision." + else + echo "❌ Database downgrade failed." + exit 1 + fi +} + +# Function for resetting and seeding the database +reset_and_seed_database() { + echo "🔄 Resetting and seeding the database..." + downgrade_database + upgrade_database + seed_database + echo "✅ Database reset and seeded. Ready for a fresh start. 🌱" } # Function for displaying help manual @@ -33,26 +77,41 @@ display_help() { echo "Usage: $0 [option]" echo "Options:" echo " -g <description> Generate new migration with a description." - echo " -u [revision] Upgrade the database to a specific revision or to the latest ('head')." - echo " -d [revision] Downgrade the database to a specific revision or to the base." + echo " -u [revision] Upgrade the database to a specific revision or to the latest ('head'). Optional argument." + echo " -d [revision] Downgrade the database to a specific revision or to the base. Optional argument." + echo " -s Reset and seed the database." echo " -h Display this help manual." } +# Installing dependencies using Poetry +echo "📚 Installing dependencies with Poetry..." +if poetry install; then + echo "✅ Dependencies installed." +else + echo "❌ Failed to install dependencies." + exit 1 +fi +echo "=====================================================" + +# Check if no arguments were provided +if [ $# -eq 0 ]; then + display_help + exit 0 +fi + # Command line options -while getopts ":g:u::d::h" opt; do +while getopts ":g:u::d::sh" opt; do case $opt in g) generate_migration "$OPTARG" ;; u) upgrade_database "$OPTARG" ;; d) downgrade_database "$OPTARG" ;; + s) reset_and_seed_database ;; h) display_help; exit 0 ;; - \?) echo "Invalid option -$OPTARG" >&2; exit 1 ;; - :) if [ "$OPTARG" = "u" ]; then - upgrade_database - elif [ "$OPTARG" = "d" ]; then - downgrade_database - else - echo "Option -$OPTARG requires an argument." >&2 - exit 1 - fi ;; + \?) echo "❌ Invalid option -$OPTARG" >&2; exit 1 ;; + :) case $OPTARG in + u) upgrade_database ;; + d) downgrade_database ;; + *) echo "❌ Option -$OPTARG requires an argument." >&2; exit 1 ;; + esac ;; esac done diff --git a/backend/poetry.lock b/backend/poetry.lock index 85af7baa8..30bf2f4bf 100644 --- a/backend/poetry.lock +++ b/backend/poetry.lock @@ -1,14 +1,44 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
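For reference, typical invocations of the updated migrate.sh (run from backend/; the revision id below is a made-up example):

# Generate a new migration from model changes
./migrate.sh -g "add compliance units helper"

# Upgrade to the latest revision (head), or to a specific one
./migrate.sh -u
./migrate.sh -u ae5bc0f1b7c2

# Downgrade to base, re-apply all migrations, then seed the dev data
./migrate.sh -s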
+ +[[package]] +name = "aio-pika" +version = "9.4.3" +description = "Wrapper around the aiormq for asyncio and humans" +optional = false +python-versions = "<4.0,>=3.8" +files = [ + {file = "aio_pika-9.4.3-py3-none-any.whl", hash = "sha256:f1423d2d5a8b7315d144efe1773763bf687ac17aa1535385982687e9e5ed49bb"}, + {file = "aio_pika-9.4.3.tar.gz", hash = "sha256:fd2b1fce25f6ed5203ef1dd554dc03b90c9a46a64aaf758d032d78dc31e5295d"}, +] + +[package.dependencies] +aiormq = ">=6.8.0,<6.9.0" +yarl = "*" + +[[package]] +name = "aiormq" +version = "6.8.1" +description = "Pure python AMQP asynchronous client library" +optional = false +python-versions = "<4.0,>=3.8" +files = [ + {file = "aiormq-6.8.1-py3-none-any.whl", hash = "sha256:5da896c8624193708f9409ffad0b20395010e2747f22aa4150593837f40aa017"}, + {file = "aiormq-6.8.1.tar.gz", hash = "sha256:a964ab09634be1da1f9298ce225b310859763d5cf83ef3a7eae1a6dc6bd1da1a"}, +] + +[package.dependencies] +pamqp = "3.3.0" +yarl = "*" [[package]] name = "alembic" -version = "1.13.0" +version = "1.14.0" description = "A database migration tool for SQLAlchemy." optional = false python-versions = ">=3.8" files = [ - {file = "alembic-1.13.0-py3-none-any.whl", hash = "sha256:a23974ea301c3ee52705db809c7413cecd165290c6679b9998dd6c74342ca23a"}, - {file = "alembic-1.13.0.tar.gz", hash = "sha256:ab4b3b94d2e1e5f81e34be8a9b7b7575fc9dd5398fccb0bef351ec9b14872623"}, + {file = "alembic-1.14.0-py3-none-any.whl", hash = "sha256:99bd884ca390466db5e27ffccff1d179ec5c05c965cfefc0607e69f9e411cb25"}, + {file = "alembic-1.14.0.tar.gz", hash = "sha256:b00892b53b3642d0b8dbedba234dbf1924b69be83a9a769d5a624b01094e304b"}, ] [package.dependencies] @@ -19,15 +49,30 @@ typing-extensions = ">=4" [package.extras] tz = ["backports.zoneinfo"] +[[package]] +name = "alembic-postgresql-enum" +version = "1.3.0" +description = "Alembic autogenerate support for creation, alteration and deletion of enums" +optional = false +python-versions = "<4.0,>=3.7" +files = [ + {file = "alembic_postgresql_enum-1.3.0-py3-none-any.whl", hash = "sha256:42539e225f8b6c9adc9862a259edeed105b02143d55503152eeeea7a0b5af70b"}, + {file = "alembic_postgresql_enum-1.3.0.tar.gz", hash = "sha256:64d5de7ac2ea39433afd965b057ca882fb420eb5cd6a7db8e2b4d0e7e673cae1"}, +] + +[package.dependencies] +alembic = ">=1.7" +SQLAlchemy = ">=1.4" + [[package]] name = "annotated-types" -version = "0.6.0" +version = "0.7.0" description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" files = [ - {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, - {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, ] [[package]] @@ -64,13 +109,13 @@ files = [ [[package]] name = "async-timeout" -version = "4.0.3" +version = "5.0.1" description = "Timeout context manager for asyncio programs" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, - {file = "async_timeout-4.0.3-py3-none-any.whl", hash = 
"sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, + {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}, + {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}, ] [[package]] @@ -125,21 +170,22 @@ test = ["flake8 (>=5.0.4,<5.1.0)", "uvloop (>=0.15.3)"] [[package]] name = "attrs" -version = "23.1.0" +version = "24.2.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.7" files = [ - {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, - {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, + {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, + {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, ] [package.extras] -cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[docs,tests]", "pre-commit"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] -tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] [[package]] name = "autoflake" @@ -158,132 +204,217 @@ tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} [[package]] name = "bandit" -version = "1.7.5" +version = "1.7.10" description = "Security oriented static analyser for python code." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "bandit-1.7.5-py3-none-any.whl", hash = "sha256:75665181dc1e0096369112541a056c59d1c5f66f9bb74a8d686c3c362b83f549"}, - {file = "bandit-1.7.5.tar.gz", hash = "sha256:bdfc739baa03b880c2d15d0431b31c658ffc348e907fe197e54e0389dd59e11e"}, + {file = "bandit-1.7.10-py3-none-any.whl", hash = "sha256:665721d7bebbb4485a339c55161ac0eedde27d51e638000d91c8c2d68343ad02"}, + {file = "bandit-1.7.10.tar.gz", hash = "sha256:59ed5caf5d92b6ada4bf65bc6437feea4a9da1093384445fed4d472acc6cff7b"}, ] [package.dependencies] colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} -GitPython = ">=1.0.1" PyYAML = ">=5.3.1" rich = "*" stevedore = ">=1.20.0" [package.extras] -test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)", "tomli (>=1.1.0)"] +baseline = ["GitPython (>=3.1.30)"] +sarif = ["jschema-to-python (>=1.2.3)", "sarif-om (>=1.0.4)"] +test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)"] toml = ["tomli (>=1.1.0)"] yaml = ["PyYAML"] [[package]] name = "black" -version = "22.12.0" +version = "24.10.0" description = "The uncompromising code formatter." optional = false -python-versions = ">=3.7" -files = [ - {file = "black-22.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d"}, - {file = "black-22.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351"}, - {file = "black-22.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f"}, - {file = "black-22.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4"}, - {file = "black-22.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2"}, - {file = "black-22.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350"}, - {file = "black-22.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d"}, - {file = "black-22.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc"}, - {file = "black-22.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320"}, - {file = "black-22.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148"}, - {file = "black-22.12.0-py3-none-any.whl", hash = "sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf"}, - {file = "black-22.12.0.tar.gz", hash = "sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f"}, +python-versions = ">=3.9" +files = [ + {file = "black-24.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6668650ea4b685440857138e5fe40cde4d652633b1bdffc62933d0db4ed9812"}, + {file = "black-24.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:1c536fcf674217e87b8cc3657b81809d3c085d7bf3ef262ead700da345bfa6ea"}, + {file = "black-24.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:649fff99a20bd06c6f727d2a27f401331dc0cc861fb69cde910fe95b01b5928f"}, + {file = "black-24.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:fe4d6476887de70546212c99ac9bd803d90b42fc4767f058a0baa895013fbb3e"}, + {file = "black-24.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5a2221696a8224e335c28816a9d331a6c2ae15a2ee34ec857dcf3e45dbfa99ad"}, + {file = "black-24.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f9da3333530dbcecc1be13e69c250ed8dfa67f43c4005fb537bb426e19200d50"}, + {file = "black-24.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4007b1393d902b48b36958a216c20c4482f601569d19ed1df294a496eb366392"}, + {file = "black-24.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:394d4ddc64782e51153eadcaaca95144ac4c35e27ef9b0a42e121ae7e57a9175"}, + {file = "black-24.10.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b5e39e0fae001df40f95bd8cc36b9165c5e2ea88900167bddf258bacef9bbdc3"}, + {file = "black-24.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d37d422772111794b26757c5b55a3eade028aa3fde43121ab7b673d050949d65"}, + {file = "black-24.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:14b3502784f09ce2443830e3133dacf2c0110d45191ed470ecb04d0f5f6fcb0f"}, + {file = "black-24.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:30d2c30dc5139211dda799758559d1b049f7f14c580c409d6ad925b74a4208a8"}, + {file = "black-24.10.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cbacacb19e922a1d75ef2b6ccaefcd6e93a2c05ede32f06a21386a04cedb981"}, + {file = "black-24.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1f93102e0c5bb3907451063e08b9876dbeac810e7da5a8bfb7aeb5a9ef89066b"}, + {file = "black-24.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ddacb691cdcdf77b96f549cf9591701d8db36b2f19519373d60d31746068dbf2"}, + {file = "black-24.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:680359d932801c76d2e9c9068d05c6b107f2584b2a5b88831c83962eb9984c1b"}, + {file = "black-24.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:17374989640fbca88b6a448129cd1745c5eb8d9547b464f281b251dd00155ccd"}, + {file = "black-24.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:63f626344343083322233f175aaf372d326de8436f5928c042639a4afbbf1d3f"}, + {file = "black-24.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfa1d0cb6200857f1923b602f978386a3a2758a65b52e0950299ea014be6800"}, + {file = "black-24.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:2cd9c95431d94adc56600710f8813ee27eea544dd118d45896bb734e9d7a0dc7"}, + {file = "black-24.10.0-py3-none-any.whl", hash = "sha256:3bb2b7a1f7b685f85b11fed1ef10f8a9148bceb49853e47a294a3dd963c1dd7d"}, + {file = "black-24.10.0.tar.gz", hash = "sha256:846ea64c97afe3bc677b761787993be4991810ecc7a4a937816dd6bddedc4875"}, ] [package.dependencies] click = ">=8.0.0" mypy-extensions = ">=0.4.3" +packaging = ">=22.0" pathspec = ">=0.9.0" platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""} -typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.0.1", markers = "python_version < 
\"3.11\""} [package.extras] colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)"] +d = ["aiohttp (>=3.10)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] +[[package]] +name = "boto3" +version = "1.35.36" +description = "The AWS SDK for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "boto3-1.35.36-py3-none-any.whl", hash = "sha256:33735b9449cd2ef176531ba2cb2265c904a91244440b0e161a17da9d24a1e6d1"}, + {file = "boto3-1.35.36.tar.gz", hash = "sha256:586524b623e4fbbebe28b604c6205eb12f263cc4746bccb011562d07e217a4cb"}, +] + +[package.dependencies] +botocore = ">=1.35.36,<1.36.0" +jmespath = ">=0.7.1,<2.0.0" +s3transfer = ">=0.10.0,<0.11.0" + +[package.extras] +crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] + +[[package]] +name = "botocore" +version = "1.35.36" +description = "Low-level, data-driven core of boto 3." +optional = false +python-versions = ">=3.8" +files = [ + {file = "botocore-1.35.36-py3-none-any.whl", hash = "sha256:64241c778bf2dc863d93abab159e14024d97a926a5715056ef6411418cb9ead3"}, + {file = "botocore-1.35.36.tar.gz", hash = "sha256:354ec1b766f0029b5d6ff0c45d1a0f9e5007b7d2f3ec89bcdd755b208c5bc797"}, +] + +[package.dependencies] +jmespath = ">=0.7.1,<2.0.0" +python-dateutil = ">=2.1,<3.0.0" +urllib3 = [ + {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""}, + {version = ">=1.25.4,<1.27", markers = "python_version < \"3.10\""}, +] + +[package.extras] +crt = ["awscrt (==0.22.0)"] + +[[package]] +name = "bump-pydantic" +version = "0.8.0" +description = "Convert Pydantic from V1 to V2 ♻" +optional = false +python-versions = ">=3.8" +files = [ + {file = "bump_pydantic-0.8.0-py3-none-any.whl", hash = "sha256:6cbb4deb5869a69baa5a477f28f3e2d8fb09b687e114c018bd54470590ae7bf7"}, + {file = "bump_pydantic-0.8.0.tar.gz", hash = "sha256:6092e61930e85619e74eeb04131b4387feda16f02d8bb2e3cf9507fa492c69e9"}, +] + +[package.dependencies] +libcst = ">=0.4.2" +rich = "*" +typer = ">=0.7.0" +typing-extensions = "*" + [[package]] name = "certifi" -version = "2023.11.17" +version = "2024.8.30" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, - {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, ] [[package]] name = "cffi" -version = "1.16.0" +version = "1.17.1" description = "Foreign Function Interface for Python calling C code." 
optional = false python-versions = ">=3.8" files = [ - {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, - {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, - {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, - {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, - {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, - {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, - {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, - {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, - {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, - {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, - {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, - {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, - {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, - {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, - {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = 
"cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, ] [package.dependencies] @@ -300,6 +431,120 @@ files = [ {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, ] +[[package]] +name = "charset-normalizer" +version = "3.4.0" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = 
"sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, + {file = 
"charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, + {file = 
"charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = 
"sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, + {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, + {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, +] + [[package]] name = "click" version = "8.1.7" @@ -325,82 +570,75 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] -[[package]] -name = "colorlog" -version = "6.7.0" -description = "Add colours to the output of Python's logging module." -optional = false -python-versions = ">=3.6" -files = [ - {file = "colorlog-6.7.0-py2.py3-none-any.whl", hash = "sha256:0d33ca236784a1ba3ff9c532d4964126d8a2c44f1f0cb1d2b0728196f512f662"}, - {file = "colorlog-6.7.0.tar.gz", hash = "sha256:bd94bd21c1e13fac7bd3153f4bc3a7dc0eb0974b8bc2fdf1a989e474f6e582e5"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} - -[package.extras] -development = ["black", "flake8", "mypy", "pytest", "types-colorama"] - [[package]] name = "coverage" -version = "7.3.2" +version = "7.6.7" description = "Code coverage measurement for Python" optional = false -python-versions = ">=3.8" -files = [ - {file = "coverage-7.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d872145f3a3231a5f20fd48500274d7df222e291d90baa2026cc5152b7ce86bf"}, - {file = "coverage-7.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:310b3bb9c91ea66d59c53fa4989f57d2436e08f18fb2f421a1b0b6b8cc7fffda"}, - {file = "coverage-7.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f47d39359e2c3779c5331fc740cf4bce6d9d680a7b4b4ead97056a0ae07cb49a"}, - {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa72dbaf2c2068404b9870d93436e6d23addd8bbe9295f49cbca83f6e278179c"}, - {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:beaa5c1b4777f03fc63dfd2a6bd820f73f036bfb10e925fce067b00a340d0f3f"}, - {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dbc1b46b92186cc8074fee9d9fbb97a9dd06c6cbbef391c2f59d80eabdf0faa6"}, - {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:315a989e861031334d7bee1f9113c8770472db2ac484e5b8c3173428360a9148"}, - {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d1bc430677773397f64a5c88cb522ea43175ff16f8bfcc89d467d974cb2274f9"}, - {file = "coverage-7.3.2-cp310-cp310-win32.whl", hash = "sha256:a889ae02f43aa45032afe364c8ae84ad3c54828c2faa44f3bfcafecb5c96b02f"}, - {file = "coverage-7.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:c0ba320de3fb8c6ec16e0be17ee1d3d69adcda99406c43c0409cb5c41788a611"}, - {file = 
"coverage-7.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ac8c802fa29843a72d32ec56d0ca792ad15a302b28ca6203389afe21f8fa062c"}, - {file = "coverage-7.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:89a937174104339e3a3ffcf9f446c00e3a806c28b1841c63edb2b369310fd074"}, - {file = "coverage-7.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e267e9e2b574a176ddb983399dec325a80dbe161f1a32715c780b5d14b5f583a"}, - {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2443cbda35df0d35dcfb9bf8f3c02c57c1d6111169e3c85fc1fcc05e0c9f39a3"}, - {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4175e10cc8dda0265653e8714b3174430b07c1dca8957f4966cbd6c2b1b8065a"}, - {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf38419fb1a347aaf63481c00f0bdc86889d9fbf3f25109cf96c26b403fda1"}, - {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5c913b556a116b8d5f6ef834038ba983834d887d82187c8f73dec21049abd65c"}, - {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1981f785239e4e39e6444c63a98da3a1db8e971cb9ceb50a945ba6296b43f312"}, - {file = "coverage-7.3.2-cp311-cp311-win32.whl", hash = "sha256:43668cabd5ca8258f5954f27a3aaf78757e6acf13c17604d89648ecc0cc66640"}, - {file = "coverage-7.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10c39c0452bf6e694511c901426d6b5ac005acc0f78ff265dbe36bf81f808a2"}, - {file = "coverage-7.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4cbae1051ab791debecc4a5dcc4a1ff45fc27b91b9aee165c8a27514dd160836"}, - {file = "coverage-7.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12d15ab5833a997716d76f2ac1e4b4d536814fc213c85ca72756c19e5a6b3d63"}, - {file = "coverage-7.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c7bba973ebee5e56fe9251300c00f1579652587a9f4a5ed8404b15a0471f216"}, - {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe494faa90ce6381770746077243231e0b83ff3f17069d748f645617cefe19d4"}, - {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6e9589bd04d0461a417562649522575d8752904d35c12907d8c9dfeba588faf"}, - {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d51ac2a26f71da1b57f2dc81d0e108b6ab177e7d30e774db90675467c847bbdf"}, - {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99b89d9f76070237975b315b3d5f4d6956ae354a4c92ac2388a5695516e47c84"}, - {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fa28e909776dc69efb6ed975a63691bc8172b64ff357e663a1bb06ff3c9b589a"}, - {file = "coverage-7.3.2-cp312-cp312-win32.whl", hash = "sha256:289fe43bf45a575e3ab10b26d7b6f2ddb9ee2dba447499f5401cfb5ecb8196bb"}, - {file = "coverage-7.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7dbc3ed60e8659bc59b6b304b43ff9c3ed858da2839c78b804973f613d3e92ed"}, - {file = "coverage-7.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f94b734214ea6a36fe16e96a70d941af80ff3bfd716c141300d95ebc85339738"}, - {file = "coverage-7.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:af3d828d2c1cbae52d34bdbb22fcd94d1ce715d95f1a012354a75e5913f1bda2"}, - {file = "coverage-7.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:630b13e3036e13c7adc480ca42fa7afc2a5d938081d28e20903cf7fd687872e2"}, - {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9eacf273e885b02a0273bb3a2170f30e2d53a6d53b72dbe02d6701b5296101c"}, - {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8f17966e861ff97305e0801134e69db33b143bbfb36436efb9cfff6ec7b2fd9"}, - {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b4275802d16882cf9c8b3d057a0839acb07ee9379fa2749eca54efbce1535b82"}, - {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:72c0cfa5250f483181e677ebc97133ea1ab3eb68645e494775deb6a7f6f83901"}, - {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cb536f0dcd14149425996821a168f6e269d7dcd2c273a8bff8201e79f5104e76"}, - {file = "coverage-7.3.2-cp38-cp38-win32.whl", hash = "sha256:307adb8bd3abe389a471e649038a71b4eb13bfd6b7dd9a129fa856f5c695cf92"}, - {file = "coverage-7.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:88ed2c30a49ea81ea3b7f172e0269c182a44c236eb394718f976239892c0a27a"}, - {file = "coverage-7.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b631c92dfe601adf8f5ebc7fc13ced6bb6e9609b19d9a8cd59fa47c4186ad1ce"}, - {file = "coverage-7.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d3d9df4051c4a7d13036524b66ecf7a7537d14c18a384043f30a303b146164e9"}, - {file = "coverage-7.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f7363d3b6a1119ef05015959ca24a9afc0ea8a02c687fe7e2d557705375c01f"}, - {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f11cc3c967a09d3695d2a6f03fb3e6236622b93be7a4b5dc09166a861be6d25"}, - {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:149de1d2401ae4655c436a3dced6dd153f4c3309f599c3d4bd97ab172eaf02d9"}, - {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3a4006916aa6fee7cd38db3bfc95aa9c54ebb4ffbfc47c677c8bba949ceba0a6"}, - {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9028a3871280110d6e1aa2df1afd5ef003bab5fb1ef421d6dc748ae1c8ef2ebc"}, - {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9f805d62aec8eb92bab5b61c0f07329275b6f41c97d80e847b03eb894f38d083"}, - {file = "coverage-7.3.2-cp39-cp39-win32.whl", hash = "sha256:d1c88ec1a7ff4ebca0219f5b1ef863451d828cccf889c173e1253aa84b1e07ce"}, - {file = "coverage-7.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b4767da59464bb593c07afceaddea61b154136300881844768037fd5e859353f"}, - {file = "coverage-7.3.2-pp38.pp39.pp310-none-any.whl", hash = "sha256:ae97af89f0fbf373400970c0a21eef5aa941ffeed90aee43650b81f7d7f47637"}, - {file = "coverage-7.3.2.tar.gz", hash = "sha256:be32ad29341b0170e795ca590e1c07e81fc061cb5b10c74ce7203491484404ef"}, +python-versions = ">=3.9" +files = [ + {file = "coverage-7.6.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:108bb458827765d538abcbf8288599fee07d2743357bdd9b9dad456c287e121e"}, + {file = "coverage-7.6.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c973b2fe4dc445cb865ab369df7521df9c27bf40715c837a113edaa2aa9faf45"}, + {file = "coverage-7.6.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c6b24007c4bcd0b19fac25763a7cac5035c735ae017e9a349b927cfc88f31c1"}, + {file = 
"coverage-7.6.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:acbb8af78f8f91b3b51f58f288c0994ba63c646bc1a8a22ad072e4e7e0a49f1c"}, + {file = "coverage-7.6.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad32a981bcdedb8d2ace03b05e4fd8dace8901eec64a532b00b15217d3677dd2"}, + {file = "coverage-7.6.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:34d23e28ccb26236718a3a78ba72744212aa383141961dd6825f6595005c8b06"}, + {file = "coverage-7.6.7-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e25bacb53a8c7325e34d45dddd2f2fbae0dbc230d0e2642e264a64e17322a777"}, + {file = "coverage-7.6.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:af05bbba896c4472a29408455fe31b3797b4d8648ed0a2ccac03e074a77e2314"}, + {file = "coverage-7.6.7-cp310-cp310-win32.whl", hash = "sha256:796c9b107d11d2d69e1849b2dfe41730134b526a49d3acb98ca02f4985eeff7a"}, + {file = "coverage-7.6.7-cp310-cp310-win_amd64.whl", hash = "sha256:987a8e3da7da4eed10a20491cf790589a8e5e07656b6dc22d3814c4d88faf163"}, + {file = "coverage-7.6.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7e61b0e77ff4dddebb35a0e8bb5a68bf0f8b872407d8d9f0c726b65dfabe2469"}, + {file = "coverage-7.6.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1a5407a75ca4abc20d6252efeb238377a71ce7bda849c26c7a9bece8680a5d99"}, + {file = "coverage-7.6.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df002e59f2d29e889c37abd0b9ee0d0e6e38c24f5f55d71ff0e09e3412a340ec"}, + {file = "coverage-7.6.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:673184b3156cba06154825f25af33baa2671ddae6343f23175764e65a8c4c30b"}, + {file = "coverage-7.6.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e69ad502f1a2243f739f5bd60565d14a278be58be4c137d90799f2c263e7049a"}, + {file = "coverage-7.6.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:60dcf7605c50ea72a14490d0756daffef77a5be15ed1b9fea468b1c7bda1bc3b"}, + {file = "coverage-7.6.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9c2eb378bebb2c8f65befcb5147877fc1c9fbc640fc0aad3add759b5df79d55d"}, + {file = "coverage-7.6.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3c0317288f032221d35fa4cbc35d9f4923ff0dfd176c79c9b356e8ef8ef2dff4"}, + {file = "coverage-7.6.7-cp311-cp311-win32.whl", hash = "sha256:951aade8297358f3618a6e0660dc74f6b52233c42089d28525749fc8267dccd2"}, + {file = "coverage-7.6.7-cp311-cp311-win_amd64.whl", hash = "sha256:5e444b8e88339a2a67ce07d41faabb1d60d1004820cee5a2c2b54e2d8e429a0f"}, + {file = "coverage-7.6.7-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f07ff574986bc3edb80e2c36391678a271d555f91fd1d332a1e0f4b5ea4b6ea9"}, + {file = "coverage-7.6.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:49ed5ee4109258973630c1f9d099c7e72c5c36605029f3a91fe9982c6076c82b"}, + {file = "coverage-7.6.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3e8796434a8106b3ac025fd15417315d7a58ee3e600ad4dbcfddc3f4b14342c"}, + {file = "coverage-7.6.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3b925300484a3294d1c70f6b2b810d6526f2929de954e5b6be2bf8caa1f12c1"}, + {file = "coverage-7.6.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:3c42ec2c522e3ddd683dec5cdce8e62817afb648caedad9da725001fa530d354"}, + {file = "coverage-7.6.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0266b62cbea568bd5e93a4da364d05de422110cbed5056d69339bd5af5685433"}, + {file = "coverage-7.6.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e5f2a0f161d126ccc7038f1f3029184dbdf8f018230af17ef6fd6a707a5b881f"}, + {file = "coverage-7.6.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c132b5a22821f9b143f87446805e13580b67c670a548b96da945a8f6b4f2efbb"}, + {file = "coverage-7.6.7-cp312-cp312-win32.whl", hash = "sha256:7c07de0d2a110f02af30883cd7dddbe704887617d5c27cf373362667445a4c76"}, + {file = "coverage-7.6.7-cp312-cp312-win_amd64.whl", hash = "sha256:fd49c01e5057a451c30c9b892948976f5d38f2cbd04dc556a82743ba8e27ed8c"}, + {file = "coverage-7.6.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:46f21663e358beae6b368429ffadf14ed0a329996248a847a4322fb2e35d64d3"}, + {file = "coverage-7.6.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:40cca284c7c310d622a1677f105e8507441d1bb7c226f41978ba7c86979609ab"}, + {file = "coverage-7.6.7-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77256ad2345c29fe59ae861aa11cfc74579c88d4e8dbf121cbe46b8e32aec808"}, + {file = "coverage-7.6.7-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:87ea64b9fa52bf395272e54020537990a28078478167ade6c61da7ac04dc14bc"}, + {file = "coverage-7.6.7-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d608a7808793e3615e54e9267519351c3ae204a6d85764d8337bd95993581a8"}, + {file = "coverage-7.6.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdd94501d65adc5c24f8a1a0eda110452ba62b3f4aeaba01e021c1ed9cb8f34a"}, + {file = "coverage-7.6.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:82c809a62e953867cf57e0548c2b8464207f5f3a6ff0e1e961683e79b89f2c55"}, + {file = "coverage-7.6.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:bb684694e99d0b791a43e9fc0fa58efc15ec357ac48d25b619f207c41f2fd384"}, + {file = "coverage-7.6.7-cp313-cp313-win32.whl", hash = "sha256:963e4a08cbb0af6623e61492c0ec4c0ec5c5cf74db5f6564f98248d27ee57d30"}, + {file = "coverage-7.6.7-cp313-cp313-win_amd64.whl", hash = "sha256:14045b8bfd5909196a90da145a37f9d335a5d988a83db34e80f41e965fb7cb42"}, + {file = "coverage-7.6.7-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:f2c7a045eef561e9544359a0bf5784b44e55cefc7261a20e730baa9220c83413"}, + {file = "coverage-7.6.7-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5dd4e4a49d9c72a38d18d641135d2fb0bdf7b726ca60a103836b3d00a1182acd"}, + {file = "coverage-7.6.7-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c95e0fa3d1547cb6f021ab72f5c23402da2358beec0a8e6d19a368bd7b0fb37"}, + {file = "coverage-7.6.7-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f63e21ed474edd23f7501f89b53280014436e383a14b9bd77a648366c81dce7b"}, + {file = "coverage-7.6.7-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ead9b9605c54d15be228687552916c89c9683c215370c4a44f1f217d2adcc34d"}, + {file = "coverage-7.6.7-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:0573f5cbf39114270842d01872952d301027d2d6e2d84013f30966313cadb529"}, + {file = "coverage-7.6.7-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:e2c8e3384c12dfa19fa9a52f23eb091a8fad93b5b81a41b14c17c78e23dd1d8b"}, + 
{file = "coverage-7.6.7-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:70a56a2ec1869e6e9fa69ef6b76b1a8a7ef709972b9cc473f9ce9d26b5997ce3"}, + {file = "coverage-7.6.7-cp313-cp313t-win32.whl", hash = "sha256:dbba8210f5067398b2c4d96b4e64d8fb943644d5eb70be0d989067c8ca40c0f8"}, + {file = "coverage-7.6.7-cp313-cp313t-win_amd64.whl", hash = "sha256:dfd14bcae0c94004baba5184d1c935ae0d1231b8409eb6c103a5fd75e8ecdc56"}, + {file = "coverage-7.6.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37a15573f988b67f7348916077c6d8ad43adb75e478d0910957394df397d2874"}, + {file = "coverage-7.6.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b6cce5c76985f81da3769c52203ee94722cd5d5889731cd70d31fee939b74bf0"}, + {file = "coverage-7.6.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ab9763d291a17b527ac6fd11d1a9a9c358280adb320e9c2672a97af346ac2c"}, + {file = "coverage-7.6.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6cf96ceaa275f071f1bea3067f8fd43bec184a25a962c754024c973af871e1b7"}, + {file = "coverage-7.6.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aee9cf6b0134d6f932d219ce253ef0e624f4fa588ee64830fcba193269e4daa3"}, + {file = "coverage-7.6.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2bc3e45c16564cc72de09e37413262b9f99167803e5e48c6156bccdfb22c8327"}, + {file = "coverage-7.6.7-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:623e6965dcf4e28a3debaa6fcf4b99ee06d27218f46d43befe4db1c70841551c"}, + {file = "coverage-7.6.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:850cfd2d6fc26f8346f422920ac204e1d28814e32e3a58c19c91980fa74d8289"}, + {file = "coverage-7.6.7-cp39-cp39-win32.whl", hash = "sha256:c296263093f099da4f51b3dff1eff5d4959b527d4f2f419e16508c5da9e15e8c"}, + {file = "coverage-7.6.7-cp39-cp39-win_amd64.whl", hash = "sha256:90746521206c88bdb305a4bf3342b1b7316ab80f804d40c536fc7d329301ee13"}, + {file = "coverage-7.6.7-pp39.pp310-none-any.whl", hash = "sha256:0ddcb70b3a3a57581b450571b31cb774f23eb9519c2aaa6176d3a84c9fc57671"}, + {file = "coverage-7.6.7.tar.gz", hash = "sha256:d79d4826e41441c9a118ff045e4bccb9fdbdcb1d02413e7ea6eb5c87b5439d24"}, ] [package.dependencies] @@ -411,47 +649,51 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "41.0.7" +version = "43.0.3" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:3c78451b78313fa81607fa1b3f1ae0a5ddd8014c38a02d9db0616133987b9cdf"}, - {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:928258ba5d6f8ae644e764d0f996d61a8777559f72dfeb2eea7e2fe0ad6e782d"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a1b41bc97f1ad230a41657d9155113c7521953869ae57ac39ac7f1bb471469a"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:841df4caa01008bad253bce2a6f7b47f86dc9f08df4b433c404def869f590a15"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5429ec739a29df2e29e15d082f1d9ad683701f0ec7709ca479b3ff2708dae65a"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:43f2552a2378b44869fe8827aa19e69512e3245a219104438692385b0ee119d1"}, - {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:af03b32695b24d85a75d40e1ba39ffe7db7ffcb099fe507b39fd41a565f1b157"}, - {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:49f0805fc0b2ac8d4882dd52f4a3b935b210935d500b6b805f321addc8177406"}, - {file = "cryptography-41.0.7-cp37-abi3-win32.whl", hash = "sha256:f983596065a18a2183e7f79ab3fd4c475205b839e02cbc0efbbf9666c4b3083d"}, - {file = "cryptography-41.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:90452ba79b8788fa380dfb587cca692976ef4e757b194b093d845e8d99f612f2"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:079b85658ea2f59c4f43b70f8119a52414cdb7be34da5d019a77bf96d473b960"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b640981bf64a3e978a56167594a0e97db71c89a479da8e175d8bb5be5178c003"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e3114da6d7f95d2dee7d3f4eec16dacff819740bbab931aff8648cb13c5ff5e7"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d5ec85080cce7b0513cfd233914eb8b7bbd0633f1d1703aa28d1dd5a72f678ec"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7a698cb1dac82c35fcf8fe3417a3aaba97de16a01ac914b89a0889d364d2f6be"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:37a138589b12069efb424220bf78eac59ca68b95696fc622b6ccc1c0a197204a"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:68a2dec79deebc5d26d617bfdf6e8aab065a4f34934b22d3b5010df3ba36612c"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:09616eeaef406f99046553b8a40fbf8b1e70795a91885ba4c96a70793de5504a"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48a0476626da912a44cc078f9893f292f0b3e4c739caf289268168d8f4702a39"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c7f3201ec47d5207841402594f1d7950879ef890c0c495052fa62f58283fde1a"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c5ca78485a255e03c32b513f8c2bc39fedb7f5c5f8535545bdc223a03b24f248"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d6c391c021ab1f7a82da5d8d0b3cee2f4b2c455ec86c8aebbc84837a631ff309"}, - {file = "cryptography-41.0.7.tar.gz", hash = 
"sha256:13f93ce9bea8016c253b34afc6bd6a75993e5c40672ed5405a9c832f0d4a00bc"}, -] - -[package.dependencies] -cffi = ">=1.12" + {file = "cryptography-43.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf7a1932ac4176486eab36a19ed4c0492da5d97123f1406cf15e41b05e787d2e"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63efa177ff54aec6e1c0aefaa1a241232dcd37413835a9b674b6e3f0ae2bfd3e"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e1ce50266f4f70bf41a2c6dc4358afadae90e2a1e5342d3c08883df1675374f"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:443c4a81bb10daed9a8f334365fe52542771f25aedaf889fd323a853ce7377d6"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:74f57f24754fe349223792466a709f8e0c093205ff0dca557af51072ff47ab18"}, + {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9762ea51a8fc2a88b70cf2995e5675b38d93bf36bd67d91721c309df184f49bd"}, + {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:81ef806b1fef6b06dcebad789f988d3b37ccaee225695cf3e07648eee0fc6b73"}, + {file = "cryptography-43.0.3-cp37-abi3-win32.whl", hash = "sha256:cbeb489927bd7af4aa98d4b261af9a5bc025bd87f0e3547e11584be9e9427be2"}, + {file = "cryptography-43.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:f46304d6f0c6ab8e52770addfa2fc41e6629495548862279641972b6215451cd"}, + {file = "cryptography-43.0.3-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8ac43ae87929a5982f5948ceda07001ee5e83227fd69cf55b109144938d96984"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:846da004a5804145a5f441b8530b4bf35afbf7da70f82409f151695b127213d5"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f996e7268af62598f2fc1204afa98a3b5712313a55c4c9d434aef49cadc91d4"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f7b178f11ed3664fd0e995a47ed2b5ff0a12d893e41dd0494f406d1cf555cab7"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:c2e6fc39c4ab499049df3bdf567f768a723a5e8464816e8f009f121a5a9f4405"}, + {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e1be4655c7ef6e1bbe6b5d0403526601323420bcf414598955968c9ef3eb7d16"}, + {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:df6b6c6d742395dd77a23ea3728ab62f98379eff8fb61be2744d4679ab678f73"}, + {file = "cryptography-43.0.3-cp39-abi3-win32.whl", hash = "sha256:d56e96520b1020449bbace2b78b603442e7e378a9b3bd68de65c782db1507995"}, + {file = "cryptography-43.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:0c580952eef9bf68c4747774cde7ec1d85a6e61de97281f2dba83c7d2c806362"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d03b5621a135bffecad2c73e9f4deb1a0f977b9a8ffe6f8e002bf6c9d07b918c"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a2a431ee15799d6db9fe80c82b055bae5a752bef645bba795e8e52687c69efe3"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:281c945d0e28c92ca5e5930664c1cefd85efe80e5c0d2bc58dd63383fda29f83"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f18c716be16bc1fea8e95def49edf46b82fccaa88587a45f8dc0ff6ab5d8e0a7"}, + {file = 
"cryptography-43.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4a02ded6cd4f0a5562a8887df8b3bd14e822a90f97ac5e544c162899bc467664"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:53a583b6637ab4c4e3591a15bc9db855b8d9dee9a669b550f311480acab6eb08"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1ec0bcf7e17c0c5669d881b1cd38c4972fade441b27bda1051665faaa89bdcaa"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2ce6fae5bdad59577b44e4dfed356944fbf1d925269114c28be377692643b4ff"}, + {file = "cryptography-43.0.3.tar.gz", hash = "sha256:315b9001266a492a6ff443b61238f956b214dbec9910a081ba5b6646a055a805"}, +] + +[package.dependencies] +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} [package.extras] docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] -docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] nox = ["nox"] -pep8test = ["black", "check-sdist", "mypy", "ruff"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test = ["certifi", "cryptography-vectors (==43.0.3)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] [[package]] @@ -467,81 +709,90 @@ files = [ [[package]] name = "debugpy" -version = "1.8.0" +version = "1.8.8" description = "An implementation of the Debug Adapter Protocol for Python" optional = false python-versions = ">=3.8" files = [ - {file = "debugpy-1.8.0-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:7fb95ca78f7ac43393cd0e0f2b6deda438ec7c5e47fa5d38553340897d2fbdfb"}, - {file = "debugpy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef9ab7df0b9a42ed9c878afd3eaaff471fce3fa73df96022e1f5c9f8f8c87ada"}, - {file = "debugpy-1.8.0-cp310-cp310-win32.whl", hash = "sha256:a8b7a2fd27cd9f3553ac112f356ad4ca93338feadd8910277aff71ab24d8775f"}, - {file = "debugpy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:5d9de202f5d42e62f932507ee8b21e30d49aae7e46d5b1dd5c908db1d7068637"}, - {file = "debugpy-1.8.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:ef54404365fae8d45cf450d0544ee40cefbcb9cb85ea7afe89a963c27028261e"}, - {file = "debugpy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60009b132c91951354f54363f8ebdf7457aeb150e84abba5ae251b8e9f29a8a6"}, - {file = "debugpy-1.8.0-cp311-cp311-win32.whl", hash = "sha256:8cd0197141eb9e8a4566794550cfdcdb8b3db0818bdf8c49a8e8f8053e56e38b"}, - {file = "debugpy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:a64093656c4c64dc6a438e11d59369875d200bd5abb8f9b26c1f5f723622e153"}, - {file = "debugpy-1.8.0-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:b05a6b503ed520ad58c8dc682749113d2fd9f41ffd45daec16e558ca884008cd"}, - {file = "debugpy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c6fb41c98ec51dd010d7ed650accfd07a87fe5e93eca9d5f584d0578f28f35f"}, - {file = "debugpy-1.8.0-cp38-cp38-win32.whl", hash = "sha256:46ab6780159eeabb43c1495d9c84cf85d62975e48b6ec21ee10c95767c0590aa"}, - {file = "debugpy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:bdc5ef99d14b9c0fcb35351b4fbfc06ac0ee576aeab6b2511702e5a648a2e595"}, - {file = 
"debugpy-1.8.0-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:61eab4a4c8b6125d41a34bad4e5fe3d2cc145caecd63c3fe953be4cc53e65bf8"}, - {file = "debugpy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:125b9a637e013f9faac0a3d6a82bd17c8b5d2c875fb6b7e2772c5aba6d082332"}, - {file = "debugpy-1.8.0-cp39-cp39-win32.whl", hash = "sha256:57161629133113c97b387382045649a2b985a348f0c9366e22217c87b68b73c6"}, - {file = "debugpy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:e3412f9faa9ade82aa64a50b602544efcba848c91384e9f93497a458767e6926"}, - {file = "debugpy-1.8.0-py2.py3-none-any.whl", hash = "sha256:9c9b0ac1ce2a42888199df1a1906e45e6f3c9555497643a85e0bf2406e3ffbc4"}, - {file = "debugpy-1.8.0.zip", hash = "sha256:12af2c55b419521e33d5fb21bd022df0b5eb267c3e178f1d374a63a2a6bdccd0"}, + {file = "debugpy-1.8.8-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:e59b1607c51b71545cb3496876544f7186a7a27c00b436a62f285603cc68d1c6"}, + {file = "debugpy-1.8.8-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6531d952b565b7cb2fbd1ef5df3d333cf160b44f37547a4e7cf73666aca5d8d"}, + {file = "debugpy-1.8.8-cp310-cp310-win32.whl", hash = "sha256:b01f4a5e5c5fb1d34f4ccba99a20ed01eabc45a4684f4948b5db17a319dfb23f"}, + {file = "debugpy-1.8.8-cp310-cp310-win_amd64.whl", hash = "sha256:535f4fb1c024ddca5913bb0eb17880c8f24ba28aa2c225059db145ee557035e9"}, + {file = "debugpy-1.8.8-cp311-cp311-macosx_14_0_universal2.whl", hash = "sha256:c399023146e40ae373753a58d1be0a98bf6397fadc737b97ad612886b53df318"}, + {file = "debugpy-1.8.8-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:09cc7b162586ea2171eea055985da2702b0723f6f907a423c9b2da5996ad67ba"}, + {file = "debugpy-1.8.8-cp311-cp311-win32.whl", hash = "sha256:eea8821d998ebeb02f0625dd0d76839ddde8cbf8152ebbe289dd7acf2cdc6b98"}, + {file = "debugpy-1.8.8-cp311-cp311-win_amd64.whl", hash = "sha256:d4483836da2a533f4b1454dffc9f668096ac0433de855f0c22cdce8c9f7e10c4"}, + {file = "debugpy-1.8.8-cp312-cp312-macosx_14_0_universal2.whl", hash = "sha256:0cc94186340be87b9ac5a707184ec8f36547fb66636d1029ff4f1cc020e53996"}, + {file = "debugpy-1.8.8-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64674e95916e53c2e9540a056e5f489e0ad4872645399d778f7c598eacb7b7f9"}, + {file = "debugpy-1.8.8-cp312-cp312-win32.whl", hash = "sha256:5c6e885dbf12015aed73770f29dec7023cb310d0dc2ba8bfbeb5c8e43f80edc9"}, + {file = "debugpy-1.8.8-cp312-cp312-win_amd64.whl", hash = "sha256:19ffbd84e757a6ca0113574d1bf5a2298b3947320a3e9d7d8dc3377f02d9f864"}, + {file = "debugpy-1.8.8-cp313-cp313-macosx_14_0_universal2.whl", hash = "sha256:705cd123a773d184860ed8dae99becd879dfec361098edbefb5fc0d3683eb804"}, + {file = "debugpy-1.8.8-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:890fd16803f50aa9cb1a9b9b25b5ec321656dd6b78157c74283de241993d086f"}, + {file = "debugpy-1.8.8-cp313-cp313-win32.whl", hash = "sha256:90244598214bbe704aa47556ec591d2f9869ff9e042e301a2859c57106649add"}, + {file = "debugpy-1.8.8-cp313-cp313-win_amd64.whl", hash = "sha256:4b93e4832fd4a759a0c465c967214ed0c8a6e8914bced63a28ddb0dd8c5f078b"}, + {file = "debugpy-1.8.8-cp38-cp38-macosx_14_0_x86_64.whl", hash = "sha256:143ef07940aeb8e7316de48f5ed9447644da5203726fca378f3a6952a50a9eae"}, + {file = 
"debugpy-1.8.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f95651bdcbfd3b27a408869a53fbefcc2bcae13b694daee5f1365b1b83a00113"}, + {file = "debugpy-1.8.8-cp38-cp38-win32.whl", hash = "sha256:26b461123a030e82602a750fb24d7801776aa81cd78404e54ab60e8b5fecdad5"}, + {file = "debugpy-1.8.8-cp38-cp38-win_amd64.whl", hash = "sha256:f3cbf1833e644a3100eadb6120f25be8a532035e8245584c4f7532937edc652a"}, + {file = "debugpy-1.8.8-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:53709d4ec586b525724819dc6af1a7703502f7e06f34ded7157f7b1f963bb854"}, + {file = "debugpy-1.8.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a9c013077a3a0000e83d97cf9cc9328d2b0bbb31f56b0e99ea3662d29d7a6a2"}, + {file = "debugpy-1.8.8-cp39-cp39-win32.whl", hash = "sha256:ffe94dd5e9a6739a75f0b85316dc185560db3e97afa6b215628d1b6a17561cb2"}, + {file = "debugpy-1.8.8-cp39-cp39-win_amd64.whl", hash = "sha256:5c0e5a38c7f9b481bf31277d2f74d2109292179081f11108e668195ef926c0f9"}, + {file = "debugpy-1.8.8-py2.py3-none-any.whl", hash = "sha256:ec684553aba5b4066d4de510859922419febc710df7bba04fe9e7ef3de15d34f"}, + {file = "debugpy-1.8.8.zip", hash = "sha256:e6355385db85cbd666be703a96ab7351bc9e6c61d694893206f8001e22aee091"}, ] [[package]] name = "distlib" -version = "0.3.7" +version = "0.3.9" description = "Distribution utilities" optional = false python-versions = "*" files = [ - {file = "distlib-0.3.7-py2.py3-none-any.whl", hash = "sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057"}, - {file = "distlib-0.3.7.tar.gz", hash = "sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8"}, + {file = "distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"}, + {file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"}, ] [[package]] name = "dnspython" -version = "2.4.2" +version = "2.7.0" description = "DNS toolkit" optional = false -python-versions = ">=3.8,<4.0" +python-versions = ">=3.9" files = [ - {file = "dnspython-2.4.2-py3-none-any.whl", hash = "sha256:57c6fbaaeaaf39c891292012060beb141791735dbb4004798328fc2c467402d8"}, - {file = "dnspython-2.4.2.tar.gz", hash = "sha256:8dcfae8c7460a2f84b4072e26f1c9f4101ca20c071649cb7c34e8b6a93d58984"}, + {file = "dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86"}, + {file = "dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1"}, ] [package.extras] -dnssec = ["cryptography (>=2.6,<42.0)"] -doh = ["h2 (>=4.1.0)", "httpcore (>=0.17.3)", "httpx (>=0.24.1)"] -doq = ["aioquic (>=0.9.20)"] -idna = ["idna (>=2.1,<4.0)"] -trio = ["trio (>=0.14,<0.23)"] -wmi = ["wmi (>=1.5.1,<2.0.0)"] +dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "hypercorn (>=0.16.0)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "quart-trio (>=0.11.0)", "sphinx (>=7.2.0)", "sphinx-rtd-theme (>=2.0.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"] +dnssec = ["cryptography (>=43)"] +doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"] +doq = ["aioquic (>=1.0.0)"] +idna = ["idna (>=3.7)"] +trio = ["trio (>=0.23)"] +wmi = ["wmi (>=1.5.1)"] [[package]] name = "docutils" -version = "0.20.1" +version = "0.21.2" description = "Docutils -- Python Documentation Utilities" optional = false -python-versions = 
">=3.7" +python-versions = ">=3.9" files = [ - {file = "docutils-0.20.1-py3-none-any.whl", hash = "sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6"}, - {file = "docutils-0.20.1.tar.gz", hash = "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b"}, + {file = "docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2"}, + {file = "docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f"}, ] [[package]] name = "email-validator" -version = "2.1.0.post1" +version = "2.2.0" description = "A robust email address syntax and deliverability validation library." optional = false python-versions = ">=3.8" files = [ - {file = "email_validator-2.1.0.post1-py3-none-any.whl", hash = "sha256:c973053efbeddfef924dc0bd93f6e77a1ea7ee0fce935aea7103c7a3d6d2d637"}, - {file = "email_validator-2.1.0.post1.tar.gz", hash = "sha256:a4b0bd1cf55f073b924258d19321b1f3aa74b4b5a71a42c305575dba920e1a44"}, + {file = "email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631"}, + {file = "email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7"}, ] [package.dependencies] @@ -559,15 +810,26 @@ files = [ {file = "eradicate-2.3.0.tar.gz", hash = "sha256:06df115be3b87d0fc1c483db22a2ebb12bcf40585722810d809cc770f5031c37"}, ] +[[package]] +name = "et-xmlfile" +version = "2.0.0" +description = "An implementation of lxml.xmlfile for the standard library" +optional = false +python-versions = ">=3.8" +files = [ + {file = "et_xmlfile-2.0.0-py3-none-any.whl", hash = "sha256:7a91720bc756843502c3b7504c77b8fe44217c85c537d85037f0f536151b2caa"}, + {file = "et_xmlfile-2.0.0.tar.gz", hash = "sha256:dab3f4764309081ce75662649be815c4c9081e88f0837825f90fd28317d4da54"}, +] + [[package]] name = "exceptiongroup" -version = "1.2.0" +version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, - {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, ] [package.extras] @@ -575,59 +837,85 @@ test = ["pytest (>=6)"] [[package]] name = "fakeredis" -version = "2.20.0" +version = "2.26.1" description = "Python implementation of redis API, can be used for testing purposes." 
optional = false -python-versions = ">=3.7,<4.0" +python-versions = "<4.0,>=3.7" files = [ - {file = "fakeredis-2.20.0-py3-none-any.whl", hash = "sha256:c9baf3c7fd2ebf40db50db4c642c7c76b712b1eed25d91efcc175bba9bc40ca3"}, - {file = "fakeredis-2.20.0.tar.gz", hash = "sha256:69987928d719d1ae1665ae8ebb16199d22a5ebae0b7d0d0d6586fc3a1a67428c"}, + {file = "fakeredis-2.26.1-py3-none-any.whl", hash = "sha256:68a5615d7ef2529094d6958677e30a6d30d544e203a5ab852985c19d7ad57e32"}, + {file = "fakeredis-2.26.1.tar.gz", hash = "sha256:69f4daafe763c8014a6dbf44a17559c46643c95447b3594b3975251a171b806d"}, ] [package.dependencies] -redis = ">=4" +redis = {version = ">=4.3", markers = "python_full_version > \"3.8.0\""} sortedcontainers = ">=2,<3" +typing-extensions = {version = ">=4.7,<5.0", markers = "python_version < \"3.11\""} [package.extras] -bf = ["pybloom-live (>=4.0,<5.0)"] +bf = ["pyprobables (>=0.6,<0.7)"] +cf = ["pyprobables (>=0.6,<0.7)"] json = ["jsonpath-ng (>=1.6,<2.0)"] -lua = ["lupa (>=1.14,<3.0)"] +lua = ["lupa (>=2.1,<3.0)"] +probabilistic = ["pyprobables (>=0.6,<0.7)"] [[package]] name = "fastapi" -version = "0.104.1" +version = "0.115.5" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" optional = false python-versions = ">=3.8" files = [ - {file = "fastapi-0.104.1-py3-none-any.whl", hash = "sha256:752dc31160cdbd0436bb93bad51560b57e525cbb1d4bbf6f4904ceee75548241"}, - {file = "fastapi-0.104.1.tar.gz", hash = "sha256:e5e4540a7c5e1dcfbbcf5b903c234feddcdcd881f191977a1c5dfd917487e7ae"}, + {file = "fastapi-0.115.5-py3-none-any.whl", hash = "sha256:596b95adbe1474da47049e802f9a65ab2ffa9c2b07e7efee70eb8a66c9f2f796"}, + {file = "fastapi-0.115.5.tar.gz", hash = "sha256:0e7a4d0dc0d01c68df21887cce0945e72d3c48b9f4f79dfe7a7d53aa08fbb289"}, ] [package.dependencies] -anyio = ">=3.7.1,<4.0.0" pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" -starlette = ">=0.27.0,<0.28.0" +starlette = ">=0.40.0,<0.42.0" typing-extensions = ">=4.8.0" [package.extras] -all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.5)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] +all = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] +standard = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "httpx (>=0.23.0)", "jinja2 (>=2.11.2)", "python-multipart (>=0.0.7)", "uvicorn[standard] (>=0.12.0)"] + +[[package]] +name = "fastapi-cache2" +version = "0.2.2" +description = "Cache for FastAPI" +optional = false +python-versions = "<4.0,>=3.8" +files = [ + {file = "fastapi_cache2-0.2.2-py3-none-any.whl", hash = "sha256:e1fae86d8eaaa6c8501dfe08407f71d69e87cc6748042d59d51994000532846c"}, + {file = "fastapi_cache2-0.2.2.tar.gz", hash = "sha256:71bf4450117dc24224ec120be489dbe09e331143c9f74e75eb6f576b78926026"}, +] + +[package.dependencies] +fastapi = "*" +pendulum = ">=3.0.0,<4.0.0" +typing-extensions = ">=4.1.0" +uvicorn = "*" + +[package.extras] +all = ["aiobotocore 
(>=2.13.1,<3.0.0)", "aiomcache (>=0.8.2,<0.9.0)", "redis (>=4.2.0rc1,<5.0.0)"] +dynamodb = ["aiobotocore (>=2.13.1,<3.0.0)"] +memcache = ["aiomcache (>=0.8.2,<0.9.0)"] +redis = ["redis (>=4.2.0rc1,<5.0.0)"] [[package]] name = "filelock" -version = "3.13.1" +version = "3.16.1" description = "A platform independent file lock." optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"}, - {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"}, + {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"}, + {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] -typing = ["typing-extensions (>=4.8)"] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"] +typing = ["typing-extensions (>=4.12.2)"] [[package]] name = "flake8" @@ -710,17 +998,17 @@ flake8 = ">=2" [[package]] name = "flake8-comprehensions" -version = "3.14.0" +version = "3.16.0" description = "A flake8 plugin to help you write better list/set/dict comprehensions." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "flake8_comprehensions-3.14.0-py3-none-any.whl", hash = "sha256:7b9d07d94aa88e62099a6d1931ddf16c344d4157deedf90fe0d8ee2846f30e97"}, - {file = "flake8_comprehensions-3.14.0.tar.gz", hash = "sha256:81768c61bfc064e1a06222df08a2580d97de10cb388694becaf987c331c6c0cf"}, + {file = "flake8_comprehensions-3.16.0-py3-none-any.whl", hash = "sha256:7c1eadc9d22e765f39857798febe7766b4d9c519793c6c149e3e13bf99693f70"}, + {file = "flake8_comprehensions-3.16.0.tar.gz", hash = "sha256:9cbf789905a8f03f9d350fb82b17b264d9a16c7ce3542b2a7b871ef568cafabe"}, ] [package.dependencies] -flake8 = ">=3.0,<3.2.0 || >3.2.0" +flake8 = ">=3,<3.2 || >3.2" [[package]] name = "flake8-debugger" @@ -802,16 +1090,17 @@ flake8 = "*" [[package]] name = "flake8-quotes" -version = "3.3.2" +version = "3.4.0" description = "Flake8 lint for quotes." 
optional = false python-versions = "*" files = [ - {file = "flake8-quotes-3.3.2.tar.gz", hash = "sha256:6e26892b632dacba517bf27219c459a8396dcfac0f5e8204904c5a4ba9b480e1"}, + {file = "flake8-quotes-3.4.0.tar.gz", hash = "sha256:aad8492fb710a2d3eabe68c5f86a1428de650c8484127e14c43d0504ba30276c"}, ] [package.dependencies] flake8 = "*" +setuptools = "*" [[package]] name = "flake8-rst-docstrings" @@ -843,105 +1132,90 @@ files = [ [package.dependencies] flake8 = "*" -[[package]] -name = "gitdb" -version = "4.0.11" -description = "Git Object Database" -optional = false -python-versions = ">=3.7" -files = [ - {file = "gitdb-4.0.11-py3-none-any.whl", hash = "sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4"}, - {file = "gitdb-4.0.11.tar.gz", hash = "sha256:bf5421126136d6d0af55bc1e7c1af1c397a34f5b7bd79e776cd3e89785c2b04b"}, -] - -[package.dependencies] -smmap = ">=3.0.1,<6" - -[[package]] -name = "gitpython" -version = "3.1.40" -description = "GitPython is a Python library used to interact with Git repositories" -optional = false -python-versions = ">=3.7" -files = [ - {file = "GitPython-3.1.40-py3-none-any.whl", hash = "sha256:cf14627d5a8049ffbf49915732e5eddbe8134c3bdb9d476e6182b676fc573f8a"}, - {file = "GitPython-3.1.40.tar.gz", hash = "sha256:22b126e9ffb671fdd0c129796343a02bf67bf2994b35449ffc9321aa755e18a4"}, -] - -[package.dependencies] -gitdb = ">=4.0.1,<5" - -[package.extras] -test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest", "pytest-cov", "pytest-instafail", "pytest-subtests", "pytest-sugar"] - [[package]] name = "greenlet" -version = "3.0.1" +version = "3.1.1" description = "Lightweight in-process concurrent programming" optional = false python-versions = ">=3.7" files = [ - {file = "greenlet-3.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f89e21afe925fcfa655965ca8ea10f24773a1791400989ff32f467badfe4a064"}, - {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28e89e232c7593d33cac35425b58950789962011cc274aa43ef8865f2e11f46d"}, - {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8ba29306c5de7717b5761b9ea74f9c72b9e2b834e24aa984da99cbfc70157fd"}, - {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19bbdf1cce0346ef7341705d71e2ecf6f41a35c311137f29b8a2dc2341374565"}, - {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:599daf06ea59bfedbec564b1692b0166a0045f32b6f0933b0dd4df59a854caf2"}, - {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b641161c302efbb860ae6b081f406839a8b7d5573f20a455539823802c655f63"}, - {file = "greenlet-3.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d57e20ba591727da0c230ab2c3f200ac9d6d333860d85348816e1dca4cc4792e"}, - {file = "greenlet-3.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5805e71e5b570d490938d55552f5a9e10f477c19400c38bf1d5190d760691846"}, - {file = "greenlet-3.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:52e93b28db27ae7d208748f45d2db8a7b6a380e0d703f099c949d0f0d80b70e9"}, - {file = "greenlet-3.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f7bfb769f7efa0eefcd039dd19d843a4fbfbac52f1878b1da2ed5793ec9b1a65"}, - {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91e6c7db42638dc45cf2e13c73be16bf83179f7859b07cfc139518941320be96"}, - {file = 
"greenlet-3.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1757936efea16e3f03db20efd0cd50a1c86b06734f9f7338a90c4ba85ec2ad5a"}, - {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19075157a10055759066854a973b3d1325d964d498a805bb68a1f9af4aaef8ec"}, - {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9d21aaa84557d64209af04ff48e0ad5e28c5cca67ce43444e939579d085da72"}, - {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2847e5d7beedb8d614186962c3d774d40d3374d580d2cbdab7f184580a39d234"}, - {file = "greenlet-3.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:97e7ac860d64e2dcba5c5944cfc8fa9ea185cd84061c623536154d5a89237884"}, - {file = "greenlet-3.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b2c02d2ad98116e914d4f3155ffc905fd0c025d901ead3f6ed07385e19122c94"}, - {file = "greenlet-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:22f79120a24aeeae2b4471c711dcf4f8c736a2bb2fabad2a67ac9a55ea72523c"}, - {file = "greenlet-3.0.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:100f78a29707ca1525ea47388cec8a049405147719f47ebf3895e7509c6446aa"}, - {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60d5772e8195f4e9ebf74046a9121bbb90090f6550f81d8956a05387ba139353"}, - {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:daa7197b43c707462f06d2c693ffdbb5991cbb8b80b5b984007de431493a319c"}, - {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea6b8aa9e08eea388c5f7a276fabb1d4b6b9d6e4ceb12cc477c3d352001768a9"}, - {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d11ebbd679e927593978aa44c10fc2092bc454b7d13fdc958d3e9d508aba7d0"}, - {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dbd4c177afb8a8d9ba348d925b0b67246147af806f0b104af4d24f144d461cd5"}, - {file = "greenlet-3.0.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20107edf7c2c3644c67c12205dc60b1bb11d26b2610b276f97d666110d1b511d"}, - {file = "greenlet-3.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8bef097455dea90ffe855286926ae02d8faa335ed8e4067326257cb571fc1445"}, - {file = "greenlet-3.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:b2d3337dcfaa99698aa2377c81c9ca72fcd89c07e7eb62ece3f23a3fe89b2ce4"}, - {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80ac992f25d10aaebe1ee15df45ca0d7571d0f70b645c08ec68733fb7a020206"}, - {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:337322096d92808f76ad26061a8f5fccb22b0809bea39212cd6c406f6a7060d2"}, - {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9934adbd0f6e476f0ecff3c94626529f344f57b38c9a541f87098710b18af0a"}, - {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc4d815b794fd8868c4d67602692c21bf5293a75e4b607bb92a11e821e2b859a"}, - {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41bdeeb552d814bcd7fb52172b304898a35818107cc8778b5101423c9017b3de"}, - {file = "greenlet-3.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6e6061bf1e9565c29002e3c601cf68569c450be7fc3f7336671af7ddb4657166"}, - {file = 
"greenlet-3.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:fa24255ae3c0ab67e613556375a4341af04a084bd58764731972bcbc8baeba36"}, - {file = "greenlet-3.0.1-cp37-cp37m-win32.whl", hash = "sha256:b489c36d1327868d207002391f662a1d163bdc8daf10ab2e5f6e41b9b96de3b1"}, - {file = "greenlet-3.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f33f3258aae89da191c6ebaa3bc517c6c4cbc9b9f689e5d8452f7aedbb913fa8"}, - {file = "greenlet-3.0.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:d2905ce1df400360463c772b55d8e2518d0e488a87cdea13dd2c71dcb2a1fa16"}, - {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a02d259510b3630f330c86557331a3b0e0c79dac3d166e449a39363beaae174"}, - {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55d62807f1c5a1682075c62436702aaba941daa316e9161e4b6ccebbbf38bda3"}, - {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3fcc780ae8edbb1d050d920ab44790201f027d59fdbd21362340a85c79066a74"}, - {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4eddd98afc726f8aee1948858aed9e6feeb1758889dfd869072d4465973f6bfd"}, - {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:eabe7090db68c981fca689299c2d116400b553f4b713266b130cfc9e2aa9c5a9"}, - {file = "greenlet-3.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f2f6d303f3dee132b322a14cd8765287b8f86cdc10d2cb6a6fae234ea488888e"}, - {file = "greenlet-3.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d923ff276f1c1f9680d32832f8d6c040fe9306cbfb5d161b0911e9634be9ef0a"}, - {file = "greenlet-3.0.1-cp38-cp38-win32.whl", hash = "sha256:0b6f9f8ca7093fd4433472fd99b5650f8a26dcd8ba410e14094c1e44cd3ceddd"}, - {file = "greenlet-3.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:990066bff27c4fcf3b69382b86f4c99b3652bab2a7e685d968cd4d0cfc6f67c6"}, - {file = "greenlet-3.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ce85c43ae54845272f6f9cd8320d034d7a946e9773c693b27d620edec825e376"}, - {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89ee2e967bd7ff85d84a2de09df10e021c9b38c7d91dead95b406ed6350c6997"}, - {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87c8ceb0cf8a5a51b8008b643844b7f4a8264a2c13fcbcd8a8316161725383fe"}, - {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d6a8c9d4f8692917a3dc7eb25a6fb337bff86909febe2f793ec1928cd97bedfc"}, - {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fbc5b8f3dfe24784cee8ce0be3da2d8a79e46a276593db6868382d9c50d97b1"}, - {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85d2b77e7c9382f004b41d9c72c85537fac834fb141b0296942d52bf03fe4a3d"}, - {file = "greenlet-3.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:696d8e7d82398e810f2b3622b24e87906763b6ebfd90e361e88eb85b0e554dc8"}, - {file = "greenlet-3.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:329c5a2e5a0ee942f2992c5e3ff40be03e75f745f48847f118a3cfece7a28546"}, - {file = "greenlet-3.0.1-cp39-cp39-win32.whl", hash = "sha256:cf868e08690cb89360eebc73ba4be7fb461cfbc6168dd88e2fbbe6f31812cd57"}, - {file = "greenlet-3.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:ac4a39d1abae48184d420aa8e5e63efd1b75c8444dd95daa3e03f6c6310e9619"}, - {file = "greenlet-3.0.1.tar.gz", hash = 
"sha256:816bd9488a94cba78d93e1abb58000e8266fa9cc2aa9ccdd6eb0696acb24005b"}, + {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6"}, + {file = "greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80"}, + {file = "greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395"}, + {file = "greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39"}, + {file = "greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942"}, + {file = "greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01"}, + {file = "greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c"}, + {file = "greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:47da355d8687fd65240c364c90a31569a133b7b60de111c255ef5b606f2ae291"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98884ecf2ffb7d7fe6bd517e8eb99d31ff7855a840fa6d0d63cd07c037f6a981"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1d4aeb8891338e60d1ab6127af1fe45def5259def8094b9c7e34690c8858803"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db32b5348615a04b82240cc67983cb315309e88d444a288934ee6ceaebcad6cc"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dcc62f31eae24de7f8dce72134c8651c58000d3b1868e01392baea7c32c247de"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1d3755bcb2e02de341c55b4fca7a745a24a9e7212ac953f6b3a48d117d7257aa"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b8da394b34370874b4572676f36acabac172602abf054cbc4ac910219f3340af"}, + {file = "greenlet-3.1.1-cp37-cp37m-win32.whl", hash = "sha256:a0dfc6c143b519113354e780a50381508139b07d2177cb6ad6a08278ec655798"}, + {file = "greenlet-3.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:54558ea205654b50c438029505def3834e80f0869a70fb15b871c29b4575ddef"}, + {file = "greenlet-3.1.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:346bed03fe47414091be4ad44786d1bd8bef0c3fcad6ed3dee074a032ab408a9"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfc59d69fc48664bc693842bd57acfdd490acafda1ab52c7836e3fc75c90a111"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21e10da6ec19b457b82636209cbe2331ff4306b54d06fa04b7c138ba18c8a81"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37b9de5a96111fc15418819ab4c4432e4f3c2ede61e660b1e33971eba26ef9ba"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef9ea3f137e5711f0dbe5f9263e8c009b7069d8a1acea822bd5e9dae0ae49c8"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85f3ff71e2e60bd4b4932a043fbbe0f499e263c628390b285cb599154a3b03b1"}, + {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:95ffcf719966dd7c453f908e208e14cde192e09fde6c7186c8f1896ef778d8cd"}, + {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:03a088b9de532cbfe2ba2034b2b85e82df37874681e8c470d6fb2f8c04d7e4b7"}, + {file = "greenlet-3.1.1-cp38-cp38-win32.whl", hash = "sha256:8b8b36671f10ba80e159378df9c4f15c14098c4fd73a36b9ad715f057272fbef"}, + {file = "greenlet-3.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:7017b2be767b9d43cc31416aba48aab0d2309ee31b4dbf10a1d38fb7972bdf9d"}, + {file = "greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437"}, + {file = 
"greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e"}, + {file = "greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c"}, + {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"}, + {file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"}, ] [package.extras] -docs = ["Sphinx"] +docs = ["Sphinx", "furo"] test = ["objgraph", "psutil"] [[package]] @@ -957,100 +1231,105 @@ files = [ [[package]] name = "hiredis" -version = "2.2.3" +version = "3.0.0" description = "Python wrapper for hiredis" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "hiredis-2.2.3-cp310-cp310-macosx_10_12_universal2.whl", hash = "sha256:9a1a80a8fa767f2fdc3870316a54b84fe9fc09fa6ab6a2686783de6a228a4604"}, - {file = "hiredis-2.2.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3f006c28c885deb99b670a5a66f367a175ab8955b0374029bad7111f5357dcd4"}, - {file = "hiredis-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ffaf841546905d90ff189de7397aa56413b1ce5e54547f17a98f0ebf3a3b0a3b"}, - {file = "hiredis-2.2.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cadb0ac7ba3babfd804e425946bec9717b320564a1390f163a54af9365a720a"}, - {file = "hiredis-2.2.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:33bc4721632ef9708fa44e5df0066053fccc8e65410a2c48573192517a533b48"}, - {file = "hiredis-2.2.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:227c5b4bcb60f89008c275d596e4a7b6625a6b3c827b8a66ae582eace7051f71"}, - {file = "hiredis-2.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61995eb826009d99ed8590747bc0da683a5f4fbb4faa8788166bf3810845cd5c"}, - {file = "hiredis-2.2.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f969edc851efe23010e0f53a64269f2629a9364135e9ec81c842e8b2277d0c1"}, - {file = "hiredis-2.2.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d27e560eefb57914d742a837f1da98d3b29cb22eff013c8023b7cf52ae6e051d"}, - {file = "hiredis-2.2.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3759f4789ae1913b7df278dfc9e8749205b7a106f888cd2903d19461e24a7697"}, - {file = "hiredis-2.2.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c6cb613148422c523945cdb8b6bed617856f2602fd8750e33773ede2616e55d5"}, - {file = "hiredis-2.2.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:1d274d5c511dfc03f83f997d3238eaa9b6ee3f982640979f509373cced891e98"}, - {file = "hiredis-2.2.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3b7fe075e91b9d9cff40eba4fb6a8eff74964d3979a39be9a9ef58b1b4cb3604"}, - {file = "hiredis-2.2.3-cp310-cp310-win32.whl", hash = "sha256:77924b0d32fd1f493d3df15d9609ddf9d94c31a364022a6bf6b525ce9da75bea"}, - {file = 
"hiredis-2.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:dcb0569dd5bfe6004658cd0f229efa699a3169dcb4f77bd72e188adda302063d"}, - {file = "hiredis-2.2.3-cp311-cp311-macosx_10_12_universal2.whl", hash = "sha256:d115790f18daa99b5c11a506e48923b630ef712e9e4b40482af942c3d40638b8"}, - {file = "hiredis-2.2.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c3b8be557e08b234774925622e196f0ee36fe4eab66cd19df934d3efd8f3743"}, - {file = "hiredis-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3f5446068197b35a11ccc697720c41879c8657e2e761aaa8311783aac84cef20"}, - {file = "hiredis-2.2.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa17a3b22b3726d54d7af20394f65d4a1735a842a4e0f557dc67a90f6965c4bc"}, - {file = "hiredis-2.2.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7df645b6b7800e8b748c217fbd6a4ca8361bcb9a1ae6206cc02377833ec8a1aa"}, - {file = "hiredis-2.2.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2fb9300959a0048138791f3d68359d61a788574ec9556bddf1fec07f2dbc5320"}, - {file = "hiredis-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d7e459fe7313925f395148d36d9b7f4f8dac65be06e45d7af356b187cef65fc"}, - {file = "hiredis-2.2.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8eceffca3941775b646cd585cd19b275d382de43cc3327d22f7c75d7b003d481"}, - {file = "hiredis-2.2.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b17baf702c6e5b4bb66e1281a3efbb1d749c9d06cdb92b665ad81e03118f78fc"}, - {file = "hiredis-2.2.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e43e2b5acaad09cf48c032f7e4926392bb3a3f01854416cf6d82ebff94d5467"}, - {file = "hiredis-2.2.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:a7205497d7276a81fe92951a29616ef96562ed2f91a02066f72b6f93cb34b40e"}, - {file = "hiredis-2.2.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:126623b03c31cb6ac3e0d138feb6fcc36dd43dd34fc7da7b7a0c38b5d75bc896"}, - {file = "hiredis-2.2.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:071c5814b850574036506a8118034f97c3cbf2fe9947ff45a27b07a48da56240"}, - {file = "hiredis-2.2.3-cp311-cp311-win32.whl", hash = "sha256:d1be9e30e675f5bc1cb534633324578f6f0944a1bcffe53242cf632f554f83b6"}, - {file = "hiredis-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:b9a7c987e161e3c58f992c63b7e26fea7fe0777f3b975799d23d65bbb8cb5899"}, - {file = "hiredis-2.2.3-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:f2dcb8389fa3d453927b1299f46bdb38473c293c8269d5c777d33ea0e526b610"}, - {file = "hiredis-2.2.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a2df98f5e071320c7d84e8bd07c0542acdd0a7519307fc31774d60e4b842ec4f"}, - {file = "hiredis-2.2.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:61a72e4a523cdfc521762137559c08dfa360a3caef63620be58c699d1717dac1"}, - {file = "hiredis-2.2.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c9b9e5bde7030cae83aa900b5bd660decc65afd2db8c400f3c568c815a47ca2a"}, - {file = "hiredis-2.2.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd2614f17e261f72efc2f19f5e5ff2ee19e2296570c0dcf33409e22be30710de"}, - {file = "hiredis-2.2.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:46525fbd84523cac75af5bf524bc74aaac848beaf31b142d2df8a787d9b4bbc4"}, - {file = "hiredis-2.2.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:d1a4ce40ba11da9382c14da31f4f9e88c18f7d294f523decd0fadfb81f51ad18"}, - {file = "hiredis-2.2.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:5cda592405bbd29d53942e0389dc3fa77b49c362640210d7e94a10c14a677d4d"}, - {file = "hiredis-2.2.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:5e6674a017629284ef373b50496d9fb1a89b85a20a7fa100ecd109484ec748e5"}, - {file = "hiredis-2.2.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:e62ec131816c6120eff40dffe43424e140264a15fa4ab88c301bd6a595913af3"}, - {file = "hiredis-2.2.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:17e938d9d3ee92e1adbff361706f1c36cc60eeb3e3eeca7a3a353eae344f4c91"}, - {file = "hiredis-2.2.3-cp37-cp37m-win32.whl", hash = "sha256:95d2305fd2a7b179cacb48b10f618872fc565c175f9f62b854e8d1acac3e8a9e"}, - {file = "hiredis-2.2.3-cp37-cp37m-win_amd64.whl", hash = "sha256:8f9dbe12f011a9b784f58faecc171d22465bb532c310bd588d769ba79a59ef5a"}, - {file = "hiredis-2.2.3-cp38-cp38-macosx_10_12_universal2.whl", hash = "sha256:5a4bcef114fc071d5f52c386c47f35aae0a5b43673197b9288a15b584da8fa3a"}, - {file = "hiredis-2.2.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:232d0a70519865741ba56e1dfefd160a580ae78c30a1517bad47b3cf95a3bc7d"}, - {file = "hiredis-2.2.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9076ce8429785c85f824650735791738de7143f61f43ae9ed83e163c0ca0fa44"}, - {file = "hiredis-2.2.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec58fb7c2062f835595c12f0f02dcda76d0eb0831423cc191d1e18c9276648de"}, - {file = "hiredis-2.2.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7f2b34a6444b8f9c1e9f84bd2c639388e5d14f128afd14a869dfb3d9af893aa2"}, - {file = "hiredis-2.2.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:818dfd310aa1020a13cd08ee48e116dd8c3bb2e23b8161f8ac4df587dd5093d7"}, - {file = "hiredis-2.2.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96d9ea6c8d4cbdeee2e0d43379ce2881e4af0454b00570677c59f33f2531cd38"}, - {file = "hiredis-2.2.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1eadbcd3de55ac42310ff82550d3302cb4efcd4e17d76646a17b6e7004bb42b"}, - {file = "hiredis-2.2.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:477c34c4489666dc73cb5e89dafe2617c3e13da1298917f73d55aac4696bd793"}, - {file = "hiredis-2.2.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:14824e457e4f5cda685c3345d125da13949bcf3bb1c88eb5d248c8d2c3dee08f"}, - {file = "hiredis-2.2.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:9cd32326dfa6ce87edf754153b0105aca64486bebe93b9600ccff74fa0b224df"}, - {file = "hiredis-2.2.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:51341e70b467004dcbec3a6ce8c478d2d6241e0f6b01e4c56764afd5022e1e9d"}, - {file = "hiredis-2.2.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2443659c76b226267e2a04dbbb21bc2a3f91aa53bdc0c22964632753ae43a247"}, - {file = "hiredis-2.2.3-cp38-cp38-win32.whl", hash = "sha256:4e3e3e31423f888d396b1fc1f936936e52af868ac1ec17dd15e3eeba9dd4de24"}, - {file = "hiredis-2.2.3-cp38-cp38-win_amd64.whl", hash = "sha256:20f509e3a1a20d6e5f5794fc37ceb21f70f409101fcfe7a8bde783894d51b369"}, - {file = "hiredis-2.2.3-cp39-cp39-macosx_10_12_universal2.whl", hash = "sha256:d20891e3f33803b26d54c77fd5745878497091e33f4bbbdd454cf6e71aee8890"}, - {file = "hiredis-2.2.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:50171f985e17970f87d5a29e16603d1e5b03bdbf5c2691a37e6c912942a6b657"}, - {file = 
"hiredis-2.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9944a2cac25ffe049a7e89f306e11b900640837d1ef38d9be0eaa4a4e2b73a52"}, - {file = "hiredis-2.2.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a5c8019ff94988d56eb49b15de76fe83f6b42536d76edeb6565dbf7fe14b973"}, - {file = "hiredis-2.2.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a286ded34eb16501002e3713b3130c987366eee2ba0d58c33c72f27778e31676"}, - {file = "hiredis-2.2.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b3e974ad15eb32b1f537730dea70b93a4c3db7b026de3ad2b59da49c6f7454d"}, - {file = "hiredis-2.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08415ea74c1c29b9d6a4ca3dd0e810dc1af343c1d1d442e15ba133b11ab5be6a"}, - {file = "hiredis-2.2.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e17d04ea58ab8cf3f2dc52e875db16077c6357846006780086fff3189fb199d"}, - {file = "hiredis-2.2.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6ccdcb635dae85b006592f78e32d97f4bc7541cb27829d505f9c7fefcef48298"}, - {file = "hiredis-2.2.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:69536b821dd1bc78058a6e7541743f8d82bf2d981b91280b14c4daa6cdc7faba"}, - {file = "hiredis-2.2.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:3753df5f873d473f055e1f8837bfad0bd3b277c86f3c9bf058c58f14204cd901"}, - {file = "hiredis-2.2.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6f88cafe46612b6fa68e6dea49e25bebf160598bba00101caa51cc8c1f18d597"}, - {file = "hiredis-2.2.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:33ee3ea5cad3a8cb339352cd230b411eb437a2e75d7736c4899acab32056ccdb"}, - {file = "hiredis-2.2.3-cp39-cp39-win32.whl", hash = "sha256:b4f3d06dc16671b88a13ae85d8ca92534c0b637d59e49f0558d040a691246422"}, - {file = "hiredis-2.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:4f674e309cd055ee7a48304ceb8cf43265d859faf4d7d01d270ce45e976ae9d3"}, - {file = "hiredis-2.2.3-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:8f280ab4e043b089777b43b4227bdc2035f88da5072ab36588e0ccf77d45d058"}, - {file = "hiredis-2.2.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15c2a551f3b8a26f7940d6ee10b837810201754b8d7e6f6b1391655370882c5a"}, - {file = "hiredis-2.2.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60c4e3c258eafaab21b174b17270a0cc093718d61cdbde8c03f85ec4bf835343"}, - {file = "hiredis-2.2.3-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc36a9dded458d4e37492fe3e619c6c83caae794d26ad925adbce61d592f8428"}, - {file = "hiredis-2.2.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:4ed68a3b1ccb4313d2a42546fd7e7439ad4745918a48b6c9bcaa61e1e3e42634"}, - {file = "hiredis-2.2.3-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3bf4b5bae472630c229518e4a814b1b68f10a3d9b00aeaec45f1a330f03a0251"}, - {file = "hiredis-2.2.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33a94d264e6e12a79d9bb8af333b01dc286b9f39c99072ab5fef94ce1f018e17"}, - {file = "hiredis-2.2.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3fa6811a618653164f918b891a0fa07052bd71a799defa5c44d167cac5557b26"}, - {file = "hiredis-2.2.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:af33f370be90b48bbaf0dab32decbdcc522b1fa95d109020a963282086518a8e"}, - {file = 
"hiredis-2.2.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b9953d87418ac228f508d93898ab572775e4d3b0eeb886a1a7734553bcdaf291"}, - {file = "hiredis-2.2.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5e7bb4dd524f50b71c20ef5a12bd61da9b463f8894b18a06130942fe31509881"}, - {file = "hiredis-2.2.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89a258424158eb8b3ed9f65548d68998da334ef155d09488c5637723eb1cd697"}, - {file = "hiredis-2.2.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f4a65276f6ecdebe75f2a53f578fbc40e8d2860658420d5e0611c56bbf5054c"}, - {file = "hiredis-2.2.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:334f2738700b20faa04a0d813366fb16ed17287430a6b50584161d5ad31ca6d7"}, - {file = "hiredis-2.2.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d194decd9608f11c777946f596f31d5aacad13972a0a87829ae1e6f2d26c1885"}, - {file = "hiredis-2.2.3.tar.gz", hash = "sha256:e75163773a309e56a9b58165cf5a50e0f84b755f6ff863b2c01a38918fe92daa"}, + {file = "hiredis-3.0.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:4b182791c41c5eb1d9ed736f0ff81694b06937ca14b0d4dadde5dadba7ff6dae"}, + {file = "hiredis-3.0.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:13c275b483a052dd645eb2cb60d6380f1f5215e4c22d6207e17b86be6dd87ffa"}, + {file = "hiredis-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c1018cc7f12824506f165027eabb302735b49e63af73eb4d5450c66c88f47026"}, + {file = "hiredis-3.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83a29cc7b21b746cb6a480189e49f49b2072812c445e66a9e38d2004d496b81c"}, + {file = "hiredis-3.0.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e241fab6332e8fb5f14af00a4a9c6aefa22f19a336c069b7ddbf28ef8341e8d6"}, + {file = "hiredis-3.0.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1fb8de899f0145d6c4d5d4bd0ee88a78eb980a7ffabd51e9889251b8f58f1785"}, + {file = "hiredis-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b23291951959141173eec10f8573538e9349fa27f47a0c34323d1970bf891ee5"}, + {file = "hiredis-3.0.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e421ac9e4b5efc11705a0d5149e641d4defdc07077f748667f359e60dc904420"}, + {file = "hiredis-3.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:77c8006c12154c37691b24ff293c077300c22944018c3ff70094a33e10c1d795"}, + {file = "hiredis-3.0.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:41afc0d3c18b59eb50970479a9c0e5544fb4b95e3a79cf2fbaece6ddefb926fe"}, + {file = "hiredis-3.0.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:04ccae6dcd9647eae6025425ab64edb4d79fde8b9e6e115ebfabc6830170e3b2"}, + {file = "hiredis-3.0.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:fe91d62b0594db5ea7d23fc2192182b1a7b6973f628a9b8b2e0a42a2be721ac6"}, + {file = "hiredis-3.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:99516d99316062824a24d145d694f5b0d030c80da693ea6f8c4ecf71a251d8bb"}, + {file = "hiredis-3.0.0-cp310-cp310-win32.whl", hash = "sha256:562eaf820de045eb487afaa37e6293fe7eceb5b25e158b5a1974b7e40bf04543"}, + {file = "hiredis-3.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:a1c81c89ed765198da27412aa21478f30d54ef69bf5e4480089d9c3f77b8f882"}, + {file = "hiredis-3.0.0-cp311-cp311-macosx_10_15_universal2.whl", hash = 
"sha256:4664dedcd5933364756d7251a7ea86d60246ccf73a2e00912872dacbfcef8978"}, + {file = "hiredis-3.0.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:47de0bbccf4c8a9f99d82d225f7672b9dd690d8fd872007b933ef51a302c9fa6"}, + {file = "hiredis-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e43679eca508ba8240d016d8cca9d27342d70184773c15bea78a23c87a1922f1"}, + {file = "hiredis-3.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13c345e7278c210317e77e1934b27b61394fee0dec2e8bd47e71570900f75823"}, + {file = "hiredis-3.0.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00018f22f38530768b73ea86c11f47e8d4df65facd4e562bd78773bd1baef35e"}, + {file = "hiredis-3.0.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ea3a86405baa8eb0d3639ced6926ad03e07113de54cb00fd7510cb0db76a89d"}, + {file = "hiredis-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c073848d2b1d5561f3903879ccf4e1a70c9b1e7566c7bdcc98d082fa3e7f0a1d"}, + {file = "hiredis-3.0.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a8dffb5f5b3415a4669d25de48b617fd9d44b0bccfc4c2ab24b06406ecc9ecb"}, + {file = "hiredis-3.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:22c17c96143c2a62dfd61b13803bc5de2ac526b8768d2141c018b965d0333b66"}, + {file = "hiredis-3.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:c3ece960008dab66c6b8bb3a1350764677ee7c74ccd6270aaf1b1caf9ccebb46"}, + {file = "hiredis-3.0.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f75999ae00a920f7dce6ecae76fa5e8674a3110e5a75f12c7a2c75ae1af53396"}, + {file = "hiredis-3.0.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e069967cbd5e1900aafc4b5943888f6d34937fc59bf8918a1a546cb729b4b1e4"}, + {file = "hiredis-3.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0aacc0a78e1d94d843a6d191f224a35893e6bdfeb77a4a89264155015c65f126"}, + {file = "hiredis-3.0.0-cp311-cp311-win32.whl", hash = "sha256:719c32147ba29528cb451f037bf837dcdda4ff3ddb6cdb12c4216b0973174718"}, + {file = "hiredis-3.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:bdc144d56333c52c853c31b4e2e52cfbdb22d3da4374c00f5f3d67c42158970f"}, + {file = "hiredis-3.0.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:484025d2eb8f6348f7876fc5a2ee742f568915039fcb31b478fd5c242bb0fe3a"}, + {file = "hiredis-3.0.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:fcdb552ffd97151dab8e7bc3ab556dfa1512556b48a367db94b5c20253a35ee1"}, + {file = "hiredis-3.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0bb6f9fd92f147ba11d338ef5c68af4fd2908739c09e51f186e1d90958c68cc1"}, + {file = "hiredis-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa86bf9a0ed339ec9e8a9a9d0ae4dccd8671625c83f9f9f2640729b15e07fbfd"}, + {file = "hiredis-3.0.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e194a0d5df9456995d8f510eab9f529213e7326af6b94770abf8f8b7952ddcaa"}, + {file = "hiredis-3.0.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8a1df39d74ec507d79c7a82c8063eee60bf80537cdeee652f576059b9cdd15c"}, + {file = "hiredis-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f91456507427ba36fd81b2ca11053a8e112c775325acc74e993201ea912d63e9"}, + {file = "hiredis-3.0.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:9862db92ef67a8a02e0d5370f07d380e14577ecb281b79720e0d7a89aedb9ee5"}, + {file = "hiredis-3.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d10fcd9e0eeab835f492832b2a6edb5940e2f1230155f33006a8dfd3bd2c94e4"}, + {file = "hiredis-3.0.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:48727d7d405d03977d01885f317328dc21d639096308de126c2c4e9950cbd3c9"}, + {file = "hiredis-3.0.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8e0bb6102ebe2efecf8a3292c6660a0e6fac98176af6de67f020bea1c2343717"}, + {file = "hiredis-3.0.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:df274e3abb4df40f4c7274dd3e587dfbb25691826c948bc98d5fead019dfb001"}, + {file = "hiredis-3.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:034925b5fb514f7b11aac38cd55b3fd7e9d3af23bd6497f3f20aa5b8ba58e232"}, + {file = "hiredis-3.0.0-cp312-cp312-win32.whl", hash = "sha256:120f2dda469b28d12ccff7c2230225162e174657b49cf4cd119db525414ae281"}, + {file = "hiredis-3.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:e584fe5f4e6681d8762982be055f1534e0170f6308a7a90f58d737bab12ff6a8"}, + {file = "hiredis-3.0.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:122171ff47d96ed8dd4bba6c0e41d8afaba3e8194949f7720431a62aa29d8895"}, + {file = "hiredis-3.0.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:ba9fc605ac558f0de67463fb588722878641e6fa1dabcda979e8e69ff581d0bd"}, + {file = "hiredis-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a631e2990b8be23178f655cae8ac6c7422af478c420dd54e25f2e26c29e766f1"}, + {file = "hiredis-3.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63482db3fadebadc1d01ad33afa6045ebe2ea528eb77ccaabd33ee7d9c2bad48"}, + {file = "hiredis-3.0.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f669212c390eebfbe03c4e20181f5970b82c5d0a0ad1df1785f7ffbe7d61150"}, + {file = "hiredis-3.0.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a49ef161739f8018c69b371528bdb47d7342edfdee9ddc75a4d8caddf45a6e"}, + {file = "hiredis-3.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98a152052b8878e5e43a2e3a14075218adafc759547c98668a21e9485882696c"}, + {file = "hiredis-3.0.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50a196af0ce657fcde9bf8a0bbe1032e22c64d8fcec2bc926a35e7ff68b3a166"}, + {file = "hiredis-3.0.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:f2f312eef8aafc2255e3585dcf94d5da116c43ef837db91db9ecdc1bc930072d"}, + {file = "hiredis-3.0.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:6ca41fa40fa019cde42c21add74aadd775e71458051a15a352eabeb12eb4d084"}, + {file = "hiredis-3.0.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:6eecb343c70629f5af55a8b3e53264e44fa04e155ef7989de13668a0cb102a90"}, + {file = "hiredis-3.0.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:c3fdad75e7837a475900a1d3a5cc09aa024293c3b0605155da2d42f41bc0e482"}, + {file = "hiredis-3.0.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:8854969e7480e8d61ed7549eb232d95082a743e94138d98d7222ba4e9f7ecacd"}, + {file = "hiredis-3.0.0-cp38-cp38-win32.whl", hash = "sha256:f114a6c86edbf17554672b050cce72abf489fe58d583c7921904d5f1c9691605"}, + {file = "hiredis-3.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:7d99b91e42217d7b4b63354b15b41ce960e27d216783e04c4a350224d55842a4"}, + {file = "hiredis-3.0.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:4c6efcbb5687cf8d2aedcc2c3ed4ac6feae90b8547427d417111194873b66b06"}, + {file = 
"hiredis-3.0.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:5b5cff42a522a0d81c2ae7eae5e56d0ee7365e0c4ad50c4de467d8957aff4414"}, + {file = "hiredis-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:82f794d564f4bc76b80c50b03267fe5d6589e93f08e66b7a2f674faa2fa76ebc"}, + {file = "hiredis-3.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7a4c1791d7aa7e192f60fe028ae409f18ccdd540f8b1e6aeb0df7816c77e4a4"}, + {file = "hiredis-3.0.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2537b2cd98192323fce4244c8edbf11f3cac548a9d633dbbb12b48702f379f4"}, + {file = "hiredis-3.0.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8fed69bbaa307040c62195a269f82fc3edf46b510a17abb6b30a15d7dab548df"}, + {file = "hiredis-3.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:869f6d5537d243080f44253491bb30aa1ec3c21754003b3bddeadedeb65842b0"}, + {file = "hiredis-3.0.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d435ae89073d7cd51e6b6bf78369c412216261c9c01662e7008ff00978153729"}, + {file = "hiredis-3.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:204b79b30a0e6be0dc2301a4d385bb61472809f09c49f400497f1cdd5a165c66"}, + {file = "hiredis-3.0.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3ea635101b739c12effd189cc19b2671c268abb03013fd1f6321ca29df3ca625"}, + {file = "hiredis-3.0.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:f359175197fd833c8dd7a8c288f1516be45415bb5c939862ab60c2918e1e1943"}, + {file = "hiredis-3.0.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ac6d929cb33dd12ad3424b75725975f0a54b5b12dbff95f2a2d660c510aa106d"}, + {file = "hiredis-3.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:100431e04d25a522ef2c3b94f294c4219c4de3bfc7d557b6253296145a144c11"}, + {file = "hiredis-3.0.0-cp39-cp39-win32.whl", hash = "sha256:e1a9c14ae9573d172dc050a6f63a644457df5d01ec4d35a6a0f097f812930f83"}, + {file = "hiredis-3.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:54a6dd7b478e6eb01ce15b3bb5bf771e108c6c148315bf194eb2ab776a3cac4d"}, + {file = "hiredis-3.0.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:50da7a9edf371441dfcc56288d790985ee9840d982750580710a9789b8f4a290"}, + {file = "hiredis-3.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9b285ef6bf1581310b0d5e8f6ce64f790a1c40e89c660e1320b35f7515433672"}, + {file = "hiredis-3.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0dcfa684966f25b335072115de2f920228a3c2caf79d4bfa2b30f6e4f674a948"}, + {file = "hiredis-3.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a41be8af1fd78ca97bc948d789a09b730d1e7587d07ca53af05758f31f4b985d"}, + {file = "hiredis-3.0.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:038756db735e417ab36ee6fd7725ce412385ed2bd0767e8179a4755ea11b804f"}, + {file = "hiredis-3.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:fcecbd39bd42cef905c0b51c9689c39d0cc8b88b1671e7f40d4fb213423aef3a"}, + {file = "hiredis-3.0.0-pp38-pypy38_pp73-macosx_10_15_x86_64.whl", hash = "sha256:a131377493a59fb0f5eaeb2afd49c6540cafcfba5b0b3752bed707be9e7c4eaf"}, + {file = "hiredis-3.0.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:3d22c53f0ec5c18ecb3d92aa9420563b1c5d657d53f01356114978107b00b860"}, + {file = "hiredis-3.0.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:c8a91e9520fbc65a799943e5c970ffbcd67905744d8becf2e75f9f0a5e8414f0"}, + {file = "hiredis-3.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3dc8043959b50141df58ab4f398e8ae84c6f9e673a2c9407be65fc789138f4a6"}, + {file = "hiredis-3.0.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:51b99cfac514173d7b8abdfe10338193e8a0eccdfe1870b646009d2fb7cbe4b5"}, + {file = "hiredis-3.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:fa1fcad89d8a41d8dc10b1e54951ec1e161deabd84ed5a2c95c3c7213bdb3514"}, + {file = "hiredis-3.0.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:898636a06d9bf575d2c594129085ad6b713414038276a4bfc5db7646b8a5be78"}, + {file = "hiredis-3.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:466f836dbcf86de3f9692097a7a01533dc9926986022c6617dc364a402b265c5"}, + {file = "hiredis-3.0.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23142a8af92a13fc1e3f2ca1d940df3dcf2af1d176be41fe8d89e30a837a0b60"}, + {file = "hiredis-3.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:793c80a3d6b0b0e8196a2d5de37a08330125668c8012922685e17aa9108c33ac"}, + {file = "hiredis-3.0.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:467d28112c7faa29b7db743f40803d927c8591e9da02b6ce3d5fadc170a542a2"}, + {file = "hiredis-3.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:dc384874a719c767b50a30750f937af18842ee5e288afba95a5a3ed703b1515a"}, + {file = "hiredis-3.0.0.tar.gz", hash = "sha256:fed8581ae26345dea1f1e0d1a96e05041a727a45e7d8d459164583e23c6ac441"}, ] [[package]] @@ -1152,13 +1431,13 @@ socks = ["socksio (==1.*)"] [[package]] name = "identify" -version = "2.5.33" +version = "2.6.2" description = "File identification library for Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "identify-2.5.33-py2.py3-none-any.whl", hash = "sha256:d40ce5fcd762817627670da8a7d8d8e65f24342d14539c59488dc603bf662e34"}, - {file = "identify-2.5.33.tar.gz", hash = "sha256:161558f9fe4559e1557e1bff323e8631f6a0e4837f7497767c1782832f16b62d"}, + {file = "identify-2.6.2-py2.py3-none-any.whl", hash = "sha256:c097384259f49e372f4ea00a19719d95ae27dd5ff0fd77ad630aa891306b82f3"}, + {file = "identify-2.6.2.tar.gz", hash = "sha256:fab5c716c24d7a789775228823797296a2994b075fb6080ac83a102772a98cbd"}, ] [package.extras] @@ -1166,15 +1445,18 @@ license = ["ukkonen"] [[package]] name = "idna" -version = "3.6" +version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" files = [ - {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, - {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, ] +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + [[package]] name = "iniconfig" version = "2.0.0" @@ -1188,30 +1470,206 @@ files = [ [[package]] name = "isort" -version = "5.12.0" +version = "5.13.2" description = "A Python utility / library to sort Python imports." 
optional = false python-versions = ">=3.8.0" files = [ - {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, - {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, +] + +[package.extras] +colors = ["colorama (>=0.4.6)"] + +[[package]] +name = "jinja2" +version = "3.1.4" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, + {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, ] +[package.dependencies] +MarkupSafe = ">=2.0" + [package.extras] -colors = ["colorama (>=0.4.3)"] -pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] -plugins = ["setuptools"] -requirements-deprecated-finder = ["pip-api", "pipreqs"] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jmespath" +version = "1.0.1" +description = "JSON Matching Expressions" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, + {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, +] + +[[package]] +name = "libcst" +version = "1.5.1" +description = "A concrete syntax tree with AST-like properties for Python 3.0 through 3.13 programs." 
+optional = false +python-versions = ">=3.9" +files = [ + {file = "libcst-1.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ab83633e61ee91df575a3838b1e73c371f19d4916bf1816554933235553d41ea"}, + {file = "libcst-1.5.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b58a49895d95ec1fd34fad041a142d98edf9b51fcaf632337c13befeb4d51c7c"}, + {file = "libcst-1.5.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d9ec764aa781ef35ab96b693569ac3dced16df9feb40ee6c274d13e86a1472e"}, + {file = "libcst-1.5.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99bbffd8596d192bc0e844a4cf3c4fc696979d4e20ab1c0774a01768a59b47ed"}, + {file = "libcst-1.5.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec6ee607cfe4cc4cc93e56e0188fdb9e50399d61a1262d58229752946f288f5e"}, + {file = "libcst-1.5.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:72132756f985a19ef64d702a821099d4afc3544974662772b44cbc55b7279727"}, + {file = "libcst-1.5.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:40b75bf2d70fc0bc26b1fa73e61bdc46fef59f5c71aedf16128e7c33db8d5e40"}, + {file = "libcst-1.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:56c944acaa781b8e586df3019374f5cf117054d7fc98f85be1ba84fe810005dc"}, + {file = "libcst-1.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:db7711a762b0327b581be5a963908fecd74412bdda34db34553faa521563c22d"}, + {file = "libcst-1.5.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:aa524bd012aaae1f485fd44490ef5abf708b14d2addc0f06b28de3e4585c4b9e"}, + {file = "libcst-1.5.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3ffb8135c09e41e8cf710b152c33e9b7f1d0d0b9f242bae0c502eb082fdb1fb"}, + {file = "libcst-1.5.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76a8ac7a84f9b6f678a668bff85b360e0a93fa8d7f25a74a206a28110734bb2a"}, + {file = "libcst-1.5.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:89c808bdb5fa9ca02df41dd234cbb0e9de0d2e0c029c7063d5435a9f6781cc10"}, + {file = "libcst-1.5.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40fbbaa8b839bfbfa5b300623ca2b6b0768b58bbc31b341afbc99110c9bee232"}, + {file = "libcst-1.5.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c7021e3904d8d088c369afc3fe17c279883e583415ef07edacadba76cfbecd27"}, + {file = "libcst-1.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:f053a5deb6a214972dbe9fa26ecd8255edb903de084a3d7715bf9e9da8821c50"}, + {file = "libcst-1.5.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:666813950b8637af0c0e96b1ca46f5d5f183d2fe50bbac2186f5b283a99f3529"}, + {file = "libcst-1.5.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b7b58b36022ae77a5a00002854043ae95c03e92f6062ad08473eff326f32efa0"}, + {file = "libcst-1.5.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eeb13d7c598fe9a798a1d22eae56ab3d3d599b38b83436039bd6ae229fc854d7"}, + {file = "libcst-1.5.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5987daff8389b0df60b5c20499ff4fb73fc03cb3ae1f6a746eefd204ed08df85"}, + {file = "libcst-1.5.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00f3d2f32ee081bad3394546b0b9ac5e31686d3b5cfe4892d716d2ba65f9ec08"}, + {file = "libcst-1.5.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1ff21005c33b634957a98db438e882522febf1cacc62fa716f29e163a3f5871a"}, + {file = 
"libcst-1.5.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:15697ea9f1edbb9a263364d966c72abda07195d1c1a6838eb79af057f1040770"}, + {file = "libcst-1.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:cedd4c8336e01c51913113fbf5566b8f61a86d90f3d5cc5b1cb5049575622c5f"}, + {file = "libcst-1.5.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:06a9b4c9b76da4a7399e6f1f3a325196fb5febd3ea59fac1f68e2116f3517cd8"}, + {file = "libcst-1.5.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:940ec4c8db4c2d620a7268d6c83e64ff646e4afd74ae5183d0f0ef3b80e05be0"}, + {file = "libcst-1.5.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fbccb016b1ac6d892344300dcccc8a16887b71bb7f875ba56c0ed6c1a7ade8be"}, + {file = "libcst-1.5.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c615af2117320e9a218083c83ec61227d3547e38a0de80329376971765f27a9e"}, + {file = "libcst-1.5.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:02b38fa4d9f13e79fe69e9b5407b9e173557bcfb5960f7866cf4145af9c7ae09"}, + {file = "libcst-1.5.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3334afe9e7270e175de01198f816b0dc78dda94d9d72152b61851c323e4e741e"}, + {file = "libcst-1.5.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:26c804fa8091747128579013df0b5f8e6b0c7904d9c4ee83841f136f53e18684"}, + {file = "libcst-1.5.1-cp313-cp313-win_amd64.whl", hash = "sha256:b5a0d3c632aa2b21c5fa145e4e8dbf86f45c9b37a64c0b7221a5a45caf58915a"}, + {file = "libcst-1.5.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1cc7393aaac733e963f0ee00466d059db74a38e15fc7e6a46dddd128c5be8d08"}, + {file = "libcst-1.5.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:bbaf5755be50fa9b35a3d553d1e62293fbb2ee5ce2c16c7e7ffeb2746af1ab88"}, + {file = "libcst-1.5.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e397f5b6c0fc271acea44579f154b0f3ab36011050f6db75ab00cef47441946"}, + {file = "libcst-1.5.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1947790a4fd7d96bcc200a6ecaa528045fcb26a34a24030d5859c7983662289e"}, + {file = "libcst-1.5.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:697eabe9f5ffc40f76d6d02e693274e0a382826d0cf8183bd44e7407dfb0ab90"}, + {file = "libcst-1.5.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dc06b7c60d086ef1832aebfd31b64c3c8a645adf0c5638d6243e5838f6a9356e"}, + {file = "libcst-1.5.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:19e39cfef4316599ca20d1c821490aeb783b52e8a8543a824972a525322a85d0"}, + {file = "libcst-1.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:01e01c04f0641188160d3b99c6526436e93a3fbf9783dba970f9885a77ec9b38"}, + {file = "libcst-1.5.1.tar.gz", hash = "sha256:71cb294db84df9e410208009c732628e920111683c2f2b2e0c5b71b98464f365"}, +] + +[package.dependencies] +pyyaml = ">=5.2" + +[package.extras] +dev = ["Sphinx (>=5.1.1)", "black (==24.8.0)", "build (>=0.10.0)", "coverage[toml] (>=4.5.4)", "fixit (==2.1.0)", "flake8 (==7.1.1)", "hypothesis (>=4.36.0)", "hypothesmith (>=0.0.4)", "jinja2 (==3.1.4)", "jupyter (>=1.0.0)", "maturin (>=1.7.0,<1.8)", "nbsphinx (>=0.4.2)", "prompt-toolkit (>=2.0.9)", "pyre-check (==0.9.18)", "setuptools-rust (>=1.5.2)", "setuptools-scm (>=6.0.1)", "slotscheck (>=0.7.1)", "sphinx-rtd-theme (>=0.4.3)", "ufmt (==2.7.3)", "usort (==1.0.8.post1)"] + +[[package]] +name = "lupa" +version = "2.2" +description = "Python wrapper around Lua and LuaJIT" +optional = false 
+python-versions = "*" +files = [ + {file = "lupa-2.2-cp27-cp27m-macosx_11_0_x86_64.whl", hash = "sha256:4bb05e3fc8f794b4a1b8a38229c3b4ae47f83cfbe7f6b172032f66d3308a0934"}, + {file = "lupa-2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:13062395e716cebe25dfc6dc3738a9eb514bb052b52af25cf502c1fd74affd21"}, + {file = "lupa-2.2-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:e673443dd7f7f0510bb9f4b0dc6bad6932d271b0afdbdc492fa71e9b9eab638d"}, + {file = "lupa-2.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:3b47702b94e9e391052118cbde253f69a0af96ec776f48af74e72f30d740ccc9"}, + {file = "lupa-2.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2242884a5078cd2507f15a162b5faf6f39a1f27654a1cc7db09cdb65b0b599b3"}, + {file = "lupa-2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8555526f03bb41d5aef16d105e8f51da1000d833e90d846448cf745ca6cd72e8"}, + {file = "lupa-2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a50807c6cc11d3ecf568d964be6708e26d4669d435c76fcb568a98d1dd6e8ae9"}, + {file = "lupa-2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c140dd19614e43b76b84295945878cea3cdf7ed34e133b1a8c0e3fa7efc9c6ac"}, + {file = "lupa-2.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c725c1832b0c6095583a6a57273e6f33a6b55230f90bcacdf06934ce21ef04e9"}, + {file = "lupa-2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:18a302810735da688d21e8397c696e68b89dbe3c45a3fdc3406f5c0e55887467"}, + {file = "lupa-2.2-cp310-cp310-win32.whl", hash = "sha256:a4f03aa308d949a3f2e4e755ffc6a698d3ea02fccd34014fab496efb99b3d4f4"}, + {file = "lupa-2.2-cp310-cp310-win_amd64.whl", hash = "sha256:8494802f789174cd26176e6b408e60e468cda348d4f767562d06991604813f61"}, + {file = "lupa-2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:95ee903ab71c3e6498bcd3bca60938a961c84fae47cdf23389a48c73e15dbad2"}, + {file = "lupa-2.2-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:011dbc81a790693b5457a0d761b032a8acdcc2945e32ca6ef34a7698bda0b09a"}, + {file = "lupa-2.2-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:8c89d8e99f684dfedccbf2f0dbdcc28deb73c4ff0545452f43ec02330dacfe0c"}, + {file = "lupa-2.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:26c3edea3ce6465364af6cc1c134b7f23a3ff919e5e499720acbff01b14b9931"}, + {file = "lupa-2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cd6afa3f6c998ac55f90b0665266c19100387de55d25af25ef4a35197d29d52"}, + {file = "lupa-2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b79bef7f48696bf70eff165afa49778470607dce6420b497eb82cfae1af6947"}, + {file = "lupa-2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:08e2bfa98725f7495cef30d42d87fff82795b9b9e76b740521828784b778ade7"}, + {file = "lupa-2.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0318ceb4d1782776bae7495a3bd3d50e57f80115ecbeff1e95d87a4e9411acf2"}, + {file = "lupa-2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9180dc7ee5c580cee41d9afac0b7c738cf7f6badf4a1398a6e1921dff155619c"}, + {file = "lupa-2.2-cp311-cp311-win32.whl", hash = "sha256:82077fe962c6e9ae1652e826f58e6250d1daa13c446ba1f4d6b68f16df65db0b"}, + {file = "lupa-2.2-cp311-cp311-win_amd64.whl", hash = "sha256:e2d2b9a6a4ef109b75668e26204f122196f33907ce3ccc80322ca70f84f81598"}, + {file = "lupa-2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:8cd872e16e736a3ecb800e70b4f36a66c794b7d339247712244a515561da4ff5"}, + {file = "lupa-2.2-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:6e8027ad53daa511e4a049eb0eb9f71b46fd2c5be6897fc68d75288b04086d4d"}, + {file = "lupa-2.2-cp312-cp312-macosx_11_0_x86_64.whl", hash = "sha256:0a7bd2841fd41b718d415162ec53b7d00079c27b1c5c1a2f2d0fb8080dd64d73"}, + {file = "lupa-2.2-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:63eff3aa68791b5c9a400f89f18018f4f63b8619adaa603fcd09392b87ca6b9b"}, + {file = "lupa-2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ab43356bb269ca4f03d25200b7559581cd791fbc631104c3e7d186d3c37221f"}, + {file = "lupa-2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:556779c0c28a2948749817ffd62dec882c834a6445aeff5d31ae862e14eebb21"}, + {file = "lupa-2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:42fd611a099ab1804a8d23154d4c7b2221557c94d34f8964da0dc03760f15d3d"}, + {file = "lupa-2.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:63d5ae8ccbafe0aa0034da32f18fc692963df1b5e1ebf91e76f504de1d5aecff"}, + {file = "lupa-2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3d3d9e5991861d8ee28709d94e673b89bdea10188b34a155835ba2dbbc7d26a7"}, + {file = "lupa-2.2-cp312-cp312-win32.whl", hash = "sha256:58a3621579b26ad5a524c1c41623ec551160653e915cf4aa41453f4339821b89"}, + {file = "lupa-2.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e8ff117eca26f5cedcd2b2467cf56d0c64cfcb804b5083a36d818b57edc4036"}, + {file = "lupa-2.2-cp36-cp36m-macosx_11_0_x86_64.whl", hash = "sha256:afe2b90c65f61f7d5ad55cdbfbb89cb50e5ab4d6184ea975befc51ffdc20dc8f"}, + {file = "lupa-2.2-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c597ea2dc203767dcb5a853cf885a7238b0639f5b7cb5c6ad5dbe5d2b39e25c6"}, + {file = "lupa-2.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8149dcbe9953e8cad991949dec41bf6dbaa8a2d613e4b024f98e510b0aab4fa4"}, + {file = "lupa-2.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92e1c6a1f380bc829618d0e95c15612b6e2604baa8ffd42547451e9d842837ae"}, + {file = "lupa-2.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:56be246cf7126f980c13b79a03ad43361dee5a65f8be8c4e2feb58a2bdcc5a2a"}, + {file = "lupa-2.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:da3460b920d4520ae8a3927b92c22402592fe2e31f08492c3c0ba9b8eadee302"}, + {file = "lupa-2.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:211d3371d9836d87b2097f520492241cd5e06b29ca8777739c4fe30a1df4c76c"}, + {file = "lupa-2.2-cp36-cp36m-win32.whl", hash = "sha256:617fc3532f224619e15d45adb9c9af8f4690e36cad332d68d49e78463e51d528"}, + {file = "lupa-2.2-cp36-cp36m-win_amd64.whl", hash = "sha256:50b2f0f8bfcacd68c9ae0a2872ff4b90c2df0490f193253c922283a295f23b6a"}, + {file = "lupa-2.2-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:f3de07b7f19296a702c8710f44b221aefe6563461e209198862cd1f06401b13d"}, + {file = "lupa-2.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eed6529c89ea475cbc403ed6e8670f1adf9eb2eb34b7610690d9827d35759a3c"}, + {file = "lupa-2.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cc171b352c187a012bbc5c20692236843e8c123c60569be872cb72bb7edcbd4"}, + {file = "lupa-2.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:704ed8f5a91133a8d62cba2d6fe4f2e43c7ee6f3998484d31abcfc4a57bedd1e"}, + {file = "lupa-2.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d2aa0fba09a045f5bcc638ede0f614fcd36339da58b7415a1e66e3590781a4a5"}, + {file = "lupa-2.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b2b911d3890fa93ae3f83c5d806008c3b551941813b39e7605def137a9b9b064"}, + {file = "lupa-2.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:00bcae88a2123f0cfd34f7206cc2d88008d905ebc065d41797827d046404b09e"}, + {file = "lupa-2.2-cp37-cp37m-win32.whl", hash = "sha256:225bbe9e58881bb92f96c6b43587168ed329b2b37c3236a9883efa681aec9f5a"}, + {file = "lupa-2.2-cp37-cp37m-win_amd64.whl", hash = "sha256:57662d9653e157872caeaa622d966aa1da7bb8fe8646b63fb1194a3cdb98c417"}, + {file = "lupa-2.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cc728fbe6d4e668ad8bec979ef86675387ca640e319ec029e0fc8f2bc9c3d224"}, + {file = "lupa-2.2-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:33a2beebe078e13770eff5d12a22d98a425fff89f87af2155c32769adc0114f1"}, + {file = "lupa-2.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd1e95d8a399ff379d09358490171965aaa25007ed06488b972df08f1b3df509"}, + {file = "lupa-2.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a63d1bc6a473813c707cf5badbfba081bf7cfbd761d58e1812c9a65a477146f9"}, + {file = "lupa-2.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df6e1bdd13f6fbdab2212bf08c24c232653832673c21c10ba576f89770e58686"}, + {file = "lupa-2.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:26f2617544e4b8cf2a4c1873e6f4feb7e547f4c06bfd088a24547d37f68a3945"}, + {file = "lupa-2.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:189856225402eab6dc467b77190c5beddc5c004a9cdc5855e7517206f3b380ca"}, + {file = "lupa-2.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2563d55538ebecab1d8768c77e1972f7768440b8e41aff4466352b942aa50dd1"}, + {file = "lupa-2.2-cp38-cp38-win32.whl", hash = "sha256:6c7e418bd39b9e2717654ed52ea55b681247d95139da958603e0766ed138b190"}, + {file = "lupa-2.2-cp38-cp38-win_amd64.whl", hash = "sha256:3facbd310fc73d3bcdb8cb363df80524ee52ac25b7566d0f0fb8b300b04c3bdb"}, + {file = "lupa-2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cda04e655af89824a92b4ca168524e0f526b78da5f39f66103cc3b6a924ef60c"}, + {file = "lupa-2.2-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:c49d1962478fa6a94b468e0dd6f725034ee690f41ae03217ff4672f370a7a099"}, + {file = "lupa-2.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:6bddf06f4f4b2257701e12690c5e951eb6a02b88633b7a43cc160172ff3a88b5"}, + {file = "lupa-2.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10c3bb414fc3a4ba9ac3e57a17ffd4c3d0db6da78c53b6792de5a964b5539e42"}, + {file = "lupa-2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10c2c81bc96f2091210aaf046ef22f920581a3e161b3961121171e02595ca6fb"}, + {file = "lupa-2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11193c9e7fe1b82d921991c68a33f5b08c8e0c16d67d173768fc80f8c75d9d52"}, + {file = "lupa-2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9e149fafd20e748818a0b718abc42f099a3cc6debc7c6932564d7e475291f0e2"}, + {file = "lupa-2.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2518128f38a4608bbc5375404082a3c22c86037639842fb7b1fc2b4f5d2a41e3"}, + {file = "lupa-2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:756fc6aa5ca3a6b7764c474ef061760c5d38e2dd96c21567ab3c7d4f5ed2c3a7"}, + {file = "lupa-2.2-cp39-cp39-win32.whl", hash = "sha256:9b2b7148a77f60b7b193aec2bd820e89c1ecaab9838ca81c8212e2f972df1a1d"}, + {file = "lupa-2.2-cp39-cp39-win_amd64.whl", hash = "sha256:93216d7ae8bb373a8a388b058960a00eaaa6a01e5e2306a13e65db1024181a62"}, + {file = "lupa-2.2-pp310-pypy310_pp73-macosx_11_0_x86_64.whl", hash = "sha256:e4cd8c6f725a5629551ac08979d0631af6bed2564cf87dcae489bcb53bdab808"}, + {file = "lupa-2.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95d712728d36262e0bcffea2ad4b1c3ee6122e4eb16f5a70c2f4750f34580148"}, + {file = "lupa-2.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:47eb46153810e868c543ffc53a3369700998a3e617cfcebf49133a79e6f56432"}, + {file = "lupa-2.2-pp37-pypy37_pp73-macosx_11_0_x86_64.whl", hash = "sha256:283066c6ef9141a66924854a78619ff16bc2efd324484807be58ca9a8e9b617a"}, + {file = "lupa-2.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7141e395325f150321c3caa69178dc70224512e0483e2165d3d1ca375608abb7"}, + {file = "lupa-2.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:502248085d3d2dc74e642f97773367a1929daa24fcf039dd5048acdd5b49a8f9"}, + {file = "lupa-2.2-pp38-pypy38_pp73-macosx_11_0_x86_64.whl", hash = "sha256:4cdeb4a942068882c9e3751520b6de1b6c21d7c2526a2040755b62c7cb46308f"}, + {file = "lupa-2.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfd7e62f3149d10fa3485f4d5143f74b295787708b1974f7fad74b65fb911fa1"}, + {file = "lupa-2.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:4c78b3b7137212a9ef881adca3168a376445da3a7dc322b2416c90a73c81db2c"}, + {file = "lupa-2.2-pp39-pypy39_pp73-macosx_11_0_x86_64.whl", hash = "sha256:ecd1b3a4d8db553c4eaed742843f4b7d77bca795ec9f4292385709bcf691e8a3"}, + {file = "lupa-2.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36db930207c15656b9989721ea41ba8c039abd088cc7242bb690aa72a4978e68"}, + {file = "lupa-2.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8ccba6f5cd8bdecf4000531298e6edd803547340752b80fe5b74911fa6119cc8"}, + {file = "lupa-2.2.tar.gz", hash = "sha256:665a006bcf8d9aacdfdb953824b929d06a0c55910a662b59be2f157ab4c8924d"}, +] [[package]] name = "mako" -version = "1.3.0" +version = "1.3.6" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." optional = false python-versions = ">=3.8" files = [ - {file = "Mako-1.3.0-py3-none-any.whl", hash = "sha256:57d4e997349f1a92035aa25c17ace371a4213f2ca42f99bee9a602500cfd54d9"}, - {file = "Mako-1.3.0.tar.gz", hash = "sha256:e3a9d388fd00e87043edbe8792f45880ac0114e9c4adc69f6e9bfb2c55e3b11b"}, + {file = "Mako-1.3.6-py3-none-any.whl", hash = "sha256:a91198468092a2f1a0de86ca92690fb0cfc43ca90ee17e15d93662b4c04b241a"}, + {file = "mako-1.3.6.tar.gz", hash = "sha256:9ec3a1583713479fae654f83ed9fa8c9a4c16b7bb0daba0e6bbebff50c0d983d"}, ] [package.dependencies] @@ -1248,61 +1706,72 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] [[package]] name = "markupsafe" -version = "2.1.3" +version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." 
optional = false -python-versions = ">=3.7" -files = [ - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, - {file = 
"MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, - {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, +python-versions = ">=3.9" +files = [ + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, + {file = 
"MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = 
"sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, + {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, ] [[package]] @@ -1329,130 +1798,157 @@ files = [ [[package]] name = "multidict" -version = "6.0.4" +version = "6.1.0" description = "multidict implementation" optional = false -python-versions = ">=3.7" +python-versions = 
">=3.8" files = [ - {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, - {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"}, - {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"}, - {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"}, - {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"}, - {file = 
"multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"}, - {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"}, - {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"}, - {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"}, - {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"}, - {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"}, - {file = 
"multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"}, - {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"}, - {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"}, - {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"}, - {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"}, - {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, + {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60"}, + {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1"}, + {file = "multidict-6.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a114d03b938376557927ab23f1e950827c3b893ccb94b62fd95d430fd0e5cf53"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1c416351ee6271b2f49b56ad7f308072f6f44b37118d69c2cad94f3fa8a40d5"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b5d83030255983181005e6cfbac1617ce9746b219bc2aad52201ad121226581"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3e97b5e938051226dc025ec80980c285b053ffb1e25a3db2a3aa3bc046bf7f56"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d618649d4e70ac6efcbba75be98b26ef5078faad23592f9b51ca492953012429"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10524ebd769727ac77ef2278390fb0068d83f3acb7773792a5080f2b0abf7748"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ff3827aef427c89a25cc96ded1759271a93603aba9fb977a6d264648ebf989db"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:06809f4f0f7ab7ea2cabf9caca7d79c22c0758b58a71f9d32943ae13c7ace056"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:f179dee3b863ab1c59580ff60f9d99f632f34ccb38bf67a33ec6b3ecadd0fd76"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:aaed8b0562be4a0876ee3b6946f6869b7bcdb571a5d1496683505944e268b160"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c8b88a2ccf5493b6c8da9076fb151ba106960a2df90c2633f342f120751a9e7"}, + {file = "multidict-6.1.0-cp310-cp310-win32.whl", hash = "sha256:4a9cb68166a34117d6646c0023c7b759bf197bee5ad4272f420a0141d7eb03a0"}, + {file = "multidict-6.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:20b9b5fbe0b88d0bdef2012ef7dee867f874b72528cf1d08f1d59b0e3850129d"}, + {file = "multidict-6.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3efe2c2cb5763f2f1b275ad2bf7a287d3f7ebbef35648a9726e3b69284a4f3d6"}, + {file = "multidict-6.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7053d3b0353a8b9de430a4f4b4268ac9a4fb3481af37dfe49825bf45ca24156"}, + 
{file = "multidict-6.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:27e5fc84ccef8dfaabb09d82b7d179c7cf1a3fbc8a966f8274fcb4ab2eb4cadb"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e2b90b43e696f25c62656389d32236e049568b39320e2735d51f08fd362761b"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d83a047959d38a7ff552ff94be767b7fd79b831ad1cd9920662db05fec24fe72"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d1a9dd711d0877a1ece3d2e4fea11a8e75741ca21954c919406b44e7cf971304"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec2abea24d98246b94913b76a125e855eb5c434f7c46546046372fe60f666351"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4867cafcbc6585e4b678876c489b9273b13e9fff9f6d6d66add5e15d11d926cb"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5b48204e8d955c47c55b72779802b219a39acc3ee3d0116d5080c388970b76e3"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d8fff389528cad1618fb4b26b95550327495462cd745d879a8c7c2115248e399"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a7a9541cd308eed5e30318430a9c74d2132e9a8cb46b901326272d780bf2d423"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:da1758c76f50c39a2efd5e9859ce7d776317eb1dd34317c8152ac9251fc574a3"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c943a53e9186688b45b323602298ab727d8865d8c9ee0b17f8d62d14b56f0753"}, + {file = "multidict-6.1.0-cp311-cp311-win32.whl", hash = "sha256:90f8717cb649eea3504091e640a1b8568faad18bd4b9fcd692853a04475a4b80"}, + {file = "multidict-6.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:82176036e65644a6cc5bd619f65f6f19781e8ec2e5330f51aa9ada7504cc1926"}, + {file = "multidict-6.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b04772ed465fa3cc947db808fa306d79b43e896beb677a56fb2347ca1a49c1fa"}, + {file = "multidict-6.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6180c0ae073bddeb5a97a38c03f30c233e0a4d39cd86166251617d1bbd0af436"}, + {file = "multidict-6.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:071120490b47aa997cca00666923a83f02c7fbb44f71cf7f136df753f7fa8761"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50b3a2710631848991d0bf7de077502e8994c804bb805aeb2925a981de58ec2e"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b58c621844d55e71c1b7f7c498ce5aa6985d743a1a59034c57a905b3f153c1ef"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55b6d90641869892caa9ca42ff913f7ff1c5ece06474fbd32fb2cf6834726c95"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b820514bfc0b98a30e3d85462084779900347e4d49267f747ff54060cc33925"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10a9b09aba0c5b48c53761b7c720aaaf7cf236d5fe394cd399c7ba662d5f9966"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e16bf3e5fc9f44632affb159d30a437bfe286ce9e02754759be5536b169b305"}, + {file = 
"multidict-6.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76f364861c3bfc98cbbcbd402d83454ed9e01a5224bb3a28bf70002a230f73e2"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:820c661588bd01a0aa62a1283f20d2be4281b086f80dad9e955e690c75fb54a2"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:0e5f362e895bc5b9e67fe6e4ded2492d8124bdf817827f33c5b46c2fe3ffaca6"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3ec660d19bbc671e3a6443325f07263be452c453ac9e512f5eb935e7d4ac28b3"}, + {file = "multidict-6.1.0-cp312-cp312-win32.whl", hash = "sha256:58130ecf8f7b8112cdb841486404f1282b9c86ccb30d3519faf301b2e5659133"}, + {file = "multidict-6.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:188215fc0aafb8e03341995e7c4797860181562380f81ed0a87ff455b70bf1f1"}, + {file = "multidict-6.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d569388c381b24671589335a3be6e1d45546c2988c2ebe30fdcada8457a31008"}, + {file = "multidict-6.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:052e10d2d37810b99cc170b785945421141bf7bb7d2f8799d431e7db229c385f"}, + {file = "multidict-6.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f90c822a402cb865e396a504f9fc8173ef34212a342d92e362ca498cad308e28"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b225d95519a5bf73860323e633a664b0d85ad3d5bede6d30d95b35d4dfe8805b"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:23bfd518810af7de1116313ebd9092cb9aa629beb12f6ed631ad53356ed6b86c"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c09fcfdccdd0b57867577b719c69e347a436b86cd83747f179dbf0cc0d4c1f3"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf6bea52ec97e95560af5ae576bdac3aa3aae0b6758c6efa115236d9e07dae44"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57feec87371dbb3520da6192213c7d6fc892d5589a93db548331954de8248fd2"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0c3f390dc53279cbc8ba976e5f8035eab997829066756d811616b652b00a23a3"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:59bfeae4b25ec05b34f1956eaa1cb38032282cd4dfabc5056d0a1ec4d696d3aa"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b2f59caeaf7632cc633b5cf6fc449372b83bbdf0da4ae04d5be36118e46cc0aa"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:37bb93b2178e02b7b618893990941900fd25b6b9ac0fa49931a40aecdf083fe4"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4e9f48f58c2c523d5a06faea47866cd35b32655c46b443f163d08c6d0ddb17d6"}, + {file = "multidict-6.1.0-cp313-cp313-win32.whl", hash = "sha256:3a37ffb35399029b45c6cc33640a92bef403c9fd388acce75cdc88f58bd19a81"}, + {file = "multidict-6.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:e9aa71e15d9d9beaad2c6b9319edcdc0a49a43ef5c0a4c8265ca9ee7d6c67774"}, + {file = "multidict-6.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:db7457bac39421addd0c8449933ac32d8042aae84a14911a757ae6ca3eef1392"}, + {file = "multidict-6.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d094ddec350a2fb899fec68d8353c78233debde9b7d8b4beeafa70825f1c281a"}, + {file = 
"multidict-6.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5845c1fd4866bb5dd3125d89b90e57ed3138241540897de748cdf19de8a2fca2"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9079dfc6a70abe341f521f78405b8949f96db48da98aeb43f9907f342f627cdc"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3914f5aaa0f36d5d60e8ece6a308ee1c9784cd75ec8151062614657a114c4478"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c08be4f460903e5a9d0f76818db3250f12e9c344e79314d1d570fc69d7f4eae4"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d093be959277cb7dee84b801eb1af388b6ad3ca6a6b6bf1ed7585895789d027d"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3702ea6872c5a2a4eeefa6ffd36b042e9773f05b1f37ae3ef7264b1163c2dcf6"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2090f6a85cafc5b2db085124d752757c9d251548cedabe9bd31afe6363e0aff2"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:f67f217af4b1ff66c68a87318012de788dd95fcfeb24cc889011f4e1c7454dfd"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:189f652a87e876098bbc67b4da1049afb5f5dfbaa310dd67c594b01c10388db6"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:6bb5992037f7a9eff7991ebe4273ea7f51f1c1c511e6a2ce511d0e7bdb754492"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:ac10f4c2b9e770c4e393876e35a7046879d195cd123b4f116d299d442b335bcd"}, + {file = "multidict-6.1.0-cp38-cp38-win32.whl", hash = "sha256:e27bbb6d14416713a8bd7aaa1313c0fc8d44ee48d74497a0ff4c3a1b6ccb5167"}, + {file = "multidict-6.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:22f3105d4fb15c8f57ff3959a58fcab6ce36814486500cd7485651230ad4d4ef"}, + {file = "multidict-6.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:4e18b656c5e844539d506a0a06432274d7bd52a7487e6828c63a63d69185626c"}, + {file = "multidict-6.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a185f876e69897a6f3325c3f19f26a297fa058c5e456bfcff8015e9a27e83ae1"}, + {file = "multidict-6.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab7c4ceb38d91570a650dba194e1ca87c2b543488fe9309b4212694174fd539c"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e617fb6b0b6953fffd762669610c1c4ffd05632c138d61ac7e14ad187870669c"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16e5f4bf4e603eb1fdd5d8180f1a25f30056f22e55ce51fb3d6ad4ab29f7d96f"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c035da3f544b1882bac24115f3e2e8760f10a0107614fc9839fd232200b875"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:957cf8e4b6e123a9eea554fa7ebc85674674b713551de587eb318a2df3e00255"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:483a6aea59cb89904e1ceabd2b47368b5600fb7de78a6e4a2c2987b2d256cf30"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:87701f25a2352e5bf7454caa64757642734da9f6b11384c1f9d1a8e699758057"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = 
"sha256:682b987361e5fd7a139ed565e30d81fd81e9629acc7d925a205366877d8c8657"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ce2186a7df133a9c895dea3331ddc5ddad42cdd0d1ea2f0a51e5d161e4762f28"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:9f636b730f7e8cb19feb87094949ba54ee5357440b9658b2a32a5ce4bce53972"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:73eae06aa53af2ea5270cc066dcaf02cc60d2994bbb2c4ef5764949257d10f43"}, + {file = "multidict-6.1.0-cp39-cp39-win32.whl", hash = "sha256:1ca0083e80e791cffc6efce7660ad24af66c8d4079d2a750b29001b53ff59ada"}, + {file = "multidict-6.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:aa466da5b15ccea564bdab9c89175c762bc12825f4659c11227f515cee76fa4a"}, + {file = "multidict-6.1.0-py3-none-any.whl", hash = "sha256:48e171e52d1c4d33888e529b999e5900356b9ae588c2f09a52dcefb158b27506"}, + {file = "multidict-6.1.0.tar.gz", hash = "sha256:22ae2ebf9b0c69d206c003e2f6a914ea33f0a932d4aa16f236afc049d9958f4a"}, ] +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.11\""} + [[package]] name = "mypy" -version = "1.7.1" +version = "1.13.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.7.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:12cce78e329838d70a204293e7b29af9faa3ab14899aec397798a4b41be7f340"}, - {file = "mypy-1.7.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1484b8fa2c10adf4474f016e09d7a159602f3239075c7bf9f1627f5acf40ad49"}, - {file = "mypy-1.7.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31902408f4bf54108bbfb2e35369877c01c95adc6192958684473658c322c8a5"}, - {file = "mypy-1.7.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f2c2521a8e4d6d769e3234350ba7b65ff5d527137cdcde13ff4d99114b0c8e7d"}, - {file = "mypy-1.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:fcd2572dd4519e8a6642b733cd3a8cfc1ef94bafd0c1ceed9c94fe736cb65b6a"}, - {file = "mypy-1.7.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4b901927f16224d0d143b925ce9a4e6b3a758010673eeded9b748f250cf4e8f7"}, - {file = "mypy-1.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2f7f6985d05a4e3ce8255396df363046c28bea790e40617654e91ed580ca7c51"}, - {file = "mypy-1.7.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:944bdc21ebd620eafefc090cdf83158393ec2b1391578359776c00de00e8907a"}, - {file = "mypy-1.7.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9c7ac372232c928fff0645d85f273a726970c014749b924ce5710d7d89763a28"}, - {file = "mypy-1.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:f6efc9bd72258f89a3816e3a98c09d36f079c223aa345c659622f056b760ab42"}, - {file = "mypy-1.7.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6dbdec441c60699288adf051f51a5d512b0d818526d1dcfff5a41f8cd8b4aaf1"}, - {file = "mypy-1.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4fc3d14ee80cd22367caaaf6e014494415bf440980a3045bf5045b525680ac33"}, - {file = "mypy-1.7.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c6e4464ed5f01dc44dc9821caf67b60a4e5c3b04278286a85c067010653a0eb"}, - {file = "mypy-1.7.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:d9b338c19fa2412f76e17525c1b4f2c687a55b156320acb588df79f2e6fa9fea"}, - {file = "mypy-1.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:204e0d6de5fd2317394a4eff62065614c4892d5a4d1a7ee55b765d7a3d9e3f82"}, - {file = "mypy-1.7.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:84860e06ba363d9c0eeabd45ac0fde4b903ad7aa4f93cd8b648385a888e23200"}, - {file = "mypy-1.7.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8c5091ebd294f7628eb25ea554852a52058ac81472c921150e3a61cdd68f75a7"}, - {file = "mypy-1.7.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40716d1f821b89838589e5b3106ebbc23636ffdef5abc31f7cd0266db936067e"}, - {file = "mypy-1.7.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5cf3f0c5ac72139797953bd50bc6c95ac13075e62dbfcc923571180bebb662e9"}, - {file = "mypy-1.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:78e25b2fd6cbb55ddfb8058417df193f0129cad5f4ee75d1502248e588d9e0d7"}, - {file = "mypy-1.7.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:75c4d2a6effd015786c87774e04331b6da863fc3fc4e8adfc3b40aa55ab516fe"}, - {file = "mypy-1.7.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2643d145af5292ee956aa0a83c2ce1038a3bdb26e033dadeb2f7066fb0c9abce"}, - {file = "mypy-1.7.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75aa828610b67462ffe3057d4d8a4112105ed211596b750b53cbfe182f44777a"}, - {file = "mypy-1.7.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ee5d62d28b854eb61889cde4e1dbc10fbaa5560cb39780c3995f6737f7e82120"}, - {file = "mypy-1.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:72cf32ce7dd3562373f78bd751f73c96cfb441de147cc2448a92c1a308bd0ca6"}, - {file = "mypy-1.7.1-py3-none-any.whl", hash = "sha256:f7c5d642db47376a0cc130f0de6d055056e010debdaf0707cd2b0fc7e7ef30ea"}, - {file = "mypy-1.7.1.tar.gz", hash = "sha256:fcb6d9afb1b6208b4c712af0dafdc650f518836065df0d4fb1d800f5d6773db2"}, + {file = "mypy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a"}, + {file = "mypy-1.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80"}, + {file = "mypy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b2353a44d2179846a096e25691d54d59904559f4232519d420d64da6828a3a7"}, + {file = "mypy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0730d1c6a2739d4511dc4253f8274cdd140c55c32dfb0a4cf8b7a43f40abfa6f"}, + {file = "mypy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:c5fc54dbb712ff5e5a0fca797e6e0aa25726c7e72c6a5850cfd2adbc1eb0a372"}, + {file = "mypy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:581665e6f3a8a9078f28d5502f4c334c0c8d802ef55ea0e7276a6e409bc0d82d"}, + {file = "mypy-1.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3ddb5b9bf82e05cc9a627e84707b528e5c7caaa1c55c69e175abb15a761cec2d"}, + {file = "mypy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:20c7ee0bc0d5a9595c46f38beb04201f2620065a93755704e141fcac9f59db2b"}, + {file = "mypy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3790ded76f0b34bc9c8ba4def8f919dd6a46db0f5a6610fb994fe8efdd447f73"}, + {file = "mypy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51f869f4b6b538229c1d1bcc1dd7d119817206e2bc54e8e374b3dfa202defcca"}, + {file = "mypy-1.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5c7051a3461ae84dfb5dd15eff5094640c61c5f22257c8b766794e6dd85e72d5"}, + {file = "mypy-1.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39bb21c69a5d6342f4ce526e4584bc5c197fd20a60d14a8624d8743fffb9472e"}, + {file = "mypy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:164f28cb9d6367439031f4c81e84d3ccaa1e19232d9d05d37cb0bd880d3f93c2"}, + {file = "mypy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a4c1bfcdbce96ff5d96fc9b08e3831acb30dc44ab02671eca5953eadad07d6d0"}, + {file = "mypy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0affb3a79a256b4183ba09811e3577c5163ed06685e4d4b46429a271ba174d2"}, + {file = "mypy-1.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a7b44178c9760ce1a43f544e595d35ed61ac2c3de306599fa59b38a6048e1aa7"}, + {file = "mypy-1.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d5092efb8516d08440e36626f0153b5006d4088c1d663d88bf79625af3d1d62"}, + {file = "mypy-1.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de2904956dac40ced10931ac967ae63c5089bd498542194b436eb097a9f77bc8"}, + {file = "mypy-1.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7bfd8836970d33c2105562650656b6846149374dc8ed77d98424b40b09340ba7"}, + {file = "mypy-1.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:9f73dba9ec77acb86457a8fc04b5239822df0c14a082564737833d2963677dbc"}, + {file = "mypy-1.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:100fac22ce82925f676a734af0db922ecfea991e1d7ec0ceb1e115ebe501301a"}, + {file = "mypy-1.13.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bcb0bb7f42a978bb323a7c88f1081d1b5dee77ca86f4100735a6f541299d8fb"}, + {file = "mypy-1.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bde31fc887c213e223bbfc34328070996061b0833b0a4cfec53745ed61f3519b"}, + {file = "mypy-1.13.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07de989f89786f62b937851295ed62e51774722e5444a27cecca993fc3f9cd74"}, + {file = "mypy-1.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:4bde84334fbe19bad704b3f5b78c4abd35ff1026f8ba72b29de70dda0916beb6"}, + {file = "mypy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0246bcb1b5de7f08f2826451abd947bf656945209b140d16ed317f65a17dc7dc"}, + {file = "mypy-1.13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f5b7deae912cf8b77e990b9280f170381fdfbddf61b4ef80927edd813163732"}, + {file = "mypy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7029881ec6ffb8bc233a4fa364736789582c738217b133f1b55967115288a2bc"}, + {file = "mypy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3e38b980e5681f28f033f3be86b099a247b13c491f14bb8b1e1e134d23bb599d"}, + {file = "mypy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:a6789be98a2017c912ae6ccb77ea553bbaf13d27605d2ca20a76dfbced631b24"}, + {file = "mypy-1.13.0-py3-none-any.whl", hash = "sha256:9c250883f9fd81d212e0952c92dbfcc96fc237f4b7c92f56ac81fd48460b3e5a"}, + {file = "mypy-1.13.0.tar.gz", hash = "sha256:0291a61b6fbf3e6673e3405cfcc0e7650bebc7939659fdca2702958038bd835e"}, ] [package.dependencies] mypy-extensions = ">=1.0.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=4.1.0" +typing-extensions = ">=4.6.0" [package.extras] dmypy = ["psutil (>=4.0)"] +faster-cache = ["orjson"] install-types = ["pip"] mypyc = ["setuptools (>=50)"] reports = ["lxml"] @@ -1470,51 +1966,307 @@ files = [ [[package]] name = "nodeenv" -version = "1.8.0" +version = "1.9.1" description = "Node.js virtual environment builder" optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ - {file = 
"nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, - {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, + {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, + {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, +] + +[[package]] +name = "numpy" +version = "1.26.4" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = 
"numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, +] + +[[package]] +name = "openpyxl" +version = "3.1.5" +description = "A Python library to read/write Excel 2010 xlsx/xlsm files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "openpyxl-3.1.5-py2.py3-none-any.whl", hash = "sha256:5282c12b107bffeef825f4617dc029afaf41d0ea60823bbb665ef3079dc79de2"}, + {file = "openpyxl-3.1.5.tar.gz", hash = "sha256:cf0e3cf56142039133628b5acffe8ef0c12bc902d2aadd3e0fe5878dc08d1050"}, ] [package.dependencies] -setuptools = "*" +et-xmlfile = "*" [[package]] name = "packaging" -version = "23.2" +version = "24.2" description = "Core utilities for Python packages" optional = false +python-versions = ">=3.8" +files = [ + {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, + {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, +] + +[[package]] +name = "pamqp" +version = "3.3.0" +description = "RabbitMQ Focused AMQP low-level library" +optional = false python-versions = ">=3.7" files = [ - {file = 
"packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "pamqp-3.3.0-py2.py3-none-any.whl", hash = "sha256:c901a684794157ae39b52cbf700db8c9aae7a470f13528b9d7b4e5f7202f8eb0"}, + {file = "pamqp-3.3.0.tar.gz", hash = "sha256:40b8795bd4efcf2b0f8821c1de83d12ca16d5760f4507836267fd7a02b06763b"}, +] + +[package.extras] +codegen = ["lxml", "requests", "yapf"] +testing = ["coverage", "flake8", "flake8-comprehensions", "flake8-deprecated", "flake8-import-order", "flake8-print", "flake8-quotes", "flake8-rst-docstrings", "flake8-tuple", "yapf"] + +[[package]] +name = "pandas" +version = "2.2.3" +description = "Powerful data structures for data analysis, time series, and statistics" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"}, + {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"}, + {file = "pandas-2.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d9c45366def9a3dd85a6454c0e7908f2b3b8e9c138f5dc38fed7ce720d8453ed"}, + {file = "pandas-2.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86976a1c5b25ae3f8ccae3a5306e443569ee3c3faf444dfd0f41cda24667ad57"}, + {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b8661b0238a69d7aafe156b7fa86c44b881387509653fdf857bebc5e4008ad42"}, + {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37e0aced3e8f539eccf2e099f65cdb9c8aa85109b0be6e93e2baff94264bdc6f"}, + {file = "pandas-2.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:56534ce0746a58afaf7942ba4863e0ef81c9c50d3f0ae93e9497d6a41a057645"}, + {file = "pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039"}, + {file = "pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd"}, + {file = "pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698"}, + {file = "pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc"}, + {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3"}, + {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32"}, + {file = "pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5"}, + {file = "pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9"}, + {file = "pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4"}, + {file = "pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3"}, + {file = 
"pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319"}, + {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8"}, + {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a"}, + {file = "pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13"}, + {file = "pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015"}, + {file = "pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28"}, + {file = "pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0"}, + {file = "pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24"}, + {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659"}, + {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb"}, + {file = "pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d"}, + {file = "pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468"}, + {file = "pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18"}, + {file = "pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2"}, + {file = "pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4"}, + {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d"}, + {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a"}, + {file = "pandas-2.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc6b93f9b966093cb0fd62ff1a7e4c09e6d546ad7c1de191767baffc57628f39"}, + {file = "pandas-2.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5dbca4c1acd72e8eeef4753eeca07de9b1db4f398669d5994086f788a5d7cc30"}, + {file = "pandas-2.2.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8cd6d7cc958a3910f934ea8dbdf17b2364827bb4dafc38ce6eef6bb3d65ff09c"}, + {file = "pandas-2.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99df71520d25fade9db7c1076ac94eb994f4d2673ef2aa2e86ee039b6746d20c"}, + {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:31d0ced62d4ea3e231a9f228366919a5ea0b07440d9d4dac345376fd8e1477ea"}, + {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7eee9e7cea6adf3e3d24e304ac6b8300646e2a5d1cd3a3c2abed9101b0846761"}, + {file = "pandas-2.2.3-cp39-cp39-win_amd64.whl", hash = 
"sha256:4850ba03528b6dd51d6c5d273c46f183f39a9baf3f0143e566b89450965b105e"}, + {file = "pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667"}, +] + +[package.dependencies] +numpy = [ + {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, + {version = ">=1.22.4", markers = "python_version < \"3.11\""}, + {version = ">=1.23.2", markers = "python_version == \"3.11\""}, ] +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.7" + +[package.extras] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] +consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] +fss = ["fsspec (>=2022.11.0)"] +gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] +hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow (>=10.0.1)"] +performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +pyarrow = ["pyarrow (>=10.0.1)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.9.2)"] [[package]] name = "pathspec" -version = "0.11.2" +version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, - {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, ] [[package]] name = "pbr" -version = "6.0.0" +version = "6.1.0" description = "Python Build Reasonableness" optional = false python-versions = ">=2.6" files = [ - {file = "pbr-6.0.0-py2.py3-none-any.whl", hash = "sha256:4a7317d5e3b17a3dccb6a8cfe67dab65b20551404c52c8ed41279fa4f0cb4cda"}, - {file = "pbr-6.0.0.tar.gz", hash = "sha256:d1377122a5a00e2f940ee482999518efe16d745d423a670c27773dfbc3c9a7d9"}, + {file = "pbr-6.1.0-py2.py3-none-any.whl", hash = "sha256:a776ae228892d8013649c0aeccbb3d5f99ee15e005a4cbb7e61d55a067b28a2a"}, + {file = "pbr-6.1.0.tar.gz", hash = "sha256:788183e382e3d1d7707db08978239965e8b9e4e5ed42669bf4758186734d5f24"}, +] + +[[package]] +name = "pendulum" +version = "3.0.0" +description = "Python datetimes made easy" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pendulum-3.0.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2cf9e53ef11668e07f73190c805dbdf07a1939c3298b78d5a9203a86775d1bfd"}, + {file = "pendulum-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fb551b9b5e6059377889d2d878d940fd0bbb80ae4810543db18e6f77b02c5ef6"}, + {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c58227ac260d5b01fc1025176d7b31858c9f62595737f350d22124a9a3ad82d"}, + {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60fb6f415fea93a11c52578eaa10594568a6716602be8430b167eb0d730f3332"}, + {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b69f6b4dbcb86f2c2fe696ba991e67347bcf87fe601362a1aba6431454b46bde"}, + {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:138afa9c373ee450ede206db5a5e9004fd3011b3c6bbe1e57015395cd076a09f"}, + {file = "pendulum-3.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:83d9031f39c6da9677164241fd0d37fbfc9dc8ade7043b5d6d62f56e81af8ad2"}, + {file = "pendulum-3.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0c2308af4033fa534f089595bcd40a95a39988ce4059ccd3dc6acb9ef14ca44a"}, + {file = "pendulum-3.0.0-cp310-none-win_amd64.whl", hash = "sha256:9a59637cdb8462bdf2dbcb9d389518c0263799189d773ad5c11db6b13064fa79"}, + {file = "pendulum-3.0.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3725245c0352c95d6ca297193192020d1b0c0f83d5ee6bb09964edc2b5a2d508"}, + {file = "pendulum-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6c035f03a3e565ed132927e2c1b691de0dbf4eb53b02a5a3c5a97e1a64e17bec"}, + {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:597e66e63cbd68dd6d58ac46cb7a92363d2088d37ccde2dae4332ef23e95cd00"}, + {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99a0f8172e19f3f0c0e4ace0ad1595134d5243cf75985dc2233e8f9e8de263ca"}, + {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:77d8839e20f54706aed425bec82a83b4aec74db07f26acd039905d1237a5e1d4"}, + {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afde30e8146292b059020fbc8b6f8fd4a60ae7c5e6f0afef937bbb24880bdf01"}, + {file = "pendulum-3.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:660434a6fcf6303c4efd36713ca9212c753140107ee169a3fc6c49c4711c2a05"}, + {file = "pendulum-3.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dee9e5a48c6999dc1106eb7eea3e3a50e98a50651b72c08a87ee2154e544b33e"}, + {file = "pendulum-3.0.0-cp311-none-win_amd64.whl", hash = "sha256:d4cdecde90aec2d67cebe4042fd2a87a4441cc02152ed7ed8fb3ebb110b94ec4"}, + {file = "pendulum-3.0.0-cp311-none-win_arm64.whl", hash = "sha256:773c3bc4ddda2dda9f1b9d51fe06762f9200f3293d75c4660c19b2614b991d83"}, + {file = "pendulum-3.0.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:409e64e41418c49f973d43a28afe5df1df4f1dd87c41c7c90f1a63f61ae0f1f7"}, + {file = "pendulum-3.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a38ad2121c5ec7c4c190c7334e789c3b4624798859156b138fcc4d92295835dc"}, + {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fde4d0b2024b9785f66b7f30ed59281bd60d63d9213cda0eb0910ead777f6d37"}, + {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b2c5675769fb6d4c11238132962939b960fcb365436b6d623c5864287faa319"}, + {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8af95e03e066826f0f4c65811cbee1b3123d4a45a1c3a2b4fc23c4b0dff893b5"}, + {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2165a8f33cb15e06c67070b8afc87a62b85c5a273e3aaa6bc9d15c93a4920d6f"}, + {file = "pendulum-3.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ad5e65b874b5e56bd942546ea7ba9dd1d6a25121db1c517700f1c9de91b28518"}, + {file = "pendulum-3.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17fe4b2c844bbf5f0ece69cfd959fa02957c61317b2161763950d88fed8e13b9"}, + {file = "pendulum-3.0.0-cp312-none-win_amd64.whl", hash = "sha256:78f8f4e7efe5066aca24a7a57511b9c2119f5c2b5eb81c46ff9222ce11e0a7a5"}, + {file = "pendulum-3.0.0-cp312-none-win_arm64.whl", hash = "sha256:28f49d8d1e32aae9c284a90b6bb3873eee15ec6e1d9042edd611b22a94ac462f"}, + {file = "pendulum-3.0.0-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:d4e2512f4e1a4670284a153b214db9719eb5d14ac55ada5b76cbdb8c5c00399d"}, + {file = "pendulum-3.0.0-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:3d897eb50883cc58d9b92f6405245f84b9286cd2de6e8694cb9ea5cb15195a32"}, + {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e169cc2ca419517f397811bbe4589cf3cd13fca6dc38bb352ba15ea90739ebb"}, + {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f17c3084a4524ebefd9255513692f7e7360e23c8853dc6f10c64cc184e1217ab"}, + {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:826d6e258052715f64d05ae0fc9040c0151e6a87aae7c109ba9a0ed930ce4000"}, + {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2aae97087872ef152a0c40e06100b3665d8cb86b59bc8471ca7c26132fccd0f"}, + {file = "pendulum-3.0.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ac65eeec2250d03106b5e81284ad47f0d417ca299a45e89ccc69e36130ca8bc7"}, + {file = "pendulum-3.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:a5346d08f3f4a6e9e672187faa179c7bf9227897081d7121866358af369f44f9"}, + {file = "pendulum-3.0.0-cp37-none-win_amd64.whl", hash = "sha256:235d64e87946d8f95c796af34818c76e0f88c94d624c268693c85b723b698aa9"}, + {file = "pendulum-3.0.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:6a881d9c2a7f85bc9adafcfe671df5207f51f5715ae61f5d838b77a1356e8b7b"}, + {file = "pendulum-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d7762d2076b9b1cb718a6631ad6c16c23fc3fac76cbb8c454e81e80be98daa34"}, + {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e8e36a8130819d97a479a0e7bf379b66b3b1b520e5dc46bd7eb14634338df8c"}, + {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7dc843253ac373358ffc0711960e2dd5b94ab67530a3e204d85c6e8cb2c5fa10"}, + {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a78ad3635d609ceb1e97d6aedef6a6a6f93433ddb2312888e668365908c7120"}, + {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b30a137e9e0d1f751e60e67d11fc67781a572db76b2296f7b4d44554761049d6"}, + {file = "pendulum-3.0.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c95984037987f4a457bb760455d9ca80467be792236b69d0084f228a8ada0162"}, + {file = "pendulum-3.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d29c6e578fe0f893766c0d286adbf0b3c726a4e2341eba0917ec79c50274ec16"}, + {file = "pendulum-3.0.0-cp38-none-win_amd64.whl", hash = "sha256:deaba8e16dbfcb3d7a6b5fabdd5a38b7c982809567479987b9c89572df62e027"}, + {file = "pendulum-3.0.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b11aceea5b20b4b5382962b321dbc354af0defe35daa84e9ff3aae3c230df694"}, + {file = "pendulum-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a90d4d504e82ad236afac9adca4d6a19e4865f717034fc69bafb112c320dcc8f"}, + {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:825799c6b66e3734227756fa746cc34b3549c48693325b8b9f823cb7d21b19ac"}, + {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad769e98dc07972e24afe0cff8d365cb6f0ebc7e65620aa1976fcfbcadc4c6f3"}, + {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6fc26907eb5fb8cc6188cc620bc2075a6c534d981a2f045daa5f79dfe50d512"}, + {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c717eab1b6d898c00a3e0fa7781d615b5c5136bbd40abe82be100bb06df7a56"}, + {file = "pendulum-3.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3ddd1d66d1a714ce43acfe337190be055cdc221d911fc886d5a3aae28e14b76d"}, + {file = "pendulum-3.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:822172853d7a9cf6da95d7b66a16c7160cb99ae6df55d44373888181d7a06edc"}, + {file = "pendulum-3.0.0-cp39-none-win_amd64.whl", hash = "sha256:840de1b49cf1ec54c225a2a6f4f0784d50bd47f68e41dc005b7f67c7d5b5f3ae"}, + {file = "pendulum-3.0.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3b1f74d1e6ffe5d01d6023870e2ce5c2191486928823196f8575dcc786e107b1"}, + {file = "pendulum-3.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:729e9f93756a2cdfa77d0fc82068346e9731c7e884097160603872686e570f07"}, + {file = "pendulum-3.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e586acc0b450cd21cbf0db6bae386237011b75260a3adceddc4be15334689a9a"}, + {file = 
"pendulum-3.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22e7944ffc1f0099a79ff468ee9630c73f8c7835cd76fdb57ef7320e6a409df4"}, + {file = "pendulum-3.0.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fa30af36bd8e50686846bdace37cf6707bdd044e5cb6e1109acbad3277232e04"}, + {file = "pendulum-3.0.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:440215347b11914ae707981b9a57ab9c7b6983ab0babde07063c6ee75c0dc6e7"}, + {file = "pendulum-3.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:314c4038dc5e6a52991570f50edb2f08c339debdf8cea68ac355b32c4174e820"}, + {file = "pendulum-3.0.0-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5acb1d386337415f74f4d1955c4ce8d0201978c162927d07df8eb0692b2d8533"}, + {file = "pendulum-3.0.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a789e12fbdefaffb7b8ac67f9d8f22ba17a3050ceaaa635cd1cc4645773a4b1e"}, + {file = "pendulum-3.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:860aa9b8a888e5913bd70d819306749e5eb488e6b99cd6c47beb701b22bdecf5"}, + {file = "pendulum-3.0.0-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:5ebc65ea033ef0281368217fbf59f5cb05b338ac4dd23d60959c7afcd79a60a0"}, + {file = "pendulum-3.0.0-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d9fef18ab0386ef6a9ac7bad7e43ded42c83ff7ad412f950633854f90d59afa8"}, + {file = "pendulum-3.0.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1c134ba2f0571d0b68b83f6972e2307a55a5a849e7dac8505c715c531d2a8795"}, + {file = "pendulum-3.0.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:385680812e7e18af200bb9b4a49777418c32422d05ad5a8eb85144c4a285907b"}, + {file = "pendulum-3.0.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9eec91cd87c59fb32ec49eb722f375bd58f4be790cae11c1b70fac3ee4f00da0"}, + {file = "pendulum-3.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4386bffeca23c4b69ad50a36211f75b35a4deb6210bdca112ac3043deb7e494a"}, + {file = "pendulum-3.0.0-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dfbcf1661d7146d7698da4b86e7f04814221081e9fe154183e34f4c5f5fa3bf8"}, + {file = "pendulum-3.0.0-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:04a1094a5aa1daa34a6b57c865b25f691848c61583fb22722a4df5699f6bf74c"}, + {file = "pendulum-3.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5b0ec85b9045bd49dd3a3493a5e7ddfd31c36a2a60da387c419fa04abcaecb23"}, + {file = "pendulum-3.0.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:0a15b90129765b705eb2039062a6daf4d22c4e28d1a54fa260892e8c3ae6e157"}, + {file = "pendulum-3.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:bb8f6d7acd67a67d6fedd361ad2958ff0539445ef51cbe8cd288db4306503cd0"}, + {file = "pendulum-3.0.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd69b15374bef7e4b4440612915315cc42e8575fcda2a3d7586a0d88192d0c88"}, + {file = "pendulum-3.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc00f8110db6898360c53c812872662e077eaf9c75515d53ecc65d886eec209a"}, + {file = "pendulum-3.0.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:83a44e8b40655d0ba565a5c3d1365d27e3e6778ae2a05b69124db9e471255c4a"}, + {file = "pendulum-3.0.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1a3604e9fbc06b788041b2a8b78f75c243021e0f512447806a6d37ee5214905d"}, + {file = 
"pendulum-3.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:92c307ae7accebd06cbae4729f0ba9fa724df5f7d91a0964b1b972a22baa482b"}, + {file = "pendulum-3.0.0.tar.gz", hash = "sha256:5d034998dea404ec31fae27af6b22cff1708f830a1ed7353be4d1019bb9f584e"}, ] +[package.dependencies] +python-dateutil = ">=2.6" +tzdata = ">=2020.1" + +[package.extras] +test = ["time-machine (>=2.6.0)"] + [[package]] name = "pep8-naming" version = "0.13.2" @@ -1531,28 +2283,29 @@ flake8 = ">=3.9.1" [[package]] name = "platformdirs" -version = "4.1.0" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +version = "4.3.6" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, - {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, ] [package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.11.2)"] [[package]] name = "pluggy" -version = "1.3.0" +version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, - {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, ] [package.extras] @@ -1561,13 +2314,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pre-commit" -version = "3.5.0" +version = "3.8.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "pre_commit-3.5.0-py2.py3-none-any.whl", hash = "sha256:841dc9aef25daba9a0238cd27984041fa0467b4199fc4852e27950664919f660"}, - {file = "pre_commit-3.5.0.tar.gz", hash = "sha256:5804465c675b659b0862f07907f96295d490822a450c4c40e747d0b1c6ebcb32"}, + {file = "pre_commit-3.8.0-py2.py3-none-any.whl", hash = "sha256:9a90a53bf82fdd8778d58085faf8d83df56e40dfe18f45b19446e26bf1b3a63f"}, + {file = "pre_commit-3.8.0.tar.gz", hash = "sha256:8bb6494d4a20423842e198980c9ecf9f96607a07ea29549e180eef9ae80fe7af"}, ] [package.dependencies] @@ -1579,13 +2332,13 @@ virtualenv = ">=20.10.0" [[package]] name = "prometheus-client" -version = "0.19.0" +version = "0.21.0" description = "Python client for the Prometheus monitoring system." optional = false python-versions = ">=3.8" files = [ - {file = "prometheus_client-0.19.0-py3-none-any.whl", hash = "sha256:c88b1e6ecf6b41cd8fb5731c7ae919bf66df6ec6fafa555cd6c0e16ca169ae92"}, - {file = "prometheus_client-0.19.0.tar.gz", hash = "sha256:4585b0d1223148c27a225b10dbec5ae9bc4c81a99a3fa80774fa6209935324e1"}, + {file = "prometheus_client-0.21.0-py3-none-any.whl", hash = "sha256:4fa6b4dd0ac16d58bb587c04b1caae65b8c5043e85f778f42f5f632f6af2e166"}, + {file = "prometheus_client-0.21.0.tar.gz", hash = "sha256:96c83c606b71ff2b0a433c98889d275f51ffec6c5e267de37c7a2b5c9aa9233e"}, ] [package.extras] @@ -1606,6 +2359,113 @@ files = [ fastapi = ">=0.38.1,<1.0.0" prometheus-client = ">=0.8.0,<1.0.0" +[[package]] +name = "propcache" +version = "0.2.0" +description = "Accelerated property cache" +optional = false +python-versions = ">=3.8" +files = [ + {file = "propcache-0.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:c5869b8fd70b81835a6f187c5fdbe67917a04d7e52b6e7cc4e5fe39d55c39d58"}, + {file = "propcache-0.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:952e0d9d07609d9c5be361f33b0d6d650cd2bae393aabb11d9b719364521984b"}, + {file = "propcache-0.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:33ac8f098df0585c0b53009f039dfd913b38c1d2edafed0cedcc0c32a05aa110"}, + {file = "propcache-0.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97e48e8875e6c13909c800fa344cd54cc4b2b0db1d5f911f840458a500fde2c2"}, + {file = "propcache-0.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:388f3217649d6d59292b722d940d4d2e1e6a7003259eb835724092a1cca0203a"}, + {file = "propcache-0.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f571aea50ba5623c308aa146eb650eebf7dbe0fd8c5d946e28343cb3b5aad577"}, + {file = "propcache-0.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3dfafb44f7bb35c0c06eda6b2ab4bfd58f02729e7c4045e179f9a861b07c9850"}, + {file = "propcache-0.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3ebe9a75be7ab0b7da2464a77bb27febcb4fab46a34f9288f39d74833db7f61"}, + {file = "propcache-0.2.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d2f0d0f976985f85dfb5f3d685697ef769faa6b71993b46b295cdbbd6be8cc37"}, + {file = "propcache-0.2.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:a3dc1a4b165283bd865e8f8cb5f0c64c05001e0718ed06250d8cac9bec115b48"}, + {file = "propcache-0.2.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9e0f07b42d2a50c7dd2d8675d50f7343d998c64008f1da5fef888396b7f84630"}, + {file = "propcache-0.2.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = 
"sha256:e63e3e1e0271f374ed489ff5ee73d4b6e7c60710e1f76af5f0e1a6117cd26394"}, + {file = "propcache-0.2.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:56bb5c98f058a41bb58eead194b4db8c05b088c93d94d5161728515bd52b052b"}, + {file = "propcache-0.2.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7665f04d0c7f26ff8bb534e1c65068409bf4687aa2534faf7104d7182debb336"}, + {file = "propcache-0.2.0-cp310-cp310-win32.whl", hash = "sha256:7cf18abf9764746b9c8704774d8b06714bcb0a63641518a3a89c7f85cc02c2ad"}, + {file = "propcache-0.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:cfac69017ef97db2438efb854edf24f5a29fd09a536ff3a992b75990720cdc99"}, + {file = "propcache-0.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:63f13bf09cc3336eb04a837490b8f332e0db41da66995c9fd1ba04552e516354"}, + {file = "propcache-0.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608cce1da6f2672a56b24a015b42db4ac612ee709f3d29f27a00c943d9e851de"}, + {file = "propcache-0.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:466c219deee4536fbc83c08d09115249db301550625c7fef1c5563a584c9bc87"}, + {file = "propcache-0.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc2db02409338bf36590aa985a461b2c96fce91f8e7e0f14c50c5fcc4f229016"}, + {file = "propcache-0.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a6ed8db0a556343d566a5c124ee483ae113acc9a557a807d439bcecc44e7dfbb"}, + {file = "propcache-0.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:91997d9cb4a325b60d4e3f20967f8eb08dfcb32b22554d5ef78e6fd1dda743a2"}, + {file = "propcache-0.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c7dde9e533c0a49d802b4f3f218fa9ad0a1ce21f2c2eb80d5216565202acab4"}, + {file = "propcache-0.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffcad6c564fe6b9b8916c1aefbb37a362deebf9394bd2974e9d84232e3e08504"}, + {file = "propcache-0.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:97a58a28bcf63284e8b4d7b460cbee1edaab24634e82059c7b8c09e65284f178"}, + {file = "propcache-0.2.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:945db8ee295d3af9dbdbb698cce9bbc5c59b5c3fe328bbc4387f59a8a35f998d"}, + {file = "propcache-0.2.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:39e104da444a34830751715f45ef9fc537475ba21b7f1f5b0f4d71a3b60d7fe2"}, + {file = "propcache-0.2.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:c5ecca8f9bab618340c8e848d340baf68bcd8ad90a8ecd7a4524a81c1764b3db"}, + {file = "propcache-0.2.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:c436130cc779806bdf5d5fae0d848713105472b8566b75ff70048c47d3961c5b"}, + {file = "propcache-0.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:191db28dc6dcd29d1a3e063c3be0b40688ed76434622c53a284e5427565bbd9b"}, + {file = "propcache-0.2.0-cp311-cp311-win32.whl", hash = "sha256:5f2564ec89058ee7c7989a7b719115bdfe2a2fb8e7a4543b8d1c0cc4cf6478c1"}, + {file = "propcache-0.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:6e2e54267980349b723cff366d1e29b138b9a60fa376664a157a342689553f71"}, + {file = "propcache-0.2.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:2ee7606193fb267be4b2e3b32714f2d58cad27217638db98a60f9efb5efeccc2"}, + {file = "propcache-0.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:91ee8fc02ca52e24bcb77b234f22afc03288e1dafbb1f88fe24db308910c4ac7"}, + {file = "propcache-0.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:2e900bad2a8456d00a113cad8c13343f3b1f327534e3589acc2219729237a2e8"}, + {file = "propcache-0.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f52a68c21363c45297aca15561812d542f8fc683c85201df0bebe209e349f793"}, + {file = "propcache-0.2.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e41d67757ff4fbc8ef2af99b338bfb955010444b92929e9e55a6d4dcc3c4f09"}, + {file = "propcache-0.2.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a64e32f8bd94c105cc27f42d3b658902b5bcc947ece3c8fe7bc1b05982f60e89"}, + {file = "propcache-0.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55346705687dbd7ef0d77883ab4f6fabc48232f587925bdaf95219bae072491e"}, + {file = "propcache-0.2.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00181262b17e517df2cd85656fcd6b4e70946fe62cd625b9d74ac9977b64d8d9"}, + {file = "propcache-0.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6994984550eaf25dd7fc7bd1b700ff45c894149341725bb4edc67f0ffa94efa4"}, + {file = "propcache-0.2.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:56295eb1e5f3aecd516d91b00cfd8bf3a13991de5a479df9e27dd569ea23959c"}, + {file = "propcache-0.2.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:439e76255daa0f8151d3cb325f6dd4a3e93043e6403e6491813bcaaaa8733887"}, + {file = "propcache-0.2.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f6475a1b2ecb310c98c28d271a30df74f9dd436ee46d09236a6b750a7599ce57"}, + {file = "propcache-0.2.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:3444cdba6628accf384e349014084b1cacd866fbb88433cd9d279d90a54e0b23"}, + {file = "propcache-0.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4a9d9b4d0a9b38d1c391bb4ad24aa65f306c6f01b512e10a8a34a2dc5675d348"}, + {file = "propcache-0.2.0-cp312-cp312-win32.whl", hash = "sha256:69d3a98eebae99a420d4b28756c8ce6ea5a29291baf2dc9ff9414b42676f61d5"}, + {file = "propcache-0.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:ad9c9b99b05f163109466638bd30ada1722abb01bbb85c739c50b6dc11f92dc3"}, + {file = "propcache-0.2.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ecddc221a077a8132cf7c747d5352a15ed763b674c0448d811f408bf803d9ad7"}, + {file = "propcache-0.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0e53cb83fdd61cbd67202735e6a6687a7b491c8742dfc39c9e01e80354956763"}, + {file = "propcache-0.2.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92fe151145a990c22cbccf9ae15cae8ae9eddabfc949a219c9f667877e40853d"}, + {file = "propcache-0.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6a21ef516d36909931a2967621eecb256018aeb11fc48656e3257e73e2e247a"}, + {file = "propcache-0.2.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f88a4095e913f98988f5b338c1d4d5d07dbb0b6bad19892fd447484e483ba6b"}, + {file = "propcache-0.2.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a5b3bb545ead161be780ee85a2b54fdf7092815995661947812dde94a40f6fb"}, + {file = "propcache-0.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67aeb72e0f482709991aa91345a831d0b707d16b0257e8ef88a2ad246a7280bf"}, + {file = "propcache-0.2.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c997f8c44ec9b9b0bcbf2d422cc00a1d9b9c681f56efa6ca149a941e5560da2"}, + {file = "propcache-0.2.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:2a66df3d4992bc1d725b9aa803e8c5a66c010c65c741ad901e260ece77f58d2f"}, + {file = "propcache-0.2.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:3ebbcf2a07621f29638799828b8d8668c421bfb94c6cb04269130d8de4fb7136"}, + {file = "propcache-0.2.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1235c01ddaa80da8235741e80815ce381c5267f96cc49b1477fdcf8c047ef325"}, + {file = "propcache-0.2.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3947483a381259c06921612550867b37d22e1df6d6d7e8361264b6d037595f44"}, + {file = "propcache-0.2.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:d5bed7f9805cc29c780f3aee05de3262ee7ce1f47083cfe9f77471e9d6777e83"}, + {file = "propcache-0.2.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e4a91d44379f45f5e540971d41e4626dacd7f01004826a18cb048e7da7e96544"}, + {file = "propcache-0.2.0-cp313-cp313-win32.whl", hash = "sha256:f902804113e032e2cdf8c71015651c97af6418363bea8d78dc0911d56c335032"}, + {file = "propcache-0.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:8f188cfcc64fb1266f4684206c9de0e80f54622c3f22a910cbd200478aeae61e"}, + {file = "propcache-0.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:53d1bd3f979ed529f0805dd35ddaca330f80a9a6d90bc0121d2ff398f8ed8861"}, + {file = "propcache-0.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:83928404adf8fb3d26793665633ea79b7361efa0287dfbd372a7e74311d51ee6"}, + {file = "propcache-0.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:77a86c261679ea5f3896ec060be9dc8e365788248cc1e049632a1be682442063"}, + {file = "propcache-0.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:218db2a3c297a3768c11a34812e63b3ac1c3234c3a086def9c0fee50d35add1f"}, + {file = "propcache-0.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7735e82e3498c27bcb2d17cb65d62c14f1100b71723b68362872bca7d0913d90"}, + {file = "propcache-0.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:20a617c776f520c3875cf4511e0d1db847a076d720714ae35ffe0df3e440be68"}, + {file = "propcache-0.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67b69535c870670c9f9b14a75d28baa32221d06f6b6fa6f77a0a13c5a7b0a5b9"}, + {file = "propcache-0.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4569158070180c3855e9c0791c56be3ceeb192defa2cdf6a3f39e54319e56b89"}, + {file = "propcache-0.2.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:db47514ffdbd91ccdc7e6f8407aac4ee94cc871b15b577c1c324236b013ddd04"}, + {file = "propcache-0.2.0-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:2a60ad3e2553a74168d275a0ef35e8c0a965448ffbc3b300ab3a5bb9956c2162"}, + {file = "propcache-0.2.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:662dd62358bdeaca0aee5761de8727cfd6861432e3bb828dc2a693aa0471a563"}, + {file = "propcache-0.2.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:25a1f88b471b3bc911d18b935ecb7115dff3a192b6fef46f0bfaf71ff4f12418"}, + {file = "propcache-0.2.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:f60f0ac7005b9f5a6091009b09a419ace1610e163fa5deaba5ce3484341840e7"}, + {file = "propcache-0.2.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:74acd6e291f885678631b7ebc85d2d4aec458dd849b8c841b57ef04047833bed"}, + {file = "propcache-0.2.0-cp38-cp38-win32.whl", hash = "sha256:d9b6ddac6408194e934002a69bcaadbc88c10b5f38fb9307779d1c629181815d"}, + {file = "propcache-0.2.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:676135dcf3262c9c5081cc8f19ad55c8a64e3f7282a21266d05544450bffc3a5"}, + {file = "propcache-0.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:25c8d773a62ce0451b020c7b29a35cfbc05de8b291163a7a0f3b7904f27253e6"}, + {file = "propcache-0.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:375a12d7556d462dc64d70475a9ee5982465fbb3d2b364f16b86ba9135793638"}, + {file = "propcache-0.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1ec43d76b9677637a89d6ab86e1fef70d739217fefa208c65352ecf0282be957"}, + {file = "propcache-0.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f45eec587dafd4b2d41ac189c2156461ebd0c1082d2fe7013571598abb8505d1"}, + {file = "propcache-0.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc092ba439d91df90aea38168e11f75c655880c12782facf5cf9c00f3d42b562"}, + {file = "propcache-0.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fa1076244f54bb76e65e22cb6910365779d5c3d71d1f18b275f1dfc7b0d71b4d"}, + {file = "propcache-0.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:682a7c79a2fbf40f5dbb1eb6bfe2cd865376deeac65acf9beb607505dced9e12"}, + {file = "propcache-0.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8e40876731f99b6f3c897b66b803c9e1c07a989b366c6b5b475fafd1f7ba3fb8"}, + {file = "propcache-0.2.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:363ea8cd3c5cb6679f1c2f5f1f9669587361c062e4899fce56758efa928728f8"}, + {file = "propcache-0.2.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:140fbf08ab3588b3468932974a9331aff43c0ab8a2ec2c608b6d7d1756dbb6cb"}, + {file = "propcache-0.2.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e70fac33e8b4ac63dfc4c956fd7d85a0b1139adcfc0d964ce288b7c527537fea"}, + {file = "propcache-0.2.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:b33d7a286c0dc1a15f5fc864cc48ae92a846df287ceac2dd499926c3801054a6"}, + {file = "propcache-0.2.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:f6d5749fdd33d90e34c2efb174c7e236829147a2713334d708746e94c4bde40d"}, + {file = "propcache-0.2.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:22aa8f2272d81d9317ff5756bb108021a056805ce63dd3630e27d042c8092798"}, + {file = "propcache-0.2.0-cp39-cp39-win32.whl", hash = "sha256:73e4b40ea0eda421b115248d7e79b59214411109a5bc47d0d48e4c73e3b8fcf9"}, + {file = "propcache-0.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:9517d5e9e0731957468c29dbfd0f976736a0e55afaea843726e887f36fe017df"}, + {file = "propcache-0.2.0-py3-none-any.whl", hash = "sha256:2ccc28197af5313706511fab3a8b66dcd6da067a1331372c82ea1cb74285e036"}, + {file = "propcache-0.2.0.tar.gz", hash = "sha256:df81779732feb9d01e5d513fad0122efb3d53bbc75f61b2a4f29a020bc985e70"}, +] + [[package]] name = "pycodestyle" version = "2.8.0" @@ -1619,147 +2479,135 @@ files = [ [[package]] name = "pycparser" -version = "2.21" +version = "2.22" description = "C parser in Python" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.8" files = [ - {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, - {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = 
"sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, ] [[package]] name = "pydantic" -version = "2.5.2" +version = "2.9.2" description = "Data validation using Python type hints" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pydantic-2.5.2-py3-none-any.whl", hash = "sha256:80c50fb8e3dcecfddae1adbcc00ec5822918490c99ab31f6cf6140ca1c1429f0"}, - {file = "pydantic-2.5.2.tar.gz", hash = "sha256:ff177ba64c6faf73d7afa2e8cad38fd456c0dbe01c9954e71038001cd15a6edd"}, + {file = "pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12"}, + {file = "pydantic-2.9.2.tar.gz", hash = "sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f"}, ] [package.dependencies] -annotated-types = ">=0.4.0" +annotated-types = ">=0.6.0" email-validator = {version = ">=2.0.0", optional = true, markers = "extra == \"email\""} -pydantic-core = "2.14.5" -typing-extensions = ">=4.6.1" +pydantic-core = "2.23.4" +typing-extensions = [ + {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, + {version = ">=4.6.1", markers = "python_version < \"3.13\""}, +] [package.extras] email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata"] [[package]] name = "pydantic-core" -version = "2.14.5" -description = "" +version = "2.23.4" +description = "Core functionality for Pydantic validation and serialization" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.14.5-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:7e88f5696153dc516ba6e79f82cc4747e87027205f0e02390c21f7cb3bd8abfd"}, - {file = "pydantic_core-2.14.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4641e8ad4efb697f38a9b64ca0523b557c7931c5f84e0fd377a9a3b05121f0de"}, - {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:774de879d212db5ce02dfbf5b0da9a0ea386aeba12b0b95674a4ce0593df3d07"}, - {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ebb4e035e28f49b6f1a7032920bb9a0c064aedbbabe52c543343d39341a5b2a3"}, - {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b53e9ad053cd064f7e473a5f29b37fc4cc9dc6d35f341e6afc0155ea257fc911"}, - {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aa1768c151cf562a9992462239dfc356b3d1037cc5a3ac829bb7f3bda7cc1f9"}, - {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eac5c82fc632c599f4639a5886f96867ffced74458c7db61bc9a66ccb8ee3113"}, - {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2ae91f50ccc5810b2f1b6b858257c9ad2e08da70bf890dee02de1775a387c66"}, - {file = "pydantic_core-2.14.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6b9ff467ffbab9110e80e8c8de3bcfce8e8b0fd5661ac44a09ae5901668ba997"}, - {file = "pydantic_core-2.14.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:61ea96a78378e3bd5a0be99b0e5ed00057b71f66115f5404d0dae4819f495093"}, - {file = "pydantic_core-2.14.5-cp310-none-win32.whl", hash = "sha256:bb4c2eda937a5e74c38a41b33d8c77220380a388d689bcdb9b187cf6224c9720"}, - {file = "pydantic_core-2.14.5-cp310-none-win_amd64.whl", hash = "sha256:b7851992faf25eac90bfcb7bfd19e1f5ffa00afd57daec8a0042e63c74a4551b"}, - {file = "pydantic_core-2.14.5-cp311-cp311-macosx_10_7_x86_64.whl", hash = 
"sha256:4e40f2bd0d57dac3feb3a3aed50f17d83436c9e6b09b16af271b6230a2915459"}, - {file = "pydantic_core-2.14.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ab1cdb0f14dc161ebc268c09db04d2c9e6f70027f3b42446fa11c153521c0e88"}, - {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aae7ea3a1c5bb40c93cad361b3e869b180ac174656120c42b9fadebf685d121b"}, - {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:60b7607753ba62cf0739177913b858140f11b8af72f22860c28eabb2f0a61937"}, - {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2248485b0322c75aee7565d95ad0e16f1c67403a470d02f94da7344184be770f"}, - {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:823fcc638f67035137a5cd3f1584a4542d35a951c3cc68c6ead1df7dac825c26"}, - {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96581cfefa9123accc465a5fd0cc833ac4d75d55cc30b633b402e00e7ced00a6"}, - {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a33324437018bf6ba1bb0f921788788641439e0ed654b233285b9c69704c27b4"}, - {file = "pydantic_core-2.14.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9bd18fee0923ca10f9a3ff67d4851c9d3e22b7bc63d1eddc12f439f436f2aada"}, - {file = "pydantic_core-2.14.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:853a2295c00f1d4429db4c0fb9475958543ee80cfd310814b5c0ef502de24dda"}, - {file = "pydantic_core-2.14.5-cp311-none-win32.whl", hash = "sha256:cb774298da62aea5c80a89bd58c40205ab4c2abf4834453b5de207d59d2e1651"}, - {file = "pydantic_core-2.14.5-cp311-none-win_amd64.whl", hash = "sha256:e87fc540c6cac7f29ede02e0f989d4233f88ad439c5cdee56f693cc9c1c78077"}, - {file = "pydantic_core-2.14.5-cp311-none-win_arm64.whl", hash = "sha256:57d52fa717ff445cb0a5ab5237db502e6be50809b43a596fb569630c665abddf"}, - {file = "pydantic_core-2.14.5-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:e60f112ac88db9261ad3a52032ea46388378034f3279c643499edb982536a093"}, - {file = "pydantic_core-2.14.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6e227c40c02fd873c2a73a98c1280c10315cbebe26734c196ef4514776120aeb"}, - {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0cbc7fff06a90bbd875cc201f94ef0ee3929dfbd5c55a06674b60857b8b85ed"}, - {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:103ef8d5b58596a731b690112819501ba1db7a36f4ee99f7892c40da02c3e189"}, - {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c949f04ecad823f81b1ba94e7d189d9dfb81edbb94ed3f8acfce41e682e48cef"}, - {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1452a1acdf914d194159439eb21e56b89aa903f2e1c65c60b9d874f9b950e5d"}, - {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb4679d4c2b089e5ef89756bc73e1926745e995d76e11925e3e96a76d5fa51fc"}, - {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf9d3fe53b1ee360e2421be95e62ca9b3296bf3f2fb2d3b83ca49ad3f925835e"}, - {file = "pydantic_core-2.14.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:70f4b4851dbb500129681d04cc955be2a90b2248d69273a787dda120d5cf1f69"}, - {file = 
"pydantic_core-2.14.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:59986de5710ad9613ff61dd9b02bdd2f615f1a7052304b79cc8fa2eb4e336d2d"}, - {file = "pydantic_core-2.14.5-cp312-none-win32.whl", hash = "sha256:699156034181e2ce106c89ddb4b6504c30db8caa86e0c30de47b3e0654543260"}, - {file = "pydantic_core-2.14.5-cp312-none-win_amd64.whl", hash = "sha256:5baab5455c7a538ac7e8bf1feec4278a66436197592a9bed538160a2e7d11e36"}, - {file = "pydantic_core-2.14.5-cp312-none-win_arm64.whl", hash = "sha256:e47e9a08bcc04d20975b6434cc50bf82665fbc751bcce739d04a3120428f3e27"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:af36f36538418f3806048f3b242a1777e2540ff9efaa667c27da63d2749dbce0"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:45e95333b8418ded64745f14574aa9bfc212cb4fbeed7a687b0c6e53b5e188cd"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e47a76848f92529879ecfc417ff88a2806438f57be4a6a8bf2961e8f9ca9ec7"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d81e6987b27bc7d101c8597e1cd2bcaa2fee5e8e0f356735c7ed34368c471550"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:34708cc82c330e303f4ce87758828ef6e457681b58ce0e921b6e97937dd1e2a3"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:652c1988019752138b974c28f43751528116bcceadad85f33a258869e641d753"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e4d090e73e0725b2904fdbdd8d73b8802ddd691ef9254577b708d413bf3006e"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5c7d5b5005f177764e96bd584d7bf28d6e26e96f2a541fdddb934c486e36fd59"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a71891847f0a73b1b9eb86d089baee301477abef45f7eaf303495cd1473613e4"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a717aef6971208f0851a2420b075338e33083111d92041157bbe0e2713b37325"}, - {file = "pydantic_core-2.14.5-cp37-none-win32.whl", hash = "sha256:de790a3b5aa2124b8b78ae5faa033937a72da8efe74b9231698b5a1dd9be3405"}, - {file = "pydantic_core-2.14.5-cp37-none-win_amd64.whl", hash = "sha256:6c327e9cd849b564b234da821236e6bcbe4f359a42ee05050dc79d8ed2a91588"}, - {file = "pydantic_core-2.14.5-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:ef98ca7d5995a82f43ec0ab39c4caf6a9b994cb0b53648ff61716370eadc43cf"}, - {file = "pydantic_core-2.14.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6eae413494a1c3f89055da7a5515f32e05ebc1a234c27674a6956755fb2236f"}, - {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcf4e6d85614f7a4956c2de5a56531f44efb973d2fe4a444d7251df5d5c4dcfd"}, - {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6637560562134b0e17de333d18e69e312e0458ee4455bdad12c37100b7cad706"}, - {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:77fa384d8e118b3077cccfcaf91bf83c31fe4dc850b5e6ee3dc14dc3d61bdba1"}, - {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16e29bad40bcf97aac682a58861249ca9dcc57c3f6be22f506501833ddb8939c"}, - {file = 
"pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531f4b4252fac6ca476fbe0e6f60f16f5b65d3e6b583bc4d87645e4e5ddde331"}, - {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:074f3d86f081ce61414d2dc44901f4f83617329c6f3ab49d2bc6c96948b2c26b"}, - {file = "pydantic_core-2.14.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c2adbe22ab4babbca99c75c5d07aaf74f43c3195384ec07ccbd2f9e3bddaecec"}, - {file = "pydantic_core-2.14.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0f6116a558fd06d1b7c2902d1c4cf64a5bd49d67c3540e61eccca93f41418124"}, - {file = "pydantic_core-2.14.5-cp38-none-win32.whl", hash = "sha256:fe0a5a1025eb797752136ac8b4fa21aa891e3d74fd340f864ff982d649691867"}, - {file = "pydantic_core-2.14.5-cp38-none-win_amd64.whl", hash = "sha256:079206491c435b60778cf2b0ee5fd645e61ffd6e70c47806c9ed51fc75af078d"}, - {file = "pydantic_core-2.14.5-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:a6a16f4a527aae4f49c875da3cdc9508ac7eef26e7977952608610104244e1b7"}, - {file = "pydantic_core-2.14.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:abf058be9517dc877227ec3223f0300034bd0e9f53aebd63cf4456c8cb1e0863"}, - {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49b08aae5013640a3bfa25a8eebbd95638ec3f4b2eaf6ed82cf0c7047133f03b"}, - {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c2d97e906b4ff36eb464d52a3bc7d720bd6261f64bc4bcdbcd2c557c02081ed2"}, - {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3128e0bbc8c091ec4375a1828d6118bc20404883169ac95ffa8d983b293611e6"}, - {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88e74ab0cdd84ad0614e2750f903bb0d610cc8af2cc17f72c28163acfcf372a4"}, - {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c339dabd8ee15f8259ee0f202679b6324926e5bc9e9a40bf981ce77c038553db"}, - {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3387277f1bf659caf1724e1afe8ee7dbc9952a82d90f858ebb931880216ea955"}, - {file = "pydantic_core-2.14.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ba6b6b3846cfc10fdb4c971980a954e49d447cd215ed5a77ec8190bc93dd7bc5"}, - {file = "pydantic_core-2.14.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ca61d858e4107ce5e1330a74724fe757fc7135190eb5ce5c9d0191729f033209"}, - {file = "pydantic_core-2.14.5-cp39-none-win32.whl", hash = "sha256:ec1e72d6412f7126eb7b2e3bfca42b15e6e389e1bc88ea0069d0cc1742f477c6"}, - {file = "pydantic_core-2.14.5-cp39-none-win_amd64.whl", hash = "sha256:c0b97ec434041827935044bbbe52b03d6018c2897349670ff8fe11ed24d1d4ab"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:79e0a2cdbdc7af3f4aee3210b1172ab53d7ddb6a2d8c24119b5706e622b346d0"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:678265f7b14e138d9a541ddabbe033012a2953315739f8cfa6d754cc8063e8ca"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95b15e855ae44f0c6341ceb74df61b606e11f1087e87dcb7482377374aac6abe"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:09b0e985fbaf13e6b06a56d21694d12ebca6ce5414b9211edf6f17738d82b0f8"}, - {file = 
"pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3ad873900297bb36e4b6b3f7029d88ff9829ecdc15d5cf20161775ce12306f8a"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2d0ae0d8670164e10accbeb31d5ad45adb71292032d0fdb9079912907f0085f4"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d37f8ec982ead9ba0a22a996129594938138a1503237b87318392a48882d50b7"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:35613015f0ba7e14c29ac6c2483a657ec740e5ac5758d993fdd5870b07a61d8b"}, - {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ab4ea451082e684198636565224bbb179575efc1658c48281b2c866bfd4ddf04"}, - {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ce601907e99ea5b4adb807ded3570ea62186b17f88e271569144e8cca4409c7"}, - {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb2ed8b3fe4bf4506d6dab3b93b83bbc22237e230cba03866d561c3577517d18"}, - {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:70f947628e074bb2526ba1b151cee10e4c3b9670af4dbb4d73bc8a89445916b5"}, - {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4bc536201426451f06f044dfbf341c09f540b4ebdb9fd8d2c6164d733de5e634"}, - {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4791cf0f8c3104ac668797d8c514afb3431bc3305f5638add0ba1a5a37e0d88"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:038c9f763e650712b899f983076ce783175397c848da04985658e7628cbe873b"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:27548e16c79702f1e03f5628589c6057c9ae17c95b4c449de3c66b589ead0520"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c97bee68898f3f4344eb02fec316db93d9700fb1e6a5b760ffa20d71d9a46ce3"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9b759b77f5337b4ea024f03abc6464c9f35d9718de01cfe6bae9f2e139c397e"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:439c9afe34638ace43a49bf72d201e0ffc1a800295bed8420c2a9ca8d5e3dbb3"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:ba39688799094c75ea8a16a6b544eb57b5b0f3328697084f3f2790892510d144"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ccd4d5702bb90b84df13bd491be8d900b92016c5a455b7e14630ad7449eb03f8"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:81982d78a45d1e5396819bbb4ece1fadfe5f079335dd28c4ab3427cd95389944"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:7f8210297b04e53bc3da35db08b7302a6a1f4889c79173af69b72ec9754796b8"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:8c8a8812fe6f43a3a5b054af6ac2d7b8605c7bcab2804a8a7d68b53f3cd86e00"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:206ed23aecd67c71daf5c02c3cd19c0501b01ef3cbf7782db9e4e051426b3d0d"}, - {file = 
"pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2027d05c8aebe61d898d4cffd774840a9cb82ed356ba47a90d99ad768f39789"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40180930807ce806aa71eda5a5a5447abb6b6a3c0b4b3b1b1962651906484d68"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:615a0a4bff11c45eb3c1996ceed5bdaa2f7b432425253a7c2eed33bb86d80abc"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5e412d717366e0677ef767eac93566582518fe8be923361a5c204c1a62eaafe"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:513b07e99c0a267b1d954243845d8a833758a6726a3b5d8948306e3fe14675e3"}, - {file = "pydantic_core-2.14.5.tar.gz", hash = "sha256:6d30226dfc816dd0fdf120cae611dd2215117e4f9b124af8c60ab9093b6e8e71"}, + {file = "pydantic_core-2.23.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b"}, + {file = "pydantic_core-2.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f"}, + {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3"}, + {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071"}, + {file = "pydantic_core-2.23.4-cp310-none-win32.whl", hash = "sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119"}, + {file = "pydantic_core-2.23.4-cp310-none-win_amd64.whl", hash = "sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f"}, + {file = "pydantic_core-2.23.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8"}, + {file = "pydantic_core-2.23.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607"}, + {file = 
"pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b"}, + {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0"}, + {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64"}, + {file = "pydantic_core-2.23.4-cp311-none-win32.whl", hash = "sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f"}, + {file = "pydantic_core-2.23.4-cp311-none-win_amd64.whl", hash = "sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3"}, + {file = "pydantic_core-2.23.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231"}, + {file = "pydantic_core-2.23.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126"}, + {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e"}, + {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24"}, + {file = "pydantic_core-2.23.4-cp312-none-win32.whl", hash = "sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84"}, + {file = "pydantic_core-2.23.4-cp312-none-win_amd64.whl", hash = "sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9"}, + {file = "pydantic_core-2.23.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc"}, + {file = "pydantic_core-2.23.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd"}, + {file = 
"pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327"}, + {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6"}, + {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f"}, + {file = "pydantic_core-2.23.4-cp313-none-win32.whl", hash = "sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769"}, + {file = "pydantic_core-2.23.4-cp313-none-win_amd64.whl", hash = "sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5"}, + {file = "pydantic_core-2.23.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d4488a93b071c04dc20f5cecc3631fc78b9789dd72483ba15d423b5b3689b555"}, + {file = "pydantic_core-2.23.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:81965a16b675b35e1d09dd14df53f190f9129c0202356ed44ab2728b1c905658"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffa2ebd4c8530079140dd2d7f794a9d9a73cbb8e9d59ffe24c63436efa8f271"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:61817945f2fe7d166e75fbfb28004034b48e44878177fc54d81688e7b85a3665"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29d2c342c4bc01b88402d60189f3df065fb0dda3654744d5a165a5288a657368"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e11661ce0fd30a6790e8bcdf263b9ec5988e95e63cf901972107efc49218b13"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d18368b137c6295db49ce7218b1a9ba15c5bc254c96d7c9f9e924a9bc7825ad"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec4e55f79b1c4ffb2eecd8a0cfba9955a2588497d96851f4c8f99aa4a1d39b12"}, + {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:374a5e5049eda9e0a44c696c7ade3ff355f06b1fe0bb945ea3cac2bc336478a2"}, + {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5c364564d17da23db1106787675fc7af45f2f7b58b4173bfdd105564e132e6fb"}, + {file = "pydantic_core-2.23.4-cp38-none-win32.whl", hash = "sha256:d7a80d21d613eec45e3d41eb22f8f94ddc758a6c4720842dc74c0581f54993d6"}, + {file = "pydantic_core-2.23.4-cp38-none-win_amd64.whl", hash = "sha256:5f5ff8d839f4566a474a969508fe1c5e59c31c80d9e140566f9a37bba7b8d556"}, + 
{file = "pydantic_core-2.23.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a4fa4fc04dff799089689f4fd502ce7d59de529fc2f40a2c8836886c03e0175a"}, + {file = "pydantic_core-2.23.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7df63886be5e270da67e0966cf4afbae86069501d35c8c1b3b6c168f42cb36"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcedcd19a557e182628afa1d553c3895a9f825b936415d0dbd3cd0bbcfd29b4b"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f54b118ce5de9ac21c363d9b3caa6c800341e8c47a508787e5868c6b79c9323"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86d2f57d3e1379a9525c5ab067b27dbb8a0642fb5d454e17a9ac434f9ce523e3"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de6d1d1b9e5101508cb37ab0d972357cac5235f5c6533d1071964c47139257df"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1278e0d324f6908e872730c9102b0112477a7f7cf88b308e4fc36ce1bdb6d58c"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a6b5099eeec78827553827f4c6b8615978bb4b6a88e5d9b93eddf8bb6790f55"}, + {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e55541f756f9b3ee346b840103f32779c695a19826a4c442b7954550a0972040"}, + {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a5c7ba8ffb6d6f8f2ab08743be203654bb1aaa8c9dcb09f82ddd34eadb695605"}, + {file = "pydantic_core-2.23.4-cp39-none-win32.whl", hash = "sha256:37b0fe330e4a58d3c58b24d91d1eb102aeec675a3db4c292ec3928ecd892a9a6"}, + {file = "pydantic_core-2.23.4-cp39-none-win_amd64.whl", hash = "sha256:1498bec4c05c9c787bde9125cfdcc63a41004ff167f495063191b863399b1a29"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:78ddaaa81421a29574a682b3179d4cf9e6d405a09b99d93ddcf7e5239c742e21"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:883a91b5dd7d26492ff2f04f40fbb652de40fcc0afe07e8129e8ae779c2110eb"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88ad334a15b32a791ea935af224b9de1bf99bcd62fabf745d5f3442199d86d59"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:233710f069d251feb12a56da21e14cca67994eab08362207785cf8c598e74577"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:19442362866a753485ba5e4be408964644dd6a09123d9416c54cd49171f50744"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:624e278a7d29b6445e4e813af92af37820fafb6dcc55c012c834f9e26f9aaaef"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5ef8f42bec47f21d07668a043f077d507e5bf4e668d5c6dfe6aaba89de1a5b8"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:aea443fffa9fbe3af1a9ba721a87f926fe548d32cab71d188a6ede77d0ff244e"}, + {file = "pydantic_core-2.23.4.tar.gz", hash = "sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863"}, ] [package.dependencies] @@ -1767,19 +2615,24 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "pydantic-settings" -version = "2.1.0" +version = "2.6.1" description = "Settings management using Pydantic" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_settings-2.1.0-py3-none-any.whl", hash = "sha256:7621c0cb5d90d1140d2f0ef557bdf03573aac7035948109adf2574770b77605a"}, - {file = "pydantic_settings-2.1.0.tar.gz", hash = "sha256:26b1492e0a24755626ac5e6d715e9077ab7ad4fb5f19a8b7ed7011d52f36141c"}, + {file = "pydantic_settings-2.6.1-py3-none-any.whl", hash = "sha256:7fb0637c786a558d3103436278a7c4f1cfd29ba8973238a50c5bb9a55387da87"}, + {file = "pydantic_settings-2.6.1.tar.gz", hash = "sha256:e0f92546d8a9923cb8941689abf85d6601a8c19a23e97a34b2964a2e3f813ca0"}, ] [package.dependencies] -pydantic = ">=2.3.0" +pydantic = ">=2.7.0" python-dotenv = ">=0.21.0" +[package.extras] +azure-key-vault = ["azure-identity (>=1.16.0)", "azure-keyvault-secrets (>=4.8.0)"] +toml = ["tomli (>=2.0.1)"] +yaml = ["pyyaml (>=6.0.1)"] + [[package]] name = "pydocstyle" version = "6.3.0" @@ -1810,45 +2663,44 @@ files = [ [[package]] name = "pygments" -version = "2.17.2" +version = "2.18.0" description = "Pygments is a syntax highlighting package written in Python." 
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"},
- {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"},
+ {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"},
+ {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"},
]
[package.extras]
-plugins = ["importlib-metadata"]
windows-terminal = ["colorama (>=0.4.6)"]
[[package]]
name = "pyjwt"
-version = "2.8.0"
+version = "2.10.1"
description = "JSON Web Token implementation in Python"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.9"
files = [
- {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"},
- {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"},
+ {file = "PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb"},
+ {file = "pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953"},
]
[package.extras]
crypto = ["cryptography (>=3.4.0)"]
-dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"]
-docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"]
+dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"]
+docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"]
tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"]
[[package]]
name = "pytest"
-version = "7.4.3"
+version = "8.3.3"
description = "pytest: simple powerful testing with Python"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"},
- {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"},
+ {file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"},
+ {file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"},
]
[package.dependencies]
@@ -1856,11 +2708,11 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""}
exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
iniconfig = "*"
packaging = "*"
-pluggy = ">=0.12,<2.0"
-tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""}
+pluggy = ">=1.5,<2"
+tomli = {version = ">=1", markers = "python_version < \"3.11\""}
[package.extras]
-testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
+dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
[[package]]
name = "pytest-cov"
@@ -1897,77 +2749,116 @@ pytest = ">=7.3.1"
[package.extras]
test = ["coverage (>=7.2.7)", "pytest-mock (>=3.10)"]
+[[package]]
+name = "python-dateutil"
+version = "2.9.0.post0"
+description = "Extensions to the standard Python datetime module"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
+files = [
+ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"},
+ {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"},
+]
+
+[package.dependencies]
+six = ">=1.5"
+
[[package]]
name = "python-dotenv"
-version = "1.0.0"
+version = "1.0.1"
description = "Read key-value pairs from a .env file and set them as environment variables"
optional = false
python-versions = ">=3.8"
files = [
- {file = "python-dotenv-1.0.0.tar.gz", hash = "sha256:a8df96034aae6d2d50a4ebe8216326c61c3eb64836776504fcca410e5937a3ba"},
- {file = "python_dotenv-1.0.0-py3-none-any.whl", hash = "sha256:f5971a9226b701070a4bf2c38c89e5a3f0d64de8debda981d1db98583009122a"},
+ {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"},
+ {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"},
]
[package.extras]
cli = ["click (>=5.0)"]
+[[package]]
+name = "python-multipart"
+version = "0.0.18"
+description = "A streaming multipart parser for Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "python_multipart-0.0.18-py3-none-any.whl", hash = "sha256:efe91480f485f6a361427a541db4796f9e1591afc0fb8e7a4ba06bfbc6708996"},
+ {file = "python_multipart-0.0.18.tar.gz", hash = "sha256:7a68db60c8bfb82e460637fa4750727b45af1d5e2ed215593f917f64694d34fe"},
+]
+
+[[package]]
+name = "pytz"
+version = "2024.2"
+description = "World timezone definitions, modern and historical"
+optional = false
+python-versions = "*"
+files = [
+ {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"},
+ {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"},
+]
+
[[package]]
name = "pyyaml"
-version = "6.0.1"
+version = "6.0.2"
description = "YAML parser and emitter for Python"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.8"
files = [
- {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"},
- {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"},
- {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"},
- {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"},
- {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"},
- {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"},
- {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"},
- {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"},
- {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, 
- {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + 
{file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] [[package]] @@ -1989,6 +2880,27 @@ hiredis = {version = ">=1.0.0", optional = true, 
markers = "extra == \"hiredis\" hiredis = ["hiredis (>=1.0.0)"] ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"] +[[package]] +name = "requests" +version = "2.32.3" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + [[package]] name = "restructuredtext-lint" version = "1.4.0" @@ -2021,58 +2933,91 @@ idna2008 = ["idna"] [[package]] name = "rich" -version = "13.7.0" +version = "13.9.4" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false -python-versions = ">=3.7.0" +python-versions = ">=3.8.0" files = [ - {file = "rich-13.7.0-py3-none-any.whl", hash = "sha256:6da14c108c4866ee9520bbffa71f6fe3962e193b7da68720583850cd4548e235"}, - {file = "rich-13.7.0.tar.gz", hash = "sha256:5cb5123b5cf9ee70584244246816e9114227e0b98ad9176eede6ad54bf5403fa"}, + {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, + {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, ] [package.dependencies] markdown-it-py = ">=2.2.0" pygments = ">=2.13.0,<3.0.0" +typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.11\""} [package.extras] jupyter = ["ipywidgets (>=7.5.1,<9)"] +[[package]] +name = "s3transfer" +version = "0.10.3" +description = "An Amazon S3 Transfer Manager" +optional = false +python-versions = ">=3.8" +files = [ + {file = "s3transfer-0.10.3-py3-none-any.whl", hash = "sha256:263ed587a5803c6c708d3ce44dc4dfedaab4c1a32e8329bab818933d79ddcf5d"}, + {file = "s3transfer-0.10.3.tar.gz", hash = "sha256:4f50ed74ab84d474ce614475e0b8d5047ff080810aac5d01ea25231cfc944b0c"}, +] + +[package.dependencies] +botocore = ">=1.33.2,<2.0a.0" + +[package.extras] +crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"] + [[package]] name = "setuptools" -version = "69.0.2" +version = "75.5.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "setuptools-69.0.2-py3-none-any.whl", hash = "sha256:1e8fdff6797d3865f37397be788a4e3cba233608e9b509382a2777d25ebde7f2"}, - {file = "setuptools-69.0.2.tar.gz", hash = "sha256:735896e78a4742605974de002ac60562d286fa8051a7e2299445e8e8fbb01aa6"}, + {file = "setuptools-75.5.0-py3-none-any.whl", hash = "sha256:87cb777c3b96d638ca02031192d40390e0ad97737e27b6b4fa831bea86f2f829"}, + {file = "setuptools-75.5.0.tar.gz", hash = "sha256:5c4ccb41111392671f02bb5f8436dfc5a9a7185e80500531b133f5775c4163ef"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", 
"jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.7.0)"] +core = ["importlib-metadata (>=6)", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (>=1.12,<1.14)", "pytest-mypy"] [[package]] -name = "smmap" -version = "5.0.1" -description = "A pure Python implementation of a sliding window memory map manager" +name = "shellingham" +version = "1.5.4" +description = "Tool to Detect Surrounding Shell" optional = false python-versions = ">=3.7" files = [ - {file = "smmap-5.0.1-py3-none-any.whl", hash = "sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da"}, - {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"}, + {file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"}, + {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"}, +] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] [[package]] name = "sniffio" -version = "1.3.0" +version = "1.3.1" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" files = [ - {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, - {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, + {file = "sniffio-1.3.1-py3-none-any.whl", hash = 
"sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, ] [[package]] @@ -2099,80 +3044,88 @@ files = [ [[package]] name = "sqlalchemy" -version = "2.0.23" +version = "2.0.36" description = "Database Abstraction Library" optional = false python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-2.0.23-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:638c2c0b6b4661a4fd264f6fb804eccd392745c5887f9317feb64bb7cb03b3ea"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e3b5036aa326dc2df50cba3c958e29b291a80f604b1afa4c8ce73e78e1c9f01d"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:787af80107fb691934a01889ca8f82a44adedbf5ef3d6ad7d0f0b9ac557e0c34"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c14eba45983d2f48f7546bb32b47937ee2cafae353646295f0e99f35b14286ab"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0666031df46b9badba9bed00092a1ffa3aa063a5e68fa244acd9f08070e936d3"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:89a01238fcb9a8af118eaad3ffcc5dedaacbd429dc6fdc43fe430d3a941ff965"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-win32.whl", hash = "sha256:cabafc7837b6cec61c0e1e5c6d14ef250b675fa9c3060ed8a7e38653bd732ff8"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-win_amd64.whl", hash = "sha256:87a3d6b53c39cd173990de2f5f4b83431d534a74f0e2f88bd16eabb5667e65c6"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d5578e6863eeb998980c212a39106ea139bdc0b3f73291b96e27c929c90cd8e1"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:62d9e964870ea5ade4bc870ac4004c456efe75fb50404c03c5fd61f8bc669a72"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c80c38bd2ea35b97cbf7c21aeb129dcbebbf344ee01a7141016ab7b851464f8e"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75eefe09e98043cff2fb8af9796e20747ae870c903dc61d41b0c2e55128f958d"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd45a5b6c68357578263d74daab6ff9439517f87da63442d244f9f23df56138d"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a86cb7063e2c9fb8e774f77fbf8475516d270a3e989da55fa05d08089d77f8c4"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-win32.whl", hash = "sha256:b41f5d65b54cdf4934ecede2f41b9c60c9f785620416e8e6c48349ab18643855"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-win_amd64.whl", hash = "sha256:9ca922f305d67605668e93991aaf2c12239c78207bca3b891cd51a4515c72e22"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d0f7fb0c7527c41fa6fcae2be537ac137f636a41b4c5a4c58914541e2f436b45"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7c424983ab447dab126c39d3ce3be5bee95700783204a72549c3dceffe0fc8f4"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f508ba8f89e0a5ecdfd3761f82dda2a3d7b678a626967608f4273e0dba8f07ac"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6463aa765cf02b9247e38b35853923edbf2f6fd1963df88706bc1d02410a5577"}, - {file = 
"SQLAlchemy-2.0.23-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e599a51acf3cc4d31d1a0cf248d8f8d863b6386d2b6782c5074427ebb7803bda"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fd54601ef9cc455a0c61e5245f690c8a3ad67ddb03d3b91c361d076def0b4c60"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-win32.whl", hash = "sha256:42d0b0290a8fb0165ea2c2781ae66e95cca6e27a2fbe1016ff8db3112ac1e846"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-win_amd64.whl", hash = "sha256:227135ef1e48165f37590b8bfc44ed7ff4c074bf04dc8d6f8e7f1c14a94aa6ca"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:14aebfe28b99f24f8a4c1346c48bc3d63705b1f919a24c27471136d2f219f02d"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e983fa42164577d073778d06d2cc5d020322425a509a08119bdcee70ad856bf"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e0dc9031baa46ad0dd5a269cb7a92a73284d1309228be1d5935dac8fb3cae24"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5f94aeb99f43729960638e7468d4688f6efccb837a858b34574e01143cf11f89"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:63bfc3acc970776036f6d1d0e65faa7473be9f3135d37a463c5eba5efcdb24c8"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-win32.whl", hash = "sha256:f48ed89dd11c3c586f45e9eec1e437b355b3b6f6884ea4a4c3111a3358fd0c18"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-win_amd64.whl", hash = "sha256:1e018aba8363adb0599e745af245306cb8c46b9ad0a6fc0a86745b6ff7d940fc"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:64ac935a90bc479fee77f9463f298943b0e60005fe5de2aa654d9cdef46c54df"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c4722f3bc3c1c2fcc3702dbe0016ba31148dd6efcd2a2fd33c1b4897c6a19693"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4af79c06825e2836de21439cb2a6ce22b2ca129bad74f359bddd173f39582bf5"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:683ef58ca8eea4747737a1c35c11372ffeb84578d3aab8f3e10b1d13d66f2bc4"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d4041ad05b35f1f4da481f6b811b4af2f29e83af253bf37c3c4582b2c68934ab"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aeb397de65a0a62f14c257f36a726945a7f7bb60253462e8602d9b97b5cbe204"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-win32.whl", hash = "sha256:42ede90148b73fe4ab4a089f3126b2cfae8cfefc955c8174d697bb46210c8306"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-win_amd64.whl", hash = "sha256:964971b52daab357d2c0875825e36584d58f536e920f2968df8d581054eada4b"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:616fe7bcff0a05098f64b4478b78ec2dfa03225c23734d83d6c169eb41a93e55"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0e680527245895aba86afbd5bef6c316831c02aa988d1aad83c47ffe92655e74"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9585b646ffb048c0250acc7dad92536591ffe35dba624bb8fd9b471e25212a35"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4895a63e2c271ffc7a81ea424b94060f7b3b03b4ea0cd58ab5bb676ed02f4221"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-musllinux_1_1_aarch64.whl", hash 
= "sha256:cc1d21576f958c42d9aec68eba5c1a7d715e5fc07825a629015fe8e3b0657fb0"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:967c0b71156f793e6662dd839da54f884631755275ed71f1539c95bbada9aaab"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-win32.whl", hash = "sha256:0a8c6aa506893e25a04233bc721c6b6cf844bafd7250535abb56cb6cc1368884"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-win_amd64.whl", hash = "sha256:f3420d00d2cb42432c1d0e44540ae83185ccbbc67a6054dcc8ab5387add6620b"}, - {file = "SQLAlchemy-2.0.23-py3-none-any.whl", hash = "sha256:31952bbc527d633b9479f5f81e8b9dfada00b91d6baba021a869095f1a97006d"}, - {file = "SQLAlchemy-2.0.23.tar.gz", hash = "sha256:c1bda93cbbe4aa2aa0aa8655c5aeda505cd219ff3e8da91d1d329e143e4aff69"}, -] - -[package.dependencies] -greenlet = {version = "!=0.4.17", optional = true, markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\" or extra == \"asyncio\""} -typing-extensions = ">=4.2.0" + {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:59b8f3adb3971929a3e660337f5dacc5942c2cdb760afcabb2614ffbda9f9f72"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37350015056a553e442ff672c2d20e6f4b6d0b2495691fa239d8aa18bb3bc908"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8318f4776c85abc3f40ab185e388bee7a6ea99e7fa3a30686580b209eaa35c08"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c245b1fbade9c35e5bd3b64270ab49ce990369018289ecfde3f9c318411aaa07"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:69f93723edbca7342624d09f6704e7126b152eaed3cdbb634cb657a54332a3c5"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f9511d8dd4a6e9271d07d150fb2f81874a3c8c95e11ff9af3a2dfc35fe42ee44"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-win32.whl", hash = "sha256:c3f3631693003d8e585d4200730616b78fafd5a01ef8b698f6967da5c605b3fa"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-win_amd64.whl", hash = "sha256:a86bfab2ef46d63300c0f06936bd6e6c0105faa11d509083ba8f2f9d237fb5b5"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fd3a55deef00f689ce931d4d1b23fa9f04c880a48ee97af488fd215cf24e2a6c"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4f5e9cd989b45b73bd359f693b935364f7e1f79486e29015813c338450aa5a71"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ddd9db6e59c44875211bc4c7953a9f6638b937b0a88ae6d09eb46cced54eff"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2519f3a5d0517fc159afab1015e54bb81b4406c278749779be57a569d8d1bb0d"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59b1ee96617135f6e1d6f275bbe988f419c5178016f3d41d3c0abb0c819f75bb"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:39769a115f730d683b0eb7b694db9789267bcd027326cccc3125e862eb03bfd8"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-win32.whl", hash = "sha256:66bffbad8d6271bb1cc2f9a4ea4f86f80fe5e2e3e501a5ae2a3dc6a76e604e6f"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-win_amd64.whl", hash = 
"sha256:23623166bfefe1487d81b698c423f8678e80df8b54614c2bf4b4cfcd7c711959"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7b64e6ec3f02c35647be6b4851008b26cff592a95ecb13b6788a54ef80bbdd4"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:46331b00096a6db1fdc052d55b101dbbfc99155a548e20a0e4a8e5e4d1362855"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdf3386a801ea5aba17c6410dd1dc8d39cf454ca2565541b5ac42a84e1e28f53"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac9dfa18ff2a67b09b372d5db8743c27966abf0e5344c555d86cc7199f7ad83a"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:90812a8933df713fdf748b355527e3af257a11e415b613dd794512461eb8a686"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1bc330d9d29c7f06f003ab10e1eaced295e87940405afe1b110f2eb93a233588"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-win32.whl", hash = "sha256:79d2e78abc26d871875b419e1fd3c0bca31a1cb0043277d0d850014599626c2e"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-win_amd64.whl", hash = "sha256:b544ad1935a8541d177cb402948b94e871067656b3a0b9e91dbec136b06a2ff5"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b5cc79df7f4bc3d11e4b542596c03826063092611e481fcf1c9dfee3c94355ef"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3c01117dd36800f2ecaa238c65365b7b16497adc1522bf84906e5710ee9ba0e8"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bc633f4ee4b4c46e7adcb3a9b5ec083bf1d9a97c1d3854b92749d935de40b9b"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e46ed38affdfc95d2c958de328d037d87801cfcbea6d421000859e9789e61c2"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b2985c0b06e989c043f1dc09d4fe89e1616aadd35392aea2844f0458a989eacf"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a121d62ebe7d26fec9155f83f8be5189ef1405f5973ea4874a26fab9f1e262c"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-win32.whl", hash = "sha256:0572f4bd6f94752167adfd7c1bed84f4b240ee6203a95e05d1e208d488d0d436"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-win_amd64.whl", hash = "sha256:8c78ac40bde930c60e0f78b3cd184c580f89456dd87fc08f9e3ee3ce8765ce88"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:be9812b766cad94a25bc63bec11f88c4ad3629a0cec1cd5d4ba48dc23860486b"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50aae840ebbd6cdd41af1c14590e5741665e5272d2fee999306673a1bb1fdb4d"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4557e1f11c5f653ebfdd924f3f9d5ebfc718283b0b9beebaa5dd6b77ec290971"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:07b441f7d03b9a66299ce7ccf3ef2900abc81c0db434f42a5694a37bd73870f2"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:28120ef39c92c2dd60f2721af9328479516844c6b550b077ca450c7d7dc68575"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-win32.whl", hash = "sha256:b81ee3d84803fd42d0b154cb6892ae57ea6b7c55d8359a02379965706c7efe6c"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-win_amd64.whl", hash = 
"sha256:f942a799516184c855e1a32fbc7b29d7e571b52612647866d4ec1c3242578fcb"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3d6718667da04294d7df1670d70eeddd414f313738d20a6f1d1f379e3139a545"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:72c28b84b174ce8af8504ca28ae9347d317f9dba3999e5981a3cd441f3712e24"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b11d0cfdd2b095e7b0686cf5fabeb9c67fae5b06d265d8180715b8cfa86522e3"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e32092c47011d113dc01ab3e1d3ce9f006a47223b18422c5c0d150af13a00687"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6a440293d802d3011028e14e4226da1434b373cbaf4a4bbb63f845761a708346"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c54a1e53a0c308a8e8a7dffb59097bff7facda27c70c286f005327f21b2bd6b1"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-win32.whl", hash = "sha256:1e0d612a17581b6616ff03c8e3d5eff7452f34655c901f75d62bd86449d9750e"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-win_amd64.whl", hash = "sha256:8958b10490125124463095bbdadda5aa22ec799f91958e410438ad6c97a7b793"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dc022184d3e5cacc9579e41805a681187650e170eb2fd70e28b86192a479dcaa"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b817d41d692bf286abc181f8af476c4fbef3fd05e798777492618378448ee689"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4e46a888b54be23d03a89be510f24a7652fe6ff660787b96cd0e57a4ebcb46d"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4ae3005ed83f5967f961fd091f2f8c5329161f69ce8480aa8168b2d7fe37f06"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:03e08af7a5f9386a43919eda9de33ffda16b44eb11f3b313e6822243770e9763"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3dbb986bad3ed5ceaf090200eba750b5245150bd97d3e67343a3cfed06feecf7"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-win32.whl", hash = "sha256:9fe53b404f24789b5ea9003fc25b9a3988feddebd7e7b369c8fac27ad6f52f28"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-win_amd64.whl", hash = "sha256:af148a33ff0349f53512a049c6406923e4e02bf2f26c5fb285f143faf4f0e46a"}, + {file = "SQLAlchemy-2.0.36-py3-none-any.whl", hash = "sha256:fddbe92b4760c6f5d48162aef14824add991aeda8ddadb3c31d56eb15ca69f8e"}, + {file = "sqlalchemy-2.0.36.tar.gz", hash = "sha256:7f2767680b6d2398aea7082e45a774b2b0767b5c8d8ffb9c8b683088ea9b29c5"}, +] + +[package.dependencies] +greenlet = {version = "!=0.4.17", optional = true, markers = "python_version < \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\") or extra == \"asyncio\""} +typing-extensions = ">=4.6.0" [package.extras] aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] asyncio = ["greenlet (!=0.4.17)"] asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] 
-mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10)"] mssql = ["pyodbc"] mssql-pymssql = ["pymssql"] mssql-pyodbc = ["pyodbc"] mypy = ["mypy (>=0.910)"] mysql = ["mysqlclient (>=1.4.0)"] mysql-connector = ["mysql-connector-python"] -oracle = ["cx-oracle (>=8)"] +oracle = ["cx_oracle (>=8)"] oracle-oracledb = ["oracledb (>=1.0.1)"] postgresql = ["psycopg2 (>=2.7)"] postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] @@ -2182,17 +3135,17 @@ postgresql-psycopg2binary = ["psycopg2-binary"] postgresql-psycopg2cffi = ["psycopg2cffi"] postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] pymysql = ["pymysql"] -sqlcipher = ["sqlcipher3-binary"] +sqlcipher = ["sqlcipher3_binary"] [[package]] name = "starlette" -version = "0.27.0" +version = "0.41.3" description = "The little ASGI library that shines." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "starlette-0.27.0-py3-none-any.whl", hash = "sha256:918416370e846586541235ccd38a474c08b80443ed31c578a418e2209b3eef91"}, - {file = "starlette-0.27.0.tar.gz", hash = "sha256:6a6b0d042acb8d469a01eba54e9cda6cbd24ac602c4cd016723117d6a7e73b75"}, + {file = "starlette-0.41.3-py3-none-any.whl", hash = "sha256:44cedb2b7c77a9de33a8b74b2b90e9f50d11fcf25d8270ea525ad71a25374ff7"}, + {file = "starlette-0.41.3.tar.gz", hash = "sha256:0e4ab3d16522a255be6b28260b938eae2482f98ce5cc934cb08dce8dc3ba5835"}, ] [package.dependencies] @@ -2200,114 +3153,209 @@ anyio = ">=3.4.0,<5" typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} [package.extras] -full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart", "pyyaml"] +full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"] [[package]] name = "stevedore" -version = "5.1.0" +version = "5.3.0" description = "Manage dynamic plugins for Python applications" optional = false python-versions = ">=3.8" files = [ - {file = "stevedore-5.1.0-py3-none-any.whl", hash = "sha256:8cc040628f3cea5d7128f2e76cf486b2251a4e543c7b938f58d9a377f6694a2d"}, - {file = "stevedore-5.1.0.tar.gz", hash = "sha256:a54534acf9b89bc7ed264807013b505bf07f74dbe4bcfa37d32bd063870b087c"}, + {file = "stevedore-5.3.0-py3-none-any.whl", hash = "sha256:1efd34ca08f474dad08d9b19e934a22c68bb6fe416926479ba29e5013bcc8f78"}, + {file = "stevedore-5.3.0.tar.gz", hash = "sha256:9a64265f4060312828151c204efbe9b7a9852a0d9228756344dbc7e4023e375a"}, ] [package.dependencies] -pbr = ">=2.0.0,<2.1.0 || >2.1.0" +pbr = ">=2.0.0" + +[[package]] +name = "structlog" +version = "24.4.0" +description = "Structured Logging for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "structlog-24.4.0-py3-none-any.whl", hash = "sha256:597f61e80a91cc0749a9fd2a098ed76715a1c8a01f73e336b746504d1aad7610"}, + {file = "structlog-24.4.0.tar.gz", hash = "sha256:b27bfecede327a6d2da5fbc96bd859f114ecc398a6389d664f62085ee7ae6fc4"}, +] + +[package.extras] +dev = ["freezegun (>=0.2.8)", "mypy (>=1.4)", "pretend", "pytest (>=6.0)", "pytest-asyncio (>=0.17)", "rich", "simplejson", "twisted"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-mermaid", "sphinxext-opengraph", "twisted"] +tests = ["freezegun (>=0.2.8)", "pretend", "pytest (>=6.0)", "pytest-asyncio (>=0.17)", "simplejson"] +typing = ["mypy (>=1.4)", "rich", "twisted"] [[package]] name = "tomli" -version = "2.0.1" +version = "2.1.0" description = "A lil' TOML parser" optional = false 
+python-versions = ">=3.8" +files = [ + {file = "tomli-2.1.0-py3-none-any.whl", hash = "sha256:a5c57c3d1c56f5ccdf89f6523458f60ef716e210fc47c4cfb188c5ba473e0391"}, + {file = "tomli-2.1.0.tar.gz", hash = "sha256:3f646cae2aec94e17d04973e4249548320197cfabdf130015d023de4b74d8ab8"}, +] + +[[package]] +name = "typer" +version = "0.13.1" +description = "Typer, build great CLIs. Easy to code. Based on Python type hints." +optional = false python-versions = ">=3.7" files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, + {file = "typer-0.13.1-py3-none-any.whl", hash = "sha256:5b59580fd925e89463a29d363e0a43245ec02765bde9fb77d39e5d0f29dd7157"}, + {file = "typer-0.13.1.tar.gz", hash = "sha256:9d444cb96cc268ce6f8b94e13b4335084cef4c079998a9f4851a90229a3bd25c"}, ] +[package.dependencies] +click = ">=8.0.0" +rich = ">=10.11.0" +shellingham = ">=1.3.0" +typing-extensions = ">=3.7.4.3" + [[package]] name = "typing-extensions" -version = "4.8.0" +version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, - {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[[package]] +name = "tzdata" +version = "2024.2" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd"}, + {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"}, ] [[package]] name = "ujson" -version = "5.8.0" +version = "5.10.0" description = "Ultra fast JSON encoder and decoder for Python" optional = false python-versions = ">=3.8" files = [ - {file = "ujson-5.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f4511560d75b15ecb367eef561554959b9d49b6ec3b8d5634212f9fed74a6df1"}, - {file = "ujson-5.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9399eaa5d1931a0ead49dce3ffacbea63f3177978588b956036bfe53cdf6af75"}, - {file = "ujson-5.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4e7bb7eba0e1963f8b768f9c458ecb193e5bf6977090182e2b4f4408f35ac76"}, - {file = "ujson-5.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40931d7c08c4ce99adc4b409ddb1bbb01635a950e81239c2382cfe24251b127a"}, - {file = "ujson-5.8.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d53039d39de65360e924b511c7ca1a67b0975c34c015dd468fca492b11caa8f7"}, - {file = "ujson-5.8.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:bdf04c6af3852161be9613e458a1fb67327910391de8ffedb8332e60800147a2"}, - {file = "ujson-5.8.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a70f776bda2e5072a086c02792c7863ba5833d565189e09fabbd04c8b4c3abba"}, - {file = 
"ujson-5.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f26629ac531d712f93192c233a74888bc8b8212558bd7d04c349125f10199fcf"}, - {file = "ujson-5.8.0-cp310-cp310-win32.whl", hash = "sha256:7ecc33b107ae88405aebdb8d82c13d6944be2331ebb04399134c03171509371a"}, - {file = "ujson-5.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:3b27a8da7a080add559a3b73ec9ebd52e82cc4419f7c6fb7266e62439a055ed0"}, - {file = "ujson-5.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:193349a998cd821483a25f5df30b44e8f495423840ee11b3b28df092ddfd0f7f"}, - {file = "ujson-5.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4ddeabbc78b2aed531f167d1e70387b151900bc856d61e9325fcdfefb2a51ad8"}, - {file = "ujson-5.8.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ce24909a9c25062e60653073dd6d5e6ec9d6ad7ed6e0069450d5b673c854405"}, - {file = "ujson-5.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27a2a3c7620ebe43641e926a1062bc04e92dbe90d3501687957d71b4bdddaec4"}, - {file = "ujson-5.8.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b852bdf920fe9f84e2a2c210cc45f1b64f763b4f7d01468b33f7791698e455e"}, - {file = "ujson-5.8.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:20768961a6a706170497129960762ded9c89fb1c10db2989c56956b162e2a8a3"}, - {file = "ujson-5.8.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e0147d41e9fb5cd174207c4a2895c5e24813204499fd0839951d4c8784a23bf5"}, - {file = "ujson-5.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e3673053b036fd161ae7a5a33358ccae6793ee89fd499000204676baafd7b3aa"}, - {file = "ujson-5.8.0-cp311-cp311-win32.whl", hash = "sha256:a89cf3cd8bf33a37600431b7024a7ccf499db25f9f0b332947fbc79043aad879"}, - {file = "ujson-5.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:3659deec9ab9eb19e8646932bfe6fe22730757c4addbe9d7d5544e879dc1b721"}, - {file = "ujson-5.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:102bf31c56f59538cccdfec45649780ae00657e86247c07edac434cb14d5388c"}, - {file = "ujson-5.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:299a312c3e85edee1178cb6453645217ba23b4e3186412677fa48e9a7f986de6"}, - {file = "ujson-5.8.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2e385a7679b9088d7bc43a64811a7713cc7c33d032d020f757c54e7d41931ae"}, - {file = "ujson-5.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad24ec130855d4430a682c7a60ca0bc158f8253ec81feed4073801f6b6cb681b"}, - {file = "ujson-5.8.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16fde596d5e45bdf0d7de615346a102510ac8c405098e5595625015b0d4b5296"}, - {file = "ujson-5.8.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6d230d870d1ce03df915e694dcfa3f4e8714369cce2346686dbe0bc8e3f135e7"}, - {file = "ujson-5.8.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9571de0c53db5cbc265945e08f093f093af2c5a11e14772c72d8e37fceeedd08"}, - {file = "ujson-5.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:7cba16b26efe774c096a5e822e4f27097b7c81ed6fb5264a2b3f5fd8784bab30"}, - {file = "ujson-5.8.0-cp312-cp312-win32.whl", hash = "sha256:48c7d373ff22366eecfa36a52b9b55b0ee5bd44c2b50e16084aa88b9de038916"}, - {file = "ujson-5.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:5ac97b1e182d81cf395ded620528c59f4177eee024b4b39a50cdd7b720fdeec6"}, - {file = "ujson-5.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:2a64cc32bb4a436e5813b83f5aab0889927e5ea1788bf99b930fad853c5625cb"}, - {file = "ujson-5.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e54578fa8838ddc722539a752adfce9372474114f8c127bb316db5392d942f8b"}, - {file = "ujson-5.8.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9721cd112b5e4687cb4ade12a7b8af8b048d4991227ae8066d9c4b3a6642a582"}, - {file = "ujson-5.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d9707e5aacf63fb919f6237d6490c4e0244c7f8d3dc2a0f84d7dec5db7cb54c"}, - {file = "ujson-5.8.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0be81bae295f65a6896b0c9030b55a106fb2dec69ef877253a87bc7c9c5308f7"}, - {file = "ujson-5.8.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ae7f4725c344bf437e9b881019c558416fe84ad9c6b67426416c131ad577df67"}, - {file = "ujson-5.8.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9ab282d67ef3097105552bf151438b551cc4bedb3f24d80fada830f2e132aeb9"}, - {file = "ujson-5.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:94c7bd9880fa33fcf7f6d7f4cc032e2371adee3c5dba2922b918987141d1bf07"}, - {file = "ujson-5.8.0-cp38-cp38-win32.whl", hash = "sha256:bf5737dbcfe0fa0ac8fa599eceafae86b376492c8f1e4b84e3adf765f03fb564"}, - {file = "ujson-5.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:11da6bed916f9bfacf13f4fc6a9594abd62b2bb115acfb17a77b0f03bee4cfd5"}, - {file = "ujson-5.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:69b3104a2603bab510497ceabc186ba40fef38ec731c0ccaa662e01ff94a985c"}, - {file = "ujson-5.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9249fdefeb021e00b46025e77feed89cd91ffe9b3a49415239103fc1d5d9c29a"}, - {file = "ujson-5.8.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2873d196725a8193f56dde527b322c4bc79ed97cd60f1d087826ac3290cf9207"}, - {file = "ujson-5.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a4dafa9010c366589f55afb0fd67084acd8added1a51251008f9ff2c3e44042"}, - {file = "ujson-5.8.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a42baa647a50fa8bed53d4e242be61023bd37b93577f27f90ffe521ac9dc7a3"}, - {file = "ujson-5.8.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f3554eaadffe416c6f543af442066afa6549edbc34fe6a7719818c3e72ebfe95"}, - {file = "ujson-5.8.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:fb87decf38cc82bcdea1d7511e73629e651bdec3a43ab40985167ab8449b769c"}, - {file = "ujson-5.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:407d60eb942c318482bbfb1e66be093308bb11617d41c613e33b4ce5be789adc"}, - {file = "ujson-5.8.0-cp39-cp39-win32.whl", hash = "sha256:0fe1b7edaf560ca6ab023f81cbeaf9946a240876a993b8c5a21a1c539171d903"}, - {file = "ujson-5.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:3f9b63530a5392eb687baff3989d0fb5f45194ae5b1ca8276282fb647f8dcdb3"}, - {file = "ujson-5.8.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:efeddf950fb15a832376c0c01d8d7713479fbeceaed1eaecb2665aa62c305aec"}, - {file = "ujson-5.8.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d8283ac5d03e65f488530c43d6610134309085b71db4f675e9cf5dff96a8282"}, - {file = "ujson-5.8.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb0142f6f10f57598655340a3b2c70ed4646cbe674191da195eb0985a9813b83"}, - {file = "ujson-5.8.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:07d459aca895eb17eb463b00441986b021b9312c6c8cc1d06880925c7f51009c"}, - {file = "ujson-5.8.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d524a8c15cfc863705991d70bbec998456a42c405c291d0f84a74ad7f35c5109"}, - {file = "ujson-5.8.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d6f84a7a175c75beecde53a624881ff618e9433045a69fcfb5e154b73cdaa377"}, - {file = "ujson-5.8.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b748797131ac7b29826d1524db1cc366d2722ab7afacc2ce1287cdafccddbf1f"}, - {file = "ujson-5.8.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e72ba76313d48a1a3a42e7dc9d1db32ea93fac782ad8dde6f8b13e35c229130"}, - {file = "ujson-5.8.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f504117a39cb98abba4153bf0b46b4954cc5d62f6351a14660201500ba31fe7f"}, - {file = "ujson-5.8.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a8c91b6f4bf23f274af9002b128d133b735141e867109487d17e344d38b87d94"}, - {file = "ujson-5.8.0.tar.gz", hash = "sha256:78e318def4ade898a461b3d92a79f9441e7e0e4d2ad5419abed4336d702c7425"}, + {file = "ujson-5.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2601aa9ecdbee1118a1c2065323bda35e2c5a2cf0797ef4522d485f9d3ef65bd"}, + {file = "ujson-5.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:348898dd702fc1c4f1051bc3aacbf894caa0927fe2c53e68679c073375f732cf"}, + {file = "ujson-5.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22cffecf73391e8abd65ef5f4e4dd523162a3399d5e84faa6aebbf9583df86d6"}, + {file = "ujson-5.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26b0e2d2366543c1bb4fbd457446f00b0187a2bddf93148ac2da07a53fe51569"}, + {file = "ujson-5.10.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:caf270c6dba1be7a41125cd1e4fc7ba384bf564650beef0df2dd21a00b7f5770"}, + {file = "ujson-5.10.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a245d59f2ffe750446292b0094244df163c3dc96b3ce152a2c837a44e7cda9d1"}, + {file = "ujson-5.10.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:94a87f6e151c5f483d7d54ceef83b45d3a9cca7a9cb453dbdbb3f5a6f64033f5"}, + {file = "ujson-5.10.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:29b443c4c0a113bcbb792c88bea67b675c7ca3ca80c3474784e08bba01c18d51"}, + {file = "ujson-5.10.0-cp310-cp310-win32.whl", hash = "sha256:c18610b9ccd2874950faf474692deee4223a994251bc0a083c114671b64e6518"}, + {file = "ujson-5.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:924f7318c31874d6bb44d9ee1900167ca32aa9b69389b98ecbde34c1698a250f"}, + {file = "ujson-5.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a5b366812c90e69d0f379a53648be10a5db38f9d4ad212b60af00bd4048d0f00"}, + {file = "ujson-5.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:502bf475781e8167f0f9d0e41cd32879d120a524b22358e7f205294224c71126"}, + {file = "ujson-5.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b91b5d0d9d283e085e821651184a647699430705b15bf274c7896f23fe9c9d8"}, + {file = "ujson-5.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:129e39af3a6d85b9c26d5577169c21d53821d8cf68e079060602e861c6e5da1b"}, + {file = "ujson-5.10.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f77b74475c462cb8b88680471193064d3e715c7c6074b1c8c412cb526466efe9"}, + {file = 
"ujson-5.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7ec0ca8c415e81aa4123501fee7f761abf4b7f386aad348501a26940beb1860f"}, + {file = "ujson-5.10.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ab13a2a9e0b2865a6c6db9271f4b46af1c7476bfd51af1f64585e919b7c07fd4"}, + {file = "ujson-5.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:57aaf98b92d72fc70886b5a0e1a1ca52c2320377360341715dd3933a18e827b1"}, + {file = "ujson-5.10.0-cp311-cp311-win32.whl", hash = "sha256:2987713a490ceb27edff77fb184ed09acdc565db700ee852823c3dc3cffe455f"}, + {file = "ujson-5.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:f00ea7e00447918ee0eff2422c4add4c5752b1b60e88fcb3c067d4a21049a720"}, + {file = "ujson-5.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:98ba15d8cbc481ce55695beee9f063189dce91a4b08bc1d03e7f0152cd4bbdd5"}, + {file = "ujson-5.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a9d2edbf1556e4f56e50fab7d8ff993dbad7f54bac68eacdd27a8f55f433578e"}, + {file = "ujson-5.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6627029ae4f52d0e1a2451768c2c37c0c814ffc04f796eb36244cf16b8e57043"}, + {file = "ujson-5.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8ccb77b3e40b151e20519c6ae6d89bfe3f4c14e8e210d910287f778368bb3d1"}, + {file = "ujson-5.10.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3caf9cd64abfeb11a3b661329085c5e167abbe15256b3b68cb5d914ba7396f3"}, + {file = "ujson-5.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6e32abdce572e3a8c3d02c886c704a38a1b015a1fb858004e03d20ca7cecbb21"}, + {file = "ujson-5.10.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a65b6af4d903103ee7b6f4f5b85f1bfd0c90ba4eeac6421aae436c9988aa64a2"}, + {file = "ujson-5.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:604a046d966457b6cdcacc5aa2ec5314f0e8c42bae52842c1e6fa02ea4bda42e"}, + {file = "ujson-5.10.0-cp312-cp312-win32.whl", hash = "sha256:6dea1c8b4fc921bf78a8ff00bbd2bfe166345f5536c510671bccececb187c80e"}, + {file = "ujson-5.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:38665e7d8290188b1e0d57d584eb8110951a9591363316dd41cf8686ab1d0abc"}, + {file = "ujson-5.10.0-cp313-cp313-macosx_10_9_x86_64.whl", hash = "sha256:618efd84dc1acbd6bff8eaa736bb6c074bfa8b8a98f55b61c38d4ca2c1f7f287"}, + {file = "ujson-5.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:38d5d36b4aedfe81dfe251f76c0467399d575d1395a1755de391e58985ab1c2e"}, + {file = "ujson-5.10.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67079b1f9fb29ed9a2914acf4ef6c02844b3153913eb735d4bf287ee1db6e557"}, + {file = "ujson-5.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7d0e0ceeb8fe2468c70ec0c37b439dd554e2aa539a8a56365fd761edb418988"}, + {file = "ujson-5.10.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:59e02cd37bc7c44d587a0ba45347cc815fb7a5fe48de16bf05caa5f7d0d2e816"}, + {file = "ujson-5.10.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2a890b706b64e0065f02577bf6d8ca3b66c11a5e81fb75d757233a38c07a1f20"}, + {file = "ujson-5.10.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:621e34b4632c740ecb491efc7f1fcb4f74b48ddb55e65221995e74e2d00bbff0"}, + {file = "ujson-5.10.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b9500e61fce0cfc86168b248104e954fead61f9be213087153d272e817ec7b4f"}, + {file = "ujson-5.10.0-cp313-cp313-win32.whl", hash = 
"sha256:4c4fc16f11ac1612f05b6f5781b384716719547e142cfd67b65d035bd85af165"}, + {file = "ujson-5.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:4573fd1695932d4f619928fd09d5d03d917274381649ade4328091ceca175539"}, + {file = "ujson-5.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a984a3131da7f07563057db1c3020b1350a3e27a8ec46ccbfbf21e5928a43050"}, + {file = "ujson-5.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:73814cd1b9db6fc3270e9d8fe3b19f9f89e78ee9d71e8bd6c9a626aeaeaf16bd"}, + {file = "ujson-5.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61e1591ed9376e5eddda202ec229eddc56c612b61ac6ad07f96b91460bb6c2fb"}, + {file = "ujson-5.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2c75269f8205b2690db4572a4a36fe47cd1338e4368bc73a7a0e48789e2e35a"}, + {file = "ujson-5.10.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7223f41e5bf1f919cd8d073e35b229295aa8e0f7b5de07ed1c8fddac63a6bc5d"}, + {file = "ujson-5.10.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d4dc2fd6b3067c0782e7002ac3b38cf48608ee6366ff176bbd02cf969c9c20fe"}, + {file = "ujson-5.10.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:232cc85f8ee3c454c115455195a205074a56ff42608fd6b942aa4c378ac14dd7"}, + {file = "ujson-5.10.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:cc6139531f13148055d691e442e4bc6601f6dba1e6d521b1585d4788ab0bfad4"}, + {file = "ujson-5.10.0-cp38-cp38-win32.whl", hash = "sha256:e7ce306a42b6b93ca47ac4a3b96683ca554f6d35dd8adc5acfcd55096c8dfcb8"}, + {file = "ujson-5.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:e82d4bb2138ab05e18f089a83b6564fee28048771eb63cdecf4b9b549de8a2cc"}, + {file = "ujson-5.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dfef2814c6b3291c3c5f10065f745a1307d86019dbd7ea50e83504950136ed5b"}, + {file = "ujson-5.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4734ee0745d5928d0ba3a213647f1c4a74a2a28edc6d27b2d6d5bd9fa4319e27"}, + {file = "ujson-5.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d47ebb01bd865fdea43da56254a3930a413f0c5590372a1241514abae8aa7c76"}, + {file = "ujson-5.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dee5e97c2496874acbf1d3e37b521dd1f307349ed955e62d1d2f05382bc36dd5"}, + {file = "ujson-5.10.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7490655a2272a2d0b072ef16b0b58ee462f4973a8f6bbe64917ce5e0a256f9c0"}, + {file = "ujson-5.10.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ba17799fcddaddf5c1f75a4ba3fd6441f6a4f1e9173f8a786b42450851bd74f1"}, + {file = "ujson-5.10.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2aff2985cef314f21d0fecc56027505804bc78802c0121343874741650a4d3d1"}, + {file = "ujson-5.10.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ad88ac75c432674d05b61184178635d44901eb749786c8eb08c102330e6e8996"}, + {file = "ujson-5.10.0-cp39-cp39-win32.whl", hash = "sha256:2544912a71da4ff8c4f7ab5606f947d7299971bdd25a45e008e467ca638d13c9"}, + {file = "ujson-5.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:3ff201d62b1b177a46f113bb43ad300b424b7847f9c5d38b1b4ad8f75d4a282a"}, + {file = "ujson-5.10.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5b6fee72fa77dc172a28f21693f64d93166534c263adb3f96c413ccc85ef6e64"}, + {file = "ujson-5.10.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:61d0af13a9af01d9f26d2331ce49bb5ac1fb9c814964018ac8df605b5422dcb3"}, + {file = 
"ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecb24f0bdd899d368b715c9e6664166cf694d1e57be73f17759573a6986dd95a"}, + {file = "ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fbd8fd427f57a03cff3ad6574b5e299131585d9727c8c366da4624a9069ed746"}, + {file = "ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:beeaf1c48e32f07d8820c705ff8e645f8afa690cca1544adba4ebfa067efdc88"}, + {file = "ujson-5.10.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:baed37ea46d756aca2955e99525cc02d9181de67f25515c468856c38d52b5f3b"}, + {file = "ujson-5.10.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7663960f08cd5a2bb152f5ee3992e1af7690a64c0e26d31ba7b3ff5b2ee66337"}, + {file = "ujson-5.10.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:d8640fb4072d36b08e95a3a380ba65779d356b2fee8696afeb7794cf0902d0a1"}, + {file = "ujson-5.10.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78778a3aa7aafb11e7ddca4e29f46bc5139131037ad628cc10936764282d6753"}, + {file = "ujson-5.10.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0111b27f2d5c820e7f2dbad7d48e3338c824e7ac4d2a12da3dc6061cc39c8e6"}, + {file = "ujson-5.10.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:c66962ca7565605b355a9ed478292da628b8f18c0f2793021ca4425abf8b01e5"}, + {file = "ujson-5.10.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ba43cc34cce49cf2d4bc76401a754a81202d8aa926d0e2b79f0ee258cb15d3a4"}, + {file = "ujson-5.10.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ac56eb983edce27e7f51d05bc8dd820586c6e6be1c5216a6809b0c668bb312b8"}, + {file = "ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44bd4b23a0e723bf8b10628288c2c7c335161d6840013d4d5de20e48551773b"}, + {file = "ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c10f4654e5326ec14a46bcdeb2b685d4ada6911050aa8baaf3501e57024b804"}, + {file = "ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0de4971a89a762398006e844ae394bd46991f7c385d7a6a3b93ba229e6dac17e"}, + {file = "ujson-5.10.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e1402f0564a97d2a52310ae10a64d25bcef94f8dd643fcf5d310219d915484f7"}, + {file = "ujson-5.10.0.tar.gz", hash = "sha256:b3cd8f3c5d8c7738257f1018880444f7b7d9b66232c64649f562d7ba86ad4bc1"}, +] + +[[package]] +name = "urllib3" +version = "1.26.20" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e"}, + {file = "urllib3-1.26.20.tar.gz", hash = "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32"}, +] + +[package.extras] +brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + +[[package]] +name = "urllib3" +version = "2.2.3" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, + {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, ] +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + [[package]] name = "uvicorn" version = "0.24.0.post1" @@ -2336,57 +3384,64 @@ standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", [[package]] name = "uvloop" -version = "0.19.0" +version = "0.21.0" description = "Fast implementation of asyncio event loop on top of libuv" optional = false python-versions = ">=3.8.0" files = [ - {file = "uvloop-0.19.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:de4313d7f575474c8f5a12e163f6d89c0a878bc49219641d49e6f1444369a90e"}, - {file = "uvloop-0.19.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5588bd21cf1fcf06bded085f37e43ce0e00424197e7c10e77afd4bbefffef428"}, - {file = "uvloop-0.19.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b1fd71c3843327f3bbc3237bedcdb6504fd50368ab3e04d0410e52ec293f5b8"}, - {file = "uvloop-0.19.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a05128d315e2912791de6088c34136bfcdd0c7cbc1cf85fd6fd1bb321b7c849"}, - {file = "uvloop-0.19.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:cd81bdc2b8219cb4b2556eea39d2e36bfa375a2dd021404f90a62e44efaaf957"}, - {file = "uvloop-0.19.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5f17766fb6da94135526273080f3455a112f82570b2ee5daa64d682387fe0dcd"}, - {file = "uvloop-0.19.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4ce6b0af8f2729a02a5d1575feacb2a94fc7b2e983868b009d51c9a9d2149bef"}, - {file = "uvloop-0.19.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:31e672bb38b45abc4f26e273be83b72a0d28d074d5b370fc4dcf4c4eb15417d2"}, - {file = "uvloop-0.19.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:570fc0ed613883d8d30ee40397b79207eedd2624891692471808a95069a007c1"}, - {file = "uvloop-0.19.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5138821e40b0c3e6c9478643b4660bd44372ae1e16a322b8fc07478f92684e24"}, - {file = "uvloop-0.19.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:91ab01c6cd00e39cde50173ba4ec68a1e578fee9279ba64f5221810a9e786533"}, - {file = "uvloop-0.19.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:47bf3e9312f63684efe283f7342afb414eea4d3011542155c7e625cd799c3b12"}, - {file = "uvloop-0.19.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:da8435a3bd498419ee8c13c34b89b5005130a476bda1d6ca8cfdde3de35cd650"}, - {file = "uvloop-0.19.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:02506dc23a5d90e04d4f65c7791e65cf44bd91b37f24cfc3ef6cf2aff05dc7ec"}, - {file = "uvloop-0.19.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2693049be9d36fef81741fddb3f441673ba12a34a704e7b4361efb75cf30befc"}, - {file = "uvloop-0.19.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7010271303961c6f0fe37731004335401eb9075a12680738731e9c92ddd96ad6"}, - {file = "uvloop-0.19.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5daa304d2161d2918fa9a17d5635099a2f78ae5b5960e742b2fcfbb7aefaa593"}, - {file = "uvloop-0.19.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:7207272c9520203fea9b93843bb775d03e1cf88a80a936ce760f60bb5add92f3"}, - {file = "uvloop-0.19.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:78ab247f0b5671cc887c31d33f9b3abfb88d2614b84e4303f1a63b46c046c8bd"}, - {file = "uvloop-0.19.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:472d61143059c84947aa8bb74eabbace30d577a03a1805b77933d6bd13ddebbd"}, - {file = "uvloop-0.19.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45bf4c24c19fb8a50902ae37c5de50da81de4922af65baf760f7c0c42e1088be"}, - {file = "uvloop-0.19.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:271718e26b3e17906b28b67314c45d19106112067205119dddbd834c2b7ce797"}, - {file = "uvloop-0.19.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:34175c9fd2a4bc3adc1380e1261f60306344e3407c20a4d684fd5f3be010fa3d"}, - {file = "uvloop-0.19.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e27f100e1ff17f6feeb1f33968bc185bf8ce41ca557deee9d9bbbffeb72030b7"}, - {file = "uvloop-0.19.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:13dfdf492af0aa0a0edf66807d2b465607d11c4fa48f4a1fd41cbea5b18e8e8b"}, - {file = "uvloop-0.19.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6e3d4e85ac060e2342ff85e90d0c04157acb210b9ce508e784a944f852a40e67"}, - {file = "uvloop-0.19.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ca4956c9ab567d87d59d49fa3704cf29e37109ad348f2d5223c9bf761a332e7"}, - {file = "uvloop-0.19.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f467a5fd23b4fc43ed86342641f3936a68ded707f4627622fa3f82a120e18256"}, - {file = "uvloop-0.19.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:492e2c32c2af3f971473bc22f086513cedfc66a130756145a931a90c3958cb17"}, - {file = "uvloop-0.19.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2df95fca285a9f5bfe730e51945ffe2fa71ccbfdde3b0da5772b4ee4f2e770d5"}, - {file = "uvloop-0.19.0.tar.gz", hash = "sha256:0246f4fd1bf2bf702e06b0d45ee91677ee5c31242f39aab4ea6fe0c51aedd0fd"}, + {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ec7e6b09a6fdded42403182ab6b832b71f4edaf7f37a9a0e371a01db5f0cb45f"}, + {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:196274f2adb9689a289ad7d65700d37df0c0930fd8e4e743fa4834e850d7719d"}, + {file = "uvloop-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f38b2e090258d051d68a5b14d1da7203a3c3677321cf32a95a6f4db4dd8b6f26"}, + {file = "uvloop-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87c43e0f13022b998eb9b973b5e97200c8b90823454d4bc06ab33829e09fb9bb"}, + {file = "uvloop-0.21.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:10d66943def5fcb6e7b37310eb6b5639fd2ccbc38df1177262b0640c3ca68c1f"}, + {file = "uvloop-0.21.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:67dd654b8ca23aed0a8e99010b4c34aca62f4b7fce88f39d452ed7622c94845c"}, + {file = "uvloop-0.21.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c0f3fa6200b3108919f8bdabb9a7f87f20e7097ea3c543754cabc7d717d95cf8"}, + {file = "uvloop-0.21.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0878c2640cf341b269b7e128b1a5fed890adc4455513ca710d77d5e93aa6d6a0"}, + {file = "uvloop-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9fb766bb57b7388745d8bcc53a359b116b8a04c83a2288069809d2b3466c37e"}, + {file = "uvloop-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:8a375441696e2eda1c43c44ccb66e04d61ceeffcd76e4929e527b7fa401b90fb"}, + {file = "uvloop-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:baa0e6291d91649c6ba4ed4b2f982f9fa165b5bbd50a9e203c416a2797bab3c6"}, + {file = "uvloop-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4509360fcc4c3bd2c70d87573ad472de40c13387f5fda8cb58350a1d7475e58d"}, + {file = "uvloop-0.21.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:359ec2c888397b9e592a889c4d72ba3d6befba8b2bb01743f72fffbde663b59c"}, + {file = "uvloop-0.21.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7089d2dc73179ce5ac255bdf37c236a9f914b264825fdaacaded6990a7fb4c2"}, + {file = "uvloop-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baa4dcdbd9ae0a372f2167a207cd98c9f9a1ea1188a8a526431eef2f8116cc8d"}, + {file = "uvloop-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86975dca1c773a2c9864f4c52c5a55631038e387b47eaf56210f873887b6c8dc"}, + {file = "uvloop-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:461d9ae6660fbbafedd07559c6a2e57cd553b34b0065b6550685f6653a98c1cb"}, + {file = "uvloop-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:183aef7c8730e54c9a3ee3227464daed66e37ba13040bb3f350bc2ddc040f22f"}, + {file = "uvloop-0.21.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bfd55dfcc2a512316e65f16e503e9e450cab148ef11df4e4e679b5e8253a5281"}, + {file = "uvloop-0.21.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:787ae31ad8a2856fc4e7c095341cccc7209bd657d0e71ad0dc2ea83c4a6fa8af"}, + {file = "uvloop-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ee4d4ef48036ff6e5cfffb09dd192c7a5027153948d85b8da7ff705065bacc6"}, + {file = "uvloop-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3df876acd7ec037a3d005b3ab85a7e4110422e4d9c1571d4fc89b0fc41b6816"}, + {file = "uvloop-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd53ecc9a0f3d87ab847503c2e1552b690362e005ab54e8a48ba97da3924c0dc"}, + {file = "uvloop-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a5c39f217ab3c663dc699c04cbd50c13813e31d917642d459fdcec07555cc553"}, + {file = "uvloop-0.21.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:17df489689befc72c39a08359efac29bbee8eee5209650d4b9f34df73d22e414"}, + {file = "uvloop-0.21.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc09f0ff191e61c2d592a752423c767b4ebb2986daa9ed62908e2b1b9a9ae206"}, + {file = "uvloop-0.21.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0ce1b49560b1d2d8a2977e3ba4afb2414fb46b86a1b64056bc4ab929efdafbe"}, + {file = "uvloop-0.21.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e678ad6fe52af2c58d2ae3c73dc85524ba8abe637f134bf3564ed07f555c5e79"}, + {file = "uvloop-0.21.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:460def4412e473896ef179a1671b40c039c7012184b627898eea5072ef6f017a"}, + {file = "uvloop-0.21.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:10da8046cc4a8f12c91a1c39d1dd1585c41162a15caaef165c2174db9ef18bdc"}, + {file = "uvloop-0.21.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c097078b8031190c934ed0ebfee8cc5f9ba9642e6eb88322b9958b649750f72b"}, + {file = "uvloop-0.21.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:46923b0b5ee7fc0020bef24afe7836cb068f5050ca04caf6b487c513dc1a20b2"}, + {file = "uvloop-0.21.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:53e420a3afe22cdcf2a0f4846e377d16e718bc70103d7088a4f7623567ba5fb0"}, + {file = "uvloop-0.21.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88cb67cdbc0e483da00af0b2c3cdad4b7c61ceb1ee0f33fe00e09c81e3a6cb75"}, + {file = "uvloop-0.21.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:221f4f2a1f46032b403bf3be628011caf75428ee3cc204a22addf96f586b19fd"}, + {file = "uvloop-0.21.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2d1f581393673ce119355d56da84fe1dd9d2bb8b3d13ce792524e1607139feff"}, + {file = "uvloop-0.21.0.tar.gz", hash = "sha256:3bf12b0fda68447806a7ad847bfa591613177275d35b6724b1ee573faa3704e3"}, ] [package.extras] +dev = ["Cython (>=3.0,<4.0)", "setuptools (>=60)"] docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] -test = ["Cython (>=0.29.36,<0.30.0)", "aiohttp (==3.9.0b0)", "aiohttp (>=3.8.1)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=23.0.0,<23.1.0)", "pycodestyle (>=2.9.0,<2.10.0)"] +test = ["aiohttp (>=3.10.5)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=23.0.0,<23.1.0)", "pycodestyle (>=2.9.0,<2.10.0)"] [[package]] name = "virtualenv" -version = "20.25.0" +version = "20.27.1" description = "Virtual Python Environment builder" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "virtualenv-20.25.0-py3-none-any.whl", hash = "sha256:4238949c5ffe6876362d9c0180fc6c3a824a7b12b80604eeb8085f2ed7460de3"}, - {file = "virtualenv-20.25.0.tar.gz", hash = "sha256:bf51c0d9c7dd63ea8e44086fa1e4fb1093a31e963b86959257378aef020e1f1b"}, + {file = "virtualenv-20.27.1-py3-none-any.whl", hash = "sha256:f11f1b8a29525562925f745563bfd48b189450f61fb34c4f9cc79dd5aa32a1f4"}, + {file = "virtualenv-20.27.1.tar.gz", hash = "sha256:142c6be10212543b32c6c45d3d3893dff89112cc588b7d0879ae5a1ec03a47ba"}, ] [package.dependencies] @@ -2395,91 +3450,99 @@ filelock = ">=3.12.2,<4" platformdirs = ">=3.9.1,<5" [package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] [[package]] name = "watchfiles" -version = "0.21.0" +version = "0.24.0" description = "Simple, modern and high performance file watching and code reload in python." 
optional = false python-versions = ">=3.8" files = [ - {file = "watchfiles-0.21.0-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:27b4035013f1ea49c6c0b42d983133b136637a527e48c132d368eb19bf1ac6aa"}, - {file = "watchfiles-0.21.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c81818595eff6e92535ff32825f31c116f867f64ff8cdf6562cd1d6b2e1e8f3e"}, - {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6c107ea3cf2bd07199d66f156e3ea756d1b84dfd43b542b2d870b77868c98c03"}, - {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d9ac347653ebd95839a7c607608703b20bc07e577e870d824fa4801bc1cb124"}, - {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5eb86c6acb498208e7663ca22dbe68ca2cf42ab5bf1c776670a50919a56e64ab"}, - {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f564bf68404144ea6b87a78a3f910cc8de216c6b12a4cf0b27718bf4ec38d303"}, - {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d0f32ebfaa9c6011f8454994f86108c2eb9c79b8b7de00b36d558cadcedaa3d"}, - {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6d45d9b699ecbac6c7bd8e0a2609767491540403610962968d258fd6405c17c"}, - {file = "watchfiles-0.21.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:aff06b2cac3ef4616e26ba17a9c250c1fe9dd8a5d907d0193f84c499b1b6e6a9"}, - {file = "watchfiles-0.21.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d9792dff410f266051025ecfaa927078b94cc7478954b06796a9756ccc7e14a9"}, - {file = "watchfiles-0.21.0-cp310-none-win32.whl", hash = "sha256:214cee7f9e09150d4fb42e24919a1e74d8c9b8a9306ed1474ecaddcd5479c293"}, - {file = "watchfiles-0.21.0-cp310-none-win_amd64.whl", hash = "sha256:1ad7247d79f9f55bb25ab1778fd47f32d70cf36053941f07de0b7c4e96b5d235"}, - {file = "watchfiles-0.21.0-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:668c265d90de8ae914f860d3eeb164534ba2e836811f91fecc7050416ee70aa7"}, - {file = "watchfiles-0.21.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a23092a992e61c3a6a70f350a56db7197242f3490da9c87b500f389b2d01eef"}, - {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e7941bbcfdded9c26b0bf720cb7e6fd803d95a55d2c14b4bd1f6a2772230c586"}, - {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11cd0c3100e2233e9c53106265da31d574355c288e15259c0d40a4405cbae317"}, - {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d78f30cbe8b2ce770160d3c08cff01b2ae9306fe66ce899b73f0409dc1846c1b"}, - {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6674b00b9756b0af620aa2a3346b01f8e2a3dc729d25617e1b89cf6af4a54eb1"}, - {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd7ac678b92b29ba630d8c842d8ad6c555abda1b9ef044d6cc092dacbfc9719d"}, - {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c873345680c1b87f1e09e0eaf8cf6c891b9851d8b4d3645e7efe2ec20a20cc7"}, - {file = "watchfiles-0.21.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:49f56e6ecc2503e7dbe233fa328b2be1a7797d31548e7a193237dcdf1ad0eee0"}, - {file = "watchfiles-0.21.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:02d91cbac553a3ad141db016e3350b03184deaafeba09b9d6439826ee594b365"}, - {file = "watchfiles-0.21.0-cp311-none-win32.whl", hash = "sha256:ebe684d7d26239e23d102a2bad2a358dedf18e462e8808778703427d1f584400"}, - {file = "watchfiles-0.21.0-cp311-none-win_amd64.whl", hash = "sha256:4566006aa44cb0d21b8ab53baf4b9c667a0ed23efe4aaad8c227bfba0bf15cbe"}, - {file = "watchfiles-0.21.0-cp311-none-win_arm64.whl", hash = "sha256:c550a56bf209a3d987d5a975cdf2063b3389a5d16caf29db4bdddeae49f22078"}, - {file = "watchfiles-0.21.0-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:51ddac60b96a42c15d24fbdc7a4bfcd02b5a29c047b7f8bf63d3f6f5a860949a"}, - {file = "watchfiles-0.21.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:511f0b034120cd1989932bf1e9081aa9fb00f1f949fbd2d9cab6264916ae89b1"}, - {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:cfb92d49dbb95ec7a07511bc9efb0faff8fe24ef3805662b8d6808ba8409a71a"}, - {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f92944efc564867bbf841c823c8b71bb0be75e06b8ce45c084b46411475a915"}, - {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:642d66b75eda909fd1112d35c53816d59789a4b38c141a96d62f50a3ef9b3360"}, - {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d23bcd6c8eaa6324fe109d8cac01b41fe9a54b8c498af9ce464c1aeeb99903d6"}, - {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18d5b4da8cf3e41895b34e8c37d13c9ed294954907929aacd95153508d5d89d7"}, - {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b8d1eae0f65441963d805f766c7e9cd092f91e0c600c820c764a4ff71a0764c"}, - {file = "watchfiles-0.21.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1fd9a5205139f3c6bb60d11f6072e0552f0a20b712c85f43d42342d162be1235"}, - {file = "watchfiles-0.21.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a1e3014a625bcf107fbf38eece0e47fa0190e52e45dc6eee5a8265ddc6dc5ea7"}, - {file = "watchfiles-0.21.0-cp312-none-win32.whl", hash = "sha256:9d09869f2c5a6f2d9df50ce3064b3391d3ecb6dced708ad64467b9e4f2c9bef3"}, - {file = "watchfiles-0.21.0-cp312-none-win_amd64.whl", hash = "sha256:18722b50783b5e30a18a8a5db3006bab146d2b705c92eb9a94f78c72beb94094"}, - {file = "watchfiles-0.21.0-cp312-none-win_arm64.whl", hash = "sha256:a3b9bec9579a15fb3ca2d9878deae789df72f2b0fdaf90ad49ee389cad5edab6"}, - {file = "watchfiles-0.21.0-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:4ea10a29aa5de67de02256a28d1bf53d21322295cb00bd2d57fcd19b850ebd99"}, - {file = "watchfiles-0.21.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:40bca549fdc929b470dd1dbfcb47b3295cb46a6d2c90e50588b0a1b3bd98f429"}, - {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9b37a7ba223b2f26122c148bb8d09a9ff312afca998c48c725ff5a0a632145f7"}, - {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec8c8900dc5c83650a63dd48c4d1d245343f904c4b64b48798c67a3767d7e165"}, - {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8ad3fe0a3567c2f0f629d800409cd528cb6251da12e81a1f765e5c5345fd0137"}, - {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d353c4cfda586db2a176ce42c88f2fc31ec25e50212650c89fdd0f560ee507b"}, - {file = 
"watchfiles-0.21.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:83a696da8922314ff2aec02987eefb03784f473281d740bf9170181829133765"}, - {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a03651352fc20975ee2a707cd2d74a386cd303cc688f407296064ad1e6d1562"}, - {file = "watchfiles-0.21.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3ad692bc7792be8c32918c699638b660c0de078a6cbe464c46e1340dadb94c19"}, - {file = "watchfiles-0.21.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06247538e8253975bdb328e7683f8515ff5ff041f43be6c40bff62d989b7d0b0"}, - {file = "watchfiles-0.21.0-cp38-none-win32.whl", hash = "sha256:9a0aa47f94ea9a0b39dd30850b0adf2e1cd32a8b4f9c7aa443d852aacf9ca214"}, - {file = "watchfiles-0.21.0-cp38-none-win_amd64.whl", hash = "sha256:8d5f400326840934e3507701f9f7269247f7c026d1b6cfd49477d2be0933cfca"}, - {file = "watchfiles-0.21.0-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:7f762a1a85a12cc3484f77eee7be87b10f8c50b0b787bb02f4e357403cad0c0e"}, - {file = "watchfiles-0.21.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6e9be3ef84e2bb9710f3f777accce25556f4a71e15d2b73223788d528fcc2052"}, - {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4c48a10d17571d1275701e14a601e36959ffada3add8cdbc9e5061a6e3579a5d"}, - {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c889025f59884423428c261f212e04d438de865beda0b1e1babab85ef4c0f01"}, - {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:66fac0c238ab9a2e72d026b5fb91cb902c146202bbd29a9a1a44e8db7b710b6f"}, - {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b4a21f71885aa2744719459951819e7bf5a906a6448a6b2bbce8e9cc9f2c8128"}, - {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c9198c989f47898b2c22201756f73249de3748e0fc9de44adaf54a8b259cc0c"}, - {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8f57c4461cd24fda22493109c45b3980863c58a25b8bec885ca8bea6b8d4b28"}, - {file = "watchfiles-0.21.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:853853cbf7bf9408b404754b92512ebe3e3a83587503d766d23e6bf83d092ee6"}, - {file = "watchfiles-0.21.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d5b1dc0e708fad9f92c296ab2f948af403bf201db8fb2eb4c8179db143732e49"}, - {file = "watchfiles-0.21.0-cp39-none-win32.whl", hash = "sha256:59137c0c6826bd56c710d1d2bda81553b5e6b7c84d5a676747d80caf0409ad94"}, - {file = "watchfiles-0.21.0-cp39-none-win_amd64.whl", hash = "sha256:6cb8fdc044909e2078c248986f2fc76f911f72b51ea4a4fbbf472e01d14faa58"}, - {file = "watchfiles-0.21.0-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ab03a90b305d2588e8352168e8c5a1520b721d2d367f31e9332c4235b30b8994"}, - {file = "watchfiles-0.21.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:927c589500f9f41e370b0125c12ac9e7d3a2fd166b89e9ee2828b3dda20bfe6f"}, - {file = "watchfiles-0.21.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bd467213195e76f838caf2c28cd65e58302d0254e636e7c0fca81efa4a2e62c"}, - {file = "watchfiles-0.21.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02b73130687bc3f6bb79d8a170959042eb56eb3a42df3671c79b428cd73f17cc"}, - {file = "watchfiles-0.21.0-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = 
"sha256:08dca260e85ffae975448e344834d765983237ad6dc308231aa16e7933db763e"}, - {file = "watchfiles-0.21.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:3ccceb50c611c433145502735e0370877cced72a6c70fd2410238bcbc7fe51d8"}, - {file = "watchfiles-0.21.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57d430f5fb63fea141ab71ca9c064e80de3a20b427ca2febcbfcef70ff0ce895"}, - {file = "watchfiles-0.21.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dd5fad9b9c0dd89904bbdea978ce89a2b692a7ee8a0ce19b940e538c88a809c"}, - {file = "watchfiles-0.21.0-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:be6dd5d52b73018b21adc1c5d28ac0c68184a64769052dfeb0c5d9998e7f56a2"}, - {file = "watchfiles-0.21.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b3cab0e06143768499384a8a5efb9c4dc53e19382952859e4802f294214f36ec"}, - {file = "watchfiles-0.21.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c6ed10c2497e5fedadf61e465b3ca12a19f96004c15dcffe4bd442ebadc2d85"}, - {file = "watchfiles-0.21.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43babacef21c519bc6631c5fce2a61eccdfc011b4bcb9047255e9620732c8097"}, - {file = "watchfiles-0.21.0.tar.gz", hash = "sha256:c76c635fabf542bb78524905718c39f736a98e5ab25b23ec6d4abede1a85a6a3"}, + {file = "watchfiles-0.24.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:083dc77dbdeef09fa44bb0f4d1df571d2e12d8a8f985dccde71ac3ac9ac067a0"}, + {file = "watchfiles-0.24.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e94e98c7cb94cfa6e071d401ea3342767f28eb5a06a58fafdc0d2a4974f4f35c"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82ae557a8c037c42a6ef26c494d0631cacca040934b101d001100ed93d43f361"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:acbfa31e315a8f14fe33e3542cbcafc55703b8f5dcbb7c1eecd30f141df50db3"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b74fdffce9dfcf2dc296dec8743e5b0332d15df19ae464f0e249aa871fc1c571"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:449f43f49c8ddca87c6b3980c9284cab6bd1f5c9d9a2b00012adaaccd5e7decd"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4abf4ad269856618f82dee296ac66b0cd1d71450fc3c98532d93798e73399b7a"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f895d785eb6164678ff4bb5cc60c5996b3ee6df3edb28dcdeba86a13ea0465e"}, + {file = "watchfiles-0.24.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7ae3e208b31be8ce7f4c2c0034f33406dd24fbce3467f77223d10cd86778471c"}, + {file = "watchfiles-0.24.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2efec17819b0046dde35d13fb8ac7a3ad877af41ae4640f4109d9154ed30a188"}, + {file = "watchfiles-0.24.0-cp310-none-win32.whl", hash = "sha256:6bdcfa3cd6fdbdd1a068a52820f46a815401cbc2cb187dd006cb076675e7b735"}, + {file = "watchfiles-0.24.0-cp310-none-win_amd64.whl", hash = "sha256:54ca90a9ae6597ae6dc00e7ed0a040ef723f84ec517d3e7ce13e63e4bc82fa04"}, + {file = "watchfiles-0.24.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:bdcd5538e27f188dd3c804b4a8d5f52a7fc7f87e7fd6b374b8e36a4ca03db428"}, + {file = "watchfiles-0.24.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:2dadf8a8014fde6addfd3c379e6ed1a981c8f0a48292d662e27cabfe4239c83c"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6509ed3f467b79d95fc62a98229f79b1a60d1b93f101e1c61d10c95a46a84f43"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8360f7314a070c30e4c976b183d1d8d1585a4a50c5cb603f431cebcbb4f66327"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:316449aefacf40147a9efaf3bd7c9bdd35aaba9ac5d708bd1eb5763c9a02bef5"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73bde715f940bea845a95247ea3e5eb17769ba1010efdc938ffcb967c634fa61"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3770e260b18e7f4e576edca4c0a639f704088602e0bc921c5c2e721e3acb8d15"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa0fd7248cf533c259e59dc593a60973a73e881162b1a2f73360547132742823"}, + {file = "watchfiles-0.24.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d7a2e3b7f5703ffbd500dabdefcbc9eafeff4b9444bbdd5d83d79eedf8428fab"}, + {file = "watchfiles-0.24.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d831ee0a50946d24a53821819b2327d5751b0c938b12c0653ea5be7dea9c82ec"}, + {file = "watchfiles-0.24.0-cp311-none-win32.whl", hash = "sha256:49d617df841a63b4445790a254013aea2120357ccacbed00253f9c2b5dc24e2d"}, + {file = "watchfiles-0.24.0-cp311-none-win_amd64.whl", hash = "sha256:d3dcb774e3568477275cc76554b5a565024b8ba3a0322f77c246bc7111c5bb9c"}, + {file = "watchfiles-0.24.0-cp311-none-win_arm64.whl", hash = "sha256:9301c689051a4857d5b10777da23fafb8e8e921bcf3abe6448a058d27fb67633"}, + {file = "watchfiles-0.24.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7211b463695d1e995ca3feb38b69227e46dbd03947172585ecb0588f19b0d87a"}, + {file = "watchfiles-0.24.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4b8693502d1967b00f2fb82fc1e744df128ba22f530e15b763c8d82baee15370"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdab9555053399318b953a1fe1f586e945bc8d635ce9d05e617fd9fe3a4687d6"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:34e19e56d68b0dad5cff62273107cf5d9fbaf9d75c46277aa5d803b3ef8a9e9b"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:41face41f036fee09eba33a5b53a73e9a43d5cb2c53dad8e61fa6c9f91b5a51e"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5148c2f1ea043db13ce9b0c28456e18ecc8f14f41325aa624314095b6aa2e9ea"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e4bd963a935aaf40b625c2499f3f4f6bbd0c3776f6d3bc7c853d04824ff1c9f"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c79d7719d027b7a42817c5d96461a99b6a49979c143839fc37aa5748c322f234"}, + {file = "watchfiles-0.24.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:32aa53a9a63b7f01ed32e316e354e81e9da0e6267435c7243bf8ae0f10b428ef"}, + {file = "watchfiles-0.24.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ce72dba6a20e39a0c628258b5c308779b8697f7676c254a845715e2a1039b968"}, + {file = "watchfiles-0.24.0-cp312-none-win32.whl", hash = 
"sha256:d9018153cf57fc302a2a34cb7564870b859ed9a732d16b41a9b5cb2ebed2d444"}, + {file = "watchfiles-0.24.0-cp312-none-win_amd64.whl", hash = "sha256:551ec3ee2a3ac9cbcf48a4ec76e42c2ef938a7e905a35b42a1267fa4b1645896"}, + {file = "watchfiles-0.24.0-cp312-none-win_arm64.whl", hash = "sha256:b52a65e4ea43c6d149c5f8ddb0bef8d4a1e779b77591a458a893eb416624a418"}, + {file = "watchfiles-0.24.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:3d2e3ab79a1771c530233cadfd277fcc762656d50836c77abb2e5e72b88e3a48"}, + {file = "watchfiles-0.24.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:327763da824817b38ad125dcd97595f942d720d32d879f6c4ddf843e3da3fe90"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd82010f8ab451dabe36054a1622870166a67cf3fce894f68895db6f74bbdc94"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d64ba08db72e5dfd5c33be1e1e687d5e4fcce09219e8aee893a4862034081d4e"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1cf1f6dd7825053f3d98f6d33f6464ebdd9ee95acd74ba2c34e183086900a827"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:43e3e37c15a8b6fe00c1bce2473cfa8eb3484bbeecf3aefbf259227e487a03df"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88bcd4d0fe1d8ff43675360a72def210ebad3f3f72cabfeac08d825d2639b4ab"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:999928c6434372fde16c8f27143d3e97201160b48a614071261701615a2a156f"}, + {file = "watchfiles-0.24.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:30bbd525c3262fd9f4b1865cb8d88e21161366561cd7c9e1194819e0a33ea86b"}, + {file = "watchfiles-0.24.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:edf71b01dec9f766fb285b73930f95f730bb0943500ba0566ae234b5c1618c18"}, + {file = "watchfiles-0.24.0-cp313-none-win32.whl", hash = "sha256:f4c96283fca3ee09fb044f02156d9570d156698bc3734252175a38f0e8975f07"}, + {file = "watchfiles-0.24.0-cp313-none-win_amd64.whl", hash = "sha256:a974231b4fdd1bb7f62064a0565a6b107d27d21d9acb50c484d2cdba515b9366"}, + {file = "watchfiles-0.24.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:ee82c98bed9d97cd2f53bdb035e619309a098ea53ce525833e26b93f673bc318"}, + {file = "watchfiles-0.24.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fd92bbaa2ecdb7864b7600dcdb6f2f1db6e0346ed425fbd01085be04c63f0b05"}, + {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f83df90191d67af5a831da3a33dd7628b02a95450e168785586ed51e6d28943c"}, + {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fca9433a45f18b7c779d2bae7beeec4f740d28b788b117a48368d95a3233ed83"}, + {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b995bfa6bf01a9e09b884077a6d37070464b529d8682d7691c2d3b540d357a0c"}, + {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ed9aba6e01ff6f2e8285e5aa4154e2970068fe0fc0998c4380d0e6278222269b"}, + {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5171ef898299c657685306d8e1478a45e9303ddcd8ac5fed5bd52ad4ae0b69b"}, + {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:4933a508d2f78099162da473841c652ad0de892719043d3f07cc83b33dfd9d91"}, + {file = "watchfiles-0.24.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:95cf3b95ea665ab03f5a54765fa41abf0529dbaf372c3b83d91ad2cfa695779b"}, + {file = "watchfiles-0.24.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:01def80eb62bd5db99a798d5e1f5f940ca0a05986dcfae21d833af7a46f7ee22"}, + {file = "watchfiles-0.24.0-cp38-none-win32.whl", hash = "sha256:4d28cea3c976499475f5b7a2fec6b3a36208656963c1a856d328aeae056fc5c1"}, + {file = "watchfiles-0.24.0-cp38-none-win_amd64.whl", hash = "sha256:21ab23fdc1208086d99ad3f69c231ba265628014d4aed31d4e8746bd59e88cd1"}, + {file = "watchfiles-0.24.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b665caeeda58625c3946ad7308fbd88a086ee51ccb706307e5b1fa91556ac886"}, + {file = "watchfiles-0.24.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5c51749f3e4e269231510da426ce4a44beb98db2dce9097225c338f815b05d4f"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82b2509f08761f29a0fdad35f7e1638b8ab1adfa2666d41b794090361fb8b855"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9a60e2bf9dc6afe7f743e7c9b149d1fdd6dbf35153c78fe3a14ae1a9aee3d98b"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f7d9b87c4c55e3ea8881dfcbf6d61ea6775fffed1fedffaa60bd047d3c08c430"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:78470906a6be5199524641f538bd2c56bb809cd4bf29a566a75051610bc982c3"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:07cdef0c84c03375f4e24642ef8d8178e533596b229d32d2bbd69e5128ede02a"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d337193bbf3e45171c8025e291530fb7548a93c45253897cd764a6a71c937ed9"}, + {file = "watchfiles-0.24.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ec39698c45b11d9694a1b635a70946a5bad066b593af863460a8e600f0dff1ca"}, + {file = "watchfiles-0.24.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2e28d91ef48eab0afb939fa446d8ebe77e2f7593f5f463fd2bb2b14132f95b6e"}, + {file = "watchfiles-0.24.0-cp39-none-win32.whl", hash = "sha256:7138eff8baa883aeaa074359daabb8b6c1e73ffe69d5accdc907d62e50b1c0da"}, + {file = "watchfiles-0.24.0-cp39-none-win_amd64.whl", hash = "sha256:b3ef2c69c655db63deb96b3c3e587084612f9b1fa983df5e0c3379d41307467f"}, + {file = "watchfiles-0.24.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:632676574429bee8c26be8af52af20e0c718cc7f5f67f3fb658c71928ccd4f7f"}, + {file = "watchfiles-0.24.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:a2a9891723a735d3e2540651184be6fd5b96880c08ffe1a98bae5017e65b544b"}, + {file = "watchfiles-0.24.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a7fa2bc0efef3e209a8199fd111b8969fe9db9c711acc46636686331eda7dd4"}, + {file = "watchfiles-0.24.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01550ccf1d0aed6ea375ef259706af76ad009ef5b0203a3a4cce0f6024f9b68a"}, + {file = "watchfiles-0.24.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:96619302d4374de5e2345b2b622dc481257a99431277662c30f606f3e22f42be"}, + {file = "watchfiles-0.24.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:85d5f0c7771dcc7a26c7a27145059b6bb0ce06e4e751ed76cdf123d7039b60b5"}, + {file = 
"watchfiles-0.24.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:951088d12d339690a92cef2ec5d3cfd957692834c72ffd570ea76a6790222777"}, + {file = "watchfiles-0.24.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49fb58bcaa343fedc6a9e91f90195b20ccb3135447dc9e4e2570c3a39565853e"}, + {file = "watchfiles-0.24.0.tar.gz", hash = "sha256:afb72325b74fa7a428c009c1b8be4b4d7c2afedafb2982827ef2156646df2fe1"}, ] [package.dependencies] @@ -2487,83 +3550,80 @@ anyio = ">=3.0.0" [[package]] name = "websockets" -version = "12.0" +version = "14.1" description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" optional = false -python-versions = ">=3.8" -files = [ - {file = "websockets-12.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d554236b2a2006e0ce16315c16eaa0d628dab009c33b63ea03f41c6107958374"}, - {file = "websockets-12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2d225bb6886591b1746b17c0573e29804619c8f755b5598d875bb4235ea639be"}, - {file = "websockets-12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eb809e816916a3b210bed3c82fb88eaf16e8afcf9c115ebb2bacede1797d2547"}, - {file = "websockets-12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c588f6abc13f78a67044c6b1273a99e1cf31038ad51815b3b016ce699f0d75c2"}, - {file = "websockets-12.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5aa9348186d79a5f232115ed3fa9020eab66d6c3437d72f9d2c8ac0c6858c558"}, - {file = "websockets-12.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6350b14a40c95ddd53e775dbdbbbc59b124a5c8ecd6fbb09c2e52029f7a9f480"}, - {file = "websockets-12.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:70ec754cc2a769bcd218ed8d7209055667b30860ffecb8633a834dde27d6307c"}, - {file = "websockets-12.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6e96f5ed1b83a8ddb07909b45bd94833b0710f738115751cdaa9da1fb0cb66e8"}, - {file = "websockets-12.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4d87be612cbef86f994178d5186add3d94e9f31cc3cb499a0482b866ec477603"}, - {file = "websockets-12.0-cp310-cp310-win32.whl", hash = "sha256:befe90632d66caaf72e8b2ed4d7f02b348913813c8b0a32fae1cc5fe3730902f"}, - {file = "websockets-12.0-cp310-cp310-win_amd64.whl", hash = "sha256:363f57ca8bc8576195d0540c648aa58ac18cf85b76ad5202b9f976918f4219cf"}, - {file = "websockets-12.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5d873c7de42dea355d73f170be0f23788cf3fa9f7bed718fd2830eefedce01b4"}, - {file = "websockets-12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3f61726cae9f65b872502ff3c1496abc93ffbe31b278455c418492016e2afc8f"}, - {file = "websockets-12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed2fcf7a07334c77fc8a230755c2209223a7cc44fc27597729b8ef5425aa61a3"}, - {file = "websockets-12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e332c210b14b57904869ca9f9bf4ca32f5427a03eeb625da9b616c85a3a506c"}, - {file = "websockets-12.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5693ef74233122f8ebab026817b1b37fe25c411ecfca084b29bc7d6efc548f45"}, - {file = "websockets-12.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e9e7db18b4539a29cc5ad8c8b252738a30e2b13f033c2d6e9d0549b45841c04"}, - {file = 
"websockets-12.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6e2df67b8014767d0f785baa98393725739287684b9f8d8a1001eb2839031447"}, - {file = "websockets-12.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bea88d71630c5900690fcb03161ab18f8f244805c59e2e0dc4ffadae0a7ee0ca"}, - {file = "websockets-12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dff6cdf35e31d1315790149fee351f9e52978130cef6c87c4b6c9b3baf78bc53"}, - {file = "websockets-12.0-cp311-cp311-win32.whl", hash = "sha256:3e3aa8c468af01d70332a382350ee95f6986db479ce7af14d5e81ec52aa2b402"}, - {file = "websockets-12.0-cp311-cp311-win_amd64.whl", hash = "sha256:25eb766c8ad27da0f79420b2af4b85d29914ba0edf69f547cc4f06ca6f1d403b"}, - {file = "websockets-12.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0e6e2711d5a8e6e482cacb927a49a3d432345dfe7dea8ace7b5790df5932e4df"}, - {file = "websockets-12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:dbcf72a37f0b3316e993e13ecf32f10c0e1259c28ffd0a85cee26e8549595fbc"}, - {file = "websockets-12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12743ab88ab2af1d17dd4acb4645677cb7063ef4db93abffbf164218a5d54c6b"}, - {file = "websockets-12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b645f491f3c48d3f8a00d1fce07445fab7347fec54a3e65f0725d730d5b99cb"}, - {file = "websockets-12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9893d1aa45a7f8b3bc4510f6ccf8db8c3b62120917af15e3de247f0780294b92"}, - {file = "websockets-12.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f38a7b376117ef7aff996e737583172bdf535932c9ca021746573bce40165ed"}, - {file = "websockets-12.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f764ba54e33daf20e167915edc443b6f88956f37fb606449b4a5b10ba42235a5"}, - {file = "websockets-12.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1e4b3f8ea6a9cfa8be8484c9221ec0257508e3a1ec43c36acdefb2a9c3b00aa2"}, - {file = "websockets-12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9fdf06fd06c32205a07e47328ab49c40fc1407cdec801d698a7c41167ea45113"}, - {file = "websockets-12.0-cp312-cp312-win32.whl", hash = "sha256:baa386875b70cbd81798fa9f71be689c1bf484f65fd6fb08d051a0ee4e79924d"}, - {file = "websockets-12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ae0a5da8f35a5be197f328d4727dbcfafa53d1824fac3d96cdd3a642fe09394f"}, - {file = "websockets-12.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5f6ffe2c6598f7f7207eef9a1228b6f5c818f9f4d53ee920aacd35cec8110438"}, - {file = "websockets-12.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9edf3fc590cc2ec20dc9d7a45108b5bbaf21c0d89f9fd3fd1685e223771dc0b2"}, - {file = "websockets-12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8572132c7be52632201a35f5e08348137f658e5ffd21f51f94572ca6c05ea81d"}, - {file = "websockets-12.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:604428d1b87edbf02b233e2c207d7d528460fa978f9e391bd8aaf9c8311de137"}, - {file = "websockets-12.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1a9d160fd080c6285e202327aba140fc9a0d910b09e423afff4ae5cbbf1c7205"}, - {file = "websockets-12.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87b4aafed34653e465eb77b7c93ef058516cb5acf3eb21e42f33928616172def"}, - {file = "websockets-12.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:b2ee7288b85959797970114deae81ab41b731f19ebcd3bd499ae9ca0e3f1d2c8"}, - {file = "websockets-12.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7fa3d25e81bfe6a89718e9791128398a50dec6d57faf23770787ff441d851967"}, - {file = "websockets-12.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a571f035a47212288e3b3519944f6bf4ac7bc7553243e41eac50dd48552b6df7"}, - {file = "websockets-12.0-cp38-cp38-win32.whl", hash = "sha256:3c6cc1360c10c17463aadd29dd3af332d4a1adaa8796f6b0e9f9df1fdb0bad62"}, - {file = "websockets-12.0-cp38-cp38-win_amd64.whl", hash = "sha256:1bf386089178ea69d720f8db6199a0504a406209a0fc23e603b27b300fdd6892"}, - {file = "websockets-12.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ab3d732ad50a4fbd04a4490ef08acd0517b6ae6b77eb967251f4c263011a990d"}, - {file = "websockets-12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1d9697f3337a89691e3bd8dc56dea45a6f6d975f92e7d5f773bc715c15dde28"}, - {file = "websockets-12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1df2fbd2c8a98d38a66f5238484405b8d1d16f929bb7a33ed73e4801222a6f53"}, - {file = "websockets-12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23509452b3bc38e3a057382c2e941d5ac2e01e251acce7adc74011d7d8de434c"}, - {file = "websockets-12.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e5fc14ec6ea568200ea4ef46545073da81900a2b67b3e666f04adf53ad452ec"}, - {file = "websockets-12.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46e71dbbd12850224243f5d2aeec90f0aaa0f2dde5aeeb8fc8df21e04d99eff9"}, - {file = "websockets-12.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b81f90dcc6c85a9b7f29873beb56c94c85d6f0dac2ea8b60d995bd18bf3e2aae"}, - {file = "websockets-12.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a02413bc474feda2849c59ed2dfb2cddb4cd3d2f03a2fedec51d6e959d9b608b"}, - {file = "websockets-12.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bbe6013f9f791944ed31ca08b077e26249309639313fff132bfbf3ba105673b9"}, - {file = "websockets-12.0-cp39-cp39-win32.whl", hash = "sha256:cbe83a6bbdf207ff0541de01e11904827540aa069293696dd528a6640bd6a5f6"}, - {file = "websockets-12.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc4e7fa5414512b481a2483775a8e8be7803a35b30ca805afa4998a84f9fd9e8"}, - {file = "websockets-12.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:248d8e2446e13c1d4326e0a6a4e9629cb13a11195051a73acf414812700badbd"}, - {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44069528d45a933997a6fef143030d8ca8042f0dfaad753e2906398290e2870"}, - {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c4e37d36f0d19f0a4413d3e18c0d03d0c268ada2061868c1e6f5ab1a6d575077"}, - {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d829f975fc2e527a3ef2f9c8f25e553eb7bc779c6665e8e1d52aa22800bb38b"}, - {file = "websockets-12.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2c71bd45a777433dd9113847af751aae36e448bc6b8c361a566cb043eda6ec30"}, - {file = "websockets-12.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0bee75f400895aef54157b36ed6d3b308fcab62e5260703add87f44cee9c82a6"}, - {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:423fc1ed29f7512fceb727e2d2aecb952c46aa34895e9ed96071821309951123"}, - {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27a5e9964ef509016759f2ef3f2c1e13f403725a5e6a1775555994966a66e931"}, - {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3181df4583c4d3994d31fb235dc681d2aaad744fbdbf94c4802485ececdecf2"}, - {file = "websockets-12.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b067cb952ce8bf40115f6c19f478dc71c5e719b7fbaa511359795dfd9d1a6468"}, - {file = "websockets-12.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:00700340c6c7ab788f176d118775202aadea7602c5cc6be6ae127761c16d6b0b"}, - {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e469d01137942849cff40517c97a30a93ae79917752b34029f0ec72df6b46399"}, - {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffefa1374cd508d633646d51a8e9277763a9b78ae71324183693959cf94635a7"}, - {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba0cab91b3956dfa9f512147860783a1829a8d905ee218a9837c18f683239611"}, - {file = "websockets-12.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2cb388a5bfb56df4d9a406783b7f9dbefb888c09b71629351cc6b036e9259370"}, - {file = "websockets-12.0-py3-none-any.whl", hash = "sha256:dc284bbc8d7c78a6c69e0c7325ab46ee5e40bb4d50e494d8131a07ef47500e9e"}, - {file = "websockets-12.0.tar.gz", hash = "sha256:81df9cbcbb6c260de1e007e58c011bfebe2dafc8435107b0537f393dd38c8b1b"}, +python-versions = ">=3.9" +files = [ + {file = "websockets-14.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a0adf84bc2e7c86e8a202537b4fd50e6f7f0e4a6b6bf64d7ccb96c4cd3330b29"}, + {file = "websockets-14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90b5d9dfbb6d07a84ed3e696012610b6da074d97453bd01e0e30744b472c8179"}, + {file = "websockets-14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2177ee3901075167f01c5e335a6685e71b162a54a89a56001f1c3e9e3d2ad250"}, + {file = "websockets-14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f14a96a0034a27f9d47fd9788913924c89612225878f8078bb9d55f859272b0"}, + {file = "websockets-14.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f874ba705deea77bcf64a9da42c1f5fc2466d8f14daf410bc7d4ceae0a9fcb0"}, + {file = "websockets-14.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9607b9a442392e690a57909c362811184ea429585a71061cd5d3c2b98065c199"}, + {file = "websockets-14.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:bea45f19b7ca000380fbd4e02552be86343080120d074b87f25593ce1700ad58"}, + {file = "websockets-14.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:219c8187b3ceeadbf2afcf0f25a4918d02da7b944d703b97d12fb01510869078"}, + {file = "websockets-14.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ad2ab2547761d79926effe63de21479dfaf29834c50f98c4bf5b5480b5838434"}, + {file = "websockets-14.1-cp310-cp310-win32.whl", hash = "sha256:1288369a6a84e81b90da5dbed48610cd7e5d60af62df9851ed1d1d23a9069f10"}, + {file = "websockets-14.1-cp310-cp310-win_amd64.whl", hash = 
"sha256:e0744623852f1497d825a49a99bfbec9bea4f3f946df6eb9d8a2f0c37a2fec2e"}, + {file = "websockets-14.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:449d77d636f8d9c17952628cc7e3b8faf6e92a17ec581ec0c0256300717e1512"}, + {file = "websockets-14.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a35f704be14768cea9790d921c2c1cc4fc52700410b1c10948511039be824aac"}, + {file = "websockets-14.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b1f3628a0510bd58968c0f60447e7a692933589b791a6b572fcef374053ca280"}, + {file = "websockets-14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c3deac3748ec73ef24fc7be0b68220d14d47d6647d2f85b2771cb35ea847aa1"}, + {file = "websockets-14.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7048eb4415d46368ef29d32133134c513f507fff7d953c18c91104738a68c3b3"}, + {file = "websockets-14.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6cf0ad281c979306a6a34242b371e90e891bce504509fb6bb5246bbbf31e7b6"}, + {file = "websockets-14.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cc1fc87428c1d18b643479caa7b15db7d544652e5bf610513d4a3478dbe823d0"}, + {file = "websockets-14.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f95ba34d71e2fa0c5d225bde3b3bdb152e957150100e75c86bc7f3964c450d89"}, + {file = "websockets-14.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9481a6de29105d73cf4515f2bef8eb71e17ac184c19d0b9918a3701c6c9c4f23"}, + {file = "websockets-14.1-cp311-cp311-win32.whl", hash = "sha256:368a05465f49c5949e27afd6fbe0a77ce53082185bbb2ac096a3a8afaf4de52e"}, + {file = "websockets-14.1-cp311-cp311-win_amd64.whl", hash = "sha256:6d24fc337fc055c9e83414c94e1ee0dee902a486d19d2a7f0929e49d7d604b09"}, + {file = "websockets-14.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ed907449fe5e021933e46a3e65d651f641975a768d0649fee59f10c2985529ed"}, + {file = "websockets-14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:87e31011b5c14a33b29f17eb48932e63e1dcd3fa31d72209848652310d3d1f0d"}, + {file = "websockets-14.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bc6ccf7d54c02ae47a48ddf9414c54d48af9c01076a2e1023e3b486b6e72c707"}, + {file = "websockets-14.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9777564c0a72a1d457f0848977a1cbe15cfa75fa2f67ce267441e465717dcf1a"}, + {file = "websockets-14.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a655bde548ca98f55b43711b0ceefd2a88a71af6350b0c168aa77562104f3f45"}, + {file = "websockets-14.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3dfff83ca578cada2d19e665e9c8368e1598d4e787422a460ec70e531dbdd58"}, + {file = "websockets-14.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6a6c9bcf7cdc0fd41cc7b7944447982e8acfd9f0d560ea6d6845428ed0562058"}, + {file = "websockets-14.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4b6caec8576e760f2c7dd878ba817653144d5f369200b6ddf9771d64385b84d4"}, + {file = "websockets-14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:eb6d38971c800ff02e4a6afd791bbe3b923a9a57ca9aeab7314c21c84bf9ff05"}, + {file = "websockets-14.1-cp312-cp312-win32.whl", hash = "sha256:1d045cbe1358d76b24d5e20e7b1878efe578d9897a25c24e6006eef788c0fdf0"}, + {file = "websockets-14.1-cp312-cp312-win_amd64.whl", hash = 
"sha256:90f4c7a069c733d95c308380aae314f2cb45bd8a904fb03eb36d1a4983a4993f"}, + {file = "websockets-14.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:3630b670d5057cd9e08b9c4dab6493670e8e762a24c2c94ef312783870736ab9"}, + {file = "websockets-14.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:36ebd71db3b89e1f7b1a5deaa341a654852c3518ea7a8ddfdf69cc66acc2db1b"}, + {file = "websockets-14.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5b918d288958dc3fa1c5a0b9aa3256cb2b2b84c54407f4813c45d52267600cd3"}, + {file = "websockets-14.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00fe5da3f037041da1ee0cf8e308374e236883f9842c7c465aa65098b1c9af59"}, + {file = "websockets-14.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8149a0f5a72ca36720981418eeffeb5c2729ea55fa179091c81a0910a114a5d2"}, + {file = "websockets-14.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77569d19a13015e840b81550922056acabc25e3f52782625bc6843cfa034e1da"}, + {file = "websockets-14.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cf5201a04550136ef870aa60ad3d29d2a59e452a7f96b94193bee6d73b8ad9a9"}, + {file = "websockets-14.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:88cf9163ef674b5be5736a584c999e98daf3aabac6e536e43286eb74c126b9c7"}, + {file = "websockets-14.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:836bef7ae338a072e9d1863502026f01b14027250a4545672673057997d5c05a"}, + {file = "websockets-14.1-cp313-cp313-win32.whl", hash = "sha256:0d4290d559d68288da9f444089fd82490c8d2744309113fc26e2da6e48b65da6"}, + {file = "websockets-14.1-cp313-cp313-win_amd64.whl", hash = "sha256:8621a07991add373c3c5c2cf89e1d277e49dc82ed72c75e3afc74bd0acc446f0"}, + {file = "websockets-14.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:01bb2d4f0a6d04538d3c5dfd27c0643269656c28045a53439cbf1c004f90897a"}, + {file = "websockets-14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:414ffe86f4d6f434a8c3b7913655a1a5383b617f9bf38720e7c0799fac3ab1c6"}, + {file = "websockets-14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8fda642151d5affdee8a430bd85496f2e2517be3a2b9d2484d633d5712b15c56"}, + {file = "websockets-14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd7c11968bc3860d5c78577f0dbc535257ccec41750675d58d8dc66aa47fe52c"}, + {file = "websockets-14.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a032855dc7db987dff813583d04f4950d14326665d7e714d584560b140ae6b8b"}, + {file = "websockets-14.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7e7ea2f782408c32d86b87a0d2c1fd8871b0399dd762364c731d86c86069a78"}, + {file = "websockets-14.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:39450e6215f7d9f6f7bc2a6da21d79374729f5d052333da4d5825af8a97e6735"}, + {file = "websockets-14.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ceada5be22fa5a5a4cdeec74e761c2ee7db287208f54c718f2df4b7e200b8d4a"}, + {file = "websockets-14.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3fc753451d471cff90b8f467a1fc0ae64031cf2d81b7b34e1811b7e2691bc4bc"}, + {file = "websockets-14.1-cp39-cp39-win32.whl", hash = "sha256:14839f54786987ccd9d03ed7f334baec0f02272e7ec4f6e9d427ff584aeea8b4"}, + {file = "websockets-14.1-cp39-cp39-win_amd64.whl", hash = "sha256:d9fd19ecc3a4d5ae82ddbfb30962cf6d874ff943e56e0c81f5169be2fda62979"}, + 
{file = "websockets-14.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e5dc25a9dbd1a7f61eca4b7cb04e74ae4b963d658f9e4f9aad9cd00b688692c8"}, + {file = "websockets-14.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:04a97aca96ca2acedf0d1f332c861c5a4486fdcba7bcef35873820f940c4231e"}, + {file = "websockets-14.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df174ece723b228d3e8734a6f2a6febbd413ddec39b3dc592f5a4aa0aff28098"}, + {file = "websockets-14.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:034feb9f4286476f273b9a245fb15f02c34d9586a5bc936aff108c3ba1b21beb"}, + {file = "websockets-14.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:660c308dabd2b380807ab64b62985eaccf923a78ebc572bd485375b9ca2b7dc7"}, + {file = "websockets-14.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5a42d3ecbb2db5080fc578314439b1d79eef71d323dc661aa616fb492436af5d"}, + {file = "websockets-14.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:ddaa4a390af911da6f680be8be4ff5aaf31c4c834c1a9147bc21cbcbca2d4370"}, + {file = "websockets-14.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a4c805c6034206143fbabd2d259ec5e757f8b29d0a2f0bf3d2fe5d1f60147a4a"}, + {file = "websockets-14.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:205f672a6c2c671a86d33f6d47c9b35781a998728d2c7c2a3e1cf3333fcb62b7"}, + {file = "websockets-14.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef440054124728cc49b01c33469de06755e5a7a4e83ef61934ad95fc327fbb0"}, + {file = "websockets-14.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7591d6f440af7f73c4bd9404f3772bfee064e639d2b6cc8c94076e71b2471c1"}, + {file = "websockets-14.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:25225cc79cfebc95ba1d24cd3ab86aaa35bcd315d12fa4358939bd55e9bd74a5"}, + {file = "websockets-14.1-py3-none-any.whl", hash = "sha256:4d4fc827a20abe6d544a119896f6b78ee13fe81cbfef416f3f2ddf09a03f0e2e"}, + {file = "websockets-14.1.tar.gz", hash = "sha256:398b10c77d471c0aab20a845e7a60076b6390bfdaac7a6d2edb0d2c59d75e8d8"}, ] [[package]] @@ -2598,110 +3658,130 @@ pep8-naming = ">=0.13,<0.14" pygments = ">=2.4,<3.0" typing_extensions = ">=4.0,<5.0" +[[package]] +name = "xlrd" +version = "2.0.1" +description = "Library for developers to extract data from Microsoft Excel (tm) .xls spreadsheet files" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "xlrd-2.0.1-py2.py3-none-any.whl", hash = "sha256:6a33ee89877bd9abc1158129f6e94be74e2679636b8a205b43b85206c3f0bbdd"}, + {file = "xlrd-2.0.1.tar.gz", hash = "sha256:f72f148f54442c6b056bf931dbc34f986fd0c3b0b6b5a58d013c9aef274d0c88"}, +] + +[package.extras] +build = ["twine", "wheel"] +docs = ["sphinx"] +test = ["pytest", "pytest-cov"] + +[[package]] +name = "xlwt" +version = "1.3.0" +description = "Library to create spreadsheet files compatible with MS Excel 97/2000/XP/2003 XLS files, on any platform, with Python 2.6, 2.7, 3.3+" +optional = false +python-versions = "*" +files = [ + {file = "xlwt-1.3.0-py2.py3-none-any.whl", hash = "sha256:a082260524678ba48a297d922cc385f58278b8aa68741596a87de01a9c628b2e"}, + {file = "xlwt-1.3.0.tar.gz", hash = 
"sha256:c59912717a9b28f1a3c2a98fd60741014b06b043936dcecbc113eaaada156c88"}, +] + [[package]] name = "yarl" -version = "1.9.4" +version = "1.17.2" description = "Yet another URL library" optional = false -python-versions = ">=3.7" -files = [ - {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, - {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"}, - {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"}, - {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"}, - {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"}, - {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"}, - {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"}, - {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"}, - {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"}, - {file = 
"yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"}, - {file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"}, - {file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"}, - {file = 
"yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"}, - {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"}, - {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"}, - {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"}, - {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"}, - {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"}, - {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"}, +python-versions = ">=3.9" +files = [ + {file = "yarl-1.17.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:93771146ef048b34201bfa382c2bf74c524980870bb278e6df515efaf93699ff"}, + {file = "yarl-1.17.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8281db240a1616af2f9c5f71d355057e73a1409c4648c8949901396dc0a3c151"}, + {file = "yarl-1.17.2-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:170ed4971bf9058582b01a8338605f4d8c849bd88834061e60e83b52d0c76870"}, + {file = "yarl-1.17.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc61b005f6521fcc00ca0d1243559a5850b9dd1e1fe07b891410ee8fe192d0c0"}, + {file = "yarl-1.17.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:871e1b47eec7b6df76b23c642a81db5dd6536cbef26b7e80e7c56c2fd371382e"}, + {file = "yarl-1.17.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3a58a2f2ca7aaf22b265388d40232f453f67a6def7355a840b98c2d547bd037f"}, + {file = "yarl-1.17.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:736bb076f7299c5c55dfef3eb9e96071a795cb08052822c2bb349b06f4cb2e0a"}, + {file = "yarl-1.17.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8fd51299e21da709eabcd5b2dd60e39090804431292daacbee8d3dabe39a6bc0"}, + {file = "yarl-1.17.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:358dc7ddf25e79e1cc8ee16d970c23faee84d532b873519c5036dbb858965795"}, + {file = "yarl-1.17.2-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:50d866f7b1a3f16f98603e095f24c0eeba25eb508c85a2c5939c8b3870ba2df8"}, + {file = "yarl-1.17.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:8b9c4643e7d843a0dca9cd9d610a0876e90a1b2cbc4c5ba7930a0d90baf6903f"}, + {file = "yarl-1.17.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d63123bfd0dce5f91101e77c8a5427c3872501acece8c90df457b486bc1acd47"}, + {file = "yarl-1.17.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:4e76381be3d8ff96a4e6c77815653063e87555981329cf8f85e5be5abf449021"}, + {file = "yarl-1.17.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:734144cd2bd633a1516948e477ff6c835041c0536cef1d5b9a823ae29899665b"}, + {file = "yarl-1.17.2-cp310-cp310-win32.whl", hash = "sha256:26bfb6226e0c157af5da16d2d62258f1ac578d2899130a50433ffee4a5dfa673"}, + {file = "yarl-1.17.2-cp310-cp310-win_amd64.whl", hash = "sha256:76499469dcc24759399accd85ec27f237d52dec300daaca46a5352fcbebb1071"}, + {file = "yarl-1.17.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:792155279dc093839e43f85ff7b9b6493a8eaa0af1f94f1f9c6e8f4de8c63500"}, + {file = "yarl-1.17.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:38bc4ed5cae853409cb193c87c86cd0bc8d3a70fd2268a9807217b9176093ac6"}, + {file = "yarl-1.17.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4a8c83f6fcdc327783bdc737e8e45b2e909b7bd108c4da1892d3bc59c04a6d84"}, + {file = "yarl-1.17.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c6d5fed96f0646bfdf698b0a1cebf32b8aae6892d1bec0c5d2d6e2df44e1e2d"}, + {file = "yarl-1.17.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:782ca9c58f5c491c7afa55518542b2b005caedaf4685ec814fadfcee51f02493"}, + {file = "yarl-1.17.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ff6af03cac0d1a4c3c19e5dcc4c05252411bf44ccaa2485e20d0a7c77892ab6e"}, + {file = "yarl-1.17.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a3f47930fbbed0f6377639503848134c4aa25426b08778d641491131351c2c8"}, + {file = "yarl-1.17.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1fa68a3c921365c5745b4bd3af6221ae1f0ea1bf04b69e94eda60e57958907f"}, + {file = "yarl-1.17.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:187df91395c11e9f9dc69b38d12406df85aa5865f1766a47907b1cc9855b6303"}, + {file = 
"yarl-1.17.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:93d1c8cc5bf5df401015c5e2a3ce75a5254a9839e5039c881365d2a9dcfc6dc2"}, + {file = "yarl-1.17.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:11d86c6145ac5c706c53d484784cf504d7d10fa407cb73b9d20f09ff986059ef"}, + {file = "yarl-1.17.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:c42774d1d1508ec48c3ed29e7b110e33f5e74a20957ea16197dbcce8be6b52ba"}, + {file = "yarl-1.17.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0c8e589379ef0407b10bed16cc26e7392ef8f86961a706ade0a22309a45414d7"}, + {file = "yarl-1.17.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1056cadd5e850a1c026f28e0704ab0a94daaa8f887ece8dfed30f88befb87bb0"}, + {file = "yarl-1.17.2-cp311-cp311-win32.whl", hash = "sha256:be4c7b1c49d9917c6e95258d3d07f43cfba2c69a6929816e77daf322aaba6628"}, + {file = "yarl-1.17.2-cp311-cp311-win_amd64.whl", hash = "sha256:ac8eda86cc75859093e9ce390d423aba968f50cf0e481e6c7d7d63f90bae5c9c"}, + {file = "yarl-1.17.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:dd90238d3a77a0e07d4d6ffdebc0c21a9787c5953a508a2231b5f191455f31e9"}, + {file = "yarl-1.17.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c74f0b0472ac40b04e6d28532f55cac8090e34c3e81f118d12843e6df14d0909"}, + {file = "yarl-1.17.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4d486ddcaca8c68455aa01cf53d28d413fb41a35afc9f6594a730c9779545876"}, + {file = "yarl-1.17.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25b7e93f5414b9a983e1a6c1820142c13e1782cc9ed354c25e933aebe97fcf2"}, + {file = "yarl-1.17.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3a0baff7827a632204060f48dca9e63fbd6a5a0b8790c1a2adfb25dc2c9c0d50"}, + {file = "yarl-1.17.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:460024cacfc3246cc4d9f47a7fc860e4fcea7d1dc651e1256510d8c3c9c7cde0"}, + {file = "yarl-1.17.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5870d620b23b956f72bafed6a0ba9a62edb5f2ef78a8849b7615bd9433384171"}, + {file = "yarl-1.17.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2941756754a10e799e5b87e2319bbec481ed0957421fba0e7b9fb1c11e40509f"}, + {file = "yarl-1.17.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9611b83810a74a46be88847e0ea616794c406dbcb4e25405e52bff8f4bee2d0a"}, + {file = "yarl-1.17.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:cd7e35818d2328b679a13268d9ea505c85cd773572ebb7a0da7ccbca77b6a52e"}, + {file = "yarl-1.17.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:6b981316fcd940f085f646b822c2ff2b8b813cbd61281acad229ea3cbaabeb6b"}, + {file = "yarl-1.17.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:688058e89f512fb7541cb85c2f149c292d3fa22f981d5a5453b40c5da49eb9e8"}, + {file = "yarl-1.17.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:56afb44a12b0864d17b597210d63a5b88915d680f6484d8d202ed68ade38673d"}, + {file = "yarl-1.17.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:17931dfbb84ae18b287279c1f92b76a3abcd9a49cd69b92e946035cff06bcd20"}, + {file = "yarl-1.17.2-cp312-cp312-win32.whl", hash = "sha256:ff8d95e06546c3a8c188f68040e9d0360feb67ba8498baf018918f669f7bc39b"}, + {file = "yarl-1.17.2-cp312-cp312-win_amd64.whl", hash = "sha256:4c840cc11163d3c01a9d8aad227683c48cd3e5be5a785921bcc2a8b4b758c4f3"}, + {file = "yarl-1.17.2-cp313-cp313-macosx_10_13_universal2.whl", hash = 
"sha256:3294f787a437cb5d81846de3a6697f0c35ecff37a932d73b1fe62490bef69211"}, + {file = "yarl-1.17.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f1e7fedb09c059efee2533119666ca7e1a2610072076926fa028c2ba5dfeb78c"}, + {file = "yarl-1.17.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:da9d3061e61e5ae3f753654813bc1cd1c70e02fb72cf871bd6daf78443e9e2b1"}, + {file = "yarl-1.17.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91c012dceadc695ccf69301bfdccd1fc4472ad714fe2dd3c5ab4d2046afddf29"}, + {file = "yarl-1.17.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f11fd61d72d93ac23718d393d2a64469af40be2116b24da0a4ca6922df26807e"}, + {file = "yarl-1.17.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:46c465ad06971abcf46dd532f77560181387b4eea59084434bdff97524444032"}, + {file = "yarl-1.17.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef6eee1a61638d29cd7c85f7fd3ac7b22b4c0fabc8fd00a712b727a3e73b0685"}, + {file = "yarl-1.17.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4434b739a8a101a837caeaa0137e0e38cb4ea561f39cb8960f3b1e7f4967a3fc"}, + {file = "yarl-1.17.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:752485cbbb50c1e20908450ff4f94217acba9358ebdce0d8106510859d6eb19a"}, + {file = "yarl-1.17.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:17791acaa0c0f89323c57da7b9a79f2174e26d5debbc8c02d84ebd80c2b7bff8"}, + {file = "yarl-1.17.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5c6ea72fe619fee5e6b5d4040a451d45d8175f560b11b3d3e044cd24b2720526"}, + {file = "yarl-1.17.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:db5ac3871ed76340210fe028f535392f097fb31b875354bcb69162bba2632ef4"}, + {file = "yarl-1.17.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:7a1606ba68e311576bcb1672b2a1543417e7e0aa4c85e9e718ba6466952476c0"}, + {file = "yarl-1.17.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9bc27dd5cfdbe3dc7f381b05e6260ca6da41931a6e582267d5ca540270afeeb2"}, + {file = "yarl-1.17.2-cp313-cp313-win32.whl", hash = "sha256:52492b87d5877ec405542f43cd3da80bdcb2d0c2fbc73236526e5f2c28e6db28"}, + {file = "yarl-1.17.2-cp313-cp313-win_amd64.whl", hash = "sha256:8e1bf59e035534ba4077f5361d8d5d9194149f9ed4f823d1ee29ef3e8964ace3"}, + {file = "yarl-1.17.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c556fbc6820b6e2cda1ca675c5fa5589cf188f8da6b33e9fc05b002e603e44fa"}, + {file = "yarl-1.17.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f2f44a4247461965fed18b2573f3a9eb5e2c3cad225201ee858726cde610daca"}, + {file = "yarl-1.17.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3a3ede8c248f36b60227eb777eac1dbc2f1022dc4d741b177c4379ca8e75571a"}, + {file = "yarl-1.17.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2654caaf5584449d49c94a6b382b3cb4a246c090e72453493ea168b931206a4d"}, + {file = "yarl-1.17.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0d41c684f286ce41fa05ab6af70f32d6da1b6f0457459a56cf9e393c1c0b2217"}, + {file = "yarl-1.17.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2270d590997445a0dc29afa92e5534bfea76ba3aea026289e811bf9ed4b65a7f"}, + {file = "yarl-1.17.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18662443c6c3707e2fc7fad184b4dc32dd428710bbe72e1bce7fe1988d4aa654"}, + {file = 
"yarl-1.17.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:75ac158560dec3ed72f6d604c81090ec44529cfb8169b05ae6fcb3e986b325d9"}, + {file = "yarl-1.17.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1fee66b32e79264f428dc8da18396ad59cc48eef3c9c13844adec890cd339db5"}, + {file = "yarl-1.17.2-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:585ce7cd97be8f538345de47b279b879e091c8b86d9dbc6d98a96a7ad78876a3"}, + {file = "yarl-1.17.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:c019abc2eca67dfa4d8fb72ba924871d764ec3c92b86d5b53b405ad3d6aa56b0"}, + {file = "yarl-1.17.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:c6e659b9a24d145e271c2faf3fa6dd1fcb3e5d3f4e17273d9e0350b6ab0fe6e2"}, + {file = "yarl-1.17.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:d17832ba39374134c10e82d137e372b5f7478c4cceeb19d02ae3e3d1daed8721"}, + {file = "yarl-1.17.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:bc3003710e335e3f842ae3fd78efa55f11a863a89a72e9a07da214db3bf7e1f8"}, + {file = "yarl-1.17.2-cp39-cp39-win32.whl", hash = "sha256:f5ffc6b7ace5b22d9e73b2a4c7305740a339fbd55301d52735f73e21d9eb3130"}, + {file = "yarl-1.17.2-cp39-cp39-win_amd64.whl", hash = "sha256:48e424347a45568413deec6f6ee2d720de2cc0385019bedf44cd93e8638aa0ed"}, + {file = "yarl-1.17.2-py3-none-any.whl", hash = "sha256:dd7abf4f717e33b7487121faf23560b3a50924f80e4bef62b22dab441ded8f3b"}, + {file = "yarl-1.17.2.tar.gz", hash = "sha256:753eaaa0c7195244c84b5cc159dc8204b7fd99f716f11198f999f2332a86b178"}, ] [package.dependencies] idna = ">=2.0" multidict = ">=4.0" +propcache = ">=0.2.0" [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "827da20134c0682277699fcd1f155a8c6ca567c463fe90a121116d7b6002ba34" +content-hash = "1898dc5b68facf49cb53d1fe98daecb39c1dc39b184d17c31d8d23a48cfce2c0" diff --git a/backend/pyproject.toml b/backend/pyproject.toml index 37304fd1a..e40217ccb 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -12,44 +12,60 @@ readme = "README.md" [tool.poetry.dependencies] python = "^3.9" -fastapi = "^0.104.1" +fastapi = "^0.115.4" uvicorn = { version = "^0.24.0", extras = ["standard"] } -pydantic = {extras = ["email"], version = "^2.4.2"} +pydantic = { extras = ["email"], version = "^2.4.2" } yarl = "^1.8.2" ujson = "^5.7.0" -SQLAlchemy = {version = "^2.0.0", extras = ["asyncio"]} +SQLAlchemy = { version = "^2.0.0", extras = ["asyncio"] } alembic = "^1.12.1" -asyncpg = {version = "^0.27.0", extras = ["sa"]} -redis = {version = "^4.4.2", extras = ["hiredis"]} +asyncpg = { version = "^0.27.0", extras = ["sa"] } +redis = { version = "^4.4.2", extras = ["hiredis"] } httptools = "^0.5.0" prometheus-fastapi-instrumentator = "6.0.0" -colorlog = "6.7.0" -distlib = "^0.3.3" +distlib = "^0.3.8" pydantic-settings = "^2.0.3" pyjwt = "^2.8.0" debugpy = "^1.8.0" -cryptography = "^41.0.7" - +cryptography = "^43.0.1" +fastapi-cache2 = "^0.2.1" +httpx = "^0.23.3" +pandas = "^2.1.4" +openpyxl = "^3.1.2" +xlwt = "^1.3.0" +xlrd = "^2.0.1" +numpy = "^1.26.4" +lupa = "^2.0" +alembic-postgresql-enum = "^1.1.2" +bump-pydantic = "^0.8.0" +black = "^24.4.2" +boto3 = "^1.35.26" +typing-extensions = "^4.12.2" +structlog = "^24.4.0" +python-multipart = "^0.0.18" +aio-pika = "^9.4.3" +jinja2 = "^3.1.4" +requests = "^2.32.3" [tool.poetry.dev-dependencies] -pytest = "^7.2.1" +pytest = "^8.3.3" flake8 = "~4.0.1" mypy = "^1.1.1" isort = "^5.11.4" pre-commit = "^3.0.1" wemake-python-styleguide = "^0.17.0" -black = "^22.12.0" +black = "^24.4.2" autoflake = 
"^1.6.1" pytest-cov = "^4.0.0" anyio = "^3.6.2" pytest-env = "^0.8.1" fakeredis = "^2.5.0" -httpx = "^0.23.3" +rich = "^13.9.4" [tool.isort] profile = "black" multi_line_output = 3 -src_paths = ["lcfs",] +src_paths = ["lcfs", ] [tool.mypy] strict = true diff --git a/docker-compose-db-only.yml b/docker-compose-db-only.yml new file mode 100644 index 000000000..c6ba567f5 --- /dev/null +++ b/docker-compose-db-only.yml @@ -0,0 +1,17 @@ +version: '3.2' + +services: + db: + restart: 'no' + image: postgres:14.2 + environment: + POSTGRES_DB: lcfs + POSTGRES_USER: lcfs + POSTGRES_PASSWORD: development_only + ports: + - 5432:5432 + volumes: + - postgres_data:/var/lib/postgresql/data + +volumes: + postgres_data: diff --git a/docker-compose.yml b/docker-compose.yml index 0f5cc7c43..d8170fa6b 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,8 +1,7 @@ -version: '3.2' - services: db: - restart: 'no' + container_name: db + restart: "no" image: postgres:14.2 environment: POSTGRES_DB: lcfs @@ -12,19 +11,83 @@ services: - 5432:5432 volumes: - postgres_data:/var/lib/postgresql/data + networks: + - shared_network redis: - restart: 'no' + container_name: redis + restart: "no" image: bitnami/redis:6.2.5 environment: - ALLOW_EMPTY_PASSWORD: 'yes' + REDIS_PASSWORD: development_only ports: - '6379:6379' volumes: - redis_data:/bitnami/redis/data + networks: + - shared_network + + rabbitmq: + image: rabbitmq:3-management + container_name: rabbitmq + environment: + RABBITMQ_DEFAULT_USER: lcfs + RABBITMQ_DEFAULT_PASS: development_only + RABBITMQ_DEFAULT_VHOST: lcfs + volumes: + - ./docker/rabbitmq.conf:/etc/rabbitmq/rabbitmq.conf + ports: + - "15672:15672" # Management UI + - "5672:5672" # RabbitMQ Port + networks: + - shared_network + + # clamav: + # image: clamav/clamav:stable_base + # volumes: + # - clamsocket:/var/run/clamav/ + # - clamav:/var/lib/clamav + # ports: + # - "3310:3310" + # networks: + # - shared_network + + minio: + image: minio/minio:latest + restart: "no" + command: "server --console-address :9001 --address :9000 /data" + environment: + - MINIO_ROOT_USER=s3_access_key + - MINIO_ROOT_PASSWORD=development_only + - MINIO_SERVER_URL=http://localhost:9000 + volumes: + - s3:/data + ports: + - "9000:9000" + - "9001:9001" + networks: + - shared_network + + create_bucket: + image: minio/mc + restart: "no" + container_name: minio_init + entrypoint: > + /bin/sh -c " + until mc alias set myminio http://minio:9000 s3_access_key development_only; do + echo 'Waiting for MinIO...'; + sleep 5; + done; + mc mb myminio/lcfs + " + networks: + - shared_network + depends_on: + - minio backend: - restart: 'no' + container_name: backend + restart: "no" build: context: ./backend dockerfile: 'Dockerfile' @@ -42,7 +105,14 @@ services: LCFS_DB_BASE: lcfs LCFS_REDIS_HOST: redis LCFS_REDIS_PORT: 6379 + LCFS_REDIS_PASSWORD: development_only LCFS_RELOAD: true + LCFS_CHES_CLIENT_ID: ${LCFS_CHES_CLIENT_ID} + LCFS_CHES_CLIENT_SECRET: ${LCFS_CHES_CLIENT_SECRET} + LCFS_CHES_AUTH_URL: ${LCFS_CHES_AUTH_URL} + LCFS_CHES_SENDER_EMAIL: ${LCFS_CHES_SENDER_EMAIL} + LCFS_CHES_SENDER_NAME: ${LCFS_CHES_SENDER_NAME} + LCFS_CHES_EMAIL_URL: ${LCFS_CHES_EMAIL_URL} APP_ENVIRONMENT: dev ports: - '8000:8000' # Application port @@ -50,20 +120,45 @@ services: depends_on: - db - redis + - create_bucket + - rabbitmq + # - clamav + networks: + - shared_network frontend: - restart: 'no' + container_name: frontend + restart: "no" build: dockerfile: Dockerfile.dev context: ./frontend command: npm run dev --force ports: - - 3000:3000 + - 
"3000:3000" volumes: - - ./frontend:/app - - /app/node_modules + - type: bind + source: ./frontend + target: /app + - type: volume + source: node_modules + target: /app/node_modules + networks: + - shared_network volumes: - node_modules: postgres_data: + name: lcfs_postgres_data redis_data: + name: lcfs_redis_data + node_modules: + name: lcfs_node_data + s3: + name: lfcs_s3_data + clamav: + name: clamav + clamsocket: + name: clamsocket + +networks: + shared_network: + name: shared_network diff --git a/docker/rabbitmq.conf b/docker/rabbitmq.conf new file mode 100644 index 000000000..851ea79bb --- /dev/null +++ b/docker/rabbitmq.conf @@ -0,0 +1 @@ +log.console.level = warning \ No newline at end of file diff --git a/etl/.gitignore b/etl/.gitignore new file mode 100644 index 000000000..4b1a5471d --- /dev/null +++ b/etl/.gitignore @@ -0,0 +1,8 @@ +nifi_output/ +nifi/content_repository/ +nifi/database_repository/ +nifi/flowfile_repository/ +nifi/logs/ +nifi/provenance_repository/ +nifi/state/ +nifi/conf/archive \ No newline at end of file diff --git a/etl/data-migration.sh b/etl/data-migration.sh new file mode 100755 index 000000000..b3326dd85 --- /dev/null +++ b/etl/data-migration.sh @@ -0,0 +1,449 @@ +#!/usr/local/bin/bash +# Update bash dir to your local version +# Optimized NiFi Processor Management Script with Advanced Logging +# This script manages NiFi processors, updates database connections, and establishes port-forwarding. +# Usage: +# ./nifi_processor_manager.sh [dev|test|prod] [--debug|--verbose] +# ./data-migration.sh test --debug +# +# Arguments: +# [dev|test|prod] - The environment for which the script will run. +# --debug - Enables debug logging (most verbose). +# --verbose - Enables verbose logging. +# +# Features: +# - Logs with multiple debug levels for clear tracing. +# - Automatically manages port-forwarding. +# - Handles processor execution and tracks its state. +# - Updates NiFi database controller connections. +# +# Requirements: +# - `jq` for JSON parsing. +# - `lsof` for port check. +# - OpenShift CLI (`oc`). 
+
+# Exit immediately if a command exits with a non-zero status and treat unset variables as errors
+set -euo pipefail
+
+# Debug Levels
+readonly DEBUG_NONE=0
+readonly DEBUG_ERROR=1
+readonly DEBUG_WARN=2
+readonly DEBUG_INFO=3
+readonly DEBUG_VERBOSE=4
+readonly DEBUG_DEBUG=5
+
+# Default Debug Level
+DEBUG_LEVEL=${DEBUG_LEVEL:-$DEBUG_INFO}
+
+# NiFi Processor IDs
+# Update these IDs as needed
+readonly ORGANIZATION_PROCESSOR="328e2539-0192-1000-0000-00007a4304c1"
+readonly USER_PROCESSOR="e6c63130-3eac-1b13-a947-ee0103275138"
+readonly TRANSFER_PROCESSOR="b9d73248-1438-1418-a736-cc94c8c21e70"
+readonly TRANSACTIONS_PROCESSOR="7a010ef5-0193-1000-ffff-ffff8c22e67e"
+
+# Global Port Configuration (Update these based on your setup)
+declare -A LOCAL_PORTS=(
+    [tfrs]=5435
+    [lcfs]=5432
+)
+readonly TARGET_PORT=5432
+
+# Centralized configuration for the NiFi API
+readonly NIFI_API_URL="http://localhost:8091/nifi-api"
+readonly MAX_RETRIES=5
+
+# Advanced logging function
+_log() {
+    local level=$1
+    local message=$2
+    local caller=${FUNCNAME[1]}
+    local timestamp=$(date '+%Y-%m-%d %H:%M:%S')
+
+    # Check if the message's log level is less than or equal to the current debug level
+    if [ "$level" -le "$DEBUG_LEVEL" ]; then
+        case $level in
+        $DEBUG_ERROR)
+            printf "\e[31m[ERROR][%s] %s\e[0m\n" "$timestamp" "$message" >&2
+            ;;
+        $DEBUG_WARN)
+            printf "\e[33m[WARN][%s] %s\e[0m\n" "$timestamp" "$message" >&2
+            ;;
+        $DEBUG_INFO)
+            printf "\e[32m[INFO][%s] %s\e[0m\n" "$timestamp" "$message" >&2
+            ;;
+        $DEBUG_VERBOSE)
+            printf "\e[34m[VERBOSE][%s] %s\e[0m\n" "$timestamp" "$message" >&2
+            ;;
+        $DEBUG_DEBUG)
+            printf "\e[36m[DEBUG][%s] %s\e[0m\n" "$timestamp" "$message" >&2
+            ;;
+        esac
+    fi
+}
+
+# Shortcut logging functions
+error() { _log $DEBUG_ERROR "$1"; }
+warn() { _log $DEBUG_WARN "$1"; }
+info() { _log $DEBUG_INFO "$1"; }
+verbose() { _log $DEBUG_VERBOSE "$1"; }
+debug() { _log $DEBUG_DEBUG "$1"; }
+
+# Error handling function
+error_exit() {
+    error "$1"
+    exit 1
+}
+
+# Robust curl wrapper with retry mechanism
+curl_with_retry() {
+    local max_attempts=$1
+    shift
+    local url=$1
+    local method=${2:-GET}
+    local data=${3:-}
+    local attempt=1
+
+    info "Curl Request: $method $url"
+    verbose "Max Attempts: $max_attempts"
+
+    # -le (not -lt) so the function really makes max_attempts attempts
+    while [ "$attempt" -le "$max_attempts" ]; do
+        debug "Attempt $attempt for $url"
+        local response
+        local http_code
+
+        # A transport-level curl failure yields a synthetic "000" code so the
+        # retry loop keeps going instead of aborting the script under "set -e"
+        if [ -z "$data" ]; then
+            response=$(curl -sS -w "%{http_code}" -X "$method" "$url") || response="000"
+        else
+            response=$(curl -sS -w "%{http_code}" -X "$method" -H "Content-Type: application/json" -d "$data" "$url") || response="000"
+        fi
+
+        # Extract the HTTP status code (last 3 characters)
+        http_code="${response: -3}"
+        # Extract the response body (everything except the last 3 characters)
+        response_body="${response:0:${#response}-3}"
+
+        if [ "$http_code" -ge 200 ] && [ "$http_code" -lt 300 ]; then
+            verbose "Curl successful. HTTP Code: $http_code"
+            echo "$response_body"
+            return 0
+        fi
+
+        warn "Curl attempt $attempt failed. HTTP Code: $http_code"
+        ((attempt++))
+        sleep $((attempt * 2))
+    done
+
+    error_exit "Curl failed after $max_attempts attempts: $url"
+}
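+
+# Example usage of curl_with_retry (illustrative; the processor id is a placeholder):
+#   config=$(curl_with_retry 3 "$NIFI_API_URL/processors/<processor-id>")
+#   version=$(echo "$config" | jq -r '.revision.version')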
HTTP Code: $http_code" + ((attempt++)) + sleep $((attempt * 2)) + done + + error_exit "Curl failed after $max_attempts attempts: $url" +} + +# Enhanced controller service enable function +enable_controller_service() { + local service_id=$1 + + info "Enabling controller service $service_id" + local current_config revision_version response + + current_config=$(curl_with_retry 3 "$NIFI_API_URL/controller-services/$service_id") + revision_version=$(echo "$current_config" | jq -r '.revision.version') + + verbose "Current Revision: $revision_version" + + response=$(curl_with_retry 3 "$NIFI_API_URL/controller-services/$service_id/run-status" PUT "{\"state\": \"ENABLED\", \"revision\": { \"version\": $revision_version }}") + + info "Controller service $service_id enabled successfully." +} + +# Improved processor execution tracking for single run +execute_processor() { + local processor_id=$1 + local env=$2 + + info "Triggering single execution for processor $processor_id" + local current_config revision_version response + + # Fetch the current processor configuration + current_config=$(curl_with_retry 3 "$NIFI_API_URL/processors/$processor_id") + revision_version=$(echo "$current_config" | jq -r '.revision.version') + + verbose "Processor current revision: $revision_version" + + # Prepare payload for single run + local run_once_payload=$(jq -n \ + --argjson revision_version "$revision_version" \ + '{ + "revision": { "version": $revision_version }, + "state": "RUN_ONCE", + "disconnectedNodeAcknowledged": false, + }') + + # Send run once request + response=$(curl -sS -w "%{http_code}" -X PUT \ + -H "Content-Type: application/json" \ + -d "$run_once_payload" \ + "$NIFI_API_URL/processors/$processor_id/run-status") + + # Extract HTTP code and response body using parameter expansion + local http_code="${response: -3}" + local response_body="${response:0:${#response}-3}" + + if [ "$http_code" -ge 200 ] && [ "$http_code" -lt 300 ]; then + info "Processor $processor_id triggered for single run successfully" + sleep 60 + else + error "Failed to trigger single run for processor $processor_id. 
HTTP Code: $http_code" + error "Response: $response_body" + return 1 + fi + + # Monitor processor execution with timeout + local max_wait_time=300 # 5 minutes max wait + local start_time=$(date +%s) + local timeout=$((start_time + max_wait_time)) + + while true; do + local current_time=$(date +%s) + + # Check if we've exceeded timeout + if [ "$current_time" -gt "$timeout" ]; then + error "Processor $processor_id execution timed out after $max_wait_time seconds" + return 1 + fi + + # Get processor status + local processor_status + processor_status=$(curl_with_retry 3 "$NIFI_API_URL/processors/$processor_id" | jq '.status.aggregateSnapshot') + + local active_threads=$(echo "$processor_status" | jq '.activeThreadCount') + local processed_count=$(echo "$processor_status" | jq '.flowFilesProcessed') + + verbose "Processor $processor_id - Active Threads: $active_threads, Processed: $processed_count" + + # Check if processing is complete + if [ "$active_threads" -eq 0 ]; then + info "Processor $processor_id completed single run execution" + break + fi + + sleep 2 + done +} + +# Robust leader pod detection +get_leader_pod() { + local namespace=$1 + local app_label=$2 + + info "Detecting leader pod for $app_label in $namespace" + + local leader_pod + leader_pod=$(oc get pods -n "$namespace" -o name | grep "$app_label" | while read -r pod; do + debug "Checking pod: $pod" + if oc exec -n "$namespace" "$pod" -- bash -c "psql -U postgres -tAc \"SELECT pg_is_in_recovery()\"" | grep -q 'f'; then + echo "$pod" + break + fi + done) + + if [ -z "$leader_pod" ]; then + error_exit "No leader pod found for $app_label in $namespace" + fi + + verbose "Leader pod detected: $leader_pod" + echo "$leader_pod" +} + +# Consolidated port forwarding +forward_database_ports() { + local env=$1 + local pid_file=$(mktemp) + + info "Initiating port forwarding for environment: $env" + + local port_mappings=( + "tfrs:0ab226-$env:tfrs-spilo:tfrs" + "lcfs:d2bd59-$env:lcfs-crunchy-$env-lcfs:lcfs" # Uncomment if you want to load directly to LCFS openshift environment + ) + + for mapping in "${port_mappings[@]}"; do + IFS=':' read -r app namespace app_label app_key <<<"$mapping" + local_port=${LOCAL_PORTS[$app_key]} + + verbose "Port forwarding for $app: $local_port -> $TARGET_PORT" + + service_name=$(get_leader_pod "$namespace" "$app_label") + oc -n "$namespace" port-forward "$service_name" "$local_port:$TARGET_PORT" & + echo $! 
>>"$pid_file" + done + + trap " + debug 'Cleaning up port-forward processes'; + kill \$(tr '\n' ' ' < '$pid_file') 2>/dev/null; + rm -f '$pid_file' + " EXIT +} + +# Update NiFi database connection +update_nifi_connection() { + local controller_service_id=$1 + local namespace=$2 + local pod_pattern=$3 + local app=$4 + + info "Updating NiFi connection for $app in $namespace" + debug "Controller Service ID: $controller_service_id" + debug "Pod Pattern: $pod_pattern" + debug "App: $app" + debug "Namespace: $namespace" + + local pod_name db_env_vars database_user database_pass database_name database_url + local local_port=${LOCAL_PORTS[$app]} + + verbose "Using local port $local_port for $app" + + pod_name=$(oc get pods -n "$namespace" -o name | grep "$pod_pattern" | head -n 1) + + if [ -z "$pod_name" ]; then + error_exit "No pod found matching $pod_pattern" + fi + + debug "Selected pod: $pod_name" + + case "$app" in + tfrs) + db_env_vars=$(oc exec -n "$namespace" "$pod_name" -- env | grep 'DATABASE_') + database_user=$(echo "$db_env_vars" | grep 'DATABASE_USER' | cut -d'=' -f2) + database_pass=$(echo "$db_env_vars" | grep 'DATABASE_PASSWORD' | cut -d'=' -f2) + database_name=$(echo "$db_env_vars" | grep 'DATABASE_NAME' | cut -d'=' -f2) + ;; + lcfs) + db_env_vars=$(oc exec -n "$namespace" "$pod_name" -- env | grep 'LCFS_DB_') + database_user=$(echo "$db_env_vars" | grep 'LCFS_DB_USER' | cut -d'=' -f2) + database_pass=$(echo "$db_env_vars" | grep 'LCFS_DB_PASS' | cut -d'=' -f2) + database_name=$(echo "$db_env_vars" | grep 'LCFS_DB_BASE' | cut -d'=' -f2) + ;; + *) + error_exit "Invalid application: $app" + ;; + esac + + database_url="jdbc:postgresql://host.docker.internal:$local_port/$database_name" + + verbose "Database Connection Details:" + verbose " URL: $database_url" + verbose " User: $database_user" + + local controller_config current_name revision_version updated_config response + + # Get and update controller service configuration + controller_config=$(curl_with_retry 3 "$NIFI_API_URL/controller-services/$controller_service_id") + current_name=$(echo "$controller_config" | jq -r '.component.name') + revision_version=$(echo "$controller_config" | jq '.revision.version') + + debug "Current Service Name: $current_name" + debug "Current Revision: $revision_version" + debug "Disabling the controller service." + + controller_config=$(curl_with_retry 3 "$NIFI_API_URL/controller-services/$controller_service_id/run-status" PUT "{\"state\": \"DISABLED\", \"revision\": { \"version\": $revision_version }}") + current_state=$(echo "$controller_config" | jq -r '.component.state') + current_name=$(echo "$controller_config" | jq -r '.component.name') + revision_version=$(echo "$controller_config" | jq '.revision.version') + + # Wait for the service to be disabled + debug "Waiting for the controller service to be disabled..." + while true; do + controller_config=$(curl_with_retry 3 "$NIFI_API_URL/controller-services/$controller_service_id") + current_state=$(echo "$controller_config" | jq -r '.component.state') + if [ "$current_state" == "DISABLED" ]; then + break + fi + warn "Controller service is not yet disabled. Retrying in 2 seconds..." + sleep 2 + done + # Update the controller service with new credentials and keep the name unchanged + debug "Updating NiFi controller service with new database credentials..." 
+    updated_config=$(jq -n \
+        --arg name "$current_name" \
+        --arg service_id "$controller_service_id" \
+        --arg db_url "$database_url" \
+        --arg db_user "$database_user" \
+        --arg db_pass "$database_pass" \
+        --argjson version "$revision_version" \
+        '{
+            "revision": { "version": $version },
+            "component": {
+                "id": $service_id,
+                "name": $name,
+                "properties": {
+                    "Database Connection URL": $db_url,
+                    "Database User": $db_user,
+                    "Password": $db_pass
+                }
+            }
+        }')
+    response=$(curl_with_retry 3 "$NIFI_API_URL/controller-services/$controller_service_id" PUT "$updated_config")
+
+    info "Controller service updated successfully."
+    enable_controller_service "$controller_service_id"
+}
+
+# Main execution
+main() {
+    # Parse command-line arguments
+    while [[ $# -gt 0 ]]; do
+        case "$1" in
+        --debug)
+            DEBUG_LEVEL=$DEBUG_DEBUG
+            shift
+            ;;
+        --verbose)
+            DEBUG_LEVEL=$DEBUG_VERBOSE
+            shift
+            ;;
+        *)
+            break
+            ;;
+        esac
+    done
+
+    local env=${1:?Usage: $0 [dev|test|prod] [--debug|--verbose]}
+
+    [[ $env =~ ^(dev|test|prod)$ ]] || error_exit "Invalid environment. Use 'dev', 'test', or 'prod'."
+
+    info "Starting NiFi Processor Management Script"
+    info "Environment: $env"
+    info "Debug Level: $DEBUG_LEVEL"
+
+    info "Checking OpenShift credentials"
+    oc whoami
+
+    # Controller service configuration
+    local LCFS_CONTROLLER_SERVICE_ID="32417e8c-0192-1000-ffff-ffff8ccb5dfa"
+    local TFRS_CONTROLLER_SERVICE_ID="3245b078-0192-1000-ffff-ffffba20c1eb"
+
+    # Update NiFi connections
+    update_nifi_connection "$LCFS_CONTROLLER_SERVICE_ID" "d2bd59-$env" "lcfs-backend-$env" "lcfs" # Uncomment if we're loading the data into the OpenShift database.
+    update_nifi_connection "$TFRS_CONTROLLER_SERVICE_ID" "0ab226-$env" "tfrs-backend-$env" "tfrs"
+
+    # Uncomment if you want to use port forwarding from the script.
+    ## Duplicated because the connections get disconnected the first time port forwarding is enabled.
+    # forward_database_ports "$env"
+    # sleep 5 # Allow time for port forwarding
+    # info "Executing processors in sequence..."
+    # execute_processor "$ORGANIZATION_PROCESSOR" "$env"
+    # ##
+    # # Actual processing starts here
+    # forward_database_ports "$env"
+    # sleep 5 # Allow time for port forwarding
+
+    # Expand these processors as needed
+    execute_processor "$ORGANIZATION_PROCESSOR" "$env"
+    execute_processor "$USER_PROCESSOR" "$env"
+    execute_processor "$TRANSFER_PROCESSOR" "$env"
+    execute_processor "$TRANSACTIONS_PROCESSOR" "$env"
+
+    info "All processors executed successfully."
+    info "Validate all the loaded data manually in the database."
+}
+
+main "$@"
diff --git a/etl/data-transfer.sh b/etl/data-transfer.sh
new file mode 100755
index 000000000..9d3e97357
--- /dev/null
+++ b/etl/data-transfer.sh
@@ -0,0 +1,163 @@
+#!/bin/bash
+set -e
+
+# This script handles PostgreSQL data transfer between OpenShift and local containers
+
+# 4 Arguments
+# $1 = 'tfrs' or 'lcfs' (application)
+# $2 = 'test', 'prod', or 'dev' (environment)
+# $3 = 'import' or 'export' (direction of data transfer)
+# $4 = 'local container name or id'
+# example commands:
+# . data-transfer.sh lcfs dev export 398cd4661173
+# . data-transfer.sh tfrs prod import 398cd4661173
+
+if [ "$#" -lt 4 ] || [ "$#" -gt 5 ]; then
+    echo "Passed $# parameters. Expected 4 or 5."
+ echo "Usage: $0 []" + echo "Where:" + echo " is 'tfrs' or 'lcfs'" + echo " is 'test', 'prod', or 'dev'" + echo " is 'import' or 'export'" + echo " is the name or id of your local Docker container" + exit 1 +fi + +application=$1 +env=$2 +direction=$3 +local_container=$4 + +# Validate direction +if [ "$direction" != "import" ] && [ "$direction" != "export" ]; then + echo "Invalid direction. Use 'import' or 'export'." + exit 1 +fi + +# Check if the operation is supported +if [ "$application" = "tfrs" ] && [ "$direction" = "export" ]; then + echo "Export operation is not supported for the TFRS application." + exit 1 +fi + +# checks if you are logged in to openshift +echo "** Checking Openshift creds" +oc whoami +echo "logged in" +echo + +# Function to get the leader pod for Crunchy Data PostgreSQL clusters +get_leader_pod() { + local project=$1 + local app_label=$2 + + # Get all pods with the given app label + pods=$(oc get pods -n $project_name -o name | grep "$app_label") + + # Loop through pods to find the leader + for pod in $pods; do + is_leader=$(oc exec -n $project $pod -- bash -c "psql -U postgres -tAc \"SELECT pg_is_in_recovery()\"") + if [ "$is_leader" = "f" ]; then + echo $pod + return + fi + done + + echo "No leader pod found" + exit 1 +} + +# Set project name and pod name based on application and environment +case $application in + "tfrs") + project_name="0ab226-$env" + if [ "$env" = "prod" ]; then + app_label="tfrs-crunchy-prod-tfrs" + else + app_label="tfrs-spilo" + fi + db_name="tfrs" + ;; + "lcfs") + project_name="d2bd59-$env" + app_label="lcfs-crunchy-$env-lcfs" + db_name="lcfs" + ;; + *) + echo "Invalid application. Use 'tfrs' or 'lcfs'." + exit 1 + ;; +esac + +echo "** Setting project $project_name" +oc project $project_name +echo + +# Get the appropriate pod +pod_name=$(get_leader_pod $project_name $app_label) +if [ -z "$pod_name" ]; then + echo "Error: No leader pod identified." 
+elif [ "$direction" = "export" ]; then
+    echo "** Starting pg_dump on local container"
+    docker exec $local_container bash -c "pg_dump -U $db_name -F t --no-privileges --no-owner -c -d $db_name > /tmp/$db_name.tar"
+    echo
+
+    echo "** Copying .tar file from local container"
+    docker cp $local_container:/tmp/$db_name.tar ./
+    echo
+
+    echo "** Preparing .tar file for OpenShift pod"
+    mkdir -p tmp_transfer
+    mv $db_name.tar tmp_transfer/
+    echo
+
+    echo "** Uploading .tar file to OpenShift pod"
+    oc rsync ./tmp_transfer $pod_name:/tmp/
+    echo
+
+    echo "** Restoring database on OpenShift pod"
+    oc exec $pod_name -- bash -c "pg_restore -U postgres --dbname=$db_name --no-owner --clean --if-exists --verbose /tmp/tmp_transfer/$db_name.tar" || true
+    echo
+
+    echo "** Cleaning up temporary files on OpenShift pod"
+    oc exec $pod_name -- bash -c "rm -rf /tmp/tmp_transfer"
+    echo
+
+    echo "** Cleaning up dump file from local container"
+    # The local container holds the dump at /tmp/$db_name.tar (created above)
+    docker exec $local_container bash -c "rm /tmp/$db_name.tar" || true
+    echo
+
+    echo "** Cleaning up local temporary directory"
+    rm -rf tmp_transfer
+fi
+
+
+echo "** Finished data transfer and cleanup"
diff --git a/etl/database/nifi-registry-primary.mv.db b/etl/database/nifi-registry-primary.mv.db
new file mode 100644
index 000000000..f5b1bf947
Binary files /dev/null and b/etl/database/nifi-registry-primary.mv.db differ
diff --git a/etl/database/nifi-registry-primary.mv.db.backup b/etl/database/nifi-registry-primary.mv.db.backup
new file mode 100644
index 000000000..f7aeb5da2
Binary files /dev/null and b/etl/database/nifi-registry-primary.mv.db.backup differ
diff --git a/etl/docker-compose.yml b/etl/docker-compose.yml
new file mode 100755
index 000000000..8f0a3eec6
--- /dev/null
+++ b/etl/docker-compose.yml
@@ -0,0 +1,88 @@
+services:
+# configuration manager for NiFi
+  zookeeper:
+    hostname: myzookeeper
+    container_name: zookeeper_container_persistent
+    image: 'bitnami/zookeeper:3.9.2-debian-12-r14'
+    restart: on-failure
+    environment:
+      - ALLOW_ANONYMOUS_LOGIN=yes
+    networks:
+      - shared_network
+# version control for nifi flows
+  registry:
+    hostname: lcfs-registry
+    container_name: nifi_registry
+    image: 'apache/nifi-registry:1.27.0'
+    restart: on-failure
+    ports:
+      - "18080:18080"
+    environment:
+      - LOG_LEVEL=INFO
+      - NIFI_REGISTRY_DB_DIR=/opt/nifi-registry/nifi-registry-current/database
+      - NIFI_REGISTRY_FLOW_PROVIDER=file
+      - NIFI_REGISTRY_FLOW_STORAGE_DIR=/opt/nifi-registry/nifi-registry-current/flow_storage
+    volumes:
+      - ./database:/opt/nifi-registry/nifi-registry-current/database
+      - ./flow_storage:/opt/nifi-registry/nifi-registry-current/flow_storage
+    networks:
+      - shared_network
+# 
data extraction, transformation and load service + nifi: + hostname: nifi + container_name: nifi + image: 'apache/nifi:1.27.0' + restart: on-failure + ports: + - '8091:8080' + environment: + - NIFI_WEB_HTTP_PORT=8080 + - NIFI_CLUSTER_IS_NODE=true + - NIFI_CLUSTER_NODE_PROTOCOL_PORT=8082 + - NIFI_ZK_CONNECT_STRING=myzookeeper:2181 + - NIFI_ELECTION_MAX_WAIT=30 sec + - NIFI_SENSITIVE_PROPS_KEY='12345678901234567890A' + healthcheck: + test: "${DOCKER_HEALTHCHECK_TEST:-curl localhost:8091/nifi/}" + interval: "60s" + timeout: "3s" + start_period: "5s" + retries: 5 + volumes: + - ./nifi/database_repository:/opt/nifi/nifi-current/database_repository + - ./nifi/flowfile_repository:/opt/nifi/nifi-current/flowfile_repository + - ./nifi/content_repository:/opt/nifi/nifi-current/content_repository + - ./nifi/provenance_repository:/opt/nifi/nifi-current/provenance_repository + - ./nifi/state:/opt/nifi/nifi-current/state + - ./nifi/logs:/opt/nifi/nifi-current/logs + - ./jdbc_drivers/postgresql-42.7.3.jar:/opt/nifi/nifi-current/lib/postgresql-42.7.3.jar + - ./nifi/conf:/opt/nifi/nifi-current/conf + - ./nifi_scripts:/opt/nifi/nifi-current/nifi_scripts + - ./nifi_output:/opt/nifi/nifi-current/nifi_output + networks: + - shared_network +# TFRS database loaded with TFRS data + tfrs: + image: postgres:14.2 + container_name: tfrs + environment: + POSTGRES_USER: tfrs + POSTGRES_PASSWORD: development_only + POSTGRES_DB: tfrs + ports: + - "5435:5432" + volumes: + - tfrs_data:/var/lib/postgresql/data + restart: unless-stopped + networks: + - shared_network + +volumes: + tfrs_data: + nifi_output: + nifi_scripts: + +networks: + shared_network: + external: true + driver: bridge \ No newline at end of file diff --git a/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/30d1071b-382a-435b-bfb7-680d4d958f75/1/1.snapshot b/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/30d1071b-382a-435b-bfb7-680d4d958f75/1/1.snapshot new file mode 100644 index 000000000..81dafeaae --- /dev/null +++ b/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/30d1071b-382a-435b-bfb7-680d4d958f75/1/1.snapshot @@ -0,0 +1,144 @@ +{ + "header" : { + "dataModelVersion" : "3" + }, + "content" : { + "flowSnapshot" : { + "externalControllerServices" : { + "c3d0e746-e921-3321-ae2d-584895e9dc32" : { + "identifier" : "c3d0e746-e921-3321-ae2d-584895e9dc32", + "name" : "TFRS" + }, + "1c6bdea5-84e0-3517-82d2-ad5fc8e6432c" : { + "identifier" : "1c6bdea5-84e0-3517-82d2-ad5fc8e6432c", + "name" : "LCFS" + } + }, + "flowContents" : { + "comments" : "", + "componentType" : "PROCESS_GROUP", + "connections" : [ ], + "controllerServices" : [ ], + "defaultBackPressureDataSizeThreshold" : "1 GB", + "defaultBackPressureObjectThreshold" : 10000, + "defaultFlowFileExpiration" : "0 sec", + "flowFileConcurrency" : "UNBOUNDED", + "flowFileOutboundPolicy" : "STREAM_WHEN_AVAILABLE", + "funnels" : [ ], + "identifier" : "bb9b9cd3-0464-3f72-b036-ac335d4210e2", + "inputPorts" : [ ], + "instanceIdentifier" : "4f0ec623-0193-1000-0000-0000246af8c6", + "labels" : [ { + "componentType" : "LABEL", + "groupIdentifier" : "bb9b9cd3-0464-3f72-b036-ac335d4210e2", + "height" : 40.0, + "identifier" : "f2b53acb-0172-3f57-9817-e861bfc5c700", + "instanceIdentifier" : "3ca638d8-c01c-11a8-bf7a-dfc50000dda3", + "label" : "Download credit_trade and credit_trade_history information from \nTFRS database to load onto LCFS database (Transaction, Transfer and transfer_history tables)", + "position" : { + "x" : 1160.0, + "y" : 568.0 + }, + "style" : { + "font-size" : "14px" + }, + "width" : 
600.0, + "zIndex" : 0 + } ], + "name" : "Transfers transactions", + "outputPorts" : [ ], + "position" : { + "x" : 1160.0, + "y" : 560.0 + }, + "processGroups" : [ ], + "processors" : [ { + "autoTerminatedRelationships" : [ "success", "failure" ], + "backoffMechanism" : "PENALIZE_FLOWFILE", + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-groovyx-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "PROCESSOR", + "concurrentlySchedulableTaskCount" : 1, + "executionNode" : "ALL", + "groupIdentifier" : "bb9b9cd3-0464-3f72-b036-ac335d4210e2", + "identifier" : "7ed356d0-2363-3992-8e60-eb5b5ce86f80", + "instanceIdentifier" : "b9d73248-1438-1418-a736-cc94c8c21e70", + "maxBackoffPeriod" : "10 mins", + "name" : "Transfer data ETL", + "penaltyDuration" : "30 sec", + "position" : { + "x" : 1168.0, + "y" : 624.0 + }, + "properties" : { + "SQL.lcfs" : "1c6bdea5-84e0-3517-82d2-ad5fc8e6432c", + "groovyx-failure-strategy" : "rollback", + "SQL.tfrs" : "c3d0e746-e921-3321-ae2d-584895e9dc32", + "groovyx-script-file" : "/opt/nifi/nifi-current/nifi_scripts/transfer.groovy" + }, + "propertyDescriptors" : { + "SQL.lcfs" : { + "displayName" : "SQL.lcfs", + "identifiesControllerService" : true, + "name" : "SQL.lcfs", + "sensitive" : false + }, + "groovyx-script-body" : { + "displayName" : "Script Body", + "identifiesControllerService" : false, + "name" : "groovyx-script-body", + "sensitive" : false + }, + "groovyx-failure-strategy" : { + "displayName" : "Failure strategy", + "identifiesControllerService" : false, + "name" : "groovyx-failure-strategy", + "sensitive" : false + }, + "groovyx-additional-classpath" : { + "displayName" : "Additional classpath", + "identifiesControllerService" : false, + "name" : "groovyx-additional-classpath", + "sensitive" : false + }, + "SQL.tfrs" : { + "displayName" : "SQL.tfrs", + "identifiesControllerService" : true, + "name" : "SQL.tfrs", + "sensitive" : false + }, + "groovyx-script-file" : { + "displayName" : "Script File", + "identifiesControllerService" : false, + "name" : "groovyx-script-file", + "resourceDefinition" : { + "cardinality" : "SINGLE", + "resourceTypes" : [ "FILE" ] + }, + "sensitive" : false + } + }, + "retriedRelationships" : [ ], + "retryCount" : 10, + "runDurationMillis" : 0, + "scheduledState" : "ENABLED", + "schedulingPeriod" : "365 day", + "schedulingStrategy" : "TIMER_DRIVEN", + "style" : { }, + "type" : "org.apache.nifi.processors.groovyx.ExecuteGroovyScript", + "yieldDuration" : "1 sec" + } ], + "remoteProcessGroups" : [ ], + "variables" : { } + }, + "flowEncodingVersion" : "1.0", + "parameterContexts" : { }, + "parameterProviders" : { } + } + } +} \ No newline at end of file diff --git a/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/30d1071b-382a-435b-bfb7-680d4d958f75/2/2.snapshot b/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/30d1071b-382a-435b-bfb7-680d4d958f75/2/2.snapshot new file mode 100644 index 000000000..d6d20a1b9 --- /dev/null +++ b/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/30d1071b-382a-435b-bfb7-680d4d958f75/2/2.snapshot @@ -0,0 +1,144 @@ +{ + "header" : { + "dataModelVersion" : "3" + }, + "content" : { + "flowSnapshot" : { + "externalControllerServices" : { + "c3d0e746-e921-3321-ae2d-584895e9dc32" : { + "identifier" : "c3d0e746-e921-3321-ae2d-584895e9dc32", + "name" : "TFRS" + }, + "1c6bdea5-84e0-3517-82d2-ad5fc8e6432c" : { + "identifier" : "1c6bdea5-84e0-3517-82d2-ad5fc8e6432c", + "name" : "LCFS" + } + }, + "flowContents" : { + "comments" : "", 
+ "componentType" : "PROCESS_GROUP", + "connections" : [ ], + "controllerServices" : [ ], + "defaultBackPressureDataSizeThreshold" : "1 GB", + "defaultBackPressureObjectThreshold" : 10000, + "defaultFlowFileExpiration" : "0 sec", + "flowFileConcurrency" : "UNBOUNDED", + "flowFileOutboundPolicy" : "STREAM_WHEN_AVAILABLE", + "funnels" : [ ], + "identifier" : "bb9b9cd3-0464-3f72-b036-ac335d4210e2", + "inputPorts" : [ ], + "instanceIdentifier" : "4f0ec623-0193-1000-0000-0000246af8c6", + "labels" : [ { + "componentType" : "LABEL", + "groupIdentifier" : "bb9b9cd3-0464-3f72-b036-ac335d4210e2", + "height" : 96.0, + "identifier" : "f2b53acb-0172-3f57-9817-e861bfc5c700", + "instanceIdentifier" : "3ca638d8-c01c-11a8-bf7a-dfc50000dda3", + "label" : "Download credit_trade and credit_trade_history information from \nTFRS database to load onto LCFS database (Transaction, Transfer and transfer_history tables)\n\nNote: Ensure identity data is loaded prior to running this script.", + "position" : { + "x" : 1080.0, + "y" : 504.0 + }, + "style" : { + "font-size" : "14px" + }, + "width" : 592.0, + "zIndex" : 0 + } ], + "name" : "Transfers transactions", + "outputPorts" : [ ], + "position" : { + "x" : 1160.0, + "y" : 560.0 + }, + "processGroups" : [ ], + "processors" : [ { + "autoTerminatedRelationships" : [ "success", "failure" ], + "backoffMechanism" : "PENALIZE_FLOWFILE", + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-groovyx-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "PROCESSOR", + "concurrentlySchedulableTaskCount" : 1, + "executionNode" : "ALL", + "groupIdentifier" : "bb9b9cd3-0464-3f72-b036-ac335d4210e2", + "identifier" : "7ed356d0-2363-3992-8e60-eb5b5ce86f80", + "instanceIdentifier" : "b9d73248-1438-1418-a736-cc94c8c21e70", + "maxBackoffPeriod" : "10 mins", + "name" : "Transfer data ETL", + "penaltyDuration" : "30 sec", + "position" : { + "x" : 1168.0, + "y" : 624.0 + }, + "properties" : { + "SQL.lcfs" : "1c6bdea5-84e0-3517-82d2-ad5fc8e6432c", + "groovyx-failure-strategy" : "rollback", + "SQL.tfrs" : "c3d0e746-e921-3321-ae2d-584895e9dc32", + "groovyx-script-file" : "/opt/nifi/nifi-current/nifi_scripts/transfer.groovy" + }, + "propertyDescriptors" : { + "SQL.lcfs" : { + "displayName" : "SQL.lcfs", + "identifiesControllerService" : true, + "name" : "SQL.lcfs", + "sensitive" : false + }, + "groovyx-script-body" : { + "displayName" : "Script Body", + "identifiesControllerService" : false, + "name" : "groovyx-script-body", + "sensitive" : false + }, + "groovyx-failure-strategy" : { + "displayName" : "Failure strategy", + "identifiesControllerService" : false, + "name" : "groovyx-failure-strategy", + "sensitive" : false + }, + "groovyx-additional-classpath" : { + "displayName" : "Additional classpath", + "identifiesControllerService" : false, + "name" : "groovyx-additional-classpath", + "sensitive" : false + }, + "SQL.tfrs" : { + "displayName" : "SQL.tfrs", + "identifiesControllerService" : true, + "name" : "SQL.tfrs", + "sensitive" : false + }, + "groovyx-script-file" : { + "displayName" : "Script File", + "identifiesControllerService" : false, + "name" : "groovyx-script-file", + "resourceDefinition" : { + "cardinality" : "SINGLE", + "resourceTypes" : [ "FILE" ] + }, + "sensitive" : false + } + }, + "retriedRelationships" : [ ], + "retryCount" : 10, + "runDurationMillis" : 0, + "scheduledState" : "ENABLED", + "schedulingPeriod" : "365 day", + "schedulingStrategy" : "TIMER_DRIVEN", + "style" : { }, + "type" : 
"org.apache.nifi.processors.groovyx.ExecuteGroovyScript", + "yieldDuration" : "1 sec" + } ], + "remoteProcessGroups" : [ ], + "variables" : { } + }, + "flowEncodingVersion" : "1.0", + "parameterContexts" : { }, + "parameterProviders" : { } + } + } +} \ No newline at end of file diff --git a/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/3f1fbc95-d88e-4efe-a816-e63cff4fd5ee/1/1.snapshot b/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/3f1fbc95-d88e-4efe-a816-e63cff4fd5ee/1/1.snapshot new file mode 100644 index 000000000..afc477425 --- /dev/null +++ b/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/3f1fbc95-d88e-4efe-a816-e63cff4fd5ee/1/1.snapshot @@ -0,0 +1,814 @@ +{ + "header" : { + "dataModelVersion" : "3" + }, + "content" : { + "flowSnapshot" : { + "externalControllerServices" : { }, + "flowContents" : { + "comments" : "", + "componentType" : "PROCESS_GROUP", + "connections" : [ ], + "controllerServices" : [ { + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-dbcp-service-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "CONTROLLER_SERVICE", + "controllerServiceApis" : [ { + "bundle" : { + "artifact" : "nifi-standard-services-api-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "type" : "org.apache.nifi.dbcp.DBCPService" + } ], + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "d5d7894c-6550-3397-bf65-018d28caf185", + "instanceIdentifier" : "321cfb6e-0192-1000-ffff-ffffb87bc99d", + "name" : "LCFS", + "properties" : { + "dbcp-min-idle-conns" : "0", + "Max Wait Time" : "500 millis", + "Database Driver Class Name" : "org.postgresql.Driver", + "dbcp-min-evictable-idle-time" : "30 mins", + "Max Total Connections" : "8", + "dbcp-max-conn-lifetime" : "-1", + "Database Connection URL" : "jdbc:postgresql://db:5432/lcfs", + "dbcp-time-between-eviction-runs" : "-1", + "Database User" : "lcfs", + "dbcp-soft-min-evictable-idle-time" : "-1", + "dbcp-max-idle-conns" : "8" + }, + "propertyDescriptors" : { + "kerberos-password" : { + "displayName" : "Kerberos Password", + "identifiesControllerService" : false, + "name" : "kerberos-password", + "sensitive" : true + }, + "dbcp-min-idle-conns" : { + "displayName" : "Minimum Idle Connections", + "identifiesControllerService" : false, + "name" : "dbcp-min-idle-conns", + "sensitive" : false + }, + "Max Wait Time" : { + "displayName" : "Max Wait Time", + "identifiesControllerService" : false, + "name" : "Max Wait Time", + "sensitive" : false + }, + "Database Driver Class Name" : { + "displayName" : "Database Driver Class Name", + "identifiesControllerService" : false, + "name" : "Database Driver Class Name", + "sensitive" : false + }, + "dbcp-min-evictable-idle-time" : { + "displayName" : "Minimum Evictable Idle Time", + "identifiesControllerService" : false, + "name" : "dbcp-min-evictable-idle-time", + "sensitive" : false + }, + "kerberos-principal" : { + "displayName" : "Kerberos Principal", + "identifiesControllerService" : false, + "name" : "kerberos-principal", + "sensitive" : false + }, + "Max Total Connections" : { + "displayName" : "Max Total Connections", + "identifiesControllerService" : false, + "name" : "Max Total Connections", + "sensitive" : false + }, + "kerberos-credentials-service" : { + "displayName" : "Kerberos Credentials Service", + "identifiesControllerService" : true, + "name" : "kerberos-credentials-service", + "sensitive" : false + }, + "dbcp-max-conn-lifetime" : { + "displayName" : "Max Connection 
Lifetime", + "identifiesControllerService" : false, + "name" : "dbcp-max-conn-lifetime", + "sensitive" : false + }, + "Validation-query" : { + "displayName" : "Validation query", + "identifiesControllerService" : false, + "name" : "Validation-query", + "sensitive" : false + }, + "Database Connection URL" : { + "displayName" : "Database Connection URL", + "identifiesControllerService" : false, + "name" : "Database Connection URL", + "sensitive" : false + }, + "dbcp-time-between-eviction-runs" : { + "displayName" : "Time Between Eviction Runs", + "identifiesControllerService" : false, + "name" : "dbcp-time-between-eviction-runs", + "sensitive" : false + }, + "Database User" : { + "displayName" : "Database User", + "identifiesControllerService" : false, + "name" : "Database User", + "sensitive" : false + }, + "kerberos-user-service" : { + "displayName" : "Kerberos User Service", + "identifiesControllerService" : true, + "name" : "kerberos-user-service", + "sensitive" : false + }, + "dbcp-soft-min-evictable-idle-time" : { + "displayName" : "Soft Minimum Evictable Idle Time", + "identifiesControllerService" : false, + "name" : "dbcp-soft-min-evictable-idle-time", + "sensitive" : false + }, + "database-driver-locations" : { + "displayName" : "Database Driver Location(s)", + "identifiesControllerService" : false, + "name" : "database-driver-locations", + "resourceDefinition" : { + "cardinality" : "MULTIPLE", + "resourceTypes" : [ "URL", "FILE", "DIRECTORY" ] + }, + "sensitive" : false + }, + "dbcp-max-idle-conns" : { + "displayName" : "Max Idle Connections", + "identifiesControllerService" : false, + "name" : "dbcp-max-idle-conns", + "sensitive" : false + }, + "Password" : { + "displayName" : "Password", + "identifiesControllerService" : false, + "name" : "Password", + "sensitive" : true + } + }, + "scheduledState" : "DISABLED", + "type" : "org.apache.nifi.dbcp.DBCPConnectionPool" + }, { + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-dbcp-service-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "CONTROLLER_SERVICE", + "controllerServiceApis" : [ { + "bundle" : { + "artifact" : "nifi-standard-services-api-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "type" : "org.apache.nifi.dbcp.DBCPService" + } ], + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "50abc554-57ce-314d-87f9-270eaef9ce85", + "instanceIdentifier" : "1cc00211-a467-3758-9a65-c05a679c4ce2", + "name" : "TFRS", + "properties" : { + "dbcp-min-idle-conns" : "0", + "Max Wait Time" : "500 millis", + "Database Driver Class Name" : "org.postgresql.Driver", + "dbcp-min-evictable-idle-time" : "30 mins", + "Max Total Connections" : "8", + "dbcp-max-conn-lifetime" : "-1", + "Database Connection URL" : "jdbc:postgresql://tfrs:5435/tfrs", + "dbcp-time-between-eviction-runs" : "-1", + "Database User" : "tfrs", + "dbcp-soft-min-evictable-idle-time" : "-1", + "dbcp-max-idle-conns" : "8" + }, + "propertyDescriptors" : { + "kerberos-password" : { + "displayName" : "Kerberos Password", + "identifiesControllerService" : false, + "name" : "kerberos-password", + "sensitive" : true + }, + "dbcp-min-idle-conns" : { + "displayName" : "Minimum Idle Connections", + "identifiesControllerService" : false, + "name" : "dbcp-min-idle-conns", + "sensitive" : false + }, + "Max Wait Time" : { + "displayName" : "Max Wait Time", + "identifiesControllerService" : false, + "name" : "Max Wait Time", + "sensitive" : false + }, + "Database Driver Class Name" 
: { + "displayName" : "Database Driver Class Name", + "identifiesControllerService" : false, + "name" : "Database Driver Class Name", + "sensitive" : false + }, + "dbcp-min-evictable-idle-time" : { + "displayName" : "Minimum Evictable Idle Time", + "identifiesControllerService" : false, + "name" : "dbcp-min-evictable-idle-time", + "sensitive" : false + }, + "kerberos-principal" : { + "displayName" : "Kerberos Principal", + "identifiesControllerService" : false, + "name" : "kerberos-principal", + "sensitive" : false + }, + "Max Total Connections" : { + "displayName" : "Max Total Connections", + "identifiesControllerService" : false, + "name" : "Max Total Connections", + "sensitive" : false + }, + "kerberos-credentials-service" : { + "displayName" : "Kerberos Credentials Service", + "identifiesControllerService" : true, + "name" : "kerberos-credentials-service", + "sensitive" : false + }, + "dbcp-max-conn-lifetime" : { + "displayName" : "Max Connection Lifetime", + "identifiesControllerService" : false, + "name" : "dbcp-max-conn-lifetime", + "sensitive" : false + }, + "Validation-query" : { + "displayName" : "Validation query", + "identifiesControllerService" : false, + "name" : "Validation-query", + "sensitive" : false + }, + "Database Connection URL" : { + "displayName" : "Database Connection URL", + "identifiesControllerService" : false, + "name" : "Database Connection URL", + "sensitive" : false + }, + "dbcp-time-between-eviction-runs" : { + "displayName" : "Time Between Eviction Runs", + "identifiesControllerService" : false, + "name" : "dbcp-time-between-eviction-runs", + "sensitive" : false + }, + "Database User" : { + "displayName" : "Database User", + "identifiesControllerService" : false, + "name" : "Database User", + "sensitive" : false + }, + "kerberos-user-service" : { + "displayName" : "Kerberos User Service", + "identifiesControllerService" : true, + "name" : "kerberos-user-service", + "sensitive" : false + }, + "dbcp-soft-min-evictable-idle-time" : { + "displayName" : "Soft Minimum Evictable Idle Time", + "identifiesControllerService" : false, + "name" : "dbcp-soft-min-evictable-idle-time", + "sensitive" : false + }, + "database-driver-locations" : { + "displayName" : "Database Driver Location(s)", + "identifiesControllerService" : false, + "name" : "database-driver-locations", + "resourceDefinition" : { + "cardinality" : "MULTIPLE", + "resourceTypes" : [ "URL", "FILE", "DIRECTORY" ] + }, + "sensitive" : false + }, + "dbcp-max-idle-conns" : { + "displayName" : "Max Idle Connections", + "identifiesControllerService" : false, + "name" : "dbcp-max-idle-conns", + "sensitive" : false + }, + "Password" : { + "displayName" : "Password", + "identifiesControllerService" : false, + "name" : "Password", + "sensitive" : true + } + }, + "scheduledState" : "DISABLED", + "type" : "org.apache.nifi.dbcp.DBCPConnectionPool" + }, { + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-record-serialization-services-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "CONTROLLER_SERVICE", + "controllerServiceApis" : [ { + "bundle" : { + "artifact" : "nifi-standard-services-api-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "type" : "org.apache.nifi.serialization.RecordReaderFactory" + } ], + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "eb16f9e4-c948-3849-8efe-c3079a6ba952", + "instanceIdentifier" : "9cceebb4-8e43-3ac7-8b6d-cc0dce4c9697", + "name" : "AvroReader", + "properties" : { + 
"schema-name" : "${schema.name}", + "cache-size" : "1000", + "schema-access-strategy" : "embedded-avro-schema", + "schema-text" : "${avro.schema}" + }, + "propertyDescriptors" : { + "schema-branch" : { + "displayName" : "Schema Branch", + "identifiesControllerService" : false, + "name" : "schema-branch", + "sensitive" : false + }, + "schema-name" : { + "displayName" : "Schema Name", + "identifiesControllerService" : false, + "name" : "schema-name", + "sensitive" : false + }, + "cache-size" : { + "displayName" : "Cache Size", + "identifiesControllerService" : false, + "name" : "cache-size", + "sensitive" : false + }, + "schema-registry" : { + "displayName" : "Schema Registry", + "identifiesControllerService" : true, + "name" : "schema-registry", + "sensitive" : false + }, + "schema-access-strategy" : { + "displayName" : "Schema Access Strategy", + "identifiesControllerService" : false, + "name" : "schema-access-strategy", + "sensitive" : false + }, + "schema-version" : { + "displayName" : "Schema Version", + "identifiesControllerService" : false, + "name" : "schema-version", + "sensitive" : false + }, + "schema-text" : { + "displayName" : "Schema Text", + "identifiesControllerService" : false, + "name" : "schema-text", + "sensitive" : false + } + }, + "scheduledState" : "DISABLED", + "type" : "org.apache.nifi.avro.AvroReader" + }, { + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-record-serialization-services-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "CONTROLLER_SERVICE", + "controllerServiceApis" : [ { + "bundle" : { + "artifact" : "nifi-standard-services-api-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "type" : "org.apache.nifi.serialization.RecordSetWriterFactory" + } ], + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "e3d3c133-8004-3880-8150-388dfd06818c", + "instanceIdentifier" : "e73bd6b0-8a3e-395c-bfb8-bc1f4e72e1f7", + "name" : "JsonRecordSetWriter", + "properties" : { + "Allow Scientific Notation" : "false", + "compression-level" : "1", + "Pretty Print JSON" : "false", + "compression-format" : "none", + "Schema Write Strategy" : "no-schema", + "suppress-nulls" : "never-suppress", + "output-grouping" : "output-array", + "schema-name" : "${schema.name}", + "schema-access-strategy" : "inherit-record-schema", + "schema-protocol-version" : "1", + "schema-text" : "${avro.schema}" + }, + "propertyDescriptors" : { + "schema-branch" : { + "displayName" : "Schema Branch", + "identifiesControllerService" : false, + "name" : "schema-branch", + "sensitive" : false + }, + "Allow Scientific Notation" : { + "displayName" : "Allow Scientific Notation", + "identifiesControllerService" : false, + "name" : "Allow Scientific Notation", + "sensitive" : false + }, + "compression-level" : { + "displayName" : "Compression Level", + "identifiesControllerService" : false, + "name" : "compression-level", + "sensitive" : false + }, + "schema-cache" : { + "displayName" : "Schema Cache", + "identifiesControllerService" : true, + "name" : "schema-cache", + "sensitive" : false + }, + "Timestamp Format" : { + "displayName" : "Timestamp Format", + "identifiesControllerService" : false, + "name" : "Timestamp Format", + "sensitive" : false + }, + "Date Format" : { + "displayName" : "Date Format", + "identifiesControllerService" : false, + "name" : "Date Format", + "sensitive" : false + }, + "Pretty Print JSON" : { + "displayName" : "Pretty Print JSON", + "identifiesControllerService" : 
false, + "name" : "Pretty Print JSON", + "sensitive" : false + }, + "compression-format" : { + "displayName" : "Compression Format", + "identifiesControllerService" : false, + "name" : "compression-format", + "sensitive" : false + }, + "Schema Write Strategy" : { + "displayName" : "Schema Write Strategy", + "identifiesControllerService" : false, + "name" : "Schema Write Strategy", + "sensitive" : false + }, + "suppress-nulls" : { + "displayName" : "Suppress Null Values", + "identifiesControllerService" : false, + "name" : "suppress-nulls", + "sensitive" : false + }, + "output-grouping" : { + "displayName" : "Output Grouping", + "identifiesControllerService" : false, + "name" : "output-grouping", + "sensitive" : false + }, + "schema-name" : { + "displayName" : "Schema Name", + "identifiesControllerService" : false, + "name" : "schema-name", + "sensitive" : false + }, + "schema-registry" : { + "displayName" : "Schema Registry", + "identifiesControllerService" : true, + "name" : "schema-registry", + "sensitive" : false + }, + "Time Format" : { + "displayName" : "Time Format", + "identifiesControllerService" : false, + "name" : "Time Format", + "sensitive" : false + }, + "schema-access-strategy" : { + "displayName" : "Schema Access Strategy", + "identifiesControllerService" : false, + "name" : "schema-access-strategy", + "sensitive" : false + }, + "schema-protocol-version" : { + "displayName" : "Schema Protocol Version", + "identifiesControllerService" : false, + "name" : "schema-protocol-version", + "sensitive" : false + }, + "schema-version" : { + "displayName" : "Schema Version", + "identifiesControllerService" : false, + "name" : "schema-version", + "sensitive" : false + }, + "schema-text" : { + "displayName" : "Schema Text", + "identifiesControllerService" : false, + "name" : "schema-text", + "sensitive" : false + } + }, + "scheduledState" : "DISABLED", + "type" : "org.apache.nifi.json.JsonRecordSetWriter" + }, { + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-record-serialization-services-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "CONTROLLER_SERVICE", + "controllerServiceApis" : [ { + "bundle" : { + "artifact" : "nifi-standard-services-api-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "type" : "org.apache.nifi.serialization.RecordReaderFactory" + } ], + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "4850ced9-1430-3cec-9720-4fab95e71836", + "instanceIdentifier" : "722cb190-da9b-3cf5-98a0-e572dd9d412e", + "name" : "JsonTreeReader", + "properties" : { + "Max String Length" : "20 MB", + "schema-application-strategy" : "SELECTED_PART", + "schema-name" : "${schema.name}", + "starting-field-strategy" : "ROOT_NODE", + "schema-access-strategy" : "infer-schema", + "schema-text" : "${avro.schema}", + "Allow Comments" : "false" + }, + "propertyDescriptors" : { + "schema-branch" : { + "displayName" : "Schema Branch", + "identifiesControllerService" : false, + "name" : "schema-branch", + "sensitive" : false + }, + "Max String Length" : { + "displayName" : "Max String Length", + "identifiesControllerService" : false, + "name" : "Max String Length", + "sensitive" : false + }, + "schema-application-strategy" : { + "displayName" : "Schema Application Strategy", + "identifiesControllerService" : false, + "name" : "schema-application-strategy", + "sensitive" : false + }, + "Timestamp Format" : { + "displayName" : "Timestamp Format", + "identifiesControllerService" : false, + 
"name" : "Timestamp Format", + "sensitive" : false + }, + "schema-inference-cache" : { + "displayName" : "Schema Inference Cache", + "identifiesControllerService" : true, + "name" : "schema-inference-cache", + "sensitive" : false + }, + "Date Format" : { + "displayName" : "Date Format", + "identifiesControllerService" : false, + "name" : "Date Format", + "sensitive" : false + }, + "schema-name" : { + "displayName" : "Schema Name", + "identifiesControllerService" : false, + "name" : "schema-name", + "sensitive" : false + }, + "starting-field-strategy" : { + "displayName" : "Starting Field Strategy", + "identifiesControllerService" : false, + "name" : "starting-field-strategy", + "sensitive" : false + }, + "schema-registry" : { + "displayName" : "Schema Registry", + "identifiesControllerService" : true, + "name" : "schema-registry", + "sensitive" : false + }, + "starting-field-name" : { + "displayName" : "Starting Field Name", + "identifiesControllerService" : false, + "name" : "starting-field-name", + "sensitive" : false + }, + "Time Format" : { + "displayName" : "Time Format", + "identifiesControllerService" : false, + "name" : "Time Format", + "sensitive" : false + }, + "schema-access-strategy" : { + "displayName" : "Schema Access Strategy", + "identifiesControllerService" : false, + "name" : "schema-access-strategy", + "sensitive" : false + }, + "schema-version" : { + "displayName" : "Schema Version", + "identifiesControllerService" : false, + "name" : "schema-version", + "sensitive" : false + }, + "schema-text" : { + "displayName" : "Schema Text", + "identifiesControllerService" : false, + "name" : "schema-text", + "sensitive" : false + }, + "Allow Comments" : { + "displayName" : "Allow Comments", + "identifiesControllerService" : false, + "name" : "Allow Comments", + "sensitive" : false + } + }, + "scheduledState" : "DISABLED", + "type" : "org.apache.nifi.json.JsonTreeReader" + } ], + "defaultBackPressureDataSizeThreshold" : "1 GB", + "defaultBackPressureObjectThreshold" : 10000, + "defaultFlowFileExpiration" : "0 sec", + "flowFileConcurrency" : "UNBOUNDED", + "flowFileOutboundPolicy" : "STREAM_WHEN_AVAILABLE", + "funnels" : [ ], + "identifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "inputPorts" : [ ], + "instanceIdentifier" : "321c623c-0192-1000-ffff-ffffa5255e12", + "labels" : [ ], + "name" : "Identity data transfer", + "outputPorts" : [ ], + "position" : { + "x" : 614.0, + "y" : 160.0 + }, + "processGroups" : [ ], + "processors" : [ { + "autoTerminatedRelationships" : [ "success", "failure" ], + "backoffMechanism" : "PENALIZE_FLOWFILE", + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-standard-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "PROCESSOR", + "concurrentlySchedulableTaskCount" : 1, + "executionNode" : "ALL", + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "a2f0e8b2-25aa-3468-b8b6-3f9c3cc32adb", + "instanceIdentifier" : "322bd9a8-0192-1000-ffff-ffffa9963f69", + "maxBackoffPeriod" : "10 mins", + "name" : "ExecuteSQL", + "penaltyDuration" : "30 sec", + "position" : { + "x" : 1510.3360373456674, + "y" : 852.1890375717256 + }, + "properties" : { + "esql-max-rows" : "0", + "dbf-default-precision" : "10", + "Max Wait Time" : "0 seconds", + "Database Connection Pooling Service" : "d5d7894c-6550-3397-bf65-018d28caf185", + "esql-auto-commit" : "true", + "dbf-user-logical-types" : "false", + "dbf-default-scale" : "0", + "compression-format" : "NONE", + 
"esql-output-batch-size" : "0", + "esql-fetch-size" : "0", + "SQL select query" : "select * from user_profile", + "dbf-normalize" : "false" + }, + "propertyDescriptors" : { + "esql-max-rows" : { + "displayName" : "Max Rows Per Flow File", + "identifiesControllerService" : false, + "name" : "esql-max-rows", + "sensitive" : false + }, + "dbf-default-precision" : { + "displayName" : "Default Decimal Precision", + "identifiesControllerService" : false, + "name" : "dbf-default-precision", + "sensitive" : false + }, + "Max Wait Time" : { + "displayName" : "Max Wait Time", + "identifiesControllerService" : false, + "name" : "Max Wait Time", + "sensitive" : false + }, + "Database Connection Pooling Service" : { + "displayName" : "Database Connection Pooling Service", + "identifiesControllerService" : true, + "name" : "Database Connection Pooling Service", + "sensitive" : false + }, + "sql-post-query" : { + "displayName" : "SQL Post-Query", + "identifiesControllerService" : false, + "name" : "sql-post-query", + "sensitive" : false + }, + "esql-auto-commit" : { + "displayName" : "Set Auto Commit", + "identifiesControllerService" : false, + "name" : "esql-auto-commit", + "sensitive" : false + }, + "dbf-user-logical-types" : { + "displayName" : "Use Avro Logical Types", + "identifiesControllerService" : false, + "name" : "dbf-user-logical-types", + "sensitive" : false + }, + "dbf-default-scale" : { + "displayName" : "Default Decimal Scale", + "identifiesControllerService" : false, + "name" : "dbf-default-scale", + "sensitive" : false + }, + "sql-pre-query" : { + "displayName" : "SQL Pre-Query", + "identifiesControllerService" : false, + "name" : "sql-pre-query", + "sensitive" : false + }, + "compression-format" : { + "displayName" : "Compression Format", + "identifiesControllerService" : false, + "name" : "compression-format", + "sensitive" : false + }, + "esql-output-batch-size" : { + "displayName" : "Output Batch Size", + "identifiesControllerService" : false, + "name" : "esql-output-batch-size", + "sensitive" : false + }, + "esql-fetch-size" : { + "displayName" : "Fetch Size", + "identifiesControllerService" : false, + "name" : "esql-fetch-size", + "sensitive" : false + }, + "SQL select query" : { + "displayName" : "SQL select query", + "identifiesControllerService" : false, + "name" : "SQL select query", + "sensitive" : false + }, + "dbf-normalize" : { + "displayName" : "Normalize Table/Column Names", + "identifiesControllerService" : false, + "name" : "dbf-normalize", + "sensitive" : false + } + }, + "retriedRelationships" : [ ], + "retryCount" : 10, + "runDurationMillis" : 0, + "scheduledState" : "ENABLED", + "schedulingPeriod" : "0 sec", + "schedulingStrategy" : "TIMER_DRIVEN", + "style" : { }, + "type" : "org.apache.nifi.processors.standard.ExecuteSQL", + "yieldDuration" : "1 sec" + } ], + "remoteProcessGroups" : [ ], + "variables" : { } + }, + "flowEncodingVersion" : "1.0", + "parameterContexts" : { }, + "parameterProviders" : { } + } + } +} \ No newline at end of file diff --git a/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/3f1fbc95-d88e-4efe-a816-e63cff4fd5ee/2/2.snapshot b/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/3f1fbc95-d88e-4efe-a816-e63cff4fd5ee/2/2.snapshot new file mode 100644 index 000000000..95f4d8470 --- /dev/null +++ b/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/3f1fbc95-d88e-4efe-a816-e63cff4fd5ee/2/2.snapshot @@ -0,0 +1,748 @@ +{ + "header" : { + "dataModelVersion" : "3" + }, + "content" : { + "flowSnapshot" : { + "externalControllerServices" : 
{ }, + "flowContents" : { + "comments" : "", + "componentType" : "PROCESS_GROUP", + "connections" : [ ], + "controllerServices" : [ { + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-dbcp-service-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "CONTROLLER_SERVICE", + "controllerServiceApis" : [ { + "bundle" : { + "artifact" : "nifi-standard-services-api-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "type" : "org.apache.nifi.dbcp.DBCPService" + } ], + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "d5d7894c-6550-3397-bf65-018d28caf185", + "instanceIdentifier" : "321cfb6e-0192-1000-ffff-ffffb87bc99d", + "name" : "LCFS", + "properties" : { + "dbcp-min-idle-conns" : "0", + "Max Wait Time" : "500 millis", + "Database Driver Class Name" : "org.postgresql.Driver", + "dbcp-min-evictable-idle-time" : "30 mins", + "Max Total Connections" : "8", + "dbcp-max-conn-lifetime" : "-1", + "Database Connection URL" : "jdbc:postgresql://db:5432/lcfs", + "dbcp-time-between-eviction-runs" : "-1", + "Database User" : "lcfs", + "dbcp-soft-min-evictable-idle-time" : "-1", + "dbcp-max-idle-conns" : "8" + }, + "propertyDescriptors" : { + "kerberos-password" : { + "displayName" : "Kerberos Password", + "identifiesControllerService" : false, + "name" : "kerberos-password", + "sensitive" : true + }, + "dbcp-min-idle-conns" : { + "displayName" : "Minimum Idle Connections", + "identifiesControllerService" : false, + "name" : "dbcp-min-idle-conns", + "sensitive" : false + }, + "Max Wait Time" : { + "displayName" : "Max Wait Time", + "identifiesControllerService" : false, + "name" : "Max Wait Time", + "sensitive" : false + }, + "Database Driver Class Name" : { + "displayName" : "Database Driver Class Name", + "identifiesControllerService" : false, + "name" : "Database Driver Class Name", + "sensitive" : false + }, + "dbcp-min-evictable-idle-time" : { + "displayName" : "Minimum Evictable Idle Time", + "identifiesControllerService" : false, + "name" : "dbcp-min-evictable-idle-time", + "sensitive" : false + }, + "kerberos-principal" : { + "displayName" : "Kerberos Principal", + "identifiesControllerService" : false, + "name" : "kerberos-principal", + "sensitive" : false + }, + "Max Total Connections" : { + "displayName" : "Max Total Connections", + "identifiesControllerService" : false, + "name" : "Max Total Connections", + "sensitive" : false + }, + "kerberos-credentials-service" : { + "displayName" : "Kerberos Credentials Service", + "identifiesControllerService" : true, + "name" : "kerberos-credentials-service", + "sensitive" : false + }, + "dbcp-max-conn-lifetime" : { + "displayName" : "Max Connection Lifetime", + "identifiesControllerService" : false, + "name" : "dbcp-max-conn-lifetime", + "sensitive" : false + }, + "Validation-query" : { + "displayName" : "Validation query", + "identifiesControllerService" : false, + "name" : "Validation-query", + "sensitive" : false + }, + "Database Connection URL" : { + "displayName" : "Database Connection URL", + "identifiesControllerService" : false, + "name" : "Database Connection URL", + "sensitive" : false + }, + "dbcp-time-between-eviction-runs" : { + "displayName" : "Time Between Eviction Runs", + "identifiesControllerService" : false, + "name" : "dbcp-time-between-eviction-runs", + "sensitive" : false + }, + "Database User" : { + "displayName" : "Database User", + "identifiesControllerService" : false, + "name" : "Database User", + "sensitive" : false + }, 
+ "kerberos-user-service" : { + "displayName" : "Kerberos User Service", + "identifiesControllerService" : true, + "name" : "kerberos-user-service", + "sensitive" : false + }, + "dbcp-soft-min-evictable-idle-time" : { + "displayName" : "Soft Minimum Evictable Idle Time", + "identifiesControllerService" : false, + "name" : "dbcp-soft-min-evictable-idle-time", + "sensitive" : false + }, + "database-driver-locations" : { + "displayName" : "Database Driver Location(s)", + "identifiesControllerService" : false, + "name" : "database-driver-locations", + "resourceDefinition" : { + "cardinality" : "MULTIPLE", + "resourceTypes" : [ "FILE", "DIRECTORY", "URL" ] + }, + "sensitive" : false + }, + "dbcp-max-idle-conns" : { + "displayName" : "Max Idle Connections", + "identifiesControllerService" : false, + "name" : "dbcp-max-idle-conns", + "sensitive" : false + }, + "Password" : { + "displayName" : "Password", + "identifiesControllerService" : false, + "name" : "Password", + "sensitive" : true + } + }, + "scheduledState" : "DISABLED", + "type" : "org.apache.nifi.dbcp.DBCPConnectionPool" + }, { + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-dbcp-service-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "CONTROLLER_SERVICE", + "controllerServiceApis" : [ { + "bundle" : { + "artifact" : "nifi-standard-services-api-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "type" : "org.apache.nifi.dbcp.DBCPService" + } ], + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "50abc554-57ce-314d-87f9-270eaef9ce85", + "instanceIdentifier" : "1cc00211-a467-3758-9a65-c05a679c4ce2", + "name" : "TFRS", + "properties" : { + "dbcp-min-idle-conns" : "0", + "Max Wait Time" : "500 millis", + "Database Driver Class Name" : "org.postgresql.Driver", + "dbcp-min-evictable-idle-time" : "30 mins", + "Max Total Connections" : "8", + "dbcp-max-conn-lifetime" : "-1", + "Database Connection URL" : "jdbc:postgresql://tfrs:5435/tfrs", + "dbcp-time-between-eviction-runs" : "-1", + "Database User" : "tfrs", + "dbcp-soft-min-evictable-idle-time" : "-1", + "dbcp-max-idle-conns" : "8" + }, + "propertyDescriptors" : { + "kerberos-password" : { + "displayName" : "Kerberos Password", + "identifiesControllerService" : false, + "name" : "kerberos-password", + "sensitive" : true + }, + "dbcp-min-idle-conns" : { + "displayName" : "Minimum Idle Connections", + "identifiesControllerService" : false, + "name" : "dbcp-min-idle-conns", + "sensitive" : false + }, + "Max Wait Time" : { + "displayName" : "Max Wait Time", + "identifiesControllerService" : false, + "name" : "Max Wait Time", + "sensitive" : false + }, + "Database Driver Class Name" : { + "displayName" : "Database Driver Class Name", + "identifiesControllerService" : false, + "name" : "Database Driver Class Name", + "sensitive" : false + }, + "dbcp-min-evictable-idle-time" : { + "displayName" : "Minimum Evictable Idle Time", + "identifiesControllerService" : false, + "name" : "dbcp-min-evictable-idle-time", + "sensitive" : false + }, + "kerberos-principal" : { + "displayName" : "Kerberos Principal", + "identifiesControllerService" : false, + "name" : "kerberos-principal", + "sensitive" : false + }, + "Max Total Connections" : { + "displayName" : "Max Total Connections", + "identifiesControllerService" : false, + "name" : "Max Total Connections", + "sensitive" : false + }, + "kerberos-credentials-service" : { + "displayName" : "Kerberos Credentials Service", + 
"identifiesControllerService" : true, + "name" : "kerberos-credentials-service", + "sensitive" : false + }, + "dbcp-max-conn-lifetime" : { + "displayName" : "Max Connection Lifetime", + "identifiesControllerService" : false, + "name" : "dbcp-max-conn-lifetime", + "sensitive" : false + }, + "Validation-query" : { + "displayName" : "Validation query", + "identifiesControllerService" : false, + "name" : "Validation-query", + "sensitive" : false + }, + "Database Connection URL" : { + "displayName" : "Database Connection URL", + "identifiesControllerService" : false, + "name" : "Database Connection URL", + "sensitive" : false + }, + "dbcp-time-between-eviction-runs" : { + "displayName" : "Time Between Eviction Runs", + "identifiesControllerService" : false, + "name" : "dbcp-time-between-eviction-runs", + "sensitive" : false + }, + "Database User" : { + "displayName" : "Database User", + "identifiesControllerService" : false, + "name" : "Database User", + "sensitive" : false + }, + "kerberos-user-service" : { + "displayName" : "Kerberos User Service", + "identifiesControllerService" : true, + "name" : "kerberos-user-service", + "sensitive" : false + }, + "dbcp-soft-min-evictable-idle-time" : { + "displayName" : "Soft Minimum Evictable Idle Time", + "identifiesControllerService" : false, + "name" : "dbcp-soft-min-evictable-idle-time", + "sensitive" : false + }, + "database-driver-locations" : { + "displayName" : "Database Driver Location(s)", + "identifiesControllerService" : false, + "name" : "database-driver-locations", + "resourceDefinition" : { + "cardinality" : "MULTIPLE", + "resourceTypes" : [ "FILE", "DIRECTORY", "URL" ] + }, + "sensitive" : false + }, + "dbcp-max-idle-conns" : { + "displayName" : "Max Idle Connections", + "identifiesControllerService" : false, + "name" : "dbcp-max-idle-conns", + "sensitive" : false + }, + "Password" : { + "displayName" : "Password", + "identifiesControllerService" : false, + "name" : "Password", + "sensitive" : true + } + }, + "scheduledState" : "DISABLED", + "type" : "org.apache.nifi.dbcp.DBCPConnectionPool" + }, { + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-record-serialization-services-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "CONTROLLER_SERVICE", + "controllerServiceApis" : [ { + "bundle" : { + "artifact" : "nifi-standard-services-api-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "type" : "org.apache.nifi.serialization.RecordReaderFactory" + } ], + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "eb16f9e4-c948-3849-8efe-c3079a6ba952", + "instanceIdentifier" : "9cceebb4-8e43-3ac7-8b6d-cc0dce4c9697", + "name" : "AvroReader", + "properties" : { + "schema-name" : "${schema.name}", + "cache-size" : "1000", + "schema-access-strategy" : "embedded-avro-schema", + "schema-text" : "${avro.schema}" + }, + "propertyDescriptors" : { + "schema-branch" : { + "displayName" : "Schema Branch", + "identifiesControllerService" : false, + "name" : "schema-branch", + "sensitive" : false + }, + "schema-name" : { + "displayName" : "Schema Name", + "identifiesControllerService" : false, + "name" : "schema-name", + "sensitive" : false + }, + "cache-size" : { + "displayName" : "Cache Size", + "identifiesControllerService" : false, + "name" : "cache-size", + "sensitive" : false + }, + "schema-registry" : { + "displayName" : "Schema Registry", + "identifiesControllerService" : true, + "name" : "schema-registry", + "sensitive" : false + }, + 
"schema-access-strategy" : { + "displayName" : "Schema Access Strategy", + "identifiesControllerService" : false, + "name" : "schema-access-strategy", + "sensitive" : false + }, + "schema-version" : { + "displayName" : "Schema Version", + "identifiesControllerService" : false, + "name" : "schema-version", + "sensitive" : false + }, + "schema-text" : { + "displayName" : "Schema Text", + "identifiesControllerService" : false, + "name" : "schema-text", + "sensitive" : false + } + }, + "scheduledState" : "DISABLED", + "type" : "org.apache.nifi.avro.AvroReader" + }, { + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-record-serialization-services-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "CONTROLLER_SERVICE", + "controllerServiceApis" : [ { + "bundle" : { + "artifact" : "nifi-standard-services-api-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "type" : "org.apache.nifi.serialization.RecordSetWriterFactory" + } ], + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "e3d3c133-8004-3880-8150-388dfd06818c", + "instanceIdentifier" : "e73bd6b0-8a3e-395c-bfb8-bc1f4e72e1f7", + "name" : "JsonRecordSetWriter", + "properties" : { + "Allow Scientific Notation" : "false", + "compression-level" : "1", + "Pretty Print JSON" : "false", + "compression-format" : "none", + "Schema Write Strategy" : "no-schema", + "suppress-nulls" : "never-suppress", + "output-grouping" : "output-array", + "schema-name" : "${schema.name}", + "schema-access-strategy" : "inherit-record-schema", + "schema-protocol-version" : "1", + "schema-text" : "${avro.schema}" + }, + "propertyDescriptors" : { + "schema-branch" : { + "displayName" : "Schema Branch", + "identifiesControllerService" : false, + "name" : "schema-branch", + "sensitive" : false + }, + "Allow Scientific Notation" : { + "displayName" : "Allow Scientific Notation", + "identifiesControllerService" : false, + "name" : "Allow Scientific Notation", + "sensitive" : false + }, + "compression-level" : { + "displayName" : "Compression Level", + "identifiesControllerService" : false, + "name" : "compression-level", + "sensitive" : false + }, + "schema-cache" : { + "displayName" : "Schema Cache", + "identifiesControllerService" : true, + "name" : "schema-cache", + "sensitive" : false + }, + "Timestamp Format" : { + "displayName" : "Timestamp Format", + "identifiesControllerService" : false, + "name" : "Timestamp Format", + "sensitive" : false + }, + "Date Format" : { + "displayName" : "Date Format", + "identifiesControllerService" : false, + "name" : "Date Format", + "sensitive" : false + }, + "Pretty Print JSON" : { + "displayName" : "Pretty Print JSON", + "identifiesControllerService" : false, + "name" : "Pretty Print JSON", + "sensitive" : false + }, + "compression-format" : { + "displayName" : "Compression Format", + "identifiesControllerService" : false, + "name" : "compression-format", + "sensitive" : false + }, + "Schema Write Strategy" : { + "displayName" : "Schema Write Strategy", + "identifiesControllerService" : false, + "name" : "Schema Write Strategy", + "sensitive" : false + }, + "suppress-nulls" : { + "displayName" : "Suppress Null Values", + "identifiesControllerService" : false, + "name" : "suppress-nulls", + "sensitive" : false + }, + "output-grouping" : { + "displayName" : "Output Grouping", + "identifiesControllerService" : false, + "name" : "output-grouping", + "sensitive" : false + }, + "schema-name" : { + "displayName" : "Schema Name", + 
"identifiesControllerService" : false, + "name" : "schema-name", + "sensitive" : false + }, + "schema-registry" : { + "displayName" : "Schema Registry", + "identifiesControllerService" : true, + "name" : "schema-registry", + "sensitive" : false + }, + "Time Format" : { + "displayName" : "Time Format", + "identifiesControllerService" : false, + "name" : "Time Format", + "sensitive" : false + }, + "schema-access-strategy" : { + "displayName" : "Schema Access Strategy", + "identifiesControllerService" : false, + "name" : "schema-access-strategy", + "sensitive" : false + }, + "schema-protocol-version" : { + "displayName" : "Schema Protocol Version", + "identifiesControllerService" : false, + "name" : "schema-protocol-version", + "sensitive" : false + }, + "schema-version" : { + "displayName" : "Schema Version", + "identifiesControllerService" : false, + "name" : "schema-version", + "sensitive" : false + }, + "schema-text" : { + "displayName" : "Schema Text", + "identifiesControllerService" : false, + "name" : "schema-text", + "sensitive" : false + } + }, + "scheduledState" : "DISABLED", + "type" : "org.apache.nifi.json.JsonRecordSetWriter" + }, { + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-record-serialization-services-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "CONTROLLER_SERVICE", + "controllerServiceApis" : [ { + "bundle" : { + "artifact" : "nifi-standard-services-api-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "type" : "org.apache.nifi.serialization.RecordReaderFactory" + } ], + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "4850ced9-1430-3cec-9720-4fab95e71836", + "instanceIdentifier" : "722cb190-da9b-3cf5-98a0-e572dd9d412e", + "name" : "JsonTreeReader", + "properties" : { + "Max String Length" : "20 MB", + "schema-application-strategy" : "SELECTED_PART", + "schema-name" : "${schema.name}", + "starting-field-strategy" : "ROOT_NODE", + "schema-access-strategy" : "infer-schema", + "schema-text" : "${avro.schema}", + "Allow Comments" : "false" + }, + "propertyDescriptors" : { + "schema-branch" : { + "displayName" : "Schema Branch", + "identifiesControllerService" : false, + "name" : "schema-branch", + "sensitive" : false + }, + "Max String Length" : { + "displayName" : "Max String Length", + "identifiesControllerService" : false, + "name" : "Max String Length", + "sensitive" : false + }, + "schema-application-strategy" : { + "displayName" : "Schema Application Strategy", + "identifiesControllerService" : false, + "name" : "schema-application-strategy", + "sensitive" : false + }, + "Timestamp Format" : { + "displayName" : "Timestamp Format", + "identifiesControllerService" : false, + "name" : "Timestamp Format", + "sensitive" : false + }, + "schema-inference-cache" : { + "displayName" : "Schema Inference Cache", + "identifiesControllerService" : true, + "name" : "schema-inference-cache", + "sensitive" : false + }, + "Date Format" : { + "displayName" : "Date Format", + "identifiesControllerService" : false, + "name" : "Date Format", + "sensitive" : false + }, + "schema-name" : { + "displayName" : "Schema Name", + "identifiesControllerService" : false, + "name" : "schema-name", + "sensitive" : false + }, + "starting-field-strategy" : { + "displayName" : "Starting Field Strategy", + "identifiesControllerService" : false, + "name" : "starting-field-strategy", + "sensitive" : false + }, + "schema-registry" : { + "displayName" : "Schema Registry", + 
"identifiesControllerService" : true, + "name" : "schema-registry", + "sensitive" : false + }, + "starting-field-name" : { + "displayName" : "Starting Field Name", + "identifiesControllerService" : false, + "name" : "starting-field-name", + "sensitive" : false + }, + "Time Format" : { + "displayName" : "Time Format", + "identifiesControllerService" : false, + "name" : "Time Format", + "sensitive" : false + }, + "schema-access-strategy" : { + "displayName" : "Schema Access Strategy", + "identifiesControllerService" : false, + "name" : "schema-access-strategy", + "sensitive" : false + }, + "schema-version" : { + "displayName" : "Schema Version", + "identifiesControllerService" : false, + "name" : "schema-version", + "sensitive" : false + }, + "schema-text" : { + "displayName" : "Schema Text", + "identifiesControllerService" : false, + "name" : "schema-text", + "sensitive" : false + }, + "Allow Comments" : { + "displayName" : "Allow Comments", + "identifiesControllerService" : false, + "name" : "Allow Comments", + "sensitive" : false + } + }, + "scheduledState" : "DISABLED", + "type" : "org.apache.nifi.json.JsonTreeReader" + } ], + "defaultBackPressureDataSizeThreshold" : "1 GB", + "defaultBackPressureObjectThreshold" : 10000, + "defaultFlowFileExpiration" : "0 sec", + "flowFileConcurrency" : "UNBOUNDED", + "flowFileOutboundPolicy" : "STREAM_WHEN_AVAILABLE", + "funnels" : [ ], + "identifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "inputPorts" : [ ], + "instanceIdentifier" : "321c623c-0192-1000-ffff-ffffa5255e12", + "labels" : [ ], + "name" : "Identity data transfer", + "outputPorts" : [ ], + "position" : { + "x" : 614.0, + "y" : 160.0 + }, + "processGroups" : [ ], + "processors" : [ { + "autoTerminatedRelationships" : [ "success", "failure" ], + "backoffMechanism" : "PENALIZE_FLOWFILE", + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-groovyx-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "PROCESSOR", + "concurrentlySchedulableTaskCount" : 1, + "executionNode" : "ALL", + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "4b9b5ad1-842a-32a0-b04a-2e6e204b49f9", + "instanceIdentifier" : "323b9d4d-0192-1000-0000-00002603bb8b", + "maxBackoffPeriod" : "10 mins", + "name" : "ExecuteGroovyScript", + "penaltyDuration" : "30 sec", + "position" : { + "x" : 1657.3068633086637, + "y" : 792.1552939590737 + }, + "properties" : { + "groovyx-failure-strategy" : "rollback", + "groovyx-script-file" : "/opt/nifi/nifi-current/nifi_scripts/organization.groovy" + }, + "propertyDescriptors" : { + "groovyx-script-body" : { + "displayName" : "Script Body", + "identifiesControllerService" : false, + "name" : "groovyx-script-body", + "sensitive" : false + }, + "groovyx-failure-strategy" : { + "displayName" : "Failure strategy", + "identifiesControllerService" : false, + "name" : "groovyx-failure-strategy", + "sensitive" : false + }, + "groovyx-additional-classpath" : { + "displayName" : "Additional classpath", + "identifiesControllerService" : false, + "name" : "groovyx-additional-classpath", + "sensitive" : false + }, + "groovyx-script-file" : { + "displayName" : "Script File", + "identifiesControllerService" : false, + "name" : "groovyx-script-file", + "resourceDefinition" : { + "cardinality" : "SINGLE", + "resourceTypes" : [ "FILE" ] + }, + "sensitive" : false + } + }, + "retriedRelationships" : [ ], + "retryCount" : 10, + "runDurationMillis" : 0, + "scheduledState" : "ENABLED", + "schedulingPeriod" : "0 sec", + 
"schedulingStrategy" : "TIMER_DRIVEN", + "style" : { }, + "type" : "org.apache.nifi.processors.groovyx.ExecuteGroovyScript", + "yieldDuration" : "1 sec" + } ], + "remoteProcessGroups" : [ ], + "variables" : { } + }, + "flowEncodingVersion" : "1.0", + "parameterContexts" : { }, + "parameterProviders" : { } + } + } +} \ No newline at end of file diff --git a/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/3f1fbc95-d88e-4efe-a816-e63cff4fd5ee/3/3.snapshot b/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/3f1fbc95-d88e-4efe-a816-e63cff4fd5ee/3/3.snapshot new file mode 100644 index 000000000..6e89670d7 --- /dev/null +++ b/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/3f1fbc95-d88e-4efe-a816-e63cff4fd5ee/3/3.snapshot @@ -0,0 +1,448 @@ +{ + "header" : { + "dataModelVersion" : "3" + }, + "content" : { + "flowSnapshot" : { + "externalControllerServices" : { }, + "flowContents" : { + "comments" : "", + "componentType" : "PROCESS_GROUP", + "connections" : [ ], + "controllerServices" : [ { + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-record-serialization-services-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "CONTROLLER_SERVICE", + "controllerServiceApis" : [ { + "bundle" : { + "artifact" : "nifi-standard-services-api-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "type" : "org.apache.nifi.serialization.RecordReaderFactory" + } ], + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "eb16f9e4-c948-3849-8efe-c3079a6ba952", + "instanceIdentifier" : "9cceebb4-8e43-3ac7-8b6d-cc0dce4c9697", + "name" : "AvroReader", + "properties" : { + "schema-name" : "${schema.name}", + "cache-size" : "1000", + "schema-access-strategy" : "embedded-avro-schema", + "schema-text" : "${avro.schema}" + }, + "propertyDescriptors" : { + "schema-branch" : { + "displayName" : "Schema Branch", + "identifiesControllerService" : false, + "name" : "schema-branch", + "sensitive" : false + }, + "schema-name" : { + "displayName" : "Schema Name", + "identifiesControllerService" : false, + "name" : "schema-name", + "sensitive" : false + }, + "cache-size" : { + "displayName" : "Cache Size", + "identifiesControllerService" : false, + "name" : "cache-size", + "sensitive" : false + }, + "schema-registry" : { + "displayName" : "Schema Registry", + "identifiesControllerService" : true, + "name" : "schema-registry", + "sensitive" : false + }, + "schema-access-strategy" : { + "displayName" : "Schema Access Strategy", + "identifiesControllerService" : false, + "name" : "schema-access-strategy", + "sensitive" : false + }, + "schema-version" : { + "displayName" : "Schema Version", + "identifiesControllerService" : false, + "name" : "schema-version", + "sensitive" : false + }, + "schema-text" : { + "displayName" : "Schema Text", + "identifiesControllerService" : false, + "name" : "schema-text", + "sensitive" : false + } + }, + "scheduledState" : "DISABLED", + "type" : "org.apache.nifi.avro.AvroReader" + }, { + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-record-serialization-services-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "CONTROLLER_SERVICE", + "controllerServiceApis" : [ { + "bundle" : { + "artifact" : "nifi-standard-services-api-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "type" : "org.apache.nifi.serialization.RecordSetWriterFactory" + } ], + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + 
"identifier" : "e3d3c133-8004-3880-8150-388dfd06818c", + "instanceIdentifier" : "e73bd6b0-8a3e-395c-bfb8-bc1f4e72e1f7", + "name" : "JsonRecordSetWriter", + "properties" : { + "Allow Scientific Notation" : "false", + "compression-level" : "1", + "Pretty Print JSON" : "false", + "compression-format" : "none", + "Schema Write Strategy" : "no-schema", + "suppress-nulls" : "never-suppress", + "output-grouping" : "output-array", + "schema-name" : "${schema.name}", + "schema-access-strategy" : "inherit-record-schema", + "schema-protocol-version" : "1", + "schema-text" : "${avro.schema}" + }, + "propertyDescriptors" : { + "schema-branch" : { + "displayName" : "Schema Branch", + "identifiesControllerService" : false, + "name" : "schema-branch", + "sensitive" : false + }, + "Allow Scientific Notation" : { + "displayName" : "Allow Scientific Notation", + "identifiesControllerService" : false, + "name" : "Allow Scientific Notation", + "sensitive" : false + }, + "compression-level" : { + "displayName" : "Compression Level", + "identifiesControllerService" : false, + "name" : "compression-level", + "sensitive" : false + }, + "schema-cache" : { + "displayName" : "Schema Cache", + "identifiesControllerService" : true, + "name" : "schema-cache", + "sensitive" : false + }, + "Timestamp Format" : { + "displayName" : "Timestamp Format", + "identifiesControllerService" : false, + "name" : "Timestamp Format", + "sensitive" : false + }, + "Date Format" : { + "displayName" : "Date Format", + "identifiesControllerService" : false, + "name" : "Date Format", + "sensitive" : false + }, + "Pretty Print JSON" : { + "displayName" : "Pretty Print JSON", + "identifiesControllerService" : false, + "name" : "Pretty Print JSON", + "sensitive" : false + }, + "compression-format" : { + "displayName" : "Compression Format", + "identifiesControllerService" : false, + "name" : "compression-format", + "sensitive" : false + }, + "Schema Write Strategy" : { + "displayName" : "Schema Write Strategy", + "identifiesControllerService" : false, + "name" : "Schema Write Strategy", + "sensitive" : false + }, + "suppress-nulls" : { + "displayName" : "Suppress Null Values", + "identifiesControllerService" : false, + "name" : "suppress-nulls", + "sensitive" : false + }, + "output-grouping" : { + "displayName" : "Output Grouping", + "identifiesControllerService" : false, + "name" : "output-grouping", + "sensitive" : false + }, + "schema-name" : { + "displayName" : "Schema Name", + "identifiesControllerService" : false, + "name" : "schema-name", + "sensitive" : false + }, + "schema-registry" : { + "displayName" : "Schema Registry", + "identifiesControllerService" : true, + "name" : "schema-registry", + "sensitive" : false + }, + "Time Format" : { + "displayName" : "Time Format", + "identifiesControllerService" : false, + "name" : "Time Format", + "sensitive" : false + }, + "schema-access-strategy" : { + "displayName" : "Schema Access Strategy", + "identifiesControllerService" : false, + "name" : "schema-access-strategy", + "sensitive" : false + }, + "schema-protocol-version" : { + "displayName" : "Schema Protocol Version", + "identifiesControllerService" : false, + "name" : "schema-protocol-version", + "sensitive" : false + }, + "schema-version" : { + "displayName" : "Schema Version", + "identifiesControllerService" : false, + "name" : "schema-version", + "sensitive" : false + }, + "schema-text" : { + "displayName" : "Schema Text", + "identifiesControllerService" : false, + "name" : "schema-text", + "sensitive" : false + } + }, + 
"scheduledState" : "DISABLED", + "type" : "org.apache.nifi.json.JsonRecordSetWriter" + }, { + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-record-serialization-services-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "CONTROLLER_SERVICE", + "controllerServiceApis" : [ { + "bundle" : { + "artifact" : "nifi-standard-services-api-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "type" : "org.apache.nifi.serialization.RecordReaderFactory" + } ], + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "4850ced9-1430-3cec-9720-4fab95e71836", + "instanceIdentifier" : "722cb190-da9b-3cf5-98a0-e572dd9d412e", + "name" : "JsonTreeReader", + "properties" : { + "Max String Length" : "20 MB", + "schema-application-strategy" : "SELECTED_PART", + "schema-name" : "${schema.name}", + "starting-field-strategy" : "ROOT_NODE", + "schema-access-strategy" : "infer-schema", + "schema-text" : "${avro.schema}", + "Allow Comments" : "false" + }, + "propertyDescriptors" : { + "schema-branch" : { + "displayName" : "Schema Branch", + "identifiesControllerService" : false, + "name" : "schema-branch", + "sensitive" : false + }, + "Max String Length" : { + "displayName" : "Max String Length", + "identifiesControllerService" : false, + "name" : "Max String Length", + "sensitive" : false + }, + "schema-application-strategy" : { + "displayName" : "Schema Application Strategy", + "identifiesControllerService" : false, + "name" : "schema-application-strategy", + "sensitive" : false + }, + "Timestamp Format" : { + "displayName" : "Timestamp Format", + "identifiesControllerService" : false, + "name" : "Timestamp Format", + "sensitive" : false + }, + "schema-inference-cache" : { + "displayName" : "Schema Inference Cache", + "identifiesControllerService" : true, + "name" : "schema-inference-cache", + "sensitive" : false + }, + "Date Format" : { + "displayName" : "Date Format", + "identifiesControllerService" : false, + "name" : "Date Format", + "sensitive" : false + }, + "schema-name" : { + "displayName" : "Schema Name", + "identifiesControllerService" : false, + "name" : "schema-name", + "sensitive" : false + }, + "starting-field-strategy" : { + "displayName" : "Starting Field Strategy", + "identifiesControllerService" : false, + "name" : "starting-field-strategy", + "sensitive" : false + }, + "schema-registry" : { + "displayName" : "Schema Registry", + "identifiesControllerService" : true, + "name" : "schema-registry", + "sensitive" : false + }, + "starting-field-name" : { + "displayName" : "Starting Field Name", + "identifiesControllerService" : false, + "name" : "starting-field-name", + "sensitive" : false + }, + "Time Format" : { + "displayName" : "Time Format", + "identifiesControllerService" : false, + "name" : "Time Format", + "sensitive" : false + }, + "schema-access-strategy" : { + "displayName" : "Schema Access Strategy", + "identifiesControllerService" : false, + "name" : "schema-access-strategy", + "sensitive" : false + }, + "schema-version" : { + "displayName" : "Schema Version", + "identifiesControllerService" : false, + "name" : "schema-version", + "sensitive" : false + }, + "schema-text" : { + "displayName" : "Schema Text", + "identifiesControllerService" : false, + "name" : "schema-text", + "sensitive" : false + }, + "Allow Comments" : { + "displayName" : "Allow Comments", + "identifiesControllerService" : false, + "name" : "Allow Comments", + "sensitive" : false + } + }, + "scheduledState" : 
"DISABLED", + "type" : "org.apache.nifi.json.JsonTreeReader" + } ], + "defaultBackPressureDataSizeThreshold" : "1 GB", + "defaultBackPressureObjectThreshold" : 10000, + "defaultFlowFileExpiration" : "0 sec", + "flowFileConcurrency" : "UNBOUNDED", + "flowFileOutboundPolicy" : "STREAM_WHEN_AVAILABLE", + "funnels" : [ ], + "identifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "inputPorts" : [ ], + "instanceIdentifier" : "321c623c-0192-1000-ffff-ffffa5255e12", + "labels" : [ ], + "name" : "Identity data transfer", + "outputPorts" : [ ], + "position" : { + "x" : 614.0, + "y" : 160.0 + }, + "processGroups" : [ ], + "processors" : [ { + "autoTerminatedRelationships" : [ "success", "failure" ], + "backoffMechanism" : "PENALIZE_FLOWFILE", + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-groovyx-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "PROCESSOR", + "concurrentlySchedulableTaskCount" : 1, + "executionNode" : "ALL", + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "4b9b5ad1-842a-32a0-b04a-2e6e204b49f9", + "instanceIdentifier" : "323b9d4d-0192-1000-0000-00002603bb8b", + "maxBackoffPeriod" : "10 mins", + "name" : "ExecuteGroovyScript", + "penaltyDuration" : "30 sec", + "position" : { + "x" : 1657.3068633086637, + "y" : 792.1552939590737 + }, + "properties" : { + "groovyx-failure-strategy" : "rollback", + "groovyx-script-file" : "/opt/nifi/nifi-current/nifi_scripts/organization.groovy" + }, + "propertyDescriptors" : { + "groovyx-script-body" : { + "displayName" : "Script Body", + "identifiesControllerService" : false, + "name" : "groovyx-script-body", + "sensitive" : false + }, + "groovyx-failure-strategy" : { + "displayName" : "Failure strategy", + "identifiesControllerService" : false, + "name" : "groovyx-failure-strategy", + "sensitive" : false + }, + "groovyx-additional-classpath" : { + "displayName" : "Additional classpath", + "identifiesControllerService" : false, + "name" : "groovyx-additional-classpath", + "sensitive" : false + }, + "groovyx-script-file" : { + "displayName" : "Script File", + "identifiesControllerService" : false, + "name" : "groovyx-script-file", + "resourceDefinition" : { + "cardinality" : "SINGLE", + "resourceTypes" : [ "FILE" ] + }, + "sensitive" : false + } + }, + "retriedRelationships" : [ ], + "retryCount" : 10, + "runDurationMillis" : 0, + "scheduledState" : "ENABLED", + "schedulingPeriod" : "0 sec", + "schedulingStrategy" : "TIMER_DRIVEN", + "style" : { }, + "type" : "org.apache.nifi.processors.groovyx.ExecuteGroovyScript", + "yieldDuration" : "1 sec" + } ], + "remoteProcessGroups" : [ ], + "variables" : { } + }, + "flowEncodingVersion" : "1.0", + "parameterContexts" : { }, + "parameterProviders" : { } + } + } +} \ No newline at end of file diff --git a/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/3f1fbc95-d88e-4efe-a816-e63cff4fd5ee/4/4.snapshot b/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/3f1fbc95-d88e-4efe-a816-e63cff4fd5ee/4/4.snapshot new file mode 100644 index 000000000..55ae1b565 --- /dev/null +++ b/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/3f1fbc95-d88e-4efe-a816-e63cff4fd5ee/4/4.snapshot @@ -0,0 +1,519 @@ +{ + "header" : { + "dataModelVersion" : "3" + }, + "content" : { + "flowSnapshot" : { + "externalControllerServices" : { + "c3d0e746-e921-3321-ae2d-584895e9dc32" : { + "identifier" : "c3d0e746-e921-3321-ae2d-584895e9dc32", + "name" : "TFRS" + } + }, + "flowContents" : { + "comments" : "", + "componentType" : 
"PROCESS_GROUP", + "connections" : [ ], + "controllerServices" : [ { + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-record-serialization-services-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "CONTROLLER_SERVICE", + "controllerServiceApis" : [ { + "bundle" : { + "artifact" : "nifi-standard-services-api-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "type" : "org.apache.nifi.serialization.RecordReaderFactory" + } ], + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "eb16f9e4-c948-3849-8efe-c3079a6ba952", + "instanceIdentifier" : "9cceebb4-8e43-3ac7-8b6d-cc0dce4c9697", + "name" : "AvroReader", + "properties" : { + "schema-name" : "${schema.name}", + "cache-size" : "1000", + "schema-access-strategy" : "embedded-avro-schema", + "schema-text" : "${avro.schema}" + }, + "propertyDescriptors" : { + "schema-branch" : { + "displayName" : "Schema Branch", + "identifiesControllerService" : false, + "name" : "schema-branch", + "sensitive" : false + }, + "schema-name" : { + "displayName" : "Schema Name", + "identifiesControllerService" : false, + "name" : "schema-name", + "sensitive" : false + }, + "cache-size" : { + "displayName" : "Cache Size", + "identifiesControllerService" : false, + "name" : "cache-size", + "sensitive" : false + }, + "schema-registry" : { + "displayName" : "Schema Registry", + "identifiesControllerService" : true, + "name" : "schema-registry", + "sensitive" : false + }, + "schema-access-strategy" : { + "displayName" : "Schema Access Strategy", + "identifiesControllerService" : false, + "name" : "schema-access-strategy", + "sensitive" : false + }, + "schema-version" : { + "displayName" : "Schema Version", + "identifiesControllerService" : false, + "name" : "schema-version", + "sensitive" : false + }, + "schema-text" : { + "displayName" : "Schema Text", + "identifiesControllerService" : false, + "name" : "schema-text", + "sensitive" : false + } + }, + "scheduledState" : "DISABLED", + "type" : "org.apache.nifi.avro.AvroReader" + }, { + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-record-serialization-services-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "CONTROLLER_SERVICE", + "controllerServiceApis" : [ { + "bundle" : { + "artifact" : "nifi-standard-services-api-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "type" : "org.apache.nifi.serialization.RecordSetWriterFactory" + } ], + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "e3d3c133-8004-3880-8150-388dfd06818c", + "instanceIdentifier" : "e73bd6b0-8a3e-395c-bfb8-bc1f4e72e1f7", + "name" : "JsonRecordSetWriter", + "properties" : { + "Allow Scientific Notation" : "false", + "compression-level" : "1", + "Pretty Print JSON" : "false", + "compression-format" : "none", + "Schema Write Strategy" : "no-schema", + "suppress-nulls" : "never-suppress", + "output-grouping" : "output-array", + "schema-name" : "${schema.name}", + "schema-access-strategy" : "inherit-record-schema", + "schema-protocol-version" : "1", + "schema-text" : "${avro.schema}" + }, + "propertyDescriptors" : { + "schema-branch" : { + "displayName" : "Schema Branch", + "identifiesControllerService" : false, + "name" : "schema-branch", + "sensitive" : false + }, + "Allow Scientific Notation" : { + "displayName" : "Allow Scientific Notation", + "identifiesControllerService" : false, + "name" : "Allow Scientific Notation", + 
"sensitive" : false + }, + "compression-level" : { + "displayName" : "Compression Level", + "identifiesControllerService" : false, + "name" : "compression-level", + "sensitive" : false + }, + "schema-cache" : { + "displayName" : "Schema Cache", + "identifiesControllerService" : true, + "name" : "schema-cache", + "sensitive" : false + }, + "Timestamp Format" : { + "displayName" : "Timestamp Format", + "identifiesControllerService" : false, + "name" : "Timestamp Format", + "sensitive" : false + }, + "Date Format" : { + "displayName" : "Date Format", + "identifiesControllerService" : false, + "name" : "Date Format", + "sensitive" : false + }, + "Pretty Print JSON" : { + "displayName" : "Pretty Print JSON", + "identifiesControllerService" : false, + "name" : "Pretty Print JSON", + "sensitive" : false + }, + "compression-format" : { + "displayName" : "Compression Format", + "identifiesControllerService" : false, + "name" : "compression-format", + "sensitive" : false + }, + "Schema Write Strategy" : { + "displayName" : "Schema Write Strategy", + "identifiesControllerService" : false, + "name" : "Schema Write Strategy", + "sensitive" : false + }, + "suppress-nulls" : { + "displayName" : "Suppress Null Values", + "identifiesControllerService" : false, + "name" : "suppress-nulls", + "sensitive" : false + }, + "output-grouping" : { + "displayName" : "Output Grouping", + "identifiesControllerService" : false, + "name" : "output-grouping", + "sensitive" : false + }, + "schema-name" : { + "displayName" : "Schema Name", + "identifiesControllerService" : false, + "name" : "schema-name", + "sensitive" : false + }, + "schema-registry" : { + "displayName" : "Schema Registry", + "identifiesControllerService" : true, + "name" : "schema-registry", + "sensitive" : false + }, + "Time Format" : { + "displayName" : "Time Format", + "identifiesControllerService" : false, + "name" : "Time Format", + "sensitive" : false + }, + "schema-access-strategy" : { + "displayName" : "Schema Access Strategy", + "identifiesControllerService" : false, + "name" : "schema-access-strategy", + "sensitive" : false + }, + "schema-protocol-version" : { + "displayName" : "Schema Protocol Version", + "identifiesControllerService" : false, + "name" : "schema-protocol-version", + "sensitive" : false + }, + "schema-version" : { + "displayName" : "Schema Version", + "identifiesControllerService" : false, + "name" : "schema-version", + "sensitive" : false + }, + "schema-text" : { + "displayName" : "Schema Text", + "identifiesControllerService" : false, + "name" : "schema-text", + "sensitive" : false + } + }, + "scheduledState" : "DISABLED", + "type" : "org.apache.nifi.json.JsonRecordSetWriter" + }, { + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-record-serialization-services-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "CONTROLLER_SERVICE", + "controllerServiceApis" : [ { + "bundle" : { + "artifact" : "nifi-standard-services-api-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "type" : "org.apache.nifi.serialization.RecordReaderFactory" + } ], + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "4850ced9-1430-3cec-9720-4fab95e71836", + "instanceIdentifier" : "722cb190-da9b-3cf5-98a0-e572dd9d412e", + "name" : "JsonTreeReader", + "properties" : { + "Max String Length" : "20 MB", + "schema-application-strategy" : "SELECTED_PART", + "schema-name" : "${schema.name}", + "starting-field-strategy" : "ROOT_NODE", + 
"schema-access-strategy" : "infer-schema", + "schema-text" : "${avro.schema}", + "Allow Comments" : "false" + }, + "propertyDescriptors" : { + "schema-branch" : { + "displayName" : "Schema Branch", + "identifiesControllerService" : false, + "name" : "schema-branch", + "sensitive" : false + }, + "Max String Length" : { + "displayName" : "Max String Length", + "identifiesControllerService" : false, + "name" : "Max String Length", + "sensitive" : false + }, + "schema-application-strategy" : { + "displayName" : "Schema Application Strategy", + "identifiesControllerService" : false, + "name" : "schema-application-strategy", + "sensitive" : false + }, + "Timestamp Format" : { + "displayName" : "Timestamp Format", + "identifiesControllerService" : false, + "name" : "Timestamp Format", + "sensitive" : false + }, + "schema-inference-cache" : { + "displayName" : "Schema Inference Cache", + "identifiesControllerService" : true, + "name" : "schema-inference-cache", + "sensitive" : false + }, + "Date Format" : { + "displayName" : "Date Format", + "identifiesControllerService" : false, + "name" : "Date Format", + "sensitive" : false + }, + "schema-name" : { + "displayName" : "Schema Name", + "identifiesControllerService" : false, + "name" : "schema-name", + "sensitive" : false + }, + "starting-field-strategy" : { + "displayName" : "Starting Field Strategy", + "identifiesControllerService" : false, + "name" : "starting-field-strategy", + "sensitive" : false + }, + "schema-registry" : { + "displayName" : "Schema Registry", + "identifiesControllerService" : true, + "name" : "schema-registry", + "sensitive" : false + }, + "starting-field-name" : { + "displayName" : "Starting Field Name", + "identifiesControllerService" : false, + "name" : "starting-field-name", + "sensitive" : false + }, + "Time Format" : { + "displayName" : "Time Format", + "identifiesControllerService" : false, + "name" : "Time Format", + "sensitive" : false + }, + "schema-access-strategy" : { + "displayName" : "Schema Access Strategy", + "identifiesControllerService" : false, + "name" : "schema-access-strategy", + "sensitive" : false + }, + "schema-version" : { + "displayName" : "Schema Version", + "identifiesControllerService" : false, + "name" : "schema-version", + "sensitive" : false + }, + "schema-text" : { + "displayName" : "Schema Text", + "identifiesControllerService" : false, + "name" : "schema-text", + "sensitive" : false + }, + "Allow Comments" : { + "displayName" : "Allow Comments", + "identifiesControllerService" : false, + "name" : "Allow Comments", + "sensitive" : false + } + }, + "scheduledState" : "DISABLED", + "type" : "org.apache.nifi.json.JsonTreeReader" + } ], + "defaultBackPressureDataSizeThreshold" : "1 GB", + "defaultBackPressureObjectThreshold" : 10000, + "defaultFlowFileExpiration" : "0 sec", + "flowFileConcurrency" : "UNBOUNDED", + "flowFileOutboundPolicy" : "STREAM_WHEN_AVAILABLE", + "funnels" : [ ], + "identifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "inputPorts" : [ ], + "instanceIdentifier" : "321c623c-0192-1000-ffff-ffffa5255e12", + "labels" : [ ], + "name" : "Identity data transfer", + "outputPorts" : [ ], + "position" : { + "x" : 614.0, + "y" : 160.0 + }, + "processGroups" : [ ], + "processors" : [ { + "autoTerminatedRelationships" : [ "success", "failure" ], + "backoffMechanism" : "PENALIZE_FLOWFILE", + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-standard-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "PROCESSOR", + 
"concurrentlySchedulableTaskCount" : 1, + "executionNode" : "ALL", + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "ad271e02-2f09-341a-bdb3-f9126dc0cb53", + "instanceIdentifier" : "324cc3c1-0192-1000-0000-000011804257", + "maxBackoffPeriod" : "10 mins", + "name" : "ExecuteSQL", + "penaltyDuration" : "30 sec", + "position" : { + "x" : 1674.4604661487112, + "y" : 868.6867527839015 + }, + "properties" : { + "esql-max-rows" : "0", + "dbf-default-precision" : "10", + "Max Wait Time" : "0 seconds", + "Database Connection Pooling Service" : "c3d0e746-e921-3321-ae2d-584895e9dc32", + "esql-auto-commit" : "true", + "dbf-user-logical-types" : "false", + "dbf-default-scale" : "0", + "compression-format" : "NONE", + "esql-output-batch-size" : "0", + "esql-fetch-size" : "0", + "SQL select query" : "SELECT\n o.id as organization_id,\n o.name,\n cast(null as varchar) as operating_name,\n cast(null as varchar) as email,\n cast(null as varchar) as phone,\n o.edrms_record,\n (\n case\n when os.status = 'Archived' then 'Suspended'\n when oat.the_type = 'Buy And Sell' or oat.the_type = 'Sell Only' then 'Registered'\n else 'Unregistered'\n end\n ) as org_status,\n 'fuel_supplier' as organization_type,\n oa.address_line_1 as service_street_address,\n oa.address_line_2 as service_address_other,\n oa.city as service_city,\n oa.state as service_province_state,\n oa.postal_code as \"service_postalCode_zipCode\",\n oa.country as service_country,\n oa.attorney_street_address,\n oa.attorney_address_other,\n oa.attorney_city as attorney_city,\n oa.attorney_province as attorney_province_state,\n oa.attorney_postal_code as \"attorney_postalCode_zipCode\",\n oa.attorney_country as attorney_country\n FROM\n organization o\n INNER JOIN organization_status os ON os.id = o.status_id\n INNER JOIN organization_actions_type oat ON oat.id = o.actions_type_id\n INNER JOIN organization_address oa ON oa.organization_id = o.id", + "dbf-normalize" : "false" + }, + "propertyDescriptors" : { + "esql-max-rows" : { + "displayName" : "Max Rows Per Flow File", + "identifiesControllerService" : false, + "name" : "esql-max-rows", + "sensitive" : false + }, + "dbf-default-precision" : { + "displayName" : "Default Decimal Precision", + "identifiesControllerService" : false, + "name" : "dbf-default-precision", + "sensitive" : false + }, + "Max Wait Time" : { + "displayName" : "Max Wait Time", + "identifiesControllerService" : false, + "name" : "Max Wait Time", + "sensitive" : false + }, + "Database Connection Pooling Service" : { + "displayName" : "Database Connection Pooling Service", + "identifiesControllerService" : true, + "name" : "Database Connection Pooling Service", + "sensitive" : false + }, + "sql-post-query" : { + "displayName" : "SQL Post-Query", + "identifiesControllerService" : false, + "name" : "sql-post-query", + "sensitive" : false + }, + "esql-auto-commit" : { + "displayName" : "Set Auto Commit", + "identifiesControllerService" : false, + "name" : "esql-auto-commit", + "sensitive" : false + }, + "dbf-user-logical-types" : { + "displayName" : "Use Avro Logical Types", + "identifiesControllerService" : false, + "name" : "dbf-user-logical-types", + "sensitive" : false + }, + "dbf-default-scale" : { + "displayName" : "Default Decimal Scale", + "identifiesControllerService" : false, + "name" : "dbf-default-scale", + "sensitive" : false + }, + "sql-pre-query" : { + "displayName" : "SQL Pre-Query", + "identifiesControllerService" : false, + "name" : "sql-pre-query", + "sensitive" : false + }, + 
"compression-format" : { + "displayName" : "Compression Format", + "identifiesControllerService" : false, + "name" : "compression-format", + "sensitive" : false + }, + "esql-output-batch-size" : { + "displayName" : "Output Batch Size", + "identifiesControllerService" : false, + "name" : "esql-output-batch-size", + "sensitive" : false + }, + "esql-fetch-size" : { + "displayName" : "Fetch Size", + "identifiesControllerService" : false, + "name" : "esql-fetch-size", + "sensitive" : false + }, + "SQL select query" : { + "displayName" : "SQL select query", + "identifiesControllerService" : false, + "name" : "SQL select query", + "sensitive" : false + }, + "dbf-normalize" : { + "displayName" : "Normalize Table/Column Names", + "identifiesControllerService" : false, + "name" : "dbf-normalize", + "sensitive" : false + } + }, + "retriedRelationships" : [ ], + "retryCount" : 10, + "runDurationMillis" : 0, + "scheduledState" : "ENABLED", + "schedulingPeriod" : "0 sec", + "schedulingStrategy" : "TIMER_DRIVEN", + "style" : { }, + "type" : "org.apache.nifi.processors.standard.ExecuteSQL", + "yieldDuration" : "1 sec" + } ], + "remoteProcessGroups" : [ ], + "variables" : { } + }, + "flowEncodingVersion" : "1.0", + "parameterContexts" : { }, + "parameterProviders" : { } + } + } +} \ No newline at end of file diff --git a/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/3f1fbc95-d88e-4efe-a816-e63cff4fd5ee/5/5.snapshot b/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/3f1fbc95-d88e-4efe-a816-e63cff4fd5ee/5/5.snapshot new file mode 100644 index 000000000..054491164 --- /dev/null +++ b/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/3f1fbc95-d88e-4efe-a816-e63cff4fd5ee/5/5.snapshot @@ -0,0 +1,585 @@ +{ + "header" : { + "dataModelVersion" : "3" + }, + "content" : { + "flowSnapshot" : { + "externalControllerServices" : { + "c3d0e746-e921-3321-ae2d-584895e9dc32" : { + "identifier" : "c3d0e746-e921-3321-ae2d-584895e9dc32", + "name" : "TFRS" + } + }, + "flowContents" : { + "comments" : "", + "componentType" : "PROCESS_GROUP", + "connections" : [ ], + "controllerServices" : [ { + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-record-serialization-services-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "CONTROLLER_SERVICE", + "controllerServiceApis" : [ { + "bundle" : { + "artifact" : "nifi-standard-services-api-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "type" : "org.apache.nifi.serialization.RecordReaderFactory" + } ], + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "eb16f9e4-c948-3849-8efe-c3079a6ba952", + "instanceIdentifier" : "9cceebb4-8e43-3ac7-8b6d-cc0dce4c9697", + "name" : "AvroReader", + "properties" : { + "schema-name" : "${schema.name}", + "cache-size" : "1000", + "schema-access-strategy" : "embedded-avro-schema", + "schema-text" : "${avro.schema}" + }, + "propertyDescriptors" : { + "schema-branch" : { + "displayName" : "Schema Branch", + "identifiesControllerService" : false, + "name" : "schema-branch", + "sensitive" : false + }, + "schema-name" : { + "displayName" : "Schema Name", + "identifiesControllerService" : false, + "name" : "schema-name", + "sensitive" : false + }, + "cache-size" : { + "displayName" : "Cache Size", + "identifiesControllerService" : false, + "name" : "cache-size", + "sensitive" : false + }, + "schema-registry" : { + "displayName" : "Schema Registry", + "identifiesControllerService" : true, + "name" : "schema-registry", + "sensitive" : 
false + }, + "schema-access-strategy" : { + "displayName" : "Schema Access Strategy", + "identifiesControllerService" : false, + "name" : "schema-access-strategy", + "sensitive" : false + }, + "schema-version" : { + "displayName" : "Schema Version", + "identifiesControllerService" : false, + "name" : "schema-version", + "sensitive" : false + }, + "schema-text" : { + "displayName" : "Schema Text", + "identifiesControllerService" : false, + "name" : "schema-text", + "sensitive" : false + } + }, + "scheduledState" : "DISABLED", + "type" : "org.apache.nifi.avro.AvroReader" + }, { + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-record-serialization-services-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "CONTROLLER_SERVICE", + "controllerServiceApis" : [ { + "bundle" : { + "artifact" : "nifi-standard-services-api-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "type" : "org.apache.nifi.serialization.RecordSetWriterFactory" + } ], + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "e3d3c133-8004-3880-8150-388dfd06818c", + "instanceIdentifier" : "e73bd6b0-8a3e-395c-bfb8-bc1f4e72e1f7", + "name" : "JsonRecordSetWriter", + "properties" : { + "Allow Scientific Notation" : "false", + "compression-level" : "1", + "Pretty Print JSON" : "false", + "compression-format" : "none", + "Schema Write Strategy" : "no-schema", + "suppress-nulls" : "never-suppress", + "output-grouping" : "output-array", + "schema-name" : "${schema.name}", + "schema-access-strategy" : "inherit-record-schema", + "schema-protocol-version" : "1", + "schema-text" : "${avro.schema}" + }, + "propertyDescriptors" : { + "schema-branch" : { + "displayName" : "Schema Branch", + "identifiesControllerService" : false, + "name" : "schema-branch", + "sensitive" : false + }, + "Allow Scientific Notation" : { + "displayName" : "Allow Scientific Notation", + "identifiesControllerService" : false, + "name" : "Allow Scientific Notation", + "sensitive" : false + }, + "compression-level" : { + "displayName" : "Compression Level", + "identifiesControllerService" : false, + "name" : "compression-level", + "sensitive" : false + }, + "schema-cache" : { + "displayName" : "Schema Cache", + "identifiesControllerService" : true, + "name" : "schema-cache", + "sensitive" : false + }, + "Timestamp Format" : { + "displayName" : "Timestamp Format", + "identifiesControllerService" : false, + "name" : "Timestamp Format", + "sensitive" : false + }, + "Date Format" : { + "displayName" : "Date Format", + "identifiesControllerService" : false, + "name" : "Date Format", + "sensitive" : false + }, + "Pretty Print JSON" : { + "displayName" : "Pretty Print JSON", + "identifiesControllerService" : false, + "name" : "Pretty Print JSON", + "sensitive" : false + }, + "compression-format" : { + "displayName" : "Compression Format", + "identifiesControllerService" : false, + "name" : "compression-format", + "sensitive" : false + }, + "Schema Write Strategy" : { + "displayName" : "Schema Write Strategy", + "identifiesControllerService" : false, + "name" : "Schema Write Strategy", + "sensitive" : false + }, + "suppress-nulls" : { + "displayName" : "Suppress Null Values", + "identifiesControllerService" : false, + "name" : "suppress-nulls", + "sensitive" : false + }, + "output-grouping" : { + "displayName" : "Output Grouping", + "identifiesControllerService" : false, + "name" : "output-grouping", + "sensitive" : false + }, + "schema-name" : { + "displayName" : 
"Schema Name", + "identifiesControllerService" : false, + "name" : "schema-name", + "sensitive" : false + }, + "schema-registry" : { + "displayName" : "Schema Registry", + "identifiesControllerService" : true, + "name" : "schema-registry", + "sensitive" : false + }, + "Time Format" : { + "displayName" : "Time Format", + "identifiesControllerService" : false, + "name" : "Time Format", + "sensitive" : false + }, + "schema-access-strategy" : { + "displayName" : "Schema Access Strategy", + "identifiesControllerService" : false, + "name" : "schema-access-strategy", + "sensitive" : false + }, + "schema-protocol-version" : { + "displayName" : "Schema Protocol Version", + "identifiesControllerService" : false, + "name" : "schema-protocol-version", + "sensitive" : false + }, + "schema-version" : { + "displayName" : "Schema Version", + "identifiesControllerService" : false, + "name" : "schema-version", + "sensitive" : false + }, + "schema-text" : { + "displayName" : "Schema Text", + "identifiesControllerService" : false, + "name" : "schema-text", + "sensitive" : false + } + }, + "scheduledState" : "DISABLED", + "type" : "org.apache.nifi.json.JsonRecordSetWriter" + }, { + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-record-serialization-services-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "CONTROLLER_SERVICE", + "controllerServiceApis" : [ { + "bundle" : { + "artifact" : "nifi-standard-services-api-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "type" : "org.apache.nifi.serialization.RecordReaderFactory" + } ], + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "4850ced9-1430-3cec-9720-4fab95e71836", + "instanceIdentifier" : "722cb190-da9b-3cf5-98a0-e572dd9d412e", + "name" : "JsonTreeReader", + "properties" : { + "Max String Length" : "20 MB", + "schema-application-strategy" : "SELECTED_PART", + "schema-name" : "${schema.name}", + "starting-field-strategy" : "ROOT_NODE", + "schema-access-strategy" : "infer-schema", + "schema-text" : "${avro.schema}", + "Allow Comments" : "false" + }, + "propertyDescriptors" : { + "schema-branch" : { + "displayName" : "Schema Branch", + "identifiesControllerService" : false, + "name" : "schema-branch", + "sensitive" : false + }, + "Max String Length" : { + "displayName" : "Max String Length", + "identifiesControllerService" : false, + "name" : "Max String Length", + "sensitive" : false + }, + "schema-application-strategy" : { + "displayName" : "Schema Application Strategy", + "identifiesControllerService" : false, + "name" : "schema-application-strategy", + "sensitive" : false + }, + "Timestamp Format" : { + "displayName" : "Timestamp Format", + "identifiesControllerService" : false, + "name" : "Timestamp Format", + "sensitive" : false + }, + "schema-inference-cache" : { + "displayName" : "Schema Inference Cache", + "identifiesControllerService" : true, + "name" : "schema-inference-cache", + "sensitive" : false + }, + "Date Format" : { + "displayName" : "Date Format", + "identifiesControllerService" : false, + "name" : "Date Format", + "sensitive" : false + }, + "schema-name" : { + "displayName" : "Schema Name", + "identifiesControllerService" : false, + "name" : "schema-name", + "sensitive" : false + }, + "starting-field-strategy" : { + "displayName" : "Starting Field Strategy", + "identifiesControllerService" : false, + "name" : "starting-field-strategy", + "sensitive" : false + }, + "schema-registry" : { + "displayName" : "Schema Registry", + 
"identifiesControllerService" : true, + "name" : "schema-registry", + "sensitive" : false + }, + "starting-field-name" : { + "displayName" : "Starting Field Name", + "identifiesControllerService" : false, + "name" : "starting-field-name", + "sensitive" : false + }, + "Time Format" : { + "displayName" : "Time Format", + "identifiesControllerService" : false, + "name" : "Time Format", + "sensitive" : false + }, + "schema-access-strategy" : { + "displayName" : "Schema Access Strategy", + "identifiesControllerService" : false, + "name" : "schema-access-strategy", + "sensitive" : false + }, + "schema-version" : { + "displayName" : "Schema Version", + "identifiesControllerService" : false, + "name" : "schema-version", + "sensitive" : false + }, + "schema-text" : { + "displayName" : "Schema Text", + "identifiesControllerService" : false, + "name" : "schema-text", + "sensitive" : false + }, + "Allow Comments" : { + "displayName" : "Allow Comments", + "identifiesControllerService" : false, + "name" : "Allow Comments", + "sensitive" : false + } + }, + "scheduledState" : "DISABLED", + "type" : "org.apache.nifi.json.JsonTreeReader" + } ], + "defaultBackPressureDataSizeThreshold" : "1 GB", + "defaultBackPressureObjectThreshold" : 10000, + "defaultFlowFileExpiration" : "0 sec", + "flowFileConcurrency" : "UNBOUNDED", + "flowFileOutboundPolicy" : "STREAM_WHEN_AVAILABLE", + "funnels" : [ ], + "identifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "inputPorts" : [ ], + "instanceIdentifier" : "321c623c-0192-1000-ffff-ffffa5255e12", + "labels" : [ ], + "name" : "Identity data transfer", + "outputPorts" : [ ], + "position" : { + "x" : 664.0, + "y" : 256.0 + }, + "processGroups" : [ ], + "processors" : [ { + "autoTerminatedRelationships" : [ "success", "failure" ], + "backoffMechanism" : "PENALIZE_FLOWFILE", + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-groovyx-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "PROCESSOR", + "concurrentlySchedulableTaskCount" : 1, + "executionNode" : "ALL", + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "81624320-5fcd-3c78-ada6-d0c85c3018d1", + "instanceIdentifier" : "3257275c-0192-1000-ffff-ffffab8e1b6c", + "maxBackoffPeriod" : "10 mins", + "name" : "ExecuteGroovyScript", + "penaltyDuration" : "30 sec", + "position" : { + "x" : 2008.0, + "y" : 1000.0 + }, + "properties" : { + "groovyx-failure-strategy" : "rollback", + "groovyx-script-file" : "/opt/nifi/nifi-current/nifi_scripts/organization.groovy" + }, + "propertyDescriptors" : { + "groovyx-script-body" : { + "displayName" : "Script Body", + "identifiesControllerService" : false, + "name" : "groovyx-script-body", + "sensitive" : false + }, + "groovyx-failure-strategy" : { + "displayName" : "Failure strategy", + "identifiesControllerService" : false, + "name" : "groovyx-failure-strategy", + "sensitive" : false + }, + "groovyx-additional-classpath" : { + "displayName" : "Additional classpath", + "identifiesControllerService" : false, + "name" : "groovyx-additional-classpath", + "sensitive" : false + }, + "groovyx-script-file" : { + "displayName" : "Script File", + "identifiesControllerService" : false, + "name" : "groovyx-script-file", + "resourceDefinition" : { + "cardinality" : "SINGLE", + "resourceTypes" : [ "FILE" ] + }, + "sensitive" : false + } + }, + "retriedRelationships" : [ ], + "retryCount" : 10, + "runDurationMillis" : 0, + "scheduledState" : "ENABLED", + "schedulingPeriod" : "0 sec", + "schedulingStrategy" : 
"TIMER_DRIVEN", + "style" : { }, + "type" : "org.apache.nifi.processors.groovyx.ExecuteGroovyScript", + "yieldDuration" : "1 sec" + }, { + "autoTerminatedRelationships" : [ "success", "failure" ], + "backoffMechanism" : "PENALIZE_FLOWFILE", + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-standard-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "PROCESSOR", + "concurrentlySchedulableTaskCount" : 1, + "executionNode" : "ALL", + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "ad271e02-2f09-341a-bdb3-f9126dc0cb53", + "instanceIdentifier" : "324cc3c1-0192-1000-0000-000011804257", + "maxBackoffPeriod" : "10 mins", + "name" : "ExecuteSQL", + "penaltyDuration" : "30 sec", + "position" : { + "x" : 1608.0, + "y" : 968.0 + }, + "properties" : { + "esql-max-rows" : "0", + "dbf-default-precision" : "10", + "Max Wait Time" : "0 seconds", + "Database Connection Pooling Service" : "c3d0e746-e921-3321-ae2d-584895e9dc32", + "esql-auto-commit" : "true", + "dbf-user-logical-types" : "false", + "dbf-default-scale" : "0", + "compression-format" : "NONE", + "esql-output-batch-size" : "0", + "esql-fetch-size" : "0", + "SQL select query" : "SELECT\n o.id as organization_id,\n o.name,\n cast(null as varchar) as operating_name,\n cast(null as varchar) as email,\n cast(null as varchar) as phone,\n o.edrms_record,\n (\n case\n when os.status = 'Archived' then 'Suspended'\n when oat.the_type = 'Buy And Sell' or oat.the_type = 'Sell Only' then 'Registered'\n else 'Unregistered'\n end\n ) as org_status,\n 'fuel_supplier' as organization_type,\n oa.address_line_1 as service_street_address,\n oa.address_line_2 as service_address_other,\n oa.city as service_city,\n oa.state as service_province_state,\n oa.postal_code as \"service_postalCode_zipCode\",\n oa.country as service_country,\n oa.attorney_street_address,\n oa.attorney_address_other,\n oa.attorney_city as attorney_city,\n oa.attorney_province as attorney_province_state,\n oa.attorney_postal_code as \"attorney_postalCode_zipCode\",\n oa.attorney_country as attorney_country\n FROM\n organization o\n INNER JOIN organization_status os ON os.id = o.status_id\n INNER JOIN organization_actions_type oat ON oat.id = o.actions_type_id\n INNER JOIN organization_address oa ON oa.organization_id = o.id", + "dbf-normalize" : "false" + }, + "propertyDescriptors" : { + "esql-max-rows" : { + "displayName" : "Max Rows Per Flow File", + "identifiesControllerService" : false, + "name" : "esql-max-rows", + "sensitive" : false + }, + "dbf-default-precision" : { + "displayName" : "Default Decimal Precision", + "identifiesControllerService" : false, + "name" : "dbf-default-precision", + "sensitive" : false + }, + "Max Wait Time" : { + "displayName" : "Max Wait Time", + "identifiesControllerService" : false, + "name" : "Max Wait Time", + "sensitive" : false + }, + "Database Connection Pooling Service" : { + "displayName" : "Database Connection Pooling Service", + "identifiesControllerService" : true, + "name" : "Database Connection Pooling Service", + "sensitive" : false + }, + "sql-post-query" : { + "displayName" : "SQL Post-Query", + "identifiesControllerService" : false, + "name" : "sql-post-query", + "sensitive" : false + }, + "esql-auto-commit" : { + "displayName" : "Set Auto Commit", + "identifiesControllerService" : false, + "name" : "esql-auto-commit", + "sensitive" : false + }, + "dbf-user-logical-types" : { + "displayName" : "Use Avro Logical Types", + 
"identifiesControllerService" : false, + "name" : "dbf-user-logical-types", + "sensitive" : false + }, + "dbf-default-scale" : { + "displayName" : "Default Decimal Scale", + "identifiesControllerService" : false, + "name" : "dbf-default-scale", + "sensitive" : false + }, + "sql-pre-query" : { + "displayName" : "SQL Pre-Query", + "identifiesControllerService" : false, + "name" : "sql-pre-query", + "sensitive" : false + }, + "compression-format" : { + "displayName" : "Compression Format", + "identifiesControllerService" : false, + "name" : "compression-format", + "sensitive" : false + }, + "esql-output-batch-size" : { + "displayName" : "Output Batch Size", + "identifiesControllerService" : false, + "name" : "esql-output-batch-size", + "sensitive" : false + }, + "esql-fetch-size" : { + "displayName" : "Fetch Size", + "identifiesControllerService" : false, + "name" : "esql-fetch-size", + "sensitive" : false + }, + "SQL select query" : { + "displayName" : "SQL select query", + "identifiesControllerService" : false, + "name" : "SQL select query", + "sensitive" : false + }, + "dbf-normalize" : { + "displayName" : "Normalize Table/Column Names", + "identifiesControllerService" : false, + "name" : "dbf-normalize", + "sensitive" : false + } + }, + "retriedRelationships" : [ ], + "retryCount" : 10, + "runDurationMillis" : 0, + "scheduledState" : "ENABLED", + "schedulingPeriod" : "0 sec", + "schedulingStrategy" : "TIMER_DRIVEN", + "style" : { }, + "type" : "org.apache.nifi.processors.standard.ExecuteSQL", + "yieldDuration" : "1 sec" + } ], + "remoteProcessGroups" : [ ], + "variables" : { } + }, + "flowEncodingVersion" : "1.0", + "parameterContexts" : { }, + "parameterProviders" : { } + } + } +} \ No newline at end of file diff --git a/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/3f1fbc95-d88e-4efe-a816-e63cff4fd5ee/6/6.snapshot b/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/3f1fbc95-d88e-4efe-a816-e63cff4fd5ee/6/6.snapshot new file mode 100644 index 000000000..3cd3ce1cb --- /dev/null +++ b/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/3f1fbc95-d88e-4efe-a816-e63cff4fd5ee/6/6.snapshot @@ -0,0 +1,585 @@ +{ + "header" : { + "dataModelVersion" : "3" + }, + "content" : { + "flowSnapshot" : { + "externalControllerServices" : { + "c3d0e746-e921-3321-ae2d-584895e9dc32" : { + "identifier" : "c3d0e746-e921-3321-ae2d-584895e9dc32", + "name" : "TFRS" + } + }, + "flowContents" : { + "comments" : "", + "componentType" : "PROCESS_GROUP", + "connections" : [ ], + "controllerServices" : [ { + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-record-serialization-services-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "CONTROLLER_SERVICE", + "controllerServiceApis" : [ { + "bundle" : { + "artifact" : "nifi-standard-services-api-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "type" : "org.apache.nifi.serialization.RecordReaderFactory" + } ], + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "eb16f9e4-c948-3849-8efe-c3079a6ba952", + "instanceIdentifier" : "9cceebb4-8e43-3ac7-8b6d-cc0dce4c9697", + "name" : "AvroReader", + "properties" : { + "schema-name" : "${schema.name}", + "cache-size" : "1000", + "schema-access-strategy" : "embedded-avro-schema", + "schema-text" : "${avro.schema}" + }, + "propertyDescriptors" : { + "schema-branch" : { + "displayName" : "Schema Branch", + "identifiesControllerService" : false, + "name" : "schema-branch", + "sensitive" : false + }, + 
"schema-name" : { + "displayName" : "Schema Name", + "identifiesControllerService" : false, + "name" : "schema-name", + "sensitive" : false + }, + "cache-size" : { + "displayName" : "Cache Size", + "identifiesControllerService" : false, + "name" : "cache-size", + "sensitive" : false + }, + "schema-registry" : { + "displayName" : "Schema Registry", + "identifiesControllerService" : true, + "name" : "schema-registry", + "sensitive" : false + }, + "schema-access-strategy" : { + "displayName" : "Schema Access Strategy", + "identifiesControllerService" : false, + "name" : "schema-access-strategy", + "sensitive" : false + }, + "schema-version" : { + "displayName" : "Schema Version", + "identifiesControllerService" : false, + "name" : "schema-version", + "sensitive" : false + }, + "schema-text" : { + "displayName" : "Schema Text", + "identifiesControllerService" : false, + "name" : "schema-text", + "sensitive" : false + } + }, + "scheduledState" : "DISABLED", + "type" : "org.apache.nifi.avro.AvroReader" + }, { + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-record-serialization-services-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "CONTROLLER_SERVICE", + "controllerServiceApis" : [ { + "bundle" : { + "artifact" : "nifi-standard-services-api-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "type" : "org.apache.nifi.serialization.RecordSetWriterFactory" + } ], + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "e3d3c133-8004-3880-8150-388dfd06818c", + "instanceIdentifier" : "e73bd6b0-8a3e-395c-bfb8-bc1f4e72e1f7", + "name" : "JsonRecordSetWriter", + "properties" : { + "Allow Scientific Notation" : "false", + "compression-level" : "1", + "Pretty Print JSON" : "false", + "compression-format" : "none", + "Schema Write Strategy" : "no-schema", + "suppress-nulls" : "never-suppress", + "output-grouping" : "output-array", + "schema-name" : "${schema.name}", + "schema-access-strategy" : "inherit-record-schema", + "schema-protocol-version" : "1", + "schema-text" : "${avro.schema}" + }, + "propertyDescriptors" : { + "schema-branch" : { + "displayName" : "Schema Branch", + "identifiesControllerService" : false, + "name" : "schema-branch", + "sensitive" : false + }, + "Allow Scientific Notation" : { + "displayName" : "Allow Scientific Notation", + "identifiesControllerService" : false, + "name" : "Allow Scientific Notation", + "sensitive" : false + }, + "compression-level" : { + "displayName" : "Compression Level", + "identifiesControllerService" : false, + "name" : "compression-level", + "sensitive" : false + }, + "schema-cache" : { + "displayName" : "Schema Cache", + "identifiesControllerService" : true, + "name" : "schema-cache", + "sensitive" : false + }, + "Timestamp Format" : { + "displayName" : "Timestamp Format", + "identifiesControllerService" : false, + "name" : "Timestamp Format", + "sensitive" : false + }, + "Date Format" : { + "displayName" : "Date Format", + "identifiesControllerService" : false, + "name" : "Date Format", + "sensitive" : false + }, + "Pretty Print JSON" : { + "displayName" : "Pretty Print JSON", + "identifiesControllerService" : false, + "name" : "Pretty Print JSON", + "sensitive" : false + }, + "compression-format" : { + "displayName" : "Compression Format", + "identifiesControllerService" : false, + "name" : "compression-format", + "sensitive" : false + }, + "Schema Write Strategy" : { + "displayName" : "Schema Write Strategy", + "identifiesControllerService" 
: false, + "name" : "Schema Write Strategy", + "sensitive" : false + }, + "suppress-nulls" : { + "displayName" : "Suppress Null Values", + "identifiesControllerService" : false, + "name" : "suppress-nulls", + "sensitive" : false + }, + "output-grouping" : { + "displayName" : "Output Grouping", + "identifiesControllerService" : false, + "name" : "output-grouping", + "sensitive" : false + }, + "schema-name" : { + "displayName" : "Schema Name", + "identifiesControllerService" : false, + "name" : "schema-name", + "sensitive" : false + }, + "schema-registry" : { + "displayName" : "Schema Registry", + "identifiesControllerService" : true, + "name" : "schema-registry", + "sensitive" : false + }, + "Time Format" : { + "displayName" : "Time Format", + "identifiesControllerService" : false, + "name" : "Time Format", + "sensitive" : false + }, + "schema-access-strategy" : { + "displayName" : "Schema Access Strategy", + "identifiesControllerService" : false, + "name" : "schema-access-strategy", + "sensitive" : false + }, + "schema-protocol-version" : { + "displayName" : "Schema Protocol Version", + "identifiesControllerService" : false, + "name" : "schema-protocol-version", + "sensitive" : false + }, + "schema-version" : { + "displayName" : "Schema Version", + "identifiesControllerService" : false, + "name" : "schema-version", + "sensitive" : false + }, + "schema-text" : { + "displayName" : "Schema Text", + "identifiesControllerService" : false, + "name" : "schema-text", + "sensitive" : false + } + }, + "scheduledState" : "DISABLED", + "type" : "org.apache.nifi.json.JsonRecordSetWriter" + }, { + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-record-serialization-services-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "CONTROLLER_SERVICE", + "controllerServiceApis" : [ { + "bundle" : { + "artifact" : "nifi-standard-services-api-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "type" : "org.apache.nifi.serialization.RecordReaderFactory" + } ], + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "4850ced9-1430-3cec-9720-4fab95e71836", + "instanceIdentifier" : "722cb190-da9b-3cf5-98a0-e572dd9d412e", + "name" : "JsonTreeReader", + "properties" : { + "Max String Length" : "20 MB", + "schema-application-strategy" : "SELECTED_PART", + "schema-name" : "${schema.name}", + "starting-field-strategy" : "ROOT_NODE", + "schema-access-strategy" : "infer-schema", + "schema-text" : "${avro.schema}", + "Allow Comments" : "false" + }, + "propertyDescriptors" : { + "schema-branch" : { + "displayName" : "Schema Branch", + "identifiesControllerService" : false, + "name" : "schema-branch", + "sensitive" : false + }, + "Max String Length" : { + "displayName" : "Max String Length", + "identifiesControllerService" : false, + "name" : "Max String Length", + "sensitive" : false + }, + "schema-application-strategy" : { + "displayName" : "Schema Application Strategy", + "identifiesControllerService" : false, + "name" : "schema-application-strategy", + "sensitive" : false + }, + "Timestamp Format" : { + "displayName" : "Timestamp Format", + "identifiesControllerService" : false, + "name" : "Timestamp Format", + "sensitive" : false + }, + "schema-inference-cache" : { + "displayName" : "Schema Inference Cache", + "identifiesControllerService" : true, + "name" : "schema-inference-cache", + "sensitive" : false + }, + "Date Format" : { + "displayName" : "Date Format", + "identifiesControllerService" : false, + "name" : 
"Date Format", + "sensitive" : false + }, + "schema-name" : { + "displayName" : "Schema Name", + "identifiesControllerService" : false, + "name" : "schema-name", + "sensitive" : false + }, + "starting-field-strategy" : { + "displayName" : "Starting Field Strategy", + "identifiesControllerService" : false, + "name" : "starting-field-strategy", + "sensitive" : false + }, + "schema-registry" : { + "displayName" : "Schema Registry", + "identifiesControllerService" : true, + "name" : "schema-registry", + "sensitive" : false + }, + "starting-field-name" : { + "displayName" : "Starting Field Name", + "identifiesControllerService" : false, + "name" : "starting-field-name", + "sensitive" : false + }, + "Time Format" : { + "displayName" : "Time Format", + "identifiesControllerService" : false, + "name" : "Time Format", + "sensitive" : false + }, + "schema-access-strategy" : { + "displayName" : "Schema Access Strategy", + "identifiesControllerService" : false, + "name" : "schema-access-strategy", + "sensitive" : false + }, + "schema-version" : { + "displayName" : "Schema Version", + "identifiesControllerService" : false, + "name" : "schema-version", + "sensitive" : false + }, + "schema-text" : { + "displayName" : "Schema Text", + "identifiesControllerService" : false, + "name" : "schema-text", + "sensitive" : false + }, + "Allow Comments" : { + "displayName" : "Allow Comments", + "identifiesControllerService" : false, + "name" : "Allow Comments", + "sensitive" : false + } + }, + "scheduledState" : "DISABLED", + "type" : "org.apache.nifi.json.JsonTreeReader" + } ], + "defaultBackPressureDataSizeThreshold" : "1 GB", + "defaultBackPressureObjectThreshold" : 10000, + "defaultFlowFileExpiration" : "0 sec", + "flowFileConcurrency" : "UNBOUNDED", + "flowFileOutboundPolicy" : "STREAM_WHEN_AVAILABLE", + "funnels" : [ ], + "identifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "inputPorts" : [ ], + "instanceIdentifier" : "321c623c-0192-1000-ffff-ffffa5255e12", + "labels" : [ ], + "name" : "Identity data transfer", + "outputPorts" : [ ], + "position" : { + "x" : 664.0, + "y" : 256.0 + }, + "processGroups" : [ ], + "processors" : [ { + "autoTerminatedRelationships" : [ "success", "failure" ], + "backoffMechanism" : "PENALIZE_FLOWFILE", + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-standard-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "PROCESSOR", + "concurrentlySchedulableTaskCount" : 1, + "executionNode" : "ALL", + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "ad271e02-2f09-341a-bdb3-f9126dc0cb53", + "instanceIdentifier" : "324cc3c1-0192-1000-0000-000011804257", + "maxBackoffPeriod" : "10 mins", + "name" : "ExecuteSQL", + "penaltyDuration" : "30 sec", + "position" : { + "x" : 1608.0, + "y" : 968.0 + }, + "properties" : { + "esql-max-rows" : "0", + "dbf-default-precision" : "10", + "Max Wait Time" : "0 seconds", + "Database Connection Pooling Service" : "c3d0e746-e921-3321-ae2d-584895e9dc32", + "esql-auto-commit" : "true", + "dbf-user-logical-types" : "false", + "dbf-default-scale" : "0", + "compression-format" : "NONE", + "esql-output-batch-size" : "0", + "esql-fetch-size" : "0", + "SQL select query" : "SELECT\n o.id as organization_id,\n o.name,\n cast(null as varchar) as operating_name,\n cast(null as varchar) as email,\n cast(null as varchar) as phone,\n o.edrms_record,\n (\n case\n when os.status = 'Archived' then 'Suspended'\n when oat.the_type = 'Buy And Sell' or oat.the_type = 'Sell Only' then 
'Registered'\n else 'Unregistered'\n end\n ) as org_status,\n 'fuel_supplier' as organization_type,\n oa.address_line_1 as service_street_address,\n oa.address_line_2 as service_address_other,\n oa.city as service_city,\n oa.state as service_province_state,\n oa.postal_code as \"service_postalCode_zipCode\",\n oa.country as service_country,\n oa.attorney_street_address,\n oa.attorney_address_other,\n oa.attorney_city as attorney_city,\n oa.attorney_province as attorney_province_state,\n oa.attorney_postal_code as \"attorney_postalCode_zipCode\",\n oa.attorney_country as attorney_country\n FROM\n organization o\n INNER JOIN organization_status os ON os.id = o.status_id\n INNER JOIN organization_actions_type oat ON oat.id = o.actions_type_id\n INNER JOIN organization_address oa ON oa.organization_id = o.id", + "dbf-normalize" : "false" + }, + "propertyDescriptors" : { + "esql-max-rows" : { + "displayName" : "Max Rows Per Flow File", + "identifiesControllerService" : false, + "name" : "esql-max-rows", + "sensitive" : false + }, + "dbf-default-precision" : { + "displayName" : "Default Decimal Precision", + "identifiesControllerService" : false, + "name" : "dbf-default-precision", + "sensitive" : false + }, + "Max Wait Time" : { + "displayName" : "Max Wait Time", + "identifiesControllerService" : false, + "name" : "Max Wait Time", + "sensitive" : false + }, + "Database Connection Pooling Service" : { + "displayName" : "Database Connection Pooling Service", + "identifiesControllerService" : true, + "name" : "Database Connection Pooling Service", + "sensitive" : false + }, + "sql-post-query" : { + "displayName" : "SQL Post-Query", + "identifiesControllerService" : false, + "name" : "sql-post-query", + "sensitive" : false + }, + "esql-auto-commit" : { + "displayName" : "Set Auto Commit", + "identifiesControllerService" : false, + "name" : "esql-auto-commit", + "sensitive" : false + }, + "dbf-user-logical-types" : { + "displayName" : "Use Avro Logical Types", + "identifiesControllerService" : false, + "name" : "dbf-user-logical-types", + "sensitive" : false + }, + "dbf-default-scale" : { + "displayName" : "Default Decimal Scale", + "identifiesControllerService" : false, + "name" : "dbf-default-scale", + "sensitive" : false + }, + "sql-pre-query" : { + "displayName" : "SQL Pre-Query", + "identifiesControllerService" : false, + "name" : "sql-pre-query", + "sensitive" : false + }, + "compression-format" : { + "displayName" : "Compression Format", + "identifiesControllerService" : false, + "name" : "compression-format", + "sensitive" : false + }, + "esql-output-batch-size" : { + "displayName" : "Output Batch Size", + "identifiesControllerService" : false, + "name" : "esql-output-batch-size", + "sensitive" : false + }, + "esql-fetch-size" : { + "displayName" : "Fetch Size", + "identifiesControllerService" : false, + "name" : "esql-fetch-size", + "sensitive" : false + }, + "SQL select query" : { + "displayName" : "SQL select query", + "identifiesControllerService" : false, + "name" : "SQL select query", + "sensitive" : false + }, + "dbf-normalize" : { + "displayName" : "Normalize Table/Column Names", + "identifiesControllerService" : false, + "name" : "dbf-normalize", + "sensitive" : false + } + }, + "retriedRelationships" : [ ], + "retryCount" : 10, + "runDurationMillis" : 0, + "scheduledState" : "ENABLED", + "schedulingPeriod" : "0 sec", + "schedulingStrategy" : "TIMER_DRIVEN", + "style" : { }, + "type" : "org.apache.nifi.processors.standard.ExecuteSQL", + "yieldDuration" : "1 sec" + }, { + 
"autoTerminatedRelationships" : [ "success", "failure" ], + "backoffMechanism" : "PENALIZE_FLOWFILE", + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-groovyx-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "PROCESSOR", + "concurrentlySchedulableTaskCount" : 1, + "executionNode" : "ALL", + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "70a978dd-82ec-3c3c-ae4e-a81168947fae", + "instanceIdentifier" : "328e2539-0192-1000-0000-00007a4304c1", + "maxBackoffPeriod" : "10 mins", + "name" : "ExecuteGroovyScript", + "penaltyDuration" : "30 sec", + "position" : { + "x" : 2107.918814837607, + "y" : 1030.9862258089674 + }, + "properties" : { + "groovyx-failure-strategy" : "rollback", + "groovyx-script-file" : "/opt/nifi/nifi-current/nifi_scripts/organization.groovy" + }, + "propertyDescriptors" : { + "groovyx-script-body" : { + "displayName" : "Script Body", + "identifiesControllerService" : false, + "name" : "groovyx-script-body", + "sensitive" : false + }, + "groovyx-failure-strategy" : { + "displayName" : "Failure strategy", + "identifiesControllerService" : false, + "name" : "groovyx-failure-strategy", + "sensitive" : false + }, + "groovyx-additional-classpath" : { + "displayName" : "Additional classpath", + "identifiesControllerService" : false, + "name" : "groovyx-additional-classpath", + "sensitive" : false + }, + "groovyx-script-file" : { + "displayName" : "Script File", + "identifiesControllerService" : false, + "name" : "groovyx-script-file", + "resourceDefinition" : { + "cardinality" : "SINGLE", + "resourceTypes" : [ "FILE" ] + }, + "sensitive" : false + } + }, + "retriedRelationships" : [ ], + "retryCount" : 10, + "runDurationMillis" : 0, + "scheduledState" : "ENABLED", + "schedulingPeriod" : "0 sec", + "schedulingStrategy" : "TIMER_DRIVEN", + "style" : { }, + "type" : "org.apache.nifi.processors.groovyx.ExecuteGroovyScript", + "yieldDuration" : "1 sec" + } ], + "remoteProcessGroups" : [ ], + "variables" : { } + }, + "flowEncodingVersion" : "1.0", + "parameterContexts" : { }, + "parameterProviders" : { } + } + } +} \ No newline at end of file diff --git a/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/3f1fbc95-d88e-4efe-a816-e63cff4fd5ee/7/7.snapshot b/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/3f1fbc95-d88e-4efe-a816-e63cff4fd5ee/7/7.snapshot new file mode 100644 index 000000000..365b528c8 --- /dev/null +++ b/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/3f1fbc95-d88e-4efe-a816-e63cff4fd5ee/7/7.snapshot @@ -0,0 +1,448 @@ +{ + "header" : { + "dataModelVersion" : "3" + }, + "content" : { + "flowSnapshot" : { + "externalControllerServices" : { }, + "flowContents" : { + "comments" : "", + "componentType" : "PROCESS_GROUP", + "connections" : [ ], + "controllerServices" : [ { + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-record-serialization-services-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "CONTROLLER_SERVICE", + "controllerServiceApis" : [ { + "bundle" : { + "artifact" : "nifi-standard-services-api-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "type" : "org.apache.nifi.serialization.RecordReaderFactory" + } ], + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "eb16f9e4-c948-3849-8efe-c3079a6ba952", + "instanceIdentifier" : "9cceebb4-8e43-3ac7-8b6d-cc0dce4c9697", + "name" : "AvroReader", + "properties" : { + "schema-name" : 
"${schema.name}", + "cache-size" : "1000", + "schema-access-strategy" : "embedded-avro-schema", + "schema-text" : "${avro.schema}" + }, + "propertyDescriptors" : { + "schema-branch" : { + "displayName" : "Schema Branch", + "identifiesControllerService" : false, + "name" : "schema-branch", + "sensitive" : false + }, + "schema-name" : { + "displayName" : "Schema Name", + "identifiesControllerService" : false, + "name" : "schema-name", + "sensitive" : false + }, + "cache-size" : { + "displayName" : "Cache Size", + "identifiesControllerService" : false, + "name" : "cache-size", + "sensitive" : false + }, + "schema-registry" : { + "displayName" : "Schema Registry", + "identifiesControllerService" : true, + "name" : "schema-registry", + "sensitive" : false + }, + "schema-access-strategy" : { + "displayName" : "Schema Access Strategy", + "identifiesControllerService" : false, + "name" : "schema-access-strategy", + "sensitive" : false + }, + "schema-version" : { + "displayName" : "Schema Version", + "identifiesControllerService" : false, + "name" : "schema-version", + "sensitive" : false + }, + "schema-text" : { + "displayName" : "Schema Text", + "identifiesControllerService" : false, + "name" : "schema-text", + "sensitive" : false + } + }, + "scheduledState" : "DISABLED", + "type" : "org.apache.nifi.avro.AvroReader" + }, { + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-record-serialization-services-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "CONTROLLER_SERVICE", + "controllerServiceApis" : [ { + "bundle" : { + "artifact" : "nifi-standard-services-api-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "type" : "org.apache.nifi.serialization.RecordSetWriterFactory" + } ], + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "e3d3c133-8004-3880-8150-388dfd06818c", + "instanceIdentifier" : "e73bd6b0-8a3e-395c-bfb8-bc1f4e72e1f7", + "name" : "JsonRecordSetWriter", + "properties" : { + "Allow Scientific Notation" : "false", + "compression-level" : "1", + "Pretty Print JSON" : "false", + "compression-format" : "none", + "Schema Write Strategy" : "no-schema", + "suppress-nulls" : "never-suppress", + "output-grouping" : "output-array", + "schema-name" : "${schema.name}", + "schema-access-strategy" : "inherit-record-schema", + "schema-protocol-version" : "1", + "schema-text" : "${avro.schema}" + }, + "propertyDescriptors" : { + "schema-branch" : { + "displayName" : "Schema Branch", + "identifiesControllerService" : false, + "name" : "schema-branch", + "sensitive" : false + }, + "Allow Scientific Notation" : { + "displayName" : "Allow Scientific Notation", + "identifiesControllerService" : false, + "name" : "Allow Scientific Notation", + "sensitive" : false + }, + "compression-level" : { + "displayName" : "Compression Level", + "identifiesControllerService" : false, + "name" : "compression-level", + "sensitive" : false + }, + "schema-cache" : { + "displayName" : "Schema Cache", + "identifiesControllerService" : true, + "name" : "schema-cache", + "sensitive" : false + }, + "Timestamp Format" : { + "displayName" : "Timestamp Format", + "identifiesControllerService" : false, + "name" : "Timestamp Format", + "sensitive" : false + }, + "Date Format" : { + "displayName" : "Date Format", + "identifiesControllerService" : false, + "name" : "Date Format", + "sensitive" : false + }, + "Pretty Print JSON" : { + "displayName" : "Pretty Print JSON", + "identifiesControllerService" : false, + "name" : 
"Pretty Print JSON", + "sensitive" : false + }, + "compression-format" : { + "displayName" : "Compression Format", + "identifiesControllerService" : false, + "name" : "compression-format", + "sensitive" : false + }, + "Schema Write Strategy" : { + "displayName" : "Schema Write Strategy", + "identifiesControllerService" : false, + "name" : "Schema Write Strategy", + "sensitive" : false + }, + "suppress-nulls" : { + "displayName" : "Suppress Null Values", + "identifiesControllerService" : false, + "name" : "suppress-nulls", + "sensitive" : false + }, + "output-grouping" : { + "displayName" : "Output Grouping", + "identifiesControllerService" : false, + "name" : "output-grouping", + "sensitive" : false + }, + "schema-name" : { + "displayName" : "Schema Name", + "identifiesControllerService" : false, + "name" : "schema-name", + "sensitive" : false + }, + "schema-registry" : { + "displayName" : "Schema Registry", + "identifiesControllerService" : true, + "name" : "schema-registry", + "sensitive" : false + }, + "Time Format" : { + "displayName" : "Time Format", + "identifiesControllerService" : false, + "name" : "Time Format", + "sensitive" : false + }, + "schema-access-strategy" : { + "displayName" : "Schema Access Strategy", + "identifiesControllerService" : false, + "name" : "schema-access-strategy", + "sensitive" : false + }, + "schema-protocol-version" : { + "displayName" : "Schema Protocol Version", + "identifiesControllerService" : false, + "name" : "schema-protocol-version", + "sensitive" : false + }, + "schema-version" : { + "displayName" : "Schema Version", + "identifiesControllerService" : false, + "name" : "schema-version", + "sensitive" : false + }, + "schema-text" : { + "displayName" : "Schema Text", + "identifiesControllerService" : false, + "name" : "schema-text", + "sensitive" : false + } + }, + "scheduledState" : "DISABLED", + "type" : "org.apache.nifi.json.JsonRecordSetWriter" + }, { + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-record-serialization-services-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "CONTROLLER_SERVICE", + "controllerServiceApis" : [ { + "bundle" : { + "artifact" : "nifi-standard-services-api-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "type" : "org.apache.nifi.serialization.RecordReaderFactory" + } ], + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "4850ced9-1430-3cec-9720-4fab95e71836", + "instanceIdentifier" : "722cb190-da9b-3cf5-98a0-e572dd9d412e", + "name" : "JsonTreeReader", + "properties" : { + "Max String Length" : "20 MB", + "schema-application-strategy" : "SELECTED_PART", + "schema-name" : "${schema.name}", + "starting-field-strategy" : "ROOT_NODE", + "schema-access-strategy" : "infer-schema", + "schema-text" : "${avro.schema}", + "Allow Comments" : "false" + }, + "propertyDescriptors" : { + "schema-branch" : { + "displayName" : "Schema Branch", + "identifiesControllerService" : false, + "name" : "schema-branch", + "sensitive" : false + }, + "Max String Length" : { + "displayName" : "Max String Length", + "identifiesControllerService" : false, + "name" : "Max String Length", + "sensitive" : false + }, + "schema-application-strategy" : { + "displayName" : "Schema Application Strategy", + "identifiesControllerService" : false, + "name" : "schema-application-strategy", + "sensitive" : false + }, + "Timestamp Format" : { + "displayName" : "Timestamp Format", + "identifiesControllerService" : false, + "name" : "Timestamp 
Format", + "sensitive" : false + }, + "schema-inference-cache" : { + "displayName" : "Schema Inference Cache", + "identifiesControllerService" : true, + "name" : "schema-inference-cache", + "sensitive" : false + }, + "Date Format" : { + "displayName" : "Date Format", + "identifiesControllerService" : false, + "name" : "Date Format", + "sensitive" : false + }, + "schema-name" : { + "displayName" : "Schema Name", + "identifiesControllerService" : false, + "name" : "schema-name", + "sensitive" : false + }, + "starting-field-strategy" : { + "displayName" : "Starting Field Strategy", + "identifiesControllerService" : false, + "name" : "starting-field-strategy", + "sensitive" : false + }, + "schema-registry" : { + "displayName" : "Schema Registry", + "identifiesControllerService" : true, + "name" : "schema-registry", + "sensitive" : false + }, + "starting-field-name" : { + "displayName" : "Starting Field Name", + "identifiesControllerService" : false, + "name" : "starting-field-name", + "sensitive" : false + }, + "Time Format" : { + "displayName" : "Time Format", + "identifiesControllerService" : false, + "name" : "Time Format", + "sensitive" : false + }, + "schema-access-strategy" : { + "displayName" : "Schema Access Strategy", + "identifiesControllerService" : false, + "name" : "schema-access-strategy", + "sensitive" : false + }, + "schema-version" : { + "displayName" : "Schema Version", + "identifiesControllerService" : false, + "name" : "schema-version", + "sensitive" : false + }, + "schema-text" : { + "displayName" : "Schema Text", + "identifiesControllerService" : false, + "name" : "schema-text", + "sensitive" : false + }, + "Allow Comments" : { + "displayName" : "Allow Comments", + "identifiesControllerService" : false, + "name" : "Allow Comments", + "sensitive" : false + } + }, + "scheduledState" : "DISABLED", + "type" : "org.apache.nifi.json.JsonTreeReader" + } ], + "defaultBackPressureDataSizeThreshold" : "1 GB", + "defaultBackPressureObjectThreshold" : 10000, + "defaultFlowFileExpiration" : "0 sec", + "flowFileConcurrency" : "UNBOUNDED", + "flowFileOutboundPolicy" : "STREAM_WHEN_AVAILABLE", + "funnels" : [ ], + "identifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "inputPorts" : [ ], + "instanceIdentifier" : "321c623c-0192-1000-ffff-ffffa5255e12", + "labels" : [ ], + "name" : "Identity data transfer", + "outputPorts" : [ ], + "position" : { + "x" : 664.0, + "y" : 256.0 + }, + "processGroups" : [ ], + "processors" : [ { + "autoTerminatedRelationships" : [ "success", "failure" ], + "backoffMechanism" : "PENALIZE_FLOWFILE", + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-groovyx-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "PROCESSOR", + "concurrentlySchedulableTaskCount" : 1, + "executionNode" : "ALL", + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "70a978dd-82ec-3c3c-ae4e-a81168947fae", + "instanceIdentifier" : "328e2539-0192-1000-0000-00007a4304c1", + "maxBackoffPeriod" : "10 mins", + "name" : "Organization data transfer", + "penaltyDuration" : "30 sec", + "position" : { + "x" : 1824.0, + "y" : 968.0 + }, + "properties" : { + "groovyx-failure-strategy" : "rollback", + "groovyx-script-file" : "/opt/nifi/nifi-current/nifi_scripts/organization.groovy" + }, + "propertyDescriptors" : { + "groovyx-script-body" : { + "displayName" : "Script Body", + "identifiesControllerService" : false, + "name" : "groovyx-script-body", + "sensitive" : false + }, + "groovyx-failure-strategy" : { + 
"displayName" : "Failure strategy", + "identifiesControllerService" : false, + "name" : "groovyx-failure-strategy", + "sensitive" : false + }, + "groovyx-additional-classpath" : { + "displayName" : "Additional classpath", + "identifiesControllerService" : false, + "name" : "groovyx-additional-classpath", + "sensitive" : false + }, + "groovyx-script-file" : { + "displayName" : "Script File", + "identifiesControllerService" : false, + "name" : "groovyx-script-file", + "resourceDefinition" : { + "cardinality" : "SINGLE", + "resourceTypes" : [ "FILE" ] + }, + "sensitive" : false + } + }, + "retriedRelationships" : [ ], + "retryCount" : 10, + "runDurationMillis" : 0, + "scheduledState" : "ENABLED", + "schedulingPeriod" : "0 sec", + "schedulingStrategy" : "TIMER_DRIVEN", + "style" : { }, + "type" : "org.apache.nifi.processors.groovyx.ExecuteGroovyScript", + "yieldDuration" : "1 sec" + } ], + "remoteProcessGroups" : [ ], + "variables" : { } + }, + "flowEncodingVersion" : "1.0", + "parameterContexts" : { }, + "parameterProviders" : { } + } + } +} \ No newline at end of file diff --git a/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/3f1fbc95-d88e-4efe-a816-e63cff4fd5ee/8/8.snapshot b/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/3f1fbc95-d88e-4efe-a816-e63cff4fd5ee/8/8.snapshot new file mode 100644 index 000000000..743312b3d --- /dev/null +++ b/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/3f1fbc95-d88e-4efe-a816-e63cff4fd5ee/8/8.snapshot @@ -0,0 +1,448 @@ +{ + "header" : { + "dataModelVersion" : "3" + }, + "content" : { + "flowSnapshot" : { + "externalControllerServices" : { }, + "flowContents" : { + "comments" : "", + "componentType" : "PROCESS_GROUP", + "connections" : [ ], + "controllerServices" : [ { + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-record-serialization-services-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "CONTROLLER_SERVICE", + "controllerServiceApis" : [ { + "bundle" : { + "artifact" : "nifi-standard-services-api-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "type" : "org.apache.nifi.serialization.RecordReaderFactory" + } ], + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "eb16f9e4-c948-3849-8efe-c3079a6ba952", + "instanceIdentifier" : "9cceebb4-8e43-3ac7-8b6d-cc0dce4c9697", + "name" : "AvroReader", + "properties" : { + "schema-name" : "${schema.name}", + "cache-size" : "1000", + "schema-access-strategy" : "embedded-avro-schema", + "schema-text" : "${avro.schema}" + }, + "propertyDescriptors" : { + "schema-branch" : { + "displayName" : "Schema Branch", + "identifiesControllerService" : false, + "name" : "schema-branch", + "sensitive" : false + }, + "schema-name" : { + "displayName" : "Schema Name", + "identifiesControllerService" : false, + "name" : "schema-name", + "sensitive" : false + }, + "cache-size" : { + "displayName" : "Cache Size", + "identifiesControllerService" : false, + "name" : "cache-size", + "sensitive" : false + }, + "schema-registry" : { + "displayName" : "Schema Registry", + "identifiesControllerService" : true, + "name" : "schema-registry", + "sensitive" : false + }, + "schema-access-strategy" : { + "displayName" : "Schema Access Strategy", + "identifiesControllerService" : false, + "name" : "schema-access-strategy", + "sensitive" : false + }, + "schema-version" : { + "displayName" : "Schema Version", + "identifiesControllerService" : false, + "name" : "schema-version", + "sensitive" : false + }, + 
"schema-text" : { + "displayName" : "Schema Text", + "identifiesControllerService" : false, + "name" : "schema-text", + "sensitive" : false + } + }, + "scheduledState" : "DISABLED", + "type" : "org.apache.nifi.avro.AvroReader" + }, { + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-record-serialization-services-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "CONTROLLER_SERVICE", + "controllerServiceApis" : [ { + "bundle" : { + "artifact" : "nifi-standard-services-api-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "type" : "org.apache.nifi.serialization.RecordSetWriterFactory" + } ], + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "e3d3c133-8004-3880-8150-388dfd06818c", + "instanceIdentifier" : "e73bd6b0-8a3e-395c-bfb8-bc1f4e72e1f7", + "name" : "JsonRecordSetWriter", + "properties" : { + "Allow Scientific Notation" : "false", + "compression-level" : "1", + "Pretty Print JSON" : "false", + "compression-format" : "none", + "Schema Write Strategy" : "no-schema", + "suppress-nulls" : "never-suppress", + "output-grouping" : "output-array", + "schema-name" : "${schema.name}", + "schema-access-strategy" : "inherit-record-schema", + "schema-protocol-version" : "1", + "schema-text" : "${avro.schema}" + }, + "propertyDescriptors" : { + "schema-branch" : { + "displayName" : "Schema Branch", + "identifiesControllerService" : false, + "name" : "schema-branch", + "sensitive" : false + }, + "Allow Scientific Notation" : { + "displayName" : "Allow Scientific Notation", + "identifiesControllerService" : false, + "name" : "Allow Scientific Notation", + "sensitive" : false + }, + "compression-level" : { + "displayName" : "Compression Level", + "identifiesControllerService" : false, + "name" : "compression-level", + "sensitive" : false + }, + "schema-cache" : { + "displayName" : "Schema Cache", + "identifiesControllerService" : true, + "name" : "schema-cache", + "sensitive" : false + }, + "Timestamp Format" : { + "displayName" : "Timestamp Format", + "identifiesControllerService" : false, + "name" : "Timestamp Format", + "sensitive" : false + }, + "Date Format" : { + "displayName" : "Date Format", + "identifiesControllerService" : false, + "name" : "Date Format", + "sensitive" : false + }, + "Pretty Print JSON" : { + "displayName" : "Pretty Print JSON", + "identifiesControllerService" : false, + "name" : "Pretty Print JSON", + "sensitive" : false + }, + "compression-format" : { + "displayName" : "Compression Format", + "identifiesControllerService" : false, + "name" : "compression-format", + "sensitive" : false + }, + "Schema Write Strategy" : { + "displayName" : "Schema Write Strategy", + "identifiesControllerService" : false, + "name" : "Schema Write Strategy", + "sensitive" : false + }, + "suppress-nulls" : { + "displayName" : "Suppress Null Values", + "identifiesControllerService" : false, + "name" : "suppress-nulls", + "sensitive" : false + }, + "output-grouping" : { + "displayName" : "Output Grouping", + "identifiesControllerService" : false, + "name" : "output-grouping", + "sensitive" : false + }, + "schema-name" : { + "displayName" : "Schema Name", + "identifiesControllerService" : false, + "name" : "schema-name", + "sensitive" : false + }, + "schema-registry" : { + "displayName" : "Schema Registry", + "identifiesControllerService" : true, + "name" : "schema-registry", + "sensitive" : false + }, + "Time Format" : { + "displayName" : "Time Format", + "identifiesControllerService" : 
false, + "name" : "Time Format", + "sensitive" : false + }, + "schema-access-strategy" : { + "displayName" : "Schema Access Strategy", + "identifiesControllerService" : false, + "name" : "schema-access-strategy", + "sensitive" : false + }, + "schema-protocol-version" : { + "displayName" : "Schema Protocol Version", + "identifiesControllerService" : false, + "name" : "schema-protocol-version", + "sensitive" : false + }, + "schema-version" : { + "displayName" : "Schema Version", + "identifiesControllerService" : false, + "name" : "schema-version", + "sensitive" : false + }, + "schema-text" : { + "displayName" : "Schema Text", + "identifiesControllerService" : false, + "name" : "schema-text", + "sensitive" : false + } + }, + "scheduledState" : "DISABLED", + "type" : "org.apache.nifi.json.JsonRecordSetWriter" + }, { + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-record-serialization-services-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "CONTROLLER_SERVICE", + "controllerServiceApis" : [ { + "bundle" : { + "artifact" : "nifi-standard-services-api-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "type" : "org.apache.nifi.serialization.RecordReaderFactory" + } ], + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "4850ced9-1430-3cec-9720-4fab95e71836", + "instanceIdentifier" : "722cb190-da9b-3cf5-98a0-e572dd9d412e", + "name" : "JsonTreeReader", + "properties" : { + "Max String Length" : "20 MB", + "schema-application-strategy" : "SELECTED_PART", + "schema-name" : "${schema.name}", + "starting-field-strategy" : "ROOT_NODE", + "schema-access-strategy" : "infer-schema", + "schema-text" : "${avro.schema}", + "Allow Comments" : "false" + }, + "propertyDescriptors" : { + "schema-branch" : { + "displayName" : "Schema Branch", + "identifiesControllerService" : false, + "name" : "schema-branch", + "sensitive" : false + }, + "Max String Length" : { + "displayName" : "Max String Length", + "identifiesControllerService" : false, + "name" : "Max String Length", + "sensitive" : false + }, + "schema-application-strategy" : { + "displayName" : "Schema Application Strategy", + "identifiesControllerService" : false, + "name" : "schema-application-strategy", + "sensitive" : false + }, + "Timestamp Format" : { + "displayName" : "Timestamp Format", + "identifiesControllerService" : false, + "name" : "Timestamp Format", + "sensitive" : false + }, + "schema-inference-cache" : { + "displayName" : "Schema Inference Cache", + "identifiesControllerService" : true, + "name" : "schema-inference-cache", + "sensitive" : false + }, + "Date Format" : { + "displayName" : "Date Format", + "identifiesControllerService" : false, + "name" : "Date Format", + "sensitive" : false + }, + "schema-name" : { + "displayName" : "Schema Name", + "identifiesControllerService" : false, + "name" : "schema-name", + "sensitive" : false + }, + "starting-field-strategy" : { + "displayName" : "Starting Field Strategy", + "identifiesControllerService" : false, + "name" : "starting-field-strategy", + "sensitive" : false + }, + "schema-registry" : { + "displayName" : "Schema Registry", + "identifiesControllerService" : true, + "name" : "schema-registry", + "sensitive" : false + }, + "starting-field-name" : { + "displayName" : "Starting Field Name", + "identifiesControllerService" : false, + "name" : "starting-field-name", + "sensitive" : false + }, + "Time Format" : { + "displayName" : "Time Format", + "identifiesControllerService" : 
false, + "name" : "Time Format", + "sensitive" : false + }, + "schema-access-strategy" : { + "displayName" : "Schema Access Strategy", + "identifiesControllerService" : false, + "name" : "schema-access-strategy", + "sensitive" : false + }, + "schema-version" : { + "displayName" : "Schema Version", + "identifiesControllerService" : false, + "name" : "schema-version", + "sensitive" : false + }, + "schema-text" : { + "displayName" : "Schema Text", + "identifiesControllerService" : false, + "name" : "schema-text", + "sensitive" : false + }, + "Allow Comments" : { + "displayName" : "Allow Comments", + "identifiesControllerService" : false, + "name" : "Allow Comments", + "sensitive" : false + } + }, + "scheduledState" : "DISABLED", + "type" : "org.apache.nifi.json.JsonTreeReader" + } ], + "defaultBackPressureDataSizeThreshold" : "1 GB", + "defaultBackPressureObjectThreshold" : 10000, + "defaultFlowFileExpiration" : "0 sec", + "flowFileConcurrency" : "UNBOUNDED", + "flowFileOutboundPolicy" : "STREAM_WHEN_AVAILABLE", + "funnels" : [ ], + "identifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "inputPorts" : [ ], + "instanceIdentifier" : "321c623c-0192-1000-ffff-ffffa5255e12", + "labels" : [ ], + "name" : "Identity data transfer", + "outputPorts" : [ ], + "position" : { + "x" : 664.0, + "y" : 256.0 + }, + "processGroups" : [ ], + "processors" : [ { + "autoTerminatedRelationships" : [ "success", "failure" ], + "backoffMechanism" : "PENALIZE_FLOWFILE", + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-groovyx-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "PROCESSOR", + "concurrentlySchedulableTaskCount" : 1, + "executionNode" : "ALL", + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "70a978dd-82ec-3c3c-ae4e-a81168947fae", + "instanceIdentifier" : "328e2539-0192-1000-0000-00007a4304c1", + "maxBackoffPeriod" : "10 mins", + "name" : "Organization data transfer", + "penaltyDuration" : "30 sec", + "position" : { + "x" : 1816.0, + "y" : 1256.0 + }, + "properties" : { + "groovyx-failure-strategy" : "rollback", + "groovyx-script-file" : "/opt/nifi/nifi-current/nifi_scripts/organization.groovy" + }, + "propertyDescriptors" : { + "groovyx-script-body" : { + "displayName" : "Script Body", + "identifiesControllerService" : false, + "name" : "groovyx-script-body", + "sensitive" : false + }, + "groovyx-failure-strategy" : { + "displayName" : "Failure strategy", + "identifiesControllerService" : false, + "name" : "groovyx-failure-strategy", + "sensitive" : false + }, + "groovyx-additional-classpath" : { + "displayName" : "Additional classpath", + "identifiesControllerService" : false, + "name" : "groovyx-additional-classpath", + "sensitive" : false + }, + "groovyx-script-file" : { + "displayName" : "Script File", + "identifiesControllerService" : false, + "name" : "groovyx-script-file", + "resourceDefinition" : { + "cardinality" : "SINGLE", + "resourceTypes" : [ "FILE" ] + }, + "sensitive" : false + } + }, + "retriedRelationships" : [ ], + "retryCount" : 10, + "runDurationMillis" : 0, + "scheduledState" : "ENABLED", + "schedulingPeriod" : "0 sec", + "schedulingStrategy" : "TIMER_DRIVEN", + "style" : { }, + "type" : "org.apache.nifi.processors.groovyx.ExecuteGroovyScript", + "yieldDuration" : "1 sec" + } ], + "remoteProcessGroups" : [ ], + "variables" : { } + }, + "flowEncodingVersion" : "1.0", + "parameterContexts" : { }, + "parameterProviders" : { } + } + } +} \ No newline at end of file diff --git 
a/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/3f1fbc95-d88e-4efe-a816-e63cff4fd5ee/9/9.snapshot b/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/3f1fbc95-d88e-4efe-a816-e63cff4fd5ee/9/9.snapshot new file mode 100644 index 000000000..178633edf --- /dev/null +++ b/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/3f1fbc95-d88e-4efe-a816-e63cff4fd5ee/9/9.snapshot @@ -0,0 +1,546 @@ +{ + "header" : { + "dataModelVersion" : "3" + }, + "content" : { + "flowSnapshot" : { + "externalControllerServices" : { }, + "flowContents" : { + "comments" : "", + "componentType" : "PROCESS_GROUP", + "connections" : [ ], + "controllerServices" : [ { + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-record-serialization-services-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "CONTROLLER_SERVICE", + "controllerServiceApis" : [ { + "bundle" : { + "artifact" : "nifi-standard-services-api-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "type" : "org.apache.nifi.serialization.RecordReaderFactory" + } ], + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "eb16f9e4-c948-3849-8efe-c3079a6ba952", + "instanceIdentifier" : "9cceebb4-8e43-3ac7-8b6d-cc0dce4c9697", + "name" : "AvroReader", + "properties" : { + "schema-name" : "${schema.name}", + "cache-size" : "1000", + "schema-access-strategy" : "embedded-avro-schema", + "schema-text" : "${avro.schema}" + }, + "propertyDescriptors" : { + "schema-branch" : { + "displayName" : "Schema Branch", + "identifiesControllerService" : false, + "name" : "schema-branch", + "sensitive" : false + }, + "schema-name" : { + "displayName" : "Schema Name", + "identifiesControllerService" : false, + "name" : "schema-name", + "sensitive" : false + }, + "cache-size" : { + "displayName" : "Cache Size", + "identifiesControllerService" : false, + "name" : "cache-size", + "sensitive" : false + }, + "schema-registry" : { + "displayName" : "Schema Registry", + "identifiesControllerService" : true, + "name" : "schema-registry", + "sensitive" : false + }, + "schema-access-strategy" : { + "displayName" : "Schema Access Strategy", + "identifiesControllerService" : false, + "name" : "schema-access-strategy", + "sensitive" : false + }, + "schema-version" : { + "displayName" : "Schema Version", + "identifiesControllerService" : false, + "name" : "schema-version", + "sensitive" : false + }, + "schema-text" : { + "displayName" : "Schema Text", + "identifiesControllerService" : false, + "name" : "schema-text", + "sensitive" : false + } + }, + "scheduledState" : "DISABLED", + "type" : "org.apache.nifi.avro.AvroReader" + }, { + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-record-serialization-services-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "CONTROLLER_SERVICE", + "controllerServiceApis" : [ { + "bundle" : { + "artifact" : "nifi-standard-services-api-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "type" : "org.apache.nifi.serialization.RecordSetWriterFactory" + } ], + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "e3d3c133-8004-3880-8150-388dfd06818c", + "instanceIdentifier" : "e73bd6b0-8a3e-395c-bfb8-bc1f4e72e1f7", + "name" : "JsonRecordSetWriter", + "properties" : { + "Allow Scientific Notation" : "false", + "compression-level" : "1", + "Pretty Print JSON" : "false", + "compression-format" : "none", + "Schema Write Strategy" : "no-schema", + 
"suppress-nulls" : "never-suppress", + "output-grouping" : "output-array", + "schema-name" : "${schema.name}", + "schema-access-strategy" : "inherit-record-schema", + "schema-protocol-version" : "1", + "schema-text" : "${avro.schema}" + }, + "propertyDescriptors" : { + "schema-branch" : { + "displayName" : "Schema Branch", + "identifiesControllerService" : false, + "name" : "schema-branch", + "sensitive" : false + }, + "Allow Scientific Notation" : { + "displayName" : "Allow Scientific Notation", + "identifiesControllerService" : false, + "name" : "Allow Scientific Notation", + "sensitive" : false + }, + "compression-level" : { + "displayName" : "Compression Level", + "identifiesControllerService" : false, + "name" : "compression-level", + "sensitive" : false + }, + "schema-cache" : { + "displayName" : "Schema Cache", + "identifiesControllerService" : true, + "name" : "schema-cache", + "sensitive" : false + }, + "Timestamp Format" : { + "displayName" : "Timestamp Format", + "identifiesControllerService" : false, + "name" : "Timestamp Format", + "sensitive" : false + }, + "Date Format" : { + "displayName" : "Date Format", + "identifiesControllerService" : false, + "name" : "Date Format", + "sensitive" : false + }, + "Pretty Print JSON" : { + "displayName" : "Pretty Print JSON", + "identifiesControllerService" : false, + "name" : "Pretty Print JSON", + "sensitive" : false + }, + "compression-format" : { + "displayName" : "Compression Format", + "identifiesControllerService" : false, + "name" : "compression-format", + "sensitive" : false + }, + "Schema Write Strategy" : { + "displayName" : "Schema Write Strategy", + "identifiesControllerService" : false, + "name" : "Schema Write Strategy", + "sensitive" : false + }, + "suppress-nulls" : { + "displayName" : "Suppress Null Values", + "identifiesControllerService" : false, + "name" : "suppress-nulls", + "sensitive" : false + }, + "output-grouping" : { + "displayName" : "Output Grouping", + "identifiesControllerService" : false, + "name" : "output-grouping", + "sensitive" : false + }, + "schema-name" : { + "displayName" : "Schema Name", + "identifiesControllerService" : false, + "name" : "schema-name", + "sensitive" : false + }, + "schema-registry" : { + "displayName" : "Schema Registry", + "identifiesControllerService" : true, + "name" : "schema-registry", + "sensitive" : false + }, + "Time Format" : { + "displayName" : "Time Format", + "identifiesControllerService" : false, + "name" : "Time Format", + "sensitive" : false + }, + "schema-access-strategy" : { + "displayName" : "Schema Access Strategy", + "identifiesControllerService" : false, + "name" : "schema-access-strategy", + "sensitive" : false + }, + "schema-protocol-version" : { + "displayName" : "Schema Protocol Version", + "identifiesControllerService" : false, + "name" : "schema-protocol-version", + "sensitive" : false + }, + "schema-version" : { + "displayName" : "Schema Version", + "identifiesControllerService" : false, + "name" : "schema-version", + "sensitive" : false + }, + "schema-text" : { + "displayName" : "Schema Text", + "identifiesControllerService" : false, + "name" : "schema-text", + "sensitive" : false + } + }, + "scheduledState" : "DISABLED", + "type" : "org.apache.nifi.json.JsonRecordSetWriter" + }, { + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-record-serialization-services-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "CONTROLLER_SERVICE", + "controllerServiceApis" : [ { + "bundle" : { + 
"artifact" : "nifi-standard-services-api-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "type" : "org.apache.nifi.serialization.RecordReaderFactory" + } ], + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "4850ced9-1430-3cec-9720-4fab95e71836", + "instanceIdentifier" : "722cb190-da9b-3cf5-98a0-e572dd9d412e", + "name" : "JsonTreeReader", + "properties" : { + "Max String Length" : "20 MB", + "schema-application-strategy" : "SELECTED_PART", + "schema-name" : "${schema.name}", + "starting-field-strategy" : "ROOT_NODE", + "schema-access-strategy" : "infer-schema", + "schema-text" : "${avro.schema}", + "Allow Comments" : "false" + }, + "propertyDescriptors" : { + "schema-branch" : { + "displayName" : "Schema Branch", + "identifiesControllerService" : false, + "name" : "schema-branch", + "sensitive" : false + }, + "Max String Length" : { + "displayName" : "Max String Length", + "identifiesControllerService" : false, + "name" : "Max String Length", + "sensitive" : false + }, + "schema-application-strategy" : { + "displayName" : "Schema Application Strategy", + "identifiesControllerService" : false, + "name" : "schema-application-strategy", + "sensitive" : false + }, + "Timestamp Format" : { + "displayName" : "Timestamp Format", + "identifiesControllerService" : false, + "name" : "Timestamp Format", + "sensitive" : false + }, + "schema-inference-cache" : { + "displayName" : "Schema Inference Cache", + "identifiesControllerService" : true, + "name" : "schema-inference-cache", + "sensitive" : false + }, + "Date Format" : { + "displayName" : "Date Format", + "identifiesControllerService" : false, + "name" : "Date Format", + "sensitive" : false + }, + "schema-name" : { + "displayName" : "Schema Name", + "identifiesControllerService" : false, + "name" : "schema-name", + "sensitive" : false + }, + "starting-field-strategy" : { + "displayName" : "Starting Field Strategy", + "identifiesControllerService" : false, + "name" : "starting-field-strategy", + "sensitive" : false + }, + "schema-registry" : { + "displayName" : "Schema Registry", + "identifiesControllerService" : true, + "name" : "schema-registry", + "sensitive" : false + }, + "starting-field-name" : { + "displayName" : "Starting Field Name", + "identifiesControllerService" : false, + "name" : "starting-field-name", + "sensitive" : false + }, + "Time Format" : { + "displayName" : "Time Format", + "identifiesControllerService" : false, + "name" : "Time Format", + "sensitive" : false + }, + "schema-access-strategy" : { + "displayName" : "Schema Access Strategy", + "identifiesControllerService" : false, + "name" : "schema-access-strategy", + "sensitive" : false + }, + "schema-version" : { + "displayName" : "Schema Version", + "identifiesControllerService" : false, + "name" : "schema-version", + "sensitive" : false + }, + "schema-text" : { + "displayName" : "Schema Text", + "identifiesControllerService" : false, + "name" : "schema-text", + "sensitive" : false + }, + "Allow Comments" : { + "displayName" : "Allow Comments", + "identifiesControllerService" : false, + "name" : "Allow Comments", + "sensitive" : false + } + }, + "scheduledState" : "DISABLED", + "type" : "org.apache.nifi.json.JsonTreeReader" + } ], + "defaultBackPressureDataSizeThreshold" : "1 GB", + "defaultBackPressureObjectThreshold" : 10000, + "defaultFlowFileExpiration" : "0 sec", + "flowFileConcurrency" : "UNBOUNDED", + "flowFileOutboundPolicy" : "STREAM_WHEN_AVAILABLE", + "funnels" : [ ], + "identifier" : 
"303615fb-749c-3917-aabb-ed4ccfe9dec1", + "inputPorts" : [ ], + "instanceIdentifier" : "321c623c-0192-1000-ffff-ffffa5255e12", + "labels" : [ { + "componentType" : "LABEL", + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "height" : 48.0, + "identifier" : "def7b942-6ce5-3285-b541-ce3e5d4033a7", + "instanceIdentifier" : "3582fbe8-0192-1000-0000-000055e080a0", + "label" : "Organization and it's address details are download from TFRS \nand loaded to LCFS database", + "position" : { + "x" : 1816.0, + "y" : 1192.0 + }, + "style" : { + "font-size" : "14px" + }, + "width" : 400.0, + "zIndex" : 0 + }, { + "componentType" : "LABEL", + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "height" : 40.0, + "identifier" : "e841f383-10dd-33db-b88a-392608e63a3d", + "instanceIdentifier" : "3581baa2-0192-1000-0000-00003f514967", + "label" : "Download user and role information from \nTFRS database to load onto LCFS database", + "position" : { + "x" : 1824.0, + "y" : 1440.0 + }, + "style" : { + "font-size" : "14px" + }, + "width" : 288.0, + "zIndex" : 0 + } ], + "name" : "Identity data transfer", + "outputPorts" : [ ], + "position" : { + "x" : 664.0, + "y" : 256.0 + }, + "processGroups" : [ ], + "processors" : [ { + "autoTerminatedRelationships" : [ "success", "failure" ], + "backoffMechanism" : "PENALIZE_FLOWFILE", + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-groovyx-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "PROCESSOR", + "concurrentlySchedulableTaskCount" : 1, + "executionNode" : "ALL", + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "1abb6d9e-4bb0-31d6-b42d-48772028c14c", + "instanceIdentifier" : "e6c63130-3eac-1b13-a947-ee0103275138", + "maxBackoffPeriod" : "10 mins", + "name" : "Users data transfer", + "penaltyDuration" : "30 sec", + "position" : { + "x" : 1824.0, + "y" : 1488.0 + }, + "properties" : { + "groovyx-failure-strategy" : "rollback", + "groovyx-script-file" : "/opt/nifi/nifi-current/nifi_scripts/user.groovy" + }, + "propertyDescriptors" : { + "groovyx-script-body" : { + "displayName" : "Script Body", + "identifiesControllerService" : false, + "name" : "groovyx-script-body", + "sensitive" : false + }, + "groovyx-failure-strategy" : { + "displayName" : "Failure strategy", + "identifiesControllerService" : false, + "name" : "groovyx-failure-strategy", + "sensitive" : false + }, + "groovyx-additional-classpath" : { + "displayName" : "Additional classpath", + "identifiesControllerService" : false, + "name" : "groovyx-additional-classpath", + "sensitive" : false + }, + "groovyx-script-file" : { + "displayName" : "Script File", + "identifiesControllerService" : false, + "name" : "groovyx-script-file", + "resourceDefinition" : { + "cardinality" : "SINGLE", + "resourceTypes" : [ "FILE" ] + }, + "sensitive" : false + } + }, + "retriedRelationships" : [ ], + "retryCount" : 10, + "runDurationMillis" : 0, + "scheduledState" : "ENABLED", + "schedulingPeriod" : "0 sec", + "schedulingStrategy" : "TIMER_DRIVEN", + "style" : { }, + "type" : "org.apache.nifi.processors.groovyx.ExecuteGroovyScript", + "yieldDuration" : "1 sec" + }, { + "autoTerminatedRelationships" : [ "success", "failure" ], + "backoffMechanism" : "PENALIZE_FLOWFILE", + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-groovyx-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "PROCESSOR", + "concurrentlySchedulableTaskCount" : 1, + 
"executionNode" : "ALL", + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "70a978dd-82ec-3c3c-ae4e-a81168947fae", + "instanceIdentifier" : "328e2539-0192-1000-0000-00007a4304c1", + "maxBackoffPeriod" : "10 mins", + "name" : "Organization data transfer", + "penaltyDuration" : "30 sec", + "position" : { + "x" : 1816.0, + "y" : 1256.0 + }, + "properties" : { + "groovyx-failure-strategy" : "rollback", + "groovyx-script-file" : "/opt/nifi/nifi-current/nifi_scripts/organization.groovy" + }, + "propertyDescriptors" : { + "groovyx-script-body" : { + "displayName" : "Script Body", + "identifiesControllerService" : false, + "name" : "groovyx-script-body", + "sensitive" : false + }, + "groovyx-failure-strategy" : { + "displayName" : "Failure strategy", + "identifiesControllerService" : false, + "name" : "groovyx-failure-strategy", + "sensitive" : false + }, + "groovyx-additional-classpath" : { + "displayName" : "Additional classpath", + "identifiesControllerService" : false, + "name" : "groovyx-additional-classpath", + "sensitive" : false + }, + "groovyx-script-file" : { + "displayName" : "Script File", + "identifiesControllerService" : false, + "name" : "groovyx-script-file", + "resourceDefinition" : { + "cardinality" : "SINGLE", + "resourceTypes" : [ "FILE" ] + }, + "sensitive" : false + } + }, + "retriedRelationships" : [ ], + "retryCount" : 10, + "runDurationMillis" : 0, + "scheduledState" : "ENABLED", + "schedulingPeriod" : "0 sec", + "schedulingStrategy" : "TIMER_DRIVEN", + "style" : { }, + "type" : "org.apache.nifi.processors.groovyx.ExecuteGroovyScript", + "yieldDuration" : "1 sec" + } ], + "remoteProcessGroups" : [ ], + "variables" : { } + }, + "flowEncodingVersion" : "1.0", + "parameterContexts" : { }, + "parameterProviders" : { } + } + } +} \ No newline at end of file diff --git a/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/407353f5-7799-429a-96f3-daed547475b8/1/1.snapshot b/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/407353f5-7799-429a-96f3-daed547475b8/1/1.snapshot new file mode 100644 index 000000000..20fc6e10f --- /dev/null +++ b/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/407353f5-7799-429a-96f3-daed547475b8/1/1.snapshot @@ -0,0 +1,1056 @@ +{ + "header" : { + "dataModelVersion" : "3" + }, + "content" : { + "flowSnapshot" : { + "externalControllerServices" : { + "fac92561-7fce-3f0d-8ad2-9915754100b0" : { + "identifier" : "fac92561-7fce-3f0d-8ad2-9915754100b0", + "name" : "AvroReader" + }, + "c3d0e746-e921-3321-ae2d-584895e9dc32" : { + "identifier" : "c3d0e746-e921-3321-ae2d-584895e9dc32", + "name" : "TFRS" + }, + "1c6bdea5-84e0-3517-82d2-ad5fc8e6432c" : { + "identifier" : "1c6bdea5-84e0-3517-82d2-ad5fc8e6432c", + "name" : "LCFS" + }, + "d8284a15-fc31-3005-847f-3aa231b2ec18" : { + "identifier" : "d8284a15-fc31-3005-847f-3aa231b2ec18", + "name" : "JsonRecordSetWriter" + }, + "9a65a6ae-dc93-3490-ae85-d9deec6cabb2" : { + "identifier" : "9a65a6ae-dc93-3490-ae85-d9deec6cabb2", + "name" : "JsonTreeReader" + } + }, + "flowContents" : { + "comments" : "", + "componentType" : "PROCESS_GROUP", + "connections" : [ { + "backPressureDataSizeThreshold" : "1 GB", + "backPressureObjectThreshold" : 10000, + "bends" : [ ], + "componentType" : "CONNECTION", + "destination" : { + "comments" : "", + "groupId" : "6f2abcd5-ee63-3462-beab-a4a52804b4ad", + "id" : "0b29bf21-e1ed-3569-8730-e7e91180081b", + "instanceIdentifier" : "08723512-4fe5-3f1b-aa72-ff19d5f5d8c4", + "name" : "PutFile", + "type" : "PROCESSOR" + }, + "flowFileExpiration" : "0 sec", + 
"groupIdentifier" : "6f2abcd5-ee63-3462-beab-a4a52804b4ad", + "identifier" : "e56ee4fc-b88f-3b08-993a-6623857af5de", + "instanceIdentifier" : "0f21456b-7f10-3abd-a351-d26e43c33f37", + "labelIndex" : 1, + "loadBalanceCompression" : "DO_NOT_COMPRESS", + "loadBalanceStrategy" : "DO_NOT_LOAD_BALANCE", + "name" : "", + "partitioningAttribute" : "", + "prioritizers" : [ ], + "selectedRelationships" : [ "success" ], + "source" : { + "comments" : "", + "groupId" : "6f2abcd5-ee63-3462-beab-a4a52804b4ad", + "id" : "fc33d742-071f-3f7f-aa46-412c50527ff4", + "instanceIdentifier" : "a51ea9e5-d33b-383c-9e97-4edffa95444b", + "name" : "UpdateAttribute", + "type" : "PROCESSOR" + }, + "zIndex" : 0 + }, { + "backPressureDataSizeThreshold" : "1 GB", + "backPressureObjectThreshold" : 10000, + "bends" : [ ], + "componentType" : "CONNECTION", + "destination" : { + "comments" : "", + "groupId" : "6f2abcd5-ee63-3462-beab-a4a52804b4ad", + "id" : "d9acf96c-02a9-355d-b6b3-094196f81d8a", + "instanceIdentifier" : "bca411e3-12f7-3711-938d-32f89e3d4509", + "name" : "ConvertRecord", + "type" : "PROCESSOR" + }, + "flowFileExpiration" : "0 sec", + "groupIdentifier" : "6f2abcd5-ee63-3462-beab-a4a52804b4ad", + "identifier" : "82ddf0ce-1d29-3f2c-849c-cfd1e63e1f88", + "instanceIdentifier" : "707beec5-5c1b-330c-b506-f234f9226a34", + "labelIndex" : 1, + "loadBalanceCompression" : "DO_NOT_COMPRESS", + "loadBalanceStrategy" : "DO_NOT_LOAD_BALANCE", + "name" : "", + "partitioningAttribute" : "", + "prioritizers" : [ ], + "selectedRelationships" : [ "success" ], + "source" : { + "comments" : "", + "groupId" : "6f2abcd5-ee63-3462-beab-a4a52804b4ad", + "id" : "9563da14-f663-35ed-b559-d733350517c9", + "instanceIdentifier" : "80f4908e-36af-346c-865b-820cf03768f6", + "name" : "ExecuteSQL", + "type" : "PROCESSOR" + }, + "zIndex" : 0 + }, { + "backPressureDataSizeThreshold" : "1 GB", + "backPressureObjectThreshold" : 10000, + "bends" : [ ], + "componentType" : "CONNECTION", + "destination" : { + "comments" : "", + "groupId" : "6f2abcd5-ee63-3462-beab-a4a52804b4ad", + "id" : "87359bdf-4312-39b2-a275-ef0293e12715", + "instanceIdentifier" : "b8a1a9cd-1b39-395b-a9b1-a414b5f971dc", + "name" : "PutDatabaseRecord", + "type" : "PROCESSOR" + }, + "flowFileExpiration" : "0 sec", + "groupIdentifier" : "6f2abcd5-ee63-3462-beab-a4a52804b4ad", + "identifier" : "fa8cdc48-556a-3a37-b8a4-9f7073f60e47", + "instanceIdentifier" : "30300a77-09b8-3598-9e30-bdd73aa8971c", + "labelIndex" : 1, + "loadBalanceCompression" : "DO_NOT_COMPRESS", + "loadBalanceStrategy" : "DO_NOT_LOAD_BALANCE", + "name" : "", + "partitioningAttribute" : "", + "prioritizers" : [ ], + "selectedRelationships" : [ "success" ], + "source" : { + "comments" : "", + "groupId" : "6f2abcd5-ee63-3462-beab-a4a52804b4ad", + "id" : "64ce57de-1c9b-3951-aaa6-cd2bfda3329c", + "instanceIdentifier" : "6b36bbac-bb38-36d3-b63a-df6e23483e7c", + "name" : "ExecuteScript", + "type" : "PROCESSOR" + }, + "zIndex" : 0 + }, { + "backPressureDataSizeThreshold" : "1 GB", + "backPressureObjectThreshold" : 10000, + "bends" : [ ], + "componentType" : "CONNECTION", + "destination" : { + "comments" : "", + "groupId" : "6f2abcd5-ee63-3462-beab-a4a52804b4ad", + "id" : "fc33d742-071f-3f7f-aa46-412c50527ff4", + "instanceIdentifier" : "a51ea9e5-d33b-383c-9e97-4edffa95444b", + "name" : "UpdateAttribute", + "type" : "PROCESSOR" + }, + "flowFileExpiration" : "0 sec", + "groupIdentifier" : "6f2abcd5-ee63-3462-beab-a4a52804b4ad", + "identifier" : "3df016fd-6ef5-336b-a704-86f168f57fb9", + "instanceIdentifier" : 
"2e42cfaa-f178-3761-b7f4-db41c9ffe0cc", + "labelIndex" : 1, + "loadBalanceCompression" : "DO_NOT_COMPRESS", + "loadBalanceStrategy" : "DO_NOT_LOAD_BALANCE", + "name" : "", + "partitioningAttribute" : "", + "prioritizers" : [ ], + "selectedRelationships" : [ "failure" ], + "source" : { + "comments" : "", + "groupId" : "6f2abcd5-ee63-3462-beab-a4a52804b4ad", + "id" : "64ce57de-1c9b-3951-aaa6-cd2bfda3329c", + "instanceIdentifier" : "6b36bbac-bb38-36d3-b63a-df6e23483e7c", + "name" : "ExecuteScript", + "type" : "PROCESSOR" + }, + "zIndex" : 0 + }, { + "backPressureDataSizeThreshold" : "1 GB", + "backPressureObjectThreshold" : 10000, + "bends" : [ ], + "componentType" : "CONNECTION", + "destination" : { + "comments" : "", + "groupId" : "6f2abcd5-ee63-3462-beab-a4a52804b4ad", + "id" : "0b29bf21-e1ed-3569-8730-e7e91180081b", + "instanceIdentifier" : "08723512-4fe5-3f1b-aa72-ff19d5f5d8c4", + "name" : "PutFile", + "type" : "PROCESSOR" + }, + "flowFileExpiration" : "0 sec", + "groupIdentifier" : "6f2abcd5-ee63-3462-beab-a4a52804b4ad", + "identifier" : "562e717d-8ea0-3207-89c3-f64a7b189172", + "instanceIdentifier" : "7740445d-4289-3057-8109-e849425aec2f", + "labelIndex" : 1, + "loadBalanceCompression" : "DO_NOT_COMPRESS", + "loadBalanceStrategy" : "DO_NOT_LOAD_BALANCE", + "name" : "", + "partitioningAttribute" : "", + "prioritizers" : [ ], + "selectedRelationships" : [ "success" ], + "source" : { + "comments" : "", + "groupId" : "6f2abcd5-ee63-3462-beab-a4a52804b4ad", + "id" : "85d8f91c-b898-3053-8ac7-08ab9765d990", + "instanceIdentifier" : "820434ca-3017-3280-b212-308bcb097ed6", + "name" : "UpdateAttribute", + "type" : "PROCESSOR" + }, + "zIndex" : 0 + }, { + "backPressureDataSizeThreshold" : "1 GB", + "backPressureObjectThreshold" : 10000, + "bends" : [ ], + "componentType" : "CONNECTION", + "destination" : { + "comments" : "", + "groupId" : "6f2abcd5-ee63-3462-beab-a4a52804b4ad", + "id" : "64ce57de-1c9b-3951-aaa6-cd2bfda3329c", + "instanceIdentifier" : "6b36bbac-bb38-36d3-b63a-df6e23483e7c", + "name" : "ExecuteScript", + "type" : "PROCESSOR" + }, + "flowFileExpiration" : "0 sec", + "groupIdentifier" : "6f2abcd5-ee63-3462-beab-a4a52804b4ad", + "identifier" : "505398ac-079c-3588-9cac-661492ca6d4c", + "instanceIdentifier" : "425a79e2-9aae-3358-a467-e080897d85fe", + "labelIndex" : 1, + "loadBalanceCompression" : "DO_NOT_COMPRESS", + "loadBalanceStrategy" : "DO_NOT_LOAD_BALANCE", + "name" : "", + "partitioningAttribute" : "", + "prioritizers" : [ ], + "selectedRelationships" : [ "split" ], + "source" : { + "comments" : "", + "groupId" : "6f2abcd5-ee63-3462-beab-a4a52804b4ad", + "id" : "4623015d-35a6-36e6-90bc-bca8ae5ded23", + "instanceIdentifier" : "ffdfcdb5-7a07-347f-80bc-b776c9030399", + "name" : "SplitJson", + "type" : "PROCESSOR" + }, + "zIndex" : 0 + }, { + "backPressureDataSizeThreshold" : "1 GB", + "backPressureObjectThreshold" : 10000, + "bends" : [ ], + "componentType" : "CONNECTION", + "destination" : { + "comments" : "", + "groupId" : "6f2abcd5-ee63-3462-beab-a4a52804b4ad", + "id" : "85d8f91c-b898-3053-8ac7-08ab9765d990", + "instanceIdentifier" : "820434ca-3017-3280-b212-308bcb097ed6", + "name" : "UpdateAttribute", + "type" : "PROCESSOR" + }, + "flowFileExpiration" : "0 sec", + "groupIdentifier" : "6f2abcd5-ee63-3462-beab-a4a52804b4ad", + "identifier" : "7fdb70bd-d010-3763-89d9-fac177ec42f6", + "instanceIdentifier" : "bf7b40e3-e3f2-315c-a278-52e2e580deb6", + "labelIndex" : 1, + "loadBalanceCompression" : "DO_NOT_COMPRESS", + "loadBalanceStrategy" : "DO_NOT_LOAD_BALANCE", + "name" : "", + 
"partitioningAttribute" : "", + "prioritizers" : [ ], + "selectedRelationships" : [ "failure" ], + "source" : { + "comments" : "", + "groupId" : "6f2abcd5-ee63-3462-beab-a4a52804b4ad", + "id" : "87359bdf-4312-39b2-a275-ef0293e12715", + "instanceIdentifier" : "b8a1a9cd-1b39-395b-a9b1-a414b5f971dc", + "name" : "PutDatabaseRecord", + "type" : "PROCESSOR" + }, + "zIndex" : 0 + }, { + "backPressureDataSizeThreshold" : "1 GB", + "backPressureObjectThreshold" : 10000, + "bends" : [ ], + "componentType" : "CONNECTION", + "destination" : { + "comments" : "", + "groupId" : "6f2abcd5-ee63-3462-beab-a4a52804b4ad", + "id" : "4623015d-35a6-36e6-90bc-bca8ae5ded23", + "instanceIdentifier" : "ffdfcdb5-7a07-347f-80bc-b776c9030399", + "name" : "SplitJson", + "type" : "PROCESSOR" + }, + "flowFileExpiration" : "0 sec", + "groupIdentifier" : "6f2abcd5-ee63-3462-beab-a4a52804b4ad", + "identifier" : "fa14a635-937c-3f22-a820-aac51786c40b", + "instanceIdentifier" : "e704b8f4-50f0-38b2-85b3-2b29e71ad95c", + "labelIndex" : 1, + "loadBalanceCompression" : "DO_NOT_COMPRESS", + "loadBalanceStrategy" : "DO_NOT_LOAD_BALANCE", + "name" : "", + "partitioningAttribute" : "", + "prioritizers" : [ ], + "selectedRelationships" : [ "success" ], + "source" : { + "comments" : "", + "groupId" : "6f2abcd5-ee63-3462-beab-a4a52804b4ad", + "id" : "d9acf96c-02a9-355d-b6b3-094196f81d8a", + "instanceIdentifier" : "bca411e3-12f7-3711-938d-32f89e3d4509", + "name" : "ConvertRecord", + "type" : "PROCESSOR" + }, + "zIndex" : 0 + } ], + "controllerServices" : [ ], + "defaultBackPressureDataSizeThreshold" : "1 GB", + "defaultBackPressureObjectThreshold" : 10000, + "defaultFlowFileExpiration" : "0 sec", + "flowFileConcurrency" : "UNBOUNDED", + "flowFileOutboundPolicy" : "STREAM_WHEN_AVAILABLE", + "funnels" : [ ], + "identifier" : "6f2abcd5-ee63-3462-beab-a4a52804b4ad", + "inputPorts" : [ ], + "instanceIdentifier" : "ec253c89-8105-127b-ffff-fffffccd56b0", + "labels" : [ ], + "name" : "Fuel code", + "outputPorts" : [ ], + "position" : { + "x" : 736.0, + "y" : 304.0 + }, + "processGroups" : [ ], + "processors" : [ { + "autoTerminatedRelationships" : [ "success", "failure" ], + "backoffMechanism" : "PENALIZE_FLOWFILE", + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-standard-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "PROCESSOR", + "concurrentlySchedulableTaskCount" : 1, + "executionNode" : "ALL", + "groupIdentifier" : "6f2abcd5-ee63-3462-beab-a4a52804b4ad", + "identifier" : "0b29bf21-e1ed-3569-8730-e7e91180081b", + "instanceIdentifier" : "08723512-4fe5-3f1b-aa72-ff19d5f5d8c4", + "maxBackoffPeriod" : "10 mins", + "name" : "PutFile", + "penaltyDuration" : "5 min", + "position" : { + "x" : 1804.000003307729, + "y" : 905.0000055923916 + }, + "properties" : { + "Create Missing Directories" : "true", + "Directory" : "/opt/nifi/nifi-current/nifi_output", + "Conflict Resolution Strategy" : "replace" + }, + "propertyDescriptors" : { + "Group" : { + "displayName" : "Group", + "identifiesControllerService" : false, + "name" : "Group", + "sensitive" : false + }, + "Owner" : { + "displayName" : "Owner", + "identifiesControllerService" : false, + "name" : "Owner", + "sensitive" : false + }, + "Create Missing Directories" : { + "displayName" : "Create Missing Directories", + "identifiesControllerService" : false, + "name" : "Create Missing Directories", + "sensitive" : false + }, + "Permissions" : { + "displayName" : "Permissions", + "identifiesControllerService" : false, + "name" : 
"Permissions", + "sensitive" : false + }, + "Maximum File Count" : { + "displayName" : "Maximum File Count", + "identifiesControllerService" : false, + "name" : "Maximum File Count", + "sensitive" : false + }, + "Last Modified Time" : { + "displayName" : "Last Modified Time", + "identifiesControllerService" : false, + "name" : "Last Modified Time", + "sensitive" : false + }, + "Directory" : { + "displayName" : "Directory", + "identifiesControllerService" : false, + "name" : "Directory", + "sensitive" : false + }, + "Conflict Resolution Strategy" : { + "displayName" : "Conflict Resolution Strategy", + "identifiesControllerService" : false, + "name" : "Conflict Resolution Strategy", + "sensitive" : false + } + }, + "retriedRelationships" : [ ], + "retryCount" : 10, + "runDurationMillis" : 0, + "scheduledState" : "ENABLED", + "schedulingPeriod" : "1 sec", + "schedulingStrategy" : "TIMER_DRIVEN", + "style" : { }, + "type" : "org.apache.nifi.processors.standard.PutFile", + "yieldDuration" : "1 sec" + }, { + "autoTerminatedRelationships" : [ ], + "backoffMechanism" : "PENALIZE_FLOWFILE", + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-scripting-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "PROCESSOR", + "concurrentlySchedulableTaskCount" : 1, + "executionNode" : "ALL", + "groupIdentifier" : "6f2abcd5-ee63-3462-beab-a4a52804b4ad", + "identifier" : "64ce57de-1c9b-3951-aaa6-cd2bfda3329c", + "instanceIdentifier" : "6b36bbac-bb38-36d3-b63a-df6e23483e7c", + "maxBackoffPeriod" : "10 mins", + "name" : "ExecuteScript", + "penaltyDuration" : "5 min", + "position" : { + "x" : 533.000003307729, + "y" : 782.0000055923916 + }, + "properties" : { + "Script File" : "/opt/nifi/nifi-current/nifi_scripts/fuel_code.groovy", + "Script Engine" : "Groovy" + }, + "propertyDescriptors" : { + "Script File" : { + "displayName" : "Script File", + "identifiesControllerService" : false, + "name" : "Script File", + "resourceDefinition" : { + "cardinality" : "SINGLE", + "resourceTypes" : [ "FILE" ] + }, + "sensitive" : false + }, + "Script Engine" : { + "displayName" : "Script Engine", + "identifiesControllerService" : false, + "name" : "Script Engine", + "sensitive" : false + }, + "Script Body" : { + "displayName" : "Script Body", + "identifiesControllerService" : false, + "name" : "Script Body", + "sensitive" : false + }, + "Module Directory" : { + "displayName" : "Module Directory", + "identifiesControllerService" : false, + "name" : "Module Directory", + "resourceDefinition" : { + "cardinality" : "MULTIPLE", + "resourceTypes" : [ "FILE", "DIRECTORY" ] + }, + "sensitive" : false + } + }, + "retriedRelationships" : [ ], + "retryCount" : 10, + "runDurationMillis" : 0, + "scheduledState" : "ENABLED", + "schedulingPeriod" : "0 sec", + "schedulingStrategy" : "TIMER_DRIVEN", + "style" : { }, + "type" : "org.apache.nifi.processors.script.ExecuteScript", + "yieldDuration" : "1 sec" + }, { + "autoTerminatedRelationships" : [ ], + "backoffMechanism" : "PENALIZE_FLOWFILE", + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-update-attribute-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "PROCESSOR", + "concurrentlySchedulableTaskCount" : 1, + "executionNode" : "ALL", + "groupIdentifier" : "6f2abcd5-ee63-3462-beab-a4a52804b4ad", + "identifier" : "85d8f91c-b898-3053-8ac7-08ab9765d990", + "instanceIdentifier" : "820434ca-3017-3280-b212-308bcb097ed6", + "maxBackoffPeriod" : "10 mins", + "name" : 
"UpdateAttribute", + "penaltyDuration" : "10 min", + "position" : { + "x" : 1172.000003307729, + "y" : 1001.0000055923916 + }, + "properties" : { + "filename" : "create_record_error_${now():format('yyyy-MM-dd_HH-mm-ss')}_${uuid}.json", + "Store State" : "Do not store state", + "canonical-value-lookup-cache-size" : "100" + }, + "propertyDescriptors" : { + "Delete Attributes Expression" : { + "displayName" : "Delete Attributes Expression", + "identifiesControllerService" : false, + "name" : "Delete Attributes Expression", + "sensitive" : false + }, + "filename" : { + "displayName" : "filename", + "identifiesControllerService" : false, + "name" : "filename", + "sensitive" : false + }, + "Store State" : { + "displayName" : "Store State", + "identifiesControllerService" : false, + "name" : "Store State", + "sensitive" : false + }, + "canonical-value-lookup-cache-size" : { + "displayName" : "Cache Value Lookup Cache Size", + "identifiesControllerService" : false, + "name" : "canonical-value-lookup-cache-size", + "sensitive" : false + }, + "Stateful Variables Initial Value" : { + "displayName" : "Stateful Variables Initial Value", + "identifiesControllerService" : false, + "name" : "Stateful Variables Initial Value", + "sensitive" : false + } + }, + "retriedRelationships" : [ ], + "retryCount" : 10, + "runDurationMillis" : 25, + "scheduledState" : "ENABLED", + "schedulingPeriod" : "0 sec", + "schedulingStrategy" : "TIMER_DRIVEN", + "style" : { }, + "type" : "org.apache.nifi.processors.attributes.UpdateAttribute", + "yieldDuration" : "1 sec" + }, { + "autoTerminatedRelationships" : [ "failure" ], + "backoffMechanism" : "PENALIZE_FLOWFILE", + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-standard-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "PROCESSOR", + "concurrentlySchedulableTaskCount" : 1, + "executionNode" : "ALL", + "groupIdentifier" : "6f2abcd5-ee63-3462-beab-a4a52804b4ad", + "identifier" : "d9acf96c-02a9-355d-b6b3-094196f81d8a", + "instanceIdentifier" : "bca411e3-12f7-3711-938d-32f89e3d4509", + "maxBackoffPeriod" : "10 mins", + "name" : "ConvertRecord", + "penaltyDuration" : "5 min", + "position" : { + "x" : 533.000003307729, + "y" : 334.00000559239163 + }, + "properties" : { + "record-writer" : "d8284a15-fc31-3005-847f-3aa231b2ec18", + "record-reader" : "fac92561-7fce-3f0d-8ad2-9915754100b0", + "include-zero-record-flowfiles" : "true" + }, + "propertyDescriptors" : { + "record-writer" : { + "displayName" : "Record Writer", + "identifiesControllerService" : true, + "name" : "record-writer", + "sensitive" : false + }, + "record-reader" : { + "displayName" : "Record Reader", + "identifiesControllerService" : true, + "name" : "record-reader", + "sensitive" : false + }, + "include-zero-record-flowfiles" : { + "displayName" : "Include Zero Record FlowFiles", + "identifiesControllerService" : false, + "name" : "include-zero-record-flowfiles", + "sensitive" : false + } + }, + "retriedRelationships" : [ ], + "retryCount" : 10, + "runDurationMillis" : 0, + "scheduledState" : "ENABLED", + "schedulingPeriod" : "0 sec", + "schedulingStrategy" : "TIMER_DRIVEN", + "style" : { }, + "type" : "org.apache.nifi.processors.standard.ConvertRecord", + "yieldDuration" : "1 sec" + }, { + "autoTerminatedRelationships" : [ "success", "retry" ], + "backoffMechanism" : "PENALIZE_FLOWFILE", + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-standard-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", 
+ "componentType" : "PROCESSOR", + "concurrentlySchedulableTaskCount" : 1, + "executionNode" : "ALL", + "groupIdentifier" : "6f2abcd5-ee63-3462-beab-a4a52804b4ad", + "identifier" : "87359bdf-4312-39b2-a275-ef0293e12715", + "instanceIdentifier" : "b8a1a9cd-1b39-395b-a9b1-a414b5f971dc", + "maxBackoffPeriod" : "10 mins", + "name" : "PutDatabaseRecord", + "penaltyDuration" : "5 min", + "position" : { + "x" : 533.000003307729, + "y" : 1006.0000055923916 + }, + "properties" : { + "put-db-record-allow-multiple-statements" : "false", + "table-schema-cache-size" : "100", + "put-db-record-quoted-table-identifiers" : "false", + "put-db-record-unmatched-column-behavior" : "Fail on Unmatched Columns", + "put-db-record-translate-field-names" : "true", + "put-db-record-dcbp-service" : "1c6bdea5-84e0-3517-82d2-ad5fc8e6432c", + "put-db-record-query-timeout" : "0 seconds", + "rollback-on-failure" : "false", + "put-db-record-statement-type" : "INSERT", + "put-db-record-binary-format" : "UTF-8", + "db-type" : "PostgreSQL", + "put-db-record-quoted-identifiers" : "false", + "put-db-record-table-name" : "fuel_code", + "put-db-record-unmatched-field-behavior" : "Ignore Unmatched Fields", + "put-db-record-max-batch-size" : "1000", + "put-db-record-record-reader" : "9a65a6ae-dc93-3490-ae85-d9deec6cabb2", + "database-session-autocommit" : "false" + }, + "propertyDescriptors" : { + "put-db-record-allow-multiple-statements" : { + "displayName" : "Allow Multiple SQL Statements", + "identifiesControllerService" : false, + "name" : "put-db-record-allow-multiple-statements", + "sensitive" : false + }, + "table-schema-cache-size" : { + "displayName" : "Table Schema Cache Size", + "identifiesControllerService" : false, + "name" : "table-schema-cache-size", + "sensitive" : false + }, + "put-db-record-schema-name" : { + "displayName" : "Schema Name", + "identifiesControllerService" : false, + "name" : "put-db-record-schema-name", + "sensitive" : false + }, + "put-db-record-field-containing-sql" : { + "displayName" : "Field Containing SQL", + "identifiesControllerService" : false, + "name" : "put-db-record-field-containing-sql", + "sensitive" : false + }, + "put-db-record-quoted-table-identifiers" : { + "displayName" : "Quote Table Identifiers", + "identifiesControllerService" : false, + "name" : "put-db-record-quoted-table-identifiers", + "sensitive" : false + }, + "Statement Type Record Path" : { + "displayName" : "Statement Type Record Path", + "identifiesControllerService" : false, + "name" : "Statement Type Record Path", + "sensitive" : false + }, + "put-db-record-unmatched-column-behavior" : { + "displayName" : "Unmatched Column Behavior", + "identifiesControllerService" : false, + "name" : "put-db-record-unmatched-column-behavior", + "sensitive" : false + }, + "put-db-record-catalog-name" : { + "displayName" : "Catalog Name", + "identifiesControllerService" : false, + "name" : "put-db-record-catalog-name", + "sensitive" : false + }, + "put-db-record-translate-field-names" : { + "displayName" : "Translate Field Names", + "identifiesControllerService" : false, + "name" : "put-db-record-translate-field-names", + "sensitive" : false + }, + "put-db-record-dcbp-service" : { + "displayName" : "Database Connection Pooling Service", + "identifiesControllerService" : true, + "name" : "put-db-record-dcbp-service", + "sensitive" : false + }, + "put-db-record-query-timeout" : { + "displayName" : "Max Wait Time", + "identifiesControllerService" : false, + "name" : "put-db-record-query-timeout", + "sensitive" : false + }, + 
"rollback-on-failure" : { + "displayName" : "Rollback On Failure", + "identifiesControllerService" : false, + "name" : "rollback-on-failure", + "sensitive" : false + }, + "put-db-record-statement-type" : { + "displayName" : "Statement Type", + "identifiesControllerService" : false, + "name" : "put-db-record-statement-type", + "sensitive" : false + }, + "put-db-record-binary-format" : { + "displayName" : "Binary String Format", + "identifiesControllerService" : false, + "name" : "put-db-record-binary-format", + "sensitive" : false + }, + "db-type" : { + "displayName" : "Database Type", + "identifiesControllerService" : false, + "name" : "db-type", + "sensitive" : false + }, + "put-db-record-update-keys" : { + "displayName" : "Update Keys", + "identifiesControllerService" : false, + "name" : "put-db-record-update-keys", + "sensitive" : false + }, + "put-db-record-quoted-identifiers" : { + "displayName" : "Quote Column Identifiers", + "identifiesControllerService" : false, + "name" : "put-db-record-quoted-identifiers", + "sensitive" : false + }, + "put-db-record-table-name" : { + "displayName" : "Table Name", + "identifiesControllerService" : false, + "name" : "put-db-record-table-name", + "sensitive" : false + }, + "put-db-record-unmatched-field-behavior" : { + "displayName" : "Unmatched Field Behavior", + "identifiesControllerService" : false, + "name" : "put-db-record-unmatched-field-behavior", + "sensitive" : false + }, + "put-db-record-max-batch-size" : { + "displayName" : "Maximum Batch Size", + "identifiesControllerService" : false, + "name" : "put-db-record-max-batch-size", + "sensitive" : false + }, + "put-db-record-record-reader" : { + "displayName" : "Record Reader", + "identifiesControllerService" : true, + "name" : "put-db-record-record-reader", + "sensitive" : false + }, + "Data Record Path" : { + "displayName" : "Data Record Path", + "identifiesControllerService" : false, + "name" : "Data Record Path", + "sensitive" : false + }, + "database-session-autocommit" : { + "displayName" : "Database Session AutoCommit", + "identifiesControllerService" : false, + "name" : "database-session-autocommit", + "sensitive" : false + } + }, + "retriedRelationships" : [ ], + "retryCount" : 10, + "runDurationMillis" : 0, + "scheduledState" : "ENABLED", + "schedulingPeriod" : "0 sec", + "schedulingStrategy" : "TIMER_DRIVEN", + "style" : { }, + "type" : "org.apache.nifi.processors.standard.PutDatabaseRecord", + "yieldDuration" : "1 sec" + }, { + "autoTerminatedRelationships" : [ "failure" ], + "backoffMechanism" : "PENALIZE_FLOWFILE", + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-standard-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "PROCESSOR", + "concurrentlySchedulableTaskCount" : 1, + "executionNode" : "ALL", + "groupIdentifier" : "6f2abcd5-ee63-3462-beab-a4a52804b4ad", + "identifier" : "9563da14-f663-35ed-b559-d733350517c9", + "instanceIdentifier" : "80f4908e-36af-346c-865b-820cf03768f6", + "maxBackoffPeriod" : "10 mins", + "name" : "ExecuteSQL", + "penaltyDuration" : "5 min", + "position" : { + "x" : 533.000003307729, + "y" : 118.00000559239163 + }, + "properties" : { + "esql-max-rows" : "10000", + "dbf-default-precision" : "10", + "Max Wait Time" : "0 seconds", + "Database Connection Pooling Service" : "c3d0e746-e921-3321-ae2d-584895e9dc32", + "esql-auto-commit" : "true", + "dbf-user-logical-types" : "false", + "dbf-default-scale" : "0", + "compression-format" : "NONE", + "esql-output-batch-size" : "0", + 
"esql-fetch-size" : "0", + "SQL select query" : "SELECT * FROM fuel_code", + "dbf-normalize" : "false" + }, + "propertyDescriptors" : { + "esql-max-rows" : { + "displayName" : "Max Rows Per Flow File", + "identifiesControllerService" : false, + "name" : "esql-max-rows", + "sensitive" : false + }, + "dbf-default-precision" : { + "displayName" : "Default Decimal Precision", + "identifiesControllerService" : false, + "name" : "dbf-default-precision", + "sensitive" : false + }, + "Max Wait Time" : { + "displayName" : "Max Wait Time", + "identifiesControllerService" : false, + "name" : "Max Wait Time", + "sensitive" : false + }, + "Database Connection Pooling Service" : { + "displayName" : "Database Connection Pooling Service", + "identifiesControllerService" : true, + "name" : "Database Connection Pooling Service", + "sensitive" : false + }, + "sql-post-query" : { + "displayName" : "SQL Post-Query", + "identifiesControllerService" : false, + "name" : "sql-post-query", + "sensitive" : false + }, + "esql-auto-commit" : { + "displayName" : "Set Auto Commit", + "identifiesControllerService" : false, + "name" : "esql-auto-commit", + "sensitive" : false + }, + "dbf-user-logical-types" : { + "displayName" : "Use Avro Logical Types", + "identifiesControllerService" : false, + "name" : "dbf-user-logical-types", + "sensitive" : false + }, + "dbf-default-scale" : { + "displayName" : "Default Decimal Scale", + "identifiesControllerService" : false, + "name" : "dbf-default-scale", + "sensitive" : false + }, + "sql-pre-query" : { + "displayName" : "SQL Pre-Query", + "identifiesControllerService" : false, + "name" : "sql-pre-query", + "sensitive" : false + }, + "compression-format" : { + "displayName" : "Compression Format", + "identifiesControllerService" : false, + "name" : "compression-format", + "sensitive" : false + }, + "esql-output-batch-size" : { + "displayName" : "Output Batch Size", + "identifiesControllerService" : false, + "name" : "esql-output-batch-size", + "sensitive" : false + }, + "esql-fetch-size" : { + "displayName" : "Fetch Size", + "identifiesControllerService" : false, + "name" : "esql-fetch-size", + "sensitive" : false + }, + "SQL select query" : { + "displayName" : "SQL select query", + "identifiesControllerService" : false, + "name" : "SQL select query", + "sensitive" : false + }, + "dbf-normalize" : { + "displayName" : "Normalize Table/Column Names", + "identifiesControllerService" : false, + "name" : "dbf-normalize", + "sensitive" : false + } + }, + "retriedRelationships" : [ ], + "retryCount" : 10, + "runDurationMillis" : 0, + "scheduledState" : "ENABLED", + "schedulingPeriod" : "365 days", + "schedulingStrategy" : "TIMER_DRIVEN", + "style" : { }, + "type" : "org.apache.nifi.processors.standard.ExecuteSQL", + "yieldDuration" : "1 sec" + }, { + "autoTerminatedRelationships" : [ ], + "backoffMechanism" : "PENALIZE_FLOWFILE", + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-update-attribute-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "PROCESSOR", + "concurrentlySchedulableTaskCount" : 1, + "executionNode" : "ALL", + "groupIdentifier" : "6f2abcd5-ee63-3462-beab-a4a52804b4ad", + "identifier" : "fc33d742-071f-3f7f-aa46-412c50527ff4", + "instanceIdentifier" : "a51ea9e5-d33b-383c-9e97-4edffa95444b", + "maxBackoffPeriod" : "10 mins", + "name" : "UpdateAttribute", + "penaltyDuration" : "10 min", + "position" : { + "x" : 1173.000003307729, + "y" : 784.0000055923916 + }, + "properties" : { + "filename" : 
"script_error_${now():format('yyyy-MM-dd_HH-mm-ss')}_${uuid}.json", + "Store State" : "Do not store state", + "canonical-value-lookup-cache-size" : "100" + }, + "propertyDescriptors" : { + "Delete Attributes Expression" : { + "displayName" : "Delete Attributes Expression", + "identifiesControllerService" : false, + "name" : "Delete Attributes Expression", + "sensitive" : false + }, + "filename" : { + "displayName" : "filename", + "identifiesControllerService" : false, + "name" : "filename", + "sensitive" : false + }, + "Store State" : { + "displayName" : "Store State", + "identifiesControllerService" : false, + "name" : "Store State", + "sensitive" : false + }, + "canonical-value-lookup-cache-size" : { + "displayName" : "Cache Value Lookup Cache Size", + "identifiesControllerService" : false, + "name" : "canonical-value-lookup-cache-size", + "sensitive" : false + }, + "Stateful Variables Initial Value" : { + "displayName" : "Stateful Variables Initial Value", + "identifiesControllerService" : false, + "name" : "Stateful Variables Initial Value", + "sensitive" : false + } + }, + "retriedRelationships" : [ ], + "retryCount" : 10, + "runDurationMillis" : 25, + "scheduledState" : "ENABLED", + "schedulingPeriod" : "0 sec", + "schedulingStrategy" : "TIMER_DRIVEN", + "style" : { }, + "type" : "org.apache.nifi.processors.attributes.UpdateAttribute", + "yieldDuration" : "1 sec" + }, { + "autoTerminatedRelationships" : [ "original", "failure" ], + "backoffMechanism" : "PENALIZE_FLOWFILE", + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-standard-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "PROCESSOR", + "concurrentlySchedulableTaskCount" : 1, + "executionNode" : "ALL", + "groupIdentifier" : "6f2abcd5-ee63-3462-beab-a4a52804b4ad", + "identifier" : "4623015d-35a6-36e6-90bc-bca8ae5ded23", + "instanceIdentifier" : "ffdfcdb5-7a07-347f-80bc-b776c9030399", + "maxBackoffPeriod" : "10 mins", + "name" : "SplitJson", + "penaltyDuration" : "5 min", + "position" : { + "x" : 533.000003307729, + "y" : 558.0000055923916 + }, + "properties" : { + "Max String Length" : "20 MB", + "Null Value Representation" : "empty string", + "JsonPath Expression" : "$[*]" + }, + "propertyDescriptors" : { + "Max String Length" : { + "displayName" : "Max String Length", + "identifiesControllerService" : false, + "name" : "Max String Length", + "sensitive" : false + }, + "Null Value Representation" : { + "displayName" : "Null Value Representation", + "identifiesControllerService" : false, + "name" : "Null Value Representation", + "sensitive" : false + }, + "JsonPath Expression" : { + "displayName" : "JsonPath Expression", + "identifiesControllerService" : false, + "name" : "JsonPath Expression", + "sensitive" : false + } + }, + "retriedRelationships" : [ ], + "retryCount" : 10, + "runDurationMillis" : 0, + "scheduledState" : "ENABLED", + "schedulingPeriod" : "0 sec", + "schedulingStrategy" : "TIMER_DRIVEN", + "style" : { }, + "type" : "org.apache.nifi.processors.standard.SplitJson", + "yieldDuration" : "1 sec" + } ], + "remoteProcessGroups" : [ ], + "variables" : { } + }, + "flowEncodingVersion" : "1.0", + "parameterContexts" : { }, + "parameterProviders" : { } + } + } +} \ No newline at end of file diff --git a/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/852e916a-6f9d-4001-90cc-cbe8ec7248cc/1/1.snapshot b/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/852e916a-6f9d-4001-90cc-cbe8ec7248cc/1/1.snapshot new file mode 100644 index 
000000000..3e1cbcd52 --- /dev/null +++ b/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/852e916a-6f9d-4001-90cc-cbe8ec7248cc/1/1.snapshot @@ -0,0 +1,668 @@ +{ + "header" : { + "dataModelVersion" : "3" + }, + "content" : { + "flowSnapshot" : { + "externalControllerServices" : { + "c3d0e746-e921-3321-ae2d-584895e9dc32" : { + "identifier" : "c3d0e746-e921-3321-ae2d-584895e9dc32", + "name" : "TFRS" + }, + "1c6bdea5-84e0-3517-82d2-ad5fc8e6432c" : { + "identifier" : "1c6bdea5-84e0-3517-82d2-ad5fc8e6432c", + "name" : "LCFS" + } + }, + "flowContents" : { + "comments" : "", + "componentType" : "PROCESS_GROUP", + "connections" : [ ], + "controllerServices" : [ { + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-record-serialization-services-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "CONTROLLER_SERVICE", + "controllerServiceApis" : [ { + "bundle" : { + "artifact" : "nifi-standard-services-api-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "type" : "org.apache.nifi.serialization.RecordReaderFactory" + } ], + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "eb16f9e4-c948-3849-8efe-c3079a6ba952", + "instanceIdentifier" : "9cceebb4-8e43-3ac7-8b6d-cc0dce4c9697", + "name" : "AvroReader", + "properties" : { + "schema-name" : "${schema.name}", + "cache-size" : "1000", + "schema-access-strategy" : "embedded-avro-schema", + "schema-text" : "${avro.schema}" + }, + "propertyDescriptors" : { + "schema-branch" : { + "displayName" : "Schema Branch", + "identifiesControllerService" : false, + "name" : "schema-branch", + "sensitive" : false + }, + "schema-name" : { + "displayName" : "Schema Name", + "identifiesControllerService" : false, + "name" : "schema-name", + "sensitive" : false + }, + "cache-size" : { + "displayName" : "Cache Size", + "identifiesControllerService" : false, + "name" : "cache-size", + "sensitive" : false + }, + "schema-registry" : { + "displayName" : "Schema Registry", + "identifiesControllerService" : true, + "name" : "schema-registry", + "sensitive" : false + }, + "schema-access-strategy" : { + "displayName" : "Schema Access Strategy", + "identifiesControllerService" : false, + "name" : "schema-access-strategy", + "sensitive" : false + }, + "schema-version" : { + "displayName" : "Schema Version", + "identifiesControllerService" : false, + "name" : "schema-version", + "sensitive" : false + }, + "schema-text" : { + "displayName" : "Schema Text", + "identifiesControllerService" : false, + "name" : "schema-text", + "sensitive" : false + } + }, + "scheduledState" : "DISABLED", + "type" : "org.apache.nifi.avro.AvroReader" + }, { + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-record-serialization-services-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "CONTROLLER_SERVICE", + "controllerServiceApis" : [ { + "bundle" : { + "artifact" : "nifi-standard-services-api-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "type" : "org.apache.nifi.serialization.RecordSetWriterFactory" + } ], + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "e3d3c133-8004-3880-8150-388dfd06818c", + "instanceIdentifier" : "e73bd6b0-8a3e-395c-bfb8-bc1f4e72e1f7", + "name" : "JsonRecordSetWriter", + "properties" : { + "Allow Scientific Notation" : "false", + "compression-level" : "1", + "Pretty Print JSON" : "false", + "compression-format" : "none", + "Schema Write Strategy" : 
"no-schema", + "suppress-nulls" : "never-suppress", + "output-grouping" : "output-array", + "schema-name" : "${schema.name}", + "schema-access-strategy" : "inherit-record-schema", + "schema-protocol-version" : "1", + "schema-text" : "${avro.schema}" + }, + "propertyDescriptors" : { + "schema-branch" : { + "displayName" : "Schema Branch", + "identifiesControllerService" : false, + "name" : "schema-branch", + "sensitive" : false + }, + "Allow Scientific Notation" : { + "displayName" : "Allow Scientific Notation", + "identifiesControllerService" : false, + "name" : "Allow Scientific Notation", + "sensitive" : false + }, + "compression-level" : { + "displayName" : "Compression Level", + "identifiesControllerService" : false, + "name" : "compression-level", + "sensitive" : false + }, + "schema-cache" : { + "displayName" : "Schema Cache", + "identifiesControllerService" : true, + "name" : "schema-cache", + "sensitive" : false + }, + "Timestamp Format" : { + "displayName" : "Timestamp Format", + "identifiesControllerService" : false, + "name" : "Timestamp Format", + "sensitive" : false + }, + "Date Format" : { + "displayName" : "Date Format", + "identifiesControllerService" : false, + "name" : "Date Format", + "sensitive" : false + }, + "Pretty Print JSON" : { + "displayName" : "Pretty Print JSON", + "identifiesControllerService" : false, + "name" : "Pretty Print JSON", + "sensitive" : false + }, + "compression-format" : { + "displayName" : "Compression Format", + "identifiesControllerService" : false, + "name" : "compression-format", + "sensitive" : false + }, + "Schema Write Strategy" : { + "displayName" : "Schema Write Strategy", + "identifiesControllerService" : false, + "name" : "Schema Write Strategy", + "sensitive" : false + }, + "suppress-nulls" : { + "displayName" : "Suppress Null Values", + "identifiesControllerService" : false, + "name" : "suppress-nulls", + "sensitive" : false + }, + "output-grouping" : { + "displayName" : "Output Grouping", + "identifiesControllerService" : false, + "name" : "output-grouping", + "sensitive" : false + }, + "schema-name" : { + "displayName" : "Schema Name", + "identifiesControllerService" : false, + "name" : "schema-name", + "sensitive" : false + }, + "schema-registry" : { + "displayName" : "Schema Registry", + "identifiesControllerService" : true, + "name" : "schema-registry", + "sensitive" : false + }, + "Time Format" : { + "displayName" : "Time Format", + "identifiesControllerService" : false, + "name" : "Time Format", + "sensitive" : false + }, + "schema-access-strategy" : { + "displayName" : "Schema Access Strategy", + "identifiesControllerService" : false, + "name" : "schema-access-strategy", + "sensitive" : false + }, + "schema-protocol-version" : { + "displayName" : "Schema Protocol Version", + "identifiesControllerService" : false, + "name" : "schema-protocol-version", + "sensitive" : false + }, + "schema-version" : { + "displayName" : "Schema Version", + "identifiesControllerService" : false, + "name" : "schema-version", + "sensitive" : false + }, + "schema-text" : { + "displayName" : "Schema Text", + "identifiesControllerService" : false, + "name" : "schema-text", + "sensitive" : false + } + }, + "scheduledState" : "DISABLED", + "type" : "org.apache.nifi.json.JsonRecordSetWriter" + }, { + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-record-serialization-services-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "CONTROLLER_SERVICE", + "controllerServiceApis" : [ { + 
"bundle" : { + "artifact" : "nifi-standard-services-api-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "type" : "org.apache.nifi.serialization.RecordReaderFactory" + } ], + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "4850ced9-1430-3cec-9720-4fab95e71836", + "instanceIdentifier" : "722cb190-da9b-3cf5-98a0-e572dd9d412e", + "name" : "JsonTreeReader", + "properties" : { + "Max String Length" : "20 MB", + "schema-application-strategy" : "SELECTED_PART", + "schema-name" : "${schema.name}", + "starting-field-strategy" : "ROOT_NODE", + "schema-access-strategy" : "infer-schema", + "schema-text" : "${avro.schema}", + "Allow Comments" : "false" + }, + "propertyDescriptors" : { + "schema-branch" : { + "displayName" : "Schema Branch", + "identifiesControllerService" : false, + "name" : "schema-branch", + "sensitive" : false + }, + "Max String Length" : { + "displayName" : "Max String Length", + "identifiesControllerService" : false, + "name" : "Max String Length", + "sensitive" : false + }, + "schema-application-strategy" : { + "displayName" : "Schema Application Strategy", + "identifiesControllerService" : false, + "name" : "schema-application-strategy", + "sensitive" : false + }, + "Timestamp Format" : { + "displayName" : "Timestamp Format", + "identifiesControllerService" : false, + "name" : "Timestamp Format", + "sensitive" : false + }, + "schema-inference-cache" : { + "displayName" : "Schema Inference Cache", + "identifiesControllerService" : true, + "name" : "schema-inference-cache", + "sensitive" : false + }, + "Date Format" : { + "displayName" : "Date Format", + "identifiesControllerService" : false, + "name" : "Date Format", + "sensitive" : false + }, + "schema-name" : { + "displayName" : "Schema Name", + "identifiesControllerService" : false, + "name" : "schema-name", + "sensitive" : false + }, + "starting-field-strategy" : { + "displayName" : "Starting Field Strategy", + "identifiesControllerService" : false, + "name" : "starting-field-strategy", + "sensitive" : false + }, + "schema-registry" : { + "displayName" : "Schema Registry", + "identifiesControllerService" : true, + "name" : "schema-registry", + "sensitive" : false + }, + "starting-field-name" : { + "displayName" : "Starting Field Name", + "identifiesControllerService" : false, + "name" : "starting-field-name", + "sensitive" : false + }, + "Time Format" : { + "displayName" : "Time Format", + "identifiesControllerService" : false, + "name" : "Time Format", + "sensitive" : false + }, + "schema-access-strategy" : { + "displayName" : "Schema Access Strategy", + "identifiesControllerService" : false, + "name" : "schema-access-strategy", + "sensitive" : false + }, + "schema-version" : { + "displayName" : "Schema Version", + "identifiesControllerService" : false, + "name" : "schema-version", + "sensitive" : false + }, + "schema-text" : { + "displayName" : "Schema Text", + "identifiesControllerService" : false, + "name" : "schema-text", + "sensitive" : false + }, + "Allow Comments" : { + "displayName" : "Allow Comments", + "identifiesControllerService" : false, + "name" : "Allow Comments", + "sensitive" : false + } + }, + "scheduledState" : "DISABLED", + "type" : "org.apache.nifi.json.JsonTreeReader" + } ], + "defaultBackPressureDataSizeThreshold" : "1 GB", + "defaultBackPressureObjectThreshold" : 10000, + "defaultFlowFileExpiration" : "0 sec", + "flowFileConcurrency" : "UNBOUNDED", + "flowFileOutboundPolicy" : "STREAM_WHEN_AVAILABLE", + "funnels" : [ ], + "identifier" : 
"303615fb-749c-3917-aabb-ed4ccfe9dec1", + "inputPorts" : [ ], + "instanceIdentifier" : "321c623c-0192-1000-ffff-ffffa5255e12", + "labels" : [ { + "componentType" : "LABEL", + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "height" : 48.0, + "identifier" : "def7b942-6ce5-3285-b541-ce3e5d4033a7", + "instanceIdentifier" : "3582fbe8-0192-1000-0000-000055e080a0", + "label" : "Organization and it's address details are download from TFRS \nand loaded to LCFS database", + "position" : { + "x" : 2224.0, + "y" : 1192.0 + }, + "style" : { + "font-size" : "14px" + }, + "width" : 400.0, + "zIndex" : 0 + }, { + "componentType" : "LABEL", + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "height" : 40.0, + "identifier" : "e841f383-10dd-33db-b88a-392608e63a3d", + "instanceIdentifier" : "3581baa2-0192-1000-0000-00003f514967", + "label" : "Download user and role information from \nTFRS database to load onto LCFS database", + "position" : { + "x" : 2232.0, + "y" : 1456.0 + }, + "style" : { + "font-size" : "14px" + }, + "width" : 288.0, + "zIndex" : 0 + } ], + "name" : "Identity data transfer", + "outputPorts" : [ ], + "position" : { + "x" : 1168.0, + "y" : 304.0 + }, + "processGroups" : [ { + "comments" : "", + "componentType" : "PROCESS_GROUP", + "connections" : [ ], + "controllerServices" : [ ], + "defaultBackPressureDataSizeThreshold" : "1 GB", + "defaultBackPressureObjectThreshold" : 10000, + "defaultFlowFileExpiration" : "0 sec", + "flowFileConcurrency" : "UNBOUNDED", + "flowFileOutboundPolicy" : "STREAM_WHEN_AVAILABLE", + "funnels" : [ ], + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "22d0d04d-64ee-3880-95dc-2ade2a466ade", + "inputPorts" : [ ], + "instanceIdentifier" : "d00a1477-0192-1000-ffff-fffffa1972b8", + "labels" : [ ], + "name" : "Transfer data", + "outputPorts" : [ ], + "position" : { + "x" : 2848.0, + "y" : 1688.0 + }, + "processGroups" : [ ], + "processors" : [ ], + "remoteProcessGroups" : [ ], + "variables" : { }, + "versionedFlowCoordinates" : { + "bucketId" : "f1e99fb2-a652-408a-a1ff-0979dc03d93f", + "flowId" : "b634e610-960c-4772-b487-7c1b8c67b886", + "registryUrl" : "http://lcfs-registry:18080", + "storageLocation" : "http://lcfs-registry:18080/nifi-registry-api/buckets/f1e99fb2-a652-408a-a1ff-0979dc03d93f/flows/b634e610-960c-4772-b487-7c1b8c67b886/versions/1", + "version" : 1 + } + } ], + "processors" : [ { + "autoTerminatedRelationships" : [ "success", "failure" ], + "backoffMechanism" : "PENALIZE_FLOWFILE", + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-groovyx-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "PROCESSOR", + "concurrentlySchedulableTaskCount" : 1, + "executionNode" : "ALL", + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "7ed356d0-2363-3992-8e60-eb5b5ce86f80", + "instanceIdentifier" : "b9d73248-1438-1418-a736-cc94c8c21e70", + "maxBackoffPeriod" : "10 mins", + "name" : "Transfer data ETL", + "penaltyDuration" : "30 sec", + "position" : { + "x" : 2232.0, + "y" : 1704.0 + }, + "properties" : { + "SQL.lcfs" : "1c6bdea5-84e0-3517-82d2-ad5fc8e6432c", + "groovyx-failure-strategy" : "rollback", + "SQL.tfrs" : "c3d0e746-e921-3321-ae2d-584895e9dc32", + "groovyx-script-file" : "/opt/nifi/nifi-current/nifi_scripts/transfer_new.groovy" + }, + "propertyDescriptors" : { + "SQL.lcfs" : { + "displayName" : "SQL.lcfs", + "identifiesControllerService" : true, + "name" : "SQL.lcfs", + "sensitive" : false + }, + 
"groovyx-script-body" : { + "displayName" : "Script Body", + "identifiesControllerService" : false, + "name" : "groovyx-script-body", + "sensitive" : false + }, + "groovyx-failure-strategy" : { + "displayName" : "Failure strategy", + "identifiesControllerService" : false, + "name" : "groovyx-failure-strategy", + "sensitive" : false + }, + "groovyx-additional-classpath" : { + "displayName" : "Additional classpath", + "identifiesControllerService" : false, + "name" : "groovyx-additional-classpath", + "sensitive" : false + }, + "SQL.tfrs" : { + "displayName" : "SQL.tfrs", + "identifiesControllerService" : true, + "name" : "SQL.tfrs", + "sensitive" : false + }, + "groovyx-script-file" : { + "displayName" : "Script File", + "identifiesControllerService" : false, + "name" : "groovyx-script-file", + "resourceDefinition" : { + "cardinality" : "SINGLE", + "resourceTypes" : [ "FILE" ] + }, + "sensitive" : false + } + }, + "retriedRelationships" : [ ], + "retryCount" : 10, + "runDurationMillis" : 0, + "scheduledState" : "ENABLED", + "schedulingPeriod" : "0 sec", + "schedulingStrategy" : "TIMER_DRIVEN", + "style" : { }, + "type" : "org.apache.nifi.processors.groovyx.ExecuteGroovyScript", + "yieldDuration" : "1 sec" + }, { + "autoTerminatedRelationships" : [ "success", "failure" ], + "backoffMechanism" : "PENALIZE_FLOWFILE", + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-groovyx-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "PROCESSOR", + "concurrentlySchedulableTaskCount" : 1, + "executionNode" : "ALL", + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "1abb6d9e-4bb0-31d6-b42d-48772028c14c", + "instanceIdentifier" : "e6c63130-3eac-1b13-a947-ee0103275138", + "maxBackoffPeriod" : "10 mins", + "name" : "Users data transfer", + "penaltyDuration" : "30 sec", + "position" : { + "x" : 2232.0, + "y" : 1496.0 + }, + "properties" : { + "groovyx-failure-strategy" : "rollback", + "groovyx-script-file" : "/opt/nifi/nifi-current/nifi_scripts/user.groovy" + }, + "propertyDescriptors" : { + "groovyx-script-body" : { + "displayName" : "Script Body", + "identifiesControllerService" : false, + "name" : "groovyx-script-body", + "sensitive" : false + }, + "groovyx-failure-strategy" : { + "displayName" : "Failure strategy", + "identifiesControllerService" : false, + "name" : "groovyx-failure-strategy", + "sensitive" : false + }, + "groovyx-additional-classpath" : { + "displayName" : "Additional classpath", + "identifiesControllerService" : false, + "name" : "groovyx-additional-classpath", + "sensitive" : false + }, + "groovyx-script-file" : { + "displayName" : "Script File", + "identifiesControllerService" : false, + "name" : "groovyx-script-file", + "resourceDefinition" : { + "cardinality" : "SINGLE", + "resourceTypes" : [ "FILE" ] + }, + "sensitive" : false + } + }, + "retriedRelationships" : [ ], + "retryCount" : 10, + "runDurationMillis" : 0, + "scheduledState" : "ENABLED", + "schedulingPeriod" : "365 day", + "schedulingStrategy" : "TIMER_DRIVEN", + "style" : { }, + "type" : "org.apache.nifi.processors.groovyx.ExecuteGroovyScript", + "yieldDuration" : "1 sec" + }, { + "autoTerminatedRelationships" : [ "success", "failure" ], + "backoffMechanism" : "PENALIZE_FLOWFILE", + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-groovyx-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "PROCESSOR", + "concurrentlySchedulableTaskCount" : 1, + "executionNode" : 
"ALL", + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "70a978dd-82ec-3c3c-ae4e-a81168947fae", + "instanceIdentifier" : "328e2539-0192-1000-0000-00007a4304c1", + "maxBackoffPeriod" : "10 mins", + "name" : "Organization data transfer", + "penaltyDuration" : "30 sec", + "position" : { + "x" : 2224.0, + "y" : 1256.0 + }, + "properties" : { + "groovyx-failure-strategy" : "rollback", + "groovyx-script-file" : "/opt/nifi/nifi-current/nifi_scripts/organization.groovy" + }, + "propertyDescriptors" : { + "groovyx-script-body" : { + "displayName" : "Script Body", + "identifiesControllerService" : false, + "name" : "groovyx-script-body", + "sensitive" : false + }, + "groovyx-failure-strategy" : { + "displayName" : "Failure strategy", + "identifiesControllerService" : false, + "name" : "groovyx-failure-strategy", + "sensitive" : false + }, + "groovyx-additional-classpath" : { + "displayName" : "Additional classpath", + "identifiesControllerService" : false, + "name" : "groovyx-additional-classpath", + "sensitive" : false + }, + "groovyx-script-file" : { + "displayName" : "Script File", + "identifiesControllerService" : false, + "name" : "groovyx-script-file", + "resourceDefinition" : { + "cardinality" : "SINGLE", + "resourceTypes" : [ "FILE" ] + }, + "sensitive" : false + } + }, + "retriedRelationships" : [ ], + "retryCount" : 10, + "runDurationMillis" : 0, + "scheduledState" : "ENABLED", + "schedulingPeriod" : "365 day", + "schedulingStrategy" : "TIMER_DRIVEN", + "style" : { }, + "type" : "org.apache.nifi.processors.groovyx.ExecuteGroovyScript", + "yieldDuration" : "1 sec" + } ], + "remoteProcessGroups" : [ ], + "variables" : { } + }, + "flowEncodingVersion" : "1.0", + "parameterContexts" : { }, + "parameterProviders" : { } + } + } +} \ No newline at end of file diff --git a/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/852e916a-6f9d-4001-90cc-cbe8ec7248cc/2/2.snapshot b/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/852e916a-6f9d-4001-90cc-cbe8ec7248cc/2/2.snapshot new file mode 100644 index 000000000..9b5d51f21 --- /dev/null +++ b/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/852e916a-6f9d-4001-90cc-cbe8ec7248cc/2/2.snapshot @@ -0,0 +1,579 @@ +{ + "header" : { + "dataModelVersion" : "3" + }, + "content" : { + "flowSnapshot" : { + "externalControllerServices" : { }, + "flowContents" : { + "comments" : "", + "componentType" : "PROCESS_GROUP", + "connections" : [ { + "backPressureDataSizeThreshold" : "1 GB", + "backPressureObjectThreshold" : 10000, + "bends" : [ ], + "componentType" : "CONNECTION", + "destination" : { + "comments" : "", + "groupId" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "id" : "1abb6d9e-4bb0-31d6-b42d-48772028c14c", + "instanceIdentifier" : "e6c63130-3eac-1b13-a947-ee0103275138", + "name" : "Users data transfer", + "type" : "PROCESSOR" + }, + "flowFileExpiration" : "0 sec", + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "f272e89e-7c54-3518-9fe2-af8c800c51d3", + "instanceIdentifier" : "4f0d0f9c-0193-1000-0000-00004c769461", + "labelIndex" : 1, + "loadBalanceCompression" : "DO_NOT_COMPRESS", + "loadBalanceStrategy" : "DO_NOT_LOAD_BALANCE", + "name" : "", + "partitioningAttribute" : "", + "prioritizers" : [ ], + "selectedRelationships" : [ "success" ], + "source" : { + "comments" : "", + "groupId" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "id" : "70a978dd-82ec-3c3c-ae4e-a81168947fae", + "instanceIdentifier" : "328e2539-0192-1000-0000-00007a4304c1", + "name" : "Organization data transfer", + 
"type" : "PROCESSOR" + }, + "zIndex" : 0 + } ], + "controllerServices" : [ { + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-record-serialization-services-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "CONTROLLER_SERVICE", + "controllerServiceApis" : [ { + "bundle" : { + "artifact" : "nifi-standard-services-api-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "type" : "org.apache.nifi.serialization.RecordReaderFactory" + } ], + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "eb16f9e4-c948-3849-8efe-c3079a6ba952", + "instanceIdentifier" : "9cceebb4-8e43-3ac7-8b6d-cc0dce4c9697", + "name" : "AvroReader", + "properties" : { + "schema-name" : "${schema.name}", + "cache-size" : "1000", + "schema-access-strategy" : "embedded-avro-schema", + "schema-text" : "${avro.schema}" + }, + "propertyDescriptors" : { + "schema-branch" : { + "displayName" : "Schema Branch", + "identifiesControllerService" : false, + "name" : "schema-branch", + "sensitive" : false + }, + "schema-name" : { + "displayName" : "Schema Name", + "identifiesControllerService" : false, + "name" : "schema-name", + "sensitive" : false + }, + "cache-size" : { + "displayName" : "Cache Size", + "identifiesControllerService" : false, + "name" : "cache-size", + "sensitive" : false + }, + "schema-registry" : { + "displayName" : "Schema Registry", + "identifiesControllerService" : true, + "name" : "schema-registry", + "sensitive" : false + }, + "schema-access-strategy" : { + "displayName" : "Schema Access Strategy", + "identifiesControllerService" : false, + "name" : "schema-access-strategy", + "sensitive" : false + }, + "schema-version" : { + "displayName" : "Schema Version", + "identifiesControllerService" : false, + "name" : "schema-version", + "sensitive" : false + }, + "schema-text" : { + "displayName" : "Schema Text", + "identifiesControllerService" : false, + "name" : "schema-text", + "sensitive" : false + } + }, + "scheduledState" : "DISABLED", + "type" : "org.apache.nifi.avro.AvroReader" + }, { + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-record-serialization-services-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "CONTROLLER_SERVICE", + "controllerServiceApis" : [ { + "bundle" : { + "artifact" : "nifi-standard-services-api-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "type" : "org.apache.nifi.serialization.RecordSetWriterFactory" + } ], + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "e3d3c133-8004-3880-8150-388dfd06818c", + "instanceIdentifier" : "e73bd6b0-8a3e-395c-bfb8-bc1f4e72e1f7", + "name" : "JsonRecordSetWriter", + "properties" : { + "Allow Scientific Notation" : "false", + "compression-level" : "1", + "Pretty Print JSON" : "false", + "compression-format" : "none", + "Schema Write Strategy" : "no-schema", + "suppress-nulls" : "never-suppress", + "output-grouping" : "output-array", + "schema-name" : "${schema.name}", + "schema-access-strategy" : "inherit-record-schema", + "schema-protocol-version" : "1", + "schema-text" : "${avro.schema}" + }, + "propertyDescriptors" : { + "schema-branch" : { + "displayName" : "Schema Branch", + "identifiesControllerService" : false, + "name" : "schema-branch", + "sensitive" : false + }, + "Allow Scientific Notation" : { + "displayName" : "Allow Scientific Notation", + "identifiesControllerService" : false, + "name" : "Allow Scientific Notation", + 
"sensitive" : false + }, + "compression-level" : { + "displayName" : "Compression Level", + "identifiesControllerService" : false, + "name" : "compression-level", + "sensitive" : false + }, + "schema-cache" : { + "displayName" : "Schema Cache", + "identifiesControllerService" : true, + "name" : "schema-cache", + "sensitive" : false + }, + "Timestamp Format" : { + "displayName" : "Timestamp Format", + "identifiesControllerService" : false, + "name" : "Timestamp Format", + "sensitive" : false + }, + "Date Format" : { + "displayName" : "Date Format", + "identifiesControllerService" : false, + "name" : "Date Format", + "sensitive" : false + }, + "Pretty Print JSON" : { + "displayName" : "Pretty Print JSON", + "identifiesControllerService" : false, + "name" : "Pretty Print JSON", + "sensitive" : false + }, + "compression-format" : { + "displayName" : "Compression Format", + "identifiesControllerService" : false, + "name" : "compression-format", + "sensitive" : false + }, + "Schema Write Strategy" : { + "displayName" : "Schema Write Strategy", + "identifiesControllerService" : false, + "name" : "Schema Write Strategy", + "sensitive" : false + }, + "suppress-nulls" : { + "displayName" : "Suppress Null Values", + "identifiesControllerService" : false, + "name" : "suppress-nulls", + "sensitive" : false + }, + "output-grouping" : { + "displayName" : "Output Grouping", + "identifiesControllerService" : false, + "name" : "output-grouping", + "sensitive" : false + }, + "schema-name" : { + "displayName" : "Schema Name", + "identifiesControllerService" : false, + "name" : "schema-name", + "sensitive" : false + }, + "schema-registry" : { + "displayName" : "Schema Registry", + "identifiesControllerService" : true, + "name" : "schema-registry", + "sensitive" : false + }, + "Time Format" : { + "displayName" : "Time Format", + "identifiesControllerService" : false, + "name" : "Time Format", + "sensitive" : false + }, + "schema-access-strategy" : { + "displayName" : "Schema Access Strategy", + "identifiesControllerService" : false, + "name" : "schema-access-strategy", + "sensitive" : false + }, + "schema-protocol-version" : { + "displayName" : "Schema Protocol Version", + "identifiesControllerService" : false, + "name" : "schema-protocol-version", + "sensitive" : false + }, + "schema-version" : { + "displayName" : "Schema Version", + "identifiesControllerService" : false, + "name" : "schema-version", + "sensitive" : false + }, + "schema-text" : { + "displayName" : "Schema Text", + "identifiesControllerService" : false, + "name" : "schema-text", + "sensitive" : false + } + }, + "scheduledState" : "DISABLED", + "type" : "org.apache.nifi.json.JsonRecordSetWriter" + }, { + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-record-serialization-services-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "CONTROLLER_SERVICE", + "controllerServiceApis" : [ { + "bundle" : { + "artifact" : "nifi-standard-services-api-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "type" : "org.apache.nifi.serialization.RecordReaderFactory" + } ], + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "4850ced9-1430-3cec-9720-4fab95e71836", + "instanceIdentifier" : "722cb190-da9b-3cf5-98a0-e572dd9d412e", + "name" : "JsonTreeReader", + "properties" : { + "Max String Length" : "20 MB", + "schema-application-strategy" : "SELECTED_PART", + "schema-name" : "${schema.name}", + "starting-field-strategy" : "ROOT_NODE", + 
"schema-access-strategy" : "infer-schema", + "schema-text" : "${avro.schema}", + "Allow Comments" : "false" + }, + "propertyDescriptors" : { + "schema-branch" : { + "displayName" : "Schema Branch", + "identifiesControllerService" : false, + "name" : "schema-branch", + "sensitive" : false + }, + "Max String Length" : { + "displayName" : "Max String Length", + "identifiesControllerService" : false, + "name" : "Max String Length", + "sensitive" : false + }, + "schema-application-strategy" : { + "displayName" : "Schema Application Strategy", + "identifiesControllerService" : false, + "name" : "schema-application-strategy", + "sensitive" : false + }, + "Timestamp Format" : { + "displayName" : "Timestamp Format", + "identifiesControllerService" : false, + "name" : "Timestamp Format", + "sensitive" : false + }, + "schema-inference-cache" : { + "displayName" : "Schema Inference Cache", + "identifiesControllerService" : true, + "name" : "schema-inference-cache", + "sensitive" : false + }, + "Date Format" : { + "displayName" : "Date Format", + "identifiesControllerService" : false, + "name" : "Date Format", + "sensitive" : false + }, + "schema-name" : { + "displayName" : "Schema Name", + "identifiesControllerService" : false, + "name" : "schema-name", + "sensitive" : false + }, + "starting-field-strategy" : { + "displayName" : "Starting Field Strategy", + "identifiesControllerService" : false, + "name" : "starting-field-strategy", + "sensitive" : false + }, + "schema-registry" : { + "displayName" : "Schema Registry", + "identifiesControllerService" : true, + "name" : "schema-registry", + "sensitive" : false + }, + "starting-field-name" : { + "displayName" : "Starting Field Name", + "identifiesControllerService" : false, + "name" : "starting-field-name", + "sensitive" : false + }, + "Time Format" : { + "displayName" : "Time Format", + "identifiesControllerService" : false, + "name" : "Time Format", + "sensitive" : false + }, + "schema-access-strategy" : { + "displayName" : "Schema Access Strategy", + "identifiesControllerService" : false, + "name" : "schema-access-strategy", + "sensitive" : false + }, + "schema-version" : { + "displayName" : "Schema Version", + "identifiesControllerService" : false, + "name" : "schema-version", + "sensitive" : false + }, + "schema-text" : { + "displayName" : "Schema Text", + "identifiesControllerService" : false, + "name" : "schema-text", + "sensitive" : false + }, + "Allow Comments" : { + "displayName" : "Allow Comments", + "identifiesControllerService" : false, + "name" : "Allow Comments", + "sensitive" : false + } + }, + "scheduledState" : "DISABLED", + "type" : "org.apache.nifi.json.JsonTreeReader" + } ], + "defaultBackPressureDataSizeThreshold" : "1 GB", + "defaultBackPressureObjectThreshold" : 10000, + "defaultFlowFileExpiration" : "0 sec", + "flowFileConcurrency" : "UNBOUNDED", + "flowFileOutboundPolicy" : "STREAM_WHEN_AVAILABLE", + "funnels" : [ ], + "identifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "inputPorts" : [ ], + "instanceIdentifier" : "321c623c-0192-1000-ffff-ffffa5255e12", + "labels" : [ { + "componentType" : "LABEL", + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "height" : 48.0, + "identifier" : "def7b942-6ce5-3285-b541-ce3e5d4033a7", + "instanceIdentifier" : "3582fbe8-0192-1000-0000-000055e080a0", + "label" : "Organization and it's address details are download from TFRS \nand loaded to LCFS database", + "position" : { + "x" : 2584.0, + "y" : 1256.0 + }, + "style" : { + "font-size" : "14px" + }, + "width" : 400.0, + 
"zIndex" : 0 + }, { + "componentType" : "LABEL", + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "height" : 40.0, + "identifier" : "e841f383-10dd-33db-b88a-392608e63a3d", + "instanceIdentifier" : "3581baa2-0192-1000-0000-00003f514967", + "label" : "Download user and role information from \nTFRS database to load onto LCFS database", + "position" : { + "x" : 2584.0, + "y" : 1488.0 + }, + "style" : { + "font-size" : "14px" + }, + "width" : 288.0, + "zIndex" : 0 + } ], + "name" : "Identity data transfer", + "outputPorts" : [ ], + "position" : { + "x" : 1168.0, + "y" : 304.0 + }, + "processGroups" : [ ], + "processors" : [ { + "autoTerminatedRelationships" : [ "success", "failure" ], + "backoffMechanism" : "PENALIZE_FLOWFILE", + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-groovyx-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "PROCESSOR", + "concurrentlySchedulableTaskCount" : 1, + "executionNode" : "ALL", + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "1abb6d9e-4bb0-31d6-b42d-48772028c14c", + "instanceIdentifier" : "e6c63130-3eac-1b13-a947-ee0103275138", + "maxBackoffPeriod" : "10 mins", + "name" : "Users data transfer", + "penaltyDuration" : "30 sec", + "position" : { + "x" : 2224.0, + "y" : 1496.0 + }, + "properties" : { + "groovyx-failure-strategy" : "rollback", + "groovyx-script-file" : "/opt/nifi/nifi-current/nifi_scripts/user.groovy" + }, + "propertyDescriptors" : { + "groovyx-script-body" : { + "displayName" : "Script Body", + "identifiesControllerService" : false, + "name" : "groovyx-script-body", + "sensitive" : false + }, + "groovyx-failure-strategy" : { + "displayName" : "Failure strategy", + "identifiesControllerService" : false, + "name" : "groovyx-failure-strategy", + "sensitive" : false + }, + "groovyx-additional-classpath" : { + "displayName" : "Additional classpath", + "identifiesControllerService" : false, + "name" : "groovyx-additional-classpath", + "sensitive" : false + }, + "groovyx-script-file" : { + "displayName" : "Script File", + "identifiesControllerService" : false, + "name" : "groovyx-script-file", + "resourceDefinition" : { + "cardinality" : "SINGLE", + "resourceTypes" : [ "FILE" ] + }, + "sensitive" : false + } + }, + "retriedRelationships" : [ ], + "retryCount" : 10, + "runDurationMillis" : 0, + "scheduledState" : "ENABLED", + "schedulingPeriod" : "365 day", + "schedulingStrategy" : "TIMER_DRIVEN", + "style" : { }, + "type" : "org.apache.nifi.processors.groovyx.ExecuteGroovyScript", + "yieldDuration" : "1 sec" + }, { + "autoTerminatedRelationships" : [ "failure" ], + "backoffMechanism" : "PENALIZE_FLOWFILE", + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-groovyx-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "PROCESSOR", + "concurrentlySchedulableTaskCount" : 1, + "executionNode" : "ALL", + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "70a978dd-82ec-3c3c-ae4e-a81168947fae", + "instanceIdentifier" : "328e2539-0192-1000-0000-00007a4304c1", + "maxBackoffPeriod" : "10 mins", + "name" : "Organization data transfer", + "penaltyDuration" : "30 sec", + "position" : { + "x" : 2224.0, + "y" : 1256.0 + }, + "properties" : { + "groovyx-failure-strategy" : "rollback", + "groovyx-script-file" : "/opt/nifi/nifi-current/nifi_scripts/organization.groovy" + }, + "propertyDescriptors" : { + "groovyx-script-body" : { + "displayName" : "Script Body", + 
"identifiesControllerService" : false, + "name" : "groovyx-script-body", + "sensitive" : false + }, + "groovyx-failure-strategy" : { + "displayName" : "Failure strategy", + "identifiesControllerService" : false, + "name" : "groovyx-failure-strategy", + "sensitive" : false + }, + "groovyx-additional-classpath" : { + "displayName" : "Additional classpath", + "identifiesControllerService" : false, + "name" : "groovyx-additional-classpath", + "sensitive" : false + }, + "groovyx-script-file" : { + "displayName" : "Script File", + "identifiesControllerService" : false, + "name" : "groovyx-script-file", + "resourceDefinition" : { + "cardinality" : "SINGLE", + "resourceTypes" : [ "FILE" ] + }, + "sensitive" : false + } + }, + "retriedRelationships" : [ ], + "retryCount" : 10, + "runDurationMillis" : 0, + "scheduledState" : "ENABLED", + "schedulingPeriod" : "365 day", + "schedulingStrategy" : "TIMER_DRIVEN", + "style" : { }, + "type" : "org.apache.nifi.processors.groovyx.ExecuteGroovyScript", + "yieldDuration" : "1 sec" + } ], + "remoteProcessGroups" : [ ], + "variables" : { } + }, + "flowEncodingVersion" : "1.0", + "parameterContexts" : { }, + "parameterProviders" : { } + } + } +} \ No newline at end of file diff --git a/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/852e916a-6f9d-4001-90cc-cbe8ec7248cc/3/3.snapshot b/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/852e916a-6f9d-4001-90cc-cbe8ec7248cc/3/3.snapshot new file mode 100644 index 000000000..056b6d48c --- /dev/null +++ b/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/852e916a-6f9d-4001-90cc-cbe8ec7248cc/3/3.snapshot @@ -0,0 +1,546 @@ +{ + "header" : { + "dataModelVersion" : "3" + }, + "content" : { + "flowSnapshot" : { + "externalControllerServices" : { }, + "flowContents" : { + "comments" : "", + "componentType" : "PROCESS_GROUP", + "connections" : [ ], + "controllerServices" : [ { + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-record-serialization-services-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "CONTROLLER_SERVICE", + "controllerServiceApis" : [ { + "bundle" : { + "artifact" : "nifi-standard-services-api-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "type" : "org.apache.nifi.serialization.RecordReaderFactory" + } ], + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "eb16f9e4-c948-3849-8efe-c3079a6ba952", + "instanceIdentifier" : "9cceebb4-8e43-3ac7-8b6d-cc0dce4c9697", + "name" : "AvroReader", + "properties" : { + "schema-name" : "${schema.name}", + "cache-size" : "1000", + "schema-access-strategy" : "embedded-avro-schema", + "schema-text" : "${avro.schema}" + }, + "propertyDescriptors" : { + "schema-branch" : { + "displayName" : "Schema Branch", + "identifiesControllerService" : false, + "name" : "schema-branch", + "sensitive" : false + }, + "schema-name" : { + "displayName" : "Schema Name", + "identifiesControllerService" : false, + "name" : "schema-name", + "sensitive" : false + }, + "cache-size" : { + "displayName" : "Cache Size", + "identifiesControllerService" : false, + "name" : "cache-size", + "sensitive" : false + }, + "schema-registry" : { + "displayName" : "Schema Registry", + "identifiesControllerService" : true, + "name" : "schema-registry", + "sensitive" : false + }, + "schema-access-strategy" : { + "displayName" : "Schema Access Strategy", + "identifiesControllerService" : false, + "name" : "schema-access-strategy", + "sensitive" : false + }, + "schema-version" : { + 
"displayName" : "Schema Version", + "identifiesControllerService" : false, + "name" : "schema-version", + "sensitive" : false + }, + "schema-text" : { + "displayName" : "Schema Text", + "identifiesControllerService" : false, + "name" : "schema-text", + "sensitive" : false + } + }, + "scheduledState" : "DISABLED", + "type" : "org.apache.nifi.avro.AvroReader" + }, { + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-record-serialization-services-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "CONTROLLER_SERVICE", + "controllerServiceApis" : [ { + "bundle" : { + "artifact" : "nifi-standard-services-api-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "type" : "org.apache.nifi.serialization.RecordSetWriterFactory" + } ], + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "e3d3c133-8004-3880-8150-388dfd06818c", + "instanceIdentifier" : "e73bd6b0-8a3e-395c-bfb8-bc1f4e72e1f7", + "name" : "JsonRecordSetWriter", + "properties" : { + "Allow Scientific Notation" : "false", + "compression-level" : "1", + "Pretty Print JSON" : "false", + "compression-format" : "none", + "Schema Write Strategy" : "no-schema", + "suppress-nulls" : "never-suppress", + "output-grouping" : "output-array", + "schema-name" : "${schema.name}", + "schema-access-strategy" : "inherit-record-schema", + "schema-protocol-version" : "1", + "schema-text" : "${avro.schema}" + }, + "propertyDescriptors" : { + "schema-branch" : { + "displayName" : "Schema Branch", + "identifiesControllerService" : false, + "name" : "schema-branch", + "sensitive" : false + }, + "Allow Scientific Notation" : { + "displayName" : "Allow Scientific Notation", + "identifiesControllerService" : false, + "name" : "Allow Scientific Notation", + "sensitive" : false + }, + "compression-level" : { + "displayName" : "Compression Level", + "identifiesControllerService" : false, + "name" : "compression-level", + "sensitive" : false + }, + "schema-cache" : { + "displayName" : "Schema Cache", + "identifiesControllerService" : true, + "name" : "schema-cache", + "sensitive" : false + }, + "Timestamp Format" : { + "displayName" : "Timestamp Format", + "identifiesControllerService" : false, + "name" : "Timestamp Format", + "sensitive" : false + }, + "Date Format" : { + "displayName" : "Date Format", + "identifiesControllerService" : false, + "name" : "Date Format", + "sensitive" : false + }, + "Pretty Print JSON" : { + "displayName" : "Pretty Print JSON", + "identifiesControllerService" : false, + "name" : "Pretty Print JSON", + "sensitive" : false + }, + "compression-format" : { + "displayName" : "Compression Format", + "identifiesControllerService" : false, + "name" : "compression-format", + "sensitive" : false + }, + "Schema Write Strategy" : { + "displayName" : "Schema Write Strategy", + "identifiesControllerService" : false, + "name" : "Schema Write Strategy", + "sensitive" : false + }, + "suppress-nulls" : { + "displayName" : "Suppress Null Values", + "identifiesControllerService" : false, + "name" : "suppress-nulls", + "sensitive" : false + }, + "output-grouping" : { + "displayName" : "Output Grouping", + "identifiesControllerService" : false, + "name" : "output-grouping", + "sensitive" : false + }, + "schema-name" : { + "displayName" : "Schema Name", + "identifiesControllerService" : false, + "name" : "schema-name", + "sensitive" : false + }, + "schema-registry" : { + "displayName" : "Schema Registry", + "identifiesControllerService" : true, + "name" : 
"schema-registry", + "sensitive" : false + }, + "Time Format" : { + "displayName" : "Time Format", + "identifiesControllerService" : false, + "name" : "Time Format", + "sensitive" : false + }, + "schema-access-strategy" : { + "displayName" : "Schema Access Strategy", + "identifiesControllerService" : false, + "name" : "schema-access-strategy", + "sensitive" : false + }, + "schema-protocol-version" : { + "displayName" : "Schema Protocol Version", + "identifiesControllerService" : false, + "name" : "schema-protocol-version", + "sensitive" : false + }, + "schema-version" : { + "displayName" : "Schema Version", + "identifiesControllerService" : false, + "name" : "schema-version", + "sensitive" : false + }, + "schema-text" : { + "displayName" : "Schema Text", + "identifiesControllerService" : false, + "name" : "schema-text", + "sensitive" : false + } + }, + "scheduledState" : "DISABLED", + "type" : "org.apache.nifi.json.JsonRecordSetWriter" + }, { + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-record-serialization-services-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "CONTROLLER_SERVICE", + "controllerServiceApis" : [ { + "bundle" : { + "artifact" : "nifi-standard-services-api-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "type" : "org.apache.nifi.serialization.RecordReaderFactory" + } ], + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "4850ced9-1430-3cec-9720-4fab95e71836", + "instanceIdentifier" : "722cb190-da9b-3cf5-98a0-e572dd9d412e", + "name" : "JsonTreeReader", + "properties" : { + "Max String Length" : "20 MB", + "schema-application-strategy" : "SELECTED_PART", + "schema-name" : "${schema.name}", + "starting-field-strategy" : "ROOT_NODE", + "schema-access-strategy" : "infer-schema", + "schema-text" : "${avro.schema}", + "Allow Comments" : "false" + }, + "propertyDescriptors" : { + "schema-branch" : { + "displayName" : "Schema Branch", + "identifiesControllerService" : false, + "name" : "schema-branch", + "sensitive" : false + }, + "Max String Length" : { + "displayName" : "Max String Length", + "identifiesControllerService" : false, + "name" : "Max String Length", + "sensitive" : false + }, + "schema-application-strategy" : { + "displayName" : "Schema Application Strategy", + "identifiesControllerService" : false, + "name" : "schema-application-strategy", + "sensitive" : false + }, + "Timestamp Format" : { + "displayName" : "Timestamp Format", + "identifiesControllerService" : false, + "name" : "Timestamp Format", + "sensitive" : false + }, + "schema-inference-cache" : { + "displayName" : "Schema Inference Cache", + "identifiesControllerService" : true, + "name" : "schema-inference-cache", + "sensitive" : false + }, + "Date Format" : { + "displayName" : "Date Format", + "identifiesControllerService" : false, + "name" : "Date Format", + "sensitive" : false + }, + "schema-name" : { + "displayName" : "Schema Name", + "identifiesControllerService" : false, + "name" : "schema-name", + "sensitive" : false + }, + "starting-field-strategy" : { + "displayName" : "Starting Field Strategy", + "identifiesControllerService" : false, + "name" : "starting-field-strategy", + "sensitive" : false + }, + "schema-registry" : { + "displayName" : "Schema Registry", + "identifiesControllerService" : true, + "name" : "schema-registry", + "sensitive" : false + }, + "starting-field-name" : { + "displayName" : "Starting Field Name", + "identifiesControllerService" : false, + "name" : 
"starting-field-name", + "sensitive" : false + }, + "Time Format" : { + "displayName" : "Time Format", + "identifiesControllerService" : false, + "name" : "Time Format", + "sensitive" : false + }, + "schema-access-strategy" : { + "displayName" : "Schema Access Strategy", + "identifiesControllerService" : false, + "name" : "schema-access-strategy", + "sensitive" : false + }, + "schema-version" : { + "displayName" : "Schema Version", + "identifiesControllerService" : false, + "name" : "schema-version", + "sensitive" : false + }, + "schema-text" : { + "displayName" : "Schema Text", + "identifiesControllerService" : false, + "name" : "schema-text", + "sensitive" : false + }, + "Allow Comments" : { + "displayName" : "Allow Comments", + "identifiesControllerService" : false, + "name" : "Allow Comments", + "sensitive" : false + } + }, + "scheduledState" : "DISABLED", + "type" : "org.apache.nifi.json.JsonTreeReader" + } ], + "defaultBackPressureDataSizeThreshold" : "1 GB", + "defaultBackPressureObjectThreshold" : 10000, + "defaultFlowFileExpiration" : "0 sec", + "flowFileConcurrency" : "UNBOUNDED", + "flowFileOutboundPolicy" : "STREAM_WHEN_AVAILABLE", + "funnels" : [ ], + "identifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "inputPorts" : [ ], + "instanceIdentifier" : "321c623c-0192-1000-ffff-ffffa5255e12", + "labels" : [ { + "componentType" : "LABEL", + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "height" : 48.0, + "identifier" : "def7b942-6ce5-3285-b541-ce3e5d4033a7", + "instanceIdentifier" : "3582fbe8-0192-1000-0000-000055e080a0", + "label" : "Step1: Organization and it's address details are download from TFRS \nand loaded to LCFS database", + "position" : { + "x" : 2584.0, + "y" : 1256.0 + }, + "style" : { + "font-size" : "14px" + }, + "width" : 400.0, + "zIndex" : 0 + }, { + "componentType" : "LABEL", + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "height" : 40.0, + "identifier" : "e841f383-10dd-33db-b88a-392608e63a3d", + "instanceIdentifier" : "3581baa2-0192-1000-0000-00003f514967", + "label" : "Step2: Download user and role information from \nTFRS database to load onto LCFS database", + "position" : { + "x" : 2584.0, + "y" : 1488.0 + }, + "style" : { + "font-size" : "14px" + }, + "width" : 288.0, + "zIndex" : 0 + } ], + "name" : "Identity data transfer", + "outputPorts" : [ ], + "position" : { + "x" : 1168.0, + "y" : 304.0 + }, + "processGroups" : [ ], + "processors" : [ { + "autoTerminatedRelationships" : [ "success", "failure" ], + "backoffMechanism" : "PENALIZE_FLOWFILE", + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-groovyx-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "PROCESSOR", + "concurrentlySchedulableTaskCount" : 1, + "executionNode" : "ALL", + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "1abb6d9e-4bb0-31d6-b42d-48772028c14c", + "instanceIdentifier" : "e6c63130-3eac-1b13-a947-ee0103275138", + "maxBackoffPeriod" : "10 mins", + "name" : "Users data transfer", + "penaltyDuration" : "30 sec", + "position" : { + "x" : 2224.0, + "y" : 1496.0 + }, + "properties" : { + "groovyx-failure-strategy" : "rollback", + "groovyx-script-file" : "/opt/nifi/nifi-current/nifi_scripts/user.groovy" + }, + "propertyDescriptors" : { + "groovyx-script-body" : { + "displayName" : "Script Body", + "identifiesControllerService" : false, + "name" : "groovyx-script-body", + "sensitive" : false + }, + "groovyx-failure-strategy" : { + "displayName" : "Failure 
strategy", + "identifiesControllerService" : false, + "name" : "groovyx-failure-strategy", + "sensitive" : false + }, + "groovyx-additional-classpath" : { + "displayName" : "Additional classpath", + "identifiesControllerService" : false, + "name" : "groovyx-additional-classpath", + "sensitive" : false + }, + "groovyx-script-file" : { + "displayName" : "Script File", + "identifiesControllerService" : false, + "name" : "groovyx-script-file", + "resourceDefinition" : { + "cardinality" : "SINGLE", + "resourceTypes" : [ "FILE" ] + }, + "sensitive" : false + } + }, + "retriedRelationships" : [ ], + "retryCount" : 10, + "runDurationMillis" : 0, + "scheduledState" : "ENABLED", + "schedulingPeriod" : "365 day", + "schedulingStrategy" : "TIMER_DRIVEN", + "style" : { }, + "type" : "org.apache.nifi.processors.groovyx.ExecuteGroovyScript", + "yieldDuration" : "1 sec" + }, { + "autoTerminatedRelationships" : [ "success", "failure" ], + "backoffMechanism" : "PENALIZE_FLOWFILE", + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-groovyx-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "PROCESSOR", + "concurrentlySchedulableTaskCount" : 1, + "executionNode" : "ALL", + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "70a978dd-82ec-3c3c-ae4e-a81168947fae", + "instanceIdentifier" : "328e2539-0192-1000-0000-00007a4304c1", + "maxBackoffPeriod" : "10 mins", + "name" : "Organization data transfer", + "penaltyDuration" : "30 sec", + "position" : { + "x" : 2224.0, + "y" : 1256.0 + }, + "properties" : { + "groovyx-failure-strategy" : "rollback", + "groovyx-script-file" : "/opt/nifi/nifi-current/nifi_scripts/organization.groovy" + }, + "propertyDescriptors" : { + "groovyx-script-body" : { + "displayName" : "Script Body", + "identifiesControllerService" : false, + "name" : "groovyx-script-body", + "sensitive" : false + }, + "groovyx-failure-strategy" : { + "displayName" : "Failure strategy", + "identifiesControllerService" : false, + "name" : "groovyx-failure-strategy", + "sensitive" : false + }, + "groovyx-additional-classpath" : { + "displayName" : "Additional classpath", + "identifiesControllerService" : false, + "name" : "groovyx-additional-classpath", + "sensitive" : false + }, + "groovyx-script-file" : { + "displayName" : "Script File", + "identifiesControllerService" : false, + "name" : "groovyx-script-file", + "resourceDefinition" : { + "cardinality" : "SINGLE", + "resourceTypes" : [ "FILE" ] + }, + "sensitive" : false + } + }, + "retriedRelationships" : [ ], + "retryCount" : 10, + "runDurationMillis" : 0, + "scheduledState" : "ENABLED", + "schedulingPeriod" : "365 day", + "schedulingStrategy" : "TIMER_DRIVEN", + "style" : { }, + "type" : "org.apache.nifi.processors.groovyx.ExecuteGroovyScript", + "yieldDuration" : "1 sec" + } ], + "remoteProcessGroups" : [ ], + "variables" : { } + }, + "flowEncodingVersion" : "1.0", + "parameterContexts" : { }, + "parameterProviders" : { } + } + } +} \ No newline at end of file diff --git a/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/852e916a-6f9d-4001-90cc-cbe8ec7248cc/4/4.snapshot b/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/852e916a-6f9d-4001-90cc-cbe8ec7248cc/4/4.snapshot new file mode 100644 index 000000000..3199c5b5f --- /dev/null +++ b/etl/flow_storage/f1e99fb2-a652-408a-a1ff-0979dc03d93f/852e916a-6f9d-4001-90cc-cbe8ec7248cc/4/4.snapshot @@ -0,0 +1,546 @@ +{ + "header" : { + "dataModelVersion" : "3" + }, + "content" : { + "flowSnapshot" : { + 
"externalControllerServices" : { }, + "flowContents" : { + "comments" : "", + "componentType" : "PROCESS_GROUP", + "connections" : [ ], + "controllerServices" : [ { + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-record-serialization-services-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "CONTROLLER_SERVICE", + "controllerServiceApis" : [ { + "bundle" : { + "artifact" : "nifi-standard-services-api-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "type" : "org.apache.nifi.serialization.RecordReaderFactory" + } ], + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "eb16f9e4-c948-3849-8efe-c3079a6ba952", + "instanceIdentifier" : "9cceebb4-8e43-3ac7-8b6d-cc0dce4c9697", + "name" : "AvroReader", + "properties" : { + "schema-name" : "${schema.name}", + "cache-size" : "1000", + "schema-access-strategy" : "embedded-avro-schema", + "schema-text" : "${avro.schema}" + }, + "propertyDescriptors" : { + "schema-branch" : { + "displayName" : "Schema Branch", + "identifiesControllerService" : false, + "name" : "schema-branch", + "sensitive" : false + }, + "schema-name" : { + "displayName" : "Schema Name", + "identifiesControllerService" : false, + "name" : "schema-name", + "sensitive" : false + }, + "cache-size" : { + "displayName" : "Cache Size", + "identifiesControllerService" : false, + "name" : "cache-size", + "sensitive" : false + }, + "schema-registry" : { + "displayName" : "Schema Registry", + "identifiesControllerService" : true, + "name" : "schema-registry", + "sensitive" : false + }, + "schema-access-strategy" : { + "displayName" : "Schema Access Strategy", + "identifiesControllerService" : false, + "name" : "schema-access-strategy", + "sensitive" : false + }, + "schema-version" : { + "displayName" : "Schema Version", + "identifiesControllerService" : false, + "name" : "schema-version", + "sensitive" : false + }, + "schema-text" : { + "displayName" : "Schema Text", + "identifiesControllerService" : false, + "name" : "schema-text", + "sensitive" : false + } + }, + "scheduledState" : "DISABLED", + "type" : "org.apache.nifi.avro.AvroReader" + }, { + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-record-serialization-services-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "CONTROLLER_SERVICE", + "controllerServiceApis" : [ { + "bundle" : { + "artifact" : "nifi-standard-services-api-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "type" : "org.apache.nifi.serialization.RecordSetWriterFactory" + } ], + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "e3d3c133-8004-3880-8150-388dfd06818c", + "instanceIdentifier" : "e73bd6b0-8a3e-395c-bfb8-bc1f4e72e1f7", + "name" : "JsonRecordSetWriter", + "properties" : { + "Allow Scientific Notation" : "false", + "compression-level" : "1", + "Pretty Print JSON" : "false", + "compression-format" : "none", + "Schema Write Strategy" : "no-schema", + "suppress-nulls" : "never-suppress", + "output-grouping" : "output-array", + "schema-name" : "${schema.name}", + "schema-access-strategy" : "inherit-record-schema", + "schema-protocol-version" : "1", + "schema-text" : "${avro.schema}" + }, + "propertyDescriptors" : { + "schema-branch" : { + "displayName" : "Schema Branch", + "identifiesControllerService" : false, + "name" : "schema-branch", + "sensitive" : false + }, + "Allow Scientific Notation" : { + "displayName" : "Allow Scientific 
Notation", + "identifiesControllerService" : false, + "name" : "Allow Scientific Notation", + "sensitive" : false + }, + "compression-level" : { + "displayName" : "Compression Level", + "identifiesControllerService" : false, + "name" : "compression-level", + "sensitive" : false + }, + "schema-cache" : { + "displayName" : "Schema Cache", + "identifiesControllerService" : true, + "name" : "schema-cache", + "sensitive" : false + }, + "Timestamp Format" : { + "displayName" : "Timestamp Format", + "identifiesControllerService" : false, + "name" : "Timestamp Format", + "sensitive" : false + }, + "Date Format" : { + "displayName" : "Date Format", + "identifiesControllerService" : false, + "name" : "Date Format", + "sensitive" : false + }, + "Pretty Print JSON" : { + "displayName" : "Pretty Print JSON", + "identifiesControllerService" : false, + "name" : "Pretty Print JSON", + "sensitive" : false + }, + "compression-format" : { + "displayName" : "Compression Format", + "identifiesControllerService" : false, + "name" : "compression-format", + "sensitive" : false + }, + "Schema Write Strategy" : { + "displayName" : "Schema Write Strategy", + "identifiesControllerService" : false, + "name" : "Schema Write Strategy", + "sensitive" : false + }, + "suppress-nulls" : { + "displayName" : "Suppress Null Values", + "identifiesControllerService" : false, + "name" : "suppress-nulls", + "sensitive" : false + }, + "output-grouping" : { + "displayName" : "Output Grouping", + "identifiesControllerService" : false, + "name" : "output-grouping", + "sensitive" : false + }, + "schema-name" : { + "displayName" : "Schema Name", + "identifiesControllerService" : false, + "name" : "schema-name", + "sensitive" : false + }, + "schema-registry" : { + "displayName" : "Schema Registry", + "identifiesControllerService" : true, + "name" : "schema-registry", + "sensitive" : false + }, + "Time Format" : { + "displayName" : "Time Format", + "identifiesControllerService" : false, + "name" : "Time Format", + "sensitive" : false + }, + "schema-access-strategy" : { + "displayName" : "Schema Access Strategy", + "identifiesControllerService" : false, + "name" : "schema-access-strategy", + "sensitive" : false + }, + "schema-protocol-version" : { + "displayName" : "Schema Protocol Version", + "identifiesControllerService" : false, + "name" : "schema-protocol-version", + "sensitive" : false + }, + "schema-version" : { + "displayName" : "Schema Version", + "identifiesControllerService" : false, + "name" : "schema-version", + "sensitive" : false + }, + "schema-text" : { + "displayName" : "Schema Text", + "identifiesControllerService" : false, + "name" : "schema-text", + "sensitive" : false + } + }, + "scheduledState" : "DISABLED", + "type" : "org.apache.nifi.json.JsonRecordSetWriter" + }, { + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-record-serialization-services-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "CONTROLLER_SERVICE", + "controllerServiceApis" : [ { + "bundle" : { + "artifact" : "nifi-standard-services-api-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "type" : "org.apache.nifi.serialization.RecordReaderFactory" + } ], + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "4850ced9-1430-3cec-9720-4fab95e71836", + "instanceIdentifier" : "722cb190-da9b-3cf5-98a0-e572dd9d412e", + "name" : "JsonTreeReader", + "properties" : { + "Max String Length" : "20 MB", + "schema-application-strategy" : "SELECTED_PART", + 
"schema-name" : "${schema.name}", + "starting-field-strategy" : "ROOT_NODE", + "schema-access-strategy" : "infer-schema", + "schema-text" : "${avro.schema}", + "Allow Comments" : "false" + }, + "propertyDescriptors" : { + "schema-branch" : { + "displayName" : "Schema Branch", + "identifiesControllerService" : false, + "name" : "schema-branch", + "sensitive" : false + }, + "Max String Length" : { + "displayName" : "Max String Length", + "identifiesControllerService" : false, + "name" : "Max String Length", + "sensitive" : false + }, + "schema-application-strategy" : { + "displayName" : "Schema Application Strategy", + "identifiesControllerService" : false, + "name" : "schema-application-strategy", + "sensitive" : false + }, + "Timestamp Format" : { + "displayName" : "Timestamp Format", + "identifiesControllerService" : false, + "name" : "Timestamp Format", + "sensitive" : false + }, + "schema-inference-cache" : { + "displayName" : "Schema Inference Cache", + "identifiesControllerService" : true, + "name" : "schema-inference-cache", + "sensitive" : false + }, + "Date Format" : { + "displayName" : "Date Format", + "identifiesControllerService" : false, + "name" : "Date Format", + "sensitive" : false + }, + "schema-name" : { + "displayName" : "Schema Name", + "identifiesControllerService" : false, + "name" : "schema-name", + "sensitive" : false + }, + "starting-field-strategy" : { + "displayName" : "Starting Field Strategy", + "identifiesControllerService" : false, + "name" : "starting-field-strategy", + "sensitive" : false + }, + "schema-registry" : { + "displayName" : "Schema Registry", + "identifiesControllerService" : true, + "name" : "schema-registry", + "sensitive" : false + }, + "starting-field-name" : { + "displayName" : "Starting Field Name", + "identifiesControllerService" : false, + "name" : "starting-field-name", + "sensitive" : false + }, + "Time Format" : { + "displayName" : "Time Format", + "identifiesControllerService" : false, + "name" : "Time Format", + "sensitive" : false + }, + "schema-access-strategy" : { + "displayName" : "Schema Access Strategy", + "identifiesControllerService" : false, + "name" : "schema-access-strategy", + "sensitive" : false + }, + "schema-version" : { + "displayName" : "Schema Version", + "identifiesControllerService" : false, + "name" : "schema-version", + "sensitive" : false + }, + "schema-text" : { + "displayName" : "Schema Text", + "identifiesControllerService" : false, + "name" : "schema-text", + "sensitive" : false + }, + "Allow Comments" : { + "displayName" : "Allow Comments", + "identifiesControllerService" : false, + "name" : "Allow Comments", + "sensitive" : false + } + }, + "scheduledState" : "DISABLED", + "type" : "org.apache.nifi.json.JsonTreeReader" + } ], + "defaultBackPressureDataSizeThreshold" : "1 GB", + "defaultBackPressureObjectThreshold" : 10000, + "defaultFlowFileExpiration" : "0 sec", + "flowFileConcurrency" : "UNBOUNDED", + "flowFileOutboundPolicy" : "STREAM_WHEN_AVAILABLE", + "funnels" : [ ], + "identifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "inputPorts" : [ ], + "instanceIdentifier" : "321c623c-0192-1000-ffff-ffffa5255e12", + "labels" : [ { + "componentType" : "LABEL", + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "height" : 56.0, + "identifier" : "def7b942-6ce5-3285-b541-ce3e5d4033a7", + "instanceIdentifier" : "3582fbe8-0192-1000-0000-000055e080a0", + "label" : "Step1: Organization and it's address details are download from TFRS \nand loaded to LCFS database", + "position" : { + "x" : 2584.0, + 
"y" : 1256.0 + }, + "style" : { + "font-size" : "14px" + }, + "width" : 440.0, + "zIndex" : 0 + }, { + "componentType" : "LABEL", + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "height" : 40.0, + "identifier" : "e841f383-10dd-33db-b88a-392608e63a3d", + "instanceIdentifier" : "3581baa2-0192-1000-0000-00003f514967", + "label" : "Step2: Download user and role information from \nTFRS database to load onto LCFS database", + "position" : { + "x" : 2584.0, + "y" : 1488.0 + }, + "style" : { + "font-size" : "14px" + }, + "width" : 312.0, + "zIndex" : 0 + } ], + "name" : "Identity data transfer", + "outputPorts" : [ ], + "position" : { + "x" : 1168.0, + "y" : 304.0 + }, + "processGroups" : [ ], + "processors" : [ { + "autoTerminatedRelationships" : [ "success", "failure" ], + "backoffMechanism" : "PENALIZE_FLOWFILE", + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-groovyx-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "PROCESSOR", + "concurrentlySchedulableTaskCount" : 1, + "executionNode" : "ALL", + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "1abb6d9e-4bb0-31d6-b42d-48772028c14c", + "instanceIdentifier" : "e6c63130-3eac-1b13-a947-ee0103275138", + "maxBackoffPeriod" : "10 mins", + "name" : "Users data transfer", + "penaltyDuration" : "30 sec", + "position" : { + "x" : 2224.0, + "y" : 1496.0 + }, + "properties" : { + "groovyx-failure-strategy" : "rollback", + "groovyx-script-file" : "/opt/nifi/nifi-current/nifi_scripts/user.groovy" + }, + "propertyDescriptors" : { + "groovyx-script-body" : { + "displayName" : "Script Body", + "identifiesControllerService" : false, + "name" : "groovyx-script-body", + "sensitive" : false + }, + "groovyx-failure-strategy" : { + "displayName" : "Failure strategy", + "identifiesControllerService" : false, + "name" : "groovyx-failure-strategy", + "sensitive" : false + }, + "groovyx-additional-classpath" : { + "displayName" : "Additional classpath", + "identifiesControllerService" : false, + "name" : "groovyx-additional-classpath", + "sensitive" : false + }, + "groovyx-script-file" : { + "displayName" : "Script File", + "identifiesControllerService" : false, + "name" : "groovyx-script-file", + "resourceDefinition" : { + "cardinality" : "SINGLE", + "resourceTypes" : [ "FILE" ] + }, + "sensitive" : false + } + }, + "retriedRelationships" : [ ], + "retryCount" : 10, + "runDurationMillis" : 0, + "scheduledState" : "ENABLED", + "schedulingPeriod" : "365 day", + "schedulingStrategy" : "TIMER_DRIVEN", + "style" : { }, + "type" : "org.apache.nifi.processors.groovyx.ExecuteGroovyScript", + "yieldDuration" : "1 sec" + }, { + "autoTerminatedRelationships" : [ "success", "failure" ], + "backoffMechanism" : "PENALIZE_FLOWFILE", + "bulletinLevel" : "WARN", + "bundle" : { + "artifact" : "nifi-groovyx-nar", + "group" : "org.apache.nifi", + "version" : "1.27.0" + }, + "comments" : "", + "componentType" : "PROCESSOR", + "concurrentlySchedulableTaskCount" : 1, + "executionNode" : "ALL", + "groupIdentifier" : "303615fb-749c-3917-aabb-ed4ccfe9dec1", + "identifier" : "70a978dd-82ec-3c3c-ae4e-a81168947fae", + "instanceIdentifier" : "328e2539-0192-1000-0000-00007a4304c1", + "maxBackoffPeriod" : "10 mins", + "name" : "Organization data transfer", + "penaltyDuration" : "30 sec", + "position" : { + "x" : 2224.0, + "y" : 1256.0 + }, + "properties" : { + "groovyx-failure-strategy" : "rollback", + "groovyx-script-file" : "/opt/nifi/nifi-current/nifi_scripts/organization.groovy" + 
}, + "propertyDescriptors" : { + "groovyx-script-body" : { + "displayName" : "Script Body", + "identifiesControllerService" : false, + "name" : "groovyx-script-body", + "sensitive" : false + }, + "groovyx-failure-strategy" : { + "displayName" : "Failure strategy", + "identifiesControllerService" : false, + "name" : "groovyx-failure-strategy", + "sensitive" : false + }, + "groovyx-additional-classpath" : { + "displayName" : "Additional classpath", + "identifiesControllerService" : false, + "name" : "groovyx-additional-classpath", + "sensitive" : false + }, + "groovyx-script-file" : { + "displayName" : "Script File", + "identifiesControllerService" : false, + "name" : "groovyx-script-file", + "resourceDefinition" : { + "cardinality" : "SINGLE", + "resourceTypes" : [ "FILE" ] + }, + "sensitive" : false + } + }, + "retriedRelationships" : [ ], + "retryCount" : 10, + "runDurationMillis" : 0, + "scheduledState" : "ENABLED", + "schedulingPeriod" : "365 day", + "schedulingStrategy" : "TIMER_DRIVEN", + "style" : { }, + "type" : "org.apache.nifi.processors.groovyx.ExecuteGroovyScript", + "yieldDuration" : "1 sec" + } ], + "remoteProcessGroups" : [ ], + "variables" : { } + }, + "flowEncodingVersion" : "1.0", + "parameterContexts" : { }, + "parameterProviders" : { } + } + } +} \ No newline at end of file diff --git a/etl/jdbc_drivers/postgresql-42.7.3.jar b/etl/jdbc_drivers/postgresql-42.7.3.jar new file mode 100644 index 000000000..fa42b1d6d Binary files /dev/null and b/etl/jdbc_drivers/postgresql-42.7.3.jar differ diff --git a/etl/local_nifi_manager.sh b/etl/local_nifi_manager.sh new file mode 100644 index 000000000..ab873acc7 --- /dev/null +++ b/etl/local_nifi_manager.sh @@ -0,0 +1,300 @@ +#!/usr/bin/env bash +# A Simplified NiFi Processor Management Script for Local Databases +# +# This script manages NiFi processors and updates database connection configurations +# for a local setup, without OpenShift or port-forwarding. It assumes: +# - NiFi is running locally and accessible at http://localhost:8091 +# - TFRS and LCFS databases are running locally on specified ports. +# +# Usage: +# ./local_nifi_manager.sh [--debug|--verbose] +# +# Features: +# - No OpenShift or port-forwarding; everything runs locally. +# - Updates NiFi database controller connections to point to local DBs. +# - Triggers NiFi processors to run once and waits for them to finish. +# +# Requirements: +# - `jq` for JSON parsing. +# - NiFi running locally and accessible via the NiFi API. +# +# Notes: +# Update the processor and controller service IDs, as well as the database credentials, +# according to your local environment. 
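+#
+# Example invocations (hypothetical values; --debug and --verbose are the only
+# flags the script parses, and DEBUG_LEVEL can also be preset in the environment):
+#   ./local_nifi_manager.sh                 # default INFO logging
+#   ./local_nifi_manager.sh --verbose       # VERBOSE logging
+#   DEBUG_LEVEL=5 ./local_nifi_manager.sh   # equivalent to --debug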
+ +set -euo pipefail + +# Debug Levels +readonly DEBUG_NONE=0 +readonly DEBUG_ERROR=1 +readonly DEBUG_WARN=2 +readonly DEBUG_INFO=3 +readonly DEBUG_VERBOSE=4 +readonly DEBUG_DEBUG=5 + +# Default Debug Level +DEBUG_LEVEL=${DEBUG_LEVEL:-$DEBUG_INFO} + +# Processor and Controller Service IDs (Update these as needed) +ORGANIZATION_PROCESSOR="328e2539-0192-1000-0000-00007a4304c1" +USER_PROCESSOR="e6c63130-3eac-1b13-a947-ee0103275138" +TRANSFER_PROCESSOR="b9d73248-1438-1418-a736-cc94c8c21e70" +TRANSACTIONS_PROCESSOR="7a010ef5-0193-1000-ffff-ffff8c22e67e" + +# Controller Service IDs for local DB connections (Update these) +LCFS_CONTROLLER_SERVICE_ID="32417e8c-0192-1000-ffff-ffff8ccb5dfa" +TFRS_CONTROLLER_SERVICE_ID="3245b078-0192-1000-ffff-ffffba20c1eb" + +# Local Database Configuration (Update as needed) +# Example: TFRS DB at localhost:5435, LCFS DB at localhost:5432 +TFRS_DB_PORT=5435 +TFRS_DB_USER="tfrs" +TFRS_DB_PASS="development_only" +TFRS_DB_NAME="tfrs" + +LCFS_DB_PORT=5432 +LCFS_DB_USER="lcfs" +LCFS_DB_PASS="development_only" +LCFS_DB_NAME="lcfs" + +# NiFi API URL +NIFI_API_URL="http://localhost:8091/nifi-api" +MAX_RETRIES=5 + +# Logging function +_log() { + local level=$1 + local message=$2 + local timestamp=$(date '+%Y-%m-%d %H:%M:%S') + + if [ "$level" -le "$DEBUG_LEVEL" ]; then + case $level in + $DEBUG_ERROR) printf "\e[31m[ERROR][%s] %s\e[0m\n" "$timestamp" "$message" >&2 ;; + $DEBUG_WARN) printf "\e[33m[WARN][%s] %s\e[0m\n" "$timestamp" "$message" >&2 ;; + $DEBUG_INFO) printf "\e[32m[INFO][%s] %s\e[0m\n" "$timestamp" "$message" >&2 ;; + $DEBUG_VERBOSE)printf "\e[34m[VERBOSE][%s] %s\e[0m\n" "$timestamp" "$message" >&2 ;; + $DEBUG_DEBUG) printf "\e[36m[DEBUG][%s] %s\e[0m\n" "$timestamp" "$message" >&2 ;; + esac + fi +} + +error() { _log $DEBUG_ERROR "$1"; } +warn() { _log $DEBUG_WARN "$1"; } +info() { _log $DEBUG_INFO "$1"; } +verbose() { _log $DEBUG_VERBOSE "$1"; } +debug() { _log $DEBUG_DEBUG "$1"; } + +error_exit() { + error "$1" + exit 1 +} + +curl_with_retry() { + local max_attempts=$1 + shift + local url=$1 + local method=${2:-GET} + local data=${3:-} + local attempt=1 + + info "Curl Request: $method $url" + verbose "Max Attempts: $max_attempts" + + while [ $attempt -le "$max_attempts" ]; do + debug "Attempt $attempt for $url" + local response + local http_code + + if [ -z "$data" ]; then + response=$(curl -sS -w "%{http_code}" -X "$method" "$url") + else + response=$(curl -sS -w "%{http_code}" -X "$method" -H "Content-Type: application/json" -d "$data" "$url") + fi + + http_code="${response: -3}" + response_body="${response:0:${#response}-3}" + + if [ "$http_code" -ge 200 ] && [ "$http_code" -lt 300 ]; then + verbose "Curl successful. HTTP Code: $http_code" + echo "$response_body" + return 0 + fi + + warn "Curl attempt $attempt failed (HTTP $http_code). Retrying..." + ((attempt++)) + sleep $((attempt * 2)) + done + + error_exit "Curl failed after $max_attempts attempts: $url" +} + +enable_controller_service() { + local service_id=$1 + info "Enabling controller service $service_id" + local current_config revision_version + + current_config=$(curl_with_retry 3 "$NIFI_API_URL/controller-services/$service_id") + revision_version=$(echo "$current_config" | jq -r '.revision.version') + + curl_with_retry 3 "$NIFI_API_URL/controller-services/$service_id/run-status" PUT "{\"state\": \"ENABLED\", \"revision\": { \"version\": $revision_version }}" + info "Controller service $service_id enabled." 
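+    # Note: the PUT above only requests the ENABLED state; NiFi applies it
+    # asynchronously. A stricter version of this helper could poll
+    # .component.state here until it reads ENABLED, mirroring the DISABLED
+    # wait loop in update_nifi_connection below.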
+} + +execute_processor() { + local processor_id=$1 + + info "Triggering single execution for processor $processor_id" + local current_config revision_version + + current_config=$(curl_with_retry 3 "$NIFI_API_URL/processors/$processor_id") + revision_version=$(echo "$current_config" | jq -r '.revision.version') + + local run_once_payload=$(jq -n \ + --argjson revision_version "$revision_version" \ + '{ + "revision": { "version": $revision_version }, + "state": "RUN_ONCE", + "disconnectedNodeAcknowledged": false + }') + + local response + response=$(curl -sS -w "%{http_code}" -X PUT \ + -H "Content-Type: application/json" \ + -d "$run_once_payload" \ + "$NIFI_API_URL/processors/$processor_id/run-status") + + local http_code="${response: -3}" + local response_body="${response:0:${#response}-3}" + + if [ "$http_code" -ge 200 ] && [ "$http_code" -lt 300 ]; then + info "Processor $processor_id triggered for single run successfully" + sleep 10 + else + error "Failed to trigger single run for processor $processor_id (HTTP $http_code)" + error "Response: $response_body" + return 1 + fi + + # Wait for completion + local max_wait_time=300 + local start_time=$(date +%s) + local timeout=$((start_time + max_wait_time)) + + while true; do + local current_time=$(date +%s) + if [ "$current_time" -gt "$timeout" ]; then + error "Processor $processor_id execution timed out after $max_wait_time seconds" + return 1 + fi + + local processor_status + processor_status=$(curl_with_retry 3 "$NIFI_API_URL/processors/$processor_id" | jq '.status.aggregateSnapshot') + + local active_threads=$(echo "$processor_status" | jq '.activeThreadCount') + local processed_count=$(echo "$processor_status" | jq '.flowFilesProcessed') + + verbose "Processor $processor_id - Active Threads: $active_threads, Processed: $processed_count" + + if [ "$active_threads" -eq 0 ]; then + info "Processor $processor_id completed execution" + break + fi + sleep 2 + done +} + +update_nifi_connection() { + local controller_service_id=$1 + local db_url=$2 + local db_user=$3 + local db_pass=$4 + + info "Updating NiFi controller service $controller_service_id" + + local controller_config + controller_config=$(curl_with_retry 3 "$NIFI_API_URL/controller-services/$controller_service_id") + local revision_version=$(echo "$controller_config" | jq '.revision.version') + local current_name=$(echo "$controller_config" | jq -r '.component.name') + + # Disable the controller service first + info "Disabling controller service $controller_service_id" + curl_with_retry 3 "$NIFI_API_URL/controller-services/$controller_service_id/run-status" PUT "{\"state\": \"DISABLED\", \"revision\": { \"version\": $revision_version }}" + + # Wait until disabled + while true; do + controller_config=$(curl_with_retry 3 "$NIFI_API_URL/controller-services/$controller_service_id") + local current_state=$(echo "$controller_config" | jq -r '.component.state') + if [ "$current_state" == "DISABLED" ]; then + break + fi + warn "Controller service not yet disabled. Retrying..." 
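+        # Short fixed backoff before re-reading the service state.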
+        sleep 2
+    done
+
+    local updated_config
+    updated_config=$(jq -n \
+        --arg id "$controller_service_id" \
+        --arg name "$current_name" \
+        --arg db_url "$db_url" \
+        --arg db_user "$db_user" \
+        --arg db_pass "$db_pass" \
+        --argjson version "$revision_version" \
+        '{
+            "revision": { "version": $version },
+            "component": {
+                "id": $id,
+                "name": $name,
+                "properties": {
+                    "Database Connection URL": $db_url,
+                    "Database User": $db_user,
+                    "Password": $db_pass
+                }
+            }
+        }')
+
+    curl_with_retry 3 "$NIFI_API_URL/controller-services/$controller_service_id" PUT "$updated_config"
+    info "Controller service updated."
+
+    enable_controller_service "$controller_service_id"
+}
+
+main() {
+    while [[ $# -gt 0 ]]; do
+        case "$1" in
+            --debug)
+                DEBUG_LEVEL=$DEBUG_DEBUG
+                shift
+                ;;
+            --verbose)
+                DEBUG_LEVEL=$DEBUG_VERBOSE
+                shift
+                ;;
+            *)
+                break
+                ;;
+        esac
+    done
+
+    info "Starting Local NiFi Manager"
+    info "Debug Level: $DEBUG_LEVEL"
+
+    # Prepare DB URLs for local connections
+    local TFRS_DB_URL="jdbc:postgresql://localhost:$TFRS_DB_PORT/$TFRS_DB_NAME"
+    local LCFS_DB_URL="jdbc:postgresql://localhost:$LCFS_DB_PORT/$LCFS_DB_NAME"
+
+    info "Updating TFRS DB connection..."
+    update_nifi_connection "$TFRS_CONTROLLER_SERVICE_ID" "$TFRS_DB_URL" "$TFRS_DB_USER" "$TFRS_DB_PASS"
+
+    info "Updating LCFS DB connection..."
+    update_nifi_connection "$LCFS_CONTROLLER_SERVICE_ID" "$LCFS_DB_URL" "$LCFS_DB_USER" "$LCFS_DB_PASS"
+
+    info "Executing processors..."
+    execute_processor "$ORGANIZATION_PROCESSOR"
+    execute_processor "$USER_PROCESSOR"
+    execute_processor "$TRANSFER_PROCESSOR"
+    execute_processor "$TRANSACTIONS_PROCESSOR"
+
+    info "All processors executed successfully."
+    info "Check your local databases for the expected data."
+}
+
+main "$@"
diff --git a/etl/nifi/conf/authorizers.xml b/etl/nifi/conf/authorizers.xml
new file mode 100755
index 000000000..3b59cb35f
--- /dev/null
+++ b/etl/nifi/conf/authorizers.xml
@@ -0,0 +1,399 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!-- XML markup lost in extraction; minimal reconstruction of the stock single-user configuration from the surviving values. -->
+<authorizers>
+    <userGroupProvider>
+        <identifier>file-user-group-provider</identifier>
+        <class>org.apache.nifi.authorization.FileUserGroupProvider</class>
+        <property name="Users File">./conf/users.xml</property>
+    </userGroupProvider>
+    <accessPolicyProvider>
+        <identifier>file-access-policy-provider</identifier>
+        <class>org.apache.nifi.authorization.FileAccessPolicyProvider</class>
+        <property name="User Group Provider">file-user-group-provider</property>
+        <property name="Authorizations File">./conf/authorizations.xml</property>
+    </accessPolicyProvider>
+    <authorizer>
+        <identifier>managed-authorizer</identifier>
+        <class>org.apache.nifi.authorization.StandardManagedAuthorizer</class>
+        <property name="Access Policy Provider">file-access-policy-provider</property>
+    </authorizer>
+    <authorizer>
+        <identifier>single-user-authorizer</identifier>
+        <class>org.apache.nifi.authorization.single.user.SingleUserAuthorizer</class>
+    </authorizer>
+</authorizers>
diff --git a/etl/nifi/conf/bootstrap-aws.conf b/etl/nifi/conf/bootstrap-aws.conf
new file mode 100755
index 000000000..f624dec9b
--- /dev/null
+++ b/etl/nifi/conf/bootstrap-aws.conf
@@ -0,0 +1,27 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# AWS KMS Key ID is required to be configured for AWS KMS Sensitive Property Provider
+aws.kms.key.id=
+
+# NiFi uses the following properties to authenticate to AWS when all values are provided.
+# NiFi uses the default AWS credentials provider chain when one or more of the following properties are blank.
+# AWS SDK documentation describes the default credential retrieval order:
+# https://docs.aws.amazon.com/sdk-for-java/latest/developer-guide/credentials.html#credentials-chain
+aws.access.key.id=
+aws.secret.access.key=
+aws.region=
diff --git a/etl/nifi/conf/bootstrap-azure.conf b/etl/nifi/conf/bootstrap-azure.conf
new file mode 100755
index 000000000..49e318eaa
--- /dev/null
+++ b/etl/nifi/conf/bootstrap-azure.conf
@@ -0,0 +1,24 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Key Identifier for Azure Key Vault Key Sensitive Property Provider
+azure.keyvault.key.id=
+# Encryption Algorithm for Azure Key Vault Key Sensitive Property Provider
+azure.keyvault.encryption.algorithm=
+
+# Vault URI for Azure Key Vault Secret Sensitive Property Provider
+azure.keyvault.uri=
diff --git a/etl/nifi/conf/bootstrap-gcp.conf b/etl/nifi/conf/bootstrap-gcp.conf
new file mode 100755
index 000000000..440dad236
--- /dev/null
+++ b/etl/nifi/conf/bootstrap-gcp.conf
@@ -0,0 +1,22 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# These GCP KMS settings must all be configured in order to use the GCP KMS Sensitive Property Provider
+gcp.kms.project=
+gcp.kms.location=
+gcp.kms.keyring=
+gcp.kms.key=
\ No newline at end of file
diff --git a/etl/nifi/conf/bootstrap-hashicorp-vault.conf b/etl/nifi/conf/bootstrap-hashicorp-vault.conf
new file mode 100755
index 000000000..bf6975b96
--- /dev/null
+++ b/etl/nifi/conf/bootstrap-hashicorp-vault.conf
@@ -0,0 +1,53 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# HTTP or HTTPS URI for HashiCorp Vault is required to enable the Sensitive Properties Provider +vault.uri= + +# Transit Path is required to enable the Sensitive Properties Provider Protection Scheme 'hashicorp/vault/transit/{path}' +vault.transit.path= + +# Key/Value Path is required to enable the Sensitive Properties Provider Protection Scheme 'hashicorp/vault/kv/{path}' +vault.kv.path= +# Key/Value Secrets Engine version may be 1 or 2, and defaults to 1 +# vault.kv.version=1 + +# Token Authentication example properties +# vault.authentication=TOKEN +# vault.token= + +# Optional file supports authentication properties described in the Spring Vault Environment Configuration +# https://docs.spring.io/spring-vault/docs/2.3.x/reference/html/#vault.core.environment-vault-configuration +# +# All authentication properties must be included in bootstrap-hashicorp-vault.conf when this property is not specified. +# Properties in bootstrap-hashicorp-vault.conf take precedence when the same values are defined in both files. +# Token Authentication is the default when the 'vault.authentication' property is not specified. +vault.authentication.properties.file= + +# Optional Timeout properties +vault.connection.timeout=5 secs +vault.read.timeout=15 secs + +# Optional TLS properties +vault.ssl.enabledCipherSuites= +vault.ssl.enabledProtocols= +vault.ssl.key-store= +vault.ssl.key-store-type= +vault.ssl.key-store-password= +vault.ssl.trust-store= +vault.ssl.trust-store-type= +vault.ssl.trust-store-password= diff --git a/etl/nifi/conf/bootstrap-notification-services.xml b/etl/nifi/conf/bootstrap-notification-services.xml new file mode 100755 index 000000000..bbfc65a3b --- /dev/null +++ b/etl/nifi/conf/bootstrap-notification-services.xml @@ -0,0 +1,53 @@ + + + + + + + + + \ No newline at end of file diff --git a/etl/nifi/conf/bootstrap.conf b/etl/nifi/conf/bootstrap.conf new file mode 100755 index 000000000..7f8fb855f --- /dev/null +++ b/etl/nifi/conf/bootstrap.conf @@ -0,0 +1,119 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# Java command to use when running NiFi +java=java + +# Username to use when running NiFi. This value will be ignored on Windows. 
+run.as=
+
+# Preserve shell environment while running as "run.as" user
+preserve.environment=false
+
+# Configure where NiFi's lib and conf directories live
+lib.dir=./lib
+conf.dir=./conf
+
+# How long to wait after telling NiFi to shutdown before explicitly killing the Process
+graceful.shutdown.seconds=20
+
+# Disable JSR 199 so that we can use JSP's without running a JDK
+java.arg.1=-Dorg.apache.jasper.compiler.disablejsr199=true
+
+# JVM memory settings
+java.arg.2=-Xms1g
+java.arg.3=-Xmx1g
+
+# Enable Remote Debugging
+#java.arg.debug=-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=8000
+
+java.arg.4=-Djava.net.preferIPv4Stack=true
+
+# allowRestrictedHeaders is required for Cluster/Node communications to work properly
+java.arg.5=-Dsun.net.http.allowRestrictedHeaders=true
+java.arg.6=-Djava.protocol.handler.pkgs=sun.net.www.protocol
+
+# The G1GC is known to cause some problems in Java 8 and earlier, but the issues were addressed in Java 9. If using Java 8 or earlier,
+# it is recommended that G1GC not be used, especially in conjunction with the Write Ahead Provenance Repository. However, if using a newer
+# version of Java, it can result in better performance without significant "stop-the-world" delays.
+#java.arg.13=-XX:+UseG1GC
+
+#Set headless mode by default
+java.arg.14=-Djava.awt.headless=true
+
+# Root key in hexadecimal format for encrypted sensitive configuration values
+nifi.bootstrap.sensitive.key=
+
+# Sensitive Property Provider configuration
+
+# HashiCorp Vault Sensitive Property Providers
+#nifi.bootstrap.protection.hashicorp.vault.conf=./conf/bootstrap-hashicorp-vault.conf
+
+# AWS Sensitive Property Providers
+#nifi.bootstrap.protection.aws.conf=./conf/bootstrap-aws.conf
+
+# Azure Key Vault Sensitive Property Providers
+#nifi.bootstrap.protection.azure.keyvault.conf=./conf/bootstrap-azure.conf
+
+# GCP KMS Sensitive Property Providers
+#nifi.bootstrap.protection.gcp.kms.conf=./conf/bootstrap-gcp.conf
+
+# Sets the provider of SecureRandom to /dev/urandom to prevent blocking on VMs
+java.arg.15=-Djava.security.egd=file:/dev/urandom
+
+# Requires JAAS to use only the provided JAAS configuration to authenticate a Subject, without using any "fallback" methods (such as prompting for username/password)
+# Please see https://docs.oracle.com/javase/8/docs/technotes/guides/security/jgss/single-signon.html, section "EXCEPTIONS TO THE MODEL"
+java.arg.16=-Djavax.security.auth.useSubjectCredsOnly=true
+
+# Zookeeper 3.5 now includes an Admin Server that starts on port 8080; since NiFi is already using that port, it is disabled by default.
+# Please see https://zookeeper.apache.org/doc/current/zookeeperAdmin.html#sc_adminserver_config for configuration options.
+java.arg.17=-Dzookeeper.admin.enableServer=false
+
+# The following options configure a Java Agent to handle native library loading.
+# It is needed when a custom jar (e.g. a JDBC driver) has been configured on a component in the flow and this custom jar depends on a native library
+# and tries to load it by its absolute path (java.lang.System.load(String filename) method call).
+# Only use this Java Agent if you get "Native Library ... already loaded in another classloader" errors!
+#java.arg.18=-javaagent:./lib/aspectj/aspectjweaver-1.9.6.jar +#java.arg.19=-Daj.weaving.loadersToSkip=sun.misc.Launcher$AppClassLoader,jdk.internal.loader.ClassLoaders$AppClassLoader,org.eclipse.jetty.webapp.WebAppClassLoader,\ +# org.apache.jasper.servlet.JasperLoader,org.jvnet.hk2.internal.DelegatingClassLoader,org.apache.nifi.nar.NarClassLoader +# End of Java Agent config for native library loading. + +### +# Notification Services for notifying interested parties when NiFi is stopped, started, dies +### + +# XML File that contains the definitions of the notification services +notification.services.file=./conf/bootstrap-notification-services.xml + +# In the case that we are unable to send a notification for an event, how many times should we retry? +notification.max.attempts=5 + +# Comma-separated list of identifiers that are present in the notification.services.file; which services should be used to notify when NiFi is started? +#nifi.start.notification.services=email-notification + +# Comma-separated list of identifiers that are present in the notification.services.file; which services should be used to notify when NiFi is stopped? +#nifi.stop.notification.services=email-notification + +# Comma-separated list of identifiers that are present in the notification.services.file; which services should be used to notify when NiFi dies? +#nifi.dead.notification.services=email-notification + +# The first curator connection issue is logged as ERROR, for example when NiFi cannot connect to one of the Zookeeper nodes. +# Additional connection issues are logged as DEBUG until the connection is restored. +java.arg.curator.supress.excessive.logs=-Dcurator-log-only-first-connection-issue-as-error-level=true + +# Port used to listen for communications from NiFi. If this property is missing, empty, or 0, a random ephemeral port is used. 
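+# (The explicit 0 below therefore keeps the default behaviour: a random
+# ephemeral port is negotiated at startup.)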
+nifi.bootstrap.listen.port=0
diff --git a/etl/nifi/conf/flow.json.gz b/etl/nifi/conf/flow.json.gz
new file mode 100644
index 000000000..e2e4eaaef
Binary files /dev/null and b/etl/nifi/conf/flow.json.gz differ
diff --git a/etl/nifi/conf/flow.xml.gz b/etl/nifi/conf/flow.xml.gz
new file mode 100644
index 000000000..203856fcb
Binary files /dev/null and b/etl/nifi/conf/flow.xml.gz differ
diff --git a/etl/nifi/conf/logback.xml b/etl/nifi/conf/logback.xml
new file mode 100755
index 000000000..8924411d2
--- /dev/null
+++ b/etl/nifi/conf/logback.xml
@@ -0,0 +1,261 @@
+<!-- logback.xml: the XML markup of this file was lost in extraction. Recoverable
+     content: a console appender and rolling-file appenders writing nifi-app.log
+     (hourly/100MB rollover, 30 files of history), nifi-user.log, nifi-request.log,
+     nifi-bootstrap.log (5 files of history), nifi-deprecation.log (10MB/10 files,
+     100MB total cap) and a per-suffix nifi-app-${logFileSuffix}.log appender, all
+     using the pattern "%date %level [%thread] %logger{40} %msg%n". -->
diff --git a/etl/nifi/conf/login-identity-providers.xml b/etl/nifi/conf/login-identity-providers.xml
new file mode 100755
index 000000000..f7eac8685
--- /dev/null
+++ b/etl/nifi/conf/login-identity-providers.xml
@@ -0,0 +1,128 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!-- XML markup lost in extraction; minimal reconstruction from the surviving values. -->
+<loginIdentityProviders>
+    <provider>
+        <identifier>single-user-provider</identifier>
+        <class>org.apache.nifi.authentication.single.user.SingleUserLoginIdentityProvider</class>
+        <property name="Username"/>
+        <property name="Password"/>
+    </provider>
+</loginIdentityProviders>
diff --git a/etl/nifi/conf/nifi.properties b/etl/nifi/conf/nifi.properties
new file mode 100644
index 000000000..2fdf0aa83
--- /dev/null
+++ b/etl/nifi/conf/nifi.properties
@@ -0,0 +1,376 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Core Properties #
+nifi.flow.configuration.file=./conf/flow.xml.gz
+nifi.flow.configuration.json.file=./conf/flow.json.gz
+nifi.flow.configuration.archive.enabled=true
+nifi.flow.configuration.archive.dir=./conf/archive/
+nifi.flow.configuration.archive.max.time=30 days
+nifi.flow.configuration.archive.max.storage=500 MB
+nifi.flow.configuration.archive.max.count=
+nifi.flowcontroller.autoResumeState=true
+nifi.flowcontroller.graceful.shutdown.period=10 sec
+nifi.flowservice.writedelay.interval=500 ms
+nifi.administrative.yield.duration=30 sec
+# If a component has no work to do (is "bored"), how long should we wait before checking again for work?
+nifi.bored.yield.duration=10 millis
+nifi.queue.backpressure.count=10000
+nifi.queue.backpressure.size=1 GB
+
+nifi.authorizer.configuration.file=./conf/authorizers.xml
+nifi.login.identity.provider.configuration.file=./conf/login-identity-providers.xml
+nifi.templates.directory=./conf/templates
+nifi.ui.banner.text=
+nifi.ui.autorefresh.interval=30 sec
+nifi.nar.library.directory=./lib
+nifi.nar.library.autoload.directory=./extensions
+nifi.nar.working.directory=./work/nar/
+nifi.documentation.working.directory=./work/docs/components
+nifi.nar.unpack.uber.jar=false
+
+####################
+# State Management #
+####################
+nifi.state.management.configuration.file=./conf/state-management.xml
+# The ID of the local state provider
+nifi.state.management.provider.local=local-provider
+# The ID of the cluster-wide state provider. This will be ignored if NiFi is not clustered but must be populated if running in a cluster.
+nifi.state.management.provider.cluster=zk-provider
+# Specifies whether or not this instance of NiFi should run an embedded ZooKeeper server
+nifi.state.management.embedded.zookeeper.start=false
+# Properties file that provides the ZooKeeper properties to use if nifi.state.management.embedded.zookeeper.start is set to true
+nifi.state.management.embedded.zookeeper.properties=./conf/zookeeper.properties
+
+# Database Settings
+nifi.database.directory=./database_repository
+
+# Repository Encryption properties override individual repository implementation properties
+nifi.repository.encryption.protocol.version=
+nifi.repository.encryption.key.id=
+nifi.repository.encryption.key.provider=
+nifi.repository.encryption.key.provider.keystore.location=
+nifi.repository.encryption.key.provider.keystore.password=
+
+# FlowFile Repository
+nifi.flowfile.repository.implementation=org.apache.nifi.controller.repository.WriteAheadFlowFileRepository
+nifi.flowfile.repository.wal.implementation=org.apache.nifi.wali.SequentialAccessWriteAheadLog
+nifi.flowfile.repository.directory=./flowfile_repository
+nifi.flowfile.repository.checkpoint.interval=20 secs
+nifi.flowfile.repository.always.sync=false
+nifi.flowfile.repository.retain.orphaned.flowfiles=true
+
+nifi.swap.manager.implementation=org.apache.nifi.controller.FileSystemSwapManager
+nifi.queue.swap.threshold=20000
+
+# Content Repository
+nifi.content.repository.implementation=org.apache.nifi.controller.repository.FileSystemRepository
+nifi.content.claim.max.appendable.size=50 KB
+nifi.content.repository.directory.default=./content_repository
+nifi.content.repository.archive.max.retention.period=7 days
+nifi.content.repository.archive.max.usage.percentage=50%
+nifi.content.repository.archive.enabled=true
+nifi.content.repository.always.sync=false
+nifi.content.viewer.url=../nifi-content-viewer/
+
+# Provenance Repository Properties
+nifi.provenance.repository.implementation=org.apache.nifi.provenance.WriteAheadProvenanceRepository
+
+# Persistent Provenance Repository Properties
+nifi.provenance.repository.directory.default=./provenance_repository
+nifi.provenance.repository.max.storage.time=30 days
+nifi.provenance.repository.max.storage.size=10 GB
+nifi.provenance.repository.rollover.time=10 mins
+nifi.provenance.repository.rollover.size=100 MB
+nifi.provenance.repository.query.threads=2
+nifi.provenance.repository.index.threads=2
+nifi.provenance.repository.compress.on.rollover=true
+nifi.provenance.repository.always.sync=false
+# Comma-separated list of fields. Fields that are not indexed will not be searchable. Valid fields are:
+# EventType, FlowFileUUID, Filename, TransitURI, ProcessorID, AlternateIdentifierURI, Relationship, Details
+nifi.provenance.repository.indexed.fields=EventType, FlowFileUUID, Filename, ProcessorID, Relationship
+# FlowFile Attributes that should be indexed and made searchable. Some examples to consider are filename, uuid, mime.type
+nifi.provenance.repository.indexed.attributes=
+# Large values for the shard size will result in more Java heap usage when searching the Provenance Repository
+# but should provide better performance
+nifi.provenance.repository.index.shard.size=500 MB
+# Indicates the maximum length that a FlowFile attribute can be when retrieving a Provenance Event from
+# the repository. If the length of any attribute exceeds this value, it will be truncated when the event is retrieved.
+nifi.provenance.repository.max.attribute.length=65536
+nifi.provenance.repository.concurrent.merge.threads=2
+
+
+# Volatile Provenance Repository Properties
+nifi.provenance.repository.buffer.size=100000
+
+# Component and Node Status History Repository
+nifi.components.status.repository.implementation=org.apache.nifi.controller.status.history.VolatileComponentStatusRepository
+
+# Volatile Status History Repository Properties
+nifi.components.status.repository.buffer.size=1440
+nifi.components.status.snapshot.frequency=1 min
+
+# QuestDB Status History Repository Properties
+nifi.status.repository.questdb.persist.node.days=14
+nifi.status.repository.questdb.persist.component.days=3
+nifi.status.repository.questdb.persist.location=./status_repository
+
+# Site to Site properties
+nifi.remote.input.host=nifi
+nifi.remote.input.secure=false
+nifi.remote.input.socket.port=10000
+nifi.remote.input.http.enabled=true
+nifi.remote.input.http.transaction.ttl=30 sec
+nifi.remote.contents.cache.expiration=30 secs
+
+# web properties #
+#############################################
+
+# For security, NiFi will present the UI on 127.0.0.1 and only be accessible through this loopback interface.
+# Be aware that changing these properties may make your instance accessible without any restriction.
+# We recommend configuring HTTPS instead. The administrators guide provides instructions on how to do this.
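+# (This deployment overrides the loopback default with the host name "nifi",
+# presumably the compose/service hostname of the ETL stack, so the unsecured
+# UI is reachable from the other containers on port 8080.)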
+ +nifi.web.http.host=nifi +nifi.web.http.port=8080 +nifi.web.http.network.interface.default= + +############################################# + +nifi.web.https.host= +nifi.web.https.port= +nifi.web.https.network.interface.default= +nifi.web.https.application.protocols=http/1.1 +nifi.web.jetty.working.directory=./work/jetty +nifi.web.jetty.threads=200 +nifi.web.max.header.size=16 KB +nifi.web.proxy.context.path= +nifi.web.proxy.host= +nifi.web.max.content.size= +nifi.web.max.requests.per.second=30000 +nifi.web.max.access.token.requests.per.second=25 +nifi.web.request.timeout=60 secs +nifi.web.request.ip.whitelist= +nifi.web.should.send.server.version=true +nifi.web.request.log.format=%{client}a - %u %t "%r" %s %O "%{Referer}i" "%{User-Agent}i" + +# Filter JMX MBeans available through the System Diagnostics REST API +nifi.web.jmx.metrics.allowed.filter.pattern= + +# Include or Exclude TLS Cipher Suites for HTTPS +nifi.web.https.ciphersuites.include= +nifi.web.https.ciphersuites.exclude= + +# security properties # +nifi.sensitive.props.key='12345678901234567890A' +nifi.sensitive.props.key.protected= +nifi.sensitive.props.algorithm=NIFI_PBKDF2_AES_GCM_256 +nifi.sensitive.props.additional.keys= + +nifi.security.autoreload.enabled=false +nifi.security.autoreload.interval=10 secs +nifi.security.keystore= +nifi.security.keystoreType= +nifi.security.keystorePasswd= +nifi.security.keyPasswd= +nifi.security.truststore= +nifi.security.truststoreType= +nifi.security.truststorePasswd= +nifi.security.user.authorizer=single-user-authorizer +nifi.security.allow.anonymous.authentication=false +nifi.security.user.login.identity.provider= +nifi.security.user.jws.key.rotation.period=PT1H +nifi.security.ocsp.responder.url= +nifi.security.ocsp.responder.certificate= + +# OpenId Connect SSO Properties # +nifi.security.user.oidc.discovery.url= +nifi.security.user.oidc.connect.timeout=5 secs +nifi.security.user.oidc.read.timeout=5 secs +nifi.security.user.oidc.client.id= +nifi.security.user.oidc.client.secret= +nifi.security.user.oidc.preferred.jwsalgorithm= +nifi.security.user.oidc.additional.scopes=offline_access +nifi.security.user.oidc.claim.identifying.user= +nifi.security.user.oidc.fallback.claims.identifying.user= +nifi.security.user.oidc.claim.groups=groups +nifi.security.user.oidc.truststore.strategy=JDK +nifi.security.user.oidc.token.refresh.window=60 secs + +# Apache Knox SSO Properties # +nifi.security.user.knox.url= +nifi.security.user.knox.publicKey= +nifi.security.user.knox.cookieName=hadoop-jwt +nifi.security.user.knox.audiences= + +# SAML Properties # +nifi.security.user.saml.idp.metadata.url= +nifi.security.user.saml.sp.entity.id= +nifi.security.user.saml.identity.attribute.name= +nifi.security.user.saml.group.attribute.name= +nifi.security.user.saml.request.signing.enabled=false +nifi.security.user.saml.want.assertions.signed=true +nifi.security.user.saml.signature.algorithm=http://www.w3.org/2001/04/xmldsig-more#rsa-sha256 +nifi.security.user.saml.authentication.expiration=12 hours +nifi.security.user.saml.single.logout.enabled=false +nifi.security.user.saml.http.client.truststore.strategy=JDK +nifi.security.user.saml.http.client.connect.timeout=30 secs +nifi.security.user.saml.http.client.read.timeout=30 secs + +# Identity Mapping Properties # +# These properties allow normalizing user identities such that identities coming from different identity providers +# (certificates, LDAP, Kerberos) can be treated the same internally in NiFi. 
The following example demonstrates normalizing +# DNs from certificates and principals from Kerberos into a common identity string: +# +# nifi.security.identity.mapping.pattern.dn=^CN=(.*?), OU=(.*?), O=(.*?), L=(.*?), ST=(.*?), C=(.*?)$ +# nifi.security.identity.mapping.value.dn=$1@$2 +# nifi.security.identity.mapping.transform.dn=NONE +# nifi.security.identity.mapping.pattern.kerb=^(.*?)/instance@(.*?)$ +# nifi.security.identity.mapping.value.kerb=$1@$2 +# nifi.security.identity.mapping.transform.kerb=UPPER + +# Group Mapping Properties # +# These properties allow normalizing group names coming from external sources like LDAP. The following example +# lowercases any group name. +# +# nifi.security.group.mapping.pattern.anygroup=^(.*)$ +# nifi.security.group.mapping.value.anygroup=$1 +# nifi.security.group.mapping.transform.anygroup=LOWER + +# Listener Bootstrap properties # +# This property defines the port used to listen for communications from NiFi Bootstrap. If this property +# is missing, empty, or 0, a random ephemeral port is used. +nifi.listener.bootstrap.port=0 + +# cluster common properties (all nodes must have same values) # +nifi.cluster.protocol.heartbeat.interval=5 sec +nifi.cluster.protocol.heartbeat.missable.max=8 +nifi.cluster.protocol.is.secure=false + +# cluster node properties (only configure for cluster nodes) # +nifi.cluster.is.node=true +nifi.cluster.node.address=nifi +nifi.cluster.node.protocol.port=8082 +nifi.cluster.node.protocol.max.threads=50 +nifi.cluster.node.event.history.size=25 +nifi.cluster.node.connection.timeout=5 sec +nifi.cluster.node.read.timeout=5 sec +nifi.cluster.node.max.concurrent.requests=100 +nifi.cluster.firewall.file= +nifi.cluster.flow.election.max.wait.time=30 sec +nifi.cluster.flow.election.max.candidates= + +# cluster load balancing properties # +nifi.cluster.load.balance.host= +nifi.cluster.load.balance.port=6342 +nifi.cluster.load.balance.connections.per.node=1 +nifi.cluster.load.balance.max.thread.count=8 +nifi.cluster.load.balance.comms.timeout=30 sec + +# zookeeper properties, used for cluster management # +nifi.zookeeper.connect.string=myzookeeper:2181 +nifi.zookeeper.connect.timeout=10 secs +nifi.zookeeper.session.timeout=10 secs +nifi.zookeeper.root.node=/nifi +nifi.zookeeper.client.secure=false +nifi.zookeeper.security.keystore= +nifi.zookeeper.security.keystoreType= +nifi.zookeeper.security.keystorePasswd= +nifi.zookeeper.security.truststore= +nifi.zookeeper.security.truststoreType= +nifi.zookeeper.security.truststorePasswd= +nifi.zookeeper.jute.maxbuffer= + +# Zookeeper properties for the authentication scheme used when creating acls on znodes used for cluster management +# Values supported for nifi.zookeeper.auth.type are "default", which will apply world/anyone rights on znodes +# and "sasl" which will give rights to the sasl/kerberos identity used to authenticate the nifi node +# The identity is determined using the value in nifi.kerberos.service.principal and the removeHostFromPrincipal +# and removeRealmFromPrincipal values (which should align with the kerberos.removeHostFromPrincipal and kerberos.removeRealmFromPrincipal +# values configured on the zookeeper server). 
+nifi.zookeeper.auth.type=
+nifi.zookeeper.kerberos.removeHostFromPrincipal=
+nifi.zookeeper.kerberos.removeRealmFromPrincipal=
+
+# kerberos #
+nifi.kerberos.krb5.file=
+
+# kerberos service principal #
+nifi.kerberos.service.principal=
+nifi.kerberos.service.keytab.location=
+
+# kerberos spnego principal #
+nifi.kerberos.spnego.principal=
+nifi.kerberos.spnego.keytab.location=
+nifi.kerberos.spnego.authentication.expiration=12 hours
+
+# external properties files for variable registry
+# supports a comma delimited list of file locations
+nifi.variable.registry.properties=
+
+# analytics properties #
+nifi.analytics.predict.enabled=false
+nifi.analytics.predict.interval=3 mins
+nifi.analytics.query.interval=5 mins
+nifi.analytics.connection.model.implementation=org.apache.nifi.controller.status.analytics.models.OrdinaryLeastSquares
+nifi.analytics.connection.model.score.name=rSquared
+nifi.analytics.connection.model.score.threshold=.90
+
+# runtime monitoring properties
+nifi.monitor.long.running.task.schedule=
+nifi.monitor.long.running.task.threshold=
+
+# Enable automatic diagnostic at shutdown.
+nifi.diagnostics.on.shutdown.enabled=false
+
+# Include verbose diagnostic information.
+nifi.diagnostics.on.shutdown.verbose=false
+
+# The location of the diagnostics folder.
+nifi.diagnostics.on.shutdown.directory=./diagnostics
+
+# The maximum number of files permitted in the directory. If the limit is exceeded, the oldest files are deleted.
+nifi.diagnostics.on.shutdown.max.filecount=10
+
+# The diagnostics folder's maximum permitted size in bytes. If the limit is exceeded, the oldest files are deleted.
+nifi.diagnostics.on.shutdown.max.directory.size=10 MB
+
+# Performance tracking properties
+## Specifies what percentage of the time we should track the amount of time processors are using CPU, reading from/writing to content repo, etc.
+## This can be useful to understand which components are the most expensive and to understand where system bottlenecks may be occurring.
+## The value must be in the range of 0 (inclusive) to 100 (inclusive). A larger value will produce more accurate results, while a smaller value may be
+## less expensive to compute.
+## Results can be obtained by running "nifi.sh diagnostics <filename>" and then inspecting the produced file.
+nifi.performance.tracking.percentage=0
+
+# NAR Provider Properties #
+# These properties allow configuring one or more NAR providers. A NAR provider retrieves NARs from an external source
+# and copies them to the directory specified by nifi.nar.library.autoload.directory.
+#
+# Each NAR provider property follows the format:
+# nifi.nar.library.provider.<provider-name>.<property-name>
+#
+# Each NAR provider must have at least one property named "implementation".
+#
+# Example HDFS NAR Provider:
+# nifi.nar.library.provider.hdfs.implementation=org.apache.nifi.flow.resource.hadoop.HDFSExternalResourceProvider
+# nifi.nar.library.provider.hdfs.resources=/path/to/core-site.xml,/path/to/hdfs-site.xml
+# nifi.nar.library.provider.hdfs.storage.location=hdfs://hdfs-location
+# nifi.nar.library.provider.hdfs.source.directory=/nars
+# nifi.nar.library.provider.hdfs.kerberos.principal=nifi@NIFI.COM
+# nifi.nar.library.provider.hdfs.kerberos.keytab=/path/to/nifi.keytab
+# nifi.nar.library.provider.hdfs.kerberos.password=
+#
+# Example NiFi Registry NAR Provider:
+# nifi.nar.library.provider.nifi-registry.implementation=org.apache.nifi.registry.extension.NiFiRegistryNarProvider
+# nifi.nar.library.provider.nifi-registry.url=http://localhost:18080
diff --git a/etl/nifi/conf/state-management.xml b/etl/nifi/conf/state-management.xml
new file mode 100755
index 000000000..a57cb5e91
--- /dev/null
+++ b/etl/nifi/conf/state-management.xml
@@ -0,0 +1,138 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!-- XML markup lost in extraction; minimal reconstruction from the surviving values. -->
+<stateManagement>
+    <local-provider>
+        <id>local-provider</id>
+        <class>org.apache.nifi.controller.state.providers.local.WriteAheadLocalStateProvider</class>
+        <property name="Directory">./state/local</property>
+        <property name="Always Sync">false</property>
+        <property name="Partitions">16</property>
+        <property name="Checkpoint Interval">2 mins</property>
+    </local-provider>
+    <cluster-provider>
+        <id>zk-provider</id>
+        <class>org.apache.nifi.controller.state.providers.zookeeper.ZooKeeperStateProvider</class>
+        <property name="Connect String">myzookeeper:2181</property>
+        <property name="Root Node">/nifi</property>
+        <property name="Session Timeout">10 seconds</property>
+        <property name="Access Control">Open</property>
+    </cluster-provider>
+</stateManagement>
diff --git a/etl/nifi/conf/stateless-logback.xml b/etl/nifi/conf/stateless-logback.xml
new file mode 100755
index 000000000..0b8602f9a
--- /dev/null
+++ b/etl/nifi/conf/stateless-logback.xml
@@ -0,0 +1,77 @@
+<!-- stateless-logback.xml: XML markup lost in extraction. Recoverable content:
+     a rolling-file appender writing nifi-stateless.log (hourly/100MB rollover,
+     30 files of history) and a console appender, both using the pattern
+     "%date %level [%thread] %logger{40} %msg%n". -->
diff --git a/etl/nifi/conf/stateless.properties b/etl/nifi/conf/stateless.properties
new file mode 100755
index 000000000..a3ec79cca
--- /dev/null
+++ b/etl/nifi/conf/stateless.properties
@@ -0,0 +1,43 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# NAR and Working Directories #
+nifi.stateless.nar.directory=./lib
+nifi.stateless.working.directory=./work/stateless
+
+# Security Properties #
+nifi.stateless.security.keystore=
+nifi.stateless.security.keystoreType=
+nifi.stateless.security.keystorePasswd=
+nifi.stateless.security.keyPasswd=
+nifi.stateless.security.truststore=
+nifi.stateless.security.truststoreType=
+nifi.stateless.security.truststorePasswd=
+nifi.stateless.sensitive.props.key=
+
+# Extension Client Configuration #
+# Zero or more Nexus repositories may be specified here.
+# The following client communicates with Maven Central. To use it, uncomment the following 4 lines.
+# Additional clients can be added by adding duplicate properties but changing mvn-central to a different key.
+#nifi.stateless.extension.client.mvn-central.type=nexus
+#nifi.stateless.extension.client.mvn-central.timeout=30 sec
+#nifi.stateless.extension.client.mvn-central.baseUrl=https://repo1.maven.org/maven2/
+#nifi.stateless.extension.client.mvn-central.useSslContext=false
+
+# Schedule for status logging task
+nifi.stateless.status.task.interval=1 min
+
+# Kerberos Properties #
+nifi.stateless.kerberos.krb5.file=/etc/krb5.conf
diff --git a/etl/nifi/conf/zookeeper.properties b/etl/nifi/conf/zookeeper.properties
new file mode 100755
index 000000000..a86818ce9
--- /dev/null
+++ b/etl/nifi/conf/zookeeper.properties
@@ -0,0 +1,71 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+#
+
+initLimit=10
+autopurge.purgeInterval=24
+syncLimit=5
+tickTime=2000
+dataDir=./state/zookeeper
+autopurge.snapRetainCount=30
+
+# Embedded/distributed ZK TLS connection support can be activated by setting these properties at minimum:
+#
+# secureClientPort=2281
+# serverCnxnFactory=org.apache.zookeeper.server.NettyServerCnxnFactory
+
+# Most TLS configurations will set these values as well:
+#
+# ssl.keyStore.location=/example/path/to/key-store.jks
+# ssl.keyStore.password=change this value to the actual value in your installation
+# ssl.trustStore.location=/example/path/to/trust-store.jks
+# ssl.trustStore.password=change this value to the actual value in your installation
+# ssl.hostnameVerification=false
+#
+# Note that many ZK parameters can be set as Java system properties; refer to the ZK admin guide for details:
+#
+# https://zookeeper.apache.org/doc/current/zookeeperAdmin.html#sc_configuration

+# Other common settings:
+#
+# client.portUnification=true
+# admin.enableServer=false
+
+# The server string has changed as of 3.5.5 and the client port is now specified at the end of the server string:
+# https://zookeeper.apache.org/doc/r3.5.5/zookeeperReconfig.html#sc_reconfig_clientport
+#
+# Specifies the servers that are part of this zookeeper ensemble. For
+# every NiFi instance running an embedded zookeeper, there needs to be
+# a server entry below. Client port is now specified at the end of the string
+# after a semi-colon.
+#
+# For instance:
+#
+# server.1=nifi-node1-hostname:2888:3888;2181
+# server.2=nifi-node2-hostname:2888:3888;2181
+# server.3=nifi-node3-hostname:2888:3888;2181
+#
+# The index of the server corresponds to the myid file that gets created
+# in the dataDir of each node running an embedded zookeeper. See the
+# administration guide for more details.
+# + +server.1= \ No newline at end of file diff --git a/etl/nifi_output/.gitkeep b/etl/nifi_output/.gitkeep new file mode 100644 index 000000000..e69de29bb diff --git a/etl/nifi_scripts/adminAdjTrxn.groovy b/etl/nifi_scripts/adminAdjTrxn.groovy new file mode 100644 index 000000000..27806df95 --- /dev/null +++ b/etl/nifi_scripts/adminAdjTrxn.groovy @@ -0,0 +1,440 @@ +import groovy.json.JsonSlurper +import java.sql.Connection +import java.sql.PreparedStatement +import java.sql.ResultSet +import java.time.OffsetDateTime +import java.sql.Timestamp + +log.warn("**** STARTED ADMIN ADJUSTMENT ETL ****") + +def SOURCE_QUERY = """ +WITH + internal_comment AS ( + SELECT + ctc.id, + ctc.credit_trade_id, + ctc.credit_trade_comment, + ctc.create_user_id, + ctc.create_timestamp, + STRING_AGG (r."name", '; ') AS role_names + FROM + credit_trade_comment ctc + JOIN "user" u ON u.id = ctc.create_user_id + AND u.organization_id = 1 + AND ctc.is_privileged_access = TRUE + JOIN user_role ur ON ur.user_id = u.id + JOIN "role" r ON ur.role_id = r.id + GROUP BY + ctc.id, + ctc.credit_trade_id, + ctc.credit_trade_comment, + ctc.create_user_id, + ctc.create_timestamp + ORDER BY + ctc.credit_trade_id, + ctc.create_timestamp + ) + SELECT + ct.id AS admin_adjustment_id, + ct.respondent_id AS to_organization_id, + ct.date_of_written_agreement AS agreement_date, + ct.trade_effective_date AS transaction_effective_date, + ct.number_of_credits AS compliance_units, + ct.create_user_id as create_user, + ct.update_user_id as update_user, + ct.update_timestamp as update_date, + ct.create_timestamp as create_date, + -- Aggregate comments from government with internal comment handling + STRING_AGG (DISTINCT gov_ctc.credit_trade_comment, '; ') AS gov_comment, + -- JSON aggregation for internal comments + json_agg (row_to_json (internal_comment)) AS internal_comments, + -- JSON aggregation for credit trade history + json_agg ( + json_build_object ( + 'admin_adjustment_id', + cth.credit_trade_id, + 'admin_adjustment_status', + case + WHEN cts_history.status IN ('Cancelled', 'Not Recommended', 'Declined', 'Refused') or ct.is_rescinded = true THEN 'Deleted' + WHEN cts_history.status IN ('Accepted', 'Submitted', 'Recommended') THEN 'Recommended' + WHEN cts_history.status IN ('Approved', 'Recorded') THEN 'Approved' + ELSE 'Draft' + END, + 'user_profile_id', + cth.create_user_id, + 'create_timestamp', + cth.create_timestamp + ) + ) AS credit_trade_history, + case + WHEN cts.status IN ('Cancelled', 'Not Recommended', 'Declined', 'Refused') or ct.is_rescinded = true THEN 'Deleted' + WHEN cts.status IN ('Accepted', 'Submitted', 'Recommended') THEN 'Recommended' + WHEN cts.status IN ('Approved', 'Recorded') THEN 'Approved' + ELSE 'Draft' + END AS current_status, cts.status + FROM + credit_trade ct + JOIN credit_trade_type ctt ON ct.type_id = ctt.id + LEFT OUTER JOIN credit_trade_category ctc ON ct.trade_category_id = ctc.id + JOIN credit_trade_status cts ON ct.status_id = cts.id + LEFT JOIN credit_trade_zero_reason ctzr ON ctzr.id = ct.zero_reason_id + AND ctzr.reason = 'Internal' + -- Join for Initiator Comments + LEFT JOIN credit_trade_comment from_ctc ON from_ctc.credit_trade_id = ct.id + AND from_ctc.create_user_id IN ( + SELECT + u.id + FROM + "user" u + WHERE + u.organization_id = ct.initiator_id + ) + -- Join for Respondent Comments + LEFT JOIN credit_trade_comment to_ctc ON to_ctc.credit_trade_id = ct.id + AND to_ctc.create_user_id IN ( + SELECT + u.id + FROM + "user" u + WHERE + u.organization_id = ct.respondent_id + ) + -- Join 
for Government Comments + LEFT JOIN credit_trade_comment gov_ctc ON gov_ctc.credit_trade_id = ct.id + AND gov_ctc.create_user_id IN ( + SELECT + u.id + FROM + "user" u + WHERE + u.organization_id = 1 + AND gov_ctc.is_privileged_access = FALSE + ) + -- Join the internal comment logic for role-based filtering and audience_scope + LEFT JOIN internal_comment ON internal_comment.credit_trade_id = ct.id + -- Join for credit trade history + LEFT JOIN credit_trade_history cth ON cth.credit_trade_id = ct.id + JOIN credit_trade_status cts_history ON cth.status_id = cts_history.id + WHERE + ctt.the_type IN ('Administrative Adjustment') + GROUP BY + ct.id, + ct.respondent_id, + ct.date_of_written_agreement, + ct.trade_effective_date, + ct.number_of_credits, + cts.status, + ctzr.description, + internal_comment.role_names; + """ + +// Fetch connections to both the source and destination databases +def sourceDbcpService = context.controllerServiceLookup.getControllerService('3245b078-0192-1000-ffff-ffffba20c1eb') +def destinationDbcpService = context.controllerServiceLookup.getControllerService('3244bf63-0192-1000-ffff-ffffc8ec6d93') + +Connection sourceConn = null +Connection destinationConn = null + +try { + sourceConn = sourceDbcpService.getConnection() + destinationConn = destinationDbcpService.getConnection() + destinationConn.setAutoCommit(false) + + // Fetch status and category data once and cache it + def preparedData = prepareData(destinationConn) + + def statements = prepareStatements(destinationConn) + + destinationConn.createStatement().execute('DROP FUNCTION IF EXISTS refresh_transaction_aggregate() CASCADE;') + destinationConn.createStatement().execute(""" + CREATE OR REPLACE FUNCTION refresh_transaction_aggregate() + RETURNS void AS \$\$ + BEGIN + -- Temporarily disable the materialized view refresh + END; + \$\$ LANGUAGE plpgsql; + """) + + PreparedStatement sourceStmt = sourceConn.prepareStatement(SOURCE_QUERY) + ResultSet resultSet = sourceStmt.executeQuery() + + int recordCount = 0 + + while (resultSet.next()) { + recordCount++ + def jsonSlurper = new JsonSlurper() + def internalComments = resultSet.getString('internal_comments') + def creditTradeHistory = resultSet.getString('credit_trade_history') + + def internalCommentsJson = internalComments ? jsonSlurper.parseText(internalComments) : [] + def creditTradeHistoryJson = creditTradeHistory ? 
jsonSlurper.parseText(creditTradeHistory) : [] + + def toTransactionId = processTransactions(resultSet.getString('current_status'), + resultSet, statements.transactionStmt) + + def adminAdjustmentId = insertadminAdjustment(resultSet, statements.adminAdjustmentStmt, + toTransactionId, preparedData, destinationConn) + + if (adminAdjustmentId) { + processHistory(adminAdjustmentId, creditTradeHistoryJson, statements.historyStmt, preparedData) + processInternalComments(adminAdjustmentId, internalCommentsJson, statements.internalCommentStmt, + statements.adminAdjustmentInternalCommentStmt) + } else { + log.warn("admin-adjustment not inserted for record: ${resultSet.getInt('admin_adjustment_id')}") + } + } + resultSet.close() + destinationConn.createStatement().execute(""" + CREATE OR REPLACE FUNCTION refresh_transaction_aggregate() + RETURNS void AS \$\$ + BEGIN + REFRESH MATERIALIZED VIEW CONCURRENTLY mv_transaction_aggregate; + END; + \$\$ LANGUAGE plpgsql; + """) + destinationConn.createStatement().execute('REFRESH MATERIALIZED VIEW CONCURRENTLY mv_transaction_aggregate') + + destinationConn.commit() + log.debug("Processed ${recordCount} records successfully.") +} catch (Exception e) { + log.error('Error occurred while processing data', e) + destinationConn?.rollback() +} finally { + if (sourceConn != null) sourceConn.close() + if (destinationConn != null) destinationConn.close() +} + +def logResultSetRow(ResultSet rs) { + def metaData = rs.getMetaData() + def columnCount = metaData.getColumnCount() + def rowData = [:] + + for (int i = 1; i <= columnCount; i++) { + def columnName = metaData.getColumnName(i) + def columnValue = rs.getObject(i) + rowData[columnName] = columnValue + } + + log.debug("Row data: ${rowData}") +} + +def loadTableData(Connection conn, String query, String keyColumn, String valueColumn) { + def dataMap = [:] + def stmt = conn.createStatement() + def rs = stmt.executeQuery(query) + + while (rs.next()) { + def key = rs.getString(keyColumn) + def value = rs.getInt(valueColumn) + if (dataMap.containsKey(key)) { + log.warn("Duplicate key found for ${key}. Existing value: ${dataMap[key]}, New value: ${value}.") + } + dataMap[key] = value + } + + rs.close() + stmt.close() + return dataMap +} + +def prepareData(Connection conn) { + def statusMap = loadTableData(conn, + 'SELECT DISTINCT status, MIN(admin_adjustment_status_id) AS admin_adjustment_status_id FROM admin_adjustment_status GROUP BY status', + 'status', + 'admin_adjustment_status_id' + ) + return [ + statusMap : statusMap + ] +} + +def getStatusId(String status, Map preparedData) { + return preparedData.statusMap[status] +} + +def prepareStatements(Connection conn) { + def INSERT_admin_adjustment_SQL = ''' + INSERT INTO admin_adjustment ( + to_organization_id, transaction_id, transaction_effective_date, compliance_units, gov_comment, + current_status_id, create_date, update_date, create_user, update_user, effective_status, + admin_adjustment_id + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, true, ?) + RETURNING admin_adjustment_id + ''' + def INSERT_admin_adjustment_HISTORY_SQL = ''' + INSERT INTO admin_adjustment_history ( + admin_adjustment_history_id, admin_adjustment_id, admin_adjustment_status_id, user_profile_id, create_date, effective_status + ) VALUES (DEFAULT, ?, ?, ?, ?, true) + ''' + def INSERT_INTERNAL_COMMENT_SQL = ''' + INSERT INTO internal_comment ( + internal_comment_id, comment, audience_scope, create_user, create_date + ) VALUES (DEFAULT, ?, ?::audience_scope, ?, ?) 
+ RETURNING internal_comment_id + ''' + def INSERT_admin_adjustment_INTERNAL_COMMENT_SQL = ''' + INSERT INTO admin_adjustment_internal_comment ( + admin_adjustment_id, internal_comment_id + ) VALUES (?, ?) + ''' + def INSERT_TRANSACTION_SQL = ''' + INSERT INTO transaction ( + transaction_id, compliance_units, organization_id, transaction_action, effective_date, create_user, create_date, effective_status + ) VALUES (DEFAULT, ?, ?, ?::transaction_action_enum, ?, ?, ?, true) + RETURNING transaction_id + ''' + return [adminAdjustmentStmt : conn.prepareStatement(INSERT_admin_adjustment_SQL), + historyStmt : conn.prepareStatement(INSERT_admin_adjustment_HISTORY_SQL), + internalCommentStmt : conn.prepareStatement(INSERT_INTERNAL_COMMENT_SQL), + adminAdjustmentInternalCommentStmt + : conn.prepareStatement(INSERT_admin_adjustment_INTERNAL_COMMENT_SQL), + transactionStmt : conn.prepareStatement(INSERT_TRANSACTION_SQL)] +} + +def toSqlTimestamp(String timestampString) { + try { + // Parse the ISO 8601 timestamp and convert to java.sql.Timestamp + def offsetDateTime = OffsetDateTime.parse(timestampString) + return Timestamp.from(offsetDateTime.toInstant()) + } catch (Exception e) { + log.error("Invalid timestamp format: ${timestampString}, defaulting to '1970-01-01T00:00:00Z'") + return Timestamp.valueOf('1970-01-01 00:00:00') + } +} + +def processTransactions(String currentStatus, ResultSet rs, PreparedStatement stmt) { + def toTransactionId = null + + if (currentStatus == 'Approved') { + toTransactionId = insertTransaction(stmt, rs, 'Adjustment', rs.getInt('to_organization_id')) + } + + return toTransactionId +} + +def insertTransaction(PreparedStatement stmt, ResultSet rs, String action, int orgId) { + stmt.setInt(1, rs.getInt('compliance_units')) + stmt.setInt(2, orgId) + stmt.setString(3, action) + stmt.setDate(4, rs.getDate('transaction_effective_date') ?: rs.getDate('agreement_date')) + stmt.setInt(5, rs.getInt('create_user')) + stmt.setTimestamp(6, rs.getTimestamp('create_date')) + + def result = stmt.executeQuery() + return result.next() ? 
result.getInt('transaction_id') : null +} + +def processHistory(Integer adminAdjustmentId, List creditTradeHistory, PreparedStatement historyStmt, Map preparedData) { + if (!creditTradeHistory) return + + // Use a Set to track unique combinations of admin_adjustment_id and admin_adjustment_status + def processedEntries = new HashSet() + + creditTradeHistory.each { historyItem -> + try { + def statusId = getStatusId(historyItem.admin_adjustment_status, preparedData) + def uniqueKey = "${adminAdjustmentId}_${statusId}" + + // Check if this combination has already been processed + if (!processedEntries.contains(uniqueKey)) { + // If not processed, add to batch and mark as processed + historyStmt.setInt(1, adminAdjustmentId) + historyStmt.setInt(2, statusId) + historyStmt.setInt(3, historyItem.user_profile_id) + historyStmt.setTimestamp(4, toSqlTimestamp(historyItem.create_timestamp ?: '2013-01-01T00:00:00Z')) + historyStmt.addBatch() + + processedEntries.add(uniqueKey) + } + } catch (Exception e) { + log.error("Error processing history record for admin_adjustment_id: ${adminAdjustmentId}", e) + } + } + + // Execute batch + historyStmt.executeBatch() +} + + +def processInternalComments(Integer adminAdjustmentId, List internalComments, + PreparedStatement internalCommentStmt, + PreparedStatement adminAdjustmentInternalCommentStmt) { + if (!internalComments) return + + internalComments.each { comment -> + if (!comment) return // Skip null comments + + try { + // Insert the internal comment + internalCommentStmt.setString(1, comment.credit_trade_comment ?: '') + internalCommentStmt.setString(2, getAudienceScope(comment.role_names ?: '')) + internalCommentStmt.setObject(3, comment.create_user_id) // setObject maps a missing user id to SQL NULL; setInt(..., null) would throw on unboxing + internalCommentStmt.setTimestamp(4, toSqlTimestamp(comment.create_timestamp ?: '2013-01-01T00:00:00Z')) + + def internalCommentId = null + def commentResult = internalCommentStmt.executeQuery() + if (commentResult.next()) { + internalCommentId = commentResult.getInt('internal_comment_id') + + // Insert the admin-adjustment-comment relationship + adminAdjustmentInternalCommentStmt.setInt(1, adminAdjustmentId) + adminAdjustmentInternalCommentStmt.setInt(2, internalCommentId) + adminAdjustmentInternalCommentStmt.executeUpdate() + } + + commentResult.close() + } catch (Exception e) { + log.error("Error processing internal comment for admin-adjustment ${adminAdjustmentId}: ${e.getMessage()}", e) + } + } +} + +// Helper function to determine audience scope based on role names +def getAudienceScope(String roleNames) { + if (!roleNames) return 'Analyst' + + switch (true) { + case roleNames.contains('GovDirector'): + return 'Director' + case roleNames.contains('GovComplianceManager'): + return 'Compliance Manager' + default: + return 'Analyst' + } +} + +def insertadminAdjustment(ResultSet rs, PreparedStatement adminAdjustmentStmt, + Long toTransactionId, Map preparedData, Connection conn) { + // Check for duplicates in the `admin_adjustment` table + def adminAdjustmentId = rs.getInt('admin_adjustment_id') + def duplicateCheckStmt = conn.prepareStatement('SELECT COUNT(*) FROM admin_adjustment WHERE admin_adjustment_id = ?') + duplicateCheckStmt.setInt(1, adminAdjustmentId) + def duplicateResult = duplicateCheckStmt.executeQuery() + duplicateResult.next() + def count = duplicateResult.getInt(1) + duplicateResult.close() + duplicateCheckStmt.close() + + if (count > 0) { + log.warn("Duplicate admin_adjustment detected with admin_adjustment_id: ${adminAdjustmentId}, skipping insertion.") + return null + } + + 
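+    // Side note (assuming admin_adjustment_id is covered by a primary-key or
+    // unique constraint in LCFS): PostgreSQL could fold this read-then-insert
+    // guard into the insert itself, e.g.
+    //   INSERT INTO admin_adjustment (...) VALUES (...)
+    //   ON CONFLICT (admin_adjustment_id) DO NOTHING
+    // The two-step form used here has the advantage that each skipped
+    // duplicate is logged explicitly.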
// Proceed with insertion if no duplicate exists + def statusId = getStatusId(rs.getString('current_status'), preparedData) + adminAdjustmentStmt.setInt(1, rs.getInt('to_organization_id')) + adminAdjustmentStmt.setObject(2, toTransactionId) + adminAdjustmentStmt.setTimestamp(3, rs.getTimestamp('transaction_effective_date')) + adminAdjustmentStmt.setInt(4, rs.getInt('compliance_units')) + adminAdjustmentStmt.setString(5, rs.getString('gov_comment')) + adminAdjustmentStmt.setObject(6, statusId) + adminAdjustmentStmt.setTimestamp(7, rs.getTimestamp('create_date')) + adminAdjustmentStmt.setTimestamp(8, rs.getTimestamp('update_date')) + adminAdjustmentStmt.setInt(9, rs.getInt('create_user')) + adminAdjustmentStmt.setInt(10, rs.getInt('update_user')) + adminAdjustmentStmt.setInt(11, rs.getInt('admin_adjustment_id')) + def result = adminAdjustmentStmt.executeQuery() + return result.next() ? result.getInt('admin_adjustment_id') : null +} + +log.warn("**** COMPLETED ADMIN ADJUSTMENT ETL ****") diff --git a/etl/nifi_scripts/compliance_report.groovy b/etl/nifi_scripts/compliance_report.groovy new file mode 100644 index 000000000..18ea947a5 --- /dev/null +++ b/etl/nifi_scripts/compliance_report.groovy @@ -0,0 +1,1018 @@ +import groovy.json.JsonSlurper +import java.sql.Connection +import java.sql.PreparedStatement +import java.sql.ResultSet +import java.sql.Timestamp +import java.util.UUID +import java.nio.charset.StandardCharsets +import java.time.OffsetDateTime +import java.time.Instant + +/* +Simplified and revised script for migrating compliance reports from Source DB to Destination DB. + +Key Features: +1. Processes each compliance report individually using a single comprehensive query. +2. Assigns a deterministic group_uuid based on root_report_id. +3. Excludes "Deleted" reports and includes "Rejected" reports. +4. Maps create_user and update_user to user.display_name. +5. Inserts compliance reports, their histories, and associated transactions. +6. Ensures sequence integrity for compliance_report_id. +7. Facilitates efficient testing by allowing deletion of inserted records based on display_name. +8. Adds handling for 'in reserve' transactions based on snapshot data. +9. After all existing compliance reports are processed, queries for unassociated credit trades of type 3 or 4 + and creates new transactions for them. 
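+
+As a rough illustration of feature 2 above: the grouping key is a name-based
+(version 3) UUID, so re-running the migration produces the same group_uuid for
+the same root_report_id. A minimal, self-contained sketch (the rootId value
+below is hypothetical):
+
+    import java.nio.charset.StandardCharsets
+    def rootId = 1234
+    def groupUuid = UUID.nameUUIDFromBytes(rootId.toString().getBytes(StandardCharsets.UTF_8))
+    // The same input bytes always yield the same UUID:
+    assert groupUuid == UUID.nameUUIDFromBytes('1234'.getBytes(StandardCharsets.UTF_8))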
+*/ + +// ========================================= +// Queries +// ========================================= + +// Consolidated source query that gathers all relevant information for each compliance report +def SOURCE_CONSOLIDATED_QUERY = """ +WITH + compliance_history AS ( + SELECT + crh.compliance_report_id, + json_agg( + json_build_object( + 'history_id', crh.id, + 'create_user_id', crh.create_user_id, + 'create_timestamp', crh.create_timestamp, + 'fuel_supplier_status_id', cws.fuel_supplier_status_id, + 'analyst_status_id', cws.analyst_status_id, + 'manager_status_id', cws.manager_status_id, + 'director_status_id', cws.director_status_id + ) + ) AS history_json + FROM compliance_report_history crh + JOIN compliance_report_workflow_state cws ON crh.status_id = cws.id + GROUP BY crh.compliance_report_id + ) +SELECT + cr.id AS compliance_report_id, + cr.type_id, + cr.organization_id, + cr.compliance_period_id, + cr.supplements_id, + cr.root_report_id, + cr.latest_report_id, + cr.traversal, + cr.nickname, + cr.supplemental_note, + cws.fuel_supplier_status_id, + cws.analyst_status_id, + cws.manager_status_id, + cws.director_status_id, + cr.create_user_id, + cr.create_timestamp, + cr.update_user_id, + cr.update_timestamp, + crt.the_type AS report_type, + cp.description AS compliance_period_desc, + CASE WHEN cr.supplements_id IS NOT NULL THEN TRUE ELSE FALSE END AS is_supplemental, + cr.credit_transaction_id, + ch.history_json, + crs.snapshot AS compliance_report_snapshot, -- Added snapshot + -- CreditTrade Fields (One-to-One Relationship) + ct.id AS credit_trade_id, + ct.initiator_id, + ct.respondent_id, + ct.date_of_written_agreement, + ct.trade_effective_date, + ct.number_of_credits, + ct.create_user_id AS ct_create_user_id, + ct.create_timestamp AS ct_create_timestamp, + ct.update_user_id AS ct_update_user_id, + ct.update_timestamp AS ct_update_timestamp, + ctt.the_type AS credit_trade_type, + ct.fair_market_value_per_credit +FROM compliance_report cr +JOIN compliance_report_type crt ON cr.type_id = crt.id +JOIN compliance_period cp ON cp.id = cr.compliance_period_id +JOIN compliance_report_workflow_state cws ON cr.status_id = cws.id +LEFT JOIN compliance_history ch ON cr.id = ch.compliance_report_id +LEFT JOIN compliance_report_snapshot crs ON cr.id = crs.compliance_report_id -- Added join +LEFT JOIN credit_trade ct ON cr.credit_transaction_id = ct.id +LEFT JOIN credit_trade_type ctt ON ct.type_id = ctt.id +LEFT JOIN credit_trade_status cts ON ct.status_id = cts.id +WHERE cr.type_id = 1 +ORDER BY cr.root_report_id NULLS FIRST, cr.traversal, cr.id; +""" + +// ========================================= +// NiFi Controller Services +// ========================================= +def sourceDbcpService = context.controllerServiceLookup.getControllerService('3245b078-0192-1000-ffff-ffffba20c1eb') +def destinationDbcpService = context.controllerServiceLookup.getControllerService('3244bf63-0192-1000-ffff-ffffc8ec6d93') + +// ========================================= +// Main Execution +// ========================================= + +Connection sourceConn = null +Connection destinationConn = null + +try { + sourceConn = sourceDbcpService.getConnection() + destinationConn = destinationDbcpService.getConnection() + destinationConn.setAutoCommit(false) + + // Disable refresh of the materialized views + def stmt = destinationConn.createStatement() + stmt.execute('DROP FUNCTION IF EXISTS refresh_transaction_aggregate() CASCADE;') + stmt.execute(""" + CREATE OR REPLACE FUNCTION 
refresh_transaction_aggregate() + RETURNS void AS \$\$ + BEGIN + -- Temporarily disable the materialized view refresh + END; + \$\$ LANGUAGE plpgsql; + """) + + stmt.execute('DROP FUNCTION IF EXISTS refresh_mv_director_review_transaction_count() CASCADE;') + stmt.execute(""" + CREATE OR REPLACE FUNCTION refresh_mv_director_review_transaction_count() + RETURNS void AS \$\$ + BEGIN + -- Temporarily disable the materialized view refresh + END; + \$\$ LANGUAGE plpgsql; + """) + stmt.close() + + // Load reference data for status mapping + def referenceData = loadReferenceData(destinationConn) + + // Fetch user profiles to map user IDs to display names + def userProfiles = fetchUserProfiles(sourceConn) + + // Prepare database statements + def statements = prepareStatements(destinationConn) + + // Initialize counters + int totalInserted = 0 + int totalHistoryInserted = 0 + int totalTransactionsInserted = 0 + + // Execute the consolidated compliance reports query + PreparedStatement consolidatedStmt = sourceConn.prepareStatement(SOURCE_CONSOLIDATED_QUERY) + ResultSet rs = consolidatedStmt.executeQuery() + + log.warn("Creating Compliance Report Objects") + + while (rs.next()) { + def record = [ + compliance_report_id : rs.getInt('compliance_report_id'), + type_id : rs.getInt('type_id'), + organization_id : rs.getInt('organization_id'), + compliance_period_id : rs.getInt('compliance_period_id'), + supplements_id : rs.getObject('supplements_id'), + root_report_id : rs.getObject('root_report_id'), + latest_report_id : rs.getObject('latest_report_id'), + traversal : rs.getInt('traversal'), + nickname : rs.getString('nickname'), + supplemental_note : rs.getString('supplemental_note'), + fuel_supplier_status_id : rs.getString('fuel_supplier_status_id'), // Now String + analyst_status_id : rs.getString('analyst_status_id'), + manager_status_id : rs.getString('manager_status_id'), + director_status_id : rs.getString('director_status_id'), + create_user_id : rs.getInt('create_user_id'), + create_timestamp : rs.getTimestamp('create_timestamp'), + update_user_id : rs.getInt('update_user_id'), + update_timestamp : rs.getTimestamp('update_timestamp'), + report_type : rs.getString('report_type'), + compliance_period_desc : rs.getString('compliance_period_desc'), + is_supplemental : rs.getBoolean('is_supplemental'), + credit_transaction_id : rs.getObject('credit_transaction_id'), + history_json : rs.getString('history_json'), + compliance_report_snapshot : rs.getString('compliance_report_snapshot'), // Added snapshot + // CreditTrade Fields + credit_trade_id : rs.getObject('credit_trade_id'), + initiator_id : rs.getObject('initiator_id'), + respondent_id : rs.getObject('respondent_id'), + date_of_written_agreement : rs.getDate('date_of_written_agreement'), + trade_effective_date : rs.getDate('trade_effective_date'), + number_of_credits : rs.getInt('number_of_credits'), + ct_create_user_id : rs.getObject('ct_create_user_id'), + ct_create_timestamp : rs.getTimestamp('ct_create_timestamp'), + ct_update_user_id : rs.getObject('ct_update_user_id'), + ct_update_timestamp : rs.getTimestamp('ct_update_timestamp'), + credit_trade_type : rs.getString('credit_trade_type'), + fair_market_value_per_credit : rs.getBigDecimal('fair_market_value_per_credit') + ] + + // Map TFRS compliance period ID to LCFS compliance period ID + record.compliance_period_id = mapCompliancePeriodId(record.compliance_period_id) + + // Insert Compliance Report History from JSON + def historyJson = record.history_json + def historyRecords = 
historyJson ? new JsonSlurper().parseText(historyJson).sort { a, b -> + def aTime = a.create_timestamp ? OffsetDateTime.parse(a.create_timestamp).toInstant() : Instant.EPOCH + def bTime = b.create_timestamp ? OffsetDateTime.parse(b.create_timestamp).toInstant() : Instant.EPOCH + aTime <=> bTime + } : [] + + // Parse the snapshot JSON to extract in reserve quantity and convert it to an Integer + def snapshotJson = record.compliance_report_snapshot + def inReserveQuantity = null + if (snapshotJson) { + try { + def snapshot = new JsonSlurper().parseText(snapshotJson) + def quantityStr = snapshot?.summary?.lines?.get('25') + if (quantityStr != null) { + inReserveQuantity = safeConvertToInt(quantityStr) + if (inReserveQuantity == null) { + log.error("Invalid in reserve quantity format for compliance_report_id: ${record.compliance_report_id}") + } + } else { + log.warn("In reserve quantity not found in snapshot for compliance_report_id: ${record.compliance_report_id}") + } + } catch (Exception e) { + log.error("Error parsing snapshot JSON for compliance_report_id: ${record.compliance_report_id}", e) + } + } else { + log.warn("No snapshot found for compliance_report_id: ${record.compliance_report_id}") + } + + // Determine the root ID for UUID generation + def rootId = record.root_report_id ?: record.compliance_report_id + + // Generate a deterministic UUID based on rootId + def groupUuid = UUID.nameUUIDFromBytes(rootId.toString().getBytes(StandardCharsets.UTF_8)) + + def version = (record.compliance_report_id == record.root_report_id) ? 0 : (record.traversal ?: 0) + + // Determine the current status ID based on workflow states and history + def currentStatusId = mapCurrentStatus( + record.fuel_supplier_status_id, + record.analyst_status_id, + record.manager_status_id, + record.director_status_id, + referenceData, + historyRecords + ) + + // Exclude reports with non-matching statuses like deleted + if (currentStatusId == null) { + continue + } + + // Map create_user and update_user to display_name + def reportCreateUser = userProfiles[record.create_user_id] ?: "imported_user" + def reportUpdateUser = userProfiles[record.update_user_id] ?: reportCreateUser + + def supplementalInitiator = record.is_supplemental ? "SUPPLIER_SUPPLEMENTAL" : null + def reportingFrequency = "ANNUAL" + + // Insert Compliance Report into LCFS + def lcfsReportId = insertComplianceReport( + statements.insertComplianceReportStmt, + record, + groupUuid.toString(), + version, + supplementalInitiator, + reportingFrequency, + currentStatusId, + reportCreateUser, + reportUpdateUser + ) + totalInserted++ + + def insertedHistoryCount = insertComplianceReportHistory( + lcfsReportId, + historyRecords, + statements.insertComplianceReportHistoryStmt, + referenceData, + userProfiles + ) + totalHistoryInserted += insertedHistoryCount + + // Determine organization ID + def orgId = record.organization_id + + if (orgId == null) { + log.error("No organization_id found for compliance_report_id: ${record.compliance_report_id}. 
Skipping transaction creation.") + } else { + if (record.credit_trade_id) { + // **Accepted Report:** Create only an Adjustment transaction + def ctCreateUser = userProfiles[record.ct_create_user_id] ?: "imported_user" + def quantity = record.number_of_credits + if (record.credit_trade_type == "Credit Reduction") { + quantity = -quantity + } + + def associatedTransactionId = insertTransactionForReport( + statements.insertTransactionStmt, + record.organization_id, + quantity, + "Adjustment", + ctCreateUser, + record.ct_create_timestamp, + record.trade_effective_date ? Timestamp.valueOf(record.trade_effective_date.toLocalDate().atStartOfDay()) : null + ) + totalTransactionsInserted++ + + if (associatedTransactionId != null) { + updateComplianceReportWithTransaction(destinationConn, lcfsReportId, associatedTransactionId) + } + } else { + // After determining currentStatusName and having processed "In Reserve" transactions: + def currentStatusName = referenceData.statusIdToName[currentStatusId]?.toLowerCase() + + // Check if not the latest report in the chain or status is rejected + boolean isLatestReport = (record.compliance_report_id == record.latest_report_id) + + // Determine if we should consider a "Reserved" transaction + def shouldReserve = ["submitted", "recommended by analyst", "recommended by manager", "not recommended"].contains(currentStatusName) + + if (shouldReserve && inReserveQuantity != null && inReserveQuantity < 0) { + boolean mustRelease = false + + // If current report is rejected, release immediately + if (currentStatusName == "rejected") { + mustRelease = true + } else { + // If not the latest report, check the source DB for a future report + if (!isLatestReport) { + def futureQuery = """ + SELECT c.id, + s.fuel_supplier_status_id, + s.analyst_status_id, + s.manager_status_id, + s.director_status_id + FROM compliance_report c + JOIN compliance_report_workflow_state s ON c.status_id = s.id + WHERE c.root_report_id = ? + AND c.id > ? + ORDER BY c.id ASC LIMIT 1 + """ + + PreparedStatement futureStmt = sourceConn.prepareStatement(futureQuery) + futureStmt.setObject(1, record.root_report_id) + futureStmt.setInt(2, record.compliance_report_id) + ResultSet futureRs = futureStmt.executeQuery() + + if (futureRs.next()) { + def futureFuelStatus = futureRs.getString("fuel_supplier_status_id") + def futureAnalystStatus = futureRs.getString("analyst_status_id") + def futureManagerStatus = futureRs.getString("manager_status_id") + def futureDirectorStatus = futureRs.getString("director_status_id") + + // Map these future statuses to a final mapped status ID + def futureStatusId = mapCurrentStatus( + futureFuelStatus, + futureAnalystStatus, + futureManagerStatus, + futureDirectorStatus, + referenceData, + [] + ) + + if (futureStatusId != null) { + def futureStatusName = referenceData.statusIdToName[futureStatusId]?.toLowerCase() + log.warn("Future report found with mapped status: ${futureStatusName}") + + // If the future report progresses beyond current (e.g., submitted or recommended), we must release immediately + if (["submitted", "recommended by analyst", "recommended by manager", "assessed"].contains(futureStatusName)) { + mustRelease = true + } + } else { + log.warn("Future report found but no valid mapped status. Not releasing at this time.") + } + } + + futureRs.close() + futureStmt.close() + } + } + + // Determine final action: if mustRelease is true, directly release; otherwise reserve + def finalAction = mustRelease ? 
"Released" : "Reserved" + + log.warn("Inserting a single ${finalAction} transaction for compliance_report_id: ${record.compliance_report_id} with quantity: ${inReserveQuantity}") + + def transactionId = insertTransactionForReport( + statements.insertTransactionStmt, + orgId, + inReserveQuantity, + finalAction, + reportCreateUser, + record.create_timestamp, + null + ) + totalTransactionsInserted++ + if (transactionId != null) { + updateComplianceReportWithTransaction(destinationConn, lcfsReportId, transactionId) + } + + } else if (currentStatusName == "rejected") { + // Report is rejected but no inReserveQuantity or not in the reservable statuses, no transaction + log.debug("Rejected report_id: ${record.compliance_report_id}. No transaction created.") + } else { + // Unhandled status or inReserveQuantity is null + log.warn("No action for compliance_report_id: ${record.compliance_report_id}, status: ${currentStatusName}, inReserveQuantity: ${inReserveQuantity}.") + } + } + } + } + + rs.close() + consolidatedStmt.close() + // Commit all changes from the original compliance report processing + destinationConn.commit() + + // ========================================= + // Process Unassociated Credit Trades + // ========================================= + // This section identifies credit trades of type 3 or 4 (Credit Validation or Credit Reduction) + // that are not currently associated with any compliance_report. Instead of creating new + // compliance reports, it only creates corresponding transactions. (to be reviewed at a later date) + // + // For each unassociated credit trade: + // 1. Determines if it's a credit validation or a credit reduction. + // 2. Creates a transaction (positive units for validations, negative units for reductions). + // 3. Does not associate the transaction with any compliance report, leaving them as standalone transactions. + // + // This ensures that all known credit trades have corresponding transactions, + // even if they do not currently relate to a compliance report. + + log.warn("Processing Unassociated Credit Trades of Type 3 or 4 to Create Transactions Only") + + // Query for unassociated credit trades of type 3 or 4 and approved status + // These trades exist in the credit_trade table but are not linked to a compliance_report. 
+ def UNASSOCIATED_CREDIT_TRADES_QUERY = """ + SELECT ct.id AS credit_trade_id, + ct.respondent_id AS organization_id, + ct.compliance_period_id AS period_id, + ct.type_id AS credit_trade_type_id, + ct.trade_effective_date, + ct.number_of_credits, + ct.status_id, + ct.create_user_id AS ct_create_user_id, + ct.create_timestamp AS ct_create_timestamp + FROM credit_trade ct + LEFT JOIN compliance_report cr ON cr.credit_transaction_id = ct.id + WHERE ct.type_id IN (3,4) + AND ct.status_id = 7 + AND cr.id IS NULL; + """ + + PreparedStatement unassociatedTradesStmt = sourceConn.prepareStatement(UNASSOCIATED_CREDIT_TRADES_QUERY) + ResultSet unassocRs = unassociatedTradesStmt.executeQuery() + + while (unassocRs.next()) { + def credit_trade_id = unassocRs.getInt("credit_trade_id") + def organization_id = unassocRs.getInt("organization_id") + def credit_trade_type_id = unassocRs.getInt("credit_trade_type_id") + def trade_effective_date = unassocRs.getTimestamp("trade_effective_date") + def number_of_credits = unassocRs.getInt("number_of_credits") + def ct_create_user_id = unassocRs.getInt("ct_create_user_id") + def ct_create_timestamp = unassocRs.getTimestamp("ct_create_timestamp") + + def createUser = userProfiles[ct_create_user_id] ?: "imported_user" + + // Determine transaction direction based on trade type + // type_id = 3 (Credit Validation) => positive credits + // type_id = 4 (Credit Reduction) => negative credits + def adjustedCredits = (credit_trade_type_id == 4) ? -number_of_credits : number_of_credits + + // Create a standalone transaction for this credit trade + def transactionId = insertTransactionForReport( + statements.insertTransactionStmt, + organization_id, + adjustedCredits, + "Adjustment", + createUser, + ct_create_timestamp, + trade_effective_date + ) + totalTransactionsInserted++ + + log.warn("Created standalone transaction ${transactionId} for credit_trade_id ${credit_trade_id} (no associated compliance report)") + } + + unassocRs.close() + unassociatedTradesStmt.close() + + // Commit changes for the newly created transactions + destinationConn.commit() + + log.warn("Inserted ${totalInserted} compliance reports (from earlier processing), ${totalHistoryInserted} history records, and ${totalTransactionsInserted} transactions into LCFS, including standalone transactions for unassociated credit trades.") + + // Re-enable and refresh the materialized views + stmt = destinationConn.createStatement() + stmt.execute(""" + CREATE OR REPLACE FUNCTION refresh_transaction_aggregate() + RETURNS void AS \$\$ + BEGIN + REFRESH MATERIALIZED VIEW CONCURRENTLY mv_transaction_aggregate; + END; + \$\$ LANGUAGE plpgsql; + """) + stmt.execute('REFRESH MATERIALIZED VIEW CONCURRENTLY mv_transaction_aggregate') + + stmt.execute(""" + CREATE OR REPLACE FUNCTION refresh_mv_director_review_transaction_count() + RETURNS void AS \$\$ + BEGIN + REFRESH MATERIALIZED VIEW CONCURRENTLY mv_director_review_transaction_count; + END; + \$\$ LANGUAGE plpgsql; + """) + stmt.execute('REFRESH MATERIALIZED VIEW CONCURRENTLY mv_director_review_transaction_count') + stmt.close() + + log.warn("Inserted ${totalInserted} compliance reports, ${totalHistoryInserted} history records, and ${totalTransactionsInserted} transactions into LCFS.") +} catch (Exception e) { + log.error('Error occurred while processing compliance reports', e) + destinationConn?.rollback() + throw e +} finally { + if (sourceConn != null) sourceConn.close() + if (destinationConn != null) destinationConn.close() +} + +// 
========================================= +// Helper Functions +// ========================================= + +/** + * Safely converts a value to Integer. + * @param value The value to convert. + * @return The Integer value or null if conversion fails. + */ +def safeConvertToInt(def value) { + if (value instanceof Number) { + return value.intValue() + } else if (value instanceof String) { + try { + return Integer.parseInt(value) + } catch (NumberFormatException e) { + log.error("Failed to convert value to Integer: ${value}", e) + return null + } + } + return null +} + +/** + * Fetches user profiles and maps id to display_name. + * @param conn Source database connection. + * @return A map of id to display_name. + */ +def fetchUserProfiles(Connection conn) { + def stmt = conn.createStatement() + def rs = stmt.executeQuery("SELECT id, display_name FROM public.user") + def userMap = [:] + while (rs.next()) { + userMap[rs.getInt("id")] = rs.getString("display_name") + } + rs.close() + stmt.close() + return userMap +} + +/** + * Loads reference data for status mapping. + * @param conn Destination database connection. + * @return A map containing status mappings. + */ +def loadReferenceData(Connection conn) { + def statusMap = [:] + def idToName = [:] + def stmt = conn.createStatement() + def rs = stmt.executeQuery("SELECT compliance_report_status_id, status FROM compliance_report_status") + while (rs.next()) { + statusMap[rs.getString('status').toLowerCase()] = rs.getInt('compliance_report_status_id') + idToName[rs.getInt('compliance_report_status_id')] = rs.getString('status').toLowerCase() + } + rs.close() + stmt.close() + + return [ + statusMap : statusMap, + statusIdToName : idToName + ] +} + +/** + * Maps workflow statuses to LCFS compliance_report_status_id. + * Handles 'requested supplemental' and 'not recommended' by referencing the history. + * @param fuelStatusName Fuel supplier status name. + * @param analystStatusName Analyst status name. + * @param managerStatusName Manager status name. + * @param directorStatusName Director status name. + * @param referenceData Reference data for status mapping. + * @param historyRecords List of history records for the compliance report. + * @return Mapped compliance_report_status_id or null if excluded. + */ +def mapCurrentStatus(fuelStatusName, analystStatusName, managerStatusName, directorStatusName, referenceData, List historyRecords) { + // Normalize to lowercase for consistent comparison + fuelStatusName = fuelStatusName?.toLowerCase() + analystStatusName = analystStatusName?.toLowerCase() + managerStatusName = managerStatusName?.toLowerCase() + directorStatusName = directorStatusName?.toLowerCase() + + // Flags to determine special conditions + def isRequestedSupplemental = [ + fuelStatusName, + analystStatusName, + managerStatusName, + directorStatusName + ].any { it?.contains("requested supplemental") } + + def isNotRecommended = [ + fuelStatusName, + analystStatusName, + managerStatusName, + directorStatusName + ].any { it == "not recommended" } + + // Handle 'requested supplemental' + if (isRequestedSupplemental) { + log.debug("Status contains 'requested supplemental'. Processing history to determine previous status.") + if (historyRecords) { + // Iterate through history records in reverse (most recent first) + for (int i = historyRecords.size() - 1; i >= 0; i--) { + def previousRecord = historyRecords[i] + def prevFuelStatus = previousRecord.fuel_supplier_status_id ? 
previousRecord.fuel_supplier_status_id.toLowerCase() : null + def prevAnalystStatus = previousRecord.analyst_status_id ? previousRecord.analyst_status_id.toLowerCase() : null + def prevManagerStatus = previousRecord.manager_status_id ? previousRecord.manager_status_id.toLowerCase() : null + def prevDirectorStatus = previousRecord.director_status_id ? previousRecord.director_status_id.toLowerCase() : null + + // Check if the previous record does not have 'requested supplemental' + def wasRequestedSupplemental = [ + prevFuelStatus, + prevAnalystStatus, + prevManagerStatus, + prevDirectorStatus + ].any { it?.contains("requested supplemental") } + + if (!wasRequestedSupplemental) { + // Determine the previous status based on priority: Director > Manager > Analyst > Supplier + def previousStatusName = null + if (prevDirectorStatus) { + previousStatusName = prevDirectorStatus + } else if (prevManagerStatus) { + previousStatusName = prevManagerStatus + } else if (prevAnalystStatus) { + previousStatusName = prevAnalystStatus + } else if (prevFuelStatus) { + previousStatusName = prevFuelStatus + } + + if (previousStatusName) { + if (previousStatusName == "accepted") { + log.debug("Previous status was 'accepted'. Mapping to 'assessed'.") + return referenceData.statusMap["assessed"] + } else if (previousStatusName == "recommended" || previousStatusName == "submitted") { + log.debug("Previous status was '${previousStatusName}'. Mapping back to 'submitted'.") + return referenceData.statusMap["submitted"] + } + // Add additional mappings if necessary + } + } + } + } + // If no valid previous status is found, exclude the report + log.debug("No valid previous status found for 'requested supplemental'. Excluding report.") + return null + } + + // Handle 'not recommended' + if (isNotRecommended) { + log.debug("Status contains 'not recommended'. Mapping back to 'submitted'.") + return referenceData.statusMap["submitted"] + } + + // Determine the current status based on all status fields + // Priority: Director > Manager > Analyst > Supplier + // Adjust mappings based on your specific business rules + + // Check Director Status + if (directorStatusName) { + if (directorStatusName == "accepted") { + return referenceData.statusMap["assessed"] + } else if (directorStatusName == "rejected") { + return referenceData.statusMap["rejected"] + } + } + + // Check Manager Status + if (managerStatusName) { + if (managerStatusName == "recommended") { + return referenceData.statusMap["recommended by manager"] + } + } + + // Check Analyst Status + if (analystStatusName) { + if (analystStatusName == "recommended") { + return referenceData.statusMap["recommended by analyst"] + } + } + + // Check Fuel Supplier Status + if (fuelStatusName) { + if (fuelStatusName == "submitted") { + return referenceData.statusMap["submitted"] + } else if (fuelStatusName == "draft") { + return referenceData.statusMap["draft"] + } else if (fuelStatusName == "deleted") { + // Exclude deleted reports + return null + } + } + + // Default case + return null +} + +// ========================================= +// Add a mapping function for compliance_period_id +// ========================================= +def mapCompliancePeriodId(Integer tfrsId) { + // This mapping adjusts for the doubled up years in TFRS (2012-13, 2013-14) + // and ensures correct alignment with LCFS single-year periods. + // + // TFRS IDs vs. 
Descriptions: + // 3 -> "2012-13" + // 4 -> "2013-14" + // LCFS has each year individually: + // 3 -> "2012" + // 4 -> "2013" + // 5 -> "2014" + // + // Because TFRS combines two-year periods, align them as follows: + // TFRS:3 (2012-13) -> LCFS:3 (2012) + // TFRS:4 (2013-14) -> LCFS:5 (2014), skipping LCFS:4 (2013) + // After handling these two combined periods, subsequent years shift by 1. + + def compliancePeriodMapping = [ + 1: 1, + 2: 2, + 3: 3, // 2012-13 mapped to 2012 + 4: 5, // 2013-14 mapped to 2014, skipping 2013 + 5: 6, + 6: 7, + 7: 8, + 8: 9, + 9: 10, + 10: 11, + 11: 12, + 12: 13, + 13: 14, + 14: 15, + 15: 16, + 16: 17, + 17: 18, + 18: 19, + 19: 20, + 20: 21 + ] + + return compliancePeriodMapping[tfrsId] ?: tfrsId +} + +/** + * Prepares SQL statements for insertion and updates. + * @param conn Destination database connection. + * @return A map of prepared statements. + */ +def prepareStatements(Connection conn) { + def INSERT_COMPLIANCE_REPORT_SQL = """ + INSERT INTO compliance_report ( + compliance_period_id, + organization_id, + current_status_id, + transaction_id, + compliance_report_group_uuid, + version, + supplemental_initiator, + reporting_frequency, + nickname, + supplemental_note, + create_user, + create_date, + update_user, + update_date, + legacy_id + ) VALUES (?, ?, ?, NULL, ?, ?, ?::SupplementalInitiatorType, ?::ReportingFrequency, ?, ?, ?, ?, ?, ?, ?) + RETURNING compliance_report_id + """ + + def INSERT_COMPLIANCE_REPORT_HISTORY_SQL = """ + INSERT INTO compliance_report_history ( + compliance_report_id, + status_id, + user_profile_id, + create_user, + create_date, + update_user, + update_date + ) VALUES (?, ?, ?, ?, ?, ?, ?) + """ + + def INSERT_TRANSACTION_SQL = ''' + INSERT INTO transaction ( + compliance_units, organization_id, transaction_action, effective_date, create_user, create_date, effective_status + ) VALUES (?, ?, ?::transaction_action_enum, ?, ?, ?, TRUE) + RETURNING transaction_id + ''' + + def UPDATE_COMPLIANCE_REPORT_TRANSACTION_SQL = ''' + UPDATE compliance_report + SET transaction_id = ? + WHERE compliance_report_id = ? + ''' + + return [ + insertComplianceReportStmt : conn.prepareStatement(INSERT_COMPLIANCE_REPORT_SQL), + insertComplianceReportHistoryStmt : conn.prepareStatement(INSERT_COMPLIANCE_REPORT_HISTORY_SQL), + insertTransactionStmt : conn.prepareStatement(INSERT_TRANSACTION_SQL), + updateComplianceReportTransactionStmt: conn.prepareStatement(UPDATE_COMPLIANCE_REPORT_TRANSACTION_SQL) + ] +} + +/** + * Inserts a compliance report into LCFS. + * @param stmt PreparedStatement for inserting compliance_report. + * @param report Compliance report data. + * @param groupUuid Group UUID as string. + * @param version Version number. + * @param supplementalInitiator Supplemental initiator type. + * @param reportingFrequency Reporting frequency. + * @param currentStatusId Mapped status ID. + * @param createUser Display name of the creator. + * @param updateUser Display name of the updater. + * @return Inserted compliance_report_id. 
+ */ +def insertComplianceReport(PreparedStatement stmt, Map report, String groupUuid, int version, String supplementalInitiator, String reportingFrequency, Integer currentStatusId, String createUser, String updateUser) { + stmt.setInt(1, report.compliance_period_id) + stmt.setInt(2, report.organization_id) + + if (currentStatusId == null) { + stmt.setNull(3, java.sql.Types.INTEGER) + } else { + stmt.setInt(3, currentStatusId) + } + + stmt.setString(4, groupUuid) + stmt.setInt(5, version) + stmt.setObject(6, supplementalInitiator) + stmt.setString(7, reportingFrequency) + stmt.setString(8, report.nickname) + stmt.setString(9, report.supplemental_note) + stmt.setString(10, createUser) + stmt.setTimestamp(11, report.create_timestamp ?: Timestamp.valueOf("1970-01-01 00:00:00")) + stmt.setString(12, updateUser) + stmt.setTimestamp(13, report.update_timestamp ?: report.create_timestamp ?: Timestamp.valueOf("1970-01-01 00:00:00")) + stmt.setInt(14, report.compliance_report_id) + + def rs = null + try { + rs = stmt.executeQuery() + if (rs.next()) { + def insertedId = rs.getInt('compliance_report_id') + return insertedId + } + } catch (Exception e) { + log.error("Failed to insert compliance report for compliance_report_id: ${report.compliance_report_id}", e) + } finally { + if (rs != null) rs.close() + } + return null +} + +/** + * Inserts compliance report history records into LCFS. + * @param lcfsReportId Inserted compliance_report_id in LCFS. + * @param historyRecords List of history records. + * @param historyStmt PreparedStatement for inserting compliance_report_history. + * @param referenceData Reference data for status mapping. + * @param userProfiles Map of user_id to display_name. + * @return Number of history records inserted. + */ +def insertComplianceReportHistory(Integer lcfsReportId, List historyRecords, + PreparedStatement historyStmt, + Map referenceData, + Map userProfiles) { + int count = 0 + if (!historyRecords) return count + + historyRecords.each { h -> + def statusId = mapCurrentStatus( + h.fuel_supplier_status_id, + h.analyst_status_id, + h.manager_status_id, + h.director_status_id, + referenceData, + [] // Pass an empty list to prevent recursive history processing + ) + + if (statusId == null) { + // Skip history with no mapped status + return + } + + // Convert create_timestamp from String to Timestamp + def timestamp = h.create_timestamp ? Timestamp.from(OffsetDateTime.parse(h.create_timestamp).toInstant()) : Timestamp.valueOf("1970-01-01 00:00:00") + + def userProfileId = h.create_user_id + + def reportCreateUser = userProfiles[h.create_user_id] ?: "imported_user" + def reportUpdateUser = userProfiles[h.update_user_id] ?: reportCreateUser + + try { + historyStmt.setInt(1, lcfsReportId) + historyStmt.setInt(2, statusId) + + historyStmt.setInt(3, userProfileId) + + historyStmt.setString(4, reportCreateUser) + historyStmt.setTimestamp(5, timestamp) + historyStmt.setString(6, reportUpdateUser) + historyStmt.setTimestamp(7, timestamp) + + historyStmt.addBatch() + count++ + } catch (Exception e) { + log.error("Error inserting compliance report history for lcfsReportId: ${lcfsReportId}", e) + } + } + + try { + historyStmt.executeBatch() + } catch (Exception e) { + log.error("Error executing batch insert for compliance report history", e) + throw e + } + + return count +} + +/** + * Inserts a transaction into LCFS. + * @param stmt PreparedStatement for inserting transaction. + * @param orgId Organization ID. + * @param quantity Number of compliance units. 
+ * @param action Transaction action type. + * @param createUser Display name of the creator. + * @param createDate Creation timestamp. + * @param effectiveDate Effective date of the transaction. + * @return Inserted transaction_id or null. + */ +def insertTransactionForReport(PreparedStatement stmt, Integer orgId, int quantity, String action, String createUser, Timestamp createDate, Timestamp effectiveDate = null) { + if (orgId == null) { + log.error("Organization ID is null. Cannot insert transaction.") + return null + } + + def effDate = effectiveDate ?: createDate ?: Timestamp.valueOf("1970-01-01 00:00:00") + + stmt.setInt(1, quantity) + stmt.setInt(2, orgId) + stmt.setString(3, action) + stmt.setTimestamp(4, effDate) + stmt.setString(5, createUser) + stmt.setTimestamp(6, createDate ?: Timestamp.valueOf("1970-01-01 00:00:00")) + + def rs = null + try { + rs = stmt.executeQuery() + if (rs.next()) { + return rs.getInt('transaction_id') + } + } catch (Exception e) { + log.error("Failed to insert transaction for compliance_report_id", e) + } finally { + if (rs != null) rs.close() + } + return null +} + +/** + * Updates a compliance report with the associated transaction ID. + * @param conn Destination database connection. + * @param lcfsReportId Compliance report ID in LCFS. + * @param transactionId Transaction ID to associate. + */ +def updateComplianceReportWithTransaction(Connection conn, int lcfsReportId, int transactionId) { + def updateStmt = conn.prepareStatement("UPDATE compliance_report SET transaction_id = ? WHERE compliance_report_id = ?") + try { + updateStmt.setInt(1, transactionId) + updateStmt.setInt(2, lcfsReportId) + updateStmt.executeUpdate() + } catch (Exception e) { + log.error("Failed to update compliance_report with transaction_id: ${transactionId} for lcfsReportId: ${lcfsReportId}", e) + } finally { + updateStmt.close() + } +} + +// ========================================= +// Script Termination +// ========================================= + +log.warn("**** COMPLETED COMPLIANCE REPORT ETL ****") diff --git a/etl/nifi_scripts/credit_validation.groovy b/etl/nifi_scripts/credit_validation.groovy new file mode 100644 index 000000000..503f83978 --- /dev/null +++ b/etl/nifi_scripts/credit_validation.groovy @@ -0,0 +1,143 @@ +import java.sql.Connection +import java.sql.PreparedStatement +import java.sql.ResultSet + +/* +This NiFi Groovy script: +1. Fetches all credit trades from the TFRS database with status_id = 7. +2. Sums the credit balances for each organization based on credit_trade_type logic: + - Sell (type_id = 1): initiator loses credits, respondent gains credits + - Buy (type_id = 2): initiator gains credits, respondent loses credits + - Credit Validation (type_id = 3): respondent_id gains credits + - Credit Reduction (type_id = 4): respondent_id loses credits + - Part 3 Award (type_id = 5): respondent_id gains credits + (Ignore type_id = 6 for now as instructed) + +3. Fetches LCFS transactions and sums them per organization. +4. Prints out organizations where TFRS balance differs from LCFS balance, along with the discrepancy. +5. Logs how many organizations have discrepancies and how many have matching balances. 
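+
+A worked example of the balance rules (org IDs and amounts hypothetical): if
+org 10 sells 100 credits to org 20 (type 1) and org 20 is later reduced by 50
+credits (type 4), the expected TFRS balances are -100 for org 10 and +50 for
+org 20. The same aggregation in miniature:
+
+    def balances = [:].withDefault { 0 }
+    balances[10] -= 100; balances[20] += 100  // type 1 (Sell): initiator loses, respondent gains
+    balances[20] -= 50                        // type 4 (Credit Reduction): respondent loses
+    assert balances[10] == -100 && balances[20] == 50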
+*/ + +log.warn("******* STARTING CREDIT VALIDATION *******") + +// NiFi DBCP Controller Services +def sourceDbcpService = context.controllerServiceLookup.getControllerService('3245b078-0192-1000-ffff-ffffba20c1eb') +def destinationDbcpService = context.controllerServiceLookup.getControllerService('3244bf63-0192-1000-ffff-ffffc8ec6d93') + +Connection sourceConn = null +Connection destinationConn = null + +try { + + sourceConn = sourceDbcpService.getConnection() + destinationConn = destinationDbcpService.getConnection() + + // Query TFRS for all credit trades with status_id = 7 + def TFRS_QUERY = """ + SELECT ct.id AS credit_trade_id, + ct.type_id, + ct.initiator_id, + ct.respondent_id, + ct.number_of_credits + FROM credit_trade ct + WHERE ct.status_id = 7 + """ + + PreparedStatement tfrsStmt = sourceConn.prepareStatement(TFRS_QUERY) + ResultSet tfrsRs = tfrsStmt.executeQuery() + + // Map for TFRS balances: org_id -> credits + def tfrsBalances = [:].withDefault {0} + + while (tfrsRs.next()) { + def typeId = tfrsRs.getInt("type_id") + def initiator = tfrsRs.getInt("initiator_id") + def respondent = tfrsRs.getInt("respondent_id") + def credits = tfrsRs.getInt("number_of_credits") + + // Apply logic based on type_id + switch(typeId) { + case 1: // Sell: initiator loses credits, respondent gains credits + tfrsBalances[initiator] = tfrsBalances[initiator] - credits + tfrsBalances[respondent] = tfrsBalances[respondent] + credits + break + case 2: // Buy: initiator gains credits, respondent loses credits (per the rules above) + tfrsBalances[initiator] = tfrsBalances[initiator] + credits + tfrsBalances[respondent] = tfrsBalances[respondent] - credits + break + case 3: // Credit Validation + tfrsBalances[respondent] = tfrsBalances[respondent] + credits + break + case 4: // Credit Reduction + tfrsBalances[respondent] = tfrsBalances[respondent] - credits + break + case 5: // Part 3 Award + tfrsBalances[respondent] = tfrsBalances[respondent] + credits + break + // type_id = 6 (Admin Adjustment) not considered per requirement + } + } + + tfrsRs.close() + tfrsStmt.close() + + // Query LCFS for all transactions and sum by org + def LCFS_QUERY = """ + SELECT organization_id, SUM(compliance_units) AS total_units + FROM transaction + WHERE transaction_action = 'Adjustment' + GROUP BY organization_id + """ + + PreparedStatement lcfsStmt = destinationConn.prepareStatement(LCFS_QUERY) + ResultSet lcfsRs = lcfsStmt.executeQuery() + + def lcfsBalances = [:].withDefault {0} + + while (lcfsRs.next()) { + def orgId = lcfsRs.getInt("organization_id") + def totalUnits = lcfsRs.getInt("total_units") + lcfsBalances[orgId] = totalUnits + } + + lcfsRs.close() + lcfsStmt.close() + + // Compare balances + def allOrgs = (tfrsBalances.keySet() + lcfsBalances.keySet()).unique() + def discrepancies = [] + allOrgs.each { orgId -> + def tfrsVal = tfrsBalances[orgId] + def lcfsVal = lcfsBalances[orgId] + if (tfrsVal != lcfsVal) { + def diff = tfrsVal - lcfsVal + discrepancies << [orgId: orgId, tfrs: tfrsVal, lcfs: lcfsVal, difference: diff] + } + } + + // Print out discrepancies + if (discrepancies) { + log.warn("******** Organizations with balance discrepancies between TFRS and LCFS: ********") + discrepancies.each { d -> + log.warn("OrgID: ${d.orgId}, TFRS: ${d.tfrs}, LCFS: ${d.lcfs}, Difference (TFRS-LCFS): ${d.difference}") + } + } else { + log.warn("No discrepancies found. 
Balances match for all organizations.") + } + + // Log counts + def discrepancyCount = discrepancies.size() + def totalOrgs = allOrgs.size() + def matchingCount = totalOrgs - discrepancyCount + + log.warn("Number of organizations with discrepancies: ${discrepancyCount}") + log.warn("Number of organizations with matching balances: ${matchingCount}") + log.warn("Total organizations considered: ${totalOrgs}") + +} catch (Exception e) { + log.error("Error while validating organization balances", e) +} finally { + log.warn("**** FINISHED CREDIT VALIDATION ****") + if (sourceConn != null) sourceConn.close() + if (destinationConn != null) destinationConn.close() +} diff --git a/etl/nifi_scripts/fuel_code.groovy b/etl/nifi_scripts/fuel_code.groovy new file mode 100644 index 000000000..57de4f16f --- /dev/null +++ b/etl/nifi_scripts/fuel_code.groovy @@ -0,0 +1,80 @@ +import org.apache.nifi.processor.io.StreamCallback +import groovy.json.JsonSlurper +import groovy.json.JsonOutput + +def transformCallback = { inputStream, outputStream -> + def record = null // declared before the try so the catch block below can still reference it + try { + // Parse JSON input + record = new JsonSlurper().parseText(inputStream.text) + + // Map the fuel_id to the corresponding new value + def fuelIdMapping = [ + 8 : 13, // Propane + 21 : 15, // Renewable naphtha + 10 : 14, // Renewable gasoline + 19 : 16, // Fossil-derived diesel + 11 : 17, // Fossil-derived gasoline + 20 : 17 // Fossil-derived gasoline + ] + + // Replace fuel_id if it matches one of the keys in the map + if (fuelIdMapping.containsKey(record.fuel_id)) { + record.fuel_id = fuelIdMapping[record.fuel_id] + } + + // Map the fields from the source to the target schema + // The following fields are not used in the migration: fuel_code_id, facility_location, renewable_percentage, facility_nameplate_capacity_unit + def transformedRecord = [ + fuel_status_id : record.status_id, + prefix_id : 1, // BCLCF + fuel_suffix : "${record.fuel_code_version}.${record.fuel_code_version_minor}", + company : record.company, + carbon_intensity : record.carbon_intensity, + edrms : "", + last_updated : record.update_timestamp, + application_date : record.application_date, + approval_date : record.approval_date, + fuel_type_id : record.fuel_id, + feedstock : record.feedstock, + feedstock_location : record.feedstock_location, + feedstock_misc : record.feedstock_misc, + facility_nameplate_capacity : record.facility_nameplate, + former_company : record.former_company, + notes : null, + create_date : record.create_timestamp, + update_date : record.update_timestamp, + create_user : null, + update_user : null, + effective_date : record.effective_date, + expiration_date : record.expiry_date, + effective_status : true, + fuel_production_facility_city : null, + fuel_production_facility_province_state: null, + fuel_production_facility_country : null, + contact_name : null, + contact_email : null + ] + + // Write the transformed data back to the output + outputStream.write(JsonOutput.toJson(transformedRecord).getBytes("UTF-8")) + + } catch (Exception e) { + def recordId = record?.id + if (recordId) { + flowFile = session.putAttribute(flowFile, "failed_record_id", recordId.toString()) + } + throw e + } +} + +// Obtain the flowFile from the session +flowFile = session.get() +if (flowFile != null) { + try { + // Write the transformed data using the transformCallback + flowFile = session.write(flowFile, transformCallback as StreamCallback) + session.transfer(flowFile, REL_SUCCESS) + } catch (Exception e) { + session.transfer(flowFile, REL_FAILURE) + } +} diff --git
a/etl/nifi_scripts/initiativeAgrmtTrxn.groovy b/etl/nifi_scripts/initiativeAgrmtTrxn.groovy new file mode 100644 index 000000000..cb615d037 --- /dev/null +++ b/etl/nifi_scripts/initiativeAgrmtTrxn.groovy @@ -0,0 +1,440 @@ +import groovy.json.JsonSlurper +import java.sql.Connection +import java.sql.PreparedStatement +import java.sql.ResultSet +import java.time.OffsetDateTime +import java.sql.Timestamp + +log.warn("**** STARTING INITIATIVE AGREEMENT ETL ****") + +def SOURCE_QUERY = """ +WITH + internal_comment AS ( + SELECT + ctc.id, + ctc.credit_trade_id, + ctc.credit_trade_comment, + ctc.create_user_id, + ctc.create_timestamp, + STRING_AGG (r."name", '; ') AS role_names + FROM + credit_trade_comment ctc + JOIN "user" u ON u.id = ctc.create_user_id + AND u.organization_id = 1 + AND ctc.is_privileged_access = TRUE + JOIN user_role ur ON ur.user_id = u.id + JOIN "role" r ON ur.role_id = r.id + GROUP BY + ctc.id, + ctc.credit_trade_id, + ctc.credit_trade_comment, + ctc.create_user_id, + ctc.create_timestamp + ORDER BY + ctc.credit_trade_id, + ctc.create_timestamp + ) + SELECT + ct.id AS initiative_agreement_id, + ct.respondent_id AS to_organization_id, + ct.date_of_written_agreement AS agreement_date, + ct.trade_effective_date AS transaction_effective_date, + ct.number_of_credits AS compliance_units, + ct.create_user_id as create_user, + ct.update_user_id as update_user, + ct.update_timestamp as update_date, + ct.create_timestamp as create_date, + -- Aggregate comments from government with internal comment handling + STRING_AGG (DISTINCT gov_ctc.credit_trade_comment, '; ') AS gov_comment, + -- JSON aggregation for internal comments + json_agg (row_to_json (internal_comment)) AS internal_comments, + -- JSON aggregation for credit trade history + json_agg ( + json_build_object ( + 'initiative_agreement_id', + cth.credit_trade_id, + 'initiative_agreement_status', + case + WHEN cts_history.status IN ('Cancelled', 'Not Recommended', 'Declined', 'Refused') or ct.is_rescinded = true THEN 'Deleted' + WHEN cts_history.status IN ('Accepted', 'Submitted', 'Recommended') THEN 'Recommended' + WHEN cts_history.status IN ('Approved', 'Recorded') THEN 'Approved' + ELSE 'Draft' + END, + 'user_profile_id', + cth.create_user_id, + 'create_timestamp', + cth.create_timestamp + ) + ) AS credit_trade_history, + case + WHEN cts.status IN ('Cancelled', 'Not Recommended', 'Declined', 'Refused') or ct.is_rescinded = true THEN 'Deleted' + WHEN cts.status IN ('Accepted', 'Submitted', 'Recommended') THEN 'Recommended' + WHEN cts.status IN ('Approved', 'Recorded') THEN 'Approved' + ELSE 'Draft' + END AS current_status, cts.status + FROM + credit_trade ct + JOIN credit_trade_type ctt ON ct.type_id = ctt.id + LEFT OUTER JOIN credit_trade_category ctc ON ct.trade_category_id = ctc.id + JOIN credit_trade_status cts ON ct.status_id = cts.id + LEFT JOIN credit_trade_zero_reason ctzr ON ctzr.id = ct.zero_reason_id + AND ctzr.reason = 'Internal' + -- Join for Initiator Comments + LEFT JOIN credit_trade_comment from_ctc ON from_ctc.credit_trade_id = ct.id + AND from_ctc.create_user_id IN ( + SELECT + u.id + FROM + "user" u + WHERE + u.organization_id = ct.initiator_id + ) + -- Join for Respondent Comments + LEFT JOIN credit_trade_comment to_ctc ON to_ctc.credit_trade_id = ct.id + AND to_ctc.create_user_id IN ( + SELECT + u.id + FROM + "user" u + WHERE + u.organization_id = ct.respondent_id + ) + -- Join for Government Comments + LEFT JOIN credit_trade_comment gov_ctc ON gov_ctc.credit_trade_id = ct.id + AND gov_ctc.create_user_id IN 
( + SELECT + u.id + FROM + "user" u + WHERE + u.organization_id = 1 + AND gov_ctc.is_privileged_access = FALSE + ) + -- Join the internal comment logic for role-based filtering and audience_scope + LEFT JOIN internal_comment ON internal_comment.credit_trade_id = ct.id + -- Join for credit trade history + LEFT JOIN credit_trade_history cth ON cth.credit_trade_id = ct.id + JOIN credit_trade_status cts_history ON cth.status_id = cts_history.id + WHERE + ctt.the_type IN ('Part 3 Award') + GROUP BY + ct.id, + ct.respondent_id, + ct.date_of_written_agreement, + ct.trade_effective_date, + ct.number_of_credits, + cts.status, + ctzr.description, + internal_comment.role_names; + """ + +// Fetch connections to both the source and destination databases +def sourceDbcpService = context.controllerServiceLookup.getControllerService('3245b078-0192-1000-ffff-ffffba20c1eb') +def destinationDbcpService = context.controllerServiceLookup.getControllerService('3244bf63-0192-1000-ffff-ffffc8ec6d93') + +Connection sourceConn = null +Connection destinationConn = null + +try { + sourceConn = sourceDbcpService.getConnection() + destinationConn = destinationDbcpService.getConnection() + destinationConn.setAutoCommit(false) + + // Fetch status and category data once and cache it + def preparedData = prepareData(destinationConn) + + def statements = prepareStatements(destinationConn) + + destinationConn.createStatement().execute('DROP FUNCTION IF EXISTS refresh_transaction_aggregate() CASCADE;') + destinationConn.createStatement().execute(""" + CREATE OR REPLACE FUNCTION refresh_transaction_aggregate() + RETURNS void AS \$\$ + BEGIN + -- Temporarily disable the materialized view refresh + END; + \$\$ LANGUAGE plpgsql; + """) + + PreparedStatement sourceStmt = sourceConn.prepareStatement(SOURCE_QUERY) + ResultSet resultSet = sourceStmt.executeQuery() + + int recordCount = 0 + + while (resultSet.next()) { + recordCount++ + def jsonSlurper = new JsonSlurper() + def internalComments = resultSet.getString('internal_comments') + def creditTradeHistory = resultSet.getString('credit_trade_history') + + def internalCommentsJson = internalComments ? jsonSlurper.parseText(internalComments) : [] + def creditTradeHistoryJson = creditTradeHistory ? 
jsonSlurper.parseText(creditTradeHistory) : [] + + def toTransactionId = processTransactions(resultSet.getString('current_status'), + resultSet, statements.transactionStmt) + + def initiativeAgreementId = insertInitiativeAgreement(resultSet, statements.initiativeAgreementStmt, + toTransactionId, preparedData, destinationConn) + + if (initiativeAgreementId) { + processHistory(initiativeAgreementId, creditTradeHistoryJson, statements.historyStmt, preparedData) + processInternalComments(initiativeAgreementId, internalCommentsJson, statements.internalCommentStmt, + statements.initiativeAgreementInternalCommentStmt) + } else { + log.warn("initiative-agreement not inserted for record: ${resultSet.getInt('initiative_agreement_id')}") + } + } + resultSet.close() + destinationConn.createStatement().execute(""" + CREATE OR REPLACE FUNCTION refresh_transaction_aggregate() + RETURNS void AS \$\$ + BEGIN + REFRESH MATERIALIZED VIEW CONCURRENTLY mv_transaction_aggregate; + END; + \$\$ LANGUAGE plpgsql; + """) + destinationConn.createStatement().execute('REFRESH MATERIALIZED VIEW CONCURRENTLY mv_transaction_aggregate') + + destinationConn.commit() + log.debug("Processed ${recordCount} records successfully.") +} catch (Exception e) { + log.error('Error occurred while processing data', e) + destinationConn?.rollback() +} finally { + if (sourceConn != null) sourceConn.close() + if (destinationConn != null) destinationConn.close() +} + +def logResultSetRow(ResultSet rs) { + def metaData = rs.getMetaData() + def columnCount = metaData.getColumnCount() + def rowData = [:] + + for (int i = 1; i <= columnCount; i++) { + def columnName = metaData.getColumnName(i) + def columnValue = rs.getObject(i) + rowData[columnName] = columnValue + } + + log.debug("Row data: ${rowData}") +} + +def loadTableData(Connection conn, String query, String keyColumn, String valueColumn) { + def dataMap = [:] + def stmt = conn.createStatement() + def rs = stmt.executeQuery(query) + + while (rs.next()) { + def key = rs.getString(keyColumn) + def value = rs.getInt(valueColumn) + if (dataMap.containsKey(key)) { + log.warn("Duplicate key found for ${key}. Existing value: ${dataMap[key]}, New value: ${value}.") + } + dataMap[key] = value + } + + rs.close() + stmt.close() + return dataMap +} + +def prepareData(Connection conn) { + def statusMap = loadTableData(conn, + 'SELECT DISTINCT status, MIN(initiative_agreement_status_id) AS initiative_agreement_status_id FROM initiative_agreement_status GROUP BY status', + 'status', + 'initiative_agreement_status_id' + ) + return [ + statusMap : statusMap + ] +} + +def getStatusId(String status, Map preparedData) { + return preparedData.statusMap[status] +} + +def prepareStatements(Connection conn) { + def INSERT_INITIATIVE_AGREEMENT_SQL = ''' + INSERT INTO initiative_agreement ( + to_organization_id, transaction_id, transaction_effective_date, compliance_units, gov_comment, + current_status_id, create_date, update_date, create_user, update_user, effective_status, + initiative_agreement_id + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, true, ?) 
+ RETURNING initiative_agreement_id + ''' + def INSERT_INITIATIVE_AGREEMENT_HISTORY_SQL = ''' + INSERT INTO initiative_agreement_history ( + initiative_agreement_history_id, initiative_agreement_id, initiative_agreement_status_id, user_profile_id, create_date, effective_status + ) VALUES (DEFAULT, ?, ?, ?, ?, true) + ''' + def INSERT_INTERNAL_COMMENT_SQL = ''' + INSERT INTO internal_comment ( + internal_comment_id, comment, audience_scope, create_user, create_date + ) VALUES (DEFAULT, ?, ?::audience_scope, ?, ?) + RETURNING internal_comment_id + ''' + def INSERT_INITIATIVE_AGREEMENT_INTERNAL_COMMENT_SQL = ''' + INSERT INTO initiative_agreement_internal_comment ( + initiative_agreement_id, internal_comment_id + ) VALUES (?, ?) + ''' + def INSERT_TRANSACTION_SQL = ''' + INSERT INTO transaction ( + transaction_id, compliance_units, organization_id, transaction_action, effective_date, create_user, create_date, effective_status + ) VALUES (DEFAULT, ?, ?, ?::transaction_action_enum, ?, ?, ?, true) + RETURNING transaction_id + ''' + return [initiativeAgreementStmt : conn.prepareStatement(INSERT_INITIATIVE_AGREEMENT_SQL), + historyStmt : conn.prepareStatement(INSERT_INITIATIVE_AGREEMENT_HISTORY_SQL), + internalCommentStmt : conn.prepareStatement(INSERT_INTERNAL_COMMENT_SQL), + initiativeAgreementInternalCommentStmt + : conn.prepareStatement(INSERT_INITIATIVE_AGREEMENT_INTERNAL_COMMENT_SQL), + transactionStmt : conn.prepareStatement(INSERT_TRANSACTION_SQL)] +} + +def toSqlTimestamp(String timestampString) { + try { + // Parse the ISO 8601 timestamp and convert to java.sql.Timestamp + def offsetDateTime = OffsetDateTime.parse(timestampString) + return Timestamp.from(offsetDateTime.toInstant()) + } catch (Exception e) { + log.error("Invalid timestamp format: ${timestampString}, defaulting to '1970-01-01T00:00:00Z'") + return Timestamp.valueOf('1970-01-01 00:00:00') + } +} + +def processTransactions(String currentStatus, ResultSet rs, PreparedStatement stmt) { + def toTransactionId = null + + if (currentStatus == 'Approved') { + toTransactionId = insertTransaction(stmt, rs, 'Adjustment', rs.getInt('to_organization_id')) + } + + return toTransactionId +} + +def insertTransaction(PreparedStatement stmt, ResultSet rs, String action, int orgId) { + stmt.setInt(1, rs.getInt('compliance_units')) + stmt.setInt(2, orgId) + stmt.setString(3, action) + stmt.setDate(4, rs.getDate('transaction_effective_date') ?: rs.getDate('agreement_date')) + stmt.setInt(5, rs.getInt('create_user')) + stmt.setTimestamp(6, rs.getTimestamp('create_date')) + + def result = stmt.executeQuery() + return result.next() ? 
result.getInt('transaction_id') : null
+}
+
+def processHistory(Integer initiativeAgreementId, List creditTradeHistory, PreparedStatement historyStmt, Map preparedData) {
+    if (!creditTradeHistory) return
+
+    // Use a Set to track unique combinations of initiative_agreement_id and initiative_agreement_status
+    def processedEntries = new HashSet()
+
+    creditTradeHistory.each { historyItem ->
+        try {
+            def statusId = getStatusId(historyItem.initiative_agreement_status, preparedData)
+            def uniqueKey = "${initiativeAgreementId}_${statusId}"
+
+            // Check if this combination has already been processed
+            if (!processedEntries.contains(uniqueKey)) {
+                // If not processed, add to batch and mark as processed
+                historyStmt.setInt(1, initiativeAgreementId)
+                historyStmt.setInt(2, statusId)
+                historyStmt.setInt(3, historyItem.user_profile_id)
+                historyStmt.setTimestamp(4, toSqlTimestamp(historyItem.create_timestamp ?: '2013-01-01T00:00:00Z'))
+                historyStmt.addBatch()
+
+                processedEntries.add(uniqueKey)
+            }
+        } catch (Exception e) {
+            log.error("Error processing history record for initiative_agreement_id: ${initiativeAgreementId}", e)
+        }
+    }
+
+    // Execute batch
+    historyStmt.executeBatch()
+}
+
+def processInternalComments(Integer initiativeAgreementId, List internalComments,
+                            PreparedStatement internalCommentStmt,
+                            PreparedStatement initiativeAgreementInternalCommentStmt) {
+    if (!internalComments) return
+
+    internalComments.each { comment ->
+        if (!comment) return // Skip null comments
+
+        try {
+            // Insert the internal comment
+            internalCommentStmt.setString(1, comment.credit_trade_comment ?: '')
+            internalCommentStmt.setString(2, getAudienceScope(comment.role_names ?: ''))
+            // Guard against a missing create_user_id: setInt(null) would throw an NPE on unboxing
+            if (comment.create_user_id != null) {
+                internalCommentStmt.setInt(3, comment.create_user_id)
+            } else {
+                internalCommentStmt.setNull(3, java.sql.Types.INTEGER)
+            }
+            internalCommentStmt.setTimestamp(4, toSqlTimestamp(comment.create_timestamp ?: '2013-01-01T00:00:00Z'))
+
+            def internalCommentId = null
+            def commentResult = internalCommentStmt.executeQuery()
+            if (commentResult.next()) {
+                internalCommentId = commentResult.getInt('internal_comment_id')
+
+                // Insert the initiative-agreement-comment relationship
+                initiativeAgreementInternalCommentStmt.setInt(1, initiativeAgreementId)
+                initiativeAgreementInternalCommentStmt.setInt(2, internalCommentId)
+                initiativeAgreementInternalCommentStmt.executeUpdate()
+            }
+
+            commentResult.close()
+        } catch (Exception e) {
+            log.error("Error processing internal comment for initiative-agreement ${initiativeAgreementId}: ${e.getMessage()}", e)
+        }
+    }
+}
+
+// Helper function to determine audience scope based on role names
+def getAudienceScope(String roleNames) {
+    if (!roleNames) return 'Analyst'
+
+    switch (true) {
+        case roleNames.contains('GovDirector'):
+            return 'Director'
+        case roleNames.contains('GovComplianceManager'):
+            return 'Compliance Manager'
+        default:
+            return 'Analyst'
+    }
+}
+
+def insertInitiativeAgreement(ResultSet rs, PreparedStatement initiativeAgreementStmt,
+                              Long toTransactionId, Map preparedData, Connection conn) {
+    // Check for duplicates in the `initiative_agreement` table
+    def initiativeAgreementId = rs.getInt('initiative_agreement_id')
+    def duplicateCheckStmt = conn.prepareStatement('SELECT COUNT(*) FROM initiative_agreement WHERE initiative_agreement_id = ?')
+    duplicateCheckStmt.setInt(1, initiativeAgreementId)
+    def duplicateResult = duplicateCheckStmt.executeQuery()
+    duplicateResult.next()
+    def count = duplicateResult.getInt(1)
+    duplicateResult.close()
+    duplicateCheckStmt.close()
+
+    if (count > 0) {
+        log.warn("Duplicate
initiative_agreement detected with initiative_agreement_id: ${initiativeAgreementId}, skipping insertion.") + return null + } + + // Proceed with insertion if no duplicate exists + def statusId = getStatusId(rs.getString('current_status'), preparedData) + initiativeAgreementStmt.setInt(1, rs.getInt('to_organization_id')) + initiativeAgreementStmt.setObject(2, toTransactionId) + initiativeAgreementStmt.setTimestamp(3, rs.getTimestamp('transaction_effective_date')) + initiativeAgreementStmt.setInt(4, rs.getInt('compliance_units')) + initiativeAgreementStmt.setString(5, rs.getString('gov_comment')) + initiativeAgreementStmt.setObject(6, statusId) + initiativeAgreementStmt.setTimestamp(7, rs.getTimestamp('create_date')) + initiativeAgreementStmt.setTimestamp(8, rs.getTimestamp('update_date')) + initiativeAgreementStmt.setInt(9, rs.getInt('create_user')) + initiativeAgreementStmt.setInt(10, rs.getInt('update_user')) + initiativeAgreementStmt.setInt(11, rs.getInt('initiative_agreement_id')) + def result = initiativeAgreementStmt.executeQuery() + return result.next() ? result.getInt('initiative_agreement_id') : null +} + +log.warn("**** COMPLETED INITIATIVE AGREEMENT ETL ****") diff --git a/etl/nifi_scripts/organization.groovy b/etl/nifi_scripts/organization.groovy new file mode 100644 index 000000000..d9b826e14 --- /dev/null +++ b/etl/nifi_scripts/organization.groovy @@ -0,0 +1,233 @@ +import java.sql.Connection +import java.sql.PreparedStatement +import java.sql.ResultSet +import java.util.Random + +log.warn("**** STARTING ORGANIZATION ETL ****") + +// SQL query to fetch data from the source +def sourceQuery = """ + SELECT + oa.organization_id, + o.name, + cast(null as varchar) as operating_name, + cast(null as varchar) as email, + cast(null as varchar) as phone, + coalesce(o.edrms_record, '') as edrms_record, + ( + case + when os.status = 'Archived' then 'Suspended' + when oat.the_type = 'Buy And Sell' or oat.the_type = 'Sell Only' then 'Registered' + else 'Unregistered' + end + ) as org_status, + 'fuel_supplier' as organization_type, + oa.address_line_1 as service_street_address, + oa.address_line_2 as service_address_other, + oa.city as service_city, + oa.state as service_province_state, + oa.postal_code as "service_postalCode_zipCode", + oa.country as service_country, + oa.attorney_street_address, + oa.attorney_address_other, + oa.attorney_city as attorney_city, + oa.attorney_province as attorney_province_state, + oa.attorney_postal_code as "attorney_postalCode_zipCode", + oa.attorney_country as attorney_country + FROM + organization o + INNER JOIN organization_status os ON os.id = o.status_id + INNER JOIN organization_actions_type oat ON oat.id = o.actions_type_id + INNER JOIN organization_address oa ON oa.organization_id = o.id and oa.expiration_date is null; +""" + +// SQL query to check if the record with the same organization_id already exists +def checkDuplicateQuery = "SELECT COUNT(*) FROM organization WHERE organization_id = ?" + +// SQL queries to fetch the status and type IDs from the destination database +def getStatusIdQuery = "SELECT organization_status_id FROM organization_status WHERE status = ?::org_status_enum" +def getTypeIdQuery = "SELECT organization_type_id FROM organization_type WHERE org_type = ?::org_type_enum" + +// SQL query to check if an organization code already exists +def checkOrganizationCodeQuery = "SELECT COUNT(*) FROM organization WHERE organization_code = ?" 
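+
+// Note: organization_code is a short, human-facing identifier that must be unique across the
+// organization table. generateRandomCode() below draws 4 characters from A-Z/0-9, and Step 4
+// re-rolls it until isOrganizationCodeUnique() finds no clash, e.g. (illustrative value only):
+//   generateRandomCode()  // -> "A7K2"; retried on collision before the INSERT below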
+ +// SQL query to insert new organizations with the generated code +def insertOrganizationSQL = """ + INSERT INTO organization ( + effective_status, organization_id, organization_code, name, operating_name, email, phone, edrms_record, organization_status_id, organization_type_id, organization_address_id, organization_attorney_address_id + ) VALUES (true, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) + ON CONFLICT (organization_id) DO NOTHING +""" + +// Fetch connections to both the source and destination databases +def sourceDbcpService = context.controllerServiceLookup.getControllerService("3245b078-0192-1000-ffff-ffffba20c1eb") +def destinationDbcpService = context.controllerServiceLookup.getControllerService("3244bf63-0192-1000-ffff-ffffc8ec6d93") + +Connection sourceConn = null +Connection destinationConn = null + +// Function to generate a random alphanumeric code +def generateRandomCode() { + def chars = ('A'..'Z') + ('0'..'9') + def random = new Random() + return (1..4).collect { chars[random.nextInt(chars.size())] }.join('') +} + +// Function to check if the generated organization code is unique +// Passing the SQL query (checkOrganizationCodeQuery) as a parameter +def isOrganizationCodeUnique(Connection conn, String code, String checkQuery) { + PreparedStatement checkCodeStmt = conn.prepareStatement(checkQuery) + checkCodeStmt.setString(1, code) + ResultSet rs = checkCodeStmt.executeQuery() + rs.next() + return rs.getInt(1) == 0 // Return true if the count is 0, meaning the code is unique +} + +try { + // Get connections + sourceConn = sourceDbcpService.getConnection() + destinationConn = destinationDbcpService.getConnection() + + // Step 1: Insert new records from the source database + // Prepare statements for fetching org_status_id and organization_type_id + PreparedStatement statusStmt = destinationConn.prepareStatement(getStatusIdQuery) + PreparedStatement typeStmt = destinationConn.prepareStatement(getTypeIdQuery) + PreparedStatement checkDuplicateStmt = destinationConn.prepareStatement(checkDuplicateQuery) + + // Prepare the SQL insert statements for organization_address and organization_attorney_address + def insertAddressSQL = """ + INSERT INTO organization_address ( + organization_address_id, name, street_address, address_other, city, province_state, "postalCode_zipCode", country, effective_status + ) VALUES (DEFAULT, ?, ?, ?, ?, ?, ?, ?, true) + RETURNING organization_address_id + """ + + def insertAttorneyAddressSQL = """ + INSERT INTO organization_attorney_address ( + organization_attorney_address_id, name, street_address, address_other, city, province_state, "postalCode_zipCode", country, effective_status + ) VALUES (DEFAULT, ?, ?, ?, ?, ?, ?, ?, true) + RETURNING organization_attorney_address_id + """ + + // Execute the query on the source database + PreparedStatement sourceStmt = sourceConn.prepareStatement(sourceQuery) + ResultSet resultSet = sourceStmt.executeQuery() + + while (resultSet.next()) { + def organizationId = resultSet.getInt("organization_id") + + // Check if the organization already exists in the destination + checkDuplicateStmt.setInt(1, organizationId) + ResultSet duplicateResult = checkDuplicateStmt.executeQuery() + if (duplicateResult.next() && duplicateResult.getInt(1) > 0) { + // If a duplicate exists, skip this record + log.info("Skipping duplicate organization with organization_id: " + organizationId) + continue + } + + // If no duplicate exists, proceed with the insert logic + def name = resultSet.getString("name") ?: "" + def operatingName = 
resultSet.getString("operating_name") ?: "" // not nullable string field + def email = resultSet.getString("email") ?: "" + def phone = resultSet.getString("phone") ?: "" + def edrmsRecord = resultSet.getString("edrms_record") ?: "" + def orgStatusChar = resultSet.getString("org_status") ?: "" + def orgTypeChar = resultSet.getString("organization_type") ?: "" + def serviceStreetAddress = resultSet.getString("service_street_address") ?: "" + def serviceAddressOther = resultSet.getString("service_address_other") ?: "" + def serviceCity = resultSet.getString("service_city") ?: "" + def serviceProvinceState = resultSet.getString("service_province_state") ?: "" + def servicePostalCodeZipCode = resultSet.getString("service_postalCode_zipCode") ?: "" + def serviceCountry = resultSet.getString("service_country") ?: "" + def attorneyStreetAddress = resultSet.getString("attorney_street_address") ?: "" + def attorneyAddressOther = resultSet.getString("attorney_address_other") ?: "" + def attorneyCity = resultSet.getString("attorney_city") ?: "" + def attorneyProvinceState = resultSet.getString("attorney_province_state") ?: "" + def attorneyPostalCodeZipCode = resultSet.getString("attorney_postalCode_zipCode") ?: "" + def attorneyCountry = resultSet.getString("attorney_country") ?: "" + + // Fetch organization_status_id and organization_type_id + statusStmt.setString(1, orgStatusChar) + ResultSet statusResult = statusStmt.executeQuery() + def orgStatusId = statusResult.next() ? statusResult.getInt("organization_status_id") : null + + typeStmt.setString(1, orgTypeChar) + ResultSet typeResult = typeStmt.executeQuery() + def orgTypeId = typeResult.next() ? typeResult.getInt("organization_type_id") : null + + // If either orgStatusId or orgTypeId is null, log and skip this record + if (orgStatusId == null || orgTypeId == null) { + log.error("Failed to map organization status or type for record: " + organizationId) + continue + } + + // Step 2: Insert service address into organization_address table and get the generated organization_address_id + PreparedStatement insertAddressStmt = destinationConn.prepareStatement(insertAddressSQL) + def organizationAddressId = null + insertAddressStmt.setString(1, name) + insertAddressStmt.setString(2, serviceStreetAddress) + insertAddressStmt.setString(3, serviceAddressOther) + insertAddressStmt.setString(4, serviceCity) + insertAddressStmt.setString(5, serviceProvinceState) + insertAddressStmt.setString(6, servicePostalCodeZipCode) + insertAddressStmt.setString(7, serviceCountry) + ResultSet addressResultSet = insertAddressStmt.executeQuery() + if (addressResultSet.next()) { + organizationAddressId = addressResultSet.getInt("organization_address_id") + } + + // Step 3: Insert attorney address into organization_attorney_address table and get the generated organization_attorney_address_id + PreparedStatement insertAttorneyAddressStmt = destinationConn.prepareStatement(insertAttorneyAddressSQL) + def organizationAttorneyAddressId = null + insertAttorneyAddressStmt.setString(1, name) + insertAttorneyAddressStmt.setString(2, attorneyStreetAddress) + insertAttorneyAddressStmt.setString(3, attorneyAddressOther) + insertAttorneyAddressStmt.setString(4, attorneyCity) + insertAttorneyAddressStmt.setString(5, attorneyProvinceState) + insertAttorneyAddressStmt.setString(6, attorneyPostalCodeZipCode) + insertAttorneyAddressStmt.setString(7, attorneyCountry) + ResultSet attorneyAddressResultSet = insertAttorneyAddressStmt.executeQuery() + if (attorneyAddressResultSet.next()) { + 
organizationAttorneyAddressId = attorneyAddressResultSet.getInt("organization_attorney_address_id") + } + + // Ensure both address IDs are not null before proceeding + if (organizationAddressId == null || organizationAttorneyAddressId == null) { + log.error("Failed to insert or retrieve address IDs for record: " + organizationId) + continue + } + + // Step 4: Generate a unique organization_code + def organizationCode = null + while (true) { + organizationCode = generateRandomCode() + // Pass the checkOrganizationCodeQuery as a parameter + if (isOrganizationCodeUnique(destinationConn, organizationCode, checkOrganizationCodeQuery)) { + break + } + } + + // Step 5: Insert into the organization table with the generated address IDs and organization code + PreparedStatement insertOrgStmt = destinationConn.prepareStatement(insertOrganizationSQL) + insertOrgStmt.setInt(1, organizationId) + insertOrgStmt.setString(2, organizationCode) // Generated organization code + insertOrgStmt.setString(3, name) + insertOrgStmt.setString(4, operatingName) + insertOrgStmt.setString(5, email) + insertOrgStmt.setString(6, phone) + insertOrgStmt.setString(7, edrmsRecord) + insertOrgStmt.setInt(8, orgStatusId) + insertOrgStmt.setInt(9, orgTypeId) + insertOrgStmt.setInt(10, organizationAddressId) // Service address ID + insertOrgStmt.setInt(11, organizationAttorneyAddressId) // Attorney address ID + insertOrgStmt.executeUpdate() + } + +} catch (Exception e) { + log.error("Error occurred while processing data", e) +} finally { + if (sourceConn != null) sourceConn.close() + if (destinationConn != null) destinationConn.close() +} + +log.warn("**** COMPLETED ORGANIZATION ETL ****") diff --git a/etl/nifi_scripts/transfer.groovy b/etl/nifi_scripts/transfer.groovy new file mode 100644 index 000000000..88cdf9cd9 --- /dev/null +++ b/etl/nifi_scripts/transfer.groovy @@ -0,0 +1,520 @@ +import groovy.json.JsonSlurper +import java.sql.Connection +import java.sql.PreparedStatement +import java.sql.ResultSet +import java.time.OffsetDateTime +import java.sql.Timestamp + +log.warn("**** STARTING TRANSFER ETL ****") + +def SOURCE_QUERY = """ + WITH + internal_comment AS ( + SELECT + ctc.id, + ctc.credit_trade_id, + ctc.credit_trade_comment, + ctc.create_user_id, + ctc.create_timestamp, + STRING_AGG (r."name", '; ') AS role_names + FROM + credit_trade_comment ctc + JOIN "user" u ON u.id = ctc.create_user_id + AND u.organization_id = 1 + AND ctc.is_privileged_access = TRUE + JOIN user_role ur ON ur.user_id = u.id + JOIN "role" r ON ur.role_id = r.id + GROUP BY + ctc.id, + ctc.credit_trade_id, + ctc.credit_trade_comment, + ctc.create_user_id, + ctc.create_timestamp + ORDER BY + ctc.credit_trade_id, + ctc.create_timestamp + ) + SELECT + ct.id AS transfer_id, + ct.initiator_id AS from_organization_id, + ct.respondent_id AS to_organization_id, + ct.date_of_written_agreement AS agreement_date, + ct.trade_effective_date AS transaction_effective_date, + ct.fair_market_value_per_credit AS price_per_unit, + ct.number_of_credits AS quantity, + ct.create_user_id as create_user, + ct.update_user_id as update_user, + ct.update_timestamp as update_date, + ct.create_timestamp as create_date, + -- Aggregate comments from initiator's organization + COALESCE( + ctzr.description, + STRING_AGG (DISTINCT from_ctc.credit_trade_comment, '; ') + ) AS from_org_comment, + -- Aggregate comments from respondent's organization + STRING_AGG (DISTINCT to_ctc.credit_trade_comment, '; ') AS to_org_comment, + -- Aggregate comments from government with internal comment 
handling + STRING_AGG (DISTINCT gov_ctc.credit_trade_comment, '; ') AS gov_comment, + -- JSON aggregation for internal comments + json_agg (row_to_json (internal_comment)) AS internal_comments, + -- JSON aggregation for credit trade history + json_agg ( + json_build_object ( + 'transfer_id', + cth.credit_trade_id, + 'transfer_status', + case + WHEN cts_history.status = 'Cancelled' or cth.is_rescinded = true THEN 'Rescinded' + WHEN cts_history.status = 'Accepted' THEN 'Submitted' + WHEN cts_history.status IN ('Approved', 'Recorded') THEN 'Recorded' + WHEN cts_history.status IN ('Not Recommended', 'Declined') THEN 'Refused' + WHEN cts_history.status = 'Declined' THEN 'Refused' + WHEN cts_history.status = 'Refused' THEN 'Declined' + WHEN cts_history.status = 'Submitted' AND cth.is_rescinded = false THEN 'Sent' + ELSE cts_history.status + END, + 'user_profile_id', + cth.create_user_id, + 'create_timestamp', + cth.create_timestamp + ) + ) AS credit_trade_history, + COALESCE(ctc.category, NULL) AS transfer_category, + case + WHEN cts.status = 'Cancelled' or ct.is_rescinded = true THEN 'Rescinded' + WHEN cts.status = 'Accepted' THEN 'Submitted' + WHEN cts.status IN ('Approved', 'Recorded') THEN 'Recorded' + WHEN cts.status IN ('Not Recommended', 'Declined') THEN 'Refused' + WHEN cts.status = 'Declined' THEN 'Refused' + WHEN cts.status = 'Refused' THEN 'Declined' + WHEN cts.status = 'Submitted' THEN 'Sent' + ELSE cts.status + END AS current_status, + CASE + WHEN cts.status = 'Not Recommended' THEN 'Refuse' + WHEN cts.status = 'Recommended' THEN 'Record' + ELSE NULL + END AS recommendation + FROM + credit_trade ct + JOIN credit_trade_type ctt ON ct.type_id = ctt.id + LEFT OUTER JOIN credit_trade_category ctc ON ct.trade_category_id = ctc.id + JOIN credit_trade_status cts ON ct.status_id = cts.id + LEFT JOIN credit_trade_zero_reason ctzr ON ctzr.id = ct.zero_reason_id + AND ctzr.reason = 'Internal' + -- Join for Initiator Comments + LEFT JOIN credit_trade_comment from_ctc ON from_ctc.credit_trade_id = ct.id + AND from_ctc.create_user_id IN ( + SELECT + u.id + FROM + "user" u + WHERE + u.organization_id = ct.initiator_id + ) + -- Join for Respondent Comments + LEFT JOIN credit_trade_comment to_ctc ON to_ctc.credit_trade_id = ct.id + AND to_ctc.create_user_id IN ( + SELECT + u.id + FROM + "user" u + WHERE + u.organization_id = ct.respondent_id + ) + -- Join for Government Comments + LEFT JOIN credit_trade_comment gov_ctc ON gov_ctc.credit_trade_id = ct.id + AND gov_ctc.create_user_id IN ( + SELECT + u.id + FROM + "user" u + WHERE + u.organization_id = 1 + AND gov_ctc.is_privileged_access = FALSE + ) + -- Join the internal comment logic for role-based filtering and audience_scope + LEFT JOIN internal_comment ON internal_comment.credit_trade_id = ct.id + -- Join for credit trade history + LEFT JOIN credit_trade_history cth ON cth.credit_trade_id = ct.id + JOIN credit_trade_status cts_history ON cth.status_id = cts_history.id + WHERE + ctt.the_type IN ('Buy', 'Sell') + GROUP BY + ct.id, + ct.initiator_id, + ct.respondent_id, + ct.date_of_written_agreement, + ct.trade_effective_date, + ct.fair_market_value_per_credit, + ct.number_of_credits, + ctc.category, + cts.status, + ctzr.description, + internal_comment.role_names; + """ + +// Fetch connections to both the source and destination databases +def sourceDbcpService = context.controllerServiceLookup.getControllerService('3245b078-0192-1000-ffff-ffffba20c1eb') +def destinationDbcpService = 
context.controllerServiceLookup.getControllerService('3244bf63-0192-1000-ffff-ffffc8ec6d93') + +Connection sourceConn = null +Connection destinationConn = null + +try { + sourceConn = sourceDbcpService.getConnection() + destinationConn = destinationDbcpService.getConnection() + destinationConn.setAutoCommit(false) + + // Fetch status and category data once and cache it + def preparedData = prepareData(destinationConn) + + def statements = prepareStatements(destinationConn) + + destinationConn.createStatement().execute('DROP FUNCTION IF EXISTS refresh_transaction_aggregate() CASCADE;') + destinationConn.createStatement().execute(""" + CREATE OR REPLACE FUNCTION refresh_transaction_aggregate() + RETURNS void AS \$\$ + BEGIN + -- Temporarily disable the materialized view refresh + END; + \$\$ LANGUAGE plpgsql; + """) + + PreparedStatement sourceStmt = sourceConn.prepareStatement(SOURCE_QUERY) + ResultSet resultSet = sourceStmt.executeQuery() + + int recordCount = 0 + + while (resultSet.next()) { + recordCount++ + def jsonSlurper = new JsonSlurper() + def internalComments = resultSet.getString('internal_comments') + def creditTradeHistory = resultSet.getString('credit_trade_history') + + def internalCommentsJson = internalComments ? jsonSlurper.parseText(internalComments) : [] + def creditTradeHistoryJson = creditTradeHistory ? jsonSlurper.parseText(creditTradeHistory) : [] + + // First, determine if the transfer already exists + def transferIdFromSource = resultSet.getInt('transfer_id') + if (transferExists(destinationConn, transferIdFromSource)) { + log.warn("Duplicate transfer detected with transfer_id: ${transferIdFromSource}, skipping insertion.") + // Since this transfer already exists, do not insert transactions or history again. + continue + } + + // Only if transfer does not exist, proceed to create transactions and then insert the transfer. 
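+        // Status-to-transaction mapping implemented by processTransactions() below:
+        //   Sent / Submitted / Recommended -> one negative 'Reserved' transaction for the sender
+        //   Recorded -> two 'Adjustment' transactions (debit for sender, credit for receiver)
+        //   Draft / Deleted / Refused / Declined / Rescinded -> no transactions created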
+ def (fromTransactionId, toTransactionId) = processTransactions(resultSet.getString('current_status'), + resultSet, + statements.transactionStmt) + + def transferId = insertTransfer(resultSet, statements.transferStmt, + fromTransactionId, toTransactionId, preparedData, destinationConn) + + if (transferId) { + processHistory(transferId, creditTradeHistoryJson, statements.historyStmt, preparedData) + processInternalComments(transferId, internalCommentsJson, statements.internalCommentStmt, + statements.transferInternalCommentStmt) + } else { + log.warn("Transfer not inserted for record: ${transferIdFromSource}") + } + } + resultSet.close() + destinationConn.createStatement().execute(""" + CREATE OR REPLACE FUNCTION refresh_transaction_aggregate() + RETURNS void AS \$\$ + BEGIN + REFRESH MATERIALIZED VIEW CONCURRENTLY mv_transaction_aggregate; + END; + \$\$ LANGUAGE plpgsql; + """) + destinationConn.createStatement().execute('REFRESH MATERIALIZED VIEW CONCURRENTLY mv_transaction_aggregate') + + destinationConn.commit() + log.debug("Processed ${recordCount} records successfully.") +} catch (Exception e) { + log.error('Error occurred while processing data', e) + destinationConn?.rollback() +} finally { + if (sourceConn != null) sourceConn.close() + if (destinationConn != null) destinationConn.close() +} + +def logResultSetRow(ResultSet rs) { + def metaData = rs.getMetaData() + def columnCount = metaData.getColumnCount() + def rowData = [:] + + for (int i = 1; i <= columnCount; i++) { + def columnName = metaData.getColumnName(i) + def columnValue = rs.getObject(i) + rowData[columnName] = columnValue + } + + log.debug("Row data: ${rowData}") +} + +def loadTableData(Connection conn, String query, String keyColumn, String valueColumn) { + def dataMap = [:] + def stmt = conn.createStatement() + def rs = stmt.executeQuery(query) + + while (rs.next()) { + def key = rs.getString(keyColumn) + def value = rs.getInt(valueColumn) + if (dataMap.containsKey(key)) { + log.warn("Duplicate key found for ${key}. Existing value: ${dataMap[key]}, New value: ${value}.") + } + dataMap[key] = value + } + + rs.close() + stmt.close() + return dataMap +} + +def prepareData(Connection conn) { + def categoryMap = loadTableData(conn, + 'SELECT DISTINCT category, MIN(transfer_category_id) AS transfer_category_id FROM transfer_category GROUP BY category', + 'category', + 'transfer_category_id' + ) + def statusMap = loadTableData(conn, + 'SELECT DISTINCT status, MIN(transfer_status_id) AS transfer_status_id FROM transfer_status GROUP BY status', + 'status', + 'transfer_status_id' + ) + return [ + categoryMap: categoryMap, + statusMap : statusMap + ] +} + +def getTransferCategoryId(String category, Map preparedData) { + return preparedData.categoryMap[category] +} + +def getStatusId(String status, Map preparedData) { + return preparedData.statusMap[status] +} + +def prepareStatements(Connection conn) { + def INSERT_TRANSFER_SQL = ''' + INSERT INTO transfer ( + from_organization_id, to_organization_id, from_transaction_id, to_transaction_id, + agreement_date, transaction_effective_date, price_per_unit, quantity, from_org_comment, to_org_comment, gov_comment, + transfer_category_id, current_status_id, recommendation, create_date, update_date, create_user, update_user, effective_status, transfer_id + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?::transfer_recommendation_enum, ?, ?, ?, ?, true, ?) 
+ RETURNING transfer_id + ''' + def INSERT_TRANSFER_HISTORY_SQL = ''' + INSERT INTO transfer_history ( + transfer_history_id, transfer_id, transfer_status_id, user_profile_id, create_date, effective_status + ) VALUES (DEFAULT, ?, ?, ?, ?, true) + ''' + def INSERT_INTERNAL_COMMENT_SQL = ''' + INSERT INTO internal_comment ( + internal_comment_id, comment, audience_scope, create_user, create_date + ) VALUES (DEFAULT, ?, ?::audience_scope, ?, ?) + RETURNING internal_comment_id + ''' + def INSERT_TRANSFER_INTERNAL_COMMENT_SQL = ''' + INSERT INTO transfer_internal_comment ( + transfer_id, internal_comment_id + ) VALUES (?, ?) + ''' + def INSERT_TRANSACTION_SQL = ''' + INSERT INTO transaction ( + transaction_id, compliance_units, organization_id, transaction_action, effective_date, create_user, create_date, effective_status + ) VALUES (DEFAULT, ?, ?, ?::transaction_action_enum, ?, ?, ?, true) + RETURNING transaction_id + ''' + return [transferStmt : conn.prepareStatement(INSERT_TRANSFER_SQL), + historyStmt : conn.prepareStatement(INSERT_TRANSFER_HISTORY_SQL), + internalCommentStmt : conn.prepareStatement(INSERT_INTERNAL_COMMENT_SQL), + transferInternalCommentStmt: conn.prepareStatement(INSERT_TRANSFER_INTERNAL_COMMENT_SQL), + transactionStmt : conn.prepareStatement(INSERT_TRANSACTION_SQL)] +} + +def toSqlTimestamp(String timestampString) { + try { + // Parse the ISO 8601 timestamp and convert to java.sql.Timestamp + def offsetDateTime = OffsetDateTime.parse(timestampString) + return Timestamp.from(offsetDateTime.toInstant()) + } catch (Exception e) { + log.error("Invalid timestamp format: ${timestampString}, defaulting to '1970-01-01T00:00:00Z'") + return Timestamp.valueOf('1970-01-01 00:00:00') + } +} + +def processTransactions(String currentStatus, ResultSet rs, PreparedStatement stmt) { + def fromTransactionId = null + def toTransactionId = null + + switch (currentStatus) { + case ['Draft', 'Deleted', 'Refused', 'Declined', 'Rescinded']: + break + case ['Sent', 'Submitted', 'Recommended']: + fromTransactionId = insertTransaction(stmt, rs, 'Reserved', rs.getInt('from_organization_id'), true) + break + case 'Recorded': + fromTransactionId = insertTransaction(stmt, rs, 'Adjustment', rs.getInt('from_organization_id'), true) + toTransactionId = insertTransaction(stmt, rs, 'Adjustment', rs.getInt('to_organization_id'), false) + break + } + + return [fromTransactionId, toTransactionId] +} + +def insertTransaction(PreparedStatement stmt, ResultSet rs, String action, int orgId, boolean isDebit) { + def quantity = rs.getInt('quantity') + if (isDebit) { + quantity *= -1 // Make the transaction negative for the sender + } + + stmt.setInt(1, quantity) + stmt.setInt(2, orgId) + stmt.setString(3, action) + stmt.setDate(4, rs.getDate('transaction_effective_date') ?: rs.getDate('agreement_date')) + stmt.setInt(5, rs.getInt('create_user')) + stmt.setTimestamp(6, rs.getTimestamp('create_date')) + + def result = stmt.executeQuery() + return result.next() ? 
result.getInt('transaction_id') : null
+}
+
+def transferExists(Connection conn, int transferId) {
+    def duplicateCheckStmt = conn.prepareStatement('SELECT COUNT(*) FROM transfer WHERE transfer_id = ?')
+    duplicateCheckStmt.setInt(1, transferId)
+    def duplicateResult = duplicateCheckStmt.executeQuery()
+    duplicateResult.next()
+    def count = duplicateResult.getInt(1)
+    duplicateResult.close()
+    duplicateCheckStmt.close()
+
+    return count > 0
+}
+
+def processHistory(Integer transferId, List creditTradeHistory, PreparedStatement historyStmt, Map preparedData) {
+    if (!creditTradeHistory) return
+
+    // Use a Set to track unique combinations of transfer_id and transfer_status
+    def processedEntries = new HashSet()
+
+    creditTradeHistory.each { historyItem ->
+        try {
+            def statusId = getStatusId(historyItem.transfer_status, preparedData)
+            def uniqueKey = "${transferId}_${statusId}"
+
+            // Check if this combination has already been processed
+            if (!processedEntries.contains(uniqueKey)) {
+                // If not processed, add to batch and mark as processed
+                historyStmt.setInt(1, transferId)
+                historyStmt.setInt(2, statusId)
+                historyStmt.setInt(3, historyItem.user_profile_id)
+                historyStmt.setTimestamp(4, toSqlTimestamp(historyItem.create_timestamp ?: '2013-01-01T00:00:00Z'))
+                historyStmt.addBatch()
+
+                processedEntries.add(uniqueKey)
+            }
+        } catch (Exception e) {
+            log.error("Error processing history record for transfer_id: ${transferId}", e)
+        }
+    }
+
+    // Execute batch
+    historyStmt.executeBatch()
+}
+
+def processInternalComments(Integer transferId, List internalComments,
+                            PreparedStatement internalCommentStmt,
+                            PreparedStatement transferInternalCommentStmt) {
+    if (!internalComments) return
+
+    internalComments.each { comment ->
+        if (!comment) return // Skip null comments
+
+        try {
+            // Insert the internal comment
+            internalCommentStmt.setString(1, comment.credit_trade_comment ?: '')
+            internalCommentStmt.setString(2, getAudienceScope(comment.role_names ?: ''))
+            // Guard against a missing create_user_id: setInt(null) would throw an NPE on unboxing
+            if (comment.create_user_id != null) {
+                internalCommentStmt.setInt(3, comment.create_user_id)
+            } else {
+                internalCommentStmt.setNull(3, java.sql.Types.INTEGER)
+            }
+            internalCommentStmt.setTimestamp(4, toSqlTimestamp(comment.create_timestamp ?: '2013-01-01T00:00:00Z'))
+
+            def internalCommentId = null
+            def commentResult = internalCommentStmt.executeQuery()
+            if (commentResult.next()) {
+                internalCommentId = commentResult.getInt('internal_comment_id')
+
+                // Insert the transfer-comment relationship
+                transferInternalCommentStmt.setInt(1, transferId)
+                transferInternalCommentStmt.setInt(2, internalCommentId)
+                transferInternalCommentStmt.executeUpdate()
+            }
+
+            commentResult.close()
+        } catch (Exception e) {
+            log.error("Error processing internal comment for transfer ${transferId}: ${e.getMessage()}", e)
+        }
+    }
+}
+
+// Helper function to determine audience scope based on role names
+def getAudienceScope(String roleNames) {
+    if (!roleNames) return 'Analyst'
+
+    switch (true) {
+        case roleNames.contains('GovDirector'):
+            return 'Director'
+        case roleNames.contains('GovComplianceManager'):
+            return 'Compliance Manager'
+        default:
+            return 'Analyst'
+    }
+}
+
+def insertTransfer(ResultSet rs, PreparedStatement transferStmt, Long fromTransactionId,
+                   Long toTransactionId, Map preparedData, Connection conn) {
+    // Check for duplicates in the `transfer` table (also checked in the main loop via transferExists)
+    def transferId = rs.getInt('transfer_id')
+    def duplicateCheckStmt = conn.prepareStatement('SELECT COUNT(*) FROM transfer WHERE transfer_id = ?')
+    duplicateCheckStmt.setInt(1, transferId)
+    def duplicateResult = duplicateCheckStmt.executeQuery()
+    duplicateResult.next()
+    def count
= duplicateResult.getInt(1) + duplicateResult.close() + duplicateCheckStmt.close() + + if (count > 0) { + log.warn("Duplicate transfer detected with transfer_id: ${transferId}, skipping insertion.") + return null + } + + // Proceed with insertion if no duplicate exists + def categoryId = getTransferCategoryId(rs.getString('transfer_category'), preparedData) + def statusId = getStatusId(rs.getString('current_status'), preparedData) + transferStmt.setInt(1, rs.getInt('from_organization_id')) + transferStmt.setInt(2, rs.getInt('to_organization_id')) + transferStmt.setObject(3, fromTransactionId) + transferStmt.setObject(4, toTransactionId) + transferStmt.setTimestamp(5, rs.getTimestamp('agreement_date')) + transferStmt.setTimestamp(6, rs.getTimestamp('transaction_effective_date')) + transferStmt.setBigDecimal(7, rs.getBigDecimal('price_per_unit')) + transferStmt.setInt(8, rs.getInt('quantity')) + transferStmt.setString(9, rs.getString('from_org_comment')) + transferStmt.setString(10, rs.getString('to_org_comment')) + transferStmt.setString(11, rs.getString('gov_comment')) + transferStmt.setObject(12, categoryId) + transferStmt.setObject(13, statusId) + transferStmt.setString(14, rs.getString('recommendation')) + transferStmt.setTimestamp(15, rs.getTimestamp('create_date')) + transferStmt.setTimestamp(16, rs.getTimestamp('update_date')) + transferStmt.setInt(17, rs.getInt('create_user')) + transferStmt.setInt(18, rs.getInt('update_user')) + transferStmt.setInt(19, rs.getInt('transfer_id')) + def result = transferStmt.executeQuery() + return result.next() ? result.getInt('transfer_id') : null +} + +log.warn("**** COMPLETED TRANSFER ETL ****") diff --git a/etl/nifi_scripts/user.groovy b/etl/nifi_scripts/user.groovy new file mode 100644 index 000000000..77fb9bdf7 --- /dev/null +++ b/etl/nifi_scripts/user.groovy @@ -0,0 +1,160 @@ +import java.sql.Connection +import java.sql.PreparedStatement +import java.sql.ResultSet + +log.warn("**** STARTING USER ETL ****") + +// SQL query to extract user profiles +def userProfileQuery = """ + SELECT id as user_profile_id, + keycloak_user_id, + COALESCE(NULLIF(email, ''), 'test@gov.bc.ca') as keycloak_email, + username as keycloak_username, + COALESCE(NULLIF(email, ''), 'test@gov.bc.ca') as email, + title, + phone, + cell_phone as mobile_phone, + first_name, + last_name, + is_active, + CASE WHEN organization_id = 1 THEN null ELSE organization_id END as organization_id + FROM public.user; +""" + +// SQL query to extract user roles +def userRoleQuery = """ + SELECT ur.user_id as user_profile_id, + CASE + WHEN r.name = 'Admin' THEN 'ADMINISTRATOR' + WHEN r.name = 'GovUser' THEN 'ANALYST' + WHEN r.name IN ('GovDirector', 'GovDeputyDirector') THEN 'DIRECTOR' + WHEN r.name = 'GovComplianceManager' THEN 'COMPLIANCE_MANAGER' + WHEN r.name = 'FSAdmin' THEN 'MANAGE_USERS' + WHEN r.name = 'FSUser' THEN 'TRANSFER' + WHEN r.name = 'FSManager' THEN 'SIGNING_AUTHORITY' + WHEN r.name = 'FSNoAccess' THEN 'READ_ONLY' + WHEN r.name = 'ComplianceReporting' THEN 'COMPLIANCE_REPORTING' + ELSE NULL + END AS role_name + FROM public.user u + INNER JOIN user_role ur ON ur.user_id = u.id + INNER JOIN role r ON r.id = ur.role_id + WHERE r.name NOT IN ('FSDocSubmit', 'GovDoc'); +""" + +// SQL queries to insert user profiles and roles into destination tables with ON CONFLICT handling +def insertUserProfileSQL = """ + INSERT INTO user_profile (user_profile_id, keycloak_user_id, keycloak_email, keycloak_username, email, title, phone, mobile_phone, first_name, last_name, is_active, 
organization_id) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) + ON CONFLICT (user_profile_id) DO UPDATE + SET keycloak_user_id = EXCLUDED.keycloak_user_id, + keycloak_email = EXCLUDED.keycloak_email, + keycloak_username = EXCLUDED.keycloak_username, + email = EXCLUDED.email, + title = EXCLUDED.title, + phone = EXCLUDED.phone, + mobile_phone = EXCLUDED.mobile_phone, + first_name = EXCLUDED.first_name, + last_name = EXCLUDED.last_name, + is_active = EXCLUDED.is_active, + organization_id = EXCLUDED.organization_id; +""" + +def insertUserRoleSQL = """ + INSERT INTO user_role (user_profile_id, role_id) + VALUES (?, (SELECT role_id FROM role WHERE name = ?::role_enum)) + ON CONFLICT (user_profile_id, role_id) DO NOTHING; +""" + +// Fetch connections to both source and destination databases +def sourceDbcpService = context.controllerServiceLookup.getControllerService("3245b078-0192-1000-ffff-ffffba20c1eb") +def destinationDbcpService = context.controllerServiceLookup.getControllerService("3244bf63-0192-1000-ffff-ffffc8ec6d93") + +Connection sourceConn = null +Connection destinationConn = null + +try { + // Get connections + sourceConn = sourceDbcpService.getConnection() + destinationConn = destinationDbcpService.getConnection() + + // Step 1: Extract User Profile Data + PreparedStatement sourceProfileStmt = sourceConn.prepareStatement(userProfileQuery) + ResultSet userProfileResultSet = sourceProfileStmt.executeQuery() + + // Prepare the destination SQL for inserting/updating user profiles + PreparedStatement insertUserProfileStmt = destinationConn.prepareStatement(insertUserProfileSQL) + PreparedStatement insertUserRoleStmt = destinationConn.prepareStatement(insertUserRoleSQL) + + // Process the result set for user profiles + while (userProfileResultSet.next()) { + def userProfileId = userProfileResultSet.getInt("user_profile_id") + def keycloakUserId = userProfileResultSet.getString("keycloak_user_id") + def keycloakEmail = userProfileResultSet.getString("keycloak_email") + def keycloakUsername = userProfileResultSet.getString("keycloak_username") + def email = userProfileResultSet.getString("email") + def title = userProfileResultSet.getString("title") + def phone = userProfileResultSet.getString("phone") + def mobilePhone = userProfileResultSet.getString("mobile_phone") + def firstName = userProfileResultSet.getString("first_name") + def lastName = userProfileResultSet.getString("last_name") + def isActive = userProfileResultSet.getBoolean("is_active") + def organizationId = userProfileResultSet.getObject("organization_id") // Nullable + + // Bind values to the prepared statement + insertUserProfileStmt.setInt(1, userProfileId) + insertUserProfileStmt.setString(2, keycloakUserId) + insertUserProfileStmt.setString(3, keycloakEmail) + insertUserProfileStmt.setString(4, keycloakUsername) + insertUserProfileStmt.setString(5, email) + insertUserProfileStmt.setString(6, title) + insertUserProfileStmt.setString(7, phone) + insertUserProfileStmt.setString(8, mobilePhone) + insertUserProfileStmt.setString(9, firstName) + insertUserProfileStmt.setString(10, lastName) + insertUserProfileStmt.setBoolean(11, isActive) + insertUserProfileStmt.setObject(12, organizationId) // Handle nulls correctly + + // Execute the insert/update for user profiles + insertUserProfileStmt.executeUpdate() + // Step 2: Insert user role based on organization_id + if (organizationId == null) { + // If organization_id is null, assign 'GOVERNMENT' role + insertUserRoleStmt.setInt(1, userProfileId) + 
insertUserRoleStmt.setString(2, 'GOVERNMENT')
+        } else {
+            // If organization_id is not null, assign 'SUPPLIER' role
+            insertUserRoleStmt.setInt(1, userProfileId)
+            insertUserRoleStmt.setString(2, 'SUPPLIER')
+        }
+
+        // Execute the insert for the user role
+        insertUserRoleStmt.executeUpdate()
+    }
+
+    // Step 3: Extract User Role Data
+    PreparedStatement sourceRoleStmt = sourceConn.prepareStatement(userRoleQuery)
+    ResultSet userRoleResultSet = sourceRoleStmt.executeQuery()
+
+    // Process the result set for user roles
+    while (userRoleResultSet.next()) {
+        def userProfileId = userRoleResultSet.getInt("user_profile_id")
+        def roleName = userRoleResultSet.getString("role_name")
+
+        // Bind values to the prepared statement
+        insertUserRoleStmt.setInt(1, userProfileId)
+        insertUserRoleStmt.setString(2, roleName)
+
+        // Execute the insert/update for user roles
+        insertUserRoleStmt.executeUpdate()
+    }
+
+} catch (Exception e) {
+    log.error("Error occurred while processing data", e)
+} finally {
+    // Close the connections
+    if (sourceConn != null) sourceConn.close()
+    if (destinationConn != null) destinationConn.close()
+}
+
+log.warn("**** COMPLETED USER ETL ****")
diff --git a/etl/readme.md b/etl/readme.md
new file mode 100755
index 000000000..37307864d
--- /dev/null
+++ b/etl/readme.md
@@ -0,0 +1,85 @@
+# ETL Overview
+This project sets up Apache NiFi along with two PostgreSQL databases, TFRS and LCFS, using Docker. It enables data migration between these databases via NiFi.
+
+## How to Use
+
+## 1. Run Containers:
+
+```bash
+$ docker-compose up -d
+```
+
+Starts three containers: NiFi, TFRS, and LCFS databases.
+
+## 2. Access NiFi:
+
+Go to http://localhost:8080/nifi/
+
+## 3. Load NiFi Template:
+
+In the NiFi UI, upload your template via the Upload Template button in the Operate section.
+
+You can find the template in the `templates` directory.
+
+Drag the template onto the canvas.
+
+## 4. Configure Databases:
+
+* In NiFi, click on the Configuration gear in the Operate box
+* Switch to the Controller Services tab
+* Click the configure button on the LCFS and TFRS database pool rows to edit them
+* Switch to the Properties tab and set "pass" as the password
+
+## 5. Enable Services:
+
+Click the lightning bolt next to each service to enable it.
+
+## 6. Data transfer between OpenShift and local containers:
+
+Copy test/prod/dev data into your local database using the `/etl/data-transfer.sh` script.
+This bash script creates a pg_dump .tar file from an OpenShift postgres container
+and imports it into a local postgres container, and vice versa.
+
+The script takes 4 arguments:
+
+- $1 = 'tfrs' or 'lcfs' (application)
+- $2 = 'test', 'prod', or 'dev' (environment)
+- $3 = 'import' or 'export' (direction of data transfer from OpenShift to local or vice versa)
+- $4 = 'local container name or id'
+
+### Example commands
+
+- . data-transfer.sh lcfs dev export 398cd4661173
+- . data-transfer.sh tfrs prod import 398cd4661173
+
+You will need to be logged in to OpenShift using oc.
+For installation of the OpenShift CLI, refer: https://console.apps.silver.devops.gov.bc.ca/command-line-tools
+For a login token, refer: https://oauth-openshift.apps.silver.devops.gov.bc.ca/oauth/token/request
+You will also need your LCFS postgres docker container running.
+
+## 7. Start NiFi Flow:
+
+Click the Start icon to begin the data flow.
+
+## NiFi Monitoring
+
+To monitor your NiFi data flow:
+
+## 1. View Flow Status:
+   - Check each processor and connection for real-time data on processed, queued, or penalized FlowFiles.
+   - Click on components for detailed stats and performance metrics.
+
+## 2. Enable Bulletins:
+   - Configure bulletins to receive alerts (INFO, WARN, ERROR) for any issues that require attention.
+
+## 3. Use Data Provenance:
+   - Track the lineage of each FlowFile to see its origin, processing steps, and final destination.
+
+## 4. Monitor System Health:
+   - Use the **Summary** tab to check overall system health, including memory usage, thread activity, and performance.
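+
+## Script Structure
+
+Each Groovy script in `nifi_scripts` follows the same basic shape: look up the TFRS (source) and LCFS (destination) connection pools from the NiFi Controller Services, stream rows from a source query, and write them to the destination inside a try/catch/finally that always closes both connections. The sketch below is illustrative only; the controller-service IDs, query, and table names are placeholders, not the values used by the shipped scripts:
+
+```groovy
+import java.sql.Connection
+import java.sql.PreparedStatement
+import java.sql.ResultSet
+
+// Placeholder controller-service IDs; the real IDs are configured in NiFi.
+def sourceDbcpService = context.controllerServiceLookup.getControllerService('SOURCE_DBCP_ID')
+def destinationDbcpService = context.controllerServiceLookup.getControllerService('DEST_DBCP_ID')
+
+Connection sourceConn = null
+Connection destinationConn = null
+
+try {
+    sourceConn = sourceDbcpService.getConnection()
+    destinationConn = destinationDbcpService.getConnection()
+
+    // Placeholder query and destination table.
+    ResultSet rs = sourceConn.prepareStatement('SELECT id, name FROM source_table').executeQuery()
+    PreparedStatement insertStmt = destinationConn.prepareStatement(
+        'INSERT INTO dest_table (id, name) VALUES (?, ?) ON CONFLICT (id) DO NOTHING'
+    )
+
+    while (rs.next()) {
+        insertStmt.setInt(1, rs.getInt('id'))
+        insertStmt.setString(2, rs.getString('name'))
+        insertStmt.executeUpdate() // idempotent thanks to ON CONFLICT DO NOTHING
+    }
+    rs.close()
+} catch (Exception e) {
+    log.error('Error occurred while processing data', e)
+} finally {
+    if (sourceConn != null) sourceConn.close()
+    if (destinationConn != null) destinationConn.close()
+}
+```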
+
+## Error Handling
+If any records cannot be added to the databases, they will be logged and stored in the `nifi_output` directory.
+
+You can access these failed records for further inspection or troubleshooting.
\ No newline at end of file
diff --git a/etl/templates/fuel_code.xml b/etl/templates/fuel_code.xml new file mode 100644 index 000000000..8f59186d7 --- /dev/null +++ b/etl/templates/fuel_code.xml @@ -0,0 +1,2173 @@ + +