diff --git a/.github/labeler.yml b/.github/labeler.yml index e18983173e..f1d9c2b14e 100644 --- a/.github/labeler.yml +++ b/.github/labeler.yml @@ -1,16 +1,16 @@ "frontend": - - any: ["src/frontend/**/*", "!src/frontend/public/install-fmtm.sh"] + - "src/frontend/**/*" "backend": - "src/backend/**/*" "devops": - ".github/**/*" - "nginx/**/*" - "scripts/**/*" - - "src/frontend/public/install-fmtm.sh" - "docker-*.yml" - "**/Dockerfile" - "**/*.dockerfile" - "**/*entrypoint.sh" + - "Justfile" "migration": - "src/backend/migrations/**/*" "documentation": diff --git a/.github/workflows/build_and_deploy.yml b/.github/workflows/build_and_deploy.yml index 149b0b484b..b7c83b7305 100644 --- a/.github/workflows/build_and_deploy.yml +++ b/.github/workflows/build_and_deploy.yml @@ -17,7 +17,7 @@ on: jobs: pytest: - uses: hotosm/gh-workflows/.github/workflows/test_compose.yml@1.4.2 + uses: hotosm/gh-workflows/.github/workflows/test_compose.yml@1.4.5 with: image_name: ghcr.io/${{ github.repository }}/backend build_context: src/backend @@ -25,15 +25,16 @@ jobs: compose_service: api compose_command: pytest tag_override: ci-${{ github.ref_name }} + coverage: true secrets: inherit frontend-tests: - uses: hotosm/gh-workflows/.github/workflows/test_pnpm.yml@1.4.2 + uses: hotosm/gh-workflows/.github/workflows/test_pnpm.yml@1.4.5 with: working_dir: src/frontend backend-build: - uses: hotosm/gh-workflows/.github/workflows/image_build.yml@1.4.2 + uses: hotosm/gh-workflows/.github/workflows/image_build.yml@1.4.5 needs: [pytest] with: context: src/backend @@ -41,7 +42,7 @@ jobs: image_name: ghcr.io/${{ github.repository }}/backend frontend-build: - uses: hotosm/gh-workflows/.github/workflows/image_build.yml@1.4.2 + uses: hotosm/gh-workflows/.github/workflows/image_build.yml@1.4.5 needs: [frontend-tests] with: context: src/frontend @@ -118,14 +119,37 @@ jobs: - name: Checkout repository uses: actions/checkout@v4 + - name: Create .env file + run: | + # Get a8m/envsubst (required for default vals syntax ${VAR:-default}) + echo "Downloading envsubst" + curl -L https://github.com/a8m/envsubst/releases/download/v1.2.0/envsubst-`uname -s`-`uname -m` -o envsubst + chmod +x envsubst + + echo "Substituing variables from .env.example --> .env" + ./envsubst < .env.example > .env + - name: Frontend smoke test - run: echo "Not implemented" + run: | + if docker compose up --detach \ + --no-deps --wait --wait-timeout 30 \ + ui + then + docker compose logs ui + echo "Sleeping 5 seconds to wait for dev server" + sleep 5 + curl --fail http://localhost:7051 || exit 1 + else + echo "Application not healthy after 30s. Exiting." 
+ docker compose logs ui + exit 1 + fi deploy-containers: needs: - smoke-test-backend - smoke-test-frontend - uses: hotosm/gh-workflows/.github/workflows/remote_deploy.yml@1.4.2 + uses: hotosm/gh-workflows/.github/workflows/remote_deploy.yml@1.4.5 with: environment: ${{ github.ref_name }} docker_compose_file: "docker-compose.${{ github.ref_name }}.yml" diff --git a/.github/workflows/build_ci_img.yml b/.github/workflows/build_ci_img.yml index 3769f6e2c9..6f0e5cf89c 100644 --- a/.github/workflows/build_ci_img.yml +++ b/.github/workflows/build_ci_img.yml @@ -16,7 +16,7 @@ on: jobs: backend-ci-build: - uses: hotosm/gh-workflows/.github/workflows/image_build.yml@1.4.2 + uses: hotosm/gh-workflows/.github/workflows/image_build.yml@1.4.5 with: context: src/backend build_target: ci diff --git a/.github/workflows/build_odk_imgs.yml b/.github/workflows/build_odk_imgs.yml index 72f4a6a1f2..d39e65f8d8 100644 --- a/.github/workflows/build_odk_imgs.yml +++ b/.github/workflows/build_odk_imgs.yml @@ -13,7 +13,7 @@ on: jobs: build-odkcentral: - uses: hotosm/gh-workflows/.github/workflows/image_build.yml@1.4.2 + uses: hotosm/gh-workflows/.github/workflows/image_build.yml@1.4.5 with: context: odkcentral/api image_tags: | @@ -26,7 +26,7 @@ jobs: # multi_arch: true build-odkcentral-ui: - uses: hotosm/gh-workflows/.github/workflows/image_build.yml@1.4.2 + uses: hotosm/gh-workflows/.github/workflows/image_build.yml@1.4.5 with: context: odkcentral/ui image_tags: | diff --git a/.github/workflows/build_proxy_imgs.yml b/.github/workflows/build_proxy_imgs.yml index f931603620..728c783c1c 100644 --- a/.github/workflows/build_proxy_imgs.yml +++ b/.github/workflows/build_proxy_imgs.yml @@ -10,7 +10,7 @@ on: jobs: build-cert-init-main: - uses: hotosm/gh-workflows/.github/workflows/image_build.yml@1.4.2 + uses: hotosm/gh-workflows/.github/workflows/image_build.yml@1.4.5 with: context: nginx build_target: certs-init-main @@ -21,7 +21,7 @@ jobs: multi_arch: true build-cert-init-dev: - uses: hotosm/gh-workflows/.github/workflows/image_build.yml@1.4.2 + uses: hotosm/gh-workflows/.github/workflows/image_build.yml@1.4.5 with: context: nginx build_target: certs-init-development @@ -33,7 +33,7 @@ jobs: multi_arch: true build-proxy-main: - uses: hotosm/gh-workflows/.github/workflows/image_build.yml@1.4.2 + uses: hotosm/gh-workflows/.github/workflows/image_build.yml@1.4.5 with: context: nginx build_target: main @@ -43,19 +43,8 @@ jobs: NGINX_TAG=${{ vars.NGINX_TAG }} multi_arch: true - build-proxy-main-plus-script: - uses: hotosm/gh-workflows/.github/workflows/image_build.yml@1.4.2 - with: - context: nginx - build_target: main-plus-script - image_tags: | - "ghcr.io/${{ github.repository }}/proxy:main-plus-script" - extra_build_args: | - NGINX_TAG=${{ vars.NGINX_TAG }} - multi_arch: true - build-proxy-dev: - uses: hotosm/gh-workflows/.github/workflows/image_build.yml@1.4.2 + uses: hotosm/gh-workflows/.github/workflows/image_build.yml@1.4.5 with: context: nginx build_target: development diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 3ae6c969d3..e46126692f 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -12,23 +12,22 @@ on: jobs: build_doxygen: - uses: hotosm/gh-workflows/.github/workflows/doxygen_build.yml@1.4.2 + uses: hotosm/gh-workflows/.github/workflows/doxygen_build.yml@1.4.5 with: output_path: docs/apidocs build_openapi_json: - uses: hotosm/gh-workflows/.github/workflows/openapi_build.yml@1.4.2 + uses: hotosm/gh-workflows/.github/workflows/openapi_build.yml@1.4.5 with: 
image: ghcr.io/${{ github.repository }}/backend:ci-${{ github.ref_name }} example_env_file_path: ".env.example" output_path: docs/openapi.json publish_docs: - uses: hotosm/gh-workflows/.github/workflows/mkdocs_build.yml@1.4.2 + uses: hotosm/gh-workflows/.github/workflows/mkdocs_build.yml@1.4.5 needs: - build_doxygen - build_openapi_json with: - image: ghcr.io/${{ github.repository }}/backend:ci-${{ github.ref_name }} doxygen: true openapi: true diff --git a/.github/workflows/pr_test_backend.yml b/.github/workflows/pr_test_backend.yml index 4505c82fc6..70aeae426d 100644 --- a/.github/workflows/pr_test_backend.yml +++ b/.github/workflows/pr_test_backend.yml @@ -14,7 +14,7 @@ on: jobs: pytest: - uses: hotosm/gh-workflows/.github/workflows/test_compose.yml@1.4.2 + uses: hotosm/gh-workflows/.github/workflows/test_compose.yml@1.4.5 with: image_name: ghcr.io/${{ github.repository }}/backend build_context: src/backend diff --git a/.github/workflows/pr_test_frontend.yml b/.github/workflows/pr_test_frontend.yml index 376864107e..a791f88af4 100644 --- a/.github/workflows/pr_test_frontend.yml +++ b/.github/workflows/pr_test_frontend.yml @@ -14,6 +14,6 @@ on: jobs: frontend-tests: - uses: hotosm/gh-workflows/.github/workflows/test_pnpm.yml@1.4.2 + uses: hotosm/gh-workflows/.github/workflows/test_pnpm.yml@1.4.5 with: working_dir: src/frontend diff --git a/.github/workflows/tag_build.yml b/.github/workflows/tag_build.yml index 3494452c7c..f7171d353a 100644 --- a/.github/workflows/tag_build.yml +++ b/.github/workflows/tag_build.yml @@ -9,7 +9,7 @@ on: jobs: backend-build: - uses: hotosm/gh-workflows/.github/workflows/image_build.yml@1.4.2 + uses: hotosm/gh-workflows/.github/workflows/image_build.yml@1.4.5 with: context: src/backend build_target: prod diff --git a/.github/workflows/tests/pytest.yml b/.github/workflows/tests/pytest.yml index 86ed03a9c3..2df6fe44ad 100644 --- a/.github/workflows/tests/pytest.yml +++ b/.github/workflows/tests/pytest.yml @@ -9,6 +9,9 @@ on: - staging - development +permissions: + contents: write + jobs: run-tests: runs-on: ubuntu-latest @@ -20,6 +23,7 @@ jobs: - name: Vars and Secrets to Env env: TAG_OVERRIDE: ${{ env.TAG_OVERRIDE || 'ci-development' }} + TARGET_OVERRIDE: ${{ env.TARGET_OVERRIDE || 'ci' }} GIT_BRANCH: ${{ github.ref_name }} VARS_CONTEXT: ${{ toJson(vars) }} SECRETS_CONTEXT: ${{ toJson(secrets) }} @@ -30,10 +34,6 @@ jobs: # Parse JSON with multiline strings, using delimeter (Github specific) to_envs() { jq -r "to_entries[] | \"\(.key)<<$delim\n\(.value)\n$delim\n\""; } - # Set all vars - echo "TAG_OVERRIDE=${TAG_OVERRIDE}" >> $GITHUB_ENV - echo "GIT_BRANCH=${GIT_BRANCH}" >> $GITHUB_ENV - # Set VARS_CONTEXT if not null if [ "${VARS_CONTEXT}" != "null" ]; then echo "${VARS_CONTEXT}" | to_envs >> $GITHUB_ENV @@ -51,18 +51,37 @@ jobs: curl -L https://github.com/a8m/envsubst/releases/download/v1.2.0/envsubst-`uname -s`-`uname -m` -o envsubst chmod +x envsubst - # Check if .env.example exists - if [ -f .env.example ]; then - echo "Substituting variables from .env.example --> .env" - ./envsubst < .env.example > .env - else - echo ".env.example not found, creating .env with GIT_BRANCH only" - echo "GIT_BRANCH=${GIT_BRANCH}" > .env - fi + echo "Substituting variables from .env.example --> .env" + ./envsubst < .env.example > .env + # Set all vars + echo "TAG_OVERRIDE=${TAG_OVERRIDE}" >> .env + echo "TARGET_OVERRIDE=${TARGET_OVERRIDE}" >> .env echo "GIT_BRANCH=${GIT_BRANCH}" >> .env - - name: Run Tests + - name: Run Tests With Coverage run: | docker compose up -d proxy - 
docker compose run api pytest + docker compose run --entrypoint "sh -c" \ + --volume ${{ github.workspace }}/coverage:/tmp/coverage api \ + "coverage run -m pytest \ + && coverage report && coverage html \ + && coverage-badge -o coverage.svg \ + && mv htmlcov/index.html /tmp/coverage/coverage.html \ + && mv coverage.svg /tmp/coverage/coverage.svg" + + - name: Upload Coverage + run: | + # Checkout to gh-pages + git config user.name svchot + git config user.email sysadmin@hotosm.org + git checkout gh-pages + + # Overwrite coverage index and badge + echo "Coverage dir contents:" + ls ${{ github.workspace }}/coverage + echo "" + mv ${{ github.workspace }}/coverage/* ${{ github.workspace }} + + # Assess diff + git --no-pager diff diff --git a/.github/workflows/tests/test_ci.sh b/.github/workflows/tests/test_ci.sh index aaad26820b..5ba7cd05a1 100644 --- a/.github/workflows/tests/test_ci.sh +++ b/.github/workflows/tests/test_ci.sh @@ -7,22 +7,25 @@ set -e ######################################## # TODO read personal access token -# read -p +# read -erp # GITHUB_TOKEN=input # Feed to act using -s flag: -s GITHUB_TOKEN=input_personal_access_token -# Run backend PyTest manually -docker compose build api -act pull_request -W .github/workflows/tests/pytest.yml \ - -e .github/workflows/tests/pr_payload.json \ - --var-file=.env --secret-file=.env +export TAG_OVERRIDE=ci +export TARGET_OVERRIDE=ci # # PR Test Backend -# Includes image build, which fails due to registry auth +# NOTE: Includes image build, which fails due to registry auth # act pull_request -W .github/workflows/pr_test_backend.yml \ # -e .github/workflows/tests/pr_payload.json \ # --var-file=.env --secret-file=.env +# Instead, run backend PyTest manually +TAG_OVERRIDE=ci TARGET_OVERRIDE=ci docker compose build api +act pull_request -W .github/workflows/tests/pytest.yml \ + -e .github/workflows/tests/pr_payload.json \ + --var-file=.env --secret-file=.env + # PR Test Frontend act pull_request -W .github/workflows/pr_test_frontend.yml \ -e .github/workflows/tests/pr_payload.json \ diff --git a/.github/workflows/wiki.yml b/.github/workflows/wiki.yml index acde513e24..d5dc1a430c 100644 --- a/.github/workflows/wiki.yml +++ b/.github/workflows/wiki.yml @@ -10,6 +10,6 @@ on: jobs: publish-docs-to-wiki: - uses: hotosm/gh-workflows/.github/workflows/wiki.yml@1.4.2 + uses: hotosm/gh-workflows/.github/workflows/wiki.yml@1.4.5 with: homepage_path: "wiki_redirect.md" diff --git a/.gitignore b/.gitignore index e43d4b65aa..7380de502c 100644 --- a/.gitignore +++ b/.gitignore @@ -80,6 +80,9 @@ temp_webmaps/Naivasha # mkdocs site +docs/apidocs +coverage +**/**/coverage* # Bash install script envsubst diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 1eef82bd17..04dd7cd7d0 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -6,21 +6,18 @@ repos: - id: commitizen stages: [commit-msg] - # Autoformat: Python code - - repo: https://github.com/psf/black - rev: 23.11.0 - hooks: - - id: black - files: ^src/backend/(?:.*/)*.*$ - args: [--target-version=py39] - # Lint / autoformat: Python code - repo: https://github.com/astral-sh/ruff-pre-commit - rev: "v0.1.6" + # Ruff version. + rev: "v0.1.13" hooks: + # Run the linter - id: ruff files: ^src/backend/(?:.*/)*.*$ args: [--fix, --exit-non-zero-on-fix] + # Run the formatter + - id: ruff-format + files: ^src/backend/(?:.*/)*.*$ # Autoformat: YAML, JSON, Markdown, etc. 
- repo: https://github.com/pre-commit/mirrors-prettier @@ -33,12 +30,53 @@ repos: --no-error-on-unmatched-pattern, "!chart/**", "!CHANGELOG.md", + "!CONTRIBUTING.md", + "!LICENSE.md", "!src/frontend/pnpm-lock.yaml", ] + # # Lint: Dockerfile (disabled until binary is bundled) + # - repo: https://github.com/hadolint/hadolint.git + # rev: v2.12.1-beta + # hooks: + # - id: hadolint + # args: + # [ + # "--ignore=DL3008", + # "--ignore=DL3013", + # "--ignore=DL3018", + # "--ignore=DL3059", + # ] + + # Lint: Bash scripts + - repo: https://github.com/openstack-dev/bashate.git + rev: 2.1.1 + hooks: + - id: bashate + files: ^(?!.*(?:^|/)contrib(?:/|$)).*$ + + # Lint: Shell scripts + - repo: https://github.com/shellcheck-py/shellcheck-py + rev: v0.9.0.6 + hooks: + - id: shellcheck + files: ^(?!.*(?:^|/)contrib(?:/|$)).*$ + args: ["-x", "--exclude=SC2317,SC2188,SC2143"] + # Lint: Markdown - repo: https://github.com/igorshubovych/markdownlint-cli - rev: v0.37.0 + rev: v0.38.0 hooks: - id: markdownlint - args: [--fix, --ignore, CHANGELOG.md, --ignore, .github] + args: + [ + --fix, + --disable, + MD033, + --ignore, + LICENSE.md, + --ignore, + CHANGELOG.md, + --ignore, + .github, + ] diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md deleted file mode 100644 index 6f18f60fde..0000000000 --- a/CODE_OF_CONDUCT.md +++ /dev/null @@ -1,83 +0,0 @@ -# 📜 Code of conduct - -(The latest version can be found at ) - -Welcome to Humanitarian OpenStreetMap Team. HOT is committed to providing a welcoming and safe environment for people of all races, gender identities, gender expressions, sexual orientations, physical abilities, physical appearances, socio-economic backgrounds, nationalities, ages, religions, and beliefs. - -The HOT community principles are: - -- **Be friendly and patient.** Be generous and kind in both giving and accepting critique. Critique is a natural and important part of our culture. Good critiques are kind, respectful, clear, and constructive, focused on goals and requirements rather than personal preferences. You are expected to give and receive criticism with grace. Be considerate in speech and actions, and actively seek to acknowledge and respect the boundaries of fellow attendees. - -- **Be welcoming.** We strive to be a community that welcomes and supports people of all backgrounds and identities. Some examples of behavior that contributes to creating a positive environment include: - - - Using welcoming and inclusive language. - - - Being respectful of differing viewpoints and experiences. - - - Gracefully accepting constructive criticism. - - - Showing empathy towards other community members. - - - Placing collective interest before your own interest. - -- **Be considerate.** Your work will be used by other people, and you in turn will depend on the work of others. Any decision you take will affect users and colleagues, and you should take those consequences into account when making decisions. Remember that we're a world-wide community, so you might not be communicating in someone else's primary language. - -- **Be respectful.** Not all of us will agree all the time, but disagreement is no excuse for poor behavior and poor manners. We might all experience some frustration now and then, but we cannot allow that frustration to turn into a personal attack. It’s important to remember that a community where people feel uncomfortable or threatened is not a productive one. 
Members of the HOT community should be respectful when dealing with other members as well as with people outside the HOT community. - -- **Be careful in your word choice.** We are a global community of professionals, and we conduct ourselves professionally. Be kind to others. Do not insult or put down other participants. Harassment and other exclusionary behavior aren't acceptable. This includes, but is not limited to: - - - Violent threats or language directed against another person. - - - Discriminatory jokes and language. - - - Posting sexually explicit or violent material. - - - Posting (or threatening to post) other people's personally identifying information ("doxing"). - - - Personal insults, especially those using racist or sexist terms. - - - Unwelcome sexual attention. - - - Advocating for, or encouraging, any of the above behavior. - - - Repeated harassment of others. In general, if someone asks you to stop, then stop. - -- **Assume all communications are positive.** Always remain polite, and assume good faith. It is surprisingly easy to misunderstand each other, be it online or in person, particularly in such a culturally diverse setting as ours. Misunderstandings are particularly easy to arise when we are in a rush, or otherwise distracted. Please ask clarifying questions before assuming that a communication was inappropriate. - -- **When we disagree, try to understand why.** Disagreements, both social and technical, happen easily and often. It is important that we resolve such disagreements and differing views constructively. At times it can be hard to appreciate a viewpoint that contradicts your own perceptions. Instead of pushing back, try to understand where the other person is coming from, and don’t be afraid to ask questions. You can be most helpful if your own replies serve to clarify, rather than to escalate an issue. Also don’t forget that it can be easy to make mistakes, and allow for the possibility that the mistake may have been yours. When this happens it is better to resolve the issue together, and to learn from the experience together, than to place blame. - -Original text courtesy of the [Speak Up! project](http://web.archive.org/web/20141109123859/http://speakup.io/coc.html). - -Further sources: - -- [Ada Initiative: HOWTO design a code of conduct for your community](https://adainitiative.org/2014/02/18/howto-design-a-code-of-conduct-for-your-community/) - -- [Algorithm Club Code of Conduct](https://github.com/drtortoise/critical-algorithm-studies/blob/master/code-of-conduct.md) - -- [American Red Cross GIS Team Code of Conduct](https://github.com/AmericanRedCross/team-code-of-conduct) - -- [Contributor Covenant – A Code of Conduct for Open Source Projects](http://contributor-covenant.org/) - -- [Django Code of Conduct](https://www.djangoproject.com/conduct/) - -- [Mozilla Community Participation Guidelines](https://www.mozilla.org/en-US/about/governance/policies/participation/) - -- [Vox Media Code of Conduct](http://code-of-conduct.voxmedia.com/) - -## Complaint Handling Process - -As a first measure, it is preferable to work out issues directly with the people involved, or to work with other Community Members who can help you resolve the issue. This may take several forms: - -- Talk with one another. Assume that communications are positive and that people are treating each other with respect. Cues about emotions are often lacking from digital communications. 
Many of our modes of digital communication tend towards brevity, which can be easier to interpret incorrectly as being negative. - -- Contact a representative of the [Community Working Group](https://www.hotosm.org/community/working-groups/), which exists to support the HOT Community. Representatives are available to discuss any concerns about behaviour within the community, or ideas to promote positive behaviours. You can email them at [community@hotosm.org](mailto:community@hotosm.org). - -- Contact a representative of the [Governance Working Group](https://www.hotosm.org/community/working-groups/), which drafted these recommendations and the CoC. Representatives are available to provide advice on particular scenarios, as well as on the processes around the CoC. - -- Contact the HOT Chair of Voting Members. - -- Contact a [HOT Board Member](https://www.hotosm.org/board). Board members are well versed in the community and its management. They can offer advice on your particular situation, and know the resources of the organization that may be available to you. - -- Contact the HOT Community Partnerships Manager. - -When these informal processes fail, or when a situation warrants an immediate response by HOT, you can evoke the **HOT Policy and Code of Conduct Complaint Handling Process**. This process was adopted by HOT Voting Members in 2016 to provide a more formal means of enforcement for our community standards. You start it by emailing [complaints@hotosm.org](mailto:compaints@hotosm.org) with a description of your complaint, your name, and the name of the offending party. All complaints will be considered confidential. The full process is described [here](https://docs.google.com/document/d/1xb-SPADtSbgwl6mAgglHMPHpknt-E7lKRoIcSbW431A/edit) . diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 322f93b478..bc3ec7956f 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,20 +1,32 @@ -## 🤗 Welcome +# 🤗 Welcome -:+1::tada: First off, We are really glad you're reading this, because we need volunteer developers to help improve the Field Mapping Tasking Manager (FMTM)! :tada::+1: +:+1::tada: First off, We are really glad you're reading this, because we need +volunteer developers to help improve the Field Mapping Tasking Manager (FMTM)! +:tada::+1: -We welcome and encourage contributors of all skill levels, and we are committed to making sure your participation is inclusive, enjoyable, and rewarding. If you have never contributed to an open source project before, we are a good place to start, and we will make sure you are supported every step of the way. If you have **any** questions, please ask! +We welcome and encourage contributors of all skill levels, and we are committed +to making sure your participation is inclusive, enjoyable, and rewarding. If +you have never contributed to an open source project before, we are a good +place to start, and we will make sure you are supported every step of the way. +If you have **any** questions, please ask! -You can see an overview of the project and the process we have gone through in developing FMTM so far in these [slides](https://docs.google.com/presentation/d/1UrBG1X4MXwVd8Ps498FDlAYvesIailjjPPJfR_B4SUs/edit#slide=id.g15c1f409958_0_0) . 
+You can see an overview of the project and the process we have gone through in +developing FMTM so far in these +[slides][1] -Furthermore, there are many ways to contribute to the **Field Mapping Tasking Manager (FMTM)**, which includes: +Furthermore, there are many ways to contribute to the +**Field Mapping Tasking Manager (FMTM)**, which includes: ## Testing -Right now, we are in the process of building the prototype. We warmly welcome your input in testing and sharing your feedback. If you are also interested in coordinating a field testing session, please reach out! +Right now, we are in the process of building the prototype. We warmly welcome +your input in testing and sharing your feedback. If you are also interested in +coordinating a field testing session, please reach out! ## Code contributions -Create pull requests (PRs) for changes that you think are needed. We would really appreciate your help! +Create pull requests (PRs) for changes that you think are needed. We would +really appreciate your help! Skills with the following would be beneficial: @@ -25,23 +37,31 @@ Skills with the following would be beneficial: - Docker - CI/CD workflows -Our latest task board can be found [here](https://github.com/orgs/hotosm/projects/22). +Our latest task board can be found +[here][2]. ## Report bugs and suggest improvements -The [issue queue](https://github.com/hotosm/fmtm/issues) is the best way to get started. There are issue templates for BUGs and FEATURES that you can use, you could also create your own. Once you have submitted an issue, it will be assigned one label from the following [label categories](https://github.com/hotosm/fmtm/labels). If you are wondering where to start, you can filter by the **good first issue label**. +The [issue queue][3] is the best way to get +started. There are issue templates for BUGs and FEATURES that you can use, you +could also create your own. Once you have submitted an issue, it will be +assigned one label from the following +[label categories][4]. +If you are wondering where to start, you can filter by the +**good first issue label**. ## :handshake: Thank you -Thank you very much in advance for your contributions!! Please ensure you refer to our **Code of Conduct**. -If you've read the guidelines, but are still not sure how to contribute on Github, please reach out to us via our Slack **#geospatial-tech-and-innovation**. +Thank you very much in advance for your contributions!! Please ensure you refer +to our **Code of Conduct**. +If you've read the guidelines, but are still not sure how to contribute on +Github, please reach out to us via our Slack **#geospatial-tech-and-innovation**. -# Code Contribution guidelines +## Code Contribution guidelines -## Workflow +### Workflow -We operate the "Fork & Pull" model explained at [About Pull -Requests](https://help.github.com/articles/about-pull-requests/) +We operate the "Fork & Pull" model explained at [About Pull Requests][5] You should fork the project into your own repo, create a topic branch there and then make one or more pull requests back to the repository. @@ -58,7 +78,7 @@ complications with other developers. The old free software joke is "patches are better than bug reports" is how we contribute to the community of people involved with this project. -# If you are reporting a problem +### If you are reporting a problem - Describe exactly what you were trying to achieve, what you did, what you expected to happen and what did happen instead. 
Include relevant information @@ -70,15 +90,14 @@ community of people involved with this project. keeps issues small and manageable and makes it much easier to follow through and make sure each problem is taken care of. -## Documentation +### Documentation -Project documentation should be in [Markdown -format](https://www.markdownguide.org/), and in a _docs_ +Project documentation should be in [Markdown format][6], and in a _docs_ subdirectory. While it is possible to use HTML in Markdown documents for tables and images, it is prefered to use the Markdown style as it's much easier to read. -## Coding Style +### Coding Style Python enforces a certain amount of style due to indent levels. Unlike C/C++, we don't have to worry about curly braces. It is prefered that @@ -88,10 +107,10 @@ to be easily reused and run either standalone, or part of a REST API backend. Code that is not designed to be run standalone can have a main function to do simple testing during development. That test code should be moved to a standalone test case when possible. -[Pytest](https://pytest.org/) is used as the test framework for +[Pytest][7] is used as the test framework for standalone test cases. -Code follows a [CamelCase](https://en.wikipedia.org/wiki/Camel_case) +Code follows a [CamelCase][8] style. Classes use an Upper Case for the first word, method use a lower case for the first word. Variable names are all lower case with an underbar as a word separator. Properly naming everything makes it @@ -99,9 +118,19 @@ much easier to read the code and get an idea of what it is doing. This enables people new to this project to contribute easier. All methods should have a comment that can be used by -[pydoc](https://docs.python.org/3/library/pydoc.html). The usage of +[pydoc][9]. The usage of base classes is encouraged so functionality can be shared. Comments in the code are encouraged when necessary to explain code that may not be obvious, but avoid over commenting as well. Code should be able to be read like a book, with descriptive names used, no fancy tricks unless required. Always be concious of performance and security. + +[1]: https://docs.google.com/presentation/d/1UrBG1X4MXwVd8Ps498FDlAYvesIailjjPPJfR_B4SUs/edit#slide=id.g15c1f409958_0_0 "slides" +[2]: https://github.com/orgs/hotosm/projects/22 "Our latest task board" +[3]: https://github.com/hotosm/fmtm/issues "issue queue" +[4]: https://github.com/hotosm/fmtm/labels "label categories" +[5]: https://help.github.com/articles/about-pull-requests/ "About Pull Requests" +[6]: https://www.markdownguide.org/ "Markdown format" +[7]: https://pytest.org/ "Pytest" +[8]: https://en.wikipedia.org/wiki/Camel_case "CamelCase" +[9]: https://docs.python.org/3/library/pydoc.html "pydoc" diff --git a/INSTALL.md b/INSTALL.md index d4739f4a3a..244dc912da 100644 --- a/INSTALL.md +++ b/INSTALL.md @@ -4,23 +4,21 @@ It is recommended to run FMTM on a Linux-based machine. -> This includes MacOS, but some [tools must be -> substituted](#alternative-operating-systems). +> This includes MacOS, but some [tools must be substituted][1]. > -> For Windows users, the easiest option is to use [Windows Subsystem for -> Linux](#alternative-operating-systems) +> For Windows users, the easiest option is to use [Windows Subsystem for Linux][2] Before you can install and use this application, you will need to have the following software installed and configured on your system: > If running Debian/Ubuntu, the install script below does this for you. 
-[Git](https://git-scm.com/) to clone the FMTM repository. +[Git][3] to clone the FMTM repository. -[Docker](https://docs.docker.com/engine/install/) +[Docker][4] to run FMTM inside containers. -[Docker Compose](https://docs.docker.com/compose/install) +[Docker Compose][5] for easy orchestration of the FMTM services. > This is Docker Compose V2, the official Docker CLI plugin. @@ -36,13 +34,8 @@ On a Linux-based machine with `bash` installed, run the script: > However, if you run as root, a user svcfmtm will be created for you. ```bash -# Option A) If you already cloned the repo -bash src/frontend/public/install.sh - -# Option B) Download the script & run curl -L https://get.fmtm.dev -o install.sh bash install.sh -# Alternative URL: https://fmtm.hotosm.org/install.sh # Then follow the prompts ``` @@ -50,16 +43,31 @@ bash install.sh ## Manual Install If more details are required, check out the -[dev docs](https://hotosm.github.io/fmtm/dev/Setup/) +[dev docs][6] ### Table of Contents -1. [Clone the FMTM repository](#clone-the-fmtm-repository) -2. [Development: Setup Your Local Environment](#setup-your-local-environment) -3. [Start the API with Docker](#start-the-api-with-docker) -4. [Setup ODK Central User (Optional)](#setup-odk-central-user-optional) -5. [Import Test Data (Optional)](#import-test-data-optional) -6. [Check Authentication (Optional)](#check-authentication-optional) +- [Installation](#installation) + - [Software Requirements](#software-requirements) + - [Easy Install](#easy-install) + - [Manual Install](#manual-install) + - [Table of Contents](#table-of-contents) + - [Clone the FMTM repository](#clone-the-fmtm-repository) + - [Setup Your Local Environment](#setup-your-local-environment) + - [1. Setup OSM OAUTH 2.0](#1-setup-osm-oauth-20) + - [2. Create an `.env` File](#2-create-an-env-file) + - [Start the API with Docker](#start-the-api-with-docker) + - [Select the install type](#select-the-install-type) + - [Pull the Images](#pull-the-images) + - [Build the Frontend](#build-the-frontend) + - [Start the Containers](#start-the-containers) + - [Setup ODK Central User (Optional)](#setup-odk-central-user-optional) + - [Import Test Data (Optional)](#import-test-data-optional) + - [Check Authentication (Optional)](#check-authentication-optional) + - [Alternative Operating Systems](#alternative-operating-systems) + - [Windows](#windows) + - [MacOS](#macos) + - [A Note on Docker Desktop](#a-note-on-docker-desktop) ### Clone the FMTM repository @@ -82,7 +90,7 @@ The FMTM uses OAUTH2 with OSM to authenticate users. To properly configure your FMTM project, you will need to create keys for OSM. -1. [Login to OSM](https://www.openstreetmap.org/login) +1. [Login to OSM][28] (_If you do not have an account yet, click the signup button at the top navigation bar to create one_). Click the drop down arrow on the top right of the navigation bar @@ -95,7 +103,7 @@ To properly configure your FMTM project, you will need to create keys for OSM. > Note: `127.0.0.1` is required for debugging instead of `localhost` > due to OSM restrictions. - ![image](https://user-images.githubusercontent.com/36752999/216319298-1444a62f-ba6b-4439-bb4f-2075fdf03291.png) + ![image][29] 3. Only the _read user preferences permission_ is required as of now. @@ -188,7 +196,7 @@ https://{YOUR_DOMAIN} http://fmtm.localhost:7050 ``` -> Note: If those link doesn't work, check the logs with `docker logs fmtm-api`. +> Note: If those link doesn't work, check the logs with `docker compose logs api`. 
> > Note: Use `docker ps` to view all container names. @@ -242,13 +250,13 @@ Windows Subsystem for Linux (WSL) can be used to run Docker. This will run a Linux machine inside Windows very efficiently. To install follow the -[official instructions](https://learn.microsoft.com/en-us/windows/wsl/install). +[official instructions][30]. -Then continue with the FMTM [installation](#software-requirements). +Then continue with the FMTM [installation][31]. ### MacOS -[Colima](https://github.com/abiosoft/colima) is recommended +[Colima][32] is recommended to run `docker` and `docker compose` commands. Install colima, docker, docker compose using brew: @@ -272,7 +280,7 @@ Run Colima: colima start ``` -Then continue with the FMTM [installation](#software-requirements). +Then continue with the FMTM [installation][33]. > Note: only tagged backend images are multi-architecture, supporting > MacOS. The regular images for fast continuous deployment are not: @@ -294,3 +302,16 @@ runs docker commands inside a Linux virtual machine underneath. It is often easier and more flexible to do this yourself. Plus it gives you access to all other other tools available in a Linux operating system! + +[1]: #alternative-operating-systems "tools must be substituted" +[2]: #alternative-operating-systems "Windows Subsystem for Linux" +[3]: https://git-scm.com/ "Git" +[4]: https://docs.docker.com/engine/install/ "Docker" +[5]: https://docs.docker.com/compose/install "Docker Compose" +[6]: https://hotosm.github.io/fmtm/dev/Setup/ "dev docs" +[28]: https://www.openstreetmap.org/login "Login to OSM" +[29]: https://user-images.githubusercontent.com/36752999/216319298-1444a62f-ba6b-4439-bb4f-2075fdf03291.png "image" +[30]: https://learn.microsoft.com/en-us/windows/wsl/install "official instructions" +[31]: #software-requirements "installation" +[32]: https://github.com/abiosoft/colima "Colima" +[33]: #software-requirements "installation" diff --git a/Justfile b/Justfile new file mode 100644 index 0000000000..46797ffc37 --- /dev/null +++ b/Justfile @@ -0,0 +1,107 @@ +# Copyright (c) 2020, 2021 Humanitarian OpenStreetMap Team +# +# This file is part of FMTM. +# +# FMTM is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# FMTM is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with FMTM. If not, see . 
+# + +# Builds + +build-backend: + docker compose build api + +build-frontend: + docker compose build ui + +build: build-backend build-frontend + +# Run + +run: + docker compose up -d + +run-without-central: + docker compose --profile no-odk up -d + +run-with-josm: + docker compose \ + -f docker-compose.yml \ + -f josm/docker-compose.yml \ + up -d + +stop: + docker compose down + +clean-db: + docker compose down -v + +# Tests + +test-backend: + docker compose run --rm api pytest + +test-frontend: + docker compose run -e CI=True --rm --entrypoint='sh -c' ui 'npm run test' + +test: test-backend test-frontend + +# Maintenance + +lint: + TAG_OVERRIDE=ci TARGET_OVERRIDE=ci docker compose run --rm --no-deps \ + --volume $PWD:$PWD --workdir $PWD \ + --entrypoint='sh -c' api \ + 'git config --global --add safe.directory $PWD \ + && pre-commit run --all-files' + +bump: + TAG_OVERRIDE=ci TARGET_OVERRIDE=ci docker compose run --rm --no-deps \ + --volume $PWD:$PWD --workdir $PWD \ + --entrypoint='sh -c' api \ + 'git config --global --add safe.directory $PWD \ + && git config --global user.name svcfmtm \ + && git config --global user.email fmtm@hotosm.org \ + && cd src/backend \ + && cz bump --check-consistency' + +# Docs + +docs-rebuild: docs-clean docs-doxygen docs-uml + +docs-clean: + @rm -rf docs/{apidocs,html,docbook,man} docs/packages.png docs/classes.png + +docs-doxygen: + cd docs && doxygen + +docs-uml: + cd docs && pyreverse -o png ../src/backend/app + +docs-pdf: + # Strip any unicode out of the markdown file before converting to PDF + # FIXME + MDS := \ + docs/dev/Backend.md \ + docs/dev/Database-Tips.md \ + docs/dev/Deployment-Flow.md \ + docs/dev/Frontend.md \ + docs/dev/Production.md \ + docs/dev/Version-Control.md \ + docs/dev/Setup.md \ + docs/dev/Troubleshooting.md \ + PDFS := $(MDS:.md=.pdf) + @echo "Converting $PDFS to a PDF" + @new=$(notdir $(basename $PDFS)); \ + iconv -f utf-8 -t US $PDFS -c | \ + pandoc $PDFS -f markdown -t pdf -s -o /tmp/$$new.pdf \ No newline at end of file diff --git a/LICENSE b/LICENSE.md similarity index 100% rename from LICENSE rename to LICENSE.md diff --git a/Makefile b/Makefile deleted file mode 100644 index 50cb41945a..0000000000 --- a/Makefile +++ /dev/null @@ -1,61 +0,0 @@ -# Copyright (c) 2020, 2021 Humanitarian OpenStreetMap Team -# -# This file is part of FMTM. -# -# FMTM is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# FMTM is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with FMTM. If not, see . 
-# - -# All python source files -# MDS := $(wildcard ./docs/*.md) -MDS := \ - docs/dev/Backend.md \ - docs/dev/Database-Tips.md \ - docs/dev/Deployment-Flow.md \ - docs/dev/Frontend.md \ - docs/dev/Production.md \ - docs/dev/Version-Control.md \ - docs/dev/Setup.md \ - docs/dev/Troubleshooting.md \ - -PDFS := $(MDS:.md=.pdf) - -all: - @echo "Targets are:" - @echo " clean - remove generated files" - @echo " apidoc - generate Doxygen API docs" - @echo " check - run the tests" - @echo " uml - generate UML diagrams" - -clean: - @rm -fr docs/{apidocs,html,docbook,man} docs/packages.png docs/classes.png - -uml: - cd docs && pyreverse -o png ../src/backend/app - -apidoc: force - cd docs && doxygen - -# Strip any unicode out of the markdown file before converting to PDF -pdf: $(PDFS) -%.pdf: %.md - @echo "Converting $< to a PDF" - @new=$(notdir $(basename $<)); \ - iconv -f utf-8 -t US $< -c | \ - pandoc $< -f markdown -t pdf -s -o /tmp/$$new.pdf - -.SUFFIXES: .md .pdf - -.PHONY: apidoc - -force: diff --git a/README.md b/README.md index 7fc44c8c9e..4e1f4d1954 100644 --- a/README.md +++ b/README.md @@ -2,21 +2,16 @@ ![HOT Logo](https://github.com/hotosm/fmtm/blob/main/images/hot_logo.png?raw=true) -[![All Contributors](https://img.shields.io/github/all-contributors/hotosm/fmtm?color=ee8449&style=flat-square)](#contributors-) - -**Production Workflows** - -| Build & Deploy | Docs | Wiki | CI Img | -| ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| [![Build and Deploy](https://github.com/hotosm/fmtm/actions/workflows/build_and_deploy.yml/badge.svg?branch=main)](https://github.com/hotosm/fmtm/actions/workflows/build_and_deploy.yml) | [![Publish Docs](https://github.com/hotosm/fmtm/actions/workflows/docs.yml/badge.svg?branch=main)](https://github.com/hotosm/fmtm/actions/workflows/docs.yml) | [![Publish Docs to Wiki](https://github.com/hotosm/fmtm/actions/workflows/wiki.yml/badge.svg?branch=main)](https://github.com/hotosm/fmtm/actions/workflows/wiki.yml) | [![Build CI Img](https://github.com/hotosm/fmtm/actions/workflows/build_ci_img.yml/badge.svg?branch=main)](https://github.com/hotosm/fmtm/actions/workflows/build_ci_img.yml) | - -**Development Workflows** - -| Build & Deploy | CI Img | ODK Imgs | -| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| [![Build and 
Deploy](https://github.com/hotosm/fmtm/actions/workflows/build_and_deploy.yml/badge.svg?branch=development)](https://github.com/hotosm/fmtm/actions/workflows/build_and_deploy.yml) | [![Build CI Img](https://github.com/hotosm/fmtm/actions/workflows/build_ci_img.yml/badge.svg?branch=development)](https://github.com/hotosm/fmtm/actions/workflows/build_ci_img.yml) | [![Build ODK Images](https://github.com/hotosm/fmtm/actions/workflows/build_odk_imgs.yml/badge.svg?branch=development)](https://github.com/hotosm/fmtm/actions/workflows/build_odk_imgs.yml) | +| **Version** | [![Version](https://img.shields.io/github/v/release/hotosm/fmtm?logo=github)](https://github.com/hotosm/fmtm/releases) | +| :-------------- | :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| **Deployments** | [![Build and Deploy](https://github.com/hotosm/fmtm/actions/workflows/build_and_deploy.yml/badge.svg?branch=main)](https://github.com/hotosm/fmtm/actions/workflows/build_and_deploy.yml?query=branch%3Amain) **Prod**
[![Build and Deploy](https://github.com/hotosm/fmtm/actions/workflows/build_and_deploy.yml/badge.svg?branch=staging)](https://github.com/hotosm/fmtm/actions/workflows/build_and_deploy.yml?query=branch%3Astaging) **Staging**
[![Build and Deploy](https://github.com/hotosm/fmtm/actions/workflows/build_and_deploy.yml/badge.svg?branch=development)](https://github.com/hotosm/fmtm/actions/workflows/build_and_deploy.yml?query=branch%3Adevelopment) **Dev** | +| **Images** | [![Build CI Img](https://github.com/hotosm/fmtm/actions/workflows/build_ci_img.yml/badge.svg?branch=development)](https://github.com/hotosm/fmtm/actions/workflows/build_ci_img.yml) [![Build ODK Images](https://github.com/hotosm/fmtm/actions/workflows/build_odk_imgs.yml/badge.svg?branch=development)](https://github.com/hotosm/fmtm/actions/workflows/build_odk_imgs.yml) [![🔧 Build Proxy Images](https://github.com/hotosm/fmtm/actions/workflows/build_proxy_imgs.yml/badge.svg?branch=development)](https://github.com/hotosm/fmtm/actions/workflows/build_proxy_imgs.yml) | +| **Docs** | [![Publish Docs](https://github.com/hotosm/fmtm/actions/workflows/docs.yml/badge.svg?branch=development)](https://github.com/hotosm/fmtm/actions/workflows/docs.yml) [![Publish Docs to Wiki](https://github.com/hotosm/fmtm/actions/workflows/wiki.yml/badge.svg?branch=development)](https://github.com/hotosm/fmtm/actions/workflows/wiki.yml) | +| **Tech Stack** | ![FastAPI](https://img.shields.io/badge/FastAPI-005571?style=for-the-badge&logo=fastapi) ![React](https://img.shields.io/badge/react-%2320232a.svg?style=for-the-badge&logo=react&logoColor=%2361DAFB) ![Postgres](https://img.shields.io/badge/postgres-%23316192.svg?style=for-the-badge&logo=postgresql&logoColor=white) ![Kubernetes](https://img.shields.io/badge/kubernetes-%23326ce5.svg?style=for-the-badge&logo=kubernetes&logoColor=white) ![Docker](https://img.shields.io/badge/docker-%230db7ed.svg?style=for-the-badge&logo=docker&logoColor=white) | +| **Code Style** | ![Backend Style](https://img.shields.io/badge/code%20style-black-black) ![Frontend Style](https://img.shields.io/badge/code%20style-prettier-F7B93E?logo=Prettier) ![pre-commit](https://img.shields.io/badge/pre--commit-enabled-brightgreen?logo=pre-commit&logoColor=white) | +| **Other Info** | [![GitHub Sponsors](https://img.shields.io/badge/sponsor-30363D?logo=GitHub-Sponsors&logoColor=#EA4AAA)](https://github.com/sponsors/hotosm) [![License](https://img.shields.io/github/license/hotosm/fmtm.svg)](https://github.com/hotosm/fmtm/blob/main/LICENSE.md) [![All Contributors](https://img.shields.io/github/all-contributors/hotosm/fmtm?color=ee8449&style=flat-square)](#contributors-) [![Coverage](https://hotosm.github.io/fmtm/coverage.svg)](https://hotosm.github.io/fmtm/coverage.html) | @@ -248,7 +243,7 @@ Its functions include: This is pretty straightforward using [OpenPyXL](https://openpyxl.readthedocs.io/en/stable/), though we have to be careful to keep the location within the spreadsheet of these two items consistent. - - GeoJSON feature collections for each form (the buildings/amenities or whatever) + - GeoJSON feature collections for each form (the buildings/amenities etc) - An App User for each form, which in turn corresponds to a single task. 
When the ODK Collect app on a user's phone is configured to function as that App User, they have access to diff --git a/contrib/scripts/docs/reformat_md_links_as_ref.py b/contrib/scripts/docs/reformat_md_links_as_ref.py new file mode 100644 index 0000000000..54c2294083 --- /dev/null +++ b/contrib/scripts/docs/reformat_md_links_as_ref.py @@ -0,0 +1,60 @@ +import re + +this_file_in_docs = "User-Manual-For-Project-Managers.md" + + +def reformat_links_as_refs(file_name: str) -> None: + """ + Function takes in a mark down file, searches for inline links and changes them to reference (footnote) version. + + NB the path to "docs" is hardcoded. + NB: Care should be taken to make sure that inline url links are formatted correctly (broken lines, spaces, parenthesis, etc.) + + function created by contributor @cordovez. + """ + pattern = r"\[([^\]]+)\]\(([^)]+)\)" + + # Read the original markdown document + with open(f"./docs/{this_file_in_docs}", "r") as file: + lines = file.readlines() + + # Create a list to store the footnotes + footnotes = [] + + # Create a new list to store the modified lines + modified_lines = [] + + # Iterate through each line in the document + for line in lines: + # Find all matches of the pattern in the line + matches = re.findall(pattern, line) + + # Iterate through the matches in reverse order + for match in reversed(matches): + label = match[0] + url = match[1] + + # Generate the footnote reference + footnote_ref = f"[{label}][{len(footnotes) + 1}]" + + # Replace the original hyperlink with the footnote reference + line = line.replace(f"[{label}]({url})", footnote_ref) + + # Append the footnote to the list + footnotes.append(f'[{len(footnotes) + 1}]: {url} "{label}"') + + # Append the modified line to the new list + modified_lines.append(line) + + # Write the modified lines to the new document + with open(f"./docs/{this_file_in_docs}", "w") as file: + file.writelines(modified_lines) + + # Append the footnotes to the end of the document + with open(f"./docs/{this_file_in_docs}", "a") as file: + file.write("\n\n") + file.write("\n".join(footnotes)) + + +if __name__ == "__main__": + reformat_links_as_refs(this_file_in_docs) diff --git a/docker-compose.development.yml b/docker-compose.development.yml index 664a9c84fe..45585228dd 100644 --- a/docker-compose.development.yml +++ b/docker-compose.development.yml @@ -17,6 +17,8 @@ version: "3" +name: fmtm-dev + volumes: fmtm_frontend: name: fmtm-frontend-${GIT_BRANCH} @@ -46,7 +48,6 @@ networks: services: proxy: image: "ghcr.io/hotosm/fmtm/proxy:${GIT_BRANCH}" - container_name: fmtm-${GIT_BRANCH} depends_on: api: condition: service_started @@ -70,7 +71,6 @@ services: FMTM_API_DOMAIN: ${FMTM_API_DOMAIN:-api.${FMTM_DOMAIN}} FMTM_ODK_DOMAIN: ${FMTM_ODK_DOMAIN:-odk.${FMTM_DOMAIN}} FMTM_S3_DOMAIN: ${FMTM_S3_DOMAIN:-s3.${FMTM_DOMAIN}} - FMTM_SCRIPT_DOMAIN: ${FMTM_SCRIPT_DOMAIN:-_} ports: - 80:80 - 443:443 @@ -80,7 +80,6 @@ services: api: image: "ghcr.io/hotosm/fmtm/backend:${GIT_BRANCH}" - container_name: fmtm-api-${GIT_BRANCH} volumes: - fmtm_logs:/opt/logs - fmtm_tiles:/opt/tiles @@ -96,6 +95,15 @@ services: networks: - fmtm-net restart: "unless-stopped" + deploy: + replicas: ${API_REPLICAS:-2} + resources: + limits: + cpus: "0.9" + memory: 1500M + reservations: + cpus: "0.1" + memory: 100M ui: # This service simply builds the frontend to a volume @@ -107,7 +115,6 @@ services: args: APP_VERSION: ${GIT_BRANCH} VITE_API_URL: https://${FMTM_API_DOMAIN:-api.${FMTM_DOMAIN}} - container_name: fmtm-ui-${GIT_BRANCH} volumes: - 
fmtm_frontend:/frontend network_mode: none @@ -115,7 +122,6 @@ services: central: image: "ghcr.io/hotosm/fmtm/odkcentral:${ODK_CENTRAL_TAG:-v2023.5.0}" - container_name: fmtm-central-${GIT_BRANCH} depends_on: central-db: condition: service_healthy @@ -148,7 +154,6 @@ services: # This service simply builds the frontend to a volume # accessible to the proxy, then shuts down image: "ghcr.io/hotosm/fmtm/odkcentral-ui:${ODK_CENTRAL_TAG:-v2023.5.0}" - container_name: fmtm-central-ui-${GIT_BRANCH} volumes: - central_frontend:/frontend network_mode: none @@ -156,7 +161,6 @@ services: s3: image: "docker.io/minio/minio:${MINIO_TAG:-RELEASE.2024-01-01T16-36-33Z}" - container_name: fmtm-s3-${GIT_BRANCH} environment: MINIO_ROOT_USER: ${S3_ACCESS_KEY} MINIO_ROOT_PASSWORD: ${S3_SECRET_KEY} @@ -178,7 +182,6 @@ services: fmtm-db: image: "postgis/postgis:${POSTGIS_TAG:-14-3.4-alpine}" - container_name: fmtm-db-${GIT_BRANCH} volumes: - fmtm_db_data:/var/lib/postgresql/data/ environment: @@ -199,7 +202,6 @@ services: central-db: image: "postgis/postgis:${POSTGIS_TAG:-14-3.4-alpine}" - container_name: fmtm-central-db-${GIT_BRANCH} volumes: - central_db_data:/var/lib/postgresql/data/ environment: @@ -220,7 +222,6 @@ services: migrations: image: "ghcr.io/hotosm/fmtm/backend:${GIT_BRANCH}" - container_name: fmtm-migrations-${GIT_BRANCH} depends_on: fmtm-db: condition: service_healthy @@ -235,7 +236,6 @@ services: certbot: image: "ghcr.io/hotosm/fmtm/proxy:certs-init-development" - container_name: fmtm-cert-renew-${GIT_BRANCH} volumes: - certs:/etc/letsencrypt - certbot_data:/var/www/certbot @@ -244,7 +244,6 @@ services: FMTM_API_DOMAIN: ${FMTM_API_DOMAIN:-api.${FMTM_DOMAIN}} FMTM_ODK_DOMAIN: ${FMTM_ODK_DOMAIN:-odk.${FMTM_DOMAIN}} FMTM_S3_DOMAIN: ${FMTM_S3_DOMAIN:-s3.${FMTM_DOMAIN}} - FMTM_SCRIPT_DOMAIN: ${FMTM_SCRIPT_DOMAIN:-_} CERT_EMAIL: ${CERT_EMAIL} ports: - 80:80 diff --git a/docker-compose.main.yml b/docker-compose.main.yml index 86209686de..85cb523f45 100644 --- a/docker-compose.main.yml +++ b/docker-compose.main.yml @@ -17,6 +17,8 @@ version: "3" +name: fmtm-main + volumes: fmtm_frontend: name: fmtm-frontend-main @@ -39,8 +41,7 @@ networks: services: proxy: - image: "ghcr.io/hotosm/fmtm/proxy:main${FMTM_SCRIPT_DOMAIN:+-plus-script}" - container_name: fmtm-main + image: "ghcr.io/hotosm/fmtm/proxy:main" depends_on: api: condition: service_started @@ -55,7 +56,6 @@ services: environment: FMTM_DOMAIN: ${FMTM_DOMAIN} FMTM_API_DOMAIN: ${FMTM_API_DOMAIN:-api.${FMTM_DOMAIN}} - FMTM_SCRIPT_DOMAIN: ${FMTM_SCRIPT_DOMAIN:-_} ports: - 80:80 - 443:443 @@ -65,7 +65,6 @@ services: api: image: "ghcr.io/hotosm/fmtm/backend:main" - container_name: fmtm-api-main volumes: - fmtm_logs:/opt/logs - fmtm_tiles:/opt/tiles @@ -79,18 +78,26 @@ services: networks: - fmtm-net restart: "unless-stopped" + deploy: + replicas: ${API_REPLICAS:-4} + resources: + limits: + cpus: "0.9" + memory: 1500M + reservations: + cpus: "0.1" + memory: 100M ui: # This service simply builds the frontend to a volume # accessible to the proxy, then shuts down - image: "ghcr.io/hotosm/fmtm/frontend:${GIT_BRANCH:-main}" + image: "ghcr.io/hotosm/fmtm/frontend:main" build: context: src/frontend dockerfile: prod.dockerfile args: APP_VERSION: main VITE_API_URL: https://${FMTM_API_DOMAIN:-api.${FMTM_DOMAIN}} - container_name: fmtm-ui-main volumes: - fmtm_frontend:/frontend network_mode: none @@ -98,7 +105,6 @@ services: fmtm-db: image: "postgis/postgis:${POSTGIS_TAG:-14-3.4-alpine}" - container_name: fmtm-db-main volumes: - fmtm_db_data:/var/lib/postgresql/data/ 
environment: @@ -119,7 +125,6 @@ services: migrations: image: "ghcr.io/hotosm/fmtm/backend:main" - container_name: fmtm-migrations-main depends_on: fmtm-db: condition: service_healthy @@ -132,7 +137,6 @@ services: backups: image: "ghcr.io/hotosm/fmtm/backend:main" - container_name: fmtm-backups-main depends_on: fmtm-db: condition: service_healthy @@ -151,14 +155,12 @@ services: certbot: image: "ghcr.io/hotosm/fmtm/proxy:certs-init-main" - container_name: fmtm-cert-renew-main volumes: - certs:/etc/letsencrypt - certbot_data:/var/www/certbot environment: FMTM_DOMAIN: ${FMTM_DOMAIN} FMTM_API_DOMAIN: ${FMTM_API_DOMAIN:-api.${FMTM_DOMAIN}} - FMTM_SCRIPT_DOMAIN: ${FMTM_SCRIPT_DOMAIN:-_} CERT_EMAIL: ${CERT_EMAIL} ports: - 80:80 diff --git a/docker-compose.staging.yml b/docker-compose.staging.yml index 62329c7d5e..c1680f84a9 100644 --- a/docker-compose.staging.yml +++ b/docker-compose.staging.yml @@ -17,6 +17,8 @@ version: "3" +name: fmtm-stage + volumes: fmtm_frontend: name: fmtm-frontend-${GIT_BRANCH} @@ -56,7 +58,6 @@ services: extends: file: docker-compose.development.yml service: ui - image: "ghcr.io/hotosm/fmtm/frontend:${GIT_BRANCH:-staging}" central: extends: file: docker-compose.development.yml @@ -83,7 +84,6 @@ services: service: migrations backups: image: "ghcr.io/hotosm/fmtm/backend:${GIT_BRANCH}" - container_name: fmtm-backups-${GIT_BRANCH} depends_on: fmtm-db: condition: service_healthy diff --git a/docker-compose.yml b/docker-compose.yml index bce836048c..2e8597d398 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -17,6 +17,8 @@ version: "3" +name: fmtm + volumes: fmtm_data: fmtm_db_data: @@ -38,7 +40,6 @@ services: target: debug args: NGINX_TAG: "${NGINX_TAG:-1.25.3}" - container_name: fmtm depends_on: api: condition: service_started @@ -67,7 +68,6 @@ services: target: "${TARGET_OVERRIDE:-debug}" args: APP_VERSION: "${TAG_OVERRIDE:-debug}" - container_name: fmtm-api # Uncomment these to debug with a terminal debugger like pdb # Then `docker attach fmtm_api` to debug # stdin_open: true @@ -94,18 +94,26 @@ services: env_file: - .env ports: - - "7052:8000" - # - "5678:5678" # Debugger port + - "7052-7053:8000" + # - "5678-5679:5678" # Debugger port networks: - fmtm-net restart: "unless-stopped" + deploy: + replicas: ${API_REPLICAS:-1} + resources: + limits: + cpus: "0.9" + memory: 1500M + reservations: + cpus: "0.1" + memory: 100M ui: image: "ghcr.io/hotosm/fmtm/frontend:debug" build: context: src/frontend dockerfile: debug.dockerfile - container_name: fmtm-ui depends_on: api: condition: service_started @@ -130,7 +138,6 @@ services: context: odkcentral/api args: ODK_CENTRAL_TAG: ${ODK_CENTRAL_TAG:-v2023.5.0} - container_name: fmtm-central depends_on: central-db: condition: service_healthy @@ -170,7 +177,6 @@ services: context: odkcentral/ui args: ODK_CENTRAL_TAG: ${ODK_CENTRAL_TAG:-v2023.5.0} - container_name: fmtm-central-ui volumes: - central_frontend:/frontend network_mode: none @@ -178,7 +184,6 @@ services: s3: image: "docker.io/minio/minio:${MINIO_TAG:-RELEASE.2024-01-01T16-36-33Z}" - container_name: fmtm-s3 environment: MINIO_ROOT_USER: ${S3_ACCESS_KEY:-fmtm} MINIO_ROOT_PASSWORD: ${S3_SECRET_KEY:-somelongpassword} @@ -203,7 +208,6 @@ services: fmtm-db: image: "postgis/postgis:${POSTGIS_TAG:-14-3.4-alpine}" - container_name: fmtm-db volumes: - fmtm_db_data:/var/lib/postgresql/data/ environment: @@ -225,7 +229,6 @@ services: central-db: profiles: ["", "central"] image: "postgis/postgis:${POSTGIS_TAG:-14-3.4-alpine}" - container_name: fmtm-central-db volumes: - 
central_db_data:/var/lib/postgresql/data/ environment: @@ -246,7 +249,6 @@ services: migrations: image: "ghcr.io/hotosm/fmtm/backend:${TAG_OVERRIDE:-debug}" - container_name: fmtm-migrations depends_on: fmtm-db: condition: service_healthy diff --git a/docs/About.md b/docs/About.md index 2b6cfe6170..1a68db48bc 100644 --- a/docs/About.md +++ b/docs/About.md @@ -1,83 +1,150 @@ -![](https://github.com/hotosm/fmtm/blob/main/images/hot_logo.png?raw=true) - -# Field Mapping Tasking Manager (FMTM) - -## 📖 History - -### How was FMTM born? - -It started as Ivan's idea to build FMTM (Ivan Gayton is Senior Humanitarian Advisor at Humanitarian OpenStreetMap Team) which then became a collaborative project with the efforts of Ivan , Rob Savoye who is Senior Technical Lead at Humanitarian OpenStreetMap Team and many other members from HOT as well as volunteers interested in the project. -HOT uses ODK heavily, but most of the data never makes it into OSM because all the data processing is manual and slow, so it doesn't get done. -Ivan Gayton(Senior Humanitarian Advisor at Humanitarian OpenStreetMap Team) heard about what Rob was working on and goes "That's the missing piece I needed!". He'd been wanting to build FMTM for years, but lacked the ability to process the data.A [webinar](https://www.youtube.com/watch?v=GiLKRZpbtrc&ab_channel=HumanitarianOpenStreetMapTeam) then took place in September 2022 that showcased the high interest from the community and the need for collaborative field mapping that really kicked off the starting point for building the Field Mapping Tasking Manager. It was Ivan who got HOT interested enough to direct some resources to his idea, so FMTM was born. - - - -_Want to know about OSM-fieldwork project ?_ Click [here](https://github.com/hotosm/osm-fieldwork/wiki) -
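Returning to the Docker Compose changes earlier in this diff (a top-level project `name:`, removal of the fixed `container_name` fields, and a new `deploy.replicas` block driven by `API_REPLICAS`), a rough sketch of how these might be exercised locally is shown below. The service names and the `API_REPLICAS` variable come from the compose files above; the generated container names (e.g. `fmtm-api-1`) are assumptions based on Compose's default `<project>-<service>-<index>` naming.

```bash
# Start the API with two replicas (the development default is 1);
# the dev compose file publishes ports 7052-7053, so more than two
# replicas would exceed the published port range.
API_REPLICAS=2 docker compose up -d api

# With container_name removed, containers are addressed per service;
# Compose generates names such as fmtm-api-1 and fmtm-api-2.
docker compose ps api

# Fetch logs by service rather than by a fixed container name.
docker compose logs --follow api
```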
-
-
-
+# 📖 History + +## How was FMTM born? + +It started as Ivan's idea to build FMTM (Ivan Gayton is Senior Humanitarian +Advisor at Humanitarian OpenStreetMap Team) which then became a collaborative +project with the efforts of Ivan, Rob Savoye who is Senior Technical Lead at +Humanitarian OpenStreetMap Team and many other members from HOT as well as +volunteers interested in the project. + +HOT uses ODK heavily, but most of the data never makes it into OSM because all +the data processing is manual and slow, so it doesn't get done. + +fmtm-splash + +Ivan Gayton(Senior Humanitarian Advisor at Humanitarian OpenStreetMap Team) +heard about what Rob was working on and goes "That's the missing piece I +needed!". He'd been wanting to build FMTM for years, but lacked the ability to +process the data. +A [webinar][1] then took place in September 2022 +that showcased the high interest from the community and the need for +collaborative field mapping that really kicked off the starting point for +building the Field Mapping Tasking Manager. It was Ivan who got HOT interested +enough to direct some resources to his idea, so FMTM was born. + +**Want to know about OSM-fieldwork project?** click [here][2] ## A project to provide tools for Open Mapping campaigns -The Field Mapping Tasking Manager (FMTM) is a project that aims to provide tools for coordinating field mapping activities in Open Mapping campaigns. While there are existing field mapping applications, there is a lack of efficient tools to coordinate these activities. The FMTM builds on the HOT Tasking Manager and other mapping applications to provide a more streamlined and organized process for completing mapping tasks. - -Currently, it is possible to implement a Field Mapping Tasking Manager workflow using existing tools, but it requires significant effort and can be challenging. The FMTM project is developing automation features to address these challenges and make the process more accessible to users. - -By providing a centralized platform for organizing and managing mapping tasks, assigning them to specific users, and tracking their progress, the FMTM aims to simplify the coordination of mapping activities. The tool also provides analytics and reporting features, allowing users to gain insights into mapping campaigns and adjust their strategies accordingly. - -[Background and description of the project and idea are here: please have a look at this blog if you haven't yet!](https://www.hotosm.org/updates/field-mapping-is-the-future-a-tasking-manager-workflow-using-odk/) - -# How to contribute - -The FMTM project is open source and community-driven, welcoming contributions from designers, user testers, and both front-end and back-end developers. If you're interested in getting involved, please see our [contributor guidelines](https://github.com/hotosm/fmtm/blob/main/CONTRIBUTING.md) for more information. We welcome questions and feedback, so don't hesitate to reach out to us. 👍🎉 - -# Using OpenDataKit's Select From Map feature - -OpenDataKit's Select From Map feature is a useful tool for field mappers to collect data in a well-structured questionnaire format. The tool was incorporated into ODK in mid-2022 and allows mappers to select an object from a map, view its existing attributes, and fill out a form with new information and attributes. 
- -To prepare map files for ODK, inspiration is taken from the HOT Tasking Manager, which allows remote mappers to choose well-defined small "task" areas, ensuring full coverage of the project area and no unintended duplication of tasks. For example, a mapper can approach a building, select that building from a map view within ODK on their mobile phone, and add the opening hours, number of floors, construction material, or any number of useful attributes in a well-structured questionnaire format - - - -To prepare the appropriate map files for ODK, we are taking our inspiration from the [HOT Tasking Manager](https://tasks.hotosm.org/), which allows remote mappers to choose well-defined small "task" areas, ensuring full coverage of the project area and no unintended duplication of tasks. - - - -# Users - -There are three main user roles for using ODK's Select From Map feature: campaign managers, field mappers, and validators. - -## Campaign managers - -Campaign managers select an Area of Interest (AOI) and organize field mappers to go out and collect data. They need to: - - - -- Select an AOI polygon by creating a GeoJSON or by tracing a polygon in a Web map -- Choose a task division scheme (number of features or area per task, and possible variations on what features to use as the preferred splitting lines) +The Field Mapping Tasking Manager (FMTM) is a project that aims to provide tools +for coordinating field mapping activities in Open Mapping campaigns. While +there are existing field mapping applications, there is a lack of efficient +tools to coordinate these activities. The FMTM builds on the HOT Tasking +Manager and other mapping applications to provide a more streamlined and +organized process for completing mapping tasks. + +Currently, it is possible to implement a Field Mapping Tasking Manager workflow +using existing tools, but it requires significant effort and can be challenging. + +The FMTM project is developing automation features to address these challenges +and make the process more accessible to users. + +By providing a centralized platform for organizing and managing mapping tasks, +assigning them to specific users, and tracking their progress, the FMTM aims to +simplify the coordination of mapping activities. The tool also provides +analytics and reporting features, allowing users to gain insights into mapping +campaigns and adjust their strategies accordingly. + +Background and description of the project and idea are +[here][3]: +please have a look at this blog if you haven't yet! + +## How to contribute + +The FMTM project is open source and community-driven, welcoming contributions +from designers, user testers, and both front-end and back-end developers. If +you're interested in getting involved, please see our +[contributor guidelines][4] +for more information. We welcome questions and feedback, so don't hesitate +to reach out to us. 👍🎉 + +## Using OpenDataKit's Select From Map feature + +OpenDataKit's Select From Map feature is a useful tool for field mappers to +collect data in a well-structured questionnaire format. The tool was +incorporated into ODK in mid-2022 and allows mappers to select an object from a +map, view its existing attributes, and fill out a form with new information +and attributes. + +To prepare map files for ODK, inspiration is taken from the HOT Tasking Manager, +which allows remote mappers to choose well-defined small "task" areas, ensuring +full coverage of the project area and no unintended duplication of tasks. 
For +example, a mapper can approach a building, select that building from a map +view within ODK on their mobile phone, and add the opening hours, number of +floors, construction material, or any number of useful attributes in a +well-structured questionnaire format + +To prepare the appropriate map files for ODK, we are taking our inspiration from +the [HOT Tasking Manager][5], which allows remote +mappers to choose well-defined small "task" areas, ensuring full coverage +of the project area and no unintended duplication of tasks. + +## Users + +There are three main user roles for using ODK's Select From Map feature: +campaign managers, field mappers, and validators. + +### Campaign managers + +Campaign managers select an Area of Interest (AOI) and organize field mappers +to go out and collect data. They need to: + +campaign-managers + +- Select an AOI polygon by creating a GeoJSON or by tracing a polygon + in a Web map +- Choose a task division scheme (number of features or area per task, + and possible variations on what features to use as the preferred splitting lines) - Provide specific instructions and guidance for field mappers on the project. -- Provide a URL to a mobile-friendly Web page where field mappers can, from their mobile phone, select a task that is not already "checked out" (or possibly simply allocate areas to the field mappers). -- See the status of tasks (open, "checked out", completed but not validated, requires to rework, validated, etc) in the Web browser on their computer +- Provide a URL to a mobile-friendly Web page where field mappers can, from + their mobile phone, select a task that is not already "checked out" + (or possibly simply allocate areas to the field mappers). +- See the status of tasks (open, "checked out", completed but not validated, + requires to rework, validated, etc) in the Web browser on their computer -## Field mappers +### Field mappers -Field mappers select (or are allocated) individual tasks within a project AOI and use ODK Collect to gather data in those areas. They need to: +Field mappers select (or are allocated) individual tasks within a project AOI +and use ODK Collect to gather data in those areas. They need to: - Visit a mobile-friendly Web page where they can see available tasks on a map -- Choose an area and launch ODK Collect with the form corresponding to their allocated area pre-loaded +- Choose an area and launch ODK Collect with the form corresponding to their + allocated area pre-loaded -## Validators +### Validators -Validators review the data collected by field mappers and assess its quality. If the data is good, the validators merge the portion of the data that belongs in OpenStreetMap to OSM. If it requires more work, the validators either fix it themselves (for minor stuff like spelling or capitalization mistakes that don't seem to be systematic) or inform the field mappers that they need to fix it. They need to: +Validators review the data collected by field mappers and assess its quality. +If the data is good, the validators merge the portion of the data that +belongs in OpenStreetMap to OSM. If it requires more work, the validators +either fix it themselves (for minor stuff like spelling or capitalization +mistakes that don't seem to be systematic) or inform the field mappers +that they need to fix it. They need to: -- Access completed data sets of "submissions" as Comma Separated Values and/or OSM XML so that they can review them. 
+- Access completed data sets of "submissions" as Comma Separated Values + and/or OSM XML so that they can review them. - Mark areas as validated or requiring rework - Communicate with field mappers if rework is necessary - Merge good-quality data into OSM (probably from JOSM). - Mark areas as completed and merged. -# Info for developers +## Info for developers + +For this visit the [Getting Started Page][6] -For this visit the -[Getting Started Page](https://hotosm.github.io/fmtm/dev/Setup/). +[1]: https://www.youtube.com/watch?v=GiLKRZpbtrc&\ab_channel=HumanitarianOpenStreetMapTeam +[2]: https://github.com/hotosm/osm-fieldwork/wiki +[3]: https://www.hotosm.org/updates/field-mapping-is-the-future-a-tasking-manager-workflow-using-odk +[4]: https://github.com/hotosm/fmtm/blob/main/CONTRIBUTING.md +[5]: https://tasks.hotosm.org/ +[6]: https://hotosm.github.io/fmtm/dev/Setup/ diff --git a/docs/CNAME b/docs/CNAME new file mode 100644 index 0000000000..bedcdccf15 --- /dev/null +++ b/docs/CNAME @@ -0,0 +1 @@ +fmtm.dev \ No newline at end of file diff --git a/docs/CODE_OF_CONDUCT.md b/docs/CODE_OF_CONDUCT.md deleted file mode 120000 index 0400d57460..0000000000 --- a/docs/CODE_OF_CONDUCT.md +++ /dev/null @@ -1 +0,0 @@ -../CODE_OF_CONDUCT.md \ No newline at end of file diff --git a/docs/FAQ.md b/docs/FAQ.md index a0fd7acd5e..4a33af6e68 100644 --- a/docs/FAQ.md +++ b/docs/FAQ.md @@ -1,6 +1,6 @@ -## :question: Frequently Asked Questions :question +# ❓ Frequently Asked Questions ❓ -### For Users +## For Users Q: What is FMTM? @@ -8,7 +8,7 @@ Q: What is FMTM? application that facilitates remote monitoring of field activities for humanitarian organizations. -
+--- **Q:** Do I need to create an account to use the FMTM Web App? @@ -16,7 +16,7 @@ humanitarian organizations. creating an account allows you to contribute to mapping projects and access additional features. -
+--- **Q:** How do I browse and select mapping projects on the FMTM Web App? @@ -25,7 +25,7 @@ clicking on the "Projects" tab and selecting a project of interest. You can view project details, tasks, and mapping data on the project page. -
+--- **Q:** How do I contribute to a mapping project on the FMTM Web App? @@ -34,7 +34,7 @@ create an account, select a project of interest, and choose a task to work on. You can then use the mapping tools provided to complete the task. -
+--- **Q:** Can I work on multiple mapping tasks at the same time on the FMTM Web App? @@ -42,7 +42,7 @@ task. FMTM Web App, as long as you can commit the necessary time and effort to each task. -
+--- **Q:** How do I know if my mapping work on the FMTM Web App is accurate? @@ -50,7 +50,7 @@ to each task. review and validate the mapping work. This helps to ensure the accuracy of the mapping data. -
+--- **Q:** Can I provide feedback on a mapping project on the FMTM Web App? @@ -58,7 +58,7 @@ accuracy of the mapping data. App by leaving a comment on the project page or contacting the project manager. -
+--- **Q:** How do I download mapping data from a project on the FMTM Web App? @@ -66,7 +66,7 @@ manager. can select the project of interest and click on the "Export" button on the project page. -
+--- **Q:** Can I use the mapping data from the FMTM Web App for my own research or projects? @@ -75,9 +75,9 @@ available for use, but it is important to check the specific project requirements and licenses before using the data for your own research or projects. -
+--- -### For Contributors +## For Contributors **Q:** What is the Field Mapping Tasking Manager (FMTM)? @@ -85,14 +85,14 @@ or projects. allows contributors to participate in mapping projects related to humanitarian and development work. -
+--- **Q:** How can I become a contributor to the FMTM? **A:** To become a contributor to the FMTM, you can create an account on the platform and join a mapping project. -
+--- **Q:** Who can contribute to FMTM? @@ -100,7 +100,7 @@ the platform and join a mapping project. contributions from developers, designers, and other contributors are always welcome. -
+--- **Q:** What kind of contributions can I make to FMTM? @@ -126,7 +126,7 @@ Translation: If you are fluent in a language other than English, you can contribute by translating the application or its documentation into your language. -
+--- **Q:** What technologies are used in FMTM? @@ -134,7 +134,7 @@ into your language. Postgres, Redis, Celery, and Vue.js. The codebase is written in Python, HTML, CSS, and JavaScript. -
+--- **Q:** How do I set up FMTM locally? @@ -142,7 +142,7 @@ Python, HTML, CSS, and JavaScript. and Node.js installed on your system. You can follow the instructions in the README file of the FMTM repository to set up the project. -
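As a rough, non-authoritative sketch of the local setup referred to in the answer above — the repository URL, the `scripts/gen-env.sh` helper, and the `http://fmtm.localhost:7050` address are taken from elsewhere in these docs, and the README remains the canonical reference:

```bash
# Clone the repository
git clone https://github.com/hotosm/fmtm.git
cd fmtm

# Generate a .env file with the helper script mentioned in the dev docs
bash scripts/gen-env.sh

# Build and start the development stack
docker compose build
docker compose up -d

# The frontend should then be reachable at http://fmtm.localhost:7050
```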
+--- **Q:** How can I report a bug or suggest a new feature for FMTM? @@ -151,7 +151,7 @@ the FMTM repository on GitHub. Be sure to provide as much detail as possible, including steps to reproduce the bug, screenshots, or mockups for new features. -
+--- **Q:** How can I contribute to FMTM if I'm new to open source or web development? @@ -163,7 +163,7 @@ issues labeled as "good first issue." Additionally, you can join the FMTM community on Slack to connect with other contributors and get help with your contributions. -
+--- **Q:** What are the benefits of contributing to the FMTM? @@ -171,7 +171,7 @@ help with your contributions. humanitarian and development work, while also developing your mapping skills and knowledge. -
+--- **Q:** Do I need to have prior mapping experience to contribute to the FMTM? @@ -179,7 +179,7 @@ skills and knowledge. FMTM. The platform provides training and resources to help new contributors get started. -
+--- **Q:** How do I know which mapping project to join? @@ -188,7 +188,7 @@ location, the organization sponsoring the project, and the mapping goals. Review the project information and choose a project that interests you. -
+--- **Q:** Can I work on multiple mapping projects at the same time? @@ -196,7 +196,7 @@ interests you. time. However, it is important to ensure that you can commit the necessary time and effort to each project. -
+--- **Q:** How do I get feedback on my mapping work? @@ -204,7 +204,7 @@ necessary time and effort to each project. review and provide feedback on mapping work. You can also contact project managers or experienced contributors for additional feedback. -
+--- **Q:** How can I improve my mapping skills? @@ -212,7 +212,7 @@ project managers or experienced contributors for additional feedback. mapping skills. You can also join mapping communities and forums to connect with other contributors and learn from their experiences. -
+--- **Q:** Can I use the mapping data for my own research or projects? diff --git a/docs/User-Manual-For-Project-Managers.md b/docs/User-Manual-For-Project-Managers.md index b2b559d3d3..cf8d6a612d 100644 --- a/docs/User-Manual-For-Project-Managers.md +++ b/docs/User-Manual-For-Project-Managers.md @@ -1,16 +1,17 @@ # User Manual for FMTM -
-This manual is a step by step guide for the project managers on how to get started with the Field Mapping Tasking Manager. - -- [Introduction](#introduction) -- [An Overview Of FMTM In Relations With HOT, OSM and ODK.](#an-overview-of-fmtm-in-relations-with-hot-osm-and-odk) -- [Prerequisites](#prerequisites) -- [Video Tutorial](#video-tutorial) -- [Steps to create a project in FMTM](#steps-to-create-a-project-in-fmtm) -- [Steps to start access your project and Start mapping](#steps-to-start-access-your-project-and-start-mapping-or-a-mapping-campaign) -- [Help and Support](#help-and-support) -- [Thank you note](#thank-you) +This manual is a step by step guide for the project managers on how to get +started with the Field Mapping Tasking Manager. + +- [User Manual for FMTM](#user-manual-for-fmtm) + - [Introduction](#introduction) + - [An Overview Of FMTM In Relations With HOT, OSM and ODK](#an-overview-of-fmtm-in-relations-with-hot-osm-and-odk) + - [Prerequisites](#prerequisites) + - [Video Tutorial](#video-tutorial) + - [Steps to create a project in FMTM](#steps-to-create-a-project-in-fmtm) + - [Steps to start access your project and Start mapping or a mapping campaign](#steps-to-start-access-your-project-and-start-mapping-or-a-mapping-campaign) + - [Help and Support](#help-and-support) + - [Thank you](#thank-you) ## Introduction @@ -78,8 +79,12 @@ and improve the effectiveness of humanitarian efforts. ## Prerequisites - Stable Internet connection -- Knowledge on field mapping . If you are new to mapping we suggest you to read [this](https://tasks.hotosm.org/learn/map) . -- Account on ODK Central Server. [Here are the instructions for setting up an ODK Central server on Digital Ocean](https://docs.getodk.org/central-install-digital-ocean/) (it's very similar on AWS or whatever) +- Knowledge on field mapping . If you are new to mapping we suggest you to read + [this][9] . +- Account on ODK Central Server. + [Here][10] + are the instructions for setting up an ODK Central server on Digital Ocean + (it's very similar on AWS etc) ## Video Tutorial @@ -101,109 +106,171 @@ and improve the effectiveness of humanitarian efforts. ## Steps to create a project in FMTM -1. Go to [fmtm](https://fmtm.hotosm.org/) . -2. In the header, you'll find three tabs: Explore Projects, Manage Organization, and Manage Categories. +1. Go to [fmtm][11] . +2. In the header, you'll find three tabs: Explore Projects, Manage Organization, + and Manage Categories. - ![WhatsApp Image 2023-06-23 at 1 23 07 PM](https://github.com/hotosm/fmtm/assets/97789856/c0d272f0-c69c-483f-9e9d-83dd75b9e748) + ![fmtm dashboard][12] -3. Start by exploring the projects listed by different nations and world communities for field mapping exercises. -4. Use the filters or search option to narrow down the project cards based on your preferences. - ![WhatsApp Image 2023-06-23 at 1 21 05 PM](https://github.com/hotosm/fmtm/assets/97789856/a5d61628-70e6-426c-a860-b9c7968b4dea) -5. If you are not logged into the system, the "Create new project" button will be disabled. -6. If you are new then on the top right cornor click on Sign up and create an account . Else , Sign in to your existing account . +3. Start by exploring the projects listed by different nations and world + communities for field mapping exercises. +4. Use the filters or search option to narrow down the project cards based on + your preferences. + + ![project filters][13] + +5. If you are not logged into the system, the "Create new project" button will + be disabled. +6. 
If you are new then on the top right cornor click on Sign up and create an + account . Else , Sign in to your existing account . 7. Once signed in, the "Create new project" button will be enabled. Click on it. -8. The process of creating a new project involves four steps: Project Details, Uploading the Area, Defining the Task, and Selecting the Form. -9. Start by filling in the project details, including the organization name, project name, description, and other relevant information. +8. The process of creating a new project involves four steps: Project Details, + Uploading the Area, Defining the Task, and Selecting the Form. +9. Start by filling in the project details, including the organization name, + project name, description, and other relevant information. -![2](https://github.com/hotosm/fmtm/assets/97789856/97c38c80-aa0e-4fe2-b8a5-f4ee43a9a63a) + ![project details 2][14] -10. If your organization's name is not listed, you can add it through the "Manage Organization" tab. -11. Provide the necessary credentials for the ODK (Open Data Kit) central setup, including URL, username, and password. -12. Proceed to the next step, which is uploading the area for field mapping. Choose the file option and select the AOI (Area of Interest) file in GEOJSON file format. - Review the displayed map that corresponds to your selected area and click on "Next". +10. If your organization's name is not listed, you can add it through the + "Manage Organization" tab. +11. Provide the necessary credentials for the ODK (Open Data Kit) central setup, + including URL, username, and password. +12. Proceed to the next step, which is uploading the area for field mapping. + Choose the file option and select the AOI (Area of Interest) file in GEOJSON + file format. + Review the displayed map that corresponds to your selected area and click + on "Next". -![3](https://github.com/hotosm/fmtm/assets/97789856/680eb831-790a-48f1-8997-c20b5213909d) + ![project create info][15] 13. Define the tasks of the project. - ![WhatsApp Image 2023-06-23 at 1 38 18 PM](https://github.com/hotosm/fmtm/assets/97789856/177d8258-900e-447f-906a-28aeb1fd6b03) -If you choose "Divide on Square," specify the dimensions of the square tasks. -Click on "Next" to proceed. + ![project create area][16] -![WhatsApp Image 2023-06-23 at 1 17 37 PM](https://github.com/hotosm/fmtm/assets/97789856/f53d76b4-e6cc-44a4-8c7c-00082eb72693) - -14. Select Form . Select the form category you want to use for the field mapping, such as "Data Extract" or any other relevant category. - Choose a specific form from the existing categories or upload a custom form if needed. - Click on "Submit" to proceed. + If you choose "Divide on Square," specify the dimensions of the square tasks. -![WhatsApp Image 2023-06-23 at 1 37 19 PM](https://github.com/hotosm/fmtm/assets/97789856/f9a4bed7-d1a9-44dd-b2d4-b55f428f9416) + Click on "Next" to proceed. -15. Wait for the system to generate QR codes for each task, which will be used later in the field mapping process. -16. After the QR codes are generated, you can find your project in the project dashboard. + ![project task split][17] -
+14. Select Form . Select the form category you want to use for the field + mapping, such as "Data Extract" or any other relevant category. -## Steps to start access your project and Start mapping or a mapping campaign - -1. Go to the Explore projects tab . Click on the project card and proceed to the next step. -2. Select one of the available tasks and start the field mapping exercise. - ![WhatsApp Image 2023-06-23 at 1 26 39 PM](https://github.com/hotosm/fmtm/assets/97789856/162af2e0-dbfa-4787-8037-f03e71417df8) - -3. If a task is already locked by another user, choose a different task that is available for mapping.If a task is already locked by another user, choose a different task that is available for mapping. - -- The drop down icon beside **LEGEND** displays a color code. This - color code lets you know the status of each task on the map. + Choose a specific form from the existing categories or upload a custom form + if needed. + Click on "Submit" to proceed. - - **READY** means that task is available to be mapped - - **LOCKED FOR MAPPING** means that task is already being mapped by another volunteer and therefore unavailable for mapping - - **MAPPED** or **READY FOR VALIDATION** means that task has been completely mapped and ready to be validated. - - **LOCKED FOR VALIDATION** means that task has been mapped and being validated. - - **VALIDATED** means that task has successfully been validated and completely mapped with no errors - - **INVALIDATED** or **MORE MAPPING NEEDED** means that task did not pass the validation process and needs more mapping - - **BAD** means that task is not clear and cannot be mapped + ![project creation status][18] -> Note: 'task' refers to each section of the map enclosed in the dotted -> lines and each task has a corresponding number tag. +15. Wait for the system to generate QR codes for each task, which will be used + later in the field mapping process. +16. After the QR codes are generated, you can find your project in the project + dashboard. -![WhatsApp Image 2023-06-23 at 1 29 10 PM](https://github.com/hotosm/fmtm/assets/97789856/2c0397b0-1829-420a-982e-3d971b514f2c) +## Steps to start access your project and Start mapping or a mapping campaign -- To begin mapping, click on a task closest to you that has the color - code associated with **READY** and change it's status from **READY** - to **LOCKED FOR MAPPING**. Remember to take note of the number tag. -- Scroll to the bottom of the page. The **ACTIVITIES** tab shows the - tasks either **LOCKED FOR MAPPING**, **BAD** or **LOCKED FOR - VALIDATION**. You can search for tasks with the status mentioned - using the number tag associated with each task. +1. Go to the Explore projects tab . Click on the project card and proceed to the + next step. +2. Select one of the available tasks and start the field mapping exercise. -4. Use the QR code to start mapping the selected task using the ODK Collect app on your mobile phone. + ![select task][19] + +3. If a task is already locked by another user, choose a different task that is + available for mapping.If a task is already locked by another user, choose a + different task that is available for mapping. + + - The drop down icon beside **LEGEND** displays a color code. This + color code lets you know the status of each task on the map. 
+ + - **READY** means that task is available to be mapped + - **LOCKED FOR MAPPING** means that task is already being mapped by another + volunteer and therefore unavailable for mapping + - **MAPPED** or **READY FOR VALIDATION** means that task has been completely + mapped and ready to be validated. + - **LOCKED FOR VALIDATION** means that task has been mapped and being + validated. + - **VALIDATED** means that task has successfully been validated and completely + mapped with no errors + - **INVALIDATED** or **MORE MAPPING NEEDED** means that task did not pass the + validation process and needs more mapping + - **BAD** means that task is not clear and cannot be mapped + + > Note: 'task' refers to each section of the map enclosed in the dotted + > lines and each task has a corresponding number tag. + + ![map legend][20] + + - To begin mapping, click on a task closest to you that has the color + code associated with **READY** and change it's status from **READY** + to **LOCKED FOR MAPPING**. Remember to take note of the number tag. + - Scroll to the bottom of the page. The **ACTIVITIES** tab shows the + tasks either **LOCKED FOR MAPPING**, **BAD** or **LOCKED FOR + VALIDATION**. You can search for tasks with the status mentioned + using the number tag associated with each task. + +4. Use the QR code to start mapping the selected task using the ODK Collect app + on your mobile phone. 5. Install and open the ODK Collect app on your phone. 6. Set up the project details by scanning the QR code provided. -7. Once the project is set up in the app, start a new form based on the selected form from the project setup. +7. Once the project is set up in the app, start a new form based on the selected + form from the project setup. 8. Fill in the questionnaires and collect data for the field mapping exercise. 9. Save and send the completed form to the server. -10. After completing the assigned task, go back to the project platform on FMTM and mark it as fully mapped. +10. After completing the assigned task, go back to the project platform on FMTM + and mark it as fully mapped. ## Help and Support -If you encounter any issues or need assistance while using FMTM, you can access the following resources: +If you encounter any issues or need assistance while using FMTM, you can access +the following resources: -- Check the [FAQs](https://hotosm.github.io/fmtm/FAQ) . -- Ask your doubts in the [Slack channel: #fmtm-field-pilots](https://hotosm.slack.com/archives/C04PCBFDEGN) +- Check the [FAQs][21] . +- Ask your doubts in the [Slack channel: #fmtm-field-pilots][22] ## Thank you -We are excited to have you join our community of passionate mappers and volunteers. FMTM is a powerful platform developed by the Humanitarian OpenStreetMap Team (HOT) to facilitate mapping projects for disaster response, humanitarian efforts, and community development. +We are excited to have you join our community of passionate mappers and +volunteers. FMTM is a powerful platform developed by the Humanitarian +OpenStreetMap Team (HOT) to facilitate mapping projects for disaster response, +humanitarian efforts, and community development. -With FMTM, you have the opportunity to make a real impact by mapping areas that are in need of support. Your contributions help create detailed and up-to-date maps that aid organizations and communities in their efforts to respond to crises, plan infrastructure, and improve the lives of people around the world. 
+With FMTM, you have the opportunity to make a real impact by mapping areas that +are in need of support. Your contributions help create detailed and up-to-date +maps that aid organizations and communities in their efforts to respond to +crises, plan infrastructure, and improve the lives of people around the world. -Whether you are a seasoned mapper or new to the world of mapping, FMTM provides a user-friendly interface and a range of tools to make your mapping experience smooth and rewarding. You can create tasks, collaborate with other volunteers, and contribute to ongoing projects that align with your interests and expertise. +Whether you are a seasoned mapper or new to the world of mapping, FMTM provides +a user-friendly interface and a range of tools to make your mapping experience +smooth and rewarding. You can create tasks, collaborate with other volunteers, +and contribute to ongoing projects that align with your interests and expertise. -By mapping with FMTM, you are joining a global community of dedicated individuals who share a common goal of using open data to make a positive difference. Together, we can create a more resilient and inclusive world. +By mapping with FMTM, you are joining a global community of dedicated +individuals who share a common goal of using open data to make a positive +difference. Together, we can create a more resilient and inclusive world. -Explore the projects, join tasks, and contribute your skills to help us build accurate and comprehensive maps. Don't hesitate to ask questions, seek guidance, and engage with fellow mappers through our forums and communication channels. +Explore the projects, join tasks, and contribute your skills to help us build +accurate and comprehensive maps. Don't hesitate to ask questions, seek +guidance, and engage with fellow mappers through our forums and communication +channels. -Thank you for being part of FMTM. Your mapping efforts are invaluable, and we appreciate your commitment to making a difference. +Thank you for being part of FMTM. Your mapping efforts are invaluable, and we +appreciate your commitment to making a difference. Happy mapping! 
The FMTM Team + +[9]: https://tasks.hotosm.org/learn/map "If you are new to mapping" +[10]: https://docs.getodk.org/central-install-digital-ocean/ "Account on ODK Central Server" +[11]: https://fmtm.hotosm.org/ "fmtm" +[12]: https://github.com/hotosm/fmtm/assets/97789856/c0d272f0-c69c-483f-9e9d-83dd75b9e748 "fmtm dashboard" +[13]: https://github.com/hotosm/fmtm/assets/97789856/a5d61628-70e6-426c-a860-b9c7968b4dea "project filters" +[14]: https://github.com/hotosm/fmtm/assets/97789856/97c38c80-aa0e-4fe2-b8a5-f4ee43a9a63a "project details 2" +[15]: https://github.com/hotosm/fmtm/assets/97789856/680eb831-790a-48f1-8997-c20b5213909d "project create info" +[16]: https://github.com/hotosm/fmtm/assets/97789856/177d8258-900e-447f-906a-28aeb1fd6b03 "project create area" +[17]: https://github.com/hotosm/fmtm/assets/97789856/f53d76b4-e6cc-44a4-8c7c-00082eb72693 "project task split" +[18]: https://github.com/hotosm/fmtm/assets/97789856/f9a4bed7-d1a9-44dd-b2d4-b55f428f9416 "project creation status" +[19]: https://github.com/hotosm/fmtm/assets/97789856/162af2e0-dbfa-4787-8037-f03e71417df8 "select task" +[20]: https://github.com/hotosm/fmtm/assets/97789856/2c0397b0-1829-420a-982e-3d971b514f2c "map legend" +[21]: https://hotosm.github.io/fmtm/FAQ "FAQs" +[22]: https://hotosm.slack.com/archives/C04PCBFDEGN "Slack channel: #fmtm-field-pilots" diff --git a/docs/dev/Backend.md b/docs/dev/Backend.md index 9115d24eb7..9dfa08c491 100644 --- a/docs/dev/Backend.md +++ b/docs/dev/Backend.md @@ -39,7 +39,7 @@ URLs defined in the docker-compose file and your env file. `http://api.fmtm.localhost:7050/docs` > Note: If that link doesn't work, check the logs with -> `docker log fmtm-api`. +> `docker compose logs api`. > Note: the database host `fmtm-db` is automatically > resolved by docker compose to the database container IP. @@ -133,11 +133,12 @@ fastapi `Depends(login_required)` on endpoints. #### Creating Migration Files -- Exec into the API container: `docker compose exec api bash`. -- Run the command to generate migrations: `alembic revision`. -- The migration file should be generated under - `src/backend/migrations/versions`. -- Commit the file to the repo. +- Migrations can be written to `src/backend/migrations`. +- Each file must be an SQL script that is: + - Idempotent: can be run multiple times without consequence. + - Atomic: Run within a BEGIN/COMMIT transaction. +- Migrations must also include an equivalent revert migration under: + `src/backend/migrations/revert` #### Applying Migrations @@ -146,7 +147,21 @@ fastapi `Depends(login_required)` on endpoints. - To run manually: ```bash -alembic upgrade head +docker compose up -d migrations +``` + +### Type Checking + +- It is a good idea to have your code 'type checked' to avoid potential + future bugs. +- To do this, install `pyright` (VSCode has an extension). +- You may need to add the backend dependencies to `extraPaths`. In VSCode + your settings.json would include: + +```json +{ + "python.analysis.extraPaths": ["src/backend/__pypackages__/3.10/lib/"] +} ``` ## Backend Debugging @@ -157,8 +172,20 @@ alembic upgrade head To use it: 1. Re-build the docker image `docker compose build api` -2. Start the docker container `docker compose up -d api` -3. Connect to the debugger on port **5678**. +2. Uncomment the debug port in docker-compose.yml: + + ```yml + services: + ... + api: + ... + ports: + - "7052:8000" + # - "5678:5678" # Debugger port + ``` + +3. Start the docker container `docker compose up -d api` +4. Connect to the debugger on port **5678**. 
You can configure your IDE to do this with the build in debugger. @@ -196,6 +223,19 @@ To run the backend tests locally, run: docker compose run --rm api pytest ``` +To assess coverage of tests, run: + +```bash +docker compose run --rm --entrypoint='sh -c' api \ + 'coverage run -m pytest && coverage report -m' +``` + +To assess performance of endpoints: + +- We can use the pyinstrument profiler. +- While in debug mode (DEBUG=True), access any endpoint. +- Add the `?profile=true` arg to the URL to view the execution time. + ## Using the local version of ODK Central - During project creation a Central ODK URL must be provided. diff --git a/docs/dev/Frontend.md b/docs/dev/Frontend.md index a66fc0a0ba..f52c322791 100644 --- a/docs/dev/Frontend.md +++ b/docs/dev/Frontend.md @@ -11,13 +11,17 @@ For details on how to run the API first, please see: ### 1B: Starting the Frontend Containers -1. You will need to [Install Docker](https://docs.docker.com/engine/install/) and ensure that it is running on your local machine. +1. You will need to [Install Docker](https://docs.docker.com/engine/install/) + and ensure that it is running on your local machine. 2. From the command line: navigate to the top level directory of the FMTM project. 3. From the command line run: `docker compose build ui` - This is essential, as the development container for the frontend is different to production. + This is essential, as the development container for the frontend is + different to production. 4. Once everything is built, from the command line run: `docker compose up -d ui` -5. If everything goes well you should now be able to **navigate to the project in your browser:** +5. If everything goes well you should now be able to + **navigate to the project in your browser:** + [http://fmtm.localhost:7050](http://fmtm.localhost:7050) > Note: during development, if you rebuild the frontend, then > run 'docker compose up -d', the node_modules directory may @@ -42,7 +46,8 @@ The frontend should now be accessible at: `http://127.0.0.1:` ## Frontend Tips -The frontend is built with React and Typescript. Here are some tips on how to work with the frontend: +The frontend is built with React and Typescript. Here are some tips on how to +work with the frontend: ### Adding Environment Variables diff --git a/docs/dev/Production.md b/docs/dev/Production.md index 21e2ae513a..496909cfec 100644 --- a/docs/dev/Production.md +++ b/docs/dev/Production.md @@ -19,7 +19,6 @@ your own cloud server. ```bash curl -L https://get.fmtm.dev -o install.sh bash install.sh -# Alternative URL: https://fmtm.hotosm.org/install.sh # Then follow the prompts ``` diff --git a/docs/dev/Setup.md b/docs/dev/Setup.md index 9fa7b22aad..9735145b9c 100644 --- a/docs/dev/Setup.md +++ b/docs/dev/Setup.md @@ -17,23 +17,23 @@ - [Verify Setup](#verify-setup) - [Start Developing](#start-developing) -# Overview +## Overview -## Introduction to Field Mapping Tasking Manager Web App +### Introduction to Field Mapping Tasking Manager Web App The FMTM web app is an app that serves as a frontend for the ODK Central server, using the -[ODK Central API](https://odkcentral.docs.apiary.io/#) to allocate specific +[ODK Central API][1] to allocate specific areas/features to individual mappers, and receive their data submissions. 
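Circling back to the backend migration conventions described above (plain SQL files that are idempotent, run inside a BEGIN/COMMIT transaction, and paired with a revert script), a minimal sketch follows. The file name, table, and column are hypothetical; only the directory layout and the `migrations` service come from the docs.

```bash
# Hypothetical forward migration
cat > src/backend/migrations/010_add_example_column.sql << 'SQL'
-- Idempotent: IF NOT EXISTS makes repeated runs harmless
-- Atomic: wrapped in a single transaction
BEGIN;
ALTER TABLE projects ADD COLUMN IF NOT EXISTS example_notes VARCHAR;
COMMIT;
SQL

# Matching revert migration
cat > src/backend/migrations/revert/010_add_example_column.sql << 'SQL'
BEGIN;
ALTER TABLE projects DROP COLUMN IF EXISTS example_notes;
COMMIT;
SQL

# Apply pending migrations via the dedicated compose service
docker compose up -d migrations
```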
-![1](https://github.com/hotosm/fmtm/assets/97789856/305be31a-96b4-42df-96fc-6968e9bd4e5f) +![fmtm splash screen][2] The FMTM codebase consists of: - An API backend in FastAPI (code in: `src/backend`) - A frontend website in React (code in: `src/frontend`) -### Manager Web Interface (with PC browser-friendlymap view) +#### Manager Web Interface (with PC browser-friendlymap view) A computer-screen-optimized web app that allows Campaign Managers to: @@ -41,11 +41,12 @@ A computer-screen-optimized web app that allows Campaign Managers to: - Choose task-splitting schemes - Provide instructions and guidance specific to the project - View areas that are at various stages of completion -- Provide a project-specific URL that field mappers can access from their mobile phones to select and map tasks. +- Provide a project-specific URL that field mappers can access from their mobile + phones to select and map tasks. -## Basic Tools used +### Basic Tools used -### [ODK Collect](https://docs.getodk.org/collect-intro/) +#### [ODK Collect](https://docs.getodk.org/collect-intro/) A mobile data collection tool that functions on almost all Android phones. Field mappers use ODK Collect to select features such as buildings or amenities, @@ -53,25 +54,33 @@ and fill out forms with survey questions to collect attributes or data about those features (normally at least some of these attributes are intended to become OSM tags associated with those features). -The ODK Collect app connects to a back-end server (in this case ODK Central), which provides the features to be mapped and the survey form definitions. +The ODK Collect app connects to a back-end server (in this case ODK Central), +which provides the features to be mapped and the survey form definitions. -### [ODK Central server](https://odkcentral.docs.apiary.io/#) +#### [ODK Central server](https://odkcentral.docs.apiary.io) -An ODK Central server that functions as the back end for the field data collectors' ODK Collect apps on their Android phones. Devs must have access to an ODK Central server with a username and password granting admin credentials. +An ODK Central server that functions as the back end for the field data +collectors' ODK Collect apps on their Android phones. Devs must have access to +an ODK Central server with a username and password granting admin credentials. -[Here are the instructions for setting up an ODK Central server on Digital Ocean](https://docs.getodk.org/central-install-digital-ocean/) (it's very similar on AWS or whatever) +[Here](https://docs.getodk.org/central-install-digital-ocean/) are the +instructions for setting up an ODK Central server on Digital Ocean (it's very +similar on AWS etc) -# Guide for Users +## Guide for Users -## Prerequisites +### Prerequisites - Stable internet connection. -- Mapping Knowledge (Optional): While not mandatory, having some mapping knowledge can enhance your experience with FMTM. If you are new to mapping we suggest you to read [this](https://tasks.hotosm.org/learn/map) +- Mapping Knowledge (Optional): While not mandatory, having some mapping + knowledge can enhance your experience with FMTM. If you are new to mapping we + suggest you to read [this](https://tasks.hotosm.org/learn/map) -## Steps to create a project in FMTM +### Steps to create a project in FMTM - Go to [fmtm](https://fmtm.hotosm.org/) . -- If you are new then on the top right cornor click on Sign up and create an account . Else , Sign in to your existing account . 
+- If you are new then on the top right cornor click on Sign up and create an + account. Else, sign in to your existing account. - Click the '+ CREATE NEW PROJECT' button. - Enter the project details. @@ -85,92 +94,126 @@ An ODK Central server that functions as the back end for the field data collecto ![Screenshot 2023-06-07 232152](https://github.com/hotosm/fmtm/assets/97789856/b735a661-d0f6-46b8-b548-5ad7b1928480) -- Select Form . +- Select Form. ![Screenshot 2023-06-07 232316](https://github.com/hotosm/fmtm/assets/97789856/475a6070-4897-4e84-8050-6ecf024d0095) - Click on Submit button. -- **Please watch the video below for more details**:point_down: - - - +- **Please watch the video below for more details** :point_down: -## Work on existing projects +### Work on existing projects -If you donot want to create a new project and wish to work on an existing project then follow the steps below: +If you donot want to create a new project and wish to work on an existing +project then follow the steps below: - Go to [fmtm](https://fmtm.hotosm.org/) . -- If you are new then on the top right cornor click on Sign up and create an account . Else , Sign in to your existing account . -- Click the button **Explore Projects** . -- Select the project you can work on . +- If you are new then on the top right cornor click on Sign up and create an + account. Else, sign in to your existing account. +- Click the button **Explore Projects**. +- Select the project you can work on. - Click on the marked area. - Click on start mapping. ![5](https://github.com/hotosm/fmtm/assets/97789856/9343a4bc-462c-44af-af93-8a67907837b3) -## Help and Support +### Help and Support -If you encounter any issues or need assistance while using FMTM, you can access the following resources: +If you encounter any issues or need assistance while using FMTM, you can access +the following resources: - Check the [FAQs](https://hotosm.github.io/fmtm/FAQ/) . - Ask your doubts in the [Slack channel: #fmtm-field-pilots](https://hotosm.slack.com/archives/C04PCBFDEGN) -## Thank you +### Thank you -We are excited to have you join our community of passionate mappers and volunteers. FMTM is a powerful platform developed by the Humanitarian OpenStreetMap Team (HOT) to facilitate mapping projects for disaster response, humanitarian efforts, and community development. +We are excited to have you join our community of passionate mappers and +volunteers. FMTM is a powerful platform developed by the Humanitarian +OpenStreetMap Team (HOT) to facilitate mapping projects for disaster response, +humanitarian efforts, and community development. -With FMTM, you have the opportunity to make a real impact by mapping areas that are in need of support. Your contributions help create detailed and up-to-date maps that aid organizations and communities in their efforts to respond to crises, plan infrastructure, and improve the lives of people around the world. +With FMTM, you have the opportunity to make a real impact by mapping areas that +are in need of support. Your contributions help create detailed and up-to-date +maps that aid organizations and communities in their efforts to respond to +crises, plan infrastructure, and improve the lives of people around the world. -Whether you are a seasoned mapper or new to the world of mapping, FMTM provides a user-friendly interface and a range of tools to make your mapping experience smooth and rewarding. 
You can create tasks, collaborate with other volunteers, and contribute to ongoing projects that align with your interests and expertise. +Whether you are a seasoned mapper or new to the world of mapping, FMTM provides +a user-friendly interface and a range of tools to make your mapping experience +smooth and rewarding. You can create tasks, collaborate with other volunteers, +and contribute to ongoing projects that align with your interests and +expertise. -By mapping with FMTM, you are joining a global community of dedicated individuals who share a common goal of using open data to make a positive difference. Together, we can create a more resilient and inclusive world. +By mapping with FMTM, you are joining a global community of dedicated +individuals who share a common goal of using open data to make a positive +difference. Together, we can create a more resilient and inclusive world. -Explore the projects, join tasks, and contribute your skills to help us build accurate and comprehensive maps. Don't hesitate to ask questions, seek guidance, and engage with fellow mappers through our forums and communication channels. +Explore the projects, join tasks, and contribute your skills to help us build +accurate and comprehensive maps. Don't hesitate to ask questions, seek guidance, +and engage with fellow mappers through our forums and communication channels. -Thank you for being part of FMTM. Your mapping efforts are invaluable, and we appreciate your commitment to making a difference. +Thank you for being part of FMTM. Your mapping efforts are invaluable, and we +appreciate your commitment to making a difference. Happy mapping! The FMTM Team -# Guide for Developers +## Guide for Developers -## FMTM frontend +### FMTM frontend _To view details about the frontend click [here](https://hotosm.github.io/fmtm/dev/Frontend/)_ -### Field Mapper Web Interface (with mobile-friendly map view) +#### Field Mapper Web Interface (with mobile-friendly map view) -Ideally with a link that opens ODK Collect directly from the browser, but if that's hard, the fallback is downloading a QR code and importing it into ODK Collect. +Ideally with a link that opens ODK Collect directly from the browser, but if +that's hard, the fallback is downloading a QR code and importing it into ODK +Collect. -## FMTM backend +### FMTM backend _To in details about the backend click [here](https://hotosm.github.io/fmtm/dev/backend/)_ -A backend that converts the project parameters entered by the Campaign Manager in the Manager Web Interface into a corresponding ODK Central project. Its functions include: +A backend that converts the project parameters entered by the Campaign Manager +in the Manager Web Interface into a corresponding ODK Central project. 
It's +functions include: - Convert the AOI into a bounding box and corresponding Overpass API query -- Download (using the Overpass API) the OSM features that will be mapped in that bounding box (buildings and/or amenities) as well as the OSM line features that will be used as cutlines to subdivide the area +- Download (using the Overpass API) the OSM features that will be mapped in that + bounding box (buildings and/or amenities) as well as the OSM line features that + will be used as cutlines to subdivide the area - Trim the features within the bounding box but outside the AOI polygon -- Convert the polygon features into centroid points (needed because ODK select from map doesn't yet deal with polygons; this is likely to change in the future but for now we'll work with points only) -- Use line features as cutlines to create individual tasks (squares don't make sense for field mapping, neighborhoods delineated by large roads, watercourses, and railways do) -- Split the AOI into those tasks based on parameters set in the Manager Web Interface (number of features or area per task, splitting strategy, etc). +- Convert the polygon features into centroid points (needed because ODK select + from map doesn't yet deal with polygons; this is likely to change in the future + but for now we'll work with points only) +- Use line features as cutlines to create individual tasks (squares don't make + sense for field mapping, neighborhoods delineated by large roads, watercourses, + and railways do) +- Split the AOI into those tasks based on parameters set in the Manager Web + Interface (number of features or area per task, splitting strategy, etc). - Use the ODK Central API to create, on the associated ODK Central server: - A project for the whole AOI - One survey form for each split task (neighborhood) - - This might require modifying the xlsforms (to update the version ID of the forms and change the name of the geography file being referred to). This is pretty straightforward using [OpenPyXL](https://openpyxl.readthedocs.io/en/stable/), though we have to be careful to keep the location within the spreadsheet of these two items consistent. - - GeoJSON feature collections for each form (the buildings/amenities or whatever) - - An App User for each form, which in turn corresponds to a single task. When the ODK Collect app on a user's phone is configured to function as that App User, they have access to _only_ the form and features/area of that task. - - A set of QR Codes and/or configuration files/strings for ODK Collect, one for each App User - -## Prerequisites for Contribution - -### 1. Review documentation + - This might require modifying the xlsforms (to update the version ID of the + forms and change the name of the geography file being referred to). This is + pretty straightforward using + [OpenPyXL](https://openpyxl.readthedocs.io/en/stable/), + though we have to be careful to keep the location within the spreadsheet + of these two items consistent. + - GeoJSON feature collections for each form (the buildings/amenities etc) + - An App User for each form, which in turn corresponds to a single task. When + the ODK Collect app on a user's phone is configured to function as that App + User, they have access to _only_ the form and features/area of that task. + - A set of QR Codes and/or configuration files/strings for ODK Collect, one + for each App User + +### Prerequisites for Contribution + +#### 1. 
Review documentation Don't forget to review the [Contribution](https://hotosm.github.io/fmtm/CONTRIBUTING/) @@ -178,22 +221,26 @@ guidelines and our [Code of Conduct](https://hotosm.github.io/fmtm/CODE_OF_CONDUCT/) before contributing! -Here are the steps to contribute to the frontend of Field Mapping Tasking Manager: +Here are the steps to contribute to the frontend of Field Mapping Tasking +Manager: -### 2. Fork the repository +#### 2. Fork the repository Forking creates a copy of the repository in your own GitHub account. -Go to the [Field Mapping Tasking Manager repository](https://github.com/hotosm/fmtm) and click the "Fork" button in the top right corner of the page. +Go to the +[Field Mapping Tasking Manager repository](https://github.com/hotosm/fmtm) +and click the "Fork" button in the top right corner of the page. -### 3. Navigate to your working directory +#### 3. Navigate to your working directory -Open a terminal and navigate to the directory you want to work in using the following command: +Open a terminal and navigate to the directory you want to work in using the +following command: `cd ` Make sure to replace `` with the name of your directory. -### 4. Clone the forked repository +#### 4. Clone the forked repository Clone the forked repository to your local machine using the following command: @@ -201,70 +248,100 @@ Clone the forked repository to your local machine using the following command: Make sure to replace `` with your GitHub username. -### 5. Create a new branch +#### 5. Create a new branch Create a new branch for your changes using the following command: `git checkout -b branch-name` -Make sure to give your branch a descriptive name that reflects the changes you'll be making. +Make sure to give your branch a descriptive name that reflects the changes +you'll be making. -### 6. Make changes +#### 6. Make changes Make your contribution, run tests where needed and save. -### 7. Add changes +#### 7. Add changes Add the changes you've made using the following command: `git add ` -Make sure you replace `` with the name of the file you made changes to. +Make sure you replace `` with the name of the file you made changes +to. -### 8. Commit and push +#### 8. Commit and push -Once you've added changes, commit them to your local branch using the following command: +Once you've added changes, commit them to your local branch using the following +command: `git commit -m "Add feature"` -Make sure to write a descriptive commit message that explains the changes you've made. Then, push your changes to your forked repository using the following command: +Make sure to write a descriptive commit message that explains the changes you've +made. Then, push your changes to your forked repository using the following +command: `git push origin branch-name` -### 9. Submit a pull request +#### 9. Submit a pull request Go to your forked repository on GitHub and click the "Pull requests" tab. -Change the base branch from `main` to `development`, select the branch that contains your changes from the compare branch, then click "Create pull request". -This will open a new pull request in the fmtm repository, where you can describe your changes and request that they be merged into the main codebase. - -> Note: After a PR has been approved and merged, if the branch is no longer in use, delete it both locally and remotely. Otherwise we get buried in dead branches we don't need. 
-> Use the following commands: -> Switch out of the branch you want to delete `git checkout `(_Replace `` with any other existing branch name_). -> Delete the branch locally `git branch -d ` and then delete the branch remotely `git push origin --delete `(_Replace `` with the name of the branch you want to delete_). +Change the base branch from `main` to `development`, select the branch that +contains your changes from the compare branch, then click "Create pull +request". +This will open a new pull request in the fmtm repository, where you can describe +your changes and request that they be merged into the main codebase. + +> Note: After a PR has been approved and merged, if the branch is no longer +> in use, delete it both locally and remotely. Otherwise we get buried in dead +> branches we don't need. +> +> Use the following commands: +> +> Switch out of the branch you want to delete: +> `git checkout ` +> +> _Replace `` with any other existing branch name_ +> +> Delete the branch locally: +> `git branch -d ` +> and then delete the branch remotely: +> `git push origin --delete ` +> +> _Replace `` with the name of the branch you want to delete_ That's it! You've now contributed to the Field Mapping Tasking Manager. -## Setup Your Local Environment +### Setup Your Local Environment These steps are essential to run and test your code! -### 1. Setup OSM OAUTH 2.0 +#### 1. Setup OSM OAUTH 2.0 -The FMTM uses OAUTH2 with OSM to authenticate users. To properly configure your FMTM project, you will need to create keys for OSM. +The FMTM uses OAUTH2 with OSM to authenticate users. To properly configure your +FMTM project, you will need to create keys for OSM. -1. [Login to OSM](https://www.openstreetmap.org/login) (_If you do not have an account yet, click the signup button at the top navigation bar to create one_). Click the drop down arrow on the extreme right of the navigation bar and select My Settings. +1. [Login to OSM](https://www.openstreetmap.org/login) (_If you do not have an + account yet, click the signup button at the top navigation bar to create one_). + Click the drop down arrow on the extreme right of the navigation bar and + select My Settings. -2. Register your FMTM instance to OAuth 2 applications. Put your login redirect url as `http://127.0.0.1:7051/osmauth/`, For Production replace the URL as production API Url +2. Register your FMTM instance to OAuth 2 applications. Put your login redirect + url as `http://127.0.0.1:7051/osmauth/`, For Production replace the URL as + production API Url -> Note: `127.0.0.1` is required instead of `localhost` due to OSM restrictions. + > Note: `127.0.0.1` is required instead of `localhost` due to OSM restrictions. -image + image -3. Right now read user preferences permission is enough later on fmtm may need permission to modify the map option which should be updated on OSM_SCOPE variable on .env , Keep read_prefs for now. +3. Right now read user preferences permission is enough later on fmtm may need + permission to modify the map option which should be updated on OSM_SCOPE + variable on .env , Keep read_prefs for now. -4. Now Copy your Client ID and Client Secret. Put them in the `OSM_CLIENT_ID` and `OSM_CLIENT_SECRET` field of your `.env` file +4. Now Copy your Client ID and Client Secret. Put them in the `OSM_CLIENT_ID` + and `OSM_CLIENT_SECRET` field of your `.env` file -#### 2. Create an `.env` File +##### 2. Create an `.env` File Environmental variables are used throughout this project. 
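As an aside (not part of the patch): before bringing the stack up, it can help to confirm the OSM variables described above actually made it into your environment. A minimal, hypothetical check — the variable names `OSM_CLIENT_ID`, `OSM_CLIENT_SECRET` and `OSM_SCOPE` come from the steps above; everything else is illustrative:

```python
import os
import sys

# Hypothetical helper (not part of this patch): confirm the OSM OAuth
# variables described above are present before starting the stack.
required = ["OSM_CLIENT_ID", "OSM_CLIENT_SECRET"]
missing = [name for name in required if not os.environ.get(name)]

if missing:
    sys.exit(f"Missing OSM OAuth settings in .env / environment: {', '.join(missing)}")

# read_prefs is the only scope needed for now (see step 3 above).
print("OSM_SCOPE:", os.environ.get("OSM_SCOPE", "read_prefs"))
```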
To get started, create `.env` file in the top level dir, @@ -280,36 +357,49 @@ bash scripts/gen-env.sh > `EXTRA_CORS_ORIGINS` is a set of comma separated strings, e.g.: > -## Verify Setup +### Verify Setup -### Check Deployment +#### Check Deployment -For details on how to run this project locally for development, please look at: [Backend Docs](https://hotosm.github.io/fmtm/dev/Backend) +For details on how to run this project locally for development, please look at: +[Backend Docs](https://hotosm.github.io/fmtm/dev/Backend) -### Check Authentication +#### Check Authentication -Once you have deployed, you will need to check that you can properly authenticate. +Once you have deployed, you will need to check that you can properly +authenticate. 1. Navigate to `http://api.fmtm.localhost:7050/docs` - Three endpoints are responsible for oauth - image + Three endpoints are responsible for oauth + image -2. Select the `/auth/osm_login/` endpoint, click `Try it out` and then `Execute`. - This would give you the Login URL where you can supply your osm username and password. +2. Select the `/auth/osm_login/` endpoint, click `Try it out` and then + `Execute`. + This would give you the Login URL where you can supply your osm username + and password. - Your response should look like this: + Your response should look like this: - {"login_url": "https://www.openstreetmap.org/oauth2/authorize/?response_type=code&client_id=xxxx"} + ```json + { + "login_url": "https://www.openstreetmap.org/oauth2/authorize/?response_type=code&client_id=xxxx" + } + ``` - Now copy and paste your login_url in a new tab. You would be redirected to OSM for your LOGIN. Give FMTM the necessary permission. + Now copy and paste your login_url in a new tab. You would be redirected to + OSM for your LOGIN. Give FMTM the necessary permission. - After a successful login, you will get your `access_token` for FMTM, Copy it. Now, you can use it for rest of the endpoints that needs authorization. + After a successful login, you will get your `access_token` for FMTM, Copy + it. Now, you can use it for rest of the endpoints that needs authorization. -3. Check your access token: Select the `/auth/me/` endpoint and click `Try it out`. - Pass in the `access_token` you copied in the previous step into the `access-token` field and click `Execute`. You should get your osm id, username and profile picture id. +3. Check your access token: Select the `/auth/me/` endpoint and click + `Try it out`. + Pass in the `access_token` you copied in the previous step into the + `access-token` field and click `Execute`. You should get your osm id, + username and profile picture id. -## Start Developing +### Start Developing Don't forget to review the [Contribution](https://hotosm.github.io/fmtm/CONTRIBUTING/) @@ -320,3 +410,6 @@ before contributing! Happy coding! The FMTM Developer Team + +[1]: https://odkcentral.docs.apiary.io +[2]: https://github.com/hotosm/fmtm/assets/97789856/305be31a-96b4-42df-96fc-6968e9bd4e5f diff --git a/docs/dev/Version-Control.md b/docs/dev/Version-Control.md deleted file mode 100644 index b0a8c0708a..0000000000 --- a/docs/dev/Version-Control.md +++ /dev/null @@ -1,29 +0,0 @@ -# Version Control - -- FMTM uses semantic versioning: MAJOR.MINOR.PATCH. - -- The versioning is the same for both the backend and frontend. - -- Versions are managed by Commitizen from the `src/backend/pyproject.toml` file. - -- Versions are determined by conventional commit messages: - - `fix: xxx` denotes a patch, `feat: xxx` denotes a minor increment. 
- - Breaking changes would typically denote a major increment. - - This does not necessarily apply to web apps, so major versioning is subjective. - -## Bumping Versions - -Install commitizen: - -```bash -pip install commitizen -``` - -Bump the version: - -```bash -cd src/backend -cz bump --check-consistency -git push -git push --tag -``` diff --git a/docs/index.md b/docs/index.md index 76512fd8b8..610512756f 100644 --- a/docs/index.md +++ b/docs/index.md @@ -1,10 +1,11 @@ # 🤗 Welcome To The Field Mapping Documentation 🤗 -### _**What is ONE thing all the greatest solutions out there have in common?**_ +## What is ONE thing all the greatest solutions out there have in common? They never forget the people they are building for (^^) :smiley: -So who are we building field mapping solutions for? In short, we build solutions for: +So who are we building field mapping solutions for? In short, we build solutions +for: ## The field mapping manager (coordinating field mapping) @@ -24,7 +25,13 @@ steps, including: - **_Coordination:_** The field mapping manager must coordinate the activities of the field mappers, ensuring that they are working together effectively and efficiently. - + + coordination ## The field mapper (collecting data) @@ -61,13 +68,13 @@ errors. The validator faces a number of challenges and issues, including: validation protocols and guidelines, ensuring that the data meets a high level of quality and accuracy. -[Click here](https://miro.com/app/board/uXjVPX4XLcI=/?moveToWidget=3458764534814634286&cot=14) to see an overview of who you are building for, which actions they take and some ideas for solutions! - -Below is an overview of the user flow of the **Field Mapper**, the **Mapping Coordinator / Field Mapping Manager** and the **Validator**. +[Click here][1] to see an overview of who you are building for, which actions +they take and some ideas for solutions! - +Below is an overview of the user flow of the **Field Mapper**, the +**Mapping Coordinator / Field Mapping Manager** and the **Validator**. -# The Field Mapping Manager (The person who coordinates field mapping) +## The Field Mapping Manager (The person who coordinates field mapping) The **Field Mapping Manager** is tasked with coordinating all mapping activities, ensuring that **Field Mappers** are safe and qualified to @@ -77,7 +84,8 @@ Manager** has to consider 3 main activities: 1. **Pre-field data collection activities** - Setting up and testing tools - Creating a form - - Training field mappers (making sure that they understand the form and how to fill it) + - Training field mappers (making sure that they understand the form and how + to fill it) - Creating an area of interest within the location to be mapped 2. **Data collection activities** - Preparing daily, weekly and monthly activity plans @@ -88,15 +96,18 @@ Manager** has to consider 3 main activities: - Giving and recieving team feedback - Updating tools and methodoligies -If you want more information, click on [this virtual whiteboard!](https://miro.com/app/board/uXjVPX4XLcI=/?moveToWidget=3458764535077658755&cot=14) to see all the steps a **Field Mapping Coordinator or Manager** goes through. Here's a sneak-peek: +If you want more information, click on [this][2] virtual whiteboard! to see all +the steps a **Field Mapping Coordinator or Manager** goes through. 
+Here's a sneak-peek:

-- See also data collection approach [>>in this training guide<<](https://hotosm.github.io/pdc-documentation/en/pages/01-osm-field-surveyor-guideliness/01-metodologi-pengumpulan-data/01_metodologi_pengumpulan_data/)
+- See also data collection approach [>>in this training guide<<][3]

-![Screenshot 2022-10-10 at 13 48 40](https://user-images.githubusercontent.com/107098623/194859584-31dae52a-e918-4cd8-9071-24791750d6bb.png)
+![Field mapping coordinator][4]

-# The Field Mapper (The person who collects data)
+## The Field Mapper (The person who collects data)

-The **Field Mapper** surveys locations and collects data from them. The basic work flow that a **Field Mapper** has to consider is as follows:
+The **Field Mapper** surveys locations and collects data from them. The basic
+workflow that a **Field Mapper** has to consider is as follows:

1. Find a phone that:
   - Works well with the app.
@@ -105,43 +116,60 @@ The **Field Mapper** surveys locations and collects data from them. The basic wo
3. Access the **OSM** registration site and register.
4. Follow the tutorial and choose a campaign.
5. Select a task from the campaign.
-6. Launch the map app which allows you to see the moving map of the points that you are now responsible for.
-7. Complete each point within the task, making sure to answer the questions in the form for each one.
+6. Launch the map app which allows you to see the moving map of the points that
+   you are now responsible for.
+7. Complete each point within the task, making sure to answer the questions in
+   the form for each one.
8. Send the data of each point until the task is complete.
-9. Once the task is complete, signal that your task is finished. This lets the **Validator** know that they can check the quality of your data.
+9. Once the task is complete, signal that your task is finished. This lets the
+   **Validator** know that they can check the quality of your data.
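As an illustration (not part of the patch): the map app launched in step 6 is ODK Collect, provisioned via the App User QR code described earlier, so each mapper sees only the form and features for their own task. Below is a simplified sketch of that payload, based on the `encode_qrcode_json()` helper added to `central_crud.py` later in this diff — the JSON keys and the base64 step come from the patch, while the wrapper function itself is hypothetical and omits any compression:

```python
import base64
import json


def collect_settings_b64(odk_central_url: str, token: str, project_id: int, name: str) -> str:
    """Sketch: assemble the ODK Collect settings JSON and base64 encode it."""
    settings = {
        "general": {
            # The App User token scopes the app to a single project/task
            "server_url": f"{odk_central_url}/v1/key/{token}/projects/{project_id}",
            "form_update_mode": "match_exactly",
            "basemap_source": "osm",
            "autosend": "wifi_and_cellular",
        },
        "project": {"name": name},
        "admin": {},
    }
    return base64.b64encode(json.dumps(settings).encode("utf-8")).decode("utf-8")
```

A QR code built from a string like this is what points ODK Collect at a single project using that App User's token.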
-If you want to see this workflow in more detail, click on [this virtual whiteboard!](https://miro.com/app/board/uXjVPX4XLcI=/?moveToWidget=3458764535074723879&cot=14) Here's a sneak-peek: +If you want to see this workflow in more detail, click on +[this virtual whiteboard!][5] - - +Here's a sneak-peek: -# The Validator (The person who checks the quality of data collected) +fmtm-workflow -![Screenshot 2022-10-20 at 12 14 16](https://user-images.githubusercontent.com/107098623/196922048-c156b8ed-7f1a-404b-a636-fcfca2c0b328.png) +## The Validator (The person who checks the quality of data collected) -The **Validator** does the following tasks to ensure that quality data is collected and uploaded to **OSM**: +![osm data collection][6] + +The **Validator** does the following tasks to ensure that quality data is +collected and uploaded to **OSM**: - Logs into task manager - Checks how many tasks need validation - Checks tasks for errors - Stops mappers that make mistakes and explains how they can fix their errors -- Lowers supervision on good mappers in order to spend more time on those who are stuggling +- Lowers supervision on good mappers in order to spend more time on those who + are stuggling - Uploads high quality data to **OSM** and marks completed tasks as **done** Some of the pain points and struggles faced by the **Validator** include: -- Lack of validators (the **Validator** is often the same person as the **Field Mapping Manager**) +- Lack of validators (the **Validator** is often the same person as the + **Field Mapping Manager**) - Many areas of the world are still unmapped on online commercial maps - The quality of maps needs to improve -- The more developers complicate tools, the more they reduce the usefulness of the tools +- The more developers complicate tools, the more they reduce the usefulness + of the tools + +For more details, click on [this virtual whiteboard!][7] -For more details, click on [this virtual whiteboard!](https://miro.com/app/board/uXjVPX4XLcI=/?moveToWidget=3458764535077160536&cot=14) See sneak-peek below. +See sneak-peek below. -- See also all steps of validation [>>in this training guide<<](https://hotosm.github.io/pdc-documentation/en/pages/04-data-validation-and-quality-assurance/02-penggunaan-josm-untuk-validasi-data-survei/josm-for-validating-survey-data/) +- See also all steps of validation [>>in this training guide<<][8] - See also global validator Becky's OSM diary which features: - ["Common Feedback I give while Validating Building Projects"](https://www.openstreetmap.org/user/Becky%20Candy/diary) + ["Common Feedback I give while Validating Building Projects"][9] -![Screenshot 2022-10-10 at 14 45 39](https://user-images.githubusercontent.com/107098623/194870234-fa9497cb-d9e4-43ff-b7dd-ad731f8be488.png) +![validator flow][10] Some of the key points covered are: @@ -150,4 +178,16 @@ Some of the key points covered are: - Ensuring consistency with existing data and standards - Providing clear and constructive feedback to field mappers and coordinators -Overall, understanding the needs and considerations of each of these three user groups is essential for building effective field mapping solutions. +Overall, understanding the needs and considerations of each of these three user +groups is essential for building effective field mapping solutions. 
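As an illustration (not part of the patch): the Validator persona above maps onto the per-project roles this diff introduces in `src/backend/app/auth/roles.py`. A minimal sketch of how an endpoint could reuse the new `validator` dependency — the imports mirror modules added in this patch, but the route itself is hypothetical:

```python
from fastapi import APIRouter, Depends

from app.auth.osm import AuthUser
from app.auth.roles import validator

router = APIRouter()


# Hypothetical route, for illustration only
@router.post("/projects/{project_id}/tasks/{task_id}/validate")
async def mark_task_validated(
    project_id: int,
    task_id: int,
    user_data: AuthUser = Depends(validator),
):
    # Reaching this point means `validator` already confirmed the caller
    # holds at least the VALIDATOR role for this project (else it raised 403).
    return {"project_id": project_id, "task_id": task_id, "status": "validated"}
```

The `org_admin` and `super_admin` dependencies added in the same file are used the same way for higher-privilege endpoints.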
+ +[1]: https://miro.com/app/board/uXjVPX4XLcI=/?moveToWidget=3458764534814634286&cot=14 +[2]: https://miro.com/app/board/uXjVPX4XLcI=/?moveToWidget=3458764535077658755&cot=14 +[3]: https://hotosm.github.io/pdc-documentation/en/pages/01-osm-field-surveyor-guideliness/01-metodologi-pengumpulan-data/01_metodologi_pengumpulan_data/ +[4]: https://user-images.githubusercontent.com/107098623/194859584-31dae52a-e918-4cd8-9071-24791750d6bb.png +[5]: https://miro.com/app/board/uXjVPX4XLcI=/?moveToWidget=3458764535074723879&cot=14 +[6]: https://user-images.githubusercontent.com/107098623/196922048-c156b8ed-7f1a-404b-a636-fcfca2c0b328.png +[7]: https://miro.com/app/board/uXjVPX4XLcI=/?moveToWidget=3458764535077160536&cot=14 +[8]: https://hotosm.github.io/pdc-documentation/en/pages/04-data-validation-and-quality-assurance/02-penggunaan-josm-untuk-validasi-data-survei/josm-for-validating-survey-data +[9]: https://www.openstreetmap.org/user/Becky%20Candy/diary/399055 +[10]: https://user-images.githubusercontent.com/107098623/194870234-fa9497cb-d9e4-43ff-b7dd-ad731f8be488.png diff --git a/mkdocs.yml b/mkdocs.yml index dd68be945a..9ba846a0a4 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -20,6 +20,7 @@ theme: name: material palette: primary: custom + language: en extra_css: - css/extra.css @@ -40,6 +41,7 @@ markdown_extensions: plugins: - search + - git-revision-date-localized - exclude: glob: - plugins/* @@ -63,7 +65,7 @@ nav: - Installation: INSTALL.md - Contribution Guidelines: CONTRIBUTING.md - Documentation Contribution: Guide-On-Improving-Documentation.md - - Code of Conduct: CODE_OF_CONDUCT.md + - Code of Conduct: https://docs.hotosm.org/code-of-conduct - FAQ: FAQ.md - User Manuals: - Project Managers: User-Manual-For-Project-Managers.md @@ -75,6 +77,8 @@ nav: - Deployment Flow: dev/Deployment-Flow.md - Database Tips: dev/Database-Tips.md - Troubleshooting: dev/Troubleshooting.md - - Version Control: dev/Version-Control.md + - Pre-Commit: https://docs.hotosm.org/dev-guide/pre-commit/ + - Versioning: https://docs.hotosm.org/dev-guide/version-control/#creating-releases - API: https://hotosm.github.io/swagger/?url=https://hotosm.github.io/fmtm/openapi.json - Class Hierarchy: apidocs/html/inherits.html + - Code Coverage: coverage.html diff --git a/nginx/Dockerfile b/nginx/Dockerfile index 7d79188003..83b71b92f9 100644 --- a/nginx/Dockerfile +++ b/nginx/Dockerfile @@ -102,7 +102,6 @@ COPY container-entrypoint.sh /docker-entrypoint.sh RUN chmod +x /docker-entrypoint.sh COPY templates/cert-init/fmtm.conf.template \ templates/cert-init/api.conf.template \ - templates/cert-init/script.conf.template \ /etc/nginx/templates/ @@ -122,13 +121,6 @@ COPY templates/fmtm.conf.template \ -FROM main as main-plus-script -# Add install script -COPY templates/script.conf.template \ - /etc/nginx/templates/ - - - FROM main as development # API, Frontend, ODK, S3 COPY templates/odk.conf.template \ diff --git a/nginx/build_imgs.sh b/nginx/build_imgs.sh index d2e9588735..9a4d17c1f6 100644 --- a/nginx/build_imgs.sh +++ b/nginx/build_imgs.sh @@ -63,14 +63,3 @@ docker build nginx \ if [[ -n "$PUSH_IMGS" ]]; then docker push "ghcr.io/hotosm/fmtm/proxy:main" fi - -# Main plus script proxy -echo "Building proxy:main-plus-script" -docker build nginx \ - --tag "ghcr.io/hotosm/fmtm/proxy:main-plus-script" \ - --target main-plus-script \ - --build-arg NGINX_TAG="${NGINX_TAG}" - -if [[ -n "$PUSH_IMGS" ]]; then - docker push "ghcr.io/hotosm/fmtm/proxy:main-plus-script" -fi diff --git a/nginx/container-entrypoint.sh 
b/nginx/container-entrypoint.sh index 6bda9b35f4..17f645d04c 100644 --- a/nginx/container-entrypoint.sh +++ b/nginx/container-entrypoint.sh @@ -27,8 +27,8 @@ done # Check if the timeout was reached if [ $timeout -eq 0 ]; then - echo "NGINX did not start within the timeout." - exit 1 + echo "NGINX did not start within the timeout." + exit 1 fi # Check if FMTM_DOMAIN is set @@ -63,19 +63,9 @@ if [ -n "${FMTM_S3_DOMAIN}" ]; then fi # Run certbot with the constructed arguments -echo "Running command: certbot --non-interactive certonly ${certbot_args[@]}" +echo "Running command: certbot --non-interactive certonly ${certbot_args[*]}" certbot --non-interactive certonly "${certbot_args[@]}" echo "Certificate generated under: /etc/letsencrypt/live/${FMTM_DOMAIN}/" -# Add FMTM_SCRIPT_DOMAIN if present -if [ -n "${FMTM_SCRIPT_DOMAIN}" ] && [ "${FMTM_SCRIPT_DOMAIN}" != "_" ]; then - echo - echo "FMTM_SCRIPT_DOMAIN variable set. Generating separate certificate." - certbot --non-interactive certonly \ - --webroot --webroot-path=/var/www/certbot \ - --email "${CERT_EMAIL}" --agree-tos --no-eff-email \ - -d "${FMTM_SCRIPT_DOMAIN}" -fi - # Successful exit (stop container) exit 0 diff --git a/nginx/templates/cert-init/script.conf.template b/nginx/templates/cert-init/script.conf.template deleted file mode 100644 index e1f4b46346..0000000000 --- a/nginx/templates/cert-init/script.conf.template +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright (c) 2022, 2023 Humanitarian OpenStreetMap Team -# -# This file is part of FMTM. -# -# FMTM is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# FMTM is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with FMTM. If not, see . -# - -server { - listen 80; - server_name ${FMTM_SCRIPT_DOMAIN}; - - location /.well-known/acme-challenge/ { - root /var/www/certbot; - } - - if ($server_name = "") { - return 444; - } -} diff --git a/nginx/templates/script.conf.template b/nginx/templates/script.conf.template deleted file mode 100644 index 41a53744b0..0000000000 --- a/nginx/templates/script.conf.template +++ /dev/null @@ -1,61 +0,0 @@ -# Copyright (c) 2022, 2023 Humanitarian OpenStreetMap Team -# -# This file is part of FMTM. -# -# FMTM is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# FMTM is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with FMTM. If not, see . 
-# - -server { - listen 80; - server_name ${FMTM_SCRIPT_DOMAIN}; - return 301 https://$host$request_uri; -} - -server { - listen 443 ssl; - server_name ${FMTM_SCRIPT_DOMAIN}; - - ssl_certificate /etc/letsencrypt/live/${FMTM_SCRIPT_DOMAIN}/fullchain.pem; - ssl_certificate_key /etc/letsencrypt/live/${FMTM_SCRIPT_DOMAIN}/privkey.pem; - include /etc/nginx/options-ssl-nginx.conf; - include /etc/nginx/options-security.conf; - - # Disable file uploads - client_max_body_size 0; - - # Response headers - add_header 'Content-Security-Policy' 'upgrade-insecure-requests'; - # For opentelemetry - add_header 'Access-Control-Allow-Headers' 'traceparent,tracestate'; - - location / { - # Serve FMTM install script /usr/share/nginx/html/fmtm/install.sh - root /usr/share/nginx/html/fmtm; - try_files /install.sh /install.sh; - - # Allow executing shell scripts directly in the browser - default_type text/plain; - - # Prevent caching - add_header Cache-Control "no-cache, no-store, must-revalidate"; - - # Prevent directory listing - autoindex off; - } - - error_page 500 502 503 504 /50x.html; - location = /50x.html { - root /usr/share/nginx/html; - } -} diff --git a/scripts/gen-env.sh b/scripts/gen-env.sh index 4f7374df45..1fbc35b874 100644 --- a/scripts/gen-env.sh +++ b/scripts/gen-env.sh @@ -1,6 +1,6 @@ #!/bin/bash -DOTENV_PATH=.env +DOTENV_NAME=.env IS_TEST=false BRANCH_NAME= @@ -41,7 +41,7 @@ install_envsubst_if_missing() { else echo "Downloading a8m/envsubst" echo - curl -L https://github.com/a8m/envsubst/releases/download/v1.2.0/envsubst-`uname -s`-`uname -m` -o envsubst + curl -L "https://github.com/a8m/envsubst/releases/download/v1.2.0/envsubst-$(uname -s)-$(uname -m)" -o envsubst chmod +x envsubst fi } @@ -51,12 +51,10 @@ check_if_test() { echo "Is this a test deployment?" echo - while true - do - read -e -p "Enter 'y' if yes, anything else to continue: " test + while true; do + read -erp "Enter 'y' if yes, anything else to continue: " test - if [[ "$test" = "y" || "$test" = "yes" ]] - then + if [[ "$test" = "y" || "$test" = "yes" ]]; then IS_TEST=true export DEBUG=True export LOG_LEVEL="DEBUG" @@ -72,48 +70,18 @@ check_if_test() { } check_existing_dotenv() { - if [ -f "${DOTENV_PATH}" ] - then - echo "WARNING: ${DOTENV_PATH} file already exists." - echo "This script will overwrite the content of this file." - echo - echo "Do you want to overwrite "${DOTENV_PATH}"? y/n" - until [ "$overwrite" = "y" -o "$overwrite" = "n" ] - do - read -e -p "Enter 'y' to overwrite, anything else to continue: " overwrite - - if [ "$overwrite" = "y" ] - then - return 1 - elif [ "$overwrite" = "n" ] - then - echo "Continuing with existing .env file." - return 0 - else - echo "Invalid input!" - fi - done - fi - - return 1 -} - -check_existing_dotenv() { - if [ -f "${DOTENV_PATH}" ] - then - echo "WARNING: ${DOTENV_PATH} file already exists." + if [ -f "${DOTENV_NAME}" ]; then + echo "WARNING: ${DOTENV_NAME} file already exists." echo "This script will overwrite the content of this file." echo - echo "Do you want to overwrite file '"${DOTENV_PATH}"'? y/n" + printf "Do you want to overwrite file \'%s\'? y/n" "${DOTENV_NAME}" echo - while true - do - read -e -p "Enter 'y' to overwrite, anything else to continue: " overwrite + while true; do + read -erp "Enter 'y' to overwrite, anything else to continue: " overwrite - if [[ "$overwrite" = "y" || "$overwrite" = "yes" ]] - then + if [[ "$overwrite" = "y" || "$overwrite" = "yes" ]]; then return 1 - else + else echo "Continuing with existing .env file." 
return 0 fi @@ -126,8 +94,7 @@ check_existing_dotenv() { set_deploy_env() { pretty_echo "Deployment Environment" - while true - do + while true; do echo "Which environment do you wish to run? (dev/staging/prod)" echo echo "Both dev & staging include ODK Central and S3 buckets." @@ -136,7 +103,7 @@ set_deploy_env() { echo "- ODK Central" echo "- S3 Buckets" echo - read -e -p "Enter the environment (dev/staging/prod): " environment + read -erp "Enter the environment (dev/staging/prod): " environment case "$environment" in dev) @@ -154,7 +121,7 @@ set_deploy_env() { *) echo "Invalid environment name. Please enter dev, staging, or prod." ;; - esac + esac done } @@ -162,7 +129,7 @@ set_external_odk() { pretty_echo "External ODK Central Host" echo "Please enter the ODKCentral URL." - read -e -p "ODKCentral URL: " ODK_CENTRAL_URL + read -erp "ODKCentral URL: " ODK_CENTRAL_URL echo export ODK_CENTRAL_URL=${ODK_CENTRAL_URL} @@ -183,7 +150,7 @@ set_odk_user_creds() { pretty_echo "ODK User Credentials" echo "Please enter the ODKCentral Email." - read -e -p "ODKCentral Email: " ODK_CENTRAL_USER + read -erp "ODKCentral Email: " ODK_CENTRAL_USER echo export ODK_CENTRAL_USER=${ODK_CENTRAL_USER} @@ -192,7 +159,7 @@ set_odk_user_creds() { echo echo "Note: this must be >10 characters long." echo - read -e -p "ODKCentral Password: " ODK_CENTRAL_PASSWD + read -erp "ODKCentral Password: " ODK_CENTRAL_PASSWD echo # Check the length of the entered password @@ -211,38 +178,35 @@ check_external_database() { pretty_echo "External Database" echo "Do you want to use an external database instead of local?" - while true - do - read -e -p "Enter y for external, anything else to continue: " externaldb + while true; do + read -erp "Enter y for external, anything else to continue: " externaldb - if [ "$externaldb" = "y" ] - then + if [ "$externaldb" = "y" ]; then EXTERNAL_DB="True" echo "Using external database." fi break done - if [ "$EXTERNAL_DB" = "True" ] - then + if [ "$EXTERNAL_DB" = "True" ]; then echo echo "Please enter the database host." - read -e -p "FMTM DB Host: " FMTM_DB_HOST + read -erp "FMTM DB Host: " FMTM_DB_HOST echo export FMTM_DB_HOST=${FMTM_DB_HOST} echo "Please enter the database name." - read -e -p "FMTM DB Name: " FMTM_DB_NAME + read -erp "FMTM DB Name: " FMTM_DB_NAME echo export FMTM_DB_NAME=${FMTM_DB_NAME} echo "Please enter the database user." - read -e -p "FMTM DB User: " FMTM_DB_USER + read -erp "FMTM DB User: " FMTM_DB_USER echo export FMTM_DB_USER=${FMTM_DB_USER} echo "Please enter the database password." - read -e -p "FMTM DB Password: " FMTM_DB_PASSWORD + read -erp "FMTM DB Password: " FMTM_DB_PASSWORD echo export FMTM_DB_PASSWORD=${FMTM_DB_PASSWORD} @@ -255,17 +219,17 @@ set_external_s3() { pretty_echo "S3 Credentials" echo "Please enter the S3 host endpoint." - read -e -p "S3 Endpoint: " S3_ENDPOINT + read -erp "S3 Endpoint: " S3_ENDPOINT echo export S3_ENDPOINT=${S3_ENDPOINT} echo "Please enter the access key." - read -e -p "S3 Access Key: " S3_ACCESS_KEY + read -erp "S3 Access Key: " S3_ACCESS_KEY echo export S3_ACCESS_KEY=${S3_ACCESS_KEY} echo "Please enter the secret key." - read -e -p "S3 Secret Key: " S3_SECRET_KEY + read -erp "S3 Secret Key: " S3_SECRET_KEY echo export S3_SECRET_KEY=${S3_SECRET_KEY} @@ -275,7 +239,7 @@ set_external_s3() { echo "The bucket should be public." echo echo "Please enter the bucket name." 
- read -e -p "S3 Bucket Name: " S3_BUCKET_NAME + read -erp "S3 Bucket Name: " S3_BUCKET_NAME echo export S3_BUCKET_NAME=${S3_BUCKET_NAME} fi @@ -293,12 +257,10 @@ set_domains() { pretty_echo "FMTM Domain Name" echo "To run FMTM you must own a domain name." - while true - do - read -e -p "Enter a valid domain name you wish to run FMTM from: " fmtm_domain + while true; do + read -erp "Enter a valid domain name you wish to run FMTM from: " fmtm_domain - if [ "$fmtm_domain" = "" ] - then + if [ "$fmtm_domain" = "" ]; then echo "Invalid input!" else export FMTM_DOMAIN="${fmtm_domain}" @@ -317,26 +279,23 @@ set_domains() { echo "$fmtm_domain --> $current_ip" echo "api.$fmtm_domain --> $current_ip" - if [ "$BRANCH_NAME" != "main" ] - then + if [ "$BRANCH_NAME" != "main" ]; then echo "s3.$fmtm_domain --> $current_ip" echo "odk.$fmtm_domain --> $current_ip" fi echo - read -e -p "Once these DNS entries are set and valid, press ENTER to continue." valid + read -erp "Once these DNS entries are set and valid, press ENTER to continue." pretty_echo "Certificates" echo "FMTM will automatically generate SSL (HTTPS) certificates for your domain name." - while true - do + while true; do echo "Enter an email address you wish to use for certificate generation." - read -e -p "This will be used by LetsEncrypt, but for no other purpose: " cert_email + read -erp "This will be used by LetsEncrypt, but for no other purpose: " cert_email - if [ "$cert_email" = "" ] - then + if [ "$cert_email" = "" ]; then echo "Invalid input!" - else + else export CERT_EMAIL="${cert_email}" break fi @@ -359,9 +318,9 @@ set_osm_credentials() { echo "Please enter your OSM authentication details" echo - read -e -p "Client ID: " OSM_CLIENT_ID + read -erp "Client ID: " OSM_CLIENT_ID echo - read -e -p "Client Secret: " OSM_CLIENT_SECRET + read -erp "Client Secret: " OSM_CLIENT_SECRET export OSM_CLIENT_ID=${OSM_CLIENT_ID} export OSM_CLIENT_SECRET=${OSM_CLIENT_SECRET} @@ -373,7 +332,7 @@ check_change_port() { pretty_echo "Set Default Port" echo "The default port for local development is 7050." echo - read -e -p "Enter a different port if required, or nothing for default: " fmtm_port + read -erp "Enter a different port if required, or nothing for default: " fmtm_port if [ -n "$fmtm_port" ]; then echo "Using $fmtm_port" @@ -389,15 +348,15 @@ generate_dotenv() { if [ -f ./.env.example ]; then echo ".env.example already exists. Continuing." - echo "substituting variables from .env.example --> ${DOTENV_PATH}" - ./envsubst < .env.example > ${DOTENV_PATH} + echo "substituting variables from .env.example --> ${DOTENV_NAME}" + ./envsubst < .env.example > ${DOTENV_NAME} else echo "Downloading .env.example from repo." 
echo curl -LO "https://raw.githubusercontent.com/hotosm/fmtm/${BRANCH_NAME:-development}/.env.example" - echo "substituting variables from .env.example --> ${DOTENV_PATH}" - ./envsubst < .env.example > ${DOTENV_PATH} + echo "substituting variables from .env.example --> ${DOTENV_NAME}" + ./envsubst < .env.example > ${DOTENV_NAME} echo "Deleting .env.example" rm .env.example @@ -413,8 +372,7 @@ prompt_user_gen_dotenv() { if [ $IS_TEST != true ]; then set_deploy_env - if [ "$BRANCH_NAME" == "main" ] - then + if [ "$BRANCH_NAME" == "main" ]; then set_external_odk check_external_database set_external_s3 diff --git a/scripts/renew-certs-manual.sh b/scripts/renew-certs-manual.sh index 0b380c670c..6bf6195378 100644 --- a/scripts/renew-certs-manual.sh +++ b/scripts/renew-certs-manual.sh @@ -12,7 +12,7 @@ cleanup_and_exit() { trap cleanup_and_exit INT # Prompt the user for input and set the BRANCH_NAME variable -read -p "Enter the environment (dev/staging/prod): " ENVIRONMENT +read -erp "Enter the environment (dev/staging/prod): " ENVIRONMENT case "$ENVIRONMENT" in dev) diff --git a/scripts/setup/docker.sh b/scripts/setup/docker.sh index d95afb7cac..5631eefa60 100644 --- a/scripts/setup/docker.sh +++ b/scripts/setup/docker.sh @@ -1,10 +1,15 @@ #!/bin/bash +# Script installs Docker in rootless mode for the current user + # Tested for Debian 11 Bookworm & Ubuntu 22.04 LTS -# Note: this script must be run as a non-root user -# Note: The user must be logged in directly (not via su) -OS_NAME="debian" +# Note # +# This script must be run as any user other than 'root' +# However they must have 'sudo' access to run the script +# The user must also be logged in directly (not via su) +# +# If ran as root, the user fmtm is created instead pretty_echo() { local message="$1" @@ -115,18 +120,38 @@ apt_install_docker() { } check_user_not_root() { - pretty_echo "Use non-root user" - if [ "$(id -u)" -eq 0 ]; then - if id "fmtm" &>/dev/null; then - echo "Current user is root. Switching to existing non-privileged user 'fmtm'." + + pretty_echo "Use non-root user" + + echo "Current user is root." + echo "This script must run as a non-privileged user account." + echo + + if id "svcfmtm" &>/dev/null; then + echo "User 'svcfmtm' found." else - echo "Current user is root. Creating a non-privileged user 'fmtm'." - useradd -m -s /bin/bash fmtm + echo "Creating user 'svcfmtm'." + useradd -m -d /home/svcfmtm -s /bin/bash svcfmtm 2>/dev/null fi - echo "Rerunning this script as user 'fmtm'." - sudo -u fmtm bash -c "$0 $*" + echo + echo "Temporarily adding to sudoers list." + echo "svcfmtm ALL=(ALL) NOPASSWD:ALL" | tee /etc/sudoers.d/fmtm-sudoers >/dev/null + + # User called script directly, copy to /home/svcfmtm/docker.sh + root_script_path="$(readlink -f "$0")" + user_script_path="/home/svcfmtm/$(basename "$0")" + cp "$root_script_path" "$user_script_path" + chown svcfmtm:svcfmtm "$user_script_path" + chmod +x "$user_script_path" + + + echo + echo "Rerunning this script as user 'svcfmtm'." + echo + + sudo -u svcfmtm bash -c "${user_script_path} $*" exit 0 fi } @@ -140,8 +165,9 @@ update_to_rootless() { } restart_docker_rootless() { - heading_echo "Restarting Docker Service" + pretty_echo "Restarting Docker Service" echo "This is required as sometimes docker doesn't init correctly." 
+ sleep 5 systemctl --user daemon-reload systemctl --user restart docker echo @@ -169,8 +195,8 @@ EOF add_vars_to_bashrc() { # DOCKER_HOST must be added to the top of bashrc, as running non-interactively # Most distros exit .bashrc execution is non-interactive - - heading_echo "Adding rootless DOCKER_HOST to bashrc" + + pretty_echo "Adding rootless DOCKER_HOST to bashrc" user_id=$(id -u) docker_host_var="export DOCKER_HOST=unix:///run/user/$user_id/docker.sock" @@ -188,7 +214,7 @@ add_vars_to_bashrc() { echo "Done" echo - heading_echo "Adding dc='docker compose' alias" + pretty_echo "Adding dc='docker compose' alias" # Check if the alias already exists in user's .bashrc if ! grep -q "$dc_alias_cmd" ~/.bashrc; then @@ -207,6 +233,28 @@ add_vars_to_bashrc() { echo "Done" } +remove_from_sudoers() { + pretty_echo "Remove from sudoers" + + echo "This script installed docker for user svcfmtm" + echo + echo "The user will now have sudo access revoked" + echo + sudo rm /etc/sudoers.d/fmtm-sudoers + + echo + echo "You must exit (login or ssh) your session, then login as user svcfmtm" + echo + echo "You may need to add an authorized key for the user svcfmtm first:" + echo + echo " mkdir /home/svcfmtm/.ssh" + echo " cp ~/.ssh/authorized_keys /home/svcfmtm/.ssh/authorized_keys" + echo " chown svcfmtm:svcfmtm /home/svcfmtm/.ssh/authorized_keys" + echo " chmod 600 /home/svcfmtm/.ssh/authorized_keys" + echo + echo +} + install_docker() { check_os remove_old_docker_installs @@ -221,6 +269,7 @@ install_docker() { add_vars_to_bashrc } -check_user_not_root +check_user_not_root "$@" trap cleanup_and_exit INT install_docker +remove_from_sudoers diff --git a/scripts/setup/podman.sh b/scripts/setup/podman.sh index 3bf6b79d44..f4e3960fd8 100644 --- a/scripts/setup/podman.sh +++ b/scripts/setup/podman.sh @@ -27,7 +27,6 @@ check_os() { source /etc/os-release case "$ID" in debian) - IS_DEBIAN=true echo "Current OS is ${PRETTY_NAME}." ;; ubuntu) diff --git a/src/backend/Dockerfile b/src/backend/Dockerfile index 72df47ae73..9035aedafe 100644 --- a/src/backend/Dockerfile +++ b/src/backend/Dockerfile @@ -154,7 +154,7 @@ RUN pip install --user --upgrade --no-warn-script-location \ && rm -r /opt/python CMD ["python", "-m", "debugpy", "--listen", "0.0.0.0:5678", \ "-m", "uvicorn", "app.main:api", \ - "--host", "0.0.0.0", "--port", "8000", \ + "--host", "0.0.0.0", "--port", "8000", "--workers", "1", \ "--reload", "--log-level", "critical", "--no-access-log"] @@ -190,6 +190,6 @@ CMD ["sleep", "infinity"] FROM runtime as prod # Pre-compile packages to .pyc (init speed gains) RUN python -c "import compileall; compileall.compile_path(maxlevels=10, quiet=1)" -# Note: 4 uvicorn workers as running with docker, change to 1 worker for Kubernetes +# Note: 1 worker (process) per container, behind load balancer CMD ["uvicorn", "app.main:api", "--host", "0.0.0.0", "--port", "8000", \ - "--workers", "4", "--log-level", "critical", "--no-access-log"] + "--workers", "1", "--log-level", "critical", "--no-access-log"] diff --git a/src/backend/app-entrypoint.sh b/src/backend/app-entrypoint.sh index 2080c57e46..be05ecb68d 100644 --- a/src/backend/app-entrypoint.sh +++ b/src/backend/app-entrypoint.sh @@ -7,7 +7,7 @@ wait_for_db() { retry_interval=5 for ((i = 0; i < max_retries; i++)); do - if /dev/null; then + if curl --silent -I "${S3_ENDPOINT:-http://s3:9000}" >/dev/null; then echo "S3 is available." 
return 0 # S3 is available, exit successfully fi diff --git a/src/backend/app/auth/auth_routes.py b/src/backend/app/auth/auth_routes.py index 205294f224..573f173c0f 100644 --- a/src/backend/app/auth/auth_routes.py +++ b/src/backend/app/auth/auth_routes.py @@ -133,6 +133,7 @@ async def my_data( """Read access token and get user details from OSM. Args: + request: The HTTP request (automatically included variable). db: The db session. user_data: User data provided by osm-login-python Auth. @@ -162,9 +163,13 @@ async def my_data( ) db.add(db_user) db.commit() + # Append role + user_data["role"] = db_user.role else: if user_data.get("img_url"): user.profile_img = user_data["img_url"] db.commit() + # Append role + user_data["role"] = user.role return JSONResponse(content={"user_data": user_data}, status_code=200) diff --git a/src/backend/app/auth/osm.py b/src/backend/app/auth/osm.py index 117de5e7b2..d64ebf561b 100644 --- a/src/backend/app/auth/osm.py +++ b/src/backend/app/auth/osm.py @@ -24,9 +24,10 @@ from fastapi import Header, HTTPException, Request from loguru import logger as log from osm_login_python.core import Auth -from pydantic import BaseModel +from pydantic import BaseModel, ConfigDict from app.config import settings +from app.models.enums import UserRole if settings.DEBUG: # Required as callback url is http during dev @@ -34,12 +35,18 @@ class AuthUser(BaseModel): + """The user model returned from OSM OAuth2.""" + + model_config = ConfigDict(use_enum_values=True) + id: int username: str - img_url: Optional[str] + img_url: Optional[str] = None + role: Optional[UserRole] = None -def init_osm_auth(): +async def init_osm_auth(): + """Initialise Auth object from osm-login-python.""" return Auth( osm_url=settings.OSM_URL, client_id=settings.OSM_CLIENT_ID, @@ -50,8 +57,17 @@ def init_osm_auth(): ) -def login_required(request: Request, access_token: str = Header(None)): - osm_auth = init_osm_auth() +async def login_required( + request: Request, access_token: str = Header(None) +) -> AuthUser: + """Dependency to inject into endpoints requiring login.""" + if settings.DEBUG: + return AuthUser( + id=20386219, + username="svcfmtm", + ) + + osm_auth = await init_osm_auth() # Attempt extract from cookie if access token not passed if not access_token: @@ -62,4 +78,11 @@ def login_required(request: Request, access_token: str = Header(None)): if not access_token: raise HTTPException(status_code=401, detail="No access token provided") - return osm_auth.deserialize_access_token(access_token) + try: + osm_user = osm_auth.deserialize_access_token(access_token) + except ValueError as e: + log.error(e) + log.error("Failed to deserialise access token") + raise HTTPException(status_code=401, detail="Access token not valid") from e + + return osm_user diff --git a/src/backend/app/auth/roles.py b/src/backend/app/auth/roles.py new file mode 100644 index 0000000000..17c16c2191 --- /dev/null +++ b/src/backend/app/auth/roles.py @@ -0,0 +1,165 @@ +# Copyright (c) 2022, 2023 Humanitarian OpenStreetMap Team +# +# This file is part of FMTM. +# +# FMTM is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# FMTM is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with FMTM. If not, see . +# + +"""User roles authorisation Depends methods. + +These methods use FastAPI Depends for dependency injection +and always return an AuthUser object in a standard format. +""" + +from typing import Optional + +from fastapi import Depends, HTTPException +from loguru import logger as log +from sqlalchemy.orm import Session + +from app.auth.osm import AuthUser, login_required +from app.db.database import get_db +from app.db.db_models import DbProject, DbUser, DbUserRoles, organisation_managers +from app.models.enums import HTTPStatus, ProjectRole, UserRole +from app.organisations.organisation_deps import check_org_exists +from app.projects.project_deps import get_project_by_id + + +async def get_uid(user_data: AuthUser) -> int: + """Extract user id from returned OSM user.""" + if user_id := user_data.get("id"): + return user_id + else: + log.error(f"Failed to get user id from auth object: {user_data}") + raise HTTPException( + status_code=HTTPStatus.UNAUTHORIZED, + detail="Auth failed. No user id present", + ) + + +async def check_super_admin( + db: Session, + user: [AuthUser, int], +) -> DbUser: + """Database check to determine if super admin role.""" + if isinstance(user, int): + user_id = user + else: + user_id = await get_uid(user) + return db.query(DbUser).filter_by(id=user_id, role=UserRole.ADMIN).first() + + +async def super_admin( + user_data: AuthUser = Depends(login_required), + db: Session = Depends(get_db), +) -> AuthUser: + """Super admin role, with access to all endpoints.""" + super_admin = await check_super_admin(db, user_data) + + if not super_admin: + log.error( + f"User {user_data.get('username')} requested an admin endpoint, " + "but is not admin" + ) + raise HTTPException( + status_code=HTTPStatus.FORBIDDEN, detail="User must be an administrator" + ) + + return user_data + + +async def check_org_admin( + db: Session, + user: [AuthUser, int], + project: Optional[DbProject], + org_id: Optional[int], +) -> DbUser: + """Database check to determine if org admin role.""" + if isinstance(user, int): + user_id = user + else: + user_id = await get_uid(user) + + if project: + org_id = db.query(DbProject).filter_by(id=project.id).first().organisation_id + + # Check org exists + await check_org_exists(db, org_id) + + # If user is admin, skip checks + if await check_super_admin(db, user): + return user + + return ( + db.query(organisation_managers) + .filter_by(organisation_id=org_id, user_id=user_id) + .first() + ) + + +async def org_admin( + project: DbProject = Depends(get_project_by_id), + org_id: int = None, + db: Session = Depends(get_db), + user_data: AuthUser = Depends(login_required), +) -> AuthUser: + """Organisation admin with full permission for projects in an organisation.""" + if project and org_id: + log.error("Both org_id and project_id cannot be passed at the same time") + raise HTTPException( + status_code=HTTPStatus.BAD_REQUEST, + detail="Both org_id and project_id cannot be passed at the same time", + ) + + org_admin = await check_org_admin(db, user_data, project, org_id) + + if not org_admin: + log.error(f"User {user_data} is not an admin for organisation {org_id}") + raise HTTPException( + status_code=HTTPStatus.FORBIDDEN, + detail="User is not organisation admin", + ) + + return user_data + + +async def validator( + project_id: int, + db: Session = Depends(get_db), + user_data: AuthUser = 
Depends(login_required), +) -> AuthUser: + """A validator for a specific project.""" + user_id = await get_uid(user_data) + + match = ( + db.query(DbUserRoles).filter_by(user_id=user_id, project_id=project_id).first() + ) + + if not match: + log.error(f"User ID {user_id} has no access to project ID {project_id}") + raise HTTPException( + status_code=HTTPStatus.FORBIDDEN, detail="User has no access to project" + ) + + if match.role.value < ProjectRole.VALIDATOR.value: + log.error( + f"User ID {user_id} does not have validator permission" + f"for project ID {project_id}" + ) + raise HTTPException( + status_code=HTTPStatus.FORBIDDEN, + detail="User is not a validator for this project", + ) + + return user_data diff --git a/src/backend/app/central/central_crud.py b/src/backend/app/central/central_crud.py index e0a2ee6ba1..5b873905ee 100644 --- a/src/backend/app/central/central_crud.py +++ b/src/backend/app/central/central_crud.py @@ -33,9 +33,9 @@ from pyxform.xls2xform import xls2xform_convert from sqlalchemy.orm import Session -from ..config import settings -from ..db import db_models -from ..projects import project_schemas +from app.config import settings +from app.db import db_models +from app.projects import project_schemas def get_odk_project(odk_central: project_schemas.ODKCentral = None): @@ -161,10 +161,10 @@ async def delete_odk_project( return "Could not delete project from central odk" -def create_appuser( +def create_odk_app_user( project_id: int, name: str, odk_credentials: project_schemas.ODKCentral = None ): - """Create an app-user on a remote ODK Server. + """Create an app user specific to a project on ODK Central. If odk credentials of the project are provided, use them to create an app user. """ @@ -180,23 +180,23 @@ def create_appuser( user = settings.ODK_CENTRAL_USER pw = settings.ODK_CENTRAL_PASSWD - app_user = OdkAppUser(url, user, pw) + odk_app_user = OdkAppUser(url, user, pw) log.debug( "ODKCentral: attempting user creation: name: " f"{name} | project: {project_id}" ) - result = app_user.create(project_id, name) + result = odk_app_user.create(project_id, name) log.debug(f"ODKCentral response: {result.json()}") return result -def delete_app_user( +def delete_odk_app_user( project_id: int, name: str, odk_central: project_schemas.ODKCentral = None ): """Delete an app-user from a remote ODK Server.""" - appuser = get_odk_app_user(odk_central) - result = appuser.delete(project_id, name) + odk_app_user = get_odk_app_user(odk_central) + result = odk_app_user.delete(project_id, name) return result @@ -537,22 +537,26 @@ def generate_updated_xform( return outfile -async def create_qrcode( +async def encode_qrcode_json( project_id: int, token: str, name: str, odk_central_url: str = None ): - """Create the QR Code for an app-user.""" + """Assemble the ODK Collect JSON and base64 encode. + + The base64 encoded string is used to generate a QR code later. + """ if not odk_central_url: log.debug("ODKCentral connection variables not set in function") log.debug("Attempting extraction from environment variables") odk_central_url = settings.ODK_CENTRAL_URL - # Qr code text json in the format acceptable by odk collect. 
+ # QR code text json in the format acceptable by odk collect qr_code_setting = { "general": { "server_url": f"{odk_central_url}/v1/key/{token}/projects/{project_id}", "form_update_mode": "match_exactly", "basemap_source": "osm", "autosend": "wifi_and_cellular", + "metadata_username": "svcfmtm", }, "project": {"name": f"{name}"}, "admin": {}, diff --git a/src/backend/app/central/central_routes.py b/src/backend/app/central/central_routes.py index 60933cf45f..60e7f13997 100644 --- a/src/backend/app/central/central_routes.py +++ b/src/backend/app/central/central_routes.py @@ -15,6 +15,8 @@ # You should have received a copy of the GNU General Public License # along with FMTM. If not, see . # +"""Routes to relay requests to ODK Central server.""" + import json from fastapi import APIRouter, Depends, HTTPException @@ -29,14 +31,13 @@ from sqlalchemy.orm import Session from sqlalchemy.sql import text -from ..central import central_crud -from ..db import database -from ..projects import project_crud, project_schemas +from app.central import central_crud +from app.db import database +from app.projects import project_schemas router = APIRouter( prefix="/central", tags=["central"], - dependencies=[Depends(database.get_db)], responses={404: {"description": "Not found"}}, ) @@ -52,40 +53,25 @@ async def list_projects(): return JSONResponse(content={"projects": projects}) -@router.get("/appuser") -async def create_appuser( - project_id: int, - name: str, - db: Session = Depends(database.get_db), -): - """Create an appuser in Central.""" - appuser = central_crud.create_appuser(project_id, name=name) - return await project_crud.create_qrcode(db, project_id, appuser.get("token"), name) - - -# @router.get("/list_submissions") -# async def list_submissions(project_id: int): -# """List the submissions data from Central""" -# submissions = central_crud.list_submissions(project_id) -# log.info("/central/list_submissions is Unimplemented!") -# return {"data": submissions} - - @router.get("/list-forms") async def get_form_lists( db: Session = Depends(database.get_db), skip: int = 0, limit: int = 100 ): - """This function retrieves a list of XForms from a database, - with the option to skip a certain number of records and limit the number of records returned. + """Get a list of all XForms on ODK Central. + + Option to skip a certain number of records and limit the number of + records returned. Parameters: - skip:int: the number of records to skip before starting to retrieve records. Defaults to 0 if not provided. - limit:int: the maximum number of records to retrieve. Defaults to 10 if not provided. + skip (int): the number of records to skip before starting to retrieve records. + Defaults to 0 if not provided. + limit (int): the maximum number of records to retrieve. + Defaults to 10 if not provided. Returns: - A list of dictionary containing the id and title of each XForm record retrieved from the database. + list[dict]: list of id:title dicts of each XForm record. 
""" # NOTE runs in separate thread using run_in_threadpool forms = await run_in_threadpool(lambda: central_crud.get_form_list(db, skip, limit)) @@ -138,7 +124,8 @@ async def list_submissions( project_id: int, xml_form_id: str = None, db: Session = Depends(database.get_db), -): +) -> list[dict]: + """Get all submissions JSONs for a project.""" try: project = table( "projects", @@ -188,20 +175,21 @@ async def list_submissions( @router.get("/submission") async def get_submission( project_id: int, - xmlFormId: str = None, + xml_form_id: str = None, submission_id: str = None, db: Session = Depends(database.get_db), -): - """This api returns the submission json. +) -> dict: + """Return the submission JSON for a single XForm. Parameters: - project_id:int the id of the project in the database. - xml_form_id:str: the xmlFormId of the form in Central. - submission_id:str: the submission id of the submission in Central. + project_id (int): the id of the project in the database. + xml_form_id (str): the xml_form_id of the form in Central. + submission_id (str): the submission id of the submission in Central. If the submission_id is provided, an individual submission is returned. - Returns: Submission json. + Returns: + dict: Submission JSON. """ try: """Download the submissions data from Central.""" @@ -231,9 +219,9 @@ async def get_submission( submissions = [] - if xmlFormId and submission_id: + if xml_form_id and submission_id: data = central_crud.download_submissions( - first.odkid, xmlFormId, submission_id, True, odk_credentials + first.odkid, xml_form_id, submission_id, True, odk_credentials ) if submissions != 0: submissions.append(json.loads(data[0])) @@ -242,7 +230,7 @@ async def get_submission( submissions.append(json.loads(data[entry])) else: - if not xmlFormId: + if not xml_form_id: xforms = central_crud.list_odk_xforms(first.odkid, odk_credentials) for xform in xforms: try: @@ -262,7 +250,7 @@ async def get_submission( submissions.append(json.loads(data[entry])) else: data = central_crud.download_submissions( - first.odkid, xmlFormId, None, True, odk_credentials + first.odkid, xml_form_id, None, True, odk_credentials ) submissions.append(json.loads(data[0])) if len(data) >= 2: @@ -275,27 +263,3 @@ async def get_submission( except Exception as e: log.error(e) raise HTTPException(status_code=500, detail=str(e)) from e - - -# @router.get("/upload") -# async def upload_project_files( -# project_id: int, -# filespec: str -# ): -# """Upload the XForm and data files to Central""" -# log.warning("/central/upload is Unimplemented!") -# return {"message": "Hello World from /central/upload"} - - -# @router.get("/download") -# async def download_project_files( -# project_id: int, -# type: central_schemas.CentralFileType -# ): -# """Download the project data files from Central. The filespec is -# a string that can contain multiple filenames separated by a comma. -# """ -# # FileResponse("README.md") -# # xxx = central_crud.does_central_exist() -# log.warning("/central/download is Unimplemented!") -# return {"message": "Hello World from /central/download"} diff --git a/src/backend/app/central/central_schemas.py b/src/backend/app/central/central_schemas.py index d4157ac229..c9aa2a676c 100644 --- a/src/backend/app/central/central_schemas.py +++ b/src/backend/app/central/central_schemas.py @@ -15,28 +15,40 @@ # You should have received a copy of the GNU General Public License # along with FMTM. If not, see . 
# +"""Schemas for returned ODK Central objects.""" + from enum import Enum from pydantic import BaseModel class CentralBase(BaseModel): + """ODK Central return.""" + central_url: str class Central(CentralBase): + """ODK Central return, with extras.""" + geometry_geojson: str # qr_code_binary: bytes class CentralOut(CentralBase): + """ODK Central output.""" + pass class CentralFileType(BaseModel): + """ODK Central file return.""" + filetype: Enum("FileType", ["xform", "extract", "zip", "xlsform", "all"]) pass class CentralDetails(CentralBase): + """ODK Central details.""" + pass diff --git a/src/backend/app/db/db_models.py b/src/backend/app/db/db_models.py index 35bd3d7724..dccea2e90c 100644 --- a/src/backend/app/db/db_models.py +++ b/src/backend/app/db/db_models.py @@ -43,15 +43,18 @@ relationship, ) +from app.db.database import Base, FmtmMetadata +from app.db.postgis_utils import timestamp from app.models.enums import ( BackgroundTaskStatus, MappingLevel, MappingPermission, OrganisationType, ProjectPriority, + ProjectRole, ProjectStatus, + ProjectVisibility, TaskAction, - TaskCreationMode, TaskSplitType, TaskStatus, TeamVisibility, @@ -59,8 +62,21 @@ ValidationPermission, ) -from .database import Base, FmtmMetadata -from .postgis_utils import timestamp + +class DbUserRoles(Base): + """Fine grained user access for projects, described by roles.""" + + __tablename__ = "user_roles" + + # Table has composite PK on (user_id and project_id) + user_id = Column(BigInteger, ForeignKey("users.id"), primary_key=True) + project_id = Column( + Integer, + ForeignKey("projects.id"), + index=True, + primary_key=True, + ) + role = Column(Enum(ProjectRole), default=UserRole.MAPPER) class DbUser(Base): @@ -72,6 +88,9 @@ class DbUser(Base): username = Column(String, unique=True) profile_img = Column(String) role = Column(Enum(UserRole), default=UserRole.MAPPER) + project_roles = relationship( + DbUserRoles, backref="user_roles_link", cascade="all, delete, delete-orphan" + ) name = Column(String) city = Column(String) @@ -128,7 +147,7 @@ class DbOrganisation(Base): description = Column(String) url = Column(String) type = Column(Enum(OrganisationType), default=OrganisationType.FREE, nullable=False) - # subscription_tier = Column(Integer) + approved = Column(Boolean, default=False) managers = relationship( DbUser, @@ -159,16 +178,6 @@ class DbTeam(Base): organisation = relationship(DbOrganisation, backref="teams") -# Secondary table defining many-to-many join for -# private projects that only defined users can map on -project_allowed_users = Table( - "project_allowed_users", - FmtmMetadata, - Column("project_id", Integer, ForeignKey("projects.id")), - Column("user_id", BigInteger, ForeignKey("users.id")), -) - - class DbProjectTeams(Base): """Link table between teams and projects.""" @@ -413,9 +422,8 @@ class DbProject(Base): ) author = relationship(DbUser, uselist=False, backref="user") created = Column(DateTime, default=timestamp, nullable=False) - task_creation_mode = Column( - Enum(TaskCreationMode), default=TaskCreationMode.UPLOAD, nullable=False - ) + + task_split_type = Column(Enum(TaskSplitType), nullable=True) # split_strategy = Column(Integer) # grid_meters = Column(Integer) # task_type = Column(Integer) @@ -435,16 +443,24 @@ class DbProject(Base): # GEOMETRY outline = Column(Geometry("POLYGON", srid=4326)) # geometry = Column(Geometry("POLYGON", srid=4326, from_text='ST_GeomFromWkt')) - # TODO add outline_geojson as computed @property + centroid = Column(Geometry("POINT", srid=4326)) # PROJECT STATUS 
last_updated = Column(DateTime, default=timestamp) status = Column(Enum(ProjectStatus), default=ProjectStatus.DRAFT, nullable=False) + visibility = Column( + Enum(ProjectVisibility), default=ProjectVisibility.PUBLIC, nullable=False + ) total_tasks = Column(Integer) # tasks_mapped = Column(Integer, default=0, nullable=False) # tasks_validated = Column(Integer, default=0, nullable=False) # tasks_bad_imagery = Column(Integer, default=0, nullable=False) + # Roles + roles = relationship( + DbUserRoles, backref="project_roles_link", cascade="all, delete, delete-orphan" + ) + # TASKS tasks = relationship( DbTask, backref="projects", cascade="all, delete, delete-orphan" @@ -484,9 +500,6 @@ def tasks_bad(self): ) # XFORM DETAILS - # TODO This field was probably replaced by odk_central_url - # TODO remove in a migration - odk_central_src = Column(String, default="") xform_title = Column(String, ForeignKey("xlsforms.title", name="fk_xform")) xform = relationship(DbXForm) @@ -495,10 +508,6 @@ def tasks_bad(self): {}, ) - ## ---------------------------------------------- ## - # FOR REFERENCE: OTHER ATTRIBUTES IN TASKING MANAGER - # PROJECT ACCESS - private = Column(Boolean, default=False) # Only allowed users can validate mapper_level = Column( Enum(MappingLevel), default=MappingLevel.INTERMEDIATE, @@ -513,31 +522,13 @@ def tasks_bad(self): validation_permission = Column( Enum(ValidationPermission), default=ValidationPermission.LEVEL ) # Means only users with validator role can validate - allowed_users = relationship(DbUser, secondary=project_allowed_users) organisation_id = Column( Integer, ForeignKey("organisations.id", name="fk_organisations"), index=True, ) organisation = relationship(DbOrganisation, backref="projects") - # PROJECT DETAILS - due_date = Column(DateTime) changeset_comment = Column(String) - osmcha_filter_id = Column( - String - ) # Optional custom filter id for filtering on OSMCha - imagery = Column(String) - osm_preset = Column(String) - odk_preset = Column(String) - josm_preset = Column(String) - id_presets = Column(ARRAY(String)) - extra_id_params = Column(String) - license_id = Column(Integer, ForeignKey("licenses.id", name="fk_licenses")) - # GEOMETRY - centroid = Column(Geometry("POINT", srid=4326)) - # country = Column(ARRAY(String), default=[]) - # FEEDBACK - project_chat = relationship(DbProjectChat, lazy="dynamic", cascade="all") ## Odk central server odk_central_url = Column(String) @@ -558,6 +549,24 @@ def tasks_bad(self): hashtags = Column(ARRAY(String)) # Project hashtag + ## ---------------------------------------------- ## + # FOR REFERENCE: OTHER ATTRIBUTES IN TASKING MANAGER + imagery = Column(String) + osm_preset = Column(String) + odk_preset = Column(String) + josm_preset = Column(String) + id_presets = Column(ARRAY(String)) + extra_id_params = Column(String) + license_id = Column(Integer, ForeignKey("licenses.id", name="fk_licenses")) + # GEOMETRY + # country = Column(ARRAY(String), default=[]) + # FEEDBACK + project_chat = relationship(DbProjectChat, lazy="dynamic", cascade="all") + osmcha_filter_id = Column( + String + ) # Optional custom filter id for filtering on OSMCha + due_date = Column(DateTime) + # TODO: Add index on project geometry, tried to add in __table args__ # Index("idx_geometry", DbProject.geometry, postgresql_using="gist") @@ -623,20 +632,6 @@ class BackgroundTasks(Base): message = Column(String) -class DbUserRoles(Base): - """Fine grained user control for projects, described by roles.""" - - __tablename__ = "user_roles" - - user_id = 
Column(BigInteger, ForeignKey("users.id"), primary_key=True) - user = relationship(DbUser, backref="user_roles") - organization_id = Column(Integer, ForeignKey("organisations.id")) - organization = relationship(DbOrganisation, backref="user_roles") - project_id = Column(Integer, ForeignKey("projects.id")) - project = relationship(DbProject, backref="user_roles") - role = Column(Enum(UserRole), nullable=False) - - class DbTilesPath(Base): """Keeping track of mbtile basemaps for a project.""" diff --git a/src/backend/app/main.py b/src/backend/app/main.py index b45e5640ed..071106caed 100644 --- a/src/backend/app/main.py +++ b/src/backend/app/main.py @@ -34,10 +34,10 @@ from app.central import central_routes from app.config import settings from app.db.database import get_db -from app.organization import organization_routes +from app.organisations import organisation_routes from app.projects import project_routes from app.projects.project_crud import read_xlsforms -from app.submission import submission_routes +from app.submissions import submission_routes from app.tasks import tasks_routes from app.users import user_routes @@ -70,7 +70,7 @@ def get_application() -> FastAPI: version=__version__, license_info={ "name": "GPL-3.0-only", - "url": "https://raw.githubusercontent.com/hotosm/fmtm/main/LICENSE", + "url": "https://raw.githubusercontent.com/hotosm/fmtm/main/LICENSE.md", }, debug=settings.DEBUG, lifespan=lifespan, @@ -95,7 +95,7 @@ def get_application() -> FastAPI: _app.include_router(central_routes.router) _app.include_router(auth_routes.router) _app.include_router(submission_routes.router) - _app.include_router(organization_routes.router) + _app.include_router(organisation_routes.router) return _app diff --git a/src/backend/app/models/enums.py b/src/backend/app/models/enums.py index fda24b7adf..0b0525f5be 100644 --- a/src/backend/app/models/enums.py +++ b/src/backend/app/models/enums.py @@ -15,18 +15,45 @@ # You should have received a copy of the GNU General Public License # along with FMTM. If not, see . # +"""Enum definitions to translate values into human enum strings.""" from enum import Enum class StrEnum(str, Enum): + """Wrapper for string enums, until Python 3.11 upgrade.""" + pass class IntEnum(int, Enum): + """Wrapper for string enums, until Python 3.11 upgrade.""" + pass +class HTTPStatus(IntEnum): + """All HTTP status codes used in endpoints.""" + + # Success + OK = 200 + CREATED = 201 + ACCEPTED = 202 + NO_CONTENT = 204 + + # Client Error + BAD_REQUEST = 400 + UNAUTHORIZED = 401 + FORBIDDEN = 403 + NOT_FOUND = 404 + CONFLICT = 409 + UNPROCESSABLE_ENTITY = 422 + + # Server Error + INTERNAL_SERVER_ERROR = 500 + NOT_IMPLEMENTED = 501 + + class TeamVisibility(IntEnum, Enum): """Describes the visibility associated with an Team.""" @@ -60,14 +87,35 @@ class ProjectPriority(IntEnum, Enum): class UserRole(IntEnum, Enum): - """Describes the role a user can be assigned, app doesn't support multiple roles.""" + """Available roles assigned to a user site-wide in FMTM. + Can be used for global user permissions: + - READ_ONLY = write access blocked (i.e. banned) + - MAPPER = default for all + - ADMIN = super admin with access to everything + """ + + READ_ONLY = -1 MAPPER = 0 ADMIN = 1 - VALIDATOR = 2 - FIELD_ADMIN = 3 - ORGANIZATION_ADMIN = 4 - READ_ONLY = 5 + + +class ProjectRole(IntEnum, Enum): + """Available roles assigned to a user for a specific project. 
+ + All roles must be assigned by someone higher in the hierarchy: + - MAPPER = default for all + - VALIDATOR = can validate the mappers output + - FIELD_MANAGER = can invite mappers and organise people + - ASSOCIATE_PROJECT_MANAGER = helps the project manager, cannot delete project + - PROJECT_MANAGER = has all permissions to manage a project, including delete + """ + + MAPPER = 0 + VALIDATOR = 1 + FIELD_MANAGER = 2 + ASSOCIATE_PROJECT_MANAGER = 3 + PROJECT_MANAGER = 4 class MappingLevel(IntEnum, Enum): @@ -96,14 +144,6 @@ class ValidationPermission(IntEnum, Enum): TEAMS_LEVEL = 3 -class TaskCreationMode(IntEnum, Enum): - """Enum to describe task creation mode.""" - - GRID = 0 - ROADS = 1 - UPLOAD = 2 - - class TaskStatus(IntEnum, Enum): """Enum describing available Task Statuses.""" @@ -119,6 +159,7 @@ class TaskStatus(IntEnum, Enum): def verify_valid_status_update(old_status: TaskStatus, new_status: TaskStatus): + """Verify the status update is valid, inferred from previous state.""" if old_status is TaskStatus.READY: return new_status in [ TaskStatus.LOCKED_FOR_MAPPING, @@ -154,7 +195,7 @@ def verify_valid_status_update(old_status: TaskStatus, new_status: TaskStatus): class TaskAction(IntEnum, Enum): - """Describes the possible actions that can happen to to a task, that we'll record history for.""" + """All possible task actions, recorded in task history.""" RELEASED_FOR_MAPPING = 0 LOCKED_FOR_MAPPING = 1 @@ -169,6 +210,7 @@ class TaskAction(IntEnum, Enum): def is_status_change_action(task_action): + """Check if action is a valid status change type.""" return task_action in [ TaskAction.RELEASED_FOR_MAPPING, TaskAction.LOCKED_FOR_MAPPING, @@ -182,6 +224,7 @@ def is_status_change_action(task_action): def get_action_for_status_change(task_status: TaskStatus): + """Update task action inferred from previous state.""" return TaskAction.RELEASED_FOR_MAPPING # match task_status: # case TaskStatus.READY: @@ -201,12 +244,16 @@ def get_action_for_status_change(task_status: TaskStatus): class TaskType(IntEnum, Enum): + """Task type.""" + BUILDINGS = 0 AMENITIES = 1 OTHER = 2 class ProjectSplitStrategy(IntEnum, Enum): + """Task splitting type.""" + GRID = 0 OSM_VECTORS = 1 OTHER = 2 @@ -231,3 +278,11 @@ class TaskSplitType(IntEnum, Enum): DIVIDE_ON_SQUARE = 0 CHOOSE_AREA_AS_TASK = 1 TASK_SPLITTING_ALGORITHM = 2 + + +class ProjectVisibility(IntEnum, Enum): + """Enum describing task splitting type.""" + + PUBLIC = 0 + PRIVATE = 1 + INVITE_ONLY = 2 diff --git a/src/backend/app/models/languages_and_countries.py b/src/backend/app/models/languages_and_countries.py index 27f97890f1..d109fe937f 100644 --- a/src/backend/app/models/languages_and_countries.py +++ b/src/backend/app/models/languages_and_countries.py @@ -1,4 +1,7 @@ -# see https://gist.github.com/alexanderjulo/4073388 +"""Language and country codes for reference. 
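A short sketch of how the ordered ProjectRole values and the new HTTPStatus codes could be combined in a permission check; require_role is a hypothetical helper, not something defined in this patch, and relies only on IntEnum members comparing as integers.

# Hypothetical helper for illustration; assumes roles escalate with their int value.
from fastapi import HTTPException

from app.models.enums import HTTPStatus, ProjectRole


def require_role(user_role: ProjectRole, minimum: ProjectRole) -> None:
    """Raise 403 unless the user's project role is at least `minimum`."""
    if user_role < minimum:
        raise HTTPException(
            status_code=HTTPStatus.FORBIDDEN,
            detail=f"Requires {minimum.name}, but user is {user_role.name}",
        )


require_role(ProjectRole.VALIDATOR, ProjectRole.VALIDATOR)  # passes silently
# require_role(ProjectRole.MAPPER, ProjectRole.PROJECT_MANAGER)  # would raise 403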
+ +see https://gist.github.com/alexanderjulo/4073388 +""" languages = [ ("aa", "Afar"), @@ -34,7 +37,10 @@ ("zh", "Chinese"), ( "cu", - "Church Slavic; Old Slavonic; Church Slavonic; Old Bulgarian; Old Church Slavonic", + ( + "Church Slavic; Old Slavonic; Church Slavonic; Old Bulgarian; " + "Old Church Slavonic", + ), ), ("cv", "Chuvash"), ("kw", "Cornish"), diff --git a/src/backend/app/organization/__init__.py b/src/backend/app/organisations/__init__.py similarity index 100% rename from src/backend/app/organization/__init__.py rename to src/backend/app/organisations/__init__.py diff --git a/src/backend/app/organisations/organisation_crud.py b/src/backend/app/organisations/organisation_crud.py new file mode 100644 index 0000000000..87d7a2f5e8 --- /dev/null +++ b/src/backend/app/organisations/organisation_crud.py @@ -0,0 +1,231 @@ +# Copyright (c) 2022, 2023 Humanitarian OpenStreetMap Team +# +# This file is part of FMTM. +# +# FMTM is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# FMTM is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with FMTM. If not, see . +# +"""Logic for organisation management.""" + +from io import BytesIO + +from fastapi import HTTPException, Response, UploadFile +from loguru import logger as log +from sqlalchemy import update +from sqlalchemy.orm import Session + +from app.auth.osm import AuthUser +from app.auth.roles import check_super_admin +from app.config import settings +from app.db import db_models +from app.models.enums import HTTPStatus +from app.organisations.organisation_deps import ( + get_organisation_by_name, +) +from app.organisations.organisation_schemas import OrganisationEdit, OrganisationIn +from app.s3 import add_obj_to_bucket + + +async def get_organisations(db: Session, current_user: AuthUser, is_approved: bool): + """Get all orgs.""" + super_admin = await check_super_admin(db, current_user) + + if super_admin: + return db.query(db_models.DbOrganisation).filter_by(approved=is_approved).all() + + # If user not admin, only show approved orgs + return db.query(db_models.DbOrganisation).filter_by(approved=True).all() + + +async def upload_logo_to_s3( + db_org: db_models.DbOrganisation, logo_file: UploadFile(None) +) -> str: + """Upload logo using standardised /{org_id}/logo.png format. + + Browsers treat image mimetypes the same, regardless of extension, + so it should not matter if a .jpg is renamed .png. + + Args: + db_org(db_models.DbOrganisation): The organisation database object. + logo_file(UploadFile): The logo image uploaded to FastAPI. + + Returns: + logo_url(str): The S3 URL for the logo file. 
+ """ + logo_path = f"/{db_org.id}/logo.png" + + file_bytes = await logo_file.read() + file_obj = BytesIO(file_bytes) + + add_obj_to_bucket( + settings.S3_BUCKET_NAME, + file_obj, + logo_path, + content_type=logo_file.content_type, + ) + + logo_url = f"{settings.S3_DOWNLOAD_ROOT}/{settings.S3_BUCKET_NAME}{logo_path}" + + return logo_url + + +async def create_organisation( + db: Session, org_model: OrganisationIn, logo: UploadFile(None) +) -> db_models.DbOrganisation: + """Creates a new organisation with the given name, description, url, type, and logo. + + Saves the logo file S3 bucket under /{org_id}/logo.png. + + Args: + db (Session): database session + org_model (OrganisationIn): Pydantic model for organisation input. + logo (UploadFile, optional): logo file of the organisation. + Defaults to File(...). + + Returns: + DbOrganisation: SQLAlchemy Organisation model. + """ + if await get_organisation_by_name(db, org_name=org_model.name): + raise HTTPException( + status_code=HTTPStatus.CONFLICT, + detail=f"Organisation already exists with the name {org_model.name}", + ) + + # Required to check if exists on error + db_organisation = None + + try: + # Create new organisation without logo set + db_organisation = db_models.DbOrganisation(**org_model.dict()) + + db.add(db_organisation) + db.commit() + # Refresh to get the assigned org id + db.refresh(db_organisation) + + # Update the logo field in the database with the correct path + if logo: + db_organisation.logo = await upload_logo_to_s3(db_organisation, logo) + db.commit() + + except Exception as e: + log.exception(e) + log.debug("Rolling back changes to db organisation") + # Rollback any changes + db.rollback() + # Delete the failed organisation entry + if db_organisation: + log.debug(f"Deleting created organisation ID {db_organisation.id}") + db.delete(db_organisation) + db.commit() + raise HTTPException( + status_code=400, detail=f"Error creating organisation: {e}" + ) from e + + return db_organisation + + +async def update_organisation( + db: Session, + organisation: db_models.DbOrganisation, + values: OrganisationEdit, + logo: UploadFile(None), +) -> db_models.DbOrganisation: + """Update an existing organisation database entry. + + Args: + db (Session): database session + organisation (DbOrganisation): Editing database model. + values (OrganisationEdit): Pydantic model for organisation edit. + logo (UploadFile, optional): logo file of the organisation. + Defaults to File(...). + + Returns: + DbOrganisation: SQLAlchemy Organisation model. + """ + if not (updated_fields := values.dict(exclude_none=True)): + raise HTTPException( + status_code=HTTPStatus.UNPROCESSABLE_ENTITY, + detail=f"No values were provided to update organisation {organisation.id}", + ) + + update_cmd = ( + update(db_models.DbOrganisation) + .where(db_models.DbOrganisation.id == organisation.id) + .values(**updated_fields) + ) + db.execute(update_cmd) + + if logo: + organisation.logo = await upload_logo_to_s3(organisation, logo) + + db.commit() + db.refresh(organisation) + + return organisation + + +async def delete_organisation( + db: Session, + organisation: db_models.DbOrganisation, +) -> Response: + """Delete an existing organisation database entry. + + Args: + db (Session): database session + organisation (DbOrganisation): Database model to delete. + + Returns: + bool: If deletion was successful. 
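A hedged sketch of the partial-update pattern used by update_organisation: only fields the caller actually provided are written, so unset fields are left untouched. SessionLocal and the organisation id are assumptions for illustration, not taken from the patch.

# Sketch only: mirrors the exclude_none + targeted UPDATE pattern above.
from sqlalchemy import update

from app.db.database import SessionLocal  # assumed session factory name
from app.db.db_models import DbOrganisation

# Equivalent of OrganisationEdit(...).dict(exclude_none=True):
# a dict containing only the fields the caller supplied.
updated_fields = {"description": "Mapping partner in Nepal"}

with SessionLocal() as db:
    db.execute(
        update(DbOrganisation)
        .where(DbOrganisation.id == 1)  # assumed existing organisation id
        .values(**updated_fields)
    )
    db.commit()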
+ """ + db.delete(organisation) + db.commit() + + return Response(status_code=HTTPStatus.NO_CONTENT) + + +async def add_organisation_admin( + db: Session, user: db_models.DbUser, organisation: db_models.DbOrganisation +): + """Adds a user as an admin to the specified organisation. + + Args: + db (Session): The database session. + user (DbUser): The user model instance. + organisation (DbOrganisation): The organisation model instance. + + Returns: + Response: The HTTP response with status code 200. + """ + log.info(f"Adding user ({user.id}) as org ({organisation.id}) admin") + # add data to the managers field in organisation model + organisation.managers.append(user) + db.commit() + + return Response(status_code=HTTPStatus.OK) + + +async def approve_organisation(db, organisation): + """Approves an oranisation request made by the user . + + Args: + db: The database session. + organisation (DbOrganisation): The organisation model instance. + + Returns: + Response: An HTTP response with the status code 200. + """ + log.info(f"Approving organisation ID {organisation.id}") + organisation.approved = True + db.commit() + return Response(status_code=HTTPStatus.OK) diff --git a/src/backend/app/organisations/organisation_deps.py b/src/backend/app/organisations/organisation_deps.py new file mode 100644 index 0000000000..36c147302d --- /dev/null +++ b/src/backend/app/organisations/organisation_deps.py @@ -0,0 +1,131 @@ +# Copyright (c) 2022, 2023 Humanitarian OpenStreetMap Team +# +# This file is part of FMTM. +# +# FMTM is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# FMTM is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with FMTM. If not, see . +# + +"""Organisation dependencies for use in Depends.""" + +from typing import Union + +from fastapi import Depends +from fastapi.exceptions import HTTPException +from loguru import logger as log +from sqlalchemy import func +from sqlalchemy.orm import Session + +from app.db.database import get_db +from app.db.db_models import DbOrganisation, DbProject +from app.models.enums import HTTPStatus +from app.projects import project_deps + + +async def get_organisation_by_name( + db: Session, org_name: str, check_approved: bool = True +) -> DbOrganisation: + """Get an organisation from the db by name. + + Args: + db (Session): database session + org_name (int): id of the organisation + check_approved (bool): first check if the organisation is approved + + Returns: + DbOrganisation: organisation with the given id + """ + org_obj = ( + db.query(DbOrganisation) + .filter(func.lower(DbOrganisation.name).like(func.lower(f"%{org_name}%"))) + .first() + ) + if org_obj and check_approved and org_obj.approved is False: + raise HTTPException( + status_code=HTTPStatus.NOT_FOUND, + detail=f"Organisation ({org_obj.id}) is not approved yet", + ) + return org_obj + + +async def get_organisation_by_id( + db: Session, org_id: int, check_approved: bool = True +) -> DbOrganisation: + """Get an organisation from the db by id. 
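Illustrative workflow sketch, not part of the patch: approving a requested organisation and registering its first manager chains the two helpers above; the wrapper name and the Session, user, and organisation objects are assumptions.

from sqlalchemy.orm import Session

from app.db import db_models
from app.organisations import organisation_crud


async def approve_and_assign_manager(
    db: Session,
    org: db_models.DbOrganisation,
    first_admin: db_models.DbUser,
) -> None:
    """Approve a requested organisation, then add its first manager."""
    await organisation_crud.approve_organisation(db, org)
    await organisation_crud.add_organisation_admin(db, first_admin, org)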
+ + Args: + db (Session): database session + org_id (int): id of the organisation + check_approved (bool): first check if the organisation is approved + + Returns: + DbOrganisation: organisation with the given id + """ + org_obj = db.query(DbOrganisation).filter_by(id=org_id).first() + if org_obj and check_approved and org_obj.approved is False: + raise HTTPException( + status_code=HTTPStatus.NOT_FOUND, + detail=f"Organisation {org_id} is not approved yet", + ) + return org_obj + + +async def check_org_exists( + db: Session, + org_id: Union[str, int], + check_approved: bool = True, +) -> DbOrganisation: + """Check if organisation name exists, else error. + + The org_id can also be an org name. + """ + try: + org_id = int(org_id) + except ValueError: + pass + + if isinstance(org_id, int): + log.debug(f"Getting organisation by id: {org_id}") + db_organisation = await get_organisation_by_id(db, org_id, check_approved) + + if isinstance(org_id, str): + log.debug(f"Getting organisation by name: {org_id}") + db_organisation = await get_organisation_by_name(db, org_id, check_approved) + + if not db_organisation: + raise HTTPException( + status_code=HTTPStatus.NOT_FOUND, + detail=f"Organisation {org_id} does not exist", + ) + + log.debug(f"Organisation match: {db_organisation}") + return db_organisation + + +async def org_exists( + org_id: Union[str, int], + db: Session = Depends(get_db), +) -> DbOrganisation: + """Wrapper for check_org_exists to be used as a route dependency. + + Requires Depends from a route. + """ + return await check_org_exists(db, org_id) + + +async def org_from_project( + project: DbProject = Depends(project_deps.get_project_by_id), + db: Session = Depends(get_db), +) -> DbOrganisation: + """Get an organisation from a project id.""" + return await check_org_exists(db, project.organisation_id) diff --git a/src/backend/app/organisations/organisation_routes.py b/src/backend/app/organisations/organisation_routes.py new file mode 100644 index 0000000000..6b5000f004 --- /dev/null +++ b/src/backend/app/organisations/organisation_routes.py @@ -0,0 +1,119 @@ +# Copyright (c) 2022, 2023 Humanitarian OpenStreetMap Team +# +# This file is part of FMTM. +# +# FMTM is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# FMTM is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with FMTM. If not, see . 
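A small sketch showing that check_org_exists resolves either a numeric id or an organisation name, because the int() cast falls through to the name lookup on ValueError. The database contents (an approved organisation named "HOTOSM" with id 1) and the SessionLocal factory are assumptions.

import asyncio

from app.db.database import SessionLocal  # assumed session factory name
from app.organisations.organisation_deps import check_org_exists


async def demo() -> None:
    with SessionLocal() as db:
        by_id = await check_org_exists(db, "1")         # "1" -> int -> id lookup
        by_name = await check_org_exists(db, "HOTOSM")  # ValueError -> name lookup
        assert by_id.id == by_name.id


asyncio.run(demo())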
+# +"""Routes for organisation management.""" + +from fastapi import ( + APIRouter, + Depends, + File, + UploadFile, +) +from sqlalchemy.orm import Session + +from app.auth.osm import AuthUser, login_required +from app.auth.roles import org_admin, super_admin +from app.db import database +from app.db.db_models import DbOrganisation, DbUser +from app.organisations import organisation_crud, organisation_schemas +from app.organisations.organisation_deps import check_org_exists, org_exists +from app.users.user_deps import user_exists_in_db + +router = APIRouter( + prefix="/organisation", + tags=["organisation"], + responses={404: {"description": "Not found"}}, +) + + +@router.get("/", response_model=list[organisation_schemas.OrganisationOut]) +async def get_organisations( + db: Session = Depends(database.get_db), + current_user: AuthUser = Depends(login_required), + approved: bool = True, +) -> list[organisation_schemas.OrganisationOut]: + """Get a list of all organisations.""" + return await organisation_crud.get_organisations(db, current_user, approved) + + +@router.get("/{org_id}", response_model=organisation_schemas.OrganisationOut) +async def get_organisation_detail( + organisation: DbOrganisation = Depends(org_exists), + db: Session = Depends(database.get_db), +): + """Get a specific organisation by id or name.""" + return organisation + + +@router.post("/", response_model=organisation_schemas.OrganisationOut) +async def create_organisation( + org: organisation_schemas.OrganisationIn = Depends(), + logo: UploadFile = File(None), + db: Session = Depends(database.get_db), +) -> organisation_schemas.OrganisationOut: + """Create an organisation with the given details.""" + return await organisation_crud.create_organisation(db, org, logo) + + +@router.patch("/{org_id}/", response_model=organisation_schemas.OrganisationOut) +async def update_organisation( + new_values: organisation_schemas.OrganisationEdit = Depends(), + logo: UploadFile = File(None), + organisation: DbOrganisation = Depends(org_exists), + db: Session = Depends(database.get_db), +): + """Partial update for an existing organisation.""" + return await organisation_crud.update_organisation( + db, organisation, new_values, logo + ) + + +@router.delete("/{org_id}") +async def delete_organisations( + organisation: DbOrganisation = Depends(org_exists), + db: Session = Depends(database.get_db), +): + """Delete an organisation.""" + return await organisation_crud.delete_organisation(db, organisation) + + +@router.post("/approve/") +async def approve_organisation( + org_id: int, + db: Session = Depends(database.get_db), + current_user: AuthUser = Depends(super_admin), +): + """Approve the organisation request made by the user. + + The logged in user must be super admin to perform this action . + """ + org_obj = await check_org_exists(db, org_id, check_approved=False) + return await organisation_crud.approve_organisation(db, org_obj) + + +@router.post("/add_admin/") +async def add_new_organisation_admin( + db: Session = Depends(database.get_db), + organisation: DbOrganisation = Depends(org_exists), + user: DbUser = Depends(user_exists_in_db), + current_user: AuthUser = Depends(org_admin), +): + """Add a new organisation admin. + + The logged in user must be either the owner of the organisation or a super admin. 
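A hedged sketch of exercising the new /organisation routes with FastAPI's TestClient; the api instance name, the local logo file, and a working database plus S3 bucket are assumptions for illustration.

from fastapi.testclient import TestClient

from app.main import api  # assumed FastAPI instance name; adjust if it differs

client = TestClient(api)

with open("logo.png", "rb") as logo:
    resp = client.post(
        "/organisation/",
        data={"name": "Example Mappers", "description": "Demo organisation"},
        files={"logo": ("logo.png", logo, "image/png")},
    )
assert resp.status_code == 200
org = resp.json()

# Organisations resolve by id or by name via the org_exists dependency.
assert client.get(f"/organisation/{org['id']}").json()["name"] == "Example Mappers"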
+ """ + return await organisation_crud.add_organisation_admin(db, user, organisation) diff --git a/src/backend/app/organisations/organisation_schemas.py b/src/backend/app/organisations/organisation_schemas.py new file mode 100644 index 0000000000..0a2b9aabc8 --- /dev/null +++ b/src/backend/app/organisations/organisation_schemas.py @@ -0,0 +1,81 @@ +# Copyright (c) 2022, 2023 Humanitarian OpenStreetMap Team +# +# This file is part of FMTM. +# +# FMTM is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# FMTM is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with FMTM. If not, see . +# +"""Pydantic models for Organisations.""" + +from re import sub +from typing import Optional + +from fastapi import Form +from pydantic import BaseModel, Field, HttpUrl, computed_field +from pydantic.functional_validators import field_validator + +from app.models.enums import OrganisationType + +# class OrganisationBase(BaseModel): +# """Base model for organisation to extend.""" + + +class OrganisationIn(BaseModel): + """Organisation to create from user input.""" + + name: str = Field(Form(..., description="Organisation name")) + description: Optional[str] = Field( + Form(None, description="Organisation description") + ) + url: Optional[HttpUrl] = Field(Form(None, description="Organisation website URL")) + + @field_validator("url", mode="after") + @classmethod + def convert_url_to_str(cls, value: HttpUrl) -> str: + """Convert Pydantic Url type to string. + + Database models do not accept type Url for a string field. + """ + if value: + return value.unicode_string() + return "" + + @computed_field + @property + def slug(self) -> str: + """Sanitise the organisation name for use in a URL.""" + if self.name: + # Remove special characters and replace spaces with hyphens + slug = sub(r"[^\w\s-]", "", self.name).strip().lower().replace(" ", "-") + # Remove consecutive hyphens + slug = sub(r"[-\s]+", "-", slug) + return slug + + +class OrganisationEdit(OrganisationIn): + """Organisation to edit via user input.""" + + # Override to make name optional + name: Optional[str] = Field(Form(None, description="Organisation name")) + + +class OrganisationOut(BaseModel): + """Organisation to display to user.""" + + id: int + name: str + logo: Optional[str] + description: Optional[str] + slug: Optional[str] + url: Optional[str] + type: OrganisationType diff --git a/src/backend/app/organization/organization_crud.py b/src/backend/app/organization/organization_crud.py deleted file mode 100644 index 2dc7e7385c..0000000000 --- a/src/backend/app/organization/organization_crud.py +++ /dev/null @@ -1,193 +0,0 @@ -# Copyright (c) 2022, 2023 Humanitarian OpenStreetMap Team -# -# This file is part of FMTM. -# -# FMTM is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. 
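A minimal sketch of the slug normalisation on OrganisationIn, assuming the model can be instantiated directly in Python outside a request; the sample name and expected output are illustrative only.

from app.organisations.organisation_schemas import OrganisationIn

org = OrganisationIn(name="HOT   Field Mapping (FMTM)!")
print(org.slug)  # expected: "hot-field-mapping-fmtm"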
-# -# FMTM is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with FMTM. If not, see . -# -"""Logic for organization management.""" - -import re -from io import BytesIO - -from fastapi import HTTPException, UploadFile -from loguru import logger as log -from sqlalchemy import func -from sqlalchemy.orm import Session - -from app.config import settings -from app.db import db_models -from app.s3 import add_obj_to_bucket - - -def get_organisations( - db: Session, -): - """Get all orgs.""" - db_organisation = db.query(db_models.DbOrganisation).all() - return db_organisation - - -def generate_slug(text: str) -> str: - """Sanitise the organization name for use in a URL.""" - # Remove special characters and replace spaces with hyphens - slug = re.sub(r"[^\w\s-]", "", text).strip().lower().replace(" ", "-") - # Remove consecutive hyphens - slug = re.sub(r"[-\s]+", "-", slug) - return slug - - -async def get_organisation_by_name(db: Session, name: str): - """Get org by name. - - This function is used to check if a org exists with the same name. - """ - # Use SQLAlchemy's query-building capabilities - db_organisation = ( - db.query(db_models.DbOrganisation) - .filter(func.lower(db_models.DbOrganisation.name).like(func.lower(f"%{name}%"))) - .first() - ) - return db_organisation - - -async def upload_logo_to_s3( - db_org: db_models.DbOrganisation, logo_file: UploadFile(None) -) -> str: - """Upload logo using standardised /{org_id}/logo.png format. - - Browsers treat image mimetypes the same, regardless of extension, - so it should not matter if a .jpg is renamed .png. - - Args: - db_org(db_models.DbOrganisation): The organization database object. - logo_file(UploadFile): The logo image uploaded to FastAPI. - - Returns: - logo_url(str): The S3 URL for the logo file. - """ - logo_path = f"/{db_org.id}/logo.png" - - file_bytes = await logo_file.read() - file_obj = BytesIO(file_bytes) - - add_obj_to_bucket( - settings.S3_BUCKET_NAME, - file_obj, - logo_path, - content_type=logo_file.content_type, - ) - - logo_url = f"{settings.S3_DOWNLOAD_ROOT}/{settings.S3_BUCKET_NAME}{logo_path}" - - return logo_url - - -async def create_organization( - db: Session, name: str, description: str, url: str, logo: UploadFile(None) -): - """Creates a new organization with the given name, description, url, type, and logo. - - Saves the logo file S3 bucket under /{org_id}/logo.png. - - Args: - db (Session): database session - name (str): name of the organization - description (str): description of the organization - url (str): url of the organization - type (int): type of the organization - logo (UploadFile, optional): logo file of the organization. - Defaults to File(...). 
- - Returns: - bool: True if organization was created successfully - """ - try: - # Create new organization without logo set - db_organization = db_models.DbOrganisation( - name=name, - slug=generate_slug(name), - description=description, - url=url, - ) - - db.add(db_organization) - db.commit() - # Refresh to get the assigned org id - db.refresh(db_organization) - - # Update the logo field in the database with the correct path - if logo: - db_organization.logo = await upload_logo_to_s3(db_organization, logo) - db.commit() - - except Exception as e: - log.exception(e) - log.debug("Rolling back changes to db organization") - # Rollback any changes - db.rollback() - # Delete the failed organization entry - if db_organization: - log.debug(f"Deleting created organisation ID {db_organization.id}") - db.delete(db_organization) - db.commit() - raise HTTPException( - status_code=400, detail=f"Error creating organization: {e}" - ) from e - - return True - - -async def get_organisation_by_id(db: Session, id: int): - """Get an organization by its id. - - Args: - db (Session): database session - id (int): id of the organization - - Returns: - DbOrganisation: organization with the given id - """ - db_organization = ( - db.query(db_models.DbOrganisation) - .filter(db_models.DbOrganisation.id == id) - .first() - ) - return db_organization - - -async def update_organization_info( - db: Session, - organization_id, - name: str, - description: str, - url: str, - logo: UploadFile, -): - """Update an existing organisation database entry.""" - organization = await get_organisation_by_id(db, organization_id) - if not organization: - raise HTTPException(status_code=404, detail="Organization not found") - - if name: - organization.name = name - if description: - organization.description = description - if url: - organization.url = url - if logo: - organization.logo = await upload_logo_to_s3(organization, logo) - - db.commit() - db.refresh(organization) - return organization diff --git a/src/backend/app/organization/organization_routes.py b/src/backend/app/organization/organization_routes.py deleted file mode 100644 index 6e2798c290..0000000000 --- a/src/backend/app/organization/organization_routes.py +++ /dev/null @@ -1,129 +0,0 @@ -# Copyright (c) 2022, 2023 Humanitarian OpenStreetMap Team -# -# This file is part of FMTM. -# -# FMTM is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# FMTM is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with FMTM. If not, see . -# -"""Routes for organization management.""" - -from fastapi import ( - APIRouter, - Depends, - File, - Form, - HTTPException, - UploadFile, -) -from loguru import logger as log -from sqlalchemy.orm import Session - -from ..db import database -from . 
import organization_crud - -router = APIRouter( - prefix="/organization", - tags=["organization"], - dependencies=[Depends(database.get_db)], - responses={404: {"description": "Not found"}}, -) - - -@router.get("/") -def get_organisations( - db: Session = Depends(database.get_db), -): - """Get api for fetching organization list.""" - organizations = organization_crud.get_organisations(db) - return organizations - - -@router.get("/{organization_id}") -async def get_organization_detail( - organization_id: int, db: Session = Depends(database.get_db) -): - """Get API for fetching detail about a organiation based on id.""" - organization = await organization_crud.get_organisation_by_id(db, organization_id) - if not organization: - raise HTTPException(status_code=404, detail="Organization not found") - - return organization - - -@router.post("/") -async def create_organization( - name: str = Form(), # Required field for organization name - description: str = Form(None), # Optional field for organization description - url: str = Form(None), # Optional field for organization URL - logo: UploadFile = File(None), # Optional field for organization logo - db: Session = Depends(database.get_db), # Dependency for database session -): - """Create an organization with the given details. - - Parameters: - name (str): The name of the organization. Required. - description (str): The description of the organization. Optional. - url (str): The URL of the organization. Optional. - logo (UploadFile): The logo of the organization. Optional. - db (Session): The database session. Dependency. - - Returns: - dict: A dictionary with a message indicating successful creation - of the organization. - """ - # Check if the organization with the same already exists - if await organization_crud.get_organisation_by_name(db, name=name): - raise HTTPException( - status_code=400, detail=f"Organization already exists with the name {name}" - ) - - await organization_crud.create_organization(db, name, description, url, logo) - - return {"Message": "Organization Created Successfully."} - - -@router.patch("/{organization_id}/") -async def update_organization( - organization_id: int, - name: str = Form(None), - description: str = Form(None), - url: str = Form(None), - logo: UploadFile = File(None), - db: Session = Depends(database.get_db), -): - """PUT API to update the details of an organization.""" - try: - organization = await organization_crud.update_organization_info( - db, organization_id, name, description, url, logo - ) - return organization - except Exception as e: - log.exception(e) - raise HTTPException( - status_code=400, detail="Error updating organization." 
- ) from e - - -@router.delete("/{organization_id}") -async def delete_organisations( - organization_id: int, db: Session = Depends(database.get_db) -): - """Delete an organization.""" - organization = await organization_crud.get_organisation_by_id(db, organization_id) - - if not organization: - raise HTTPException(status_code=404, detail="Organization not found") - - db.delete(organization) - db.commit() - return {"Message": "Organization Deleted Successfully."} diff --git a/src/backend/app/pagination/pagination.py b/src/backend/app/pagination/pagination.py index 2c678e7939..3b5abc8c37 100644 --- a/src/backend/app/pagination/pagination.py +++ b/src/backend/app/pagination/pagination.py @@ -1,8 +1,11 @@ +"""Logic for API pagination.""" + import math from typing import List def get_pages_nav(total_pages, current_page): + """Get page position (prev / next pages).""" next_page = None prev_page = None if current_page + 1 <= total_pages: @@ -13,6 +16,7 @@ def get_pages_nav(total_pages, current_page): def paginate_data(data: List[dict], page_no: int, page_size: int, total_content: int): + """Generate pagination JSON.""" total_pages = math.ceil(total_content / page_size) next_page, prev_page = get_pages_nav(total_pages, page_no) diff --git a/src/backend/app/projects/project_crud.py b/src/backend/app/projects/project_crud.py index 3f2a864978..6125a975e2 100644 --- a/src/backend/app/projects/project_crud.py +++ b/src/backend/app/projects/project_crud.py @@ -55,7 +55,7 @@ shape, ) from shapely.ops import unary_union -from sqlalchemy import and_, column, inspect, select, table, text +from sqlalchemy import and_, column, func, inspect, select, table, text from sqlalchemy.dialects.postgresql import insert from sqlalchemy.orm import Session @@ -64,7 +64,6 @@ from app.db import db_models from app.db.database import get_db from app.db.postgis_utils import geojson_to_flatgeobuf, geometry_to_geojson, timestamp -from app.organization import organization_crud from app.projects import project_schemas from app.s3 import add_obj_to_bucket, get_obj_from_bucket from app.tasks import tasks_crud @@ -147,7 +146,6 @@ async def get_project_by_id(db: Session, project_id: int): db_project = ( db.query(db_models.DbProject) .filter(db_models.DbProject.id == project_id) - .order_by(db_models.DbProject.id) .first() ) return await convert_to_app_project(db_project) @@ -164,22 +162,16 @@ async def get_project_info_by_id(db: Session, project_id: int): return await convert_to_app_project_info(db_project_info) -async def delete_project_by_id(db: Session, project_id: int): +async def delete_one_project(db: Session, db_project: db_models.DbProject) -> None: """Delete a project by id.""" try: - db_project = ( - db.query(db_models.DbProject) - .filter(db_models.DbProject.id == project_id) - .order_by(db_models.DbProject.id) - .first() - ) - if db_project: - db.delete(db_project) - db.commit() + project_id = db_project.id + db.delete(db_project) + db.commit() + log.info(f"Deleted project with ID: {project_id}") except Exception as e: log.exception(e) raise HTTPException(e) from e - return f"Project {project_id} deleted" async def partial_update_project_info( @@ -522,10 +514,12 @@ def remove_z_dimension(coord): # Merge multiple geometries into single polygon if multi_polygons: - boundary = multi_polygons[0] + geometry = multi_polygons[0] for geom in multi_polygons[1:]: - boundary = boundary.union(geom) - + geometry = geometry.union(geom) + for feature in features: + feature["geometry"] = geometry + boundary["features"] = features return 
await run_in_threadpool( lambda: split_by_square( boundary, @@ -578,7 +572,7 @@ async def get_data_extract_from_osm_rawdata( return data_extract except Exception as e: log.error(e) - raise HTTPException(status_code=400, detail=str(e)) + raise HTTPException(status_code=400, detail=str(e)) from e async def get_data_extract_url( @@ -622,13 +616,13 @@ async def get_data_extract_url( } } - if geom_type := aoi.get("type") == "FeatureCollection": + if (geom_type := aoi.get("type")) == "FeatureCollection": # Convert each feature into a Shapely geometry geometries = [ shape(feature.get("geometry")) for feature in aoi.get("features", []) ] merged_geom = unary_union(geometries) - elif geom_type := aoi.get("type") == "Feature": + elif geom_type == "Feature": merged_geom = shape(aoi.get("geometry")) else: merged_geom = shape(aoi) @@ -1160,7 +1154,7 @@ def generate_task_files( # Create an app user for the task project_log.info(f"Creating odkcentral app user for task {task_id}") - appuser = central_crud.create_appuser(odk_id, name, odk_credentials) + appuser = central_crud.create_odk_app_user(odk_id, name, odk_credentials) # If app user could not be created, raise an exception. if not appuser: @@ -1500,7 +1494,7 @@ async def create_qrcode( """Create a QR code for a task.""" # Make QR code for an app_user. log.debug(f"Generating base64 encoded QR settings for token: {token}") - qrcode_data = await central_crud.create_qrcode( + qrcode_data = await central_crud.encode_qrcode_json( odk_id, token, project_name, odk_central_url ) @@ -2071,22 +2065,6 @@ async def update_project_form( return True -async def update_odk_credentials_in_db( - project_instance: project_schemas.ProjectUpload, - odk_central_cred: project_schemas.ODKCentral, - odkid: int, - db: Session, -): - """Update odk credentials for a project.""" - project_instance.odkid = odkid - project_instance.odk_central_url = odk_central_cred.odk_central_url - project_instance.odk_central_user = odk_central_cred.odk_central_user - project_instance.odk_central_password = odk_central_cred.odk_central_password - - db.commit() - db.refresh(project_instance) - - async def get_extracted_data_from_db(db: Session, project_id: int, outfile: str): """Get the geojson of those features for this project.""" query = text( @@ -2365,31 +2343,28 @@ async def get_tasks_count(db: Session, project_id: int): async def get_pagination(page: int, count: int, results_per_page: int, total: int): """Pagination result for splash page.""" total_pages = (count + results_per_page - 1) // results_per_page - hasNext = (page * results_per_page) < count # noqa: N806 - hasPrev = page > 1 # noqa: N806 + has_next = (page * results_per_page) < count # noqa: N806 + has_prev = page > 1 # noqa: N806 pagination = project_schemas.PaginationInfo( - hasNext=hasNext, - hasPrev=hasPrev, - nextNum=page + 1 if hasNext else None, + has_next=has_next, + has_prev=has_prev, + next_num=page + 1 if has_next else None, page=page, pages=total_pages, - prevNum=page - 1 if hasPrev else None, - perPage=results_per_page, + prev_num=page - 1 if has_prev else None, + per_page=results_per_page, total=total, ) return pagination -async def get_dashboard_detail(project_id: int, db: Session): +async def get_dashboard_detail( + project: db_models.DbProject, db_organisation: db_models.DbOrganisation, db: Session +): """Get project details for project dashboard.""" - project = await get_project(db, project_id) - db_organization = await organization_crud.get_organisation_by_id( - db, project.organisation_id - ) - - 
s3_project_path = f"/{project.organisation_id}/{project_id}" + s3_project_path = f"/{project.organisation_id}/{project.id}" s3_submission_path = f"/{s3_project_path}/submission.zip" s3_submission_meta_path = f"/{s3_project_path}/submissions.meta.json" @@ -2413,17 +2388,17 @@ async def get_dashboard_detail(project_id: int, db: Session): contributors = ( db.query(db_models.DbTaskHistory.user_id) .filter( - db_models.DbTaskHistory.project_id == project_id, + db_models.DbTaskHistory.project_id == project.id, db_models.DbTaskHistory.user_id.isnot(None), ) .distinct() .count() ) - project.total_tasks = await tasks_crud.get_task_count_in_project(db, project_id) - project.organization, project.organization_logo = ( - db_organization.name, - db_organization.logo, + project.total_tasks = await tasks_crud.get_task_count_in_project(db, project.id) + project.organisation_name, project.organisation_logo = ( + db_organisation.name, + db_organisation.logo, ) project.total_contributors = contributors @@ -2438,7 +2413,9 @@ async def get_project_users(db: Session, project_id: int): project_id (int): The ID of the project. Returns: - List[Dict[str, Union[str, int]]]: A list of dictionaries containing the username and the number of contributions made by each user for the specified project. + List[Dict[str, Union[str, int]]]: A list of dictionaries containing + the username and the number of contributions made by each user + for the specified project. """ contributors = ( db.query(db_models.DbTaskHistory) @@ -2468,7 +2445,8 @@ def count_user_contributions(db: Session, user_id: int, project_id: int) -> int: project_id (int): The ID of the project. Returns: - int: The number of contributions made by the user for the specified project. + int: The number of contributions made by the user for the specified + project. """ contributions_count = ( db.query(func.count(db_models.DbTaskHistory.user_id)) diff --git a/src/backend/app/projects/project_deps.py b/src/backend/app/projects/project_deps.py new file mode 100644 index 0000000000..b7520246fd --- /dev/null +++ b/src/backend/app/projects/project_deps.py @@ -0,0 +1,45 @@ +# Copyright (c) 2022, 2023 Humanitarian OpenStreetMap Team +# +# This file is part of FMTM. +# +# FMTM is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# FMTM is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with FMTM. If not, see . 
+# + +"""Project dependencies for use in Depends.""" + +from fastapi import Depends +from fastapi.exceptions import HTTPException +from sqlalchemy.orm import Session + +from app.db.database import get_db +from app.db.db_models import DbProject +from app.models.enums import HTTPStatus + + +async def get_project_by_id( + db: Session = Depends(get_db), project_id: int = None +) -> DbProject: + """Get a single project by id.""" + if not project_id: + # Skip if no project id passed + return None + + db_project = db.query(DbProject).filter(DbProject.id == project_id).first() + if not db_project: + raise HTTPException( + status_code=HTTPStatus.NOT_FOUND, + detail=f"Project with ID {project_id} does not exist", + ) + + return db_project diff --git a/src/backend/app/projects/project_routes.py b/src/backend/app/projects/project_routes.py index dcb0517407..3c37290728 100644 --- a/src/backend/app/projects/project_routes.py +++ b/src/backend/app/projects/project_routes.py @@ -40,21 +40,22 @@ from osm_fieldwork.make_data_extract import getChoices from osm_fieldwork.xlsforms import xlsforms_path from sqlalchemy.orm import Session +from sqlalchemy.sql import text from app.auth.osm import AuthUser, login_required -from app.submission import submission_crud - -from ..central import central_crud -from ..db import database, db_models -from ..models.enums import TILES_FORMATS, TILES_SOURCE -from ..tasks import tasks_crud -from . import project_crud, project_schemas -from .project_crud import check_crs +from app.central import central_crud +from app.db import database, db_models +from app.models.enums import TILES_FORMATS, TILES_SOURCE, HTTPStatus +from app.organisations import organisation_deps +from app.projects import project_crud, project_deps, project_schemas +from app.projects.project_crud import check_crs +from app.static import data_path +from app.submissions import submission_crud +from app.tasks import tasks_crud router = APIRouter( prefix="/projects", tags=["projects"], - dependencies=[Depends(database.get_db)], responses={404: {"description": "Not found"}}, ) @@ -66,6 +67,7 @@ async def read_projects( limit: int = 100, db: Session = Depends(database.get_db), ): + """Return all projects.""" project_count, projects = await project_crud.get_projects(db, user_id, skip, limit) return projects @@ -117,6 +119,10 @@ async def get_projet_details(project_id: int, db: Session = Depends(database.get @router.post("/near_me", response_model=list[project_schemas.ProjectSummary]) async def get_tasks_near_me(lat: float, long: float, user_id: int = None): + """Get projects near me. + + TODO to be implemented in future. 
+ """ return [project_schemas.ProjectSummary()] @@ -128,6 +134,7 @@ async def read_project_summaries( results_per_page: int = Query(13, le=100), db: Session = Depends(database.get_db), ): + """Get a paginated summary of projects.""" if hashtags: hashtags = hashtags.split(",") # create list of hashtags hashtags = list( @@ -167,6 +174,7 @@ async def search_project( results_per_page: int = Query(13, le=100), db: Session = Depends(database.get_db), ): + """Search projects by string, hashtag, or other criteria.""" if hashtags: hashtags = hashtags.split(",") # create list of hashtags hashtags = list( @@ -197,40 +205,34 @@ async def search_project( @router.get("/{project_id}", response_model=project_schemas.ReadProject) async def read_project(project_id: int, db: Session = Depends(database.get_db)): + """Get a specific project by ID.""" project = await project_crud.get_project_by_id(db, project_id) if not project: raise HTTPException(status_code=404, detail="Project not found") return project -@router.delete("/delete/{project_id}") +@router.delete("/{project_id}") async def delete_project( - project_id: int, + project: db_models.DbProject = Depends(project_deps.get_project_by_id), db: Session = Depends(database.get_db), user_data: AuthUser = Depends(login_required), ): - """Delete a project from ODK Central and the local database.""" - # FIXME: should check for error - - project = await project_crud.get_project(db, project_id) - - if not project: - raise HTTPException(status_code=404, detail="Project not found") - + """Delete a project from both ODK Central and the local database.""" + log.info(f"User {user_data.username} attempting deletion of project {project.id}") # Odk crendentials odk_credentials = project_schemas.ODKCentral( odk_central_url=project.odk_central_url, odk_central_user=project.odk_central_user, odk_central_password=project.odk_central_password, ) - + # Delete ODK Central project await central_crud.delete_odk_project(project.odkid, odk_credentials) + # Delete FMTM project + await project_crud.delete_one_project(db, project) - deleted_project = await project_crud.delete_project_by_id(db, project_id) - if deleted_project: - return deleted_project - else: - raise HTTPException(status_code=404, detail="Project not found") + log.info(f"Deletion of project {project.id} successful") + return Response(status_code=HTTPStatus.NO_CONTENT) @router.post("/create_project", response_model=project_schemas.ProjectOut) @@ -238,7 +240,11 @@ async def create_project( project_info: project_schemas.ProjectUpload, db: Session = Depends(database.get_db), ): - """Create a project in ODK Central and the local database.""" + """Create a project in ODK Central and the local database. 
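A sketch of the two-step teardown performed by the delete endpoint above, pulled out as a standalone helper; db is an assumed open Session and project an assumed DbProject row with ODK credentials set.

from app.central import central_crud
from app.projects import project_crud, project_schemas


async def remove_project(db, project) -> None:
    """Delete the ODK Central project first, then the FMTM record."""
    odk_creds = project_schemas.ODKCentral(
        odk_central_url=project.odk_central_url,
        odk_central_user=project.odk_central_user,
        odk_central_password=project.odk_central_password,
    )
    await central_crud.delete_odk_project(project.odkid, odk_creds)
    await project_crud.delete_one_project(db, project)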
+ + TODO refactor to standard REST POST to /projects + TODO but first check doesn't break other endpoints + """ log.debug(f"Creating project {project_info.project_info.name}") if project_info.odk_central.odk_central_url.endswith("/"): @@ -261,28 +267,6 @@ async def create_project( return project -@router.post("/update_odk_credentials") -async def update_odk_credentials( - odk_central_cred: project_schemas.ODKCentral, - project_id: int, - db: Session = Depends(database.get_db), -): - """Update odk credential of a project.""" - if odk_central_cred.odk_central_url.endswith("/"): - odk_central_cred.odk_central_url = odk_central_cred.odk_central_url[:-1] - - project = await project_crud.get_project(db, project_id) - - if not project: - raise HTTPException(status_code=404, detail="Project not found") - - await project_crud.update_odk_credentials_in_db( - project, odk_central_cred, odkproject["id"], db - ) - - return JSONResponse(status_code=200, content={"success": True}) - - @router.put("/{id}", response_model=project_schemas.ProjectOut) async def update_project( id: int, @@ -346,9 +330,11 @@ async def upload_custom_xls( db: Session = Depends(database.get_db), ): """Upload a custom XLSForm to the database. - Parameters: - - upload: the XLSForm file - - category: the category of the XLSForm. + + Args: + upload (UploadFile): the XLSForm file + category (str): the category of the XLSForm. + db (Session): the DB session, provided automatically. """ content = await upload.read() # read file content name = upload.filename.split(".")[0] # get name of file without extension @@ -358,26 +344,24 @@ async def upload_custom_xls( return {"xform_title": f"{category}"} -@router.post("/{project_id}/upload_multi_polygon") -async def upload_multi_project_boundary( +@router.post("/{project_id}/custom_task_boundaries") +async def upload_custom_task_boundaries( project_id: int, project_geojson: UploadFile = File(...), db: Session = Depends(database.get_db), ): - """This API allows for the uploading of a multi-polygon project boundary - in JSON format for a specified project ID. Each polygon in the uploaded geojson are made a single task. + """Set project task boundaries manually using multi-polygon GeoJSON. + + Each polygon in the uploaded geojson are made a single task. Required Parameters: - project_id: ID of the project to which the boundary is being uploaded. - project_geojson: a file upload containing the multi-polygon boundary in geojson format. + project_id (id): ID for associated project. + project_geojson (UploadFile): Multi-polygon GeoJSON file. Returns: - A success message indicating that the boundary was successfully uploaded. - If the project ID does not exist in the database, an HTTP 428 error is raised. + dict: JSON containing success message, project ID, and number of tasks. """ - log.debug( - "Uploading project boundary multipolygon for " f"project ID: {project_id}" - ) + log.debug(f"Uploading project boundary multipolygon for project ID: {project_id}") # read entire file content = await project_geojson.read() boundary = json.loads(content) @@ -454,14 +438,14 @@ async def upload_project_boundary( ): """Uploads the project boundary. The boundary is uploaded as a geojson file. - Params: - - project_id (int): The ID of the project to update. - - boundary_geojson (UploadFile): The boundary file to upload. - - dimension (int): The new dimension of the project. - - db (Session): The database session to use. + Args: + project_id (int): The ID of the project to update. 
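A hedged sketch of posting custom task boundaries to the renamed endpoint: each Feature in the uploaded GeoJSON becomes one task. The project id, coordinates, and api instance name are placeholders, not values from the patch.

import json

from fastapi.testclient import TestClient

from app.main import api  # assumed FastAPI instance name

client = TestClient(api)

task_areas = {
    "type": "FeatureCollection",
    "features": [  # add one Feature (polygon) per desired task area
        {
            "type": "Feature",
            "properties": {},
            "geometry": {
                "type": "Polygon",
                "coordinates": [[
                    [85.29, 27.71], [85.30, 27.71],
                    [85.30, 27.72], [85.29, 27.72],
                    [85.29, 27.71],
                ]],
            },
        },
    ],
}

resp = client.post(
    "/projects/1/custom_task_boundaries",  # assumed existing project id 1
    files={
        "project_geojson": ("tasks.geojson", json.dumps(task_areas), "application/geo+json")
    },
)
print(resp.status_code)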
+ boundary_geojson (UploadFile): The boundary file to upload. + dimension (int): The new dimension of the project. + db (Session): The database session to use. Returns: - - Dict: A dictionary with a message, the project ID, and the number of tasks in the project. + dict: JSON with message, project ID, and task count for project. """ # Validating for .geojson File. file_name = os.path.splitext(boundary_geojson.filename) @@ -503,6 +487,7 @@ async def edit_project_boundary( dimension: int = Form(500), db: Session = Depends(database.get_db), ): + """Edit the existing project boundary.""" # Validating for .geojson File. file_name = os.path.splitext(boundary_geojson.filename) file_ext = file_name[1] @@ -565,22 +550,28 @@ async def generate_files( data_extracts: Optional[UploadFile] = File(None), db: Session = Depends(database.get_db), ): - """Generate additional content for the project to function. + """Generate additional content to initialise the project. - QR codes, + Boundary, ODK Central forms, QR codes, etc. Accepts a project ID, category, custom form flag, and an uploaded file as inputs. The generated files are associated with the project ID and stored in the database. - This api generates qr_code, forms. This api also creates an app user for each task and provides the required roles. - Some of the other functionality of this api includes converting a xls file provided by the user to the xform, - generates osm data extracts and uploads it to the form. + This api generates qr_code, forms. This api also creates an app user for + each task and provides the required roles. + Some of the other functionality of this api includes converting a xls file + provided by the user to the xform, generates osm data extracts and uploads + it to the form. Args: + background_tasks (BackgroundTasks): FastAPI bg tasks, provided automatically. project_id (int): The ID of the project for which files are being generated. - polygon (bool): A boolean flag indicating whether the polygon + extract_polygon (bool): A boolean flag indicating whether the polygon is extracted or not. xls_form_upload (UploadFile, optional): A custom XLSForm to use in the project. A file should be provided if user wants to upload a custom xls form. + xls_form_config_file (UploadFile, optional): The config YAML for the XLS form. + data_extracts (UploadFile, optional): Custom data extract GeoJSON. + db (Session): Database session, provided automatically. Returns: json (JSONResponse): A success message containing the project ID. @@ -669,6 +660,7 @@ async def update_project_form( form: Optional[UploadFile], db: Session = Depends(database.get_db), ): + """Update XLSForm for a project.""" file_name = os.path.splitext(form.filename) file_ext = file_name[1] allowed_extensions = [".xls"] @@ -677,7 +669,10 @@ async def update_project_form( contents = await form.read() form_updated = await project_crud.update_project_form( - db, project_id, contents, file_ext[1:] # Form Contents # File type + db, + project_id, + contents, + file_ext[1:], # Form Contents # File type ) return form_updated @@ -696,6 +691,7 @@ async def get_project_features( Args: project_id (int): The project id. task_id (int): The task id. + db (Session): the DB session, provided automatically. Returns: feature(json): JSON object containing a list of features @@ -711,9 +707,11 @@ async def generate_log( r"""Get the contents of a log file in a log format. ### Response - - **200 OK**: Returns the contents of the log file in a log format. 
Each line is separated by a newline character "\n". + - **200 OK**: Returns the contents of the log file in a log format. + Each line is separated by a newline character "\n". - - **500 Internal Server Error**: Returns an error message if the log file cannot be generated. + - **500 Internal Server Error**: Returns an error message if the log file + cannot be generated. ### Return format Task Status and Logs are returned in a JSON format. @@ -850,6 +848,7 @@ async def upload_custom_extract( @router.get("/download_form/{project_id}/") async def download_form(project_id: int, db: Session = Depends(database.get_db)): + """Download the XLSForm for a project.""" project = await project_crud.get_project(db, project_id) if not project: raise HTTPException(status_code=404, detail="Project not found") @@ -876,6 +875,10 @@ async def update_project_category( upload: Optional[UploadFile] = File(None), db: Session = Depends(database.get_db), ): + """Update the XLSForm category for a project. + + Not valid for custom form uploads. + """ contents = None project = await project_crud.get_project(db, project_id) @@ -907,7 +910,10 @@ async def update_project_category( # Update odk forms await project_crud.update_project_form( - db, project_id, file_ext[1:] if upload else "xls", upload # Form + db, + project_id, + file_ext[1:] if upload else "xls", + upload, # Form ) return JSONResponse(status_code=200, content={"success": True}) @@ -915,6 +921,7 @@ async def update_project_category( @router.get("/download_template/") async def download_template(category: str, db: Session = Depends(database.get_db)): + """Download an XLSForm template to fill out.""" xlsform_path = f"{xlsforms_path}/{category}.xls" if os.path.exists(xlsform_path): return FileResponse(xlsform_path, filename="form.xls") @@ -931,6 +938,7 @@ async def download_project_boundary( Args: project_id (int): The id of the project. + db (Session): The database session, provided automatically. Returns: Response: The HTTP response object containing the downloaded file. @@ -953,6 +961,7 @@ async def download_task_boundaries( Args: project_id (int): The id of the project. + db (Session): The database session, provided automatically. Returns: Response: The HTTP response object containing the downloaded file. @@ -973,6 +982,7 @@ async def download_features(project_id: int, db: Session = Depends(database.get_ Args: project_id (int): The id of the project. + db (Session): The database session, provided automatically. Returns: Response: The HTTP response object containing the downloaded file. @@ -1006,10 +1016,12 @@ async def generate_project_tiles( """Returns basemap tiles for a project. Args: + background_tasks (BackgroundTasks): FastAPI bg tasks, provided automatically. project_id (int): ID of project to create tiles for. source (str): Tile source ("esri", "bing", "topo", "google", "oam"). format (str, optional): Default "mbtiles". Other options: "pmtiles", "sqlite3". tms (str, optional): Default None. Custom TMS provider URL. + db (Session): The database session, provided automatically. Returns: str: Success message that tile generation started. @@ -1042,6 +1054,7 @@ async def tiles_list(project_id: int, db: Session = Depends(database.get_db)): Parameters: project_id: int + db (Session): The database session, provided automatically. Returns: Response: List of generated tiles for a project. 
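# --- Editor's note: illustrative usage sketch, not part of the patch ----------
# The /{project_id}/upload_multi_polygon route earlier in this file is renamed
# to /{project_id}/custom_task_boundaries, and each polygon in the uploaded
# GeoJSON becomes one task. A minimal client call might look like the sketch
# below; the host, port, "/projects" router prefix, and file name are
# assumptions for illustration only.
#
#     import requests
#
#     with open("task_areas.geojson", "rb") as geojson_file:
#         resp = requests.post(
#             "http://localhost:8000/projects/42/custom_task_boundaries",
#             files={"project_geojson": geojson_file},
#         )
#     # Documented response: success message, project ID, and number of tasks
#     print(resp.json())
# -------------------------------------------------------------------------------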
@@ -1051,6 +1064,7 @@ async def tiles_list(project_id: int, db: Session = Depends(database.get_db)): @router.get("/download_tiles/") async def download_tiles(tile_id: int, db: Session = Depends(database.get_db)): + """Download the basemap tile archive for a project.""" log.debug("Getting tile archive path from DB") tiles_path = ( db.query(db_models.DbTilesPath) @@ -1060,9 +1074,9 @@ async def download_tiles(tile_id: int, db: Session = Depends(database.get_db)): log.info(f"User requested download for tiles: {tiles_path.path}") project_id = tiles_path.project_id - project_name = await project_crud.get_project(db, project_id).project_name_prefix + project = await project_crud.get_project(db, project_id) filename = Path(tiles_path.path).name.replace( - f"{project_id}_", f"{project_name.replace(' ', '_')}_" + f"{project_id}_", f"{project.project_name_prefix.replace(' ', '_')}_" ) log.debug(f"Sending tile archive to user: {filename}") @@ -1081,6 +1095,7 @@ async def download_task_boundary_osm( Args: project_id (int): The id of the project. + db (Session): The database session, provided automatically. Returns: Response: The HTTP response object containing the downloaded file. @@ -1100,9 +1115,6 @@ async def download_task_boundary_osm( return response -from sqlalchemy.sql import text - - @router.get("/centroid/") async def project_centroid( project_id: int = None, @@ -1112,12 +1124,16 @@ async def project_centroid( Parameters: project_id (int): The ID of the project. + db (Session): The database session, provided automatically. Returns: - list[tuple[int, str]]: A list of tuples containing the task ID and the centroid as a string. + list[tuple[int, str]]: A list of tuples containing the task ID and + the centroid as a string. """ query = text( - f"""SELECT id, ARRAY_AGG(ARRAY[ST_X(ST_Centroid(outline)), ST_Y(ST_Centroid(outline))]) AS centroid + f"""SELECT id, + ARRAY_AGG(ARRAY[ST_X(ST_Centroid(outline)), + ST_Y(ST_Centroid(outline))]) AS centroid FROM projects WHERE {f"id={project_id}" if project_id else "1=1"} GROUP BY id;""" @@ -1146,14 +1162,11 @@ async def get_task_status( ) -from ..static import data_path - - @router.get("/templates/") async def get_template_file( file_type: str = Query( ..., enum=["data_extracts", "form"], description="Choose file type" - ) + ), ): """Get template file. @@ -1176,27 +1189,33 @@ async def get_template_file( "/project_dashboard/{project_id}", response_model=project_schemas.ProjectDashboard ) async def project_dashboard( - project_id: int, background_tasks: BackgroundTasks, + db_project: db_models.DbProject = Depends(project_deps.get_project_by_id), + db_organisation: db_models.DbOrganisation = Depends( + organisation_deps.org_from_project + ), db: Session = Depends(database.get_db), ): """Get the project dashboard details. Args: - project_id (int): The ID of the project. + background_tasks (BackgroundTasks): FastAPI bg tasks, provided automatically. + db_project (db_models.DbProject): An instance of the project. + db_organisation (db_models.DbOrganisation): An instance of the organisation. db (Session): The database session. Returns: ProjectDashboard: The project dashboard details. 
""" - data = await project_crud.get_dashboard_detail(project_id, db) + data = await project_crud.get_dashboard_detail(db_project, db_organisation, db) + background_task_id = await project_crud.insert_background_task_into_database( - db, "sync_submission", project_id + db, "sync_submission", db_project.id ) - background_tasks.add_task( - submission_crud.update_submission_in_s3, db, project_id, background_task_id + submission_crud.update_submission_in_s3, db, db_project.id, background_task_id ) + return data @@ -1206,6 +1225,7 @@ async def get_contributors(project_id: int, db: Session = Depends(database.get_d Args: project_id (int): ID of project. + db (Session): The database session. Returns: list[project_schemas.ProjectUser]: List of project users. diff --git a/src/backend/app/projects/project_schemas.py b/src/backend/app/projects/project_schemas.py index 9f81b34d4f..aee4740fbe 100644 --- a/src/backend/app/projects/project_schemas.py +++ b/src/backend/app/projects/project_schemas.py @@ -15,6 +15,7 @@ # You should have received a copy of the GNU General Public License # along with FMTM. If not, see . # +"""Pydantic schemas for Projects.""" import uuid from datetime import datetime @@ -32,24 +33,32 @@ class ODKCentral(BaseModel): + """ODK Central credentials.""" + odk_central_url: str odk_central_user: str odk_central_password: str class ProjectInfo(BaseModel): + """Basic project info.""" + name: str short_description: str description: str class ProjectUpdate(BaseModel): + """Update project.""" + name: Optional[str] = None short_description: Optional[str] = None description: Optional[str] = None class ProjectUpload(BaseModel): + """Upload new project.""" + author: User project_info: ProjectInfo xform_title: Optional[str] @@ -66,11 +75,15 @@ class ProjectUpload(BaseModel): class Feature(BaseModel): + """Features used for Task definitions.""" + id: int geometry: Optional[GeojsonFeature] = None class ProjectSummary(BaseModel): + """Project summaries.""" + id: int = -1 priority: ProjectPriority = ProjectPriority.MEDIUM priority_str: str = priority.name @@ -91,6 +104,7 @@ def from_db_project( cls, project: db_models.DbProject, ) -> "ProjectSummary": + """Generate model from database obj.""" priority = project.priority return cls( id=project.id, @@ -111,22 +125,28 @@ def from_db_project( class PaginationInfo(BaseModel): - hasNext: bool - hasPrev: bool - nextNum: Optional[int] + """Pagination JSON return.""" + + has_next: bool + has_prev: bool + next_num: Optional[int] page: int pages: int - prevNum: Optional[int] - perPage: int + prev_num: Optional[int] + per_page: int total: int class PaginatedProjectSummaries(BaseModel): + """Project summaries + Pagination info.""" + results: List[ProjectSummary] pagination: PaginationInfo class ProjectBase(BaseModel): + """Base project model.""" + id: int odkid: int author: User @@ -141,31 +161,40 @@ class ProjectBase(BaseModel): class ProjectOut(ProjectBase): + """Project display to user.""" + project_uuid: uuid.UUID = uuid.uuid4() class ReadProject(ProjectBase): + """Redundant model for refactor.""" + project_uuid: uuid.UUID = uuid.uuid4() location_str: Optional[str] = None class BackgroundTaskStatus(BaseModel): + """Background task status for project related tasks.""" + status: str message: Optional[str] = None class ProjectDashboard(BaseModel): + """Project details dashboard.""" + project_name_prefix: str - organization: str + organisation_name: str total_tasks: int created: datetime - organization_logo: Optional[str] = None + organisation_logo: 
Optional[str] = None total_submission: Optional[int] = None total_contributors: Optional[int] = None last_active: Optional[Union[str, datetime]] = None @field_serializer("last_active") def get_last_active(self, value, values): + """Date of last activity on project.""" if value is None: return None diff --git a/src/backend/app/s3.py b/src/backend/app/s3.py index 44854a46df..0e3acd76e9 100644 --- a/src/backend/app/s3.py +++ b/src/backend/app/s3.py @@ -113,7 +113,7 @@ def get_obj_from_bucket(bucket_name: str, s3_path: str) -> BytesIO: response = client.get_object(bucket_name, s3_path) return BytesIO(response.read()) except Exception as e: - raise ValueError(str(e)) + raise ValueError(str(e)) from e finally: if response: response.close() diff --git a/src/backend/app/submission/submission_schemas.py b/src/backend/app/submission/submission_schemas.py deleted file mode 100644 index 721ae2a66d..0000000000 --- a/src/backend/app/submission/submission_schemas.py +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright (c) 2022, 2023 Humanitarian OpenStreetMap Team -# -# This file is part of FMTM. -# -# FMTM is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# FMTM is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with FMTM. If not, see . -# diff --git a/src/backend/app/submission/__init__.py b/src/backend/app/submissions/__init__.py similarity index 100% rename from src/backend/app/submission/__init__.py rename to src/backend/app/submissions/__init__.py diff --git a/src/backend/app/submission/submission_crud.py b/src/backend/app/submissions/submission_crud.py similarity index 75% rename from src/backend/app/submission/submission_crud.py rename to src/backend/app/submissions/submission_crud.py index 46ec181fb0..327ca7729d 100644 --- a/src/backend/app/submission/submission_crud.py +++ b/src/backend/app/submissions/submission_crud.py @@ -15,6 +15,8 @@ # You should have received a copy of the GNU General Public License # along with FMTM. If not, see . # +"""Functions for task submissions.""" + import concurrent.futures import csv import io @@ -22,22 +24,21 @@ import os import threading import uuid -from asyncio import gather from collections import Counter from datetime import datetime, timedelta from io import BytesIO -from pathlib import Path import sozipfile.sozipfile as zipfile from asgiref.sync import async_to_sync from fastapi import HTTPException, Response from fastapi.responses import FileResponse from loguru import logger as log -from osm_fieldwork.json2osm import JsonDump +from osm_fieldwork.json2osm import json2osm from sqlalchemy.orm import Session from app.central.central_crud import get_odk_form, get_odk_project, list_odk_xforms from app.config import settings +from app.db import db_models from app.projects import project_crud, project_schemas from app.s3 import add_obj_to_bucket, get_obj_from_bucket from app.tasks import tasks_crud @@ -45,8 +46,9 @@ def get_submission_of_project(db: Session, project_id: int, task_id: int = None): """Gets the submission of project. - This function takes project_id and task_id as a parameter. 
- If task_id is provided, it returns all the submission made to that particular task, else all the submission made in the projects are returned. + + If task_id is provided, it submissions for a specific task, + else all the submission made for a project are returned. """ get_project_sync = async_to_sync(project_crud.get_project) project_info = get_project_sync(db, project_id) @@ -106,156 +108,19 @@ def get_submission_of_project(db: Session, project_id: int, task_id: int = None) return submission_list -async def get_forms_of_project(db: Session, project_id: int): - project_info = await project_crud.get_project_by_id(db, project_id) - - # Return empty list if project is not found - if not project_info: - return [] - - odkid = project_info.odkid - project = get_odk_project() - - result = project.listForms(odkid) - return result - - -async def list_app_users_or_project(db: Session, project_id: int): - project_info = await project_crud.get_project_by_id(db, project_id) - - # Return empty list if project is not found - if not project_info: - return [] - - odkid = project_info.odkid - project = get_odk_project() - result = project.listAppUsers(odkid) - return result - - -# async def convert_json_to_osm_xml(file_path): - -# jsonin = JsonDump() -# infile = Path(file_path) - -# base = os.path.splitext(infile.name)[0] - -# osmoutfile = f"/tmp/{base}.osm" -# jsonin.createOSM(osmoutfile) - -# data = jsonin.parse(infile.as_posix()) - -# for entry in data: -# feature = jsonin.createEntry(entry) -# # Sometimes bad entries, usually from debugging XForm design, sneak in -# if len(feature) == 0: -# continue -# if len(feature) > 0: -# if "lat" not in feature["attrs"]: -# if 'geometry' in feature['tags']: -# if type(feature['tags']['geometry']) == str: -# coords = list(feature['tags']['geometry']) -# else: -# coords = feature['tags']['geometry']['coordinates'] -# feature['attrs'] = {'lat': coords[1], 'lon': coords[0]} -# else: -# log.warning("Bad record! %r" % feature) -# continue -# jsonin.writeOSM(feature) - -# jsonin.finishOSM() -# log.info("Wrote OSM XML file: %r" % osmoutfile) -# return osmoutfile - - -async def convert_json_to_osm_xml(file_path): - # TODO refactor to simply use json2osm(file_path) - jsonin = JsonDump() - infile = Path(file_path) - - base = os.path.splitext(infile.name)[0] - - osmoutfile = f"/tmp/{base}.osm" - jsonin.createOSM(osmoutfile) - - data = jsonin.parse(infile.as_posix()) - - async def process_entry_async(entry): - feature = jsonin.createEntry(entry) - if len(feature) == 0: - return None - if len(feature) > 0: - if "lat" not in feature["attrs"]: - if "geometry" in feature["tags"]: - if type(feature["tags"]["geometry"]) == str: - coords = list(feature["tags"]["geometry"]) - else: - coords = feature["tags"]["geometry"]["coordinates"] - feature["attrs"] = {"lat": coords[1], "lon": coords[0]} - else: - log.warning("Bad record! 
%r" % feature) - return None - return feature - - async def write_osm_async(features): - for feature in features: - if feature: - jsonin.writeOSM(feature) - jsonin.finishOSM() - log.info("Wrote OSM XML file: %r" % osmoutfile) - return osmoutfile - - data_processing_tasks = [process_entry_async(entry) for entry in data] - processed_features = await gather(*data_processing_tasks) - await write_osm_async(processed_features) - - return osmoutfile - - async def convert_json_to_osm(file_path): - # TODO refactor to simply use json2osm(file_path) - jsonin = JsonDump() - infile = Path(file_path) - - base = os.path.splitext(infile.name)[0] - - osmoutfile = f"/tmp/{base}.osm" - jsonin.createOSM(osmoutfile) - - jsonoutfile = f"/tmp/{base}.geojson" - jsonin.createGeoJson(jsonoutfile) - - data = jsonin.parse(infile.as_posix()) - - for entry in data: - feature = jsonin.createEntry(entry) - # Sometimes bad entries, usually from debugging XForm design, sneak in - if len(feature) == 0: - continue - if len(feature) > 0: - if "lat" not in feature["attrs"]: - if "geometry" in feature["tags"]: - if type(feature["tags"]["geometry"]) == str: - coords = list(feature["tags"]["geometry"]) - # del feature['tags']['geometry'] - else: - coords = feature["tags"]["geometry"]["coordinates"] - # del feature['tags']['geometry'] - feature["attrs"] = {"lat": coords[1], "lon": coords[0]} - else: - log.warning("Bad record! %r" % feature) - continue - jsonin.writeOSM(feature) - jsonin.writeGeoJson(feature) + """Wrapper for osm-fieldwork json2osm. - jsonin.finishOSM() - jsonin.finishGeoJson() - log.info("Wrote OSM XML file: %r" % osmoutfile) - log.info("Wrote GeoJson file: %r" % jsonoutfile) - return osmoutfile, jsonoutfile + FIXME add json output to osm2json (in addition to default OSM XML output) + """ + # TODO check speed of json2osm + # TODO if slow response, use run_in_threadpool + osm_xml_path = json2osm(file_path) + return osm_xml_path async def convert_to_osm_for_task(odk_id: int, form_id: int, xform: any): + """Convert JSON --> OSM XML for a specific XForm/Task.""" # This file stores the submission data. 
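# --- Editor's note: illustrative sketch, not part of the patch ----------------
# Refers to convert_json_to_osm() defined just above: json2osm() runs
# synchronously, and the TODOs there suggest run_in_threadpool if it proves
# slow. Assuming profiling does show blocking, the wrapper could offload the
# call roughly like this (run_in_threadpool is already used elsewhere in this
# PR; treat this as a sketch, not a confirmed refactor):
#
#     from fastapi.concurrency import run_in_threadpool
#
#     async def convert_json_to_osm(file_path):
#         """Wrapper for osm-fieldwork json2osm, run in a worker thread."""
#         return await run_in_threadpool(json2osm, file_path)
# -------------------------------------------------------------------------------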
file_path = f"/tmp/{odk_id}_{form_id}.json" @@ -268,12 +133,12 @@ async def convert_to_osm_for_task(odk_id: int, form_id: int, xform: any): with open(file_path, "wb") as f: f.write(file) - convert_json_to_osm_sync = async_to_sync(convert_json_to_osm) - osmoutfile, jsonoutfile = convert_json_to_osm_sync(file_path) - return osmoutfile, jsonoutfile + osmoutfile = await convert_json_to_osm(file_path) + return osmoutfile def convert_to_osm(db: Session, project_id: int, task_id: int): + """Convert submissions to OSM XML format.""" get_project_sync = async_to_sync(project_crud.get_project) project_info = get_project_sync(db, project_id) @@ -321,9 +186,10 @@ def convert_to_osm(db: Session, project_id: int, task_id: int): # Convert the submission to osm xml format convert_json_to_osm_sync = async_to_sync(convert_json_to_osm) - osmoutfile, jsonoutfile = convert_json_to_osm_sync(jsoninfile) + osmoutfile = convert_json_to_osm_sync(jsoninfile) - if osmoutfile and jsonoutfile: + # if osmoutfile and jsonoutfile: + if osmoutfile: # FIXME: Need to fix this when generating osm file # Remove the extra closing tag from the end of the file @@ -344,7 +210,7 @@ def convert_to_osm(db: Session, project_id: int, task_id: int): # Add the files to the ZIP file with zipfile.ZipFile(final_zip_file_path, mode="a") as final_zip_file: final_zip_file.write(osmoutfile) - final_zip_file.write(jsonoutfile) + # final_zip_file.write(jsonoutfile) return FileResponse(final_zip_file_path) @@ -380,7 +246,8 @@ def gather_all_submission_csvs(db, project_id): def download_submission_for_task(task_id): log.info( - f"Thread {threading.current_thread().name} - Downloading submission for Task ID {task_id}" + f"Thread {threading.current_thread().name} - " + f"Downloading submission for Task ID {task_id}" ) xml_form_id = f"{project_name}_{form_category}_{task_id}".split("_")[2] file = xform.getSubmissionMedia(odkid, xml_form_id) @@ -391,7 +258,8 @@ def download_submission_for_task(task_id): def extract_files(zip_file_path): log.info( - f"Thread {threading.current_thread().name} - Extracting files from {zip_file_path}" + f"Thread {threading.current_thread().name} - " + f"Extracting files from {zip_file_path}" ) with zipfile.ZipFile(zip_file_path, "r") as zip_file: extract_dir = os.path.splitext(zip_file_path)[0] @@ -414,11 +282,14 @@ def extract_files(zip_file_path): file_path = future.result() files.append(file_path) log.info( - f"Thread {threading.current_thread().name} - Task {task_id} - Download completed." + f"Thread {threading.current_thread().name} -" + f" Task {task_id} - Download completed." 
) except Exception as e: log.error( - f"Thread {threading.current_thread().name} - Error occurred while downloading submission for task {task_id}: {e}" + f"Thread {threading.current_thread().name} -" + f" Error occurred while downloading submission for task " + f"{task_id}: {e}" ) # Extract files using thread pool @@ -431,11 +302,13 @@ def extract_files(zip_file_path): try: extracted_files.extend(future.result()) log.info( - f"Thread {threading.current_thread().name} - Extracted files from {file_path}" + f"Thread {threading.current_thread().name} -" + f" Extracted files from {file_path}" ) except Exception as e: log.error( - f"Thread {threading.current_thread().name} - Error occurred while extracting files from {file_path}: {e}" + f"Thread {threading.current_thread().name} -" + f" Error occurred while extracting files from {file_path}: {e}" ) # Create a new ZIP file for the extracted files @@ -450,6 +323,7 @@ def extract_files(zip_file_path): def update_submission_in_s3( db: Session, project_id: int, background_task_id: uuid.UUID ): + """Update or create new submission JSON in S3 for a project.""" try: # Get Project get_project_sync = async_to_sync(project_crud.get_project) @@ -567,49 +441,52 @@ def get_all_submissions_json(db: Session, project_id): return submissions -def get_project_submission(db: Session, project_id: int): - get_project_sync = async_to_sync(project_crud.get_project) - project_info = get_project_sync(db, project_id) +# TODO delete me +# def get_project_submission(db: Session, project_id: int): +# """Get.""" +# get_project_sync = async_to_sync(project_crud.get_project) +# project_info = get_project_sync(db, project_id) - # Return empty list if project is not found - if not project_info: - raise HTTPException(status_code=404, detail="Project not found") +# # Return empty list if project is not found +# if not project_info: +# raise HTTPException(status_code=404, detail="Project not found") - odkid = project_info.odkid - project_name = project_info.project_name_prefix - form_category = project_info.xform_title - project_tasks = project_info.tasks +# odkid = project_info.odkid +# project_name = project_info.project_name_prefix +# form_category = project_info.xform_title +# project_tasks = project_info.tasks - # ODK Credentials - odk_credentials = project_schemas.ODKCentral( - odk_central_url=project_info.odk_central_url, - odk_central_user=project_info.odk_central_user, - odk_central_password=project_info.odk_central_password, - ) +# # ODK Credentials +# odk_credentials = project_schemas.ODKCentral( +# odk_central_url=project_info.odk_central_url, +# odk_central_user=project_info.odk_central_user, +# odk_central_password=project_info.odk_central_password, +# ) - # Get ODK Form with odk credentials from the project. - xform = get_odk_form(odk_credentials) +# # Get ODK Form with odk credentials from the project. 
+# xform = get_odk_form(odk_credentials) - submissions = [] +# submissions = [] - task_list = [x.id for x in project_tasks] - for id in task_list: - xml_form_id = f"{project_name}_{form_category}_{id}".split("_")[2] - file = xform.getSubmissions(odkid, xml_form_id, None, False, True) - if not file: - json_data = None - else: - json_data = json.loads(file) - json_data_value = json_data.get("value") - if json_data_value: - submissions.extend(json_data_value) +# task_list = [x.id for x in project_tasks] +# for id in task_list: +# xml_form_id = f"{project_name}_{form_category}_{id}".split("_")[2] +# file = xform.getSubmissions(odkid, xml_form_id, None, False, True) +# if not file: +# json_data = None +# else: +# json_data = json.loads(file) +# json_data_value = json_data.get("value") +# if json_data_value: +# submissions.extend(json_data_value) - return submissions +# return submissions async def download_submission( db: Session, project_id: int, task_id: int, export_json: bool ): + """Download submission data from ODK Central and aggregate.""" project_info = await project_crud.get_project(db, project_id) # Return empty list if project is not found @@ -639,7 +516,8 @@ async def download_submission( task_list = [x.id for x in project_tasks] - # zip_file_path = f"{project_name}_{form_category}_submissions.zip" # Create a new ZIP file for all submissions + # # Create a new ZIP file for all submissions + # zip_file_path = f"{project_name}_{form_category}_submissions.zip" files = [] for id in task_list: @@ -653,16 +531,15 @@ async def download_submission( with open(file_path, "wb") as f: f.write(file.content) - files.append( - file_path - ) # Add the output file path to the list of files for the final ZIP file + # Add the output file path to the list of files for the final ZIP file + files.append(file_path) extracted_files = [] for file_path in files: with zipfile.ZipFile(file_path, "r") as zip_file: - zip_file.extractall( - os.path.splitext(file_path)[0] - ) # Extract the contents of the nested ZIP files to a directory with the same name as the ZIP file + # Extract the contents of the nested ZIP files to a directory + # with the same name as the ZIP file + zip_file.extractall(os.path.splitext(file_path)[0]) extracted_files += [ os.path.join(os.path.splitext(file_path)[0], f) for f in zip_file.namelist() @@ -717,9 +594,9 @@ async def download_submission( async def get_submission_points(db: Session, project_id: int, task_id: int = None): """Gets the submission points of project. - This function takes project_id and task_id as a parameter. - If task_id is provided, it returns all the submission points made to that particular task, - else all the submission points made in the projects are returned. + + If task_id is provided, it return point specific to a task, + else the entire project. 
""" project_info = await project_crud.get_project_by_id(db, project_id) @@ -760,8 +637,10 @@ async def get_submission_points(db: Session, project_id: int, task_id: int = Non csv_reader = csv.DictReader(io.TextIOWrapper(csv_file)) geometry = [] for row in csv_reader: - # Check if the row contains the 'warmup-Latitude' and 'warmup-Longitude' columns - # FIXME: fix the column names (they might not be same warmup-Latitude and warmup-Longitude) + # Check if the row contains the 'warmup-Latitude' and + # 'warmup-Longitude' columns + # FIXME: fix the column names (they might not be same + # warmup-Latitude and warmup-Longitude) if "warmup-Latitude" in row and "warmup-Longitude" in row: point = (row["warmup-Latitude"], row["warmup-Longitude"]) @@ -782,6 +661,7 @@ async def get_submission_points(db: Session, project_id: int, task_id: int = Non async def get_submission_count_of_a_project(db: Session, project_id: int): + """Return the total number of submissions made for a project.""" project_info = await project_crud.get_project(db, project_id) # Return empty list if project is not found @@ -831,6 +711,7 @@ async def get_submissions_by_date( db (Session): The database session. project_id (int): The ID of the project. days (int): The number of days to consider for fetching submissions. + planned_task (int): Associated task id. Returns: dict: A dictionary containing the submission counts for each date. @@ -882,3 +763,70 @@ async def get_submissions_by_date( ] return response + + +async def get_submission_by_project(project_id: int, skip: 0, limit: 100, db: Session): + """Get submission by project. + + Retrieves a paginated list of submissions for a given project. + + Args: + project_id (int): The ID of the project. + skip (int): The number of submissions to skip. + limit (int): The maximum number of submissions to retrieve. + db (Session): The database session. + + Returns: + Tuple[int, List]: A tuple containing the total number of submissions and + the paginated list of submissions. + + Raises: + ValueError: If the submission file cannot be found. + + """ + project = await project_crud.get_project(db, project_id) + s3_project_path = f"/{project.organisation_id}/{project_id}" + s3_submission_path = f"/{s3_project_path}/submission.zip" + + try: + file = get_obj_from_bucket(settings.S3_BUCKET_NAME, s3_submission_path) + except ValueError: + return 0, [] + + with zipfile.ZipFile(file, "r") as zip_ref: + with zip_ref.open("submissions.json") as file_in_zip: + content = file_in_zip.read() + + content = json.loads(content) + start_index = skip + end_index = skip + limit + paginated_content = content[start_index:end_index] + return len(content), paginated_content + + +async def get_submission_by_task( + project: db_models.DbProject, task_id: int, filters: dict, db: Session +): + """Get submissions and count by task. + + Args: + project: The project instance. + task_id: The ID of the task. + filters: A dictionary of filters. + db: The database session. + + Returns: + Tuple: A tuple containing the list of submissions and the count. 
+ """ + odk_credentials = project_schemas.ODKCentral( + odk_central_url=project.odk_central_url, + odk_central_user=project.odk_central_user, + odk_central_password=project.odk_central_password, + ) + + xform = get_odk_form(odk_credentials) + data = xform.listSubmissions(project.odkid, task_id, filters) + submissions = data.get("value", []) + count = data.get("@odata.count", 0) + + return submissions, count diff --git a/src/backend/app/submission/submission_routes.py b/src/backend/app/submissions/submission_routes.py similarity index 64% rename from src/backend/app/submission/submission_routes.py rename to src/backend/app/submissions/submission_routes.py index 2046b62eb3..c7ad5a7f55 100644 --- a/src/backend/app/submission/submission_routes.py +++ b/src/backend/app/submissions/submission_routes.py @@ -15,11 +15,13 @@ # You should have received a copy of the GNU General Public License # along with FMTM. If not, see . # +"""Routes associated with data submission to and from ODK Central.""" + import json import os from typing import Optional -from fastapi import APIRouter, BackgroundTasks, Depends, Response +from fastapi import APIRouter, BackgroundTasks, Depends, Query, Response from fastapi.concurrency import run_in_threadpool from fastapi.responses import FileResponse, JSONResponse from osm_fieldwork.odk_merge import OdkMerge @@ -28,15 +30,14 @@ from app.central import central_crud from app.config import settings -from app.db import database -from app.projects import project_crud, project_schemas -from app.submission import submission_crud +from app.db import database, db_models +from app.projects import project_crud, project_deps, project_schemas +from app.submissions import submission_crud, submission_schemas from app.tasks import tasks_crud router = APIRouter( prefix="/submission", tags=["submission"], - dependencies=[Depends(database.get_db)], responses={404: {"description": "Not found"}}, ) @@ -46,52 +47,21 @@ async def read_submissions( project_id: int, task_id: int = None, db: Session = Depends(database.get_db), -): - """This api returns the submission made in the project. - It takes two parameters: project_id and task_id. - +) -> list[dict]: + """Get all submissions made for a project. - project_id: The ID of the project. This endpoint returns the submission made in this project. - - task_id: The ID of the task. This parameter is optional. If task_id is provided, this endpoint returns the submissions made for this task. + Args: + project_id (int): The ID of the project. + task_id (int, optional): The ID of the task. + If provided, returns the submissions made for a specific task only. + db (Session): The database session, automatically provided. - Returns the list of submissions. + Returns: + list[dict]: The list of submissions. """ return submission_crud.get_submission_of_project(db, project_id, task_id) -@router.get("/list-forms") -async def list_forms( - project_id: int, - db: Session = Depends(database.get_db), -): - """This api returns the list of forms in the odk central. - - It takes one parameter: project_id. - - project_id: The ID of the project. This endpoint returns the list of forms in this project. - - Returns the list of forms details provided by the central api. - """ - return await submission_crud.get_forms_of_project(db, project_id) - - -@router.get("/list-app-users") -async def list_app_users( - project_id: int, - db: Session = Depends(database.get_db), -): - """This api returns the list of forms in the odk central. - - It takes one parameter: project_id. 
- - project_id: The ID of the project. This endpoint returns the list of forms in this project. - - Returns the list of forms details provided by the central api. - """ - return await submission_crud.list_app_users_or_project(db, project_id) - - @router.get("/download") async def download_submission( project_id: int, @@ -99,13 +69,19 @@ async def download_submission( export_json: bool = True, db: Session = Depends(database.get_db), ): - """This api downloads the the submission made in the project. - It takes two parameters: project_id and task_id. + """Download the submissions for a given project. - project_id: The ID of the project. This endpoint returns the submission made in this project. + Returned as either a JSONResponse, or a file to download. - task_id: The ID of the task. This parameter is optional. If task_id is provided, this endpoint returns the submissions made for this task. + Args: + project_id (int): The ID of the project. + task_id (int, optional): The ID of the task. + If provided, returns the submissions made for a specific task only. + export_json (bool): Export in JSON format, else returns a file. + db (Session): The database session, automatically provided. + Returns: + Union[list[dict], File]: JSON of submissions, or submission file. """ if not (task_id or export_json): file = submission_crud.gather_all_submission_csvs(db, project_id) @@ -122,11 +98,16 @@ async def submission_points( task_id: int = None, db: Session = Depends(database.get_db), ): - """This api returns the submission points of a project. - It takes two parameter: project_id and task_id. + """Get submission points for a given project. + + Args: + project_id (int): The ID of the project. + task_id (int, optional): The ID of the task. + If provided, returns the submissions made for a specific task only. + db (Session): The database session, automatically provided. - project_id: The ID of the project. This endpoint returns the submission points of this project. - task_id: The task_id of the project. This endpoint returns the submission points of this task. + Returns: + File: a zip containing submission points. """ return await submission_crud.get_submission_points(db, project_id, task_id) @@ -136,14 +117,17 @@ async def convert_to_osm( project_id: int, task_id: int = None, db: Session = Depends(database.get_db), -): - """This api converts the submission to osm format. - It takes two parameter: project_id and task_id. +) -> str: + """Convert JSON submissions to OSM XML for a project. - task_id is optional. - If task_id is provided, this endpoint converts the submission of this task. - If task_id is not provided, this endpoint converts the submission of the whole project. + Args: + project_id (int): The ID of the project. + task_id (int, optional): The ID of the task. + If provided, returns the submissions made for a specific task only. + db (Session): The database session, automatically provided. + Returns: + File: an OSM XML of submissions. 
""" # NOTE runs in separate thread using run_in_threadpool converted = await run_in_threadpool( @@ -157,6 +141,7 @@ async def get_submission_count( project_id: int, db: Session = Depends(database.get_db), ): + """Get the submission count for a project.""" return await submission_crud.get_submission_count_of_a_project(db, project_id) @@ -165,6 +150,7 @@ async def conflate_osm_data( project_id: int, db: Session = Depends(database.get_db), ): + """Conflate submission data against existing OSM data.""" # All Submissions JSON # NOTE runs in separate thread using run_in_threadpool submission = await run_in_threadpool( @@ -192,7 +178,7 @@ async def conflate_osm_data( f.write(json.dumps(submission)) # Convert the submission to osm xml format - osmoutfile, jsonoutfile = await submission_crud.convert_json_to_osm(jsoninfile) + osmoutfile = await submission_crud.convert_json_to_osm(jsoninfile) # Remove the extra closing tag from the end of the file with open(osmoutfile, "r") as f: @@ -225,6 +211,10 @@ async def download_submission_json( background_task_id: Optional[str] = None, db: Session = Depends(database.get_db), ): + """Download submissions for a project in JSON format. + + TODO check for redundancy with submission/download endpoint and refactor. + """ # Get Project project = await project_crud.get_project(db, project_id) @@ -268,6 +258,10 @@ async def get_osm_xml( project_id: int, db: Session = Depends(database.get_db), ): + """Get the submissions in OSM XML format for a project. + + TODO refactor to put logic in crud for easier testing. + """ # JSON FILE PATH jsoninfile = f"/tmp/{project_id}_json_infile.json" @@ -286,7 +280,7 @@ async def get_osm_xml( f.write(json.dumps(submission)) # Convert the submission to osm xml format - osmoutfile = await submission_crud.convert_json_to_osm_xml(jsoninfile) + osmoutfile = await submission_crud.convert_json_to_osm(jsoninfile) # Remove the extra closing tag from the end of the file with open(osmoutfile, "r") as f: @@ -316,9 +310,17 @@ async def get_submission_page( planned_task: Optional[int] = None, db: Session = Depends(database.get_db), ): - """This api returns the submission page of a project. - It takes one parameter: project_id. - project_id: The ID of the project. This endpoint returns the submission page of this project. + """Summary submissison details for submission page. + + Args: + background_tasks (BackgroundTasks): FastAPI bg tasks, provided automatically. + db (Session): The database session, automatically generated. + project_id (int): The ID of the project. + days (int): The number of days to consider for fetching submissions. + planned_task (int): Associated task id. + + Returns: + dict: A dictionary containing the submission counts for each date. """ data = await submission_crud.get_submissions_by_date( db, project_id, days, planned_task @@ -344,7 +346,7 @@ async def get_submission_form_fields( Args: project_id (int): The ID of the project. - db (Session, optional): The database session. Defaults to Depends(database.get_db). + db (Session): The database session, automatically generated. Returns: Any: The response from the submission form API. 
@@ -354,3 +356,74 @@ async def get_submission_form_fields(
     odk_form = central_crud.get_odk_form(project)
     response = odk_form.form_fields(project.odkid, str(task_list[0]))
     return response
+
+
+@router.get("/submission_table/{project_id}")
+async def submission_table(
+    background_tasks: BackgroundTasks,
+    project_id: int,
+    page: int = Query(1, ge=1),
+    results_per_page: int = Query(13, le=100),
+    db: Session = Depends(database.get_db),
+):
+    """This api returns the paginated submission table of a project.
+
+    It takes three parameters: project_id, page and results_per_page.
+
+    project_id: The ID of the project.
+
+    page / results_per_page: Pagination controls for the submission list.
+    """
+    skip = (page - 1) * results_per_page
+    limit = results_per_page
+    count, data = await submission_crud.get_submission_by_project(
+        project_id, skip, limit, db
+    )
+    background_task_id = await project_crud.insert_background_task_into_database(
+        db, "sync_submission", project_id
+    )
+
+    background_tasks.add_task(
+        submission_crud.update_submission_in_s3, db, project_id, background_task_id
+    )
+    pagination = await project_crud.get_pagination(page, count, results_per_page, count)
+    response = submission_schemas.PaginatedSubmissions(
+        results=data,
+        pagination=submission_schemas.PaginationInfo(**pagination.dict()),
+    )
+    return response
+
+
+@router.get("/task_submissions/{project_id}")
+async def task_submissions(
+    task_id: int,
+    project: db_models.DbProject = Depends(project_deps.get_project_by_id),
+    page: int = Query(1, ge=1),
+    limit: int = Query(13, le=100),
+    db: Session = Depends(database.get_db),
+):
+    """This api returns the paginated submissions made for a task.
+
+    It takes two main parameters: project_id and task_id.
+
+    project_id: The ID of the project.
+
+    task_id: The ID of the task.
+    """
+    skip = (page - 1) * limit
+    filters = {
+        "$top": limit,
+        "$skip": skip,
+        "$count": True,
+        "$wkt": True,
+    }
+
+    data, count = await submission_crud.get_submission_by_task(
+        project, task_id, filters, db
+    )
+    pagination = await project_crud.get_pagination(page, count, limit, count)
+    response = submission_schemas.PaginatedSubmissions(
+        results=data,
+        pagination=submission_schemas.PaginationInfo(**pagination.dict()),
+    )
+    return response
diff --git a/src/backend/app/organization/organization_schemas.py b/src/backend/app/submissions/submission_schemas.py
similarity index 64%
rename from src/backend/app/organization/organization_schemas.py
rename to src/backend/app/submissions/submission_schemas.py
index f1994ba069..05a9f61bdc 100644
--- a/src/backend/app/organization/organization_schemas.py
+++ b/src/backend/app/submissions/submission_schemas.py
@@ -15,13 +15,29 @@
 # You should have received a copy of the GNU General Public License
 # along with FMTM. If not, see <https://www.gnu.org/licenses/>.
# + +"""Pydantic models for data submissions.""" + +from typing import List, Optional + from pydantic import BaseModel -class Organisation(BaseModel): - # id: int - slug: str - name: str - description: str - url: str - # type: int +class PaginationInfo(BaseModel): + """Pagination JSON return.""" + + has_next: bool + has_prev: bool + next_num: Optional[int] + page: int + pages: int + prev_num: Optional[int] + per_page: int + total: int + + +class PaginatedSubmissions(BaseModel): + """Paginated Submissions.""" + + results: List + pagination: PaginationInfo diff --git a/src/backend/app/tasks/tasks_crud.py b/src/backend/app/tasks/tasks_crud.py index 2d12c231ca..3345389235 100644 --- a/src/backend/app/tasks/tasks_crud.py +++ b/src/backend/app/tasks/tasks_crud.py @@ -15,8 +15,11 @@ # You should have received a copy of the GNU General Public License # along with FMTM. If not, see . # +"""Logic for FMTM tasks.""" + import base64 -from typing import List +from datetime import datetime, timedelta +from typing import List, Optional from fastapi import Depends, HTTPException from geoalchemy2.shape import from_shape @@ -40,6 +43,7 @@ async def get_task_count_in_project(db: Session, project_id: int): + """Get task count for a project.""" query = text(f"""select count(*) from tasks where project_id = {project_id}""") result = db.execute(query) return result.fetchone()[0] @@ -66,6 +70,7 @@ async def get_task_id_list(db: Session, project_id: int) -> list[int]: async def get_tasks( db: Session, project_id: int, user_id: int, skip: int = 0, limit: int = 1000 ): + """Get task details for a project.""" if project_id: db_tasks = ( db.query(db_models.DbTask) @@ -88,12 +93,15 @@ async def get_tasks( async def get_task(db: Session, task_id: int): + """Get details for a specific task ID.""" + log.debug(f"Getting task with ID '{task_id}' from database") return db.query(db_models.DbTask).filter(db_models.DbTask.id == task_id).first() async def update_task_status( db: Session, user_id: int, task_id: int, new_status: TaskStatus ): + """Update the status of a task.""" log.debug(f"Updating task ID {task_id} to status {new_status}") if not user_id: log.error(f"User id is not present: {user_id}") @@ -121,7 +129,7 @@ async def update_task_status( ) log.error(msg) raise HTTPException( - status_code=401, + status_code=403, detail=msg, ) @@ -163,7 +171,10 @@ async def update_task_status( else: raise HTTPException( status_code=400, - detail=f"Not a valid status update: {db_task.task_status.name} to {new_status.name}", + detail=( + f"Not a valid status update: " + f"{db_task.task_status.name} to {new_status.name}" + ), ) @@ -175,7 +186,11 @@ async def update_task_status( async def create_task_history_for_status_change( db_task: db_models.DbTask, new_status: TaskStatus, db_user: db_models.DbUser ): - msg = f"Status changed from {db_task.task_status.name} to {new_status.name} by: {db_user.username}" + """Append task status change to task history.""" + msg = ( + f"Status changed from {db_task.task_status.name} " + f"to {new_status.name} by: {db_user.username}" + ) log.info(msg) new_task_history = db_models.DbTaskHistory( @@ -211,6 +226,7 @@ async def get_qr_codes_for_task( db: Session, task_id: int, ): + """Get the ODK Collect QR code for a task area.""" task = await get_task(db=db, task_id=task_id) if task: if task.qr_code: @@ -224,12 +240,6 @@ async def get_qr_codes_for_task( raise HTTPException(status_code=400, detail="Task does not exist") -async def get_task_by_id(db: Session, task_id: int): - task = 
db.query(db_models.DbTask).filter(db_models.DbTask.id == task_id).first() - print("Task ", task) - return task - - async def update_task_files( db: Session, project_id: int, @@ -239,6 +249,7 @@ async def update_task_files( category: str, task_boundary: str, ): + """Update associated files for a task.""" # This file will store osm extracts task_polygons = f"/tmp/{project_name}_{category}_{task_id}.geojson" @@ -268,7 +279,8 @@ async def update_task_files( # Collect feature mappings for bulk insert for feature in outline_geojson["features"]: - # If the osm extracts contents do not have a title, provide an empty text for that. + # If the osm extracts contents do not have a title, + # provide an empty text for that feature["properties"]["title"] = "" feature_shape = shape(feature["geometry"]) @@ -284,7 +296,8 @@ async def update_task_files( db.add(db_feature) db.commit() - # Update task_polygons file containing osm extracts with the new geojson contents containing title in the properties. + # Update task_polygons file containing osm extracts with the new + # geojson contents containing title in the properties. with open(task_polygons, "w") as jsonfile: jsonfile.truncate(0) # clear the contents of the file dump(updated_outline_geojson, jsonfile) @@ -300,7 +313,7 @@ async def edit_task_boundary(db: Session, task_id: int, boundary: str): geometry = boundary["features"][0]["geometry"] outline = shape(geometry) - task = await get_task_by_id(db, task_id) + task = await get_task(db, task_id) if not task: raise HTTPException(status_code=404, detail="Task not found") @@ -324,6 +337,8 @@ async def edit_task_boundary(db: Session, task_id: int, boundary: str): async def update_task_history( tasks: List[tasks_schemas.TaskBase], db: Session = Depends(database.get_db) ): + """Update task history with username and user profile image.""" + def process_history_entry(history_entry): status = history_entry.action_text.split() history_entry.status = status[5] @@ -343,3 +358,77 @@ def process_history_entry(history_entry): process_history_entry(history_entry) return tasks + + +def get_task_history( + project_id: int, + end_date: Optional[datetime], + db: Session, +) -> list[db_models.DbTaskHistory]: + """Retrieves the task history records for a specific project. + + Args: + project_id: The ID of the project. + end_date: The end date of the task history + records to retrieve (optional). + db: The database session. + + Returns: + A list of task history records for the specified project. + """ + query = db.query(db_models.DbTaskHistory).filter( + db_models.DbTaskHistory.project_id == project_id + ) + + if end_date: + query = query.filter(db_models.DbTaskHistory.action_date >= end_date) + + return query.all() + + +async def count_validated_and_mapped_tasks( + task_history: list[db_models.DbTaskHistory], end_date: datetime +) -> list[tasks_schemas.TaskHistoryCount]: + """Counts the number of validated and mapped tasks. + + Args: + task_history: The task history records to count. + end_date: The end date of the date range. + + Returns: + A list of dictionaries with following keys: + - 'date': The date in the format 'MM/DD'. + - 'validated': The cumulative count of validated tasks. + - 'mapped': The cumulative count of mapped tasks. 
+ """ + cumulative_counts = {} + results = [] + + current_date = end_date + while current_date <= datetime.now(): + date_str = current_date.strftime("%m/%d") + cumulative_counts = {"date": date_str, "validated": 0, "mapped": 0} + results.append(cumulative_counts) + current_date += timedelta(days=1) + + # Populate cumulative_counts with counts from task_history + for result in task_history: + task_status = result.action_text.split()[5] + date_str = result.action_date.strftime("%m/%d") + entry = next((entry for entry in results if entry["date"] == date_str), None) + + if entry: + if task_status == "VALIDATED": + entry["validated"] += 1 + elif task_status == "MAPPED": + entry["mapped"] += 1 + + total_validated = 0 + total_mapped = 0 + + for entry in results: + total_validated += entry["validated"] + total_mapped += entry["mapped"] + entry.update({"validated": total_validated, "mapped": total_mapped}) + + return results diff --git a/src/backend/app/tasks/tasks_routes.py b/src/backend/app/tasks/tasks_routes.py index 6df7163a47..bf85659b4c 100644 --- a/src/backend/app/tasks/tasks_routes.py +++ b/src/backend/app/tasks/tasks_routes.py @@ -15,8 +15,10 @@ # You should have received a copy of the GNU General Public License # along with FMTM. If not, see . # +"""Routes for FMTM tasks.""" import json +from datetime import datetime, timedelta from typing import List from fastapi import APIRouter, Depends, File, HTTPException, UploadFile @@ -27,14 +29,12 @@ from app.db import database from app.models.enums import TaskStatus from app.projects import project_crud, project_schemas +from app.tasks import tasks_crud, tasks_schemas from app.users import user_schemas -from . import tasks_crud, tasks_schemas - router = APIRouter( prefix="/tasks", tags=["tasks"], - dependencies=[Depends(database.get_db)], responses={404: {"description": "Not found"}}, ) @@ -45,6 +45,7 @@ async def read_task_list( limit: int = 1000, db: Session = Depends(database.get_db), ): + """Get the task list for a project.""" tasks = await tasks_crud.get_tasks(db, project_id, limit) updated_tasks = await tasks_crud.update_task_history(tasks, db) if not tasks: @@ -60,6 +61,7 @@ async def read_tasks( limit: int = 1000, db: Session = Depends(database.get_db), ): + """Get all task details, either for a project or user.""" if user_id: raise HTTPException( status_code=300, @@ -80,11 +82,14 @@ async def get_point_on_surface(project_id: int, db: Session = Depends(database.g project_id (int): The ID of the project. Returns: - List[Tuple[int, str]]: A list of tuples containing the task ID and the centroid as a string. + List[Tuple[int, str]]: A list of tuples containing the task ID + and the centroid as a string. 
""" query = text( f""" - SELECT id, ARRAY_AGG(ARRAY[ST_X(ST_PointOnSurface(outline)), ST_Y(ST_PointOnSurface(outline))]) AS point + SELECT id, + ARRAY_AGG(ARRAY[ST_X(ST_PointOnSurface(outline)), + ST_Y(ST_PointOnSurface(outline))]) AS point FROM tasks WHERE project_id = {project_id} GROUP BY id; """ @@ -104,7 +109,8 @@ async def get_tasks_near_me( @router.get("/{task_id}", response_model=tasks_schemas.Task) -async def read_tasks(task_id: int, db: Session = Depends(database.get_db)): +async def get_specific_task(task_id: int, db: Session = Depends(database.get_db)): + """Get a specific task by it's ID.""" task = await tasks_crud.get_task(db, task_id) if not task: raise HTTPException(status_code=404, detail="Task not found") @@ -120,7 +126,7 @@ async def update_task_status( new_status: TaskStatus, db: Session = Depends(database.get_db), ): - # TODO verify logged in user + """Update the task status.""" user_id = user.id task = await tasks_crud.update_task_status(db, user_id, task_id, new_status) @@ -135,6 +141,7 @@ async def get_qr_code_list( task_id: int, db: Session = Depends(database.get_db), ): + """Get the associated ODK Collect QR code for a task.""" return await tasks_crud.get_qr_codes_for_task(db=db, task_id=task_id) @@ -144,6 +151,7 @@ async def edit_task_boundary( boundary: UploadFile = File(...), db: Session = Depends(database.get_db), ): + """Update the task boundary manually.""" # read entire file content = await boundary.read() boundary_json = json.loads(content) @@ -158,6 +166,7 @@ async def task_features_count( project_id: int, db: Session = Depends(database.get_db), ): + """Get all features within a task area.""" # Get the project object. project = await project_crud.get_project(db, project_id) @@ -175,7 +184,8 @@ async def task_features_count( for x in odk_details: feature_count_query = text( f""" - select count(*) from features where project_id = {project_id} and task_id = {x['xmlFormId']} + select count(*) from features + where project_id = {project_id} and task_id = {x['xmlFormId']} """ ) @@ -192,3 +202,28 @@ async def task_features_count( ) return data + + +@router.get("/activity/", response_model=List[tasks_schemas.TaskHistoryCount]) +async def task_activity( + project_id: int, days: int = 10, db: Session = Depends(database.get_db) +): + """Retrieves the validate and mapped task count for a specific project. + + Args: + project_id: The ID of the project. + days: The number of days to consider for the + task activity (default: 10). + db: The database session. + + Returns: + list[TaskHistoryCount]: A list of task history counts. 
+ + """ + end_date = datetime.now() - timedelta(days=days) + task_history = tasks_crud.get_task_history(project_id, end_date, db) + + return await tasks_crud.count_validated_and_mapped_tasks( + task_history, + end_date, + ) diff --git a/src/backend/app/tasks/tasks_schemas.py b/src/backend/app/tasks/tasks_schemas.py index 7be5029ba8..2899e490aa 100644 --- a/src/backend/app/tasks/tasks_schemas.py +++ b/src/backend/app/tasks/tasks_schemas.py @@ -48,6 +48,14 @@ class TaskHistoryOut(TaskHistoryBase): profile_img: Optional[str] +class TaskHistoryCount(BaseModel): + """Task mapping history display.""" + + date: str + validated: int + mapped: int + + class TaskBase(BaseModel): """Core fields for a Task.""" diff --git a/src/backend/app/users/user_crud.py b/src/backend/app/users/user_crud.py index 20d5a420b3..95790f536e 100644 --- a/src/backend/app/users/user_crud.py +++ b/src/backend/app/users/user_crud.py @@ -20,8 +20,8 @@ from sqlalchemy.orm import Session -from ..db import db_models -from . import user_schemas +from app.db import db_models +from app.users import user_schemas # -------------- # ---- CRUD ---- @@ -62,7 +62,6 @@ async def create_user_roles(user_role: user_schemas.UserRoles, db: Session): db_user_role = db_models.DbUserRoles( user_id=user_role.user_id, role=user_role.role, - organization_id=user_role.organization_id, project_id=user_role.project_id, ) diff --git a/src/backend/app/users/user_deps.py b/src/backend/app/users/user_deps.py new file mode 100644 index 0000000000..3bf84e4363 --- /dev/null +++ b/src/backend/app/users/user_deps.py @@ -0,0 +1,70 @@ +# Copyright (c) 2022, 2023 Humanitarian OpenStreetMap Team +# +# This file is part of FMTM. +# +# FMTM is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# FMTM is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with FMTM. If not, see . +# + +"""User dependencies for use in Depends.""" + + +from typing import Union + +from fastapi import Depends +from fastapi.exceptions import HTTPException +from loguru import logger as log +from sqlalchemy.orm import Session + +from app.db.database import get_db +from app.db.db_models import DbUser +from app.models.enums import HTTPStatus +from app.users.user_crud import get_user, get_user_by_username + + +async def user_exists_in_db( + user_id: Union[str, int], + db: Session = Depends(get_db), +) -> DbUser: + """Check if a user exists, else Error. + + Args: + user_id (Union[str, int]): The user ID (integer) or username (string) to check. + db (Session, optional): The SQLAlchemy database session. + + Returns: + DbUser: The user if found. + + Raises: + HTTPException: Raised with a 404 status code if the user is not found. 
+ """ + try: + user_id = int(user_id) + except ValueError: + pass + + if isinstance(user_id, int): + log.debug(f"Getting user by ID: {user_id}") + db_user = await get_user(db, user_id) + + if isinstance(user_id, str): + log.debug(f"Getting user by username: {user_id}") + db_user = await get_user_by_username(db, user_id) + + if not db_user: + raise HTTPException( + status_code=HTTPStatus.NOT_FOUND, + detail=f"User {user_id} does not exist", + ) + + return db_user diff --git a/src/backend/app/users/user_routes.py b/src/backend/app/users/user_routes.py index 3b6f0d4d15..059b82a90a 100644 --- a/src/backend/app/users/user_routes.py +++ b/src/backend/app/users/user_routes.py @@ -22,14 +22,13 @@ from fastapi import APIRouter, Depends, HTTPException from sqlalchemy.orm import Session -from ..db import database -from ..models.enums import UserRole as UserRoleEnum -from . import user_crud, user_schemas +from app.db import database +from app.models.enums import UserRole as UserRoleEnum +from app.users import user_crud, user_schemas router = APIRouter( prefix="/users", tags=["users"], - dependencies=[Depends(database.get_db)], responses={404: {"description": "Not found"}}, ) @@ -77,48 +76,6 @@ async def get_user_by_identifier(id: str, db: Session = Depends(database.get_db) return user -@router.post("/user-role") -async def create_user_role( - user_role: user_schemas.UserRoles, db: Session = Depends(database.get_db) -): - """Create a new user role. - - # FIXME is this endpoint really necessary? - - The role can be: - - Admin - - Organization Admin - - Field Admin - - Mapper - - Validator - - Read Only - - The request param `user_role` is a json of user_id, organization_id, - project_id, user_role: - user_id (required): ID of the user for whom the role is being created - organization_id (optional): ID of the organization for which the - user is being assigned a role - project_id (optional): ID of the project for which the user is - being assigned a role - user_role (required): Role being assigned to the user - - Response: - Status Code 200 (OK): If the role is successfully created - Status Code 400 (Bad Request): If the user is already assigned a role - """ - existing_user_role = await user_crud.get_user_role_by_user_id( - db, user_id=user_role.user_id - ) - if existing_user_role is not None: - raise HTTPException(status_code=400, detail="User is already assigned a role") - - user = await user_crud.get_user(db, user_id=user_role.user_id) - if user is None: - raise HTTPException(status_code=404, detail="User not found") - - return await user_crud.create_user_roles(user_role, db) - - @router.get("/user-role-options/") async def get_user_roles(): """Check for available user role options.""" diff --git a/src/backend/app/users/user_schemas.py b/src/backend/app/users/user_schemas.py index df16a42c04..f5a2822e9d 100644 --- a/src/backend/app/users/user_schemas.py +++ b/src/backend/app/users/user_schemas.py @@ -53,6 +53,5 @@ class UserRoles(BaseModel): """User details with role, org, and associated project.""" user_id: int - organization_id: Optional[int] = None project_id: Optional[int] = None role: UserRole diff --git a/src/backend/backup-entrypoint.sh b/src/backend/backup-entrypoint.sh index 6275b2bdda..1bbd5862a5 100644 --- a/src/backend/backup-entrypoint.sh +++ b/src/backend/backup-entrypoint.sh @@ -64,7 +64,7 @@ wait_for_db() { local retry_interval=5 for ((i = 0; i < max_retries; i++)); do - if =1.1.2", - "async-timeout<5.0,>=4.0; python_version < \"3.11\"", - "attrs>=17.3.0", - "frozenlist>=1.1.1", - 
"multidict<7.0,>=4.5", - "yarl<2.0,>=1.0", -] -files = [ - {file = "aiohttp-3.9.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e1f80197f8b0b846a8d5cf7b7ec6084493950d0882cc5537fb7b96a69e3c8590"}, - {file = "aiohttp-3.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c72444d17777865734aa1a4d167794c34b63e5883abb90356a0364a28904e6c0"}, - {file = "aiohttp-3.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9b05d5cbe9dafcdc733262c3a99ccf63d2f7ce02543620d2bd8db4d4f7a22f83"}, - {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c4fa235d534b3547184831c624c0b7c1e262cd1de847d95085ec94c16fddcd5"}, - {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:289ba9ae8e88d0ba16062ecf02dd730b34186ea3b1e7489046fc338bdc3361c4"}, - {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bff7e2811814fa2271be95ab6e84c9436d027a0e59665de60edf44e529a42c1f"}, - {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81b77f868814346662c96ab36b875d7814ebf82340d3284a31681085c051320f"}, - {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b9c7426923bb7bd66d409da46c41e3fb40f5caf679da624439b9eba92043fa6"}, - {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8d44e7bf06b0c0a70a20f9100af9fcfd7f6d9d3913e37754c12d424179b4e48f"}, - {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:22698f01ff5653fe66d16ffb7658f582a0ac084d7da1323e39fd9eab326a1f26"}, - {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ca7ca5abfbfe8d39e653870fbe8d7710be7a857f8a8386fc9de1aae2e02ce7e4"}, - {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:8d7f98fde213f74561be1d6d3fa353656197f75d4edfbb3d94c9eb9b0fc47f5d"}, - {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5216b6082c624b55cfe79af5d538e499cd5f5b976820eac31951fb4325974501"}, - {file = "aiohttp-3.9.1-cp310-cp310-win32.whl", hash = "sha256:0e7ba7ff228c0d9a2cd66194e90f2bca6e0abca810b786901a569c0de082f489"}, - {file = "aiohttp-3.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:c7e939f1ae428a86e4abbb9a7c4732bf4706048818dfd979e5e2839ce0159f23"}, - {file = "aiohttp-3.9.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:df9cf74b9bc03d586fc53ba470828d7b77ce51b0582d1d0b5b2fb673c0baa32d"}, - {file = "aiohttp-3.9.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ecca113f19d5e74048c001934045a2b9368d77b0b17691d905af18bd1c21275e"}, - {file = "aiohttp-3.9.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8cef8710fb849d97c533f259103f09bac167a008d7131d7b2b0e3a33269185c0"}, - {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bea94403a21eb94c93386d559bce297381609153e418a3ffc7d6bf772f59cc35"}, - {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91c742ca59045dce7ba76cab6e223e41d2c70d79e82c284a96411f8645e2afff"}, - {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6c93b7c2e52061f0925c3382d5cb8980e40f91c989563d3d32ca280069fd6a87"}, - {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee2527134f95e106cc1653e9ac78846f3a2ec1004cf20ef4e02038035a74544d"}, - {file = 
"aiohttp-3.9.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11ff168d752cb41e8492817e10fb4f85828f6a0142b9726a30c27c35a1835f01"}, - {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b8c3a67eb87394386847d188996920f33b01b32155f0a94f36ca0e0c635bf3e3"}, - {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c7b5d5d64e2a14e35a9240b33b89389e0035e6de8dbb7ffa50d10d8b65c57449"}, - {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:69985d50a2b6f709412d944ffb2e97d0be154ea90600b7a921f95a87d6f108a2"}, - {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:c9110c06eaaac7e1f5562caf481f18ccf8f6fdf4c3323feab28a93d34cc646bd"}, - {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d737e69d193dac7296365a6dcb73bbbf53bb760ab25a3727716bbd42022e8d7a"}, - {file = "aiohttp-3.9.1-cp311-cp311-win32.whl", hash = "sha256:4ee8caa925aebc1e64e98432d78ea8de67b2272252b0a931d2ac3bd876ad5544"}, - {file = "aiohttp-3.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:a34086c5cc285be878622e0a6ab897a986a6e8bf5b67ecb377015f06ed316587"}, - {file = "aiohttp-3.9.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f800164276eec54e0af5c99feb9494c295118fc10a11b997bbb1348ba1a52065"}, - {file = "aiohttp-3.9.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:500f1c59906cd142d452074f3811614be04819a38ae2b3239a48b82649c08821"}, - {file = "aiohttp-3.9.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0b0a6a36ed7e164c6df1e18ee47afbd1990ce47cb428739d6c99aaabfaf1b3af"}, - {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69da0f3ed3496808e8cbc5123a866c41c12c15baaaead96d256477edf168eb57"}, - {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:176df045597e674fa950bf5ae536be85699e04cea68fa3a616cf75e413737eb5"}, - {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b796b44111f0cab6bbf66214186e44734b5baab949cb5fb56154142a92989aeb"}, - {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f27fdaadce22f2ef950fc10dcdf8048407c3b42b73779e48a4e76b3c35bca26c"}, - {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcb6532b9814ea7c5a6a3299747c49de30e84472fa72821b07f5a9818bce0f66"}, - {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:54631fb69a6e44b2ba522f7c22a6fb2667a02fd97d636048478db2fd8c4e98fe"}, - {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4b4c452d0190c5a820d3f5c0f3cd8a28ace48c54053e24da9d6041bf81113183"}, - {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:cae4c0c2ca800c793cae07ef3d40794625471040a87e1ba392039639ad61ab5b"}, - {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:565760d6812b8d78d416c3c7cfdf5362fbe0d0d25b82fed75d0d29e18d7fc30f"}, - {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:54311eb54f3a0c45efb9ed0d0a8f43d1bc6060d773f6973efd90037a51cd0a3f"}, - {file = "aiohttp-3.9.1-cp312-cp312-win32.whl", hash = "sha256:85c3e3c9cb1d480e0b9a64c658cd66b3cfb8e721636ab8b0e746e2d79a7a9eed"}, - {file = "aiohttp-3.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:11cb254e397a82efb1805d12561e80124928e04e9c4483587ce7390b3866d213"}, - {file = "aiohttp-3.9.1.tar.gz", hash = 
"sha256:8fc49a87ac269d4529da45871e2ffb6874e87779c3d0e2ccd813c0899221239d"}, -] - -[[package]] -name = "aiosignal" -version = "1.3.1" -requires_python = ">=3.7" -summary = "aiosignal: a list of registered asynchronous callbacks" -dependencies = [ - "frozenlist>=1.1.0", -] -files = [ - {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, - {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, -] +strategy = ["cross_platform"] +lock_version = "4.4.1" +content_hash = "sha256:d73a7c181c5594d5f391a5c715559194fb5c6c4a923e5a4d15e3c1a57b073e27" [[package]] name = "annotated-types" @@ -188,24 +112,14 @@ files = [ {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, ] -[[package]] -name = "async-timeout" -version = "4.0.3" -requires_python = ">=3.7" -summary = "Timeout context manager for asyncio programs" -files = [ - {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, - {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, -] - [[package]] name = "attrs" -version = "23.1.0" +version = "23.2.0" requires_python = ">=3.7" summary = "Classes Without Boilerplate" files = [ - {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, - {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, ] [[package]] @@ -220,11 +134,10 @@ files = [ [[package]] name = "black" -version = "23.12.0" +version = "23.12.1" requires_python = ">=3.8" summary = "The uncompromising code formatter." 
dependencies = [ - "aiohttp>=3.7.4; sys_platform != \"win32\" or implementation_name != \"pypy\" and extra == \"d\"", "click>=8.0.0", "mypy-extensions>=0.4.3", "packaging>=22.0", @@ -234,20 +147,20 @@ dependencies = [ "typing-extensions>=4.0.1; python_version < \"3.11\"", ] files = [ - {file = "black-23.12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:67f19562d367468ab59bd6c36a72b2c84bc2f16b59788690e02bbcb140a77175"}, - {file = "black-23.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bbd75d9f28a7283b7426160ca21c5bd640ca7cd8ef6630b4754b6df9e2da8462"}, - {file = "black-23.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:593596f699ca2dcbbbdfa59fcda7d8ad6604370c10228223cd6cf6ce1ce7ed7e"}, - {file = "black-23.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:12d5f10cce8dc27202e9a252acd1c9a426c83f95496c959406c96b785a92bb7d"}, - {file = "black-23.12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e73c5e3d37e5a3513d16b33305713237a234396ae56769b839d7c40759b8a41c"}, - {file = "black-23.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ba09cae1657c4f8a8c9ff6cfd4a6baaf915bb4ef7d03acffe6a2f6585fa1bd01"}, - {file = "black-23.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ace64c1a349c162d6da3cef91e3b0e78c4fc596ffde9413efa0525456148873d"}, - {file = "black-23.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:72db37a2266b16d256b3ea88b9affcdd5c41a74db551ec3dd4609a59c17d25bf"}, - {file = "black-23.12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fdf6f23c83078a6c8da2442f4d4eeb19c28ac2a6416da7671b72f0295c4a697b"}, - {file = "black-23.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39dda060b9b395a6b7bf9c5db28ac87b3c3f48d4fdff470fa8a94ab8271da47e"}, - {file = "black-23.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7231670266ca5191a76cb838185d9be59cfa4f5dd401b7c1c70b993c58f6b1b5"}, - {file = "black-23.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:193946e634e80bfb3aec41830f5d7431f8dd5b20d11d89be14b84a97c6b8bc75"}, - {file = "black-23.12.0-py3-none-any.whl", hash = "sha256:a7c07db8200b5315dc07e331dda4d889a56f6bf4db6a9c2a526fa3166a81614f"}, - {file = "black-23.12.0.tar.gz", hash = "sha256:330a327b422aca0634ecd115985c1c7fd7bdb5b5a2ef8aa9888a82e2ebe9437a"}, + {file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"}, + {file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"}, + {file = "black-23.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0"}, + {file = "black-23.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3"}, + {file = "black-23.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba"}, + {file = "black-23.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b"}, + {file = "black-23.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59"}, + {file = "black-23.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50"}, + {file = 
"black-23.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e"}, + {file = "black-23.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec"}, + {file = "black-23.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e"}, + {file = "black-23.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9"}, + {file = "black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e"}, + {file = "black-23.12.1.tar.gz", hash = "sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5"}, ] [[package]] @@ -450,6 +363,58 @@ files = [ {file = "commitizen-3.13.0.tar.gz", hash = "sha256:53cd225ae44fc25cb1582f5d50cda78711a5a1d44a32fee3dcf7a22bc204ce06"}, ] +[[package]] +name = "coverage" +version = "7.4.0" +requires_python = ">=3.8" +summary = "Code coverage measurement for Python" +files = [ + {file = "coverage-7.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36b0ea8ab20d6a7564e89cb6135920bc9188fb5f1f7152e94e8300b7b189441a"}, + {file = "coverage-7.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0676cd0ba581e514b7f726495ea75aba3eb20899d824636c6f59b0ed2f88c471"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ca5c71a5a1765a0f8f88022c52b6b8be740e512980362f7fdbb03725a0d6b9"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7c97726520f784239f6c62506bc70e48d01ae71e9da128259d61ca5e9788516"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:815ac2d0f3398a14286dc2cea223a6f338109f9ecf39a71160cd1628786bc6f5"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:80b5ee39b7f0131ebec7968baa9b2309eddb35b8403d1869e08f024efd883566"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5b2ccb7548a0b65974860a78c9ffe1173cfb5877460e5a229238d985565574ae"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:995ea5c48c4ebfd898eacb098164b3cc826ba273b3049e4a889658548e321b43"}, + {file = "coverage-7.4.0-cp310-cp310-win32.whl", hash = "sha256:79287fd95585ed36e83182794a57a46aeae0b64ca53929d1176db56aacc83451"}, + {file = "coverage-7.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b14b4f8760006bfdb6e08667af7bc2d8d9bfdb648351915315ea17645347137"}, + {file = "coverage-7.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:04387a4a6ecb330c1878907ce0dc04078ea72a869263e53c72a1ba5bbdf380ca"}, + {file = "coverage-7.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea81d8f9691bb53f4fb4db603203029643caffc82bf998ab5b59ca05560f4c06"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74775198b702868ec2d058cb92720a3c5a9177296f75bd97317c787daf711505"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76f03940f9973bfaee8cfba70ac991825611b9aac047e5c80d499a44079ec0bc"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:485e9f897cf4856a65a57c7f6ea3dc0d4e6c076c87311d4bc003f82cfe199d25"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6ae8c9d301207e6856865867d762a4b6fd379c714fcc0607a84b92ee63feff70"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bf477c355274a72435ceb140dc42de0dc1e1e0bf6e97195be30487d8eaaf1a09"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:83c2dda2666fe32332f8e87481eed056c8b4d163fe18ecc690b02802d36a4d26"}, + {file = "coverage-7.4.0-cp311-cp311-win32.whl", hash = "sha256:697d1317e5290a313ef0d369650cfee1a114abb6021fa239ca12b4849ebbd614"}, + {file = "coverage-7.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:26776ff6c711d9d835557ee453082025d871e30b3fd6c27fcef14733f67f0590"}, + {file = "coverage-7.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:13eaf476ec3e883fe3e5fe3707caeb88268a06284484a3daf8250259ef1ba143"}, + {file = "coverage-7.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846f52f46e212affb5bcf131c952fb4075b55aae6b61adc9856222df89cbe3e2"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f66da8695719ccf90e794ed567a1549bb2644a706b41e9f6eae6816b398c4a"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:164fdcc3246c69a6526a59b744b62e303039a81e42cfbbdc171c91a8cc2f9446"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:316543f71025a6565677d84bc4df2114e9b6a615aa39fb165d697dba06a54af9"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bb1de682da0b824411e00a0d4da5a784ec6496b6850fdf8c865c1d68c0e318dd"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0e8d06778e8fbffccfe96331a3946237f87b1e1d359d7fbe8b06b96c95a5407a"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a56de34db7b7ff77056a37aedded01b2b98b508227d2d0979d373a9b5d353daa"}, + {file = "coverage-7.4.0-cp312-cp312-win32.whl", hash = "sha256:51456e6fa099a8d9d91497202d9563a320513fcf59f33991b0661a4a6f2ad450"}, + {file = "coverage-7.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:cd3c1e4cb2ff0083758f09be0f77402e1bdf704adb7f89108007300a6da587d0"}, + {file = "coverage-7.4.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:c530833afc4707fe48524a44844493f36d8727f04dcce91fb978c414a8556cc6"}, + {file = "coverage-7.4.0.tar.gz", hash = "sha256:707c0f58cb1712b8809ece32b68996ee1e609f71bd14615bd8f87a1293cb610e"}, +] + +[[package]] +name = "coverage-badge" +version = "1.1.0" +summary = "Generate coverage badges for Coverage.py." +dependencies = [ + "coverage", +] +files = [ + {file = "coverage-badge-1.1.0.tar.gz", hash = "sha256:c824a106503e981c02821e7d32f008fb3984b2338aa8c3800ec9357e33345b78"}, + {file = "coverage_badge-1.1.0-py2.py3-none-any.whl", hash = "sha256:e365d56e5202e923d1b237f82defd628a02d1d645a147f867ac85c58c81d7997"}, +] + [[package]] name = "debugpy" version = "1.8.0" @@ -603,76 +568,19 @@ files = [ [[package]] name = "fmtm-splitter" -version = "0.2.5" +version = "1.0.0rc0" requires_python = ">=3.10" -summary = "A program for splitting a large AOI into smaller tasks." +summary = "A utility for splitting an AOI into multiple tasks." 
dependencies = [ - "geoalchemy2>=0.11.0", "geojson>=2.5.0", "geopandas>=0.11.0", "numpy>=1.21.0", "psycopg2>=2.9.1", "shapely>=1.8.1", - "sqlalchemy>=2.0.0", ] files = [ - {file = "fmtm-splitter-0.2.5.tar.gz", hash = "sha256:03b40cf80ca9d6593d24737df0edd4cc1f0ed91a05f7eee4bdeadb2c810a0300"}, - {file = "fmtm_splitter-0.2.5-py3-none-any.whl", hash = "sha256:b6f6247f06bb30e511ddc22aaba4ccb9e89b7463ce47a7bc9dc9038eac39408b"}, -] - -[[package]] -name = "frozenlist" -version = "1.4.1" -requires_python = ">=3.8" -summary = "A list-like structure which implements collections.abc.MutableSequence" -files = [ - {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, - {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, - {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"}, - {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"}, - {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, - {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, - {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"}, - {file = 
"frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"}, - {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"}, - {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"}, - {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, - {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, + {file = "fmtm-splitter-1.0.0rc0.tar.gz", hash = "sha256:56efe64a1076ef8188afdd423f5895c66602309ee894bf49599bd3ca7e5506ac"}, + {file = "fmtm_splitter-1.0.0rc0-py3-none-any.whl", hash = "sha256:9647a85e99308141df036546380e273ccf9e4317e21298d0131f988c6b61d622"}, ] [[package]] @@ -714,7 +622,7 @@ files = [ [[package]] name = "geopandas" -version = "0.14.1" +version = "0.14.2" requires_python = ">=3.9" summary = "Geographic pandas extensions" dependencies = [ @@ -725,8 +633,8 @@ dependencies = [ "shapely>=1.8.0", ] files = [ - {file = "geopandas-0.14.1-py3-none-any.whl", hash = "sha256:ed5a7cae7874bfc3238fb05e0501cc1760e1b7b11e5b76ecad29da644ca305da"}, - {file = "geopandas-0.14.1.tar.gz", hash = "sha256:4853ff89ecb6d1cfc43e7b3671092c8160e8a46a3dd7368f25906283314e42bb"}, + {file = "geopandas-0.14.2-py3-none-any.whl", hash = "sha256:0efa61235a68862c1c6be89fc3707cdeba67667d5676bb19e24f3c57a8c2f723"}, + {file = "geopandas-0.14.2.tar.gz", hash = "sha256:6e71d57b8376f9fdc9f1c3aa3170e7e420e91778de854f51013ae66fd371ccdb"}, ] [[package]] @@ -741,53 +649,79 @@ files = [ {file = "ghp_import-2.1.0-py3-none-any.whl", hash = "sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619"}, ] +[[package]] +name = "gitdb" +version = "4.0.11" +requires_python = ">=3.7" +summary = "Git Object Database" +dependencies = [ + "smmap<6,>=3.0.1", +] +files = [ + {file = "gitdb-4.0.11-py3-none-any.whl", hash = "sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4"}, + {file = "gitdb-4.0.11.tar.gz", hash = "sha256:bf5421126136d6d0af55bc1e7c1af1c397a34f5b7bd79e776cd3e89785c2b04b"}, +] + +[[package]] +name = "gitpython" +version = "3.1.41" +requires_python = ">=3.7" +summary = "GitPython is a Python library used to interact with Git repositories" +dependencies = [ + "gitdb<5,>=4.0.1", +] +files = [ + {file = "GitPython-3.1.41-py3-none-any.whl", hash = "sha256:c36b6634d069b3f719610175020a9aed919421c87552185b085e04fbbdb10b7c"}, + {file = "GitPython-3.1.41.tar.gz", hash = "sha256:ed66e624884f76df22c8e16066d567aaa5a37d5b5fa19db2c6df6f7156db9048"}, +] + [[package]] name = "greenlet" -version = "3.0.2" +version = "3.0.3" requires_python = ">=3.7" summary = "Lightweight in-process concurrent programming" files = [ - {file = "greenlet-3.0.2-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9acd8fd67c248b8537953cb3af8787c18a87c33d4dcf6830e410ee1f95a63fd4"}, - {file = "greenlet-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:339c0272a62fac7e602e4e6ec32a64ff9abadc638b72f17f6713556ed011d493"}, - {file = 
"greenlet-3.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38878744926cec29b5cc3654ef47f3003f14bfbba7230e3c8492393fe29cc28b"}, - {file = "greenlet-3.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b3f0497db77cfd034f829678b28267eeeeaf2fc21b3f5041600f7617139e6773"}, - {file = "greenlet-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed1a8a08de7f68506a38f9a2ddb26bbd1480689e66d788fcd4b5f77e2d9ecfcc"}, - {file = "greenlet-3.0.2-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:89a6f6ddcbef4000cda7e205c4c20d319488ff03db961d72d4e73519d2465309"}, - {file = "greenlet-3.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c1f647fe5b94b51488b314c82fdda10a8756d650cee8d3cd29f657c6031bdf73"}, - {file = "greenlet-3.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9560c580c896030ff9c311c603aaf2282234643c90d1dec738a1d93e3e53cd51"}, - {file = "greenlet-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2e9c5423046eec21f6651268cb674dfba97280701e04ef23d312776377313206"}, - {file = "greenlet-3.0.2-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1fd25dfc5879a82103b3d9e43fa952e3026c221996ff4d32a9c72052544835d"}, - {file = "greenlet-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cecfdc950dd25f25d6582952e58521bca749cf3eeb7a9bad69237024308c8196"}, - {file = "greenlet-3.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:edf7a1daba1f7c54326291a8cde58da86ab115b78c91d502be8744f0aa8e3ffa"}, - {file = "greenlet-3.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4cf532bf3c58a862196b06947b1b5cc55503884f9b63bf18582a75228d9950e"}, - {file = "greenlet-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e79fb5a9fb2d0bd3b6573784f5e5adabc0b0566ad3180a028af99523ce8f6138"}, - {file = "greenlet-3.0.2-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:006c1028ac0cfcc4e772980cfe73f5476041c8c91d15d64f52482fc571149d46"}, - {file = "greenlet-3.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fefd5eb2c0b1adffdf2802ff7df45bfe65988b15f6b972706a0e55d451bffaea"}, - {file = "greenlet-3.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0c0fdb8142742ee68e97c106eb81e7d3e883cc739d9c5f2b28bc38a7bafeb6d1"}, - {file = "greenlet-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:8f8d14a0a4e8c670fbce633d8b9a1ee175673a695475acd838e372966845f764"}, - {file = "greenlet-3.0.2-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:654b84c9527182036747938b81938f1d03fb8321377510bc1854a9370418ab66"}, - {file = "greenlet-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd5bc4fde0842ff2b9cf33382ad0b4db91c2582db836793d58d174c569637144"}, - {file = "greenlet-3.0.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c27b142a9080bdd5869a2fa7ebf407b3c0b24bd812db925de90e9afe3c417fd6"}, - {file = "greenlet-3.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0df7eed98ea23b20e9db64d46eb05671ba33147df9405330695bcd81a73bb0c9"}, - {file = "greenlet-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb5d60805057d8948065338be6320d35e26b0a72f45db392eb32b70dd6dc9227"}, - {file = "greenlet-3.0.2-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e0e28f5233d64c693382f66d47c362b72089ebf8ac77df7e12ac705c9fa1163d"}, - {file = 
"greenlet-3.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e4bfa752b3688d74ab1186e2159779ff4867644d2b1ebf16db14281f0445377"}, - {file = "greenlet-3.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c42bb589e6e9f9d8bdd79f02f044dff020d30c1afa6e84c0b56d1ce8a324553c"}, - {file = "greenlet-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:b2cedf279ca38ef3f4ed0d013a6a84a7fc3d9495a716b84a5fc5ff448965f251"}, - {file = "greenlet-3.0.2.tar.gz", hash = "sha256:1c1129bc47266d83444c85a8e990ae22688cf05fb20d7951fd2866007c2ba9bc"}, + {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, + {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, + {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, + {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, + {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = 
"sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, + {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, + {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, ] [[package]] name = "griffe" -version = "0.38.1" +version = "0.39.1" requires_python = ">=3.8" summary = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." dependencies = [ "colorama>=0.4", ] files = [ - {file = "griffe-0.38.1-py3-none-any.whl", hash = "sha256:334c79d3b5964ade65c05dfcaf53518c576dedd387aaba5c9fd71212f34f1483"}, - {file = "griffe-0.38.1.tar.gz", hash = "sha256:bd68d7da7f3d87bc57eb9962b250db123efd9bbcc06c11c1a91b6e583b2a9361"}, + {file = "griffe-0.39.1-py3-none-any.whl", hash = "sha256:6ce4ecffcf0d2f96362c5974b3f7df812da8f8d4cfcc5ebc8202ef72656fc087"}, + {file = "griffe-0.39.1.tar.gz", hash = "sha256:ead8dfede6e6531cce6bf69090a4f3c6d36fdf923c43f8e85aa530552cef0c09"}, ] [[package]] @@ -802,12 +736,12 @@ files = [ [[package]] name = "haversine" -version = "2.8.0" +version = "2.8.1" requires_python = ">=3.5" summary = "Calculate the distance between 2 points on Earth." files = [ - {file = "haversine-2.8.0-py2.py3-none-any.whl", hash = "sha256:524529d6c39619a513629b68331ce8153ccfc7c30049ed43405c27b12614e8f6"}, - {file = "haversine-2.8.0.tar.gz", hash = "sha256:cca39afd2ae5f1e6ed9231b332395bb8afb2e0a64edf70c238c176492e60c150"}, + {file = "haversine-2.8.1-py2.py3-none-any.whl", hash = "sha256:1acdd7e35115e00346898984acd55b0827d289df434efef98376f0bed37c8fa7"}, + {file = "haversine-2.8.1.tar.gz", hash = "sha256:ab750caa0c8f2168bd7b00a429757a83a8393be1aa30f91c2becf6b523189e2a"}, ] [[package]] @@ -948,15 +882,15 @@ files = [ [[package]] name = "jinja2" -version = "3.1.2" +version = "3.1.3" requires_python = ">=3.7" summary = "A very fast and expressive template engine." 
dependencies = [ "MarkupSafe>=2.0", ] files = [ - {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, - {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, ] [[package]] @@ -1050,51 +984,51 @@ files = [ [[package]] name = "markdown" -version = "3.5.1" +version = "3.5.2" requires_python = ">=3.8" summary = "Python implementation of John Gruber's Markdown." files = [ - {file = "Markdown-3.5.1-py3-none-any.whl", hash = "sha256:5874b47d4ee3f0b14d764324d2c94c03ea66bee56f2d929da9f2508d65e722dc"}, - {file = "Markdown-3.5.1.tar.gz", hash = "sha256:b65d7beb248dc22f2e8a31fb706d93798093c308dc1aba295aedeb9d41a813bd"}, + {file = "Markdown-3.5.2-py3-none-any.whl", hash = "sha256:d43323865d89fc0cb9b20c75fc8ad313af307cc087e84b657d9eec768eddeadd"}, + {file = "Markdown-3.5.2.tar.gz", hash = "sha256:e1ac7b3dc550ee80e602e71c1d168002f062e49f1b11e26a36264dafd4df2ef8"}, ] [[package]] name = "markupsafe" -version = "2.1.3" +version = "2.1.4" requires_python = ">=3.7" summary = "Safely add untrusted strings to HTML/XML markup." files = [ - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, - {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:de8153a7aae3835484ac168a9a9bdaa0c5eee4e0bc595503c95d53b942879c84"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e888ff76ceb39601c59e219f281466c6d7e66bd375b4ec1ce83bcdc68306796b"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0b838c37ba596fcbfca71651a104a611543077156cb0a26fe0c475e1f152ee8"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac1ebf6983148b45b5fa48593950f90ed6d1d26300604f321c74a9ca1609f8e"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbad3d346df8f9d72622ac71b69565e621ada2ce6572f37c2eae8dacd60385d"}, + {file = 
"MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d5291d98cd3ad9a562883468c690a2a238c4a6388ab3bd155b0c75dd55ece858"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a7cc49ef48a3c7a0005a949f3c04f8baa5409d3f663a1b36f0eba9bfe2a0396e"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b83041cda633871572f0d3c41dddd5582ad7d22f65a72eacd8d3d6d00291df26"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-win32.whl", hash = "sha256:0c26f67b3fe27302d3a412b85ef696792c4a2386293c53ba683a89562f9399b0"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-win_amd64.whl", hash = "sha256:a76055d5cb1c23485d7ddae533229039b850db711c554a12ea64a0fd8a0129e2"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9e9e3c4020aa2dc62d5dd6743a69e399ce3de58320522948af6140ac959ab863"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0042d6a9880b38e1dd9ff83146cc3c9c18a059b9360ceae207805567aacccc69"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55d03fea4c4e9fd0ad75dc2e7e2b6757b80c152c032ea1d1de487461d8140efc"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ab3a886a237f6e9c9f4f7d272067e712cdb4efa774bef494dccad08f39d8ae6"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abf5ebbec056817057bfafc0445916bb688a255a5146f900445d081db08cbabb"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e1a0d1924a5013d4f294087e00024ad25668234569289650929ab871231668e7"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e7902211afd0af05fbadcc9a312e4cf10f27b779cf1323e78d52377ae4b72bea"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c669391319973e49a7c6230c218a1e3044710bc1ce4c8e6eb71f7e6d43a2c131"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-win32.whl", hash = "sha256:31f57d64c336b8ccb1966d156932f3daa4fee74176b0fdc48ef580be774aae74"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-win_amd64.whl", hash = "sha256:54a7e1380dfece8847c71bf7e33da5d084e9b889c75eca19100ef98027bd9f56"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:a76cd37d229fc385738bd1ce4cba2a121cf26b53864c1772694ad0ad348e509e"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:987d13fe1d23e12a66ca2073b8d2e2a75cec2ecb8eab43ff5624ba0ad42764bc"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5244324676254697fe5c181fc762284e2c5fceeb1c4e3e7f6aca2b6f107e60dc"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78bc995e004681246e85e28e068111a4c3f35f34e6c62da1471e844ee1446250"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4d176cfdfde84f732c4a53109b293d05883e952bbba68b857ae446fa3119b4f"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f9917691f410a2e0897d1ef99619fd3f7dd503647c8ff2475bf90c3cf222ad74"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f06e5a9e99b7df44640767842f414ed5d7bedaaa78cd817ce04bbd6fd86e2dd6"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:396549cea79e8ca4ba65525470d534e8a41070e6b3500ce2414921099cb73e8d"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-win32.whl", hash = "sha256:f6be2d708a9d0e9b0054856f07ac7070fbe1754be40ca8525d5adccdbda8f475"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-win_amd64.whl", hash = "sha256:5045e892cfdaecc5b4c01822f353cf2c8feb88a6ec1c0adef2a2e705eef0f656"}, + {file = "MarkupSafe-2.1.4.tar.gz", hash = "sha256:3aae9af4cac263007fd6309c64c6ab4506dd2b79382d9d19a1994f9240b8db4f"}, ] [[package]] @@ -1197,6 +1131,22 @@ files = [ {file = "mkdocs-exclude-1.0.2.tar.gz", hash = "sha256:ba6fab3c80ddbe3fd31d3e579861fd3124513708271180a5f81846da8c7e2a51"}, ] +[[package]] +name = "mkdocs-git-revision-date-localized-plugin" +version = "1.2.2" +requires_python = ">=3.6" +summary = "Mkdocs plugin that enables displaying the localized date of the last git modification of a markdown file." +dependencies = [ + "GitPython", + "babel>=2.7.0", + "mkdocs>=1.0", + "pytz", +] +files = [ + {file = "mkdocs-git-revision-date-localized-plugin-1.2.2.tar.gz", hash = "sha256:0c43a9aac1fa69df99a823f833cc223bac9967b60d5261a857761c7c6e3b30de"}, + {file = "mkdocs_git_revision_date_localized_plugin-1.2.2-py3-none-any.whl", hash = "sha256:85c7fe9ab06e7a63c4e522c26fee8b51d357cb8cbe605064501ad80f4f31cb94"}, +] + [[package]] name = "mkdocs-material" version = "9.4.14" @@ -1264,45 +1214,6 @@ files = [ {file = "mkdocstrings_python-1.7.5.tar.gz", hash = "sha256:c7d143728257dbf1aa550446555a554b760dcd40a763f077189d298502b800be"}, ] -[[package]] -name = "multidict" -version = "6.0.4" -requires_python = ">=3.7" -summary = "multidict implementation" -files = [ - {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, - {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"}, - {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"}, - 
{file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"}, - {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"}, - {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"}, - {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"}, - {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"}, - {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, -] - [[package]] name = "mypy-extensions" version = "1.0.0" @@ -1328,38 +1239,38 @@ files = [ [[package]] name = "numpy" -version = "1.26.2" +version = "1.26.3" requires_python = ">=3.9" summary = "Fundamental package for array computing in Python" files = [ - {file = "numpy-1.26.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3703fc9258a4a122d17043e57b35e5ef1c5a5837c3db8be396c82e04c1cf9b0f"}, - {file = "numpy-1.26.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cc392fdcbd21d4be6ae1bb4475a03ce3b025cd49a9be5345d76d7585aea69440"}, - {file = "numpy-1.26.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:36340109af8da8805d8851ef1d74761b3b88e81a9bd80b290bbfed61bd2b4f75"}, - {file = "numpy-1.26.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bcc008217145b3d77abd3e4d5ef586e3bdfba8fe17940769f8aa09b99e856c00"}, - {file = "numpy-1.26.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3ced40d4e9e18242f70dd02d739e44698df3dcb010d31f495ff00a31ef6014fe"}, - {file = "numpy-1.26.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b272d4cecc32c9e19911891446b72e986157e6a1809b7b56518b4f3755267523"}, - {file = "numpy-1.26.2-cp310-cp310-win32.whl", hash = "sha256:22f8fc02fdbc829e7a8c578dd8d2e15a9074b630d4da29cda483337e300e3ee9"}, - {file = "numpy-1.26.2-cp310-cp310-win_amd64.whl", hash = "sha256:26c9d33f8e8b846d5a65dd068c14e04018d05533b348d9eaeef6c1bd787f9919"}, - {file = "numpy-1.26.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b96e7b9c624ef3ae2ae0e04fa9b460f6b9f17ad8b4bec6d7756510f1f6c0c841"}, - {file = "numpy-1.26.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:aa18428111fb9a591d7a9cc1b48150097ba6a7e8299fb56bdf574df650e7d1f1"}, - {file = "numpy-1.26.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06fa1ed84aa60ea6ef9f91ba57b5ed963c3729534e6e54055fc151fad0423f0a"}, - {file = "numpy-1.26.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96ca5482c3dbdd051bcd1fce8034603d6ebfc125a7bd59f55b40d8f5d246832b"}, - {file = "numpy-1.26.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:854ab91a2906ef29dc3925a064fcd365c7b4da743f84b123002f6139bcb3f8a7"}, - {file = "numpy-1.26.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f43740ab089277d403aa07567be138fc2a89d4d9892d113b76153e0e412409f8"}, - {file = "numpy-1.26.2-cp311-cp311-win32.whl", hash = "sha256:a2bbc29fcb1771cd7b7425f98b05307776a6baf43035d3b80c4b0f29e9545186"}, - {file = "numpy-1.26.2-cp311-cp311-win_amd64.whl", hash = "sha256:2b3fca8a5b00184828d12b073af4d0fc5fdd94b1632c2477526f6bd7842d700d"}, - {file = "numpy-1.26.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a4cd6ed4a339c21f1d1b0fdf13426cb3b284555c27ac2f156dfdaaa7e16bfab0"}, - {file = "numpy-1.26.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5d5244aabd6ed7f312268b9247be47343a654ebea52a60f002dc70c769048e75"}, - {file = "numpy-1.26.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a3cdb4d9c70e6b8c0814239ead47da00934666f668426fc6e94cce869e13fd7"}, - {file = "numpy-1.26.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa317b2325f7aa0a9471663e6093c210cb2ae9c0ad824732b307d2c51983d5b6"}, - {file = "numpy-1.26.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:174a8880739c16c925799c018f3f55b8130c1f7c8e75ab0a6fa9d41cab092fd6"}, - {file = "numpy-1.26.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f79b231bf5c16b1f39c7f4875e1ded36abee1591e98742b05d8a0fb55d8a3eec"}, - {file = "numpy-1.26.2-cp312-cp312-win32.whl", hash = "sha256:4a06263321dfd3598cacb252f51e521a8cb4b6df471bb12a7ee5cbab20ea9167"}, - {file = "numpy-1.26.2-cp312-cp312-win_amd64.whl", hash = "sha256:b04f5dc6b3efdaab541f7857351aac359e6ae3c126e2edb376929bd3b7f92d7e"}, - {file = "numpy-1.26.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1cc3d5029a30fb5f06704ad6b23b35e11309491c999838c31f124fee32107c79"}, - {file = "numpy-1.26.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94cc3c222bb9fb5a12e334d0479b97bb2df446fbe622b470928f5284ffca3f8d"}, - {file = "numpy-1.26.2-pp39-pypy39_pp73-win_amd64.whl", hash 
= "sha256:fe6b44fb8fcdf7eda4ef4461b97b3f63c466b27ab151bec2366db8b197387841"}, - {file = "numpy-1.26.2.tar.gz", hash = "sha256:f65738447676ab5777f11e6bbbdb8ce11b785e105f690bc45966574816b6d3ea"}, + {file = "numpy-1.26.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:806dd64230dbbfaca8a27faa64e2f414bf1c6622ab78cc4264f7f5f028fee3bf"}, + {file = "numpy-1.26.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02f98011ba4ab17f46f80f7f8f1c291ee7d855fcef0a5a98db80767a468c85cd"}, + {file = "numpy-1.26.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d45b3ec2faed4baca41c76617fcdcfa4f684ff7a151ce6fc78ad3b6e85af0a6"}, + {file = "numpy-1.26.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdd2b45bf079d9ad90377048e2747a0c82351989a2165821f0c96831b4a2a54b"}, + {file = "numpy-1.26.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:211ddd1e94817ed2d175b60b6374120244a4dd2287f4ece45d49228b4d529178"}, + {file = "numpy-1.26.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b1240f767f69d7c4c8a29adde2310b871153df9b26b5cb2b54a561ac85146485"}, + {file = "numpy-1.26.3-cp310-cp310-win32.whl", hash = "sha256:21a9484e75ad018974a2fdaa216524d64ed4212e418e0a551a2d83403b0531d3"}, + {file = "numpy-1.26.3-cp310-cp310-win_amd64.whl", hash = "sha256:9e1591f6ae98bcfac2a4bbf9221c0b92ab49762228f38287f6eeb5f3f55905ce"}, + {file = "numpy-1.26.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b831295e5472954104ecb46cd98c08b98b49c69fdb7040483aff799a755a7374"}, + {file = "numpy-1.26.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9e87562b91f68dd8b1c39149d0323b42e0082db7ddb8e934ab4c292094d575d6"}, + {file = "numpy-1.26.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c66d6fec467e8c0f975818c1796d25c53521124b7cfb760114be0abad53a0a2"}, + {file = "numpy-1.26.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f25e2811a9c932e43943a2615e65fc487a0b6b49218899e62e426e7f0a57eeda"}, + {file = "numpy-1.26.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:af36e0aa45e25c9f57bf684b1175e59ea05d9a7d3e8e87b7ae1a1da246f2767e"}, + {file = "numpy-1.26.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:51c7f1b344f302067b02e0f5b5d2daa9ed4a721cf49f070280ac202738ea7f00"}, + {file = "numpy-1.26.3-cp311-cp311-win32.whl", hash = "sha256:7ca4f24341df071877849eb2034948459ce3a07915c2734f1abb4018d9c49d7b"}, + {file = "numpy-1.26.3-cp311-cp311-win_amd64.whl", hash = "sha256:39763aee6dfdd4878032361b30b2b12593fb445ddb66bbac802e2113eb8a6ac4"}, + {file = "numpy-1.26.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a7081fd19a6d573e1a05e600c82a1c421011db7935ed0d5c483e9dd96b99cf13"}, + {file = "numpy-1.26.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12c70ac274b32bc00c7f61b515126c9205323703abb99cd41836e8125ea0043e"}, + {file = "numpy-1.26.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f784e13e598e9594750b2ef6729bcd5a47f6cfe4a12cca13def35e06d8163e3"}, + {file = "numpy-1.26.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f24750ef94d56ce6e33e4019a8a4d68cfdb1ef661a52cdaee628a56d2437419"}, + {file = "numpy-1.26.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:77810ef29e0fb1d289d225cabb9ee6cf4d11978a00bb99f7f8ec2132a84e0166"}, + {file = "numpy-1.26.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8ed07a90f5450d99dad60d3799f9c03c6566709bd53b497eb9ccad9a55867f36"}, + {file = "numpy-1.26.3-cp312-cp312-win32.whl", hash = 
"sha256:f73497e8c38295aaa4741bdfa4fda1a5aedda5473074369eca10626835445511"}, + {file = "numpy-1.26.3-cp312-cp312-win_amd64.whl", hash = "sha256:da4b0c6c699a0ad73c810736303f7fbae483bcb012e38d7eb06a5e3b432c981b"}, + {file = "numpy-1.26.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3c67423b3703f8fbd90f5adaa37f85b5794d3366948efe9a5190a5f3a83fc34e"}, + {file = "numpy-1.26.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46f47ee566d98849323f01b349d58f2557f02167ee301e5e28809a8c0e27a2d0"}, + {file = "numpy-1.26.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a8474703bffc65ca15853d5fd4d06b18138ae90c17c8d12169968e998e448bb5"}, + {file = "numpy-1.26.3.tar.gz", hash = "sha256:697df43e2b6310ecc9d95f05d5ef20eacc09c7c4ecc9da3f235d39e71b7da1e4"}, ] [[package]] @@ -1387,7 +1298,7 @@ files = [ [[package]] name = "osm-fieldwork" -version = "0.4.0" +version = "0.4.1" requires_python = ">=3.10" summary = "Processing field data from OpenDataKit to OpenStreetMap format." dependencies = [ @@ -1398,7 +1309,7 @@ dependencies = [ "haversine>=2.8.0", "levenshtein>=0.20.0", "mercantile>=1.2.1", - "osm-rawdata>=0.1.6", + "osm-rawdata>=0.1.7", "pandas>=1.5.0", "pmtiles>=3.2.0", "progress>=1.6", @@ -1413,8 +1324,8 @@ dependencies = [ "xmltodict>=0.13.0", ] files = [ - {file = "osm-fieldwork-0.4.0.tar.gz", hash = "sha256:145011a7d918ac35237fe7953235b3e630e10318a024c2fde2a8dd340a1aa2b1"}, - {file = "osm_fieldwork-0.4.0-py3-none-any.whl", hash = "sha256:10ec0b2ca8e426329adc1a07d640b8ba864ca3fcbfa884db3f58512074a10633"}, + {file = "osm-fieldwork-0.4.1.tar.gz", hash = "sha256:e3f3381b7024d816ffeb15082083accfbdbff573fa1a485e9976f68b2356f1b8"}, + {file = "osm_fieldwork-0.4.1-py3-none-any.whl", hash = "sha256:d0328fb1ea03649a052c96a5cd253218d96909ba8353f6c7fd92cbbfe1566924"}, ] [[package]] @@ -1474,7 +1385,7 @@ files = [ [[package]] name = "pandas" -version = "2.1.4" +version = "2.2.0" requires_python = ">=3.9" summary = "Powerful data structures for data analysis, time series, and statistics" dependencies = [ @@ -1483,28 +1394,31 @@ dependencies = [ "numpy<2,>=1.26.0; python_version >= \"3.12\"", "python-dateutil>=2.8.2", "pytz>=2020.1", - "tzdata>=2022.1", -] -files = [ - {file = "pandas-2.1.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bdec823dc6ec53f7a6339a0e34c68b144a7a1fd28d80c260534c39c62c5bf8c9"}, - {file = "pandas-2.1.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:294d96cfaf28d688f30c918a765ea2ae2e0e71d3536754f4b6de0ea4a496d034"}, - {file = "pandas-2.1.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b728fb8deba8905b319f96447a27033969f3ea1fea09d07d296c9030ab2ed1d"}, - {file = "pandas-2.1.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00028e6737c594feac3c2df15636d73ace46b8314d236100b57ed7e4b9ebe8d9"}, - {file = "pandas-2.1.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:426dc0f1b187523c4db06f96fb5c8d1a845e259c99bda74f7de97bd8a3bb3139"}, - {file = "pandas-2.1.4-cp310-cp310-win_amd64.whl", hash = "sha256:f237e6ca6421265643608813ce9793610ad09b40154a3344a088159590469e46"}, - {file = "pandas-2.1.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b7d852d16c270e4331f6f59b3e9aa23f935f5c4b0ed2d0bc77637a8890a5d092"}, - {file = "pandas-2.1.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bd7d5f2f54f78164b3d7a40f33bf79a74cdee72c31affec86bfcabe7e0789821"}, - {file = "pandas-2.1.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:0aa6e92e639da0d6e2017d9ccff563222f4eb31e4b2c3cf32a2a392fc3103c0d"}, - {file = "pandas-2.1.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d797591b6846b9db79e65dc2d0d48e61f7db8d10b2a9480b4e3faaddc421a171"}, - {file = "pandas-2.1.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d2d3e7b00f703aea3945995ee63375c61b2e6aa5aa7871c5d622870e5e137623"}, - {file = "pandas-2.1.4-cp311-cp311-win_amd64.whl", hash = "sha256:dc9bf7ade01143cddc0074aa6995edd05323974e6e40d9dbde081021ded8510e"}, - {file = "pandas-2.1.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:482d5076e1791777e1571f2e2d789e940dedd927325cc3cb6d0800c6304082f6"}, - {file = "pandas-2.1.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8a706cfe7955c4ca59af8c7a0517370eafbd98593155b48f10f9811da440248b"}, - {file = "pandas-2.1.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0513a132a15977b4a5b89aabd304647919bc2169eac4c8536afb29c07c23540"}, - {file = "pandas-2.1.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9f17f2b6fc076b2a0078862547595d66244db0f41bf79fc5f64a5c4d635bead"}, - {file = "pandas-2.1.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:45d63d2a9b1b37fa6c84a68ba2422dc9ed018bdaa668c7f47566a01188ceeec1"}, - {file = "pandas-2.1.4-cp312-cp312-win_amd64.whl", hash = "sha256:f69b0c9bb174a2342818d3e2778584e18c740d56857fc5cdb944ec8bbe4082cf"}, - {file = "pandas-2.1.4.tar.gz", hash = "sha256:fcb68203c833cc735321512e13861358079a96c174a61f5116a1de89c58c0ef7"}, + "tzdata>=2022.7", +] +files = [ + {file = "pandas-2.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8108ee1712bb4fa2c16981fba7e68b3f6ea330277f5ca34fa8d557e986a11670"}, + {file = "pandas-2.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:736da9ad4033aeab51d067fc3bd69a0ba36f5a60f66a527b3d72e2030e63280a"}, + {file = "pandas-2.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38e0b4fc3ddceb56ec8a287313bc22abe17ab0eb184069f08fc6a9352a769b18"}, + {file = "pandas-2.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20404d2adefe92aed3b38da41d0847a143a09be982a31b85bc7dd565bdba0f4e"}, + {file = "pandas-2.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7ea3ee3f125032bfcade3a4cf85131ed064b4f8dd23e5ce6fa16473e48ebcaf5"}, + {file = "pandas-2.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9670b3ac00a387620489dfc1bca66db47a787f4e55911f1293063a78b108df1"}, + {file = "pandas-2.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:5a946f210383c7e6d16312d30b238fd508d80d927014f3b33fb5b15c2f895430"}, + {file = "pandas-2.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a1b438fa26b208005c997e78672f1aa8138f67002e833312e6230f3e57fa87d5"}, + {file = "pandas-2.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8ce2fbc8d9bf303ce54a476116165220a1fedf15985b09656b4b4275300e920b"}, + {file = "pandas-2.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2707514a7bec41a4ab81f2ccce8b382961a29fbe9492eab1305bb075b2b1ff4f"}, + {file = "pandas-2.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85793cbdc2d5bc32620dc8ffa715423f0c680dacacf55056ba13454a5be5de88"}, + {file = "pandas-2.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cfd6c2491dc821b10c716ad6776e7ab311f7df5d16038d0b7458bc0b67dc10f3"}, + {file = "pandas-2.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a146b9dcacc3123aa2b399df1a284de5f46287a4ab4fbfc237eac98a92ebcb71"}, + 
{file = "pandas-2.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:fbc1b53c0e1fdf16388c33c3cca160f798d38aea2978004dd3f4d3dec56454c9"}, + {file = "pandas-2.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a41d06f308a024981dcaa6c41f2f2be46a6b186b902c94c2674e8cb5c42985bc"}, + {file = "pandas-2.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:159205c99d7a5ce89ecfc37cb08ed179de7783737cea403b295b5eda8e9c56d1"}, + {file = "pandas-2.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb1e1f3861ea9132b32f2133788f3b14911b68102d562715d71bd0013bc45440"}, + {file = "pandas-2.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:761cb99b42a69005dec2b08854fb1d4888fdf7b05db23a8c5a099e4b886a2106"}, + {file = "pandas-2.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a20628faaf444da122b2a64b1e5360cde100ee6283ae8effa0d8745153809a2e"}, + {file = "pandas-2.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f5be5d03ea2073627e7111f61b9f1f0d9625dc3c4d8dda72cc827b0c58a1d042"}, + {file = "pandas-2.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:a626795722d893ed6aacb64d2401d017ddc8a2341b49e0384ab9bf7112bdec30"}, + {file = "pandas-2.2.0.tar.gz", hash = "sha256:30b83f7c3eb217fb4d1b494a57a2fda5444f17834f5df2de6b2ffff68dc3c8e2"}, ] [[package]] @@ -1650,35 +1564,35 @@ files = [ [[package]] name = "pyarrow" -version = "14.0.1" +version = "15.0.0" requires_python = ">=3.8" summary = "Python library for Apache Arrow" dependencies = [ - "numpy>=1.16.6", -] -files = [ - {file = "pyarrow-14.0.1-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:96d64e5ba7dceb519a955e5eeb5c9adcfd63f73a56aea4722e2cc81364fc567a"}, - {file = "pyarrow-14.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a8ae88c0038d1bc362a682320112ee6774f006134cd5afc291591ee4bc06505"}, - {file = "pyarrow-14.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f6f053cb66dc24091f5511e5920e45c83107f954a21032feadc7b9e3a8e7851"}, - {file = "pyarrow-14.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:906b0dc25f2be12e95975722f1e60e162437023f490dbd80d0deb7375baf3171"}, - {file = "pyarrow-14.0.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:78d4a77a46a7de9388b653af1c4ce539350726cd9af62e0831e4f2bd0c95a2f4"}, - {file = "pyarrow-14.0.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:06ca79080ef89d6529bb8e5074d4b4f6086143b2520494fcb7cf8a99079cde93"}, - {file = "pyarrow-14.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:32542164d905002c42dff896efdac79b3bdd7291b1b74aa292fac8450d0e4dcd"}, - {file = "pyarrow-14.0.1-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:c7331b4ed3401b7ee56f22c980608cf273f0380f77d0f73dd3c185f78f5a6220"}, - {file = "pyarrow-14.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:922e8b49b88da8633d6cac0e1b5a690311b6758d6f5d7c2be71acb0f1e14cd61"}, - {file = "pyarrow-14.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58c889851ca33f992ea916b48b8540735055201b177cb0dcf0596a495a667b00"}, - {file = "pyarrow-14.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30d8494870d9916bb53b2a4384948491444741cb9a38253c590e21f836b01222"}, - {file = "pyarrow-14.0.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:be28e1a07f20391bb0b15ea03dcac3aade29fc773c5eb4bee2838e9b2cdde0cb"}, - {file = "pyarrow-14.0.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:981670b4ce0110d8dcb3246410a4aabf5714db5d8ea63b15686bce1c914b1f83"}, - {file = 
"pyarrow-14.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:4756a2b373a28f6166c42711240643fb8bd6322467e9aacabd26b488fa41ec23"}, - {file = "pyarrow-14.0.1-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:cf87e2cec65dd5cf1aa4aba918d523ef56ef95597b545bbaad01e6433851aa10"}, - {file = "pyarrow-14.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:470ae0194fbfdfbf4a6b65b4f9e0f6e1fa0ea5b90c1ee6b65b38aecee53508c8"}, - {file = "pyarrow-14.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6263cffd0c3721c1e348062997babdf0151301f7353010c9c9a8ed47448f82ab"}, - {file = "pyarrow-14.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a8089d7e77d1455d529dbd7cff08898bbb2666ee48bc4085203af1d826a33cc"}, - {file = "pyarrow-14.0.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:fada8396bc739d958d0b81d291cfd201126ed5e7913cb73de6bc606befc30226"}, - {file = "pyarrow-14.0.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:2a145dab9ed7849fc1101bf03bcdc69913547f10513fdf70fc3ab6c0a50c7eee"}, - {file = "pyarrow-14.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:05fe7994745b634c5fb16ce5717e39a1ac1fac3e2b0795232841660aa76647cd"}, - {file = "pyarrow-14.0.1.tar.gz", hash = "sha256:b8b3f4fe8d4ec15e1ef9b599b94683c5216adaed78d5cb4c606180546d1e2ee1"}, + "numpy<2,>=1.16.6", +] +files = [ + {file = "pyarrow-15.0.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:0a524532fd6dd482edaa563b686d754c70417c2f72742a8c990b322d4c03a15d"}, + {file = "pyarrow-15.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:60a6bdb314affa9c2e0d5dddf3d9cbb9ef4a8dddaa68669975287d47ece67642"}, + {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:66958fd1771a4d4b754cd385835e66a3ef6b12611e001d4e5edfcef5f30391e2"}, + {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f500956a49aadd907eaa21d4fff75f73954605eaa41f61cb94fb008cf2e00c6"}, + {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:6f87d9c4f09e049c2cade559643424da84c43a35068f2a1c4653dc5b1408a929"}, + {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:85239b9f93278e130d86c0e6bb455dcb66fc3fd891398b9d45ace8799a871a1e"}, + {file = "pyarrow-15.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b8d43e31ca16aa6e12402fcb1e14352d0d809de70edd185c7650fe80e0769e3"}, + {file = "pyarrow-15.0.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:fa7cd198280dbd0c988df525e50e35b5d16873e2cdae2aaaa6363cdb64e3eec5"}, + {file = "pyarrow-15.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8780b1a29d3c8b21ba6b191305a2a607de2e30dab399776ff0aa09131e266340"}, + {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe0ec198ccc680f6c92723fadcb97b74f07c45ff3fdec9dd765deb04955ccf19"}, + {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:036a7209c235588c2f07477fe75c07e6caced9b7b61bb897c8d4e52c4b5f9555"}, + {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2bd8a0e5296797faf9a3294e9fa2dc67aa7f10ae2207920dbebb785c77e9dbe5"}, + {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e8ebed6053dbe76883a822d4e8da36860f479d55a762bd9e70d8494aed87113e"}, + {file = "pyarrow-15.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:17d53a9d1b2b5bd7d5e4cd84d018e2a45bc9baaa68f7e6e3ebed45649900ba99"}, + {file = "pyarrow-15.0.0-cp312-cp312-macosx_10_15_x86_64.whl", 
hash = "sha256:9950a9c9df24090d3d558b43b97753b8f5867fb8e521f29876aa021c52fda351"}, + {file = "pyarrow-15.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:003d680b5e422d0204e7287bb3fa775b332b3fce2996aa69e9adea23f5c8f970"}, + {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f75fce89dad10c95f4bf590b765e3ae98bcc5ba9f6ce75adb828a334e26a3d40"}, + {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca9cb0039923bec49b4fe23803807e4ef39576a2bec59c32b11296464623dc2"}, + {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:9ed5a78ed29d171d0acc26a305a4b7f83c122d54ff5270810ac23c75813585e4"}, + {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:6eda9e117f0402dfcd3cd6ec9bfee89ac5071c48fc83a84f3075b60efa96747f"}, + {file = "pyarrow-15.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:9a3a6180c0e8f2727e6f1b1c87c72d3254cac909e609f35f22532e4115461177"}, + {file = "pyarrow-15.0.0.tar.gz", hash = "sha256:876858f549d540898f927eba4ef77cd549ad8d24baa3207cf1b72e5788b50e83"}, ] [[package]] @@ -1693,31 +1607,31 @@ files = [ [[package]] name = "pycryptodome" -version = "3.19.0" +version = "3.20.0" requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" summary = "Cryptographic library for Python" files = [ - {file = "pycryptodome-3.19.0-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:542f99d5026ac5f0ef391ba0602f3d11beef8e65aae135fa5b762f5ebd9d3bfb"}, - {file = "pycryptodome-3.19.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:61bb3ccbf4bf32ad9af32da8badc24e888ae5231c617947e0f5401077f8b091f"}, - {file = "pycryptodome-3.19.0-cp35-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d49a6c715d8cceffedabb6adb7e0cbf41ae1a2ff4adaeec9432074a80627dea1"}, - {file = "pycryptodome-3.19.0-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e249a784cc98a29c77cea9df54284a44b40cafbfae57636dd2f8775b48af2434"}, - {file = "pycryptodome-3.19.0-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d033947e7fd3e2ba9a031cb2d267251620964705a013c5a461fa5233cc025270"}, - {file = "pycryptodome-3.19.0-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:84c3e4fffad0c4988aef0d5591be3cad4e10aa7db264c65fadbc633318d20bde"}, - {file = "pycryptodome-3.19.0-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:139ae2c6161b9dd5d829c9645d781509a810ef50ea8b657e2257c25ca20efe33"}, - {file = "pycryptodome-3.19.0-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:5b1986c761258a5b4332a7f94a83f631c1ffca8747d75ab8395bf2e1b93283d9"}, - {file = "pycryptodome-3.19.0-cp35-abi3-win32.whl", hash = "sha256:536f676963662603f1f2e6ab01080c54d8cd20f34ec333dcb195306fa7826997"}, - {file = "pycryptodome-3.19.0-cp35-abi3-win_amd64.whl", hash = "sha256:04dd31d3b33a6b22ac4d432b3274588917dcf850cc0c51c84eca1d8ed6933810"}, - {file = "pycryptodome-3.19.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:8999316e57abcbd8085c91bc0ef75292c8618f41ca6d2b6132250a863a77d1e7"}, - {file = "pycryptodome-3.19.0-pp27-pypy_73-win32.whl", hash = "sha256:a0ab84755f4539db086db9ba9e9f3868d2e3610a3948cbd2a55e332ad83b01b0"}, - {file = "pycryptodome-3.19.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0101f647d11a1aae5a8ce4f5fad6644ae1b22bb65d05accc7d322943c69a74a6"}, - {file = "pycryptodome-3.19.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:8c1601e04d32087591d78e0b81e1e520e57a92796089864b20e5f18c9564b3fa"}, - {file = "pycryptodome-3.19.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:506c686a1eee6c00df70010be3b8e9e78f406af4f21b23162bbb6e9bdf5427bc"}, - {file = "pycryptodome-3.19.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7919ccd096584b911f2a303c593280869ce1af9bf5d36214511f5e5a1bed8c34"}, - {file = "pycryptodome-3.19.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:560591c0777f74a5da86718f70dfc8d781734cf559773b64072bbdda44b3fc3e"}, - {file = "pycryptodome-3.19.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1cc2f2ae451a676def1a73c1ae9120cd31af25db3f381893d45f75e77be2400"}, - {file = "pycryptodome-3.19.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:17940dcf274fcae4a54ec6117a9ecfe52907ed5e2e438fe712fe7ca502672ed5"}, - {file = "pycryptodome-3.19.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d04f5f623a280fbd0ab1c1d8ecbd753193ab7154f09b6161b0f857a1a676c15f"}, - {file = "pycryptodome-3.19.0.tar.gz", hash = "sha256:bc35d463222cdb4dbebd35e0784155c81e161b9284e567e7e933d722e533331e"}, + {file = "pycryptodome-3.20.0-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:ac1c7c0624a862f2e53438a15c9259d1655325fc2ec4392e66dc46cdae24d044"}, + {file = "pycryptodome-3.20.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:76658f0d942051d12a9bd08ca1b6b34fd762a8ee4240984f7c06ddfb55eaf15a"}, + {file = "pycryptodome-3.20.0-cp35-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f35d6cee81fa145333137009d9c8ba90951d7d77b67c79cbe5f03c7eb74d8fe2"}, + {file = "pycryptodome-3.20.0-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76cb39afede7055127e35a444c1c041d2e8d2f1f9c121ecef573757ba4cd2c3c"}, + {file = "pycryptodome-3.20.0-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49a4c4dc60b78ec41d2afa392491d788c2e06edf48580fbfb0dd0f828af49d25"}, + {file = "pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fb3b87461fa35afa19c971b0a2b7456a7b1db7b4eba9a8424666104925b78128"}, + {file = "pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:acc2614e2e5346a4a4eab6e199203034924313626f9620b7b4b38e9ad74b7e0c"}, + {file = "pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:210ba1b647837bfc42dd5a813cdecb5b86193ae11a3f5d972b9a0ae2c7e9e4b4"}, + {file = "pycryptodome-3.20.0-cp35-abi3-win32.whl", hash = "sha256:8d6b98d0d83d21fb757a182d52940d028564efe8147baa9ce0f38d057104ae72"}, + {file = "pycryptodome-3.20.0-cp35-abi3-win_amd64.whl", hash = "sha256:9b3ae153c89a480a0ec402e23db8d8d84a3833b65fa4b15b81b83be9d637aab9"}, + {file = "pycryptodome-3.20.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:4401564ebf37dfde45d096974c7a159b52eeabd9969135f0426907db367a652a"}, + {file = "pycryptodome-3.20.0-pp27-pypy_73-win32.whl", hash = "sha256:ec1f93feb3bb93380ab0ebf8b859e8e5678c0f010d2d78367cf6bc30bfeb148e"}, + {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:acae12b9ede49f38eb0ef76fdec2df2e94aad85ae46ec85be3648a57f0a7db04"}, + {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f47888542a0633baff535a04726948e876bf1ed880fddb7c10a736fa99146ab3"}, + {file = 
"pycryptodome-3.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e0e4a987d38cfc2e71b4a1b591bae4891eeabe5fa0f56154f576e26287bfdea"}, + {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c18b381553638414b38705f07d1ef0a7cf301bc78a5f9bc17a957eb19446834b"}, + {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a60fedd2b37b4cb11ccb5d0399efe26db9e0dd149016c1cc6c8161974ceac2d6"}, + {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:405002eafad114a2f9a930f5db65feef7b53c4784495dd8758069b89baf68eab"}, + {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ab6ab0cb755154ad14e507d1df72de9897e99fd2d4922851a276ccc14f4f1a5"}, + {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:acf6e43fa75aca2d33e93409f2dafe386fe051818ee79ee8a3e21de9caa2ac9e"}, + {file = "pycryptodome-3.20.0.tar.gz", hash = "sha256:09609209ed7de61c2b560cc5c8c4fbf892f8b15b1faf7e4cbffac97db1fffda7"}, ] [[package]] @@ -1889,7 +1803,7 @@ files = [ [[package]] name = "pymdown-extensions" -version = "10.5" +version = "10.7" requires_python = ">=3.8" summary = "Extension pack for Python Markdown." dependencies = [ @@ -1897,8 +1811,8 @@ dependencies = [ "pyyaml", ] files = [ - {file = "pymdown_extensions-10.5-py3-none-any.whl", hash = "sha256:1f0ca8bb5beff091315f793ee17683bc1390731f6ac4c5eb01e27464b80fe879"}, - {file = "pymdown_extensions-10.5.tar.gz", hash = "sha256:1b60f1e462adbec5a1ed79dac91f666c9c0d241fa294de1989f29d20096cfd0b"}, + {file = "pymdown_extensions-10.7-py3-none-any.whl", hash = "sha256:6ca215bc57bc12bf32b414887a68b810637d039124ed9b2e5bd3325cbb2c050c"}, + {file = "pymdown_extensions-10.7.tar.gz", hash = "sha256:c0d64d5cf62566f59e6b2b690a4095c931107c250a8c8e1351c1de5f6b036deb"}, ] [[package]] @@ -2054,6 +1968,7 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -2089,121 +2004,124 @@ files = [ [[package]] name = "rapidfuzz" -version = "3.5.2" +version = "3.6.1" requires_python = ">=3.8" summary = "rapid fuzzy string matching" files = [ - {file = "rapidfuzz-3.5.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1a047d6e58833919d742bbc0dfa66d1de4f79e8562ee195007d3eae96635df39"}, - {file = "rapidfuzz-3.5.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:22877c027c492b7dc7e3387a576a33ed5aad891104aa90da2e0844c83c5493ef"}, - {file = 
"rapidfuzz-3.5.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e0f448b0eacbcc416feb634e1232a48d1cbde5e60f269c84e4fb0912f7bbb001"}, - {file = "rapidfuzz-3.5.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d05146497672f869baf41147d5ec1222788c70e5b8b0cfcd6e95597c75b5b96b"}, - {file = "rapidfuzz-3.5.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8f2df3968738a38d2a0058b5e721753f5d3d602346a1027b0dde31b0476418f3"}, - {file = "rapidfuzz-3.5.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5afc1fcf1830f9bb87d3b490ba03691081b9948a794ea851befd2643069a30c1"}, - {file = "rapidfuzz-3.5.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84be69ea65f64fa01e5c4976be9826a5aa949f037508887add42da07420d65d6"}, - {file = "rapidfuzz-3.5.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8658c1045766e87e0038323aa38b4a9f49b7f366563271f973c8890a98aa24b5"}, - {file = "rapidfuzz-3.5.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:852b3f93c15fce58b8dc668bd54123713bfdbbb0796ba905ea5df99cfd083132"}, - {file = "rapidfuzz-3.5.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:12424a06ad9bd0cbf5f7cea1015e78d924a0034a0e75a5a7b39c0703dcd94095"}, - {file = "rapidfuzz-3.5.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b4e9ded8e80530bd7205a7a2b01802f934a4695ca9e9fbe1ce9644f5e0697864"}, - {file = "rapidfuzz-3.5.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:affb8fe36157c2dc8a7bc45b6a1875eb03e2c49167a1d52789144bdcb7ab3b8c"}, - {file = "rapidfuzz-3.5.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1d33a622572d384f4c90b5f7a139328246ab5600141e90032b521c2127bd605"}, - {file = "rapidfuzz-3.5.2-cp310-cp310-win32.whl", hash = "sha256:2cf9f2ed4a97b388cffd48d534452a564c2491f68f4fd5bc140306f774ceb63a"}, - {file = "rapidfuzz-3.5.2-cp310-cp310-win_amd64.whl", hash = "sha256:6541ffb70097885f7302cd73e2efd77be99841103023c2f9408551f27f45f7a5"}, - {file = "rapidfuzz-3.5.2-cp310-cp310-win_arm64.whl", hash = "sha256:1dd2542e5103fb8ca46500a979ae14d1609dcba11d2f9fe01e99eec03420e193"}, - {file = "rapidfuzz-3.5.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bff7d3127ebc5cd908f3a72f6517f31f5247b84666137556a8fcc5177c560939"}, - {file = "rapidfuzz-3.5.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fdfdb3685b631d8efbb6d6d3d86eb631be2b408d9adafcadc11e63e3f9c96dec"}, - {file = "rapidfuzz-3.5.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:97b043fe8185ec53bb3ff0e59deb89425c0fc6ece6e118939963aab473505801"}, - {file = "rapidfuzz-3.5.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a4a7832737f87583f3863dc62e6f56dd4a9fefc5f04a7bdcb4c433a0f36bb1b"}, - {file = "rapidfuzz-3.5.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d876dba9a11fcf60dcf1562c5a84ef559db14c2ceb41e1ad2d93cd1dc085889"}, - {file = "rapidfuzz-3.5.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa4c0612893716bbb6595066ca9ecb517c982355abe39ba9d1f4ab834ace91ad"}, - {file = "rapidfuzz-3.5.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:120316824333e376b88b284724cfd394c6ccfcb9818519eab5d58a502e5533f0"}, - {file = "rapidfuzz-3.5.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9cdbe8e80cc186d55f748a34393533a052d855357d5398a1ccb71a5021b58e8d"}, - {file = "rapidfuzz-3.5.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:1062425c8358a547ae5ebad148f2e0f02417716a571b803b0c68e4d552e99d32"}, - {file = "rapidfuzz-3.5.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:66be181965aff13301dd5f9b94b646ce39d99c7fe2fd5de1656f4ca7fafcb38c"}, - {file = "rapidfuzz-3.5.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:53df7aea3cf301633cfa2b4b2c2d2441a87dfc878ef810e5b4eddcd3e68723ad"}, - {file = "rapidfuzz-3.5.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:76639dca5eb0afc6424ac5f42d43d3bd342ac710e06f38a8c877d5b96de09589"}, - {file = "rapidfuzz-3.5.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:27689361c747b5f7b8a26056bc60979875323f1c3dcaaa9e2fec88f03b20a365"}, - {file = "rapidfuzz-3.5.2-cp311-cp311-win32.whl", hash = "sha256:99c9fc5265566fb94731dc6826f43c5109e797078264e6389a36d47814473692"}, - {file = "rapidfuzz-3.5.2-cp311-cp311-win_amd64.whl", hash = "sha256:666928ee735562a909d81bd2f63207b3214afd4ca41f790ab3025d066975c814"}, - {file = "rapidfuzz-3.5.2-cp311-cp311-win_arm64.whl", hash = "sha256:d55de67c48f06b7772541e8d4c062a2679205799ce904236e2836cb04c106442"}, - {file = "rapidfuzz-3.5.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:04e1e02b182283c43c866e215317735e91d22f5d34e65400121c04d5ed7ed859"}, - {file = "rapidfuzz-3.5.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:365e544aba3ac13acf1a62cb2e5909ad2ba078d0bfc7d69b1f801dfd673b9782"}, - {file = "rapidfuzz-3.5.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b61f77d834f94b0099fa9ed35c189b7829759d4e9c2743697a130dd7ba62259f"}, - {file = "rapidfuzz-3.5.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:43fb368998b9703fa8c63db292a8ab9e988bf6da0c8a635754be8e69da1e7c1d"}, - {file = "rapidfuzz-3.5.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25510b5d142c47786dbd27cfd9da7cae5bdea28d458379377a3644d8460a3404"}, - {file = "rapidfuzz-3.5.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bf3093443751e5a419834162af358d1e31dec75f84747a91dbbc47b2c04fc085"}, - {file = "rapidfuzz-3.5.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2fbaf546f15a924613f89d609ff66b85b4f4c2307ac14d93b80fe1025b713138"}, - {file = "rapidfuzz-3.5.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32d580df0e130ed85400ff77e1c32d965e9bc7be29ac4072ab637f57e26d29fb"}, - {file = "rapidfuzz-3.5.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:358a0fbc49343de20fee8ebdb33c7fa8f55a9ff93ff42d1ffe097d2caa248f1b"}, - {file = "rapidfuzz-3.5.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fb379ac0ddfc86c5542a225d194f76ed468b071b6f79ff57c4b72e635605ad7d"}, - {file = "rapidfuzz-3.5.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7fb21e182dc6d83617e88dea002963d5cf99cf5eabbdbf04094f503d8fe8d723"}, - {file = "rapidfuzz-3.5.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:c04f9f1310ce414ab00bdcbf26d0906755094bfc59402cb66a7722c6f06d70b2"}, - {file = "rapidfuzz-3.5.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f6da61cc38c1a95efc5edcedf258759e6dbab73191651a28c5719587f32a56ad"}, - {file = "rapidfuzz-3.5.2-cp312-cp312-win32.whl", hash = "sha256:f823fd1977071486739f484e27092765d693da6beedaceece54edce1dfeec9b2"}, - {file = "rapidfuzz-3.5.2-cp312-cp312-win_amd64.whl", hash = "sha256:a8162d81486de85ab1606e48e076431b66d44cf431b2b678e9cae458832e7147"}, - {file = "rapidfuzz-3.5.2-cp312-cp312-win_arm64.whl", hash = "sha256:dfc63fabb7d8da8483ca836bae7e55766fe39c63253571e103c034ba8ea80950"}, - {file = 
"rapidfuzz-3.5.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:af5221e4f7800db3e84c46b79dba4112e3b3cc2678f808bdff4fcd2487073846"}, - {file = "rapidfuzz-3.5.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8501d7875b176930e6ed9dbc1bc35adb37ef312f6106bd6bb5c204adb90160ac"}, - {file = "rapidfuzz-3.5.2-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e414e1ca40386deda4291aa2d45062fea0fbaa14f95015738f8bb75c4d27f862"}, - {file = "rapidfuzz-3.5.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2059cd73b7ea779a9307d7a78ed743f0e3d33b88ccdcd84569abd2953cd859f"}, - {file = "rapidfuzz-3.5.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:58e3e21f6f13a7cca265cce492bc797425bd4cb2025fdd161a9e86a824ad65ce"}, - {file = "rapidfuzz-3.5.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b847a49377e64e92e11ef3d0a793de75451526c83af015bdafdd5d04de8a058a"}, - {file = "rapidfuzz-3.5.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a42c7a8c62b29c4810e39da22b42524295fcb793f41c395c2cb07c126b729e83"}, - {file = "rapidfuzz-3.5.2-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:51b5166be86e09e011e92d9862b1fe64c4c7b9385f443fb535024e646d890460"}, - {file = "rapidfuzz-3.5.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f808dcb0088a7a496cc9895e66a7b8de55ffea0eb9b547c75dfb216dd5f76ed"}, - {file = "rapidfuzz-3.5.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d4b05a8f4ab7e7344459394094587b033fe259eea3a8720035e8ba30e79ab39b"}, - {file = "rapidfuzz-3.5.2.tar.gz", hash = "sha256:9e9b395743e12c36a3167a3a9fd1b4e11d92fb0aa21ec98017ee6df639ed385e"}, + {file = "rapidfuzz-3.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ac434fc71edda30d45db4a92ba5e7a42c7405e1a54cb4ec01d03cc668c6dcd40"}, + {file = "rapidfuzz-3.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2a791168e119cfddf4b5a40470620c872812042f0621e6a293983a2d52372db0"}, + {file = "rapidfuzz-3.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5a2f3e9df346145c2be94e4d9eeffb82fab0cbfee85bd4a06810e834fe7c03fa"}, + {file = "rapidfuzz-3.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23de71e7f05518b0bbeef55d67b5dbce3bcd3e2c81e7e533051a2e9401354eb0"}, + {file = "rapidfuzz-3.6.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d056e342989248d2bdd67f1955bb7c3b0ecfa239d8f67a8dfe6477b30872c607"}, + {file = "rapidfuzz-3.6.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:01835d02acd5d95c1071e1da1bb27fe213c84a013b899aba96380ca9962364bc"}, + {file = "rapidfuzz-3.6.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ed0f712e0bb5fea327e92aec8a937afd07ba8de4c529735d82e4c4124c10d5a0"}, + {file = "rapidfuzz-3.6.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96cd19934f76a1264e8ecfed9d9f5291fde04ecb667faef5f33bdbfd95fe2d1f"}, + {file = "rapidfuzz-3.6.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e06c4242a1354cf9d48ee01f6f4e6e19c511d50bb1e8d7d20bcadbb83a2aea90"}, + {file = "rapidfuzz-3.6.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d73dcfe789d37c6c8b108bf1e203e027714a239e50ad55572ced3c004424ed3b"}, + {file = "rapidfuzz-3.6.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:06e98ff000e2619e7cfe552d086815671ed09b6899408c2c1b5103658261f6f3"}, + {file = 
"rapidfuzz-3.6.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:08b6fb47dd889c69fbc0b915d782aaed43e025df6979b6b7f92084ba55edd526"}, + {file = "rapidfuzz-3.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a1788ebb5f5b655a15777e654ea433d198f593230277e74d51a2a1e29a986283"}, + {file = "rapidfuzz-3.6.1-cp310-cp310-win32.whl", hash = "sha256:c65f92881753aa1098c77818e2b04a95048f30edbe9c3094dc3707d67df4598b"}, + {file = "rapidfuzz-3.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:4243a9c35667a349788461aae6471efde8d8800175b7db5148a6ab929628047f"}, + {file = "rapidfuzz-3.6.1-cp310-cp310-win_arm64.whl", hash = "sha256:f59d19078cc332dbdf3b7b210852ba1f5db8c0a2cd8cc4c0ed84cc00c76e6802"}, + {file = "rapidfuzz-3.6.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fbc07e2e4ac696497c5f66ec35c21ddab3fc7a406640bffed64c26ab2f7ce6d6"}, + {file = "rapidfuzz-3.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:40cced1a8852652813f30fb5d4b8f9b237112a0bbaeebb0f4cc3611502556764"}, + {file = "rapidfuzz-3.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:82300e5f8945d601c2daaaac139d5524d7c1fdf719aa799a9439927739917460"}, + {file = "rapidfuzz-3.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edf97c321fd641fea2793abce0e48fa4f91f3c202092672f8b5b4e781960b891"}, + {file = "rapidfuzz-3.6.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7420e801b00dee4a344ae2ee10e837d603461eb180e41d063699fb7efe08faf0"}, + {file = "rapidfuzz-3.6.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:060bd7277dc794279fa95522af355034a29c90b42adcb7aa1da358fc839cdb11"}, + {file = "rapidfuzz-3.6.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7e3375e4f2bfec77f907680328e4cd16cc64e137c84b1886d547ab340ba6928"}, + {file = "rapidfuzz-3.6.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a490cd645ef9d8524090551016f05f052e416c8adb2d8b85d35c9baa9d0428ab"}, + {file = "rapidfuzz-3.6.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2e03038bfa66d2d7cffa05d81c2f18fd6acbb25e7e3c068d52bb7469e07ff382"}, + {file = "rapidfuzz-3.6.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:2b19795b26b979c845dba407fe79d66975d520947b74a8ab6cee1d22686f7967"}, + {file = "rapidfuzz-3.6.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:064c1d66c40b3a0f488db1f319a6e75616b2e5fe5430a59f93a9a5e40a656d15"}, + {file = "rapidfuzz-3.6.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3c772d04fb0ebeece3109d91f6122b1503023086a9591a0b63d6ee7326bd73d9"}, + {file = "rapidfuzz-3.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:841eafba6913c4dfd53045835545ba01a41e9644e60920c65b89c8f7e60c00a9"}, + {file = "rapidfuzz-3.6.1-cp311-cp311-win32.whl", hash = "sha256:266dd630f12696ea7119f31d8b8e4959ef45ee2cbedae54417d71ae6f47b9848"}, + {file = "rapidfuzz-3.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:d79aec8aeee02ab55d0ddb33cea3ecd7b69813a48e423c966a26d7aab025cdfe"}, + {file = "rapidfuzz-3.6.1-cp311-cp311-win_arm64.whl", hash = "sha256:484759b5dbc5559e76fefaa9170147d1254468f555fd9649aea3bad46162a88b"}, + {file = "rapidfuzz-3.6.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b2ef4c0fd3256e357b70591ffb9e8ed1d439fb1f481ba03016e751a55261d7c1"}, + {file = "rapidfuzz-3.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:588c4b20fa2fae79d60a4e438cf7133d6773915df3cc0a7f1351da19eb90f720"}, + {file = "rapidfuzz-3.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:7142ee354e9c06e29a2636b9bbcb592bb00600a88f02aa5e70e4f230347b373e"}, + {file = "rapidfuzz-3.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1dfc557c0454ad22382373ec1b7df530b4bbd974335efe97a04caec936f2956a"}, + {file = "rapidfuzz-3.6.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:03f73b381bdeccb331a12c3c60f1e41943931461cdb52987f2ecf46bfc22f50d"}, + {file = "rapidfuzz-3.6.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b0ccc2ec1781c7e5370d96aef0573dd1f97335343e4982bdb3a44c133e27786"}, + {file = "rapidfuzz-3.6.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da3e8c9f7e64bb17faefda085ff6862ecb3ad8b79b0f618a6cf4452028aa2222"}, + {file = "rapidfuzz-3.6.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fde9b14302a31af7bdafbf5cfbb100201ba21519be2b9dedcf4f1048e4fbe65d"}, + {file = "rapidfuzz-3.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c1a23eee225dfb21c07f25c9fcf23eb055d0056b48e740fe241cbb4b22284379"}, + {file = "rapidfuzz-3.6.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e49b9575d16c56c696bc7b06a06bf0c3d4ef01e89137b3ddd4e2ce709af9fe06"}, + {file = "rapidfuzz-3.6.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:0a9fc714b8c290261669f22808913aad49553b686115ad0ee999d1cb3df0cd66"}, + {file = "rapidfuzz-3.6.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:a3ee4f8f076aa92184e80308fc1a079ac356b99c39408fa422bbd00145be9854"}, + {file = "rapidfuzz-3.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f056ba42fd2f32e06b2c2ba2443594873cfccc0c90c8b6327904fc2ddf6d5799"}, + {file = "rapidfuzz-3.6.1-cp312-cp312-win32.whl", hash = "sha256:5d82b9651e3d34b23e4e8e201ecd3477c2baa17b638979deeabbb585bcb8ba74"}, + {file = "rapidfuzz-3.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:dad55a514868dae4543ca48c4e1fc0fac704ead038dafedf8f1fc0cc263746c1"}, + {file = "rapidfuzz-3.6.1-cp312-cp312-win_arm64.whl", hash = "sha256:3c84294f4470fcabd7830795d754d808133329e0a81d62fcc2e65886164be83b"}, + {file = "rapidfuzz-3.6.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:eef8b346ab331bec12bbc83ac75641249e6167fab3d84d8f5ca37fd8e6c7a08c"}, + {file = "rapidfuzz-3.6.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53251e256017e2b87f7000aee0353ba42392c442ae0bafd0f6b948593d3f68c6"}, + {file = "rapidfuzz-3.6.1-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6dede83a6b903e3ebcd7e8137e7ff46907ce9316e9d7e7f917d7e7cdc570ee05"}, + {file = "rapidfuzz-3.6.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e4da90e4c2b444d0a171d7444ea10152e07e95972bb40b834a13bdd6de1110c"}, + {file = "rapidfuzz-3.6.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:ca3dfcf74f2b6962f411c33dd95b0adf3901266e770da6281bc96bb5a8b20de9"}, + {file = "rapidfuzz-3.6.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bcc957c0a8bde8007f1a8a413a632a1a409890f31f73fe764ef4eac55f59ca87"}, + {file = "rapidfuzz-3.6.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:692c9a50bea7a8537442834f9bc6b7d29d8729a5b6379df17c31b6ab4df948c2"}, + {file = "rapidfuzz-3.6.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c23ceaea27e790ddd35ef88b84cf9d721806ca366199a76fd47cfc0457a81b"}, + {file = "rapidfuzz-3.6.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:2b155e67fff215c09f130555002e42f7517d0ea72cbd58050abb83cb7c880cec"}, + {file = "rapidfuzz-3.6.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3028ee8ecc48250607fa8a0adce37b56275ec3b1acaccd84aee1f68487c8557b"}, + {file = "rapidfuzz-3.6.1.tar.gz", hash = "sha256:35660bee3ce1204872574fa041c7ad7ec5175b3053a4cb6e181463fc07013de7"}, ] [[package]] name = "regex" -version = "2023.10.3" +version = "2023.12.25" requires_python = ">=3.7" summary = "Alternative regular expression module, to replace re." files = [ - {file = "regex-2023.10.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4c34d4f73ea738223a094d8e0ffd6d2c1a1b4c175da34d6b0de3d8d69bee6bcc"}, - {file = "regex-2023.10.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a8f4e49fc3ce020f65411432183e6775f24e02dff617281094ba6ab079ef0915"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4cd1bccf99d3ef1ab6ba835308ad85be040e6a11b0977ef7ea8c8005f01a3c29"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:81dce2ddc9f6e8f543d94b05d56e70d03a0774d32f6cca53e978dc01e4fc75b8"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c6b4d23c04831e3ab61717a707a5d763b300213db49ca680edf8bf13ab5d91b"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c15ad0aee158a15e17e0495e1e18741573d04eb6da06d8b84af726cfc1ed02ee"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6239d4e2e0b52c8bd38c51b760cd870069f0bdf99700a62cd509d7a031749a55"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4a8bf76e3182797c6b1afa5b822d1d5802ff30284abe4599e1247be4fd6b03be"}, - {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9c727bbcf0065cbb20f39d2b4f932f8fa1631c3e01fcedc979bd4f51fe051c5"}, - {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3ccf2716add72f80714b9a63899b67fa711b654be3fcdd34fa391d2d274ce767"}, - {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:107ac60d1bfdc3edb53be75e2a52aff7481b92817cfdddd9b4519ccf0e54a6ff"}, - {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:00ba3c9818e33f1fa974693fb55d24cdc8ebafcb2e4207680669d8f8d7cca79a"}, - {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f0a47efb1dbef13af9c9a54a94a0b814902e547b7f21acb29434504d18f36e3a"}, - {file = "regex-2023.10.3-cp310-cp310-win32.whl", hash = "sha256:36362386b813fa6c9146da6149a001b7bd063dabc4d49522a1f7aa65b725c7ec"}, - {file = "regex-2023.10.3-cp310-cp310-win_amd64.whl", hash = "sha256:c65a3b5330b54103e7d21cac3f6bf3900d46f6d50138d73343d9e5b2900b2353"}, - {file = "regex-2023.10.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:90a79bce019c442604662d17bf69df99090e24cdc6ad95b18b6725c2988a490e"}, - {file = "regex-2023.10.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c7964c2183c3e6cce3f497e3a9f49d182e969f2dc3aeeadfa18945ff7bdd7051"}, - {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ef80829117a8061f974b2fda8ec799717242353bff55f8a29411794d635d964"}, - {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:5addc9d0209a9afca5fc070f93b726bf7003bd63a427f65ef797a931782e7edc"}, - {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c148bec483cc4b421562b4bcedb8e28a3b84fcc8f0aa4418e10898f3c2c0eb9b"}, - {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d1f21af4c1539051049796a0f50aa342f9a27cde57318f2fc41ed50b0dbc4ac"}, - {file = "regex-2023.10.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b9ac09853b2a3e0d0082104036579809679e7715671cfbf89d83c1cb2a30f58"}, - {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ebedc192abbc7fd13c5ee800e83a6df252bec691eb2c4bedc9f8b2e2903f5e2a"}, - {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d8a993c0a0ffd5f2d3bda23d0cd75e7086736f8f8268de8a82fbc4bd0ac6791e"}, - {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:be6b7b8d42d3090b6c80793524fa66c57ad7ee3fe9722b258aec6d0672543fd0"}, - {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4023e2efc35a30e66e938de5aef42b520c20e7eda7bb5fb12c35e5d09a4c43f6"}, - {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0d47840dc05e0ba04fe2e26f15126de7c755496d5a8aae4a08bda4dd8d646c54"}, - {file = "regex-2023.10.3-cp311-cp311-win32.whl", hash = "sha256:9145f092b5d1977ec8c0ab46e7b3381b2fd069957b9862a43bd383e5c01d18c2"}, - {file = "regex-2023.10.3-cp311-cp311-win_amd64.whl", hash = "sha256:b6104f9a46bd8743e4f738afef69b153c4b8b592d35ae46db07fc28ae3d5fb7c"}, - {file = "regex-2023.10.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:bff507ae210371d4b1fe316d03433ac099f184d570a1a611e541923f78f05037"}, - {file = "regex-2023.10.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:be5e22bbb67924dea15039c3282fa4cc6cdfbe0cbbd1c0515f9223186fc2ec5f"}, - {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a992f702c9be9c72fa46f01ca6e18d131906a7180950958f766c2aa294d4b41"}, - {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7434a61b158be563c1362d9071358f8ab91b8d928728cd2882af060481244c9e"}, - {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2169b2dcabf4e608416f7f9468737583ce5f0a6e8677c4efbf795ce81109d7c"}, - {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9e908ef5889cda4de038892b9accc36d33d72fb3e12c747e2799a0e806ec841"}, - {file = "regex-2023.10.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12bd4bc2c632742c7ce20db48e0d99afdc05e03f0b4c1af90542e05b809a03d9"}, - {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bc72c231f5449d86d6c7d9cc7cd819b6eb30134bb770b8cfdc0765e48ef9c420"}, - {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bce8814b076f0ce5766dc87d5a056b0e9437b8e0cd351b9a6c4e1134a7dfbda9"}, - {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:ba7cd6dc4d585ea544c1412019921570ebd8a597fabf475acc4528210d7c4a6f"}, - {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b0c7d2f698e83f15228ba41c135501cfe7d5740181d5903e250e47f617eb4292"}, - {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:5a8f91c64f390ecee09ff793319f30a0f32492e99f5dc1c72bc361f23ccd0a9a"}, - {file = "regex-2023.10.3-cp312-cp312-win32.whl", hash = "sha256:ad08a69728ff3c79866d729b095872afe1e0557251da4abb2c5faff15a91d19a"}, - {file = "regex-2023.10.3-cp312-cp312-win_amd64.whl", hash = "sha256:39cdf8d141d6d44e8d5a12a8569d5a227f645c87df4f92179bd06e2e2705e76b"}, - {file = "regex-2023.10.3.tar.gz", hash = "sha256:3fef4f844d2290ee0ba57addcec17eec9e3df73f10a2748485dfd6a3a188cc0f"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0694219a1d54336fd0445ea382d49d36882415c0134ee1e8332afd1529f0baa5"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b014333bd0217ad3d54c143de9d4b9a3ca1c5a29a6d0d554952ea071cff0f1f8"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d865984b3f71f6d0af64d0d88f5733521698f6c16f445bb09ce746c92c97c586"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e0eabac536b4cc7f57a5f3d095bfa557860ab912f25965e08fe1545e2ed8b4c"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c25a8ad70e716f96e13a637802813f65d8a6760ef48672aa3502f4c24ea8b400"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9b6d73353f777630626f403b0652055ebfe8ff142a44ec2cf18ae470395766e"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9cc99d6946d750eb75827cb53c4371b8b0fe89c733a94b1573c9dd16ea6c9e4"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88d1f7bef20c721359d8675f7d9f8e414ec5003d8f642fdfd8087777ff7f94b5"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cb3fe77aec8f1995611f966d0c656fdce398317f850d0e6e7aebdfe61f40e1cd"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7aa47c2e9ea33a4a2a05f40fcd3ea36d73853a2aae7b4feab6fc85f8bf2c9704"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:df26481f0c7a3f8739fecb3e81bc9da3fcfae34d6c094563b9d4670b047312e1"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c40281f7d70baf6e0db0c2f7472b31609f5bc2748fe7275ea65a0b4601d9b392"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d94a1db462d5690ebf6ae86d11c5e420042b9898af5dcf278bd97d6bda065423"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba1b30765a55acf15dce3f364e4928b80858fa8f979ad41f862358939bdd1f2f"}, + {file = "regex-2023.12.25-cp310-cp310-win32.whl", hash = "sha256:150c39f5b964e4d7dba46a7962a088fbc91f06e606f023ce57bb347a3b2d4630"}, + {file = "regex-2023.12.25-cp310-cp310-win_amd64.whl", hash = "sha256:09da66917262d9481c719599116c7dc0c321ffcec4b1f510c4f8a066f8768105"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1b9d811f72210fa9306aeb88385b8f8bcef0dfbf3873410413c00aa94c56c2b6"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d902a43085a308cef32c0d3aea962524b725403fd9373dea18110904003bac97"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d166eafc19f4718df38887b2bbe1467a4f74a9830e8605089ea7a30dd4da8887"}, + {file = 
"regex-2023.12.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7ad32824b7f02bb3c9f80306d405a1d9b7bb89362d68b3c5a9be53836caebdb"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:636ba0a77de609d6510235b7f0e77ec494d2657108f777e8765efc060094c98c"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fda75704357805eb953a3ee15a2b240694a9a514548cd49b3c5124b4e2ad01b"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f72cbae7f6b01591f90814250e636065850c5926751af02bb48da94dfced7baa"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db2a0b1857f18b11e3b0e54ddfefc96af46b0896fb678c85f63fb8c37518b3e7"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7502534e55c7c36c0978c91ba6f61703faf7ce733715ca48f499d3dbbd7657e0"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e8c7e08bb566de4faaf11984af13f6bcf6a08f327b13631d41d62592681d24fe"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:283fc8eed679758de38fe493b7d7d84a198b558942b03f017b1f94dda8efae80"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f44dd4d68697559d007462b0a3a1d9acd61d97072b71f6d1968daef26bc744bd"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:67d3ccfc590e5e7197750fcb3a2915b416a53e2de847a728cfa60141054123d4"}, + {file = "regex-2023.12.25-cp311-cp311-win32.whl", hash = "sha256:68191f80a9bad283432385961d9efe09d783bcd36ed35a60fb1ff3f1ec2efe87"}, + {file = "regex-2023.12.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d2af3f6b8419661a0c421584cfe8aaec1c0e435ce7e47ee2a97e344b98f794f"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8a0ccf52bb37d1a700375a6b395bff5dd15c50acb745f7db30415bae3c2b0715"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c3c4a78615b7762740531c27cf46e2f388d8d727d0c0c739e72048beb26c8a9d"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad83e7545b4ab69216cef4cc47e344d19622e28aabec61574b20257c65466d6a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7a635871143661feccce3979e1727c4e094f2bdfd3ec4b90dfd4f16f571a87a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d498eea3f581fbe1b34b59c697512a8baef88212f92e4c7830fcc1499f5b45a5"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43f7cd5754d02a56ae4ebb91b33461dc67be8e3e0153f593c509e21d219c5060"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51f4b32f793812714fd5307222a7f77e739b9bc566dc94a18126aba3b92b98a3"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba99d8077424501b9616b43a2d208095746fb1284fc5ba490139651f971d39d9"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4bfc2b16e3ba8850e0e262467275dd4d62f0d045e0e9eda2bc65078c0110a11f"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8c2c19dae8a3eb0ea45a8448356ed561be843b13cbc34b840922ddf565498c1c"}, + {file = 
"regex-2023.12.25-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:60080bb3d8617d96f0fb7e19796384cc2467447ef1c491694850ebd3670bc457"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b77e27b79448e34c2c51c09836033056a0547aa360c45eeeb67803da7b0eedaf"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:518440c991f514331f4850a63560321f833979d145d7d81186dbe2f19e27ae3d"}, + {file = "regex-2023.12.25-cp312-cp312-win32.whl", hash = "sha256:e2610e9406d3b0073636a3a2e80db05a02f0c3169b5632022b4e81c0364bcda5"}, + {file = "regex-2023.12.25-cp312-cp312-win_amd64.whl", hash = "sha256:cc37b9aeebab425f11f27e5e9e6cf580be7206c6582a64467a14dda211abc232"}, + {file = "regex-2023.12.25.tar.gz", hash = "sha256:29171aa128da69afdf4bde412d5bedc335f2ca8fcfe4489038577d05f16181e5"}, ] [[package]] @@ -2261,12 +2179,12 @@ files = [ [[package]] name = "setuptools" -version = "69.0.2" +version = "69.0.3" requires_python = ">=3.8" summary = "Easily download, build, install, upgrade, and uninstall Python packages" files = [ - {file = "setuptools-69.0.2-py3-none-any.whl", hash = "sha256:1e8fdff6797d3865f37397be788a4e3cba233608e9b509382a2777d25ebde7f2"}, - {file = "setuptools-69.0.2.tar.gz", hash = "sha256:735896e78a4742605974de002ac60562d286fa8051a7e2299445e8e8fbb01aa6"}, + {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"}, + {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"}, ] [[package]] @@ -2312,6 +2230,16 @@ files = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] +[[package]] +name = "smmap" +version = "5.0.1" +requires_python = ">=3.7" +summary = "A pure Python implementation of a sliding window memory map manager" +files = [ + {file = "smmap-5.0.1-py3-none-any.whl", hash = "sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da"}, + {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"}, +] + [[package]] name = "sniffio" version = "1.3.0" @@ -2338,7 +2266,7 @@ version = "2.0.23" requires_python = ">=3.7" summary = "Database Abstraction Library" dependencies = [ - "greenlet!=0.4.17; platform_machine == \"aarch64\" or (platform_machine == \"ppc64le\" or (platform_machine == \"x86_64\" or (platform_machine == \"amd64\" or (platform_machine == \"AMD64\" or (platform_machine == \"win32\" or platform_machine == \"WIN32\")))))", + "greenlet!=0.4.17; platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\"", "typing-extensions>=4.2.0", ] files = [ @@ -2422,15 +2350,15 @@ files = [ [[package]] name = "thefuzz" -version = "0.20.0" -requires_python = ">=3.7" +version = "0.22.1" +requires_python = ">=3.8" summary = "Fuzzy string matching in python" dependencies = [ "rapidfuzz<4.0.0,>=3.0.0", ] files = [ - {file = "thefuzz-0.20.0-py3-none-any.whl", hash = "sha256:bd2b657a12bd8518917d2d71c53125368706233b822fac688fca956730154388"}, - {file = "thefuzz-0.20.0.tar.gz", hash = "sha256:a25e49786b1c4603c7fc6e2d69e6bc660982a2919698b536ff8354e0631cc40d"}, + {file = "thefuzz-0.22.1-py3-none-any.whl", hash = "sha256:59729b33556850b90e1093c4cf9e618af6f2e4c985df193fdf3c5b5cf02ca481"}, + {file = "thefuzz-0.22.1.tar.gz", 
hash = "sha256:7138039a7ecf540da323792d8592ef9902b1d79eb78c147d4f20664de79f3680"}, ] [[package]] @@ -2455,12 +2383,12 @@ files = [ [[package]] name = "traitlets" -version = "5.14.0" +version = "5.14.1" requires_python = ">=3.8" summary = "Traitlets Python configuration system" files = [ - {file = "traitlets-5.14.0-py3-none-any.whl", hash = "sha256:f14949d23829023013c47df20b4a76ccd1a85effb786dc060f34de7948361b33"}, - {file = "traitlets-5.14.0.tar.gz", hash = "sha256:fcdaa8ac49c04dfa0ed3ee3384ef6dfdb5d6f3741502be247279407679296772"}, + {file = "traitlets-5.14.1-py3-none-any.whl", hash = "sha256:2e5a030e6eff91737c643231bfcf04a65b0132078dad75e4936700b213652e74"}, + {file = "traitlets-5.14.1.tar.gz", hash = "sha256:8585105b371a04b8316a43d5ce29c098575c2e477850b62b848b964f1444527e"}, ] [[package]] @@ -2475,12 +2403,12 @@ files = [ [[package]] name = "tzdata" -version = "2023.3" +version = "2023.4" requires_python = ">=2" summary = "Provider of IANA time zone data" files = [ - {file = "tzdata-2023.3-py2.py3-none-any.whl", hash = "sha256:7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda"}, - {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"}, + {file = "tzdata-2023.4-py2.py3-none-any.whl", hash = "sha256:aa3ace4329eeacda5b7beb7ea08ece826c28d761cda36e747cfbf97996d39bf3"}, + {file = "tzdata-2023.4.tar.gz", hash = "sha256:dd54c94f294765522c77399649b4fefd95522479a664a0cec87f41bebc6148c9"}, ] [[package]] @@ -2553,11 +2481,11 @@ files = [ [[package]] name = "wcwidth" -version = "0.2.12" +version = "0.2.13" summary = "Measures the displayed width of unicode strings in a terminal" files = [ - {file = "wcwidth-0.2.12-py2.py3-none-any.whl", hash = "sha256:f26ec43d96c8cbfed76a5075dac87680124fa84e0855195a6184da9c187f133c"}, - {file = "wcwidth-0.2.12.tar.gz", hash = "sha256:f01c104efdf57971bcb756f054dd58ddec5204dd15fa31d6503ea57947d97c02"}, + {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, + {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, ] [[package]] @@ -2590,65 +2518,6 @@ files = [ {file = "xmltodict-0.13.0.tar.gz", hash = "sha256:341595a488e3e01a85a9d8911d8912fd922ede5fecc4dce437eb4b6c8d037e56"}, ] -[[package]] -name = "yarl" -version = "1.9.4" -requires_python = ">=3.7" -summary = "Yet another URL library" -dependencies = [ - "idna>=2.0", - "multidict>=4.0", -] -files = [ - {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, - {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"}, - {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"}, - {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"}, - {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"}, - {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"}, - {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"}, - {file = 
"yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"}, - {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"}, - {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"}, - {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"}, - {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"}, -] - [[package]] name = "zipp" version = "3.17.0" diff --git a/src/backend/pyproject.toml b/src/backend/pyproject.toml index 0c6f130bee..30d43901e0 100644 --- a/src/backend/pyproject.toml +++ b/src/backend/pyproject.toml @@ -46,9 +46,9 @@ dependencies = [ "asgiref==3.7.2", "sozipfile==0.3.2", "osm-login-python==1.0.1", - "osm-fieldwork==0.4.0", + "osm-fieldwork==0.4.1", "osm-rawdata==0.1.7", - "fmtm-splitter==0.2.5", + "fmtm-splitter==1.0.0rc0", ] requires-python = ">=3.10" readme = "../../README.md" @@ -72,6 +72,8 @@ test = [ "httpx==0.25.2", "pytest-asyncio==0.23.2", "pyinstrument==4.6.1", + "coverage>=7.4.0", + "coverage-badge>=1.1.0", ] debug = [ "ipdb>=0.13.13", @@ -83,6 +85,7 @@ docs = [ "mkdocstrings-python==1.7.5", "mkdocs-exclude==1.0.2", "python-dotenv==1.0.0", + "mkdocs-git-revision-date-localized-plugin>=1.2.2", ] [tool.black] @@ -111,6 +114,9 @@ extend-immutable-calls = [ "fastapi.Security", ] +[tool.pyright] +extraPaths = ["__pypackages__/3.10/lib/"] + [tool.pytest.ini_options] addopts = "-ra -q" testpaths = [ diff --git 
a/src/backend/tests/__init__.py b/src/backend/tests/__init__.py index e69de29bb2..5581e5c3a1 100644 --- a/src/backend/tests/__init__.py +++ b/src/backend/tests/__init__.py @@ -0,0 +1 @@ +"""Backend tests using PyTest.""" diff --git a/src/backend/tests/conftest.py b/src/backend/tests/conftest.py index d68674ca98..68bdcf7ae4 100644 --- a/src/backend/tests/conftest.py +++ b/src/backend/tests/conftest.py @@ -94,7 +94,7 @@ def user(db): @pytest.fixture(scope="function") -def organization(db): +def organisation(db): """A test organisation.""" db_org = DbOrganisation( name="test_org_qwerty", @@ -109,7 +109,7 @@ def organization(db): @pytest.fixture(scope="function") -async def project(db, user, organization): +async def project(db, user, organisation): """A test project, using the test user and org.""" project_metadata = ProjectUpload( author=User(username=user.username, id=user.id), @@ -125,7 +125,7 @@ async def project(db, user, organization): odk_central_password=os.getenv("ODK_CENTRAL_PASSWD"), ), hashtags=["hot-fmtm"], - organisation_id=organization.id, + organisation_id=organisation.id, ) # Create ODK Central Project @@ -162,18 +162,18 @@ async def project(db, user, organization): # @pytest.fixture(scope="function") # def get_ids(db, project): # user_id_query = text(f"SELECT id FROM {DbUser.__table__.name} LIMIT 1") -# organization_id_query = text( +# organisation_id_query = text( # f"SELECT id FROM {DbOrganisation.__table__.name} LIMIT 1" # ) # project_id_query = text(f"SELECT id FROM {DbProject.__table__.name} LIMIT 1") # user_id = db.execute(user_id_query).scalar() -# organization_id = db.execute(organization_id_query).scalar() +# organisation_id = db.execute(organisation_id_query).scalar() # project_id = db.execute(project_id_query).scalar() # data = { # "user_id": user_id, -# "organization_id": organization_id, +# "organisation_id": organisation_id, # "project_id": project_id, # } # log.debug(f"get_ids return: {data}") diff --git a/src/backend/tests/test_projects_routes.py b/src/backend/tests/test_projects_routes.py index fa8931df93..57f067da4a 100644 --- a/src/backend/tests/test_projects_routes.py +++ b/src/backend/tests/test_projects_routes.py @@ -44,7 +44,7 @@ odk_central_password = os.getenv("ODK_CENTRAL_PASSWD") -async def test_create_project(client, organization, user): +async def test_create_project(client, organisation, user): """Test project creation endpoint.""" project_data = { "author": {"username": user.username, "id": user.id}, @@ -60,7 +60,7 @@ async def test_create_project(client, organization, user): "odk_central_password": odk_central_password, }, "hashtags": ["hot-fmtm"], - "organisation_id": organization.id, + "organisation_id": organisation.id, } response = client.post("/projects/create_project", json=project_data) @@ -71,6 +71,12 @@ async def test_create_project(client, organization, user): assert "id" in response_data +async def test_delete_project(client, project): + """Test deleting a FMTM project, plus ODK Central project.""" + response = client.delete(f"/projects/{project.id}") + assert response.status_code == 204 + + async def test_create_odk_project(): """Test creating an odk central project.""" mock_project = Mock() diff --git a/src/frontend/package.json b/src/frontend/package.json index 7e50d7887c..b5e7f6cdca 100755 --- a/src/frontend/package.json +++ b/src/frontend/package.json @@ -7,7 +7,8 @@ "build:start": "cd dist && PORT=8080 npx serve", "start": "env-cmd -f .env.dev vite dev", "start:live": "vite dev", - "test": "vitest tests/" + "test": "vitest 
tests/", + "lint": "eslint --fix --ext .js,.jsx,.ts,.tsx src" }, "license": "GPL-3.0-only", "author": { diff --git a/src/frontend/public/install.sh b/src/frontend/public/install.sh deleted file mode 100644 index 671383de11..0000000000 --- a/src/frontend/public/install.sh +++ /dev/null @@ -1,941 +0,0 @@ -#!/bin/bash - -set -o pipefail - -# Tested for Debian 11 Bookworm & Ubuntu 22.04 LTS - -# Auto accept all apt prompts -export DEBIAN_FRONTEND=noninteractive - -# Global Vars -RANDOM_DIR="${RANDOM}${RANDOM}" -DOTENV_NAME=.env -OS_NAME="debian" -IS_TEST=false -BRANCH_NAME=development -COMPOSE_FILE=docker-compose.yml - -heading_echo() { - local message="$1" - local color="${2:-blue}" - local separator="--------------------------------------------------------" - local sep_length=${#separator} - local pad_length=$(( (sep_length - ${#message}) / 2 )) - local pad="" - - case "$color" in - "black") color_code="\e[0;30m" ;; - "red") color_code="\e[0;31m" ;; - "green") color_code="\e[0;32m" ;; - "yellow") color_code="\e[0;33m" ;; - "blue") color_code="\e[0;34m" ;; - "purple") color_code="\e[0;35m" ;; - "cyan") color_code="\e[0;36m" ;; - "white") color_code="\e[0;37m" ;; - *) color_code="\e[0m" ;; # Default: reset color - esac - - for ((i=0; i/dev/null; then - yellow_echo "User 'svcfmtm' found." - else - yellow_echo "Creating user 'svcfmtm'." - useradd -m -d /home/svcfmtm -s /bin/bash svcfmtm 2>/dev/null - fi - - echo - yellow_echo "Temporarily adding to sudoers list." - echo "svcfmtm ALL=(ALL) NOPASSWD:ALL" | tee /etc/sudoers.d/fmtm-sudoers >/dev/null - - echo - yellow_echo "Rerunning this script as user 'svcfmtm'." - echo - - if ! command -v machinectl &>/dev/null; then - # Start the installation process in the background with spinner - ( apt-get update > /dev/null - wait # Wait for 'apt-get update' to complete - apt-get install -y systemd-container --no-install-recommends > /dev/null ) & - install_progress $! - echo - fi - - # Check if input is direct bash script call (i.e. ends in .sh) - ext="$(basename "$0")" - if [ "${ext: -3}" = ".sh" ]; then - # User called script directly, copy to /home/svcfmtm/install.sh - root_script_path="$(readlink -f "$0")" - user_script_path="/home/svcfmtm/$(basename "$0")" - cp "$root_script_path" "$user_script_path" - chmod +x "$user_script_path" - - machinectl --quiet shell \ - --setenv=RUN_AS_ROOT=true \ - --setenv=DOCKER_HOST=${DOCKER_HOST} \ - svcfmtm@ /bin/bash -c "$user_script_path" - else - # User called script remotely, so do the same - machinectl --quiet shell \ - --setenv=RUN_AS_ROOT=true \ - --setenv=DOCKER_HOST=${DOCKER_HOST} \ - svcfmtm@ /bin/bash -c "curl -fsSL https://get.fmtm.dev | bash" - fi - - exit 0 - fi -} - -check_os() { - heading_echo "Checking Current OS" - - if [ -e /etc/os-release ]; then - source /etc/os-release - case "$ID" in - debian) - export OS_NAME=${ID} - echo "Current OS is ${PRETTY_NAME}." - ;; - ubuntu) - export OS_NAME=${ID} - echo "Current OS is ${PRETTY_NAME}." - ;; - *) - echo "Current OS is not Debian or Ubuntu. Exiting." - exit 1 - ;; - esac - else - echo "Could not determine the operating system. Exiting." 
- exit 1 - fi -} - -remove_old_docker_installs() { - heading_echo "Removing Old Versions of Docker" - packages=( - docker.io - docker-doc - docker-compose - podman-docker - containerd - runc - ) - for pkg in "${packages[@]}"; do - sudo apt-get remove "$pkg" - done -} - -install_dependencies() { - heading_echo "Installing Dependencies" - sudo apt-get update - sudo apt-get install -y \ - ca-certificates \ - curl \ - gnupg \ - uidmap \ - dbus-user-session \ - slirp4netns - - if [ "$OS_NAME" = "debian" ]; then - sudo apt-get install -y fuse-overlayfs - fi -} - -add_gpg_key() { - heading_echo "Adding Docker GPG Key" - sudo install -m 0755 -d /etc/apt/keyrings - curl -fsSL https://download.docker.com/linux/${ID}/gpg | sudo gpg --yes --dearmor -o /etc/apt/keyrings/docker.gpg - sudo chmod a+r /etc/apt/keyrings/docker.gpg - echo "Done" -} - -add_to_apt() { - heading_echo "Adding Docker to Apt Source" - echo \ - "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/${ID} \ - $(. /etc/os-release && echo $VERSION_CODENAME) stable" | \ - sudo tee /etc/apt/sources.list.d/docker.list > /dev/null - echo "Done" -} - -apt_install_docker() { - heading_echo "Installing Docker" - sudo apt-get update - sudo apt-get install -y \ - docker-ce \ - docker-ce-cli \ - containerd.io \ - docker-buildx-plugin \ - docker-compose-plugin \ - docker-ce-rootless-extras -} - -update_to_rootless() { - heading_echo "Disabling Docker Service (If Running)" - sudo systemctl disable --now docker.service docker.socket - - heading_echo "Install Rootless Docker" - dockerd-rootless-setuptool.sh install -} - -restart_docker_rootless() { - heading_echo "Restarting Docker Service" - echo "This is required as sometimes docker doesn't init correctly." - systemctl --user daemon-reload - systemctl --user restart docker - echo - echo "Done." -} - -allow_priv_port_access() { - heading_echo "Allowing Privileged Port Usage" - sudo tee -a /etc/sysctl.conf <<EOF > /dev/null 2>&1 -net.ipv4.ip_unprivileged_port_start=0 -EOF - sudo sysctl -p - echo "Done" -} - -update_docker_ps_format() { - heading_echo "Updating docker ps Formatting" - - # Root user - if [ "$RUN_AS_ROOT" = true ]; then - sudo mkdir -p /root/.docker - sudo touch /root/.docker/config.json - sudo tee /root/.docker/config.json <<EOF > /dev/null 2>&1 -{ - "psFormat": "table {{.ID}}\\t{{.Image}}\\t{{.Status}}\\t{{.Names}}" -} -EOF - fi - - # svcfmtm user - mkdir -p ~/.docker - touch ~/.docker/config.json - tee ~/.docker/config.json <<EOF > /dev/null 2>&1 -{ - "psFormat": "table {{.ID}}\\t{{.Image}}\\t{{.Status}}\\t{{.Names}}" -} -EOF - -echo "Done" -} - - -add_vars_to_bashrc() { - # DOCKER_HOST must be added to the top of bashrc, as running non-interactively - # Most distros exit .bashrc execution is non-interactive - - heading_echo "Adding DOCKER_HOST and 'dc' alias to bashrc" - - user_id=$(id -u) - docker_host_var="export DOCKER_HOST=unix:///run/user/$user_id/docker.sock" - dc_alias_cmd="alias dc='docker compose'" - - # Create temporary files for root and user bashrc - tmpfile_root=$(mktemp) - tmpfile_user=$(mktemp) - - if [ "$RUN_AS_ROOT" = true ]; then - # Check if DOCKER_HOST is already defined in /root/.bashrc - if ! sudo grep -q "$docker_host_var" /root/.bashrc; then - echo "Adding DOCKER_HOST var to /root/.bashrc." - echo "$docker_host_var" | sudo tee -a "$tmpfile_root" > /dev/null - echo - fi - - # Check if the 'dc' alias already exists in /root/.bashrc - if ! 
sudo grep -q "$dc_alias_cmd" /root/.bashrc; then - echo "Adding 'dc' alias to /root/.bashrc." - echo "$dc_alias_cmd" | sudo tee -a "$tmpfile_root" > /dev/null - echo - fi - fi - - # Check if DOCKER_HOST is already defined in ~/.bashrc - if ! grep -q "$docker_host_var" ~/.bashrc; then - echo "Adding DOCKER_HOST var to ~/.bashrc." - echo "$docker_host_var" | tee -a "$tmpfile_user" > /dev/null - echo - fi - - # Check if the 'dc' alias already exists in ~/.bashrc - if ! grep -q "$dc_alias_cmd" ~/.bashrc; then - echo "Adding 'dc' alias to ~/.bashrc." - echo "$dc_alias_cmd" | tee -a "$tmpfile_user" > /dev/null - echo - fi - - # Append the rest of the original .bashrc to the temporary file - if [ -e ~/.bashrc ]; then - grep -v -e "$docker_host_var" -e "$dc_alias_cmd" ~/.bashrc >> "$tmpfile_user" - fi - # Replace the original .bashrc with the modified file - mv "$tmpfile_user" ~/.bashrc - - # If RUN_AS_ROOT is true, replace /root/.bashrc with the modified file - if [ "$RUN_AS_ROOT" = true ]; then - # Append the rest of the original /root/.bashrc to the temporary file - if [ -e /root/.bashrc ]; then - grep -v -e "$docker_host_var" -e "$dc_alias_cmd" /root/.bashrc >> "$tmpfile_root" - fi - - # Replace the original /root/.bashrc with the modified file - sudo mv "$tmpfile_root" /root/.bashrc - fi - - echo "Setting DOCKER_HOST for the current session." - export DOCKER_HOST=unix:///run/user/$(id -u)/docker.sock - - echo - echo "Done" -} - -install_docker() { - heading_echo "Docker Install" - - if command -v docker &> /dev/null; then - echo "Docker already installed: $(which docker)" - echo "Skipping." - return 0 - fi - - echo "Docker is required for FMTM to run." - echo - echo "Do you want to install Docker? (y/n)" - echo - read -rp "Enter 'y' to install, anything else to continue: " install_docker - - if [[ "$install_docker" = "y" || "$install_docker" = "yes" ]]; then - check_os - remove_old_docker_installs - install_dependencies - add_gpg_key - add_to_apt - apt_install_docker - update_to_rootless - allow_priv_port_access - restart_docker_rootless - update_docker_ps_format - add_vars_to_bashrc - # Enable docker daemon to remain after ssh disconnect - echo - yellow_echo "Enable login linger for user $(whoami) (docker daemon on ssh disconnect)." - loginctl enable-linger "$(whoami)" - else - heading_echo "Docker is Required. Aborting." "red" - exit 1 - fi -} - -install_envsubst_if_missing() { - if ! command -v curl &> /dev/null; then - sudo apt-get update - sudo apt-get install -y curl --no-install-recommends - fi - - echo - # Get a8m/envsubst (required for default vals syntax ${VAR:-default}) - # Use local version, as envsubst may be installed on system already - if [ -f ./envsubst ]; then - echo "envsubst already exists. Continuing." - else - echo "Downloading a8m/envsubst" - echo - curl -L https://github.com/a8m/envsubst/releases/download/v1.2.0/envsubst-`uname -s`-`uname -m` -o envsubst - chmod +x envsubst - fi -} - -check_existing_dotenv() { - if [ -f "${DOTENV_NAME}" ] - then - echo "WARNING: ${DOTENV_NAME} file already exists." - echo "This script will overwrite the content of this file." - echo - echo "Do you want to overwrite file '"${DOTENV_NAME}"'? y/n" - echo - while true - do - read -e -p "Enter 'y' to overwrite, anything else to continue: " overwrite - - if [[ "$overwrite" = "y" || "$overwrite" = "yes" ]] - then - return 1 - else - echo "Continuing with existing .env file." - return 0 - fi - done - fi - - return 1 -} - -check_if_test() { - heading_echo "Test Deployment?" 
- - echo "Is this a test deployment?" - echo - while true - do - read -e -p "Enter 'y' if yes, anything else to continue: " test - - if [[ "$test" = "y" || "$test" = "yes" ]] - then - IS_TEST=true - export DEBUG="True" - export LOG_LEVEL="DEBUG" - echo "Using debug configuration." - else - IS_TEST=false - export DEBUG="False" - export LOG_LEVEL="INFO" - break - fi - break - done -} - -get_repo() { - heading_echo "Getting Necessary Files" - - current_dir="${PWD}" - - if ! command -v git &>/dev/null; then - yellow_echo "Downloading GIT." - echo - sudo apt-get update - sudo apt-get install -y git --no-install-recommends - echo - fi - - # Files in a random temp dir - mkdir -p "/tmp/${RANDOM_DIR}" - cd "/tmp/${RANDOM_DIR}" - - repo_url="https://github.com/hotosm/fmtm.git" - - echo "Cloning repo $repo_url to dir: /tmp/${RANDOM_DIR}" - echo - git clone --branch "${BRANCH_NAME}" --depth 1 "$repo_url" - - # Check for existing .env files - existing_dotenv="" - if [ "${RUN_AS_ROOT}" = true ] && sudo test -f "/root/fmtm/${DOTENV_NAME}"; then - existing_dotenv="/root/fmtm/${DOTENV_NAME}" - elif [ -f "${current_dir}/${DOTENV_NAME}" ]; then - existing_dotenv="${current_dir}/${DOTENV_NAME}" - fi - - if [ -n "$existing_dotenv" ]; then - echo - echo "Found existing dotenv file." - echo - echo "Copying $existing_dotenv --> /tmp/${RANDOM_DIR}/fmtm/${DOTENV_NAME}" - if [ "${RUN_AS_ROOT}" = true ]; then - sudo cp "$existing_dotenv" "/tmp/${RANDOM_DIR}/fmtm/" - else - cp "$existing_dotenv" "/tmp/${RANDOM_DIR}/fmtm/" - fi - fi -} - -set_deploy_env() { - heading_echo "Deployment Environment" - - while true - do - echo "Which environment do you wish to run? (dev/staging/prod)" - echo - echo "Both dev & staging include ODK Central and S3 buckets." - echo "For prod, it is expected you provide and external instances of:" - echo - echo "- ODK Central" - echo "- S3 Buckets" - echo - read -e -p "Enter the environment (dev/staging/prod): " environment - - case "$environment" in - dev) - BRANCH_NAME="development" - ;; - staging) - BRANCH_NAME="staging" - ;; - prod) - BRANCH_NAME="main" - ;; - *) - echo "Invalid environment name. Please enter dev, staging, or prod." - ;; - - esac - - export GIT_BRANCH="${BRANCH_NAME}" - COMPOSE_FILE="docker-compose.${BRANCH_NAME}.yml" - break - done -} - -set_external_odk() { - heading_echo "External ODK Central Host" - - echo "Please enter the ODKCentral URL." - read -e -p "ODKCentral URL: " ODK_CENTRAL_URL - echo - export ODK_CENTRAL_URL=${ODK_CENTRAL_URL} - - set_odk_user_creds -} - -set_fmtm_db_pass() { - db_pass=$(tr -dc 'a-zA-Z0-9' 10 characters long." - while true; do - echo - read -e -p "ODKCentral Password: " ODK_CENTRAL_PASSWD - echo - - # Check the length of the entered password - if [ ${#ODK_CENTRAL_PASSWD} -ge 10 ]; then - export ODK_CENTRAL_PASSWD=${ODK_CENTRAL_PASSWD} - break - else - yellow_echo "Password is too short. It must be at least 10 characters long." - fi - done -} - -check_external_database() { - heading_echo "External Database" - - echo "Do you want to use an external database instead of local?" - echo - while true - do - read -e -p "Enter y for external, anything else to continue: " externaldb - - if [ "$externaldb" = "y" ] - then - EXTERNAL_DB="True" - echo "Using external database." - fi - break - done - - if [ "$EXTERNAL_DB" = "True" ] - then - echo - echo "Please enter the database host." - read -e -p "FMTM DB Host: " FMTM_DB_HOST - echo - export FMTM_DB_HOST=${FMTM_DB_HOST} - - echo "Please enter the database name." 
- read -e -p "FMTM DB Name: " FMTM_DB_NAME - echo - export FMTM_DB_NAME=${FMTM_DB_NAME} - - echo "Please enter the database user." - read -e -p "FMTM DB User: " FMTM_DB_USER - echo - export FMTM_DB_USER=${FMTM_DB_USER} - - echo "Please enter the database password." - read -e -p "FMTM DB Password: " FMTM_DB_PASSWORD - echo - export FMTM_DB_PASSWORD=${FMTM_DB_PASSWORD} - - else - set_fmtm_db_pass - fi -} - -set_external_s3() { - heading_echo "S3 Credentials" - - echo "Please enter the S3 host endpoint." - read -e -p "S3 Endpoint: " S3_ENDPOINT - echo - export S3_ENDPOINT=${S3_ENDPOINT} - - echo "Please enter the access key." - read -e -p "S3 Access Key: " S3_ACCESS_KEY - echo - export S3_ACCESS_KEY=${S3_ACCESS_KEY} - - echo "Please enter the secret key." - read -e -p "S3 Secret Key: " S3_SECRET_KEY - echo - export S3_SECRET_KEY=${S3_SECRET_KEY} - - if [ "$BRANCH_NAME" = "main" ]; then - yellow_echo "Production deployments require a preconfigured S3 bucket." - echo - yellow_echo "The bucket should be public." - echo - echo "Please enter the bucket name." - read -e -p "S3 Bucket Name: " S3_BUCKET_NAME - echo - export S3_BUCKET_NAME=${S3_BUCKET_NAME} - fi -} - -set_minio_s3_creds() { - access_key=$(tr -dc 'a-zA-Z0-9' $current_ip" - yellow_echo "api.$fmtm_domain --> $current_ip" - - if [ "$BRANCH_NAME" != "main" ] - then - yellow_echo "s3.$fmtm_domain --> $current_ip" - yellow_echo "odk.$fmtm_domain --> $current_ip" - fi - - echo - read -e -p "Once these DNS entries are set and valid, press ENTER to continue." valid - - heading_echo "Certificates" - echo "FMTM will automatically generate SSL (HTTPS) certificates for your domain name." - echo - while true - do - echo "Enter an email address you wish to use for certificate generation." - echo "This will be used by LetsEncrypt, but for no other purpose." - echo - read -e -p "Email: " cert_email - - if [ "$cert_email" = "" ] - then - echo "Invalid input!" - else - export CERT_EMAIL="${cert_email}" - break - fi - done -} - -set_osm_credentials() { - heading_echo "OSM OAuth2 Credentials" - - redirect_uri="http${FMTM_DOMAIN:+s}://${FMTM_DOMAIN:-127.0.0.1:7051}/osmauth/" - - yellow_echo "App credentials are generated from your OSM user profile." - echo - yellow_echo "If you need to generate new OAuth2 App credentials, visit:" - echo - yellow_echo "> https://www.openstreetmap.org/oauth2/applications" - echo - yellow_echo "Set the redirect URI to: ${redirect_uri}" - echo - - echo "Please enter your OSM authentication details" - echo - read -e -p "Client ID: " OSM_CLIENT_ID - echo - read -e -p "Client Secret: " OSM_CLIENT_SECRET - - export OSM_CLIENT_ID=${OSM_CLIENT_ID} - export OSM_CLIENT_SECRET=${OSM_CLIENT_SECRET} - secret_key=$(tr -dc 'a-zA-Z0-9' ${DOTENV_NAME}" - ./envsubst < .env.example > ${DOTENV_NAME} - else - echo "Downloading .env.example from repo." - echo - curl -LO "https://raw.githubusercontent.com/hotosm/fmtm/${BRANCH_NAME:-development}/.env.example" - - echo - echo "substituting variables from .env.example --> ${DOTENV_NAME}" - ./envsubst < .env.example > ${DOTENV_NAME} - - echo - echo "Deleting .env.example" - rm .env.example - fi - - heading_echo "Completed Dotenv File Generation." "green" - echo "File ${DOTENV_NAME} content:" - echo - cat ${DOTENV_NAME} - echo - if [ "${RUN_AS_ROOT}" = true ] && sudo test ! -f "/root/fmtm/${DOTENV_NAME}"; then - echo "Copying generated dotenv to /root/fmtm/${DOTENV_NAME}" - cp "${DOTENV_NAME}" "/root/fmtm/${DOTENV_NAME}" || true - elif [ ! 
-f "/home/svcfmtm/${DOTENV_NAME}" ]; then - echo "Copying generated dotenv to /home/svcfmtm/fmtm/${DOTENV_NAME}" - cp "${DOTENV_NAME}" "/home/svcfmtm/fmtm/${DOTENV_NAME}" || true - fi -} - -prompt_user_gen_dotenv() { - heading_echo "Generate dotenv config for FMTM" - - # Exit if user does not overwrite existing dotenv - if check_existing_dotenv; then - return - fi - - install_envsubst_if_missing - - if [ $IS_TEST != true ]; then - if [ "$BRANCH_NAME" = "main" ]; then - set_external_odk - check_external_database - set_external_s3 - else - set_fmtm_db_pass - set_odk_db_pass - set_odk_user_creds - set_minio_s3_creds - fi - - set_domains - - else - check_change_port - fi - - set_osm_credentials - generate_dotenv -} - -run_compose_stack() { - # Workaround if DOCKER_HOST is missed (i.e. docker just installed) - if [ -z "$DOCKER_HOST" ]; then - export DOCKER_HOST=unix:///run/user/$(id -u)/docker.sock - fi - - heading_echo "Pulling Required Images" - docker compose -f ${COMPOSE_FILE} pull - heading_echo "Building Frontend Image" - docker compose -f ${COMPOSE_FILE} build ui - - heading_echo "Starting FMTM" - docker compose -f ${COMPOSE_FILE} up \ - --detach --remove-orphans --force-recreate -} - -final_output() { - # Source env vars - . .env - - proto="http" - suffix="" - - if [ "$IS_TEST" != true ]; then - proto="https" - else - suffix=":${FMTM_DEV_PORT:-7050}" - fi - - heading_echo "FMTM Setup Complete" - heading_echo "Services" "green" - echo "Frontend: ${proto}://${FMTM_DOMAIN}${suffix}" - echo "API: ${proto}://api.${FMTM_DOMAIN}${suffix}" - echo "S3 Buckets: ${proto}://s3.${FMTM_DOMAIN}${suffix}" - echo "ODK Central: ${proto}://odk.${FMTM_DOMAIN}${suffix}" - heading_echo "Inspect Containers" "green" - echo "To login as svcfmtm and inspect the containers, run:" - echo - echo "$ machinectl shell svcfmtm@" - echo "$ docker ps" - echo - echo "Alternatively, to run as the current user:" - echo - echo "$ export DOCKER_HOST=unix:///run/user/$(id -u svcfmtm)/docker.sock" - echo "$ docker ps" - echo - heading_echo "ODK Central Credentials" "green" - echo "URL: ${ODK_CENTRAL_URL}" - echo "Email: ${ODK_CENTRAL_USER}" - echo "Password: ${ODK_CENTRAL_PASSWD}" - echo -} - -install_fmtm() { - check_user_not_root - display_logo - - trap cleanup_and_exit INT - install_docker - - check_if_test - if [ $IS_TEST != true ]; then - set_deploy_env - fi - - get_repo - # Work in generated temp dir - local repo_dir="/tmp/${RANDOM_DIR}/fmtm" - cd "${repo_dir}" - - if [ -f "${repo_dir}/${DOTENV_NAME}" ]; then - heading_echo "Skip Dotenv Generation" - echo "Using existing dotenv file." 
- else - prompt_user_gen_dotenv - fi - - run_compose_stack - final_output - - if [[ "$RUN_AS_ROOT" = true ]]; then - # Remove from sudoers - sudo rm /etc/sudoers.d/fmtm-sudoers - fi - - # Cleanup files - if [[ "$IS_TEST" != true ]]; then - rm -rf "/tmp/${RANDOM_DIR:-tmp}" - fi - -} - -install_fmtm diff --git a/src/frontend/src/App.jsx b/src/frontend/src/App.jsx index e09fff60a4..9010333731 100755 --- a/src/frontend/src/App.jsx +++ b/src/frontend/src/App.jsx @@ -1,14 +1,17 @@ +import axios from 'axios'; import React, { useEffect } from 'react'; import ReactDOM from 'react-dom'; import { RouterProvider } from 'react-router-dom'; -import { store, persistor } from './store/Store'; import { Provider } from 'react-redux'; -import routes from './routes'; import { PersistGate } from 'redux-persist/integration/react'; -import './index.css'; + +import { store, persistor } from '@/store/Store'; +import routes from '@/routes'; +import environment from '@/environment'; + +import '@/index.css'; import 'ol/ol.css'; import 'react-loading-skeleton/dist/skeleton.css'; -import environment from './environment'; // Added Fix of Console Error of MUI Issue const consoleError = console.error; @@ -26,6 +29,32 @@ console.error = function filterWarnings(msg, ...args) { } }; +const GlobalInit = () => { + useEffect(() => { + axios.interceptors.request.use( + (config) => { + // Do something before request is sent + + // const excludedDomains = ['xxx', 'xxx']; + // const urlIsExcluded = excludedDomains.some((domain) => config.url.includes(domain)); + // if (!urlIsExcluded) { + // config.withCredentials = true; + // } + + config.withCredentials = true; + + return config; + }, + (error) => + // Do something with request error + Promise.reject(error), + ); + return () => {}; + }, []); + + return null; // Renders nothing +}; + const SentryInit = () => { useEffect(() => { if (import.meta.env.MODE === 'development') { @@ -105,6 +134,7 @@ ReactDOM.render( + diff --git a/src/frontend/src/api/CreateProjectService.ts b/src/frontend/src/api/CreateProjectService.ts index 3a8d302de9..e6b192e517 100755 --- a/src/frontend/src/api/CreateProjectService.ts +++ b/src/frontend/src/api/CreateProjectService.ts @@ -1,13 +1,13 @@ import axios from 'axios'; -import { CreateProjectActions } from '../store/slices/CreateProjectSlice'; +import { CreateProjectActions } from '@/store/slices/CreateProjectSlice'; import { ProjectDetailsModel, FormCategoryListModel, OrganisationListModel, -} from '../models/createproject/createProjectModel'; -import { CommonActions } from '../store/slices/CommonSlice'; +} from '@/models/createproject/createProjectModel'; +import { CommonActions } from '@/store/slices/CommonSlice'; import { ValidateCustomFormResponse } from 'store/types/ICreateProject'; -import { task_split_type } from '../types/enums'; +import { task_split_type } from '@/types/enums'; const CreateProjectService: Function = ( url: string, @@ -29,7 +29,7 @@ const CreateProjectService: Function = ( if (payload.task_split_type === task_split_type['choose_area_as_task']) { await dispatch( - UploadAreaService(`${import.meta.env.VITE_API_URL}/projects/${resp.id}/upload_multi_polygon`, fileUpload), + UploadAreaService(`${import.meta.env.VITE_API_URL}/projects/${resp.id}/custom_task_boundaries`, fileUpload), ); } else if (payload.splitting_algorithm === 'Use natural Boundary') { await dispatch( @@ -37,7 +37,7 @@ const CreateProjectService: Function = ( ); } else { await dispatch( - 
UploadAreaService(`${import.meta.env.VITE_API_URL}/projects/${resp.id}/upload_multi_polygon`, fileUpload), + UploadAreaService(`${import.meta.env.VITE_API_URL}/projects/${resp.id}/custom_task_boundaries`, fileUpload), ); // await dispatch(UploadAreaService(`${import.meta.env.VITE_API_URL}/projects/${resp.id}/upload`, fileUpload, { dimension: payload.dimension })); } @@ -212,7 +212,7 @@ const OrganisationService: Function = (url: string) => { const resp: OrganisationListModel = getOrganisationListResponse.data; dispatch(CreateProjectActions.GetOrganisationList(resp)); } catch (error) { - dispatch(CreateProjectActions.GetOrganizationListLoading(false)); + dispatch(CreateProjectActions.GetOrganisationListLoading(false)); } }; @@ -352,6 +352,14 @@ const TaskSplittingPreviewService: Function = ( dispatch(CreateProjectActions.SetIsTasksGenerated({ key: 'task_splitting_algorithm', value: true })); dispatch(CreateProjectActions.GetTaskSplittingPreview(resp)); } catch (error) { + dispatch( + CommonActions.SetSnackBar({ + open: true, + message: 'Task generation failed. Please try again', + variant: 'error', + duration: 2000, + }), + ); dispatch(CreateProjectActions.GetTaskSplittingPreviewLoading(false)); } finally { dispatch(CreateProjectActions.GetTaskSplittingPreviewLoading(false)); diff --git a/src/frontend/src/api/Files.js b/src/frontend/src/api/Files.js index 554c8c9c59..bdf97aa805 100755 --- a/src/frontend/src/api/Files.js +++ b/src/frontend/src/api/Files.js @@ -1,5 +1,5 @@ import React, { useEffect, useState } from 'react'; -import CoreModules from '../shared/CoreModules'; +import CoreModules from '@/shared/CoreModules'; export const ProjectFilesById = (url, taskId) => { const [loading, setLoading] = useState(true); diff --git a/src/frontend/src/api/HomeService.ts b/src/frontend/src/api/HomeService.ts index de3ece8b3d..b15a519147 100755 --- a/src/frontend/src/api/HomeService.ts +++ b/src/frontend/src/api/HomeService.ts @@ -1,7 +1,7 @@ import axios from 'axios'; -import { HomeActions } from '../store/slices/HomeSlice'; -import { HomeProjectCardModel } from '../models/home/homeModel'; -import environment from '../environment'; +import { HomeActions } from '@/store/slices/HomeSlice'; +import { HomeProjectCardModel } from '@/models/home/homeModel'; +import environment from '@/environment'; export const HomeSummaryService: Function = (url: string) => { return async (dispatch) => { diff --git a/src/frontend/src/api/OrganisationService.ts b/src/frontend/src/api/OrganisationService.ts new file mode 100644 index 0000000000..c31641a564 --- /dev/null +++ b/src/frontend/src/api/OrganisationService.ts @@ -0,0 +1,100 @@ +import axios from 'axios'; +import { HomeProjectCardModel } from '@/models/home/homeModel'; +import { GetOrganisationDataModel, OrganisationModal } from '@/models/organisation/organisationModel'; +import { CommonActions } from '@/store/slices/CommonSlice'; +import { OrganisationAction } from '@/store/slices/organisationSlice'; + +function appendObjectToFormData(formData, object) { + for (const [key, value] of Object.entries(object)) { + // if (key === 'logo') { + // formData.append(key, value[0]) + // } + formData.append(key, value); + } +} + +export const OrganisationService: Function = (url: string, payload: OrganisationModal) => { + return async (dispatch) => { + dispatch(CommonActions.PostOrganisationLoading(true)); + + const postOrganisation = async (url, payload) => { + try { + const generateApiFormData = new FormData(); + appendObjectToFormData(generateApiFormData, payload); + await 
axios.post(url, generateApiFormData, { + headers: { + 'Content-Type': 'multipart/form-data', + }, + }); + // const resp: HomeProjectCardModel = postOrganisationData.data; + // dispatch(CommonActions.SetOrganisationDetail(resp)) + dispatch(CommonActions.PostOrganisationLoading(false)); + } catch (error) { + dispatch(CommonActions.PostOrganisationLoading(false)); + } + }; + + await postOrganisation(url, payload); + }; +}; + +export const OrganisationDataService: Function = (url: string) => { + return async (dispatch) => { + dispatch(OrganisationAction.GetOrganisationDataLoading(true)); + const getOrganisationData = async (url) => { + try { + const getOrganisationDataResponse = await axios.get(url); + const response: GetOrganisationDataModel = getOrganisationDataResponse.data; + dispatch(OrganisationAction.GetOrganisationsData(response)); + } catch (error) { + dispatch(OrganisationAction.GetOrganisationDataLoading(false)); + } + }; + await getOrganisationData(url); + }; +}; + +export const PostOrganisationDataService: Function = (url: string, payload: any) => { + return async (dispatch) => { + dispatch(OrganisationAction.PostOrganisationDataLoading(true)); + + const postOrganisationData = async (url, payload) => { + dispatch(OrganisationAction.SetOrganisationFormData(payload)); + + try { + const generateApiFormData = new FormData(); + appendObjectToFormData(generateApiFormData, payload); + + const postOrganisationData = await axios.post(url, payload, { + headers: { + 'Content-Type': 'multipart/form-data', + }, + }); + + const resp: HomeProjectCardModel = postOrganisationData.data; + dispatch(OrganisationAction.PostOrganisationDataLoading(false)); + dispatch(OrganisationAction.postOrganisationData(resp)); + dispatch( + CommonActions.SetSnackBar({ + open: true, + message: 'Organization Successfully Created.', + variant: 'success', + duration: 2000, + }), + ); + } catch (error: any) { + dispatch( + CommonActions.SetSnackBar({ + open: true, + message: error.response.data.detail, + variant: 'error', + duration: 2000, + }), + ); + dispatch(OrganisationAction.PostOrganisationDataLoading(false)); + } + }; + + await postOrganisationData(url, payload); + }; +}; diff --git a/src/frontend/src/api/OrganizationService.ts b/src/frontend/src/api/OrganizationService.ts deleted file mode 100644 index 1aba3c4fe6..0000000000 --- a/src/frontend/src/api/OrganizationService.ts +++ /dev/null @@ -1,100 +0,0 @@ -import axios from 'axios'; -import { HomeProjectCardModel } from '../models/home/homeModel'; -import { GetOrganizationDataModel, OrganizationModal } from '../models/organization/organizationModel'; -import { CommonActions } from '../store/slices/CommonSlice'; -import { OrganizationAction } from '../store/slices/organizationSlice'; - -function appendObjectToFormData(formData, object) { - for (const [key, value] of Object.entries(object)) { - // if (key === 'logo') { - // formData.append(key, value[0]) - // } - formData.append(key, value); - } -} - -export const OrganizationService: Function = (url: string, payload: OrganizationModal) => { - return async (dispatch) => { - dispatch(CommonActions.PostOrganizationLoading(true)); - - const postOrganization = async (url, payload) => { - try { - const generateApiFormData = new FormData(); - appendObjectToFormData(generateApiFormData, payload); - await axios.post(url, generateApiFormData, { - headers: { - 'Content-Type': 'multipart/form-data', - }, - }); - // const resp: HomeProjectCardModel = postOrganizationData.data; - // 
dispatch(CommonActions.SetOrganizationDetail(resp)) - dispatch(CommonActions.PostOrganizationLoading(false)); - } catch (error) { - dispatch(CommonActions.PostOrganizationLoading(false)); - } - }; - - await postOrganization(url, payload); - }; -}; - -export const OrganizationDataService: Function = (url: string) => { - return async (dispatch) => { - dispatch(OrganizationAction.GetOrganizationDataLoading(true)); - const getOrganizationData = async (url) => { - try { - const getOrganizationDataResponse = await axios.get(url); - const response: GetOrganizationDataModel = getOrganizationDataResponse.data; - dispatch(OrganizationAction.GetOrganizationsData(response)); - } catch (error) { - dispatch(OrganizationAction.GetOrganizationDataLoading(false)); - } - }; - await getOrganizationData(url); - }; -}; - -export const PostOrganizationDataService: Function = (url: string, payload: any) => { - return async (dispatch) => { - dispatch(OrganizationAction.PostOrganizationDataLoading(true)); - - const postOrganizationData = async (url, payload) => { - dispatch(OrganizationAction.SetOrganizationFormData(payload)); - - try { - const generateApiFormData = new FormData(); - appendObjectToFormData(generateApiFormData, payload); - - const postOrganizationData = await axios.post(url, payload, { - headers: { - 'Content-Type': 'multipart/form-data', - }, - }); - - const resp: HomeProjectCardModel = postOrganizationData.data; - dispatch(OrganizationAction.PostOrganizationDataLoading(false)); - dispatch(OrganizationAction.postOrganizationData(resp)); - dispatch( - CommonActions.SetSnackBar({ - open: true, - message: 'Organization Successfully Created.', - variant: 'success', - duration: 2000, - }), - ); - } catch (error: any) { - dispatch( - CommonActions.SetSnackBar({ - open: true, - message: error.response.data.detail, - variant: 'error', - duration: 2000, - }), - ); - dispatch(OrganizationAction.PostOrganizationDataLoading(false)); - } - }; - - await postOrganizationData(url, payload); - }; -}; diff --git a/src/frontend/src/api/Project.js b/src/frontend/src/api/Project.js index 7f85f0e333..625d531075 100755 --- a/src/frontend/src/api/Project.js +++ b/src/frontend/src/api/Project.js @@ -1,7 +1,7 @@ -import { ProjectActions } from '../store/slices/ProjectSlice'; -import CoreModules from '../shared/CoreModules'; -import environment from '../environment'; -import { task_priority_str } from '../types/enums'; +import { ProjectActions } from '@/store/slices/ProjectSlice'; +import CoreModules from '@/shared/CoreModules'; +import environment from '@/environment'; +import { task_priority_str } from '@/types/enums'; export const ProjectById = (existingProjectList, projectId) => { return async (dispatch) => { diff --git a/src/frontend/src/api/ProjectTaskStatus.js b/src/frontend/src/api/ProjectTaskStatus.js index 6e88a54b06..5e3dbe6960 100755 --- a/src/frontend/src/api/ProjectTaskStatus.js +++ b/src/frontend/src/api/ProjectTaskStatus.js @@ -1,9 +1,9 @@ -import { ProjectActions } from '../store/slices/ProjectSlice'; +import { ProjectActions } from '@/store/slices/ProjectSlice'; import { easeIn, easeOut } from 'ol/easing'; -import { HomeActions } from '../store/slices/HomeSlice'; -import CoreModules from '../shared/CoreModules'; -import { CommonActions } from '../store/slices/CommonSlice'; -import { task_priority_str } from '../types/enums'; +import { HomeActions } from '@/store/slices/HomeSlice'; +import CoreModules from '@/shared/CoreModules'; +import { CommonActions } from '@/store/slices/CommonSlice'; +import { 
task_priority_str } from '@/types/enums'; const UpdateTaskStatus = (url, style, existingData, currentProjectId, feature, map, view, taskId, body) => { return async (dispatch) => { diff --git a/src/frontend/src/api/Submission.ts b/src/frontend/src/api/Submission.ts index c282acefe5..fe96f2c484 100644 --- a/src/frontend/src/api/Submission.ts +++ b/src/frontend/src/api/Submission.ts @@ -1,5 +1,5 @@ import axios from 'axios'; -import { SubmissionActions } from '../store/slices/SubmissionSlice'; +import { SubmissionActions } from '@/store/slices/SubmissionSlice'; export const SubmissionService: Function = (url: string) => { return async (dispatch) => { diff --git a/src/frontend/src/api/SubmissionService.ts b/src/frontend/src/api/SubmissionService.ts index 66aedadc34..70d4e2a308 100644 --- a/src/frontend/src/api/SubmissionService.ts +++ b/src/frontend/src/api/SubmissionService.ts @@ -1,6 +1,6 @@ -import CoreModules from '../shared/CoreModules'; -import { ProjectActions } from '../store/slices/ProjectSlice'; -// import { HomeProjectCardModel } from '../models/home/homeModel'; +import CoreModules from '@/shared/CoreModules'; +import { ProjectActions } from '@/store/slices/ProjectSlice'; +// import { HomeProjectCardModel } from '@/models/home/homeModel'; export const ProjectSubmissionService: Function = (url: string) => { return async (dispatch) => { diff --git a/src/frontend/src/api/index.js b/src/frontend/src/api/index.js new file mode 100644 index 0000000000..1c74257b01 --- /dev/null +++ b/src/frontend/src/api/index.js @@ -0,0 +1,6 @@ +import axios from 'axios'; + +export const API = axios.create({ + baseURL: import.meta.env.VITE_API_URL, + withCredentials: true, +}); diff --git a/src/frontend/src/api/task.ts b/src/frontend/src/api/task.ts index 19b5158fe6..629fa90a99 100644 --- a/src/frontend/src/api/task.ts +++ b/src/frontend/src/api/task.ts @@ -1,5 +1,5 @@ -import CoreModules from '../shared/CoreModules'; -import { CommonActions } from '../store/slices/CommonSlice'; +import CoreModules from '@/shared/CoreModules'; +import { CommonActions } from '@/store/slices/CommonSlice'; export const fetchInfoTask: Function = (url: string) => { return async (dispatch) => { diff --git a/src/frontend/src/components/Activities.jsx b/src/frontend/src/components/Activities.jsx index 16c1b863e3..f916003585 100755 --- a/src/frontend/src/components/Activities.jsx +++ b/src/frontend/src/components/Activities.jsx @@ -1,9 +1,9 @@ import React from 'react'; -import IconButtonCard from '../utilities/IconButtonCard'; -import environment from '../environment'; +import IconButtonCard from '@/utilities/IconButtonCard'; +import environment from '@/environment'; import { easeIn, easeOut } from 'ol/easing'; -import CoreModules from '../shared/CoreModules'; -import AssetModules from '../shared/AssetModules'; +import CoreModules from '@/shared/CoreModules'; +import AssetModules from '@/shared/AssetModules'; //Activity Model to be display in Activities panel const Activities = ({ history, defaultTheme, mapDivPostion, map, view, state, params }) => { const index = state.projectTaskBoundries.findIndex((project) => project.id == environment.decode(params.id)); diff --git a/src/frontend/src/components/ActivitiesPanel.jsx b/src/frontend/src/components/ActivitiesPanel.jsx index 2725bad4d8..0c5a0d2976 100755 --- a/src/frontend/src/components/ActivitiesPanel.jsx +++ b/src/frontend/src/components/ActivitiesPanel.jsx @@ -1,9 +1,9 @@ import React, { useEffect, useState } from 'react'; -import BasicCard from '../utilities/BasicCard'; 
-import Activities from '../components/Activities'; -import environment from '../environment'; -import CoreModules from '../shared/CoreModules'; -import AssetModules from '../shared/AssetModules'; +import BasicCard from '@/utilities/BasicCard'; +import Activities from '@/components/Activities'; +import environment from '@/environment'; +import CoreModules from '@/shared/CoreModules'; +import AssetModules from '@/shared/AssetModules'; const Search = AssetModules.styled('div')(({ theme }) => ({ position: 'relative', diff --git a/src/frontend/src/components/DialogTaskActions.jsx b/src/frontend/src/components/DialogTaskActions.jsx index 5b0a3179c7..cfdcb100ff 100755 --- a/src/frontend/src/components/DialogTaskActions.jsx +++ b/src/frontend/src/components/DialogTaskActions.jsx @@ -1,10 +1,10 @@ import React, { useState, useEffect } from 'react'; -import environment from '../environment'; -import ProjectTaskStatus from '../api/ProjectTaskStatus'; -import MapStyles from '../hooks/MapStyles'; -import CoreModules from '../shared/CoreModules'; -import { CommonActions } from '../store/slices/CommonSlice'; -import { task_priority_str } from '../types/enums'; +import environment from '@/environment'; +import ProjectTaskStatus from '@/api/ProjectTaskStatus'; +import MapStyles from '@/hooks/MapStyles'; +import CoreModules from '@/shared/CoreModules'; +import { CommonActions } from '@/store/slices/CommonSlice'; +import { task_priority_str } from '@/types/enums'; export default function Dialog({ taskId, feature, map, view }) { // const featureStatus = feature.id_ != undefined ? feature.id_.replace("_", ",").split(',')[1] : null; diff --git a/src/frontend/src/components/GenerateBasemap.jsx b/src/frontend/src/components/GenerateBasemap.jsx index bc6c5bfcd9..d00f39664f 100644 --- a/src/frontend/src/components/GenerateBasemap.jsx +++ b/src/frontend/src/components/GenerateBasemap.jsx @@ -1,8 +1,8 @@ import React, { useEffect, useState } from 'react'; -import CoreModules from '../shared/CoreModules'; -import AssetModules from '../shared/AssetModules'; -import environment from '../environment'; -import { DownloadTile, GenerateProjectTiles, GetTilesList } from '../api/Project'; +import CoreModules from '@/shared/CoreModules'; +import AssetModules from '@/shared/AssetModules'; +import environment from '@/environment'; +import { DownloadTile, GenerateProjectTiles, GetTilesList } from '@/api/Project'; const GenerateBasemap = ({ setToggleGenerateModal, toggleGenerateModal, projectInfo }) => { const dispatch = CoreModules.useAppDispatch(); diff --git a/src/frontend/src/components/MapComponent/OpenLayersComponent/LayerSwitcher/index.js b/src/frontend/src/components/MapComponent/OpenLayersComponent/LayerSwitcher/index.js index a574f4f5d7..1e5ca13fac 100644 --- a/src/frontend/src/components/MapComponent/OpenLayersComponent/LayerSwitcher/index.js +++ b/src/frontend/src/components/MapComponent/OpenLayersComponent/LayerSwitcher/index.js @@ -161,12 +161,18 @@ const LayerSwitcherControl = ({ map, visible = 'osm' }) => { layerSwitcher.style.justifyContent = 'center'; layerSwitcher.style.alignItems = 'center'; } - if (location.pathname.includes('project_details')) { + if ( + location.pathname.includes('project_details') || + location.pathname.includes('upload-area') || + location.pathname.includes('select-form') || + location.pathname.includes('data-extract') || + location.pathname.includes('split-tasks') + ) { const olZoom = document.querySelector('.ol-zoom'); if (olZoom) { olZoom.style.display = 'none'; } - if (layerSwitcher) 
{ + if (layerSwitcher && location.pathname.includes('project_details')) { layerSwitcher.style.right = '19px'; layerSwitcher.style.top = '250px'; layerSwitcher.style.zIndex = '1000'; diff --git a/src/frontend/src/components/MapComponent/OpenLayersComponent/Layers/ClusterLayer.js b/src/frontend/src/components/MapComponent/OpenLayersComponent/Layers/ClusterLayer.js index bbe26f93f4..7ebaa41501 100644 --- a/src/frontend/src/components/MapComponent/OpenLayersComponent/Layers/ClusterLayer.js +++ b/src/frontend/src/components/MapComponent/OpenLayersComponent/Layers/ClusterLayer.js @@ -8,9 +8,9 @@ import Fill from 'ol/style/Fill'; import { Cluster, OSM as OSMSource } from 'ol/source'; import { Text, Circle, Icon } from 'ol/style'; import VectorSource from 'ol/source/Vector'; -import { hexToRgba } from '../../../MapComponent/OpenLayersComponent/helpers/styleUtils'; import SelectCluster from 'ol-ext/interaction/SelectCluster'; -import MarkerIcon from '../../../../assets/images/red_marker.png'; +import { hexToRgba } from '@/components/MapComponent/OpenLayersComponent/helpers/styleUtils'; +import MarkerIcon from '@/assets/images/red_marker.png'; function setAsyncStyle(style, feature, getIndividualStyle) { const styleCache = {}; @@ -202,8 +202,8 @@ const ClusterLayer = ({ font: '15px Times New Roman', }), }); - return style; fillColor = '#96bfff'; + return style; } else { return; } diff --git a/src/frontend/src/components/MapComponent/OpenLayersComponent/Layers/VectorLayer.js b/src/frontend/src/components/MapComponent/OpenLayersComponent/Layers/VectorLayer.js index 546bd7fddf..f88a824ef3 100644 --- a/src/frontend/src/components/MapComponent/OpenLayersComponent/Layers/VectorLayer.js +++ b/src/frontend/src/components/MapComponent/OpenLayersComponent/Layers/VectorLayer.js @@ -8,8 +8,8 @@ import { Circle as CircleStyle, Fill, Stroke, Style } from 'ol/style.js'; import GeoJSON from 'ol/format/GeoJSON'; import { Vector as VectorSource } from 'ol/source'; import OLVectorLayer from 'ol/layer/Vector'; -import { defaultStyles, getStyles } from '../helpers/styleUtils'; -import { isExtentValid } from '../helpers/layerUtils'; +import { defaultStyles, getStyles } from '@/components/MapComponent/OpenLayersComponent/helpers/styleUtils'; +import { isExtentValid } from '@/components/MapComponent/OpenLayersComponent/helpers/layerUtils'; import { Draw, Modify, Snap, Select, defaults as defaultInteractions } from 'ol/interaction.js'; import { getArea } from 'ol/sphere'; import { valid } from 'geojson-validation'; @@ -74,14 +74,18 @@ const VectorLayer = ({ dataProjection: 'EPSG:4326', featureProjection: 'EPSG:3857', }); + const geometry = vectorLayer.getSource().getFeatures()?.[0].getGeometry(); + const area = formatArea(geometry); - onModify(geoJSONString); + onModify(geoJSONString, area); }); map.addInteraction(modify); map.addInteraction(select); return () => { - // map.removeInteraction(defaultInteractions().extend([select, modify])) + // map.removeInteraction(defaultInteractions().extend([select, modify])); + map.removeInteraction(modify); + map.removeInteraction(select); }; }, [map, vectorLayer, onModify]); @@ -191,23 +195,27 @@ const VectorLayer = ({ useEffect(() => { if (!vectorLayer || !style.visibleOnMap || setStyle) return; - vectorLayer.setStyle((feature, resolution) => [ - new Style({ - image: new CircleStyle({ - radius: 5, - fill: new Fill({ - color: 'orange', - }), - }), - geometry: function (feature) { - // return the coordinates of the first ring of the polygon - const coordinates = 
feature.getGeometry().getCoordinates()[0]; - return new MultiPoint(coordinates); - }, - }), - getStyles({ style, feature, resolution }), - ]); - }, [vectorLayer, style, setStyle]); + vectorLayer.setStyle((feature, resolution) => { + return onModify + ? [ + new Style({ + image: new CircleStyle({ + radius: 5, + fill: new Fill({ + color: 'orange', + }), + }), + geometry: function (feature) { + // return the coordinates of the first ring of the polygon + const coordinates = feature.getGeometry().getCoordinates()[0]; + return new MultiPoint(coordinates); + }, + }), + getStyles({ style, feature, resolution }), + ] + : [getStyles({ style, feature, resolution })]; + }); + }, [vectorLayer, style, setStyle, onModify]); useEffect(() => { if (!vectorLayer) return; @@ -254,7 +262,6 @@ const VectorLayer = ({ }); function pointerMovefn(event) { vectorLayer.getFeatures(event.pixel).then((features) => { - console.log(selection, 'selection'); if (!features.length) { selection = {}; hoverEffect(undefined, vectorLayer); diff --git a/src/frontend/src/components/MapComponent/OpenLayersComponent/Layers/VectorTileLayer.js b/src/frontend/src/components/MapComponent/OpenLayersComponent/Layers/VectorTileLayer.js index c7cd0d7f45..3110bbd959 100644 --- a/src/frontend/src/components/MapComponent/OpenLayersComponent/Layers/VectorTileLayer.js +++ b/src/frontend/src/components/MapComponent/OpenLayersComponent/Layers/VectorTileLayer.js @@ -6,8 +6,8 @@ import VectorTileSource from 'ol/source/VectorTile'; import { transformExtent } from 'ol/proj'; import Stroke from 'ol/style/Stroke'; import Style from 'ol/style/Style'; -import { getStyles, defaultStyles } from '../helpers/styleUtils'; -import { isExtentValid } from '../helpers/layerUtils'; +import { getStyles, defaultStyles } from '@/components/MapComponent/OpenLayersComponent/helpers/styleUtils'; +import { isExtentValid } from '@/components/MapComponent/OpenLayersComponent/helpers/layerUtils'; const selectElement = 'singleselect'; diff --git a/src/frontend/src/components/MapComponent/OpenLayersComponent/Layers/index.js b/src/frontend/src/components/MapComponent/OpenLayersComponent/Layers/index.js index da4c7b0197..6df3d4417f 100644 --- a/src/frontend/src/components/MapComponent/OpenLayersComponent/Layers/index.js +++ b/src/frontend/src/components/MapComponent/OpenLayersComponent/Layers/index.js @@ -1,4 +1,4 @@ -export { default as VectorTileLayer } from './VectorTileLayer'; +export { default as VectorTileLayer } from '@/components/MapComponent/OpenLayersComponent/Layers/VectorTileLayer'; -export { default as VectorLayer } from './VectorLayer'; -export { default as ClusterLayer } from './ClusterLayer'; +export { default as VectorLayer } from '@/components/MapComponent/OpenLayersComponent/Layers/VectorLayer'; +export { default as ClusterLayer } from '@/components/MapComponent/OpenLayersComponent/Layers/ClusterLayer'; diff --git a/src/frontend/src/components/MapComponent/OpenLayersComponent/MapContainer/index.jsx b/src/frontend/src/components/MapComponent/OpenLayersComponent/MapContainer/index.jsx index b7abafa404..b3dc95b82d 100644 --- a/src/frontend/src/components/MapComponent/OpenLayersComponent/MapContainer/index.jsx +++ b/src/frontend/src/components/MapComponent/OpenLayersComponent/MapContainer/index.jsx @@ -34,4 +34,6 @@ MapContainer.propTypes = { mapInstance: PropTypes.oneOfType([PropTypes.object, PropTypes.number]), }; +MapContainer.displayName = 'MapContainer'; + export default MapContainer; diff --git 
a/src/frontend/src/components/MapComponent/OpenLayersComponent/index.js b/src/frontend/src/components/MapComponent/OpenLayersComponent/index.js index 590d8e71b3..1921fe57f3 100644 --- a/src/frontend/src/components/MapComponent/OpenLayersComponent/index.js +++ b/src/frontend/src/components/MapComponent/OpenLayersComponent/index.js @@ -1,3 +1,3 @@ -export { default as MapContainer } from './MapContainer'; +export { default as MapContainer } from '@/components/MapComponent/OpenLayersComponent/MapContainer'; -export { default as useOLMap } from './useOLMap'; +export { default as useOLMap } from '@/components/MapComponent/OpenLayersComponent/useOLMap'; diff --git a/src/frontend/src/components/MapDescriptionComponents.jsx b/src/frontend/src/components/MapDescriptionComponents.jsx index 3fcd0bf23f..98f738dff2 100755 --- a/src/frontend/src/components/MapDescriptionComponents.jsx +++ b/src/frontend/src/components/MapDescriptionComponents.jsx @@ -1,6 +1,6 @@ import React from 'react'; -import CustomizedMenus from '../utilities/CustomizedMenus'; -import CoreModules from '../shared/CoreModules'; +import CustomizedMenus from '@/utilities/CustomizedMenus'; +import CoreModules from '@/shared/CoreModules'; const MapDescriptionComponents = ({ type, state, defaultTheme }) => { const descriptionData = [ diff --git a/src/frontend/src/components/MapLegends.jsx b/src/frontend/src/components/MapLegends.jsx index 9361c9afa4..c0c31adf4e 100755 --- a/src/frontend/src/components/MapLegends.jsx +++ b/src/frontend/src/components/MapLegends.jsx @@ -1,6 +1,6 @@ import React from 'react'; -import CoreModules from '../shared/CoreModules'; -import AssetModules from '../shared/AssetModules'; +import CoreModules from '@/shared/CoreModules'; +import AssetModules from '@/shared/AssetModules'; const MapLegends = ({ direction, spacing, iconBtnProps, defaultTheme, valueStatus }) => { const MapDetails = [ diff --git a/src/frontend/src/components/OpenLayersMap.jsx b/src/frontend/src/components/OpenLayersMap.jsx index 191c45a2c1..8682c3de80 100755 --- a/src/frontend/src/components/OpenLayersMap.jsx +++ b/src/frontend/src/components/OpenLayersMap.jsx @@ -1,27 +1,27 @@ import React, { useEffect, useState } from 'react'; -import DialogTaskActions from '../components/DialogTaskActions'; +import DialogTaskActions from '@/components/DialogTaskActions'; import '../styles/home.scss'; -import CoreModules from '../shared/CoreModules'; +import CoreModules from '@/shared/CoreModules'; import Control from 'ol/control/Control'; -import locationImg from '../assets/images/location.png'; -import accDownImg from '../assets/images/acc-down.png'; -import accUpImg from '../assets/images/acc-up.png'; -import gridIcon from '../assets/images/grid.png'; -import QrcodeComponent from './QrcodeComponent'; +import locationImg from '@/assets/images/location.png'; +import accDownImg from '@/assets/images/acc-down.png'; +import accUpImg from '@/assets/images/acc-up.png'; +import gridIcon from '@/assets/images/grid.png'; +import QrcodeComponent from '@/components/QrcodeComponent'; import * as ol from 'ol'; import { Point } from 'ol/geom'; import Vector from 'ol/layer/Vector'; import VectorSource from 'ol/source/Vector'; import { transform } from 'ol/proj'; import { Icon, Style } from 'ol/style'; -import LocationImage from '../assets/images/location.png'; -import AssetModules from '../shared/AssetModules'; -import { Modal } from '../components/common/Modal'; -import Button from './common/Button'; -import { ProjectActions } from '../store/slices/ProjectSlice'; 
-import TaskSectionModal from './ProjectDetails/TaskSectionPopup'; +import LocationImage from '@/assets/images/location.png'; +import AssetModules from '@/shared/AssetModules'; +import { Modal } from '@/components/common/Modal'; +import Button from '@/components/common/Button'; +import { ProjectActions } from '@/store/slices/ProjectSlice'; +import TaskSectionModal from '@/components/ProjectDetails/TaskSectionPopup'; import VectorLayer from 'ol/layer/Vector'; -import WindowDimension from '../hooks/WindowDimension'; +import WindowDimension from '@/hooks/WindowDimension'; let currentLocationLayer = null; const OpenLayersMap = ({ diff --git a/src/frontend/src/components/ProjectDetails/MapControlComponent.tsx b/src/frontend/src/components/ProjectDetails/MapControlComponent.tsx index a0ab6ea5fd..329510978f 100644 --- a/src/frontend/src/components/ProjectDetails/MapControlComponent.tsx +++ b/src/frontend/src/components/ProjectDetails/MapControlComponent.tsx @@ -1,8 +1,8 @@ import React, { useState } from 'react'; -import AssetModules from '../../shared/AssetModules'; +import AssetModules from '@/shared/AssetModules'; import VectorLayer from 'ol/layer/Vector'; -import CoreModules from '../../shared/CoreModules.js'; -import { ProjectActions } from '../../store/slices/ProjectSlice'; +import CoreModules from '@/shared/CoreModules.js'; +import { ProjectActions } from '@/store/slices/ProjectSlice'; const MapControlComponent = ({ map }) => { const btnList = [ diff --git a/src/frontend/src/components/ProjectDetails/MobileActivitiesContents.tsx b/src/frontend/src/components/ProjectDetails/MobileActivitiesContents.tsx index d8cd5e6764..c989785d43 100644 --- a/src/frontend/src/components/ProjectDetails/MobileActivitiesContents.tsx +++ b/src/frontend/src/components/ProjectDetails/MobileActivitiesContents.tsx @@ -1,6 +1,6 @@ import React from 'react'; -import ActivitiesPanel from '../ActivitiesPanel'; -import CoreModules from '../../shared/CoreModules'; +import ActivitiesPanel from '@/components/ActivitiesPanel'; +import CoreModules from '@/shared/CoreModules'; const MobileActivitiesContents = ({ map, mainView, mapDivPostion }) => { const params = CoreModules.useParams(); diff --git a/src/frontend/src/components/ProjectDetails/MobileFooter.tsx b/src/frontend/src/components/ProjectDetails/MobileFooter.tsx index 54233e25a2..63b458d7ab 100644 --- a/src/frontend/src/components/ProjectDetails/MobileFooter.tsx +++ b/src/frontend/src/components/ProjectDetails/MobileFooter.tsx @@ -1,7 +1,7 @@ import React from 'react'; -import AssetModules from '../../shared/AssetModules.js'; -import CoreModules from '../../shared/CoreModules'; -import { ProjectActions } from '../../store/slices/ProjectSlice'; +import AssetModules from '@/shared/AssetModules.js'; +import CoreModules from '@/shared/CoreModules'; +import { ProjectActions } from '@/store/slices/ProjectSlice'; const MobileFooter = () => { const dispatch = CoreModules.useAppDispatch(); diff --git a/src/frontend/src/components/ProjectDetails/MobileProjectInfoContent.tsx b/src/frontend/src/components/ProjectDetails/MobileProjectInfoContent.tsx index ff15c16a12..35757d1d85 100644 --- a/src/frontend/src/components/ProjectDetails/MobileProjectInfoContent.tsx +++ b/src/frontend/src/components/ProjectDetails/MobileProjectInfoContent.tsx @@ -1,5 +1,5 @@ import React from 'react'; -import AssetModules from '../../shared/AssetModules'; +import AssetModules from '@/shared/AssetModules'; const MobileProjectInfoContent = ({ projectInfo }) => { return ( diff --git 
a/src/frontend/src/components/ProjectDetails/ProjectOptions.tsx b/src/frontend/src/components/ProjectDetails/ProjectOptions.tsx index 440c2d2b1c..3e4fb42119 100644 --- a/src/frontend/src/components/ProjectDetails/ProjectOptions.tsx +++ b/src/frontend/src/components/ProjectDetails/ProjectOptions.tsx @@ -1,9 +1,9 @@ import React, { useState } from 'react'; -import CoreModules from '../../shared/CoreModules'; -import AssetModules from '../../shared/AssetModules'; -import environment from '../../environment'; -import { DownloadDataExtract, DownloadProjectForm } from '../../api/Project'; -import { ProjectActions } from '../../store/slices/ProjectSlice'; +import CoreModules from '@/shared/CoreModules'; +import AssetModules from '@/shared/AssetModules'; +import environment from '@/environment'; +import { DownloadDataExtract, DownloadProjectForm } from '@/api/Project'; +import { ProjectActions } from '@/store/slices/ProjectSlice'; const ProjectOptions = ({ setToggleGenerateModal }) => { const dispatch = CoreModules.useAppDispatch(); diff --git a/src/frontend/src/components/ProjectDetails/TaskSectionPopup.tsx b/src/frontend/src/components/ProjectDetails/TaskSectionPopup.tsx index 2f91da89f9..0a46469b6d 100644 --- a/src/frontend/src/components/ProjectDetails/TaskSectionPopup.tsx +++ b/src/frontend/src/components/ProjectDetails/TaskSectionPopup.tsx @@ -1,7 +1,7 @@ import React from 'react'; -import CoreModules from '../../shared/CoreModules'; -import AssetModules from '../../shared/AssetModules'; -import { ProjectActions } from '../../store/slices/ProjectSlice'; +import CoreModules from '@/shared/CoreModules'; +import AssetModules from '@/shared/AssetModules'; +import { ProjectActions } from '@/store/slices/ProjectSlice'; const TaskSectionPopup = ({ body }) => { const dispatch = CoreModules.useAppDispatch(); diff --git a/src/frontend/src/components/ProjectDetailsV2/ActivitiesPanel.tsx b/src/frontend/src/components/ProjectDetailsV2/ActivitiesPanel.tsx index fcd4a4869e..8022e67482 100644 --- a/src/frontend/src/components/ProjectDetailsV2/ActivitiesPanel.tsx +++ b/src/frontend/src/components/ProjectDetailsV2/ActivitiesPanel.tsx @@ -1,13 +1,13 @@ /* eslint-disable react/jsx-key */ import React, { useEffect, useState } from 'react'; -import environment from '../../environment'; -import CoreModules from '../../shared/CoreModules'; -import AssetModules from '../../shared/AssetModules'; -import { CustomSelect } from '../../components/common/Select'; -import profilePic from '../../assets/images/project_icon.png'; +import environment from '@/environment'; +import CoreModules from '@/shared/CoreModules'; +import AssetModules from '@/shared/AssetModules'; +import { CustomSelect } from '@/components/common/Select'; +import profilePic from '@/assets/images/project_icon.png'; import { Feature } from 'ol'; import { Polygon } from 'ol/geom'; -import { ActivitiesCardSkeletonLoader, ShowingCountSkeletonLoader } from './SkeletonLoader'; +import { ActivitiesCardSkeletonLoader, ShowingCountSkeletonLoader } from '@/components/ProjectDetailsV2/SkeletonLoader'; const sortByList = [ { id: 'activities', name: 'Activities' }, @@ -133,13 +133,13 @@ const ActivitiesPanel = ({ defaultTheme, state, params, map, view, mapDivPostion return (
-
+
diff --git a/src/frontend/src/components/ProjectDetailsV2/MapControlComponent.tsx b/src/frontend/src/components/ProjectDetailsV2/MapControlComponent.tsx index a0ab6ea5fd..329510978f 100644 --- a/src/frontend/src/components/ProjectDetailsV2/MapControlComponent.tsx +++ b/src/frontend/src/components/ProjectDetailsV2/MapControlComponent.tsx @@ -1,8 +1,8 @@ import React, { useState } from 'react'; -import AssetModules from '../../shared/AssetModules'; +import AssetModules from '@/shared/AssetModules'; import VectorLayer from 'ol/layer/Vector'; -import CoreModules from '../../shared/CoreModules.js'; -import { ProjectActions } from '../../store/slices/ProjectSlice'; +import CoreModules from '@/shared/CoreModules.js'; +import { ProjectActions } from '@/store/slices/ProjectSlice'; const MapControlComponent = ({ map }) => { const btnList = [ diff --git a/src/frontend/src/components/ProjectDetailsV2/MobileActivitiesContents.tsx b/src/frontend/src/components/ProjectDetailsV2/MobileActivitiesContents.tsx index 177a28585f..cffe8e4c72 100644 --- a/src/frontend/src/components/ProjectDetailsV2/MobileActivitiesContents.tsx +++ b/src/frontend/src/components/ProjectDetailsV2/MobileActivitiesContents.tsx @@ -1,6 +1,6 @@ import React from 'react'; -import ActivitiesPanel from './ActivitiesPanel'; -import CoreModules from '../../shared/CoreModules'; +import ActivitiesPanel from '@/components/ProjectDetailsV2/ActivitiesPanel'; +import CoreModules from '@/shared/CoreModules'; const MobileActivitiesContents = ({ map, mainView, mapDivPostion }) => { const params = CoreModules.useParams(); diff --git a/src/frontend/src/components/ProjectDetailsV2/MobileFooter.tsx b/src/frontend/src/components/ProjectDetailsV2/MobileFooter.tsx index cab3cc8ca0..0028a127da 100644 --- a/src/frontend/src/components/ProjectDetailsV2/MobileFooter.tsx +++ b/src/frontend/src/components/ProjectDetailsV2/MobileFooter.tsx @@ -1,7 +1,7 @@ import React from 'react'; -import AssetModules from '../../shared/AssetModules.js'; -import CoreModules from '../../shared/CoreModules'; -import { ProjectActions } from '../../store/slices/ProjectSlice'; +import AssetModules from '@/shared/AssetModules.js'; +import CoreModules from '@/shared/CoreModules'; +import { ProjectActions } from '@/store/slices/ProjectSlice'; const MobileFooter = () => { const dispatch = CoreModules.useAppDispatch(); diff --git a/src/frontend/src/components/ProjectDetailsV2/MobileProjectInfoContent.tsx b/src/frontend/src/components/ProjectDetailsV2/MobileProjectInfoContent.tsx index ff15c16a12..35757d1d85 100644 --- a/src/frontend/src/components/ProjectDetailsV2/MobileProjectInfoContent.tsx +++ b/src/frontend/src/components/ProjectDetailsV2/MobileProjectInfoContent.tsx @@ -1,5 +1,5 @@ import React from 'react'; -import AssetModules from '../../shared/AssetModules'; +import AssetModules from '@/shared/AssetModules'; const MobileProjectInfoContent = ({ projectInfo }) => { return ( diff --git a/src/frontend/src/components/ProjectDetailsV2/ProjectInfo.tsx b/src/frontend/src/components/ProjectDetailsV2/ProjectInfo.tsx index d3e22756cd..58a3692a36 100644 --- a/src/frontend/src/components/ProjectDetailsV2/ProjectInfo.tsx +++ b/src/frontend/src/components/ProjectDetailsV2/ProjectInfo.tsx @@ -1,7 +1,7 @@ import React, { useEffect, useRef, useState } from 'react'; -import AssetModules from '../../shared/AssetModules.js'; -import ProjectIcon from '../../assets/images/project_icon.png'; -import CoreModules from '../../shared/CoreModules'; +import AssetModules from '@/shared/AssetModules.js'; 
+import ProjectIcon from '@/assets/images/project_icon.png'; +import CoreModules from '@/shared/CoreModules'; const ProjectInfo = () => { const paraRef = useRef(null); @@ -87,12 +87,12 @@ const ProjectInfo = () => {
Organization Photo
-

{projectDashboardDetail?.organization}

+

{projectDashboardDetail?.organisation}

)}
diff --git a/src/frontend/src/components/ProjectDetailsV2/ProjectOptions.tsx b/src/frontend/src/components/ProjectDetailsV2/ProjectOptions.tsx index 66ad5e5074..e89fd5c936 100644 --- a/src/frontend/src/components/ProjectDetailsV2/ProjectOptions.tsx +++ b/src/frontend/src/components/ProjectDetailsV2/ProjectOptions.tsx @@ -1,9 +1,9 @@ import React, { useState } from 'react'; -import CoreModules from '../../shared/CoreModules'; -import AssetModules from '../../shared/AssetModules'; -import environment from '../../environment'; -import { DownloadDataExtract, DownloadProjectForm } from '../../api/Project'; -import { ProjectActions } from '../../store/slices/ProjectSlice'; +import CoreModules from '@/shared/CoreModules'; +import AssetModules from '@/shared/AssetModules'; +import environment from '@/environment'; +import { DownloadDataExtract, DownloadProjectForm } from '@/api/Project'; +import { ProjectActions } from '@/store/slices/ProjectSlice'; const ProjectOptions = ({ setToggleGenerateModal }) => { const dispatch = CoreModules.useAppDispatch(); diff --git a/src/frontend/src/components/ProjectDetailsV2/SkeletonLoader.tsx b/src/frontend/src/components/ProjectDetailsV2/SkeletonLoader.tsx index 629102c5aa..d22843bcf2 100644 --- a/src/frontend/src/components/ProjectDetailsV2/SkeletonLoader.tsx +++ b/src/frontend/src/components/ProjectDetailsV2/SkeletonLoader.tsx @@ -1,5 +1,5 @@ import React from 'react'; -import CoreModules from '../../shared/CoreModules'; +import CoreModules from '@/shared/CoreModules'; export const ActivitiesCardSkeletonLoader = () => { return ( diff --git a/src/frontend/src/components/ProjectDetailsV2/TaskSectionPopup.tsx b/src/frontend/src/components/ProjectDetailsV2/TaskSectionPopup.tsx index 2f91da89f9..0a46469b6d 100644 --- a/src/frontend/src/components/ProjectDetailsV2/TaskSectionPopup.tsx +++ b/src/frontend/src/components/ProjectDetailsV2/TaskSectionPopup.tsx @@ -1,7 +1,7 @@ import React from 'react'; -import CoreModules from '../../shared/CoreModules'; -import AssetModules from '../../shared/AssetModules'; -import { ProjectActions } from '../../store/slices/ProjectSlice'; +import CoreModules from '@/shared/CoreModules'; +import AssetModules from '@/shared/AssetModules'; +import { ProjectActions } from '@/store/slices/ProjectSlice'; const TaskSectionPopup = ({ body }) => { const dispatch = CoreModules.useAppDispatch(); diff --git a/src/frontend/src/components/ProjectInfo/ProjectCard.jsx b/src/frontend/src/components/ProjectInfo/ProjectCard.jsx index d6542004e5..b981c4babb 100644 --- a/src/frontend/src/components/ProjectInfo/ProjectCard.jsx +++ b/src/frontend/src/components/ProjectInfo/ProjectCard.jsx @@ -1,7 +1,7 @@ import React from 'react'; -import CoreModules from '../../shared/CoreModules'; -import IconButtonCard from '../../utilities/IconButtonCard'; -import AssetModules from '../../shared/AssetModules'; +import CoreModules from '@/shared/CoreModules'; +import IconButtonCard from '@/utilities/IconButtonCard'; +import AssetModules from '@/shared/AssetModules'; const ProjectCard = () => { return ( diff --git a/src/frontend/src/components/ProjectInfo/ProjectInfoCountCard.jsx b/src/frontend/src/components/ProjectInfo/ProjectInfoCountCard.jsx index b85d6f9046..fa4da6ec3f 100644 --- a/src/frontend/src/components/ProjectInfo/ProjectInfoCountCard.jsx +++ b/src/frontend/src/components/ProjectInfo/ProjectInfoCountCard.jsx @@ -1,7 +1,7 @@ import React from 'react'; -import AssetModules from '../../shared/AssetModules'; -import CoreModules from '../../shared/CoreModules'; 
-import ProjectInfoCountSkeleton from './ProjectInfoCountCardSkeleton'; +import AssetModules from '@/shared/AssetModules'; +import CoreModules from '@/shared/CoreModules'; +import ProjectInfoCountSkeleton from '@/components/ProjectInfo/ProjectInfoCountCardSkeleton'; const ProjectInfoCountCard = () => { const taskData = CoreModules.useAppSelector((state) => state.task.taskData); diff --git a/src/frontend/src/components/ProjectInfo/ProjectInfoCountCardSkeleton.jsx b/src/frontend/src/components/ProjectInfo/ProjectInfoCountCardSkeleton.jsx index 631f7225a1..0ec681aab2 100644 --- a/src/frontend/src/components/ProjectInfo/ProjectInfoCountCardSkeleton.jsx +++ b/src/frontend/src/components/ProjectInfo/ProjectInfoCountCardSkeleton.jsx @@ -1,5 +1,5 @@ import React from 'react'; -import CoreModules from '../../shared/CoreModules'; +import CoreModules from '@/shared/CoreModules'; const ProjectInfoCountCardSkeleton = () => { return ( @@ -11,7 +11,7 @@ const ProjectInfoCountCardSkeleton = () => { }} > {Array.from({ length: 3 }).map((i) => ( -
+
))} diff --git a/src/frontend/src/components/ProjectInfo/ProjectInfoSidebar.jsx b/src/frontend/src/components/ProjectInfo/ProjectInfoSidebar.jsx index 27bb104036..5695777307 100644 --- a/src/frontend/src/components/ProjectInfo/ProjectInfoSidebar.jsx +++ b/src/frontend/src/components/ProjectInfo/ProjectInfoSidebar.jsx @@ -1,8 +1,8 @@ import React from 'react'; -import CoreModules from '../../shared/CoreModules'; -import ProjectCard from './ProjectCard'; -import environment from '../../environment'; -import ProjectInfoSidebarSkeleton from './ProjectInfoSidebarSkeleton'; +import CoreModules from '@/shared/CoreModules'; +import ProjectCard from '@/components/ProjectInfo/ProjectCard'; +import environment from '@/environment'; +import ProjectInfoSidebarSkeleton from '@/components/ProjectInfo/ProjectInfoSidebarSkeleton'; const ProjectInfoSidebar = ({ projectId, taskInfo }) => { const dispatch = CoreModules.useAppDispatch(); diff --git a/src/frontend/src/components/ProjectInfo/ProjectInfoSidebarSkeleton.jsx b/src/frontend/src/components/ProjectInfo/ProjectInfoSidebarSkeleton.jsx index 15cb754854..1299f7498f 100644 --- a/src/frontend/src/components/ProjectInfo/ProjectInfoSidebarSkeleton.jsx +++ b/src/frontend/src/components/ProjectInfo/ProjectInfoSidebarSkeleton.jsx @@ -1,5 +1,5 @@ import React from 'react'; -import CoreModules from '../../shared/CoreModules'; +import CoreModules from '@/shared/CoreModules'; const ProjectInfoSidebarSkeleton = () => { return ( diff --git a/src/frontend/src/components/ProjectInfo/ProjectInfomap.jsx b/src/frontend/src/components/ProjectInfo/ProjectInfomap.jsx index fadf775d35..d100b730b5 100644 --- a/src/frontend/src/components/ProjectInfo/ProjectInfomap.jsx +++ b/src/frontend/src/components/ProjectInfo/ProjectInfomap.jsx @@ -1,20 +1,20 @@ import React, { useCallback, useState, useEffect } from 'react'; -import CoreModules from '../../shared/CoreModules'; -import { MapContainer as MapComponent } from '../MapComponent/OpenLayersComponent'; -import { useOLMap } from '../MapComponent/OpenLayersComponent'; -import LayerSwitcherControl from '../MapComponent/OpenLayersComponent/LayerSwitcher'; -import { VectorLayer } from '../MapComponent/OpenLayersComponent/Layers'; +import CoreModules from '@/shared/CoreModules'; +import { MapContainer as MapComponent } from '@/components/MapComponent/OpenLayersComponent'; +import { useOLMap } from '@/components/MapComponent/OpenLayersComponent'; +import LayerSwitcherControl from '@/components/MapComponent/OpenLayersComponent/LayerSwitcher'; +import { VectorLayer } from '@/components/MapComponent/OpenLayersComponent/Layers'; import { Vector as VectorSource } from 'ol/source'; import GeoJSON from 'ol/format/GeoJSON'; import { get } from 'ol/proj'; -import { ProjectBuildingGeojsonService } from '../../api/SubmissionService'; -import environment from '../../environment'; -import { getStyles } from '../MapComponent/OpenLayersComponent/helpers/styleUtils'; -import { ProjectActions } from '../../store/slices/ProjectSlice'; -import { basicGeojsonTemplate } from '../../utilities/mapUtils'; -import ProjectInfoMapLegend from './ProjectInfoMapLegend'; -import Accordion from '../common/Accordion'; +import { ProjectBuildingGeojsonService } from '@/api/SubmissionService'; +import environment from '@/environment'; +import { getStyles } from '@/components/MapComponent/OpenLayersComponent/helpers/styleUtils'; +import { ProjectActions } from '@/store/slices/ProjectSlice'; +import { basicGeojsonTemplate } from '@/utilities/mapUtils'; +import 
ProjectInfoMapLegend from '@/components/ProjectInfo/ProjectInfoMapLegend'; +import Accordion from '@/components/common/Accordion'; export const defaultStyles = { lineColor: '#000000', diff --git a/src/frontend/src/components/ProjectMap/ProjectMap.jsx b/src/frontend/src/components/ProjectMap/ProjectMap.jsx index cf6ae604b7..84056cf1ec 100644 --- a/src/frontend/src/components/ProjectMap/ProjectMap.jsx +++ b/src/frontend/src/components/ProjectMap/ProjectMap.jsx @@ -1,9 +1,9 @@ import React, { useState } from 'react'; import CoreModules from 'fmtm/CoreModules'; -import { useOLMap } from '../MapComponent/OpenLayersComponent'; -import { MapContainer as MapComponent } from '../MapComponent/OpenLayersComponent'; -import LayerSwitcherControl from '../MapComponent/OpenLayersComponent/LayerSwitcher/index.js'; -import { VectorLayer } from '../MapComponent/OpenLayersComponent/Layers'; +import { useOLMap } from '@/components/MapComponent/OpenLayersComponent'; +import { MapContainer as MapComponent } from '@/components/MapComponent/OpenLayersComponent'; +import LayerSwitcherControl from '@/components/MapComponent/OpenLayersComponent/LayerSwitcher/index.js'; +import { VectorLayer } from '@/components/MapComponent/OpenLayersComponent/Layers'; const basicGeojsonTemplate = { type: 'FeatureCollection', diff --git a/src/frontend/src/components/QrcodeComponent.jsx b/src/frontend/src/components/QrcodeComponent.jsx index ba9ec80c64..3cbc603598 100755 --- a/src/frontend/src/components/QrcodeComponent.jsx +++ b/src/frontend/src/components/QrcodeComponent.jsx @@ -1,11 +1,11 @@ import React, { useState } from 'react'; -import BasicCard from '../utilities/BasicCard'; +import BasicCard from '@/utilities/BasicCard'; // import Activities from "./Activities"; -import environment from '../environment'; -import { ProjectFilesById } from '../api/Files'; -import CoreModules from '../shared/CoreModules'; -import AssetModules from '../shared/AssetModules'; -import { HomeActions } from '../store/slices/HomeSlice'; +import environment from '@/environment'; +import { ProjectFilesById } from '@/api/Files'; +import CoreModules from '@/shared/CoreModules'; +import AssetModules from '@/shared/AssetModules'; +import { HomeActions } from '@/store/slices/HomeSlice'; const TasksComponent = ({ type, task, defaultTheme }) => { const dispatch = CoreModules.useAppDispatch(); diff --git a/src/frontend/src/components/SubmissionMap/SubmissionMap.jsx b/src/frontend/src/components/SubmissionMap/SubmissionMap.jsx index 1fe5f16c05..881910627c 100644 --- a/src/frontend/src/components/SubmissionMap/SubmissionMap.jsx +++ b/src/frontend/src/components/SubmissionMap/SubmissionMap.jsx @@ -1,8 +1,8 @@ import React, { useState } from 'react'; -import useOLMap from '../../hooks/useOlMap'; -import { MapContainer as MapComponent } from '../MapComponent/OpenLayersComponent'; -import LayerSwitcherControl from '../MapComponent/OpenLayersComponent/LayerSwitcher/index.js'; -import { VectorLayer } from '../MapComponent/OpenLayersComponent/Layers'; +import useOLMap from '@/hooks/useOlMap'; +import { MapContainer as MapComponent } from '@/components/MapComponent/OpenLayersComponent'; +import LayerSwitcherControl from '@/components/MapComponent/OpenLayersComponent/LayerSwitcher/index.js'; +import { VectorLayer } from '@/components/MapComponent/OpenLayersComponent/Layers'; function elastic(t) { return Math.pow(2, -10 * t) * Math.sin(((t - 0.075) * (2 * Math.PI)) / 0.3) + 1; diff --git a/src/frontend/src/components/TasksLayer.jsx 
b/src/frontend/src/components/TasksLayer.jsx index 643fdc7b32..7dfab9bf63 100755 --- a/src/frontend/src/components/TasksLayer.jsx +++ b/src/frontend/src/components/TasksLayer.jsx @@ -2,10 +2,10 @@ import React, { useEffect } from 'react'; import { Vector as VectorLayer } from 'ol/layer.js'; import GeoJSON from 'ol/format/GeoJSON'; import { Vector as VectorSource } from 'ol/source.js'; -import { geojsonObjectModel } from '../models/geojsonObjectModel'; -import MapStyles from '../hooks/MapStyles'; -import environment from '../environment'; -import CoreModules from '../shared/CoreModules'; +import { geojsonObjectModel } from '@/models/geojsonObjectModel'; +import MapStyles from '@/hooks/MapStyles'; +import environment from '@/environment'; +import CoreModules from '@/shared/CoreModules'; import { get } from 'ol/proj'; let geojsonObject; const TasksLayer = (map, view, feature) => { diff --git a/src/frontend/src/components/TasksMap/TasksMap.jsx b/src/frontend/src/components/TasksMap/TasksMap.jsx index 78322e2f3b..c15b806099 100644 --- a/src/frontend/src/components/TasksMap/TasksMap.jsx +++ b/src/frontend/src/components/TasksMap/TasksMap.jsx @@ -1,8 +1,8 @@ import React from 'react'; -import useOLMap from '../../hooks/useOlMap'; -import { MapContainer as MapComponent } from '../MapComponent/OpenLayersComponent'; -import LayerSwitcherControl from '../MapComponent/OpenLayersComponent/LayerSwitcher/index.js'; -import { VectorLayer } from '../MapComponent/OpenLayersComponent/Layers'; +import useOLMap from '@/hooks/useOlMap'; +import { MapContainer as MapComponent } from '@/components/MapComponent/OpenLayersComponent'; +import LayerSwitcherControl from '@/components/MapComponent/OpenLayersComponent/LayerSwitcher/index.js'; +import { VectorLayer } from '@/components/MapComponent/OpenLayersComponent/Layers'; function elastic(t) { return Math.pow(2, -10 * t) * Math.sin(((t - 0.075) * (2 * Math.PI)) / 0.3) + 1; diff --git a/src/frontend/src/components/common/Accordion.tsx b/src/frontend/src/components/common/Accordion.tsx index 9cbc69296a..bc15e58d17 100644 --- a/src/frontend/src/components/common/Accordion.tsx +++ b/src/frontend/src/components/common/Accordion.tsx @@ -1,5 +1,5 @@ import React, { useEffect, useState } from 'react'; -import AssetModules from '../../shared/AssetModules'; +import AssetModules from '@/shared/AssetModules'; interface IAccordion { collapsed?: boolean; diff --git a/src/frontend/src/components/common/BottomSheet.tsx b/src/frontend/src/components/common/BottomSheet.tsx index 6b0d59cad6..28a2d31695 100644 --- a/src/frontend/src/components/common/BottomSheet.tsx +++ b/src/frontend/src/components/common/BottomSheet.tsx @@ -1,5 +1,5 @@ import React, { useEffect, useRef, useState } from 'react'; -import FmtmLogo from '../../assets/images/hotLog.png'; +import FmtmLogo from '@/assets/images/hotLog.png'; const BottomSheet = ({ body, onClose }) => { const sheetContentRef: any = useRef(null); diff --git a/src/frontend/src/components/common/Button.tsx b/src/frontend/src/components/common/Button.tsx index 49168f80e7..a5f69f748d 100644 --- a/src/frontend/src/components/common/Button.tsx +++ b/src/frontend/src/components/common/Button.tsx @@ -12,6 +12,7 @@ interface IButton { icon?: React.ReactNode; isLoading?: boolean; disabled?: boolean; + loadingText?: string; } const btnStyle = (btnType, className) => { @@ -24,13 +25,25 @@ const btnStyle = (btnType, className) => { case 'other': return `fmtm-py-1 fmtm-px-5 fmtm-bg-red-500 fmtm-text-white fmtm-rounded-lg hover:fmtm-bg-red-600`; case 
'disabled': - return `fmtm-py-1 fmtm-px-4 fmtm-text-white fmtm-rounded-lg fmtm-bg-gray-400 fmtm-cursor-not-allowed`; + return `fmtm-py-1 fmtm-px-4 fmtm-text-white fmtm-rounded-lg fmtm-bg-gray-400 fmtm-cursor-not-allowed ${className}`; default: return 'fmtm-primary'; } }; -const Button = ({ btnText, btnType, type, onClick, disabled, className, count, dataTip, icon, isLoading }: IButton) => ( +const Button = ({ + btnText, + btnType, + type, + onClick, + disabled, + className, + count, + dataTip, + icon, + isLoading, + loadingText, +}: IButton) => (
diff --git a/src/frontend/src/components/createnewproject/MapControlComponent.tsx b/src/frontend/src/components/createnewproject/MapControlComponent.tsx new file mode 100644 index 0000000000..f62b9e6da4 --- /dev/null +++ b/src/frontend/src/components/createnewproject/MapControlComponent.tsx @@ -0,0 +1,68 @@ +import React, { useState } from 'react'; +import VectorLayer from 'ol/layer/Vector'; +import AssetModules from '@/shared/AssetModules'; +import CoreModules from '@/shared/CoreModules.js'; +import { CreateProjectActions } from '@/store/slices/CreateProjectSlice'; + +const MapControlComponent = ({ map, hasEditUndo }) => { + const dispatch = CoreModules.useAppDispatch(); + const toggleSplittedGeojsonEdit = CoreModules.useAppSelector( + (state) => state.createproject.toggleSplittedGeojsonEdit, + ); + const btnList = [ + { + id: 'Add', + icon: , + }, + { + id: 'Minus', + icon: , + }, + { + id: 'Edit', + icon: ( + + ), + }, + ]; + + const handleOnClick = (btnId) => { + if (btnId === 'Add') { + const actualZoom = map.getView().getZoom(); + map.getView().setZoom(actualZoom + 1); + } else if (btnId === 'Minus') { + const actualZoom = map.getView().getZoom(); + map.getView().setZoom(actualZoom - 1); + } else if (btnId === 'Edit') { + dispatch(CreateProjectActions.SetToggleSplittedGeojsonEdit(!toggleSplittedGeojsonEdit)); + } + }; + + return ( +
+ {btnList.map((btn) => { + return ( +
+ {((btn.id !== 'Edit' && btn.id !== 'Undo') || (btn.id === 'Edit' && hasEditUndo)) && ( +
handleOnClick(btn.id)} + > + {btn.icon} +
+ )} +
+ ); + })} +
+ ); +}; + +export default MapControlComponent; diff --git a/src/frontend/src/components/createnewproject/ProjectDetailsForm.tsx b/src/frontend/src/components/createnewproject/ProjectDetailsForm.tsx index 3a5bb25f01..42ce617fe7 100644 --- a/src/frontend/src/components/createnewproject/ProjectDetailsForm.tsx +++ b/src/frontend/src/components/createnewproject/ProjectDetailsForm.tsx @@ -1,27 +1,27 @@ -import TextArea from '../../components/common/TextArea'; -import InputTextField from '../../components/common/InputTextField'; +import TextArea from '@/components/common/TextArea'; +import InputTextField from '@/components/common/InputTextField'; import React, { useEffect } from 'react'; -import { CreateProjectActions } from '../../store/slices/CreateProjectSlice'; +import { CreateProjectActions } from '@/store/slices/CreateProjectSlice'; import { useDispatch } from 'react-redux'; import { useNavigate } from 'react-router-dom'; -import { useAppSelector } from '../../types/reduxTypes'; -import useForm from '../../hooks/useForm'; -import CreateProjectValidation from '../../components/createproject/validation/CreateProjectValidation'; -import Button from '../../components/common/Button'; -import { CommonActions } from '../../store/slices/CommonSlice'; -import AssetModules from '../../shared/AssetModules.js'; -import { createPopup } from '../../utilfunctions/createPopup'; -import { CustomSelect } from '../../components/common/Select'; -import { OrganisationService } from '../../api/CreateProjectService'; +import { useAppSelector } from '@/types/reduxTypes'; +import useForm from '@/hooks/useForm'; +import CreateProjectValidation from '@/components/createproject/validation//CreateProjectValidation'; +import Button from '@/components/common/Button'; +import { CommonActions } from '@/store/slices/CommonSlice'; +import AssetModules from '@/shared/AssetModules.js'; +import { createPopup } from '@/utilfunctions/createPopup'; +import { CustomSelect } from '@/components/common/Select'; +import { OrganisationService } from '@/api/CreateProjectService'; const ProjectDetailsForm = ({ flag }) => { const dispatch = useDispatch(); const navigate = useNavigate(); const projectDetails: any = useAppSelector((state) => state.createproject.projectDetails); - const organizationListData: any = useAppSelector((state) => state.createproject.organizationList); + const organisationListData: any = useAppSelector((state) => state.createproject.organisationList); - const organizationList = organizationListData.map((item) => ({ label: item.name, value: item.id })); + const organisationList = organisationListData.map((item) => ({ label: item.name, value: item.id })); const submission = () => { dispatch(CreateProjectActions.SetIndividualProjectDetailsData(values)); @@ -36,7 +36,7 @@ const ProjectDetailsForm = ({ flag }) => { ); const onFocus = () => { - dispatch(OrganisationService(`${import.meta.env.VITE_API_URL}/organization/`)); + dispatch(OrganisationService(`${import.meta.env.VITE_API_URL}/organisation/`)); }; useEffect(() => { @@ -81,7 +81,7 @@ const ProjectDetailsForm = ({ flag }) => {
Project Details
-

+

Fill in your project basic information such as name, description, hashtag, etc. This captures essential information about your project. @@ -99,7 +99,7 @@ const ProjectDetailsForm = ({ flag }) => { https://docs.getodk.org/central-install-digital-ocean/
-

+
{ { /> createPopup('Create Organization', 'createOrganization?popup=true')} + onClick={() => createPopup('Create Organization', 'createOrganisation?popup=true')} />
{errors.organisation_id && ( diff --git a/src/frontend/src/components/createnewproject/SelectForm.tsx b/src/frontend/src/components/createnewproject/SelectForm.tsx index 1bb5e2ea3f..4d54fc193c 100644 --- a/src/frontend/src/components/createnewproject/SelectForm.tsx +++ b/src/frontend/src/components/createnewproject/SelectForm.tsx @@ -1,17 +1,17 @@ import React, { useEffect, useRef, useState } from 'react'; import { useDispatch } from 'react-redux'; -import { CommonActions } from '../../store/slices/CommonSlice'; -import Button from '../../components/common/Button'; -import { CustomSelect } from '../../components/common/Select'; +import { CommonActions } from '@/store/slices/CommonSlice'; +import Button from '@/components/common/Button'; +import { CustomSelect } from '@/components/common/Select'; import { useNavigate } from 'react-router-dom'; -import RadioButton from '../common/RadioButton'; -import { CreateProjectActions } from '../../store/slices/CreateProjectSlice'; -import useForm from '../../hooks/useForm'; -import { useAppSelector } from '../../types/reduxTypes'; -import FileInputComponent from '../common/FileInputComponent'; -import SelectFormValidation from './validation/SelectFormValidation'; -import { FormCategoryService, ValidateCustomForm } from '../../api/CreateProjectService'; -import NewDefineAreaMap from '../../views/NewDefineAreaMap'; +import RadioButton from '@/components/common/RadioButton'; +import { CreateProjectActions } from '@/store/slices/CreateProjectSlice'; +import useForm from '@/hooks/useForm'; +import { useAppSelector } from '@/types/reduxTypes'; +import FileInputComponent from '@/components/common/FileInputComponent'; +import SelectFormValidation from '@/components/createproject/validation/SelectFormValidation'; +import { FormCategoryService, ValidateCustomForm } from '@/api/CreateProjectService'; +import NewDefineAreaMap from '@/views/NewDefineAreaMap'; const osmFeatureTypeOptions = [ { name: 'form_ways', value: 'existing_form', label: 'Use Existing Form' }, @@ -118,7 +118,7 @@ const SelectForm = ({ flag, geojsonFile, customFormFile, setCustomFormFile }) => > tags {' '} - that match the selected category in OSM database, if you don't choose to upload custom data extract. + {`that match the selected category in OSM database, if you don't choose to upload custom data extract.`}

state.createproject.taskSplittingGeojsonLoading, ); const isTasksGenerated = CoreModules.useAppSelector((state) => state.createproject.isTasksGenerated); + const isFgbFetching = CoreModules.useAppSelector((state) => state.createproject.isFgbFetching); + const toggleSplittedGeojsonEdit = CoreModules.useAppSelector( + (state) => state.createproject.toggleSplittedGeojsonEdit, + ); const toggleStep = (step, url) => { dispatch(CommonActions.SetCurrentStepFormStep({ flag: flag, step: step })); @@ -130,7 +133,6 @@ const SplitTasks = ({ flag, geojsonFile, setGeojsonFile, customLineUpload, custo } else { projectData = { ...projectData, task_split_dimension: projectDetails.dimension }; } - console.log(projectData, 'projectData'); dispatch( CreateProjectService( `${import.meta.env.VITE_API_URL}/projects/create_project`, @@ -379,8 +381,9 @@ const SplitTasks = ({ flag, geojsonFile, setGeojsonFile, customLineUpload, custo className="" icon={} disabled={ - splitTasksSelection === task_split_type['task_splitting_algorithm'] && - !formValues?.average_buildings_per_task + (splitTasksSelection === task_split_type['task_splitting_algorithm'] && + !formValues?.average_buildings_per_task) || + isFgbFetching ? true : false } @@ -426,6 +429,17 @@ const SplitTasks = ({ flag, geojsonFile, setGeojsonFile, customLineUpload, custo splittedGeojson={dividedTaskGeojson} uploadedOrDrawnGeojsonFile={drawnGeojson} buildingExtractedGeojson={dataExtractGeojson} + onModify={ + toggleSplittedGeojsonEdit + ? (geojson) => { + handleCustomChange('drawnGeojson', geojson); + dispatch(CreateProjectActions.SetDividedTaskGeojson(JSON.parse(geojson))); + setGeojsonFile(null); + } + : null + } + // toggleSplittedGeojsonEdit + hasEditUndo />
{generateProjectLog ? ( diff --git a/src/frontend/src/components/createnewproject/UploadArea.tsx b/src/frontend/src/components/createnewproject/UploadArea.tsx index 01f3c93281..903ef768ab 100644 --- a/src/frontend/src/components/createnewproject/UploadArea.tsx +++ b/src/frontend/src/components/createnewproject/UploadArea.tsx @@ -1,18 +1,19 @@ import React, { useEffect, useRef, useState } from 'react'; -import { CommonActions } from '../../store/slices/CommonSlice'; -import Button from '../../components/common/Button'; +import { CommonActions } from '@/store/slices/CommonSlice'; +import Button from '@/components/common/Button'; import { useDispatch } from 'react-redux'; -import RadioButton from '../../components/common/RadioButton'; -import AssetModules from '../../shared/AssetModules.js'; -import DrawSvg from './DrawSvg'; +import RadioButton from '@/components/common/RadioButton'; +import AssetModules from '@/shared/AssetModules.js'; +import DrawSvg from '@/components/createnewproject/DrawSvg'; import { useNavigate } from 'react-router-dom'; -import { CreateProjectActions } from '../../store/slices/CreateProjectSlice'; -import useForm from '../../hooks/useForm'; -import { useAppSelector } from '../../types/reduxTypes'; -import UploadAreaValidation from './validation/UploadAreaValidation'; -import FileInputComponent from '../common/FileInputComponent'; -import NewDefineAreaMap from '../../views/NewDefineAreaMap'; -import { checkWGS84Projection } from '../../utilfunctions/checkWGS84Projection.js'; +import { CreateProjectActions } from '@/store/slices/CreateProjectSlice'; +import useForm from '@/hooks/useForm'; +import { useAppSelector } from '@/types/reduxTypes'; +import UploadAreaValidation from '@/components/createnewproject/validation/UploadAreaValidation'; +import FileInputComponent from '@/components/common/FileInputComponent'; +import NewDefineAreaMap from '@/views/NewDefineAreaMap'; +import { checkWGS84Projection } from '@/utilfunctions/checkWGS84Projection.js'; + // @ts-ignore const DefineAreaMap = React.lazy(() => import('../../views/DefineAreaMap')); @@ -31,7 +32,7 @@ const uploadAreaOptions = [ }, ]; -const UploadArea = ({ flag, geojsonFile, setGeojsonFile }) => { +const UploadArea = ({ flag, geojsonFile, setGeojsonFile, setCustomLineUpload, setCustomPolygonUpload }) => { const dispatch = useDispatch(); const navigate = useNavigate(); // const [uploadAreaFile, setUploadAreaFile] = useState(null); @@ -44,6 +45,20 @@ const UploadArea = ({ flag, geojsonFile, setGeojsonFile }) => { const totalAreaSelection = useAppSelector((state) => state.createproject.totalAreaSelection); const submission = () => { + if (totalAreaSelection) { + const totalArea = parseFloat(totalAreaSelection?.split(' ')[0]); + if (totalArea > 1000) { + dispatch( + CommonActions.SetSnackBar({ + open: true, + message: 'Cannot create project of project area exceeding 1000 Sq.KM.', + variant: 'error', + duration: 3000, + }), + ); + return; + } + } dispatch(CreateProjectActions.SetIndividualProjectDetailsData(formValues)); dispatch(CommonActions.SetCurrentStepFormStep({ flag: flag, step: 3 })); navigate('/select-form'); @@ -99,17 +114,19 @@ const UploadArea = ({ flag, geojsonFile, setGeojsonFile }) => { }; useEffect(() => { - const isWGS84 = () => { - if (uploadAreaSelection === 'upload_file') { - const isWGS84Projection = checkWGS84Projection(drawnGeojson); - setIsGeojsonWG84(isWGS84Projection); - return isWGS84Projection; + if (drawnGeojson) { + const isWGS84 = () => { + if (uploadAreaSelection === 'upload_file') { + 
const isWGS84Projection = checkWGS84Projection(drawnGeojson); + setIsGeojsonWG84(isWGS84Projection); + return isWGS84Projection; + } + setIsGeojsonWG84(true); + return true; + }; + if (!isWGS84() && drawnGeojson) { + showSpatialError(); } - setIsGeojsonWG84(true); - return true; - }; - if (!isWGS84() && drawnGeojson) { - showSpatialError(); } return () => {}; }, [drawnGeojson]); @@ -133,6 +150,32 @@ const UploadArea = ({ flag, geojsonFile, setGeojsonFile }) => { dispatch(CreateProjectActions.SetTotalAreaSelection(null)); }; + useEffect(() => { + if (totalAreaSelection) { + const totalArea = parseFloat(totalAreaSelection?.split(' ')[0]); + if (totalArea > 100) { + dispatch( + CommonActions.SetSnackBar({ + open: true, + message: 'The project area exceeded over 100 Sq.KM.', + variant: 'warning', + duration: 3000, + }), + ); + } + if (totalArea > 1000) { + dispatch( + CommonActions.SetSnackBar({ + open: true, + message: 'The project area exceeded 1000 Sq.KM. and must be less than 1000 Sq.KM.', + variant: 'error', + duration: 3000, + }), + ); + } + } + }, [totalAreaSelection]); + return (
@@ -264,6 +307,9 @@ const UploadArea = ({ flag, geojsonFile, setGeojsonFile }) => { handleCustomChange('drawnGeojson', geojson); dispatch(CreateProjectActions.SetDrawnGeojson(JSON.parse(geojson))); dispatch(CreateProjectActions.SetTotalAreaSelection(area)); + dispatch(CreateProjectActions.ClearProjectStepState(formValues)); + setCustomLineUpload(null); + setCustomPolygonUpload(null); setGeojsonFile(null); }} /> diff --git a/src/frontend/src/components/createnewproject/validation/DataExtractValidation.tsx b/src/frontend/src/components/createnewproject/validation/DataExtractValidation.tsx index 3a72dfdc88..c913144e34 100644 --- a/src/frontend/src/components/createnewproject/validation/DataExtractValidation.tsx +++ b/src/frontend/src/components/createnewproject/validation/DataExtractValidation.tsx @@ -36,7 +36,6 @@ function DataExtractValidation(values: ProjectValues) { errors.customPolygonUpload = 'A GeoJSON file is required.'; } - console.log(errors); return errors; } diff --git a/src/frontend/src/components/createnewproject/validation/DefineTaskValidation.tsx b/src/frontend/src/components/createnewproject/validation/DefineTaskValidation.tsx index 37f8191dab..b11b1968e9 100644 --- a/src/frontend/src/components/createnewproject/validation/DefineTaskValidation.tsx +++ b/src/frontend/src/components/createnewproject/validation/DefineTaskValidation.tsx @@ -1,4 +1,4 @@ -import { task_split_type } from '../../../types/enums'; +import { task_split_type } from '@/types/enums'; interface ProjectValues { task_split_type: number; diff --git a/src/frontend/src/components/createproject/BasemapSelection.tsx b/src/frontend/src/components/createproject/BasemapSelection.tsx index 73dc380314..571d58b205 100644 --- a/src/frontend/src/components/createproject/BasemapSelection.tsx +++ b/src/frontend/src/components/createproject/BasemapSelection.tsx @@ -1,5 +1,5 @@ import React from 'react'; -import CoreModules from '../../shared/CoreModules.js'; +import CoreModules from '@/shared/CoreModules.js'; // import { SelectPicker } from 'rsuite'; import { useNavigate } from 'react-router-dom'; diff --git a/src/frontend/src/components/createproject/DataExtract.tsx b/src/frontend/src/components/createproject/DataExtract.tsx index 4fc7355532..15dde7e1a1 100755 --- a/src/frontend/src/components/createproject/DataExtract.tsx +++ b/src/frontend/src/components/createproject/DataExtract.tsx @@ -1,16 +1,16 @@ import React, { useEffect } from 'react'; -import enviroment from '../../environment'; -import CoreModules from '../../shared/CoreModules.js'; +import enviroment from '@/environment'; +import CoreModules from '@/shared/CoreModules.js'; import FormGroup from '@mui/material/FormGroup'; -import { FormCategoryService } from '../../api/CreateProjectService'; +import { FormCategoryService } from '@/api/CreateProjectService'; import { useNavigate, Link } from 'react-router-dom'; -import { CreateProjectActions } from '../../store/slices/CreateProjectSlice'; +import { CreateProjectActions } from '@/store/slices/CreateProjectSlice'; import { Grid, InputLabel, MenuItem, Select } from '@mui/material'; -import useForm from '../../hooks/useForm'; +import useForm from '@/hooks/useForm'; //@ts-ignore -// import DefineAreaMap from './views/DefineAreaMap'; -import DefineAreaMap from '../../views//DefineAreaMap'; -import DataExtractValidation from './validation/DataExtractValidation'; +// import DefineAreaMap from '@/views/DefineAreaMap'; +import DefineAreaMap from '@/views//DefineAreaMap'; +import DataExtractValidation from 
'@/components/createproject/validation/DataExtractValidation'; // import { SelectPicker } from 'rsuite'; let generateProjectLogIntervalCb: any = null; diff --git a/src/frontend/src/components/createproject/DefineTasks.tsx b/src/frontend/src/components/createproject/DefineTasks.tsx index ed699382a5..33644ac6a9 100755 --- a/src/frontend/src/components/createproject/DefineTasks.tsx +++ b/src/frontend/src/components/createproject/DefineTasks.tsx @@ -1,17 +1,17 @@ import React from 'react'; -import enviroment from '../../environment'; -import CoreModules from '../../shared/CoreModules'; -import AssetModules from '../../shared/AssetModules.js'; +import enviroment from '@/environment'; +import CoreModules from '@/shared/CoreModules'; +import AssetModules from '@/shared/AssetModules.js'; import FormGroup from '@mui/material/FormGroup'; -import { GetDividedTaskFromGeojson, TaskSplittingPreviewService } from '../../api/CreateProjectService'; +import { GetDividedTaskFromGeojson, TaskSplittingPreviewService } from '@/api/CreateProjectService'; import { useNavigate, Link } from 'react-router-dom'; -import { CreateProjectActions } from '../../store/slices/CreateProjectSlice'; +import { CreateProjectActions } from '@/store/slices/CreateProjectSlice'; import { InputLabel, MenuItem, Select } from '@mui/material'; //@ts-ignore -import DefineAreaMap from '../../views/DefineAreaMap'; -import useForm from '../../hooks/useForm'; -import DefineTaskValidation from './validation/DefineTaskValidation'; -import { useAppSelector } from '../../types/reduxTypes'; +import DefineAreaMap from '@/views/DefineAreaMap'; +import useForm from '@/hooks/useForm'; +import DefineTaskValidation from '@/components/createproject/validation/DefineTaskValidation'; +import { useAppSelector } from '@/types/reduxTypes'; const alogrithmList = [ { id: 1, value: 'Divide on Square', label: 'Divide on Square' }, diff --git a/src/frontend/src/components/createproject/FormSelection.tsx b/src/frontend/src/components/createproject/FormSelection.tsx index 0d9f3f5c24..c348b7782c 100755 --- a/src/frontend/src/components/createproject/FormSelection.tsx +++ b/src/frontend/src/components/createproject/FormSelection.tsx @@ -1,23 +1,22 @@ import React, { useEffect, useRef } from 'react'; -import enviroment from '../../environment'; -import CoreModules from '../../shared/CoreModules'; +import CoreModules from '@/shared/CoreModules'; import FormGroup from '@mui/material/FormGroup'; import { CreateProjectService, FormCategoryService, GenerateProjectLog, ValidateCustomForm, -} from '../../api/CreateProjectService'; +} from '@/api/CreateProjectService'; import { useNavigate, Link } from 'react-router-dom'; -import { CreateProjectActions } from '../../store/slices/CreateProjectSlice'; +import { CreateProjectActions } from '@/store/slices/CreateProjectSlice'; import { Grid, InputLabel, MenuItem, Select } from '@mui/material'; -import AssetModules from '../../shared/AssetModules.js'; -import useForm from '../../hooks/useForm'; -import SelectFormValidation from './validation/SelectFormValidation'; -import { CommonActions } from '../../store/slices/CommonSlice'; -import LoadingBar from './LoadingBar'; -import environment from '../../environment'; -import { useAppSelector } from '../../types/reduxTypes'; +import AssetModules from '@/shared/AssetModules.js'; +import useForm from '@/hooks/useForm'; +import SelectFormValidation from '@/components/createproject/validation/SelectFormValidation'; +import { CommonActions } from '@/store/slices/CommonSlice'; +import 
LoadingBar from '@/components/createproject/LoadingBar'; +import environment from '@/environment'; +import { useAppSelector } from '@/types/reduxTypes'; // import { SelectPicker } from 'rsuite'; let generateProjectLogIntervalCb: any = null; diff --git a/src/frontend/src/components/createproject/LoadingBar.tsx b/src/frontend/src/components/createproject/LoadingBar.tsx index d04c925a8d..c74f426741 100644 --- a/src/frontend/src/components/createproject/LoadingBar.tsx +++ b/src/frontend/src/components/createproject/LoadingBar.tsx @@ -2,7 +2,7 @@ import React from 'react'; import { Box, Tooltip } from '@mui/material'; import { LinearProgress } from '@mui/material'; import { createTheme } from '@mui/material/styles'; -import CoreModules from '../../shared/CoreModules'; +import CoreModules from '@/shared/CoreModules'; const LoadingBar = ({ activeStep, totalSteps, title }) => { const theme = createTheme({ diff --git a/src/frontend/src/components/createproject/ProjectDetailsForm.tsx b/src/frontend/src/components/createproject/ProjectDetailsForm.tsx index 36337f1f62..ae01c0296c 100755 --- a/src/frontend/src/components/createproject/ProjectDetailsForm.tsx +++ b/src/frontend/src/components/createproject/ProjectDetailsForm.tsx @@ -1,15 +1,14 @@ import React, { useEffect } from 'react'; -import CoreModules from '../../shared/CoreModules'; -import AssetModules from '../../shared/AssetModules'; +import CoreModules from '@/shared/CoreModules'; +import AssetModules from '@/shared/AssetModules'; import { useNavigate } from 'react-router-dom'; -import useForm from '../../hooks/useForm'; -import CreateProjectValidation from './validation/CreateProjectValidation'; -import { CreateProjectActions } from '../../store/slices/CreateProjectSlice'; -import { OrganisationService } from '../../api/CreateProjectService'; -import environment from '../../environment'; +import useForm from '@/hooks/useForm'; +import CreateProjectValidation from '@/components/createproject/validation/CreateProjectValidation'; +import { CreateProjectActions } from '@/store/slices/CreateProjectSlice'; +import { OrganisationService } from '@/api/CreateProjectService'; import { MenuItem, Select } from '@mui/material'; -import { createPopup } from '../../utilfunctions/createPopup'; -import { useAppSelector } from '../../types/reduxTypes'; +import { createPopup } from '@/utilfunctions/createPopup'; +import { useAppSelector } from '@/types/reduxTypes'; const ProjectDetailsForm: React.FC = () => { const defaultTheme: any = CoreModules.useAppSelector((state) => state.theme.hotTheme); @@ -22,13 +21,9 @@ const ProjectDetailsForm: React.FC = () => { const projectDetails: any = useAppSelector((state) => state.createproject.projectDetails); //we use use selector from redux to get all state of projectDetails from createProject slice - const organizationListData: any = useAppSelector((state) => state.createproject.organizationList); + const organisationListData: any = useAppSelector((state) => state.createproject.organisationList); //we use use selector from redux to get all state of projectDetails from createProject slice - useEffect(() => { - // dispatch(OrganisationService(`${import.meta.env.VITE_API_URL}/organization/`)); - }, []); - const submission = () => { // submitForm(); dispatch(CreateProjectActions.SetIndividualProjectDetailsData(values)); @@ -60,12 +55,12 @@ const ProjectDetailsForm: React.FC = () => { }, // or className: 'your-class' }; }; - // Changed OrganizationList Data into the Picker Component Format i.e label and value - const 
organizationList = organizationListData.map((item) => ({ label: item.name, value: item.id })); + // Changed OrganisationList Data into the Picker Component Format i.e label and value + const organisationList = organisationListData.map((item) => ({ label: item.name, value: item.id })); // User has switched back to the tab const onFocus = () => { - dispatch(OrganisationService(`${import.meta.env.VITE_API_URL}/organization/`)); + dispatch(OrganisationService(`${import.meta.env.VITE_API_URL}/organisation/`)); }; useEffect(() => { window.addEventListener('focus', onFocus); @@ -155,14 +150,14 @@ const ProjectDetailsForm: React.FC = () => { handleCustomChange('organisation_id', e.target.value); }} > - {organizationList?.map((org) => ( + {organisationList?.map((org) => ( {org.label} ))} createPopup('Create Organization', 'createOrganization?popup=true')} + onClick={() => createPopup('Create Organization', 'createOrganisation?popup=true')} sx={{ width: 'auto' }} // disabled={qrcode == "" ? true : false} color="info" diff --git a/src/frontend/src/components/createproject/UploadArea.tsx b/src/frontend/src/components/createproject/UploadArea.tsx index 9ad74d8eaf..6f51239c5f 100755 --- a/src/frontend/src/components/createproject/UploadArea.tsx +++ b/src/frontend/src/components/createproject/UploadArea.tsx @@ -1,10 +1,10 @@ import React, { useEffect } from 'react'; -import CoreModules from '../../shared/CoreModules'; +import CoreModules from '@/shared/CoreModules'; import FormControl from '@mui/material/FormControl'; import FormGroup from '@mui/material/FormGroup'; import { useNavigate, Link } from 'react-router-dom'; -import { CreateProjectActions } from '../../store/slices/CreateProjectSlice'; -import DrawSvg from '../createproject/DrawSvg'; +import { CreateProjectActions } from '@/store/slices/CreateProjectSlice'; +import DrawSvg from '@/components/createproject/DrawSvg'; // @ts-ignore const DefineAreaMap = React.lazy(() => import('../../views/DefineAreaMap')); diff --git a/src/frontend/src/components/editproject/EditProjectDetails.tsx b/src/frontend/src/components/editproject/EditProjectDetails.tsx index 2c85073550..7f51bcd7d3 100644 --- a/src/frontend/src/components/editproject/EditProjectDetails.tsx +++ b/src/frontend/src/components/editproject/EditProjectDetails.tsx @@ -1,17 +1,17 @@ import React from 'react'; -import CoreModules from '../../shared/CoreModules'; -import AssetModules from '../../shared/AssetModules'; -import useForm from '../../hooks/useForm'; -import EditProjectValidation from './validation/EditProjectDetailsValidation'; -import { diffObject } from '../../utilfunctions/compareUtils'; -import environment from '../../environment'; -import { CreateProjectActions } from '../../store/slices/CreateProjectSlice'; -import { PatchProjectDetails } from '../../api/CreateProjectService'; +import CoreModules from '@/shared/CoreModules'; +import AssetModules from '@/shared/AssetModules'; +import useForm from '@/hooks/useForm'; +import EditProjectValidation from '@/components/editproject/validation/EditProjectDetailsValidation'; +import { diffObject } from '@/utilfunctions/compareUtils'; +import environment from '@/environment'; +import { CreateProjectActions } from '@/store/slices/CreateProjectSlice'; +import { PatchProjectDetails } from '@/api/CreateProjectService'; const EditProjectDetails = ({ projectId }) => { const editProjectDetails: any = CoreModules.useAppSelector((state) => state.createproject.editProjectDetails); // //we use use selector from redux to get all state of projectDetails 
from createProject slice - const organizationListData: any = CoreModules.useAppSelector((state) => state.createproject.organizationList); + const organisationListData: any = CoreModules.useAppSelector((state) => state.createproject.organisationList); // //we use use selector from redux to get all state of projectDetails from createProject slice const defaultTheme: any = CoreModules.useAppSelector((state) => state.theme.hotTheme); @@ -35,7 +35,7 @@ const EditProjectDetails = ({ projectId }) => { submission, EditProjectValidation, ); - const organizationList = organizationListData.map((item) => ({ label: item.name, value: item.id })); + const organisationList = organisationListData.map((item) => ({ label: item.name, value: item.id })); const inputFormStyles = () => { return { style: { @@ -93,18 +93,18 @@ const EditProjectDetails = ({ projectId }) => { }} labelId="demo-simple-select-label" id="demo-simple-select" - value={values.organization || ''} + value={values.organisation || ''} onChange={(e) => { - handleCustomChange('organization', e.target.value); }} + handleCustomChange('organisation', e.target.value); }} > - {organizationList?.map((org) => ( + {organisationList?.map((org) => ( {org.label} ))} - {errors.organization && ( + {errors.organisation && ( - {errors.organization} + {errors.organisation} )} */} diff --git a/src/frontend/src/components/editproject/UpdateForm.tsx b/src/frontend/src/components/editproject/UpdateForm.tsx index 3ea351ce64..183876e433 100644 --- a/src/frontend/src/components/editproject/UpdateForm.tsx +++ b/src/frontend/src/components/editproject/UpdateForm.tsx @@ -1,9 +1,9 @@ import React, { useEffect, useState } from 'react'; -import CoreModules from '../../shared/CoreModules'; -import environment from '../../environment'; -import { FormCategoryService, PostFormUpdate } from '../../api/CreateProjectService'; +import CoreModules from '@/shared/CoreModules'; +import environment from '@/environment'; +import { FormCategoryService, PostFormUpdate } from '@/api/CreateProjectService'; import { MenuItem } from '@mui/material'; -import { diffObject } from '../../utilfunctions/compareUtils.js'; +import { diffObject } from '@/utilfunctions/compareUtils.js'; const UpdateForm = ({ projectId }) => { const dispatch = CoreModules.useAppDispatch(); diff --git a/src/frontend/src/components/editproject/UpdateProjectArea.tsx b/src/frontend/src/components/editproject/UpdateProjectArea.tsx index 9c83b6860d..12f0a69c38 100644 --- a/src/frontend/src/components/editproject/UpdateProjectArea.tsx +++ b/src/frontend/src/components/editproject/UpdateProjectArea.tsx @@ -1,9 +1,9 @@ import React, { useEffect, useState } from 'react'; -import CoreModules from '../../shared/CoreModules'; -import AssetModules from '../../shared/AssetModules'; -import EditProjectArea from '../../views/EditProjectArea'; -import enviroment from '../../environment'; -import { EditProjectBoundaryService, GetDividedTaskFromGeojson } from '../../api/CreateProjectService'; +import CoreModules from '@/shared/CoreModules'; +import AssetModules from '@/shared/AssetModules'; +import EditProjectArea from '@/views/EditProjectArea'; +import enviroment from '@/environment'; +import { EditProjectBoundaryService, GetDividedTaskFromGeojson } from '@/api/CreateProjectService'; const UpdateProjectArea = ({ projectId }) => { const dispatch = CoreModules.useAppDispatch(); diff --git a/src/frontend/src/components/editproject/validation/EditProjectDetailsValidation.ts 
b/src/frontend/src/components/editproject/validation/EditProjectDetailsValidation.ts index 06a0c6676a..b2f553953c 100644 --- a/src/frontend/src/components/editproject/validation/EditProjectDetailsValidation.ts +++ b/src/frontend/src/components/editproject/validation/EditProjectDetailsValidation.ts @@ -1,5 +1,5 @@ interface ProjectValues { - organization: string; + organisation: string; name: string; username: string; id: string; @@ -10,7 +10,7 @@ interface ProjectValues { // odk_central_password: string; } interface ValidationErrors { - organization?: string; + organisation?: string; name?: string; username?: string; id?: string; @@ -25,8 +25,8 @@ const regexForSymbol = /_/g; function EditProjectValidation(values: ProjectValues) { const errors: ValidationErrors = {}; - // if (!values?.organization) { - // errors.organization = 'Organization is Required.'; + // if (!values?.organisation) { + // errors.organisation = 'Organization is Required.'; // } // if (!values?.odk_central_url) { // errors.odk_central_url = 'ODK Central Url is Required.'; diff --git a/src/frontend/src/components/home/ExploreProjectCard.tsx b/src/frontend/src/components/home/ExploreProjectCard.tsx index 5803ccccf0..2b22a337aa 100755 --- a/src/frontend/src/components/home/ExploreProjectCard.tsx +++ b/src/frontend/src/components/home/ExploreProjectCard.tsx @@ -1,11 +1,11 @@ import * as React from 'react'; -import CustomizedImage from '../../utilities/CustomizedImage'; -import CustomizedProgressBar from '../../utilities/CustomizedProgressBar'; -import environment from '../../environment'; -import { HomeActions } from '../../store/slices/HomeSlice'; -import { HomeProjectCardModel } from '../../models/home/homeModel'; -import CoreModules from '../../shared/CoreModules'; -import AssetModules from '../../shared/AssetModules'; +import CustomizedImage from '@/utilities/CustomizedImage'; +import CustomizedProgressBar from '@/utilities/CustomizedProgressBar'; +import environment from '@/environment'; +import { HomeActions } from '@/store/slices/HomeSlice'; +import { HomeProjectCardModel } from '@/models/home/homeModel'; +import CoreModules from '@/shared/CoreModules'; +import AssetModules from '@/shared/AssetModules'; //Explore Project Card Model to be renderd in home view export default function ExploreProjectCard({ data }) { diff --git a/src/frontend/src/components/home/HomePageFilters.tsx b/src/frontend/src/components/home/HomePageFilters.tsx index 85fa0fa40b..368afec4ce 100755 --- a/src/frontend/src/components/home/HomePageFilters.tsx +++ b/src/frontend/src/components/home/HomePageFilters.tsx @@ -1,9 +1,9 @@ import React, { useState } from 'react'; -import windowDimention from '../../hooks/WindowDimension'; -import CoreModules from '../../shared/CoreModules'; -import AssetModules from '../../shared/AssetModules'; -import Switch from '../common/Switch'; -import { HomeActions } from '../../store/slices/HomeSlice'; +import windowDimention from '@/hooks/WindowDimension'; +import CoreModules from '@/shared/CoreModules'; +import AssetModules from '@/shared/AssetModules'; +import Switch from '@/components/common/Switch'; +import { HomeActions } from '@/store/slices/HomeSlice'; //Home Filter const HomePageFilters = ({ onSearch, filteredProjectCount, totalProjectCount }) => { diff --git a/src/frontend/src/components/home/ProjectCardSkeleton.tsx b/src/frontend/src/components/home/ProjectCardSkeleton.tsx index ef1d1c0174..c71b4bab43 100755 --- a/src/frontend/src/components/home/ProjectCardSkeleton.tsx +++ 
b/src/frontend/src/components/home/ProjectCardSkeleton.tsx @@ -1,5 +1,5 @@ import React from 'react'; -import CoreModules from '../../shared/CoreModules'; +import CoreModules from '@/shared/CoreModules'; // Skeleton card main purpose is to perfom loading in case of any delay in retrieving project const ProjectCardSkeleton = ({ cardsPerRow, defaultTheme }) => { return cardsPerRow.map((data, index) => { diff --git a/src/frontend/src/components/home/ProjectListMap.tsx b/src/frontend/src/components/home/ProjectListMap.tsx index 54b3890084..63b86ea52c 100644 --- a/src/frontend/src/components/home/ProjectListMap.tsx +++ b/src/frontend/src/components/home/ProjectListMap.tsx @@ -1,15 +1,15 @@ import React, { useState, useEffect } from 'react'; -import { useOLMap } from '../MapComponent/OpenLayersComponent'; -import { MapContainer as MapComponent } from '../MapComponent/OpenLayersComponent'; -import LayerSwitcherControl from '../MapComponent/OpenLayersComponent/LayerSwitcher/index.js'; -import { VectorLayer } from '../MapComponent/OpenLayersComponent/Layers'; -import { ClusterLayer } from '../MapComponent/OpenLayersComponent/Layers'; -import CoreModules from '../../shared/CoreModules'; -import { geojsonObjectModel } from '../../constants/geojsonObjectModal'; -import { defaultStyles, getStyles } from '../MapComponent/OpenLayersComponent/helpers/styleUtils'; -import MarkerIcon from '../../assets/images/red_marker.png'; +import { useOLMap } from '@/components/MapComponent/OpenLayersComponent'; +import { MapContainer as MapComponent } from '@/components/MapComponent/OpenLayersComponent'; +import LayerSwitcherControl from '@/components/MapComponent/OpenLayersComponent/LayerSwitcher/index.js'; +import { VectorLayer } from '@/components/MapComponent/OpenLayersComponent/Layers'; +import { ClusterLayer } from '@/components/MapComponent/OpenLayersComponent/Layers'; +import CoreModules from '@/shared/CoreModules'; +import { geojsonObjectModel } from '@/constants/geojsonObjectModal'; +import { defaultStyles, getStyles } from '@/components/MapComponent/OpenLayersComponent/helpers/styleUtils'; +import MarkerIcon from '@/assets/images/red_marker.png'; import { useNavigate } from 'react-router-dom'; -import environment from '../../environment'; +import environment from '@/environment'; import { Style, Text, Icon, Fill } from 'ol/style'; type HomeProjectSummaryType = { diff --git a/src/frontend/src/components/organization/OrganizationAddForm.tsx b/src/frontend/src/components/organisation/OrganisationAddForm.tsx similarity index 89% rename from src/frontend/src/components/organization/OrganizationAddForm.tsx rename to src/frontend/src/components/organisation/OrganisationAddForm.tsx index 701d2f5cbd..398f5cb5ed 100644 --- a/src/frontend/src/components/organization/OrganizationAddForm.tsx +++ b/src/frontend/src/components/organisation/OrganisationAddForm.tsx @@ -1,28 +1,27 @@ import React from 'react'; -import CoreModules from '../../shared/CoreModules.js'; -import useForm from '../../hooks/useForm'; -import OrganizationAddValidation from './Validation/OrganizationAddValidation'; +import CoreModules from '@/shared/CoreModules.js'; +import useForm from '@/hooks/useForm.js'; +import OrganisationAddValidation from '@/components/organisation/Validation/OrganisationAddValidation.js'; import { MenuItem, Select } from '@mui/material'; -import { OrganizationService } from '../../api/OrganizationService'; -import environment from '../../environment'; +import { OrganisationService } from '@/api/OrganisationService'; const 
formData = {}; -const organizationTypeList = ['FREE', 'DISCOUNTED', 'FULL_FEE']; -const organizationDataList = organizationTypeList.map((item, index) => ({ label: item, value: index + 1 })); -const OrganizationAddForm = () => { +const organisationTypeList = ['FREE', 'DISCOUNTED', 'FULL_FEE']; +const organisationDataList = organisationTypeList.map((item, index) => ({ label: item, value: index + 1 })); +const OrganisationAddForm = () => { const dispatch = CoreModules.useAppDispatch(); const defaultTheme: any = CoreModules.useAppSelector((state) => state.theme.hotTheme); const submission = () => { // eslint-disable-next-line no-use-before-define // submitForm(); - dispatch(OrganizationService(`${import.meta.env.VITE_API_URL}/organization/`, values)); + dispatch(OrganisationService(`${import.meta.env.VITE_API_URL}/organisation/`, values)); // navigate("/select-form", { replace: true, state: { values: values } }); }; const { handleSubmit, handleCustomChange, values, errors }: any = useForm( formData, submission, - OrganizationAddValidation, + OrganisationAddValidation, ); const inputFormStyles = () => { return { @@ -165,10 +164,14 @@ const OrganizationAddForm = () => { label="Organization Type" onChange={(e) => { handleCustomChange('type', e.target.value); - // dispatch(CreateProjectActions.SetProjectDetails({ key: 'organization', value: e.target.value })) + // dispatch(CreateProjectActions.SetProjectDetails({ key: 'organisation', value: e.target.value })) }} > - {organizationDataList?.map((org) => {org.label})} + {organisationDataList?.map((org) => ( + + {org.label} + + ))} {errors.type && ( @@ -198,6 +201,6 @@ const OrganizationAddForm = () => { ); }; -OrganizationAddForm.propTypes = {}; +OrganisationAddForm.propTypes = {}; -export default OrganizationAddForm; +export default OrganisationAddForm; diff --git a/src/frontend/src/components/organization/Validation/OrganizationAddValidation.tsx b/src/frontend/src/components/organisation/Validation/OrganisationAddValidation.tsx similarity index 84% rename from src/frontend/src/components/organization/Validation/OrganizationAddValidation.tsx rename to src/frontend/src/components/organisation/Validation/OrganisationAddValidation.tsx index d8fe122dda..891daebbb8 100644 --- a/src/frontend/src/components/organization/Validation/OrganizationAddValidation.tsx +++ b/src/frontend/src/components/organisation/Validation/OrganisationAddValidation.tsx @@ -1,4 +1,4 @@ -interface OrganizationValues { +interface OrganisationValues { logo: string; name: string; description: string; @@ -22,7 +22,7 @@ function isValidUrl(url: string) { } } -function OrganizationAddValidation(values: OrganizationValues) { +function OrganisationAddValidation(values: OrganisationValues) { const errors: ValidationErrors = {}; // if (!values?.logo) { @@ -43,4 +43,4 @@ function OrganizationAddValidation(values: OrganizationValues) { return errors; } -export default OrganizationAddValidation; +export default OrganisationAddValidation; diff --git a/src/frontend/src/hooks/MapStyles.js b/src/frontend/src/hooks/MapStyles.js index 57c18fe183..29980a59c5 100755 --- a/src/frontend/src/hooks/MapStyles.js +++ b/src/frontend/src/hooks/MapStyles.js @@ -2,8 +2,8 @@ import Fill from 'ol/style/Fill'; import Stroke from 'ol/style/Stroke'; import { Icon, Style } from 'ol/style'; import React, { useEffect, useState } from 'react'; -import CoreModules from '../shared/CoreModules'; -import AssetModules from '../shared/AssetModules'; +import CoreModules from '@/shared/CoreModules'; +import AssetModules from 
'@/shared/AssetModules'; import { getCenter } from 'ol/extent'; import Point from 'ol/geom/Point.js'; import { transform } from 'ol/proj'; diff --git a/src/frontend/src/hooks/Prompt.tsx b/src/frontend/src/hooks/Prompt.tsx index 79915cc18a..88288a7e79 100644 --- a/src/frontend/src/hooks/Prompt.tsx +++ b/src/frontend/src/hooks/Prompt.tsx @@ -1,11 +1,10 @@ import React from 'react'; -import pathNotToBlock from '../constants/blockerUrl'; +import pathNotToBlock from '@/constants/blockerUrl'; import { unstable_useBlocker as useBlocker } from 'react-router-dom'; function Prompt(props) { const block = props.when; useBlocker(({ nextLocation }) => { - console.log(nextLocation, 'next'); if (block && !pathNotToBlock.includes(nextLocation.pathname)) { return !window.confirm(props.message); } diff --git a/src/frontend/src/hooks/useForm.tsx b/src/frontend/src/hooks/useForm.tsx index 4000b0b4b0..fc6939435d 100755 --- a/src/frontend/src/hooks/useForm.tsx +++ b/src/frontend/src/hooks/useForm.tsx @@ -59,8 +59,6 @@ const useForm = (initialState, callback, validate) => { useEffect(() => { setValues(initialState); setErrors({}); - - // eslint-disable-next-line react-hooks/exhaustive-deps }, [initialState]); return { diff --git a/src/frontend/src/models/createproject/createProjectModel.ts b/src/frontend/src/models/createproject/createProjectModel.ts index cd89ed1893..1c78c11d86 100755 --- a/src/frontend/src/models/createproject/createProjectModel.ts +++ b/src/frontend/src/models/createproject/createProjectModel.ts @@ -74,7 +74,6 @@ export interface OrganisationListModel { slug: string; description: string; type: number; - subscription_tier: null | string; id: number; logo: string; url: string; diff --git a/src/frontend/src/models/organization/organizationModel.ts b/src/frontend/src/models/organisation/organisationModel.ts similarity index 71% rename from src/frontend/src/models/organization/organizationModel.ts rename to src/frontend/src/models/organisation/organisationModel.ts index a7795a55b1..50e913c78e 100644 --- a/src/frontend/src/models/organization/organizationModel.ts +++ b/src/frontend/src/models/organisation/organisationModel.ts @@ -1,4 +1,4 @@ -export interface OrganizationModal { +export interface OrganisationModal { name: string; description: string; url: string; @@ -14,28 +14,25 @@ export interface OrganisationListModel { slug: string; description: string; type: number; - subscription_tier: null | string; id: number; logo: string; url: string; } -export interface GetOrganizationDataModel { +export interface GetOrganisationDataModel { name: string; slug: string; description: string; type: number; - subscription_tier: null; id: number; logo: string; url: string; } -export interface PostOrganizationDataModel { +export interface PostOrganisationDataModel { name: string; slug: string; description: string; type: number; - subscription_tier: null; id: number; logo: string; url: string; diff --git a/src/frontend/src/routes.jsx b/src/frontend/src/routes.jsx index c25a443cce..ceaa0c591c 100755 --- a/src/frontend/src/routes.jsx +++ b/src/frontend/src/routes.jsx @@ -3,23 +3,23 @@ import { createBrowserRouter, // Navigate, } from 'react-router-dom'; -import Home from './views/Home'; -import Tabbed from './views/Tabbed'; -import MainView from './views/MainView'; -import CreateProject from './views/CreateProject'; -import EditProject from './views/EditProject'; -import ProtectedRoute from './utilities/ProtectedRoute'; -import NotFoundPage from './views/NotFound404'; -import Organization from 
'./views/Organization'; -import CreateOrganization from './views/CreateOrganization'; -import Authorized from './views/Authorized'; -import SubmissionDetails from './views/SubmissionDetails'; -import CreateNewProject from './views/CreateNewProject'; -import ProjectDetails from './views/ProjectDetails'; -import UnderConstruction from './views/UnderConstruction'; -import ErrorBoundary from './views/ErrorBoundary'; -import NewProjectDetails from './views/NewProjectDetails'; -import ProjectDetailsV2 from './views/ProjectDetailsV2'; +import Home from '@/views/Home'; +import Tabbed from '@/views/Tabbed'; +import MainView from '@/views/MainView'; +import CreateProject from '@/views/CreateProject'; +import EditProject from '@/views/EditProject'; +import ProtectedRoute from '@/utilities/ProtectedRoute'; +import NotFoundPage from '@/views/NotFound404'; +import Organisation from '@/views/Organisation'; +import CreateOrganisation from '@/views/CreateOrganisation'; +import Authorized from '@/views/Authorized'; +import SubmissionDetails from '@/views/SubmissionDetails'; +import CreateNewProject from '@/views/CreateNewProject'; +import ProjectDetails from '@/views/ProjectDetails'; +import UnderConstruction from '@/views/UnderConstruction'; +import ErrorBoundary from '@/views/ErrorBoundary'; +import NewProjectDetails from '@/views/NewProjectDetails'; +import ProjectDetailsV2 from '@/views/ProjectDetailsV2'; // const ProjectDetails = React.lazy(() => import('./views/ProjectDetails')); const Submissions = React.lazy(() => import('./views/Submissions')); @@ -39,18 +39,18 @@ const routes = createBrowserRouter([ ), }, { - path: '/organization', + path: '/organisation', element: ( - + ), }, { - path: '/createOrganization', + path: '/createOrganisation', element: ( - + ), }, diff --git a/src/frontend/src/shared/AssetModules.js b/src/frontend/src/shared/AssetModules.js index adf038da20..58790691ea 100755 --- a/src/frontend/src/shared/AssetModules.js +++ b/src/frontend/src/shared/AssetModules.js @@ -56,9 +56,11 @@ import { AccessTime as AccessTimeIcon, ImportExport as ImportExportIcon, Check as CheckIcon, + Undo as UndoIcon, + Timeline as TimelineIcon, } from '@mui/icons-material'; -import LockPng from '../assets/images/lock.png'; -import RedLockPng from '../assets/images/red-lock.png'; +import LockPng from '@/assets/images/lock.png'; +import RedLockPng from '@/assets/images/red-lock.png'; import { styled, alpha } from '@mui/material/styles'; export default { ExitToAppIcon, @@ -122,4 +124,6 @@ export default { AccessTimeIcon, ImportExportIcon, CheckIcon, + UndoIcon, + TimelineIcon, }; diff --git a/src/frontend/src/shared/CoreModules.js b/src/frontend/src/shared/CoreModules.js index e773a37f9c..7409e1ca6b 100755 --- a/src/frontend/src/shared/CoreModules.js +++ b/src/frontend/src/shared/CoreModules.js @@ -68,10 +68,10 @@ import { import { Provider } from 'react-redux'; import { createSlice, configureStore, getDefaultMiddleware } from '@reduxjs/toolkit'; import { combineReducers } from 'redux'; -import LoadingBar from '../components/createproject/LoadingBar'; -import { TaskActions } from '../store/slices/TaskSlice'; -import { useAppDispatch, useAppSelector } from '../types/reduxTypes'; -import CustomizedModal from '../utilities/CustomizedModal'; +import LoadingBar from '@/components/createproject/LoadingBar'; +import { TaskActions } from '@/store/slices/TaskSlice'; +import { useAppDispatch, useAppSelector } from '@/types/reduxTypes'; +import CustomizedModal from '@/utilities/CustomizedModal'; export default { 
Provider, diff --git a/src/frontend/src/store/Store.ts b/src/frontend/src/store/Store.ts index cc20f452e9..8a33f6dcfb 100755 --- a/src/frontend/src/store/Store.ts +++ b/src/frontend/src/store/Store.ts @@ -1,14 +1,14 @@ -import HomeSlice from './slices/HomeSlice'; -import ThemeSlice from './slices/ThemeSlice'; +import HomeSlice from '@/store/slices/HomeSlice'; +import ThemeSlice from '@/store/slices/ThemeSlice'; import { persistStore } from 'redux-persist'; import storage from 'redux-persist/lib/storage'; -import ProjectSlice from './slices/ProjectSlice'; -import CreateProjectReducer from './slices/CreateProjectSlice'; -import CommonSlice from './slices/CommonSlice'; -import LoginSlice from './slices/LoginSlice'; -import OrganizationSlice from './slices/organizationSlice'; -import SubmissionSlice from './slices/SubmissionSlice'; -import TaskSlice from './slices/TaskSlice'; +import ProjectSlice from '@/store/slices/ProjectSlice'; +import CreateProjectReducer from '@/store/slices/CreateProjectSlice'; +import CommonSlice from '@/store/slices/CommonSlice'; +import LoginSlice from '@/store/slices/LoginSlice'; +import OrganisationSlice from '@/store/slices/organisationSlice'; +import SubmissionSlice from '@/store/slices/SubmissionSlice'; +import TaskSlice from '@/store/slices/TaskSlice'; import { persistReducer } from 'redux-persist'; import { combineReducers, configureStore, getDefaultMiddleware } from '@reduxjs/toolkit'; @@ -31,7 +31,7 @@ const rootReducer = combineReducers({ theme: ThemeSlice.reducer, createproject: CreateProjectReducer, // createproject: persist('createproject', ['projectDetails', 'projectInfo'], CreateProjectReducer), - organization: OrganizationSlice.reducer, + organisation: OrganisationSlice.reducer, // added common slice in order to handle all the common things like snackbar etc common: CommonSlice.reducer, submission: SubmissionSlice.reducer, diff --git a/src/frontend/src/store/slices/CommonSlice.ts b/src/frontend/src/store/slices/CommonSlice.ts index c239074a54..b3f9ddca23 100755 --- a/src/frontend/src/store/slices/CommonSlice.ts +++ b/src/frontend/src/store/slices/CommonSlice.ts @@ -1,4 +1,4 @@ -import CoreModules from '../../shared/CoreModules'; +import CoreModules from '@/shared/CoreModules'; const CommonSlice = CoreModules.createSlice({ name: 'common', initialState: { @@ -9,7 +9,7 @@ const CommonSlice = CoreModules.createSlice({ duration: 0, }, loading: false, - postOrganizationLoading: false, + postOrganisationLoading: false, currentStepFormStep: { create_project: { step: 1, @@ -23,8 +23,8 @@ const CommonSlice = CoreModules.createSlice({ SetLoading(state, action) { state.loading = action.payload; }, - PostOrganizationLoading(state, action) { - state.organization = action.payload; + PostOrganisationLoading(state, action) { + state.organisation = action.payload; }, SetCurrentStepFormStep(state, action) { state.currentStepFormStep[action.payload.flag] = { step: action.payload.step }; diff --git a/src/frontend/src/store/slices/CreateProjectSlice.ts b/src/frontend/src/store/slices/CreateProjectSlice.ts index 7f9cc75856..af1a2c5790 100755 --- a/src/frontend/src/store/slices/CreateProjectSlice.ts +++ b/src/frontend/src/store/slices/CreateProjectSlice.ts @@ -1,4 +1,4 @@ -import { CreateProjectStateTypes } from '../types/ICreateProject'; +import { CreateProjectStateTypes } from '@/types/ICreateProject'; import { createSlice } from '@reduxjs/toolkit'; export const initialState: CreateProjectStateTypes = { @@ -24,8 +24,8 @@ export const initialState: CreateProjectStateTypes = 
{ formCategoryList: [], formCategoryLoading: false, generateQrLoading: false, - organizationList: [], - organizationListLoading: false, + organisationList: [], + organisationListLoading: false, generateQrSuccess: null, generateProjectLogLoading: false, generateProjectLog: null, @@ -48,6 +48,8 @@ export const initialState: CreateProjectStateTypes = { isUnsavedChanges: false, canSwitchCreateProjectSteps: false, isTasksGenerated: { divide_on_square: false, task_splitting_algorithm: false }, + isFgbFetching: false, + toggleSplittedGeojsonEdit: false, }; const CreateProject = createSlice({ @@ -112,10 +114,10 @@ const CreateProject = createSlice({ state.generateQrLoading = action.payload; }, GetOrganisationList(state, action) { - state.organizationList = action.payload; + state.organisationList = action.payload; }, GetOrganisationListLoading(state, action) { - state.organizationListLoading = action.payload; + state.organisationListLoading = action.payload; }, GenerateProjectQRSuccess(state, action) { if (action.payload.status === 'SUCCESS') { @@ -215,6 +217,18 @@ const CreateProject = createSlice({ [action.payload.key]: action.payload.value, }; }, + SetFgbFetchingStatus(state, action) { + state.isFgbFetching = action.payload; + }, + ClearProjectStepState(state, action) { + state.dividedTaskGeojson = null; + state.splitTasksSelection = null; + state.dataExtractGeojson = null; + state.projectDetails = { ...action.payload, customLineUpload: null, customPolygonUpload: null }; + }, + SetToggleSplittedGeojsonEdit(state, action) { + state.toggleSplittedGeojsonEdit = action.payload; + }, }, }); diff --git a/src/frontend/src/store/slices/HomeSlice.ts b/src/frontend/src/store/slices/HomeSlice.ts index 6c2afee6aa..430a7a0f21 100755 --- a/src/frontend/src/store/slices/HomeSlice.ts +++ b/src/frontend/src/store/slices/HomeSlice.ts @@ -1,4 +1,4 @@ -import CoreModules from '../../shared/CoreModules'; +import CoreModules from '@/shared/CoreModules'; const HomeSlice = CoreModules.createSlice({ name: 'home', initialState: { diff --git a/src/frontend/src/store/slices/LoginSlice.ts b/src/frontend/src/store/slices/LoginSlice.ts index 6afdd45ee1..bb7381900b 100755 --- a/src/frontend/src/store/slices/LoginSlice.ts +++ b/src/frontend/src/store/slices/LoginSlice.ts @@ -1,4 +1,4 @@ -import CoreModules from '../../shared/CoreModules'; +import CoreModules from '@/shared/CoreModules'; import storage from 'redux-persist/lib/storage'; const LoginSlice = CoreModules.createSlice({ name: 'login', diff --git a/src/frontend/src/store/slices/ThemeSlice.ts b/src/frontend/src/store/slices/ThemeSlice.ts index 781cdcdb57..70051fcd2f 100755 --- a/src/frontend/src/store/slices/ThemeSlice.ts +++ b/src/frontend/src/store/slices/ThemeSlice.ts @@ -1,4 +1,4 @@ -import CoreModules from '../../shared/CoreModules'; +import CoreModules from '@/shared/CoreModules'; const ThemeSlice = CoreModules.createSlice({ name: 'theme', diff --git a/src/frontend/src/store/slices/organisationSlice.ts b/src/frontend/src/store/slices/organisationSlice.ts new file mode 100644 index 0000000000..30af04dbbb --- /dev/null +++ b/src/frontend/src/store/slices/organisationSlice.ts @@ -0,0 +1,32 @@ +import CoreModules from '@/shared/CoreModules.js'; + +const OrganisationSlice = CoreModules.createSlice({ + name: 'organisation', + initialState: { + organisationFormData: {}, + organisationData: [], + postOrganisationData: null, + organisationDataLoading: false, + postOrganisationDataLoading: false, + }, + reducers: { + GetOrganisationsData(state, action) { + 
state.organisationData = action.payload; + }, + GetOrganisationDataLoading(state, action) { + state.organisationDataLoading = action.payload; + }, + postOrganisationData(state, action) { + state.postOrganisationData = action.payload; + }, + PostOrganisationDataLoading(state, action) { + state.postOrganisationDataLoading = action.payload; + }, + SetOrganisationFormData(state, action) { + state.organisationFormData = action.payload; + }, + }, +}); + +export const OrganisationAction = OrganisationSlice.actions; +export default OrganisationSlice; diff --git a/src/frontend/src/store/slices/organizationSlice.ts b/src/frontend/src/store/slices/organizationSlice.ts deleted file mode 100644 index f4d85c9937..0000000000 --- a/src/frontend/src/store/slices/organizationSlice.ts +++ /dev/null @@ -1,32 +0,0 @@ -import CoreModules from '../../shared/CoreModules.js'; - -const OrganizationSlice = CoreModules.createSlice({ - name: 'organization', - initialState: { - organizationFormData: {}, - organizationData: [], - postOrganizationData: null, - organizationDataLoading: false, - postOrganizationDataLoading: false, - }, - reducers: { - GetOrganizationsData(state, action) { - state.oraganizationData = action.payload; - }, - GetOrganizationDataLoading(state, action) { - state.organizationDataLoading = action.payload; - }, - postOrganizationData(state, action) { - state.postOrganizationData = action.payload; - }, - PostOrganizationDataLoading(state, action) { - state.postOrganizationDataLoading = action.payload; - }, - SetOrganizationFormData(state, action) { - state.organizationFormData = action.payload; - }, - }, -}); - -export const OrganizationAction = OrganizationSlice.actions; -export default OrganizationSlice; diff --git a/src/frontend/src/store/types/ICreateProject.ts b/src/frontend/src/store/types/ICreateProject.ts index 900dff66de..adfd719988 100644 --- a/src/frontend/src/store/types/ICreateProject.ts +++ b/src/frontend/src/store/types/ICreateProject.ts @@ -10,8 +10,8 @@ export type CreateProjectStateTypes = { formCategoryList: FormCategoryListTypes | []; formCategoryLoading: boolean; generateQrLoading: boolean; - organizationList: OrganizationListTypes[]; - organizationListLoading: boolean; + organisationList: OrganisationListTypes[]; + organisationListLoading: boolean; generateQrSuccess: GenerateQrSuccessTypes | null; generateProjectLogLoading: boolean; generateProjectLog: GenerateProjectLogTypes | null; @@ -34,6 +34,8 @@ export type CreateProjectStateTypes = { isUnsavedChanges: boolean; canSwitchCreateProjectSteps: boolean; isTasksGenerated: {}; + isFgbFetching: boolean; + toggleSplittedGeojsonEdit: boolean; }; export type ValidateCustomFormResponse = { detail: { message: string; possible_reason: string }; @@ -101,7 +103,7 @@ export type ProjectDetailsTypes = { no_of_buildings: number; odk_central_user?: string; odk_central_password?: string; - organization?: number; + organisation?: number; odk_central_url?: string; name?: string; hashtags?: string; @@ -129,7 +131,7 @@ export type GenerateQrSuccessTypes = { task_id: string; }; -export type OrganizationListTypes = { +export type OrganisationListTypes = { logo: string; id: number; url: string; diff --git a/src/frontend/src/types/reduxTypes.ts b/src/frontend/src/types/reduxTypes.ts index be668677a9..37408bc699 100644 --- a/src/frontend/src/types/reduxTypes.ts +++ b/src/frontend/src/types/reduxTypes.ts @@ -1,6 +1,6 @@ import { useDispatch, useSelector } from 'react-redux'; import type { TypedUseSelectorHook } from 'react-redux'; -import type { 
RootState, AppDispatch } from '../store/Store'; +import type { RootState, AppDispatch } from '@/store/Store'; // Use throughout your app instead of plain `useDispatch` and `useSelector` export const useAppDispatch: () => AppDispatch = useDispatch; diff --git a/src/frontend/src/utilfunctions/checkWGS84Projection.js b/src/frontend/src/utilfunctions/checkWGS84Projection.js index 2699ea0ef8..638126b19c 100644 --- a/src/frontend/src/utilfunctions/checkWGS84Projection.js +++ b/src/frontend/src/utilfunctions/checkWGS84Projection.js @@ -1,26 +1,35 @@ -function checkWGS84Projection(geojson) { +import OLVectorLayer from 'ol/layer/Vector'; +import GeoJSON from 'ol/format/GeoJSON'; +import { Vector as VectorSource } from 'ol/source'; + +function checkWGS84Projection(drawnGeojson) { + const vectorLyr = new OLVectorLayer({ + source: new VectorSource({ + features: new GeoJSON().readFeatures(drawnGeojson), + }), + declutter: true, + }); + + const extent = vectorLyr.getSource()?.getExtent(); + try { - for (const feature of geojson.features) { - const coordinates = feature.geometry.coordinates; - for (const coord of coordinates[0]) { - const [longitude, latitude] = coord; - if ( - isNaN(latitude) || - isNaN(longitude) || - latitude < -90 || - latitude > 90 || - longitude < -180 || - longitude > 180 - ) { - // setIsGeojsonWG84(false); - return false; // Coordinates are out of WGS 84 range - } + if (extent?.length > 0) { + const longitude = extent[0]; + const latitude = extent[1]; + if ( + isNaN(latitude) || + isNaN(longitude) || + latitude < -90 || + latitude > 90 || + longitude < -180 || + longitude > 180 + ) { + return false; } + return true; // All coordinates are within WGS 84 range } - // setIsGeojsonWG84(true); - return true; // All coordinates are within WGS 84 range + return false; } catch (error) { - // setIsGeojsonWG84(false); return false; } } diff --git a/src/frontend/src/utilfunctions/getTaskStatusStyle.js b/src/frontend/src/utilfunctions/getTaskStatusStyle.js index ee572dbc8d..370e91edad 100644 --- a/src/frontend/src/utilfunctions/getTaskStatusStyle.js +++ b/src/frontend/src/utilfunctions/getTaskStatusStyle.js @@ -1,8 +1,8 @@ import { Fill, Icon, Stroke, Style } from 'ol/style'; import { transform } from 'ol/proj'; import { Point } from 'ol/geom'; -import AssetModules from '../shared/AssetModules'; -import { task_priority_str } from '../types/enums'; +import AssetModules from '@/shared/AssetModules'; +import { task_priority_str } from '@/types/enums'; function createPolygonStyle(fillColor, strokeColor) { return new Style({ diff --git a/src/frontend/src/utilfunctions/login.ts b/src/frontend/src/utilfunctions/login.ts index 503dabd1da..a3306379dd 100644 --- a/src/frontend/src/utilfunctions/login.ts +++ b/src/frontend/src/utilfunctions/login.ts @@ -1,4 +1,4 @@ -import { createPopup } from './createPopup'; +import { createPopup } from '@/utilfunctions/createPopup'; export const createLoginWindow = (redirectTo) => { fetch(`${import.meta.env.VITE_API_URL}/auth/osm_login/`) diff --git a/src/frontend/src/utilfunctions/testUtils.jsx b/src/frontend/src/utilfunctions/testUtils.jsx index 3bc4787685..f39bf8d922 100644 --- a/src/frontend/src/utilfunctions/testUtils.jsx +++ b/src/frontend/src/utilfunctions/testUtils.jsx @@ -1,6 +1,6 @@ import React from 'react'; import { Provider } from 'react-redux'; -import { store } from '../store/Store'; +import { store } from '@/store/Store'; import { BrowserRouter } from 'react-router-dom'; import { act, render } from '@testing-library/react'; export const 
renderWithRouter = (ui, { route = '/' } = {}) => { diff --git a/src/frontend/src/utilities/AppLoader.jsx b/src/frontend/src/utilities/AppLoader.jsx index 8833a0c670..b4da0ebf9f 100644 --- a/src/frontend/src/utilities/AppLoader.jsx +++ b/src/frontend/src/utilities/AppLoader.jsx @@ -11,7 +11,7 @@ import { ScaleLoader, DotLoader, } from 'react-spinners'; -import CoreModules from '../shared/CoreModules'; +import CoreModules from '@/shared/CoreModules'; const override = { display: 'block', diff --git a/src/frontend/src/utilities/BasicCard.tsx b/src/frontend/src/utilities/BasicCard.tsx index fb5eadd501..dda8b0f4ef 100755 --- a/src/frontend/src/utilities/BasicCard.tsx +++ b/src/frontend/src/utilities/BasicCard.tsx @@ -1,5 +1,5 @@ import * as React from 'react'; -import CoreModules from '../shared/CoreModules'; +import CoreModules from '@/shared/CoreModules'; export default function BasicCard({ title, subtitle, content, variant, contentProps, headerStatus }) { return ( diff --git a/src/frontend/src/utilities/BasicDialog.jsx b/src/frontend/src/utilities/BasicDialog.jsx index be97285132..789258f90c 100755 --- a/src/frontend/src/utilities/BasicDialog.jsx +++ b/src/frontend/src/utilities/BasicDialog.jsx @@ -1,6 +1,6 @@ import * as React from 'react'; -import CoreModules from '../shared/CoreModules'; -import AssetModules from '../shared/AssetModules'; +import CoreModules from '@/shared/CoreModules'; +import AssetModules from '@/shared/AssetModules'; export default function BasicDialog({ open, actions, title, onClose, subtitle }) { return ( diff --git a/src/frontend/src/utilities/BasicDialog.tsx b/src/frontend/src/utilities/BasicDialog.tsx index 6533ff11ab..f3d8e9f3a8 100755 --- a/src/frontend/src/utilities/BasicDialog.tsx +++ b/src/frontend/src/utilities/BasicDialog.tsx @@ -1,7 +1,7 @@ import * as React from 'react'; import { TransitionProps } from '@mui/material/transitions'; -import CoreModules from '../shared/CoreModules'; -import AssetModules from '../shared/AssetModules'; +import CoreModules from '@/shared/CoreModules'; +import AssetModules from '@/shared/AssetModules'; const Transition = React.forwardRef(function Transition( props: TransitionProps & { diff --git a/src/frontend/src/utilities/BasicTabs.tsx b/src/frontend/src/utilities/BasicTabs.tsx index 6d7c77e257..8fb08b27f5 100755 --- a/src/frontend/src/utilities/BasicTabs.tsx +++ b/src/frontend/src/utilities/BasicTabs.tsx @@ -1,7 +1,7 @@ import * as React from 'react'; import PropTypes from 'prop-types'; -import windowDimention from '../hooks/WindowDimension'; -import CoreModules from '../shared/CoreModules'; +import windowDimention from '@/hooks/WindowDimension'; +import CoreModules from '@/shared/CoreModules'; function TabPanel(props) { const { children, value, index, ...other } = props; diff --git a/src/frontend/src/utilities/CustomDrawer.jsx b/src/frontend/src/utilities/CustomDrawer.jsx index 268e42a080..3996f6f090 100644 --- a/src/frontend/src/utilities/CustomDrawer.jsx +++ b/src/frontend/src/utilities/CustomDrawer.jsx @@ -1,12 +1,12 @@ import * as React from 'react'; import SwipeableDrawer from '@mui/material/SwipeableDrawer'; -import CoreModules from '../shared/CoreModules'; -import AssetModules from '../shared/AssetModules'; +import CoreModules from '@/shared/CoreModules'; +import AssetModules from '@/shared/AssetModules'; import { NavLink } from 'react-router-dom'; -import { createLoginWindow, revokeCookie } from '../utilfunctions/login'; -import { CommonActions } from '../store/slices/CommonSlice'; -import { LoginActions } from 
'../store/slices/LoginSlice'; -import { ProjectActions } from '../store/slices/ProjectSlice'; +import { createLoginWindow, revokeCookie } from '@/utilfunctions/login'; +import { CommonActions } from '@/store/slices/CommonSlice'; +import { LoginActions } from '@/store/slices/LoginSlice'; +import { ProjectActions } from '@/store/slices/ProjectSlice'; export default function CustomDrawer({ open, placement, size, type, onClose, onSignOut, setOpen }) { const defaultTheme = CoreModules.useAppSelector((state) => state.theme.hotTheme); @@ -50,7 +50,7 @@ export default function CustomDrawer({ open, placement, size, type, onClose, onS }, { name: 'Manage Organizations', - ref: '/organization', + ref: '/organisation', isExternalLink: false, isActive: true, }, diff --git a/src/frontend/src/utilities/CustomizedImage.jsx b/src/frontend/src/utilities/CustomizedImage.jsx index bfa2c7c15b..373e9e8e59 100755 --- a/src/frontend/src/utilities/CustomizedImage.jsx +++ b/src/frontend/src/utilities/CustomizedImage.jsx @@ -1,6 +1,6 @@ import React from 'react'; -import cardImg from '../assets/images/project_icon.png'; -import logo from '../assets/images/hotLog.png'; +import cardImg from '@/assets/images/project_icon.png'; +import logo from '@/assets/images/hotLog.png'; import { LazyLoadImage } from 'react-lazy-load-image-component'; const Switcher = ({ status, width, height }) => { switch (status) { diff --git a/src/frontend/src/utilities/CustomizedMenus.tsx b/src/frontend/src/utilities/CustomizedMenus.tsx index 5b36f53097..5c47628c86 100755 --- a/src/frontend/src/utilities/CustomizedMenus.tsx +++ b/src/frontend/src/utilities/CustomizedMenus.tsx @@ -1,7 +1,7 @@ import * as React from 'react'; import { MenuProps } from '@mui/material/Menu'; -import CoreModules from '../shared/CoreModules'; -import AssetModules from '../shared/AssetModules'; +import CoreModules from '@/shared/CoreModules'; +import AssetModules from '@/shared/AssetModules'; const StyledMenu = AssetModules.styled((props: MenuProps) => ( ; }); +Backdrop.displayName = 'ModalBackdrop'; const StyledModal = styled(Modal)` position: fixed; diff --git a/src/frontend/src/utilities/CustomizedProgressBar.tsx b/src/frontend/src/utilities/CustomizedProgressBar.tsx index 797d78ef26..8ccba25342 100755 --- a/src/frontend/src/utilities/CustomizedProgressBar.tsx +++ b/src/frontend/src/utilities/CustomizedProgressBar.tsx @@ -1,5 +1,5 @@ import React from 'react'; -import CoreModules from '../shared/CoreModules'; +import CoreModules from '@/shared/CoreModules'; const CustomizedProgressBar = ({ height, data }) => { const defaultTheme: any = CoreModules.useAppSelector((state) => state.theme.hotTheme); diff --git a/src/frontend/src/utilities/CustomizedSnackbar.jsx b/src/frontend/src/utilities/CustomizedSnackbar.jsx index bb70fbb4fd..6a6c111908 100755 --- a/src/frontend/src/utilities/CustomizedSnackbar.jsx +++ b/src/frontend/src/utilities/CustomizedSnackbar.jsx @@ -1,5 +1,5 @@ import * as React from 'react'; -import CoreModules from '../shared/CoreModules'; +import CoreModules from '@/shared/CoreModules'; const Alert = React.forwardRef(function Alert(props, ref) { return ; }); diff --git a/src/frontend/src/utilities/IconButtonCard.jsx b/src/frontend/src/utilities/IconButtonCard.jsx index 3e421028b9..4c8e33959c 100755 --- a/src/frontend/src/utilities/IconButtonCard.jsx +++ b/src/frontend/src/utilities/IconButtonCard.jsx @@ -1,5 +1,5 @@ import React from 'react'; -import CoreModules from '../shared/CoreModules'; +import CoreModules from '@/shared/CoreModules'; export default 
function IconButtonCard({ element, style, radius }) { return ( { const onToggleOutline = (e) => { diff --git a/src/frontend/src/utilities/PrimaryAppBar.tsx b/src/frontend/src/utilities/PrimaryAppBar.tsx index 6bee67c1a9..49b0c84e39 100755 --- a/src/frontend/src/utilities/PrimaryAppBar.tsx +++ b/src/frontend/src/utilities/PrimaryAppBar.tsx @@ -1,14 +1,14 @@ import * as React from 'react'; -import windowDimention from '../hooks/WindowDimension'; -import DrawerComponent from './CustomDrawer'; -import CustomizedImage from '../utilities/CustomizedImage'; -import { ThemeActions } from '../store/slices/ThemeSlice'; -import CoreModules from '../shared/CoreModules'; -import AssetModules from '../shared/AssetModules'; -import { CommonActions } from '../store/slices/CommonSlice'; -import { LoginActions } from '../store/slices/LoginSlice'; -import { ProjectActions } from '../store/slices/ProjectSlice'; -import { createLoginWindow, revokeCookie } from '../utilfunctions/login'; +import windowDimention from '@/hooks/WindowDimension'; +import DrawerComponent from '@/utilities/CustomDrawer'; +import CustomizedImage from '@/utilities/CustomizedImage'; +import { ThemeActions } from '@/store/slices/ThemeSlice'; +import CoreModules from '@/shared/CoreModules'; +import AssetModules from '@/shared/AssetModules'; +import { CommonActions } from '@/store/slices/CommonSlice'; +import { LoginActions } from '@/store/slices/LoginSlice'; +import { ProjectActions } from '@/store/slices/ProjectSlice'; +import { createLoginWindow, revokeCookie } from '@/utilfunctions/login'; import { useState } from 'react'; export default function PrimaryAppBar() { @@ -115,7 +115,7 @@ export default function PrimaryAppBar() { onClick={() => setActiveTab(0)} /> - + { // Bypass check if NODE_ENV=development (local dev) diff --git a/src/frontend/src/views/Authorized.tsx b/src/frontend/src/views/Authorized.tsx index 29aba80d0a..2204e32314 100644 --- a/src/frontend/src/views/Authorized.tsx +++ b/src/frontend/src/views/Authorized.tsx @@ -1,7 +1,7 @@ import React, { useEffect, useState } from 'react'; import { useNavigate, useLocation } from 'react-router-dom'; -import { LoginActions } from '../store/slices/LoginSlice'; -import CoreModules from '../shared/CoreModules.js'; +import { LoginActions } from '@/store/slices/LoginSlice'; +import CoreModules from '@/shared/CoreModules.js'; function Authorized() { const navigate = useNavigate(); diff --git a/src/frontend/src/views/CreateNewProject.tsx b/src/frontend/src/views/CreateNewProject.tsx index 14f6ac96bb..a16858219b 100644 --- a/src/frontend/src/views/CreateNewProject.tsx +++ b/src/frontend/src/views/CreateNewProject.tsx @@ -1,17 +1,17 @@ -import StepSwitcher from '../components/common/StepSwitcher'; -import CreateProjectHeader from '../components/createnewproject/CreateProjectHeader'; +import StepSwitcher from '@/components/common/StepSwitcher'; +import CreateProjectHeader from '@/components/createnewproject/CreateProjectHeader'; import React, { useEffect, useState } from 'react'; -import { createProjectSteps } from '../constants/StepFormConstants'; -import ProjectDetailsForm from '../components/createnewproject/ProjectDetailsForm'; -import UploadArea from '../components/createnewproject/UploadArea'; -import DataExtract from '../components/createnewproject/DataExtract'; -import SplitTasks from '../components/createnewproject/SplitTasks'; -import SelectForm from '../components/createnewproject/SelectForm'; +import { createProjectSteps } from '@/constants/StepFormConstants'; +import 
ProjectDetailsForm from '@/components/createnewproject/ProjectDetailsForm'; +import UploadArea from '@/components/createnewproject/UploadArea'; +import DataExtract from '@/components/createnewproject/DataExtract'; +import SplitTasks from '@/components/createnewproject/SplitTasks'; +import SelectForm from '@/components/createnewproject/SelectForm'; import { useLocation, useNavigate } from 'react-router-dom'; import { useDispatch } from 'react-redux'; -import { CommonActions } from '.././store/slices/CommonSlice'; -import { useAppSelector } from '../types/reduxTypes'; -import Prompt from '../hooks/Prompt'; +import { CommonActions } from '@/store/slices/CommonSlice'; +import { useAppSelector } from '@/types/reduxTypes'; +import Prompt from '@/hooks/Prompt'; const CreateNewProject = () => { const location = useLocation(); const dispatch = useDispatch(); @@ -61,7 +61,15 @@ const CreateNewProject = () => { case '/create-project': return ; case '/upload-area': - return ; + return ( + + ); case '/select-form': return ( { +const CreateOrganisationForm = () => { const dispatch = CoreModules.useAppDispatch(); const navigate = useNavigate(); const [searchParams, setSearchParams] = useSearchParams(); const defaultTheme: any = CoreModules.useAppSelector((state) => state.theme.hotTheme); - const postOrganizationData: any = CoreModules.useAppSelector((state) => state.organization.postOrganizationData); + const postOrganisationData: any = CoreModules.useAppSelector((state) => state.organisation.postOrganisationData); - const organizationFormData: any = CoreModules.useAppSelector((state) => state.organization.organizationFormData); + const organisationFormData: any = CoreModules.useAppSelector((state) => state.organisation.organisationFormData); const submission = () => { - dispatch(PostOrganizationDataService(`${import.meta.env.VITE_API_URL}/organization/`, values)); + dispatch(PostOrganisationDataService(`${import.meta.env.VITE_API_URL}/organisation/`, values)); }; const { handleSubmit, handleCustomChange, values, errors }: any = useForm( - organizationFormData, + organisationFormData, submission, - OrganizationAddValidation, + OrganisationAddValidation, ); const inputFormStyles = () => { return { @@ -35,16 +35,16 @@ const CreateOrganizationForm = () => { }; useEffect(() => { - if (postOrganizationData) { - dispatch(OrganizationAction.postOrganizationData(null)); - dispatch(OrganizationAction.SetOrganizationFormData({})); + if (postOrganisationData) { + dispatch(OrganisationAction.postOrganisationData(null)); + dispatch(OrganisationAction.SetOrganisationFormData({})); if (searchParams.get('popup') === 'true') { window.close(); } else { - navigate('/organization'); + navigate('/organisation'); } } - }, [postOrganizationData]); + }, [postOrganisationData]); return ( { ); }; -export default CreateOrganizationForm; +export default CreateOrganisationForm; diff --git a/src/frontend/src/views/CreateProject.tsx b/src/frontend/src/views/CreateProject.tsx index 7f23205697..692f97a6a1 100755 --- a/src/frontend/src/views/CreateProject.tsx +++ b/src/frontend/src/views/CreateProject.tsx @@ -1,13 +1,13 @@ import React, { useEffect, useState } from 'react'; import '../styles/home.css'; -import CoreModules from '../shared/CoreModules'; -import UploadArea from '../components/createproject/UploadArea'; +import CoreModules from '@/shared/CoreModules'; +import UploadArea from '@/components/createproject/UploadArea'; import { useLocation, Link } from 'react-router-dom'; -import ProjectDetailsForm from 
'../components/createproject/ProjectDetailsForm'; -import FormSelection from '../components/createproject/FormSelection'; -import DefineTasks from '../components/createproject/DefineTasks'; -import { CreateProjectActions } from '../store/slices/CreateProjectSlice'; -import DataExtract from '../components/createproject/DataExtract'; +import ProjectDetailsForm from '@/components/createproject/ProjectDetailsForm'; +import FormSelection from '@/components/createproject/FormSelection'; +import DefineTasks from '@/components/createproject/DefineTasks'; +import { CreateProjectActions } from '@/store/slices/CreateProjectSlice'; +import DataExtract from '@/components/createproject/DataExtract'; const CreateProject: React.FC = () => { const [geojsonFile, setGeojsonFile] = useState(null); diff --git a/src/frontend/src/views/DefineAreaMap.tsx b/src/frontend/src/views/DefineAreaMap.tsx index 0b193e8229..acf5b12c58 100644 --- a/src/frontend/src/views/DefineAreaMap.tsx +++ b/src/frontend/src/views/DefineAreaMap.tsx @@ -1,10 +1,10 @@ import React, { useEffect, useState } from 'react'; -import useOLMap from '../hooks/useOlMap'; -import { MapContainer as MapComponent } from '../components/MapComponent/OpenLayersComponent'; -import LayerSwitcherControl from '../components/MapComponent/OpenLayersComponent/LayerSwitcher/index.js'; -import { VectorLayer } from '../components/MapComponent/OpenLayersComponent/Layers'; -import CoreModules from '../shared/CoreModules'; -import { CreateProjectActions } from '../store/slices/CreateProjectSlice'; +import useOLMap from '@/hooks/useOlMap'; +import { MapContainer as MapComponent } from '@/components/MapComponent/OpenLayersComponent'; +import LayerSwitcherControl from '@/components/MapComponent/OpenLayersComponent/LayerSwitcher/index.js'; +import { VectorLayer } from '@/components/MapComponent/OpenLayersComponent/Layers'; +import CoreModules from '@/shared/CoreModules'; +import { CreateProjectActions } from '@/store/slices/CreateProjectSlice'; const DefineAreaMap = ({ uploadedGeojson, diff --git a/src/frontend/src/views/EditProject.tsx b/src/frontend/src/views/EditProject.tsx index 8f49a394a8..41ae4f2032 100755 --- a/src/frontend/src/views/EditProject.tsx +++ b/src/frontend/src/views/EditProject.tsx @@ -1,14 +1,14 @@ import React, { useEffect, useState } from 'react'; import '../styles/home.css'; -import CoreModules from '../shared/CoreModules'; -import AssetModules from '../shared/AssetModules'; -import environment from '../environment'; -import { FormCategoryService, GetIndividualProjectDetails, OrganisationService } from '../api/CreateProjectService'; -import EditProjectDetails from '../components/editproject/EditProjectDetails'; -import SidebarContent from '../constants/EditProjectSidebarContent'; +import CoreModules from '@/shared/CoreModules'; +import AssetModules from '@/shared/AssetModules'; +import environment from '@/environment'; +import { FormCategoryService, GetIndividualProjectDetails, OrganisationService } from '@/api/CreateProjectService'; +import EditProjectDetails from '@/components/editproject/EditProjectDetails'; +import SidebarContent from '@/constants/EditProjectSidebarContent'; import { useNavigate } from 'react-router-dom'; -import UpdateForm from '../components/editproject/UpdateForm'; -import UpdateProjectArea from '../components/editproject/UpdateProjectArea'; +import UpdateForm from '@/components/editproject/UpdateForm'; +import UpdateProjectArea from '@/components/editproject/UpdateProjectArea'; const EditProject: React.FC = () => { const 
dispatch = CoreModules.useAppDispatch(); @@ -26,7 +26,7 @@ const EditProject: React.FC = () => { }, }; useEffect(() => { - dispatch(OrganisationService(`${import.meta.env.VITE_API_URL}/organization/`)); + dispatch(OrganisationService(`${import.meta.env.VITE_API_URL}/organisation/`)); if (decodedProjectId) { dispatch(GetIndividualProjectDetails(`${import.meta.env.VITE_API_URL}/projects/${decodedProjectId}`)); diff --git a/src/frontend/src/views/EditProjectArea.tsx b/src/frontend/src/views/EditProjectArea.tsx index 898df1fa30..8a86ee96ab 100644 --- a/src/frontend/src/views/EditProjectArea.tsx +++ b/src/frontend/src/views/EditProjectArea.tsx @@ -1,8 +1,8 @@ import React from 'react'; -import useOLMap from '../hooks/useOlMap'; -import { MapContainer as MapComponent } from '../components/MapComponent/OpenLayersComponent'; -import LayerSwitcherControl from '../components/MapComponent/OpenLayersComponent/LayerSwitcher/index.js'; -import { VectorLayer } from '../components/MapComponent/OpenLayersComponent/Layers'; +import useOLMap from '@/hooks/useOlMap'; +import { MapContainer as MapComponent } from '@/components/MapComponent/OpenLayersComponent'; +import LayerSwitcherControl from '@/components/MapComponent/OpenLayersComponent/LayerSwitcher/index.js'; +import { VectorLayer } from '@/components/MapComponent/OpenLayersComponent/Layers'; const EditProjectArea = ({ geojson }) => { const { mapRef, map } = useOLMap({ diff --git a/src/frontend/src/views/ErrorBoundary.tsx b/src/frontend/src/views/ErrorBoundary.tsx index e4385658ea..375f969021 100644 --- a/src/frontend/src/views/ErrorBoundary.tsx +++ b/src/frontend/src/views/ErrorBoundary.tsx @@ -1,7 +1,7 @@ import React from 'react'; -import SomethingWentWrongImage from '../assets/images/something_went_wrong.png'; -import Button from '../components/common/Button'; -import AssetModules from '../shared/AssetModules'; +import SomethingWentWrongImage from '@/assets/images/something_went_wrong.png'; +import Button from '@/components/common/Button'; +import AssetModules from '@/shared/AssetModules'; interface ErrorBoundaryProps { showError?: boolean; diff --git a/src/frontend/src/views/Home.jsx b/src/frontend/src/views/Home.jsx index 8622ce89aa..e06b6930d4 100755 --- a/src/frontend/src/views/Home.jsx +++ b/src/frontend/src/views/Home.jsx @@ -1,14 +1,14 @@ import React, { useEffect, useState } from 'react'; import '../styles/home.css'; -import ExploreProjectCard from '../components/home/ExploreProjectCard'; -import windowDimention from '../hooks/WindowDimension'; -import { HomeSummaryService } from '../api/HomeService'; -import enviroment from '../environment'; -import ProjectCardSkeleton from '../components/home/ProjectCardSkeleton'; -import HomePageFilters from '../components/home/HomePageFilters'; -import CoreModules from '../shared/CoreModules'; -import AssetModules from '../shared/AssetModules'; -import ProjectListMap from '../components/home/ProjectListMap'; +import ExploreProjectCard from '@/components/home/ExploreProjectCard'; +import windowDimention from '@/hooks/WindowDimension'; +import { HomeSummaryService } from '@/api/HomeService'; +import enviroment from '@/environment'; +import ProjectCardSkeleton from '@/components/home/ProjectCardSkeleton'; +import HomePageFilters from '@/components/home/HomePageFilters'; +import CoreModules from '@/shared/CoreModules'; +import AssetModules from '@/shared/AssetModules'; +import ProjectListMap from '@/components/home/ProjectListMap'; const Home = () => { const [searchQuery, setSearchQuery] = useState(''); diff 
--git a/src/frontend/src/views/MainView.jsx b/src/frontend/src/views/MainView.jsx index 88d85ee61d..12f9fcbcf7 100755 --- a/src/frontend/src/views/MainView.jsx +++ b/src/frontend/src/views/MainView.jsx @@ -1,11 +1,11 @@ import React from 'react'; -import windowDimention from '../hooks/WindowDimension'; -import PrimaryAppBar from '../utilities/PrimaryAppBar'; -import CoreModules from '../shared/CoreModules'; -import CustomizedSnackbars from '../utilities/CustomizedSnackbar'; -import { CommonActions } from '../store/slices/CommonSlice'; -import Loader from '../utilities/AppLoader'; -import MappingHeader from '../utilities/MappingHeader'; +import windowDimention from '@/hooks/WindowDimension'; +import PrimaryAppBar from '@/utilities/PrimaryAppBar'; +import CoreModules from '@/shared/CoreModules'; +import CustomizedSnackbars from '@/utilities/CustomizedSnackbar'; +import { CommonActions } from '@/store/slices/CommonSlice'; +import Loader from '@/utilities/AppLoader'; +import MappingHeader from '@/utilities/MappingHeader'; import { useLocation, useSearchParams } from 'react-router-dom'; const MainView = () => { diff --git a/src/frontend/src/views/NewDefineAreaMap.tsx b/src/frontend/src/views/NewDefineAreaMap.tsx index 72d0b8d82e..bc9d906e1d 100644 --- a/src/frontend/src/views/NewDefineAreaMap.tsx +++ b/src/frontend/src/views/NewDefineAreaMap.tsx @@ -1,17 +1,20 @@ import React from 'react'; -import useOLMap from '../hooks/useOlMap'; -import { MapContainer as MapComponent } from '../components/MapComponent/OpenLayersComponent'; -import LayerSwitcherControl from '../components/MapComponent/OpenLayersComponent/LayerSwitcher/index.js'; -import { VectorLayer } from '../components/MapComponent/OpenLayersComponent/Layers'; -import { GeoJSONFeatureTypes } from '../store/types/ICreateProject'; +import useOLMap from '@/hooks/useOlMap'; +import { MapContainer as MapComponent } from '@/components/MapComponent/OpenLayersComponent'; +import LayerSwitcherControl from '@/components/MapComponent/OpenLayersComponent/LayerSwitcher/index.js'; +import { VectorLayer } from '@/components/MapComponent/OpenLayersComponent/Layers'; +import { GeoJSONFeatureTypes } from '@/store/types/ICreateProject'; +import MapControlComponent from '@/components/createnewproject/MapControlComponent'; type NewDefineAreaMapProps = { drawToggle?: boolean; - splittedGeojson: GeoJSONFeatureTypes; + splittedGeojson: GeoJSONFeatureTypes | null; uploadedOrDrawnGeojsonFile: GeoJSONFeatureTypes; buildingExtractedGeojson?: GeoJSONFeatureTypes; lineExtractedGeojson?: GeoJSONFeatureTypes; - onDraw?: () => void; + onDraw?: (geojson: any, area: number) => void; + onModify?: ((geojson: any, area?: number) => void) | null; + hasEditUndo?: boolean; }; const NewDefineAreaMap = ({ drawToggle, @@ -21,6 +24,7 @@ const NewDefineAreaMap = ({ lineExtractedGeojson, onDraw, onModify, + hasEditUndo, }: NewDefineAreaMapProps) => { const { mapRef, map } = useOLMap({ // center: fromLonLat([85.3, 27.7]), @@ -42,6 +46,7 @@ const NewDefineAreaMap = ({ }} > + {splittedGeojson && ( )} {isDrawOrGeojsonFile && !splittedGeojson && ( diff --git a/src/frontend/src/views/NewProjectDetails.jsx b/src/frontend/src/views/NewProjectDetails.jsx index 7bcc0af6ba..6249b6cbbf 100644 --- a/src/frontend/src/views/NewProjectDetails.jsx +++ b/src/frontend/src/views/NewProjectDetails.jsx @@ -1,43 +1,43 @@ import React, { useEffect, useState } from 'react'; import '../styles/home.scss'; -import WindowDimension from '../hooks/WindowDimension'; -import MapDescriptionComponents from 
'../components/MapDescriptionComponents'; -import ActivitiesPanel from '../components/ActivitiesPanel'; -import environment from '../environment'; -import { ProjectById } from '../api/Project'; -import { ProjectActions } from '../store/slices/ProjectSlice'; -import CustomizedSnackbar from '../utilities/CustomizedSnackbar'; -import OnScroll from '../hooks/OnScroll'; -import { HomeActions } from '../store/slices/HomeSlice'; -import CoreModules from '../shared/CoreModules'; -import AssetModules from '../shared/AssetModules'; -import FmtmLogo from '../assets/images/hotLog.png'; -import GenerateBasemap from '../components/GenerateBasemap'; -import { ProjectBuildingGeojsonService } from '../api/SubmissionService'; -import TaskSectionPopup from '../components/ProjectDetails/TaskSectionPopup'; -import DialogTaskActions from '../components/DialogTaskActions'; -import QrcodeComponent from '../components/QrcodeComponent'; -import MobileFooter from '../components/ProjectDetails/MobileFooter'; -import MobileActivitiesContents from '../components/ProjectDetails/MobileActivitiesContents'; -import BottomSheet from '../components/common/BottomSheet'; -import MobileProjectInfoContent from '../components/ProjectDetails/MobileProjectInfoContent'; +import WindowDimension from '@/hooks/WindowDimension'; +import MapDescriptionComponents from '@/components/MapDescriptionComponents'; +import ActivitiesPanel from '@/components/ActivitiesPanel'; +import environment from '@/environment'; +import { ProjectById } from '@/api/Project'; +import { ProjectActions } from '@/store/slices/ProjectSlice'; +import CustomizedSnackbar from '@/utilities/CustomizedSnackbar'; +import OnScroll from '@/hooks/OnScroll'; +import { HomeActions } from '@/store/slices/HomeSlice'; +import CoreModules from '@/shared/CoreModules'; +import AssetModules from '@/shared/AssetModules'; +import FmtmLogo from '@/assets/images/hotLog.png'; +import GenerateBasemap from '@/components/GenerateBasemap'; +import { ProjectBuildingGeojsonService } from '@/api/SubmissionService'; +import TaskSectionPopup from '@/components/ProjectDetails/TaskSectionPopup'; +import DialogTaskActions from '@/components/DialogTaskActions'; +import QrcodeComponent from '@/components/QrcodeComponent'; +import MobileFooter from '@/components/ProjectDetails/MobileFooter'; +import MobileActivitiesContents from '@/components/ProjectDetails/MobileActivitiesContents'; +import BottomSheet from '@/components/common/BottomSheet'; +import MobileProjectInfoContent from '@/components/ProjectDetails/MobileProjectInfoContent'; import { useNavigate } from 'react-router-dom'; -import ProjectOptions from '../components/ProjectDetails/ProjectOptions'; -import { MapContainer as MapComponent, useOLMap } from '../components/MapComponent/OpenLayersComponent'; -import LayerSwitcherControl from '../components/MapComponent/OpenLayersComponent/LayerSwitcher/index'; -import MapControlComponent from '../components/ProjectDetails/MapControlComponent'; -import { VectorLayer } from '../components/MapComponent/OpenLayersComponent/Layers'; -import { geojsonObjectModel } from '../constants/geojsonObjectModal'; -import { basicGeojsonTemplate } from '../utilities/mapUtils'; -import getTaskStatusStyle from '../utilfunctions/getTaskStatusStyle'; -import { defaultStyles } from '../components/MapComponent/OpenLayersComponent/helpers/styleUtils'; -import MapLegends from '../components/MapLegends'; -import Accordion from '../components/common/Accordion'; +import ProjectOptions from 
'@/components/ProjectDetails/ProjectOptions'; +import { MapContainer as MapComponent, useOLMap } from '@/components/MapComponent/OpenLayersComponent'; +import LayerSwitcherControl from '@/components/MapComponent/OpenLayersComponent/LayerSwitcher/index'; +import MapControlComponent from '@/components/ProjectDetails/MapControlComponent'; +import { VectorLayer } from '@/components/MapComponent/OpenLayersComponent/Layers'; +import { geojsonObjectModel } from '@/constants/geojsonObjectModal'; +import { basicGeojsonTemplate } from '@/utilities/mapUtils'; +import getTaskStatusStyle from '@/utilfunctions/getTaskStatusStyle'; +import { defaultStyles } from '@/components/MapComponent/OpenLayersComponent/helpers/styleUtils'; +import MapLegends from '@/components/MapLegends'; +import Accordion from '@/components/common/Accordion'; import { Geolocation } from '@capacitor/geolocation'; import { Icon, Style } from 'ol/style'; import { Motion } from '@capacitor/motion'; -import locationArc from '../assets/images/locationArc.png'; -import { CommonActions } from '../store/slices/CommonSlice'; +import locationArc from '@/assets/images/locationArc.png'; +import { CommonActions } from '@/store/slices/CommonSlice'; const Home = () => { const dispatch = CoreModules.useAppDispatch(); diff --git a/src/frontend/src/views/NotFound404.jsx b/src/frontend/src/views/NotFound404.jsx index ee8ce79070..e1152480a1 100644 --- a/src/frontend/src/views/NotFound404.jsx +++ b/src/frontend/src/views/NotFound404.jsx @@ -1,6 +1,6 @@ import React from 'react'; -import pageNoFound from '../assets/images/notFound.png'; -import CoreModules from '../shared/CoreModules'; +import pageNoFound from '@/assets/images/notFound.png'; +import CoreModules from '@/shared/CoreModules'; const NotFoundPage = () => { return ( { p={3} > - The page you were looking for doesn't exist. + {`The page you were looking for doesn't exist.`} You may have mistyped the address or the page may have moved. 
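The rename that follows (Organization.tsx → Organisation.tsx) is part of a wider standardisation on the British spelling across this diff: the drawer link above now points at `/organisation`, Redux state is read from `state.organisation`, and API calls target `${VITE_API_URL}/organisation/`. A minimal sketch of the resulting pattern is below; the component name and list markup are illustrative only, but the selector key, service import, and endpoint path follow the hunks in this diff.

```tsx
// Illustrative sketch only (component name and markup are not from the codebase).
// Selector key, service import, and endpoint path follow the renamed pattern in this diff:
// state.organisation + OrganisationDataService + /organisation/.
import React, { useEffect } from 'react';
import CoreModules from '@/shared/CoreModules';
import { OrganisationDataService } from '@/api/OrganisationService';

const OrganisationList = () => {
  const dispatch = CoreModules.useAppDispatch();
  // Field name (including its existing spelling) is taken verbatim from the hunk below.
  const organisations: any = CoreModules.useAppSelector(
    (state) => state.organisation.oraganizationData,
  );

  useEffect(() => {
    // Endpoint is now /organisation/ (previously /organization/).
    dispatch(OrganisationDataService(`${import.meta.env.VITE_API_URL}/organisation/`));
  }, []);

  return (
    <ul>
      {organisations?.map((org: any) => (
        <li key={org.name}>{org.name}</li>
      ))}
    </ul>
  );
};

export default OrganisationList;
```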
diff --git a/src/frontend/src/views/Organization.tsx b/src/frontend/src/views/Organisation.tsx similarity index 91% rename from src/frontend/src/views/Organization.tsx rename to src/frontend/src/views/Organisation.tsx index 24aa6ed8fe..4507371b22 100644 --- a/src/frontend/src/views/Organization.tsx +++ b/src/frontend/src/views/Organisation.tsx @@ -1,10 +1,10 @@ import React, { useEffect, useState } from 'react'; -import CoreModules from '../shared/CoreModules'; -import AssetModules from '../shared/AssetModules'; -import environment from '../environment'; -import { OrganizationDataService } from '../api/OrganizationService'; +import CoreModules from '@/shared/CoreModules'; +import AssetModules from '@/shared/AssetModules'; +import environment from '@/environment'; +import { OrganisationDataService } from '@/api/OrganisationService'; -const Organization = () => { +const Organisation = () => { const cardStyle = { padding: 2, display: 'flex', @@ -25,14 +25,14 @@ const Organization = () => { const dispatch = CoreModules.useAppDispatch(); - const oraganizationData: any = CoreModules.useAppSelector((state) => state.organization.oraganizationData); + const oraganizationData: any = CoreModules.useAppSelector((state) => state.organisation.oraganizationData); console.log(oraganizationData, 'oraganizationData'); const filteredCardData = oraganizationData?.filter((data) => data.name.toLowerCase().includes(searchKeyword.toLowerCase()), ); useEffect(() => { - dispatch(OrganizationDataService(`${import.meta.env.VITE_API_URL}/organization/`)); + dispatch(OrganisationDataService(`${import.meta.env.VITE_API_URL}/organisation/`)); }, []); return ( @@ -48,7 +48,7 @@ const Organization = () => { >

MANAGE ORGANIZATIONS

- + { ); }; -export default Organization; +export default Organisation; diff --git a/src/frontend/src/views/ProjectDetails.jsx b/src/frontend/src/views/ProjectDetails.jsx index dde433ab7b..080e7dd062 100755 --- a/src/frontend/src/views/ProjectDetails.jsx +++ b/src/frontend/src/views/ProjectDetails.jsx @@ -1,42 +1,42 @@ import React, { useEffect, useRef, useState } from 'react'; import '../styles/home.scss'; -import WindowDimension from '../hooks/WindowDimension'; -import MapDescriptionComponents from '../components/MapDescriptionComponents'; -import ActivitiesPanel from '../components/ActivitiesPanel'; -import OpenLayersMap from '../components/OpenLayersMap'; -import environment from '../environment'; -import { ProjectById } from '../api/Project'; -import { ProjectActions } from '../store/slices/ProjectSlice'; -import CustomizedSnackbar from '../utilities/CustomizedSnackbar'; +import WindowDimension from '@/hooks/WindowDimension'; +import MapDescriptionComponents from '@/components/MapDescriptionComponents'; +import ActivitiesPanel from '@/components/ActivitiesPanel'; +import OpenLayersMap from '@/components/OpenLayersMap'; +import environment from '@/environment'; +import { ProjectById } from '@/api/Project'; +import { ProjectActions } from '@/store/slices/ProjectSlice'; +import CustomizedSnackbar from '@/utilities/CustomizedSnackbar'; import { defaults } from 'ol/control/defaults'; -import OnScroll from '../hooks/OnScroll'; +import OnScroll from '@/hooks/OnScroll'; import { Tile as TileLayer } from 'ol/layer.js'; import { OSM } from 'ol/source.js'; import VectorLayer from 'ol/layer/Vector'; import VectorSource from 'ol/source/Vector'; -import TasksLayer from '../components/TasksLayer'; +import TasksLayer from '@/components/TasksLayer'; import Map from 'ol/Map'; import View from 'ol/View'; -import { HomeActions } from '../store/slices/HomeSlice'; -import CoreModules from '../shared/CoreModules'; -import AssetModules from '../shared/AssetModules'; +import { HomeActions } from '@/store/slices/HomeSlice'; +import CoreModules from '@/shared/CoreModules'; +import AssetModules from '@/shared/AssetModules'; import GeoJSON from 'ol/format/GeoJSON'; -import FmtmLogo from '../assets/images/hotLog.png'; +import FmtmLogo from '@/assets/images/hotLog.png'; -import GenerateBasemap from '../components/GenerateBasemap'; -import { ProjectBuildingGeojsonService } from '../api/SubmissionService'; +import GenerateBasemap from '@/components/GenerateBasemap'; +import { ProjectBuildingGeojsonService } from '@/api/SubmissionService'; import { get } from 'ol/proj'; -import { buildingStyle, basicGeojsonTemplate } from '../utilities/mapUtils'; -import MapLegends from '../components/MapLegends'; -import TaskSectionPopup from '../components/ProjectDetails/TaskSectionPopup'; -import DialogTaskActions from '../components/DialogTaskActions'; -import QrcodeComponent from '../components/QrcodeComponent'; -import MobileFooter from '../components/ProjectDetails/MobileFooter'; -import MobileActivitiesContents from '../components/ProjectDetails/MobileActivitiesContents'; -import BottomSheet from '../components/common/BottomSheet'; -import MobileProjectInfoContent from '../components/ProjectDetails/MobileProjectInfoContent'; +import { buildingStyle, basicGeojsonTemplate } from '@/utilities/mapUtils'; +import MapLegends from '@/components/MapLegends'; +import TaskSectionPopup from '@/components/ProjectDetails/TaskSectionPopup'; +import DialogTaskActions from '@/components/DialogTaskActions'; +import QrcodeComponent from 
'@/components/QrcodeComponent'; +import MobileFooter from '@/components/ProjectDetails/MobileFooter'; +import MobileActivitiesContents from '@/components/ProjectDetails/MobileActivitiesContents'; +import BottomSheet from '@/components/common/BottomSheet'; +import MobileProjectInfoContent from '@/components/ProjectDetails/MobileProjectInfoContent'; import { useNavigate } from 'react-router-dom'; -import ProjectOptions from '../components/ProjectDetails/ProjectOptions'; +import ProjectOptions from '@/components/ProjectDetails/ProjectOptions'; const Home = () => { const dispatch = CoreModules.useAppDispatch(); diff --git a/src/frontend/src/views/ProjectDetailsV2.tsx b/src/frontend/src/views/ProjectDetailsV2.tsx index cbb0ffda7c..7e16daafc2 100644 --- a/src/frontend/src/views/ProjectDetailsV2.tsx +++ b/src/frontend/src/views/ProjectDetailsV2.tsx @@ -1,46 +1,46 @@ import React, { useEffect, useState } from 'react'; import '../../node_modules/ol/ol.css'; import '../styles/home.scss'; -import WindowDimension from '../hooks/WindowDimension'; -import MapDescriptionComponents from '../components/MapDescriptionComponents'; -import ActivitiesPanel from '../components/ProjectDetailsV2/ActivitiesPanel'; -import environment from '../environment'; -import { ProjectById, GetProjectDashboard } from '../api/Project'; -import { ProjectActions } from '../store/slices/ProjectSlice'; -import CustomizedSnackbar from '../utilities/CustomizedSnackbar'; -import OnScroll from '../hooks/OnScroll'; -import { HomeActions } from '../store/slices/HomeSlice'; -import CoreModules from '../shared/CoreModules'; -import AssetModules from '../shared/AssetModules'; -import FmtmLogo from '../assets/images/hotLog.png'; -import GenerateBasemap from '../components/GenerateBasemap'; -import { ProjectBuildingGeojsonService } from '../api/SubmissionService'; -import TaskSectionPopup from '../components/ProjectDetailsV2/TaskSectionPopup'; -import DialogTaskActions from '../components/DialogTaskActions'; -import QrcodeComponent from '../components/QrcodeComponent'; -import MobileFooter from '../components/ProjectDetailsV2/MobileFooter'; -import MobileActivitiesContents from '../components/ProjectDetailsV2/MobileActivitiesContents'; -import BottomSheet from '../components/common/BottomSheet'; -import MobileProjectInfoContent from '../components/ProjectDetailsV2/MobileProjectInfoContent'; +import WindowDimension from '@/hooks/WindowDimension'; +import MapDescriptionComponents from '@/components/MapDescriptionComponents'; +import ActivitiesPanel from '@/components/ProjectDetailsV2/ActivitiesPanel'; +import environment from '@/environment'; +import { ProjectById, GetProjectDashboard } from '@/api/Project'; +import { ProjectActions } from '@/store/slices/ProjectSlice'; +import CustomizedSnackbar from '@/utilities/CustomizedSnackbar'; +import OnScroll from '@/hooks/OnScroll'; +import { HomeActions } from '@/store/slices/HomeSlice'; +import CoreModules from '@/shared/CoreModules'; +import AssetModules from '@/shared/AssetModules'; +import FmtmLogo from '@/assets/images/hotLog.png'; +import GenerateBasemap from '@/components/GenerateBasemap'; +import { ProjectBuildingGeojsonService } from '@/api/SubmissionService'; +import TaskSectionPopup from '@/components/ProjectDetailsV2/TaskSectionPopup'; +import DialogTaskActions from '@/components/DialogTaskActions'; +import QrcodeComponent from '@/components/QrcodeComponent'; +import MobileFooter from '@/components/ProjectDetailsV2/MobileFooter'; +import MobileActivitiesContents from 
'@/components/ProjectDetailsV2/MobileActivitiesContents'; +import BottomSheet from '@/components/common/BottomSheet'; +import MobileProjectInfoContent from '@/components/ProjectDetailsV2/MobileProjectInfoContent'; import { useNavigate } from 'react-router-dom'; -import ProjectOptions from '../components/ProjectDetails/ProjectOptions'; -import { MapContainer as MapComponent, useOLMap } from '../components/MapComponent/OpenLayersComponent'; -import LayerSwitcherControl from '../components/MapComponent/OpenLayersComponent/LayerSwitcher/index'; -import MapControlComponent from '../components/ProjectDetailsV2/MapControlComponent'; -import { VectorLayer } from '../components/MapComponent/OpenLayersComponent/Layers'; -import { geojsonObjectModel } from '../constants/geojsonObjectModal'; -import { basicGeojsonTemplate } from '../utilities/mapUtils'; -import getTaskStatusStyle from '../utilfunctions/getTaskStatusStyle'; -import { defaultStyles } from '../components/MapComponent/OpenLayersComponent/helpers/styleUtils'; -import MapLegends from '../components/MapLegends'; -import Accordion from '../components/common/Accordion'; +import ProjectOptions from '@/components/ProjectDetails/ProjectOptions'; +import { MapContainer as MapComponent, useOLMap } from '@/components/MapComponent/OpenLayersComponent'; +import LayerSwitcherControl from '@/components/MapComponent/OpenLayersComponent/LayerSwitcher/index'; +import MapControlComponent from '@/components/ProjectDetailsV2/MapControlComponent'; +import { VectorLayer } from '@/components/MapComponent/OpenLayersComponent/Layers'; +import { geojsonObjectModel } from '@/constants/geojsonObjectModal'; +import { basicGeojsonTemplate } from '@/utilities/mapUtils'; +import getTaskStatusStyle from '@/utilfunctions/getTaskStatusStyle'; +import { defaultStyles } from '@/components/MapComponent/OpenLayersComponent/helpers/styleUtils'; +import MapLegends from '@/components/MapLegends'; +import Accordion from '@/components/common/Accordion'; import { Geolocation } from '@capacitor/geolocation'; import { Icon, Style } from 'ol/style'; import { Motion } from '@capacitor/motion'; -import locationArc from '../assets/images/locationArc.png'; -import { CommonActions } from '../store/slices/CommonSlice'; -import Button from '../components/common/Button'; -import ProjectInfo from '../components/ProjectDetailsV2/ProjectInfo'; +import locationArc from '@/assets/images/locationArc.png'; +import { CommonActions } from '@/store/slices/CommonSlice'; +import Button from '@/components/common/Button'; +import ProjectInfo from '@/components/ProjectDetailsV2/ProjectInfo'; const Home = () => { const dispatch = CoreModules.useAppDispatch(); diff --git a/src/frontend/src/views/ProjectInfo.tsx b/src/frontend/src/views/ProjectInfo.tsx index d87ea19d51..63c4174757 100644 --- a/src/frontend/src/views/ProjectInfo.tsx +++ b/src/frontend/src/views/ProjectInfo.tsx @@ -1,9 +1,9 @@ import React, { useEffect, useState } from 'react'; -import CoreModules from '../shared/CoreModules'; -import ProjectInfoSidebar from '../components/ProjectInfo/ProjectInfoSidebar'; -import ProjectInfomap from '../components/ProjectInfo/ProjectInfomap'; -import environment from '../environment'; -import { ProjectActions } from '../store/slices/ProjectSlice'; +import CoreModules from '@/shared/CoreModules'; +import ProjectInfoSidebar from '@/components/ProjectInfo/ProjectInfoSidebar'; +import ProjectInfomap from '@/components/ProjectInfo/ProjectInfomap'; +import environment from '@/environment'; +import { ProjectActions } 
from '@/store/slices/ProjectSlice'; import { ConvertXMLToJOSM, @@ -11,11 +11,11 @@ import { fetchInfoTask, getDownloadProjectSubmission, getDownloadProjectSubmissionJson, -} from '../api/task'; -import AssetModules from '../shared/AssetModules'; -import { ProjectById } from '../api/Project'; -import ProjectInfoCountCard from '../components/ProjectInfo/ProjectInfoCountCard'; -import { CommonActions } from '../store/slices/CommonSlice'; +} from '@/api/task'; +import AssetModules from '@/shared/AssetModules'; +import { ProjectById } from '@/api/Project'; +import ProjectInfoCountCard from '@/components/ProjectInfo/ProjectInfoCountCard'; +import { CommonActions } from '@/store/slices/CommonSlice'; const boxStyles = { animation: 'blink 1s infinite', diff --git a/src/frontend/src/views/SubmissionDetails.tsx b/src/frontend/src/views/SubmissionDetails.tsx index a6af3890fc..85c82e0cb9 100644 --- a/src/frontend/src/views/SubmissionDetails.tsx +++ b/src/frontend/src/views/SubmissionDetails.tsx @@ -1,7 +1,7 @@ -import CoreModules from '../shared/CoreModules.js'; +import CoreModules from '@/shared/CoreModules.js'; import React, { useEffect } from 'react'; -import environment from '../environment'; -import { SubmissionService } from '../api/Submission'; +import environment from '@/environment'; +import { SubmissionService } from '@/api/Submission'; const SubmissionDetails = () => { const dispatch = CoreModules.useAppDispatch(); diff --git a/src/frontend/src/views/Submissions.tsx b/src/frontend/src/views/Submissions.tsx index 8adb588691..6b69859902 100755 --- a/src/frontend/src/views/Submissions.tsx +++ b/src/frontend/src/views/Submissions.tsx @@ -1,13 +1,13 @@ import React, { useEffect } from 'react'; // import '../styles/home.css' -import CoreModules from '../shared/CoreModules'; +import CoreModules from '@/shared/CoreModules'; // import { useLocation, useNavigate } from 'react-router-dom'; -import Avatar from '../assets/images/avatar.png'; -import SubmissionMap from '../components/SubmissionMap/SubmissionMap'; -import environment from '../environment'; -import { ProjectBuildingGeojsonService, ProjectSubmissionService } from '../api/SubmissionService'; -import { ProjectActions } from '../store/slices/ProjectSlice'; -import { ProjectById } from '../api/Project'; +import Avatar from '@/assets/images/avatar.png'; +import SubmissionMap from '@/components/SubmissionMap/SubmissionMap'; +import environment from '@/environment'; +import { ProjectBuildingGeojsonService, ProjectSubmissionService } from '@/api/SubmissionService'; +import { ProjectActions } from '@/store/slices/ProjectSlice'; +import { ProjectById } from '@/api/Project'; const Submissions = () => { const dispatch = CoreModules.useAppDispatch(); @@ -100,6 +100,7 @@ const Submissions = () => { const formattedDate = date.toLocaleDateString('en-US', dateOptions); return ( { const formattedDate = date.toLocaleDateString('en-US', dateOptions); return ( { return ( diff --git a/src/frontend/tests/App.test.tsx b/src/frontend/tests/App.test.tsx index f3c06e18ff..c4a646e076 100644 --- a/src/frontend/tests/App.test.tsx +++ b/src/frontend/tests/App.test.tsx @@ -1,8 +1,8 @@ import React from 'react'; import { screen } from '@testing-library/react'; -import MainView from '../src/views/MainView'; -import { store } from '../src/store/Store'; -import { renderWithRouter } from '../src/utilfunctions/testUtils'; +import MainView from '@/views/MainView'; +import { store } from '@/store/Store'; +import { renderWithRouter } from '@/utilfunctions/testUtils'; import { 
Provider } from 'react-redux';
 import { expect, test, it, describe } from 'vitest';
diff --git a/src/frontend/tests/CreateProject.test.tsx b/src/frontend/tests/CreateProject.test.tsx
index 24f928ea9b..92928b3820 100644
--- a/src/frontend/tests/CreateProject.test.tsx
+++ b/src/frontend/tests/CreateProject.test.tsx
@@ -1,8 +1,8 @@
 import React from 'react';
-import MainView from '../src/views/MainView';
+import MainView from '@/views/MainView';
 import { Provider } from 'react-redux';
-import { store } from '../src/store/Store';
-import { renderWithRouter } from '../src/utilfunctions/testUtils';
+import { store } from '@/store/Store';
+import { renderWithRouter } from '@/utilfunctions/testUtils';
 import { expect, it, describe } from 'vitest';

 describe('Frontend Application Running', () => {
diff --git a/src/frontend/tsconfig.json b/src/frontend/tsconfig.json
index 42eacebc78..66ee59f81c 100644
--- a/src/frontend/tsconfig.json
+++ b/src/frontend/tsconfig.json
@@ -18,7 +18,10 @@
     "noEmit": true,
     // "jsx": "react-jsx",
     "jsx": "react",
-    "baseUrl": "./src",
+    "baseUrl": ".",
+    "paths": {
+      "@/*": ["src/*"]
+    },
     "noImplicitAny": false, //FIXME: Change This "true" to "false" To Integrate Types Instead Of "Any" Types.
     "strictNullChecks": true,
     "strictFunctionTypes": true,
diff --git a/src/frontend/vite.config.ts b/src/frontend/vite.config.ts
index a1322b69dd..3513f2a70f 100644
--- a/src/frontend/vite.config.ts
+++ b/src/frontend/vite.config.ts
@@ -1,4 +1,5 @@
 ///
+import path from 'path';
 import { defineConfig } from 'vite';
 import react from '@vitejs/plugin-react';
 import { VitePWA } from 'vite-plugin-pwa';
@@ -13,6 +14,11 @@ export default defineConfig({
       usePolling: true,
     },
   },
+  resolve: {
+    alias: {
+      '@': path.resolve(__dirname, 'src'),
+    },
+  },
   test: {
     environment: 'jsdom',
     setupFiles: './setupTests.ts',
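These last two hunks are what let every `@/...` import above resolve: `tsconfig.json` teaches the TypeScript compiler and editor tooling about the alias, while `vite.config.ts` teaches the bundler, and Vitest picks up the same alias because its `test` block lives in that same config file. A condensed view, assuming the config files sit under `src/frontend/` as in this diff:

```ts
// vite.config.ts (condensed): '@/x' resolves to <project>/src/x at dev, build and test time.
import path from 'path';
import { defineConfig } from 'vite';

export default defineConfig({
  resolve: {
    alias: {
      '@': path.resolve(__dirname, 'src'),
    },
  },
});

// tsconfig.json needs the matching compiler-side mapping, since tsc and the IDE
// do not read the Vite alias:
//
//   "baseUrl": ".",
//   "paths": { "@/*": ["src/*"] }
//
// With both in place, `import MainView from '@/views/MainView'` (used in the
// tests above) type-checks and bundles against src/views/MainView.
```

Keeping the two definitions in sync matters: Vite does not consult `tsconfig.json` `paths` on its own, and TypeScript never rewrites import specifiers, so dropping either half breaks either the build or the type-check.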