From ba8400988faffbb508f6a91c652c8dae641c0a15 Mon Sep 17 00:00:00 2001 From: spwoodcock Date: Thu, 23 Nov 2023 09:36:10 +0000 Subject: [PATCH 1/9] ci: set odk,proxy,release images to multi_arch --- .github/workflows/build_odk_imgs.yml | 2 ++ .github/workflows/build_proxy_imgs.yml | 5 +++++ .github/workflows/tag_build.yml | 1 + 3 files changed, 8 insertions(+) diff --git a/.github/workflows/build_odk_imgs.yml b/.github/workflows/build_odk_imgs.yml index 4895ef736b..aa44163993 100644 --- a/.github/workflows/build_odk_imgs.yml +++ b/.github/workflows/build_odk_imgs.yml @@ -21,6 +21,7 @@ jobs: "ghcr.io/${{ github.repository }}/odkcentral:latest" extra_build_args: | ODK_CENTRAL_TAG=${{ vars.ODK_CENTRAL_TAG }} + multi_arch: true build-odkcentral-ui: uses: hotosm/gh-workflows/.github/workflows/image_build.yml@1.4.0 @@ -31,3 +32,4 @@ jobs: "ghcr.io/${{ github.repository }}/odkcentral-ui:latest" extra_build_args: | ODK_CENTRAL_TAG=${{ vars.ODK_CENTRAL_TAG }} + multi_arch: true diff --git a/.github/workflows/build_proxy_imgs.yml b/.github/workflows/build_proxy_imgs.yml index 6022c1fcb8..2038262f13 100644 --- a/.github/workflows/build_proxy_imgs.yml +++ b/.github/workflows/build_proxy_imgs.yml @@ -18,6 +18,7 @@ jobs: "ghcr.io/${{ github.repository }}/proxy:certs-init-main" extra_build_args: | NGINX_TAG=${{ vars.NGINX_TAG }} + multi_arch: true build-cert-init-dev: uses: hotosm/gh-workflows/.github/workflows/image_build.yml@1.4.0 @@ -29,6 +30,7 @@ jobs: "ghcr.io/${{ github.repository }}/proxy:certs-init-staging" extra_build_args: | NGINX_TAG=${{ vars.NGINX_TAG }} + multi_arch: true build-proxy-main: uses: hotosm/gh-workflows/.github/workflows/image_build.yml@1.4.0 @@ -39,6 +41,7 @@ jobs: "ghcr.io/${{ github.repository }}/proxy:main" extra_build_args: | NGINX_TAG=${{ vars.NGINX_TAG }} + multi_arch: true build-proxy-main-plus-script: uses: hotosm/gh-workflows/.github/workflows/image_build.yml@1.4.0 @@ -49,6 +52,7 @@ jobs: "ghcr.io/${{ github.repository }}/proxy:main-plus-script" extra_build_args: | NGINX_TAG=${{ vars.NGINX_TAG }} + multi_arch: true build-proxy-dev: uses: hotosm/gh-workflows/.github/workflows/image_build.yml@1.4.0 @@ -60,3 +64,4 @@ jobs: "ghcr.io/${{ github.repository }}/proxy:staging" extra_build_args: | NGINX_TAG=${{ vars.NGINX_TAG }} + multi_arch: true diff --git a/.github/workflows/tag_build.yml b/.github/workflows/tag_build.yml index fec9575652..d486044ac2 100644 --- a/.github/workflows/tag_build.yml +++ b/.github/workflows/tag_build.yml @@ -14,4 +14,5 @@ jobs: context: src/backend build_target: prod image_name: ghcr.io/${{ github.repository }}/backend + multi_arch: true # Frontend is not built as build variables are required From 6aaf547ada0d2c40bdebb1f1d30f8bc3704bbb1c Mon Sep 17 00:00:00 2001 From: spwoodcock Date: Thu, 23 Nov 2023 09:36:29 +0000 Subject: [PATCH 2/9] docs: extra info for ARM64 deployments --- INSTALL.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/INSTALL.md b/INSTALL.md index 89cff0ff32..d4739f4a3a 100644 --- a/INSTALL.md +++ b/INSTALL.md @@ -274,6 +274,10 @@ colima start Then continue with the FMTM [installation](#software-requirements). +> Note: only tagged backend images are multi-architecture, supporting +> MacOS. The regular images for fast continuous deployment are not: +> `backend:development`, `backend:staging`, `backend:main`. 
+ ### A Note on Docker Desktop While in theory FMTM should run using Docker-Desktop, it has not From b12a9d3b82e330ed12333c5d97d100e84b760a7b Mon Sep 17 00:00:00 2001 From: spwoodcock Date: Thu, 23 Nov 2023 10:39:18 +0000 Subject: [PATCH 3/9] build: development deploy default CENTRAL_DB_HOST --- docker-compose.development.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose.development.yml b/docker-compose.development.yml index 0e975f691a..a79359026c 100644 --- a/docker-compose.development.yml +++ b/docker-compose.development.yml @@ -125,7 +125,7 @@ services: - SYSADMIN_EMAIL=${ODK_CENTRAL_USER} - SYSADMIN_PASSWD=${ODK_CENTRAL_PASSWD} - HTTPS_PORT=443 - - DB_HOST=central-db + - DB_HOST=${CENTRAL_DB_HOST:-central-db} - DB_USER=${CENTRAL_DB_USER} - DB_PASSWORD=${CENTRAL_DB_PASSWORD} - DB_NAME=${CENTRAL_DB_NAME} From d9005da425e0e0c02bffbcc9a63bd25657d92cdf Mon Sep 17 00:00:00 2001 From: spwoodcock Date: Thu, 23 Nov 2023 10:45:08 +0000 Subject: [PATCH 4/9] docs: extra info on db backup / restore --- docs/dev/Production.md | 111 +++++++++++++---------------------------- 1 file changed, 36 insertions(+), 75 deletions(-) diff --git a/docs/dev/Production.md b/docs/dev/Production.md index 5ad9347c1a..644f3783e9 100644 --- a/docs/dev/Production.md +++ b/docs/dev/Production.md @@ -10,72 +10,24 @@ your own cloud server. - Get a cloud server (tested with Ubuntu 22.04). - Set up a domain name, and point the DNS to your cloud server. - SSH into your server. Set up a user with sudo called - fmtm. [this](https://www.digitalocean.com/community/tutorials/initial-server-setup-with-ubuntu-22-04) + svcfmtm. [this](https://www.digitalocean.com/community/tutorials/initial-server-setup-with-ubuntu-22-04) is a good guide for basic server setup including creation of a user. -### Install some stuff it'll need - -#### Docker - -- Install - Docker. [Here](https://www.digitalocean.com/community/tutorials/how-to-install-and-use-docker-on-ubuntu-22-04) - is a good tutorial for that; do steps 1 and 2. At the time of - writing that consisted of: - - sudo apt update - sudo apt install apt-transport-https ca-certificates curl software-properties-common - curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg - echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null - sudo apt update - sudo apt install docker-ce - sudo usermod -aG docker ${USER} - su - ${USER} - -- Now install Docker Compose (as per [this - tutorial](https://www.digitalocean.com/community/tutorials/how-to-install-and-use-docker-compose-on-ubuntu-22-04)). At - the time of writing (the latest version of Docker Compose may - change, so the version number might be out of date, but the rest - shouldn't change) this consisted of: - - mkdir -p ~/.docker/cli-plugins/ - curl -SL https://github.com/docker/compose/releases/download/v2.12.2/docker-compose-linux-x86_64 -o ~/.docker/cli-plugins/docker-compose - sudo chmod +x ~/.docker/cli-plugins/docker-compose - -### Grab the FMTM code - -Clone the Git repo for the fmtm with `git clone https://github.com/hotosm/fmtm.git`. Step into the resulting directory -with `cd fmtm`. 
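To verify that a published image is really multi-architecture before
pulling it, you can inspect its manifest. A minimal sketch, assuming
Docker with the buildx plugin is available (the image path follows the
`ghcr.io/hotosm/fmtm/*` naming used in the workflows above, and
`<release-tag>` is a placeholder for a tagged version):

```bash
# List the platforms bundled into the image manifest.
# A multi-arch image should include both linux/amd64 and linux/arm64.
docker buildx imagetools inspect "ghcr.io/hotosm/fmtm/backend:<release-tag>"

# Alternative without buildx (may need experimental CLI on older Docker):
docker manifest inspect "ghcr.io/hotosm/fmtm/backend:<release-tag>" | grep architecture
```
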
- -### Set up the environment and utilities to launch - -Create the env file interactively with: +### Run the install script ```bash -bash scripts/gen-env.sh -``` - -OR +curl -L https://get.fmtm.dev -o install.sh +bash install.sh +# Alternative URL: https://fmtm.hotosm.org/install.sh -```bash -cp .env.example .env - -# Then edit values manually +# Then follow the prompts ``` -Main variables of note to update: +#### Additional Environment Variables -```dotenv -ODK_CENTRAL_USER=`` -ODK_CENTRAL_PASSWD=`` - -CERT_EMAIL=`` -OSM_CLIENT_ID=`` -OSM_CLIENT_SECRET=`` - -S3_ACCESS_KEY=`` -S3_SECRET_KEY=`` -``` +Variables are set in `.env`. +Some can be updated manually, as required. #### EXTRA_CORS_ORIGINS @@ -113,7 +65,7 @@ stack, and variables should be set accordingly. If you run FMTM with ODK and Minio (S3) included, then the domains will default to: -``` +```dotenv ${FMTM_DOMAIN} --> Frontend api.${FMTM_DOMAIN} --> Backend odk.${FMTM_DOMAIN} --> ODK Central @@ -122,26 +74,17 @@ s3.${FMTM_DOMAIN} --> S3 / Minio These defaults can be overriden with respective environment variables: -``` +```dotenv FMTM_API_DOMAIN FMTM_ODK_DOMAIN FMTM_S3_DOMAIN ``` -### Start the Compose Stack - -Run the production docker-compose config: -`docker compose -f docker-compose.main.yml up -d` - -> Note: The images should be built already on Github. - -With any luck, this will launch the docker container where the project -runs, and you can access the working website from the domain name! - ### Connecting to a remote database - A database may be located on a headless Linux server in the cloud. -- To access the database via GUI tool such as PGAdmin, it is possible using port tunneling. +- To access the database via GUI tool such as PGAdmin, + it is possible using port tunneling. ```bash ssh username@server.domain -N -f -L {local_port}:localhost:{remote_port} @@ -156,10 +99,19 @@ This will map port 5432 on the remote machine to port 5430 on your local machine ```bash GIT_BRANCH=development -backup_filename="fmtm-db-backup-$(date +'%Y-%m-%d').sql.gz" +backup_filename="fmtm-db-${GIT_BRANCH}-$(date +'%Y-%m-%d').sql.gz" echo $backup_filename -docker exec -i -e PGPASSWORD=PASSWORD_HERE fmtm-db-${GIT_BRANCH} pg_dump --verbose --format c -U fmtm fmtm | gzip -9 > "$backup_filename" +docker exec -i -e PGPASSWORD=PASSWORD_HERE \ +fmtm-db-${GIT_BRANCH} \ +pg_dump --verbose --format c -U fmtm fmtm \ +| gzip -9 > "$backup_filename" + +# For ODK +docker exec -i -e PGPASSWORD=PASSWORD_HERE \ +fmtm-central-db-${GIT_BRANCH} \ +pg_dump --verbose --format c -U odk odk | \ +gzip -9 > "$backup_filename" ``` ## Manual Database Restores @@ -167,7 +119,16 @@ docker exec -i -e PGPASSWORD=PASSWORD_HERE fmtm-db-${GIT_BRANCH} pg_dump --verbo ```bash # On a different machine (else change the container name) GIT_BRANCH=development -backup_filename=fmtm-db-backup-XXXX-XX-XX-sql.gz - -cat "$backup_filename" | gunzip | docker exec -i -e PGPASSWORD=NEW_PASSWORD_HERE fmtm-db-${GIT_BRANCH} pg_restore --verbose -U fmtm -d fmtm +backup_filename=fmtm-db-${GIT_BRANCH}-XXXX-XX-XX-sql.gz + +cat "$backup_filename" | gunzip | \ +docker exec -i -e PGPASSWORD=NEW_PASSWORD_HERE \ +fmtm-db-${GIT_BRANCH} \ +pg_restore --verbose -U fmtm -d fmtm + +# For ODK +cat "$backup_filename" | gunzip | \ +docker exec -i -e PGPASSWORD=NEW_PASSWORD_HERE \ +fmtm-central-db-${GIT_BRANCH} \ +pg_restore --verbose -U odk -d odk ``` From bda93a93b6232ceb4312eb31efbf3ff504857734 Mon Sep 17 00:00:00 2001 From: spwoodcock Date: Thu, 23 Nov 2023 12:01:47 +0000 Subject: [PATCH 5/9] docs: 
additional info on backup/restore (tested) --- docs/dev/Production.md | 45 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 45 insertions(+) diff --git a/docs/dev/Production.md b/docs/dev/Production.md index 644f3783e9..21e2ae513a 100644 --- a/docs/dev/Production.md +++ b/docs/dev/Production.md @@ -114,8 +114,16 @@ pg_dump --verbose --format c -U odk odk | \ gzip -9 > "$backup_filename" ``` +> Note: if you are dumping to import into a pre-existing +> database, you should also include the --clean flag. +> +> This will drop the existing tables prior to import, +> and should prevent conflicts. + ## Manual Database Restores +The restore should be as easy as: + ```bash # On a different machine (else change the container name) GIT_BRANCH=development @@ -132,3 +140,40 @@ docker exec -i -e PGPASSWORD=NEW_PASSWORD_HERE \ fmtm-central-db-${GIT_BRANCH} \ pg_restore --verbose -U odk -d odk ``` + +However, in some cases you may have existing data +in the database (i.e. if you started the docker +compose stack & the API ran the migrations!). + +In this case you can import into a fresh db, before +attaching to the FMTM containers: + +```bash +export GIT_BRANCH=development + +# Shut down the running database & delete the data +docker compose -f docker-compose.$GIT_BRANCH.yml down -v + +# First, ensure you have a suitable .env with database vars +# Start the databases only +docker compose -f docker-compose.$GIT_BRANCH.yml up -d fmtm-db central-db + +# (Optional) restore odk central from the backup +backup_filename=fmtm-central-db-${GIT_BRANCH}-XXXX-XX-XX-sql.gz + +cat "$backup_filename" | gunzip | \ +docker exec -i \ +fmtm-central-db-${GIT_BRANCH} \ +pg_restore --verbose -U odk -d odk + +# Restore fmtm from the backup +backup_filename=fmtm-db-${GIT_BRANCH}-XXXX-XX-XX-sql.gz + +cat "$backup_filename" | gunzip | \ +docker exec -i \ +fmtm-db-${GIT_BRANCH} \ +pg_restore --verbose -U fmtm -d fmtm + +# Run the entire docker compose stack +docker compose -f docker-compose.$GIT_BRANCH.yml up -d +``` From 79d6a908ca64553eb674a4348fa1f5160c852233 Mon Sep 17 00:00:00 2001 From: spwoodcock Date: Thu, 23 Nov 2023 12:02:00 +0000 Subject: [PATCH 6/9] build: fix staging docker-compose file with all svc --- docker-compose.staging.yml | 21 +++++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) diff --git a/docker-compose.staging.yml b/docker-compose.staging.yml index 68d53796b9..f917372b54 100644 --- a/docker-compose.staging.yml +++ b/docker-compose.staging.yml @@ -18,13 +18,26 @@ version: "3" volumes: - fmtm_data: + fmtm_frontend: + name: fmtm-frontend-${GIT_BRANCH} fmtm_db_data: + name: fmtm-db-data-${GIT_BRANCH} + fmtm_data: + name: fmtm-s3-data-${GIT_BRANCH} fmtm_logs: + name: fmtm-logs-${GIT_BRANCH} fmtm_images: + name: fmtm-images-${GIT_BRANCH} fmtm_tiles: - central_db_data: + name: fmtm-tiles-${GIT_BRANCH} certs: + name: fmtm-certs-${GIT_BRANCH} + certbot_data: + name: fmtm-certbot-data-${GIT_BRANCH} + central_db_data: + name: fmtm-central-db-data-${GIT_BRANCH} + central_frontend: + name: fmtm-central-frontend-${GIT_BRANCH} networks: fmtm-net: @@ -48,6 +61,10 @@ services: extends: file: docker-compose.development.yml service: central + central-ui: + extends: + file: docker-compose.development.yml + service: central-ui s3: extends: file: docker-compose.development.yml From aa0bd46ba3857e664ab632d9ff93aa9456cb22cc Mon Sep 17 00:00:00 2001 From: Sujan Adhikari <109404840+Sujanadh@users.noreply.github.com> Date: Thu, 23 Nov 2023 19:08:16 +0545 Subject: [PATCH 7/9] fix: solve pagination error 
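
Worth noting for the `--clean` tip added below: with the custom-format
(`--format c`) archives these docs use, the flag takes effect at restore
time rather than dump time. A sketch of the restore variant (container
name and credentials follow the examples below):

```bash
# --clean makes pg_restore drop existing tables before recreating them,
# avoiding conflicts when importing into a non-empty database.
cat "$backup_filename" | gunzip | \
docker exec -i -e PGPASSWORD=PASSWORD_HERE \
fmtm-db-development \
pg_restore --verbose --clean -U fmtm -d fmtm
```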
(#997) * fix : pagination error * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * separate pagination function for reusability * created search api * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * minor fixes --------- Co-authored-by: sujanadh Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- src/backend/app/projects/project_crud.py | 31 ++++++++++-- src/backend/app/projects/project_routes.py | 59 ++++++++++++++++------ 2 files changed, 71 insertions(+), 19 deletions(-) diff --git a/src/backend/app/projects/project_crud.py b/src/backend/app/projects/project_crud.py index 450a5b79d4..9a234e266b 100644 --- a/src/backend/app/projects/project_crud.py +++ b/src/backend/app/projects/project_crud.py @@ -102,6 +102,7 @@ def get_projects( .limit(limit) .all() ) + project_count = db.query(db_models.DbProject).filter(and_(*filters)).count() else: db_projects = ( @@ -111,9 +112,10 @@ def get_projects( .limit(limit) .all() ) + project_count = db.query(db_models.DbProject).count() if db_objects: - return db_projects - return convert_to_app_projects(db_projects) + return project_count, db_projects + return project_count, convert_to_app_projects(db_projects) def get_project_summaries( @@ -139,8 +141,10 @@ def get_project_summaries( # .filter( # db_models.DbProject.author_id == user_id).offset(skip).limit(limit).all() - db_projects = get_projects(db, user_id, skip, limit, True, hashtags, search) - return convert_to_project_summaries(db_projects) + project_count, db_projects = get_projects( + db, user_id, skip, limit, True, hashtags, search + ) + return project_count, convert_to_project_summaries(db_projects) def get_project(db: Session, project_id: int): @@ -2854,3 +2858,22 @@ def get_tasks_count(db: Session, project_id: int): ) task_count = len(db_task.tasks) return task_count + + +def get_pagintaion(page: int, count: int, results_per_page: int, total: int): + total_pages = (count + results_per_page - 1) // results_per_page + hasNext = (page * results_per_page) < count + hasPrev = page > 1 + + pagination = project_schemas.PaginationInfo( + hasNext=hasNext, + hasPrev=hasPrev, + nextNum=page + 1 if hasNext else None, + page=page, + pages=total_pages, + prevNum=page - 1 if hasPrev else None, + perPage=results_per_page, + total=total, + ) + + return pagination diff --git a/src/backend/app/projects/project_routes.py b/src/backend/app/projects/project_routes.py index dd6dbd0e2c..d4ced03ab2 100644 --- a/src/backend/app/projects/project_routes.py +++ b/src/backend/app/projects/project_routes.py @@ -123,7 +123,6 @@ async def read_project_summaries( page: int = Query(1, ge=1), # Default to page 1, must be greater than or equal to 1 results_per_page: int = Query(13, le=100), db: Session = Depends(database.get_db), - search: str = None, ): if hashtags: hashtags = hashtags.split(",") # create list of hashtags @@ -131,33 +130,63 @@ async def read_project_summaries( filter(lambda hashtag: hashtag.startswith("#"), hashtags) ) # filter hashtags that do start with # + total_projects = db.query(db_models.DbProject).count() skip = (page - 1) * results_per_page limit = results_per_page + project_count, projects = project_crud.get_project_summaries( + db, user_id, skip, limit, hashtags, None + ) + + pagination = project_crud.get_pagintaion( + page, project_count, results_per_page, total_projects + ) + project_summaries = [ + 
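
In short: `get_projects` and `get_project_summaries` now return the filtered
project count alongside the results, the page arithmetic moves into a reusable
`get_pagintaion` helper, and search gets a dedicated `/search_projects` route.
A rough smoke test of the new route, assuming a local API on port 8000 with
this router mounted under `/projects` (both assumptions, not shown in the
patch):

```bash
# Query the new search endpoint; "buildings" is an arbitrary search term.
curl -s "http://localhost:8000/projects/search_projects?search=buildings&page=1&results_per_page=13" \
  | jq '.pagination'

# Fields expected in the pagination block, per PaginationInfo:
#   hasNext, hasPrev, nextNum, page, pages, prevNum, perPage, total
```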
project_schemas.ProjectSummary.from_db_project(project) for project in projects + ] + + response = project_schemas.PaginatedProjectSummaries( + results=project_summaries, + pagination=pagination, + ) + return response + + +@router.get( + "/search_projects", response_model=project_schemas.PaginatedProjectSummaries +) +async def search_project( + search: str, + user_id: int = None, + hashtags: str = None, + page: int = Query(1, ge=1), # Default to page 1, must be greater than or equal to 1 + results_per_page: int = Query(13, le=100), + db: Session = Depends(database.get_db), +): + if hashtags: + hashtags = hashtags.split(",") # create list of hashtags + hashtags = list( + filter(lambda hashtag: hashtag.startswith("#"), hashtags) + ) # filter hashtags that do start with # + total_projects = db.query(db_models.DbProject).count() - hasNext = (page * results_per_page) < total_projects - hasPrev = page > 1 - total_pages = (total_projects + results_per_page - 1) // results_per_page + skip = (page - 1) * results_per_page + limit = results_per_page - projects = project_crud.get_project_summaries( + project_count, projects = project_crud.get_project_summaries( db, user_id, skip, limit, hashtags, search ) + + pagination = project_crud.get_pagintaion( + page, project_count, results_per_page, total_projects + ) project_summaries = [ project_schemas.ProjectSummary.from_db_project(project) for project in projects ] response = project_schemas.PaginatedProjectSummaries( results=project_summaries, - pagination=project_schemas.PaginationInfo( - hasNext=hasNext, - hasPrev=hasPrev, - nextNum=page + 1 if hasNext else None, - page=page, - pages=total_pages, - prevNum=page - 1 if hasPrev else None, - perPage=results_per_page, - total=total_projects, - ), + pagination=pagination, ) return response From 3385f1410ba4e97847756e190e9ca71f2e1b2ccb Mon Sep 17 00:00:00 2001 From: Nishit Suwal <81785002+NSUWAL123@users.noreply.github.com> Date: Thu, 23 Nov 2023 19:12:42 +0545 Subject: [PATCH 8/9] fix: post task split type, dimension, num buildings (#1003) * feat (enums): task_split_type enum added * fix (createNewProject): splitTasks - algorithm value replaced with enums & createnewproject definetaskvalidation imported * fix (createNewProject): defineTasksValidation - comparing value replaced with enums * fix (createNewProject): projectDetailsType - types renamed to task_split_type, task_split_dimension & task_num_buildings addeed * fix (createNewProject): splitTasks - adding params conditionally on the basis of splitTasksSelection * fix (createNewroject): condtion replaced with enum --- src/frontend/src/api/CreateProjectService.ts | 3 +- .../createnewproject/SplitTasks.tsx | 98 +++++++++++-------- .../validation/DefineTaskValidation.tsx | 25 +++-- .../src/store/types/ICreateProject.ts | 5 +- src/frontend/src/types/enums.ts | 5 + 5 files changed, 83 insertions(+), 53 deletions(-) create mode 100644 src/frontend/src/types/enums.ts diff --git a/src/frontend/src/api/CreateProjectService.ts b/src/frontend/src/api/CreateProjectService.ts index 3ece9b4030..5b8a24c485 100755 --- a/src/frontend/src/api/CreateProjectService.ts +++ b/src/frontend/src/api/CreateProjectService.ts @@ -7,6 +7,7 @@ import { } from '../models/createproject/createProjectModel'; import { CommonActions } from '../store/slices/CommonSlice'; import { ValidateCustomFormResponse } from 'store/types/ICreateProject'; +import { task_split_type } from '../types/enums'; const CreateProjectService: Function = ( url: string, @@ -26,7 +27,7 @@ const 
CreateProjectService: Function = ( const resp: ProjectDetailsModel = postNewProjectDetails.data; await dispatch(CreateProjectActions.PostProjectDetails(resp)); - if (payload.splitting_algorithm === 'choose_area_as_task') { + if (payload.task_split_type === task_split_type['choose_area_as_task']) { await dispatch( UploadAreaService(`${import.meta.env.VITE_API_URL}/projects/${resp.id}/upload_multi_polygon`, fileUpload), ); diff --git a/src/frontend/src/components/createnewproject/SplitTasks.tsx b/src/frontend/src/components/createnewproject/SplitTasks.tsx index de3060112b..380a90fb42 100644 --- a/src/frontend/src/components/createnewproject/SplitTasks.tsx +++ b/src/frontend/src/components/createnewproject/SplitTasks.tsx @@ -8,7 +8,7 @@ import { useNavigate } from 'react-router-dom'; import { CreateProjectActions } from '../../store/slices/CreateProjectSlice'; import CoreModules from '../../shared/CoreModules'; import useForm from '../../hooks/useForm'; -import DefineTaskValidation from '../../components/createproject/validation/DefineTaskValidation'; +import DefineTaskValidation from '../../components/createnewproject/validation/DefineTaskValidation'; import NewDefineAreaMap from '../../views/NewDefineAreaMap'; import { useAppSelector } from '../../types/reduxTypes'; import { @@ -21,11 +21,16 @@ import environment from '../../environment'; import LoadingBar from '../../components/createproject/LoadingBar'; import { Modal } from '../../components/common/Modal'; import ProgressBar from '../../components/common/ProgressBar'; +import { task_split_type } from '../../types/enums'; const alogrithmList = [ - { name: 'define_tasks', value: 'divide_on_square', label: 'Divide on square' }, - { name: 'define_tasks', value: 'choose_area_as_task', label: 'Choose area as task' }, - { name: 'define_tasks', value: 'task_splitting_algorithm', label: 'Task Splitting Algorithm' }, + { name: 'define_tasks', value: task_split_type['divide_on_square'].toString(), label: 'Divide on square' }, + { name: 'define_tasks', value: task_split_type['choose_area_as_task'].toString(), label: 'Choose area as task' }, + { + name: 'define_tasks', + value: task_split_type['task_splitting_algorithm'].toString(), + label: 'Task Splitting Algorithm', + }, ]; let generateProjectLogIntervalCb: any = null; @@ -75,34 +80,41 @@ const SplitTasks = ({ flag, geojsonFile, setGeojsonFile, customLineUpload, custo .split('#') .map((item) => item.trim()) .filter(Boolean); + + let projectData = { + project_info: { + name: projectDetails.name, + short_description: projectDetails.short_description, + description: projectDetails.description, + }, + author: { + username: userDetails.username, + id: userDetails.id, + }, + odk_central: { + odk_central_url: projectDetails.odk_central_url, + odk_central_user: projectDetails.odk_central_user, + odk_central_password: projectDetails.odk_central_password, + }, + // dont send xform_title if upload custom form is selected + xform_title: projectDetails.formCategorySelection, + task_split_type: splitTasksSelection, + form_ways: projectDetails.formWays, + // "uploaded_form": projectDetails.uploaded_form, + data_extractWays: projectDetails.data_extractWays, + hashtags: arrayHashtag, + organisation_id: projectDetails.organisation_id, + }; + if (splitTasksSelection === task_split_type['task_splitting_algorithm']) { + projectData = { ...projectData, task_num_buildings: projectDetails.average_buildings_per_task }; + } else { + projectData = { ...projectData, task_split_dimension: projectDetails.dimension }; + } + 
console.log(projectData, 'projectData'); dispatch( CreateProjectService( `${import.meta.env.VITE_API_URL}/projects/create_project`, - { - project_info: { - name: projectDetails.name, - short_description: projectDetails.short_description, - description: projectDetails.description, - }, - author: { - username: userDetails.username, - id: userDetails.id, - }, - odk_central: { - odk_central_url: projectDetails.odk_central_url, - odk_central_user: projectDetails.odk_central_user, - odk_central_password: projectDetails.odk_central_password, - }, - // dont send xform_title if upload custom form is selected - xform_title: projectDetails.formCategorySelection, - dimension: projectDetails.dimension, - splitting_algorithm: splitTasksSelection, - form_ways: projectDetails.formWays, - // "uploaded_form": projectDetails.uploaded_form, - data_extractWays: projectDetails.data_extractWays, - hashtags: arrayHashtag, - organisation_id: projectDetails.organisation_id, - }, + projectData, drawnGeojsonFile, customFormFile, customPolygonUpload, @@ -114,7 +126,7 @@ const SplitTasks = ({ flag, geojsonFile, setGeojsonFile, customLineUpload, custo }; useEffect(() => { - if (splitTasksSelection === 'choose_area_as_task') { + if (splitTasksSelection === task_split_type['choose_area_as_task']) { dispatch(CreateProjectActions.SetDividedTaskGeojson(null)); } }, [splitTasksSelection]); @@ -135,14 +147,14 @@ const SplitTasks = ({ flag, geojsonFile, setGeojsonFile, customLineUpload, custo // Create a file object from the Blob const drawnGeojsonFile = new File([blob], 'data.json', { type: 'application/json' }); - if (splitTasksSelection === 'divide_on_square') { + if (splitTasksSelection === task_split_type['divide_on_square']) { dispatch( GetDividedTaskFromGeojson(`${import.meta.env.VITE_API_URL}/projects/preview_tasks/`, { geojson: drawnGeojsonFile, dimension: formValues?.dimension, }), ); - } else if (splitTasksSelection === 'task_splitting_algorithm') { + } else if (splitTasksSelection === task_split_type['task_splitting_algorithm']) { // const a = document.createElement('a'); // a.href = URL.createObjectURL(drawnGeojsonFile); // a.download = 'test.json'; @@ -289,17 +301,17 @@ const SplitTasks = ({ flag, geojsonFile, setGeojsonFile, customLineUpload, custo
{ - handleCustomChange('splitTaskOption', value); - dispatch(CreateProjectActions.SetSplitTasksSelection(value)); + handleCustomChange('task_split_type', parseInt(value)); + dispatch(CreateProjectActions.SetSplitTasksSelection(parseInt(value))); }} - errorMsg={errors.splitTaskOption} + errorMsg={errors.task_split_type} /> - {splitTasksSelection === 'divide_on_square' && ( + {splitTasksSelection === task_split_type['divide_on_square'] && ( <>

Dimension of square in metres:

@@ -315,14 +327,14 @@ const SplitTasks = ({ flag, geojsonFile, setGeojsonFile, customLineUpload, custo )} )} - {splitTasksSelection === 'task_splitting_algorithm' && ( + {splitTasksSelection === task_split_type['task_splitting_algorithm'] && ( <>

Average number of buildings per task:

-                    onChange={(e) => handleCustomChange('average_buildings_per_task', e.target.value)}
@@ -333,8 +345,8 @@ const SplitTasks = ({ flag, geojsonFile, setGeojsonFile, customLineUpload, custo )} )} - {(splitTasksSelection === 'divide_on_square' || - splitTasksSelection === 'task_splitting_algorithm') && ( + {(splitTasksSelection === task_split_type['divide_on_square'] || + splitTasksSelection === task_split_type['task_splitting_algorithm']) && (
)} - {splitTasksSelection && ( + {(splitTasksSelection === task_split_type['divide_on_square'] || + splitTasksSelection === task_split_type['task_splitting_algorithm'] || + splitTasksSelection === task_split_type['choose_area_as_task']) && (

Total number of task: {totalSteps}

diff --git a/src/frontend/src/components/createnewproject/validation/DefineTaskValidation.tsx b/src/frontend/src/components/createnewproject/validation/DefineTaskValidation.tsx index 83027d4c25..37f8191dab 100644 --- a/src/frontend/src/components/createnewproject/validation/DefineTaskValidation.tsx +++ b/src/frontend/src/components/createnewproject/validation/DefineTaskValidation.tsx @@ -1,26 +1,35 @@ +import { task_split_type } from '../../../types/enums'; + interface ProjectValues { - splitting_algorithm: string; + task_split_type: number; dimension: number; + average_buildings_per_task: number; } interface ValidationErrors { - splitting_algorithm?: string; + task_split_type?: string; dimension?: string; + average_buildings_per_task?: string; } function DefineTaskValidation(values: ProjectValues) { const errors: ValidationErrors = {}; - - if (!values?.splitting_algorithm) { - errors.splitting_algorithm = 'Splitting Algorithm is Required.'; + if ( + values?.task_split_type !== task_split_type['choose_area_as_task'] && + values?.task_split_type !== task_split_type['divide_on_square'] && + values?.task_split_type !== task_split_type['task_splitting_algorithm'] + ) { + errors.task_split_type = 'Splitting Algorithm is required.'; } - if (values?.splitting_algorithm === 'Divide on Square' && !values?.dimension) { + if (values?.task_split_type === task_split_type['divide_on_square'] && !values?.dimension) { errors.dimension = 'Dimension is Required.'; } - if (values?.splitting_algorithm === 'Divide on Square' && values?.dimension && values.dimension < 9) { + if (values?.task_split_type === task_split_type['divide_on_square'] && values?.dimension && values.dimension < 9) { errors.dimension = 'Dimension should be greater than 10 or equal to 10.'; } + if (values?.task_split_type === task_split_type['task_splitting_algorithm'] && !values?.average_buildings_per_task) { + errors.average_buildings_per_task = 'Average number of buildings per task is required.'; + } - console.log(errors); return errors; } diff --git a/src/frontend/src/store/types/ICreateProject.ts b/src/frontend/src/store/types/ICreateProject.ts index bc7147c66c..c0c1b10297 100644 --- a/src/frontend/src/store/types/ICreateProject.ts +++ b/src/frontend/src/store/types/ICreateProject.ts @@ -95,7 +95,8 @@ export type EditProjectDetailsTypes = { }; export type ProjectDetailsTypes = { - dimension: number; + task_split_dimension: number; + task_num_buildings: number; no_of_buildings: number; odk_central_user?: string; odk_central_password?: string; @@ -105,7 +106,7 @@ export type ProjectDetailsTypes = { hashtags?: string; short_description?: string; description?: string; - splitting_algorithm?: string; + task_split_type?: number; xform_title?: string; data_extract_options?: string; data_extractWays?: string; diff --git a/src/frontend/src/types/enums.ts b/src/frontend/src/types/enums.ts new file mode 100644 index 0000000000..76bd6aeb7a --- /dev/null +++ b/src/frontend/src/types/enums.ts @@ -0,0 +1,5 @@ +export enum task_split_type { + divide_on_square = 0, + choose_area_as_task = 1, + task_splitting_algorithm = 2, +} From fe0ca83af17ff06c88fe450557ac86c60420f47f Mon Sep 17 00:00:00 2001 From: Nishit Suwal <81785002+NSUWAL123@users.noreply.github.com> Date: Thu, 23 Nov 2023 19:14:18 +0545 Subject: [PATCH 9/9] fix (createNewProject): splitTasks - PROCEED btn removed from the popup, text replaced (#1004) --- .../src/components/createnewproject/SplitTasks.tsx | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git 
a/src/frontend/src/components/createnewproject/SplitTasks.tsx b/src/frontend/src/components/createnewproject/SplitTasks.tsx index 380a90fb42..f04d9f1f12 100644 --- a/src/frontend/src/components/createnewproject/SplitTasks.tsx +++ b/src/frontend/src/components/createnewproject/SplitTasks.tsx @@ -264,17 +264,9 @@ const SplitTasks = ({ flag, geojsonFile, setGeojsonFile, customLineUpload, custo

- You may click Proceed button whilst the process runs in the background. Click Cancel to terminate the - process. + Please stay on this page until the process is complete. Your changes might be lost if you cancel the + pop-up.

-
- {/* */} - -
            }
            open={toggleStatus}