diff --git a/.env.example b/.env.example index 7eba10db47..359ea6237c 100644 --- a/.env.example +++ b/.env.example @@ -18,7 +18,7 @@ API_PREFIX=${API_PREFIX} OSM_CLIENT_ID=${OSM_CLIENT_ID} OSM_CLIENT_SECRET=${OSM_CLIENT_SECRET} OSM_URL=${OSM_URL:-"https://www.openstreetmap.org"} -OSM_SCOPE=${OSM_SCOPE:-["read_prefs", "send_messages"]} +OSM_SCOPE=${OSM_SCOPE:-'["read_prefs", "send_messages"]'} OSM_LOGIN_REDIRECT_URI="http${FMTM_DOMAIN:+s}://${FMTM_DOMAIN:-127.0.0.1:7051}/osmauth/" OSM_SECRET_KEY=${OSM_SECRET_KEY} diff --git a/Justfile b/Justfile index f38977bbd6..d8f5a16171 100644 --- a/Justfile +++ b/Justfile @@ -16,6 +16,8 @@ # along with FMTM. If not, see . # +set dotenv-load + mod start 'contrib/just/start/Justfile' mod stop 'contrib/just/stop/Justfile' mod build 'contrib/just/build/Justfile' diff --git a/contrib/just/start/Justfile b/contrib/just/start/Justfile index e3574dffc0..f84169bb1d 100644 --- a/contrib/just/start/Justfile +++ b/contrib/just/start/Justfile @@ -78,7 +78,7 @@ tunnel: docker compose \ -f docker-compose.yml \ -f contrib/tunnel/docker-compose.yml \ - up -d --wait + up --wait # Workaround to until PR merged: # https://github.com/cloudflare/cloudflared/pull/1135 @@ -93,18 +93,14 @@ tunnel: just --unstable dotenv update "EXTRA_CORS_ORIGINS" "${fmtm_url}" just --unstable dotenv update "S3_ENDPOINT" "${s3_url}" - # Restart the API and UI with environment variables set - API_URL="${api_url}" docker compose \ - -f docker-compose.yml \ - -f contrib/tunnel/docker-compose.yml \ - up -d api ui - - # Restart ODK Central with domain override (for form download urls) + # Restart the containers with env vars set + # (API url for frontend, domain for Central form download urls) CENTRAL_DOMAIN_OVERRIDE="$(echo "${odk_url}" | sed 's|^https://||')" \ + API_URL="$(echo "${api_url}" | sed 's|^https://||')" \ docker compose \ -f docker-compose.yml \ -f contrib/tunnel/docker-compose.yml \ - up -d central + up -d api ui central just --unstable start 
_print-tunnel-urls "$fmtm_url" "$api_url" "$odk_url" "$s3_url" diff --git a/contrib/just/test/Justfile b/contrib/just/test/Justfile index 2cb91faeea..6cdb0d2e29 100644 --- a/contrib/just/test/Justfile +++ b/contrib/just/test/Justfile @@ -55,3 +55,37 @@ frontend-interactive: coverage: docker compose run --rm --entrypoint='sh -c' api \ 'coverage run -m pytest && coverage report -m' + +# Load prod data into current database (WARNING: deletes local db data) +[no-cd] +load-prod-data: + #!/usr/bin/env sh + cd {{justfile_directory()}} + + docker compose up --wait + # We cannot have electric using a logical replication slot though + docker compose down electric + + # Get latest db dump filename + docker compose exec --no-TTY s3 mc alias set prod https://s3.fmtm.hotosm.org "" "" + latest_file=$(docker compose exec --no-TTY s3 mc ls prod/fmtm-db-backups/fmtm \ + | awk '{print $NF}' | sort | tail -n 1) + echo "Latest backup file: $latest_file" + + # Copy file to current machine + docker compose exec --no-TTY s3 \ + mc cp prod/fmtm-db-backups/fmtm/"$latest_file" /tmp/"$latest_file" + docker compose cp s3:/tmp/"$latest_file" /tmp/"$latest_file" + + echo "Dropping existing database ${FMTM_DB_NAME} as user ${FMTM_DB_USER}" + docker compose exec --no-TTY -e PGPASSWORD=${FMTM_DB_PASSWORD} ${FMTM_DB_HOST} \ + dropdb --echo --if-exists --force -U ${FMTM_DB_USER} ${FMTM_DB_NAME} + + echo "Creating new database ${FMTM_DB_NAME} as user ${FMTM_DB_USER}" + docker compose exec --no-TTY -e PGPASSWORD=${FMTM_DB_PASSWORD} ${FMTM_DB_HOST} \ + createdb --echo -U ${FMTM_DB_USER} -O ${FMTM_DB_USER} ${FMTM_DB_NAME} + + echo "Loading data into database ${FMTM_DB_NAME} as user ${FMTM_DB_USER}" + gunzip -c /tmp/"$latest_file" | \ + docker compose exec --no-TTY -e PGPASSWORD=${FMTM_DB_PASSWORD} ${FMTM_DB_HOST} \ + pg_restore --verbose -U ${FMTM_DB_USER} -d ${FMTM_DB_NAME} diff --git a/contrib/playwright/docker-compose.yml b/contrib/playwright/docker-compose.yml index 1e5346598f..2670ee449f 100644 --- 
a/contrib/playwright/docker-compose.yml +++ b/contrib/playwright/docker-compose.yml @@ -30,12 +30,6 @@ services: "critical", "--no-access-log", ] - healthcheck: - test: ["CMD", "curl", "-f", "http://localhost:8000/__lbheartbeat__"] - start_period: 60s - interval: 10s - timeout: 5s - retries: 10 ui: # This hostname is used for Playwright test internal networking @@ -50,6 +44,19 @@ services: start_period: 5s timeout: 5s + # ui-mapper: + # # This hostname is used for Playwright test internal networking + # hostname: fmtm.dev.test + # environment: + # VITE_API_URL: "http://fmtm.dev.test:8000" + # command: --port 80 + # healthcheck: + # test: timeout 5s bash -c ':> /dev/tcp/127.0.0.1/80' || exit 1 + # interval: 5s + # retries: 3 + # start_period: 5s + # timeout: 5s + ui-test: image: "mcr.microsoft.com/playwright:${PLAYWRIGHT_TAG:-v1.48.1}" depends_on: diff --git a/contrib/tunnel/docker-compose.yml b/contrib/tunnel/docker-compose.yml index 5ae8742926..3b29b9fb0d 100644 --- a/contrib/tunnel/docker-compose.yml +++ b/contrib/tunnel/docker-compose.yml @@ -21,7 +21,7 @@ networks: services: frontend-tunnel: - image: "docker.io/cloudflare/cloudflared:2024.5.0" + image: "docker.io/cloudflare/cloudflared:2024.10.1" depends_on: proxy: condition: service_healthy @@ -31,7 +31,7 @@ services: command: tunnel --url http://proxy:80 api-tunnel: - image: "docker.io/cloudflare/cloudflared:2024.5.0" + image: "docker.io/cloudflare/cloudflared:2024.10.1" depends_on: api: condition: service_healthy @@ -41,7 +41,7 @@ services: command: tunnel --url http://api:8000 central-tunnel: - image: "docker.io/cloudflare/cloudflared:2024.5.0" + image: "docker.io/cloudflare/cloudflared:2024.10.1" depends_on: central: condition: service_healthy @@ -51,7 +51,7 @@ services: command: tunnel --url http://central:8383 s3-tunnel: - image: "docker.io/cloudflare/cloudflared:2024.5.0" + image: "docker.io/cloudflare/cloudflared:2024.10.1" depends_on: s3: condition: service_healthy diff --git a/docker-compose.yml 
b/docker-compose.yml index 9d503c6934..c758944ee2 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -43,6 +43,8 @@ services: condition: service_started ui: condition: service_started + ui-mapper: + condition: service_started central: condition: service_started required: false @@ -97,6 +99,12 @@ services: networks: - fmtm-net restart: "unless-stopped" + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:8000/__lbheartbeat__"] + start_period: 60s + interval: 10s + timeout: 5s + retries: 10 deploy: replicas: ${API_REPLICAS:-1} resources: @@ -237,7 +245,7 @@ services: image: "postgis/postgis:${POSTGIS_TAG:-14-3.5-alpine}" # Temp workaround until https://github.com/postgis/docker-postgis/issues/216 build: - context: https://github.com/postgis/docker-postgis.git#master:14-3.4/alpine + context: https://github.com/postgis/docker-postgis.git#master:14-3.5/alpine command: -c 'max_connections=300' -c 'wal_level=logical' volumes: - fmtm_db_data:/var/lib/postgresql/data/ @@ -274,7 +282,7 @@ services: # AUTH_JWT_KEY: ${ENCRYPTION_KEY} # AUTH_JWT_AUD: ${FMTM_DOMAIN} ports: - - "7055:3000" + - "7055:7055" networks: - fmtm-net restart: "unless-stopped" diff --git a/nginx/templates/dev/fmtm.conf.template b/nginx/templates/dev/fmtm.conf.template index 1bb5a3a10a..d562d9041b 100644 --- a/nginx/templates/dev/fmtm.conf.template +++ b/nginx/templates/dev/fmtm.conf.template @@ -27,7 +27,7 @@ upstream mapper { # Enable sticky sessions based on an incoming client IP address ip_hash; - server ui-mapper:3000; + server ui-mapper:7055; } server { diff --git a/src/backend/app/central/central_crud.py b/src/backend/app/central/central_crud.py index 47e7af8e65..6bf8916339 100644 --- a/src/backend/app/central/central_crud.py +++ b/src/backend/app/central/central_crud.py @@ -33,7 +33,7 @@ from app.central import central_deps, central_schemas from app.config import settings -from app.db.enums import EntityStatus, HTTPStatus +from app.db.enums import EntityState, HTTPStatus from 
app.db.models import DbXLSForm from app.db.postgis_utils import ( geojson_to_javarosa_geom, @@ -511,7 +511,7 @@ async def feature_geojson_to_entity_dict( properties = { str(key): str(value) for key, value in feature.get("properties", {}).items() } - # Set to TaskStatus enum READY value (0) + # Set to MappingState enum READY value (0) properties["status"] = "0" task_id = properties.get("task_id") @@ -769,7 +769,7 @@ async def update_entity_mapping_status( odk_id: int, entity_uuid: str, label: str, - status: EntityStatus, + status: EntityState, dataset_name: str = "features", ) -> dict: """Update the Entity mapping status. @@ -781,7 +781,7 @@ async def update_entity_mapping_status( odk_id (str): The project ID in ODK Central. entity_uuid (str): The unique entity UUID for ODK Central. label (str): New label, with emoji prepended for status. - status (EntityStatus): New EntityStatus to assign, in string form. + status (EntityState): New EntityState to assign, in string form. dataset_name (str): Override the default dataset / Entity list name 'features'. 
Returns: diff --git a/src/backend/app/central/central_schemas.py b/src/backend/app/central/central_schemas.py index 62faa4f33b..04bce7af2c 100644 --- a/src/backend/app/central/central_schemas.py +++ b/src/backend/app/central/central_schemas.py @@ -26,7 +26,7 @@ from pydantic.functional_validators import field_validator, model_validator from app.config import HttpUrlStr, decrypt_value, encrypt_value -from app.db.enums import EntityStatus +from app.db.enums import EntityState class ODKCentral(BaseModel): @@ -225,7 +225,7 @@ class EntityMappingStatus(EntityOsmID, EntityTaskID): """The status for mapping an Entity/feature.""" updatedAt: Optional[str] = Field(exclude=True) # noqa: N815 - status: Optional[EntityStatus] = None + status: Optional[EntityState] = None @computed_field @property @@ -238,18 +238,18 @@ class EntityMappingStatusIn(BaseModel): """Update the mapping status for an Entity.""" entity_id: str - status: EntityStatus + status: EntityState label: str @field_validator("label", mode="before") @classmethod def append_status_emoji(cls, value: str, info: ValidationInfo) -> str: """Add 🔒 (locked), ✅ (complete) or ❌ (invalid) emojis.""" - status = info.data.get("status", EntityStatus.UNLOCKED.value) + status = info.data.get("status", EntityState.READY.value) emojis = { - str(EntityStatus.LOCKED.value): "🔒", - str(EntityStatus.MAPPED.value): "✅", - str(EntityStatus.BAD.value): "❌", + str(EntityState.OPENED_IN_ODK.value): "🔒", + str(EntityState.SURVEY_SUBMITTED.value): "✅", + str(EntityState.MARKED_BAD.value): "❌", } # Remove any existing emoji at the start of the label @@ -265,6 +265,6 @@ def append_status_emoji(cls, value: str, info: ValidationInfo) -> str: @field_validator("status", mode="after") @classmethod - def integer_status_to_string(cls, value: EntityStatus) -> str: + def integer_status_to_string(cls, value: EntityState) -> str: """Convert integer status to string for ODK Entity data.""" return str(value.value) diff --git a/src/backend/app/db/enums.py 
b/src/backend/app/db/enums.py index 6773bf0323..9b8ea4d31c 100644 --- a/src/backend/app/db/enums.py +++ b/src/backend/app/db/enums.py @@ -19,9 +19,6 @@ from enum import Enum, IntEnum, StrEnum -TILES_SOURCE = ["esri", "bing", "google"] -TILES_FORMATS = ["mbtiles", "sqlitedb", "pmtiles"] - class HTTPStatus(IntEnum): """All HTTP status codes used in endpoints.""" @@ -110,51 +107,61 @@ class MappingLevel(StrEnum, Enum): ADVANCED = "ADVANCED" -class TaskStatus(StrEnum, Enum): - """Available Task Statuses.""" +class TaskEvent(StrEnum, Enum): + """Task events via API. + + `MAP` -- Set to *locked for mapping*, i.e. mapping in progress. + `FINISH` -- Set to *unlocked to validate*, i.e. is mapped. + `VALIDATE` -- Set to *locked for validation*, i.e. validation in progress. + `GOOD` -- Set the state to *unlocked done*. + `BAD` -- Set the state *unlocked to map* again, to be mapped once again. + `SPLIT` -- Set the state *unlocked done* then generate additional + subdivided task areas. + `MERGE` -- Set the state *unlocked done* then generate additional + merged task area. + `ASSIGN` -- For a requester user to assign a task to another user. + Set the state *locked for mapping* passing in the required user id. + Also notify the user they should map the area. + `COMMENT` -- Keep the state the same, but simply add a comment. + """ + + MAP = "MAP" + FINISH = "FINISH" + VALIDATE = "VALIDATE" + GOOD = "GOOD" + BAD = "BAD" + SPLIT = "SPLIT" + MERGE = "MERGE" + ASSIGN = "ASSIGN" + COMMENT = "COMMENT" - READY = "READY" - LOCKED_FOR_MAPPING = "LOCKED_FOR_MAPPING" - MAPPED = "MAPPED" - LOCKED_FOR_VALIDATION = "LOCKED_FOR_VALIDATION" - VALIDATED = "VALIDATED" - INVALIDATED = "INVALIDATED" - BAD = "BAD" # Task cannot be mapped - SPLIT = "SPLIT" # Task has been split - ARCHIVED = "ARCHIVED" # When renew replacement task has been uploaded +class MappingState(StrEnum, Enum): + """State options for tasks in FMTM. 
-class TaskAction(StrEnum, Enum): - """All possible task actions, recorded in task history.""" + NOTE We no longer have states invalidated / bad, and instead rely on the + EntityState.MARKED_BAD buildings to display red on the map. + """ - RELEASED_FOR_MAPPING = "RELEASED_FOR_MAPPING" + UNLOCKED_TO_MAP = "UNLOCKED_TO_MAP" LOCKED_FOR_MAPPING = "LOCKED_FOR_MAPPING" - MARKED_MAPPED = "MARKED_MAPPED" + UNLOCKED_TO_VALIDATE = "UNLOCKED_TO_VALIDATE" LOCKED_FOR_VALIDATION = "LOCKED_FOR_VALIDATION" - VALIDATED = "VALIDATED" - MARKED_INVALID = "MARKED_INVALID" - MARKED_BAD = "MARKED_BAD" # Task cannot be mapped - SPLIT_NEEDED = "SPLIT_NEEDED" # Task needs split - RECREATED = "RECREATED" - COMMENT = "COMMENT" + UNLOCKED_DONE = "UNLOCKED_DONE" -class EntityStatus(IntEnum, Enum): - """Statuses for Entities in ODK. +class EntityState(IntEnum, Enum): + """State options for Entities in ODK. NOTE here we started with int enums and it's hard to migrate. NOTE we will continue to use int values in the form. + NOTE we keep BAD=6 for legacy reasons too. """ - UNLOCKED = 0 - LOCKED = 1 - MAPPED = 2 - BAD = 6 - # Should we also add extra statuses? - # LUMPED - # SPLIT - # VALIDATED - # INVALIDATED + READY = 0 + OPENED_IN_ODK = 1 + SURVEY_SUBMITTED = 2 + MARKED_BAD = 6 class TaskType(StrEnum, Enum): @@ -209,11 +216,14 @@ class CommunityType(StrEnum, Enum): class ReviewStateEnum(StrEnum, Enum): - """Review states of submission.""" + """Review states of submission. + + NOTE that these values must be camelCase to match what ODK Central requires. 
+ """ - HASISSUES = "HASISSUES" - APPROVED = "APPROVED" - REJECTED = "REJECTED" + HASISSUES = "hasIssues" + APPROVED = "approved" + REJECTED = "rejected" class GeometryType(StrEnum, Enum): @@ -241,49 +251,3 @@ class XLSFormType(StrEnum, Enum): # religious = "religious" # landusage = "landusage" # waterways = "waterways" - - -def get_action_for_status_change(task_status: TaskStatus) -> TaskAction: - """Update task action inferred from previous state.""" - match task_status: - case TaskStatus.READY: - return TaskAction.RELEASED_FOR_MAPPING - case TaskStatus.LOCKED_FOR_MAPPING: - return TaskAction.LOCKED_FOR_MAPPING - case TaskStatus.MAPPED: - return TaskAction.MARKED_MAPPED - case TaskStatus.LOCKED_FOR_VALIDATION: - return TaskAction.LOCKED_FOR_VALIDATION - case TaskStatus.VALIDATED: - return TaskAction.VALIDATED - case TaskStatus.BAD: - return TaskAction.MARKED_BAD - case TaskStatus.SPLIT: - return TaskAction.SPLIT_NEEDED - case TaskStatus.INVALIDATED: - return TaskAction.MARKED_INVALID - case _: - return TaskAction.RELEASED_FOR_MAPPING - - -def get_status_for_action(task_action: TaskAction) -> TaskStatus: - """Get the task status inferred from the action.""" - match task_action: - case TaskAction.RELEASED_FOR_MAPPING: - return TaskStatus.READY - case TaskAction.LOCKED_FOR_MAPPING: - return TaskStatus.LOCKED_FOR_MAPPING - case TaskAction.MARKED_MAPPED: - return TaskStatus.MAPPED - case TaskAction.LOCKED_FOR_VALIDATION: - return TaskStatus.LOCKED_FOR_VALIDATION - case TaskAction.VALIDATED: - return TaskStatus.VALIDATED - case TaskAction.MARKED_BAD: - return TaskStatus.BAD - case TaskAction.SPLIT_NEEDED: - return TaskStatus.SPLIT - case TaskAction.MARKED_INVALID: - return TaskStatus.INVALIDATED - case _: - return TaskStatus.READY diff --git a/src/backend/app/db/models.py b/src/backend/app/db/models.py index ce2ea2ca96..6ef90816b9 100644 --- a/src/backend/app/db/models.py +++ b/src/backend/app/db/models.py @@ -22,7 +22,7 @@ """ import json -from datetime import 
datetime, timedelta +from datetime import timedelta from io import BytesIO from re import sub from typing import TYPE_CHECKING, Annotated, Optional, Self @@ -33,7 +33,7 @@ from loguru import logger as log from psycopg import Connection from psycopg.rows import class_row -from pydantic import BaseModel, Field, ValidationInfo +from pydantic import AwareDatetime, BaseModel, Field, ValidationInfo from pydantic.functional_validators import field_validator from app.central.central_schemas import ODKCentralDecrypted @@ -43,16 +43,18 @@ CommunityType, HTTPStatus, MappingLevel, + MappingState, OrganisationType, ProjectPriority, ProjectRole, ProjectStatus, ProjectVisibility, - TaskAction, + TaskEvent, TaskSplitType, UserRole, XLSFormType, ) +from app.db.postgis_utils import timestamp from app.s3 import add_obj_to_bucket, delete_all_objs_under_prefix # Avoid cyclical dependencies when only type checking @@ -69,7 +71,7 @@ ProjectIn, ProjectUpdate, ) - from app.tasks.task_schemas import TaskHistoryIn + from app.tasks.task_schemas import TaskEventIn from app.users.user_schemas import UserIn @@ -151,7 +153,7 @@ class DbUser(BaseModel): tasks_validated: Optional[int] = None tasks_invalidated: Optional[int] = None projects_mapped: Optional[list[int]] = None - registered_at: Optional[datetime] = None + registered_at: Optional[AwareDatetime] = None # Relationships project_roles: Optional[list[DbUserRole]] = None @@ -590,24 +592,26 @@ async def all( return [{"id": form.id, "title": form.title} for form in forms] -class DbTaskHistory(BaseModel): - """Table task_history. +class DbTaskEvent(BaseModel): + """Table task_events. Task events such as locking, marking mapped, and comments. 
""" event_id: UUID - task_id: int - project_id: Optional[int] = None - user_id: Optional[int] = None + task_id: Annotated[Optional[int], Field(gt=0)] = None + event: TaskEvent - action: TaskAction - action_text: Optional[str] = None - action_date: Optional[datetime] = None + project_id: Annotated[Optional[int], Field(gt=0)] = None + user_id: Annotated[Optional[int], Field(gt=0)] = None + comment: Optional[str] = None + created_at: Optional[AwareDatetime] = None # Computed username: Optional[str] = None profile_img: Optional[str] = None + # Computed via database trigger + state: Optional[MappingState] = None @classmethod async def all( @@ -618,7 +622,7 @@ async def all( days: Optional[int] = None, comments: Optional[bool] = None, ) -> Optional[list[Self]]: - """Fetch all task history entries for a project. + """Fetch all task event entries for a project. Args: db (Connection): the database connection. @@ -628,7 +632,7 @@ async def all( comments (bool): show comments rather than events. Returns: - list[DbTaskHistory]: list of task event objects. + list[DbTaskEvent]: list of task event objects. 
""" if project_id and task_id: raise ValueError("Specify either project_id or task_id, not both.") @@ -645,13 +649,13 @@ async def all( filters.append("task_id = %(task_id)s") params["task_id"] = task_id if days is not None: - end_date = datetime.now() - timedelta(days=days) - filters.append("action_date >= %(end_date)s") + end_date = timestamp() - timedelta(days=days) + filters.append("created_at >= %(end_date)s") params["end_date"] = end_date if comments: - filters.append("action = 'COMMENT'") + filters.append("event = 'COMMENT'") else: - filters.append("action != 'COMMENT'") + filters.append("event != 'COMMENT'") filters_joined = " AND ".join(filters) @@ -661,11 +665,11 @@ async def all( u.username, u.profile_img FROM - public.task_history + public.task_events JOIN - users u ON u.id = task_history.user_id + users u ON u.id = task_events.user_id WHERE {filters_joined} - ORDER BY action_date DESC; + ORDER BY created_at DESC; """ async with db.cursor(row_factory=class_row(cls)) as cur: @@ -676,18 +680,19 @@ async def all( async def create( cls, db: Connection, - task_in: "TaskHistoryIn", + event_in: "TaskEventIn", ) -> Self: """Create a new task event.""" - model_dump = dump_and_check_model(task_in) + model_dump = dump_and_check_model(event_in) columns = ", ".join(model_dump.keys()) value_placeholders = ", ".join(f"%({key})s" for key in model_dump.keys()) + # NOTE the project_id need not be passed, as it's extracted from the task async with db.cursor(row_factory=class_row(cls)) as cur: await cur.execute( f""" WITH inserted AS ( - INSERT INTO public.task_history ( + INSERT INTO public.task_events ( event_id, project_id, {columns} @@ -730,7 +735,7 @@ class DbTask(BaseModel): feature_count: Optional[int] = None # Calculated - task_status: Optional[TaskAction] = None + task_state: Optional[MappingState] = None actioned_by_uid: Optional[int] = None actioned_by_username: Optional[str] = None @@ -747,26 +752,27 @@ async def one(cls, db: Connection, task_id: int) -> Self: 
tasks.*, ST_AsGeoJSON(tasks.outline)::jsonb AS outline, COALESCE( - latest_event.action, 'RELEASED_FOR_MAPPING' - ) AS task_status, + latest_event.state, 'UNLOCKED_TO_MAP' + ) AS task_state, COALESCE(latest_event.user_id, NULL) AS actioned_by_uid, COALESCE(latest_event.username, NULL) AS actioned_by_username FROM tasks LEFT JOIN LATERAL ( SELECT - th.action, + th.event, + th.state, th.user_id, u.username FROM - task_history th + task_events th LEFT JOIN users u ON u.id = th.user_id WHERE th.task_id = tasks.id - AND th.action != 'COMMENT' + AND th.event != 'COMMENT' ORDER BY - th.action_date DESC + th.created_at DESC LIMIT 1 ) latest_event ON true WHERE @@ -795,25 +801,26 @@ async def all( SELECT tasks.*, ST_AsGeoJSON(tasks.outline)::jsonb AS outline, - COALESCE(latest_event.action, 'RELEASED_FOR_MAPPING') AS task_status, + COALESCE(latest_event.state, 'UNLOCKED_TO_MAP') AS task_state, COALESCE(latest_event.user_id, NULL) AS actioned_by_uid, COALESCE(latest_event.username, NULL) AS actioned_by_username FROM tasks LEFT JOIN LATERAL ( SELECT - th.action, + th.event, + th.state, th.user_id, u.username FROM - task_history th + task_events th LEFT JOIN users u ON u.id = th.user_id WHERE th.task_id = tasks.id - AND th.action != 'COMMENT' + AND th.event != 'COMMENT' ORDER BY - th.action_date DESC + th.created_at DESC LIMIT 1 ) latest_event ON true WHERE @@ -908,9 +915,9 @@ class DbProject(BaseModel): task_split_dimension: Optional[int] = None task_num_buildings: Optional[int] = None hashtags: Optional[list[str]] = None - due_date: Optional[datetime] = None - updated_at: Optional[datetime] = None - created_at: Optional[datetime] = None + due_date: Optional[AwareDatetime] = None + updated_at: Optional[AwareDatetime] = None + created_at: Optional[AwareDatetime] = None # Relationships tasks: Optional[list[DbTask]] = None @@ -920,7 +927,7 @@ class DbProject(BaseModel): organisation_logo: Optional[str] = None centroid: Optional[dict] = None bbox: Optional[list[float]] = None - 
last_active: Optional[datetime] = None + last_active: Optional[AwareDatetime] = None odk_credentials: Annotated[ Optional["ODKCentralDecrypted"], Field(validate_default=True), @@ -948,21 +955,22 @@ async def one(cls, db: Connection, project_id: int) -> Self: """Get project by ID, including all tasks and other details.""" async with db.cursor(row_factory=class_row(cls)) as cur: sql = """ - WITH latest_event_per_task AS ( + WITH latest_status_per_task AS ( SELECT DISTINCT ON (task_id) th.task_id, - th.action, - th.action_date, + th.event, + th.state, + th.created_at, th.user_id, u.username AS username FROM - task_history th + task_events th LEFT JOIN users u ON u.id = th.user_id WHERE - th.action != 'COMMENT' + th.event != 'COMMENT' ORDER BY - th.task_id, th.action_date DESC + th.task_id, th.created_at DESC ), project_bbox AS ( @@ -983,7 +991,7 @@ async def one(cls, db: Connection, project_id: int) -> Self: ] AS bbox, project_org.name AS organisation_name, project_org.logo AS organisation_logo, - latest_event_per_task.action_date AS last_active, + MAX(latest_status_per_task.created_at)::timestamptz AS last_active, COALESCE( NULLIF(p.odk_central_url, ''), project_org.odk_central_url @@ -1004,16 +1012,16 @@ async def one(cls, db: Connection, project_id: int) -> Self: 'project_task_index', tasks.project_task_index, 'outline', ST_AsGeoJSON(tasks.outline)::jsonb, 'feature_count', tasks.feature_count, - 'task_status', COALESCE( - latest_event_per_task.action, - 'RELEASED_FOR_MAPPING' + 'task_state', COALESCE( + latest_status_per_task.state, + 'UNLOCKED_TO_MAP' ), 'actioned_by_uid', COALESCE( - latest_event_per_task.user_id, + latest_status_per_task.user_id, NULL ), 'actioned_by_username', COALESCE( - latest_event_per_task.username, + latest_status_per_task.username, NULL ) ) @@ -1030,16 +1038,15 @@ async def one(cls, db: Connection, project_id: int) -> Self: tasks ON tasks.project_id = p.id -- Link latest event per task with project tasks LEFT JOIN - latest_event_per_task 
ON - tasks.id = latest_event_per_task.task_id + latest_status_per_task ON + tasks.id = latest_status_per_task.task_id -- Required to get the BBOX object JOIN project_bbox ON project_bbox.bbox IS NOT NULL WHERE p.id = %(project_id)s GROUP BY - p.id, project_org.id, project_bbox.bbox, - latest_event_per_task.action_date; + p.id, project_org.id, project_bbox.bbox; """ # Simpler query without additional metadata @@ -1271,7 +1278,7 @@ async def delete(cls, db: Connection, project_id: int) -> bool: ) await cur.execute( """ - DELETE FROM task_history WHERE project_id = %(project_id)s; + DELETE FROM task_events WHERE project_id = %(project_id)s; """, {"project_id": project_id}, ) @@ -1341,12 +1348,12 @@ async def create( INSERT INTO background_tasks ( id, - {", ".join(columns)} + {columns} ) VALUES ( gen_random_uuid(), - {", ".join(value_placeholders)} + {value_placeholders} ) RETURNING id; """, @@ -1409,7 +1416,7 @@ class DbBasemap(BaseModel): tile_source: Optional[str] = None background_task_id: Optional[UUID] = None status: Optional[BackgroundTaskStatus] = None - created_at: Optional[datetime] = None + created_at: Optional[AwareDatetime] = None # Calculated bbox: Optional[list[float]] = None diff --git a/src/backend/app/projects/project_crud.py b/src/backend/app/projects/project_crud.py index 266b865670..254cb1982f 100644 --- a/src/backend/app/projects/project_crud.py +++ b/src/backend/app/projects/project_crud.py @@ -810,7 +810,7 @@ def generate_project_basemap( # return json2osm(geojson_file) -def get_pagination(page: int, count: int, results_per_page: int, total: int): +async def get_pagination(page: int, count: int, results_per_page: int, total: int): """Pagination result for splash page.""" total_pages = (count + results_per_page - 1) // results_per_page has_next = (page * results_per_page) < count # noqa: N806 @@ -857,27 +857,13 @@ async def get_paginated_projects( total_project_count = await cur.fetchone() total_project_count = total_project_count[0] - pagination 
= get_pagination( + pagination = await get_pagination( page, len(projects), results_per_page, total_project_count ) return {"results": projects, "pagination": pagination} -async def get_dashboard_detail(db: Connection, project: DbProject): - """Get project details for project dashboard.""" - xform = central_crud.get_odk_form(project.odk_credentials) - submission_meta_data = xform.getFullDetails(project.odkid, project.odk_form_id) - - contributors_dict = await get_project_users_plus_contributions(db, project.id) - return { - "total_submission": submission_meta_data.get("submissions", 0), - "last_active": submission_meta_data.get("lastSubmission"), - "total_tasks": len(project.tasks), - "total_contributors": len(contributors_dict), - } - - async def get_project_users_plus_contributions(db: Connection, project_id: int): """Get the users and their contributions for a project. @@ -897,7 +883,7 @@ async def get_project_users_plus_contributions(db: Connection, project_id: int): FROM users u JOIN - task_history th ON u.id = th.user_id + task_events th ON u.id = th.user_id WHERE th.project_id = %(project_id)s GROUP BY u.username diff --git a/src/backend/app/projects/project_routes.py b/src/backend/app/projects/project_routes.py index a0f92520fd..9e16d025d7 100644 --- a/src/backend/app/projects/project_routes.py +++ b/src/backend/app/projects/project_routes.py @@ -53,8 +53,6 @@ from app.config import settings from app.db.database import db_conn from app.db.enums import ( - TILES_FORMATS, - TILES_SOURCE, HTTPStatus, ProjectRole, XLSFormType, @@ -326,24 +324,16 @@ async def download_tiles( ) -@router.get("/{project_id}/tiles-generate") +@router.post("/{project_id}/tiles-generate") async def generate_project_basemap( background_tasks: BackgroundTasks, - project_id: int, db: Annotated[Connection, Depends(db_conn)], project_user: Annotated[ProjectUserDict, Depends(mapper)], - source: str = Query( - ..., description="Select a source for tiles", enum=TILES_SOURCE - ), - format: str 
= Query( - "mbtiles", description="Select an output format", enum=TILES_FORMATS - ), - tms: str = Query( - None, - description="Provide a custom TMS URL, optional", - ), + basemap_in: project_schemas.BasemapGenerate, ): """Returns basemap tiles for a project.""" + project_id = project_user.get("project").id + # Create task in db and return uuid log.debug( "Creating generate_project_basemap background task " @@ -357,24 +347,14 @@ async def generate_project_basemap( ), ) - # # FIXME delete this - # project_crud.generate_project_basemap( - # db, - # project_id, - # background_task_id, - # source, - # format, - # tms - # ) - background_tasks.add_task( project_crud.generate_project_basemap, db, project_id, background_task_id, - source, - format, - tms, + basemap_in.tile_source, + basemap_in.file_format, + basemap_in.tms_url, ) return {"Message": "Tile generation started"} @@ -1006,39 +986,21 @@ async def convert_fgb_to_geojson( @router.get( - "/task-status/{task_id}", + "/task-status/{bg_task_id}", response_model=project_schemas.BackgroundTaskStatus, ) async def get_task_status( - task_id: str, + bg_task_id: str, db: Annotated[Connection, Depends(db_conn)], ): """Get the background task status by passing the task ID.""" try: - return await DbBackgroundTask.one(db, task_id) + return await DbBackgroundTask.one(db, bg_task_id) except KeyError as e: log.warning(str(e)) raise HTTPException(status_code=HTTPStatus.NOT_FOUND, detail=str(e)) from e -@router.get( - "/project_dashboard/{project_id}", response_model=project_schemas.ProjectDashboard -) -async def project_dashboard( - project_user: Annotated[ProjectUserDict, Depends(mapper)], - db: Annotated[Connection, Depends(db_conn)], -): - """Get the project dashboard details.""" - project = project_user.get("project") - details = await project_crud.get_dashboard_detail(db, project) - details["slug"] = project.slug - details["organisation_name"] = project.organisation_name - details["created_at"] = project.created_at - 
details["organisation_logo"] = project.organisation_logo - details["last_active"] = project.last_active - return details - - @router.get("/contributors/{project_id}") async def get_contributors( db: Annotated[Connection, Depends(db_conn)], diff --git a/src/backend/app/projects/project_schemas.py b/src/backend/app/projects/project_schemas.py index f8c3536263..9a45279226 100644 --- a/src/backend/app/projects/project_schemas.py +++ b/src/backend/app/projects/project_schemas.py @@ -19,7 +19,7 @@ from datetime import datetime from pathlib import Path -from typing import Annotated, Optional, Self +from typing import Annotated, Literal, Optional, Self from uuid import UUID from geojson_pydantic import Feature, FeatureCollection, MultiPolygon, Point, Polygon @@ -43,7 +43,6 @@ get_address_from_lat_lon, merge_polygons, polygon_to_centroid, - timestamp, ) @@ -262,38 +261,6 @@ class PaginatedProjectSummaries(BaseModel): pagination: PaginationInfo -class ProjectDashboard(BaseModel): - """Project details dashboard.""" - - slug: str - organisation_name: str - total_tasks: int - created_at: datetime - organisation_logo: Optional[str] = None - total_submissions: Optional[int] = None - total_contributors: Optional[int] = None - last_active: Optional[str | datetime] = None - - @field_serializer("last_active") - def get_last_active(self, last_active: Optional[str | datetime]): - """Date of last activity on project.""" - if last_active is None: - return None - - current_date = timestamp() - time_difference = current_date - last_active - days_difference = time_difference.days - - if days_difference == 0: - return "today" - elif days_difference == 1: - return "yesterday" - elif days_difference < 7: - return f'{days_difference} day{"s" if days_difference > 1 else ""} ago' - else: - return last_active.strftime("%d %b %Y") - - class ProjectUserContributions(BaseModel): """Users for a project, plus contribution count.""" @@ -301,6 +268,17 @@ class ProjectUserContributions(BaseModel): 
contributions: int +class BasemapGenerate(BaseModel): + """Params to generate a new basemap.""" + + tile_source: Annotated[Literal["esri", "bing", "google"], Field(default="esri")] + file_format: Annotated[ + Literal["mbtiles", "sqlitedb", "pmtiles"], + Field(default="mbtiles"), + ] + tms_url: Optional[str] = None + + class BasemapIn(DbBasemap): """Basemap tile creation.""" diff --git a/src/backend/app/submissions/submission_crud.py b/src/backend/app/submissions/submission_crud.py index 60a5c3389d..ab571ce19d 100644 --- a/src/backend/app/submissions/submission_crud.py +++ b/src/backend/app/submissions/submission_crud.py @@ -37,7 +37,8 @@ from app.config import settings from app.db.enums import BackgroundTaskStatus, HTTPStatus from app.db.models import DbBackgroundTask, DbProject, DbSubmissionPhoto -from app.projects import project_deps, project_schemas +from app.db.postgis_utils import timestamp +from app.projects import project_crud, project_deps, project_schemas from app.s3 import add_obj_to_bucket # async def convert_json_to_osm(file_path): @@ -173,7 +174,7 @@ async def get_submissions_by_date( dates = [ date.strftime("%m/%d") for date in end_dates - if datetime.now() - date <= timedelta(days=days) + if timestamp() - date <= timedelta(days=days) ] submission_counts = Counter(sorted(dates)) @@ -370,3 +371,20 @@ async def upload_attachment_to_s3( ), ) return False + + +async def get_dashboard_detail(db: Connection, project: DbProject): + """Get project details for project dashboard.""" + xform = get_odk_form(project.odk_credentials) + submission_meta_data = xform.getFullDetails(project.odkid, project.odk_form_id) + + contributors_dict = await project_crud.get_project_users_plus_contributions( + db, + project.id, + ) + return { + "total_submissions": submission_meta_data.get("submissions", 0), + "last_active": submission_meta_data.get("lastSubmission"), + "total_tasks": len(project.tasks), + "total_contributors": len(contributors_dict), + } diff --git 
a/src/backend/app/submissions/submission_routes.py b/src/backend/app/submissions/submission_routes.py index 4688af715b..d2a65df633 100644 --- a/src/backend/app/submissions/submission_routes.py +++ b/src/backend/app/submissions/submission_routes.py @@ -34,7 +34,7 @@ from app.central import central_crud from app.db import postgis_utils from app.db.database import db_conn -from app.db.enums import HTTPStatus, ReviewStateEnum +from app.db.enums import HTTPStatus from app.db.models import DbBackgroundTask, DbSubmissionPhoto, DbTask from app.projects import project_crud, project_schemas from app.submissions import submission_crud, submission_schemas @@ -335,10 +335,12 @@ async def submission_table( return response -@router.post("/update_review_state") +@router.post( + "/update-review-state", + response_model=submission_schemas.ReviewStateOut, +) async def update_review_state( - instance_id: str, - review_state: ReviewStateEnum, + post_data: submission_schemas.ReviewStateIn, current_user: Annotated[ProjectUserDict, Depends(project_manager)], ): """Updates the review state of a project submission.""" @@ -349,8 +351,8 @@ async def update_review_state( response = odk_project.updateReviewState( project.odkid, project.odk_form_id, - instance_id, - {"reviewState": review_state}, + post_data.instance_id, + {"reviewState": post_data.review_state}, ) return response except Exception as e: @@ -434,20 +436,6 @@ async def conflate_geojson( ) from e -@router.get("/{submission_id}") -async def submission_detail( - submission_id: str, - project_user: Annotated[ProjectUserDict, Depends(mapper)], -) -> dict: - """This api returns the submission detail of individual submission.""" - project = project_user.get("project") - submission_detail = await submission_crud.get_submission_detail( - submission_id, - project, - ) - return submission_detail - - @router.get("/{submission_id}/photos") async def submission_photo( db: Annotated[Connection, Depends(db_conn)], @@ -498,3 +486,35 @@ async def 
submission_photo( status_code=HTTPStatus.INTERNAL_SERVER_ERROR, detail="Failed to get submission photos", ) from e + + +@router.get( + "/{project_id}/dashboard", response_model=submission_schemas.SubmissionDashboard +) +async def project_dashboard( + project_user: Annotated[ProjectUserDict, Depends(mapper)], + db: Annotated[Connection, Depends(db_conn)], +): + """Get the project dashboard details.""" + project = project_user.get("project") + details = await submission_crud.get_dashboard_detail(db, project) + details["slug"] = project.slug + details["organisation_name"] = project.organisation_name + details["created_at"] = project.created_at + details["organisation_logo"] = project.organisation_logo + details["last_active"] = project.last_active + return details + + +@router.get("/{submission_id}") +async def submission_detail( + submission_id: str, + project_user: Annotated[ProjectUserDict, Depends(mapper)], +) -> dict: + """This api returns the submission detail of individual submission.""" + project = project_user.get("project") + submission_detail = await submission_crud.get_submission_detail( + submission_id, + project, + ) + return submission_detail diff --git a/src/backend/app/submissions/submission_schemas.py b/src/backend/app/submissions/submission_schemas.py index 05a9f61bdc..5ab8e068d3 100644 --- a/src/backend/app/submissions/submission_schemas.py +++ b/src/backend/app/submissions/submission_schemas.py @@ -18,9 +18,13 @@ """Pydantic models for data submissions.""" -from typing import List, Optional +from typing import Optional -from pydantic import BaseModel +from pydantic import AwareDatetime, BaseModel +from pydantic.functional_serializers import field_serializer + +from app.db.enums import ReviewStateEnum +from app.db.postgis_utils import timestamp class PaginationInfo(BaseModel): @@ -39,5 +43,57 @@ class PaginationInfo(BaseModel): class PaginatedSubmissions(BaseModel): """Paginated Submissions.""" - results: List + results: list pagination: 
PaginationInfo + + +class ReviewStateIn(BaseModel): + """Update to the review state of a submission.""" + + instance_id: str + review_state: ReviewStateEnum + + +class ReviewStateOut(BaseModel): + """The response from ODK Central on review state update.""" + + instanceId: str # noqa: N815 + submitterId: int # noqa: N815 + deviceId: str # noqa: N815 + createdAt: str # noqa: N815 + updatedAt: str # noqa: N815 + reviewState: str # noqa: N815 + + +class SubmissionDashboard(BaseModel): + """Submission details dashboard for a project.""" + + slug: str + organisation_name: str + total_tasks: int + created_at: AwareDatetime + organisation_logo: Optional[str] = None + total_submissions: Optional[int] = None + total_contributors: Optional[int] = None + last_active: Optional[AwareDatetime] = None + + @field_serializer("last_active") + def get_last_active(self, last_active: Optional[AwareDatetime]) -> str: + """Date of last activity on project.""" + if last_active is None: + return None + if isinstance(last_active, str): + return last_active + + current_date = timestamp() + time_difference = current_date - last_active + days_difference = time_difference.days + + if days_difference == 0: + return "today" + elif days_difference == 1: + return "yesterday" + elif days_difference < 7: + return f'{days_difference} day{"s" if days_difference > 1 else ""} ago' + else: + return last_active.strftime("%d %b %Y") diff --git a/src/backend/app/tasks/task_crud.py b/src/backend/app/tasks/task_crud.py index 1ba0fc945b..e5657a7b15 100644 --- a/src/backend/app/tasks/task_crud.py +++ b/src/backend/app/tasks/task_crud.py @@ -17,55 +17,20 @@ # """Logic for FMTM tasks.""" -from datetime import datetime, timedelta +from datetime import timedelta -from fastapi import HTTPException -from loguru import logger as log from psycopg import Connection from psycopg.rows import class_row -from app.db.enums import ( - HTTPStatus, - TaskStatus, - get_action_for_status_change, -) -from app.db.models import 
DbTask, DbTaskHistory +from app.db.postgis_utils import timestamp from app.tasks import task_schemas -# TODO SQL refactor this to use case statements on /next -async def new_task_event( - db: Connection, task_id: int, user_id: int, new_status: TaskStatus -): - """Add a new entry to the task events.""" - log.debug(f"Checking if task ({task_id}) is already locked") - task_entry = await DbTask.one(db, task_id) - - if task_entry and task_entry.task_status in [ - TaskStatus.LOCKED_FOR_MAPPING, - TaskStatus.LOCKED_FOR_VALIDATION, - ]: - if task_entry.actioned_by_uid != user_id: - msg = f"Task is locked by user {task_entry.username}" - log.error(msg) - raise HTTPException(status_code=HTTPStatus.FORBIDDEN, detail=msg) - - log.info(f"Updating task ID {task_id} to status {new_status}") - new_event = task_schemas.TaskHistoryIn( - task_id=task_id, - user_id=user_id, - action=get_action_for_status_change(new_status), - # NOTE we don't include a comment unless necessary - ) - new_task_event = await DbTaskHistory.create(db, new_event) - return new_task_event - - async def get_project_task_activity( db: Connection, project_id: int, days: int, -) -> task_schemas.TaskHistoryCount: +) -> task_schemas.TaskEventCount: """Get number of tasks mapped and validated for project. Args: @@ -75,26 +40,26 @@ async def get_project_task_activity( db (Connection): The database connection. Returns: - list[task_schemas.TaskHistoryCount]: A list of task history counts. + list[task_schemas.TaskEventCount]: A list of task event counts. 
""" - end_date = datetime.now() - timedelta(days=days) + end_date = timestamp() - timedelta(days=days) sql = """ SELECT - to_char(action_date::date, 'dd/mm/yyyy') as date, - COUNT(*) FILTER (WHERE action = 'VALIDATED') AS validated, - COUNT(*) FILTER (WHERE action = 'MARKED_MAPPED') AS mapped + to_char(created_at::date, 'dd/mm/yyyy') as date, + COUNT(*) FILTER (WHERE state = 'UNLOCKED_DONE') AS validated, + COUNT(*) FILTER (WHERE state = 'UNLOCKED_TO_VALIDATE') AS mapped FROM - task_history + task_events WHERE project_id = %(project_id)s - AND action_date >= %(end_date)s + AND created_at >= %(end_date)s GROUP BY - action_date::date + created_at::date ORDER BY - action_date::date; + created_at::date; """ - async with db.cursor(row_factory=class_row(task_schemas.TaskHistoryCount)) as cur: + async with db.cursor(row_factory=class_row(task_schemas.TaskEventCount)) as cur: await cur.execute(sql, {"project_id": project_id, "end_date": end_date}) return await cur.fetchall() diff --git a/src/backend/app/tasks/task_routes.py b/src/backend/app/tasks/task_routes.py index ef5193562b..7e3f16d225 100644 --- a/src/backend/app/tasks/task_routes.py +++ b/src/backend/app/tasks/task_routes.py @@ -20,15 +20,15 @@ from typing import Annotated from fastapi import APIRouter, Depends, HTTPException +from loguru import logger as log from psycopg import Connection from app.auth.auth_schemas import ProjectUserDict -from app.auth.roles import get_uid, mapper +from app.auth.roles import mapper from app.db.database import db_conn -from app.db.enums import HTTPStatus, TaskAction, TaskStatus -from app.db.models import DbTask, DbTaskHistory +from app.db.enums import HTTPStatus +from app.db.models import DbTask, DbTaskEvent from app.tasks import task_crud, task_schemas -from app.tasks.task_deps import get_task router = APIRouter( prefix="/tasks", @@ -68,57 +68,23 @@ async def get_specific_task( raise HTTPException(status_code=HTTPStatus.NOT_FOUND, detail=str(e)) from e -# TODO SQL update this to be 
something like /next -@router.post( - "/{task_id}/new-status/{new_status}", response_model=task_schemas.TaskHistoryOut -) +@router.post("/{task_id}/event", response_model=task_schemas.TaskEventOut) async def add_new_task_event( - db_task: Annotated[DbTask, Depends(get_task)], + task_id: int, + new_event: task_schemas.TaskEventIn, project_user: Annotated[ProjectUserDict, Depends(mapper)], - new_status: TaskStatus, db: Annotated[Connection, Depends(db_conn)], ): """Add a new event to the events table / update task status.""" - user_id = await get_uid(project_user.get("user")) - return await task_crud.new_task_event( - db, - db_task.id, - user_id, - new_status, - ) - - -@router.post("/{task_id}/comment/", response_model=task_schemas.TaskHistoryOut) -async def add_task_comment( - comment: str, - db_task: Annotated[DbTask, Depends(get_task)], - project_user: Annotated[ProjectUserDict, Depends(mapper)], - db: Annotated[Connection, Depends(db_conn)], -): - """Create a new task comment. + user_id = project_user.get("user").id + log.info(f"Task {task_id} event: {new_event.event.name} (by user {user_id})") - Parameters: - comment (str): The task comment to add. - db_task (DbTask): The database task entry. - Retrieving this ensures the task exists before updating. - project_user (ProjectUserDict): The authenticated user. - db (Connection): The database connection. + new_event.user_id = user_id + new_event.task_id = task_id + return await DbTaskEvent.create(db, new_event) - Returns: - TaskHistoryOut: The created task comment. - """ - user_id = await get_uid(project_user.get("user")) - new_comment = task_schemas.TaskHistoryIn( - task_id=db_task.id, - user_id=user_id, - action=TaskAction.COMMENT, - action_text=comment, - ) - return await DbTaskHistory.create(db, new_comment) - - -# NOTE this endpoint isn't used? 
-@router.get("/activity/", response_model=list[task_schemas.TaskHistoryCount]) + +@router.get("/activity/", response_model=list[task_schemas.TaskEventCount]) async def task_activity( project_id: int, db: Annotated[Connection, Depends(db_conn)], @@ -139,13 +105,13 @@ async def task_activity( return await task_crud.get_project_task_activity(db, project_id, days) -@router.get("/{task_id}/history/", response_model=list[task_schemas.TaskHistoryOut]) -async def task_history( +@router.get("/{task_id}/history/", response_model=list[task_schemas.TaskEventOut]) +async def get_task_event_history( + task_id: int, db: Annotated[Connection, Depends(db_conn)], - db_task: Annotated[DbTask, Depends(get_task)], project_user: Annotated[ProjectUserDict, Depends(mapper)], days: int = 10, comments: bool = False, ): """Get the detailed history for a task.""" - return await DbTaskHistory.all(db, task_id=db_task.id, days=days, comments=comments) + return await DbTaskEvent.all(db, task_id=task_id, days=days, comments=comments) diff --git a/src/backend/app/tasks/task_schemas.py b/src/backend/app/tasks/task_schemas.py index b087c002cf..eda923d0d7 100644 --- a/src/backend/app/tasks/task_schemas.py +++ b/src/backend/app/tasks/task_schemas.py @@ -21,10 +21,10 @@ from uuid import UUID from geojson_pydantic import Polygon -from pydantic import BaseModel, Field, computed_field +from pydantic import BaseModel, Field -from app.db.enums import TaskAction, TaskStatus, get_status_for_action -from app.db.models import DbTask, DbTaskHistory +from app.db.enums import MappingState +from app.db.models import DbTask, DbTaskEvent # NOTE we don't have a TaskIn as tasks are only generated once during project creation @@ -36,42 +36,31 @@ class TaskOut(DbTask): outline: Polygon -class TaskHistoryIn(DbTaskHistory): +class TaskEventIn(DbTaskEvent): """Create a new task event.""" # Exclude, as the uuid is generated in the database event_id: Annotated[Optional[UUID], Field(exclude=True)] = None # Exclude, as we get 
the project_id in the db from the task id project_id: Annotated[Optional[int], Field(exclude=True)] = None + # Exclude, as state is generated based on event type in db + state: Annotated[Optional[MappingState], Field(exclude=True)] = None # Omit computed fields username: Annotated[Optional[str], Field(exclude=True)] = None profile_img: Annotated[Optional[str], Field(exclude=True)] = None -class TaskHistoryOut(DbTaskHistory): +class TaskEventOut(DbTaskEvent): """A task event to display to the user.""" # Ensure project_id is removed, as we only use this to query for tasks project_id: Annotated[Optional[int], Field(exclude=True)] = None - # We calculate the 'status' field in place of the action enum - action: Annotated[Optional[TaskAction], Field(exclude=True)] = None - @computed_field - @property - def status(self) -> Optional[TaskStatus]: - """Get the status from the recent action. - TODO remove this, replace with 'action' or similar? - """ - if not self.action: - return None - return get_status_for_action(self.action) - - -class TaskHistoryCount(BaseModel): +class TaskEventCount(BaseModel): """Task mapping history status counts per day.""" date: str - validated: int mapped: int + validated: int diff --git a/src/backend/migrations/004-task-events.sql b/src/backend/migrations/004-task-events.sql index c0fd45de89..febb4868fe 100644 --- a/src/backend/migrations/004-task-events.sql +++ b/src/backend/migrations/004-task-events.sql @@ -6,12 +6,12 @@ BEGIN; -- Drop default from action_date -ALTER TABLE public.task_history ALTER COLUMN action_date DROP DEFAULT; +ALTER TABLE IF EXISTS public.task_history ALTER COLUMN action_date DROP DEFAULT; -- Add event_id field DO $$ BEGIN - IF NOT EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'task_history' AND column_name = 'event_id') THEN + IF EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'task_history' AND column_name = 'id') THEN -- Add new event_id UUID field with values ALTER TABLE 
public.task_history ADD COLUMN event_id UUID; UPDATE public.task_history SET event_id = gen_random_uuid() WHERE event_id IS NULL; diff --git a/src/backend/migrations/006-basemaps-table.sql b/src/backend/migrations/006-basemaps-table.sql index 522a5a9b69..23c63c47d5 100644 --- a/src/backend/migrations/006-basemaps-table.sql +++ b/src/backend/migrations/006-basemaps-table.sql @@ -4,7 +4,6 @@ -- * Update default background task status to 'PENDING'. -- * Update background_tasks.id --> UUID type. -- * Update basemaps.id --> UUID type. --- * Also add a composite index to task_history on task_id and action_date -- Start a transaction BEGIN; @@ -77,12 +76,5 @@ BEGIN END IF; END $$; --- Create extra index on task_history - -CREATE INDEX IF NOT EXISTS idx_task_history_date -ON public.task_history USING btree ( - task_id, action_date -); - -- Commit the transaction COMMIT; diff --git a/src/backend/migrations/007-rename-task-history.sql b/src/backend/migrations/007-rename-task-history.sql new file mode 100644 index 0000000000..123df1f3ba --- /dev/null +++ b/src/backend/migrations/007-rename-task-history.sql @@ -0,0 +1,204 @@ +-- ## Migration to: +-- * Rename table task_history --> task_events. 
+-- * Rename columns https://github.com/hotosm/fmtm/issues/1610 + +-- Start a transaction +BEGIN; + + +-- Rename table +ALTER TABLE IF EXISTS public.task_history RENAME TO task_events; + + + +-- Create new enums + +DO $$ +BEGIN + IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'taskevent') THEN + CREATE TYPE public.taskevent AS ENUM ( + 'MAP', + 'FINISH', + 'VALIDATE', + 'GOOD', + 'BAD', + 'CONFLATE', + 'SPLIT', + 'MERGE', + 'ASSIGN', + 'COMMENT' + ); + END IF; +END $$; +ALTER TYPE public.taskevent OWNER TO fmtm; + +DO $$ +BEGIN + IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'entitystate') THEN + CREATE TYPE public.entitystate AS ENUM ( + 'READY', + 'OPENED_IN_ODK', + 'SURVEY_SUBMITTED', + 'MARKED_BAD' + ); + END IF; +END $$; +ALTER TYPE public.entitystate OWNER TO fmtm; + +DO $$ +BEGIN + IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'mappingstate') THEN + CREATE TYPE public.mappingstate AS ENUM ( + 'UNLOCKED_TO_MAP', + 'LOCKED_FOR_MAPPING', + 'UNLOCKED_TO_VALIDATE', + 'LOCKED_FOR_VALIDATION', + 'UNLOCKED_DONE', + 'CONFLATED' + ); + END IF; +END $$; +ALTER TYPE public.mappingstate OWNER TO fmtm; + + + +-- Update task_event fields prior to trigger addition + +DO $$ +BEGIN + IF EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'task_events' AND column_name = 'action') THEN + ALTER TABLE public.task_events ADD COLUMN state public.mappingstate; + ALTER TABLE public.task_events RENAME COLUMN action TO event; + ALTER TABLE public.task_events RENAME COLUMN action_text TO comment; + ALTER TABLE public.task_events RENAME COLUMN action_date TO created_at; + END IF; +END $$; + + + +-- Create trigger function to set task state automatically + +CREATE OR REPLACE FUNCTION set_task_state() +RETURNS TRIGGER AS $$ +BEGIN + CASE NEW.event + WHEN 'MAP' THEN + NEW.state := 'LOCKED_FOR_MAPPING'; + WHEN 'FINISH' THEN + NEW.state := 'UNLOCKED_TO_VALIDATE'; + WHEN 'VALIDATE' THEN + NEW.state := 'LOCKED_FOR_VALIDATION'; + WHEN 'GOOD' THEN + NEW.state 
:= 'UNLOCKED_DONE'; + WHEN 'BAD' THEN + NEW.state := 'UNLOCKED_TO_MAP'; + WHEN 'SPLIT' THEN + NEW.state := 'UNLOCKED_DONE'; + WHEN 'MERGE' THEN + NEW.state := 'UNLOCKED_DONE'; + WHEN 'ASSIGN' THEN + NEW.state := 'LOCKED_FOR_MAPPING'; + WHEN 'COMMENT' THEN + NEW.state := (SELECT state FROM public.task_events WHERE task_id = NEW.task_id ORDER BY created_at DESC LIMIT 1); + ELSE + RAISE EXCEPTION 'Unknown task event type: %', NEW.event; + END CASE; + RETURN NEW; +END; +$$ LANGUAGE plpgsql; + + + +-- Apply trigger to task_events table +DO $$ +BEGIN + CREATE TRIGGER task_event_state_trigger + BEFORE INSERT ON public.task_events + FOR EACH ROW + EXECUTE FUNCTION set_task_state(); +EXCEPTION + WHEN duplicate_object THEN + RAISE NOTICE 'Trigger task_event_state_trigger already exists. Ignoring...'; +END$$; + + + + +-- Update action field values --> taskevent enum + +DO $$ +BEGIN + IF EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'task_events' AND column_name = 'event' AND udt_name = 'taskaction') THEN + -- Change from taskaction --> taskevent + ALTER TABLE task_events + ALTER COLUMN event TYPE public.taskevent + USING CASE event + WHEN 'RELEASED_FOR_MAPPING' THEN 'BAD'::public.taskevent + WHEN 'LOCKED_FOR_MAPPING' THEN 'MAP'::public.taskevent + WHEN 'MARKED_MAPPED' THEN 'FINISH'::public.taskevent + WHEN 'LOCKED_FOR_VALIDATION' THEN 'VALIDATE'::public.taskevent + WHEN 'VALIDATED' THEN 'GOOD'::public.taskevent + WHEN 'MARKED_INVALID' THEN 'BAD'::public.taskevent + WHEN 'MARKED_BAD' THEN 'BAD'::public.taskevent + WHEN 'SPLIT_NEEDED' THEN 'SPLIT'::public.taskevent + WHEN 'RECREATED' THEN 'BAD'::public.taskevent + WHEN 'COMMENT' THEN 'COMMENT'::public.taskevent + ELSE NULL + END; + END IF; +END $$; + + + +-- Drop old enums + +DROP TYPE IF EXISTS public.taskaction; +-- Note this no longer used +DROP TYPE IF EXISTS public.taskstatus; + + + +-- Add task_events foreign keys + +DO $$ +BEGIN + IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'fk_projects') THEN + ALTER TABLE ONLY public.task_events + ADD CONSTRAINT fk_projects FOREIGN KEY (project_id) + REFERENCES 
public.projects (id); + END IF; + + IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'fk_project_task_id') THEN + ALTER TABLE ONLY public.task_events + ADD CONSTRAINT fk_project_task_id FOREIGN KEY (task_id, project_id) + REFERENCES public.tasks (id, project_id); + END IF; + + IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'fk_users') THEN + ALTER TABLE ONLY public.task_events + ADD CONSTRAINT fk_users FOREIGN KEY (user_id) + REFERENCES public.users (id); + END IF; +END $$; + + +-- Add default values for UUID fields +ALTER TABLE public.task_events +ALTER COLUMN event_id SET DEFAULT gen_random_uuid(), +ALTER COLUMN event_id SET NOT NULL, +ALTER COLUMN task_id SET NOT NULL, +ALTER COLUMN event SET NOT NULL, +ALTER COLUMN created_at SET DATA TYPE timestamp with time zone; + +ALTER TABLE public.basemaps +ALTER COLUMN id SET DEFAULT gen_random_uuid(), +ALTER COLUMN id SET NOT NULL; + +ALTER TABLE public.background_tasks +ALTER COLUMN id SET DEFAULT gen_random_uuid(), +ALTER COLUMN id SET NOT NULL; + + + +-- Commit the transaction +COMMIT; diff --git a/src/backend/migrations/008-recreate-indexes.sql b/src/backend/migrations/008-recreate-indexes.sql new file mode 100644 index 0000000000..ab59ab972b --- /dev/null +++ b/src/backend/migrations/008-recreate-indexes.sql @@ -0,0 +1,57 @@ +-- ## Migration to: +-- * Drop and recreate some indexes. +-- * Add some new indexes for task_events. 
+ + +-- Start a transaction +BEGIN; + +-- Drop some existing indexes + +DROP INDEX IF EXISTS idx_geometry; +DROP INDEX IF EXISTS ix_projects_mapper_level; +DROP INDEX IF EXISTS ix_projects_organisation_id; +DROP INDEX IF EXISTS ix_tasks_project_id; +DROP INDEX IF EXISTS ix_users_id; +DROP INDEX IF EXISTS idx_task_history_composite; +DROP INDEX IF EXISTS idx_task_history_project_id_user_id; +DROP INDEX IF EXISTS ix_task_history_project_id; +DROP INDEX IF EXISTS ix_task_history_user_id; +DROP INDEX IF EXISTS idx_task_history_date; + +-- Create new indexes +CREATE INDEX IF NOT EXISTS idx_projects_mapper_level +ON public.projects USING btree ( + mapper_level +); +CREATE INDEX IF NOT EXISTS idx_projects_organisation_id +ON public.projects USING btree ( + organisation_id +); +CREATE INDEX IF NOT EXISTS idx_tasks_composite +ON public.tasks USING btree ( + id, project_id +); +CREATE INDEX IF NOT EXISTS idx_task_event_composite +ON public.task_events USING btree ( + task_id, project_id +); +CREATE INDEX IF NOT EXISTS idx_task_event_project_user +ON public.task_events USING btree ( + user_id, project_id +); +CREATE INDEX IF NOT EXISTS idx_task_event_project_id +ON public.task_events USING btree ( + task_id, project_id +); +CREATE INDEX IF NOT EXISTS idx_task_event_user_id +ON public.task_events USING btree ( + task_id, user_id +); +CREATE INDEX IF NOT EXISTS idx_task_event_date +ON public.task_events USING btree ( + task_id, created_at +); + +-- Commit the transaction +COMMIT; diff --git a/src/backend/migrations/init/fmtm_base_schema.sql b/src/backend/migrations/init/fmtm_base_schema.sql index 19089cc0e0..e295b56eec 100644 --- a/src/backend/migrations/init/fmtm_base_schema.sql +++ b/src/backend/migrations/init/fmtm_base_schema.sql @@ -80,33 +80,6 @@ CREATE TYPE public.projectstatus AS ENUM ( ); ALTER TYPE public.projectstatus OWNER TO fmtm; -CREATE TYPE public.taskaction AS ENUM ( - 'RELEASED_FOR_MAPPING', - 'LOCKED_FOR_MAPPING', - 'MARKED_MAPPED', - 'LOCKED_FOR_VALIDATION', 
- 'VALIDATED', - 'MARKED_INVALID', - 'MARKED_BAD', - 'SPLIT_NEEDED', - 'RECREATED', - 'COMMENT' -); -ALTER TYPE public.taskaction OWNER TO fmtm; - -CREATE TYPE public.taskstatus AS ENUM ( - 'READY', - 'LOCKED_FOR_MAPPING', - 'MAPPED', - 'LOCKED_FOR_VALIDATION', - 'VALIDATED', - 'INVALIDATED', - 'BAD', - 'SPLIT', - 'ARCHIVED' -); -ALTER TYPE public.taskstatus OWNER TO fmtm; - CREATE TYPE public.userrole AS ENUM ( 'READ_ONLY', 'MAPPER', @@ -146,6 +119,39 @@ CREATE TYPE public.communitytype AS ENUM ( ); ALTER TYPE public.communitytype OWNER TO fmtm; +CREATE TYPE public.taskevent AS ENUM ( + 'MAP', + 'FINISH', + 'VALIDATE', + 'GOOD', + 'BAD', + 'CONFLATE', + 'SPLIT', + 'MERGE', + 'ASSIGN', + 'COMMENT' +); +ALTER TYPE public.taskevent OWNER TO fmtm; + +CREATE TYPE public.mappingstate AS ENUM ( + 'UNLOCKED_TO_MAP', + 'LOCKED_FOR_MAPPING', + 'UNLOCKED_TO_VALIDATE', + 'LOCKED_FOR_VALIDATION', + 'UNLOCKED_DONE', + 'CONFLATED' +); +ALTER TYPE public.mappingstate OWNER TO fmtm; + +CREATE TYPE public.entitystate AS ENUM ( + 'READY', + 'OPENED_IN_ODK', + 'SURVEY_SUBMITTED', + 'MARKED_BAD' +); +ALTER TYPE public.entitystate OWNER TO fmtm; + + -- Extra SET default_tablespace = ''; @@ -161,8 +167,10 @@ CREATE TABLE IF NOT EXISTS public._migrations ( ALTER TABLE public._migrations OWNER TO fmtm; +-- Note we use UUID for interoperability with external databases, +-- such as PGLite or other microservices CREATE TABLE public.background_tasks ( - id UUID NOT NULL, + id UUID NOT NULL DEFAULT gen_random_uuid(), name character varying, project_id integer, status public.backgroundtaskstatus NOT NULL DEFAULT 'PENDING', @@ -171,8 +179,10 @@ CREATE TABLE public.background_tasks ( ALTER TABLE public.background_tasks OWNER TO fmtm; +-- Note we use UUID for interoperability with external databases, +-- such as PGLite or other microservices CREATE TABLE public.basemaps ( - id UUID NOT NULL, + id UUID NOT NULL DEFAULT gen_random_uuid(), project_id integer, status public.backgroundtaskstatus NOT 
NULL, url character varying, @@ -217,6 +227,7 @@ CACHE 1; ALTER TABLE public.organisations_id_seq OWNER TO fmtm; ALTER SEQUENCE public.organisations_id_seq OWNED BY public.organisations.id; + CREATE TABLE public.projects ( id integer NOT NULL, organisation_id integer, @@ -266,18 +277,19 @@ ALTER TABLE public.projects_id_seq OWNER TO fmtm; ALTER SEQUENCE public.projects_id_seq OWNED BY public.projects.id; --- TODO SQL rename this table & add foreign keys back in --- TODO SQL Also ensure we have an index -CREATE TABLE public.task_history ( - event_id UUID NOT NULL, - project_id integer, +-- Note we use UUID for interoperability with external databases, +-- such as PGLite or other microservices +CREATE TABLE public.task_events ( + event_id UUID NOT NULL DEFAULT gen_random_uuid(), + event public.taskevent NOT NULL, task_id integer NOT NULL, - action public.taskaction NOT NULL, - action_text character varying, - action_date timestamp with time zone NOT NULL DEFAULT now(), - user_id integer NOT NULL + project_id integer, + user_id integer, + state public.mappingstate, + comment text, + created_at timestamp with time zone NOT NULL DEFAULT now() ); -ALTER TABLE public.task_history OWNER TO fmtm; +ALTER TABLE public.task_events OWNER TO fmtm; CREATE TABLE public.tasks ( @@ -408,8 +420,8 @@ ADD CONSTRAINT organisations_slug_key UNIQUE (slug); ALTER TABLE ONLY public.projects ADD CONSTRAINT projects_pkey PRIMARY KEY (id); -ALTER TABLE ONLY public.task_history -ADD CONSTRAINT task_history_pkey PRIMARY KEY (event_id); +ALTER TABLE ONLY public.task_events +ADD CONSTRAINT task_events_pkey PRIMARY KEY (event_id); ALTER TABLE ONLY public.tasks ADD CONSTRAINT tasks_pkey PRIMARY KEY (id, project_id); @@ -434,39 +446,46 @@ ADD CONSTRAINT submission_photos_pkey PRIMARY KEY (id); -- Indexing -CREATE INDEX idx_geometry ON public.projects USING gist (outline); CREATE INDEX idx_projects_outline ON public.projects USING gist (outline); -CREATE INDEX idx_task_history_composite ON 
public.task_history USING btree ( - task_id, project_id ); -CREATE INDEX idx_task_history_project_id_user_id ON public.task_history -USING btree ( - user_id, project_id ); -CREATE INDEX ix_task_history_project_id ON public.task_history USING btree ( - project_id ); -CREATE INDEX ix_task_history_user_id ON public.task_history USING btree ( - user_id ); -CREATE INDEX idx_task_history_date ON public.task_history USING btree ( - task_id, action_date ); -CREATE INDEX idx_tasks_outline ON public.tasks USING gist (outline); -CREATE INDEX ix_projects_mapper_level ON public.projects USING btree ( +CREATE INDEX IF NOT EXISTS idx_projects_mapper_level +ON public.projects USING btree ( mapper_level ); -CREATE INDEX ix_projects_organisation_id ON public.projects USING btree ( +CREATE INDEX IF NOT EXISTS idx_projects_organisation_id +ON public.projects USING btree ( organisation_id ); -CREATE INDEX ix_tasks_project_id ON public.tasks USING btree (project_id); -CREATE INDEX ix_users_id ON public.users USING btree (id); +CREATE INDEX idx_tasks_outline ON public.tasks USING gist (outline); +CREATE INDEX IF NOT EXISTS idx_tasks_composite +ON public.tasks USING btree ( + id, project_id ); CREATE INDEX idx_user_roles ON public.user_roles USING btree ( project_id, user_id ); CREATE INDEX idx_org_managers ON public.organisation_managers USING btree ( user_id, organisation_id ); +CREATE INDEX IF NOT EXISTS idx_task_event_composite +ON public.task_events USING btree ( + task_id, project_id ); +CREATE INDEX IF NOT EXISTS idx_task_event_project_user +ON public.task_events USING btree ( + user_id, project_id ); +CREATE INDEX IF NOT EXISTS idx_task_event_project_id +ON public.task_events USING btree ( + task_id, project_id ); +CREATE INDEX IF NOT EXISTS idx_task_event_user_id +ON public.task_events USING btree ( + task_id, user_id ); +CREATE INDEX IF NOT EXISTS idx_task_event_date +ON public.task_events USING btree ( + task_id, created_at ); -- Foreign keys @@ -493,6 +512,21 @@ 
ADD CONSTRAINT tasks_project_id_fkey FOREIGN KEY ( project_id ) REFERENCES public.projects (id); +ALTER TABLE ONLY public.task_events +ADD CONSTRAINT fk_projects FOREIGN KEY ( + project_id +) REFERENCES public.projects (id); + +ALTER TABLE ONLY public.task_events +ADD CONSTRAINT fk_project_task_id FOREIGN KEY ( + task_id, project_id +) REFERENCES public.tasks (id, project_id); + +ALTER TABLE ONLY public.task_events +ADD CONSTRAINT fk_users FOREIGN KEY ( + user_id +) REFERENCES public.users (id); + ALTER TABLE ONLY public.user_roles ADD CONSTRAINT user_roles_project_id_fkey FOREIGN KEY ( project_id @@ -508,6 +542,42 @@ ADD CONSTRAINT fk_project_id FOREIGN KEY ( project_id ) REFERENCES public.projects (id); +-- Triggers + +CREATE OR REPLACE FUNCTION set_task_state() +RETURNS TRIGGER AS $$ +BEGIN + CASE NEW.event + WHEN 'MAP' THEN + NEW.state := 'LOCKED_FOR_MAPPING'; + WHEN 'FINISH' THEN + NEW.state := 'UNLOCKED_TO_VALIDATE'; + WHEN 'VALIDATE' THEN + NEW.state := 'LOCKED_FOR_VALIDATION'; + WHEN 'GOOD' THEN + NEW.state := 'UNLOCKED_DONE'; + WHEN 'BAD' THEN + NEW.state := 'UNLOCKED_TO_MAP'; + WHEN 'SPLIT' THEN + NEW.state := 'UNLOCKED_DONE'; + WHEN 'MERGE' THEN + NEW.state := 'UNLOCKED_DONE'; + WHEN 'ASSIGN' THEN + NEW.state := 'LOCKED_FOR_MAPPING'; + WHEN 'COMMENT' THEN + NEW.state := (SELECT state FROM public.task_events WHERE task_id = NEW.task_id ORDER BY created_at DESC LIMIT 1); + ELSE + RAISE EXCEPTION 'Unknown task event type: %', NEW.event; + END CASE; + RETURN NEW; +END; +$$ LANGUAGE plpgsql; + +CREATE OR REPLACE TRIGGER task_event_state_trigger +BEFORE INSERT ON public.task_events +FOR EACH ROW +EXECUTE FUNCTION set_task_state(); + -- Finalise REVOKE USAGE ON SCHEMA public FROM public; diff --git a/src/backend/tests/conftest.py b/src/backend/tests/conftest.py index 4b0c1262f8..c8d3ecf3ad 100644 --- a/src/backend/tests/conftest.py +++ b/src/backend/tests/conftest.py @@ -39,13 +39,13 @@ from app.central.central_schemas import ODKCentralDecrypted, ODKCentralIn from app.config import encrypt_value, settings from app.db.database import db_conn -from 
app.db.enums import TaskStatus, UserRole, get_action_for_status_change -from app.db.models import DbProject, DbTask, DbTaskHistory +from app.db.enums import TaskEvent, UserRole +from app.db.models import DbProject, DbTask, DbTaskEvent from app.main import get_application from app.organisations.organisation_deps import get_organisation from app.projects import project_crud from app.projects.project_schemas import ProjectIn -from app.tasks.task_schemas import TaskHistoryIn +from app.tasks.task_schemas import TaskEventIn from app.users.user_deps import get_user from tests.test_data import test_data_path @@ -223,13 +223,13 @@ async def task_event(db, project, tasks, admin_user): """Create a new task event in the database.""" user = await get_user(admin_user.id, db) for task in tasks: - new_event = TaskHistoryIn( + new_event = TaskEventIn( task_id=task.id, user_id=user.id, - action=get_action_for_status_change(TaskStatus.READY), - action_text="We added a comment!", + event=TaskEvent.MAP, + comment="We added a comment!", ) - db_task_event = await DbTaskHistory.create(db, new_event) + db_task_event = await DbTaskEvent.create(db, new_event) return db_task_event diff --git a/src/backend/tests/test_projects_routes.py b/src/backend/tests/test_projects_routes.py index 12710d539d..3a9fe4d00c 100644 --- a/src/backend/tests/test_projects_routes.py +++ b/src/backend/tests/test_projects_routes.py @@ -31,7 +31,7 @@ from app.central.central_crud import create_odk_project from app.config import settings -from app.db.enums import EntityStatus, HTTPStatus, TaskAction +from app.db.enums import EntityState, HTTPStatus, MappingState from app.db.models import DbProject, slugify from app.db.postgis_utils import check_crs from app.projects import project_crud @@ -387,7 +387,7 @@ async def test_generate_project_files(db, client, project): # Now check required values were added to project new_project = await DbProject.one(db, project_id) assert len(new_project.tasks) == 1 - assert 
new_project.tasks[0].task_status == TaskAction.RELEASED_FOR_MAPPING + assert new_project.tasks[0].task_state == MappingState.UNLOCKED_TO_MAP assert isinstance(new_project.odk_token, str) @@ -463,7 +463,7 @@ async def test_project_by_id(client, project): async def test_set_entity_mapping_status(client, odk_project, entities): """Test set the ODK entity mapping status.""" entity = entities[0] - new_status = EntityStatus.LOCKED + new_status = EntityState.OPENED_IN_ODK response = await client.post( f"/projects/{odk_project.id}/entity/status", diff --git a/src/backend/tests/test_task_routes.py b/src/backend/tests/test_task_routes.py index 7d68f881c2..2df7930675 100644 --- a/src/backend/tests/test_task_routes.py +++ b/src/backend/tests/test_task_routes.py @@ -21,11 +21,11 @@ import pytest -from app.db.enums import TaskStatus +from app.db.enums import MappingState, TaskEvent async def test_read_task_history(client, task_event): - """Test task history for a project.""" + """Test task events for a project.""" task_id = task_event.task_id assert task_id is not None @@ -40,24 +40,65 @@ async def test_read_task_history(client, task_event): assert UUID(data["event_id"]) == task_event.event_id assert data["username"] == task_event.username assert data["profile_img"] == task_event.profile_img - assert data["action_text"] == task_event.action_text - assert data["status"] == TaskStatus.READY + assert data["comment"] == task_event.comment + assert data["state"] == MappingState.LOCKED_FOR_MAPPING -async def test_update_task_status(client, tasks): +async def test_submit_task_events(client, tasks): """Test update the task status.""" task_id = tasks[0].id project_id = tasks[0].project_id - new_status = TaskStatus.LOCKED_FOR_MAPPING + # LOCK MAP response = await client.post( - f"tasks/{task_id}/new-status/{new_status.value}?project_id={project_id}" + f"tasks/{task_id}/event?project_id={project_id}", + json={"event": TaskEvent.MAP}, ) assert response.status_code == 200 + data = 
response.json() + assert data["event"] == TaskEvent.MAP + assert data["state"] == MappingState.LOCKED_FOR_MAPPING + + # FINISH + response = await client.post( + f"tasks/{task_id}/event?project_id={project_id}", + json={"event": TaskEvent.FINISH}, + ) + assert response.status_code == 200 + data = response.json() + assert data["event"] == TaskEvent.FINISH + assert data["state"] == MappingState.UNLOCKED_TO_VALIDATE + # LOCK VALIDATE + response = await client.post( + f"tasks/{task_id}/event?project_id={project_id}", + json={"event": TaskEvent.VALIDATE}, + ) + assert response.status_code == 200 + data = response.json() + assert data["event"] == TaskEvent.VALIDATE + assert data["state"] == MappingState.LOCKED_FOR_VALIDATION + + # MARK GOOD / VALIDATED + response = await client.post( + f"tasks/{task_id}/event?project_id={project_id}", + json={"event": TaskEvent.GOOD}, + ) + assert response.status_code == 200 + data = response.json() + assert data["event"] == TaskEvent.GOOD + assert data["state"] == MappingState.UNLOCKED_DONE + + # COMMENT + response = await client.post( + f"tasks/{task_id}/event?project_id={project_id}", + json={"event": TaskEvent.COMMENT, "comment": "Hello!"}, + ) + assert response.status_code == 200 data = response.json() - assert "status" in data - assert data["status"] == new_status.name + assert data["event"] == TaskEvent.COMMENT + assert data["state"] is None + assert data["comment"] == "Hello!" 
if __name__ == "__main__": diff --git a/src/frontend/e2e/02-mapper-flow.spec.ts b/src/frontend/e2e/02-mapper-flow.spec.ts index b6cc32a7e7..bdc4749541 100644 --- a/src/frontend/e2e/02-mapper-flow.spec.ts +++ b/src/frontend/e2e/02-mapper-flow.spec.ts @@ -22,7 +22,7 @@ test.describe('mapper flow', () => { y: 95, }, }); - await expect(page.getByText('Status: READY')).toBeVisible(); + await expect(page.getByText('Status: UNLOCKED_TO_MAP')).toBeVisible(); await page.getByRole('alert').waitFor({ state: 'hidden' }); await page.getByTitle('Close').getByTestId('CloseIcon').click(); // Use maxDiffPixelRatio to avoid issues with OSM tile loading delay @@ -61,11 +61,11 @@ test.describe('mapper flow', () => { await page.getByRole('button', { name: 'MARK AS FULLY MAPPED' }).click(); // Required again for the confirmation dialog (0/4 features mapped) await page.getByRole('button', { name: 'MARK AS FULLY MAPPED' }).click(); - await page.waitForSelector('div:has-text("updated status to MAPPED"):nth-of-type(1)'); + await page.waitForSelector('div:has-text("updated status to UNLOCKED_TO_VALIDATE"):nth-of-type(1)'); await expect( page .locator('div') - .filter({ hasText: /updated status to MAPPED/ }) + .filter({ hasText: /updated status to UNLOCKED_TO_VALIDATE/ }) .first(), ).toBeVisible(); await page.getByRole('alert').waitFor({ state: 'hidden' }); @@ -86,8 +86,8 @@ test.describe('mapper flow', () => { // Click 'Fully Mapped' button on validation page await page.getByRole('button', { name: 'MARK AS VALIDATED' }).click(); - await page.getByText('has been updated to VALIDATED').waitFor({ state: 'visible' }); - await expect(page.getByText('has been updated to VALIDATED')).toBeVisible(); + await page.getByText('has been updated to UNLOCKED_DONE').waitFor({ state: 'visible' }); + await expect(page.getByText('has been updated to UNLOCKED_DONE')).toBeVisible(); // wait for map to render before continuing await page.waitForTimeout(4000); @@ -99,7 +99,7 @@ test.describe('mapper flow', () => { 
y: 95, }, }); - await expect(page.getByText('Status: VALIDATED')).toBeVisible(); + await expect(page.getByText('Status: UNLOCKED_DONE')).toBeVisible(); }); test('open feature (Entity) in ODK', async ({ browserName, page }) => { @@ -119,7 +119,7 @@ test.describe('mapper flow', () => { y: 220, }, }); - await expect(page.getByText('Status: READY')).toBeVisible(); + await expect(page.getByText('Status: UNLOCKED_TO_MAP')).toBeVisible(); await expect(page.getByRole('button', { name: 'START MAPPING' })).toBeVisible(); // 2. Click on a specific feature / Entity within a task diff --git a/src/frontend/src/api/Project.ts b/src/frontend/src/api/Project.ts index 111f2c43fc..a0c6cf4488 100755 --- a/src/frontend/src/api/Project.ts +++ b/src/frontend/src/api/Project.ts @@ -1,7 +1,7 @@ import { ProjectActions } from '@/store/slices/ProjectSlice'; import { CommonActions } from '@/store/slices/CommonSlice'; import CoreModules from '@/shared/CoreModules'; -import { task_status } from '@/types/enums'; +import { task_state, task_event } from '@/types/enums'; import { writeBinaryToOPFS } from '@/api/Files'; import { projectInfoType } from '@/models/project/projectModel'; @@ -19,7 +19,7 @@ export const ProjectById = (projectId: string) => { id: data.id, index: data.project_task_index, outline: data.outline, - task_status: task_status[data.task_status], + task_state: task_state[data.task_state], actioned_by_uid: data.actioned_by_uid, actioned_by_username: data.actioned_by_username, task_history: data.task_history, @@ -150,14 +150,14 @@ export const GetTilesList = (url: string) => { }; }; -export const GenerateProjectTiles = (url: string, payload: string) => { +export const GenerateProjectTiles = (url: string, projectId: string, data: object) => { return async (dispatch) => { dispatch(ProjectActions.SetGenerateProjectTilesLoading(true)); - const generateProjectTiles = async (url: string, payload: string) => { + const generateProjectTiles = async (url: string, projectId: string) => { try { 
- const response = await CoreModules.axios.get(url); - dispatch(GetTilesList(`${import.meta.env.VITE_API_URL}/projects/${payload}/tiles/`)); + await CoreModules.axios.post(url, data); + dispatch(GetTilesList(`${import.meta.env.VITE_API_URL}/projects/${projectId}/tiles/`)); dispatch(ProjectActions.SetGenerateProjectTilesLoading(false)); } catch (error) { dispatch(ProjectActions.SetGenerateProjectTilesLoading(false)); @@ -165,7 +165,7 @@ export const GenerateProjectTiles = (url: string, payload: string) => { dispatch(ProjectActions.SetGenerateProjectTilesLoading(false)); } }; - await generateProjectTiles(url, payload); + await generateProjectTiles(url, projectId); }; }; @@ -217,9 +217,9 @@ export const DownloadTile = (url: string, payload: Partial, toO }; }; -export const GetProjectDashboard = (url: string) => { +export const GetSubmissionDashboard = (url: string) => { return async (dispatch) => { - const getProjectDashboard = async (url: string) => { + const GetSubmissionDashboard = async (url: string) => { try { dispatch(ProjectActions.SetProjectDashboardLoading(true)); const response = await CoreModules.axios.get(url); @@ -234,7 +234,7 @@ export const GetProjectDashboard = (url: string) => { dispatch(ProjectActions.SetProjectDashboardLoading(false)); } }; - await getProjectDashboard(url); + await GetSubmissionDashboard(url); }; }; @@ -278,11 +278,17 @@ export const GetProjectComments = (url: string) => { }; }; -export const PostProjectComments = (url: string, payload: { task_id: number; project_id: any; comment: string }) => { +export const PostProjectComments = ( + url: string, + payload: { event: task_event.COMMENT; task_id: number; comment: string }, +) => { return async (dispatch) => { const postProjectComments = async (url: string) => { try { dispatch(ProjectActions.SetPostProjectCommentsLoading(true)); + if (!('event' in payload)) { + payload = { event: task_event.COMMENT, ...payload }; + } const response = await CoreModules.axios.post(url, payload); 
dispatch(ProjectActions.UpdateProjectCommentsList(response.data)); dispatch(ProjectActions.SetPostProjectCommentsLoading(false)); @@ -314,19 +320,19 @@ export const GetProjectTaskActivity = (url: string) => { }; }; -export const UpdateEntityStatus = (url: string, payload: { entity_id: string; status: number; label: string }) => { +export const UpdateEntityState = (url: string, payload: { entity_id: string; status: number; label: string }) => { return async (dispatch) => { - const updateEntityStatus = async (url: string, payload: { entity_id: string; status: number; label: string }) => { + const updateEntityState = async (url: string, payload: { entity_id: string; status: number; label: string }) => { try { - dispatch(ProjectActions.UpdateEntityStatusLoading(true)); + dispatch(ProjectActions.UpdateEntityStateLoading(true)); const response = await CoreModules.axios.post(url, payload); - dispatch(ProjectActions.UpdateEntityStatus(response.data)); - dispatch(ProjectActions.UpdateEntityStatusLoading(false)); + dispatch(ProjectActions.UpdateEntityState(response.data)); + dispatch(ProjectActions.UpdateEntityStateLoading(false)); } catch (error) { - dispatch(ProjectActions.UpdateEntityStatusLoading(false)); + dispatch(ProjectActions.UpdateEntityStateLoading(false)); } }; - await updateEntityStatus(url, payload); + await updateEntityState(url, payload); }; }; diff --git a/src/frontend/src/api/SubmissionService.ts b/src/frontend/src/api/SubmissionService.ts index 02c9b770c9..dc9b58ad31 100644 --- a/src/frontend/src/api/SubmissionService.ts +++ b/src/frontend/src/api/SubmissionService.ts @@ -81,12 +81,12 @@ export const SubmissionTableService: Function = (url: string, payload: filterTyp }; }; -export const UpdateReviewStateService: Function = (url: string) => { +export const UpdateReviewStateService: Function = (url: string, payload: object) => { return async (dispatch) => { const UpdateReviewState = async (url: string) => { try { 
dispatch(SubmissionActions.UpdateReviewStateLoading(true)); - const response = await CoreModules.axios.post(url); + const response = await CoreModules.axios.post(url, payload); dispatch(SubmissionActions.UpdateSubmissionTableDataReview(response.data)); } catch (error) { dispatch( diff --git a/src/frontend/src/api/ProjectTaskStatus.ts b/src/frontend/src/api/TaskEvent.ts similarity index 59% rename from src/frontend/src/api/ProjectTaskStatus.ts rename to src/frontend/src/api/TaskEvent.ts index 10834963f3..e225f442d8 100755 --- a/src/frontend/src/api/ProjectTaskStatus.ts +++ b/src/frontend/src/api/TaskEvent.ts @@ -2,16 +2,35 @@ import { ProjectActions } from '@/store/slices/ProjectSlice'; import { HomeActions } from '@/store/slices/HomeSlice'; import CoreModules from '@/shared/CoreModules'; import { CommonActions } from '@/store/slices/CommonSlice'; -import { projectTaskBoundriesType } from '@/models/project/projectModel'; +import { task_event as taskEventEnum, task_state as taskStateEnum } from '@/types/enums'; -export const UpdateTaskStatus = ( +// function getActionFromState(state: taskStateEnum): taskEventEnum { +// switch (state) { +// case taskStateEnum.UNLOCKED_TO_MAP: +// return taskEventEnum.MAP; +// case taskStateEnum.LOCKED_FOR_MAPPING: +// return taskEventEnum.FINISH; +// case taskStateEnum.UNLOCKED_TO_VALIDATE: +// return taskEventEnum.VALIDATE; +// case taskStateEnum.LOCKED_FOR_VALIDATION: +// return taskEventEnum.GOOD; +// // NOTE we also need to handle taskEventEnum.BAD somehow +// // case taskStateEnum.LOCKED_FOR_VALIDATION: +// // return taskEventEnum.BAD; + +// default: +// throw new Error(`Unhandled state: ${state}`); +// } +// } + +export const CreateTaskEvent = ( url: string, + action: taskEventEnum, currentProjectId: string, taskId: string, body: any, params: { project_id: string }, style?: any, - existingData?: projectTaskBoundriesType[], feature?: Record, ) => { return async (dispatch) => { @@ -24,15 +43,21 @@ export const UpdateTaskStatus = ( 
try { dispatch(CommonActions.SetLoading(true)); + body = { + event: action, + ...body, + }; const response = await CoreModules.axios.post(url, body, { params }); dispatch(ProjectActions.UpdateProjectTaskActivity(response.data)); - if (feature && style) { - await feature.setStyle(style); + // update task color based on current state + await feature.setStyle(style[response?.data?.state]); - // assign userId to actioned_by_uid if status is locked_for_mapping or locked_for_validation + // assign userId to actioned_by_uid if state is locked_for_mapping or locked_for_validation const prevProperties = feature.getProperties(); - const isTaskLocked = ['LOCKED_FOR_MAPPING', 'LOCKED_FOR_VALIDATION'].includes(response.data.status); + const isTaskLocked = [taskStateEnum.LOCKED_FOR_MAPPING, taskStateEnum.LOCKED_FOR_VALIDATION].includes( + response.data.state, + ); const updatedProperties = { ...prevProperties, actioned_by_uid: isTaskLocked ? body.id : null }; feature.setProperties(updatedProperties); @@ -42,7 +67,7 @@ export const UpdateTaskStatus = ( taskId, actioned_by_uid: body?.id, actioned_by_username: body?.username, - task_status: response.data.status, + task_state: response.data.state, }), ); } @@ -51,7 +76,7 @@ export const UpdateTaskStatus = ( dispatch( HomeActions.SetSnackBar({ open: true, - message: `Task #${taskId} has been updated to ${response.data.status}`, + message: `Task #${taskId} has been updated to ${response.data.state}`, variant: 'success', duration: 3000, }), diff --git a/src/frontend/src/components/DialogTaskActions.tsx b/src/frontend/src/components/DialogTaskActions.tsx index 5c274256c6..18006400b3 100755 --- a/src/frontend/src/components/DialogTaskActions.tsx +++ b/src/frontend/src/components/DialogTaskActions.tsx @@ -1,10 +1,10 @@ import React, { useState, useEffect } from 'react'; import environment from '@/environment'; -import { UpdateTaskStatus } from '@/api/ProjectTaskStatus'; +import { CreateTaskEvent } from '@/api/TaskEvent'; import MapStyles 
from '@/hooks/MapStyles'; import CoreModules from '@/shared/CoreModules'; import { CommonActions } from '@/store/slices/CommonSlice'; -import { task_status as taskStatusEnum } from '@/types/enums'; +import { task_event as taskEventEnum, task_state as taskStateEnum } from '@/types/enums'; import Button from '@/components/common/Button'; import { useNavigate } from 'react-router-dom'; import { GetProjectTaskActivity } from '@/api/Project'; @@ -17,7 +17,7 @@ type dialogPropType = { feature: Record; }; -type taskListstatusType = { +type taskListStateType = { value: string; key: string; btnBG: string; @@ -29,13 +29,12 @@ export default function Dialog({ taskId, feature }: dialogPropType) { const params = CoreModules.useParams(); const geojsonStyles = MapStyles(); - const [list_of_task_status, set_list_of_task_status] = useState([]); - const [task_status, set_task_status] = useState('RELEASED_FOR_MAPPING'); + const [list_of_task_actions, set_list_of_task_actions] = useState([]); + const [task_state, set_task_state] = useState(taskStateEnum.UNLOCKED_TO_MAP); const [currentTaskInfo, setCurrentTaskInfo] = useState(); const [toggleMappedConfirmationModal, setToggleMappedConfirmationModal] = useState(false); const projectInfo = useAppSelector((state) => state.project.projectInfo); - const taskBoundaryData = useAppSelector((state) => state.project.projectTaskBoundries); const loading = useAppSelector((state) => state.common.loading); const taskInfo = useAppSelector((state) => state.task.taskInfo); const projectData = useAppSelector((state) => state.project.projectTaskBoundries); @@ -44,19 +43,19 @@ export default function Dialog({ taskId, feature }: dialogPropType) { const currentProjectId: string = params.id; const projectIndex = projectData.findIndex((project) => project.id == parseInt(currentProjectId)); - const currentStatus = { - ...taskBoundaryData?.[projectIndex]?.taskBoundries?.filter((task) => { + const selectedTask = { + 
...projectData?.[projectIndex]?.taskBoundries?.filter((task) => { return task?.id == taskId; })?.[0], }; const checkIfTaskAssignedOrNot = - currentStatus?.actioned_by_username === authDetails?.username || currentStatus?.actioned_by_username === null; + selectedTask?.actioned_by_username === authDetails?.username || selectedTask?.actioned_by_username === null; useEffect(() => { if (taskId) { dispatch( GetProjectTaskActivity( - `${import.meta.env.VITE_API_URL}/tasks/${currentStatus?.id}/history/?project_id=${currentProjectId}&comment=false`, + `${import.meta.env.VITE_API_URL}/tasks/${selectedTask?.id}/history/?project_id=${currentProjectId}&comments=false`, ), ); } @@ -72,24 +71,26 @@ export default function Dialog({ taskId, feature }: dialogPropType) { useEffect(() => { if (projectIndex != -1) { - const currentStatus = - projectTaskActivityList.length > 0 ? projectTaskActivityList[0].status : 'RELEASED_FOR_MAPPING'; - const findCorrectTaskStatusIndex = environment.tasksStatus.findIndex((data) => data.label == currentStatus); - const tasksStatus = - feature.id_ != undefined ? environment.tasksStatus[findCorrectTaskStatusIndex]?.['label'] : ''; - set_task_status(tasksStatus); - const tasksStatusList = - feature.id_ != undefined ? environment.tasksStatus[findCorrectTaskStatusIndex]?.['action'] : []; - set_list_of_task_status(tasksStatusList); + // Get current state of task + const selectedTask = + projectTaskActivityList.length > 0 ? projectTaskActivityList[0].state : taskStateEnum.UNLOCKED_TO_MAP; + const findCorrectTaskStateIndex = environment.tasksStatus.findIndex((data) => data.label == selectedTask); + const taskState = feature.id_ != undefined ? environment.tasksStatus[findCorrectTaskStateIndex]?.['label'] : ''; + set_task_state(taskState); + + // Get all available actions given current state + const taskActionsList = + feature.id_ != undefined ? 
environment.tasksStatus[findCorrectTaskStateIndex]?.['action'] : []; + set_list_of_task_actions(taskActionsList); } }, [projectTaskActivityList, taskId, feature]); const handleOnClick = async (event: React.MouseEvent) => { const btnId = event.currentTarget.dataset.btnid; if (!btnId) return; - const status = taskStatusEnum[btnId]; + const selectedAction = taskEventEnum[btnId]; const authDetailsCopy = authDetails != null ? { ...authDetails } : {}; - const geoStyle = geojsonStyles[btnId]; + if (btnId != undefined) { if (authDetailsCopy.hasOwnProperty('id')) { // if (btnId === 'MERGE_WITH_OSM') { @@ -97,18 +98,18 @@ export default function Dialog({ taskId, feature }: dialogPropType) { // return; // } await dispatch( - UpdateTaskStatus( - `${import.meta.env.VITE_API_URL}/tasks/${currentStatus?.id}/new-status/${status}`, + CreateTaskEvent( + `${import.meta.env.VITE_API_URL}/tasks/${selectedTask?.id}/event`, + selectedAction, currentProjectId, taskId.toString(), authDetailsCopy, { project_id: currentProjectId }, - geoStyle, - taskBoundaryData, + geojsonStyles, feature, ), ); - if (btnId === 'LOCKED_FOR_VALIDATION') + if (btnId === taskStateEnum.LOCKED_FOR_VALIDATION) navigate(`/project-submissions/${params.id}?tab=table&task_id=${taskId}`); } else { dispatch( @@ -159,7 +160,7 @@ export default function Dialog({ taskId, feature }: dialogPropType) { }} />