From fb3d0d6aea1b77bca1d0514f47265f17a2b342ce Mon Sep 17 00:00:00 2001 From: Sam <78538841+spwoodcock@users.noreply.github.com> Date: Mon, 11 Dec 2023 18:46:57 +0800 Subject: [PATCH] feat: integrate fmtm-splitter and remove splitting code in fmtm (#1037) * refactor: move xml.etree import to top of file, linting * build: remove unused qrcode & xmltodict deps * build: remove all >= pins, use == for specific deps * build: update pdm-pep517 version * build: remove upper bounds python pin (bad practice) * build: == pin test and docs dep groups * build: update latest osm-fieldwork, rawdata, splitter * build: lock group deps to latest * build: lock all dependency groups * refactor: migrate postgis splitting code to fmtm-splitter * build: bump fmtm-splitter 0.2.2 --> 0.2.3 * build: remove fmtm-splitter db tables from base schema * refactor: separate migration reversal scripts to dir * feat: add migration to remove fmtm-splitter db tables * refactor: remove fmtm-splitter tables from db_models * build: add optional fmtm-splitter & osm-rawdata mounts to dev * fix: more flexible json parsing for check_crs * refactor: use geojson.dump over json module * docs: add missing osm_id col from revert migration * feat: include split_by_square from fmtm-splitter, rename endpoint * refactor: remove create_task_grid unused func * feat: use fmtm-splitter split_by_sql for task splitting * refactor(frontend): replace isCustomDataExtract --> dataExtractFile * build: removed migration to delete fmtm-splitter tables * build: upgrade fmtm-splitter 0.2.3 --> 0.2.4 (drop_all fix) * build: use cp/rm over mv for ci img build * fix: do not delete project on boundary update fail * refactor: tasks_list --> task_list rename * refactor: get_tasks_list --> get_task_id_list rename * test: fix tests using run_in_threadpool --- .../building_clusters_of_5.sql | 148 --- .../Examples_and_tests/centroids.sql | 30 - .../Examples_and_tests/clean_and_simplify.sql | 44 - .../Examples_and_tests/clustering.sql | 21 - ...ve_hull_tasks_from_clustered_buildings.sql | 27 - .../count_building_tags.sql | 43 - ...lygons_with_no_features_into_neighbors.sql | 46 - .../Examples_and_tests/points_in_polygon.sql | 19 - .../Examples_and_tests/polygonize.sql | 10 - .../select_features_in_polygon.sql | 8 - .../split_aoi_by_osm_lines.sql | 26 - .../split_area_by_osm_lines.sql | 106 -- .../task_splitting_optimized.sql | 270 ------ .../Examples_and_tests/voronoi.sql | 15 - contrib/scripts/postgis_snippets/README.md | 3 - .../import_geojson_as_postgis_with_jsonb.md | 33 - .../postgis_snippets/postgis_resources.md | 16 - ...01_split_AOI_by_existing_line_features.sql | 106 -- ...it_02_count_buildings_for_subsplitting.sql | 106 -- .../fmtm-split_03_cluster_buildings.sql | 63 -- ...te_polygons_around_clustered_buildings.sql | 113 --- .../task_splitting_for_osm_buildings.sql | 309 ------ .../task_splitting_for_osm_roads.sql | 181 ---- .../task_splitting/task_splitting_readme.md | 74 -- docker-compose.yml | 2 + src/backend/Dockerfile | 6 +- src/backend/app/central/central_crud.py | 49 +- src/backend/app/db/db_models.py | 34 - src/backend/app/db/split_algorithm.sql | 284 ------ src/backend/app/projects/project_crud.py | 794 +++++---------- src/backend/app/projects/project_routes.py | 56 +- src/backend/app/submission/submission_crud.py | 4 +- .../app/submission/submission_routes.py | 2 +- src/backend/app/tasks/tasks_crud.py | 4 +- .../migrations/000-remove-user-password.sql | 12 - .../001-project-split-type-fields.sql | 31 - .../migrations/init/fmtm_base_schema.sql | 
156 --- .../revert/000-remove-user-password.sql | 7 + .../revert/001-project-split-type-fields.sql | 24 + src/backend/pdm.lock | 908 ++++++++++++------ src/backend/pyproject.toml | 77 +- src/backend/tests/test_projects_routes.py | 60 +- src/frontend/src/api/CreateProjectService.ts | 10 +- .../createnewproject/DataExtract.tsx | 4 +- .../createnewproject/SplitTasks.tsx | 15 +- .../components/createproject/DefineTasks.tsx | 8 +- .../editproject/UpdateProjectArea.tsx | 2 +- src/frontend/src/views/CreateNewProject.tsx | 2 + 48 files changed, 1064 insertions(+), 3304 deletions(-) delete mode 100644 contrib/scripts/postgis_snippets/Examples_and_tests/building_clusters_of_5.sql delete mode 100644 contrib/scripts/postgis_snippets/Examples_and_tests/centroids.sql delete mode 100644 contrib/scripts/postgis_snippets/Examples_and_tests/clean_and_simplify.sql delete mode 100644 contrib/scripts/postgis_snippets/Examples_and_tests/clustering.sql delete mode 100644 contrib/scripts/postgis_snippets/Examples_and_tests/concave_hull_tasks_from_clustered_buildings.sql delete mode 100644 contrib/scripts/postgis_snippets/Examples_and_tests/count_building_tags.sql delete mode 100644 contrib/scripts/postgis_snippets/Examples_and_tests/merge_polygons_with_no_features_into_neighbors.sql delete mode 100644 contrib/scripts/postgis_snippets/Examples_and_tests/points_in_polygon.sql delete mode 100644 contrib/scripts/postgis_snippets/Examples_and_tests/polygonize.sql delete mode 100644 contrib/scripts/postgis_snippets/Examples_and_tests/select_features_in_polygon.sql delete mode 100644 contrib/scripts/postgis_snippets/Examples_and_tests/split_aoi_by_osm_lines.sql delete mode 100644 contrib/scripts/postgis_snippets/Examples_and_tests/split_area_by_osm_lines.sql delete mode 100644 contrib/scripts/postgis_snippets/Examples_and_tests/task_splitting_optimized.sql delete mode 100644 contrib/scripts/postgis_snippets/Examples_and_tests/voronoi.sql delete mode 100644 contrib/scripts/postgis_snippets/README.md delete mode 100644 contrib/scripts/postgis_snippets/import_geojson_as_postgis_with_jsonb.md delete mode 100644 contrib/scripts/postgis_snippets/postgis_resources.md delete mode 100644 contrib/scripts/postgis_snippets/task_splitting/fmtm_task_splitting_for_buildings/fmtm-split_01_split_AOI_by_existing_line_features.sql delete mode 100644 contrib/scripts/postgis_snippets/task_splitting/fmtm_task_splitting_for_buildings/fmtm-split_02_count_buildings_for_subsplitting.sql delete mode 100644 contrib/scripts/postgis_snippets/task_splitting/fmtm_task_splitting_for_buildings/fmtm-split_03_cluster_buildings.sql delete mode 100644 contrib/scripts/postgis_snippets/task_splitting/fmtm_task_splitting_for_buildings/fmtm-split_04_create_polygons_around_clustered_buildings.sql delete mode 100644 contrib/scripts/postgis_snippets/task_splitting/task_splitting_for_osm_buildings.sql delete mode 100644 contrib/scripts/postgis_snippets/task_splitting/task_splitting_for_osm_roads.sql delete mode 100644 contrib/scripts/postgis_snippets/task_splitting/task_splitting_readme.md delete mode 100644 src/backend/app/db/split_algorithm.sql create mode 100644 src/backend/migrations/revert/000-remove-user-password.sql create mode 100644 src/backend/migrations/revert/001-project-split-type-fields.sql diff --git a/contrib/scripts/postgis_snippets/Examples_and_tests/building_clusters_of_5.sql b/contrib/scripts/postgis_snippets/Examples_and_tests/building_clusters_of_5.sql deleted file mode 100644 index 9c1f91248c..0000000000 --- 
a/contrib/scripts/postgis_snippets/Examples_and_tests/building_clusters_of_5.sql +++ /dev/null @@ -1,148 +0,0 @@ -/* -Licence: GPLv3 - -This script divides an layer of buildings into clusters suitable for field -mapping as part of the HOT Field Mapping Tasking Manager. - -It takes four inputs, all of which are tables in a Postgresql database with -the PostGIS extension enabled. The first three are PostGIS layers in -an EPSG:4326 projection. - -1) "project_aoi", a polygon layer with a single-feature Area of Interest -2) "ways_line", a line layer, usually from OpenStreetMap, covering the AOI -3) "ways_poly", a polygon layer, usually from OSM, covering the AOI -4) "project_config", a table with parameters for the splitting - - A desired number of features per task polygon - - A set of tags indicating what features should be included in the line - polygon layers - - Maybe other stuff. I don't know yet; I haven't implemented this table yet. - -TODO: implement the config table -*/ - --- The Area of Interest provided by the person creating the project -WITH aoi AS ( - SELECT * FROM "project-aoi" -) --- Extract all lines to be used as splitlines from a table of lines --- with the schema from Underpass (all tags as jsonb column called 'tags') --- TODO: add polygons (closed ways in OSM) with a 'highway' tag; --- some features such as roundabouts appear as polygons. --- TODO: add waterway polygons; now a beach doesn't show up as a splitline. --- TODO: these tags should come from another table rather than hardcoded --- so that they're easily configured during project creation. -,splitlines AS ( - SELECT ST_Intersection(a.geom, l.geom) AS geom - FROM aoi a, "ways_line" l - WHERE ST_Intersects(a.geom, l.geom) - -- TODO: these tags should come from a config table - -- All highways, waterways, and railways - AND (tags->>'highway' IS NOT NULL - OR tags->>'waterway' IS NOT NULL - OR tags->>'railway' IS NOT NULL - ) - -- A selection of highways, waterways, and all railways - /* - AND (tags->>'highway' = 'trunk' - OR tags->>'highway' = 'primary' - OR tags->>'highway' = 'secondary' - OR tags->>'highway' = 'tertiary' - OR tags->>'highway' = 'residential' - OR tags->>'highway' = 'unclassified' - OR tags->>'waterway' = 'river' - OR tags->>'waterway' = 'drain' - OR tags->>'railway' IS NOT NULL - ) - */ -) --- Merge all lines, necessary so that the polygonize function works later -,merged AS ( - SELECT ST_LineMerge(ST_Union(splitlines.geom)) AS geom - FROM splitlines -) --- Combine the boundary of the AOI with the splitlines --- First extract the Area of Interest boundary as a line -,boundary AS ( - SELECT ST_Boundary(geom) AS geom - FROM aoi -) --- Then combine it with the splitlines -,comb AS ( - SELECT ST_Union(boundary.geom, merged.geom) AS geom - FROM boundary, merged -) --- Create a polygon for each area enclosed by the splitlines -,splitpolysnoindex AS ( - SELECT (ST_Dump(ST_Polygonize(comb.geom))).geom as geom - FROM comb -) --- Add an index column to the split polygons --- Row numbers can function as temporary unique IDs for our new polygons -,splitpolygons AS( -SELECT row_number () over () as polyid, * -from splitpolysnoindex -) --- Grab the buildings. --- While we're at it, grab the ID of the polygon the buildings fall within. --- TODO: at the moment this uses building centroids. --- There's definitely a way to calculate which of several polygons the largest --- proportion of a building falls, that's what we should do instead. 
-,buildings AS ( - SELECT b.*, polys.polyid - FROM "ways_poly" b, splitpolygons polys - WHERE ST_Intersects(polys.geom, st_centroid(b.geom)) - AND b.tags->>'building' IS NOT NULL -) --- Count the building centroids in each polygon split by line features. -,polygonsfeaturecount AS ( - SELECT sp.polyid, sp.geom, count(b.geom) AS numfeatures - FROM "splitpolygons" sp - LEFT JOIN "buildings" b - ON sp.polyid = b.polyid - GROUP BY sp.polyid, sp.geom -) --- Filter out polygons with no features in them --- TODO: Merge the empty ones into their neighbors and replace the UIDs --- with consecutive integers for only the polygons with contents -,splitpolygonswithcontents AS ( - SELECT * - FROM polygonsfeaturecount pfc - WHERE pfc.numfeatures > 0 -) -/******************************************************************************* --- Uncomment this and stop here for split polygons before clustering -SELECT * FROM splitpolygonswithcontents -*******************************************************************************/ - --- Add the count of features in the splitpolygon each building belongs to --- to the buildings table; sets us up to be able to run the clustering. -,buildingswithcount AS ( - SELECT b.*, p.numfeatures - FROM buildings b - LEFT JOIN polygonsfeaturecount p - ON b.polyid = p.polyid -) --- Cluster the buildings within each splitpolygon. The second term in the --- call to the ST_ClusterKMeans function is the number of clusters to create, --- so we're dividing the number of features by a constant (10 in this case) --- to get the number of clusters required to get close to the right number --- of features per cluster. --- TODO: This should certainly not be a hardcoded, the number of features --- per cluster should come from a project configuration table -,buildingstocluster as ( - SELECT * FROM buildingswithcount bc - WHERE bc.numfeatures > 0 -) -,clusteredbuildingsnocombineduid AS ( -SELECT *, - ST_ClusterKMeans(geom, cast((b.numfeatures / 5) + 1 as integer)) - over (partition by polyid) as cid -FROM buildingstocluster b -) --- uid combining the id of the outer splitpolygon and inner cluster -,clusteredbuildings as ( - select *, - polyid::text || '-' || cid as clusteruid - from clusteredbuildingsnocombineduid -) -SELECT * FROM clusteredbuildings diff --git a/contrib/scripts/postgis_snippets/Examples_and_tests/centroids.sql b/contrib/scripts/postgis_snippets/Examples_and_tests/centroids.sql deleted file mode 100644 index 63aa58eb05..0000000000 --- a/contrib/scripts/postgis_snippets/Examples_and_tests/centroids.sql +++ /dev/null @@ -1,30 +0,0 @@ -/* -Several recipes for creating centroids from a layer of polygons. -Here assuming the polygons are called "buildings". 
-*/ - --- simple version (does not retain attributes) -/* -SELECT st_centroid(geom) as geom -FROM "osm_polygons" where building is not null; -*/ - --- composed version (also does not retain attributes) -/* -with buildings as ( - select * - from "OSM_polygons" - where building is not null - ) -select st_centroid(geom) as geom -from buildings; -*/ - --- composed version that retains attributes -with buildings as ( - select * - from "OSM_polygons" - where building is not null - ) -select *, st_centroid(geom) as centroid_geom -from buildings; diff --git a/contrib/scripts/postgis_snippets/Examples_and_tests/clean_and_simplify.sql b/contrib/scripts/postgis_snippets/Examples_and_tests/clean_and_simplify.sql deleted file mode 100644 index 0a89e799ee..0000000000 --- a/contrib/scripts/postgis_snippets/Examples_and_tests/clean_and_simplify.sql +++ /dev/null @@ -1,44 +0,0 @@ -/* -Task polygons made by creating and merging Voronoi polygons have lots of jagged edges. Simplifying them makes them both nicer to look at, and easier to render on a map. - -At the moment the algorithm is working, except that the dissolve step just before simplification only works using an external tool (see TODO below). -*/ - --- convert task polygon boundaries to linestrings -with rawlines as ( - select tp.clusteruid, st_boundary(tp.geom) as geom - from taskpolygons as tp -) --- Union, which eliminates duplicates from adjacent polygon boundaries -,unionlines as ( - select st_union(l.geom) as geom from rawlines l -) --- Dump, which gives unique segments. -,dumpedlinesegments as ( - select (st_dump(l.geom)).geom as geom - from unionlines l -) --- Dissolve segments into linestrings or multilinestrings for simplification - --- TODO: Using st_union, st_unaryunion, st_collect, st_node, st_linemerge --- and maybe a few others I've tried to dissolve the line segments --- appears to work, but the resulting multiline geometry fails to simplify. --- On the other hand, the QGIS Dissolve tool works, and produces multiline --- geometry that simplifies nicely. --- QGIS Multipart to Singleparts does something arguably even better: it --- unions all of the segments between intersections. -,dissolved as ( - select st_collect(l.geom) as geom from dumpedlinesegments l -) --- Simplify the line layer (using a tolerance in degrees to annoy Steve) --- (actually just because I haven't yet bothered to reproject) --- Cheating by loading an external layer because QGIS dissolve works. --- I'm loading the dumpedlinesegements to the QGIS canvas, dissolving them, --- and pulling that layer back into the DB as dissolvedfromdumpedlinesegments, --- which st_simplify appears happy with.
-,simplified as ( - select st_simplify(l.geom, 0.000075) - as geom from dissolvedfromdumpedlinesegements l -- import from QGIS -) --- Rehydrate the task areas into polygons after simplification -select (st_dump(st_polygonize(s.geom))).geom as geom from simplified s diff --git a/contrib/scripts/postgis_snippets/Examples_and_tests/clustering.sql b/contrib/scripts/postgis_snippets/Examples_and_tests/clustering.sql deleted file mode 100644 index f6ae65910a..0000000000 --- a/contrib/scripts/postgis_snippets/Examples_and_tests/clustering.sql +++ /dev/null @@ -1,21 +0,0 @@ -/* -Create a specified number of clusters from any geometry -(tested with points and polygons, no idea what it does with lines) -Simply adds a "cid" column to all features, with the same -value for all features in each cluster -*/ - -/* --- Version that does not retain attributes -select st_clusterkmeans(geom, 200) --creates 200 clusters -over () -as cid, geom -from features -*/ - --- This version retains attributes -select *, st_clusterkmeans(geom, 200) --creates 200 clusters -over () -as cid -from features - diff --git a/contrib/scripts/postgis_snippets/Examples_and_tests/concave_hull_tasks_from_clustered_buildings.sql b/contrib/scripts/postgis_snippets/Examples_and_tests/concave_hull_tasks_from_clustered_buildings.sql deleted file mode 100644 index 54252cb182..0000000000 --- a/contrib/scripts/postgis_snippets/Examples_and_tests/concave_hull_tasks_from_clustered_buildings.sql +++ /dev/null @@ -1,27 +0,0 @@ -WITH clusteredbuildings AS ( - select * from "clustered-buildings" -) - -,hulls AS( - -- Using a very high param_pctconvex value; smaller values often produce - -- self-intersections and crash. It seems that anything below 1 produces - -- something massively better than 1 (which is just a convex hull) but - -- no different (i.e. 0.99 produces the same thing as 0.9999), so - -- there's nothing to lose choosing a value a miniscule fraction less than 1. - select clb.polyid, clb.cid, clb.clusteruid, - ST_ConcaveHull(ST_Collect(clb.geom), 0.9999) as geom - from clusteredbuildings clb - group by clb.clusteruid, clb.polyid, clb.cid -) --- Now we need to: --- - Get rid of the overlapping areas of the hulls --- - Create intersections for the hulls so all overlapping bits are separated --- - Check what's inside of the overlapping bits --- - If it's only stuff belonging to one of the original hulls, give that --- bit to the hull it belongs to --- - If the overlapping are contains stuff belonging to more than one hull, --- somehow split the overlapping bit such that each piece only contains --- stuff from one or another parent hull. Then merge them back. --- - Do something Voronoi-esque to expand the hulls until they tile the --- entire AOI, creating task polygons with no gaps -select * from hulls diff --git a/contrib/scripts/postgis_snippets/Examples_and_tests/count_building_tags.sql b/contrib/scripts/postgis_snippets/Examples_and_tests/count_building_tags.sql deleted file mode 100644 index 73283ad4fb..0000000000 --- a/contrib/scripts/postgis_snippets/Examples_and_tests/count_building_tags.sql +++ /dev/null @@ -1,43 +0,0 @@ -/* -Selects every building within a set of OSM polygons that has a tag, any tag, -in addition to 'building'. - -This is a very rough proxy for buildings that have been field mapped. It's -rough because many buildings have tags other than 'building' that have -only been remotely mapped. Still, it's useful because most buildings that -have only been digitized in JOSM/ID only have the tag 'building'='yes'. 
-This filters all of those out. - -Works on OSM extracts converted to PostGIS layers using the Underpass -database schema: - -https://github.com/hotosm/underpass/blob/master/utils/raw.lua - -which converts all tags into a jsonb object in a 'tags' column. - -Probably useful at some point to punt tags that don't help -identify field mapping, such as 'source' (usually just a reference -to the imagery used to digitize) -*/ - - -with tagsarrayed as -( -select -*, -array(select jsonb_object_keys(tags)) as keys -from "ways_poly" -where tags->>'building' is not null -), -tagscounted as -( -select *, array_length(keys, 1) as numkeys -from tagsarrayed -)/*, -tagsignored as -( -select * from () -)*/ -select * -from tagscounted -where numkeys > 1 diff --git a/contrib/scripts/postgis_snippets/Examples_and_tests/merge_polygons_with_no_features_into_neighbors.sql b/contrib/scripts/postgis_snippets/Examples_and_tests/merge_polygons_with_no_features_into_neighbors.sql deleted file mode 100644 index a73c190f34..0000000000 --- a/contrib/scripts/postgis_snippets/Examples_and_tests/merge_polygons_with_no_features_into_neighbors.sql +++ /dev/null @@ -1,46 +0,0 @@ -/* -Licence: GPLv3 - -This script divides an layer of buildings into clusters suitable for field -mapping as part of the HOT Field Mapping Tasking Manager. -*/ - --- Grab a reference to all of the polygons with area (for sorting) -allpolys AS ( - SELECT *, st_area(p.geom) AS area - FROM "splitpolygons" AS p -), --- Grab the areas with fewer than the requisite number of features -with lowfeaturecountpolys as ( - select * - from allpolys as p - -- TODO: feature count should not be hard-coded - where p.numfeatures < 5 -), - --- Find the neighbors of the low-feature-count polygons --- Store their ids as n_polyid, numfeatures as n_numfeatures, etc -allneighborlist as ( - select p.*, - pf.polyid as n_polyid, - pf.area as n_area, - p.numfeatures as n_numfeatures, - -- length of shared boundary to make nice merge decisions - st_length2d(st_intersection(p.geom, pf.geom)) as sharedbound - from lowfeaturecountpolys as p - inner join allpolys as pf - -- Anything that touches - on st_touches(p.geom, pf.geom) - -- But eliminate those whose intersection is a point, because - -- polygons that only touch at a corner shouldn't be merged - and st_geometrytype(st_intersection(p.geom, pf.geom)) != 'ST_Point' - -- Sort first by polyid of the low-feature-count polygons - -- Then by descending featurecount and area of the - -- high-feature-count neighbors (area is in case of equal - -- featurecounts, we'll just pick the biggest to add to) - order by p.polyid, p.numfeatures desc, pf.area desc - -- OR, maybe for more aesthetic merges: - -- order by p.polyid, sharedbound desc -) - -select distinct on (a.polyid) * from allneighborlist as a diff --git a/contrib/scripts/postgis_snippets/Examples_and_tests/points_in_polygon.sql b/contrib/scripts/postgis_snippets/Examples_and_tests/points_in_polygon.sql deleted file mode 100644 index f9c0f35136..0000000000 --- a/contrib/scripts/postgis_snippets/Examples_and_tests/points_in_polygon.sql +++ /dev/null @@ -1,19 +0,0 @@ -/* -Takes a layer of points and a layer of polygons. -Counts the number of points in each polygon. 
-*/ - -select poly._uid_, poly.geom, count(cent.geom) as numpoints -from islingtonsplitpolygons poly -left join islingtonbuildingcentroids cent -on st_contains(poly.geom,cent.geom) -group by poly._uid_ - --- Count with intersect instead -/* -select sp.polyid, sp.geom, count(b.geom) as numfeatures -from "splitpolys" sp -left join "buildings" b -on st_intersects(sp.geom,b.geom) -group by sp.polyid, sp.geom -*/ diff --git a/contrib/scripts/postgis_snippets/Examples_and_tests/polygonize.sql b/contrib/scripts/postgis_snippets/Examples_and_tests/polygonize.sql deleted file mode 100644 index f9387a2628..0000000000 --- a/contrib/scripts/postgis_snippets/Examples_and_tests/polygonize.sql +++ /dev/null @@ -1,10 +0,0 @@ -/* -Takes a bunch of lines (for example roads, waterways, etc from OSM) -and converts every area enclosed by those lines. -*/ -SELECT (ST_Dump(ST_Polygonize(ST_Node(multi_geom)))).geom -FROM ( - SELECT ST_Collect(geom) AS multi_geom - FROM osmlines -) q -; diff --git a/contrib/scripts/postgis_snippets/Examples_and_tests/select_features_in_polygon.sql b/contrib/scripts/postgis_snippets/Examples_and_tests/select_features_in_polygon.sql deleted file mode 100644 index b07bcfa8ca..0000000000 --- a/contrib/scripts/postgis_snippets/Examples_and_tests/select_features_in_polygon.sql +++ /dev/null @@ -1,8 +0,0 @@ -/* -Selects all features (points, lines, or polygons) -for which any portion falls within a given polygon. -*/ - -select features.* -from "AOI_Polygon" poly, "OSM_features" features -where st_intersects(features.geom, poly.geom) diff --git a/contrib/scripts/postgis_snippets/Examples_and_tests/split_aoi_by_osm_lines.sql b/contrib/scripts/postgis_snippets/Examples_and_tests/split_aoi_by_osm_lines.sql deleted file mode 100644 index 22fa94398d..0000000000 --- a/contrib/scripts/postgis_snippets/Examples_and_tests/split_aoi_by_osm_lines.sql +++ /dev/null @@ -1,26 +0,0 @@ -/* -This incorporates a number of the other scripts into one that accepts: -- A layer of OSM lines from osm2pgsql* -- A single AOI polygon. - -It splits the AOI into multiple polygons based on the roads, waterways, and -railways contained in the OSM line layer. - -* It is important to use a line layer from osm2pgsql, because this script -assumes that the lines have tags as JSON blobs in a single column called tags. -*/ - -with splitlines as( - select lines.* - from "AOI" poly, ways_line lines - where st_intersects(lines.geom, poly.geom) - and (tags->>'highway' is not null - or tags->>'waterway' is not null - or tags->>'railway' is not null) -) - - SELECT (ST_Dump(ST_Polygonize(ST_Node(multi_geom)))).geom - FROM ( - SELECT ST_Collect(s.geom) AS multi_geom - FROM splitlines s -) splitpolys; diff --git a/contrib/scripts/postgis_snippets/Examples_and_tests/split_area_by_osm_lines.sql b/contrib/scripts/postgis_snippets/Examples_and_tests/split_area_by_osm_lines.sql deleted file mode 100644 index cb1a6092b0..0000000000 --- a/contrib/scripts/postgis_snippets/Examples_and_tests/split_area_by_osm_lines.sql +++ /dev/null @@ -1,106 +0,0 @@ - /* -Licence: GPLv3 - -This script divides an area into areas delimited by roads, waterways, -and railways as part of the HOT Field Mapping Tasking Manager. 
- -*/ - --- The Area of Interest provided by the person creating the project -WITH aoi AS ( - SELECT * FROM "project-aoi" -) --- Extract all lines to be used as splitlines from a table of lines --- with the schema from Underpass (all tags as jsonb column called 'tags') --- TODO: add polygons (closed ways in OSM) with a 'highway' tag; --- some features such as roundabouts appear as polygons. --- TODO: add waterway polygons; now a beach doesn't show up as a splitline. --- TODO: these tags should come from another table rather than hardcoded --- so that they're easily configured during project creation. -,splitlines AS ( - SELECT ST_Intersection(a.geom, l.geom) AS geom - FROM aoi a, "ways_line" l - WHERE ST_Intersects(a.geom, l.geom) - -- TODO: these tags should come from a config table - -- All highways, waterways, and railways - AND (tags->>'highway' IS NOT NULL - OR tags->>'waterway' IS NOT NULL - OR tags->>'railway' IS NOT NULL - ) - -- A selection of highways, waterways, and all railways - /* - AND (tags->>'highway' = 'trunk' - OR tags->>'highway' = 'primary' - OR tags->>'highway' = 'secondary' - OR tags->>'highway' = 'tertiary' - OR tags->>'highway' = 'residential' - OR tags->>'highway' = 'unclassified' - OR tags->>'waterway' = 'river' - OR tags->>'waterway' = 'drain' - OR tags->>'railway' IS NOT NULL - ) - */ -) --- Merge all lines, necessary so that the polygonize function works later -,merged AS ( - SELECT ST_LineMerge(ST_Union(splitlines.geom)) AS geom - FROM splitlines -) --- Combine the boundary of the AOI with the splitlines --- First extract the Area of Interest boundary as a line -,boundary AS ( - SELECT ST_Boundary(geom) AS geom - FROM aoi -) --- Then combine it with the splitlines -,comb AS ( - SELECT ST_Union(boundary.geom, merged.geom) AS geom - FROM boundary, merged -) --- Create a polygon for each area enclosed by the splitlines -,splitpolysnoindex AS ( - SELECT (ST_Dump(ST_Polygonize(comb.geom))).geom as geom - FROM comb -) --- Add an index column to the split polygons --- Row numbers can function as temporary unique IDs for our new polygons -,splitpolygons AS( -SELECT row_number () over () as polyid, * -from splitpolysnoindex -) --- Grab the buildings. --- While we're at it, grab the ID of the polygon the buildings fall within. --- TODO: at the moment this uses ST_Intersects, which is fine except when --- buildings cross polygon boundaries (which definitely happens in OSM). --- In that case, the building should probably be placed in the polygon --- where the largest proportion of its area falls. At the moment it --- duplicates the building in 2 polygons, which is bad! --- There's definitely a way to calculate which of several polygons the largest --- proportion of a building falls, that's what we should do. --- Doing it as a left join would also be better. --- Using the intersection of the centroid would also avoid duplication, --- but sometimes causes weird placements. -,buildings AS ( - SELECT b.*, polys.polyid - FROM "ways_poly" b, splitpolygons polys - WHERE ST_Intersects(polys.geom, b.geom) - AND b.tags->>'building' IS NOT NULL -) --- Count the features in each polygon split by line features. 
-,polygonsfeaturecount AS ( - SELECT sp.polyid, sp.geom, count(b.geom) AS numfeatures - FROM "splitpolygons" sp - LEFT JOIN "buildings" b - ON sp.polyid = b.polyid - GROUP BY sp.polyid, sp.geom -) --- Filter out polygons with no features in them --- TODO: Merge the empty ones into their neighbors and replace the UIDs --- with consecutive integers for only the polygons with contents -,splitpolygonswithcontents AS ( - SELECT * - FROM polygonsfeaturecount pfc - WHERE pfc.numfeatures > 0 -) - -SELECT * FROM splitpolygonswithcontents diff --git a/contrib/scripts/postgis_snippets/Examples_and_tests/task_splitting_optimized.sql b/contrib/scripts/postgis_snippets/Examples_and_tests/task_splitting_optimized.sql deleted file mode 100644 index 1e9a598d03..0000000000 --- a/contrib/scripts/postgis_snippets/Examples_and_tests/task_splitting_optimized.sql +++ /dev/null @@ -1,270 +0,0 @@ - /* -Licence: GPLv3 -*/ - ---*************************Split by OSM lines*********************** --- Nuke whatever was there before -DROP TABLE IF EXISTS polygonsnocount; --- Create a new polygon layer of splits by lines -CREATE TABLE polygonsnocount AS ( - -- The Area of Interest provided by the person creating the project - WITH aoi AS ( - SELECT * FROM "project-aoi" - ) - -- Extract all lines to be used as splitlines from a table of lines - -- with the schema from Underpass (all tags as jsonb column called 'tags') - -- TODO: add polygons (closed ways in OSM) with a 'highway' tag; - -- some features such as roundabouts appear as polygons. - -- TODO: add waterway polygons; now a beach doesn't show up as a splitline. - -- TODO: these tags should come from another table rather than hardcoded - -- so that they're easily configured during project creation. - ,splitlines AS ( - SELECT ST_Intersection(a.geom, l.geom) AS geom - FROM aoi a, "ways_line" l - WHERE ST_Intersects(a.geom, l.geom) - -- TODO: these tags should come from a config table - -- All highways, waterways, and railways - AND (tags->>'highway' IS NOT NULL - OR tags->>'waterway' IS NOT NULL - OR tags->>'railway' IS NOT NULL - ) - ) - -- Merge all lines, necessary so that the polygonize function works later - ,merged AS ( - SELECT ST_LineMerge(ST_Union(splitlines.geom)) AS geom - FROM splitlines - ) - -- Combine the boundary of the AOI with the splitlines - -- First extract the Area of Interest boundary as a line - ,boundary AS ( - SELECT ST_Boundary(geom) AS geom - FROM aoi - ) - -- Then combine it with the splitlines - ,comb AS ( - SELECT ST_Union(boundary.geom, merged.geom) AS geom - FROM boundary, merged - ) - -- TODO add closed ways from OSM to lines (roundabouts etc) - -- Create a polygon for each area enclosed by the splitlines - ,splitpolysnoindex AS ( - SELECT (ST_Dump(ST_Polygonize(comb.geom))).geom as geom - FROM comb - ) - -- Add an index column to the split polygons - ,splitpolygons AS( - SELECT - row_number () over () as polyid, - ST_Transform(spni.geom,4326)::geography AS geog, - spni.* - from splitpolysnoindex spni - ) - SELECT * FROM splitpolygons -); --- Make that index column a primary key -ALTER TABLE polygonsnocount ADD PRIMARY KEY(polyid); --- Properly register geometry column (makes QGIS happy) -SELECT Populate_Geometry_Columns('public.polygonsnocount'::regclass); --- Add a spatial index (vastly improves performance for a lot of operations) -CREATE INDEX polygonsnocount_idx - ON polygonsnocount - USING GIST (geom); --- Clean up the table which may have gaps and stuff from spatial indexing -VACUUM ANALYZE polygonsnocount; - --- 
************************Grab the buildings************************** --- While we're at it, grab the ID of the polygon the buildings fall within. --- TODO add outer rings of buildings from relations table of OSM export -DROP TABLE IF EXISTS buildings; -CREATE TABLE buildings AS ( - SELECT b.*, polys.polyid - FROM "ways_poly" b, polygonsnocount polys - WHERE ST_Intersects(polys.geom, ST_Centroid(b.geom)) - AND b.tags->>'building' IS NOT NULL -); -ALTER TABLE buildings ADD PRIMARY KEY(osm_id); --- Properly register geometry column (makes QGIS happy) -SELECT Populate_Geometry_Columns('public.buildings'::regclass); --- Add a spatial index (vastly improves performance for a lot of operations) -CREATE INDEX buildings_idx - ON buildings - USING GIST (geom); --- Clean up the table which may have gaps and stuff from spatial indexing -VACUUM ANALYZE buildings; - ---**************************Count features in polygons***************** -DROP TABLE IF EXISTS splitpolygons; -CREATE TABLE splitpolygons AS ( - WITH polygonsfeaturecount AS ( - SELECT sp.polyid, - sp.geom, - sp.geog, - count(b.geom) AS numfeatures, - ST_Area(sp.geog) AS area - FROM polygonsnocount sp - LEFT JOIN "buildings" b - ON sp.polyid = b.polyid - GROUP BY sp.polyid, sp.geom - ) - SELECT * from polygonsfeaturecount -); -ALTER TABLE splitpolygons ADD PRIMARY KEY(polyid); -SELECT Populate_Geometry_Columns('public.splitpolygons'::regclass); -CREATE INDEX splitpolygons_idx - ON splitpolygons - USING GIST (geom); -VACUUM ANALYZE splitpolygons; - -DROP TABLE polygonsnocount; - -DROP TABLE IF EXISTS lowfeaturecountpolygons; -CREATE TABLE lowfeaturecountpolygons AS ( - -- Grab the polygons with fewer than the requisite number of features - with lowfeaturecountpolys as ( - select * - from splitpolygons as p - -- TODO: feature count should not be hard-coded - where p.numfeatures < 20 - ), - -- Find the neighbors of the low-feature-count polygons - -- Store their ids as n_polyid, numfeatures as n_numfeatures, etc - allneighborlist as ( - select p.*, - pf.polyid as n_polyid, - pf.area as n_area, - p.numfeatures as n_numfeatures, - -- length of shared boundary to make nice merge decisions - st_length2d(st_intersection(p.geom, pf.geom)) as sharedbound - from lowfeaturecountpolys as p - inner join splitpolygons as pf - -- Anything that touches - on st_touches(p.geom, pf.geom) - -- But eliminate those whose intersection is a point, because - -- polygons that only touch at a corner shouldn't be merged - and st_geometrytype(st_intersection(p.geom, pf.geom)) != 'ST_Point' - -- Sort first by polyid of the low-feature-count polygons - -- Then by descending featurecount and area of the - -- high-feature-count neighbors (area is in case of equal - -- featurecounts, we'll just pick the biggest to add to) - order by p.polyid, p.numfeatures desc, pf.area desc - -- OR, maybe for more aesthetic merges: - -- order by p.polyid, sharedbound desc - ) - select distinct on (a.polyid) * from allneighborlist as a -); -ALTER TABLE lowfeaturecountpolygons ADD PRIMARY KEY(polyid); -SELECT Populate_Geometry_Columns('public.lowfeaturecountpolygons'::regclass); -CREATE INDEX lowfeaturecountpolygons_idx - ON lowfeaturecountpolygons - USING GIST (geom); -VACUUM ANALYZE lowfeaturecountpolygons; - ---****************Merge low feature count polygons with neighbors******* - - - ---****************Cluster buildings************************************* -DROP TABLE IF EXISTS clusteredbuildings; -CREATE TABLE clusteredbuildings AS ( - WITH splitpolygonswithcontents AS ( - SELECT * - 
FROM splitpolygons sp - WHERE sp.numfeatures > 0 - ) - -- Add the count of features in the splitpolygon each building belongs to - -- to the buildings table; sets us up to be able to run the clustering. - ,buildingswithcount AS ( - SELECT b.*, p.numfeatures - FROM buildings b - LEFT JOIN splitpolygons p - ON b.polyid = p.polyid - ) - -- Cluster the buildings within each splitpolygon. The second term in the - -- call to the ST_ClusterKMeans function is the number of clusters to create, - -- so we're dividing the number of features by a constant (10 in this case) - -- to get the number of clusters required to get close to the right number - -- of features per cluster. - -- TODO: This should certainly not be a hardcoded, the number of features - -- per cluster should come from a project configuration table - ,buildingstocluster as ( - SELECT * FROM buildingswithcount bc - WHERE bc.numfeatures > 0 - ) - ,clusteredbuildingsnocombineduid AS ( - SELECT *, - ST_ClusterKMeans(geom, cast((b.numfeatures / 20) + 1 as integer)) - over (partition by polyid) as cid - FROM buildingstocluster b - ) - -- uid combining the id of the outer splitpolygon and inner cluster - ,clusteredbuildings as ( - select *, - polyid::text || '-' || cid as clusteruid - from clusteredbuildingsnocombineduid - ) - SELECT * FROM clusteredbuildings -); -ALTER TABLE clusteredbuildings ADD PRIMARY KEY(osm_id); -SELECT Populate_Geometry_Columns('public.clusteredbuildings'::regclass); -CREATE INDEX clusteredbuildings_idx - ON clusteredbuildings - USING GIST (geom); -VACUUM ANALYZE clusteredbuildings; - ---*****************Densified dumped building nodes****************** -DROP TABLE IF EXISTS dumpedpoints; -CREATE TABLE dumpedpoints AS ( - SELECT cb.osm_id, cb.polyid, cb.cid, cb.clusteruid, - -- POSSIBLE BUG: PostGIS' Voronoi implementation seems to panic - -- with segments less than 0.00004 degrees. - -- Should probably use geography instead of geometry - (st_dumppoints(ST_Segmentize(geom, 0.00004))).geom - FROM clusteredbuildings cb -); -SELECT Populate_Geometry_Columns('public.dumpedpoints'::regclass); -CREATE INDEX dumpedpoints_idx - ON dumpedpoints - USING GIST (geom); -VACUUM ANALYZE dumpedpoints; - ---*******************voronoia**************************************** -DROP TABLE IF EXISTS voronoids; -CREATE TABLE voronoids AS ( - SELECT - st_intersection((ST_Dump(ST_VoronoiPolygons( - ST_Collect(points.geom) - ))).geom, - sp.geom) as geom - FROM dumpedpoints as points, - splitpolygons as sp - where st_contains(sp.geom, points.geom) - group by sp.geom -); -CREATE INDEX voronoids_idx - ON voronoids - USING GIST (geom); -VACUUM ANALYZE voronoids; - -DROP TABLE IF EXISTS voronois; -CREATE TABLE voronois AS ( - SELECT p.clusteruid, v.geom - FROM voronoids v, dumpedpoints p - WHERE st_within(p.geom, v.geom) -); -CREATE INDEX voronois_idx - ON voronois - USING GIST (geom); -VACUUM ANALYZE voronois; -DROP TABLE voronoids; - -DROP TABLE IF EXISTS taskpolygons; -CREATE TABLE taskpolygons AS ( - SELECT ST_Union(geom) as geom, clusteruid - FROM voronois - GROUP BY clusteruid -); -CREATE INDEX taskpolygons_idx - ON taskpolygons - USING GIST (geom); -VACUUM ANALYZE taskpolygons; - diff --git a/contrib/scripts/postgis_snippets/Examples_and_tests/voronoi.sql b/contrib/scripts/postgis_snippets/Examples_and_tests/voronoi.sql deleted file mode 100644 index 382aabcf3d..0000000000 --- a/contrib/scripts/postgis_snippets/Examples_and_tests/voronoi.sql +++ /dev/null @@ -1,15 +0,0 @@ -/* -Creates Voronoi polygons from a layer of points. 
From inside to out: -- Collects the points layer into a single multipoint geometry (st_collect) -- Creates voronoi polygons (st_voronoipolygons) -- Dumps them to separate features (st_dump) - -Caution: Does NOT retain the original UIDs of the points. You get a number of -Voronoi polygons equal to the original number of points, but the IDs of the -points don't match those of the polygons they fall within. -*/ - -with voronoi (v) as - (select st_dump(st_voronoipolygons(st_collect(geom))) - as geom from points) -select (v).geom from voronoi; diff --git a/contrib/scripts/postgis_snippets/README.md b/contrib/scripts/postgis_snippets/README.md deleted file mode 100644 index bd0339bf3f..0000000000 --- a/contrib/scripts/postgis_snippets/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# PostGIS Snippets - -Various useful PostGIS SQL commands contributed by @ivangayton. diff --git a/contrib/scripts/postgis_snippets/import_geojson_as_postgis_with_jsonb.md b/contrib/scripts/postgis_snippets/import_geojson_as_postgis_with_jsonb.md deleted file mode 100644 index 625e33e564..0000000000 --- a/contrib/scripts/postgis_snippets/import_geojson_as_postgis_with_jsonb.md +++ /dev/null @@ -1,33 +0,0 @@ -# Deprecated; now using pg_dump - -Import the GeoJSON file to PostGIS. To function in the same way as a layer directly imported from OSM using osm2psql, the `tags` column needs to be jsonb type. - -There probably is a simple way to combine changing the column type and casting the json string to the actual jsonb type, but I don't know how to do it. So here's the workaround: - -- Rename the tags column to tagsvarchar - -``` -alter table "Islington_AOI_polygons" -rename column tags to tagsvarchar; -``` - -- create a new tags column with the correct type - -``` -alter table "Islington_AOI_polygons" -add column tags jsonb -``` - -- Cast the json strings to jsonb and copy them over - -``` -update "Islington_AOI_polygons" -set tags = tagsvarchar::jsonb -``` - -- Nuke the renamed column with the varchar, leaving only the `tags` column with the jsonb type - -``` -alter table "Islington_AOI_polygons" -drop column tagsvarchar -``` diff --git a/contrib/scripts/postgis_snippets/postgis_resources.md b/contrib/scripts/postgis_snippets/postgis_resources.md deleted file mode 100644 index fd309cbe46..0000000000 --- a/contrib/scripts/postgis_snippets/postgis_resources.md +++ /dev/null @@ -1,16 +0,0 @@ -# PostGIS resources - -## Paul Ramsey or CleverElephant - -Paul Ramsey is a Canadian open source geographical analyst and developer. [His blog](https://blog.cleverelephant.ca/) (which is often _very_ technical) is a gold mine of information on many subjects, notably PostGIS. - -- [Overlays of polygons](https://blog.cleverelephant.ca/2019/07/postgis-overlays.html) - -## Matt Forest's Spatial SQL Cookbook - -- Intended for someone who's already an experienced GIS user and wants to transfer their knowledge to SQL. [Lots of clear, useful recipes](https://forrest.nyc/spatial-sql-cookbook/). 
- -## Random - -- [A nice Stack Exchange](https://gis.stackexchange.com/questions/172198/constructing-voronoi-diagram-in-postgis/174219#174219) on Voronoi Polygons -- [Best answer ever](https://stackoverflow.com/questions/49531535/pass-fields-when-applying-st-voronoipolygons-and-clip-output) on how to subdivide existing areas with Voronoi polygons diff --git a/contrib/scripts/postgis_snippets/task_splitting/fmtm_task_splitting_for_buildings/fmtm-split_01_split_AOI_by_existing_line_features.sql b/contrib/scripts/postgis_snippets/task_splitting/fmtm_task_splitting_for_buildings/fmtm-split_01_split_AOI_by_existing_line_features.sql deleted file mode 100644 index 01c05cd07e..0000000000 --- a/contrib/scripts/postgis_snippets/task_splitting/fmtm_task_splitting_for_buildings/fmtm-split_01_split_AOI_by_existing_line_features.sql +++ /dev/null @@ -1,106 +0,0 @@ - /* -Licence: GPLv3 -Part of the HOT Field Mapping Tasking Manager (FMTM) - -Part 01 of FMTM task splitting. - -This script splits an Area of Interest into polygons based on OpenStreetMap lines (roads, waterways, and railways). It doesn't take features into account. - -It takes three inputs, all PostGIS layers: -1) A polygon layer called project-aoi -2) A line layer formatted like OSM extracts with the Underpass schema (tags in a jsonb object) called ways_line -3) A polygon layer formatted like OSM extracts with the Underpass schema (tags in a jsonb object) called ways_poly - -It outputs a single PostGIS polygon layer called polygonsnocount (the "nocount" bit refers to the fact that at this stage we haven't determined if there's anything inside of these polygons; soon we'll count the features to be mapped within them, normally to split further). This layer should be considered a temporary product, but is the input to the next stage of the splitting algorithm. - -More information in the adjacent file task_splitting_readme.md. -*/ - -/* -***************************PARAMETERS FOR ROB************************ -- At the moment I split on anything with a highway, waterway, or railway tag of any kind. We'll probably want to allow users to configure which lines they actually use as splitting boundaries. -- At the moment it always uses the AOI polygon itself as a splitline. That should be optional. -*/ - --- If this table already exists, clobber it -DROP TABLE IF EXISTS polygonsnocount; --- Create a new polygon layer of splits by lines -CREATE TABLE polygonsnocount AS ( - -- The Area of Interest provided by the person creating the project - WITH aoi AS ( - SELECT * FROM "project-aoi" - ) - -- Extract all lines to be used as splitlines from a table of lines - -- with the schema from Underpass (all tags as jsonb column called 'tags') - -- TODO: add waterway polygons; now a beach doesn't show up as a splitline. - -- TODO: these tags should come from another table rather than hardcoded - -- so that they're easily configured during project creation. 
- ,splitlines AS ( - SELECT ST_Intersection(a.geom, l.geom) AS geom - FROM aoi a, "ways_line" l - WHERE ST_Intersects(a.geom, l.geom) - -- TODO: these tags should come from a config table - -- All highways, waterways, and railways - AND (tags->>'highway' IS NOT NULL - OR tags->>'waterway' IS NOT NULL - OR tags->>'railway' IS NOT NULL - ) - ) - -- Merge all lines, necessary so that the polygonize function works later - ,partlymerged AS ( - SELECT ST_LineMerge(ST_Union(splitlines.geom)) AS geom - FROM splitlines - ) - -- Add closed ways (polygons) that are actually roads (like roundabouts) - ,polyroads AS ( - SELECT ST_Boundary(wp.geom) AS geom - FROM aoi a, "ways_poly" wp - WHERE ST_Intersects(a.geom, wp.geom) - AND tags->>'highway' IS NOT NULL - ) - -- Merge all the lines from closed ways - ,prmerged AS ( - SELECT ST_LineMerge(ST_Union(polyroads.geom)) AS geom - from polyroads - ) - -- Add them to the merged lines from the open ways - ,merged AS ( - SELECT ST_Union(partlymerged.geom, prmerged.geom) AS geom - FROM partlymerged, prmerged - ) - -- Combine the boundary of the AOI with the splitlines - -- First extract the Area of Interest boundary as a line - ,boundary AS ( - SELECT ST_Boundary(geom) AS geom - FROM aoi - ) - -- Then combine it with the splitlines - ,comb AS ( - SELECT ST_Union(boundary.geom, merged.geom) AS geom - FROM boundary, merged - ) - -- Create a polygon for each area enclosed by the splitlines - ,splitpolysnoindex AS ( - SELECT (ST_Dump(ST_Polygonize(comb.geom))).geom as geom - FROM comb - ) - -- Add an index column to the split polygons - ,splitpolygons AS( - SELECT - row_number () over () as polyid, - ST_Transform(spni.geom,4326)::geography AS geog, - spni.* - from splitpolysnoindex spni - ) - SELECT * FROM splitpolygons -); --- Make that index column a primary key -ALTER TABLE polygonsnocount ADD PRIMARY KEY(polyid); --- Properly register geometry column (makes QGIS happy) -SELECT Populate_Geometry_Columns('public.polygonsnocount'::regclass); --- Add a spatial index (vastly improves performance for a lot of operations) -CREATE INDEX polygonsnocount_idx - ON polygonsnocount - USING GIST (geom); --- Clean up the table which may have gaps and stuff from spatial indexing -VACUUM ANALYZE polygonsnocount; diff --git a/contrib/scripts/postgis_snippets/task_splitting/fmtm_task_splitting_for_buildings/fmtm-split_02_count_buildings_for_subsplitting.sql b/contrib/scripts/postgis_snippets/task_splitting/fmtm_task_splitting_for_buildings/fmtm-split_02_count_buildings_for_subsplitting.sql deleted file mode 100644 index 58bf15cd88..0000000000 --- a/contrib/scripts/postgis_snippets/task_splitting/fmtm_task_splitting_for_buildings/fmtm-split_02_count_buildings_for_subsplitting.sql +++ /dev/null @@ -1,106 +0,0 @@ - /* -Licence: GPLv3 -Part of the HOT Field Mapping Tasking Manager (FMTM) - -Inputs: -- Polygon layer polygonsnocount from previous fmtm-split step -- Polygon layer ways_poly formatted like OSM data from Underpass (tags in jsonb object) - -Outputs: -- Polygon layer buildings (OSM buildings with original tags plus the id of the polygon they are found in) -- Polygon layer splitpolygons (the polygons from the previous step, but now with a column with the count of the buildings found within each polygon) -- Polygon layer lowfeaturecountpolygons (All of the polygons with less than a specified number of buildings within them. These are also present in the splitpolygons layer; the idea is to merge them with neighbors, not implemented yet). 
-*/ - -/* -**************************PARAMETERS FOR ROB*************************** -- Line 70: The number of buildings desired per task (for now determines which polygons get added to the lowfeaturecountpolygons layer). -*/ - --- Grab the buildings --- While we're at it, grab the ID of the polygon the buildings fall within. --- TODO add outer rings of buildings from relations table of OSM export -DROP TABLE IF EXISTS buildings; -CREATE TABLE buildings AS ( - SELECT b.*, polys.polyid - FROM "ways_poly" b, polygonsnocount polys - WHERE ST_Intersects(polys.geom, ST_Centroid(b.geom)) - AND b.tags->>'building' IS NOT NULL -); -ALTER TABLE buildings ADD PRIMARY KEY(osm_id); --- Properly register geometry column (makes QGIS happy) -SELECT Populate_Geometry_Columns('public.buildings'::regclass); --- Add a spatial index (vastly improves performance for a lot of operations) -CREATE INDEX buildings_idx - ON buildings - USING GIST (geom); --- Clean up the table which may have gaps and stuff from spatial indexing -VACUUM ANALYZE buildings; - ---**************************Count features in polygons***************** -DROP TABLE IF EXISTS splitpolygons; -CREATE TABLE splitpolygons AS ( - WITH polygonsfeaturecount AS ( - SELECT sp.polyid, - sp.geom, - sp.geog, - count(b.geom) AS numfeatures, - ST_Area(sp.geog) AS area - FROM polygonsnocount sp - LEFT JOIN "buildings" b - ON sp.polyid = b.polyid - GROUP BY sp.polyid, sp.geom - ) - SELECT * from polygonsfeaturecount -); -ALTER TABLE splitpolygons ADD PRIMARY KEY(polyid); -SELECT Populate_Geometry_Columns('public.splitpolygons'::regclass); -CREATE INDEX splitpolygons_idx - ON splitpolygons - USING GIST (geom); -VACUUM ANALYZE splitpolygons; - -DROP TABLE IF EXISTS lowfeaturecountpolygons; -CREATE TABLE lowfeaturecountpolygons AS ( - -- Grab the polygons with fewer than the requisite number of features - WITH lowfeaturecountpolys as ( - SELECT * - FROM splitpolygons AS p - -- TODO: feature count should not be hard-coded - WHERE p.numfeatures < 20 - ), - -- Find the neighbors of the low-feature-count polygons - -- Store their ids as n_polyid, numfeatures as n_numfeatures, etc - allneighborlist AS ( - SELECT p.*, - pf.polyid AS n_polyid, - pf.area AS n_area, - p.numfeatures AS n_numfeatures, - -- length of shared boundary to make nice merge decisions - st_length2d(st_intersection(p.geom, pf.geom)) as sharedbound - FROM lowfeaturecountpolys AS p - INNER JOIN splitpolygons AS pf - -- Anything that touches - ON st_touches(p.geom, pf.geom) - -- But eliminate those whose intersection is a point, because - -- polygons that only touch at a corner shouldn't be merged - AND st_geometrytype(st_intersection(p.geom, pf.geom)) != 'ST_Point' - -- Sort first by polyid of the low-feature-count polygons - -- Then by descending featurecount and area of the - -- high-feature-count neighbors (area is in case of equal - -- featurecounts, we'll just pick the biggest to add to) - ORDER BY p.polyid, p.numfeatures DESC, pf.area DESC - -- OR, maybe for more aesthetic merges: - -- order by p.polyid, sharedbound desc - ) - SELECT DISTINCT ON (a.polyid) * FROM allneighborlist AS a -); -ALTER TABLE lowfeaturecountpolygons ADD PRIMARY KEY(polyid); -SELECT Populate_Geometry_Columns('public.lowfeaturecountpolygons'::regclass); -CREATE INDEX lowfeaturecountpolygons_idx - ON lowfeaturecountpolygons - USING GIST (geom); -VACUUM ANALYZE lowfeaturecountpolygons; - ---****************Merge low feature count polygons with neighbors******* --- NOT IMPLEMENTED YET; not absolutely essential but highly 
desirable diff --git a/contrib/scripts/postgis_snippets/task_splitting/fmtm_task_splitting_for_buildings/fmtm-split_03_cluster_buildings.sql b/contrib/scripts/postgis_snippets/task_splitting/fmtm_task_splitting_for_buildings/fmtm-split_03_cluster_buildings.sql deleted file mode 100644 index 7a638619d8..0000000000 --- a/contrib/scripts/postgis_snippets/task_splitting/fmtm_task_splitting_for_buildings/fmtm-split_03_cluster_buildings.sql +++ /dev/null @@ -1,63 +0,0 @@ - /* -Licence: GPLv3 -Part of the HOT Field Mapping Tasking Manager (FMTM) - -Inputs: -- Polygon layer splitpolygons from previous fmtm-split step -- Polygon layer buildings from previous fmtm-split step (contains column with the id of the polygons the buildings are in) - -Outputs: -- Polygon layer clusteredbuildings; contains a column clusteruid -*/ - -/* -***********************PARAMETERS FOR ROB********************** -- Line 46: The desired number of buildings per task (determines cluster size) -*/ - -DROP TABLE IF EXISTS clusteredbuildings; -CREATE TABLE clusteredbuildings AS ( - WITH splitpolygonswithcontents AS ( - SELECT * - FROM splitpolygons sp - WHERE sp.numfeatures > 0 - ) - -- Add the count of features in the splitpolygon each building belongs to - -- to the buildings table; sets us up to be able to run the clustering. - ,buildingswithcount AS ( - SELECT b.*, p.numfeatures - FROM buildings b - LEFT JOIN splitpolygons p - ON b.polyid = p.polyid - ) - -- Cluster the buildings within each splitpolygon. The second term in the - -- call to the ST_ClusterKMeans function is the number of clusters to create, - -- so we're dividing the number of features by a constant (10 in this case) - -- to get the number of clusters required to get close to the right number - -- of features per cluster. 
- -- TODO: This should certainly not be a hardcoded, the number of features - -- per cluster should come from a project configuration table - ,buildingstocluster as ( - SELECT * FROM buildingswithcount bc - WHERE bc.numfeatures > 0 - ) - ,clusteredbuildingsnocombineduid AS ( - SELECT *, - ST_ClusterKMeans(geom, cast((b.numfeatures / 20) + 1 as integer)) - over (partition by polyid) as cid - FROM buildingstocluster b - ) - -- uid combining the id of the outer splitpolygon and inner cluster - ,clusteredbuildings as ( - select *, - polyid::text || '-' || cid as clusteruid - from clusteredbuildingsnocombineduid - ) - SELECT * FROM clusteredbuildings -); -ALTER TABLE clusteredbuildings ADD PRIMARY KEY(osm_id); -SELECT Populate_Geometry_Columns('public.clusteredbuildings'::regclass); -CREATE INDEX clusteredbuildings_idx - ON clusteredbuildings - USING GIST (geom); -VACUUM ANALYZE clusteredbuildings; diff --git a/contrib/scripts/postgis_snippets/task_splitting/fmtm_task_splitting_for_buildings/fmtm-split_04_create_polygons_around_clustered_buildings.sql b/contrib/scripts/postgis_snippets/task_splitting/fmtm_task_splitting_for_buildings/fmtm-split_04_create_polygons_around_clustered_buildings.sql deleted file mode 100644 index 95e9705858..0000000000 --- a/contrib/scripts/postgis_snippets/task_splitting/fmtm_task_splitting_for_buildings/fmtm-split_04_create_polygons_around_clustered_buildings.sql +++ /dev/null @@ -1,113 +0,0 @@ - /* -Licence: GPLv3 -Part of the HOT Field Mapping Tasking Manager (FMTM) - -Inputs: - -Outputs: -- Point layer dumpedpoints (building perimeters chopped into small segments and all nodes converted to points) -- Polygon layer voronoids (Voronoi polygons from the building segment points, only geometry without attributes because PostGIS is annoying on that score) -- Polygon layer voronois (the Voronoi polygons from the previous layer, re-associated with the ID of the points they were created from) -- Polygon layer taskpolygons (polygons mostly enclosing each task, made by dissolving the voronois by clusteruid) -- Polygon layer simplifiedpolygons (the polygons from above simplified to make them less jagged, easier to display, and smaller memory footprint) - -*/ - -/* -***************************PARAMETERS FOR ROB********************** -- Line 28: Segment length to chop up the building perimeters. Currently 0.00001 degrees (about a meter near the equator). When there are buildings very close together, this value needs to be small to reduce task polygons poking into buildings from neighboring tasks. When buildings are well-spaced, this value can be bigger to save on performance overhead. -- Line 101: Simplification tolerance. Currently 0.000075 (about 7.5 meters near the equator). The larger this value, the more smoothing of Voronoi jaggies happens, but the more likely task perimeters are to intersect buildings from neighboring tasks. -*/ - ---*****************Densify dumped building nodes****************** -DROP TABLE IF EXISTS dumpedpoints; -CREATE TABLE dumpedpoints AS ( - SELECT cb.osm_id, cb.polyid, cb.cid, cb.clusteruid, - -- POSSIBLE BUG: PostGIS' Voronoi implementation sometimes panics - -- with segments less than 0.00004 degrees. 
- (st_dumppoints(ST_Segmentize(geom, 0.00001))).geom - FROM clusteredbuildings cb -); -SELECT Populate_Geometry_Columns('public.dumpedpoints'::regclass); -CREATE INDEX dumpedpoints_idx - ON dumpedpoints - USING GIST (geom); -VACUUM ANALYZE dumpedpoints; - ---*******************voronoia**************************************** -DROP TABLE IF EXISTS voronoids; -CREATE TABLE voronoids AS ( - SELECT - st_intersection((ST_Dump(ST_VoronoiPolygons( - ST_Collect(points.geom) - ))).geom, - sp.geom) as geom - FROM dumpedpoints as points, - splitpolygons as sp - where st_contains(sp.geom, points.geom) - group by sp.geom -); -CREATE INDEX voronoids_idx - ON voronoids - USING GIST (geom); -VACUUM ANALYZE voronoids; - -DROP TABLE IF EXISTS voronois; -CREATE TABLE voronois AS ( - SELECT p.clusteruid, v.geom - FROM voronoids v, dumpedpoints p - WHERE st_within(p.geom, v.geom) -); -CREATE INDEX voronois_idx - ON voronois - USING GIST (geom); -VACUUM ANALYZE voronois; -DROP TABLE voronoids; - -DROP TABLE IF EXISTS taskpolygons; -CREATE TABLE taskpolygons AS ( - SELECT ST_Union(geom) as geom, clusteruid - FROM voronois - GROUP BY clusteruid -); -CREATE INDEX taskpolygons_idx - ON taskpolygons - USING GIST (geom); -VACUUM ANALYZE taskpolygons; - ---*****************************Simplify******************************* --- Extract unique line segments -DROP TABLE IF EXISTS simplifiedpolygons; -CREATE TABLE simplifiedpolygons AS ( - --Convert task polygon boundaries to linestrings - WITH rawlines AS ( - SELECT tp.clusteruid, st_boundary(tp.geom) AS geom - FROM taskpolygons AS tp - ) - -- Union, which eliminates duplicates from adjacent polygon boundaries - ,unionlines AS ( - SELECT st_union(l.geom) AS geom FROM rawlines l - ) - -- Dump, which gives unique segments. - ,segments AS ( - SELECT (st_dump(l.geom)).geom AS geom - FROM unionlines l - ) - ,agglomerated AS ( - SELECT st_linemerge(st_unaryunion(st_collect(s.geom))) AS geom - FROM segments s - ) - ,simplifiedlines AS ( - SELECT st_simplify(a.geom, 0.000075) AS geom - FROM agglomerated a - ) - SELECT (st_dump(st_polygonize(s.geom))).geom AS geom - FROM simplifiedlines s -); -CREATE INDEX simplifiedpolygons_idx - ON simplifiedpolygons - USING GIST (geom); -VACUUM ANALYZE simplifiedpolygons; - --- Clean results (nuke or merge polygons without features in them) - diff --git a/contrib/scripts/postgis_snippets/task_splitting/task_splitting_for_osm_buildings.sql b/contrib/scripts/postgis_snippets/task_splitting/task_splitting_for_osm_buildings.sql deleted file mode 100644 index 56755c3244..0000000000 --- a/contrib/scripts/postgis_snippets/task_splitting/task_splitting_for_osm_buildings.sql +++ /dev/null @@ -1,309 +0,0 @@ - /* -Licence: GPLv3 -Part of the HOT Field Mapping Tasking Manager (FMTM) - -This script splits an Area of Interest into task polygons based on OpenStreetMap lines (roads, waterways, and railways) and buildings. More information in the adjacent file task_splitting_readme.md. 
-*/ - ---*************************Split by OSM lines*********************** --- Nuke whatever was there before -DROP TABLE IF EXISTS polygonsnocount; --- Create a new polygon layer of splits by lines -CREATE TABLE polygonsnocount AS ( - -- The Area of Interest provided by the person creating the project - WITH aoi AS ( - SELECT * FROM "project-aoi" - ) - -- Extract all lines to be used as splitlines from a table of lines - -- with the schema from Underpass (all tags as jsonb column called 'tags') - -- TODO: add polygons (closed ways in OSM) with a 'highway' tag; - -- some features such as roundabouts appear as polygons. - -- TODO: add waterway polygons; now a beach doesn't show up as a splitline. - -- TODO: these tags should come from another table rather than hardcoded - -- so that they're easily configured during project creation. - ,splitlines AS ( - SELECT ST_Intersection(a.geom, l.geom) AS geom - FROM aoi a, "ways_line" l - WHERE ST_Intersects(a.geom, l.geom) - -- TODO: these tags should come from a config table - -- All highways, waterways, and railways - AND (tags->>'highway' IS NOT NULL - OR tags->>'waterway' IS NOT NULL - OR tags->>'railway' IS NOT NULL - ) - ) - -- Merge all lines, necessary so that the polygonize function works later - ,merged AS ( - SELECT ST_LineMerge(ST_Union(splitlines.geom)) AS geom - FROM splitlines - ) - -- Combine the boundary of the AOI with the splitlines - -- First extract the Area of Interest boundary as a line - ,boundary AS ( - SELECT ST_Boundary(geom) AS geom - FROM aoi - ) - -- Then combine it with the splitlines - ,comb AS ( - SELECT ST_Union(boundary.geom, merged.geom) AS geom - FROM boundary, merged - ) - -- TODO add closed ways from OSM to lines (roundabouts etc) - -- Create a polygon for each area enclosed by the splitlines - ,splitpolysnoindex AS ( - SELECT (ST_Dump(ST_Polygonize(comb.geom))).geom as geom - FROM comb - ) - -- Add an index column to the split polygons - ,splitpolygons AS( - SELECT - row_number () over () as polyid, - ST_Transform(spni.geom,4326)::geography AS geog, - spni.* - from splitpolysnoindex spni - ) - SELECT * FROM splitpolygons -); --- Make that index column a primary key -ALTER TABLE polygonsnocount ADD PRIMARY KEY(polyid); --- Properly register geometry column (makes QGIS happy) -SELECT Populate_Geometry_Columns('public.polygonsnocount'::regclass); --- Add a spatial index (vastly improves performance for a lot of operations) -CREATE INDEX polygonsnocount_idx - ON polygonsnocount - USING GIST (geom); --- Clean up the table which may have gaps and stuff from spatial indexing -VACUUM ANALYZE polygonsnocount; - --- ************************Grab the buildings************************** --- While we're at it, grab the ID of the polygon the buildings fall within. 
--- TODO add outer rings of buildings from relations table of OSM export -DROP TABLE IF EXISTS buildings; -CREATE TABLE buildings AS ( - SELECT b.*, polys.polyid - FROM "ways_poly" b, polygonsnocount polys - WHERE ST_Intersects(polys.geom, ST_Centroid(b.geom)) - AND b.tags->>'building' IS NOT NULL -); -ALTER TABLE buildings ADD PRIMARY KEY(osm_id); --- Properly register geometry column (makes QGIS happy) -SELECT Populate_Geometry_Columns('public.buildings'::regclass); --- Add a spatial index (vastly improves performance for a lot of operations) -CREATE INDEX buildings_idx - ON buildings - USING GIST (geom); --- Clean up the table which may have gaps and stuff from spatial indexing -VACUUM ANALYZE buildings; - ---**************************Count features in polygons***************** -DROP TABLE IF EXISTS splitpolygons; -CREATE TABLE splitpolygons AS ( - WITH polygonsfeaturecount AS ( - SELECT sp.polyid, - sp.geom, - sp.geog, - count(b.geom) AS numfeatures, - ST_Area(sp.geog) AS area - FROM polygonsnocount sp - LEFT JOIN "buildings" b - ON sp.polyid = b.polyid - GROUP BY sp.polyid, sp.geom - ) - SELECT * from polygonsfeaturecount -); -ALTER TABLE splitpolygons ADD PRIMARY KEY(polyid); -SELECT Populate_Geometry_Columns('public.splitpolygons'::regclass); -CREATE INDEX splitpolygons_idx - ON splitpolygons - USING GIST (geom); -VACUUM ANALYZE splitpolygons; - -DROP TABLE polygonsnocount; - -DROP TABLE IF EXISTS lowfeaturecountpolygons; -CREATE TABLE lowfeaturecountpolygons AS ( - -- Grab the polygons with fewer than the requisite number of features - WITH lowfeaturecountpolys as ( - SELECT * - FROM splitpolygons AS p - -- TODO: feature count should not be hard-coded - WHERE p.numfeatures < 20 - ), - -- Find the neighbors of the low-feature-count polygons - -- Store their ids as n_polyid, numfeatures as n_numfeatures, etc - allneighborlist AS ( - SELECT p.*, - pf.polyid AS n_polyid, - pf.area AS n_area, - p.numfeatures AS n_numfeatures, - -- length of shared boundary to make nice merge decisions - st_length2d(st_intersection(p.geom, pf.geom)) as sharedbound - FROM lowfeaturecountpolys AS p - INNER JOIN splitpolygons AS pf - -- Anything that touches - ON st_touches(p.geom, pf.geom) - -- But eliminate those whose intersection is a point, because - -- polygons that only touch at a corner shouldn't be merged - AND st_geometrytype(st_intersection(p.geom, pf.geom)) != 'ST_Point' - -- Sort first by polyid of the low-feature-count polygons - -- Then by descending featurecount and area of the - -- high-feature-count neighbors (area is in case of equal - -- featurecounts, we'll just pick the biggest to add to) - ORDER BY p.polyid, p.numfeatures DESC, pf.area DESC - -- OR, maybe for more aesthetic merges: - -- order by p.polyid, sharedbound desc - ) - SELECT DISTINCT ON (a.polyid) * FROM allneighborlist AS a -); -ALTER TABLE lowfeaturecountpolygons ADD PRIMARY KEY(polyid); -SELECT Populate_Geometry_Columns('public.lowfeaturecountpolygons'::regclass); -CREATE INDEX lowfeaturecountpolygons_idx - ON lowfeaturecountpolygons - USING GIST (geom); -VACUUM ANALYZE lowfeaturecountpolygons; - ---****************Merge low feature count polygons with neighbors******* - - - ---****************Cluster buildings************************************* -DROP TABLE IF EXISTS clusteredbuildings; -CREATE TABLE clusteredbuildings AS ( - WITH splitpolygonswithcontents AS ( - SELECT * - FROM splitpolygons sp - WHERE sp.numfeatures > 0 - ) - -- Add the count of features in the splitpolygon each building belongs to - -- to the 
buildings table; sets us up to be able to run the clustering. - ,buildingswithcount AS ( - SELECT b.*, p.numfeatures - FROM buildings b - LEFT JOIN splitpolygons p - ON b.polyid = p.polyid - ) - -- Cluster the buildings within each splitpolygon. The second term in the - -- call to the ST_ClusterKMeans function is the number of clusters to create, - -- so we're dividing the number of features by a constant (10 in this case) - -- to get the number of clusters required to get close to the right number - -- of features per cluster. - -- TODO: This should certainly not be a hardcoded, the number of features - -- per cluster should come from a project configuration table - ,buildingstocluster as ( - SELECT * FROM buildingswithcount bc - WHERE bc.numfeatures > 0 - ) - ,clusteredbuildingsnocombineduid AS ( - SELECT *, - ST_ClusterKMeans(geom, cast((b.numfeatures / 20) + 1 as integer)) - over (partition by polyid) as cid - FROM buildingstocluster b - ) - -- uid combining the id of the outer splitpolygon and inner cluster - ,clusteredbuildings as ( - select *, - polyid::text || '-' || cid as clusteruid - from clusteredbuildingsnocombineduid - ) - SELECT * FROM clusteredbuildings -); -ALTER TABLE clusteredbuildings ADD PRIMARY KEY(osm_id); -SELECT Populate_Geometry_Columns('public.clusteredbuildings'::regclass); -CREATE INDEX clusteredbuildings_idx - ON clusteredbuildings - USING GIST (geom); -VACUUM ANALYZE clusteredbuildings; - ---*****************Densify dumped building nodes****************** -DROP TABLE IF EXISTS dumpedpoints; -CREATE TABLE dumpedpoints AS ( - SELECT cb.osm_id, cb.polyid, cb.cid, cb.clusteruid, - -- POSSIBLE BUG: PostGIS' Voronoi implementation seems to panic - -- with segments less than 0.00004 degrees. - -- Should probably use geography instead of geometry - (st_dumppoints(ST_Segmentize(geom, 0.00001))).geom - FROM clusteredbuildings cb -); -SELECT Populate_Geometry_Columns('public.dumpedpoints'::regclass); -CREATE INDEX dumpedpoints_idx - ON dumpedpoints - USING GIST (geom); -VACUUM ANALYZE dumpedpoints; - ---*******************voronoia**************************************** -DROP TABLE IF EXISTS voronoids; -CREATE TABLE voronoids AS ( - SELECT - st_intersection((ST_Dump(ST_VoronoiPolygons( - ST_Collect(points.geom) - ))).geom, - sp.geom) as geom - FROM dumpedpoints as points, - splitpolygons as sp - where st_contains(sp.geom, points.geom) - group by sp.geom -); -CREATE INDEX voronoids_idx - ON voronoids - USING GIST (geom); -VACUUM ANALYZE voronoids; - -DROP TABLE IF EXISTS voronois; -CREATE TABLE voronois AS ( - SELECT p.clusteruid, v.geom - FROM voronoids v, dumpedpoints p - WHERE st_within(p.geom, v.geom) -); -CREATE INDEX voronois_idx - ON voronois - USING GIST (geom); -VACUUM ANALYZE voronois; -DROP TABLE voronoids; - -DROP TABLE IF EXISTS taskpolygons; -CREATE TABLE taskpolygons AS ( - SELECT ST_Union(geom) as geom, clusteruid - FROM voronois - GROUP BY clusteruid -); -CREATE INDEX taskpolygons_idx - ON taskpolygons - USING GIST (geom); -VACUUM ANALYZE taskpolygons; - ---*****************************Simplify******************************* --- Extract unique line segments -DROP TABLE IF EXISTS simplifiedpolygons; -CREATE TABLE simplifiedpolygons AS ( - --Convert task polygon boundaries to linestrings - WITH rawlines AS ( - SELECT tp.clusteruid, st_boundary(tp.geom) AS geom - FROM taskpolygons AS tp - ) - -- Union, which eliminates duplicates from adjacent polygon boundaries - ,unionlines AS ( - SELECT st_union(l.geom) AS geom FROM rawlines l - ) - -- Dump, which gives 
unique segments. - ,segments AS ( - SELECT (st_dump(l.geom)).geom AS geom - FROM unionlines l - ) - ,agglomerated AS ( - SELECT st_linemerge(st_unaryunion(st_collect(s.geom))) AS geom - FROM segments s - ) - ,simplifiedlines AS ( - SELECT st_simplify(a.geom, 0.000075) AS geom - FROM agglomerated a - ) - SELECT (st_dump(st_polygonize(s.geom))).geom AS geom - FROM simplifiedlines s -); -CREATE INDEX simplifiedpolygons_idx - ON simplifiedpolygons - USING GIST (geom); -VACUUM ANALYZE simplifiedpolygons; - --- Clean results (nuke or merge polygons without features in them) - diff --git a/contrib/scripts/postgis_snippets/task_splitting/task_splitting_for_osm_roads.sql b/contrib/scripts/postgis_snippets/task_splitting/task_splitting_for_osm_roads.sql deleted file mode 100644 index 25564eaa60..0000000000 --- a/contrib/scripts/postgis_snippets/task_splitting/task_splitting_for_osm_roads.sql +++ /dev/null @@ -1,181 +0,0 @@ - /* -Licence: GPLv3 -Part of the HOT Field Mapping Tasking Manager (FMTM) - -This script splits an Area of Interest into task polygons based on OpenStreetMap lines (roads, waterways, and railways) for the purposes of adding information (tags) to road segments. -*/ -/* ---*************************Extract road segments*********************** --- Nuke whatever was there before -DROP TABLE IF EXISTS roadsdissolved; --- Create a new polygon layer of splits by lines -CREATE TABLE roadsdissolved AS ( - -- The Area of Interest provided by the person creating the project - WITH aoi AS ( - SELECT * FROM "project-aoi" - ) - -- Grab all roads within the AOI - ,roadlines AS ( - SELECT ST_Collect(l.geom) AS geom - FROM aoi a, "ways_line" l - WHERE ST_Intersects(a.geom, l.geom) - AND tags->>'highway' IS NOT NULL - ) - -- Grab the roads that are polygons in OSM ("Closed ways" with highway tags) - ,roadpolystolines AS ( - SELECT ST_Collect(ST_Boundary(p.geom)) AS geom - FROM aoi a, "ways_poly" p - WHERE ST_Intersects(a.geom, p.geom) - AND tags->>'highway' IS NOT NULL - ) - -- Merge the roads from lines with the roads from polys - ,merged AS ( - SELECT ST_Union(ml.geom, mp.geom) as geom - FROM roadlines ml, roadpolystolines mp - ) - SELECT * - FROM merged mr -); --- Add a spatial index (vastly improves performance for a lot of operations) -CREATE INDEX roadsdissolved_idx - ON roadsdissolved - USING GIST (geom); --- Clean up the table which may have gaps and stuff from spatial indexing -VACUUM ANALYZE roadsdissolved; - ---**************************MISSING BIT******************************** --- Here we use QGIS multipart to singleparts, which splits the roads --- on all intersections into sensible parts. Need to implement in PostGIS. --- Output here is a line layer table called roadparts which consists of one --- linestring for each portion of a road between any and all intersections. - --- *****************Re-associate parts with OSM ID and tags************* -*/ - -DROP TABLE IF EXISTS roadpartstagged; -CREATE TABLE roadpartstagged AS ( - SELECT - wl.osm_id, - wl.tags, - l.geom as geom - FROM "ways_line" wl, roadparts l - -- Funky hack here: checking if a roadpart is a subset of an OSM way is - -- terribly slow if you check for a line intersection, but if you check for - -- any intersection it'll often return the attributes of an intersecting road. - -- If you check for intersection with the start and end nodes, sometimes - -- cresecent roads (which touch another road at start and end) get the - -- attributes from the road that they touch. 
- -- So we check for intersection of the first and second nodes in the part - -- (if there are only two, they're the start and end by definition, but they - -- also can't be a crescent so that's ok). - WHERE st_intersects(st_startpoint(l.geom), wl.geom) - AND ST_Intersects(st_pointn(l.geom, 2), wl.geom) -); -CREATE INDEX roadpartstagged_idx - ON roadpartstagged - USING GIST (geom); -VACUUM ANALYZE roadpartstagged; - ---****************Cluster roadparts************************************* -DROP TABLE IF EXISTS clusteredroadparts; -CREATE TABLE clusteredroadparts AS ( - SELECT *, - -- TODO: replace 4500 with count of roadparts - ST_ClusterKMeans(geom, cast((4500 / 20) + 1 as integer)) - over () as cid - FROM roadpartstagged rp -); -CREATE INDEX clusteredroadparts_idx - ON clusteredroadparts - USING GIST (geom); -VACUUM ANALYZE clusteredroadparts; - ---***************** dump road segement nodes****************** -DROP TABLE IF EXISTS dumpedroadpoints; -CREATE TABLE dumpedroadpoints AS ( - SELECT crp.osm_id, crp.cid, - -- POSSIBLE BUG: PostGIS' Voronoi implementation seems to panic - -- with segments less than 0.00004 degrees. - -- Should probably use geography instead of geometry - (st_dumppoints(ST_Segmentize(crp.geom, 0.0001))).geom - --(st_dumppoints(crp.geom)).geom - FROM clusteredroadparts crp -); -SELECT Populate_Geometry_Columns('public.dumpedpoints'::regclass); -CREATE INDEX dumpedroadpoints_idx - ON dumpedroadpoints - USING GIST (geom); -VACUUM ANALYZE dumpedroadpoints; - ---*******************voronoia**************************************** -DROP TABLE IF EXISTS voronoids; -CREATE TABLE voronoids AS ( - SELECT - (ST_Dump(ST_VoronoiPolygons(ST_Collect(points.geom)))).geom as geom - FROM dumpedroadpoints as points -); -CREATE INDEX voronoids_idx - ON voronoids - USING GIST (geom); -VACUUM ANALYZE voronoids; - -DROP TABLE IF EXISTS voronois; -CREATE TABLE voronois AS ( - SELECT p.cid, st_intersection(v.geom, a.geom) as geom - FROM voronoids v, dumpedroadpoints p, "project-aoi" a - WHERE st_within(p.geom, v.geom) -); -CREATE INDEX voronois_idx - ON voronois - USING GIST (geom); -VACUUM ANALYZE voronois; -DROP TABLE voronoids; - -DROP TABLE IF EXISTS taskpolygons; -CREATE TABLE taskpolygons AS ( - SELECT ST_Union(geom) as geom, cid - FROM voronois - GROUP BY cid -); -CREATE INDEX taskpolygons_idx - ON taskpolygons - USING GIST (geom); -VACUUM ANALYZE taskpolygons; - ---*****************************Simplify******************************* --- Extract unique line segments -DROP TABLE IF EXISTS simplifiedpolygons; -CREATE TABLE simplifiedpolygons AS ( - --Convert task polygon boundaries to linestrings - WITH rawlines AS ( - SELECT tp.cid, st_boundary(tp.geom) AS geom - FROM taskpolygons AS tp - ) - -- Union, which eliminates duplicates from adjacent polygon boundaries - ,unionlines AS ( - SELECT st_union(l.geom) AS geom FROM rawlines l - ) - -- Dump, which gives unique segments. 
- ,segments AS ( - SELECT (st_dump(l.geom)).geom AS geom - FROM unionlines l - ) - ,agglomerated AS ( - SELECT st_linemerge(st_unaryunion(st_collect(s.geom))) AS geom - FROM segments s - ) - ,simplifiedlines AS ( - SELECT st_simplify(a.geom, 0.000075) AS geom - FROM agglomerated a - ) - SELECT (st_dump(st_polygonize(s.geom))).geom AS geom - FROM simplifiedlines s -); -CREATE INDEX simplifiedpolygons_idx - ON simplifiedpolygons - USING GIST (geom); -VACUUM ANALYZE simplifiedpolygons; - --- Clean results (nuke or merge polygons without features in them) - - diff --git a/contrib/scripts/postgis_snippets/task_splitting/task_splitting_readme.md b/contrib/scripts/postgis_snippets/task_splitting/task_splitting_readme.md deleted file mode 100644 index 7e7f29649b..0000000000 --- a/contrib/scripts/postgis_snippets/task_splitting/task_splitting_readme.md +++ /dev/null @@ -1,74 +0,0 @@ -# Task Splitting - -The file `task_splitting_optimized.sql` is a spatial Structured Query Language (SQL) script to split an area of interest for field mapping into small "task" areas. - -It operates within a Postgresql database with the spatial extension PostGIS enabled. It requires write access to the database for performance reasons (there is another version without the suffix "\_optimized" that doesn't require write access, but it's not likely to ever work well enough for production). - -It takes into account roads, waterways, and railways to avoid forcing mappers to cross such features during mapping. - -It uses a clustering algorithm to divide the area into discrete task polygons, each containing roughly the desired average number of features. - -## Inputs (tables/layers) - -This script takes 4 inputs, all of which are Postgresql/PostGIS tables/layers. - -- `project-aoi`, a PostGIS polygon layer containing a single feature: a polygon containing the Area of Interest. -- `ways_line`, a PostGIS line layer containing all OpenStreetMap "open ways" (the OSM term for linestrings) in the Area of Interest. -- `ways_poly`, a PostGIS polygon layer containing all OpenStreetMap "closed ways" (the OSM term for polygons) in the AOI. -- `project-config`, a Postgresql table containing settings (for example, the average number of features desired per task). _This isn't yet implemented; these settings are hard-coded for the moment. The script runs without a `project-config` table, but the number of features per task needs to be tweaked within the code._ - -OSM data (`ways_line` and `ways_poly`) can be loaded into a PostGIS database using the [Underpass](https://github.com/hotosm/underpass) configuration file [raw.lua](https://github.com/hotosm/underpass/blob/master/utils/raw.lua). If these two layers are present in the same database and schema as the `project-aoi` layer, the script will make use of them automatically (non-destructively; it doesn't modify any tables other than the ones it creates unless you're unlucky enough to have tables matching the very specific names I'm using, which I'll later change to names that should avoid all realistically possible collisions). - -## Running the script - -You need a Postgresql database with the PostGIS extension enabled. If both Postgresql and PostGIS are installed and you have permissions set up properly (doing both of those things is way beyond scope here), this should do the trick (choose whatever database name you want): - -``` -createdb [databasename] -O [username] -``` - -``` -psql -U [username] -d [databasename] -c 'CREATE EXTENSION POSTGIS' -``` - -Now you need to get some OSM data in there.
You can get OSM data from the GeoFabrik download tool or the HOT export tool in `.pbf` format. - -If you have your own way of getting the OSM data into the database, as long as it'll create the `ways_line` and `ways_poly` layers, go for it. Here's how I'm doing it: - -``` -osm2pgsql --create -H localhost -U [username] -P 5432 -d [database name] -W --extra-attributes --output=flex --style /path/to/git/underpass/utils/raw.lua /path/to/my_extract.osm.pbf -``` - -Now you need an AOI. I'm using QGIS: connect to the database using the Database Manager, then create a polygon layer (make a "Temporary scratch layer" with polygon geometry, draw an AOI, and import that layer into the database using the Database Manager). If you don't want to use QGIS, you can get a GeoJSON polygon some other way ([geojson.io](geojson.io) comes to mind) and shove it into the database using ogr2ogr or some other tool. Whatever. Just ensure it's a polygon layer in EPSG:4326 and it's called `project-aoi`. - -``` -psql -U [username] -d [database name] -f path/to/fmtm/scripts/postgis_snippets/task_splitting/task_splitting_optimized.sql -``` - -If all is set up correctly, that'll run and spit out some console output. It's moderately likely to include some warning messages due to messy OSM data, and will very likely complain that some tables do not exist (that's because I clobber any tables with colliding names before creating my own tables; don't run this script on random production databases until I collision-proof the names, and probably not even then). - -## Outputs - -You should now have the following useful layers in your Postgresql/PostGIS database: - -- clusteredbuildings -- taskpolygons - -As well as the following non-useful layers (well, they're useful for debugging, but not for end users' purposes): - -- buildings -- dumpedpoints -- lowfeaturecountpolygons -- splitpolygons -- voronois - -The `taskpolygons` layer can be exported as GeoJSON and used as a task to upload to the FMTM. This works in at least some cases; I'm not sure if there are cases where whatever was in the AOI and OSM layers causes outputs that break somehow (there are definitely some cases where building footprints in OSM are sufficiently messed up that they create weird task geometries, but so far these haven't actually broken anything). - -## Next steps - -It's working OK now, but needs more work. - -- Still simply discards polygons delineated by roads/waterways/railways rather than merging them into neighbors, which causes the task polygons to not tile the full AOI. This isn't necessarily always a problem, but it would be better to have the option to merge rather than discard those areas. -- Task polygon edges can be rough, often jagged, occasionally poking into buildings from adjacent polygons (though never, I think, to the centroid). Working on simplifying/smoothing these, but there are some complications... -- Task polygon edges can contain closed-off loops unconnected to their main bodies. May need to increase density of segmentation of buildings in some places. -- Clustering is really pretty good, but not very strict at keeping similar numbers of features per cluster; you get a bit of a range of task sizes (though much, much better than anything we've had previously). I think it's possible to tweak this, though I think it might be expensive in terms of performance.
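The manual PostGIS workflow documented in the deleted README above is what the rest of this patch replaces with the fmtm-splitter package (see the `split_by_sql` and `split_by_square` calls added to `src/backend/app/projects/project_crud.py` further down). The snippet below is a minimal sketch of driving the same splitting from Python, assuming only the call signatures that appear later in this diff; the file names, connection string, and parameter values are illustrative placeholders, not part of this patch.

```python
# Minimal sketch: task splitting via fmtm-splitter instead of the deleted
# PostGIS snippets. Signatures mirror the usage in project_crud.py in this
# patch; file names, connection string, and parameter values are hypothetical.
import geojson
from fmtm_splitter.splitter import split_by_sql, split_by_square
from sqlalchemy import create_engine
from sqlalchemy.orm import Session

# Project Area of Interest and an OSM data extract for it (both GeoJSON, EPSG:4326)
with open("aoi.geojson") as aoi_file:
    aoi = geojson.load(aoi_file)
with open("extract.geojson") as extract_file:
    osm_extract = geojson.load(extract_file)

engine = create_engine("postgresql://fmtm:fmtm@localhost:5432/fmtm")
with Session(engine) as db:
    # Algorithmic split: aims for roughly num_buildings buildings per task,
    # using the roads/waterways/railways in the extract as task boundaries.
    task_collection = split_by_sql(aoi, db, num_buildings=20, osm_extract=osm_extract)

# Simpler alternative: a square grid, with cells of the given edge length in metres.
grid_collection = split_by_square(aoi, meters=100)
```

Both calls are consumed in `project_crud.py` as GeoJSON feature collections of task polygons, which is why the endpoints in this patch can return their output directly.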
diff --git a/docker-compose.yml b/docker-compose.yml index 73e81a6013..d95d66d5ed 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -79,6 +79,8 @@ services: - ./src/backend/app:/opt/app - ./src/backend/tests:/opt/tests # - ../osm-fieldwork/osm_fieldwork:/home/appuser/.local/lib/python3.10/site-packages/osm_fieldwork + # - ../osm-rawdata/osm_rawdata:/home/appuser/.local/lib/python3.10/site-packages/osm_rawdata + # - ../fmtm-splitter/fmtm_splitter:/home/appuser/.local/lib/python3.10/site-packages/fmtm_splitter depends_on: fmtm-db: condition: service_healthy diff --git a/src/backend/Dockerfile b/src/backend/Dockerfile index 747db71dc7..cd23e34ab4 100644 --- a/src/backend/Dockerfile +++ b/src/backend/Dockerfile @@ -166,9 +166,11 @@ ARG PYTHON_IMG_TAG COPY --from=extract-deps \ /opt/python/requirements-ci.txt /opt/python/ # Copy packages from user to root dirs (run ci as root) -RUN mv /home/appuser/.local/bin/* /usr/local/bin/ \ - && mv /home/appuser/.local/lib/python${PYTHON_IMG_TAG}/site-packages/* \ +RUN cp -r /home/appuser/.local/bin/* /usr/local/bin/ \ + && cp -r /home/appuser/.local/lib/python${PYTHON_IMG_TAG}/site-packages/* \ /usr/local/lib/python${PYTHON_IMG_TAG}/site-packages/ \ + && rm -rf /home/appuser/.local/bin \ + && rm -rf /home/appuser/.local/lib/python${PYTHON_IMG_TAG}/site-packages \ && set -ex \ && apt-get update \ && DEBIAN_FRONTEND=noninteractive apt-get install \ diff --git a/src/backend/app/central/central_crud.py b/src/backend/app/central/central_crud.py index e599e45e58..e0a2ee6ba1 100644 --- a/src/backend/app/central/central_crud.py +++ b/src/backend/app/central/central_crud.py @@ -15,11 +15,13 @@ # You should have received a copy of the GNU General Public License # along with FMTM. If not, see . # +"""Logic for interaction with ODK Central & data.""" + import base64 import json import os -import pathlib import zlib +from xml.etree import ElementTree # import osm_fieldwork # Qr code imports @@ -201,6 +203,7 @@ def delete_app_user( def upload_xform_media( project_id: int, xform_id: str, filespec: str, odk_credentials: dict = None ): + """Upload and publish an XForm on ODKCentral.""" title = os.path.basename(os.path.splitext(filespec)[0]) if odk_credentials: @@ -301,6 +304,7 @@ def list_odk_xforms( def get_form_full_details( odk_project_id: int, form_id: str, odk_central: project_schemas.ODKCentral ): + """Get additional metadata for ODK Form.""" form = get_odk_form(odk_central) form_details = form.getFullDetails(odk_project_id, form_id) return form_details @@ -309,21 +313,14 @@ def get_form_full_details( def get_odk_project_full_details( odk_project_id: int, odk_central: project_schemas.ODKCentral ): + """Get additional metadata for ODK project.""" project = get_odk_project(odk_central) project_details = project.getFullDetails(odk_project_id) return project_details -def list_task_submissions( - odk_project_id: int, form_id: str, odk_central: project_schemas.ODKCentral = None -): - project = get_odk_form(odk_central) - submissions = project.listSubmissions(odk_project_id, form_id) - return submissions - - def list_submissions(project_id: int, odk_central: project_schemas.ODKCentral = None): - """List submissions from a remote ODK server.""" + """List all submissions for a project, aggregated from associated users.""" project = get_odk_project(odk_central) xform = get_odk_form(odk_central) submissions = list() @@ -366,7 +363,7 @@ def download_submissions( get_json: bool = True, odk_central: project_schemas.ODKCentral = None, ): - """Download submissions from a 
remote ODK server.""" + """Download all submissions for an XForm.""" xform = get_odk_form(odk_central) # FIXME: should probably filter by timestamps or status value data = xform.getSubmissions(project_id, xform_id, submission_id, True, get_json) @@ -376,9 +373,10 @@ def download_submissions( async def test_form_validity(xform_content: str, form_type: str): """Validate an XForm. - Parameters: - xform_content: form to be tested - form_type: type of form (xls or xlsx). + + Args: + xform_content (str): form to be tested + form_type (str): type of form (xls or xlsx). """ try: xlsform_path = f"/tmp/validate_form.{form_type}" @@ -395,12 +393,10 @@ async def test_form_validity(xform_content: str, form_type: str): "xforms": "http://www.w3.org/2002/xforms", } - import xml.etree.ElementTree as ET - with open(outfile, "r") as xml: data = xml.read() - root = ET.fromstring(data) + root = ElementTree.fromstring(data) instances = root.findall(".//xforms:instance[@src]", namespaces) geojson_list = [] @@ -465,7 +461,8 @@ def generate_updated_xform( # try: # if "@src" in inst: # if ( - # xml["h:html"]["h:head"]["model"]["instance"][index]["@src"].split( + # xml["h:html"]["h:head"]["model"]["instance"][index] \ + # ["@src"].split( # "." # )[1] # == "geojson" @@ -478,10 +475,13 @@ def generate_updated_xform( # print("data in inst") # if "data" == inst: # print("Data = inst ", inst) - # xml["h:html"]["h:head"]["model"]["instance"]["data"]["@id"] = id - # # xml["h:html"]["h:head"]["model"]["instance"]["data"]["@id"] = xform + # xml["h:html"]["h:head"]["model"]["instance"]["data"] \ + # ["@id"] = id + # # xml["h:html"]["h:head"]["model"]["instance"]["data"] \ + # # ["@id"] = xform # else: - # xml["h:html"]["h:head"]["model"]["instance"][0]["data"]["@id"] = id + # xml["h:html"]["h:head"]["model"]["instance"][0]["data"] \ + # ["@id"] = id # except Exception: # continue # index += 1 @@ -493,9 +493,7 @@ def generate_updated_xform( "xforms": "http://www.w3.org/2002/xforms", } - import xml.etree.ElementTree as ET - - root = ET.fromstring(data) + root = ElementTree.fromstring(data) head = root.find("h:head", namespaces) model = head.find("xforms:model", namespaces) instances = model.findall("xforms:instance", namespaces) @@ -517,7 +515,7 @@ def generate_updated_xform( index += 1 # Save the modified XML - newxml = ET.tostring(root) + newxml = ElementTree.tostring(root) # write the updated XML file outxml = open(outfile, "w") @@ -595,7 +593,6 @@ def convert_csv( data: bytes, ): """Convert ODK CSV to OSM XML and GeoJson.""" - pathlib.Path(osm_fieldwork.__file__).resolve().parent csvin = CSVDump("/xforms.yaml") osmoutfile = f"{filespec}.osm" diff --git a/src/backend/app/db/db_models.py b/src/backend/app/db/db_models.py index cd0502bae7..824f88aed0 100644 --- a/src/backend/app/db/db_models.py +++ b/src/backend/app/db/db_models.py @@ -636,40 +636,6 @@ class DbUserRoles(Base): role = Column(Enum(UserRole), nullable=False) -class DbProjectAOI(Base): - """The AOI geometry for a project.""" - - __tablename__ = "project_aoi" - - id = Column(Integer, primary_key=True) - project_id = Column(String) - geom = Column(Geometry(geometry_type="GEOMETRY", srid=4326)) - tags = Column(JSONB) - - -class DbOsmLines(Base): - """Associated OSM ways for a project.""" - - __tablename__ = "ways_line" - - id = Column(Integer, primary_key=True) - project_id = Column(String) - geom = Column(Geometry(geometry_type="GEOMETRY", srid=4326)) - tags = Column(JSONB) - - -class DbBuildings(Base): - """Associated OSM buildings for a project.""" - - 
__tablename__ = "ways_poly" - - id = Column(Integer, primary_key=True) - project_id = Column(String) - osm_id = Column(String) - geom = Column(Geometry(geometry_type="GEOMETRY", srid=4326)) - tags = Column(JSONB) - - class DbTilesPath(Base): """Keeping track of mbtile basemaps for a project.""" diff --git a/src/backend/app/db/split_algorithm.sql b/src/backend/app/db/split_algorithm.sql deleted file mode 100644 index 66943ce793..0000000000 --- a/src/backend/app/db/split_algorithm.sql +++ /dev/null @@ -1,284 +0,0 @@ -DROP TABLE IF EXISTS polygonsnocount; --- Create a new polygon layer of splits by lines - -CREATE TABLE polygonsnocount AS ( --- The Area of Interest provided by the person creating the project -WITH aoi AS ( - SELECT * FROM "project_aoi" -) --- Extract all lines to be used as splitlines from a table of lines --- with the schema from Underpass (all tags as jsonb column called 'tags') --- TODO: add polygons (closed ways in OSM) with a 'highway' tag; --- some features such as roundabouts appear as polygons. --- TODO: add waterway polygons; now a beach doesn't show up as a splitline. --- TODO: these tags should come from another table rather than hardcoded --- so that they're easily configured during project creation. -,splitlines AS ( - -- SELECT ST_Intersection(a.geom, l.geom) AS geom - -- FROM aoi a, "ways_line" l - -- WHERE ST_Intersects(a.geom, l.geom) - -- -- TODO: these tags should come from a config table - -- -- All highways, waterways, and railways - -- AND (l.tags->>'highway' IS NOT NULL - -- OR l.tags->>'waterway' IS NOT NULL - -- OR l.tags->>'railway' IS NOT NULL - -- ) - - select * from ways_line l - where l.tags->>'highway' IS NOT NULL - OR l.tags->>'waterway' IS NOT NULL - OR l.tags->>'railway' IS NOT NULL - -) --- Merge all lines, necessary so that the polygonize function works later -,merged AS ( - SELECT ST_LineMerge(ST_Union(splitlines.geom)) AS geom - FROM splitlines -) --- Combine the boundary of the AOI with the splitlines --- First extract the Area of Interest boundary as a line -,boundary AS ( - SELECT ST_Boundary(geom) AS geom - FROM aoi -) --- Then combine it with the splitlines -,comb AS ( - SELECT ST_Union(boundary.geom, merged.geom) AS geom - FROM boundary, merged -) --- TODO add closed ways from OSM to lines (roundabouts etc) --- Create a polygon for each area enclosed by the splitlines -,splitpolysnoindex AS ( - SELECT (ST_Dump(ST_Polygonize(comb.geom))).geom as geom - FROM comb -) --- Add an index column to the split polygons -,splitpolygons AS( - SELECT - row_number () over () as polyid, - ST_Transform(spni.geom,4326)::geography AS geog, - spni.* - from splitpolysnoindex spni -) -SELECT * FROM splitpolygons -); - - --- Make that index column a primary key -ALTER TABLE polygonsnocount ADD PRIMARY KEY(polyid); --- Properly register geometry column (makes QGIS happy) -SELECT Populate_Geometry_Columns('public.polygonsnocount'::regclass); --- Add a spatial index (vastly improves performance for a lot of operations) -CREATE INDEX polygonsnocount_idx -ON polygonsnocount -USING GIST (geom); --- Clean up the table which may have gaps and stuff from spatial indexing --- VACUUM ANALYZE polygonsnocount; - - -DROP TABLE IF EXISTS buildings; -CREATE TABLE buildings AS ( -SELECT b.*, polys.polyid -FROM "ways_poly" b, polygonsnocount polys -WHERE ST_Intersects(polys.geom, ST_Centroid(b.geom)) -AND b.tags->>'building' IS NOT NULL -); - - --- ALTER TABLE buildings ADD PRIMARY KEY(osm_id); - - --- Properly register geometry column (makes QGIS happy) -SELECT 
Populate_Geometry_Columns('public.buildings'::regclass); --- Add a spatial index (vastly improves performance for a lot of operations) -CREATE INDEX buildings_idx -ON buildings -USING GIST (geom); --- Clean up the table which may have gaps and stuff from spatial indexing --- VACUUM ANALYZE buildings; - - -DROP TABLE IF EXISTS splitpolygons; -CREATE TABLE splitpolygons AS ( -WITH polygonsfeaturecount AS ( - SELECT sp.polyid, - sp.geom, - sp.geog, - count(b.geom) AS numfeatures, - ST_Area(sp.geog) AS area - FROM polygonsnocount sp - LEFT JOIN "buildings" b - ON sp.polyid = b.polyid - GROUP BY sp.polyid, sp.geom -) -SELECT * from polygonsfeaturecount -); -ALTER TABLE splitpolygons ADD PRIMARY KEY(polyid); -SELECT Populate_Geometry_Columns('public.splitpolygons'::regclass); -CREATE INDEX splitpolygons_idx -ON splitpolygons -USING GIST (geom); --- VACUUM ANALYZE splitpolygons; - -DROP TABLE polygonsnocount; - - -DROP TABLE IF EXISTS lowfeaturecountpolygons; -CREATE TABLE lowfeaturecountpolygons AS ( --- Grab the polygons with fewer than the requisite number of features -with lowfeaturecountpolys as ( - select * - from splitpolygons as p - -- TODO: feature count should not be hard-coded - where p.numfeatures < 20 -), --- Find the neighbors of the low-feature-count polygons --- Store their ids as n_polyid, numfeatures as n_numfeatures, etc -allneighborlist as ( - select p.*, - pf.polyid as n_polyid, - pf.area as n_area, - p.numfeatures as n_numfeatures, - -- length of shared boundary to make nice merge decisions - st_length2d(st_intersection(p.geom, pf.geom)) as sharedbound - from lowfeaturecountpolys as p - inner join splitpolygons as pf - -- Anything that touches - on st_touches(p.geom, pf.geom) - -- But eliminate those whose intersection is a point, because - -- polygons that only touch at a corner shouldn't be merged - and st_geometrytype(st_intersection(p.geom, pf.geom)) != 'ST_Point' - -- Sort first by polyid of the low-feature-count polygons - -- Then by descending featurecount and area of the - -- high-feature-count neighbors (area is in case of equal - -- featurecounts, we'll just pick the biggest to add to) - order by p.polyid, p.numfeatures desc, pf.area desc - -- OR, maybe for more aesthetic merges: - -- order by p.polyid, sharedbound desc -) -select distinct on (a.polyid) * from allneighborlist as a -); -ALTER TABLE lowfeaturecountpolygons ADD PRIMARY KEY(polyid); -SELECT Populate_Geometry_Columns('public.lowfeaturecountpolygons'::regclass); -CREATE INDEX lowfeaturecountpolygons_idx -ON lowfeaturecountpolygons -USING GIST (geom); --- VACUUM ANALYZE lowfeaturecountpolygons; - - -DROP TABLE IF EXISTS clusteredbuildings; -CREATE TABLE clusteredbuildings AS ( -WITH splitpolygonswithcontents AS ( - SELECT * - FROM splitpolygons sp - WHERE sp.numfeatures > 0 -) --- Add the count of features in the splitpolygon each building belongs to --- to the buildings table; sets us up to be able to run the clustering. -,buildingswithcount AS ( - SELECT b.*, p.numfeatures - FROM buildings b - LEFT JOIN splitpolygons p - ON b.polyid = p.polyid -) --- Cluster the buildings within each splitpolygon. The second term in the --- call to the ST_ClusterKMeans function is the number of clusters to create, --- so we're dividing the number of features by a constant (10 in this case) --- to get the number of clusters required to get close to the right number --- of features per cluster. 
--- TODO: This should certainly not be a hardcoded, the number of features --- per cluster should come from a project configuration table -,buildingstocluster as ( - SELECT * FROM buildingswithcount bc - WHERE bc.numfeatures > 0 -) -,clusteredbuildingsnocombineduid AS ( -SELECT *, - ST_ClusterKMeans(geom, cast((b.numfeatures / :num_buildings) + 1 as integer)) - over (partition by polyid) as cid -FROM buildingstocluster b -) --- uid combining the id of the outer splitpolygon and inner cluster -,clusteredbuildings as ( - select *, - polyid::text || '-' || cid as clusteruid - from clusteredbuildingsnocombineduid -) -SELECT * FROM clusteredbuildings -); --- ALTER TABLE clusteredbuildings ADD PRIMARY KEY(osm_id); -SELECT Populate_Geometry_Columns('public.clusteredbuildings'::regclass); -CREATE INDEX clusteredbuildings_idx -ON clusteredbuildings -USING GIST (geom); --- VACUUM ANALYZE clusteredbuildings; - - -DROP TABLE IF EXISTS dumpedpoints; -CREATE TABLE dumpedpoints AS ( -SELECT cb.osm_id, cb.polyid, cb.cid, cb.clusteruid, --- POSSIBLE BUG: PostGIS' Voronoi implementation seems to panic --- with segments less than 0.00004 degrees. --- Should probably use geography instead of geometry -(st_dumppoints(ST_Segmentize(geom, 0.00004))).geom -FROM clusteredbuildings cb -); -SELECT Populate_Geometry_Columns('public.dumpedpoints'::regclass); -CREATE INDEX dumpedpoints_idx -ON dumpedpoints -USING GIST (geom); --- VACUUM ANALYZE dumpedpoints; - -DROP TABLE IF EXISTS voronoids; -CREATE TABLE voronoids AS ( -SELECT - st_intersection((ST_Dump(ST_VoronoiPolygons( - ST_Collect(points.geom) - ))).geom, - sp.geom) as geom - FROM dumpedpoints as points, - splitpolygons as sp - where st_contains(sp.geom, points.geom) - group by sp.geom -); -CREATE INDEX voronoids_idx -ON voronoids -USING GIST (geom); --- VACUUM ANALYZE voronoids; - -DROP TABLE IF EXISTS voronois; -CREATE TABLE voronois AS ( -SELECT p.clusteruid, v.geom -FROM voronoids v, dumpedpoints p -WHERE st_within(p.geom, v.geom) -); -CREATE INDEX voronois_idx -ON voronois -USING GIST (geom); --- VACUUM ANALYZE voronois; -DROP TABLE voronoids; - -DROP TABLE IF EXISTS taskpolygons; -CREATE TABLE taskpolygons AS ( -SELECT ST_Union(geom) as geom, clusteruid -FROM voronois -GROUP BY clusteruid -); -CREATE INDEX taskpolygons_idx -ON taskpolygons -USING GIST (geom); --- VACUUM ANALYZE taskpolygons; - - -SELECT jsonb_build_object( - 'type', 'FeatureCollection', - 'features', jsonb_agg(feature) -) -FROM ( - SELECT jsonb_build_object( - 'type', 'Feature', - 'geometry', ST_AsGeoJSON(geom)::jsonb, - 'properties', jsonb_build_object() - ) AS feature - FROM taskpolygons -) AS features; diff --git a/src/backend/app/projects/project_crud.py b/src/backend/app/projects/project_crud.py index 680ecd9bd6..a90e296baf 100644 --- a/src/backend/app/projects/project_crud.py +++ b/src/backend/app/projects/project_crud.py @@ -26,23 +26,22 @@ from asyncio import gather from concurrent.futures import ThreadPoolExecutor, wait from io import BytesIO -from json import dumps, loads -from typing import List, Optional +from typing import List, Optional, Union from zipfile import ZipFile import geoalchemy2 import geojson -import numpy as np import pkg_resources -import pyproj import requests import segno import shapely.wkb as wkblib import sqlalchemy from asgiref.sync import async_to_sync from fastapi import File, HTTPException, UploadFile +from fastapi.concurrency import run_in_threadpool +from fmtm_splitter.splitter import split_by_sql, split_by_square from geoalchemy2.shape import 
from_shape, to_shape -from geojson import dump +from geojson import Feature, FeatureCollection from loguru import logger as log from osm_fieldwork.basemapper import create_basemap_file from osm_fieldwork.data_models import data_models_path @@ -57,14 +56,11 @@ MultiLineString, MultiPolygon, Polygon, - mapping, shape, ) -from shapely.ops import transform from sqlalchemy import and_, column, func, inspect, select, table, text from sqlalchemy.dialects.postgresql import insert from sqlalchemy.orm import Session -from sqlalchemy.sql import text from app.central import central_crud from app.config import settings @@ -88,6 +84,7 @@ async def get_projects( hashtags: List[str] = None, search: str = None, ): + """Get all projects.""" filters = [] if user_id: filters.append(db_models.DbProject.author_id == user_id) @@ -129,6 +126,7 @@ async def get_project_summaries( hashtags: str = None, search: str = None, ): + """Get project summary details for main page.""" project_count, db_projects = await get_projects( db, user_id, skip, limit, hashtags, search ) @@ -136,6 +134,7 @@ async def get_project_summaries( async def get_project(db: Session, project_id: int): + """Get a single project.""" db_project = ( db.query(db_models.DbProject) .filter(db_models.DbProject.id == project_id) @@ -145,6 +144,7 @@ async def get_project(db: Session, project_id: int): async def get_project_by_id(db: Session, project_id: int): + """Get a single project by id.""" db_project = ( db.query(db_models.DbProject) .filter(db_models.DbProject.id == project_id) @@ -155,6 +155,7 @@ async def get_project_by_id(db: Session, project_id: int): async def get_project_info_by_id(db: Session, project_id: int): + """Get the project info only by id.""" db_project_info = ( db.query(db_models.DbProjectInfo) .filter(db_models.DbProjectInfo.project_id == project_id) @@ -165,6 +166,7 @@ async def get_project_info_by_id(db: Session, project_id: int): async def delete_project_by_id(db: Session, project_id: int): + """Delete a project by id.""" try: db_project = ( db.query(db_models.DbProject) @@ -176,7 +178,7 @@ async def delete_project_by_id(db: Session, project_id: int): db.delete(db_project) db.commit() except Exception as e: - log.error(e) + log.exception(e) raise HTTPException(e) from e return f"Project {project_id} deleted" @@ -184,6 +186,7 @@ async def delete_project_by_id(db: Session, project_id: int): async def partial_update_project_info( db: Session, project_metadata: project_schemas.ProjectUpdate, project_id ): + """Partial project update for PATCH.""" # Get the project from db db_project = await get_project_by_id(db, project_id) @@ -214,6 +217,7 @@ async def partial_update_project_info( async def update_project_info( db: Session, project_metadata: project_schemas.ProjectUpload, project_id ): + """Full project update for PUT.""" user = project_metadata.author project_info = project_metadata.project_info @@ -261,6 +265,7 @@ async def update_project_info( async def create_project_with_project_info( db: Session, project_metadata: project_schemas.ProjectUpload, odk_project_id: int ): + """Create a new project, including all associated info.""" project_user = project_metadata.author project_info = project_metadata.project_info xform_title = project_metadata.xform_title @@ -360,6 +365,7 @@ async def upload_xlsform( name: str, category: str, ): + """Upload a custom XLSForm from the user.""" try: forms = table( "xlsforms", @@ -378,7 +384,7 @@ async def upload_xlsform( db.commit() return True except Exception as e: - log.error(e) + 
log.exception(e) raise HTTPException(status=400, detail={"message": str(e)}) from e @@ -387,9 +393,16 @@ async def update_multi_polygon_project_boundary( project_id: int, boundary: str, ): - """This function receives the project_id and boundary as a parameter + """Update the boundary for a project & update tasks. + + TODO requires refactoring, as it has too large of + a scope. It should update a project boundary only, then manage + tasks in another function. + + This function receives the project_id and boundary as a parameter and creates a task for each polygon in the database. - This function also creates a project outline from the multiple polygons received. + This function also creates a project outline from the multiple + polygons received. """ try: if isinstance(boundary, str): @@ -463,15 +476,19 @@ async def update_multi_polygon_project_boundary( return True except Exception as e: - log.error(e) + log.exception(e) raise HTTPException(e) from e -async def preview_tasks(boundary: str, dimension: int): - """Preview tasks by returning a list of task objects.""" - """Use a lambda function to remove the "z" dimension from each coordinate in the feature's geometry """ +async def preview_split_by_square(boundary: str, meters: int): + """Preview split by square for a project boundary. + + Use a lambda function to remove the "z" dimension from each + coordinate in the feature's geometry. + """ def remove_z_dimension(coord): + """Remove z dimension from geojson.""" return coord.pop() if len(coord) == 3 else None """ Check if the boundary is a Feature or a FeatureCollection """ @@ -492,96 +509,40 @@ def remove_z_dimension(coord): status_code=400, detail=f"Invalid GeoJSON type: {boundary['type']}" ) - """ Apply the lambda function to each coordinate in its geometry ro remove the z-dimension - if it exists""" + # Apply the lambda function to each coordinate in its geometry + # to remove the z-dimension - if it exists multi_polygons = [] for feature in features: list(map(remove_z_dimension, feature["geometry"]["coordinates"][0])) if feature["geometry"]["type"] == "MultiPolygon": multi_polygons.append(Polygon(feature["geometry"]["coordinates"][0][0])) - """Update the boundary polyon on the database.""" + # Merge multiple geometries into single polygon if multi_polygons: boundary = multi_polygons[0] for geom in multi_polygons[1:]: boundary = boundary.union(geom) - else: - boundary = shape(features[0]["geometry"]) - - minx, miny, maxx, maxy = boundary.bounds - - # 1 degree = 111139 m - value = dimension / 111139 - - nx = int((maxx - minx) / value) - ny = int((maxy - miny) / value) - # gx, gy = np.linspace(minx, maxx, nx), np.linspace(miny, maxy, ny) - - xdiff = abs(maxx - minx) - ydiff = abs(maxy - miny) - if xdiff > ydiff: - gx, gy = np.linspace(minx, maxx, ny), np.linspace(miny, miny + xdiff, ny) - else: - gx, gy = np.linspace(minx, minx + ydiff, nx), np.linspace(miny, maxy, nx) - grid = list() - - id = 0 - for i in range(len(gx) - 1): - for j in range(len(gy) - 1): - poly = Polygon( - [ - [gx[i], gy[j]], - [gx[i], gy[j + 1]], - [gx[i + 1], gy[j + 1]], - [gx[i + 1], gy[j]], - [gx[i], gy[j]], - ] - ) - - if boundary.intersection(poly): - feature = geojson.Feature( - geometry=boundary.intersection(poly), properties={"id": str(id)} - ) - id += 1 - - geom = shape(feature["geometry"]) - # Check if the geometry is a MultiPolygon - if geom.geom_type == "MultiPolygon": - # Get the constituent Polygon objects from the MultiPolygon - polygons = geom.geoms - - for x in range(len(polygons)): - # Convert the 
two polygons to GeoJSON format - feature1 = { - "type": "Feature", - "properties": {}, - "geometry": mapping(polygons[x]), - } - grid.append(feature1) - else: - grid.append(feature) - - collection = geojson.FeatureCollection(grid) - - # If project outline cannot be divided into multiple tasks, - # whole boundary is made into a single task. - if len(collection["features"]) == 0: - boundary = mapping(boundary) - out = { - "type": "FeatureCollection", - "features": [{"type": "Feature", "geometry": boundary, "properties": {}}], - } - return out - return collection + return await run_in_threadpool( + lambda: split_by_square( + boundary, + meters=meters, + ) + ) -async def get_osm_extracts(boundary: str): +async def get_osm_extracts(boundary: str) -> dict: """Request an extract from raw-data-api and extract the file contents. - The query is posted to raw-data-api and job initiated for fetching the extract. - The status of the job is polled every few seconds, until 'SUCCESS' is returned. - The resulting zip file is downloaded, extracted, and data returned. """ + # TODO update to use flatgeobuf file directly + # bind zip off + # fmtm-dev-project-xxx-extract + # FMTM/dev/xxx/fmtm-project-xxx-extract.flatgeobuf + # Filters for osm extracts query = { "filters": { @@ -594,12 +555,14 @@ async def get_osm_extracts(boundary: str): } # Boundary to extract data for - json_boundary = json.loads(boundary) - if json_boundary.get("features", None) is not None: - query["geometry"] = json_boundary - # query["geometry"] = json_boundary["features"][0]["geometry"] - else: - query["geometry"] = json_boundary + aoi = geojson.loads(boundary) + # Get first geom only from a FeatureCollection + # TODO perhaps add extra code for merging multiple geoms into one? + if isinstance(aoi, FeatureCollection): + aoi = aoi.get("features")[0] + if isinstance(aoi, Feature): + aoi = aoi.get("geometry") + query["geometry"] = aoi # Filename to generate query["fileName"] = "extract" @@ -614,12 +577,16 @@ async def get_osm_extracts(boundary: str): headers = {"accept": "application/json", "Content-Type": "application/json"} # Send the request to raw data api - result = requests.post(query_url, data=json.dumps(query), headers=headers) + try: + result = requests.post(query_url, data=json.dumps(query), headers=headers) + result.raise_for_status() + except requests.exceptions.HTTPError: + error_dict = result.json() + error_dict["status_code"] = result.status_code + log.error(f"Failed to get extract from raw data api: {error_dict}") + return error_dict - if result.status_code == 200: - task_id = result.json()["task_id"] - else: - return False + task_id = result.json()["task_id"] # Check status of task (PENDING, or SUCCESS) task_url = f"{base_url}/tasks/status/{task_id}" @@ -640,246 +607,72 @@ async def get_osm_extracts(boundary: str): fp = BytesIO(result.content) zfp = zipfile.ZipFile(fp, "r") zfp.extract(extract_filename, "/tmp/") - data = json.loads(zfp.read(extract_filename)) - - for feature in data["features"]: - properties = feature["properties"] - tags = properties.pop("tags", {}) - properties.update(tags) - + data = geojson.loads(zfp.read(extract_filename)) return data -async def split_into_tasks( - db: Session, project_geojson: str, no_of_buildings: int, has_data_extracts: bool +async def split_geojson_into_tasks( + db: Session, + project_geojson: Union[dict, FeatureCollection], + no_of_buildings: int, + custom_data_extract: Optional[str] = None, ): """Splits a project into tasks. Args: db (Session): A database session. 
- boundary (str): A GeoJSON string representing the boundary of the project to split into tasks. + project_geojson (str): A GeoJSON string representing the boundary of + the project to split into tasks. no_of_buildings (int): The number of buildings to include in each task. + custom_data_extract (str, optional): A GeoJSON string containing a + custom data extract. Returns: Any: A GeoJSON object containing the tasks for the specified project. """ - project_id = uuid.uuid4() - boundary_geoms = [] - split_geom_geojson = [] - - async def split_multi_geom_into_tasks(): - # Use asyncio.gather to concurrently process the async generator - split_poly = [ - split_polygon_into_tasks( - db, project_id, data, no_of_buildings, has_data_extracts - ) - for data in boundary_geoms - ] - - # Use asyncio.gather with list to collect results from the async generator - return ( - item for sublist in await gather(*split_poly) for item in sublist if sublist - ) - - if project_geojson["type"] == "FeatureCollection": - log.debug("Project boundary GeoJSON = FeatureCollection") - boundary_geoms.extend( - feature["geometry"] for feature in project_geojson["features"] - ) - geoms = await split_multi_geom_into_tasks() - split_geom_geojson.extend(geoms) - - elif project_geojson["type"] == "GeometryCollection": - log.debug("Project boundary GeoJSON = GeometryCollection") - geometries = project_geojson["geometries"] - boundary_geoms.extend(iter(geometries)) - geoms = await split_multi_geom_into_tasks() - split_geom_geojson.extend(geoms) - - elif project_geojson["type"] == "Feature": - log.debug("Project boundary GeoJSON = Feature") - boundary_geoms = project_geojson["geometry"] - geom = await split_polygon_into_tasks( - db, project_id, boundary_geoms, no_of_buildings, has_data_extracts - ) - split_geom_geojson.extend(geom) + # NOTE this is a temp id used interally for task splitting + # NOTE it's not the FMTM project id + aoi_id = uuid.uuid4() - elif project_geojson["type"] == "Polygon": - log.debug("Project boundary GeoJSON = Polygon") - boundary_geoms = project_geojson - geom = await split_polygon_into_tasks( - db, project_id, boundary_geoms, no_of_buildings, has_data_extracts - ) - split_geom_geojson.extend(geom) - - else: - log.error( - "Project boundary not one of: Polygon, Feature, GeometryCollection," - " FeatureCollection. Task splitting failed." 
- ) - return { - "type": "FeatureCollection", - "features": split_geom_geojson, - } - - -async def split_polygon_into_tasks( - db: Session, - project_id: uuid.UUID, - boundary_data: str, - no_of_buildings: int, - has_data_extracts: bool, -): - outline = shape(boundary_data) - db_task = db_models.DbProjectAOI( - project_id=project_id, - geom=outline.wkt, - ) - db.add(db_task) - db.commit() - - # Get the data extract from raw-data-api - # Input into DbBuildings and DbOsmLines - # TODO update to use flatgeobuf file directly - # No need to store in our database - if not has_data_extracts: - data = await get_osm_extracts(json.dumps(boundary_data)) - if not data: - return None - for feature in data["features"]: - feature_shape = shape(feature["geometry"]) - wkb_element = from_shape(feature_shape, srid=4326) - if feature["properties"].get("building") == "yes": - db_feature = db_models.DbBuildings( - project_id=project_id, geom=wkb_element, tags=feature["properties"] - ) - db.add(db_feature) - elif "highway" in feature["properties"]: - db_feature = db_models.DbOsmLines( - project_id=project_id, geom=wkb_element, tags=feature["properties"] - ) - db.add(db_feature) - - db.commit() + if custom_data_extract: + # TODO upload to s3 and get url + extract_geojson = await {} else: - # Remove the polygons outside of the project AOI using a parameterized query - query = text( - f""" - DELETE FROM ways_poly - WHERE NOT ST_Within(ST_Centroid(ways_poly.geom), (SELECT geom FROM project_aoi WHERE project_id = '{project_id}')); - """ - ) - result = db.execute(query) - db.commit() - - # TODO replace with fmtm_splitter algo - with open("app/db/split_algorithm.sql", "r") as sql_file: - query = sql_file.read() - log.debug(f"STARTED project {project_id} task splitting") - result = db.execute(text(query), params={"num_buildings": no_of_buildings}) - result = result.fetchall() - db.query(db_models.DbBuildings).delete() - db.query(db_models.DbOsmLines).delete() - db.query(db_models.DbProjectAOI).delete() - db.commit() - log.debug(f"COMPLETE project {project_id} task splitting") + # Generate a new data extract in raw-data-api + extract_geojson = await get_osm_extracts(json.dumps(project_geojson)) + if status_code := extract_geojson.get("status_code", None): + raise HTTPException( + status_code=status_code, + detail=(f"Failed to get data extract for reason: {extract_geojson}"), + ) - features = result[0][0]["features"] - if not features: - log.warning( - f"Project {project_id}: no tasks returned from splitting algorithm. 
" - f"Params: 'num_buildings': {no_of_buildings}" + log.debug(f"STARTED task splitting with id: {aoi_id}") + features = await run_in_threadpool( + lambda: split_by_sql( + project_geojson, + db, + num_buildings=no_of_buildings, + osm_extract=extract_geojson, ) - return [] - - log.debug(f"Project {project_id} split into {len(features)} tasks") + ) + log.debug(f"COMPLETE task splitting with id: {aoi_id}") return features -# async def update_project_boundary( -# db: Session, project_id: int, boundary: str, dimension: int -# ): -# # verify project exists in db -# db_project = await get_project_by_id(db, project_id) -# if not db_project: -# log.error(f"Project {project_id} doesn't exist!") -# return False - -# """Use a lambda function to remove the "z" dimension from each coordinate in the feature's geometry """ - -# def remove_z_dimension(coord): -# return coord.pop() if len(coord) == 3 else None - -# """ Check if the boundary is a Feature or a FeatureCollection """ -# if boundary["type"] == "Feature": -# features = [boundary] -# elif boundary["type"] == "FeatureCollection": -# features = boundary["features"] -# else: -# # Delete the created Project -# db.delete(db_project) -# db.commit() - -# # Raise an exception -# raise HTTPException( -# status_code=400, detail=f"Invalid GeoJSON type: {boundary['type']}" -# ) - -# """ Apply the lambda function to each coordinate in its geometry """ -# for feature in features: -# list(map(remove_z_dimension, feature["geometry"]["coordinates"][0])) - -# """Update the boundary polyon on the database.""" -# outline = shape(features[0]["geometry"]) - -# # If the outline is a multipolygon, use the first polygon -# if isinstance(outline, MultiPolygon): -# outline = outline.geoms[0] - -# db_project.outline = outline.wkt -# db_project.centroid = outline.centroid.wkt - -# db.commit() -# db.refresh(db_project) -# log.debug("Added project boundary!") - -# result = create_task_grid(db, project_id=project_id, delta=dimension) - -# tasks = eval(result) -# for poly in tasks["features"]: -# log.debug(poly) -# task_name = str(poly["properties"]["id"]) -# db_task = db_models.DbTask( -# project_id=project_id, -# project_task_name=task_name, -# outline=wkblib.dumps(shape(poly["geometry"]), hex=True), -# # qr_code=db_qr, -# # qr_code_id=db_qr.id, -# # project_task_index=feature["properties"]["fid"], -# project_task_index=1, -# # geometry_geojson=geojson.dumps(task_geojson), -# # initial_feature_count=len(task_geojson["features"]), -# ) - -# db.add(db_task) -# db.commit() - -# # FIXME: write to tasks table -# return True - - async def update_project_boundary( - db: Session, project_id: int, boundary: str, dimension: int + db: Session, project_id: int, boundary: str, meters: int ): + """Update the boundary for a project and update tasks.""" # verify project exists in db db_project = await get_project_by_id(db, project_id) if not db_project: log.error(f"Project {project_id} doesn't exist!") return False - """Use a lambda function to remove the "z" dimension from each coordinate in the feature's geometry """ - + # Use a lambda function to remove the "z" dimension from each + # coordinate in the feature's geometry def remove_z_dimension(coord): + """Remove the z dimension from a geojson.""" return coord.pop() if len(coord) == 3 else None """ Check if the boundary is a Feature or a FeatureCollection """ @@ -896,10 +689,6 @@ def remove_z_dimension(coord): } ] else: - # Delete the created Project - db.delete(db_project) - db.commit() - # Raise an exception raise HTTPException( 
status_code=400, detail=f"Invalid GeoJSON type: {boundary['type']}" @@ -912,7 +701,7 @@ def remove_z_dimension(coord): if feature["geometry"]["type"] == "MultiPolygon": multi_polygons.append(Polygon(feature["geometry"]["coordinates"][0][0])) - """Update the boundary polyon on the database.""" + """Update the boundary polygon on the database.""" if multi_polygons: outline = multi_polygons[0] for geom in multi_polygons[1:]: @@ -924,27 +713,19 @@ def remove_z_dimension(coord): db.commit() db.refresh(db_project) - log.debug("Added project boundary!") - - result = await create_task_grid(db, project_id=project_id, delta=dimension) - - # Delete features from the project - db.query(db_models.DbFeatures).filter( - db_models.DbFeatures.project_id == project_id - ).delete() + log.debug("Finished updating project boundary") - # Delete all tasks of the project if there are some - db.query(db_models.DbTask).filter( - db_models.DbTask.project_id == project_id - ).delete() - - tasks = eval(result) + log.debug("Splitting tasks") + tasks = split_by_square( + boundary, + meters=meters, + ) for poly in tasks["features"]: log.debug(poly) - task_name = str(poly["properties"]["id"]) + task_id = str(poly.get("properties", {}).get("id") or poly.get("id")) db_task = db_models.DbTask( project_id=project_id, - project_task_name=task_name, + project_task_name=task_id, outline=wkblib.dumps(shape(poly["geometry"]), hex=True), # qr_code=db_qr, # qr_code_id=db_qr.id, @@ -967,8 +748,11 @@ async def update_project_with_zip( task_type_prefix: str, uploaded_zip: UploadFile, ): - # TODO: ensure that logged in user is user who created this project, return 403 (forbidden) if not authorized + """Update a project from a zip file. + TODO ensure that logged in user is user who created this project, + return 403 (forbidden) if not authorized. + """ # ensure file upload is zip if uploaded_zip.content_type not in [ "application/zip", @@ -1007,14 +791,21 @@ async def update_project_with_zip( if outline_filename not in listed_files: raise HTTPException( status_code=400, - detail=f'Zip must contain file named "{outline_filename}" that contains a FeatureCollection outlining the project', + detail=( + f"Zip must contain file named '{outline_filename}' " + "that contains a FeatureCollection outlining the project" + ), ) task_outlines_filename = f"{project_name_prefix}_polygons.geojson" if task_outlines_filename not in listed_files: raise HTTPException( status_code=400, - detail=f'Zip must contain file named "{task_outlines_filename}" that contains a FeatureCollection where each Feature outlines a task', + detail=( + f"Zip must contain file named '{task_outlines_filename}' " + "that contains a FeatureCollection where each Feature " + "outlines a task" + ), ) # verify project exists in db @@ -1055,7 +846,10 @@ async def update_project_with_zip( db_qr = await get_dbqrcode_from_file( zip, QR_CODES_DIR + qr_filename, - f"QRCode for task {task_name} does not exist. File should be in {qr_filename}", + ( + f"QRCode for task {task_name} does not exist. 
" + f"File should be in {qr_filename}" + ), ) db.add(db_qr) @@ -1100,16 +894,21 @@ async def update_project_with_zip( return db_project - # Exception was raised by app logic and has an error message, just pass it along + # Exception was raised by app logic and has an error message, + # just pass it along except HTTPException as e: - raise e + log.error(e) + raise e from None # Unexpected exception except Exception as e: raise HTTPException( status_code=500, - detail=f"{task_count} tasks were created before the following error was thrown: {e}, on feature: {feature}", - ) + detail=( + f"{task_count} tasks were created before the " + f"following error was thrown: {e}, on feature: {feature}" + ), + ) from e # --------------------------- @@ -1192,6 +991,8 @@ async def upload_custom_data_extracts( db (Session): The database session object. project_id (int): The ID of the project. contents (str): The custom data extracts contents. + category (str, optional): The category assigned to the custom data extract. + Defaults to 'buildings'. Returns: bool: True if the upload is successful. @@ -1233,7 +1034,8 @@ async def upload_custom_data_extracts( else: wkb_element = from_shape(feature_shape, srid=4326) - # If the osm extracts contents do not have a title, provide an empty text for that. + # If the osm extracts contents do not have a title, + # provide an empty text for that. feature["properties"]["title"] = "" properties = flatten_dict(feature["properties"]) @@ -1279,6 +1081,7 @@ def generate_task_files( form_type: str, odk_credentials: project_schemas.ODKCentral, ): + """Generate all files for a task.""" project_log = log.bind(task="create_project", project_id=project_id) project_log.info(f"Generating files for task {task_id}") @@ -1366,10 +1169,11 @@ def generate_task_files( upload_media = False if features["features"] is None else True - # Update outfile containing osm extracts with the new geojson contents containing title in the properties. + # Update outfile containing osm extracts with the new geojson contents + # containing title in the properties. with open(extracts, "w") as jsonfile: jsonfile.truncate(0) # clear the contents of the file - dump(features, jsonfile) + geojson.dump(features, jsonfile) project_log.info( f"Generating xform for task: {task_id} " @@ -1426,7 +1230,7 @@ def generate_appuser_files( form_type: str, background_task_id: Optional[uuid.UUID] = None, ): - """Generate the files for each appuser. + """Generate the files for a project. QR code, new XForm, and the OSM data extract. @@ -1540,12 +1344,14 @@ def generate_appuser_files( feature_mappings = [] for feature in filtered_data_extract["features"]: - # If the osm extracts contents do not have a title, provide an empty text for that. + # If the osm extracts contents do not have a title, + # provide an empty text for that. feature["properties"]["title"] = "" feature_shape = shape(feature["geometry"]) - # If the centroid of the Polygon is not inside the outline, skip the feature. + # If the centroid of the Polygon is not inside the outline, + # skip the feature. if extract_polygon and ( not shape(outline).contains(shape(feature_shape.centroid)) ): @@ -1565,8 +1371,8 @@ def generate_appuser_files( # Generating QR Code, XForm and uploading OSM Extracts to the form. # Creating app users and updating the role of that user. 
- get_task_lists_sync = async_to_sync(tasks_crud.get_task_lists) - tasks_list = get_task_lists_sync(db, project_id) + get_task_id_list_sync = async_to_sync(tasks_crud.get_task_id_list) + task_list = get_task_id_list_sync(db, project_id) # Run with expensive task via threadpool def wrap_generate_task_files(task): @@ -1593,7 +1399,7 @@ def wrap_generate_task_files(task): # Submit tasks to the thread pool futures = [ executor.submit(wrap_generate_task_files, task) - for task in tasks_list + for task in task_list ] # Wait for all tasks to complete wait(futures) @@ -1626,6 +1432,7 @@ async def create_qrcode( project_name: str, odk_central_url: str = None, ): + """Create a QR code for a task.""" # Make QR code for an app_user. log.debug(f"Generating base64 encoded QR settings for token: {token}") qrcode_data = await central_crud.create_qrcode( @@ -1702,13 +1509,13 @@ async def get_task_geometry(db: Session, project_id: int): async def get_project_features_geojson(db: Session, project_id: int): + """Get a geojson of all features for a task.""" db_features = ( db.query(db_models.DbFeatures) .filter(db_models.DbFeatures.project_id == project_id) .all() ) - """Get a geojson of all features for a task.""" query = text( f"""SELECT jsonb_build_object( 'type', 'FeatureCollection', @@ -1740,115 +1547,23 @@ async def get_project_features_geojson(db: Session, project_id: int): return features -async def create_task_grid(db: Session, project_id: int, delta: int): - try: - # Query DB for project AOI - projects = table("projects", column("outline"), column("id")) - where = f"projects.id={project_id}" - sql = select(geoalchemy2.functions.ST_AsGeoJSON(projects.c.outline)).where( - text(where) - ) - result = db.execute(sql) - # There should only be one match - if result.rowcount != 1: - log.warning(str(sql)) - return False - data = result.fetchall() - boundary = shape(loads(data[0][0])) - minx, miny, maxx, maxy = boundary.bounds - - # 1 degree = 111139 m - value = delta / 111139 - - nx = int((maxx - minx) / value) - ny = int((maxy - miny) / value) - # gx, gy = np.linspace(minx, maxx, nx), np.linspace(miny, maxy, ny) - - xdiff = maxx - minx - ydiff = maxy - miny - if xdiff > ydiff: - gx, gy = np.linspace(minx, maxx, ny), np.linspace(miny, miny + xdiff, ny) - else: - gx, gy = np.linspace(minx, minx + ydiff, nx), np.linspace(miny, maxy, nx) - - grid = list() - - id = 0 - for i in range(len(gx) - 1): - for j in range(len(gy) - 1): - poly = Polygon( - [ - [gx[i], gy[j]], - [gx[i], gy[j + 1]], - [gx[i + 1], gy[j + 1]], - [gx[i + 1], gy[j]], - [gx[i], gy[j]], - ] - ) - - if boundary.intersection(poly): - feature = geojson.Feature( - geometry=boundary.intersection(poly), properties={"id": str(id)} - ) - - geom = shape(feature["geometry"]) - # Check if the geometry is a MultiPolygon - if geom.geom_type == "MultiPolygon": - # Get the constituent Polygon objects from the MultiPolygon - polygons = geom.geoms - - for x in range(len(polygons)): - id += 1 - # Convert the two polygons to GeoJSON format - feature1 = { - "type": "Feature", - "properties": {"id": str(id)}, - "geometry": mapping(polygons[x]), - } - grid.append(feature1) - else: - id += 1 - grid.append(feature) - - collection = geojson.FeatureCollection(grid) - # jsonout = open("tmp.geojson", 'w') - # out = dump(collection, jsonout) - out = dumps(collection) - - # If project outline cannot be divided into multiple tasks, - # whole boundary is made into a single task. 
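The hunk above renames get_task_lists to get_task_id_list and keeps the thread-pool fan-out for per-task file generation: the async ID lookup is wrapped with async_to_sync, each task ID is submitted to a ThreadPoolExecutor, and the futures are gathered with wait(). A stripped-down sketch of that pattern, with async_to_sync assumed to come from asgiref and a stand-in callable in place of generate_task_files:

    from concurrent.futures import ThreadPoolExecutor, wait

    from asgiref.sync import async_to_sync  # assumed source; import not shown in the hunk

    from app.tasks import tasks_crud  # provides the renamed get_task_id_list


    def generate_files_for_all_tasks(db, project_id: int, generate_one_task):
        """Sketch: run one blocking per-task job on a thread pool."""
        get_task_id_list_sync = async_to_sync(tasks_crud.get_task_id_list)
        task_id_list = get_task_id_list_sync(db, project_id)

        with ThreadPoolExecutor() as executor:
            futures = [
                executor.submit(generate_one_task, db, project_id, task_id)
                for task_id in task_id_list
            ]
            wait(futures)  # block until every per-task job has finished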
- result = json.loads(out) - if len(result["features"]) == 0: - geom = loads(data[0][0]) - out = { - "type": "FeatureCollection", - "features": [ - { - "type": "Feature", - "geometry": geom, - "properties": {"id": project_id}, - } - ], - } - out = json.dumps(out) - - return out - except Exception as e: - log.exception(e) - - async def get_json_from_zip(zip, filename: str, error_detail: str): + """Extract json file from zip.""" try: with zip.open(filename) as file: data = file.read() return json.loads(data) except Exception as e: - raise HTTPException(status_code=400, detail=f"{error_detail} ----- Error: {e}") + log.exception(e) + raise HTTPException( + status_code=400, detail=f"{error_detail} ----- Error: {e}" + ) from e async def get_outline_from_geojson_file_in_zip( zip, filename: str, error_detail: str, feature_index: int = 0 ): + """Parse geojson outline within a zip.""" try: with zip.open(filename) as file: data = file.read() @@ -1860,7 +1575,7 @@ async def get_outline_from_geojson_file_in_zip( shape_from_geom = shape(geom) return shape_from_geom except Exception as e: - log.error(e) + log.exception(e) raise HTTPException( status_code=400, detail=f"{error_detail} ----- Error: {e} ----", @@ -1868,11 +1583,12 @@ async def get_outline_from_geojson_file_in_zip( async def get_shape_from_json_str(feature: str, error_detail: str): + """Parse geojson outline within a zip to shapely geom.""" try: geom = feature["geometry"] return shape(geom) except Exception as e: - log.error(e) + log.exception(e) raise HTTPException( status_code=400, detail=f"{error_detail} ----- Error: {e} ---- Json: {feature}", @@ -1880,6 +1596,7 @@ async def get_shape_from_json_str(feature: str, error_detail: str): async def get_dbqrcode_from_file(zip, qr_filename: str, error_detail: str): + """Get qr code from database during import.""" try: with zip.open(qr_filename) as qr_file: binary_qrcode = qr_file.read() @@ -1893,7 +1610,7 @@ async def get_dbqrcode_from_file(zip, qr_filename: str, error_detail: str): status_code=400, detail=f"{qr_filename} is an empty file" ) from None except Exception as e: - log.error(e) + log.exception(e) raise HTTPException( status_code=400, detail=f"{error_detail} ----- Error: {e}" ) from e @@ -1907,7 +1624,10 @@ async def get_dbqrcode_from_file(zip, qr_filename: str, error_detail: str): async def convert_to_app_project(db_project: db_models.DbProject): - # TODO refactor login to Pydantic models + """Legacy function to convert db models --> Pydantic. + + TODO refactor to use Pydantic model methods instead. + """ if not db_project: log.debug("convert_to_app_project called, but no project provided") return None @@ -1927,7 +1647,10 @@ async def convert_to_app_project(db_project: db_models.DbProject): async def convert_to_app_project_info(db_project_info: db_models.DbProjectInfo): - # TODO refactor login to Pydantic models + """Legacy function to convert db models --> Pydantic. + + TODO refactor to use Pydantic model methods instead. + """ if db_project_info: app_project_info: project_schemas.ProjectInfo = db_project_info return app_project_info @@ -1938,7 +1661,10 @@ async def convert_to_app_project_info(db_project_info: db_models.DbProjectInfo): async def convert_to_app_projects( db_projects: List[db_models.DbProject], ) -> List[project_schemas.ProjectOut]: - # TODO refactor login to Pydantic models + """Legacy function to convert db models --> Pydantic. + + TODO refactor to use Pydantic model methods instead. 
+ """ if db_projects and len(db_projects) > 0: async def convert_project(project): @@ -1953,7 +1679,10 @@ async def convert_project(project): async def convert_to_project_summary(db_project: db_models.DbProject): - # TODO refactor login to Pydantic models + """Legacy function to convert db models --> Pydantic. + + TODO refactor to use Pydantic model methods instead. + """ if db_project: summary: project_schemas.ProjectSummary = db_project @@ -1977,7 +1706,10 @@ async def convert_to_project_summary(db_project: db_models.DbProject): async def convert_to_project_summaries( db_projects: List[db_models.DbProject], ) -> List[project_schemas.ProjectSummary]: - # TODO refactor login to Pydantic models + """Legacy function to convert db models --> Pydantic. + + TODO refactor to use Pydantic model methods instead. + """ if db_projects and len(db_projects) > 0: async def convert_summary(project): @@ -1992,7 +1724,10 @@ async def convert_summary(project): async def convert_to_project_feature(db_project_feature: db_models.DbFeatures): - # TODO refactor login to Pydantic models + """Legacy function to convert db models --> Pydantic. + + TODO refactor to use Pydantic model methods instead. + """ if db_project_feature: app_project_feature: project_schemas.Feature = db_project_feature @@ -2011,7 +1746,10 @@ async def convert_to_project_feature(db_project_feature: db_models.DbFeatures): async def convert_to_project_features( db_project_features: List[db_models.DbFeatures], ) -> List[project_schemas.Feature]: - # TODO refactor login to Pydantic models + """Legacy function to convert db models --> Pydantic. + + TODO refactor to use Pydantic model methods instead. + """ if db_project_features and len(db_project_features) > 0: async def convert_feature(project_feature): @@ -2026,6 +1764,7 @@ async def convert_feature(project_feature): async def get_project_features(db: Session, project_id: int, task_id: int = None): + """Get features from database for a project.""" if task_id: features = ( db.query(db_models.DbFeatures) @@ -2042,15 +1781,6 @@ async def get_project_features(db: Session, project_id: int, task_id: int = None return await convert_to_project_features(features) -async def get_extract_completion_count(project_id: int, db: Session): - project = ( - db.query(db_models.DbProject) - .filter(db_models.DbProject.id == project_id) - .first() - ) - return project.extract_completed_count - - async def get_background_task_status(task_id: uuid.UUID, db: Session): """Get the status of a background task.""" task = ( @@ -2065,16 +1795,17 @@ async def get_background_task_status(task_id: uuid.UUID, db: Session): async def insert_background_task_into_database( - db: Session, name: str = None, project_id=None + db: Session, name: str = None, project_id: str = None ) -> uuid.uuid4: - """Inserts a new task into the database - Params: - db: database session - name: name of the task. - project_id: associated project id - - Return: - task_id(uuid.uuid4): The background task uuid for tracking. + """Inserts a new task into the database. + + Args: + db (Session): database session + name (str): name of the task. + project_id (str): associated project id + + Returns: + task_id (uuid.uuid4): The background task uuid for tracking. 
""" task_id = uuid.uuid4() @@ -2091,12 +1822,17 @@ async def insert_background_task_into_database( async def update_background_task_status_in_database( db: Session, task_id: uuid.UUID, status: int, message: str = None -): - """Updates the status of a task in the database - Params: - db: database session - task_id: uuid of the task - status: status of the task. +) -> None: + """Updates the status of a task in the database. + + Args: + db (Session): database session. + task_id (uuid.UUID): uuid of the task. + status (int): status of the task. + message (str): optional message to add to the db task. + + Returns: + None """ db.query(db_models.BackgroundTasks).filter( db_models.BackgroundTasks.id == str(task_id) @@ -2111,7 +1847,8 @@ async def update_background_task_status_in_database( return True -# NOTE defined as non-async to run in separate thread +# TODO update to store extracts in S3 instead, not db +# TODO convert geojson to fgb and upload def add_custom_extract_to_db( db: Session, features: dict, @@ -2121,9 +1858,10 @@ def add_custom_extract_to_db( """Insert geojson features into db for a project. Args: - db: database session - project_id: id of the project - features: features to be added. + db: database session. + features: features to be added. + background_task_id (uuid.UUID): Task ID for database. + feature_type (str): feature type category in OSM. """ try: success = 0 @@ -2192,6 +1930,7 @@ def add_custom_extract_to_db( async def update_project_form( db: Session, project_id: int, form_type: str, form: UploadFile = File(None) ): + """Upload a new custom XLSForm for a project.""" project = await get_project(db, project_id) category = project.xform_title project_title = project.project_name_prefix @@ -2250,13 +1989,19 @@ async def update_project_form( feature_mappings = [] for feature in outline_geojson["features"]: - # If the osm extracts contents do not have a title, provide an empty text for that. + # If the osm extracts contents do not have a title, + # provide an empty text for that. feature["properties"]["title"] = "" feature_shape = shape(feature["geometry"]) - # # If the centroid of the Polygon is not inside the outline, skip the feature. - # if extract_polygon and (not shape(outline).contains(shape(feature_shape.centroid))): + # # If the centroid of the Polygon is not inside the outline, + # skip the feature. + # if extract_polygon and ( + # not shape(outline_geojson).contains( + # shape(feature_shape.centroid + # )) + # ): # continue wkb_element = from_shape(feature_shape, srid=4326) @@ -2279,7 +2024,7 @@ async def update_project_form( db.add(db_feature) db.commit() - tasks_list = await tasks_crud.get_task_lists(db, project_id) + tasks_list = await tasks_crud.get_task_id_list(db, project_id) for task in tasks_list: task_obj = await tasks_crud.get_task(db, task) @@ -2291,12 +2036,11 @@ async def update_project_form( f"""UPDATE features SET task_id={task} WHERE id in ( - SELECT id FROM features - WHERE project_id={project_id} and ST_Intersects(geometry, '{task_obj.outline}'::Geometry) - - )""" + WHERE project_id={project_id} and + ST_Intersects(geometry, '{task_obj.outline}'::Geometry) + )""" ) result = db.execute(query) @@ -2321,14 +2065,16 @@ async def update_project_form( result = db.execute(query) features = result.fetchone()[0] + # This file will store xml contents of an xls form. 
+ xform = f"/tmp/{project_title}_{category}_{task}.xml" + # This file will store osm extracts + extracts = f"/tmp/{project_title}_{category}_{task}.geojson" - xform = f"/tmp/{project_title}_{category}_{task}.xml" # This file will store xml contents of an xls form. - extracts = f"/tmp/{project_title}_{category}_{task}.geojson" # This file will store osm extracts - - # Update outfile containing osm extracts with the new geojson contents containing title in the properties. + # Update outfile containing osm extracts with the new geojson contents + # containing title in the properties. with open(extracts, "w") as jsonfile: jsonfile.truncate(0) # clear the contents of the file - dump(features, jsonfile) + geojson.dump(features, jsonfile) outfile = central_crud.generate_updated_xform(xlsform, xform, form_type) @@ -2346,6 +2092,7 @@ async def update_odk_credentials_in_db( odkid: int, db: Session, ): + """Update odk credentials for a project.""" project_instance.odkid = odkid project_instance.odk_central_url = odk_central_cred.odk_central_url project_instance.odk_central_user = odk_central_cred.odk_central_user @@ -2377,10 +2124,11 @@ async def get_extracted_data_from_db(db: Session, project_id: int, outfile: str) result = db.execute(query) features = result.fetchone()[0] - # Update outfile containing osm extracts with the new geojson contents containing title in the properties. + # Update outfile containing osm extracts with the new geojson contents + # containing title in the properties. with open(outfile, "w") as jsonfile: jsonfile.truncate(0) - dump(features, jsonfile) + geojson.dump(features, jsonfile) # NOTE defined as non-async to run in separate thread @@ -2395,6 +2143,7 @@ def get_project_tiles( """Get the tiles for a project. Args: + db (Session): SQLAlchemy db session. project_id (int): ID of project to create tiles for. background_task_id (uuid.UUID): UUID of background task to track. source (str): Tile source ("esri", "bing", "topo", "google", "oam"). 
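Several hunks in this file (generate_task_files, update_project_form, get_extracted_data_from_db) switch the extract-file writes from the bare dump call to geojson.dump. A minimal sketch of that write path, with a throwaway FeatureCollection and coordinates standing in for the features fetched from the database:

    import geojson

    # Stand-in for the FeatureCollection pulled from the database in the hunks above.
    features = geojson.FeatureCollection(
        [
            geojson.Feature(
                geometry=geojson.Point((85.3, 27.7)),  # illustrative coordinates
                properties={"title": ""},  # the code above backfills an empty title
            )
        ]
    )

    outfile = "/tmp/example_extract.geojson"
    with open(outfile, "w") as jsonfile:
        jsonfile.truncate(0)  # clear any previous contents, as the code above does
        geojson.dump(features, jsonfile)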
@@ -2470,7 +2219,7 @@ def get_project_tiles( log.info(f"Tiles generation process completed for project id {project_id}") except Exception as e: - log.error(str(e)) + log.exception(str(e)) log.error(f"Tiles generation process failed for project id {project_id}") tile_path_instance.status = 2 @@ -2482,6 +2231,7 @@ def get_project_tiles( async def get_mbtiles_list(db: Session, project_id: int): + """List mbtiles in database for a project.""" try: tiles_list = ( db.query( @@ -2507,8 +2257,8 @@ async def get_mbtiles_list(db: Session, project_id: int): return processed_tiles_list except Exception as e: - log.error(e) - raise HTTPException(status_code=400, detail=str(e)) + log.exception(e) + raise HTTPException(status_code=400, detail=str(e)) from e async def convert_geojson_to_osm(geojson_file: str): @@ -2559,31 +2309,8 @@ async def update_project_location_info( db_project.location_str = address if address is not None else "" -def convert_geojson_to_epsg4326(input_geojson): - source_crs = pyproj.CRS( - input_geojson.get("crs", {}).get("properties", {}).get("name", "EPSG:4326") - ) - transformer = pyproj.Transformer.from_crs(source_crs, "EPSG:4326", always_xy=True) - - # Convert the coordinates to EPSG:4326 - transformed_features = [] - for feature in input_geojson.get("features", []): - geom = shape(feature.get("geometry", {})) - transformed_geom = transform(transformer.transform, geom) - transformed_feature = { - "type": "Feature", - "geometry": transformed_geom.__geo_interface__, - "properties": feature.get("properties", {}), - } - transformed_features.append(transformed_feature) - - # Create a new GeoJSON with EPSG:4326 - output_geojson = {"type": "FeatureCollection", "features": transformed_features} - - return output_geojson - - -def check_crs(input_geojson: dict): +def check_crs(input_geojson: Union[dict, FeatureCollection]): + """Validate CRS is valid for a geojson.""" log.debug("validating coordinate reference system") def is_valid_crs(crs_name): @@ -2597,9 +2324,12 @@ def is_valid_crs(crs_name): def is_valid_coordinate(coord): return -180 <= coord[0] <= 180 and -90 <= coord[1] <= 90 - error_message = "ERROR: Unsupported coordinate system, it is recommended to use a GeoJSON file in WGS84(EPSG 4326) standard." + error_message = ( + "ERROR: Unsupported coordinate system, it is recommended to use a " + "GeoJSON file in WGS84(EPSG 4326) standard." 
+ ) if "crs" in input_geojson: - crs = input_geojson["crs"]["properties"]["name"] + crs = input_geojson.get("crs", {}).get("properties", {}).get("name") if not is_valid_crs(crs): log.error(error_message) raise HTTPException(status_code=400, detail=error_message) @@ -2628,6 +2358,7 @@ def is_valid_coordinate(coord): async def get_tasks_count(db: Session, project_id: int): + """Get number of tasks for a project.""" db_task = ( db.query(db_models.DbProject) .filter(db_models.DbProject.id == project_id) @@ -2638,9 +2369,10 @@ async def get_tasks_count(db: Session, project_id: int): async def get_pagination(page: int, count: int, results_per_page: int, total: int): + """Pagination result for splash page.""" total_pages = (count + results_per_page - 1) // results_per_page - hasNext = (page * results_per_page) < count - hasPrev = page > 1 + hasNext = (page * results_per_page) < count # noqa: N806 + hasPrev = page > 1 # noqa: N806 pagination = project_schemas.PaginationInfo( hasNext=hasNext, diff --git a/src/backend/app/projects/project_routes.py b/src/backend/app/projects/project_routes.py index fc157fbb33..8522aa3b95 100644 --- a/src/backend/app/projects/project_routes.py +++ b/src/backend/app/projects/project_routes.py @@ -23,6 +23,7 @@ from pathlib import Path from typing import Optional +import geojson from fastapi import ( APIRouter, BackgroundTasks, @@ -405,29 +406,37 @@ async def upload_multi_project_boundary( async def task_split( project_geojson: UploadFile = File(...), no_of_buildings: int = Form(50), - has_data_extracts: bool = Form(False), + custom_data_extract: Optional[UploadFile] = File(None), db: Session = Depends(database.get_db), ): """Split a task into subtasks. Args: - project_geojson (UploadFile): The file to split. - no_of_buildings (int, optional): The number of buildings per subtask. Defaults to 50. + project_geojson (UploadFile): The geojson to split. + Should be a FeatureCollection. + no_of_buildings (int, optional): The number of buildings per subtask. + Defaults to 50. db (Session, optional): The database session. Injected by FastAPI. Returns: The result of splitting the task into subtasks. 
""" - # read entire file - content = await project_geojson.read() - boundary = json.loads(content) - + # read project boundary + boundary = geojson.loads(await project_geojson.read()) # Validatiing Coordinate Reference System check_crs(boundary) - result = await project_crud.split_into_tasks( - db, boundary, no_of_buildings, has_data_extracts + # read custom data extract + if custom_data_extract: + custom_data_extract = geojson.loads(await custom_data_extract.read()) + check_crs(custom_data_extract) + + result = await project_crud.split_geojson_into_tasks( + db, + boundary, + no_of_buildings, + custom_data_extract, ) return result @@ -754,9 +763,11 @@ async def generate_log( task_status, task_message = await project_crud.get_background_task_status( uuid, db ) - extract_completion_count = await project_crud.get_extract_completion_count( - project_id, db - ) + extract_completion_count = ( + db.query(db_models.DbProject) + .filter(db_models.DbProject.id == project_id) + .first() + ).extract_completed_count with open("/opt/logs/create_project.json", "r") as log_file: logs = [json.loads(line) for line in log_file] @@ -799,20 +810,13 @@ async def get_categories(): return categories -@router.post("/preview_tasks/") -async def preview_tasks( - project_geojson: UploadFile = File(...), dimension: int = Form(500) +@router.post("/preview_split_by_square/") +async def preview_split_by_square( + project_geojson: UploadFile = File(...), dimension: int = Form(100) ): - """Preview tasks for a project. - - This endpoint allows you to preview tasks for a project. - - ## Request Body - - `project_id` (int): the project's id. Required. - - ## Response - - Returns a JSON object containing a list of tasks. + """Preview splitting by square. + TODO update to use a response_model """ # Validating for .geojson File. 
file_name = os.path.splitext(project_geojson.filename) @@ -823,12 +827,12 @@ async def preview_tasks( # read entire file content = await project_geojson.read() - boundary = json.loads(content) + boundary = geojson.loads(content) # Validatiing Coordinate Reference System check_crs(boundary) - result = await project_crud.preview_tasks(boundary, dimension) + result = await project_crud.preview_split_by_square(boundary, dimension) return result diff --git a/src/backend/app/submission/submission_crud.py b/src/backend/app/submission/submission_crud.py index b44cb28c1e..dbadecc4b7 100644 --- a/src/backend/app/submission/submission_crud.py +++ b/src/backend/app/submission/submission_crud.py @@ -449,8 +449,8 @@ def get_all_submissions(db: Session, project_id): project = get_odk_project(odk_credentials) - get_task_lists_sync = async_to_sync(get_task_lists) - task_lists = get_task_lists_sync(db, project_id) + get_task_id_list_sync = async_to_sync(get_task_id_list) + task_lists = get_task_id_list_sync(db, project_id) submissions = project.getAllSubmissions(project_info.odkid, task_lists) return submissions diff --git a/src/backend/app/submission/submission_routes.py b/src/backend/app/submission/submission_routes.py index ed7606ece6..d20014c429 100644 --- a/src/backend/app/submission/submission_routes.py +++ b/src/backend/app/submission/submission_routes.py @@ -157,7 +157,7 @@ async def get_submission_count( @router.post("/conflate_data") -async def conflate_osm_date( +async def conflate_osm_data( project_id: int, db: Session = Depends(database.get_db), ): diff --git a/src/backend/app/tasks/tasks_crud.py b/src/backend/app/tasks/tasks_crud.py index ca8c1cb4c1..0ab706de57 100644 --- a/src/backend/app/tasks/tasks_crud.py +++ b/src/backend/app/tasks/tasks_crud.py @@ -43,8 +43,8 @@ async def get_task_count_in_project(db: Session, project_id: int): return result.fetchone()[0] -async def get_task_lists(db: Session, project_id: int): - """Get a list of tasks for a project.""" +async def get_task_id_list(db: Session, project_id: int) -> list[int]: + """Get a list of tasks id for a project.""" query = text( """ SELECT id diff --git a/src/backend/migrations/000-remove-user-password.sql b/src/backend/migrations/000-remove-user-password.sql index 5bf9587924..244cdc109f 100644 --- a/src/backend/migrations/000-remove-user-password.sql +++ b/src/backend/migrations/000-remove-user-password.sql @@ -1,7 +1,5 @@ -- ## Migration to remove password field from public.users (replaced with OSM OAuth) - --- ## Apply Migration -- Start a transaction BEGIN; -- Drop the 'password' column if it exists @@ -9,13 +7,3 @@ ALTER TABLE IF EXISTS public.users DROP COLUMN IF EXISTS password; -- Commit the transaction COMMIT; - - --- ## Revert Migration (comment above, uncomment below) --- -- Start a transaction --- BEGIN; --- -- Add the 'password' column back if it doesn't exist --- ALTER TABLE public.users --- ADD COLUMN IF NOT EXISTS password character varying; --- -- Commit the transaction --- COMMIT; diff --git a/src/backend/migrations/001-project-split-type-fields.sql b/src/backend/migrations/001-project-split-type-fields.sql index f7be161f43..48b230526a 100644 --- a/src/backend/migrations/001-project-split-type-fields.sql +++ b/src/backend/migrations/001-project-split-type-fields.sql @@ -3,9 +3,6 @@ -- * Add field project.task_split_dimension (int). -- * Add field project.task_num_buildings (int). 
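The migration hunks here drop the commented-out revert blocks from the forward scripts; standalone revert scripts are added under src/backend/migrations/revert/ later in this patch. Purely as an illustration (this is not the project's migration tooling), one of these scripts can be applied or reverted from Python using the Postgres driver already sitting behind SQLAlchemy; the DSN and paths below are placeholders:

    import psycopg2  # driver assumption; any DBAPI connection would do


    def run_sql_script(dsn: str, path: str) -> None:
        """Run one migration or revert script; each script carries its own BEGIN/COMMIT."""
        with open(path) as sql_file:
            script = sql_file.read()
        conn = psycopg2.connect(dsn)
        conn.autocommit = True  # let the script manage its own transaction
        try:
            with conn.cursor() as cur:
                cur.execute(script)
        finally:
            conn.close()


    # e.g. undo the split-type fields added by migration 001:
    # run_sql_script(
    #     "postgresql://fmtm:pass@localhost:5432/fmtm",  # placeholder DSN
    #     "src/backend/migrations/revert/001-project-split-type-fields.sql",
    # )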
- - --- ## Apply Migration -- Start a transaction BEGIN; -- Create task_split_type enum if it doesn't exist @@ -42,31 +39,3 @@ ALTER TABLE IF EXISTS public.projects ADD COLUMN IF NOT EXISTS task_num_buildings SMALLINT; -- Commit the transaction COMMIT; - - - --- -- ## Revert Migration (comment above, uncomment below) --- -- Start a transaction --- BEGIN; --- -- Revert task_split_type type --- DO $$ --- BEGIN --- -- Check if the column exists --- IF EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'projects' AND column_name = 'task_split_type') THEN --- -- Alter the column if it exists --- EXECUTE ' --- ALTER TABLE public.projects --- ALTER COLUMN task_split_type --- TYPE VARCHAR --- USING task_split_type::VARCHAR --- '; --- END IF; --- END $$; --- -- Remove extra fields --- ALTER TABLE IF EXISTS public.projects --- DROP COLUMN IF EXISTS task_split_dimension, --- DROP COLUMN IF EXISTS task_num_buildings; --- -- Remove enum --- DROP TYPE IF EXISTS public.tasksplittype; --- -- Commit the transaction --- COMMIT; diff --git a/src/backend/migrations/init/fmtm_base_schema.sql b/src/backend/migrations/init/fmtm_base_schema.sql index e8354d16be..587952aacf 100644 --- a/src/backend/migrations/init/fmtm_base_schema.sql +++ b/src/backend/migrations/init/fmtm_base_schema.sql @@ -172,41 +172,6 @@ CREATE TABLE public.background_tasks ( ALTER TABLE public.background_tasks OWNER TO fmtm; -CREATE TABLE public.buildings ( - id integer, - project_id character varying, - osm_id character varying, - geom public.geometry(Polygon,4326), - tags jsonb, - polyid bigint -); -ALTER TABLE public.buildings OWNER TO fmtm; - - -CREATE TABLE public.clusteredbuildings ( - id integer, - project_id character varying, - osm_id character varying, - geom public.geometry(Polygon,4326), - tags jsonb, - polyid bigint, - numfeatures bigint, - cid integer, - clusteruid text -); -ALTER TABLE public.clusteredbuildings OWNER TO fmtm; - - -CREATE TABLE public.dumpedpoints ( - osm_id character varying, - polyid bigint, - cid integer, - clusteruid text, - geom public.geometry(Point,4326) -); -ALTER TABLE public.dumpedpoints OWNER TO fmtm; - - CREATE TABLE public.features ( id integer NOT NULL, project_id integer, @@ -245,20 +210,6 @@ ALTER TABLE public.licenses_id_seq OWNER TO fmtm; ALTER SEQUENCE public.licenses_id_seq OWNED BY public.licenses.id; -CREATE TABLE public.lowfeaturecountpolygons ( - polyid bigint NOT NULL, - geom public.geometry(Polygon,4326), - geog public.geography(Geometry,4326), - numfeatures bigint, - area double precision, - n_polyid bigint, - n_area double precision, - n_numfeatures bigint, - sharedbound double precision -); -ALTER TABLE public.lowfeaturecountpolygons OWNER TO fmtm; - - CREATE TABLE public.mapping_issue_categories ( id integer NOT NULL, name character varying NOT NULL, @@ -340,24 +291,6 @@ CREATE TABLE public.project_allowed_users ( ALTER TABLE public.project_allowed_users OWNER TO fmtm; -CREATE TABLE public.project_aoi ( - id integer NOT NULL, - project_id character varying, - geom public.geometry(Geometry,4326), - tags jsonb -); -ALTER TABLE public.project_aoi OWNER TO fmtm; -CREATE SEQUENCE public.project_aoi_id_seq - AS integer - START WITH 1 - INCREMENT BY 1 - NO MINVALUE - NO MAXVALUE - CACHE 1; -ALTER TABLE public.project_aoi_id_seq OWNER TO fmtm; -ALTER SEQUENCE public.project_aoi_id_seq OWNED BY public.project_aoi.id; - - CREATE TABLE public.project_chat ( id bigint NOT NULL, project_id integer NOT NULL, @@ -468,16 +401,6 @@ ALTER TABLE public.qr_code_id_seq OWNER TO fmtm; 
ALTER SEQUENCE public.qr_code_id_seq OWNED BY public.qr_code.id; -CREATE TABLE public.splitpolygons ( - polyid bigint NOT NULL, - geom public.geometry(Polygon,4326), - geog public.geography(Geometry,4326), - numfeatures bigint, - area double precision -); -ALTER TABLE public.splitpolygons OWNER TO fmtm; - - CREATE TABLE public.task_history ( id integer NOT NULL, project_id integer, @@ -544,13 +467,6 @@ ALTER TABLE public.task_mapping_issues_id_seq OWNER TO fmtm; ALTER SEQUENCE public.task_mapping_issues_id_seq OWNED BY public.task_mapping_issues.id; -CREATE TABLE public.taskpolygons ( - geom public.geometry, - clusteruid text -); -ALTER TABLE public.taskpolygons OWNER TO fmtm; - - CREATE TABLE public.tasks ( id integer NOT NULL, project_id integer NOT NULL, @@ -643,51 +559,6 @@ CREATE SEQUENCE public.users_id_seq ALTER TABLE public.users_id_seq OWNER TO fmtm; ALTER SEQUENCE public.users_id_seq OWNED BY public.users.id; - -CREATE TABLE public.voronois ( - clusteruid text, - geom public.geometry -); -ALTER TABLE public.voronois OWNER TO fmtm; - - -CREATE TABLE public.ways_line ( - id integer NOT NULL, - project_id character varying, - geom public.geometry(Geometry,4326), - tags jsonb -); -ALTER TABLE public.ways_line OWNER TO fmtm; -CREATE SEQUENCE public.ways_line_id_seq - AS integer - START WITH 1 - INCREMENT BY 1 - NO MINVALUE - NO MAXVALUE - CACHE 1; -ALTER TABLE public.ways_line_id_seq OWNER TO fmtm; -ALTER SEQUENCE public.ways_line_id_seq OWNED BY public.ways_line.id; - - -CREATE TABLE public.ways_poly ( - id integer NOT NULL, - project_id character varying, - osm_id character varying, - geom public.geometry(Geometry,4326), - tags jsonb -); -ALTER TABLE public.ways_poly OWNER TO fmtm; -CREATE SEQUENCE public.ways_poly_id_seq - AS integer - START WITH 1 - INCREMENT BY 1 - NO MINVALUE - NO MAXVALUE - CACHE 1; -ALTER TABLE public.ways_poly_id_seq OWNER TO fmtm; -ALTER SEQUENCE public.ways_poly_id_seq OWNED BY public.ways_poly.id; - - CREATE TABLE public.xlsforms ( id integer NOT NULL, title character varying, @@ -715,7 +586,6 @@ ALTER TABLE ONLY public.licenses ALTER COLUMN id SET DEFAULT nextval('public.lic ALTER TABLE ONLY public.mapping_issue_categories ALTER COLUMN id SET DEFAULT nextval('public.mapping_issue_categories_id_seq'::regclass); ALTER TABLE ONLY public.mbtiles_path ALTER COLUMN id SET DEFAULT nextval('public.mbtiles_path_id_seq'::regclass); ALTER TABLE ONLY public.organisations ALTER COLUMN id SET DEFAULT nextval('public.organisations_id_seq'::regclass); -ALTER TABLE ONLY public.project_aoi ALTER COLUMN id SET DEFAULT nextval('public.project_aoi_id_seq'::regclass); ALTER TABLE ONLY public.project_chat ALTER COLUMN id SET DEFAULT nextval('public.project_chat_id_seq'::regclass); ALTER TABLE ONLY public.projects ALTER COLUMN id SET DEFAULT nextval('public.projects_id_seq'::regclass); ALTER TABLE ONLY public.qr_code ALTER COLUMN id SET DEFAULT nextval('public.qr_code_id_seq'::regclass); @@ -725,8 +595,6 @@ ALTER TABLE ONLY public.task_mapping_issues ALTER COLUMN id SET DEFAULT nextval( ALTER TABLE ONLY public.tasks ALTER COLUMN id SET DEFAULT nextval('public.tasks_id_seq'::regclass); ALTER TABLE ONLY public.teams ALTER COLUMN id SET DEFAULT nextval('public.teams_id_seq'::regclass); ALTER TABLE ONLY public.users ALTER COLUMN id SET DEFAULT nextval('public.users_id_seq'::regclass); -ALTER TABLE ONLY public.ways_line ALTER COLUMN id SET DEFAULT nextval('public.ways_line_id_seq'::regclass); -ALTER TABLE ONLY public.ways_poly ALTER COLUMN id SET DEFAULT 
nextval('public.ways_poly_id_seq'::regclass); ALTER TABLE ONLY public.xlsforms ALTER COLUMN id SET DEFAULT nextval('public.xlsforms_id_seq'::regclass); @@ -747,9 +615,6 @@ ALTER TABLE ONLY public.licenses ALTER TABLE ONLY public.licenses ADD CONSTRAINT licenses_pkey PRIMARY KEY (id); -ALTER TABLE ONLY public.lowfeaturecountpolygons - ADD CONSTRAINT lowfeaturecountpolygons_pkey PRIMARY KEY (polyid); - ALTER TABLE ONLY public.mapping_issue_categories ADD CONSTRAINT mapping_issue_categories_name_key UNIQUE (name); @@ -771,9 +636,6 @@ ALTER TABLE ONLY public.organisations ALTER TABLE ONLY public.organisations ADD CONSTRAINT organisations_slug_key UNIQUE (slug); -ALTER TABLE ONLY public.project_aoi - ADD CONSTRAINT project_aoi_pkey PRIMARY KEY (id); - ALTER TABLE ONLY public.project_chat ADD CONSTRAINT project_chat_pkey PRIMARY KEY (id); @@ -816,12 +678,6 @@ ALTER TABLE ONLY public.users ALTER TABLE ONLY public.users ADD CONSTRAINT users_username_key UNIQUE (username); -ALTER TABLE ONLY public.ways_line - ADD CONSTRAINT ways_line_pkey PRIMARY KEY (id); - -ALTER TABLE ONLY public.ways_poly - ADD CONSTRAINT ways_poly_pkey PRIMARY KEY (id); - ALTER TABLE ONLY public.xlsforms ADD CONSTRAINT xlsforms_pkey PRIMARY KEY (id); @@ -831,25 +687,17 @@ ALTER TABLE ONLY public.xlsforms -- Indexing -CREATE INDEX buildings_idx ON public.buildings USING gist (geom); -CREATE INDEX clusteredbuildings_idx ON public.clusteredbuildings USING gist (geom); -CREATE INDEX dumpedpoints_idx ON public.dumpedpoints USING gist (geom); CREATE INDEX idx_features_composite ON public.features USING btree (task_id, project_id); CREATE INDEX idx_features_geometry ON public.features USING gist (geometry); CREATE INDEX idx_geometry ON public.projects USING gist (outline); -CREATE INDEX idx_lowfeaturecountpolygons_geog ON public.lowfeaturecountpolygons USING gist (geog); -CREATE INDEX idx_project_aoi_geom ON public.project_aoi USING gist (geom); CREATE INDEX idx_projects_centroid ON public.projects USING gist (centroid); CREATE INDEX idx_projects_outline ON public.projects USING gist (outline); -CREATE INDEX idx_splitpolygons_geog ON public.splitpolygons USING gist (geog); CREATE INDEX idx_task_history_composite ON public.task_history USING btree (task_id, project_id); CREATE INDEX idx_task_history_project_id_user_id ON public.task_history USING btree (user_id, project_id); CREATE INDEX idx_task_validation_history_composite ON public.task_invalidation_history USING btree (task_id, project_id); CREATE INDEX idx_task_validation_mapper_status_composite ON public.task_invalidation_history USING btree (mapper_id, is_closed); CREATE INDEX idx_task_validation_validator_status_composite ON public.task_invalidation_history USING btree (invalidator_id, is_closed); CREATE INDEX idx_tasks_outline ON public.tasks USING gist (outline); -CREATE INDEX idx_ways_line_geom ON public.ways_line USING gist (geom); -CREATE INDEX idx_ways_poly_geom ON public.ways_poly USING gist (geom); CREATE INDEX ix_project_chat_project_id ON public.project_chat USING btree (project_id); CREATE INDEX ix_projects_mapper_level ON public.projects USING btree (mapper_level); CREATE INDEX ix_projects_organisation_id ON public.projects USING btree (organisation_id); @@ -862,11 +710,7 @@ CREATE INDEX ix_tasks_project_id ON public.tasks USING btree (project_id); CREATE INDEX ix_tasks_qr_code_id ON public.tasks USING btree (qr_code_id); CREATE INDEX ix_tasks_validated_by ON public.tasks USING btree (validated_by); CREATE INDEX ix_users_id ON public.users USING btree (id); -CREATE 
INDEX lowfeaturecountpolygons_idx ON public.lowfeaturecountpolygons USING gist (geom); -CREATE INDEX splitpolygons_idx ON public.splitpolygons USING gist (geom); -CREATE INDEX taskpolygons_idx ON public.taskpolygons USING gist (geom); CREATE INDEX textsearch_idx ON public.project_info USING btree (text_searchable); -CREATE INDEX voronois_idx ON public.voronois USING gist (geom); -- Foreign keys diff --git a/src/backend/migrations/revert/000-remove-user-password.sql b/src/backend/migrations/revert/000-remove-user-password.sql new file mode 100644 index 0000000000..a6b4afeda2 --- /dev/null +++ b/src/backend/migrations/revert/000-remove-user-password.sql @@ -0,0 +1,7 @@ +-- Start a transaction +BEGIN; +-- Add the 'password' column back if it doesn't exist +ALTER TABLE public.users +ADD COLUMN IF NOT EXISTS password character varying; +-- Commit the transaction +COMMIT; diff --git a/src/backend/migrations/revert/001-project-split-type-fields.sql b/src/backend/migrations/revert/001-project-split-type-fields.sql new file mode 100644 index 0000000000..ff30f86e0c --- /dev/null +++ b/src/backend/migrations/revert/001-project-split-type-fields.sql @@ -0,0 +1,24 @@ +-- Start a transaction +BEGIN; +-- Revert task_split_type type +DO $$ +BEGIN + -- Check if the column exists + IF EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'projects' AND column_name = 'task_split_type') THEN + -- Alter the column if it exists + EXECUTE ' + ALTER TABLE public.projects + ALTER COLUMN task_split_type + TYPE VARCHAR + USING task_split_type::VARCHAR + '; + END IF; +END $$; +-- Remove extra fields +ALTER TABLE IF EXISTS public.projects + DROP COLUMN IF EXISTS task_split_dimension, + DROP COLUMN IF EXISTS task_num_buildings; +-- Remove enum +DROP TYPE IF EXISTS public.tasksplittype; +-- Commit the transaction +COMMIT; diff --git a/src/backend/pdm.lock b/src/backend/pdm.lock index db7bd16ad1..90d9a9422b 100644 --- a/src/backend/pdm.lock +++ b/src/backend/pdm.lock @@ -6,7 +6,7 @@ groups = ["default", "debug", "dev", "docs", "test"] cross_platform = true static_urls = false lock_version = "4.3" -content_hash = "sha256:3707107f0d50bb2be53ca9ff05f58dd1c61154b9d52c7a1c6f158428e90fc7e5" +content_hash = "sha256:2025602908bcd27e4e1952cb194dcd3f2d55c631e2af0a0b40382fd23bd6fa5c" [[package]] name = "annotated-types" @@ -34,22 +34,58 @@ files = [ ] [[package]] -name = "appnope" -version = "0.1.3" -summary = "Disable App Nap on macOS >= 10.9" +name = "argcomplete" +version = "3.1.6" +requires_python = ">=3.8" +summary = "Bash tab completion for argparse" files = [ - {file = "appnope-0.1.3-py2.py3-none-any.whl", hash = "sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e"}, - {file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"}, + {file = "argcomplete-3.1.6-py3-none-any.whl", hash = "sha256:71f4683bc9e6b0be85f2b2c1224c47680f210903e23512cfebfe5a41edfd883a"}, + {file = "argcomplete-3.1.6.tar.gz", hash = "sha256:3b1f07d133332547a53c79437527c00be48cca3807b1d4ca5cab1b26313386a6"}, ] [[package]] -name = "argcomplete" -version = "3.1.4" -requires_python = ">=3.8" -summary = "Bash tab completion for argparse" +name = "argon2-cffi" +version = "23.1.0" +requires_python = ">=3.7" +summary = "Argon2 for Python" +dependencies = [ + "argon2-cffi-bindings", +] files = [ - {file = "argcomplete-3.1.4-py3-none-any.whl", hash = "sha256:fbe56f8cda08aa9a04b307d8482ea703e96a6a801611acb4be9bf3942017989f"}, - {file = "argcomplete-3.1.4.tar.gz", hash = 
"sha256:72558ba729e4c468572609817226fb0a6e7e9a0a7d477b882be168c0b4a62b94"}, + {file = "argon2_cffi-23.1.0-py3-none-any.whl", hash = "sha256:c670642b78ba29641818ab2e68bd4e6a78ba53b7eff7b4c3815ae16abf91c7ea"}, + {file = "argon2_cffi-23.1.0.tar.gz", hash = "sha256:879c3e79a2729ce768ebb7d36d4609e3a78a4ca2ec3a9f12286ca057e3d0db08"}, +] + +[[package]] +name = "argon2-cffi-bindings" +version = "21.2.0" +requires_python = ">=3.6" +summary = "Low-level CFFI bindings for Argon2" +dependencies = [ + "cffi>=1.0.1", +] +files = [ + {file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f"}, + {file = "argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3e385d1c39c520c08b53d63300c3ecc28622f076f4c2b0e6d7e796e9f6502194"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3e3cc67fdb7d82c4718f19b4e7a87123caf8a93fde7e23cf66ac0337d3cb3f"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a22ad9800121b71099d0fb0a65323810a15f2e292f2ba450810a7316e128ee5"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9f8b450ed0547e3d473fdc8612083fd08dd2120d6ac8f73828df9b7d45bb351"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:93f9bf70084f97245ba10ee36575f0c3f1e7d7724d67d8e5b08e61787c320ed7"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3b9ef65804859d335dc6b31582cad2c5166f0c3e7975f324d9ffaa34ee7e6583"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d4966ef5848d820776f5f562a7d45fdd70c2f330c961d0d745b784034bd9f48d"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ef543a89dee4db46a1a6e206cd015360e5a75822f76df533845c3cbaf72670"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2937d286e2ad0cc79a7087d3c272832865f779430e0cc2b4f3718d3159b0cb"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5e00316dabdaea0b2dd82d141cc66889ced0cdcbfa599e8b471cf22c620c329a"}, ] [[package]] @@ -77,11 +113,24 @@ files = [ {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, ] +[[package]] +name = "attrs" +version = "23.1.0" +requires_python = ">=3.7" +summary = "Classes Without Boilerplate" +files = [ + {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, + {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, +] + [[package]] name = "babel" version = "2.13.1" requires_python = ">=3.7" summary = "Internationalization utilities" +dependencies = [ + "setuptools; python_version >= \"3.12\"", +] files = [ {file = "Babel-2.13.1-py3-none-any.whl", hash = "sha256:7077a4984b02b6727ac10f1f7294484f737443d7e2e66c5e4380e41a3ae0b4ed"}, {file = "Babel-2.13.1.tar.gz", hash = "sha256:33e0952d7dd6374af8dbf6768cc4ddf3ccfefc244f9986d4074704f2fbd18900"}, @@ -116,12 +165,56 @@ files = [ [[package]] name = "certifi" -version = "2023.7.22" +version = "2023.11.17" requires_python = ">=3.6" summary = "Python package for providing Mozilla's CA Bundle." files = [ - {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, - {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, + {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, + {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, +] + +[[package]] +name = "cffi" +version = "1.16.0" +requires_python = ">=3.8" +summary = "Foreign Function Interface for Python calling C code." 
+dependencies = [ + "pycparser", +] +files = [ + {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, + {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, + {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, + {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, + {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, + {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, + {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, + {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, + {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, ] [[package]] @@ -171,6 +264,21 @@ files = [ {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, ] @@ -187,6 +295,31 @@ files = [ {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, ] +[[package]] +name = "click-plugins" +version = "1.1.1" +summary = "An extension module for click to enable registering CLI commands via setuptools entry-points." +dependencies = [ + "click>=4.0", +] +files = [ + {file = "click-plugins-1.1.1.tar.gz", hash = "sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b"}, + {file = "click_plugins-1.1.1-py2.py3-none-any.whl", hash = "sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8"}, +] + +[[package]] +name = "cligj" +version = "0.7.2" +requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, <4" +summary = "Click params for commmand line interfaces to GeoJSON" +dependencies = [ + "click>=4.0", +] +files = [ + {file = "cligj-0.7.2-py3-none-any.whl", hash = "sha256:c1ca117dbce1fe20a5809dc96f01e1c2840f6dcc939b3ddbb1111bf330ba82df"}, + {file = "cligj-0.7.2.tar.gz", hash = "sha256:a4bc13d623356b373c2c27c53dbd9c68cae5d526270bfa71f6c6fa69669c6b27"}, +] + [[package]] name = "codetiming" version = "1.4.0" @@ -209,7 +342,7 @@ files = [ [[package]] name = "commitizen" -version = "3.12.0" +version = "3.13.0" requires_python = ">=3.8" summary = "Python commitizen client tool" dependencies = [ @@ -226,8 +359,8 @@ dependencies = [ "tomlkit<1.0.0,>=0.5.3", ] files = [ - {file = "commitizen-3.12.0-py3-none-any.whl", hash = "sha256:082f4733409bc4f01f987467295f8393ceb16b42cc648cf2f5a7a754c6d594db"}, - {file = "commitizen-3.12.0.tar.gz", hash = "sha256:7c313f1f85f45c9acf1a70f1637deab5c388150ae8660a0037ac260e77bb1492"}, + {file = "commitizen-3.13.0-py3-none-any.whl", hash = "sha256:ff57069591ff109136b70841fe79a3434d0525748995531cceb4f3ccadb44ead"}, + {file = "commitizen-3.13.0.tar.gz", hash = "sha256:53cd225ae44fc25cb1582f5d50cda78711a5a1d44a32fee3dcf7a22bc204ce06"}, ] [[package]] @@ -299,12 +432,12 @@ files = [ [[package]] name = "exceptiongroup" -version = "1.1.3" +version = "1.2.0" requires_python = ">=3.7" summary = "Backport of PEP 654 (exception groups)" files = [ - {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, - {file = "exceptiongroup-1.1.3.tar.gz", hash = 
"sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, ] [[package]] @@ -343,6 +476,36 @@ files = [ {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"}, ] +[[package]] +name = "fiona" +version = "1.9.5" +requires_python = ">=3.7" +summary = "Fiona reads and writes spatial data files" +dependencies = [ + "attrs>=19.2.0", + "certifi", + "click-plugins>=1.0", + "click~=8.0", + "cligj>=0.5", + "setuptools", + "six", +] +files = [ + {file = "fiona-1.9.5-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:5f40a40529ecfca5294260316cf987a0420c77a2f0cf0849f529d1afbccd093e"}, + {file = "fiona-1.9.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:374efe749143ecb5cfdd79b585d83917d2bf8ecfbfc6953c819586b336ce9c63"}, + {file = "fiona-1.9.5-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:35dae4b0308eb44617cdc4461ceb91f891d944fdebbcba5479efe524ec5db8de"}, + {file = "fiona-1.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:5b4c6a3df53bee8f85bb46685562b21b43346be1fe96419f18f70fa1ab8c561c"}, + {file = "fiona-1.9.5-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:6ad04c1877b9fd742871b11965606c6a52f40706f56a48d66a87cc3073943828"}, + {file = "fiona-1.9.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9fb9a24a8046c724787719e20557141b33049466145fc3e665764ac7caf5748c"}, + {file = "fiona-1.9.5-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:d722d7f01a66f4ab6cd08d156df3fdb92f0669cf5f8708ddcb209352f416f241"}, + {file = "fiona-1.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:7ede8ddc798f3d447536080c6db9a5fb73733ad8bdb190cb65eed4e289dd4c50"}, + {file = "fiona-1.9.5-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:8b098054a27c12afac4f819f98cb4d4bf2db9853f70b0c588d7d97d26e128c39"}, + {file = "fiona-1.9.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6d9f29e9bcbb33232ff7fa98b4a3c2234db910c1dc6c4147fc36c0b8b930f2e0"}, + {file = "fiona-1.9.5-cp312-cp312-manylinux2014_x86_64.whl", hash = "sha256:f1af08da4ecea5036cb81c9131946be4404245d1b434b5b24fd3871a1d4030d9"}, + {file = "fiona-1.9.5-cp312-cp312-win_amd64.whl", hash = "sha256:c521e1135c78dec0d7774303e5a1b4c62e0efb0e602bb8f167550ef95e0a2691"}, + {file = "fiona-1.9.5.tar.gz", hash = "sha256:99e2604332caa7692855c2ae6ed91e1fffdf9b59449aa8032dd18e070e59a2f7"}, +] + [[package]] name = "flatdict" version = "4.0.1" @@ -351,6 +514,25 @@ files = [ {file = "flatdict-4.0.1.tar.gz", hash = "sha256:cd32f08fd31ed21eb09ebc76f06b6bd12046a24f77beb1fd0281917e47f26742"}, ] +[[package]] +name = "fmtm-splitter" +version = "0.2.4" +requires_python = ">=3.10" +summary = "A program for splitting a large AOI into smaller tasks." 
+dependencies = [ + "geoalchemy2>=0.11.0", + "geojson>=2.5.0", + "geopandas>=0.11.0", + "numpy>=1.21.0", + "psycopg2>=2.9.1", + "shapely>=1.8.1", + "sqlalchemy>=2.0.0", +] +files = [ + {file = "fmtm-splitter-0.2.4.tar.gz", hash = "sha256:e23edaabbc2ab1982a82aea0187bb10820844ea4130522d941f2dc04ddf66d7e"}, + {file = "fmtm_splitter-0.2.4-py3-none-any.whl", hash = "sha256:6ba37e16d291ac09e2a2b31dfe6eb9f122ecae521fec202ef137f7467fb74211"}, +] + [[package]] name = "geoalchemy2" version = "0.14.2" @@ -365,19 +547,6 @@ files = [ {file = "GeoAlchemy2-0.14.2.tar.gz", hash = "sha256:8ca023dcb9a36c6d312f3b4aee631d66385264e2fc9feb0ab0f446eb5609407d"}, ] -[[package]] -name = "geodex" -version = "0.1.2" -summary = "A tool to find all geospatial tile indices overlapping an arbitrary boundary at an arbitrary zoom." -dependencies = [ - "pygeotile>=1.0.5", - "shapely>=1.6.4", -] -files = [ - {file = "geodex-0.1.2-py3-none-any.whl", hash = "sha256:9b4d5cc74c8993ea27d3a31405568399bf3f2e8f28f2d08bc266cbb29be27a86"}, - {file = "geodex-0.1.2.tar.gz", hash = "sha256:490e9a6e10f7d4d2825d7fa9bd73e73fa6a3b9b1f63a395d1dd6614da5ca4cc6"}, -] - [[package]] name = "geojson" version = "3.1.0" @@ -401,6 +570,23 @@ files = [ {file = "geojson_pydantic-1.0.1.tar.gz", hash = "sha256:a996ffccd5a016d3acb4a0c6aac941d2c569e3c6163d5ce6a04b61ee131c8f94"}, ] +[[package]] +name = "geopandas" +version = "0.14.1" +requires_python = ">=3.9" +summary = "Geographic pandas extensions" +dependencies = [ + "fiona>=1.8.21", + "packaging", + "pandas>=1.4.0", + "pyproj>=3.3.0", + "shapely>=1.8.0", +] +files = [ + {file = "geopandas-0.14.1-py3-none-any.whl", hash = "sha256:ed5a7cae7874bfc3238fb05e0501cc1760e1b7b11e5b76ecad29da644ca305da"}, + {file = "geopandas-0.14.1.tar.gz", hash = "sha256:4853ff89ecb6d1cfc43e7b3671092c8160e8a46a3dd7368f25906283314e42bb"}, +] + [[package]] name = "ghp-import" version = "2.1.0" @@ -437,20 +623,29 @@ files = [ {file = "greenlet-3.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:97e7ac860d64e2dcba5c5944cfc8fa9ea185cd84061c623536154d5a89237884"}, {file = "greenlet-3.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b2c02d2ad98116e914d4f3155ffc905fd0c025d901ead3f6ed07385e19122c94"}, {file = "greenlet-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:22f79120a24aeeae2b4471c711dcf4f8c736a2bb2fabad2a67ac9a55ea72523c"}, + {file = "greenlet-3.0.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:100f78a29707ca1525ea47388cec8a049405147719f47ebf3895e7509c6446aa"}, + {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60d5772e8195f4e9ebf74046a9121bbb90090f6550f81d8956a05387ba139353"}, + {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:daa7197b43c707462f06d2c693ffdbb5991cbb8b80b5b984007de431493a319c"}, + {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea6b8aa9e08eea388c5f7a276fabb1d4b6b9d6e4ceb12cc477c3d352001768a9"}, + {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d11ebbd679e927593978aa44c10fc2092bc454b7d13fdc958d3e9d508aba7d0"}, + {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dbd4c177afb8a8d9ba348d925b0b67246147af806f0b104af4d24f144d461cd5"}, + {file = "greenlet-3.0.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20107edf7c2c3644c67c12205dc60b1bb11d26b2610b276f97d666110d1b511d"}, + {file = 
"greenlet-3.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8bef097455dea90ffe855286926ae02d8faa335ed8e4067326257cb571fc1445"}, + {file = "greenlet-3.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:b2d3337dcfaa99698aa2377c81c9ca72fcd89c07e7eb62ece3f23a3fe89b2ce4"}, {file = "greenlet-3.0.1.tar.gz", hash = "sha256:816bd9488a94cba78d93e1abb58000e8266fa9cc2aa9ccdd6eb0696acb24005b"}, ] [[package]] name = "griffe" -version = "0.36.9" +version = "0.38.1" requires_python = ">=3.8" summary = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." dependencies = [ "colorama>=0.4", ] files = [ - {file = "griffe-0.36.9-py3-none-any.whl", hash = "sha256:7874febe7cd81e8e47eb7b8130ff9d38c8f3656233c01d2d217d2e898a0925f5"}, - {file = "griffe-0.36.9.tar.gz", hash = "sha256:b4e510bf0ed1fc91c58453c68018a2247c561adec8f5dadc40275afc01f51eac"}, + {file = "griffe-0.38.1-py3-none-any.whl", hash = "sha256:334c79d3b5964ade65c05dfcaf53518c576dedd387aaba5c9fd71212f34f1483"}, + {file = "griffe-0.38.1.tar.gz", hash = "sha256:bd68d7da7f3d87bc57eb9962b250db123efd9bbcc06c11c1a91b6e583b2a9361"}, ] [[package]] @@ -475,7 +670,7 @@ files = [ [[package]] name = "httpcore" -version = "1.0.1" +version = "1.0.2" requires_python = ">=3.8" summary = "A minimal low-level HTTP client." dependencies = [ @@ -483,58 +678,58 @@ dependencies = [ "h11<0.15,>=0.13", ] files = [ - {file = "httpcore-1.0.1-py3-none-any.whl", hash = "sha256:c5e97ef177dca2023d0b9aad98e49507ef5423e9f1d94ffe2cfe250aa28e63b0"}, - {file = "httpcore-1.0.1.tar.gz", hash = "sha256:fce1ddf9b606cfb98132ab58865c3728c52c8e4c3c46e2aabb3674464a186e92"}, + {file = "httpcore-1.0.2-py3-none-any.whl", hash = "sha256:096cc05bca73b8e459a1fc3dcf585148f63e534eae4339559c9b8a8d6399acc7"}, + {file = "httpcore-1.0.2.tar.gz", hash = "sha256:9fc092e4799b26174648e54b74ed5f683132a464e95643b226e00c2ed2fa6535"}, ] [[package]] name = "httpx" -version = "0.25.1" +version = "0.25.2" requires_python = ">=3.8" summary = "The next generation HTTP client." 
dependencies = [ "anyio", "certifi", - "httpcore", + "httpcore==1.*", "idna", "sniffio", ] files = [ - {file = "httpx-0.25.1-py3-none-any.whl", hash = "sha256:fec7d6cc5c27c578a391f7e87b9aa7d3d8fbcd034f6399f9f79b45bcc12a866a"}, - {file = "httpx-0.25.1.tar.gz", hash = "sha256:ffd96d5cf901e63863d9f1b4b6807861dbea4d301613415d9e6e57ead15fc5d0"}, + {file = "httpx-0.25.2-py3-none-any.whl", hash = "sha256:a05d3d052d9b2dfce0e3896636467f8a5342fb2b902c819428e1ac65413ca118"}, + {file = "httpx-0.25.2.tar.gz", hash = "sha256:8b8fcaa0c8ea7b05edd69a094e63a2094c4efcb48129fb757361bc423c0ad9e8"}, ] [[package]] name = "identify" -version = "2.5.31" +version = "2.5.33" requires_python = ">=3.8" summary = "File identification library for Python" files = [ - {file = "identify-2.5.31-py2.py3-none-any.whl", hash = "sha256:90199cb9e7bd3c5407a9b7e81b4abec4bb9d249991c79439ec8af740afc6293d"}, - {file = "identify-2.5.31.tar.gz", hash = "sha256:7736b3c7a28233637e3c36550646fc6389bedd74ae84cb788200cc8e2dd60b75"}, + {file = "identify-2.5.33-py2.py3-none-any.whl", hash = "sha256:d40ce5fcd762817627670da8a7d8d8e65f24342d14539c59488dc603bf662e34"}, + {file = "identify-2.5.33.tar.gz", hash = "sha256:161558f9fe4559e1557e1bff323e8631f6a0e4837f7497767c1782832f16b62d"}, ] [[package]] name = "idna" -version = "3.4" +version = "3.6" requires_python = ">=3.5" summary = "Internationalized Domain Names in Applications (IDNA)" files = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, ] [[package]] name = "importlib-metadata" -version = "6.8.0" +version = "6.11.0" requires_python = ">=3.8" summary = "Read metadata from Python packages" dependencies = [ "zipp>=0.5", ] files = [ - {file = "importlib_metadata-6.8.0-py3-none-any.whl", hash = "sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb"}, - {file = "importlib_metadata-6.8.0.tar.gz", hash = "sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743"}, + {file = "importlib_metadata-6.11.0-py3-none-any.whl", hash = "sha256:f0afba6205ad8f8947c7d338b5342d5db2afbfd82f9cbef7879a9539cc12eb9b"}, + {file = "importlib_metadata-6.11.0.tar.gz", hash = "sha256:1231cf92d825c9e03cfc4da076a16de6422c863558229ea0b22b675657463443"}, ] [[package]] @@ -566,11 +761,10 @@ files = [ [[package]] name = "ipython" -version = "8.17.2" +version = "8.18.0" requires_python = ">=3.9" summary = "IPython: Productive Interactive Computing" dependencies = [ - "appnope; sys_platform == \"darwin\"", "colorama; sys_platform == \"win32\"", "decorator", "exceptiongroup; python_version < \"3.11\"", @@ -583,8 +777,8 @@ dependencies = [ "traitlets>=5", ] files = [ - {file = "ipython-8.17.2-py3-none-any.whl", hash = "sha256:1e4d1d666a023e3c93585ba0d8e962867f7a111af322efff6b9c58062b3e5444"}, - {file = "ipython-8.17.2.tar.gz", hash = "sha256:126bb57e1895594bb0d91ea3090bbd39384f6fe87c3d57fd558d0670f50339bb"}, + {file = "ipython-8.18.0-py3-none-any.whl", hash = "sha256:d538a7a98ad9b7e018926447a5f35856113a85d08fd68a165d7871ab5175f6e0"}, + {file = "ipython-8.18.0.tar.gz", hash = "sha256:4feb61210160f75e229ce932dbf8b719bff37af123c0b985fd038b14233daa16"}, ] [[package]] @@ -664,6 +858,22 @@ 
files = [ {file = "Levenshtein-0.23.0-cp311-cp311-win32.whl", hash = "sha256:da2063cee1fbecc09e1692e7c4de7624fd4c47a54ee7588b7ea20540f8f8d779"}, {file = "Levenshtein-0.23.0-cp311-cp311-win_amd64.whl", hash = "sha256:4d3b9c9e2852eca20de6bd8ca7f47d817a056993fd4927a4d50728b62315376b"}, {file = "Levenshtein-0.23.0-cp311-cp311-win_arm64.whl", hash = "sha256:ef2e3e93ae612ac87c3a28f08e8544b707d67e99f9624e420762a7c275bb13c5"}, + {file = "Levenshtein-0.23.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:85220b27a47df4a5106ef13d43b6181d73da77d3f78646ec7251a0c5eb08ac40"}, + {file = "Levenshtein-0.23.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6bb77b3ade7f256ca5882450aaf129be79b11e074505b56c5997af5058a8f834"}, + {file = "Levenshtein-0.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:99b487f08c32530ee608e8aab0c4075048262a7f5a6e113bac495b05154ae427"}, + {file = "Levenshtein-0.23.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f91d0a5d3696e373cae08c80ec99a4ff041e562e55648ebe582725cba555190"}, + {file = "Levenshtein-0.23.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fddda71ae372cd835ffd64990f0d0b160409e881bf8722b6c5dc15dc4239d7db"}, + {file = "Levenshtein-0.23.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7664bcf9a12e62c672a926c4579f74689507beaa24378ad7664f0603b0dafd20"}, + {file = "Levenshtein-0.23.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6d07539502610ee8d6437a77840feedefa47044ab0f35cd3bc37adfc63753bd"}, + {file = "Levenshtein-0.23.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:830a74b6a045a13e1b1d28af62af9878aeae8e7386f14888c84084d577b92771"}, + {file = "Levenshtein-0.23.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f29cbd0c172a8fc1d51eaacd163bdc11596aded5a90db617e6b778c2258c7006"}, + {file = "Levenshtein-0.23.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:df0704fd6a30a7c27c03655ae6dc77345c1655634fe59654e74bb06a3c7c1357"}, + {file = "Levenshtein-0.23.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:0ab52358f54ee48ad7656a773a0c72ef89bb9ba5acc6b380cfffd619fb223a23"}, + {file = "Levenshtein-0.23.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:f0a86394c9440e23a29f48f2bbc460de7b19950f46ec2bea3be8c2090839bb29"}, + {file = "Levenshtein-0.23.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a689e6e0514f48a434e7ee44cc1eb29c34b21c51c57accb304eac97fba87bf48"}, + {file = "Levenshtein-0.23.0-cp312-cp312-win32.whl", hash = "sha256:2d3229c1336498c2b72842dd4c850dff1040588a5468abe5104444a372c1a573"}, + {file = "Levenshtein-0.23.0-cp312-cp312-win_amd64.whl", hash = "sha256:5b9b6a8509415bc214d33f5828d7c700c80292ea25f9d9e8cba95ad5a74b3cdf"}, + {file = "Levenshtein-0.23.0-cp312-cp312-win_arm64.whl", hash = "sha256:5a61606bad3afb9fcec0a2a21871319c3f7da933658d2e0e6e55ab4a34814f48"}, {file = "Levenshtein-0.23.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:760c964ff0be8dea5f7eda20314cf66238fdd0fec63f1ce9c474736bb2904924"}, {file = "Levenshtein-0.23.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de42400ea86e3e8be3dc7f9b3b9ed51da7fd06dc2f3a426d7effd7fbf35de848"}, {file = "Levenshtein-0.23.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2080ee52aeac03854a0c6e73d4214d5be2120bdd5f16def4394f9fbc5666e04"}, @@ -732,6 +942,16 @@ files = [ {file = 
"MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, ] @@ -772,15 +992,17 @@ files = [ [[package]] name = "minio" -version = "7.1.17" +version = "7.2.0" summary = "MinIO Python SDK for Amazon S3 Compatible Cloud Storage" dependencies = [ + "argon2-cffi", "certifi", + "pycryptodome", "urllib3", ] files = [ - {file = "minio-7.1.17-py3-none-any.whl", hash = "sha256:0aa525d77a3bc61378444c2400b0ba2685ad4cd6ecb3fba4141a0d0765e25f40"}, - {file = "minio-7.1.17.tar.gz", hash = "sha256:b0b687c1ec9be422a1f8b04c65fb8e43a1c090f9508178db57c434a17341c404"}, + {file = "minio-7.2.0-py3-none-any.whl", hash = "sha256:10656272c16156fa08436ce2b27e25e4134ef5142a8c259513ee26fb514531a6"}, + {file = "minio-7.2.0.tar.gz", hash = "sha256:4b015b018d10c1505f7c3e724fa7c2267760ac7bee6463a624cbf22cd272877b"}, ] [[package]] @@ -835,7 +1057,7 @@ files = [ [[package]] name = "mkdocs-material" -version = "9.4.8" +version = "9.4.14" requires_python = ">=3.8" summary = "Documentation that simply works" dependencies = [ @@ -852,50 +1074,52 @@ dependencies = [ "requests~=2.26", ] files = [ - {file = "mkdocs_material-9.4.8-py3-none-any.whl", hash = "sha256:8b20f6851bddeef37dced903893cd176cf13a21a482e97705a103c45f06ce9b9"}, - {file = "mkdocs_material-9.4.8.tar.gz", hash = "sha256:f0c101453e8bc12b040e8b64ca39a405d950d8402609b1378cc2b98976e74b5f"}, + {file = "mkdocs_material-9.4.14-py3-none-any.whl", hash = "sha256:dbc78a4fea97b74319a6aa9a2f0be575a6028be6958f813ba367188f7b8428f6"}, + {file = "mkdocs_material-9.4.14.tar.gz", 
hash = "sha256:a511d3ff48fa8718b033e7e37d17abd9cc1de0fdf0244a625ca2ae2387e2416d"}, ] [[package]] name = "mkdocs-material-extensions" -version = "1.3" +version = "1.3.1" requires_python = ">=3.8" summary = "Extension pack for Python Markdown and MkDocs Material." files = [ - {file = "mkdocs_material_extensions-1.3-py3-none-any.whl", hash = "sha256:0297cc48ba68a9fdd1ef3780a3b41b534b0d0df1d1181a44676fda5f464eeadc"}, - {file = "mkdocs_material_extensions-1.3.tar.gz", hash = "sha256:f0446091503acb110a7cab9349cbc90eeac51b58d1caa92a704a81ca1e24ddbd"}, + {file = "mkdocs_material_extensions-1.3.1-py3-none-any.whl", hash = "sha256:adff8b62700b25cb77b53358dad940f3ef973dd6db797907c49e3c2ef3ab4e31"}, + {file = "mkdocs_material_extensions-1.3.1.tar.gz", hash = "sha256:10c9511cea88f568257f960358a467d12b970e1f7b2c0e5fb2bb48cab1928443"}, ] [[package]] name = "mkdocstrings" -version = "0.23.0" +version = "0.24.0" requires_python = ">=3.8" summary = "Automatic documentation from sources, for MkDocs." dependencies = [ "Jinja2>=2.11.1", "Markdown>=3.3", "MarkupSafe>=1.1", + "click>=7.0", "mkdocs-autorefs>=0.3.1", - "mkdocs>=1.2", + "mkdocs>=1.4", + "platformdirs>=2.2.0", "pymdown-extensions>=6.3", ] files = [ - {file = "mkdocstrings-0.23.0-py3-none-any.whl", hash = "sha256:051fa4014dfcd9ed90254ae91de2dbb4f24e166347dae7be9a997fe16316c65e"}, - {file = "mkdocstrings-0.23.0.tar.gz", hash = "sha256:d9c6a37ffbe7c14a7a54ef1258c70b8d394e6a33a1c80832bce40b9567138d1c"}, + {file = "mkdocstrings-0.24.0-py3-none-any.whl", hash = "sha256:f4908560c10f587326d8f5165d1908817b2e280bbf707607f601c996366a2264"}, + {file = "mkdocstrings-0.24.0.tar.gz", hash = "sha256:222b1165be41257b494a9d29b14135d2b7ca43f38161d5b10caae03b87bd4f7e"}, ] [[package]] name = "mkdocstrings-python" -version = "1.7.3" +version = "1.7.5" requires_python = ">=3.8" summary = "A Python handler for mkdocstrings." 
dependencies = [ - "griffe>=0.35", + "griffe>=0.37", "mkdocstrings>=0.20", ] files = [ - {file = "mkdocstrings_python-1.7.3-py3-none-any.whl", hash = "sha256:2439d6ad3e34f0bb4c643b845fb3c06ae9233499a1736f9fa273424b75cc5894"}, - {file = "mkdocstrings_python-1.7.3.tar.gz", hash = "sha256:c20128fa96c24dbc6437b10dfedaf33b0415d4503e51ce9ce5e84b271278268e"}, + {file = "mkdocstrings_python-1.7.5-py3-none-any.whl", hash = "sha256:5f6246026353f0c0785135db70c3fe9a5d9318990fc7ceb11d62097b8ffdd704"}, + {file = "mkdocstrings_python-1.7.5.tar.gz", hash = "sha256:c7d143728257dbf1aa550446555a554b760dcd40a763f077189d298502b800be"}, ] [[package]] @@ -923,28 +1147,38 @@ files = [ [[package]] name = "numpy" -version = "1.26.1" -requires_python = "<3.13,>=3.9" +version = "1.26.2" +requires_python = ">=3.9" summary = "Fundamental package for array computing in Python" files = [ - {file = "numpy-1.26.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:82e871307a6331b5f09efda3c22e03c095d957f04bf6bc1804f30048d0e5e7af"}, - {file = "numpy-1.26.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cdd9ec98f0063d93baeb01aad472a1a0840dee302842a2746a7a8e92968f9575"}, - {file = "numpy-1.26.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d78f269e0c4fd365fc2992c00353e4530d274ba68f15e968d8bc3c69ce5f5244"}, - {file = "numpy-1.26.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ab9163ca8aeb7fd32fe93866490654d2f7dda4e61bc6297bf72ce07fdc02f67"}, - {file = "numpy-1.26.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:78ca54b2f9daffa5f323f34cdf21e1d9779a54073f0018a3094ab907938331a2"}, - {file = "numpy-1.26.1-cp310-cp310-win32.whl", hash = "sha256:d1cfc92db6af1fd37a7bb58e55c8383b4aa1ba23d012bdbba26b4bcca45ac297"}, - {file = "numpy-1.26.1-cp310-cp310-win_amd64.whl", hash = "sha256:d2984cb6caaf05294b8466966627e80bf6c7afd273279077679cb010acb0e5ab"}, - {file = "numpy-1.26.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cd7837b2b734ca72959a1caf3309457a318c934abef7a43a14bb984e574bbb9a"}, - {file = "numpy-1.26.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1c59c046c31a43310ad0199d6299e59f57a289e22f0f36951ced1c9eac3665b9"}, - {file = "numpy-1.26.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d58e8c51a7cf43090d124d5073bc29ab2755822181fcad978b12e144e5e5a4b3"}, - {file = "numpy-1.26.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6081aed64714a18c72b168a9276095ef9155dd7888b9e74b5987808f0dd0a974"}, - {file = "numpy-1.26.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:97e5d6a9f0702c2863aaabf19f0d1b6c2628fbe476438ce0b5ce06e83085064c"}, - {file = "numpy-1.26.1-cp311-cp311-win32.whl", hash = "sha256:b9d45d1dbb9de84894cc50efece5b09939752a2d75aab3a8b0cef6f3a35ecd6b"}, - {file = "numpy-1.26.1-cp311-cp311-win_amd64.whl", hash = "sha256:3649d566e2fc067597125428db15d60eb42a4e0897fc48d28cb75dc2e0454e53"}, - {file = "numpy-1.26.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:06934e1a22c54636a059215d6da99e23286424f316fddd979f5071093b648668"}, - {file = "numpy-1.26.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76ff661a867d9272cd2a99eed002470f46dbe0943a5ffd140f49be84f68ffc42"}, - {file = "numpy-1.26.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:6965888d65d2848e8768824ca8288db0a81263c1efccec881cb35a0d805fcd2f"}, - {file = "numpy-1.26.1.tar.gz", hash = "sha256:c8c6c72d4a9f831f328efb1312642a1cafafaa88981d9ab76368d50d07d93cbe"}, + {file = 
"numpy-1.26.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3703fc9258a4a122d17043e57b35e5ef1c5a5837c3db8be396c82e04c1cf9b0f"}, + {file = "numpy-1.26.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cc392fdcbd21d4be6ae1bb4475a03ce3b025cd49a9be5345d76d7585aea69440"}, + {file = "numpy-1.26.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36340109af8da8805d8851ef1d74761b3b88e81a9bd80b290bbfed61bd2b4f75"}, + {file = "numpy-1.26.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bcc008217145b3d77abd3e4d5ef586e3bdfba8fe17940769f8aa09b99e856c00"}, + {file = "numpy-1.26.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3ced40d4e9e18242f70dd02d739e44698df3dcb010d31f495ff00a31ef6014fe"}, + {file = "numpy-1.26.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b272d4cecc32c9e19911891446b72e986157e6a1809b7b56518b4f3755267523"}, + {file = "numpy-1.26.2-cp310-cp310-win32.whl", hash = "sha256:22f8fc02fdbc829e7a8c578dd8d2e15a9074b630d4da29cda483337e300e3ee9"}, + {file = "numpy-1.26.2-cp310-cp310-win_amd64.whl", hash = "sha256:26c9d33f8e8b846d5a65dd068c14e04018d05533b348d9eaeef6c1bd787f9919"}, + {file = "numpy-1.26.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b96e7b9c624ef3ae2ae0e04fa9b460f6b9f17ad8b4bec6d7756510f1f6c0c841"}, + {file = "numpy-1.26.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:aa18428111fb9a591d7a9cc1b48150097ba6a7e8299fb56bdf574df650e7d1f1"}, + {file = "numpy-1.26.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06fa1ed84aa60ea6ef9f91ba57b5ed963c3729534e6e54055fc151fad0423f0a"}, + {file = "numpy-1.26.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96ca5482c3dbdd051bcd1fce8034603d6ebfc125a7bd59f55b40d8f5d246832b"}, + {file = "numpy-1.26.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:854ab91a2906ef29dc3925a064fcd365c7b4da743f84b123002f6139bcb3f8a7"}, + {file = "numpy-1.26.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f43740ab089277d403aa07567be138fc2a89d4d9892d113b76153e0e412409f8"}, + {file = "numpy-1.26.2-cp311-cp311-win32.whl", hash = "sha256:a2bbc29fcb1771cd7b7425f98b05307776a6baf43035d3b80c4b0f29e9545186"}, + {file = "numpy-1.26.2-cp311-cp311-win_amd64.whl", hash = "sha256:2b3fca8a5b00184828d12b073af4d0fc5fdd94b1632c2477526f6bd7842d700d"}, + {file = "numpy-1.26.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a4cd6ed4a339c21f1d1b0fdf13426cb3b284555c27ac2f156dfdaaa7e16bfab0"}, + {file = "numpy-1.26.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5d5244aabd6ed7f312268b9247be47343a654ebea52a60f002dc70c769048e75"}, + {file = "numpy-1.26.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a3cdb4d9c70e6b8c0814239ead47da00934666f668426fc6e94cce869e13fd7"}, + {file = "numpy-1.26.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa317b2325f7aa0a9471663e6093c210cb2ae9c0ad824732b307d2c51983d5b6"}, + {file = "numpy-1.26.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:174a8880739c16c925799c018f3f55b8130c1f7c8e75ab0a6fa9d41cab092fd6"}, + {file = "numpy-1.26.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f79b231bf5c16b1f39c7f4875e1ded36abee1591e98742b05d8a0fb55d8a3eec"}, + {file = "numpy-1.26.2-cp312-cp312-win32.whl", hash = "sha256:4a06263321dfd3598cacb252f51e521a8cb4b6df471bb12a7ee5cbab20ea9167"}, + {file = "numpy-1.26.2-cp312-cp312-win_amd64.whl", hash = "sha256:b04f5dc6b3efdaab541f7857351aac359e6ae3c126e2edb376929bd3b7f92d7e"}, + {file = 
"numpy-1.26.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1cc3d5029a30fb5f06704ad6b23b35e11309491c999838c31f124fee32107c79"}, + {file = "numpy-1.26.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94cc3c222bb9fb5a12e334d0479b97bb2df446fbe622b470928f5284ffca3f8d"}, + {file = "numpy-1.26.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fe6b44fb8fcdf7eda4ef4461b97b3f63c466b27ab151bec2366db8b197387841"}, + {file = "numpy-1.26.2.tar.gz", hash = "sha256:f65738447676ab5777f11e6bbbdb8ce11b785e105f690bc45966574816b6d3ea"}, ] [[package]] @@ -972,37 +1206,34 @@ files = [ [[package]] name = "osm-fieldwork" -version = "0.3.7" +version = "0.4.0" requires_python = ">=3.10" summary = "Processing field data from OpenDataKit to OpenStreetMap format." dependencies = [ - "PyYAML>=6.0", - "codetiming>=1.4.0", + "PyYAML>=6.0.0", + "codetiming>=1.3.0", "flatdict>=4.0.1", - "geodex>=0.1.2", "geojson>=2.5.0", "haversine>=2.8.0", - "levenshtein>=0.21.1", + "levenshtein>=0.20.0", "mercantile>=1.2.1", - "osm-rawdata>=0.1.3", - "overpy>=0.6", - "pandas>=2.0.3", + "osm-rawdata>=0.1.6", + "pandas>=1.5.0", "pmtiles>=3.2.0", "progress>=1.6", - "psycopg2>=2.9.7", + "psycopg2>=2.9.1", "py-cpuinfo>=9.0.0", "pySmartDL>=1.3.4", "pymbtiles>=0.5.0", - "qrcode>=7.4.2", - "requests>=2.31.0", + "requests>=2.26.0", "segno>=1.5.2", "shapely>=1.8.5", "thefuzz>=0.19.0", "xmltodict>=0.13.0", ] files = [ - {file = "osm-fieldwork-0.3.7.tar.gz", hash = "sha256:d941d9ba01d93af0eaf0810b72eabb4cac986d07420859593e6302b533a6a9c1"}, - {file = "osm_fieldwork-0.3.7-py3-none-any.whl", hash = "sha256:18ca6dead0e1be63e693da6537b76c077ca9b4e680285a1ff2b2fba44ed6d590"}, + {file = "osm-fieldwork-0.4.0.tar.gz", hash = "sha256:145011a7d918ac35237fe7953235b3e630e10318a024c2fde2a8dd340a1aa2b1"}, + {file = "osm_fieldwork-0.4.0-py3-none-any.whl", hash = "sha256:10ec0b2ca8e426329adc1a07d640b8ba864ca3fcbfa884db3f58512074a10633"}, ] [[package]] @@ -1022,33 +1253,24 @@ files = [ [[package]] name = "osm-rawdata" -version = "0.1.5" +version = "0.1.7" requires_python = ">=3.10" summary = "Make data extracts from OSM data." 
dependencies = [ - "GeoAlchemy2>=0.12.5", - "PyYAML>=6.0.1", - "SQLAlchemy-Utils>=0.41.1", + "GeoAlchemy2>=0.11.0", + "PyYAML>=6.0.0", + "SQLAlchemy-Utils>=0.38.3", "flatdict>=4.0.1", "geojson>=2.5.0", - "psycopg2>=2.9.9", + "psycopg2>=2.9.1", "pyarrow>=14.0.1", - "requests>=2.28.2", - "shapely>=2.0.1", - "sqlalchemy>=1.4.41", -] -files = [ - {file = "osm-rawdata-0.1.5.tar.gz", hash = "sha256:46ddb2ed469077f5538cd332e8e8df3932fb91568ef66edfb0ea0f78b754cf30"}, - {file = "osm_rawdata-0.1.5-py3-none-any.whl", hash = "sha256:587fec4665cac52c6f1388d3e83b804dc35e758e3fff99eac9aeb34f2030f464"}, + "requests>=2.26.0", + "shapely>=1.8.1", + "sqlalchemy>=2.0.0", ] - -[[package]] -name = "overpy" -version = "0.6" -requires_python = ">=3.6" -summary = "Python Wrapper to access the OpenStreepMap Overpass API" files = [ - {file = "overpy-0.6.tar.gz", hash = "sha256:75fa462c445a3d8ade4dad84df6f150d273f45548639229316829a3a8c3e2190"}, + {file = "osm-rawdata-0.1.7.tar.gz", hash = "sha256:b012a20e15cca925ed4d0494cd65ebf3fd97759323ed64fb94dc8cf46ce67b6f"}, + {file = "osm_rawdata-0.1.7-py3-none-any.whl", hash = "sha256:9de18ac8ddc5d25058b79506aa940ab688fc9bf096e09c641bc76266678611a8"}, ] [[package]] @@ -1071,30 +1293,37 @@ files = [ [[package]] name = "pandas" -version = "2.1.2" +version = "2.1.3" requires_python = ">=3.9" summary = "Powerful data structures for data analysis, time series, and statistics" dependencies = [ "numpy<2,>=1.22.4; python_version < \"3.11\"", "numpy<2,>=1.23.2; python_version == \"3.11\"", + "numpy<2,>=1.26.0; python_version >= \"3.12\"", "python-dateutil>=2.8.2", "pytz>=2020.1", "tzdata>=2022.1", ] files = [ - {file = "pandas-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:24057459f19db9ebb02984c6fdd164a970b31a95f38e4a49cf7615b36a1b532c"}, - {file = "pandas-2.1.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a6cf8fcc8a63d333970b950a7331a30544cf59b1a97baf0a7409e09eafc1ac38"}, - {file = "pandas-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ae6ffbd9d614c20d028c7117ee911fc4e266b4dca2065d5c5909e401f8ff683"}, - {file = "pandas-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eff794eeb7883c5aefb1ed572e7ff533ae779f6c6277849eab9e77986e352688"}, - {file = "pandas-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:02954e285e8e2f4006b6f22be6f0df1f1c3c97adbb7ed211c6b483426f20d5c8"}, - {file = "pandas-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:5b40c9f494e1f27588c369b9e4a6ca19cd924b3a0e1ef9ef1a8e30a07a438f43"}, - {file = "pandas-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:08d287b68fd28906a94564f15118a7ca8c242e50ae7f8bd91130c362b2108a81"}, - {file = "pandas-2.1.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bbd98dcdcd32f408947afdb3f7434fade6edd408c3077bbce7bd840d654d92c6"}, - {file = "pandas-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e90c95abb3285d06f6e4feedafc134306a8eced93cb78e08cf50e224d5ce22e2"}, - {file = "pandas-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52867d69a54e71666cd184b04e839cff7dfc8ed0cd6b936995117fdae8790b69"}, - {file = "pandas-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8d0382645ede2fde352da2a885aac28ec37d38587864c0689b4b2361d17b1d4c"}, - {file = "pandas-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:65177d1c519b55e5b7f094c660ed357bb7d86e799686bb71653b8a4803d8ff0d"}, - {file = "pandas-2.1.2.tar.gz", hash = 
"sha256:52897edc2774d2779fbeb6880d2cfb305daa0b1a29c16b91f531a18918a6e0f3"}, + {file = "pandas-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:acf08a73b5022b479c1be155d4988b72f3020f308f7a87c527702c5f8966d34f"}, + {file = "pandas-2.1.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3cc4469ff0cf9aa3a005870cb49ab8969942b7156e0a46cc3f5abd6b11051dfb"}, + {file = "pandas-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35172bff95f598cc5866c047f43c7f4df2c893acd8e10e6653a4b792ed7f19bb"}, + {file = "pandas-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59dfe0e65a2f3988e940224e2a70932edc964df79f3356e5f2997c7d63e758b4"}, + {file = "pandas-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0296a66200dee556850d99b24c54c7dfa53a3264b1ca6f440e42bad424caea03"}, + {file = "pandas-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:465571472267a2d6e00657900afadbe6097c8e1dc43746917db4dfc862e8863e"}, + {file = "pandas-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:04d4c58e1f112a74689da707be31cf689db086949c71828ef5da86727cfe3f82"}, + {file = "pandas-2.1.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7fa2ad4ff196768ae63a33f8062e6838efed3a319cf938fdf8b95e956c813042"}, + {file = "pandas-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4441ac94a2a2613e3982e502ccec3bdedefe871e8cea54b8775992485c5660ef"}, + {file = "pandas-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5ded6ff28abbf0ea7689f251754d3789e1edb0c4d0d91028f0b980598418a58"}, + {file = "pandas-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fca5680368a5139d4920ae3dc993eb5106d49f814ff24018b64d8850a52c6ed2"}, + {file = "pandas-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:de21e12bf1511190fc1e9ebc067f14ca09fccfb189a813b38d63211d54832f5f"}, + {file = "pandas-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a5d53c725832e5f1645e7674989f4c106e4b7249c1d57549023ed5462d73b140"}, + {file = "pandas-2.1.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7cf4cf26042476e39394f1f86868d25b265ff787c9b2f0d367280f11afbdee6d"}, + {file = "pandas-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:72c84ec1b1d8e5efcbff5312abe92bfb9d5b558f11e0cf077f5496c4f4a3c99e"}, + {file = "pandas-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f539e113739a3e0cc15176bf1231a553db0239bfa47a2c870283fd93ba4f683"}, + {file = "pandas-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fc77309da3b55732059e484a1efc0897f6149183c522390772d3561f9bf96c00"}, + {file = "pandas-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:08637041279b8981a062899da0ef47828df52a1838204d2b3761fbd3e9fcb549"}, + {file = "pandas-2.1.3.tar.gz", hash = "sha256:22929f84bca106921917eb73c1521317ddd0a4c71b395bcf767a106e3494209f"}, ] [[package]] @@ -1119,24 +1348,24 @@ files = [ [[package]] name = "pexpect" -version = "4.8.0" +version = "4.9.0" summary = "Pexpect allows easy control of interactive console applications." 
dependencies = [ "ptyprocess>=0.5", ] files = [ - {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"}, - {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"}, + {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, + {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, ] [[package]] name = "platformdirs" -version = "3.11.0" -requires_python = ">=3.7" +version = "4.1.0" +requires_python = ">=3.8" summary = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." files = [ - {file = "platformdirs-3.11.0-py3-none-any.whl", hash = "sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e"}, - {file = "platformdirs-3.11.0.tar.gz", hash = "sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3"}, + {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, + {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, ] [[package]] @@ -1206,6 +1435,8 @@ files = [ {file = "psycopg2-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:426f9f29bde126913a20a96ff8ce7d73fd8a216cfb323b1f04da402d452853c3"}, {file = "psycopg2-2.9.9-cp311-cp311-win32.whl", hash = "sha256:ade01303ccf7ae12c356a5e10911c9e1c51136003a9a1d92f7aa9d010fb98372"}, {file = "psycopg2-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:121081ea2e76729acfb0673ff33755e8703d45e926e416cb59bae3a86c6a4981"}, + {file = "psycopg2-2.9.9-cp312-cp312-win32.whl", hash = "sha256:d735786acc7dd25815e89cc4ad529a43af779db2e25aa7c626de864127e5a024"}, + {file = "psycopg2-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:a7653d00b732afb6fc597e29c50ad28087dcb4fbfb28e86092277a559ae4e693"}, {file = "psycopg2-2.9.9.tar.gz", hash = "sha256:d1454bde93fb1e224166811694d600e746430c006fbb031ea06ecc2ea41bf156"}, ] @@ -1259,122 +1490,172 @@ files = [ {file = "pyarrow-14.0.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:be28e1a07f20391bb0b15ea03dcac3aade29fc773c5eb4bee2838e9b2cdde0cb"}, {file = "pyarrow-14.0.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:981670b4ce0110d8dcb3246410a4aabf5714db5d8ea63b15686bce1c914b1f83"}, {file = "pyarrow-14.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:4756a2b373a28f6166c42711240643fb8bd6322467e9aacabd26b488fa41ec23"}, + {file = "pyarrow-14.0.1-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:cf87e2cec65dd5cf1aa4aba918d523ef56ef95597b545bbaad01e6433851aa10"}, + {file = "pyarrow-14.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:470ae0194fbfdfbf4a6b65b4f9e0f6e1fa0ea5b90c1ee6b65b38aecee53508c8"}, + {file = "pyarrow-14.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6263cffd0c3721c1e348062997babdf0151301f7353010c9c9a8ed47448f82ab"}, + {file = "pyarrow-14.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a8089d7e77d1455d529dbd7cff08898bbb2666ee48bc4085203af1d826a33cc"}, + {file = "pyarrow-14.0.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:fada8396bc739d958d0b81d291cfd201126ed5e7913cb73de6bc606befc30226"}, + {file = "pyarrow-14.0.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:2a145dab9ed7849fc1101bf03bcdc69913547f10513fdf70fc3ab6c0a50c7eee"}, + {file = 
"pyarrow-14.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:05fe7994745b634c5fb16ce5717e39a1ac1fac3e2b0795232841660aa76647cd"}, {file = "pyarrow-14.0.1.tar.gz", hash = "sha256:b8b3f4fe8d4ec15e1ef9b599b94683c5216adaed78d5cb4c606180546d1e2ee1"}, ] +[[package]] +name = "pycparser" +version = "2.21" +requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +summary = "C parser in Python" +files = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] + +[[package]] +name = "pycryptodome" +version = "3.19.0" +requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +summary = "Cryptographic library for Python" +files = [ + {file = "pycryptodome-3.19.0-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:542f99d5026ac5f0ef391ba0602f3d11beef8e65aae135fa5b762f5ebd9d3bfb"}, + {file = "pycryptodome-3.19.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:61bb3ccbf4bf32ad9af32da8badc24e888ae5231c617947e0f5401077f8b091f"}, + {file = "pycryptodome-3.19.0-cp35-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d49a6c715d8cceffedabb6adb7e0cbf41ae1a2ff4adaeec9432074a80627dea1"}, + {file = "pycryptodome-3.19.0-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e249a784cc98a29c77cea9df54284a44b40cafbfae57636dd2f8775b48af2434"}, + {file = "pycryptodome-3.19.0-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d033947e7fd3e2ba9a031cb2d267251620964705a013c5a461fa5233cc025270"}, + {file = "pycryptodome-3.19.0-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:84c3e4fffad0c4988aef0d5591be3cad4e10aa7db264c65fadbc633318d20bde"}, + {file = "pycryptodome-3.19.0-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:139ae2c6161b9dd5d829c9645d781509a810ef50ea8b657e2257c25ca20efe33"}, + {file = "pycryptodome-3.19.0-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:5b1986c761258a5b4332a7f94a83f631c1ffca8747d75ab8395bf2e1b93283d9"}, + {file = "pycryptodome-3.19.0-cp35-abi3-win32.whl", hash = "sha256:536f676963662603f1f2e6ab01080c54d8cd20f34ec333dcb195306fa7826997"}, + {file = "pycryptodome-3.19.0-cp35-abi3-win_amd64.whl", hash = "sha256:04dd31d3b33a6b22ac4d432b3274588917dcf850cc0c51c84eca1d8ed6933810"}, + {file = "pycryptodome-3.19.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:8999316e57abcbd8085c91bc0ef75292c8618f41ca6d2b6132250a863a77d1e7"}, + {file = "pycryptodome-3.19.0-pp27-pypy_73-win32.whl", hash = "sha256:a0ab84755f4539db086db9ba9e9f3868d2e3610a3948cbd2a55e332ad83b01b0"}, + {file = "pycryptodome-3.19.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0101f647d11a1aae5a8ce4f5fad6644ae1b22bb65d05accc7d322943c69a74a6"}, + {file = "pycryptodome-3.19.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c1601e04d32087591d78e0b81e1e520e57a92796089864b20e5f18c9564b3fa"}, + {file = "pycryptodome-3.19.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:506c686a1eee6c00df70010be3b8e9e78f406af4f21b23162bbb6e9bdf5427bc"}, + {file = "pycryptodome-3.19.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7919ccd096584b911f2a303c593280869ce1af9bf5d36214511f5e5a1bed8c34"}, + {file = "pycryptodome-3.19.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:560591c0777f74a5da86718f70dfc8d781734cf559773b64072bbdda44b3fc3e"}, + {file = "pycryptodome-3.19.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1cc2f2ae451a676def1a73c1ae9120cd31af25db3f381893d45f75e77be2400"}, + {file = "pycryptodome-3.19.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:17940dcf274fcae4a54ec6117a9ecfe52907ed5e2e438fe712fe7ca502672ed5"}, + {file = "pycryptodome-3.19.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d04f5f623a280fbd0ab1c1d8ecbd753193ab7154f09b6161b0f857a1a676c15f"}, + {file = "pycryptodome-3.19.0.tar.gz", hash = "sha256:bc35d463222cdb4dbebd35e0784155c81e161b9284e567e7e933d722e533331e"}, +] + [[package]] name = "pydantic" -version = "2.4.2" +version = "2.5.2" requires_python = ">=3.7" summary = "Data validation using Python type hints" dependencies = [ "annotated-types>=0.4.0", - "pydantic-core==2.10.1", + "pydantic-core==2.14.5", "typing-extensions>=4.6.1", ] files = [ - {file = "pydantic-2.4.2-py3-none-any.whl", hash = "sha256:bc3ddf669d234f4220e6e1c4d96b061abe0998185a8d7855c0126782b7abc8c1"}, - {file = "pydantic-2.4.2.tar.gz", hash = "sha256:94f336138093a5d7f426aac732dcfe7ab4eb4da243c88f891d65deb4a2556ee7"}, + {file = "pydantic-2.5.2-py3-none-any.whl", hash = "sha256:80c50fb8e3dcecfddae1adbcc00ec5822918490c99ab31f6cf6140ca1c1429f0"}, + {file = "pydantic-2.5.2.tar.gz", hash = "sha256:ff177ba64c6faf73d7afa2e8cad38fd456c0dbe01c9954e71038001cd15a6edd"}, ] [[package]] name = "pydantic-core" -version = "2.10.1" +version = "2.14.5" requires_python = ">=3.7" summary = "" dependencies = [ "typing-extensions!=4.7.0,>=4.6.0", ] files = [ - {file = "pydantic_core-2.10.1-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:d64728ee14e667ba27c66314b7d880b8eeb050e58ffc5fec3b7a109f8cddbd63"}, - {file = "pydantic_core-2.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:48525933fea744a3e7464c19bfede85df4aba79ce90c60b94d8b6e1eddd67096"}, - {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef337945bbd76cce390d1b2496ccf9f90b1c1242a3a7bc242ca4a9fc5993427a"}, - {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a1392e0638af203cee360495fd2cfdd6054711f2db5175b6e9c3c461b76f5175"}, - {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0675ba5d22de54d07bccde38997e780044dcfa9a71aac9fd7d4d7a1d2e3e65f7"}, - {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:128552af70a64660f21cb0eb4876cbdadf1a1f9d5de820fed6421fa8de07c893"}, - {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f6e6aed5818c264412ac0598b581a002a9f050cb2637a84979859e70197aa9e"}, - {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ecaac27da855b8d73f92123e5f03612b04c5632fd0a476e469dfc47cd37d6b2e"}, - {file = "pydantic_core-2.10.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b3c01c2fb081fced3bbb3da78510693dc7121bb893a1f0f5f4b48013201f362e"}, - {file = "pydantic_core-2.10.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:92f675fefa977625105708492850bcbc1182bfc3e997f8eecb866d1927c98ae6"}, - {file = "pydantic_core-2.10.1-cp310-none-win32.whl", hash = "sha256:420a692b547736a8d8703c39ea935ab5d8f0d2573f8f123b0a294e49a73f214b"}, - {file = 
"pydantic_core-2.10.1-cp310-none-win_amd64.whl", hash = "sha256:0880e239827b4b5b3e2ce05e6b766a7414e5f5aedc4523be6b68cfbc7f61c5d0"}, - {file = "pydantic_core-2.10.1-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:073d4a470b195d2b2245d0343569aac7e979d3a0dcce6c7d2af6d8a920ad0bea"}, - {file = "pydantic_core-2.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:600d04a7b342363058b9190d4e929a8e2e715c5682a70cc37d5ded1e0dd370b4"}, - {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39215d809470f4c8d1881758575b2abfb80174a9e8daf8f33b1d4379357e417c"}, - {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eeb3d3d6b399ffe55f9a04e09e635554012f1980696d6b0aca3e6cf42a17a03b"}, - {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a7a7902bf75779bc12ccfc508bfb7a4c47063f748ea3de87135d433a4cca7a2f"}, - {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3625578b6010c65964d177626fde80cf60d7f2e297d56b925cb5cdeda6e9925a"}, - {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:caa48fc31fc7243e50188197b5f0c4228956f97b954f76da157aae7f67269ae8"}, - {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:07ec6d7d929ae9c68f716195ce15e745b3e8fa122fc67698ac6498d802ed0fa4"}, - {file = "pydantic_core-2.10.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e6f31a17acede6a8cd1ae2d123ce04d8cca74056c9d456075f4f6f85de055607"}, - {file = "pydantic_core-2.10.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d8f1ebca515a03e5654f88411420fea6380fc841d1bea08effb28184e3d4899f"}, - {file = "pydantic_core-2.10.1-cp311-none-win32.whl", hash = "sha256:6db2eb9654a85ada248afa5a6db5ff1cf0f7b16043a6b070adc4a5be68c716d6"}, - {file = "pydantic_core-2.10.1-cp311-none-win_amd64.whl", hash = "sha256:4a5be350f922430997f240d25f8219f93b0c81e15f7b30b868b2fddfc2d05f27"}, - {file = "pydantic_core-2.10.1-cp311-none-win_arm64.whl", hash = "sha256:5fdb39f67c779b183b0c853cd6b45f7db84b84e0571b3ef1c89cdb1dfc367325"}, - {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d43002441932f9a9ea5d6f9efaa2e21458221a3a4b417a14027a1d530201ef1b"}, - {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:fcb83175cc4936a5425dde3356f079ae03c0802bbdf8ff82c035f8a54b333521"}, - {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:962ed72424bf1f72334e2f1e61b68f16c0e596f024ca7ac5daf229f7c26e4208"}, - {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2cf5bb4dd67f20f3bbc1209ef572a259027c49e5ff694fa56bed62959b41e1f9"}, - {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e544246b859f17373bed915182ab841b80849ed9cf23f1f07b73b7c58baee5fb"}, - {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c0877239307b7e69d025b73774e88e86ce82f6ba6adf98f41069d5b0b78bd1bf"}, - {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:53df009d1e1ba40f696f8995683e067e3967101d4bb4ea6f667931b7d4a01357"}, - {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a1254357f7e4c82e77c348dabf2d55f1d14d19d91ff025004775e70a6ef40ada"}, - 
{file = "pydantic_core-2.10.1-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:524ff0ca3baea164d6d93a32c58ac79eca9f6cf713586fdc0adb66a8cdeab96a"}, - {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f0ac9fb8608dbc6eaf17956bf623c9119b4db7dbb511650910a82e261e6600f"}, - {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:320f14bd4542a04ab23747ff2c8a778bde727158b606e2661349557f0770711e"}, - {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:63974d168b6233b4ed6a0046296803cb13c56637a7b8106564ab575926572a55"}, - {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:417243bf599ba1f1fef2bb8c543ceb918676954734e2dcb82bf162ae9d7bd514"}, - {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:dda81e5ec82485155a19d9624cfcca9be88a405e2857354e5b089c2a982144b2"}, - {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:14cfbb00959259e15d684505263d5a21732b31248a5dd4941f73a3be233865b9"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:631cb7415225954fdcc2a024119101946793e5923f6c4d73a5914d27eb3d3a05"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:bec7dd208a4182e99c5b6c501ce0b1f49de2802448d4056091f8e630b28e9a52"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:149b8a07712f45b332faee1a2258d8ef1fb4a36f88c0c17cb687f205c5dc6e7d"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d966c47f9dd73c2d32a809d2be529112d509321c5310ebf54076812e6ecd884"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7eb037106f5c6b3b0b864ad226b0b7ab58157124161d48e4b30c4a43fef8bc4b"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:154ea7c52e32dce13065dbb20a4a6f0cc012b4f667ac90d648d36b12007fa9f7"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e562617a45b5a9da5be4abe72b971d4f00bf8555eb29bb91ec2ef2be348cd132"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:f23b55eb5464468f9e0e9a9935ce3ed2a870608d5f534025cd5536bca25b1402"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:e9121b4009339b0f751955baf4543a0bfd6bc3f8188f8056b1a25a2d45099934"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:0523aeb76e03f753b58be33b26540880bac5aa54422e4462404c432230543f33"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e0e2959ef5d5b8dc9ef21e1a305a21a36e254e6a34432d00c72a92fdc5ecda5"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da01bec0a26befab4898ed83b362993c844b9a607a86add78604186297eb047e"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f2e9072d71c1f6cfc79a36d4484c82823c560e6f5599c43c1ca6b5cdbd54f881"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f36a3489d9e28fe4b67be9992a23029c3cec0babc3bd9afb39f49844a8c721c5"}, - {file = 
"pydantic_core-2.10.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f64f82cc3443149292b32387086d02a6c7fb39b8781563e0ca7b8d7d9cf72bd7"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b4a6db486ac8e99ae696e09efc8b2b9fea67b63c8f88ba7a1a16c24a057a0776"}, - {file = "pydantic_core-2.10.1.tar.gz", hash = "sha256:0f8682dbdd2f67f8e1edddcbffcc29f60a6182b4901c367fc8c1c40d30bb0a82"}, + {file = "pydantic_core-2.14.5-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:7e88f5696153dc516ba6e79f82cc4747e87027205f0e02390c21f7cb3bd8abfd"}, + {file = "pydantic_core-2.14.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4641e8ad4efb697f38a9b64ca0523b557c7931c5f84e0fd377a9a3b05121f0de"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:774de879d212db5ce02dfbf5b0da9a0ea386aeba12b0b95674a4ce0593df3d07"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ebb4e035e28f49b6f1a7032920bb9a0c064aedbbabe52c543343d39341a5b2a3"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b53e9ad053cd064f7e473a5f29b37fc4cc9dc6d35f341e6afc0155ea257fc911"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aa1768c151cf562a9992462239dfc356b3d1037cc5a3ac829bb7f3bda7cc1f9"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eac5c82fc632c599f4639a5886f96867ffced74458c7db61bc9a66ccb8ee3113"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2ae91f50ccc5810b2f1b6b858257c9ad2e08da70bf890dee02de1775a387c66"}, + {file = "pydantic_core-2.14.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6b9ff467ffbab9110e80e8c8de3bcfce8e8b0fd5661ac44a09ae5901668ba997"}, + {file = "pydantic_core-2.14.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:61ea96a78378e3bd5a0be99b0e5ed00057b71f66115f5404d0dae4819f495093"}, + {file = "pydantic_core-2.14.5-cp310-none-win32.whl", hash = "sha256:bb4c2eda937a5e74c38a41b33d8c77220380a388d689bcdb9b187cf6224c9720"}, + {file = "pydantic_core-2.14.5-cp310-none-win_amd64.whl", hash = "sha256:b7851992faf25eac90bfcb7bfd19e1f5ffa00afd57daec8a0042e63c74a4551b"}, + {file = "pydantic_core-2.14.5-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:4e40f2bd0d57dac3feb3a3aed50f17d83436c9e6b09b16af271b6230a2915459"}, + {file = "pydantic_core-2.14.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ab1cdb0f14dc161ebc268c09db04d2c9e6f70027f3b42446fa11c153521c0e88"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aae7ea3a1c5bb40c93cad361b3e869b180ac174656120c42b9fadebf685d121b"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:60b7607753ba62cf0739177913b858140f11b8af72f22860c28eabb2f0a61937"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2248485b0322c75aee7565d95ad0e16f1c67403a470d02f94da7344184be770f"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:823fcc638f67035137a5cd3f1584a4542d35a951c3cc68c6ead1df7dac825c26"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96581cfefa9123accc465a5fd0cc833ac4d75d55cc30b633b402e00e7ced00a6"}, + 
{file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a33324437018bf6ba1bb0f921788788641439e0ed654b233285b9c69704c27b4"}, + {file = "pydantic_core-2.14.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9bd18fee0923ca10f9a3ff67d4851c9d3e22b7bc63d1eddc12f439f436f2aada"}, + {file = "pydantic_core-2.14.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:853a2295c00f1d4429db4c0fb9475958543ee80cfd310814b5c0ef502de24dda"}, + {file = "pydantic_core-2.14.5-cp311-none-win32.whl", hash = "sha256:cb774298da62aea5c80a89bd58c40205ab4c2abf4834453b5de207d59d2e1651"}, + {file = "pydantic_core-2.14.5-cp311-none-win_amd64.whl", hash = "sha256:e87fc540c6cac7f29ede02e0f989d4233f88ad439c5cdee56f693cc9c1c78077"}, + {file = "pydantic_core-2.14.5-cp311-none-win_arm64.whl", hash = "sha256:57d52fa717ff445cb0a5ab5237db502e6be50809b43a596fb569630c665abddf"}, + {file = "pydantic_core-2.14.5-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:e60f112ac88db9261ad3a52032ea46388378034f3279c643499edb982536a093"}, + {file = "pydantic_core-2.14.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6e227c40c02fd873c2a73a98c1280c10315cbebe26734c196ef4514776120aeb"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0cbc7fff06a90bbd875cc201f94ef0ee3929dfbd5c55a06674b60857b8b85ed"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:103ef8d5b58596a731b690112819501ba1db7a36f4ee99f7892c40da02c3e189"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c949f04ecad823f81b1ba94e7d189d9dfb81edbb94ed3f8acfce41e682e48cef"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1452a1acdf914d194159439eb21e56b89aa903f2e1c65c60b9d874f9b950e5d"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb4679d4c2b089e5ef89756bc73e1926745e995d76e11925e3e96a76d5fa51fc"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf9d3fe53b1ee360e2421be95e62ca9b3296bf3f2fb2d3b83ca49ad3f925835e"}, + {file = "pydantic_core-2.14.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:70f4b4851dbb500129681d04cc955be2a90b2248d69273a787dda120d5cf1f69"}, + {file = "pydantic_core-2.14.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:59986de5710ad9613ff61dd9b02bdd2f615f1a7052304b79cc8fa2eb4e336d2d"}, + {file = "pydantic_core-2.14.5-cp312-none-win32.whl", hash = "sha256:699156034181e2ce106c89ddb4b6504c30db8caa86e0c30de47b3e0654543260"}, + {file = "pydantic_core-2.14.5-cp312-none-win_amd64.whl", hash = "sha256:5baab5455c7a538ac7e8bf1feec4278a66436197592a9bed538160a2e7d11e36"}, + {file = "pydantic_core-2.14.5-cp312-none-win_arm64.whl", hash = "sha256:e47e9a08bcc04d20975b6434cc50bf82665fbc751bcce739d04a3120428f3e27"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:79e0a2cdbdc7af3f4aee3210b1172ab53d7ddb6a2d8c24119b5706e622b346d0"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:678265f7b14e138d9a541ddabbe033012a2953315739f8cfa6d754cc8063e8ca"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95b15e855ae44f0c6341ceb74df61b606e11f1087e87dcb7482377374aac6abe"}, + {file = 
"pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:09b0e985fbaf13e6b06a56d21694d12ebca6ce5414b9211edf6f17738d82b0f8"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3ad873900297bb36e4b6b3f7029d88ff9829ecdc15d5cf20161775ce12306f8a"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2d0ae0d8670164e10accbeb31d5ad45adb71292032d0fdb9079912907f0085f4"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d37f8ec982ead9ba0a22a996129594938138a1503237b87318392a48882d50b7"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:35613015f0ba7e14c29ac6c2483a657ec740e5ac5758d993fdd5870b07a61d8b"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ab4ea451082e684198636565224bbb179575efc1658c48281b2c866bfd4ddf04"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ce601907e99ea5b4adb807ded3570ea62186b17f88e271569144e8cca4409c7"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb2ed8b3fe4bf4506d6dab3b93b83bbc22237e230cba03866d561c3577517d18"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:70f947628e074bb2526ba1b151cee10e4c3b9670af4dbb4d73bc8a89445916b5"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4bc536201426451f06f044dfbf341c09f540b4ebdb9fd8d2c6164d733de5e634"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4791cf0f8c3104ac668797d8c514afb3431bc3305f5638add0ba1a5a37e0d88"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:038c9f763e650712b899f983076ce783175397c848da04985658e7628cbe873b"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:27548e16c79702f1e03f5628589c6057c9ae17c95b4c449de3c66b589ead0520"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c97bee68898f3f4344eb02fec316db93d9700fb1e6a5b760ffa20d71d9a46ce3"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9b759b77f5337b4ea024f03abc6464c9f35d9718de01cfe6bae9f2e139c397e"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:439c9afe34638ace43a49bf72d201e0ffc1a800295bed8420c2a9ca8d5e3dbb3"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:ba39688799094c75ea8a16a6b544eb57b5b0f3328697084f3f2790892510d144"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ccd4d5702bb90b84df13bd491be8d900b92016c5a455b7e14630ad7449eb03f8"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:81982d78a45d1e5396819bbb4ece1fadfe5f079335dd28c4ab3427cd95389944"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:7f8210297b04e53bc3da35db08b7302a6a1f4889c79173af69b72ec9754796b8"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:8c8a8812fe6f43a3a5b054af6ac2d7b8605c7bcab2804a8a7d68b53f3cd86e00"}, + {file = 
"pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:206ed23aecd67c71daf5c02c3cd19c0501b01ef3cbf7782db9e4e051426b3d0d"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2027d05c8aebe61d898d4cffd774840a9cb82ed356ba47a90d99ad768f39789"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40180930807ce806aa71eda5a5a5447abb6b6a3c0b4b3b1b1962651906484d68"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:615a0a4bff11c45eb3c1996ceed5bdaa2f7b432425253a7c2eed33bb86d80abc"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5e412d717366e0677ef767eac93566582518fe8be923361a5c204c1a62eaafe"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:513b07e99c0a267b1d954243845d8a833758a6726a3b5d8948306e3fe14675e3"}, + {file = "pydantic_core-2.14.5.tar.gz", hash = "sha256:6d30226dfc816dd0fdf120cae611dd2215117e4f9b124af8c60ab9093b6e8e71"}, ] [[package]] name = "pydantic-settings" -version = "2.0.3" -requires_python = ">=3.7" +version = "2.1.0" +requires_python = ">=3.8" summary = "Settings management using Pydantic" dependencies = [ - "pydantic>=2.0.1", + "pydantic>=2.3.0", "python-dotenv>=0.21.0", ] files = [ - {file = "pydantic_settings-2.0.3-py3-none-any.whl", hash = "sha256:ddd907b066622bd67603b75e2ff791875540dc485b7307c4fffc015719da8625"}, - {file = "pydantic_settings-2.0.3.tar.gz", hash = "sha256:962dc3672495aad6ae96a4390fac7e593591e144625e5112d359f8f67fb75945"}, -] - -[[package]] -name = "pygeotile" -version = "1.0.6" -summary = "Python package to handle tiles and points of different projections, in particular WGS 84 (Latitude, Longitude), Spherical Mercator (Meters), Pixel Pyramid and Tiles (TMS, Google, QuadTree)" -files = [ - {file = "pyGeoTile-1.0.6.tar.gz", hash = "sha256:64b1cfac77a392e81e2220412872cd0fb4988c25e136f8aed7c03ced59134ff9"}, + {file = "pydantic_settings-2.1.0-py3-none-any.whl", hash = "sha256:7621c0cb5d90d1140d2f0ef557bdf03573aac7035948109adf2574770b77605a"}, + {file = "pydantic_settings-2.1.0.tar.gz", hash = "sha256:26b1492e0a24755626ac5e6d715e9077ab7ad4fb5f19a8b7ed7011d52f36141c"}, ] [[package]] name = "pygments" -version = "2.16.1" +version = "2.17.2" requires_python = ">=3.7" summary = "Pygments is a syntax highlighting package written in Python." 
files = [ - {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, - {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, + {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, + {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, ] [[package]] @@ -1403,6 +1684,16 @@ files = [ {file = "pyinstrument-4.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:871b131b83e9b1122f2325061c68ed1e861eebcb568c934d2fb193652f077f77"}, {file = "pyinstrument-4.6.1-cp311-cp311-win32.whl", hash = "sha256:8d8515156dd91f5652d13b5fcc87e634f8fe1c07b68d1d0840348cdd50bf5ace"}, {file = "pyinstrument-4.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb868fbe089036e9f32525a249f4c78b8dc46967612393f204b8234f439c9cc4"}, + {file = "pyinstrument-4.6.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:a18cd234cce4f230f1733807f17a134e64a1f1acabf74a14d27f583cf2b183df"}, + {file = "pyinstrument-4.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:574cfca69150be4ce4461fb224712fbc0722a49b0dc02fa204d02807adf6b5a0"}, + {file = "pyinstrument-4.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e02cf505e932eb8ccf561b7527550a67ec14fcae1fe0e25319b09c9c166e914"}, + {file = "pyinstrument-4.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:832fb2acef9d53701c1ab546564c45fb70a8770c816374f8dd11420d399103c9"}, + {file = "pyinstrument-4.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13cb57e9607545623ebe462345b3d0c4caee0125d2d02267043ece8aca8f4ea0"}, + {file = "pyinstrument-4.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9be89e7419bcfe8dd6abb0d959d6d9c439c613a4a873514c43d16b48dae697c9"}, + {file = "pyinstrument-4.6.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:476785cfbc44e8e1b1ad447398aa3deae81a8df4d37eb2d8bbb0c404eff979cd"}, + {file = "pyinstrument-4.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e9cebd90128a3d2fee36d3ccb665c1b9dce75261061b2046203e45c4a8012d54"}, + {file = "pyinstrument-4.6.1-cp312-cp312-win32.whl", hash = "sha256:1d0b76683df2ad5c40eff73607dc5c13828c92fbca36aff1ddf869a3c5a55fa6"}, + {file = "pyinstrument-4.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:c4b7af1d9d6a523cfbfedebcb69202242d5bd0cb89c4e094cc73d5d6e38279bd"}, {file = "pyinstrument-4.6.1.tar.gz", hash = "sha256:f4731b27121350f5a983d358d2272fe3df2f538aed058f57217eef7801a89288"}, ] @@ -1417,25 +1708,16 @@ files = [ [[package]] name = "pymdown-extensions" -version = "10.3.1" +version = "10.5" requires_python = ">=3.8" summary = "Extension pack for Python Markdown." 
dependencies = [ - "markdown>=3.2", + "markdown>=3.5", "pyyaml", ] files = [ - {file = "pymdown_extensions-10.3.1-py3-none-any.whl", hash = "sha256:8cba67beb2a1318cdaf742d09dff7c0fc4cafcc290147ade0f8fb7b71522711a"}, - {file = "pymdown_extensions-10.3.1.tar.gz", hash = "sha256:f6c79941498a458852853872e379e7bab63888361ba20992fc8b4f8a9b61735e"}, -] - -[[package]] -name = "pypng" -version = "0.20220715.0" -summary = "Pure Python library for saving and loading PNG images" -files = [ - {file = "pypng-0.20220715.0-py3-none-any.whl", hash = "sha256:4a43e969b8f5aaafb2a415536c1a8ec7e341cd6a3f957fd5b5f32a4cfeed902c"}, - {file = "pypng-0.20220715.0.tar.gz", hash = "sha256:739c433ba96f078315de54c0db975aee537cbc3e1d0ae4ed9aab0ca1e427e2c1"}, + {file = "pymdown_extensions-10.5-py3-none-any.whl", hash = "sha256:1f0ca8bb5beff091315f793ee17683bc1390731f6ac4c5eb01e27464b80fe879"}, + {file = "pymdown_extensions-10.5.tar.gz", hash = "sha256:1b60f1e462adbec5a1ed79dac91f666c9c0d241fa294de1989f29d20096cfd0b"}, ] [[package]] @@ -1459,6 +1741,12 @@ files = [ {file = "pyproj-3.6.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65ad699e0c830e2b8565afe42bd58cc972b47d829b2e0e48ad9638386d994915"}, {file = "pyproj-3.6.1-cp311-cp311-win32.whl", hash = "sha256:8b8acc31fb8702c54625f4d5a2a6543557bec3c28a0ef638778b7ab1d1772132"}, {file = "pyproj-3.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:38a3361941eb72b82bd9a18f60c78b0df8408416f9340521df442cebfc4306e2"}, + {file = "pyproj-3.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1e9fbaf920f0f9b4ee62aab832be3ae3968f33f24e2e3f7fbb8c6728ef1d9746"}, + {file = "pyproj-3.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6d227a865356f225591b6732430b1d1781e946893789a609bb34f59d09b8b0f8"}, + {file = "pyproj-3.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83039e5ae04e5afc974f7d25ee0870a80a6bd6b7957c3aca5613ccbe0d3e72bf"}, + {file = "pyproj-3.6.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb059ba3bced6f6725961ba758649261d85ed6ce670d3e3b0a26e81cf1aa8d"}, + {file = "pyproj-3.6.1-cp312-cp312-win32.whl", hash = "sha256:2d6ff73cc6dbbce3766b6c0bce70ce070193105d8de17aa2470009463682a8eb"}, + {file = "pyproj-3.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:7a27151ddad8e1439ba70c9b4b2b617b290c39395fa9ddb7411ebb0eb86d6fb0"}, {file = "pyproj-3.6.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fd93c1a0c6c4aedc77c0fe275a9f2aba4d59b8acf88cebfc19fe3c430cfabf4f"}, {file = "pyproj-3.6.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6420ea8e7d2a88cb148b124429fba8cd2e0fae700a2d96eab7083c0928a85110"}, {file = "pyproj-3.6.1.tar.gz", hash = "sha256:44aa7c704c2b7d8fb3d483bbf75af6cb2350d30a63b144279a09b75fead501bf"}, @@ -1493,15 +1781,15 @@ files = [ [[package]] name = "pytest-asyncio" -version = "0.21.1" -requires_python = ">=3.7" +version = "0.23.2" +requires_python = ">=3.8" summary = "Pytest support for asyncio" dependencies = [ "pytest>=7.0.0", ] files = [ - {file = "pytest-asyncio-0.21.1.tar.gz", hash = "sha256:40a7eae6dded22c7b604986855ea48400ab15b069ae38116e8c01238e9eeb64d"}, - {file = "pytest_asyncio-0.21.1-py3-none-any.whl", hash = "sha256:8666c1c8ac02631d7c51ba282e0c69a8a452b211ffedf2599099845da5c5c37b"}, + {file = "pytest-asyncio-0.23.2.tar.gz", hash = "sha256:c16052382554c7b22d48782ab3438d5b10f8cf7a4bdcae7f0f67f097d95beecc"}, + {file = "pytest_asyncio-0.23.2-py3-none-any.whl", hash = 
"sha256:ea9021364e32d58f0be43b91c6233fb8d2224ccef2398d6837559e587682808f"}, ] [[package]] @@ -1583,6 +1871,12 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, ] @@ -1599,21 +1893,6 @@ files = [ {file = "pyyaml_env_tag-0.1.tar.gz", hash = "sha256:70092675bda14fdec33b31ba77e7543de9ddc88f2e5b99160396572d11525bdb"}, ] -[[package]] -name = "qrcode" -version = "7.4.2" -requires_python = ">=3.7" -summary = "QR Code image generator" -dependencies = [ - "colorama; platform_system == \"Windows\"", - "pypng", - "typing-extensions", -] -files = [ - {file = "qrcode-7.4.2-py3-none-any.whl", hash = "sha256:581dca7a029bcb2deef5d01068e39093e80ef00b4a61098a2182eac59d01643a"}, - {file = "qrcode-7.4.2.tar.gz", hash = "sha256:9dd969454827e127dbd93696b20747239e6d540e082937c90f14ac95b30f5845"}, -] - [[package]] name = "questionary" version = "2.0.1" @@ -1665,6 +1944,22 @@ files = [ {file = "rapidfuzz-3.5.2-cp311-cp311-win32.whl", hash = "sha256:99c9fc5265566fb94731dc6826f43c5109e797078264e6389a36d47814473692"}, {file = "rapidfuzz-3.5.2-cp311-cp311-win_amd64.whl", hash = "sha256:666928ee735562a909d81bd2f63207b3214afd4ca41f790ab3025d066975c814"}, {file = "rapidfuzz-3.5.2-cp311-cp311-win_arm64.whl", hash = "sha256:d55de67c48f06b7772541e8d4c062a2679205799ce904236e2836cb04c106442"}, + {file = "rapidfuzz-3.5.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:04e1e02b182283c43c866e215317735e91d22f5d34e65400121c04d5ed7ed859"}, + {file = "rapidfuzz-3.5.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:365e544aba3ac13acf1a62cb2e5909ad2ba078d0bfc7d69b1f801dfd673b9782"}, + {file = "rapidfuzz-3.5.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b61f77d834f94b0099fa9ed35c189b7829759d4e9c2743697a130dd7ba62259f"}, + {file = "rapidfuzz-3.5.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:43fb368998b9703fa8c63db292a8ab9e988bf6da0c8a635754be8e69da1e7c1d"}, + {file = "rapidfuzz-3.5.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25510b5d142c47786dbd27cfd9da7cae5bdea28d458379377a3644d8460a3404"}, + {file = "rapidfuzz-3.5.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:bf3093443751e5a419834162af358d1e31dec75f84747a91dbbc47b2c04fc085"}, + {file = "rapidfuzz-3.5.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2fbaf546f15a924613f89d609ff66b85b4f4c2307ac14d93b80fe1025b713138"}, + {file = "rapidfuzz-3.5.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32d580df0e130ed85400ff77e1c32d965e9bc7be29ac4072ab637f57e26d29fb"}, + {file = "rapidfuzz-3.5.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:358a0fbc49343de20fee8ebdb33c7fa8f55a9ff93ff42d1ffe097d2caa248f1b"}, + {file = "rapidfuzz-3.5.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fb379ac0ddfc86c5542a225d194f76ed468b071b6f79ff57c4b72e635605ad7d"}, + {file = "rapidfuzz-3.5.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7fb21e182dc6d83617e88dea002963d5cf99cf5eabbdbf04094f503d8fe8d723"}, + {file = "rapidfuzz-3.5.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:c04f9f1310ce414ab00bdcbf26d0906755094bfc59402cb66a7722c6f06d70b2"}, + {file = "rapidfuzz-3.5.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f6da61cc38c1a95efc5edcedf258759e6dbab73191651a28c5719587f32a56ad"}, + {file = "rapidfuzz-3.5.2-cp312-cp312-win32.whl", hash = "sha256:f823fd1977071486739f484e27092765d693da6beedaceece54edce1dfeec9b2"}, + {file = "rapidfuzz-3.5.2-cp312-cp312-win_amd64.whl", hash = "sha256:a8162d81486de85ab1606e48e076431b66d44cf431b2b678e9cae458832e7147"}, + {file = "rapidfuzz-3.5.2-cp312-cp312-win_arm64.whl", hash = "sha256:dfc63fabb7d8da8483ca836bae7e55766fe39c63253571e103c034ba8ea80950"}, {file = "rapidfuzz-3.5.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:af5221e4f7800db3e84c46b79dba4112e3b3cc2678f808bdff4fcd2487073846"}, {file = "rapidfuzz-3.5.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8501d7875b176930e6ed9dbc1bc35adb37ef312f6106bd6bb5c204adb90160ac"}, {file = "rapidfuzz-3.5.2-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e414e1ca40386deda4291aa2d45062fea0fbaa14f95015738f8bb75c4d27f862"}, @@ -1713,6 +2008,20 @@ files = [ {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0d47840dc05e0ba04fe2e26f15126de7c755496d5a8aae4a08bda4dd8d646c54"}, {file = "regex-2023.10.3-cp311-cp311-win32.whl", hash = "sha256:9145f092b5d1977ec8c0ab46e7b3381b2fd069957b9862a43bd383e5c01d18c2"}, {file = "regex-2023.10.3-cp311-cp311-win_amd64.whl", hash = "sha256:b6104f9a46bd8743e4f738afef69b153c4b8b592d35ae46db07fc28ae3d5fb7c"}, + {file = "regex-2023.10.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:bff507ae210371d4b1fe316d03433ac099f184d570a1a611e541923f78f05037"}, + {file = "regex-2023.10.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:be5e22bbb67924dea15039c3282fa4cc6cdfbe0cbbd1c0515f9223186fc2ec5f"}, + {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a992f702c9be9c72fa46f01ca6e18d131906a7180950958f766c2aa294d4b41"}, + {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7434a61b158be563c1362d9071358f8ab91b8d928728cd2882af060481244c9e"}, + {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2169b2dcabf4e608416f7f9468737583ce5f0a6e8677c4efbf795ce81109d7c"}, + {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9e908ef5889cda4de038892b9accc36d33d72fb3e12c747e2799a0e806ec841"}, + {file = 
"regex-2023.10.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12bd4bc2c632742c7ce20db48e0d99afdc05e03f0b4c1af90542e05b809a03d9"}, + {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bc72c231f5449d86d6c7d9cc7cd819b6eb30134bb770b8cfdc0765e48ef9c420"}, + {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bce8814b076f0ce5766dc87d5a056b0e9437b8e0cd351b9a6c4e1134a7dfbda9"}, + {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:ba7cd6dc4d585ea544c1412019921570ebd8a597fabf475acc4528210d7c4a6f"}, + {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b0c7d2f698e83f15228ba41c135501cfe7d5740181d5903e250e47f617eb4292"}, + {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5a8f91c64f390ecee09ff793319f30a0f32492e99f5dc1c72bc361f23ccd0a9a"}, + {file = "regex-2023.10.3-cp312-cp312-win32.whl", hash = "sha256:ad08a69728ff3c79866d729b095872afe1e0557251da4abb2c5faff15a91d19a"}, + {file = "regex-2023.10.3-cp312-cp312-win_amd64.whl", hash = "sha256:39cdf8d141d6d44e8d5a12a8569d5a227f645c87df4f92179bd06e2e2705e76b"}, {file = "regex-2023.10.3.tar.gz", hash = "sha256:3fef4f844d2290ee0ba57addcec17eec9e3df73f10a2748485dfd6a3a188cc0f"}, ] @@ -1748,35 +2057,35 @@ files = [ [[package]] name = "segno" -version = "1.5.3" -requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" -summary = "QR Code and Micro QR Code generator for Python 2 and Python 3" +version = "1.6.0" +requires_python = ">=3.5" +summary = "QR Code and Micro QR Code generator for Python" files = [ - {file = "segno-1.5.3-py2.py3-none-any.whl", hash = "sha256:9bf88c8be5ca99657881889399f73ad595231230236d89a16b4eb4933513fe07"}, - {file = "segno-1.5.3.tar.gz", hash = "sha256:08f678ee3d0c96854f916879d2b953829332c6efcaa387d4b41b7b7a78d75e49"}, + {file = "segno-1.6.0-py3-none-any.whl", hash = "sha256:e9c7479e144f750b837f9527fe7492135908b2515586467bc3c893b60a4e4d39"}, + {file = "segno-1.6.0.tar.gz", hash = "sha256:8d3b11098ac6dd93161499544dedbfb187d4459088109b8855ff0bbe98105047"}, ] [[package]] name = "sentry-sdk" -version = "1.34.0" +version = "1.38.0" summary = "Python client for Sentry (https://sentry.io)" dependencies = [ "certifi", "urllib3>=1.26.11; python_version >= \"3.6\"", ] files = [ - {file = "sentry-sdk-1.34.0.tar.gz", hash = "sha256:e5d0d2b25931d88fa10986da59d941ac6037f742ab6ff2fce4143a27981d60c3"}, - {file = "sentry_sdk-1.34.0-py2.py3-none-any.whl", hash = "sha256:76dd087f38062ac6c1e30ed6feb533ee0037ff9e709974802db7b5dbf2e5db21"}, + {file = "sentry-sdk-1.38.0.tar.gz", hash = "sha256:8feab81de6bbf64f53279b085bd3820e3e737403b0a0d9317f73a2c3374ae359"}, + {file = "sentry_sdk-1.38.0-py2.py3-none-any.whl", hash = "sha256:0017fa73b8ae2d4e57fd2522ee3df30453715b29d2692142793ec5d5f90b94a6"}, ] [[package]] name = "setuptools" -version = "68.2.2" +version = "69.0.2" requires_python = ">=3.8" summary = "Easily download, build, install, upgrade, and uninstall Python packages" files = [ - {file = "setuptools-68.2.2-py3-none-any.whl", hash = "sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a"}, - {file = "setuptools-68.2.2.tar.gz", hash = "sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87"}, + {file = "setuptools-69.0.2-py3-none-any.whl", hash = "sha256:1e8fdff6797d3865f37397be788a4e3cba233608e9b509382a2777d25ebde7f2"}, + {file = "setuptools-69.0.2.tar.gz", hash = 
"sha256:735896e78a4742605974de002ac60562d286fa8051a7e2299445e8e8fbb01aa6"}, ] [[package]] @@ -1802,6 +2111,13 @@ files = [ {file = "shapely-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:737dba15011e5a9b54a8302f1748b62daa207c9bc06f820cd0ad32a041f1c6f2"}, {file = "shapely-2.0.2-cp311-cp311-win32.whl", hash = "sha256:45ac6906cff0765455a7b49c1670af6e230c419507c13e2f75db638c8fc6f3bd"}, {file = "shapely-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:dc9342fc82e374130db86a955c3c4525bfbf315a248af8277a913f30911bed9e"}, + {file = "shapely-2.0.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:06f193091a7c6112fc08dfd195a1e3846a64306f890b151fa8c63b3e3624202c"}, + {file = "shapely-2.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:eebe544df5c018134f3c23b6515877f7e4cd72851f88a8d0c18464f414d141a2"}, + {file = "shapely-2.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7e92e7c255f89f5cdf777690313311f422aa8ada9a3205b187113274e0135cd8"}, + {file = "shapely-2.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be46d5509b9251dd9087768eaf35a71360de6afac82ce87c636990a0871aa18b"}, + {file = "shapely-2.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5533a925d8e211d07636ffc2fdd9a7f9f13d54686d00577eeb11d16f00be9c4"}, + {file = "shapely-2.0.2-cp312-cp312-win32.whl", hash = "sha256:084b023dae8ad3d5b98acee9d3bf098fdf688eb0bb9b1401e8b075f6a627b611"}, + {file = "shapely-2.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:ea84d1cdbcf31e619d672b53c4532f06253894185ee7acb8ceb78f5f33cbe033"}, {file = "shapely-2.0.2.tar.gz", hash = "sha256:1713cc04c171baffc5b259ba8531c58acc2a301707b7f021d88a15ed090649e7"}, ] @@ -1851,6 +2167,14 @@ files = [ {file = "SQLAlchemy-2.0.23-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a86cb7063e2c9fb8e774f77fbf8475516d270a3e989da55fa05d08089d77f8c4"}, {file = "SQLAlchemy-2.0.23-cp311-cp311-win32.whl", hash = "sha256:b41f5d65b54cdf4934ecede2f41b9c60c9f785620416e8e6c48349ab18643855"}, {file = "SQLAlchemy-2.0.23-cp311-cp311-win_amd64.whl", hash = "sha256:9ca922f305d67605668e93991aaf2c12239c78207bca3b891cd51a4515c72e22"}, + {file = "SQLAlchemy-2.0.23-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d0f7fb0c7527c41fa6fcae2be537ac137f636a41b4c5a4c58914541e2f436b45"}, + {file = "SQLAlchemy-2.0.23-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7c424983ab447dab126c39d3ce3be5bee95700783204a72549c3dceffe0fc8f4"}, + {file = "SQLAlchemy-2.0.23-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f508ba8f89e0a5ecdfd3761f82dda2a3d7b678a626967608f4273e0dba8f07ac"}, + {file = "SQLAlchemy-2.0.23-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6463aa765cf02b9247e38b35853923edbf2f6fd1963df88706bc1d02410a5577"}, + {file = "SQLAlchemy-2.0.23-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e599a51acf3cc4d31d1a0cf248d8f8d863b6386d2b6782c5074427ebb7803bda"}, + {file = "SQLAlchemy-2.0.23-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fd54601ef9cc455a0c61e5245f690c8a3ad67ddb03d3b91c361d076def0b4c60"}, + {file = "SQLAlchemy-2.0.23-cp312-cp312-win32.whl", hash = "sha256:42d0b0290a8fb0165ea2c2781ae66e95cca6e27a2fbe1016ff8db3112ac1e846"}, + {file = "SQLAlchemy-2.0.23-cp312-cp312-win_amd64.whl", hash = "sha256:227135ef1e48165f37590b8bfc44ed7ff4c074bf04dc8d6f8e7f1c14a94aa6ca"}, {file = "SQLAlchemy-2.0.23-py3-none-any.whl", hash = "sha256:31952bbc527d633b9479f5f81e8b9dfada00b91d6baba021a869095f1a97006d"}, {file = 
"SQLAlchemy-2.0.23.tar.gz", hash = "sha256:c1bda93cbbe4aa2aa0aa8655c5aeda505cd219ff3e8da91d1d329e143e4aff69"}, ] @@ -1897,12 +2221,12 @@ files = [ [[package]] name = "termcolor" -version = "2.3.0" -requires_python = ">=3.7" +version = "2.4.0" +requires_python = ">=3.8" summary = "ANSI color formatting for output in terminal" files = [ - {file = "termcolor-2.3.0-py3-none-any.whl", hash = "sha256:3afb05607b89aed0ffe25202399ee0867ad4d3cb4180d98aaf8eefa6a5f7d475"}, - {file = "termcolor-2.3.0.tar.gz", hash = "sha256:b5b08f68937f138fe92f6c089b99f1e2da0ae56c52b78bf7075fd95420fd9a5a"}, + {file = "termcolor-2.4.0-py3-none-any.whl", hash = "sha256:9297c0df9c99445c2412e832e882a7884038a25617c60cea2ad69488d4040d63"}, + {file = "termcolor-2.4.0.tar.gz", hash = "sha256:aab9e56047c8ac41ed798fa36d892a37aca6b3e9159f3e0c24bc64a9b3ac7b7a"}, ] [[package]] @@ -1930,22 +2254,22 @@ files = [ [[package]] name = "tomlkit" -version = "0.12.2" +version = "0.12.3" requires_python = ">=3.7" summary = "Style preserving TOML library" files = [ - {file = "tomlkit-0.12.2-py3-none-any.whl", hash = "sha256:eeea7ac7563faeab0a1ed8fe12c2e5a51c61f933f2502f7e9db0241a65163ad0"}, - {file = "tomlkit-0.12.2.tar.gz", hash = "sha256:df32fab589a81f0d7dc525a4267b6d7a64ee99619cbd1eeb0fae32c1dd426977"}, + {file = "tomlkit-0.12.3-py3-none-any.whl", hash = "sha256:b0a645a9156dc7cb5d3a1f0d4bab66db287fcb8e0430bdd4664a095ea16414ba"}, + {file = "tomlkit-0.12.3.tar.gz", hash = "sha256:75baf5012d06501f07bee5bf8e801b9f343e7aac5a92581f20f80ce632e6b5a4"}, ] [[package]] name = "traitlets" -version = "5.13.0" +version = "5.14.0" requires_python = ">=3.8" summary = "Traitlets Python configuration system" files = [ - {file = "traitlets-5.13.0-py3-none-any.whl", hash = "sha256:baf991e61542da48fe8aef8b779a9ea0aa38d8a54166ee250d5af5ecf4486619"}, - {file = "traitlets-5.13.0.tar.gz", hash = "sha256:9b232b9430c8f57288c1024b34a8f0251ddcc47268927367a0dd3eeaca40deb5"}, + {file = "traitlets-5.14.0-py3-none-any.whl", hash = "sha256:f14949d23829023013c47df20b4a76ccd1a85effb786dc060f34de7948361b33"}, + {file = "traitlets-5.14.0.tar.gz", hash = "sha256:fcdaa8ac49c04dfa0ed3ee3384ef6dfdb5d6f3741502be247279407679296772"}, ] [[package]] @@ -1970,17 +2294,17 @@ files = [ [[package]] name = "urllib3" -version = "2.0.7" -requires_python = ">=3.7" +version = "2.1.0" +requires_python = ">=3.8" summary = "HTTP library with thread-safe connection pooling, file post, and more." files = [ - {file = "urllib3-2.0.7-py3-none-any.whl", hash = "sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e"}, - {file = "urllib3-2.0.7.tar.gz", hash = "sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84"}, + {file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"}, + {file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"}, ] [[package]] name = "uvicorn" -version = "0.24.0.post1" +version = "0.24.0" requires_python = ">=3.8" summary = "The lightning-fast ASGI server." 
dependencies = [ @@ -1989,23 +2313,23 @@ dependencies = [ "typing-extensions>=4.0; python_version < \"3.11\"", ] files = [ - {file = "uvicorn-0.24.0.post1-py3-none-any.whl", hash = "sha256:7c84fea70c619d4a710153482c0d230929af7bcf76c7bfa6de151f0a3a80121e"}, - {file = "uvicorn-0.24.0.post1.tar.gz", hash = "sha256:09c8e5a79dc466bdf28dead50093957db184de356fcdc48697bad3bde4c2588e"}, + {file = "uvicorn-0.24.0-py3-none-any.whl", hash = "sha256:3d19f13dfd2c2af1bfe34dd0f7155118ce689425fdf931177abe832ca44b8a04"}, + {file = "uvicorn-0.24.0.tar.gz", hash = "sha256:368d5d81520a51be96431845169c225d771c9dd22a58613e1a181e6c4512ac33"}, ] [[package]] name = "virtualenv" -version = "20.24.6" +version = "20.25.0" requires_python = ">=3.7" summary = "Virtual Python Environment builder" dependencies = [ "distlib<1,>=0.3.7", "filelock<4,>=3.12.2", - "platformdirs<4,>=3.9.1", + "platformdirs<5,>=3.9.1", ] files = [ - {file = "virtualenv-20.24.6-py3-none-any.whl", hash = "sha256:520d056652454c5098a00c0f073611ccbea4c79089331f60bf9d7ba247bb7381"}, - {file = "virtualenv-20.24.6.tar.gz", hash = "sha256:02ece4f56fbf939dbbc33c0715159951d6bf14aaf5457b092e4548e1382455af"}, + {file = "virtualenv-20.25.0-py3-none-any.whl", hash = "sha256:4238949c5ffe6876362d9c0180fc6c3a824a7b12b80604eeb8085f2ed7460de3"}, + {file = "virtualenv-20.25.0.tar.gz", hash = "sha256:bf51c0d9c7dd63ea8e44086fa1e4fb1093a31e963b86959257378aef020e1f1b"}, ] [[package]] @@ -2038,11 +2362,11 @@ files = [ [[package]] name = "wcwidth" -version = "0.2.9" +version = "0.2.12" summary = "Measures the displayed width of unicode strings in a terminal" files = [ - {file = "wcwidth-0.2.9-py2.py3-none-any.whl", hash = "sha256:9a929bd8380f6cd9571a968a9c8f4353ca58d7cd812a4822bba831f8d685b223"}, - {file = "wcwidth-0.2.9.tar.gz", hash = "sha256:a675d1a4a2d24ef67096a04b85b02deeecd8e226f57b5e3a72dbb9ed99d27da8"}, + {file = "wcwidth-0.2.12-py2.py3-none-any.whl", hash = "sha256:f26ec43d96c8cbfed76a5075dac87680124fa84e0855195a6184da9c187f133c"}, + {file = "wcwidth-0.2.12.tar.gz", hash = "sha256:f01c104efdf57971bcb756f054dd58ddec5204dd15fa31d6503ea57947d97c02"}, ] [[package]] diff --git a/src/backend/pyproject.toml b/src/backend/pyproject.toml index d52324ea19..b3624536cf 100644 --- a/src/backend/pyproject.toml +++ b/src/backend/pyproject.toml @@ -24,38 +24,37 @@ authors = [ {name = "HOTOSM", email = "sysadmin@hotosm.org"}, ] dependencies = [ - "uvicorn>=0.23.2", - "fastapi>=0.103.0", - "pydantic>=2.3.0", - "pydantic-settings>=2.0.3", - "geojson-pydantic>=1.0.0", - "python-multipart>=0.0.6", - "sqlalchemy>=2.0.21", - "SQLAlchemy-Utils>=0.41.1", - "psycopg2>=2.9.7", - "geoalchemy2>=0.14.1", - "geojson>=3.0.1", - "shapely>=2.0.1", - "pyxform>=1.12.1", - "qrcode>=7.4.2", - "xmltodict>=0.13.0", - "segno>=1.5.2", - "sentry-sdk>=1.30.0", - "py-cpuinfo>=9.0.0", - "loguru>=0.7.0", + "uvicorn==0.24.0", + "fastapi==0.104.1", + "pydantic==2.5.2", + "pydantic-settings==2.1.0", + "geojson-pydantic==1.0.1", + "python-multipart==0.0.6", + "sqlalchemy==2.0.23", + "SQLAlchemy-Utils==0.41.1", + "psycopg2==2.9.9", + "geoalchemy2==0.14.2", + "geojson==3.1.0", + "shapely==2.0.2", + "pyxform==1.12.2", + "segno==1.6.0", + "sentry-sdk==1.38.0", + "py-cpuinfo==9.0.0", + "loguru==0.7.2", + "minio==7.2.0", + "pyproj==3.6.1", + "asgiref==3.7.2", "osm-login-python==1.0.1", - "osm-fieldwork==0.3.7", - "osm-rawdata==0.1.5", - "minio>=7.1.17", - "pyproj>=3.6.1", - "asgiref>=3.7.2", + "osm-fieldwork==0.4.0", + "osm-rawdata==0.1.7", + "fmtm-splitter==0.2.4", ] -requires-python = ">=3.10,<3.12" +requires-python 
= ">=3.10" readme = "../../README.md" license = {text = "GPL-3.0-only"} [build-system] -requires = ["pdm-pep517>=1.0.0"] +requires = ["pdm-pep517==1.1.4"] build-backend = "pdm.pep517.api" [tool.pdm] @@ -63,26 +62,26 @@ source-includes = ["../../CHANGELOG.md", "../../LICENSE", "../../README.md"] version = {from = "app/__version__.py"} [tool.pdm.dev-dependencies] dev = [ - "pre-commit>=3.3.3", - "black>=23.7.0", - "commitizen>=3.6.0", + "pre-commit>=3.5.0", + "black>=23.11.0", + "commitizen>=3.13.0", ] test = [ - "pytest>=7.2.2", - "httpx>=0.23.3", - "pytest-asyncio>=0.21.1", - "pyinstrument>=4.6.1", + "pytest==7.4.3", + "httpx==0.25.2", + "pytest-asyncio==0.23.2", + "pyinstrument==4.6.1", ] debug = [ "ipdb>=0.13.13", - "debugpy>=1.6.7.post1", + "debugpy>=1.8.0", ] docs = [ - "mkdocs>=1.5.2", - "mkdocs-material>=9.2.6", - "mkdocstrings-python>=1.6.0", - "mkdocs-exclude>=1.0.2", - "python-dotenv>=1.0.0", + "mkdocs==1.5.3", + "mkdocs-material==9.4.14", + "mkdocstrings-python==1.7.5", + "mkdocs-exclude==1.0.2", + "python-dotenv==1.0.0", ] [tool.black] diff --git a/src/backend/tests/test_projects_routes.py b/src/backend/tests/test_projects_routes.py index dbd5a0b263..42ff028e3f 100644 --- a/src/backend/tests/test_projects_routes.py +++ b/src/backend/tests/test_projects_routes.py @@ -17,14 +17,15 @@ # """Tests for project routes.""" +import functools import json import os import uuid import zipfile -from concurrent.futures import ThreadPoolExecutor, wait from unittest.mock import Mock, patch import pytest +from fastapi.concurrency import run_in_threadpool from geoalchemy2.elements import WKBElement from loguru import logger as log from shapely import Polygon, wkb @@ -177,51 +178,42 @@ async def test_generate_appuser_files(db, project): assert data_extract_uploaded is True # Get project tasks list - task_list = await tasks_crud.get_task_lists(db, project_id) - assert isinstance(task_list, list) + task_ids = await tasks_crud.get_task_id_list(db, project_id) + assert isinstance(task_ids, list) # Provide custom xlsform file path xlsform_file = f"{test_data_path}/buildings.xls" - # Generate project task files using threadpool - with ThreadPoolExecutor() as executor: - # Submit tasks to the thread pool - futures = [ - executor.submit( + for task_id in task_ids: + # NOTE avoid the lambda function for run_in_threadpool + # functools.partial captures the loop variable task_id in a + # way that is safe for use within asynchronous code + success = await run_in_threadpool( + functools.partial( project_crud.generate_task_files, db, project_id, - task, + task_id, xlsform_file, - "xls", + "building", odk_credentials, ) - for task in task_list - ] - - # Wait for all tasks to complete - wait(futures) - - # Check the results, assuming generate_task_files returns a boolean - results = [future.result() for future in futures] - assert all(results) + ) + assert success # Generate appuser files - test_data = { - "db": db, - "project_id": project_id, - "extract_polygon": True, - "custom_xls_form": xlsform_file, - "extracts_contents": data_extracts, - "category": "buildings", - "form_type": "example_form_type", - "background_task_id": uuid.uuid4(), - } - # Generate appuser using threadpool - with ThreadPoolExecutor() as executor: - future = executor.submit(project_crud.generate_appuser_files, **test_data) - result = future.result() - + result = await run_in_threadpool( + lambda: project_crud.generate_appuser_files( + db, + project_id, + extract_polygon=True, + custom_xls_form=xlsform_file, + 
extracts_contents=data_extracts, + category="buildings", + form_type="example_form_type", + background_task_id=uuid.uuid4(), + ) + ) assert result is None diff --git a/src/frontend/src/api/CreateProjectService.ts b/src/frontend/src/api/CreateProjectService.ts index 5d0e165722..a80f8abff2 100755 --- a/src/frontend/src/api/CreateProjectService.ts +++ b/src/frontend/src/api/CreateProjectService.ts @@ -331,17 +331,19 @@ const TaskSplittingPreviewService: Function = ( url: string, fileUpload: any, no_of_buildings: string, - isCustomDataExtract: boolean, + dataExtractFile: any, ) => { return async (dispatch) => { dispatch(CreateProjectActions.GetTaskSplittingPreviewLoading(true)); - const getTaskSplittingGeojson = async (url, fileUpload, isCustomDataExtract) => { + const getTaskSplittingGeojson = async (url, fileUpload, dataExtractFile) => { try { const taskSplittingFileFormData = new FormData(); taskSplittingFileFormData.append('project_geojson', fileUpload); taskSplittingFileFormData.append('no_of_buildings', no_of_buildings); - taskSplittingFileFormData.append('has_data_extracts', isCustomDataExtract); + if (dataExtractFile) { + taskSplittingFileFormData.append('custom_data_extract', dataExtractFile); + } const getTaskSplittingResponse = await axios.post(url, taskSplittingFileFormData); const resp: OrganisationListModel = getTaskSplittingResponse.data; @@ -360,7 +362,7 @@ const TaskSplittingPreviewService: Function = ( } }; - await getTaskSplittingGeojson(url, fileUpload, isCustomDataExtract); + await getTaskSplittingGeojson(url, fileUpload, dataExtractFile); }; }; const PatchProjectDetails: Function = (url: string, payload: any) => { diff --git a/src/frontend/src/components/createnewproject/DataExtract.tsx b/src/frontend/src/components/createnewproject/DataExtract.tsx index 2f064009d5..87e4dac0b4 100644 --- a/src/frontend/src/components/createnewproject/DataExtract.tsx +++ b/src/frontend/src/components/createnewproject/DataExtract.tsx @@ -159,7 +159,7 @@ const DataExtract = ({ flag, customLineUpload, setCustomLineUpload, customPolygo }} onResetFile={() => resetFile(setCustomPolygonUpload)} customFile={customPolygonUpload} - btnText="Upload a Polygon" + btnText="Upload Polygons" accept=".geojson,.json" fileDescription="*The supported file formats are .geojson, .json" errorMsg={errors.customPolygonUpload} @@ -171,7 +171,7 @@ const DataExtract = ({ flag, customLineUpload, setCustomLineUpload, customPolygo }} onResetFile={() => resetFile(setCustomLineUpload)} customFile={customLineUpload} - btnText="Upload a Line" + btnText="Upload Lines" accept=".geojson,.json" fileDescription="*The supported file formats are .geojson, .json" errorMsg={errors.setCustomLineUpload} diff --git a/src/frontend/src/components/createnewproject/SplitTasks.tsx b/src/frontend/src/components/createnewproject/SplitTasks.tsx index ed62130c55..5e8845ab86 100644 --- a/src/frontend/src/components/createnewproject/SplitTasks.tsx +++ b/src/frontend/src/components/createnewproject/SplitTasks.tsx @@ -34,7 +34,15 @@ const alogrithmList = [ ]; let generateProjectLogIntervalCb: any = null; -const SplitTasks = ({ flag, geojsonFile, setGeojsonFile, customLineUpload, customPolygonUpload, customFormFile }) => { +const SplitTasks = ({ + flag, + geojsonFile, + setGeojsonFile, + customLineUpload, + customPolygonUpload, + customFormFile, + dataExtractFile, +}) => { const dispatch = useDispatch(); const navigate = useNavigate(); @@ -168,7 +176,7 @@ const SplitTasks = ({ flag, geojsonFile, setGeojsonFile, customLineUpload, custo const 
drawnGeojsonFile = new File([blob], 'data.json', { type: 'application/json' }); if (splitTasksSelection === task_split_type['divide_on_square']) { dispatch( - GetDividedTaskFromGeojson(`${import.meta.env.VITE_API_URL}/projects/preview_tasks/`, { + GetDividedTaskFromGeojson(`${import.meta.env.VITE_API_URL}/projects/preview_split_by_square/`, { geojson: drawnGeojsonFile, dimension: formValues?.dimension, }), @@ -183,8 +191,7 @@ const SplitTasks = ({ flag, geojsonFile, setGeojsonFile, customLineUpload, custo `${import.meta.env.VITE_API_URL}/projects/task_split`, drawnGeojsonFile, formValues?.average_buildings_per_task, - false, - // dataExtractFile ? true : false, + dataExtractFile, ), ); } diff --git a/src/frontend/src/components/createproject/DefineTasks.tsx b/src/frontend/src/components/createproject/DefineTasks.tsx index 1d635322b6..77fe0a5e2e 100755 --- a/src/frontend/src/components/createproject/DefineTasks.tsx +++ b/src/frontend/src/components/createproject/DefineTasks.tsx @@ -60,14 +60,14 @@ const DefineTasks: React.FC = ({ geojsonFile, setGeojsonFile, dataExtractFi // Create a file object from the Blob const drawnGeojsonFile = new File([blob], 'data.json', { type: 'application/json' }); dispatch( - GetDividedTaskFromGeojson(`${import.meta.env.VITE_API_URL}/projects/preview_tasks/`, { + GetDividedTaskFromGeojson(`${import.meta.env.VITE_API_URL}/projects/preview_split_by_square/`, { geojson: drawnGeojsonFile, dimension: formValues?.dimension, }), ); } else { dispatch( - GetDividedTaskFromGeojson(`${import.meta.env.VITE_API_URL}/projects/preview_tasks/`, { + GetDividedTaskFromGeojson(`${import.meta.env.VITE_API_URL}/projects/preview_split_by_square/`, { geojson: geojsonFile, dimension: formValues?.dimension, }), @@ -88,7 +88,7 @@ const DefineTasks: React.FC = ({ geojsonFile, setGeojsonFile, dataExtractFi `${import.meta.env.VITE_API_URL}/projects/task_split`, drawnGeojsonFile, formValues?.no_of_buildings, - dataExtractFile ? false : false, + dataExtractFile, ), ); } else { @@ -97,7 +97,7 @@ const DefineTasks: React.FC = ({ geojsonFile, setGeojsonFile, dataExtractFi `${import.meta.env.VITE_API_URL}/projects/task_split`, geojsonFile, formValues?.no_of_buildings, - dataExtractFile ? 
false : false, + dataExtractFile, ), ); } diff --git a/src/frontend/src/components/editproject/UpdateProjectArea.tsx b/src/frontend/src/components/editproject/UpdateProjectArea.tsx index ae4cf8090c..9c83b6860d 100644 --- a/src/frontend/src/components/editproject/UpdateProjectArea.tsx +++ b/src/frontend/src/components/editproject/UpdateProjectArea.tsx @@ -41,7 +41,7 @@ const UpdateProjectArea = ({ projectId }) => { const generateTasksOnMap = () => { dispatch( - GetDividedTaskFromGeojson(`${import.meta.env.VITE_API_URL}/projects/preview_tasks/`, { + GetDividedTaskFromGeojson(`${import.meta.env.VITE_API_URL}/projects/preview_split_by_square/`, { geojson: uploadAOI, dimension: projectBoundaryDetails?.dimension, }), diff --git a/src/frontend/src/views/CreateNewProject.tsx b/src/frontend/src/views/CreateNewProject.tsx index f57cf963c8..14f6ac96bb 100644 --- a/src/frontend/src/views/CreateNewProject.tsx +++ b/src/frontend/src/views/CreateNewProject.tsx @@ -24,6 +24,7 @@ const CreateNewProject = () => { const [customLineUpload, setCustomLineUpload] = useState(null); const [customPolygonUpload, setCustomPolygonUpload] = useState(null); const [customFormFile, setCustomFormFile] = useState(null); + const [dataExtractFile] = useState(null); useEffect(() => { if (location.pathname !== '/create-project' && !projectDetails.name && !projectDetails.odk_central_url) { @@ -89,6 +90,7 @@ const CreateNewProject = () => { customLineUpload={customLineUpload} customPolygonUpload={customPolygonUpload} customFormFile={customFormFile} + dataExtractFile={dataExtractFile} /> ); default: