diff --git a/CHANGES.md b/CHANGES.md index 3da52edb..48f48807 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -6,6 +6,10 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/). Note: Minor version `0.X.0` update might break the API, It's recommended to pin `tipg` to minor version: `tipg>=0.1,<0.2` +## [0.5.3] - 2023-11-29 + +- add Postgres `date` type as valid datetime column type + ## [0.5.2] - 2023-11-28 - add `TIPG_DB_SPATIAL_EXTENT` and `TIPG_DB_DATETIME_EXTENT` environment options to control if `tipg` will scan the tables for spatio-temporal extents (author @hrodmn, https://github.com/developmentseed/tipg/pull/143) @@ -239,7 +243,10 @@ Note: Minor version `0.X.0` update might break the API, It's recommended to pin - Initial release -[unreleased]: https://github.com/developmentseed/tipg/compare/0.5.0...HEAD +[unreleased]: https://github.com/developmentseed/tipg/compare/0.5.3...HEAD +[0.5.3]: https://github.com/developmentseed/tipg/compare/0.5.2...0.5.3 +[0.5.2]: https://github.com/developmentseed/tipg/compare/0.5.1...0.5.2 +[0.5.1]: https://github.com/developmentseed/tipg/compare/0.5.0...0.5.1 [0.5.0]: https://github.com/developmentseed/tipg/compare/0.4.4...0.5.0 [0.4.4]: https://github.com/developmentseed/tipg/compare/0.4.3...0.4.4 [0.4.3]: https://github.com/developmentseed/tipg/compare/0.4.2...0.4.3 diff --git a/tests/conftest.py b/tests/conftest.py index cdbb218a..6f1fa2b9 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -157,6 +157,7 @@ def app(database_url, monkeypatch): monkeypatch.setenv("TIPG_TABLE_CONFIG__public_my_data_alt__geomcol", "othergeom") monkeypatch.setenv("TIPG_TABLE_CONFIG__public_my_data_alt__pk", "id") monkeypatch.setenv("TIPG_TABLE_CONFIG__public_landsat__geomcol", "geom") + monkeypatch.setenv("TIPG_TABLE_CONFIG__public_my_data_date__datetimecol", "datedt") # OGC Tiles Settings monkeypatch.setenv("TIPG_DEFAULT_MINZOOM", str(5)) diff --git a/tests/fixtures/my_data.sql b/tests/fixtures/my_data.sql index 
cbb1d3a6..0e44f8aa 100644 --- a/tests/fixtures/my_data.sql +++ b/tests/fixtures/my_data.sql @@ -16,9 +16,12 @@ INSERT INTO "public"."my_data" ("geom" , "id", "datetime", "decimal", "numeric") INSERT INTO "public"."my_data" ("geom" , "id", "datetime", "decimal", "numeric") VALUES ('0103000020E610000001000000110000000A4C8422590E46C0B656FB86F03B5340D5E76A2BF60F46C0075F984C153C5340FA28B2217F0346C0CE0A257ADB3D5340BEE6287052F545C01AA33BF2DF3F5340F25A937BB7D244C009CB92853C69534049A5CD2EAE0644C03857A7D84686534063B4EEABC4F943C08D992E511D88534034A2B437F8EA43C0F54A5986388A53409C72BC6BC5E843C0920AAB5C038A534050AF9465883342C0363B85F6B5605340D43E0032881142C02A5884BF7F5D5340F4FDD478E90641C007F01648504453409C6F1F2DEA1541C00EA6095E6A4253404E4E9C88873342C06DC6E4C7471E53403EDF52396E3443C0DC9EAF2DC7FD524044696FF0854143C032772D211FFC52400A4C8422590E46C0B656FB86F03B5340', '4', '2004-10-23 10:23:54', 7.55526, 7.55526); INSERT INTO "public"."my_data" ("geom" , "id", "datetime", "decimal", "numeric") VALUES ('0103000020E6100000010000000D000000BBE9944235C347C0EBF06E7961EE52406ADE718A8EC447C0D122DBF97EEE5240942D6301ECB947C05B59871F60F0524086CAEEF61AAE47C0BDEF3BBB76F252400A4C8422590E46C0B656FB86F03B5340FA28B2217F0346C0CE0A257ADB3D534057EC2FBB27F745C02B1895D409405340BEE6287052F545C01AA33BF2DF3F53401D386744692743C07958A835CDFF52403EDF52396E3443C0DC9EAF2DC7FD5240B9E39237FD0645C0574B4E2543B552400AD7A3703D1245C03A234A7B83B35240BBE9944235C347C0EBF06E7961EE5240', '5', '2004-10-24 10:23:54', -78.56, null); ALTER TABLE public.my_data ADD COLUMN otherdt timestamptz; +ALTER TABLE public.my_data ADD COLUMN datedt date; ALTER TABLE public.my_data ADD COLUMN othergeom geometry; UPDATE my_data SET otherdt=datetime+'1 year'::interval, othergeom=st_pointonsurface(geom); +UPDATE my_data SET datedt=datetime+'3 year'::interval; CREATE VIEW public.my_data_alt AS SELECT * FROM my_data; +CREATE VIEW public.my_data_date AS SELECT * FROM my_data; -- Create a copy of my_data but with geography instead of Geometry 
CREATE TABLE public.my_data_geo AS SELECT * FROM my_data; ALTER TABLE public.my_data_geo ALTER COLUMN geom TYPE geography(Polygon,4326) USING ST_Transform(geom,4326)::geography; diff --git a/tests/routes/test_collections.py b/tests/routes/test_collections.py index 39ac3344..48f3d3e3 100644 --- a/tests/routes/test_collections.py +++ b/tests/routes/test_collections.py @@ -1,7 +1,7 @@ """Test /collections endpoints.""" collection_number = ( - 16 # 5 custom functions + 8 public tables + (N) functions from public + 17 # 5 custom functions + 9 public tables + (N) functions from public ) @@ -79,13 +79,14 @@ def test_collections_search(app): response = app.get("/collections", params={"datetime": "../2022-12-31T23:59:59Z"}) body = response.json() - assert body["numberMatched"] == 4 + assert body["numberMatched"] == 5 ids = [x["id"] for x in body["collections"]] assert sorted( [ "public.my_data", "public.my_data_alt", "public.my_data_geo", + "public.my_data_date", "public.nongeo_data", ] ) == sorted(ids) @@ -96,23 +97,29 @@ def test_collections_search(app): response = app.get("/collections", params={"datetime": "2003-12-31T23:59:59Z/.."}) body = response.json() - assert body["numberMatched"] == 4 + assert body["numberMatched"] == 5 ids = [x["id"] for x in body["collections"]] assert sorted( [ "public.my_data", "public.my_data_alt", "public.my_data_geo", + "public.my_data_date", "public.nongeo_data", ] ) == sorted(ids) response = app.get("/collections", params={"datetime": "2004-12-31T23:59:59Z/.."}) body = response.json() - assert body["numberMatched"] == 3 + assert body["numberMatched"] == 4 ids = [x["id"] for x in body["collections"]] assert sorted( - ["public.my_data", "public.my_data_alt", "public.my_data_geo"] + [ + "public.my_data", + "public.my_data_alt", + "public.my_data_date", + "public.my_data_geo", + ] ) == sorted(ids) response = app.get( @@ -327,3 +334,30 @@ def test_collections_no_spatial_extent(app_no_spatial_extent): body = response.json() assert not
body["extent"].get("spatial") assert body["extent"].get("temporal") + + +def test_collections_temporal_extent_datetime_column(app): + """Test /collections endpoint.""" + response = app.get("/collections/public.my_data") + assert response.status_code == 200 + body = response.json() + intervals = body["extent"]["temporal"]["interval"] + assert len(intervals) == 4 + assert intervals[0][0] == "2004-10-19T10:23:54+00:00" + assert intervals[0][1] == "2007-10-24T00:00:00+00:00" + + response = app.get("/collections/public.my_data_alt") + assert response.status_code == 200 + body = response.json() + intervals = body["extent"]["temporal"]["interval"] + assert len(intervals) == 4 + assert intervals[0][0] == "2004-10-19T10:23:54+00:00" + assert intervals[0][1] == "2007-10-24T00:00:00+00:00" + + response = app.get("/collections/public.my_data_date") + assert response.status_code == 200 + body = response.json() + intervals = body["extent"]["temporal"]["interval"] + assert len(intervals) == 4 + assert intervals[0][0] == "2004-10-19T10:23:54+00:00" + assert intervals[0][1] == "2007-10-24T00:00:00+00:00" diff --git a/tests/routes/test_items.py b/tests/routes/test_items.py index f5f00ff3..16fc0e02 100644 --- a/tests/routes/test_items.py +++ b/tests/routes/test_items.py @@ -525,7 +525,10 @@ def test_items_datetime(app): assert response.status_code == 500 assert response.headers["content-type"] == "application/json" body = response.json() - assert body["detail"] == "Must have timestamp typed column to filter with datetime." + assert ( + body["detail"] + == "Must have timestamp/timestamptz/date typed column to filter with datetime." 
+ ) # Invalid datetime column response = app.get( @@ -883,3 +886,30 @@ def test_items_env_table_config_alt(app, monkeypatch): body = response.json() assert body["features"][0]["id"] == "0" Items.model_validate(body) + + # There should NOT be items > 2005-10-19 for `my_data` + response = app.get( + "/collections/public.my_data/items?datetime=2005-10-19T00:00:00Z/.." + ) + body = response.json() + assert body["numberMatched"] == 0 + + # There should be items > 2004-12-31 for `my_data_alt` + response = app.get( + "/collections/public.my_data_alt/items?datetime=2004-12-31T00:00:00Z/.." + ) + body = response.json() + assert body["numberMatched"] == 6 + assert body["features"][0]["properties"]["datetime"] == "2004-10-19T10:23:54" + assert body["features"][0]["properties"]["otherdt"] == "2005-10-19T10:23:54+00:00" + assert body["features"][0]["properties"]["datedt"] == "2007-10-19" + + # There should be items > 2005-10-19 for `my_data_date` + response = app.get( + "/collections/public.my_data_date/items?datetime=2005-10-19T00:00:00Z/.." 
+ ) + body = response.json() + assert body["numberMatched"] == 6 + assert body["features"][0]["properties"]["datetime"] == "2004-10-19T10:23:54" + assert body["features"][0]["properties"]["otherdt"] == "2005-10-19T10:23:54+00:00" + assert body["features"][0]["properties"]["datedt"] == "2007-10-19" diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 81faf18f..9632a35b 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -57,7 +57,7 @@ def test_myschema_and_public_functions(app_myschema_public_functions): def test_myschema_and_public(app_myschema_public): """Available tables should come from `myschema` and `public` and functions from `pg_temp`""" collection_number = ( - 14 # 5 custom functions + 1 tables from myschema + 8 tables from public + 15 # 5 custom functions + 1 table from myschema + 9 tables from public ) response = app_myschema_public.get("/collections") @@ -78,6 +78,7 @@ # tables from public assert "public.my_data" in ids assert "public.my_data_alt" in ids + assert "public.my_data_date" in ids assert "public.minnesota" in ids assert "public.canada" in ids assert "public.landsat" in ids @@ -122,7 +123,7 @@ def test_public_functions(app_only_public_functions): def test_myschema_and_public_order(app_myschema_public_order): """Available tables should come from `myschema` and `public` and functions from `pg_temp`""" collection_number = ( - 14 # 5 custom functions + 1 tables from myschema + 8 tables from public + 15 # 5 custom functions + 1 table from myschema + 9 tables from public ) response = app_myschema_public_order.get("/collections") diff --git a/tipg/collections.py b/tipg/collections.py index 0d8f9283..711364a2 100644 --- a/tipg/collections.py +++ b/tipg/collections.py @@ -145,7 +145,7 @@ def is_geometry(self) -> bool: @property def is_datetime(self) -> bool: """Returns true if this property is a datetime column.""" - return self.type in ("timestamp", "timestamptz") + return
self.type in ("timestamp", "timestamptz", "date") class Parameter(Column): @@ -528,7 +528,7 @@ def _where( # noqa: C901 if datetime: if not self.datetime_columns: raise MissingDatetimeColumn( - "Must have timestamp typed column to filter with datetime." + "Must have timestamp/timestamptz/date typed column to filter with datetime." ) datetime_column = self.get_datetime_column(dt) @@ -976,7 +976,7 @@ async def get_collection_index( # noqa: C901 geometry_column = None for c in properties: - if c.get("type") in ("timestamp", "timestamptz"): + if c.get("type") in ("timestamp", "timestamptz", "date"): if ( table_conf.get("datetimecol") == c["name"] or datetime_column is None diff --git a/tipg/sql/dbcatalog.sql b/tipg/sql/dbcatalog.sql index dfc6296b..a274299f 100644 --- a/tipg/sql/dbcatalog.sql +++ b/tipg/sql/dbcatalog.sql @@ -50,7 +50,7 @@ DECLARE bounds_geom geometry; bounds float[]; BEGIN - IF atttype IN ('timestamp', 'timestamptz') AND datetime_extent THEN + IF atttype IN ('timestamp', 'timestamptz', 'date') AND datetime_extent THEN EXECUTE FORMAT( $q$ SELECT to_json(min(%I::timestamptz)), to_json(max(%I::timestamptz))