Skip to content

Commit

Permalink
Minor improvements to assist with native execution and db setup loading
Browse files Browse the repository at this point in the history
  • Loading branch information
tiagojsag committed Dec 17, 2021
1 parent de1691a commit ee32ab5
Show file tree
Hide file tree
Showing 10 changed files with 215 additions and 45 deletions.
87 changes: 62 additions & 25 deletions Makefile
Original file line number Diff line number Diff line change
@@ -1,18 +1,23 @@
.PHONY: start

# Read values as needed from .env
#
# Optionally pass 'environment=<something>' to load env vars from '.env-something' instead
# Useful to separate i.e. dev from testing databases
#
# If using the same variables in recipes that need to use a dotenv file other
# than .env, remember to check that no values from .env are being used
# inadvertently.
ENVFILE := $(if $(environment), .env-test-e2e, .env)
ENVFILE := $(if $(environment), .env-$(environment), .env)
ifneq (,$(wildcard $(ENVFILE)))
include $(ENVFILE)
export
endif

CIENV := $(if $(filter $(environment), ci), -f docker-compose-test-e2e.ci.yml , -f docker-compose-test-e2e.local.yml)
API_DB_INSTANCE := $(if $(environment), test-e2e-postgresql-api, postgresql-api)
GEO_DB_INSTANCE := $(if $(environment), test-e2e-postgresql-geo-api, postgresql-geo-api)
REDIS_INSTANCE := $(if $(environment), test-e2e-redis, redis)
_API_POSTGRES_USER := $(if $(filter $(environment), ci),${API_POSTGRES_USER},$(shell grep -e API_POSTGRES_USER ${ENVFILE} | sed 's/^.*=//'))
_API_POSTGRES_DB := $(if $(filter $(environment), ci),${API_POSTGRES_DB},$(shell grep -e API_POSTGRES_DB ${ENVFILE} | sed 's/^.*=//'))
_GEO_POSTGRES_USER := $(if $(filter $(environment), ci),${GEO_POSTGRES_USER},$(shell grep -e GEO_POSTGRES_USER ${ENVFILE} | sed 's/^.*=//'))
_GEO_POSTGRES_DB := $(if $(filter $(environment), ci),${GEO_POSTGRES_DB},$(shell grep -e GEO_POSTGRES_DB ${ENVFILE} | sed 's/^.*=//'))

DOCKER_COMPOSE_FILE := $(if $(environment), -f docker-compose-test-e2e.yml $(CIENV), -f docker-compose.yml )
DOCKER_CLEAN_VOLUMES := $(if $(environment), , \
Expand All @@ -29,8 +34,8 @@ test-commands:
@echo $(ENVFILE)
@echo $(DOCKER_COMPOSE_FILE)
@echo $(CIENV)
@echo $(_API_POSTGRES_DB)
@echo $(_GEO_POSTGRES_USER)
@echo $(API_POSTGRES_DB)
@echo $(GEO_POSTGRES_USER)

# Start only API and Geoprocessing services
#
Expand All @@ -53,10 +58,10 @@ stop:
docker-compose $(DOCKER_COMPOSE_FILE) stop

psql-api:
docker-compose $(DOCKER_COMPOSE_FILE) exec $(API_DB_INSTANCE) psql -U "${_API_POSTGRES_USER}"
docker-compose $(DOCKER_COMPOSE_FILE) exec $(API_DB_INSTANCE) psql -U "${API_POSTGRES_USER}"

psql-geo:
docker-compose $(DOCKER_COMPOSE_FILE) exec $(GEO_DB_INSTANCE) psql -U "${_GEO_POSTGRES_USER}"
docker-compose $(DOCKER_COMPOSE_FILE) exec $(GEO_DB_INSTANCE) psql -U "${GEO_POSTGRES_USER}"

redis-api:
docker-compose exec redis redis-cli
Expand Down Expand Up @@ -84,25 +89,25 @@ seed-dbs: seed-api-with-test-data

seed-api-with-test-data: seed-api-init-data | seed-geoapi-init-data
@echo "$(RED)seeding db with testing project and scenarios:$(NC) $(API_DB_INSTANCE)"
docker-compose $(DOCKER_COMPOSE_FILE) exec -T $(API_DB_INSTANCE) psql -U "${_API_POSTGRES_USER}" < api/apps/api/test/fixtures/test-data.sql
docker-compose $(DOCKER_COMPOSE_FILE) exec -T $(API_DB_INSTANCE) psql -U "${API_POSTGRES_USER}" < api/apps/api/test/fixtures/test-data.sql

seed-api-init-data:
@echo "$(RED)seeding initial dbs:$(NC) $(API_DB_INSTANCE)"
docker-compose $(DOCKER_COMPOSE_FILE) exec -T $(API_DB_INSTANCE) psql -U "${_API_POSTGRES_USER}" < api/apps/api/test/fixtures/test-init-apidb.sql
docker-compose $(DOCKER_COMPOSE_FILE) exec -T $(API_DB_INSTANCE) psql -U "${API_POSTGRES_USER}" < api/apps/api/test/fixtures/test-init-apidb.sql

seed-geoapi-init-data:
@echo "$(RED)seeding dbs with initial geodata:$(NC) $(API_DB_INSTANCE), $(GEO_DB_INSTANCE)"
sed -e "s/\$$user/00000000-0000-0000-0000-000000000000/g" api/apps/api/test/fixtures/test-admin-data.sql | docker-compose $(DOCKER_COMPOSE_FILE) exec -T $(GEO_DB_INSTANCE) psql -U "${_GEO_POSTGRES_USER}"; \
sed -e "s/\$$user/00000000-0000-0000-0000-000000000000/g" api/apps/api/test/fixtures/test-wdpa-data.sql | docker-compose $(DOCKER_COMPOSE_FILE) exec -T $(GEO_DB_INSTANCE) psql -U "${_GEO_POSTGRES_USER}";
docker-compose $(DOCKER_COMPOSE_FILE) exec -T $(API_DB_INSTANCE) psql -U "${_API_POSTGRES_USER}" < api/apps/api/test/fixtures/test-features.sql
sed -e "s/\$$user/00000000-0000-0000-0000-000000000000/g" api/apps/api/test/fixtures/test-admin-data.sql | docker-compose $(DOCKER_COMPOSE_FILE) exec -T $(GEO_DB_INSTANCE) psql -U "${GEO_POSTGRES_USER}"; \
sed -e "s/\$$user/00000000-0000-0000-0000-000000000000/g" api/apps/api/test/fixtures/test-wdpa-data.sql | docker-compose $(DOCKER_COMPOSE_FILE) exec -T $(GEO_DB_INSTANCE) psql -U "${GEO_POSTGRES_USER}";
docker-compose $(DOCKER_COMPOSE_FILE) exec -T $(API_DB_INSTANCE) psql -U "${API_POSTGRES_USER}" < api/apps/api/test/fixtures/test-features.sql
@for i in api/apps/api/test/fixtures/features/*.sql; do \
table_name=`basename -s .sql "$$i"`; \
featureid=`docker-compose $(DOCKER_COMPOSE_FILE) exec -T $(API_DB_INSTANCE) psql -X -A -t -U "${_API_POSTGRES_USER}" -c "select id from features where feature_class_name = '$$table_name'"`; \
featureid=`docker-compose $(DOCKER_COMPOSE_FILE) exec -T $(API_DB_INSTANCE) psql -X -A -t -U "${API_POSTGRES_USER}" -c "select id from features where feature_class_name = '$$table_name'"`; \
echo "appending data for $${table_name} with id $${featureid}"; \
sed -e "s/\$$feature_id/$$featureid/g" api/apps/api/test/fixtures/features/$${table_name}.sql | docker-compose $(DOCKER_COMPOSE_FILE) exec -T $(GEO_DB_INSTANCE) psql -U "${_GEO_POSTGRES_USER}"; \
sed -e "s/\$$feature_id/$$featureid/g" api/apps/api/test/fixtures/features/$${table_name}.sql | docker-compose $(DOCKER_COMPOSE_FILE) exec -T $(GEO_DB_INSTANCE) psql -U "${GEO_POSTGRES_USER}"; \
done;

# need notebook service to execute a expecific notebook. this requires a full geodb
# need notebook service to execute a specific notebook. this requires a full geodb
generate-geo-test-data: extract-geo-test-data
docker-compose --project-name ${COMPOSE_PROJECT_NAME} -f ./data/docker-compose.yml exec marxan-science-notebooks papermill --progress-bar --log-output work/notebooks/Lab/convert_csv_sql.ipynb /dev/null
mv -f -u -Z data/data/processed/test-wdpa-data.sql api/apps/api/test/fixtures/test-wdpa-data.sql
Expand Down Expand Up @@ -159,10 +164,10 @@ run-test-unit:
$(MAKE) --keep-going test-unit-backend

dump-geodb-data:
docker-compose exec -T postgresql-geo-api pg_dump -T migrations -a -U "${_GEO_POSTGRES_USER}" -F t ${_GEO_POSTGRES_DB} | gzip > data/data/processed/db_dumps/geo_db-$$(date +%Y-%m-%d).tar.gz
docker-compose exec -T postgresql-geo-api pg_dump -T migrations -a -U "${GEO_POSTGRES_USER}" -F t ${GEO_POSTGRES_DB} | gzip > data/data/processed/db_dumps/geo_db-$$(date +%Y-%m-%d).tar.gz

dump-api-data:
docker-compose exec -T postgresql-api pg_dump -T '(migrations|api_event_kinds|roles)' -a -U "${_API_POSTGRES_USER}" -F t ${_API_POSTGRES_DB} | gzip > data/data/processed/db_dumps/api_db-$$(date +%Y-%m-%d).tar.gz
docker-compose exec -T postgresql-api pg_dump -T '(migrations|api_event_kinds|roles)' -a -U "${API_POSTGRES_USER}" -F t ${API_POSTGRES_DB} | gzip > data/data/processed/db_dumps/api_db-$$(date +%Y-%m-%d).tar.gz

upload-dump-data:
az storage blob upload-batch --account-name marxancloudtest --auth-mode login -d data-ingestion-test-00/dbs-dumps -s data/data/processed/db_dumps
Expand All @@ -186,18 +191,50 @@ restore-volumes-data:
docker run --rm --volumes-from marxan-postgresql-geo-api -v $$(pwd)/data/data/processed/db_volumes:/backup ubuntu bash -c "rm -rf /var/lib/postgresql/data/* && cd / && tar xvf /backup/psql-geo-data.tar"
extract-geo-test-data:
#This location corresponds to the Okavango delta, partially touching Botswana, Angola, Zambia and Namibia
TEST_GEOMETRY=$(shell cat api/apps/api/test/fixtures/test-geometry-subset.json | jq 'tostring'); \
docker-compose exec -T postgresql-geo-api psql -U "${_GEO_POSTGRES_USER}" -c "COPY (SELECT * FROM admin_regions WHERE st_intersects(the_geom, st_geomfromgeojson('$${TEST_GEOMETRY}'))) TO STDOUT DELIMITER ',' CSV HEADER;" > data/data/processed/geo_admin_regions_okavango.csv; \
docker-compose exec -T postgresql-geo-api psql -U "${_GEO_POSTGRES_USER}" -c "COPY (SELECT * FROM wdpa WHERE st_intersects(the_geom, st_geomfromgeojson('$${TEST_GEOMETRY}'))) TO STDOUT DELIMITER ',' CSV HEADER;" > data/data/processed/geo_wdpa_okavango.csv; \
docker-compose exec -T postgresql-geo-api psql -U "${_GEO_POSTGRES_USER}" -c "COPY (SELECT * FROM features_data WHERE st_intersects(the_geom, st_geomfromgeojson('$${TEST_GEOMETRY}'))) TO STDOUT DELIMITER ',' CSV HEADER;" > data/data/processed/geo_features_data_okavango.csv;
docker-compose exec -T postgresql-api psql -U "${_API_POSTGRES_USER}" -c "COPY (SELECT * FROM features) TO STDOUT DELIMITER ',' CSV HEADER;" > data/data/processed/api_features_okavango.csv
TESTGEOMETRY=$(shell cat api/apps/api/test/fixtures/test-geometry-subset.json | jq 'tostring'); \
docker-compose exec -T postgresql-geo-api psql -U "${GEO_POSTGRES_USER}" -c "COPY (SELECT * FROM admin_regions WHERE st_intersects(the_geom, st_geomfromgeojson('$${TESTGEOMETRY}'))) TO STDOUT DELIMITER ',' CSV HEADER;" > data/data/processed/geo_admin_regions_okavango.csv; \
docker-compose exec -T postgresql-geo-api psql -U "${GEO_POSTGRES_USER}" -c "COPY (SELECT * FROM wdpa WHERE st_intersects(the_geom, st_geomfromgeojson('$${TESTGEOMETRY}'))) TO STDOUT DELIMITER ',' CSV HEADER;" > data/data/processed/geo_wdpa_okavango.csv; \
docker-compose exec -T postgresql-geo-api psql -U "${GEO_POSTGRES_USER}" -c "COPY (SELECT * FROM features_data WHERE st_intersects(the_geom, st_geomfromgeojson('$${TESTGEOMETRY}'))) TO STDOUT DELIMITER ',' CSV HEADER;" > data/data/processed/geo_features_data_okavango.csv;
docker-compose exec -T postgresql-api psql -U "${API_POSTGRES_USER}" -c "COPY (SELECT * FROM features) TO STDOUT DELIMITER ',' CSV HEADER;" > data/data/processed/api_features_okavango.csv

generate-content-dumps: dump-api-data | dump-geodb-data
jq -n --arg dateName $$(date +%Y-%m-%d) '{"metadata":{"latest":{"name":$$dateName}}}' > data/data/processed/db_dumps/content.json

generate-export-shpfile:
-docker-compose exec -T postgresql-geo-api mkdir testdataoutput2
-docker-compose exec -T postgresql-geo-api pgsql2shp -f ./testdataoutput2/test.shp -h localhost -p 5432 -r -g the_geom -u ${_GEO_POSTGRES_USER} ${_GEO_POSTGRES_DB} "SELECT the_geom, pug.id as uid, 1 as cost FROM scenarios_pu_data spd inner join planning_units_geom pug on pug.id = spd.pu_geom_id ";
-docker-compose exec -T postgresql-geo-api pgsql2shp -f ./testdataoutput2/test.shp -h localhost -p 5432 -r -g the_geom -u ${GEO_POSTGRES_USER} ${GEO_POSTGRES_DB} "SELECT the_geom, pug.id as uid, 1 as cost FROM scenarios_pu_data spd inner join planning_units_geom pug on pug.id = spd.pu_geom_id ";
-mkdir data/data
-docker cp marxan-postgresql-geo-api:testdataoutput2 data/data


# Native support tasks

# Create the API and GEO databases. Fails gracefully if the databases already exist.
native-db-create:
@echo "SELECT 'CREATE DATABASE \"${API_POSTGRES_DB}\"' WHERE NOT EXISTS (SELECT FROM pg_database WHERE datname = '${API_POSTGRES_DB}')\gexec" | psql -U "${API_POSTGRES_USER}" -h "${API_POSTGRES_HOST}"
@echo "SELECT 'CREATE DATABASE \"${GEO_POSTGRES_DB}\"' WHERE NOT EXISTS (SELECT FROM pg_database WHERE datname = '${GEO_POSTGRES_DB}')\gexec" | psql -U "${GEO_POSTGRES_USER}" -h "${GEO_POSTGRES_HOST}"

# Apply migrations to the current API and GEO databases. Assumes the PostgreSQL server is up.
native-db-migrate: native-db-create
cd api; yarn geoprocessing:typeorm migration:run -t each
cd api; yarn api:typeorm migration:run -t each

native-seed-api-init-data:
@echo "seeding initial dbs"
psql -U "${API_POSTGRES_USER}" -h "${API_POSTGRES_HOST}" ${API_POSTGRES_DB} < api/apps/api/test/fixtures/test-init-apidb.sql

native-seed-geoapi-init-data:
@echo "seeding dbs with initial geodata"
sed -e "s/\$$user/00000000-0000-0000-0000-000000000000/g" api/apps/api/test/fixtures/test-admin-data.sql | psql -U "${GEO_POSTGRES_USER}" -h "${GEO_POSTGRES_HOST}" ${GEO_POSTGRES_DB}; \
sed -e "s/\$$user/00000000-0000-0000-0000-000000000000/g" api/apps/api/test/fixtures/test-wdpa-data.sql | psql -U "${GEO_POSTGRES_USER}" -h "${GEO_POSTGRES_HOST}" ${GEO_POSTGRES_DB};
psql -U "${API_POSTGRES_USER}" -h "${API_POSTGRES_HOST}" ${API_POSTGRES_DB} < api/apps/api/test/fixtures/test-features.sql
@for i in api/apps/api/test/fixtures/features/*.sql; do \
table_name=`basename -s .sql "$$i"`; \
featureid=`psql -X -A -t -U "${API_POSTGRES_USER}" -h "${API_POSTGRES_HOST}" ${API_POSTGRES_DB} -c "select id from features where feature_class_name = '$$table_name'"`; \
echo "appending data for $${table_name} with id $${featureid}"; \
sed -e "s/\$$feature_id/$$featureid/g" api/apps/api/test/fixtures/features/$${table_name}.sql | psql -U "${GEO_POSTGRES_USER}" -h "${GEO_POSTGRES_HOST}" ${GEO_POSTGRES_DB}; \
done;

native-seed-api-with-test-data: native-db-migrate native-seed-api-init-data | native-seed-geoapi-init-data
@echo "seeding db with testing project and scenarios"
psql -U "${API_POSTGRES_USER}" -h "${API_POSTGRES_HOST}" ${API_POSTGRES_DB} < api/apps/api/test/fixtures/test-data.sql
53 changes: 39 additions & 14 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,13 @@ details.

![Backend architecture](./docs/ARCHITECTURE_infrastructure/marxan-contexts.png)

### Dependencies

- Node.js v14
- PostgreSQL v14
- PostGIS v3
- Redis v6

### Prerequisites

1. Install Docker (19.03+):
Expand Down Expand Up @@ -62,29 +69,39 @@ details.
* `APP_SERVICE_PORT` (number, required): the port on which the App service
should listen on the local machine
* `POSTGRES_API_SERVICE_PORT` (number, required): the port on which the
PostgreSQL service should listen on the local machine
* `API_POSTGRES_USER` (string, required): username to be used for the
PostgreSQL connection (API)
* `API_POSTGRES_PASSWORD` (string, required): password to be used for the
PostgreSQL connection (API)
* `API_POSTGRES_DB` (string, required): name of the database to be used for
the PostgreSQL connection (API)
Docker PostgreSQL service should listen on the local machine
* API PostgreSQL configuration variables:
* `API_POSTGRES_HOST` (string, required): host of the database server to be
used for the PostgreSQL connection (API)
* `API_POSTGRES_PORT` (number, required): port of the database server to be
used for the PostgreSQL connection (API)
* `API_POSTGRES_USER` (string, required): username to be used for the
PostgreSQL connection (API)
* `API_POSTGRES_PASSWORD` (string, required): password to be used for the
PostgreSQL connection (API)
* `API_POSTGRES_DB` (string, required): name of the database to be used for
the PostgreSQL connection (API)
* `GEOPROCESSING_SERVICE_PORT` (number, required): the port exposed by Docker
for the Geoprocessing service; when running an instance under Docker
Compose, NestJS will always be listening on port 3000 internally, and this
is mapped to `GEOPROCESSING_SERVICE_PORT` when exposed outside of the
container
* `POSTGRES_GEO_SERVICE_PORT` (number, required): the port on which the
geoprocessing PostgreSQL service should listen on the local machine
geoprocessing Docker PostgreSQL service should listen on the local machine
* `GEOPROCESSING_RUN_MIGRATIONS_ON_STARTUP`: (`true|false`, optional, default
is `true`): set this to `false` if migrations for the Geoprocessing service
should not run automatically on startup
* `GEO_POSTGRES_USER` (string, required): username to be used for the
geoprocessing PostgreSQL connection (API)
* `GEO_POSTGRES_PASSWORD` (string, required): password to be used for the
geoprocessing PostgreSQL connection (API)
* `GEO_POSTGRES_DB` (string, required): name of the database to be used for
the geoprocessing PostgreSQL connection (API)
* Geoprocessing PostgreSQL configuration variables:
* `GEO_POSTGRES_HOST` (string, required): host of the database server to be
used for the geoprocessing PostgreSQL connection (API)
* `GEO_POSTGRES_PORT` (number, required): port of the database server to be
used for the geoprocessing PostgreSQL connection (API)
* `GEO_POSTGRES_USER` (string, required): username to be used for the
geoprocessing PostgreSQL connection (API)
* `GEO_POSTGRES_PASSWORD` (string, required): password to be used for the
geoprocessing PostgreSQL connection (API)
* `GEO_POSTGRES_DB` (string, required): name of the database to be used for
the geoprocessing PostgreSQL connection (API)
* `POSTGRES_AIRFLOW_SERVICE_PORT` (number, required): the port on which the
PostgreSQL for Airflow service should listen on the local machine
* `AIRFLOW_PORT` (number, required): the port on which the
Expand Down Expand Up @@ -139,6 +156,14 @@ instead of the hardcoded port `3000` which is used in Docker setups.
* `GEOPROCESSING_DAEMON_LISTEN_PORT` (number, optional, default is 3000): port
on which the Express daemon of the Geoprocessing service will listen

Make sure you are running the necessary [dependencies](#dependencies) locally.
You may need to tweak some env variables to point to the right URLs and ports -
see the list above for details on which configuration options are available.

The included Makefile has some useful build targets (commands) specifically
targeted at native execution (prefixed with `native-`) that you'll find helpful.
Refer to the Makefile inline documentation for more details.

### Running the Marxan Cloud platform

Run `make start` to start all the services.
Expand Down
25 changes: 19 additions & 6 deletions api/apps/api/config/custom-environment-variables.json
Original file line number Diff line number Diff line change
Expand Up @@ -4,12 +4,22 @@
},
"postgresApi": {
"url": "API_POSTGRES_URL",
"host": "API_POSTGRES_HOST",
"username": "API_POSTGRES_USER",
"password": "API_POSTGRES_PASSWORD",
"database": "API_POSTGRES_DB",
"port": "API_POSTGRES_PORT",
"runMigrationsOnStartup": "API_RUN_MIGRATIONS_ON_STARTUP"
},
"postgresGeoApi": {
},
"postgresGeoApi": {
"url": "GEO_POSTGRES_URL",
"host": "GEO_POSTGRES_HOST",
"username": "GEO_POSTGRES_USER",
"password": "GEO_POSTGRES_PASSWORD",
"database": "GEO_POSTGRES_DB",
"port": "GEO_POSTGRES_PORT",
"runMigrationsOnStartup": "GEOPROCESSING_RUN_MIGRATIONS_ON_STARTUP"
},
},
"network": {
"cors": {
"origins_extra": "NETWORK_CORS_ORIGINS"
Expand All @@ -23,10 +33,13 @@
"secret": "API_AUTH_X_API_KEY"
}
},
"geoprocessing": {
"url": "GEOPROCESSING_URL"
},
"redisApi": {
"connection": {
"host": "REDIS_HOST"
}
"connection": {
"host": "REDIS_HOST"
}
},
"api": {
"url": "API_SERVICE_URL",
Expand Down
16 changes: 16 additions & 0 deletions api/apps/api/config/default.json
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,22 @@
},
"concurrency": 50
},
"postgresApi": {
"url": null,
"port": 5432,
"host": null,
"username": null,
"password": null,
"database": null
},
"postgresGeoApi": {
"url": null,
"port": 5432,
"host": null,
"username": null,
"password": null,
"database": null
},
"api": {
"url": "http://api:3000",
"daemonListenPort": 3000
Expand Down
25 changes: 25 additions & 0 deletions api/apps/api/src/migrations/api/1608149578000-EnablePostgis.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
import { Logger } from '@nestjs/common';
import { MigrationInterface, QueryRunner } from 'typeorm';
import { PostgreSQLUtils } from '@marxan-api/utils/postgresql.utils';

export class EnablePostgis1608149578000 implements MigrationInterface {
async up(queryRunner: QueryRunner): Promise<any> {
if (await PostgreSQLUtils.version13Plus()) {
await queryRunner.query(`
CREATE EXTENSION IF NOT EXISTS postgis;
`);
} else {
Logger.warn(
'The PostgreSQL extension `postgis` is needed for the Marxan API but it was not possible to activate it. Please activate it manually (see setup documentation).',
);
}
}

async down(queryRunner: QueryRunner): Promise<any> {
if (await PostgreSQLUtils.version13Plus()) {
await queryRunner.query(`
DROP EXTENSION IF EXISTS postgis;
`);
}
}
}
Loading

0 comments on commit ee32ab5

Please sign in to comment.