Remove CTs filter dependency
tiagojsag committed Nov 3, 2020
1 parent bd049c4 commit e27ea10
Showing 14 changed files with 703 additions and 122 deletions.
1 change: 1 addition & 0 deletions entrypoint.sh
@@ -9,6 +9,7 @@ case "$1" in
        ;;
    test)
        echo "Test"
        exec pytest
        ;;
    start)
        echo "Running Start"
125 changes: 32 additions & 93 deletions microservice/register.json
@@ -8,117 +8,56 @@
  ],
  "endpoints": [
    {
      "path": "/v1/query/:dataset",
      "path": "/v1/query/nexgddp/:dataset",
      "method": "GET",
      "binary": true,
      "redirect": {
        "method": "POST",
        "path": "/api/v1/nexgddp/query/:dataset"
      },
      "filters": [
        {
          "name": "dataset",
          "path": "/v1/dataset/:dataset",
          "method": "GET",
          "params": {
            "dataset": "dataset"
          },
          "condition": "OR",
          "compare": [
            {
              "data": {
                "attributes": {
                  "provider": "nexgddp",
                  "status": "saved"
                }
              }
            },
            {
              "data": {
                "attributes": {
                  "provider": "loca",
                  "status": "saved"
                }
              }
            }
          ]
        }
      ]
      }
    },
    {
      "path": "/v1/query/:dataset",
      "path": "/v1/query/nexgddp/:dataset",
      "method": "POST",
      "binary": true,
      "redirect": {
        "method": "POST",
        "path": "/api/v1/nexgddp/query/:dataset"
      },
      "filters": [
        {
          "name": "dataset",
          "path": "/v1/dataset/:dataset",
          "method": "GET",
          "params": {
            "dataset": "dataset"
          },
          "condition": "OR",
          "compare": [
            {
              "data": {
                "attributes": {
                  "provider": "nexgddp",
                  "status": "saved"
                }
              }
            },
            {
              "data": {
                "attributes": {
                  "provider": "loca",
                  "status": "saved"
                }
              }
            }
          ]
        }
      ]
      }
    },
    {
      "path": "/v1/fields/:dataset",
      "path": "/v1/fields/nexgddp/:dataset",
      "method": "GET",
      "redirect": {
        "method": "POST",
        "path": "/api/v1/nexgddp/fields/:dataset"
      },
      "filters": [
        {
          "name": "dataset",
          "path": "/v1/dataset/:dataset",
          "method": "GET",
          "params": {
            "dataset": "dataset"
          },
          "condition": "OR",
          "compare": [
            {
              "data": {
                "attributes": {
                  "provider": "nexgddp",
                  "status": "saved"
                }
              }
            },
            {
              "data": {
                "attributes": {
                  "provider": "loca",
                  "status": "saved"
                }
              }
            }
          ]
        }
      ]
      }
    },
    {
      "path": "/v1/query/loca/:dataset",
      "method": "GET",
      "binary": true,
      "redirect": {
        "method": "POST",
        "path": "/api/v1/nexgddp/query/:dataset"
      }
    },
    {
      "path": "/v1/query/loca/:dataset",
      "method": "POST",
      "binary": true,
      "redirect": {
        "method": "POST",
        "path": "/api/v1/nexgddp/query/:dataset"
      }
    },
    {
      "path": "/v1/fields/loca/:dataset",
      "method": "GET",
      "redirect": {
        "method": "POST",
        "path": "/api/v1/nexgddp/fields/:dataset"
      }
    },
    {
      "path": "/v1/rest-datasets/nexgddp",
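With the Control Tower "filters" blocks removed, the gateway no longer pre-checks a dataset's provider before redirecting; the new provider-scoped paths (/v1/query/nexgddp/:dataset, /v1/query/loca/:dataset, and the matching fields routes) are registered plainly, and the provider check now happens inside the microservice (see the get_dataset_from_id middleware further down). A rough client-side sketch of the new route, where the gateway host, dataset id, and SQL statement are placeholders rather than values from this commit:

import requests

API_URL = "https://api.example.com"  # placeholder gateway host
DATASET_ID = "00000000-0000-0000-0000-000000000000"  # placeholder dataset id

# The gateway redirects this call to POST /api/v1/nexgddp/query/:dataset
# on the microservice, per the register.json entry above.
response = requests.get(
    f"{API_URL}/v1/query/nexgddp/{DATASET_ID}",
    params={"sql": f"SELECT avg(tasmax) FROM {DATASET_ID}"},  # placeholder SQL
)
print(response.status_code)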
2 changes: 1 addition & 1 deletion nexgddp.sh
@@ -7,7 +7,7 @@ case "$1" in
        ;;
    test)
        type docker-compose >/dev/null 2>&1 || { echo >&2 "docker-compose is required but it's not installed. Aborting."; exit 1; }
        docker-compose -f docker-compose-test.yml build && docker-compose -f docker-compose-test.yml up
        docker-compose -f docker-compose-test.yml build && docker-compose -f docker-compose-test.yml up --abort-on-container-exit
        ;;
    *)
        echo "Usage: nexgddp.sh {develop|test}" >&2
8 changes: 8 additions & 0 deletions nexgddp/errors.py
@@ -57,9 +57,17 @@ class DatasetNotFound(Error):
    pass


class RedisError(Error):
    pass


class RasdamanError(Error):
    pass


class CoverageNotFound(Error):
    pass


class DatasetNotFound(Error):
    pass
32 changes: 29 additions & 3 deletions nexgddp/middleware.py
@@ -2,16 +2,16 @@

import json
import logging
from flask import request, redirect
from functools import wraps

from flask import request

from nexgddp.errors import GeostoreNotFound, LayerNotFound
from nexgddp.routes.api import error
from nexgddp.services.dataset_service import DatasetService
from nexgddp.services.geostore_service import GeostoreService
from nexgddp.services.layer_service import LayerService
from nexgddp.services.redis_service import RedisService
from nexgddp.services.storage_service import StorageService
from nexgddp.errors import DatasetNotFound


def get_bbox_by_hash(func):
@@ -219,3 +219,29 @@ def wrapper(*args, **kwargs):
        return error(status=403, detail="Not authorized")

    return wrapper


def get_dataset_from_id(func):
    @wraps(func)
    def wrapper(*args, **kwargs):
        logging.debug("Getting dataset from id")

        try:
            dataset_object = DatasetService.get(kwargs['dataset_id'])
        except DatasetNotFound:
            return error(status=404, detail="Dataset with id {} doesn't exist".format(kwargs['dataset_id']))

        connector_type = dataset_object.get('connectorType', None)
        provider = dataset_object.get('provider', None)

        if connector_type != "rest":
            return error(status=422, detail="This operation is only supported for datasets with connectorType 'rest'")

        if provider != "nexgddp":
            return error(status=422, detail="This operation is only supported for datasets with provider 'nexgddp'")

        kwargs['dataset'] = dataset_object

        return func(*args, **kwargs)

    return wrapper
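A minimal usage sketch of the new decorator, assuming a hypothetical blueprint and route name; only the decorator itself and the injected dataset kwarg mirror the middleware above (this is how nexgddp_router.py below applies it):

from flask import Blueprint, jsonify

from nexgddp.middleware import get_dataset_from_id

example_endpoints = Blueprint('example_endpoints', __name__)  # hypothetical blueprint


@example_endpoints.route('/example/<dataset_id>', methods=['GET'])
@get_dataset_from_id
def example(dataset_id, dataset):
    # `dataset` is the object returned by DatasetService.get(dataset_id),
    # already validated as a 'rest' dataset with provider 'nexgddp'.
    return jsonify({'tableName': dataset.get('tableName')})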
45 changes: 27 additions & 18 deletions nexgddp/routes/api/v1/nexgddp_router.py
@@ -8,15 +8,14 @@
import dateutil.parser
from CTRegisterMicroserviceFlask import request_to_microservice
from flask import Flask, jsonify, request, Blueprint, send_file
from flask_cache import Cache
from flask_caching import Cache

from nexgddp.config import SETTINGS
from nexgddp.errors import SqlFormatError, PeriodNotValid, TableNameNotValid, GeostoreNeeded, InvalidField, \
    CoordinatesNeeded, CoverageNotFound
from nexgddp.helpers.coloring_helper import ColoringHelper
from nexgddp.middleware import get_bbox_by_hash, get_latlon, get_tile_attrs, get_layer, get_year, \
    is_microservice, get_diff_attrs, is_microservice_or_admin
# from nexgddp import cache
    is_microservice, get_diff_attrs, is_microservice_or_admin, get_dataset_from_id
from nexgddp.routes.api import error
from nexgddp.services.diff_service import DiffService
from nexgddp.services.query_service import QueryService
@@ -27,14 +26,16 @@

nexgddp_endpoints = Blueprint('nexgddp_endpoints', __name__)

# mmm
app = Flask(__name__)

cache = Cache(app, config={
cache_config = {
    'CACHE_TYPE': 'redis',
    'CACHE_KEY_PREFIX': 'nexgddp_queries',
    'CACHE_REDIS_URL': SETTINGS.get('redis').get('url')
})
}

app.config.from_mapping(cache_config)
cache = Cache(app)


def callback_to_dataset(body):
@@ -79,7 +80,7 @@ def is_literal(clause):
    if not all(val is None for val in select_functions) and not all(val is None for val in select_literals):
        logging.error("Provided functions and literals at the same time")
        raise Exception()
    # And it's neccesary to select something
    # And it's necessary to select something
    if select == [None] or len(select) == 0 or select is None:
        raise Exception()
    return select
@@ -92,7 +93,7 @@ def parse_year(value):
    try:
        result = dateutil.parser.parse(value).strftime('%Y-%m-%d')
        return result
    except Error as e:
    except Exception as e:
        raise PeriodNotValid("Supplied dates are invalid")


@@ -195,10 +196,11 @@ def get_years(json_sql, temporal_resolution):


def make_cache_key(*args, **kwargs):
    # This one is for _queries_ - not layers
    logging.debug("Making cache key")
    # path = request.path
    sql = request.args.get('sql', None) or request.get_json().get('sql', None)
    request_json = request.get_json() or {}
    sql = request.args.get('sql', None) or request_json.get('sql', None)
    if not sql:
        return None
    logging.debug(f"Original sql statement: {sql}")
    converted_sql = base64.b64encode(str.encode(str(sql)))
    logging.debug(converted_sql)
@@ -223,7 +225,11 @@ def make_cache_key(*args, **kwargs):

def unless_cache_query(*args, **kwargs):
logging.info("Checking if previous query failed")
if SETTINGS.get('redis').get('url') is None:
return True
cache_key = make_cache_key()
if cache_key is None:
return True
logging.debug(f"cache_key: {cache_key}")
# try:
res = cache.get(cache_key)
Expand All @@ -245,19 +251,22 @@ def unless_cache_query(*args, **kwargs):


@nexgddp_endpoints.route('/query/<dataset_id>', methods=['POST'])
@get_dataset_from_id
@get_bbox_by_hash
@get_latlon
@cache.cached(timeout=0, key_prefix=make_cache_key, unless=unless_cache_query)
def query(dataset_id, bbox):
def query(dataset_id, bbox, dataset):
"""NEXGDDP QUERY ENDPOINT"""
logging.info('[ROUTER] Doing Query of dataset ' + dataset_id)
# Get and deserialize
dataset = request.get_json().get('dataset', None).get('data', None)
table_name = dataset.get('attributes').get('tableName')
table_name = dataset.get('tableName')
temporal_resolution = table_name.split('_')[-1]
logging.debug(f"temporal_resolution: {temporal_resolution}")
scenario, model = table_name.rsplit('/')
sql = request.args.get('sql', None) or request.get_json().get('sql', None)

request_json = request.get_json() or {}
sql = request.args.get('sql', None) or request_json.get('sql', None)

if not sql:
return error(status=400, detail='sql must be provided')
# convert
@@ -354,13 +363,13 @@ def is_year(clause):


@nexgddp_endpoints.route('/fields/<dataset_id>', methods=['POST'])
def get_fields(dataset_id):
@get_dataset_from_id
def get_fields(dataset_id, dataset):
"""NEXGDDP FIELDS ENDPOINT"""
logging.info('[ROUTER] Getting fields of dataset' + dataset_id)

# Get and deserialize
dataset = request.get_json().get('dataset', None).get('data', None)
table_name = dataset.get('attributes').get('tableName')
table_name = dataset.get('tableName')
scenario, model = table_name.rsplit('/')

fields_xml = QueryService.get_rasdaman_fields(scenario, model)
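A stripped-down sketch of the caching pattern used in nexgddp_router.py above, under the assumption that the cache key is derived only from the base64-encoded SQL statement (the remainder of make_cache_key is collapsed in this diff); the Redis URL, key prefix, and route are placeholders:

import base64

from flask import Flask, request
from flask_caching import Cache

app = Flask(__name__)
app.config.from_mapping({
    'CACHE_TYPE': 'redis',
    'CACHE_KEY_PREFIX': 'example_queries',
    'CACHE_REDIS_URL': 'redis://localhost:6379/0',  # placeholder Redis URL
})
cache = Cache(app)


def sql_cache_key(*args, **kwargs):
    # Key the cache on the SQL statement, whether passed as a query param or in the JSON body.
    sql = request.args.get('sql', None) or (request.get_json(silent=True) or {}).get('sql', None)
    if not sql:
        return None
    return base64.b64encode(str(sql).encode()).decode()


def skip_cache(*args, **kwargs):
    # Bypass caching entirely when there is no SQL, and hence no usable key.
    return sql_cache_key() is None


@app.route('/query/<dataset_id>', methods=['POST'])
@cache.cached(timeout=0, key_prefix=sql_cache_key, unless=skip_cache)
def query(dataset_id):
    return {'sql': request.args.get('sql', None)}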
10 changes: 9 additions & 1 deletion nexgddp/services/redis_service.py
@@ -2,24 +2,32 @@
import redis

from nexgddp.config import SETTINGS
from nexgddp.errors import RedisError

r = redis.StrictRedis.from_url(url=SETTINGS.get('redis').get('url'))
r = None
if SETTINGS.get('redis').get('url') is not None:
    r = redis.StrictRedis.from_url(url=SETTINGS.get('redis').get('url'))


class RedisService(object):

    @staticmethod
    def get(layer):
        if r is None:
            raise RedisError(status=500, detail="Redis server not configured")
        text = r.get(layer)
        if text is not None:
            return text
        return None

    @staticmethod
    def set(key, value):
        if r is None:
            raise RedisError(status=500, detail="Redis server not configured")
        return r.set(key, value)

    @staticmethod
    def expire_layer(layer):
        if r is None:
            raise RedisError(status=500, detail="Redis server not configured")
        for key in r.scan_iter("*" + layer + "*"):
            r.delete(key)
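With Redis now optional, code that previously assumed a configured server can treat the new RedisError as a signal to skip caching. A hedged caller sketch; the function name and fallback behaviour are illustrative, only RedisService and RedisError come from the modules above:

from nexgddp.errors import RedisError
from nexgddp.services.redis_service import RedisService


def get_cached_layer(layer_id):
    """Return the cached layer payload, or None when no Redis server is configured."""
    try:
        return RedisService.get(layer_id)
    except RedisError:
        # No Redis configured; fall back to recomputing the layer.
        return None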
1 change: 0 additions & 1 deletion nexgddp/tests/__init__.py

This file was deleted.

Empty file removed nexgddp/tests/test_basic.py
Empty file.