Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix: Wrong location archive keys #448

Open
wants to merge 11 commits into
base: develop
Choose a base branch
from
27 changes: 16 additions & 11 deletions cumulus_lambda_functions/uds_api/dapa/granules_dapa_query_es.py
Original file line number Diff line number Diff line change
Expand Up @@ -75,6 +75,8 @@ def __get_time_range_terms(self):
]

def __create_pagination_links(self, page_marker_str):
if self.__pagination_link_obj is None:
return []
new_queries = deepcopy(self.__pagination_link_obj.org_query_params)
new_queries['limit'] = int(new_queries['limit'] if 'limit' in new_queries else self.__limit)
current_page = f"{self.__pagination_link_obj.requesting_base_url}?{'&'.join([f'{k}={v}' for k, v in new_queries.items()])}"
Expand Down Expand Up @@ -114,8 +116,20 @@ def archive_single_granule(self, granule_id):
daac_archiver.send_to_daac_internal(cnm_response)
return

def __restructure_each_granule_result(self, each_granules_query_result_stripped):
    """Normalize one raw ES hit ``_source`` dict, mutating it in place.

    Three clean-ups are applied:
    1. the internal ``event_time`` bookkeeping field is discarded;
    2. a stored ``bbox`` is converted back to its external form via
       ``GranulesDbIndex.from_es_bbox``;
    3. every archiving key (``GranulesIndexMapping.archiving_keys``) found at
       the top level is relocated under the ``properties`` sub-dict.

    :param each_granules_query_result_stripped: the ``_source`` dict of a
        single granule hit returned by the ES query
    :return: None — the dict is modified in place
    """
    result = each_granules_query_result_stripped
    # Internal field; not part of the outward-facing item representation.
    result.pop('event_time', None)
    if 'bbox' in result:
        result['bbox'] = GranulesDbIndex.from_es_bbox(result['bbox'])
    # Archiving keys are stored flat in ES but belong inside `properties`.
    relocated = {k: result.pop(k) for k in GranulesIndexMapping.archiving_keys if k in result}
    if relocated:
        # NOTE(review): assumes `properties` already exists whenever any
        # archiving key is present — same assumption as the original code.
        result['properties'].update(relocated)
    return

def get_single_granule(self, granule_id):
granules_query_dsl = {
'size': 1,
'sort': [{'id': {'order': 'asc'}}],
'query': {'bool': {'must': [{
'term': {'id': granule_id}
}]}}
Expand All @@ -132,10 +146,7 @@ def get_single_granule(self, granule_id):
each_granules_query_result_stripped = granules_query_result['hits']['hits'][0]['_source']
self_link = Link(rel='self', target=f'{self.__base_url}/{WebServiceConstants.COLLECTIONS}/{self.__collection_id}/items/{each_granules_query_result_stripped["id"]}', media_type='application/json', title=each_granules_query_result_stripped["id"]).to_dict(False)
each_granules_query_result_stripped['links'].append(self_link)
if 'event_time' in each_granules_query_result_stripped:
each_granules_query_result_stripped.pop('event_time')
if 'bbox' in each_granules_query_result_stripped:
each_granules_query_result_stripped['bbox'] = GranulesDbIndex.from_es_bbox(each_granules_query_result_stripped['bbox'])
self.__restructure_each_granule_result(each_granules_query_result_stripped)
return each_granules_query_result_stripped

def start(self):
Expand All @@ -152,13 +163,7 @@ def start(self):
for each_granules_query_result_stripped in granules_query_result_stripped:
self_link = Link(rel='self', target=f'{self.__base_url}/{WebServiceConstants.COLLECTIONS}/{self.__collection_id}/items/{each_granules_query_result_stripped["id"]}', media_type='application/json', title=each_granules_query_result_stripped["id"]).to_dict(False)
each_granules_query_result_stripped['links'].append(self_link)
if 'event_time' in each_granules_query_result_stripped:
each_granules_query_result_stripped.pop('event_time')
if 'bbox' in each_granules_query_result_stripped:
each_granules_query_result_stripped['bbox'] = GranulesDbIndex.from_es_bbox(each_granules_query_result_stripped['bbox'])
for each_archiving_key in GranulesIndexMapping.archiving_keys:
if each_archiving_key in each_granules_query_result_stripped:
each_granules_query_result_stripped['properties'][each_archiving_key] = each_granules_query_result_stripped.pop(each_archiving_key)
self.__restructure_each_granule_result(each_granules_query_result_stripped)
pagination_link = '' if len(granules_query_result['hits']['hits']) < self.__limit else ','.join(granules_query_result['hits']['hits'][-1]['sort'])
return {
'statusCode': 200,
Expand Down
Empty file.
Empty file.
Loading
Loading