diff --git a/catalogs/trilateral.yaml b/catalogs/trilateral.yaml
index 5a941407..6bc9d5b2 100644
--- a/catalogs/trilateral.yaml
+++ b/catalogs/trilateral.yaml
@@ -56,7 +56,6 @@ collections:
   - LWL_lake_water_level
   - SMCG_soil_moisture_content
   - SIF_solar_induced_chlorophyll_fluorescence
-  # - NLK_lake_cloudfree #TODO: needs implementation
   - N5_nightlights
   - N6_geoglam
   - N7_slowdown_proxy
@@ -64,8 +63,7 @@ collections:
   - N3b_water_quality
   - N3a2_chl_concentration
   - E10d_regional_cropland
-  - Lakes_ALOS2_HH
-  - Lakes_ALOS2_HV
+  - LC_lakes_collection
   - N1_NO2_jaxa
   - N2_CO2_jaxa_gosat
   - GHS_BUILT-S-R2023A
diff --git a/collections/LC_lakes_collection.yaml b/collections/LC_lakes_collection.yaml
new file mode 100644
index 00000000..a93570e6
--- /dev/null
+++ b/collections/LC_lakes_collection.yaml
@@ -0,0 +1,45 @@
+Name: lakes_collection
+Title: Lakes collection
+EodashIdentifier: LC
+Subtitle: Collection of multiple lake-related datasets
+Description: Collection of multiple lake-related datasets
+Themes:
+  - water
+Tags:
+  - placeholder
+Satellite:
+  - ALOS
+  - Sentinel 1
+  - Sentinel 2
+Sensor:
+  - placeholder
+  - placeholder 2
+Agency:
+  - ESA
+  - NASA
+  - JAXA
+Provider:
+  - Name: ESA
+License: MIT
+Subcollections:
+  - Identifier: Lakes_S2L2A
+    Collection: Lakes_S2L2A
+  - Identifier: lakes_alos2_hh
+    Collection: Lakes_ALOS2_HH
+    Country: KH
+    Name: Tonle Sap (ALOS2_HH)
+    Point: [104.1, 12.9]
+    Bbox: [103.4, 12.5, 105, 13.3]
+  - Identifier: lakes_alos2_hv
+    Collection: Lakes_ALOS2_HV
+    Country: KH
+    Name: Tonle Sap (ALOS2_HV)
+    Point: [104.1, 12.9]
+    Bbox: [103.4, 12.5, 105, 13.3]
+  - Identifier: NLK_lake_cloudfree
+    Collection: NLK_lake_cloudfree
+    Country: ['UZ', 'KZ']
+    Name: Aral Lake
+    Point: [58.581, 45.303]
+    Bbox: [57, 43.24, 62.29, 47.72]
+#Image: LC_lakes_collection/thumbnail.png
diff --git a/collections/Lakes_ALOS2_HH.yaml b/collections/Lakes_ALOS2_HH.yaml
index 97f34266..10983122 100644
--- a/collections/Lakes_ALOS2_HH.yaml
+++ b/collections/Lakes_ALOS2_HH.yaml
@@ -1,6 +1,6 @@
 Name: lakes_alos2_hh
 Title: Lakes (ALOS2-HH)
-EodashIdentifier: Lakes_ALOS2
+EodashIdentifier: Lakes_ALOS2_HH
 Subtitle: Total suspended matter description
 Description: Lakes_ALOS2_lakes_alos2_hh/Lakes_ALOS2.md
 Themes:
diff --git a/collections/Lakes_ALOS2_HV.yaml b/collections/Lakes_ALOS2_HV.yaml
index 43c553ff..4d04fbed 100644
--- a/collections/Lakes_ALOS2_HV.yaml
+++ b/collections/Lakes_ALOS2_HV.yaml
@@ -1,6 +1,6 @@
 Name: lakes_alos2_hv
 Title: Lakes (ALOS2-HV)
-EodashIdentifier: Lakes_ALOS2
+EodashIdentifier: Lakes_ALOS2_HV
 Subtitle: Total suspended matter description
 Description: Lakes_ALOS2_lakes_alos2_hv/Lakes_ALOS2.md
 Themes:
diff --git a/collections/Lakes_S2L2A.yaml b/collections/Lakes_S2L2A.yaml
new file mode 100644
index 00000000..65733a90
--- /dev/null
+++ b/collections/Lakes_S2L2A.yaml
@@ -0,0 +1,63 @@
+Name: Lakes_S2L2A
+Title: Lakes from Sentinel 2
+EodashIdentifier: Lakes_S2L2A
+Description: Lakes_S2L2A/Lakes_S2L2A.md
+Themes:
+  - water
+Tags:
+  - placeholder
+Satellite:
+  - Sentinel 2
+Sensor:
+  - placeholder
+Agency:
+  - ESA
+Provider:
+  - Name: placeholder
+License: MIT
+Locations:
+  - Identifier: Tulare
+    Country: US
+    Name: Tulare
+    Point: [-119.70, 35.90]
+    Bbox: [-120, 35.72, -119.4, 36.2]
+    Times: [
+      '2023-01-01', '2023-01-06', '2023-01-21', '2023-01-26',
+      '2023-01-31', '2023-02-05', '2023-02-10', '2023-02-15',
+      '2023-03-02', '2023-03-07', '2023-03-27', '2023-04-01',
+      '2023-04-06', '2023-04-11', '2023-04-21', '2023-04-26',
+      '2023-05-11', '2023-05-16', '2023-05-21', '2023-05-26'
+    ]
+  - Identifier: Aral
+    Country: ['UZ', 'KZ']
+    Name: Aral
+    Point: [58.581, 45.303]
+    Bbox: [57, 43.24, 62.29, 47.72]
+    Times: [
+      '2017-04-07', '2017-08-05', '2017-08-20', '2017-08-25',
+      '2017-09-14', '2018-04-02', '2018-04-12', '2018-04-27',
+      '2018-05-17', '2018-06-16', '2018-07-01', '2018-07-26',
+      '2018-08-30', '2018-09-14', '2018-09-24', '2018-10-04',
+      '2018-10-09', '2019-06-06', '2019-07-16', '2019-07-22',
+      '2019-08-20', '2019-08-25', '2019-09-19', '2019-10-04',
+      '2019-10-19', '2019-10-22', '2019-11-08', '2020-05-26',
+      '2020-06-15', '2020-06-20', '2020-07-15', '2020-07-20',
+      '2020-09-03', '2020-09-28', '2020-10-18', '2020-10-28',
+      '2020-11-22', '2021-04-11', '2021-04-16', '2021-05-16',
+      '2021-06-20', '2021-06-25', '2021-07-20', '2021-07-25',
+      '2021-08-09', '2021-08-19', '2021-08-24', '2021-09-03',
+      '2021-09-23', '2021-10-13', '2021-10-18', '2021-10-28',
+      '2021-11-07', '2022-04-01', '2022-05-16', '2022-06-25',
+      '2022-06-30', '2022-07-20', '2022-08-19', '2022-08-24',
+      '2022-08-29', '2022-09-13', '2022-09-23', '2022-10-03',
+      '2022-10-08', '2022-10-13', '2022-10-18', '2023-03-17',
+      '2023-03-22', '2023-03-27', '2023-04-01', '2023-04-21',
+      '2023-06-05'
+    ]
+Resources:
+  - EndPoint: https://services.sentinel-hub.com
+    Name: Sentinel Hub WMS
+    CollectionId: sentinel-2-l2a
+    LayerId: SENTINEL-2-L2A-TRUE-COLOR
+Image: Lakes_S2L2A/thumbnail.png
+Legend: Lakes_S2L2A/cm_legend.png
\ No newline at end of file
diff --git a/collections/NLK_lake_cloudfree.yaml b/collections/NLK_lake_cloudfree.yaml
index 231c87e2..2cf97fea 100644
--- a/collections/NLK_lake_cloudfree.yaml
+++ b/collections/NLK_lake_cloudfree.yaml
@@ -1,7 +1,7 @@
-Name: NLK_lake_cloud_free
+Name: NLK_lake_cloudfree
 Title: Aral Lake cloud free
 EodashIdentifier: NLK
-Description: NLK_lake_cloud_free/NLK.md
+Description: NLK_lake_cloudfree/NLK.md
 Themes:
   - oceans
 Tags:
@@ -18,7 +18,7 @@ Services:
     Name: VEDA Statistics
 Resources:
   - EndPoint: https://staging-stac.delta-backend.com/
-    Type: cog
+    Type: tiles
     Name: VEDA
     CollectionId: landsat-c2l2-sr-lakes-aral-sea
     Assets:
diff --git a/generators/generate_indicators.py b/generators/generate_indicators.py
index 54371bbd..3edc5f75 100644
--- a/generators/generate_indicators.py
+++ b/generators/generate_indicators.py
@@ -11,6 +11,7 @@
 import re
 from pathlib import Path
 from datetime import datetime
+from dotenv import load_dotenv
 import yaml
 from yaml.loader import SafeLoader
 from itertools import groupby
@@ -41,6 +42,9 @@
 import spdx_lookup as lookup
 import argparse
 
+# Make sure we load the local .env definitions (e.g. SH_CLIENT_ID, SH_CLIENT_SECRET)
+load_dotenv()
+
 argparser = argparse.ArgumentParser(
     prog='STAC generator and harvester',
     description='''
@@ -99,25 +103,85 @@ def process_collection_file(config, file_path, catalog):
     print("Processing collection:", file_path)
     with open(file_path) as f:
         data = yaml.load(f, Loader=SafeLoader)
-    for resource in data["Resources"]:
-        if "EndPoint" in resource:
-            if resource["Name"] == "Sentinel Hub":
-                handle_SH_endpoint(config, resource, data, catalog)
-            elif resource["Name"] == "GeoDB":
-                collection = handle_GeoDB_endpoint(config, resource, data, catalog)
-                add_to_catalog(collection, catalog, resource, data)
-            elif resource["Name"] == "VEDA":
-                handle_VEDA_endpoint(config, resource, data, catalog)
-            elif resource["Name"] == "xcube":
-                handle_xcube_endpoint(config, resource, data, catalog)
-            elif resource["Name"] == "WMS":
-                handle_WMS_endpoint(config, resource, data, catalog)
-            elif resource["Name"] == "GeoDB Vector Tiles":
-                handle_GeoDB_Tiles_endpoint(config, resource, data, catalog)
-            elif resource["Name"] == "Collection-only":
-                handle_collection_only(config, resource, data, catalog)
+    if "Resources" in data:
+        for resource in data["Resources"]:
+            if "EndPoint" in resource:
+                if resource["Name"] == "Sentinel Hub":
+                    handle_SH_endpoint(config, resource, data, catalog)
+                elif resource["Name"] == "Sentinel Hub WMS":
+                    handle_SH_WMS_endpoint(config, resource, data, catalog)
+                elif resource["Name"] == "GeoDB":
+                    collection = handle_GeoDB_endpoint(config, resource, data, catalog)
+                    add_to_catalog(collection, catalog, resource, data)
+                elif resource["Name"] == "VEDA":
+                    handle_VEDA_endpoint(config, resource, data, catalog)
+                elif resource["Name"] == "xcube":
+                    handle_xcube_endpoint(config, resource, data, catalog)
+                elif resource["Name"] == "WMS":
+                    handle_WMS_endpoint(config, resource, data, catalog)
+                elif resource["Name"] == "GeoDB Vector Tiles":
+                    handle_GeoDB_Tiles_endpoint(config, resource, data, catalog)
+                elif resource["Name"] == "Collection-only":
+                    handle_collection_only(config, resource, data, catalog)
+                else:
+                    raise ValueError("Type of Resource is not supported")
+    elif "Subcollections" in data:
+        # if no endpoint is specified we check for a definition of subcollections
+        parent_collection, _ = get_or_create_collection(catalog, data["Name"], data, config)
+
+        locations = []
+        countries = []
+        for sub_coll_def in data["Subcollections"]:
+            # The subcollection only has data on one location, which is defined for the entire collection
+            if "Name" in sub_coll_def and "Point" in sub_coll_def:
+                locations.append(sub_coll_def["Name"])
+                if isinstance(sub_coll_def["Country"], list):
+                    countries.extend(sub_coll_def["Country"])
+                else:
+                    countries.append(sub_coll_def["Country"])
+                process_collection_file(config, "../collections/%s.yaml"%(sub_coll_def["Collection"]), parent_collection)
+                # find the link in the parent collection to update its metadata
+                for link in parent_collection.links:
+                    if link.rel == "child" and "id" in link.extra_fields and link.extra_fields["id"] == sub_coll_def["Identifier"]:
+                        latlng = "%s,%s"%(sub_coll_def["Point"][1], sub_coll_def["Point"][0])
+                        link.extra_fields["id"] = sub_coll_def["Identifier"]
+                        link.extra_fields["latlng"] = latlng
+                        link.extra_fields["name"] = sub_coll_def["Name"]
+                # Update title of collection to use location name
+                sub_collection = parent_collection.get_child(id=sub_coll_def["Identifier"])
+                if sub_collection:
+                    sub_collection.title = sub_coll_def["Name"]
+            # The subcollection has multiple locations, which need to be extracted and elevated to parent collection level
             else:
-                raise ValueError("Type of Resource is not supported")
+                # create a temporary catalog to hold the collection
+                tmp_catalog = Catalog(id="tmp_catalog", description="temp catalog placeholder")
+                process_collection_file(config, "../collections/%s.yaml"%(sub_coll_def["Collection"]), tmp_catalog)
+                links = tmp_catalog.get_child(sub_coll_def["Identifier"]).get_links()
+                for link in links:
+                    # extract summary information
+                    if "city" in link.extra_fields:
+                        locations.append(link.extra_fields["city"])
+                    if "country" in link.extra_fields:
+                        if isinstance(link.extra_fields["country"], list):
+                            countries.extend(link.extra_fields["country"])
+                        else:
+                            countries.append(link.extra_fields["country"])
+
+                parent_collection.add_links(links)
+
+        add_collection_information(config, parent_collection, data)
+        parent_collection.update_extent_from_items()
+        # Add bbox extents from children
+        for c_child in parent_collection.get_children():
+            parent_collection.extent.spatial.bboxes.append(
+                c_child.extent.spatial.bboxes[0]
+            )
+        # Fill summaries for locations
+        parent_collection.summaries = Summaries({
+            "cities": list(set(locations)),
+            "countries": list(set(countries)),
+        })
+        add_to_catalog(parent_collection, catalog, None, data)
 
 
 def handle_collection_only(config, endpoint, data, catalog):
@@ -170,9 +234,57 @@ def handle_SH_endpoint(config, endpoint, data, catalog):
     token = get_SH_token()
     headers = {"Authorization": "Bearer %s"%token}
     endpoint["EndPoint"] = "https://services.sentinel-hub.com/api/v1/catalog/1.0.0/"
-    endpoint["CollectionId"] = endpoint["Type"] + "-" + endpoint["CollectionId"]
+    # Overwrite collection id with type, such as ZARR or BYOC
+    if "Type" in endpoint:
+        endpoint["CollectionId"] = endpoint["Type"] + "-" + endpoint["CollectionId"]
     handle_STAC_based_endpoint(config, endpoint, data, catalog, headers)
 
+def handle_SH_WMS_endpoint(config, endpoint, data, catalog):
+    # create collection and subcollections (based on locations)
+    if "Locations" in data:
+        root_collection, _ = get_or_create_collection(catalog, data["Name"], data, config, endpoint)
+        for location in data["Locations"]:
+            # create and populate location collections based on times
+            # TODO: Should we add some new description per location?
+            location_config = {
+                "Title": location["Name"],
+                "Description": "",
+            }
+            collection, _ = get_or_create_collection(
+                catalog, location["Identifier"], location_config, config, endpoint
+            )
+            collection.extra_fields["endpointtype"] = endpoint["Name"]
+            for time in location["Times"]:
+                item = Item(
+                    id=time,
+                    bbox=location["Bbox"],
+                    properties={},
+                    geometry=None,
+                    datetime=parser.isoparse(time),
+                )
+                item_link = collection.add_item(item)
+                item_link.extra_fields["datetime"] = time
+
+            link = root_collection.add_child(collection)
+            # bubble up information we want to the link
+            latlng = "%s,%s"%(location["Point"][1], location["Point"][0])
+            link.extra_fields["id"] = location["Identifier"]
+            link.extra_fields["latlng"] = latlng
+            link.extra_fields["country"] = location["Country"]
+            link.extra_fields["city"] = location["Name"]
+            collection.update_extent_from_items()
+            add_visualization_info(collection, data, endpoint)
+
+        root_collection.update_extent_from_items()
+        # Add bbox extents from children
+        for c_child in root_collection.get_children():
+            root_collection.extent.spatial.bboxes.append(
+                c_child.extent.spatial.bboxes[0]
+            )
+        add_to_catalog(root_collection, catalog, endpoint, data)
+        return root_collection
+
 def handle_VEDA_endpoint(config, endpoint, data, catalog):
     handle_STAC_based_endpoint(config, endpoint, data, catalog)
 
@@ -188,21 +300,21 @@ def handle_xcube_endpoint(config, endpoint, data, catalog):
     add_to_catalog(root_collection, catalog, endpoint, data)
 
 
-def get_or_create_collection(catalog, collection_id, data, config, endpoint):
+def get_or_create_collection(catalog, collection_id, data, config, endpoint=None):
     # Check if collection already in catalog
     for collection in catalog.get_collections():
         if collection.id == collection_id:
             return collection, []
     # If none found create a new one
     spatial_extent = [-180.0, -90.0, 180.0, 90.0]
-    if endpoint.get("OverwriteBBox"):
+    if endpoint and endpoint.get("OverwriteBBox"):
         spatial_extent = endpoint.get("OverwriteBBox")
     spatial_extent = SpatialExtent([
         spatial_extent,
     ])
     times = []
     temporal_extent = TemporalExtent([[datetime.now(), None]])
-    if endpoint.get("Type") == "OverwriteTimes":
+    if endpoint and endpoint.get("Type") == "OverwriteTimes":
+        if endpoint.get("Times"):
             times = endpoint.get("Times")
             times_datetimes = sorted([parser.isoparse(time) for time in times])
@@ -264,8 +376,12 @@ def add_to_catalog(collection, catalog, endpoint, data):
         return
 
     link = catalog.add_child(collection)
-    # bubble fields we want to have up to collection link
-    if endpoint:
+    # bubble fields we want to have up to the collection link and add them to the collection
+    if endpoint and "Type" in endpoint:
+        collection.extra_fields["endpointtype"] = "%s_%s"%(endpoint["Name"], endpoint["Type"])
+        link.extra_fields["endpointtype"] = "%s_%s"%(endpoint["Name"], endpoint["Type"])
+    elif endpoint:
+        collection.extra_fields["endpointtype"] = endpoint["Name"]
         link.extra_fields["endpointtype"] = endpoint["Name"]
     # Disabling bubbling up of description as now it is considered to be
     # used as markdown loading would increase the catalog size unnecessarily
@@ -274,12 +390,13 @@ def add_to_catalog(collection, catalog, endpoint, data):
         link.extra_fields["subtitle"] = data["Subtitle"]
     link.extra_fields["title"] = collection.title
     link.extra_fields["code"] = data["EodashIdentifier"]
+    link.extra_fields["id"] = data["Name"]
     link.extra_fields["themes"] = data["Themes"]
     # Check for summaries and bubble up info
     if collection.summaries.lists:
         for sum in collection.summaries.lists:
             link.extra_fields[sum] = collection.summaries.lists[sum]
-    if "Locations" in data:
+    if "Locations" in data or "Subcollections" in data:
         link.extra_fields["locations"] = True
     if "Tags" in data:
         link.extra_fields["tags"] = data["Tags"]
@@ -370,16 +487,30 @@ def handle_STAC_based_endpoint(config, endpoint, data, catalog, headers=None):
     if "Locations" in data:
         root_collection, _ = get_or_create_collection(catalog, data["Name"], data, config, endpoint)
         for location in data["Locations"]:
-            collection = process_STACAPI_Endpoint(
-                config=config,
-                endpoint=endpoint,
-                data=data,
-                catalog=catalog,
-                headers=headers,
-                bbox=",".join(map(str,location["Bbox"])),
-                root_collection=root_collection,
-            )
+            collection = process_STACAPI_Endpoint(
+                config=config,
+                endpoint=endpoint,
+                data=data,
+                catalog=catalog,
+                headers=headers,
+                bbox=",".join(map(str,location["Bbox"])),
+                # an optional FilterDates list restricts which dates are harvested
+                filter_dates=location.get("FilterDates"),
+                root_collection=root_collection,
+            )
             # Update identifier to use location as well as title
+            # TODO: should we use the name as id? it provides much more
+            # information in the clients
             collection.id = location["Identifier"]
             collection.title = location["Name"],
             # See if description should be overwritten
@@ -395,6 +526,10 @@ def handle_STAC_based_endpoint(config, endpoint, data, catalog, headers=None):
                 link.extra_fields["latlng"] = latlng
                 link.extra_fields["name"] = location["Name"]
             add_example_info(collection, data, endpoint, config)
+            if "OverwriteBBox" in location:
+                collection.extent.spatial = SpatialExtent([
+                    location["OverwriteBBox"],
+                ])
         root_collection.update_extent_from_items()
         # Add bbox extents from children
         for c_child in root_collection.get_children():
@@ -403,7 +538,7 @@ def handle_STAC_based_endpoint(config, endpoint, data, catalog, headers=None):
             )
     else:
         if "Bbox" in endpoint:
-            root_collection =  process_STACAPI_Endpoint(
+            root_collection = process_STACAPI_Endpoint(
                 config=config,
                 endpoint=endpoint,
                 data=data,
@@ -472,7 +607,7 @@ def add_example_info(stac_object, data, endpoint, config):
             },
         )
     )
-def generate_veda_link(endpoint, file_url):
+def generate_veda_cog_link(endpoint, file_url):
     bidx = ""
     if "Bidx" in endpoint:
         # Check if an array was provided
@@ -511,9 +646,31 @@ def generate_veda_cog_link(endpoint, file_url):
     )
     return target_url
 
+def generate_veda_tiles_link(endpoint, item):
+    # build the query parameters for the VEDA tiles endpoint
+    collection = "collection=%s"%endpoint["CollectionId"]
+    assets = ""
+    for asset in endpoint["Assets"]:
+        assets += "&assets=%s"%asset
+    color_formula = ""
+    if "ColorFormula" in endpoint:
+        color_formula = "&color_formula=%s"%endpoint["ColorFormula"]
+    no_data = ""
+    if "NoData" in endpoint:
+        no_data = "&no_data=%s"%endpoint["NoData"]
+    # the item parameter is optional
+    if item:
+        item = "&item=%s"%(item)
+    else:
+        item = ""
+    target_url = "https://staging-raster.delta-backend.com/stac/tiles/WebMercatorQuad/{z}/{x}/{y}?%s%s%s%s%s"%(
+        collection,
+        item,
+        assets,
+        color_formula,
+        no_data,
+    )
+    return target_url
+
 def add_visualization_info(stac_object, data, endpoint, file_url=None, time=None, styles=None):
     # add extension reference
-    if endpoint["Name"] == "Sentinel Hub":
+    if endpoint["Name"] == "Sentinel Hub" or endpoint["Name"] == "Sentinel Hub WMS":
         instanceId = os.getenv("SH_INSTANCE_ID")
         if "InstanceId" in endpoint:
             instanceId = endpoint["InstanceId"]
@@ -579,8 +736,15 @@ def add_visualization_info(stac_object, data, endpoint, file_url=None, time=None, styles=None):
         )
         pass
     elif endpoint["Name"] == "VEDA":
-        if endpoint["Type"] == "cog":
-            target_url = generate_veda_link(endpoint, file_url)
+        target_url = None
+        if endpoint["Type"] == "cog":
+            target_url = generate_veda_cog_link(endpoint, file_url)
+        elif endpoint["Type"] == "tiles":
+            # collection id, optional item id and asset parameters are baked into the tiles URL
+            target_url = generate_veda_tiles_link(endpoint, file_url)
+        if target_url:
             stac_object.add_link(
                 Link(
                     rel="xyz",
                     target=target_url,
                     media_type="image/png",
                     title=data["Name"],
                 )
             )
-        pass
     elif endpoint["Name"] == "GeoDB Vector Tiles":
         #`${geoserverUrl}${config.layerName}@EPSG%3A${projString}@pbf/{z}/{x}/{-y}.pbf`,
         # 'geodb_debd884d-92f9-4979-87b6-eadef1139394:GTIF_AT_Gemeinden_3857'
@@ -617,8 +780,13 @@ def add_visualization_info(stac_object, data, endpoint, file_url=None, time=None, styles=None):
     else:
         print("Visualization endpoint not supported")
 
-def process_STACAPI_Endpoint(config, endpoint, data, catalog, headers={}, bbox=None, root_collection=None):
-    collection, _ = get_or_create_collection(catalog, endpoint["CollectionId"], data, config, endpoint)
+def process_STACAPI_Endpoint(
+        config, endpoint, data, catalog, headers={}, bbox=None,
+        root_collection=None, filter_dates=None
+    ):
+    collection, _ = get_or_create_collection(
+        catalog, endpoint["CollectionId"], data, config, endpoint
+    )
     add_visualization_info(collection, data, endpoint)
 
     api = Client.open(endpoint["EndPoint"], headers=headers)
@@ -629,7 +797,18 @@ def process_STACAPI_Endpoint(
         bbox=bbox,
         datetime=['1900-01-01T00:00:00Z', '3000-01-01T00:00:00Z'],
     )
+    # We keep track of potentially duplicate times in this dict
+    added_times = {}
     for item in results.items():
+        item_datetime = item.get_datetime()
+        if item_datetime is not None:
+            iso_date = item_datetime.isoformat()[:10]
+            # if FilterDates were specified, skip dates not listed in the config
+            if filter_dates and iso_date not in filter_dates:
+                continue
+            if iso_date in added_times:
+                continue
+            added_times[iso_date] = True
         link = collection.add_item(item)
         if(options.tn):
             if "cog_default" in item.assets:
@@ -637,7 +816,10 @@ def process_STACAPI_Endpoint(
             else:
                 generate_thumbnail(item, data, endpoint)
         # Check if we can create visualization link
-        if "cog_default" in item.assets:
+        if "Assets" in endpoint:
+            add_visualization_info(item, data, endpoint, item.id)
+            link.extra_fields["item"] = item.id
+        elif "cog_default" in item.assets:
             add_visualization_info(item, data, endpoint, item.assets["cog_default"].href)
             link.extra_fields["cog_href"] = item.assets["cog_default"].href
         # If a root collection exists we point back to it from the item
@@ -645,10 +827,14 @@ def process_STACAPI_Endpoint(
         if root_collection != None:
             item.set_collection(root_collection)
 
         # bubble up information we want to the link
-        item_datetime = item.get_datetime()
         # it is possible for datetime to be null, if it is start and end datetime have to exist
         if item_datetime:
-            link.extra_fields["datetime"] = item_datetime.isoformat()[:-6] + 'Z'
+            iso_time = item_datetime.isoformat()[:-6] + 'Z'
+            if endpoint["Name"] == "Sentinel Hub":
+                # for Sentinel Hub we only save the date (no time)
+                link.extra_fields["datetime"] = iso_date
+            else:
+                link.extra_fields["datetime"] = iso_time
         else:
             link.extra_fields["start_datetime"] = item.properties["start_datetime"]
             link.extra_fields["end_datetime"] = item.properties["end_datetime"]
diff --git a/generators/sh_endpoint.py b/generators/sh_endpoint.py
index ccf3d847..6e985ea2 100644
--- a/generators/sh_endpoint.py
+++ b/generators/sh_endpoint.py
@@ -1,12 +1,10 @@
 import os
 from oauthlib.oauth2 import BackendApplicationClient
 from requests_oauthlib import OAuth2Session
-from dotenv import load_dotenv
 
-def get_SH_token():
-    load_dotenv()
+def get_SH_token():
+    # the .env definitions are now loaded once via load_dotenv() in generate_indicators.py
     # Your client credentials
     client_id = os.getenv('SH_CLIENT_ID')
     client_secret = os.getenv('SH_CLIENT_SECRET')