diff --git a/src/imagery/i.sentinel/i.sentinel.coverage/i.sentinel.coverage.html b/src/imagery/i.sentinel/i.sentinel.coverage/i.sentinel.coverage.html
index 524a7d9978..1f39acd6f1 100644
--- a/src/imagery/i.sentinel/i.sentinel.coverage/i.sentinel.coverage.html
+++ b/src/imagery/i.sentinel/i.sentinel.coverage/i.sentinel.coverage.html
@@ -10,16 +10,13 @@

EXAMPLES

Check Sentinel-1 scenes by region, start and end time

-Note that only the last 12 months of Sentinel data are online available
-ESA Hub, older scenes are stored in the Long Term Archive (LTA) and
-cannot be retrieved immediately. The example is based on the North Carolina
-dataset:
+The example is based on the North Carolina dataset:
 # extract Durham (NC) county
 v.extract input=boundary_county output=county_durham where="NAME = 'DURHAM'"
 
-# simplify geometry (needed for ESA Hub)
+# simplify geometry
 v.generalize input=county_durham output=county_durham_dp1000 method=douglas threshold=1000
 
 # search for SLC scenes in certain period of time
@@ -29,16 +26,13 @@ Check Sentinel-1 scenes by region, start and end time
 

Check Sentinel-2 scenes by region, cloud coverage, start and end time

-Note that only the last 12 months of Sentinel data are online available
-ESA Hub, older scenes are stored in the Long Term Archive (LTA) and
-cannot be retrieved immediately. The example is based on the North Carolina
-dataset:
+The example is based on the North Carolina dataset:
 # extract Durham (NC) county
 v.extract input=boundary_county output=county_durham where="NAME = 'DURHAM'"
 
-# simplify geometry (needed for ESA Hub)
+# simplify geometry
 v.generalize input=county_durham output=county_durham_dp1000 method=douglas threshold=1000
 
 # search for L2A scenes with minimal clouds in certain period of time
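
For orientation, here is a minimal Python sketch of the documented Sentinel-2 workflow driven through grass.script. The region and settings option names of i.sentinel.coverage are assumptions not shown in this diff, and the dates, cloud limit and credentials file are illustrative only.

import grass.script as grass

# extract Durham (NC) county from the North Carolina dataset
grass.run_command(
    "v.extract",
    input="boundary_county",
    output="county_durham",
    where="NAME = 'DURHAM'",
)

# simplify the geometry before sending it to the search API
grass.run_command(
    "v.generalize",
    input="county_durham",
    output="county_durham_dp1000",
    method="douglas",
    threshold=1000,
)

# search for L2A scenes with minimal clouds in a certain period of time;
# "region" and "settings" are assumed option names, the values are examples
grass.run_command(
    "i.sentinel.coverage",
    settings="credentials.txt",
    region="county_durham_dp1000",
    producttype="S2MSI2A",
    clouds=10,
    start="2021-04-01",
    end="2021-04-30",
)

For the Sentinel-1 example above, producttype="SLC" would be used instead of the cloud filter.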
diff --git a/src/imagery/i.sentinel/i.sentinel.coverage/i.sentinel.coverage.py b/src/imagery/i.sentinel/i.sentinel.coverage/i.sentinel.coverage.py
old mode 100644
new mode 100755
index 41a2c45ec9..f6f8e6c01c
--- a/src/imagery/i.sentinel/i.sentinel.coverage/i.sentinel.coverage.py
+++ b/src/imagery/i.sentinel/i.sentinel.coverage/i.sentinel.coverage.py
@@ -56,7 +56,7 @@
 # % type: string
 # % description: Sentinel product type to filter
 # % required: no
-# % options: SLC,GRD,OCN,S2MSI1C,S2MSI2A,S2MSI2Ap
+# % options: SLC,GRD,OCN,S2MSI1C,S2MSI2A
 # % answer: S2MSI2A
 # % guisection: Filter
 # %end
@@ -96,7 +96,7 @@
 
 # %rules
 # % collective: start,end
-# % excludes: names,start,end,clouds,producttype
+# % excludes: names,start,end,clouds
 # %end
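
With producttype dropped from the excludes rule, the names option stays mutually exclusive with start, end and clouds, but can now be combined with a product type filter. A hedged usage sketch (the scene name, credentials file and region option name are illustrative assumptions):

import grass.script as grass

# query explicit scene names together with a product type filter;
# start/end/clouds must not be given when "names" is used
grass.run_command(
    "i.sentinel.coverage",
    settings="credentials.txt",     # illustrative credentials file
    region="county_durham_dp1000",  # assumed option name for the AOI vector
    producttype="S2MSI2A",
    names="S2A_MSIL2A_20210406T155911_N0300_R097_T17SPV_20210406T201132",  # illustrative
)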
 
 
@@ -218,7 +218,7 @@ def main():
         grass.fatal(_("Vector map <{}> not found").format(area))
     producttype = options["producttype"]
 
-    grass.message(_("Retrieving Sentinel footprints from ESA hub ..."))
+    grass.message(_("Retrieving Sentinel footprints from CDSE ..."))
     fps = "tmp_fps_%s" % str(os.getpid())
     rm_vectors.append(fps)
 
@@ -265,7 +265,7 @@ def main():
                 for i in range(0, len(resp)):
                     error_msg += resp[i].decode("utf-8")
                 grass.fatal(_("Error using i.sentinel.download: {}").format(error_msg))
-        name_list_tmp = [x.split(" ")[1] for x in s_list]
+        name_list_tmp = [x.split(" ")[0] for x in s_list]
     else:
         name_list_tmp = options["names"].split(",")
     name_list = []
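
A small standalone sketch of the field change in this hunk: the scene title is now taken from the first whitespace-separated field of each line returned by the i.sentinel.download listing. The listing line below is purely illustrative; only the [0] index comes from this diff.

# illustrative listing line; the real output format is not shown here
s_list = ["S2A_MSIL2A_20210406T155911 2021-04-06 5% S2MSI2A"]
name_list_tmp = [x.split(" ")[0] for x in s_list]
assert name_list_tmp == ["S2A_MSIL2A_20210406T155911"]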
@@ -279,13 +279,10 @@ def main():
             grass.run_command(
                 "i.sentinel.download",
                 settings=settings,
-                map=area,
-                start=start_day,
-                end=end_day,
                 footprints=fpi,
                 producttype=producttype,
-                query="identifier=%s" % name,
-                flags="lb",
+                id=name,
+                flags="l",
                 quiet=True,
             )
             name_list.append(name)
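
The same per-scene check pulled out of the loop as a hedged standalone call: each candidate is now listed (flags="l") by its id together with the footprint map and product type, instead of a map/start/end window plus an identifier query. All values below are illustrative.

import grass.script as grass

grass.run_command(
    "i.sentinel.download",
    settings="credentials.txt",       # illustrative credentials file
    footprints="tmp_fps_12345",       # illustrative footprint map name
    producttype="S2MSI2A",
    id="S2A_MSIL2A_20210406T155911",  # illustrative scene id
    flags="l",
    quiet=True,
)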
@@ -310,30 +307,30 @@ def main():
             grass.run_command(
                 "v.db.update",
                 map=temp_overlay,
-                column="a_identifier",
-                query_column='a_identifier || "+" ' + "|| b_identifier",
-                where="a_identifier NOT NULL AND " + "b_identifier NOT NULL",
+                column="a_title",
+                query_column='a_title || "+" ' + "|| b_title",
+                where="a_title NOT NULL AND " + "b_title NOT NULL",
                 quiet=True,
             )
             grass.run_command(
                 "v.db.update",
                 map=temp_overlay,
-                column="a_identifier",
-                query_column="b_identifier",
-                where="a_identifier IS NULL",
+                column="a_title",
+                query_column="b_title",
+                where="a_title IS NULL",
                 quiet=True,
             )
             grass.run_command(
                 "v.db.renamecolumn",
                 map=temp_overlay,
-                column="a_identifier,identifier",
+                column="a_title,title",
                 quiet=True,
             )
             columns_dict = grass.parse_command("v.info", map=temp_overlay, flags="c")
             drop_columns = [
                 col.split("|")[1]
                 for col in columns_dict
-                if col.split("|")[1] not in ["cat", "identifier"]
+                if col.split("|")[1] not in ["cat", "title"]
             ]
             grass.run_command(
                 "v.db.dropcolumn", map=temp_overlay, columns=drop_columns, quiet=True
@@ -366,7 +363,7 @@ def main():
     # list of scenes that actually intersect with bbox
     name_list_updated_tmp = list(
         grass.parse_command(
-            "v.db.select", map=fps_in_area, column="a_identifier", flags="c"
+            "v.db.select", map=fps_in_area, column="a_title", flags="c"
         ).keys()
     )
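
To close the loop, a hedged sketch of consuming the renamed title column at the end of the script: overlapping footprints carry combined values such as "TITLE_A+TITLE_B" from the v.db.update concatenation above, so splitting on "+" is one plausible way to recover individual scene titles (the map name is illustrative).

import grass.script as grass

titles = grass.parse_command(
    "v.db.select", map="tmp_fps_in_area", column="a_title", flags="c"
).keys()

scene_names = set()
for entry in titles:
    # combined entries look like "TITLE_A+TITLE_B"; split them apart
    scene_names.update(entry.split("+"))
print(sorted(scene_names))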