Skip to content

Commit

Permalink
Fix various bugs introduced by style updates (#55)
Browse files Browse the repository at this point in the history
* test log-file

* use Path in downloader

* type not iterable

* fix spatiotemporal topology handling
  • Loading branch information
ninsbl authored Aug 6, 2024
1 parent d47979d commit b21914f
Show file tree
Hide file tree
Showing 4 changed files with 18 additions and 45 deletions.
3 changes: 2 additions & 1 deletion src/imagery/i.pytorch/pytorchlib/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -197,8 +197,9 @@ def predict_torch(data_cube, config_dict=None, device=None, dl_model=None):

def not_in_types(data_type, allowed_types):
"""Check if data_type is not an element of allowed_types"""

allowed_types = (
tuple(allowed_types) if isinstance(allowed_types, type) else allowed_types
(allowed_types,) if isinstance(allowed_types, type) else allowed_types
)
if data_type is None:
data_type_str = "'None'"
Expand Down
7 changes: 3 additions & 4 deletions src/temporal/t.import.hrsi/t.import.hrsi.py
Original file line number Diff line number Diff line change
Expand Up @@ -257,7 +257,7 @@ def __init__(
#: Attribute for storing a dictionary with the access token for downloading Copernicus Cryosphere data
self.token = {}
#: Attribute containing the directory to which data is downloaded or written
self.output_directory = output_directory
self.output_directory = Path(output_directory)
# Check if download directory is writable
check_permissions(self.output_directory, "Download")
#: Attribute containing the maximum number of retries for incomplete downloads (may happen if the connection is closed prematurely or an empty chunk of data is sent)
Expand Down Expand Up @@ -407,7 +407,6 @@ def print_search_info(self, shell_script_style):

def fetch_data(self, query_params, product_metadata):
"""Wrapper method to execute download in batches"""
check_permissions(self.output_directory, "Download")
# Minimize pageing
query_params["maxRecords"] = self.batch_size
batches_n = int(
Expand Down Expand Up @@ -569,7 +568,7 @@ def _download_and_import_data(self, download_urls):
if hrsi_file.endswith("xml")
][0]
zip_data = zip_file.read(hrsi_file_xml)
hrsi_file_path = Path(self.output_directory) / Path(hrsi_file_xml).name
hrsi_file_path = self.output_directory / Path(hrsi_file_xml).name
hrsi_file_path.write_bytes(zip_data)

# temporal extent is not consistently represented in the
Expand All @@ -593,7 +592,7 @@ def _download_and_import_data(self, download_urls):
continue

zip_data = zip_file.read(hrsi_file)
hrsi_file_path = Path(self.output_directory) / Path(hrsi_file).name
hrsi_file_path = self.output_directory / Path(hrsi_file).name
hrsi_file_path.write_bytes(zip_data)
map_name = legalize_name_string(hrsi_file_path.stem)
full_map_name = f"{map_name}@{self.gisenv['MAPSET']}"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,7 @@ def test_import_SWS(self):
test_product = "SARWetSnow"
self.assertModule(
"t.import.hrsi",
flags="f",
flags="fw",
product_type=test_product,
aoi="./data/aoi.geojson",
start_time="2023-04-05T00:00:00",
Expand Down
51 changes: 12 additions & 39 deletions src/temporal/t.rast.aggregate.patch/t.rast.aggregate.patch.py
Original file line number Diff line number Diff line change
Expand Up @@ -245,6 +245,7 @@ def patch_by_topology(
count = 0
current_mapset = get_current_mapset()

# Handle semantic labels (one granule_list per semantic label)
for semantic_label, raster_map_list in map_dict.items():
topo_builder = SpatioTemporalTopologyBuilder()
topo_builder.build(mapsA=granularity_list, mapsB=raster_map_list)
Expand All @@ -253,48 +254,20 @@ def patch_by_topology(
msgr.percent(count, len(granularity_list), 1)
count += 1

start_time = granule.temporal_extent.get_start_time()
end_time = granule.temporal_extent.get_end_time()
start_time, end_time = granule.get_temporal_extent_as_tuple()

aggregation_list = []

# Handle semantic labels (one granule per semantic label)
if "equal" in topo_list and granule.equal:
aggregation_list.extend(
[map_layer.get_name() for map_layer in granule.equal]
)
if "contains" in topo_list and granule.contains:
aggregation_list.extend(
[map_layer.get_name() for map_layer in granule.equal]
)
if "during" in topo_list and granule.during:
aggregation_list.extend(
[map_layer.get_name() for map_layer in granule.equal]
)
if "starts" in topo_list and granule.starts:
aggregation_list.extend(
[map_layer.get_name() for map_layer in granule.equal]
)
if "started" in topo_list and granule.started:
aggregation_list.extend(
[map_layer.get_name() for map_layer in granule.equal]
)
if "finishes" in topo_list and granule.finishes:
aggregation_list.extend(
[map_layer.get_name() for map_layer in granule.equal]
)
if "finished" in topo_list and granule.finished:
aggregation_list.extend(
[map_layer.get_name() for map_layer in granule.equal]
)
if "overlaps" in topo_list and granule.overlaps:
aggregation_list.extend(
[map_layer.get_name() for map_layer in granule.equal]
)
if "overlapped" in topo_list and granule.overlapped:
aggregation_list.extend(
[map_layer.get_name() for map_layer in granule.equal]
)
for topology in topo_list:
matching_objects = getattr(granule, topology)
# Check if any maps are temporally related to the granule with the given topology
if matching_objects:
aggregation_list.extend(
[map_layer.get_name() for map_layer in matching_objects]
)
# Reset Spatio-Temporal-Topology
granule.set_spatial_topology_build_false()
granule.set_temporal_topology_build_false()

if aggregation_list:
msgr.verbose(
Expand Down

0 comments on commit b21914f

Please sign in to comment.