diff --git a/configs/stacking_processings/ga_ls8c_nbart_stacking_cyear_3.yaml b/configs/burn_mapping_processings/ga_ls8c_nbart_stacking_cyear_3.yaml similarity index 91% rename from configs/stacking_processings/ga_ls8c_nbart_stacking_cyear_3.yaml rename to configs/burn_mapping_processings/ga_ls8c_nbart_stacking_cyear_3.yaml index 05850ef..bc583e2 100644 --- a/configs/stacking_processings/ga_ls8c_nbart_stacking_cyear_3.yaml +++ b/configs/burn_mapping_processings/ga_ls8c_nbart_stacking_cyear_3.yaml @@ -16,10 +16,10 @@ input_products: - nbart_swir_2 product: - name: ga_ls8c_nbart_stacking_cyear_3 - short_name: ga_ls8c_nbart_stacking_cyear_3 + name: ga_ls8c_nbart_burn_mapping_cyear_3 + short_name: ga_ls8c_nbart_burn_mapping_cyear_3 version: 3.0.0 - product_family: stacking + product_family: burn_mapping bands: - any - all @@ -27,7 +27,7 @@ product: - rf - bc - rbr - - severity + - uncertainty inherit_skip_properties: - eo:cloud_cover - fmask:clear @@ -70,7 +70,7 @@ product: band: any mapped_bands: true mpl_ramp: Oranges - name: ga_ls8c_nbart_stacking_cyear_3 + name: ga_ls8c_nbart_burn_mapping_cyear_3 needed_bands: - any range: diff --git a/configs/stacking_products/ga_ls8c_nbart_stacking_cyear_3.odc-product.yaml b/configs/burn_mapping_products/ga_ls8c_nbart_stacking_cyear_3.odc-product.yaml similarity index 80% rename from configs/stacking_products/ga_ls8c_nbart_stacking_cyear_3.odc-product.yaml rename to configs/burn_mapping_products/ga_ls8c_nbart_stacking_cyear_3.odc-product.yaml index d76fb8e..77eea84 100644 --- a/configs/stacking_products/ga_ls8c_nbart_stacking_cyear_3.odc-product.yaml +++ b/configs/burn_mapping_products/ga_ls8c_nbart_stacking_cyear_3.odc-product.yaml @@ -1,5 +1,5 @@ -name: ga_ls8c_nbart_stacking_cyear_3 -description: Geoscience Australia Landsat Nadir BRDF Adjusted Reflectance Terrain, Landsat 8 Stacking 4 Calendar Years Collection 3 +name: ga_ls8c_nbart_burn_mapping_cyear_3 +description: Geoscience Australia Landsat Nadir BRDF Adjusted 
Reflectance Terrain, Landsat 8 Burn Mapping 4 Calendar Years Collection 3 license: CC-BY-4.0 metadata_type: eo3 @@ -8,7 +8,7 @@ metadata: odc:file_format: GeoTIFF odc:product_family: burncube product: - name: ga_ls8c_nbart_stacking_cyear_3 + name: ga_ls8c_nbart_burn_mapping_cyear_3 measurements: - name: any @@ -35,7 +35,7 @@ measurements: dtype: float64 nodata: -999 units: '1' - - name: severity + - name: uncertainty dtype: float64 nodata: -999 units: '1' diff --git a/dea_burn_cube/task.py b/dea_burn_cube/task.py index 4f6f4f1..b0c8fa3 100644 --- a/dea_burn_cube/task.py +++ b/dea_burn_cube/task.py @@ -559,8 +559,8 @@ def upload_processing_log(self): "output": self.s3_file_uri, "task_table": self.task_table, "DEA Burn Cube": version, - #"summary_datasets": [e.metadata_doc["label"] for e in self.geomed_datasets] - #+ [e.metadata_doc["label"] for e in self.wofs_datasets], + # "summary_datasets": [e.metadata_doc["label"] for e in self.geomed_datasets] + # + [e.metadata_doc["label"] for e in self.wofs_datasets], "ard_datasets": [str(e.id) for e in self.ref_ard_datasets] + [str(e.id) for e in self.mapping_ard_datasets], } diff --git a/nbic_project/DEA_Stacking_method.ipynb b/nbic_project/DEA_Stacking_method.ipynb index 0e91e62..6c85f7f 100644 --- a/nbic_project/DEA_Stacking_method.ipynb +++ b/nbic_project/DEA_Stacking_method.ipynb @@ -75,7 +75,7 @@ "def process_files(base_product_file, match_products):\n", " \"\"\"\n", " Process a base product file, search for corresponding files from the match_products,\n", - " and perform weighted stacking, returning the combined result.\n", + " and perform weighted burn_mapping, returning the combined result.\n", " \"\"\"\n", " # Extract pattern (e.g., x and y coordinates) from the base file\n", " pattern = extract_info(base_product_file)\n", @@ -153,7 +153,7 @@ " # Write the result to a COG file\n", " write_cog(\n", " geo_im=sum_summary,\n", - " fname=\"dea_stacking_\" + pattern.replace(\"/\", \"\") + \"_2020.tif\",\n", + " 
fname=\"dea_burn_mapping_\" + pattern.replace(\"/\", \"\") + \"_2020.tif\",\n", " overwrite=True\n", " )" ] diff --git a/scripts/stacking_processing_script.py b/scripts/stacking_processing_script.py index 27d5241..09e1c74 100644 --- a/scripts/stacking_processing_script.py +++ b/scripts/stacking_processing_script.py @@ -162,16 +162,16 @@ def generate_binary_mask(combined_data, condition): "-p", type=str, required=True, - help="REQUIRED. URL to the Stacking process configuration file (YAML format).", + help="REQUIRED. URL to the burn mapping process configuration file (YAML format).", ) @click.option( "--overwrite/--no-overwrite", default=False, help="Whether to rerun scenes that have already been processed.", ) -def stacking_processing(task_id, region_id, process_cfg_url, overwrite): +def burn_mapping_processing(task_id, region_id, process_cfg_url, overwrite): """ - Load and process satellite imagery data to generate a stacking result saved as a GeoTIFF file. + Load and process satellite imagery data to generate a burn mapping result saved as a GeoTIFF file. Parameters: - task_id (str): The unique identifier of the task. 
@@ -203,26 +203,27 @@ def stacking_processing(task_id, region_id, process_cfg_url, overwrite): ) raster_data = raster_result["raster_data"] - severity = ( + uncertainty = ( raster_result["combined_data"].sum(dim="variable") / raster_result["combined_data"].sizes["variable"] ) - fname = "severity.tif" + fname = "uncertainty.tif" # Write the result to a Cloud Optimized GeoTIFF (COG) file - write_cog(geo_im=severity, fname=fname, overwrite=overwrite, nodata=-999) + write_cog(geo_im=uncertainty, fname=fname, overwrite=overwrite, nodata=-999) # Activate AWS credentials from the service account attached helper.get_and_set_aws_credentials() # Construct the S3 file URI for the output file - s3_file_uri = f"s3://{processing_task.s3_bucket_name}/{processing_task.s3_object_key}_{fname}" + s3_file_uri = ( + f"s3://{processing_task.s3_bucket_name}/{processing_task.s3_object_key}_{fname}" + ) # Upload the output GeoTIFF to the specified S3 location bc_io.upload_object_to_s3(fname, s3_file_uri) logger.info(f"Uploaded to S3: {s3_file_uri}") - for match_product_short_name, data_info in raster_data.items(): # Extract the raster data (assuming it's stored under the "data" key in the dictionary) geo_im = data_info["data"] @@ -269,4 +270,4 @@ def stacking_processing(task_id, region_id, process_cfg_url, overwrite): if __name__ == "__main__": - stacking_processing() + burn_mapping_processing()