Skip to content

Commit

Permalink
Rename the stacking to burn_mapping
Browse files Browse the repository at this point in the history
  • Loading branch information
supermarkion committed Nov 19, 2024
1 parent 4092e5b commit af192cc
Show file tree
Hide file tree
Showing 5 changed files with 23 additions and 22 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -16,18 +16,18 @@ input_products:
- nbart_swir_2

product:
name: ga_ls8c_nbart_stacking_cyear_3
short_name: ga_ls8c_nbart_stacking_cyear_3
name: ga_ls8c_nbart_burn_mapping_cyear_3
short_name: ga_ls8c_nbart_burn_mapping_cyear_3
version: 3.0.0
product_family: stacking
product_family: burn_mapping
bands:
- any
- all
- majority
- rf
- bc
- rbr
- severity
- uncertainty
inherit_skip_properties:
- eo:cloud_cover
- fmask:clear
Expand Down Expand Up @@ -70,7 +70,7 @@ product:
band: any
mapped_bands: true
mpl_ramp: Oranges
name: ga_ls8c_nbart_stacking_cyear_3
name: ga_ls8c_nbart_burn_mapping_cyear_3
needed_bands:
- any
range:
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
name: ga_ls8c_nbart_stacking_cyear_3
description: Geoscience Australia Landsat Nadir BRDF Adjusted Reflectance Terrain, Landsat 8 Stacking 4 Calendar Years Collection 3
name: ga_ls8c_nbart_burn_mapping_cyear_3
description: Geoscience Australia Landsat Nadir BRDF Adjusted Reflectance Terrain, Landsat 8 Burn Mapping 4 Calendar Years Collection 3
license: CC-BY-4.0
metadata_type: eo3

Expand All @@ -8,7 +8,7 @@ metadata:
odc:file_format: GeoTIFF
odc:product_family: burncube
product:
name: ga_ls8c_nbart_stacking_cyear_3
name: ga_ls8c_nbart_burn_mapping_cyear_3

measurements:
- name: any
Expand All @@ -35,7 +35,7 @@ measurements:
dtype: float64
nodata: -999
units: '1'
- name: severity
- name: uncertainty
dtype: float64
nodata: -999
units: '1'
Expand Down
4 changes: 2 additions & 2 deletions dea_burn_cube/task.py
Original file line number Diff line number Diff line change
Expand Up @@ -559,8 +559,8 @@ def upload_processing_log(self):
"output": self.s3_file_uri,
"task_table": self.task_table,
"DEA Burn Cube": version,
#"summary_datasets": [e.metadata_doc["label"] for e in self.geomed_datasets]
#+ [e.metadata_doc["label"] for e in self.wofs_datasets],
# "summary_datasets": [e.metadata_doc["label"] for e in self.geomed_datasets]
# + [e.metadata_doc["label"] for e in self.wofs_datasets],
"ard_datasets": [str(e.id) for e in self.ref_ard_datasets]
+ [str(e.id) for e in self.mapping_ard_datasets],
}
Expand Down
4 changes: 2 additions & 2 deletions nbic_project/DEA_Stacking_method.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,7 @@
"def process_files(base_product_file, match_products):\n",
" \"\"\"\n",
" Process a base product file, search for corresponding files from the match_products,\n",
" and perform weighted stacking, returning the combined result.\n",
" and perform weighted burn mapping, returning the combined result.\n",
" \"\"\"\n",
" # Extract pattern (e.g., x and y coordinates) from the base file\n",
" pattern = extract_info(base_product_file)\n",
Expand Down Expand Up @@ -153,7 +153,7 @@
" # Write the result to a COG file\n",
" write_cog(\n",
" geo_im=sum_summary,\n",
" fname=\"dea_stacking_\" + pattern.replace(\"/\", \"\") + \"_2020.tif\",\n",
" fname=\"dea_burn_mapping_\" + pattern.replace(\"/\", \"\") + \"_2020.tif\",\n",
" overwrite=True\n",
" )"
]
Expand Down
19 changes: 10 additions & 9 deletions scripts/stacking_processing_script.py
Original file line number Diff line number Diff line change
Expand Up @@ -162,16 +162,16 @@ def generate_binary_mask(combined_data, condition):
"-p",
type=str,
required=True,
help="REQUIRED. URL to the Stacking process configuration file (YAML format).",
help="REQUIRED. URL to the burn mapping process configuration file (YAML format).",
)
@click.option(
"--overwrite/--no-overwrite",
default=False,
help="Whether to rerun scenes that have already been processed.",
)
def stacking_processing(task_id, region_id, process_cfg_url, overwrite):
def burn_mapping_processing(task_id, region_id, process_cfg_url, overwrite):
"""
Load and process satellite imagery data to generate a stacking result saved as a GeoTIFF file.
Load and process satellite imagery data to generate a burn mapping result saved as a GeoTIFF file.
Parameters:
- task_id (str): The unique identifier of the task.
Expand Down Expand Up @@ -203,26 +203,27 @@ def stacking_processing(task_id, region_id, process_cfg_url, overwrite):
)
raster_data = raster_result["raster_data"]

severity = (
uncertainty = (
raster_result["combined_data"].sum(dim="variable")
/ raster_result["combined_data"].sizes["variable"]
)

fname = "severity.tif"
fname = "uncertainty.tif"

# Write the result to a Cloud Optimized GeoTIFF (COG) file
write_cog(geo_im=severity, fname=fname, overwrite=overwrite, nodata=-999)
write_cog(geo_im=uncertainty, fname=fname, overwrite=overwrite, nodata=-999)

# Activate AWS credentials from the service account attached
helper.get_and_set_aws_credentials()

# Construct the S3 file URI for the output file
s3_file_uri = f"s3://{processing_task.s3_bucket_name}/{processing_task.s3_object_key}_{fname}"
s3_file_uri = (
f"s3://{processing_task.s3_bucket_name}/{processing_task.s3_object_key}_{fname}"
)
# Upload the output GeoTIFF to the specified S3 location
bc_io.upload_object_to_s3(fname, s3_file_uri)
logger.info(f"Uploaded to S3: {s3_file_uri}")


for match_product_short_name, data_info in raster_data.items():
# Extract the raster data (assuming it's stored under the "data" key in the dictionary)
geo_im = data_info["data"]
Expand Down Expand Up @@ -269,4 +270,4 @@ def stacking_processing(task_id, region_id, process_cfg_url, overwrite):


if __name__ == "__main__":
stacking_processing()
burn_mapping_processing()

0 comments on commit af192cc

Please sign in to comment.