Skip to content

Commit

Permalink
Intelligent pc_align spawning, reporting, and writing out
Browse files Browse the repository at this point in the history
  • Loading branch information
bpurinton committed Nov 26, 2024
1 parent 45e3041 commit 5d2be42
Show file tree
Hide file tree
Showing 3 changed files with 222 additions and 97 deletions.
91 changes: 44 additions & 47 deletions asp_plot/alignment.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import logging
import os
import re

import numpy as np
from osgeo import gdal, osr
Expand All @@ -15,7 +16,6 @@ def __init__(
self,
directory,
dem_fn,
# aligned_dem_fn=None,
**kwargs,
):
self.directory = directory
Expand All @@ -24,16 +24,6 @@ def __init__(
raise ValueError(f"DEM file not found: {dem_fn}")
self.dem_fn = dem_fn

# if aligned_dem_fn is not None and not os.path.exists(aligned_dem_fn):
# raise ValueError(f"Aligned DEM file not found: {aligned_dem_fn}")
# self.aligned_dem_fn = aligned_dem_fn

# TODO: move all pc_align steps and dem translations to separate class
# call this alignment for different processing levels, with some minimum number
# of required points (a ~500 point parameter).
# Report all translation results to user via a dictionary and printing.
# If translation agrees within some tolerance (a 5% or 10% parameter) in XYZ components,
# apply the translation of the points found closest in time to the DEM.
def pc_align_dem_to_atl06sr(
self,
max_displacement=20,
Expand Down Expand Up @@ -78,50 +68,41 @@ def pc_align_report(self, output_prefix="pc_align/pc_align"):
with open(pc_align_log, "r") as file:
content = file.readlines()

report = ""
report = {}
for line in content:
if "Input: error percentile of smallest errors (meters):" in line:
report_line = line.split("[ console ] : ")[1]
report += report_line
if "Input: mean of smallest errors (meters):" in line:
report_line = line.split("[ console ] : ")[1]
report += report_line
if "Output: error percentile of smallest errors (meters):" in line:
report_line = line.split("[ console ] : ")[1]
report += report_line
if "Output: mean of smallest errors (meters):" in line:
report_line = line.split("[ console ] : ")[1]
report += report_line
if "Input: error percentile" in line:
values = re.findall(r"(?:\d+%: )(\d+\.\d+)", line)
percentile_dict = {
"p16_beg": float(values[0]),
"p50_beg": float(values[1]),
"p84_beg": float(values[2]),
}
report = report | percentile_dict
if "Output: error percentile" in line:
values = re.findall(r"(?:\d+%: )(\d+\.\d+)", line)
percentile_dict = {
"p16_end": float(values[0]),
"p50_end": float(values[1]),
"p84_end": float(values[2]),
}
report = report | percentile_dict
if "Translation vector (Cartesian, meters):" in line:
report_line = line.split("[ console ] : ")[1]
report += report_line
ecef_shift = np.genfromtxt(
[line.split("Vector3")[1][1:-2]], delimiter=","
)
report += f"\nECEF shift: {ecef_shift}\n"
xyz_shift_dict = {
"x_shift": ecef_shift[0],
"y_shift": ecef_shift[1],
"z_shift": ecef_shift[2],
}
report = report | xyz_shift_dict
if "Translation vector magnitude (meters):" in line:
report_line = line.split("[ console ] : ")[1]
report += report_line
magnitude = re.findall(r"magnitude \(meters\): (\d+\.\d+)", line)[0]
report["translation_magnitude"] = float(magnitude)

return report

def apply_dem_translation(self, output_prefix="pc_align/pc_align", inv_trans=True):
def get_proj_shift(src_c, src_shift, s_srs, t_srs, inv_trans=True):
    """Express a coordinate shift from the source CRS in the target CRS.

    Parameters
    ----------
    src_c : array-like
        Reference center coordinate in the source CRS.
    src_shift : array-like
        Translation vector in source-CRS units.
    s_srs, t_srs : osr.SpatialReference
        Source and target spatial reference systems.
    inv_trans : bool, optional
        If True, negate the transformed shift (useful when the upstream
        tool reports the inverse transform). Default True.

    Returns
    -------
    numpy.ndarray or array-like
        The shift expressed in the target CRS, rounded to 3 decimals.
    """
    if s_srs.IsSame(t_srs):
        # Same CRS on both ends: the shift carries over unchanged.
        proj_shift = src_shift
    else:
        # Transform the center and the shifted center separately, then
        # difference them to obtain the shift in target-CRS coordinates.
        transform = osr.CoordinateTransformation(s_srs, t_srs)
        center_proj = np.array(transform.TransformPoint(*src_c))
        shifted_proj = np.array(transform.TransformPoint(*(src_c + src_shift)))
        if inv_trans:
            proj_shift = center_proj - shifted_proj
        else:
            proj_shift = shifted_proj - center_proj
    # Reduce unnecessary precision
    proj_shift = np.around(proj_shift, 3)
    return proj_shift

def apply_dem_translation(self, output_prefix="pc_align/pc_align"):
pc_align_log = glob_file(self.directory, f"{output_prefix}-log-pc_align*.txt")

src = Raster(self.dem_fn)
Expand Down Expand Up @@ -171,7 +152,7 @@ def get_proj_shift(src_c, src_shift, s_srs, t_srs, inv_trans=True):
# Determine shift in original dataset coords
t_srs = osr.SpatialReference()
t_srs.ImportFromWkt(src.ds.crs.to_wkt())
proj_shift = get_proj_shift(src_c, src_shift, s_srs, t_srs, inv_trans)
proj_shift = self.get_proj_shift(src_c, src_shift, s_srs, t_srs, inv_trans=True)

aligned_dem_fn = self.dem_fn.replace(".tif", "_pc_align_translated.tif")
print(f"\nWriting out: {aligned_dem_fn}\n")
Expand All @@ -194,6 +175,22 @@ def get_proj_shift(src_c, src_shift, s_srs, t_srs, inv_trans=True):

return aligned_dem_fn

def get_proj_shift(self, src_c, src_shift, s_srs, t_srs, inv_trans=True):
    """Express a coordinate shift from the source CRS in the target CRS.

    Parameters
    ----------
    src_c : array-like
        Reference center coordinate in the source CRS.
    src_shift : array-like
        Translation vector in source-CRS units.
    s_srs, t_srs : osr.SpatialReference
        Source and target spatial reference systems.
    inv_trans : bool, optional
        If True, negate the transformed shift (useful when the upstream
        tool reports the inverse transform). Default True.

    Returns
    -------
    numpy.ndarray or array-like
        The shift expressed in the target CRS, rounded to 3 decimals.
    """
    if s_srs.IsSame(t_srs):
        # Identical CRS: no reprojection needed, the shift applies as-is.
        proj_shift = src_shift
    else:
        # Reproject the center point and the shifted point independently;
        # their difference is the shift in target-CRS coordinates.
        transform = osr.CoordinateTransformation(s_srs, t_srs)
        center_proj = np.array(transform.TransformPoint(*src_c))
        shifted_proj = np.array(transform.TransformPoint(*(src_c + src_shift)))
        if inv_trans:
            proj_shift = center_proj - shifted_proj
        else:
            proj_shift = shifted_proj - center_proj
    # Reduce unnecessary precision
    proj_shift = np.around(proj_shift, 3)
    return proj_shift

# DEPRECATED: Very slow, better to use apply_dem_translation().
# def generate_translated_dem(self, pc_align_output, dem_out_fn):
# """
Expand Down
49 changes: 47 additions & 2 deletions asp_plot/altimetry.py
Original file line number Diff line number Diff line change
Expand Up @@ -208,7 +208,6 @@ def filter_esa_worldcover(self, filter_out="water"):
mask = ~atl06sr["esa_worldcover.value"].isin(values)
self.atl06sr_processing_levels_filtered[key] = atl06sr[mask]

# TODO: for pc_align: spawn all four, see if they agree and provide similar translations
def predefined_temporal_filter_atl06sr(self, date=None):
if date is None:
date = StereopairMetadataParser(self.directory).get_pair_dict()["cdate"]
Expand Down Expand Up @@ -285,7 +284,9 @@ def alignment_report(
self,
processing_level="high_confidence",
minimum_points=500,
agreement_threshold=0.25,
write_out_aligned_dem=False,
min_translation_threshold=0.1,
key_for_aligned_dem="high_confidence",
):
filtered_keys = [
Expand Down Expand Up @@ -318,18 +319,62 @@ def alignment_report(
output_prefix=f"pc_align/pc_align_{key}",
)

report_data = []
for key in filtered_keys:
report = alignment.pc_align_report(output_prefix=f"pc_align/pc_align_{key}")
print(f"\n{key} alignment report:\n{report}\n")
report_data.append({"key": key} | report)
alignment_report_df = pd.DataFrame(report_data)

gsd = Raster(self.dem_fn).get_gsd()
if (
alignment_report_df["translation_magnitude"].mean()
< min_translation_threshold * gsd
):
write_out_aligned_dem = False
print(
f"\nTranslation magnitude is less than {min_translation_threshold*100}% of the DEM GSD. Skipping writing out aligned DEM.\n"
)

if write_out_aligned_dem:
# Calculate ranges and mean for each shift component
x_range = (
alignment_report_df["x_shift"].max()
- alignment_report_df["x_shift"].min()
)
y_range = (
alignment_report_df["y_shift"].max()
- alignment_report_df["y_shift"].min()
)
z_range = (
alignment_report_df["z_shift"].max()
- alignment_report_df["z_shift"].min()
)
x_mean = alignment_report_df["x_shift"].mean()
y_mean = alignment_report_df["y_shift"].mean()
z_mean = alignment_report_df["z_shift"].mean()

# Check if range is more than X% of mean for any component
if (
x_range > abs(x_mean * agreement_threshold)
or y_range > abs(y_mean * agreement_threshold)
or z_range > abs(z_mean * agreement_threshold)
):
print(
f"\nWarning: Translation components vary by more than {agreement_threshold*100}% across temporal filters. The translation applied to the aligned DEM may be inaccurate.\n"
)
print(f"X shift range: {x_range:.3f} m (mean: {x_mean:.3f} m)")
print(f"Y shift range: {y_range:.3f} m (mean: {y_mean:.3f} m)")
print(f"Z shift range: {z_range:.3f} m (mean: {z_mean:.3f} m)")

self.aligned_dem_fn = alignment.apply_dem_translation(
output_prefix=f"pc_align/pc_align_{key_for_aligned_dem}",
)
print(
f"\nWrote out {key_for_aligned_dem} aligned DEM to {self.aligned_dem_fn}\n"
)

self.alignment_report_df = alignment_report_df

def plot_atl06sr_time_stamps(
self,
key="high_confidence",
Expand Down
Loading

0 comments on commit 5d2be42

Please sign in to comment.