Patch for the "no polynomial" case in azimuth FM rate mismatch mitigation & handling bursts without burst polygons #134

Merged
23 changes: 20 additions & 3 deletions src/s1reader/s1_annotation.py
@@ -1180,13 +1180,15 @@ def from_polynomial_lists(cls,
fm_rate_coeff_burst_arr,
fm_rate_tau0_burst_vec) = cls.extract_polynomial_sequence(az_fm_rate_list,
sensing_start,
sensing_end)
sensing_end,
handle_out_of_range=True)

(dc_aztime_burst_vec,
dc_coeff_burst_arr,
dc_tau0_burst_vec) = cls.extract_polynomial_sequence(doppler_centroid_list,
sensing_start,
sensing_end)
sensing_end,
handle_out_of_range=True)

return cls(fm_rate_aztime_burst_vec, fm_rate_coeff_burst_arr, fm_rate_tau0_burst_vec,
dc_aztime_burst_vec, dc_coeff_burst_arr, dc_tau0_burst_vec)
@@ -1195,7 +1197,8 @@ def from_polynomial_lists(cls,
@classmethod
def extract_polynomial_sequence(cls, polynomial_list: list,
datetime_start: datetime.datetime,
datetime_end: datetime.datetime):
datetime_end: datetime.datetime,
handle_out_of_range=True):
'''
Scan `vec_azimuth_time` and find indices of the vector
that covers the period defined with
@@ -1246,6 +1249,20 @@ def extract_polynomial_sequence(cls, polynomial_list: list,

# Scale factor to convert range (in meters) to seconds (tau)
range_to_tau = 2.0 / speed_of_light

# Take care of the case when the az. time of the polynomial list does not cover
# the sensing start/stop
if (index_end == index_start) and handle_out_of_range:
    # 0--1--2--3--4--5   <- az. time of polynomial list (index shown on the left)
    #|--|                <- sensing start / stop
    if index_start == 0:
        index_end += 1

    # 0--1--2--3--4--5   <- az. time of polynomial list (index shown on the left)
    #              |--|  <- sensing start / stop
    else:
        index_start -= 1

for poly in polynomial_list[index_start:index_end+1]:
vec_aztime_sequence.append(poly[0])
arr_coeff_sequence.append(poly[1].coeffs)
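For readers skimming the diff, here is a minimal, self-contained sketch of the widening logic added to `extract_polynomial_sequence`. The helper name `widen_degenerate_window` is hypothetical (the PR inlines this branch); it only illustrates how a collapsed index window is stretched so the slice `polynomial_list[index_start:index_end + 1]` still holds two polynomials.

```python
def widen_degenerate_window(index_start, index_end, handle_out_of_range=True):
    # When the sensing period is not bracketed by the polynomial azimuth
    # times, the index search collapses to a single index; widen the window
    # so that polynomial_list[index_start:index_end + 1] keeps two entries.
    if (index_end == index_start) and handle_out_of_range:
        if index_start == 0:
            # sensing period starts before the first polynomial azimuth time
            index_end += 1
        else:
            # sensing period ends after the last polynomial azimuth time
            index_start -= 1
    return index_start, index_end

# Sensing window before the first of six polynomial azimuth times
print(widen_degenerate_window(0, 0))  # (0, 1)
# Sensing window after the last of six polynomial azimuth times
print(widen_degenerate_window(5, 5))  # (4, 5)
```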
6 changes: 6 additions & 0 deletions src/s1reader/s1_burst_slc.py
@@ -911,15 +911,21 @@ def az_fm_rate_mismatch_from_llh(self,
for datetime_vec in self.extended_coeffs.dc_aztime_vec]

# calculate splined interpolation of the coeffs. and tau_0s
if len(fm_rate_aztime_sec_vec) <= 1:
    return isce3.core.LUT2d()
interpolator_tau0_ka = InterpolatedUnivariateSpline(
fm_rate_aztime_sec_vec,
self.extended_coeffs.fm_rate_tau0_vec,
ext=3,
k=1)
tau0_ka_interp = interpolator_tau0_ka(vec_t)[..., np.newaxis]

if len(dc_aztime_sec_vec) <= 1:
    return isce3.core.LUT2d()
interpolator_tau0_fdc_interp = InterpolatedUnivariateSpline(
dc_aztime_sec_vec,
self.extended_coeffs.dc_tau0_vec,
ext=3,
k=1)
tau0_fdc_interp = interpolator_tau0_fdc_interp(vec_t)[..., np.newaxis]

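A short illustration (with made-up numbers, not from the PR) of why the new `<= 1` guards in `az_fm_rate_mismatch_from_llh` are needed: the degree-1 `InterpolatedUnivariateSpline` used for the tau0 interpolation cannot be built from a single polynomial, so the method now returns an empty `isce3.core.LUT2d()` instead of letting SciPy raise.

```python
import numpy as np
from scipy.interpolate import InterpolatedUnivariateSpline

# One (azimuth time, tau0) pair: too few points for a degree-1 spline.
aztime_sec = np.array([0.0])
tau0 = np.array([5.4e-3])
try:
    InterpolatedUnivariateSpline(aztime_sec, tau0, ext=3, k=1)
except Exception as err:  # SciPy rejects len(x) <= k
    print('spline construction failed:', err)

# With two or more polynomials the spline (and hence the mismatch LUT) can be built.
aztime_sec = np.array([0.0, 2.75])
tau0 = np.array([5.4e-3, 5.5e-3])
spline = InterpolatedUnivariateSpline(aztime_sec, tau0, ext=3, k=1)
print(spline(1.0))
```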
32 changes: 24 additions & 8 deletions src/s1reader/s1_reader.py
@@ -14,6 +14,7 @@
import isce3
import numpy as np
import shapely
from shapely.affinity import translate

from scipy.interpolate import InterpolatedUnivariateSpline
from nisar.workflows.stage_dem import check_dateline
@@ -221,16 +222,20 @@ def calculate_centroid(lons, lats):

return shapely.geometry.Point(llh_centroid[:2])

def get_burst_centers_and_boundaries(tree):
def get_burst_centers_and_boundaries(tree, num_bursts=None):
'''Parse grid points list and calculate burst center lat and lon

Parameters:
-----------
Parameters
----------
tree : Element
Element containing geolocation grid points.

Returns:
--------
num_bursts: int or None
Expected number of bursts in the subswath.
Used to check whether the number of parsed burst polygons matches this count.
When None, the number of bursts is taken to be the number of polygons found by this function.

Returns
-------
center_pts : list
List of burst centroids as shapely Points
boundary_pts : list
@@ -252,6 +257,9 @@ def get_burst_centers_and_boundaries(tree):
lons[i] = float(grid_pt[5].text)

unique_line_indices = np.unique(lines)

if num_bursts is None:
    num_bursts = len(unique_line_indices) - 1
n_bursts = len(unique_line_indices) - 1
center_pts = [[]] * n_bursts
boundary_pts = [[]] * n_bursts
@@ -273,6 +281,14 @@ def get_burst_centers_and_boundaries(tree):
poly = shapely.geometry.Polygon(zip(burst_lons, burst_lats))
boundary_pts[i] = check_dateline(poly)

num_border_polygon = len(unique_line_indices) - 1
if num_bursts > num_border_polygon:
    warnings.warn('Inconsistency between the number of bursts in the subswath and the number of burst polygons.')
    num_missing_polygons = num_bursts - num_border_polygon

    center_pts += [shapely.Point()] * num_missing_polygons
    boundary_pts += [[shapely.Polygon()]] * num_missing_polygons

return center_pts, boundary_pts


@@ -853,8 +869,6 @@ def burst_from_xml(annotation_path: str, orbit_path: str, tiff_path: str,

orbit_number = int(tree.find('adsHeader/absoluteOrbitNumber').text)

center_pts, boundary_pts = get_burst_centers_and_boundaries(tree)

wavelength = isce3.core.speed_of_light / radar_freq
starting_range = slant_range_time * isce3.core.speed_of_light / 2
range_pxl_spacing = isce3.core.speed_of_light / (2 * range_sampling_rate)
@@ -912,6 +926,8 @@ def burst_from_xml(annotation_path: str, orbit_path: str, tiff_path: str,
n_bursts = int(burst_list_elements.attrib['count'])
bursts = [[]] * n_bursts

center_pts, boundary_pts = get_burst_centers_and_boundaries(tree, num_bursts=n_bursts)

for i, burst_list_element in enumerate(burst_list_elements):
# Zero Doppler azimuth time of the first line of this burst
sensing_start = as_datetime(burst_list_element.find('azimuthTime').text)
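Finally, a sketch of the padding behavior added to `get_burst_centers_and_boundaries` when `burstList` declares more bursts than the geolocation grid yields polygons. The wrapper `pad_missing_burst_polygons` is a hypothetical name used only for illustration; in the PR the padding happens inside the function itself, with `num_bursts` passed down from `burst_from_xml`.

```python
import warnings
import shapely

def pad_missing_burst_polygons(center_pts, boundary_pts, num_bursts):
    # Append empty geometries so both lists stay index-aligned with the
    # bursts parsed from burstList, even when some burst polygons are missing.
    num_border_polygon = len(boundary_pts)
    if num_bursts > num_border_polygon:
        warnings.warn('Number of bursts does not match the number of burst polygons.')
        num_missing = num_bursts - num_border_polygon
        center_pts = center_pts + [shapely.Point()] * num_missing
        boundary_pts = boundary_pts + [[shapely.Polygon()]] * num_missing
    return center_pts, boundary_pts

# e.g. nine bursts declared in burstList but only eight polygons parsed
centers, boundaries = pad_missing_burst_polygons(
    [shapely.Point(0, 0)] * 8, [[shapely.Polygon()]] * 8, 9)
print(len(centers), len(boundaries))  # 9 9
```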