Made a retry-VTK fix for rays that fail to find intersections with embree/trimesh.
havardlovas committed Apr 16, 2024
1 parent 7e0747c commit bce1bb3
Showing 7 changed files with 241 additions and 71 deletions.
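
For context, the retry pattern described in the commit message can be sketched as follows: rays are first traced with the embree/trimesh backend, and any rays that come back without a hit are re-traced with VTK through pyvista. The function below is an illustrative sketch only; names, array shapes and the fallback call are assumptions, not the gref4hsi implementation.

import numpy as np
import pyvista as pv
import trimesh

def intersect_with_fallback(mesh_pv, ray_origins, ray_dirs, max_ray_length):
    """Trace rays with trimesh/embree first, then retry the misses with VTK via pyvista.

    Illustrative sketch only: (n_rays, 3) arrays and a triangulated mesh are assumed,
    and the actual gref4hsi implementation differs.
    """
    # First pass: embree/trimesh intersection for all rays at once
    faces = mesh_pv.faces.reshape(-1, 4)[:, 1:]  # assumes an all-triangle PolyData mesh
    mesh_tri = trimesh.Trimesh(vertices=np.asarray(mesh_pv.points), faces=faces)
    points = np.full(ray_origins.shape, np.nan, dtype=np.float64)
    locs, idx_ray, _ = mesh_tri.ray.intersects_location(ray_origins, ray_dirs,
                                                        multiple_hits=False)
    points[idx_ray] = locs

    # Second pass: retry the rays that found no hit using VTK's OBB-tree ray_trace
    for i in np.flatnonzero(np.isnan(points[:, 0])):
        end_point = ray_origins[i] + max_ray_length * ray_dirs[i]
        hit_points, _ = mesh_pv.ray_trace(ray_origins[i], end_point)
        if hit_points.shape[0] > 0:
            points[i] = hit_points[0]
    return points
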
2 changes: 1 addition & 1 deletion data/config_examples/configuration_specim.ini
@@ -28,7 +28,7 @@ dem_folder = Input/GIS/
tide_path = Input/tidevann_nn2000_NMA.txt
dem_path = Input/GIS/DEM_downsampled_deluxe.tif
hsi_calib_path = Input/Calib/HSI_2b.xml
model_path = Input/GIS/model.vtk
model_path = Input/GIS/model.ply
pose_path = Intermediate/pose.csv

[HDF.raw_nav]
6 changes: 0 additions & 6 deletions data/config_examples/configuration_uhi.ini
@@ -3,7 +3,6 @@ mission_dir = D:/HyperspectralDataAll/UHI/2020-07-01-14-34-57-ArcticSeaIce-Ben-L
pose_export_type = h5_embedded
model_export_type = dem_file
sensor_type = UHI-4
tex_path = None
modeltype = DEM
max_ray_length = 20
blue_wave_length = 460
@@ -12,7 +11,6 @@ red_wave_length = 590
wavelength_unit = Nanometers
radiometric_unit = muW/(cm2 nm sr)
compare_to_rgb_mosaic = False
lever_arm_unit = m
lab_cal_dir = D:/HyperspectralDataAll/UHI/Lab_Calibration_Data/NP/

[Coordinate Reference Systems]
@@ -32,7 +30,6 @@ model_path = Input/GIS/model.vtk
rgborthoreshaped = Intermediate/OrthoReshaped/
demreshaped = Intermediate/OrthoReshaped/
pose_path = Intermediate/pose.csv
hsicalibobj = Intermediate/Pickle/cal_obj.pkl
rgb_point_cloud_folder = Output/3Dmodels/
footprint_folder = Output/GIS/FootPrints/
rgb_composite_folder = Output/GIS/RGBComposites/
@@ -98,9 +95,6 @@ ancillary_suffix = _anc
nodata = -9999
raster_transform_method = north_east




[HDF.rgb]
rgb_frames = rawdata/rgb/rgbFrames
rgb_frames_timestamp = rawdata/rgb/timestamp
28 changes: 16 additions & 12 deletions gref4hsi/scripts/coregistration.py
@@ -62,7 +62,7 @@ def interpolate_time_nodes(time_from, value, time_to, method = 'linear'):
elif method in ['gaussian']:
# Use sigma as the difference between two neighboring time nodes
sigma = time_from[1]-time_from[0]
eps**2 = np.sqrt(0.5*1/(sigma**2))
#eps**2 = np.sqrt(0.5*1/(sigma**2))
# sigma = 1/(sqrt(2)eps)
#https://docs.scipy.org/doc/scipy/reference/generated/scipy.interpolate.Rbf.html
return RBFInterpolator(time_from.reshape((-1,1)), value, kernel='gaussian', epsilon = eps)(np.array(time_to).reshape((-1,1)))
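
The commented-out line above hints at how the RBF shape parameter could be tied to the node spacing. Below is a sketch of computing eps so that it satisfies the sigma = 1/(sqrt(2)*eps) relation noted in the code; this is an assumption about the intended fix, not the project's final solution.

import numpy as np
from scipy.interpolate import RBFInterpolator

def gaussian_time_interp(time_from, value, time_to):
    # Use the node spacing as sigma, then eps = 1 / (sqrt(2) * sigma)
    sigma = time_from[1] - time_from[0]
    eps = 1.0 / (np.sqrt(2.0) * sigma)
    rbf = RBFInterpolator(np.asarray(time_from).reshape(-1, 1), value,
                          kernel='gaussian', epsilon=eps)
    return rbf(np.asarray(time_to).reshape(-1, 1))
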
@@ -78,11 +78,13 @@ def compose_pose_errors(param_pose_tot, time_nodes, unix_time_features, rot_body
time_to = unix_time_features,
method=time_interpolation_method).transpose()

# The errors in the interpolated form
# The position errors make up the first three columns
param_err_pos = err_interpolated[:, 0:3]
param_err_eul_ZYX = np.vstack((err_interpolated[:, 3:4].flatten(),
np.zeros(n_features),
np.zeros(n_features))).transpose()

# Columns 3-5 of the error are roll, pitch, yaw, while from_euler('ZYX', ...) expects them in yaw, pitch, roll order
param_err_eul_ZYX = np.vstack((err_interpolated[:, 5].flatten(),
err_interpolated[:, 4].flatten(),
err_interpolated[:, 3].flatten())).transpose()

# To be left multiplied with attitude
rot_err_NED = RotLib.from_euler('ZYX', param_err_eul_ZYX, degrees=True)
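
The reordering above matters because SciPy's 'ZYX' convention consumes angles as yaw, pitch, roll. A small illustrative check of that ordering (example values only):

import numpy as np
from scipy.spatial.transform import Rotation as RotLib

# Error columns 3-5 hold roll, pitch, yaw in degrees (example values)
err = np.array([[1.0, 0.5, 10.0]])              # roll, pitch, yaw
eul_ZYX = err[:, [2, 1, 0]]                      # reorder to yaw, pitch, roll for 'ZYX'
rot_err = RotLib.from_euler('ZYX', eul_ZYX, degrees=True)
print(rot_err.as_euler('ZYX', degrees=True))     # [[10.   0.5  1. ]]
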
@@ -464,13 +466,13 @@ def main(config_path, mode):
# Quaternion is stored here in the H5 file
h5_folder_quaternion_ecef = config['HDF.processed_nav']['quaternion_ecef']

# Poses modified in coregistration are written to
# Modified positions after coregistration
h5_folder_position_ecef_coreg = config['HDF.coregistration']['position_ecef']

# Quaternion is stored here in the H5 file
# Modified quaternions after coregistration
h5_folder_quaternion_ecef_coreg = config['HDF.coregistration']['quaternion_ecef']

# Timestamps here
# Timestamps for each hyperspectral frame
h5_folder_time_pose = config['HDF.processed_nav']['timestamp']

h5_paths = {'h5_folder_position_ecef': h5_folder_position_ecef,
@@ -626,6 +628,7 @@ def main(config_path, mode):
u_err = gcp_df_all['diff_u']
v_err = gcp_df_all['diff_v']

# Filter outliers in the registration errors using the interquartile range (IQR) method
feature_mask = filter_gcp_by_registration_error(u_err, v_err, method = 'iqr')

# These features are used
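
filter_gcp_by_registration_error is not shown in this diff; the sketch below shows what an interquartile-range (IQR) mask over the registration errors could look like. It is a guess at the idea, not the function's actual implementation.

import numpy as np

def iqr_mask(u_err, v_err, k=1.5):
    """Keep features whose u/v registration errors lie within k*IQR of the quartiles."""
    mask = np.ones(len(u_err), dtype=bool)
    for err in (np.asarray(u_err), np.asarray(v_err)):
        q1, q3 = np.percentile(err, [25, 75])
        iqr = q3 - q1
        mask &= (err >= q1 - k * iqr) & (err <= q3 + k * iqr)
    return mask
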
@@ -642,15 +645,16 @@ def main(config_path, mode):
'calibrate_camera': False,
'calibrate_lever_arm': False,
'calibrate_cx': False,
'calibrate_f': True,
'calibrate_f': False,
'calibrate_k1': False,
'calibrate_k2': True,
'calibrate_k3': True}
'calibrate_k2': False,
'calibrate_k3': False}

calibrate_per_transect = True
estimate_time_varying = True
time_node_spacing = 10 # In seconds. E.g. a 53 s transect with 10 s spacing is divided into the largest whole number of intervals that fits
time_interpolation_method = 'gaussian'
# TODO: fix gaussian interpolation
time_interpolation_method = 'linear'

# Select which time varying degrees of freedom to estimate errors for
calibrate_dict_extr = {'calibrate_pos_x': True,
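
As a concrete reading of the time_node_spacing comment above: a 53 s transect with a requested 10 s spacing is split into the largest whole number of intervals that fits. This is my interpretation of the comment, not necessarily the project's exact node placement.

import numpy as np

duration = 53.0           # transect duration in seconds (example)
spacing = 10.0            # requested time_node_spacing in seconds
n_intervals = max(int(duration // spacing), 1)            # -> 5 intervals
time_nodes = np.linspace(0.0, duration, n_intervals + 1)  # 6 nodes, ~10.6 s apart
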
106 changes: 94 additions & 12 deletions gref4hsi/scripts/georeference.py
@@ -1,5 +1,6 @@
# Built-ins
import configparser
import json
import os
import sys

@@ -112,7 +113,7 @@ def write_intersection_geometry_2_h5_file(hsi_geometry, config, h5_filename):


# Function called to apply standard processing on a folder of files
def main(iniPath, viz = False):
def main(iniPath, viz = False, use_coreg_param = False):
config = configparser.ConfigParser()
config.read(iniPath)

@@ -122,10 +123,42 @@ def main(iniPath, viz = False):
# Directory of H5 files
dir_r = config['Absolute Paths']['h5_folder']





# Timestamps for each hyperspectral frame are stored here in the H5 file
h5_folder_time_pose = config['HDF.processed_nav']['timestamp']

# Use the regular parameters from nav system and manufacturer:
# The path to the XML file
hsi_cal_xml = config['Absolute Paths']['hsi_calib_path']

# The path to the Tide file (if necessary)
# Position is stored here in the H5 file
h5_folder_position_ecef = config['HDF.processed_nav']['position_ecef']

# Quaternion is stored here in the H5 file
h5_folder_quaternion_ecef = config['HDF.processed_nav']['quaternion_ecef']

if use_coreg_param:
print('Using coregistered parameters for georeferencing')

# Set the camera model to the calibrated one if it exists
hsi_cal_xml_coreg = config['Absolute Paths']['calib_file_coreg']
if os.path.exists(hsi_cal_xml_coreg):
hsi_cal_xml = hsi_cal_xml_coreg

# Optimized position
h5_folder_position_ecef_coreg = config['HDF.coregistration']['position_ecef']

# Quaternion
h5_folder_quaternion_ecef_coreg = config['HDF.coregistration']['quaternion_ecef']





# The path to the Tide file (if necessary and available)
try:
path_tide = config['Absolute Paths']['tide_path']
except Exception as e:
@@ -134,7 +167,25 @@ def main(iniPath, viz = False):
# Maximal allowed ray length
max_ray_length = float(config['General']['max_ray_length'])

mesh = pv.read(path_mesh,)
mesh = pv.read(path_mesh)

"""metadata_mesh = {
"offset_x": offset_x,
"offset_y": offset_y,
"offset_z": offset_z,
"epsg_code": geocsc.to_epsg(), # Example EPSG code, replace with your actual code
"data_type": str(mesh.points.dtype), # Add other metadata entries here
}"""

model_meta_path = path_mesh.split('.')[0] + '_meta.json'
with open(model_meta_path, "r") as f:
# Load the JSON data from the file
metadata_mesh = json.load(f)
mesh_off_x = metadata_mesh['offset_x']
mesh_off_y = metadata_mesh['offset_y']
mesh_off_z = metadata_mesh['offset_z']
# Mesh is translated by this much
mesh_trans = np.array([mesh_off_x, mesh_off_y, mesh_off_z]).astype(np.float64)


print("\n################ Georeferencing: ################")
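
The metadata JSON read above is expected to carry the translation applied when the mesh was exported, keyed as offset_x/offset_y/offset_z. Below is a sketch of what writing such a file could look like on the export side; the key names follow the read code, everything else (values, EPSG code, file name) is illustrative.

import json

# Hypothetical offsets; in gref4hsi they would come from the model export step
offset_x, offset_y, offset_z = 598000.0, 7432000.0, 0.0

metadata_mesh = {
    "offset_x": offset_x,
    "offset_y": offset_y,
    "offset_z": offset_z,
    "epsg_code": 4978,        # example only: geocentric/ECEF EPSG code
    "data_type": "float32",
}

with open("model_meta.json", "w") as f:
    json.dump(metadata_mesh, f, indent=2)
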
@@ -148,23 +199,54 @@ def main(iniPath, viz = False):
print(f"Georeferencing file {file_count+1}/{n_files}, progress is {progress_perc} %")

# Path to hierarchical file
path_hdf = dir_r + filename
h5_filename = dir_r + filename

# Read h5 file
hyp = Hyperspectral(path_hdf, config)

# Using the cal file, we can define lever arm, boresight and local ray geometry (in dictionary)
hyp = Hyperspectral(h5_filename, config)

if use_coreg_param:
try:
# Use the coregistered dataset if it exists
pos_ref_ecef = Hyperspectral.get_dataset(h5_filename=h5_filename,
dataset_name= h5_folder_position_ecef_coreg)
except:
# If not, use the original
pos_ref_ecef = Hyperspectral.get_dataset(h5_filename=h5_filename,
dataset_name= h5_folder_position_ecef)
try:
# Use the coregistered dataset if it exists
quat_ref_ecef = Hyperspectral.get_dataset(h5_filename=h5_filename,
dataset_name= h5_folder_quaternion_ecef_coreg)
except:
# If not, use the original
quat_ref_ecef = Hyperspectral.get_dataset(h5_filename=h5_filename,
dataset_name= h5_folder_quaternion_ecef)
else:

# Just use the regular navigation data
pos_ref_ecef = Hyperspectral.get_dataset(h5_filename=h5_filename,
dataset_name= h5_folder_position_ecef)
# Extract the ecef orientations for each frame
quat_ref_ecef = Hyperspectral.get_dataset(h5_filename=h5_filename,
dataset_name=h5_folder_quaternion_ecef)
# Extract the timestamps for each frame
time_pose = Hyperspectral.get_dataset(h5_filename=h5_filename,
dataset_name= h5_folder_time_pose)


# Using the cal file, we can define lever arm, boresight and camera model geometry (in dictionary)
intrinsic_geometry_dict = cal_file_to_rays(filename_cal=hsi_cal_xml)


# Define the rays in ECEF for each frame.
hsi_geometry = define_hsi_ray_geometry(pos_ref_ecef = hyp.pos_ref,
quat_ref_ecef = hyp.quat_ref,
time_pose = hyp.pose_time,
hsi_geometry = define_hsi_ray_geometry(pos_ref_ecef,
quat_ref_ecef,
time_pose,
intrinsic_geometry_dict = intrinsic_geometry_dict)


hsi_geometry.intersect_with_mesh(mesh = mesh, max_ray_length=max_ray_length)
hsi_geometry.intersect_with_mesh(mesh = mesh, max_ray_length=max_ray_length, mesh_trans = mesh_trans)

# Computes the view angles in the local NED. Computationally intensive as local NED is defined for each intersection
hsi_geometry.compute_view_directions_local_tangent_plane()
@@ -177,7 +259,7 @@ def main(iniPath, viz = False):
hsi_geometry.compute_elevation_mean_sealevel(source_epsg = config['Coordinate Reference Systems']['geocsc_epsg_export'],
geoid_path = config['Absolute Paths']['geoid_path'])

write_intersection_geometry_2_h5_file(hsi_geometry=hsi_geometry, config = config, h5_filename=path_hdf)
write_intersection_geometry_2_h5_file(hsi_geometry=hsi_geometry, config = config, h5_filename=h5_filename)

hsi_geometry.write_rgb_point_cloud(config = config, hyp = hyp, transect_string = filename.split('.')[0])

61 changes: 49 additions & 12 deletions gref4hsi/tests/test_main_specim.py
@@ -22,9 +22,7 @@
sys.path.append(module_path)

# Local resources
from gref4hsi.scripts import georeference
from gref4hsi.scripts import orthorectification
from gref4hsi.scripts import coregistration
from gref4hsi.scripts import georeference, orthorectification, coregistration
from gref4hsi.utils import parsing_utils, specim_parsing_utils
from gref4hsi.utils import visualize
from gref4hsi.utils.config_utils import prepend_data_dir_to_relative_paths, customize_config
@@ -69,6 +67,24 @@ def main(config_yaml, specim_mission_folder, geoid_path, config_template_path, l
DEM_PATH = os.path.join(dem_fold, files[0])
#print(f"The file '{DEM_PATH}' is used as terrain.")
TERRAIN_TYPE = "dem_file"


# Do coregistration if there is a reference orthomosaic to compare against in the "orthomosaic" folder
do_coreg = False
ortho_ref_fold = os.path.join(specim_mission_folder, "orthomosaic")

if not os.path.exists(ortho_ref_fold):
print('Coregistration is not done, as there was no reference orthomosaic')

else:
if not os.listdir(ortho_ref_fold):
print('Coregistration is not done, as there was no reference orthomosaic')
else:
# If there is a folder and it is not empty
# Find the only file that is there
ortho_ref_file = [f for f in os.listdir(ortho_ref_fold) if f not in ('.', '..')][0]
do_coreg = True
print(f"The file '{ortho_ref_file}' is used as the reference orthomosaic.")



@@ -110,7 +126,7 @@ def main(config_yaml, specim_mission_folder, geoid_path, config_template_path, l
custom_config = {'General':
{'mission_dir': DATA_DIR,
'model_export_type': TERRAIN_TYPE, # Ray trace onto geoid
'max_ray_length': 200}, # Max distance in meters from spectral imager to seafloor. Specim does not fly higher
'max_ray_length': 150}, # Max distance in meters from spectral imager to seafloor. Specim does not fly higher

'Coordinate Reference Systems':
{'proj_epsg' : EPSG_CODE, # The projected CRS UTM 32, common on mainland norway
@@ -156,6 +172,11 @@ def main(config_yaml, specim_mission_folder, geoid_path, config_template_path, l
elif TERRAIN_TYPE == 'dem_file':
custom_config['Absolute Paths']['dem_path'] = DEM_PATH


# No need to orthorectify the data cube initially when coregistration with RGB composites is done
if do_coreg:
custom_config['Orthorectification']['resample_rgb_only'] = True

# Customizes the config file according to settings
customize_config(config_path=config_file_mission, dict_custom=custom_config)

@@ -169,11 +190,11 @@ def main(config_yaml, specim_mission_folder, geoid_path, config_template_path, l
"""specim_parsing_utils.main(config=config,
config_specim=config_specim_preprocess)"""

# Interpolates and reformats the pose (of the vehicle body) to "processed/nav/" folder.
# Time interpolates and reformats the pose (of the vehicle body) to "processed/nav/" folder.
config = parsing_utils.export_pose(config_file_mission)

# Exports model
"""parsing_utils.export_model(config_file_mission)"""
# Converts the model to a triangular mesh in an earth-centered, earth-fixed (geocentric) coordinate system
parsing_utils.export_model(config_file_mission)

# Commenting out the georeference step is fine if it has been done

@@ -183,18 +204,34 @@ def main(config_yaml, specim_mission_folder, geoid_path, config_template_path, l
visualize.show_mesh_camera(config, show_mesh = True, show_pose = True, ref_frame='ENU')"""

# Georeference the line scans of the hyperspectral imager. Utilizes parsed data
#georeference.main(config_file_mission)
georeference.main(config_file_mission)

orthorectification.main(config_file_mission)
# The coregistration compares to the reference and optimizes geometric parameters which are used to re-georeference.
if do_coreg:
# Optional: coregistration
# Match the RGB composite to the reference, find feature matches and build a ground control point (gcp) list containing, for each feature pair:
# the 3D reference point (from the reference), the vehicle position/orientation (at the resampled time) and the (resampled) pixel coordinate
coregistration.main(config_file_mission, mode='compare')

# The gcp list allows reprojecting the reference points and evaluating the reprojection error,
# which is used to optimize static geometric parameters (e.g. boresight, camera model...) or dynamic geometric parameters (time-varying nav errors).
# Settings are currently defined in the coregistration script
coregistration.main(config_file_mission, mode='calibrate')

# The final orthorectification can be conducted on the full datacube too
custom_config['Orthorectification']['resample_rgb_only'] = False
customize_config(config_path=config_file_mission, dict_custom=custom_config)

#orthorectification.main(config_file_mission)
# Georeference with coregistered parameters
georeference.main(config_file_mission, use_coreg_param=True)

#coregistration.main(config_file_mission, mode='compare')
orthorectification.main(config_file_mission)

coregistration.main(config_file_mission, mode='calibrate')


if __name__ == "__main__":
# Select a recording folder on drive
specim_mission_folder = os.path.join(base_fp, r"Specim/Missions/2022-08-31-Remøy/remoy_202208310800_ntnu_hyperspectral_74m")
specim_mission_folder = os.path.join(base_fp, r"Specim/Missions/2022-08-31-Remoy/remoy_202208310800_ntnu_hyperspectral_74m")

# Globally accessible files:
geoid_path = os.path.join(home, "VsCodeProjects/gref4hsi/data/world/geoids/egm08_25.gtx")