From 2af3eea964a7514d972508db588e1d98edeb97ab Mon Sep 17 00:00:00 2001
From: Sharon Fitzpatrick
Date: Thu, 7 Nov 2024 16:39:33 -0800
Subject: [PATCH 1/3] sort the transect ids columns in raw_transect_time_series.csv

---
 src/coastseg/coastseg_map.py | 2 ++
 src/coastseg/common.py       | 5 +++++
 2 files changed, 7 insertions(+)

diff --git a/src/coastseg/coastseg_map.py b/src/coastseg/coastseg_map.py
index 02a0b18..96db8de 100644
--- a/src/coastseg/coastseg_map.py
+++ b/src/coastseg/coastseg_map.py
@@ -1840,6 +1840,8 @@ def extract_all_shorelines(self,roi_ids:list=None) -> None:
         Returns:
             None
         """
+        if isinstance(roi_ids, str):
+            roi_ids = [roi_ids]
         # 1. validate the inputs for shoreline extraction exist: ROIs, transects,shorelines and a downloaded data for each ROI
         self.validate_extract_shoreline_inputs(roi_ids)

diff --git a/src/coastseg/common.py b/src/coastseg/common.py
index 826c190..491c381 100644
--- a/src/coastseg/common.py
+++ b/src/coastseg/common.py
@@ -1921,8 +1921,13 @@ def save_transects(
     filepath = os.path.join(save_location, "raw_transect_time_series_merged.csv")
     merged_timeseries_df.to_csv(filepath, sep=",",index=False)

+    # sort the columns
+    sorted_columns = [timeseries_df.columns[0]] + sorted(timeseries_df.columns[1:], key=lambda x: int(''.join(filter(str.isdigit, x))))
+    timeseries_df = timeseries_df[sorted_columns]
+
     filepath = os.path.join(save_location, "raw_transect_time_series.csv")
     timeseries_df.to_csv(filepath, sep=",",index=False)
+
     # save transect settings to file
     transect_settings = get_transect_settings(settings)
     transect_settings_path = os.path.join(save_location, "transects_settings.json")

From e268173582fd056578d5870e2091d1685251db11 Mon Sep 17 00:00:00 2001
From: Sharon Fitzpatrick
Date: Thu, 7 Nov 2024 16:40:41 -0800
Subject: [PATCH 2/3] sort the transect id columns in tidally_corrected_transect_time_series.csv

---
 src/coastseg/tide_correction.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/src/coastseg/tide_correction.py b/src/coastseg/tide_correction.py
index e4a279a..3c9142c 100644
--- a/src/coastseg/tide_correction.py
+++ b/src/coastseg/tide_correction.py
@@ -249,6 +249,8 @@ def correct_tides(
                                              'tidally_corrected')
     # Save the Tidally corrected time series
+    sorted_columns = [timeseries_df.columns[0]] + sorted(timeseries_df.columns[1:], key=lambda x: int(''.join(filter(str.isdigit, x))))
+    timeseries_df = timeseries_df[sorted_columns]
     timeseries_df.to_csv(os.path.join(session_path, 'tidally_corrected_transect_time_series.csv'),index=False)
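Patches 1 and 2 apply the same ordering trick: keep the first column of the time series (the date column) where it is and sort the remaining transect ID columns by the digits embedded in their names, so an ID ending in 2 sorts before one ending in 10. A minimal, self-contained sketch of that ordering, using invented column names rather than actual CoastSeg output:

    import pandas as pd

    # Toy time series: first column holds dates, the rest are transect IDs
    # (the "cros_*" names are illustrative, not real CoastSeg transect IDs).
    timeseries_df = pd.DataFrame({
        "dates": ["2024-01-01", "2024-02-01"],
        "cros_10": [12.1, 12.4],
        "cros_2": [8.3, 8.1],
        "cros_1": [5.0, 5.2],
    })

    # Keep the first column fixed, then order the remaining columns by the integer
    # embedded in each name; a plain string sort would put "cros_10" before "cros_2".
    sorted_columns = [timeseries_df.columns[0]] + sorted(
        timeseries_df.columns[1:],
        key=lambda x: int("".join(filter(str.isdigit, x))),
    )
    timeseries_df = timeseries_df[sorted_columns]

    print(timeseries_df.columns.tolist())  # ['dates', 'cros_1', 'cros_2', 'cros_10']

Note that this numeric key assumes every non-date column name contains at least one digit, which matches how the patch treats timeseries_df.columns[1:].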
From 929fceba4b016652e2f4d8cd3c191ff7dc80fa0d Mon Sep 17 00:00:00 2001
From: Sharon Fitzpatrick
Date: Wed, 13 Nov 2024 15:42:11 -0800
Subject: [PATCH 3/3] #280 fix add_shore_points_to_timeseries to correctly get the last point of transect & add code to filter out dropped points from merged time series

---
 scripts/apply_tidal_correction.py | 8 ++++++--
 src/coastseg/coastseg_map.py      | 2 ++
 src/coastseg/common.py            | 8 ++++++--
 3 files changed, 14 insertions(+), 4 deletions(-)

diff --git a/scripts/apply_tidal_correction.py b/scripts/apply_tidal_correction.py
index c89fbba..8f2bf1e 100644
--- a/scripts/apply_tidal_correction.py
+++ b/scripts/apply_tidal_correction.py
@@ -101,7 +101,7 @@ def add_shore_points_to_timeseries(timeseries_data: pd.DataFrame,
         transect = transects_utm.iloc[i]
         transect_id = transect['id']
         first = transect.geometry.coords[0]
-        last = transect.geometry.coords[1]
+        last = transect.geometry.coords[-1]
         idx = timeseries_data['transect_id'].str.contains(transect_id)
         ##in case there is a transect in the config_gdf that doesn't have any intersections
@@ -377,7 +377,11 @@ def add_lat_lon_to_timeseries(merged_timeseries_df, transects_gdf,timeseries_df,
     merged_timeseries_gdf,dropped_points_df = filter_points_outside_transects(merged_timeseries_gdf,transects_gdf,save_location,ext)
     if not dropped_points_df.empty:
         timeseries_df = filter_dropped_points_out_of_timeseries(timeseries_df, dropped_points_df)
-
+        merged_timeseries_df = merged_timeseries_df[~merged_timeseries_df.set_index(['dates', 'transect_id']).index.isin(dropped_points_df.set_index(['dates', 'transect_id']).index)]
+        if len(merged_timeseries_df) == 0:
+            print("All points were dropped from the timeseries. This means all of the detected shoreline points were not on the transects. Turn off the only_keep_points_on_transects parameter to keep all points.")
+
+
     # save the time series of along shore points as points to a geojson (saves shore_x and shore_y as x and y coordinates in the geojson)
     cross_shore_pts = convert_date_gdf(merged_timeseries_gdf.drop(columns=['x','y','shore_x','shore_y','cross_distance']).to_crs('epsg:4326'))
     # rename the dates column to date

diff --git a/src/coastseg/coastseg_map.py b/src/coastseg/coastseg_map.py
index 96db8de..91f90b3 100644
--- a/src/coastseg/coastseg_map.py
+++ b/src/coastseg/coastseg_map.py
@@ -2101,6 +2101,8 @@ def save_session(self, roi_ids: list[str], save_transects: bool = True):
             roi_ids (list[str]): List of ROI IDs.
             save_transects (bool, optional): Flag to save transects. Defaults to True.
         """
+        if isinstance(roi_ids, str):
+            roi_ids = [roi_ids]
         # Save extracted shoreline info to session directory
         session_name = self.get_session_name()
         for roi_id in roi_ids:

diff --git a/src/coastseg/common.py b/src/coastseg/common.py
index 491c381..e13f16f 100644
--- a/src/coastseg/common.py
+++ b/src/coastseg/common.py
@@ -1708,7 +1708,7 @@ def add_shore_points_to_timeseries(timeseries_data: pd.DataFrame,
     for i, transect in transects_utm.iterrows():
         transect_id = transect['id']
         first = transect.geometry.coords[0]
-        last = transect.geometry.coords[1]
+        last = transect.geometry.coords[-1]

         # Filter timeseries data for the current transect_id
         idx = timeseries_data['transect_id'] == transect_id
@@ -1776,7 +1776,11 @@ def add_lat_lon_to_timeseries(merged_timeseries_df, transects_gdf,timeseries_df,
     merged_timeseries_gdf,dropped_points_df = filter_points_outside_transects(merged_timeseries_gdf,transects_gdf,save_location,ext)
     if not dropped_points_df.empty:
         timeseries_df = filter_dropped_points_out_of_timeseries(timeseries_df, dropped_points_df)
-
+        merged_timeseries_df = merged_timeseries_df[~merged_timeseries_df.set_index(['dates', 'transect_id']).index.isin(dropped_points_df.set_index(['dates', 'transect_id']).index)]
+        if len(merged_timeseries_df) == 0:
+            logger.warning("All points were dropped from the timeseries. This means all of the detected shoreline points were not on the transects. Turn off the only_keep_points_on_transects parameter to keep all points.")
+            print("All points were dropped from the timeseries. This means all of the detected shoreline points were not on the transects. Turn off the only_keep_points_on_transects parameter to keep all points.")
+
     # save the time series of along shore points as points to a geojson (saves shore_x and shore_y as x and y coordinates in the geojson)
     cross_shore_pts = convert_date_gdf(merged_timeseries_gdf.drop(columns=['x','y','shore_x','shore_y','cross_distance']).to_crs('epsg:4326'))
     # rename the dates column to date
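Patch 3's change to add_lat_lon_to_timeseries removes from the merged time series every (dates, transect_id) pair that also appears in dropped_points_df. A small sketch of that membership-based filter on invented toy frames; only the set_index([...]).index.isin(...) idiom is taken from the patch, the data and column values are made up:

    import pandas as pd

    # Toy merged time series and the points flagged as falling off their transects
    # (both frames are invented sample data, not real CoastSeg output).
    merged_timeseries_df = pd.DataFrame({
        "dates": ["2024-01-01", "2024-01-01", "2024-02-01"],
        "transect_id": ["cros_1", "cros_2", "cros_1"],
        "cross_distance": [5.0, 8.3, 5.2],
    })
    dropped_points_df = pd.DataFrame({
        "dates": ["2024-01-01"],
        "transect_id": ["cros_2"],
    })

    # Build a (dates, transect_id) index on both frames and keep only the rows
    # whose pair does NOT appear among the dropped points.
    keep = ~merged_timeseries_df.set_index(["dates", "transect_id"]).index.isin(
        dropped_points_df.set_index(["dates", "transect_id"]).index
    )
    merged_timeseries_df = merged_timeseries_df[keep]

    if len(merged_timeseries_df) == 0:
        # Mirrors the patch's warning: every detected shoreline point was off its transect.
        print("All points were dropped from the timeseries.")

    print(merged_timeseries_df)  # the (2024-01-01, cros_2) row has been removed

This mirrors the empty-result check in the patch: when the mask removes every row, the patch warns the user to turn off only_keep_points_on_transects rather than writing an empty time series silently.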