From 5b6a86bb12d1c6b9b7bf465c5247d0834dea633e Mon Sep 17 00:00:00 2001
From: Weber
Date: Mon, 18 Apr 2022 15:02:22 -0700
Subject: [PATCH] fixed duplicate handling for points

---
 LakeCat.py           | 2 +-
 LakeCat_functions.py | 9 ++++-----
 2 files changed, 5 insertions(+), 6 deletions(-)

diff --git a/LakeCat.py b/LakeCat.py
index 9d3b6d2..dfb7263 100644
--- a/LakeCat.py
+++ b/LakeCat.py
@@ -16,7 +16,7 @@
 
 from border import makeBrdrPctFile
 from lake_cat_config import LYR_DIR, NHD_DIR, OUT_DIR, STREAMCAT_DIR, FRAMEWORK
-from LakeCat_functions import (Accumulation, NHDtblMerge, PointInPoly,
+from LakeCat_functions import (Accumulation, NHDtblMerge, PointInPoly,dbf2DF,
                                chkColumnLength, doStats, getOnNetLakes, inputs,
                                makeBasins, makeNParrays, rpus)
 
diff --git a/LakeCat_functions.py b/LakeCat_functions.py
index d235f6f..0d602ad 100644
--- a/LakeCat_functions.py
+++ b/LakeCat_functions.py
@@ -435,11 +435,10 @@ def PointInPoly(points, inZoneData, pct_full, fld="GRIDCODE", summaryfield=None)
     if not points.crs == polys.crs:
         points = points.to_crs(polys.crs)
     # Get list of lat/long fields in the table
-    latlon = [
-        s for s in points.columns if any(xs in s.upper() for xs in ["LONGIT", "LATIT"])
-    ]
-    # Remove duplicate points for 'Count'
-    points2 = points.loc[~points.duplicated(latlon)]
+    points['lon'] = points['geometry'].x
+    points['lat'] = points['geometry'].y
+    # Remove duplicate points
+    points2 = points.drop_duplicates(subset=["lon", "lat"], keep='last')
     try:
         point_poly_join = sjoin(points2, polys, how="left", op="within")
     except:
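
Note on the change: the old code deduplicated on whatever attribute columns happened to contain "LATIT"/"LONGIT" in their names, so duplicate detection depended on those columns existing in the point table; the new code derives lon/lat directly from the point geometry before calling drop_duplicates. Also worth noting: the old ~points.duplicated(latlon) kept the first occurrence of each location, while drop_duplicates(..., keep='last') keeps the last, so the retained row can differ when duplicate locations carry different attribute values. Below is a minimal sketch of the new behaviour; the toy GeoDataFrame, site_id column, and coordinates are hypothetical and not part of LakeCat.

import geopandas as gpd
from shapely.geometry import Point

# Hypothetical toy point table (not LakeCat data): two rows share a location
# and there are no LATIT/LONGIT attribute columns for the old code to find.
points = gpd.GeoDataFrame(
    {"site_id": [1, 2, 3]},
    geometry=[Point(-122.5, 45.5), Point(-122.5, 45.5), Point(-121.9, 44.8)],
    crs="EPSG:4326",
)

# New approach from the patch: take coordinates straight from the geometry...
points["lon"] = points["geometry"].x
points["lat"] = points["geometry"].y

# ...then drop rows that repeat a location, keeping the last occurrence.
points2 = points.drop_duplicates(subset=["lon", "lat"], keep="last")

print(points2[["site_id", "lon", "lat"]])  # site_id 1 is dropped as a duplicate of 2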