From 2be52711442922f877d5213b9fe8ff5df39684af Mon Sep 17 00:00:00 2001 From: Adam Erispaha Date: Sat, 8 Dec 2018 16:17:38 -0600 Subject: [PATCH 01/17] added access to outfalls, conduits, and junctions in INP --- swmmio/swmmio.py | 105 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 105 insertions(+) diff --git a/swmmio/swmmio.py b/swmmio/swmmio.py index 0c227a1..dc774ae 100644 --- a/swmmio/swmmio.py +++ b/swmmio/swmmio.py @@ -486,5 +486,110 @@ class inp(SWMMIOFile): #make sure INP has been saved in the GUI before using this def __init__(self, filePath): + self._conduits_df = None + self._junctions_df = None + self._outfalls_df = None #is this class necessary anymore? SWMMIOFile.__init__(self, filePath) #run the superclass init + + @property + def conduits(self): + """ + Get/set conduits section of the INP file. + + :return: Conduits section of the INP file + :rtype: pandas.DataFrame + + Examples: + + >>> import swmmio + >>> from swmmio.tests.data import MODEL_FULL_FEATURES__NET_PATH + >>> model = swmmio.Model(MODEL_FULL_FEATURES__NET_PATH) + >>> model.conduits + ... + ... InletNode OutletNode Length ManningN InletOffset OutletOffset \ + ... Name + ... C1:C2 J1 J2 244.63 0.01 0 0 + ... C2.1 J2 J3 666.00 0.01 0 0 + ... 1 1 4 400.00 0.01 0 0 + ... 2 4 5 400.00 0.01 0 0 + ... 3 5 J1 400.00 0.01 0 0 + ... 4 3 4 400.00 0.01 0 0 + ... 5 2 5 400.00 0.01 0 0 + ... InitFlow MaxFlow + ... Name + ... C1:C2 0 0 + ... C2.1 0 0 + ... 1 0 0 + ... 2 0 0 + ... 3 0 0 + ... 4 0 0 + ... 5 0 0 + """ + if self._conduits_df is None: + self._conduits_df = create_dataframeINP(self.path, "[CONDUITS]", comment_cols=False) + return self._conduits_df + @conduits.setter + def conduits(self, df): + """Set inp.conduits DataFrame.""" + self._conduits_df = df + + @property + def junctions(self): + """ + Get/set junctions section of the INP file. + + :return: junctions section of the INP file + :rtype: pandas.DataFrame + + Examples: + + >>> import swmmio + >>> from swmmio.tests.data import MODEL_FULL_FEATURES__NET_PATH + >>> model = swmmio.Model(MODEL_FULL_FEATURES__NET_PATH) + >>> model.junctions + ... + ... InvertElev MaxDepth InitDepth SurchargeDepth PondedArea + Name + J1 20.728 15 0 0 0 + J3 6.547 15 0 0 0 + 1 0.000 0 0 0 0 + 2 0.000 0 0 0 0 + 3 0.000 0 0 0 0 + 4 0.000 0 0 0 0 + 5 0.000 0 0 0 0 + """ + if self._junctions_df is None: + self._junctions_df = create_dataframeINP(self.path, "[JUNCTIONS]", comment_cols=False) + return self._junctions_df + @junctions.setter + def junctions(self, df): + """Set inp.junctions DataFrame.""" + self._junctions_df = df + + @property + def outfalls(self): + """ + Get/set outfalls section of the INP file. + + :return: outfalls section of the INP file + :rtype: pandas.DataFrame + + Examples: + + >>> import swmmio + >>> from swmmio.tests.data import MODEL_FULL_FEATURES__NET_PATH + >>> model = swmmio.Model(MODEL_FULL_FEATURES__NET_PATH) + >>> model.outfalls + ... 
+ InvertElev OutfallType StageOrTimeseries TideGate + Name + J4 0 FREE NO NaN + """ + if self._outfalls_df is None: + self._outfalls_df = create_dataframeINP(self.path, "[OUTFALLS]", comment_cols=False) + return self._outfalls_df + @outfalls.setter + def outfalls(self, df): + """Set inp.outfalls DataFrame.""" + self._outfalls_df = df From 9f6b8e0df8a4038c6e13b56d0370477c735fe65d Mon Sep 17 00:00:00 2001 From: Adam Erispaha Date: Wed, 19 Dec 2018 20:35:31 -0500 Subject: [PATCH 02/17] added unfinished save function swmmio.inp and increased version --- setup.py | 2 +- swmmio/swmmio.py | 14 ++++++++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 583e898..1b108e6 100644 --- a/setup.py +++ b/setup.py @@ -6,7 +6,7 @@ def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() -VERSION = '0.3.1' +VERSION = '0.3.2.dev' AUTHOR_NAME = 'Adam Erispaha' AUTHOR_EMAIL = 'aerispaha@gmail.com' diff --git a/swmmio/swmmio.py b/swmmio/swmmio.py index dc774ae..c41d3b9 100644 --- a/swmmio/swmmio.py +++ b/swmmio/swmmio.py @@ -492,6 +492,20 @@ def __init__(self, filePath): #is this class necessary anymore? SWMMIOFile.__init__(self, filePath) #run the superclass init + self._sections = [self._conduits_df, self._junctions_df, self._outfalls_df] + + def save(self, target_path=None): + ''' + Save the inp file to disk. File will be overwritten unless a target_path + is provided + ''' + from swmmio.utils.modify_model import replace_inp_section + target_path = target_path if target_path is not None else self.path + + for section in self._sections: + if section is not None: + replace_inp_section() + @property def conduits(self): """ From fc1b37ae2d57774dedae6f7348b83b95553c38d5 Mon Sep 17 00:00:00 2001 From: Adam Erispaha Date: Thu, 20 Dec 2018 19:04:18 -0500 Subject: [PATCH 03/17] added test rpt, create_dataframeRPT test --- README.md | 2 +- swmmio/__init__.py | 3 +- swmmio/defs/sectionheaders.py | 2 +- swmmio/run_models/start_pool.py | 2 +- .../data/model_full_features_network.inp | 383 +++++++++--------- .../data/model_full_features_network.rpt | 352 ++++++++++++++++ swmmio/tests/test_dataframes.py | 23 +- 7 files changed, 575 insertions(+), 192 deletions(-) create mode 100644 swmmio/tests/data/model_full_features_network.rpt diff --git a/README.md b/README.md index c17feb7..ff867b7 100644 --- a/README.md +++ b/README.md @@ -97,7 +97,7 @@ For example, climate change impacts can be investigated by creating a set of mod import os, shutil import swmmio from swmmio.utils.modify_model import replace_inp_section -from swmmio.utils.dataframes import create_dataframeINP +from swmmio import create_dataframeINP #initialize a baseline model object baseline = swmmio.Model(r'path\to\baseline.inp') diff --git a/swmmio/__init__.py b/swmmio/__init__.py index 6030d2e..7cfefc3 100644 --- a/swmmio/__init__.py +++ b/swmmio/__init__.py @@ -1,10 +1,11 @@ '''Python SWMM Input/Output Tools''' -VERSION_INFO = (0, 3, 2) +VERSION_INFO = (0, 3, 3, 'dev') __version__ = '.'.join(map(str, VERSION_INFO)) __author__ = 'Adam Erispaha' __copyright__ = 'Copyright (c) 2016' __licence__ = '' from .swmmio import * +from swmmio.utils.dataframes import create_dataframeBI, create_dataframeRPT, create_dataframeINP diff --git a/swmmio/defs/sectionheaders.py b/swmmio/defs/sectionheaders.py index 17e0078..86e4b69 100644 --- a/swmmio/defs/sectionheaders.py +++ b/swmmio/defs/sectionheaders.py @@ -44,7 +44,7 @@ 'Node Inflow Summary':'Name Type MaxLatInflow MaxTotalInflow MaxDay MaxHr LatInflowV 
TotalInflowV FlowBalErrorPerc XXX', 'Node Surcharge Summary':'Name Type HourSurcharged MaxHeightAboveCrown MinDepthBelowRim', 'Storage Volume Summary':'Name AvgVolume AvgPctFull EvapPctLoss ExfilPctLoss MaxVolume MaxPctFull MaxDay MaxFullHr MaxOutflow', - 'Node Depth Summary':'Name Type AvgDepth MaxNodeDepth MaxHGL MaxDay MaxHr', + 'Node Depth Summary':'Name Type AvgDepth MaxNodeDepth MaxHGL MaxDay MaxHr MaxNodeDepthReported', 'Link Flow Summary':'Name Type MaxQ MaxDay MaxHr MaxV MaxQPerc MaxDPerc', 'Subcatchment Results': 'Date Time PrecipInchPerHour LossesInchPerHr RunoffCFS', 'Node Results': 'Date Time InflowCFS FloodingCFS DepthFt HeadFt TSS TP TN', diff --git a/swmmio/run_models/start_pool.py b/swmmio/run_models/start_pool.py index 9fb69a2..9f280d0 100644 --- a/swmmio/run_models/start_pool.py +++ b/swmmio/run_models/start_pool.py @@ -1,7 +1,7 @@ from swmmio.run_models import run from swmmio.swmmio import Model # from swmmio.reporting import reporting -from swmmio.utils import swmm_utils as su +# from swmmio.utils import swmm_utils as su from swmmio.reporting import batch from multiprocessing import Pool, cpu_count from datetime import datetime diff --git a/swmmio/tests/data/model_full_features_network.inp b/swmmio/tests/data/model_full_features_network.inp index 2d9149f..24986a2 100644 --- a/swmmio/tests/data/model_full_features_network.inp +++ b/swmmio/tests/data/model_full_features_network.inp @@ -12,9 +12,9 @@ ALLOW_PONDING NO SKIP_STEADY_STATE NO START_DATE 11/01/2015 -START_TIME 14:00:00 +START_TIME 00:00:00 REPORT_START_DATE 11/01/2015 -REPORT_START_TIME 14:00:00 +REPORT_START_TIME 00:00:00 END_DATE 11/04/2015 END_TIME 00:00:00 SWEEP_START 01/01 @@ -23,7 +23,7 @@ DRY_DAYS 0 REPORT_STEP 00:01:00 WET_STEP 00:05:00 DRY_STEP 00:05:00 -ROUTING_STEP 0:00:01 +ROUTING_STEP 0:00:01 INERTIAL_DAMPING NONE NORMAL_FLOW_LIMITED BOTH @@ -45,206 +45,210 @@ CONSTANT 0.0 DRY_ONLY NO [RAINGAGES] -;;Name Format Interval SCF Source +;;Name Format Interval SCF Source ;;-------------- --------- ------ ------ ---------- SCS_24h_Type_I_1in INTENSITY 0:15 1.0 TIMESERIES SCS_24h_Type_I_1in [SUBCATCHMENTS] -;;Name Rain Gage Outlet Area %Imperv Width %Slope CurbLen SnowPack +;;Name Rain Gage Outlet Area %Imperv Width %Slope CurbLen SnowPack ;;-------------- ---------------- ---------------- -------- -------- -------- -------- -------- ---------------- -S1 SCS_24h_Type_I_1in J1 1 100 500 0.5 0 -S2 SCS_24h_Type_I_1in J2 2 100 500 0.5 0 -S3 SCS_24h_Type_I_1in j3 3 100 500 0.5 0 +S1 SCS_24h_Type_I_1in J1 3 30 500 0.5 0 +S2 SCS_24h_Type_I_1in J2 2 100 500 0.5 0 +S3 SCS_24h_Type_I_1in j3 3 100 500 0.5 0 +S4 SCS_24h_Type_I_1in 1 20 25 500 0.5 0 [SUBAREAS] -;;Subcatchment N-Imperv N-Perv S-Imperv S-Perv PctZero RouteTo PctRouted +;;Subcatchment N-Imperv N-Perv S-Imperv S-Perv PctZero RouteTo PctRouted ;;-------------- ---------- ---------- ---------- ---------- ---------- ---------- ---------- -S1 0.01 0.1 0.05 0.05 25 OUTLET -S2 0.01 0.1 0.05 0.05 25 OUTLET -S3 0.01 0.1 0.05 0.05 25 OUTLET +S1 0.01 0.1 0.05 0.05 25 OUTLET +S2 0.01 0.1 0.05 0.05 25 OUTLET +S3 0.01 0.1 0.05 0.05 25 OUTLET +S4 0.01 0.1 0.05 0.05 25 OUTLET [INFILTRATION] -;;Subcatchment MaxRate MinRate Decay DryTime MaxInfil +;;Subcatchment MaxRate MinRate Decay DryTime MaxInfil ;;-------------- ---------- ---------- ---------- ---------- ---------- -S1 3 0.5 4 7 0 -S2 3 0.5 4 7 0 -S3 3 0.5 4 7 0 +S1 3 0.5 4 7 0 +S2 3 0.5 4 7 0 +S3 3 0.5 4 7 0 +S4 3.0 0.5 4 7 0 [JUNCTIONS] -;;Name Elevation MaxDepth InitDepth SurDepth Aponded +;;Name Elevation 
MaxDepth InitDepth SurDepth Aponded ;;-------------- ---------- ---------- ---------- ---------- ---------- -J1 20.728 15 0 0 0 -J3 6.547 15 0 0 0 -1 0 0 0 0 0 -2 0 0 0 0 0 -3 0 0 0 0 0 -4 0 0 0 0 0 -5 0 0 0 0 0 +J3 6.547 15 0 0 0 +1 17 0 0 0 0 +2 17 0 0 0 0 +3 16.5 0 0 0 0 +4 16 0 0 0 0 +5 15 0 0 0 0 +J2 13.0 15 0 0 0 [OUTFALLS] -;;Name Elevation Type Stage Data Gated Route To +;;Name Elevation Type Stage Data Gated Route To ;;-------------- ---------- ---------- ---------------- -------- ---------------- -J4 0 FREE NO +J4 0 FREE NO [STORAGE] -;;Name Elev. MaxDepth InitDepth Shape Curve Name/Params N/A Fevap Psi Ksat IMD +;;Name Elev. MaxDepth InitDepth Shape Curve Name/Params N/A Fevap Psi Ksat IMD ;;-------------- -------- ---------- ----------- ---------- ---------------------------- -------- -------- -------- -------- -J2 13.392 15 0 FUNCTIONAL 1000 0 0 0 0 +J1 13.392 15 0 FUNCTIONAL 1000 0 0 0 0 [CONDUITS] -;;Name From Node To Node Length Roughness InOffset OutOffset InitFlow MaxFlow +;;Name From Node To Node Length Roughness InOffset OutOffset InitFlow MaxFlow ;;-------------- ---------------- ---------------- ---------- ---------- ---------- ---------- ---------- ---------- -C1:C2 J1 J2 244.63 0.01 0 0 0 0 +C1:C2 J1 J2 244.63 0.01 0 0 0 0 C2.1 J2 J3 666 0.01 0 0 0 0 -1 1 4 400 0.01 0 0 0 0 -2 4 5 400 0.01 0 0 0 0 -3 5 J1 400 0.01 0 0 0 0 -4 3 4 400 0.01 0 0 0 0 -5 2 5 400 0.01 0 0 0 0 +1 1 4 400 0.01 0 0 0 0 +2 4 5 400 0.01 0 0 0 0 +3 5 J1 400 0.01 0 0 0 0 +4 3 4 400 0.01 0 0 0 0 +5 2 5 400 0.01 0 0 0 0 [PUMPS] -;;Name From Node To Node Pump Curve Status Sartup Shutoff +;;Name From Node To Node Pump Curve Status Sartup Shutoff ;;-------------- ---------------- ---------------- ---------------- ------ -------- -------- -C2 J2 J3 * ON 0 0 +C2 J2 J3 P1_Curve ON 0 0 [WEIRS] -;;Name From Node To Node Type CrestHt Qcoeff Gated EndCon EndCoeff Surcharge RoadWidth RoadSurf +;;Name From Node To Node Type CrestHt Qcoeff Gated EndCon EndCoeff Surcharge RoadWidth RoadSurf ;;-------------- ---------------- ---------------- ------------ ---------- ---------- -------- -------- ---------- ---------- ---------- ---------- -C3 J3 J4 TRANSVERSE 0 3.33 NO 0 0 NO +C3 J3 J4 TRANSVERSE 0 3.33 NO 0 0 NO [XSECTIONS] -;;Link Shape Geom1 Geom2 Geom3 Geom4 Barrels Culvert +;;Link Shape Geom1 Geom2 Geom3 Geom4 Barrels Culvert ;;-------------- ------------ ---------------- ---------- ---------- ---------- ---------- ---------- -C1:C2 CIRCULAR 1 0 0 0 1 -C2.1 CIRCULAR 1 0 0 0 1 -1 CIRCULAR 1 0 0 0 1 -2 CIRCULAR 1 0 0 0 1 -3 CIRCULAR 1 0 0 0 1 -4 CIRCULAR 1 0 0 0 1 -5 CIRCULAR 1 0 0 0 1 -C3 RECT_OPEN 5 1 0 0 +C1:C2 CIRCULAR 1 0 0 0 1 +C2.1 CIRCULAR 1 0 0 0 1 +1 CIRCULAR 1 0 0 0 1 +2 CIRCULAR 1 0 0 0 1 +3 CIRCULAR 1 0 0 0 1 +4 CIRCULAR 1 0 0 0 1 +5 CIRCULAR 1 0 0 0 1 +C3 RECT_OPEN 5 1 0 0 [INFLOWS] ;;Node Constituent Time Series Type Mfactor Sfactor Baseline Pattern ;;-------------- ---------------- ---------------- -------- -------- -------- -------- -------- -J1 FLOW "" FLOW 1.0 1 1 -J3 Flow "" FLOW 1.0 1 1 -J2 FLOW "" FLOW 1.0 1 1 +J3 Flow "" FLOW 1.0 1 1 +J2 FLOW "" FLOW 1.0 1 1 +J1 FLOW "" FLOW 1.0 1 1 -[DWF] -;;Node Constituent Baseline Patterns -;;-------------- ---------------- ---------- ---------- -J1 FLOW 8 "" "" "" +[CURVES] +;;Name Type X-Value Y-Value +;;-------------- ---------- ---------- ---------- +P1_Curve Pump4 0 10 +P1_Curve 5 20 [TIMESERIES] -;;Name Date Time Value +;;Name Date Time Value ;;-------------- ---------- ---------- ---------- ;SCS_24h_Type_I_1in design storm, total rainfall = 1 
in, rain units = in/hr. -SCS_24h_Type_I_1in 0:00 0.0175 -SCS_24h_Type_I_1in 0:15 0.0175 -SCS_24h_Type_I_1in 0:30 0.0175 -SCS_24h_Type_I_1in 0:45 0.0175 -SCS_24h_Type_I_1in 1:00 0.0175 -SCS_24h_Type_I_1in 1:15 0.0175 -SCS_24h_Type_I_1in 1:30 0.0175 -SCS_24h_Type_I_1in 1:45 0.0175 -SCS_24h_Type_I_1in 2:00 0.0205 -SCS_24h_Type_I_1in 2:15 0.0205 -SCS_24h_Type_I_1in 2:30 0.0205 -SCS_24h_Type_I_1in 2:45 0.0205 -SCS_24h_Type_I_1in 3:00 0.0205 -SCS_24h_Type_I_1in 3:15 0.0205 -SCS_24h_Type_I_1in 3:30 0.0205 -SCS_24h_Type_I_1in 3:45 0.0205 -SCS_24h_Type_I_1in 4:00 0.0245 -SCS_24h_Type_I_1in 4:15 0.0245 -SCS_24h_Type_I_1in 4:30 0.0245 -SCS_24h_Type_I_1in 4:45 0.0245 -SCS_24h_Type_I_1in 5:00 0.0245 -SCS_24h_Type_I_1in 5:15 0.0245 -SCS_24h_Type_I_1in 5:30 0.0245 -SCS_24h_Type_I_1in 5:45 0.0245 -SCS_24h_Type_I_1in 6:00 0.031 -SCS_24h_Type_I_1in 6:15 0.031 -SCS_24h_Type_I_1in 6:30 0.031 -SCS_24h_Type_I_1in 6:45 0.031 -SCS_24h_Type_I_1in 7:00 0.038 -SCS_24h_Type_I_1in 7:15 0.038 -SCS_24h_Type_I_1in 7:30 0.038 -SCS_24h_Type_I_1in 7:45 0.038 -SCS_24h_Type_I_1in 8:00 0.05 -SCS_24h_Type_I_1in 8:15 0.05 -SCS_24h_Type_I_1in 8:30 0.07 -SCS_24h_Type_I_1in 8:45 0.07 -SCS_24h_Type_I_1in 9:00 0.098 -SCS_24h_Type_I_1in 9:15 0.098 -SCS_24h_Type_I_1in 9:30 0.236 -SCS_24h_Type_I_1in 9:45 0.612 -SCS_24h_Type_I_1in 10:00 0.136 -SCS_24h_Type_I_1in 10:15 0.136 -SCS_24h_Type_I_1in 10:30 0.082 -SCS_24h_Type_I_1in 10:45 0.082 -SCS_24h_Type_I_1in 11:00 0.06 -SCS_24h_Type_I_1in 11:15 0.06 -SCS_24h_Type_I_1in 11:30 0.06 -SCS_24h_Type_I_1in 11:45 0.052 -SCS_24h_Type_I_1in 12:00 0.048 -SCS_24h_Type_I_1in 12:15 0.048 -SCS_24h_Type_I_1in 12:30 0.042 -SCS_24h_Type_I_1in 12:45 0.042 -SCS_24h_Type_I_1in 13:00 0.042 -SCS_24h_Type_I_1in 13:15 0.042 -SCS_24h_Type_I_1in 13:30 0.038 -SCS_24h_Type_I_1in 13:45 0.038 -SCS_24h_Type_I_1in 14:00 0.0315 -SCS_24h_Type_I_1in 14:15 0.0315 -SCS_24h_Type_I_1in 14:30 0.0315 -SCS_24h_Type_I_1in 14:45 0.0315 -SCS_24h_Type_I_1in 15:00 0.0315 -SCS_24h_Type_I_1in 15:15 0.0315 -SCS_24h_Type_I_1in 15:30 0.0315 -SCS_24h_Type_I_1in 15:45 0.0315 -SCS_24h_Type_I_1in 16:00 0.024 -SCS_24h_Type_I_1in 16:15 0.024 -SCS_24h_Type_I_1in 16:30 0.024 -SCS_24h_Type_I_1in 16:45 0.024 -SCS_24h_Type_I_1in 17:00 0.024 -SCS_24h_Type_I_1in 17:15 0.024 -SCS_24h_Type_I_1in 17:30 0.024 -SCS_24h_Type_I_1in 17:45 0.024 -SCS_24h_Type_I_1in 18:00 0.024 -SCS_24h_Type_I_1in 18:15 0.024 -SCS_24h_Type_I_1in 18:30 0.024 -SCS_24h_Type_I_1in 18:45 0.024 -SCS_24h_Type_I_1in 19:00 0.024 -SCS_24h_Type_I_1in 19:15 0.024 -SCS_24h_Type_I_1in 19:30 0.024 -SCS_24h_Type_I_1in 19:45 0.024 -SCS_24h_Type_I_1in 20:00 0.0185 -SCS_24h_Type_I_1in 20:15 0.0185 -SCS_24h_Type_I_1in 20:30 0.0185 -SCS_24h_Type_I_1in 20:45 0.0185 -SCS_24h_Type_I_1in 21:00 0.0185 -SCS_24h_Type_I_1in 21:15 0.0185 -SCS_24h_Type_I_1in 21:30 0.0185 -SCS_24h_Type_I_1in 21:45 0.0185 -SCS_24h_Type_I_1in 22:00 0.0185 -SCS_24h_Type_I_1in 22:15 0.0185 -SCS_24h_Type_I_1in 22:30 0.0185 -SCS_24h_Type_I_1in 22:45 0.0185 -SCS_24h_Type_I_1in 23:00 0.0185 -SCS_24h_Type_I_1in 23:15 0.0185 -SCS_24h_Type_I_1in 23:30 0.0185 -SCS_24h_Type_I_1in 23:45 0.0185 -SCS_24h_Type_I_1in 24:00 0 +SCS_24h_Type_I_1in 0:00 0.0175 +SCS_24h_Type_I_1in 0:15 0.0175 +SCS_24h_Type_I_1in 0:30 0.0175 +SCS_24h_Type_I_1in 0:45 0.0175 +SCS_24h_Type_I_1in 1:00 0.0175 +SCS_24h_Type_I_1in 1:15 0.0175 +SCS_24h_Type_I_1in 1:30 0.0175 +SCS_24h_Type_I_1in 1:45 0.0175 +SCS_24h_Type_I_1in 2:00 0.0205 +SCS_24h_Type_I_1in 2:15 0.0205 +SCS_24h_Type_I_1in 2:30 0.0205 +SCS_24h_Type_I_1in 2:45 0.0205 +SCS_24h_Type_I_1in 3:00 0.0205 
+SCS_24h_Type_I_1in 3:15 0.0205 +SCS_24h_Type_I_1in 3:30 0.0205 +SCS_24h_Type_I_1in 3:45 0.0205 +SCS_24h_Type_I_1in 4:00 0.0245 +SCS_24h_Type_I_1in 4:15 0.0245 +SCS_24h_Type_I_1in 4:30 0.0245 +SCS_24h_Type_I_1in 4:45 0.0245 +SCS_24h_Type_I_1in 5:00 0.0245 +SCS_24h_Type_I_1in 5:15 0.0245 +SCS_24h_Type_I_1in 5:30 0.0245 +SCS_24h_Type_I_1in 5:45 0.0245 +SCS_24h_Type_I_1in 6:00 0.031 +SCS_24h_Type_I_1in 6:15 0.031 +SCS_24h_Type_I_1in 6:30 0.031 +SCS_24h_Type_I_1in 6:45 0.031 +SCS_24h_Type_I_1in 7:00 0.038 +SCS_24h_Type_I_1in 7:15 0.038 +SCS_24h_Type_I_1in 7:30 0.038 +SCS_24h_Type_I_1in 7:45 0.038 +SCS_24h_Type_I_1in 8:00 0.05 +SCS_24h_Type_I_1in 8:15 0.05 +SCS_24h_Type_I_1in 8:30 0.07 +SCS_24h_Type_I_1in 8:45 0.07 +SCS_24h_Type_I_1in 9:00 0.098 +SCS_24h_Type_I_1in 9:15 0.098 +SCS_24h_Type_I_1in 9:30 0.236 +SCS_24h_Type_I_1in 9:45 0.612 +SCS_24h_Type_I_1in 10:00 0.136 +SCS_24h_Type_I_1in 10:15 0.136 +SCS_24h_Type_I_1in 10:30 0.082 +SCS_24h_Type_I_1in 10:45 0.082 +SCS_24h_Type_I_1in 11:00 0.06 +SCS_24h_Type_I_1in 11:15 0.06 +SCS_24h_Type_I_1in 11:30 0.06 +SCS_24h_Type_I_1in 11:45 0.052 +SCS_24h_Type_I_1in 12:00 0.048 +SCS_24h_Type_I_1in 12:15 0.048 +SCS_24h_Type_I_1in 12:30 0.042 +SCS_24h_Type_I_1in 12:45 0.042 +SCS_24h_Type_I_1in 13:00 0.042 +SCS_24h_Type_I_1in 13:15 0.042 +SCS_24h_Type_I_1in 13:30 0.038 +SCS_24h_Type_I_1in 13:45 0.038 +SCS_24h_Type_I_1in 14:00 0.0315 +SCS_24h_Type_I_1in 14:15 0.0315 +SCS_24h_Type_I_1in 14:30 0.0315 +SCS_24h_Type_I_1in 14:45 0.0315 +SCS_24h_Type_I_1in 15:00 0.0315 +SCS_24h_Type_I_1in 15:15 0.0315 +SCS_24h_Type_I_1in 15:30 0.0315 +SCS_24h_Type_I_1in 15:45 0.0315 +SCS_24h_Type_I_1in 16:00 0.024 +SCS_24h_Type_I_1in 16:15 0.024 +SCS_24h_Type_I_1in 16:30 0.024 +SCS_24h_Type_I_1in 16:45 0.024 +SCS_24h_Type_I_1in 17:00 0.024 +SCS_24h_Type_I_1in 17:15 0.024 +SCS_24h_Type_I_1in 17:30 0.024 +SCS_24h_Type_I_1in 17:45 0.024 +SCS_24h_Type_I_1in 18:00 0.024 +SCS_24h_Type_I_1in 18:15 0.024 +SCS_24h_Type_I_1in 18:30 0.024 +SCS_24h_Type_I_1in 18:45 0.024 +SCS_24h_Type_I_1in 19:00 0.024 +SCS_24h_Type_I_1in 19:15 0.024 +SCS_24h_Type_I_1in 19:30 0.024 +SCS_24h_Type_I_1in 19:45 0.024 +SCS_24h_Type_I_1in 20:00 0.0185 +SCS_24h_Type_I_1in 20:15 0.0185 +SCS_24h_Type_I_1in 20:30 0.0185 +SCS_24h_Type_I_1in 20:45 0.0185 +SCS_24h_Type_I_1in 21:00 0.0185 +SCS_24h_Type_I_1in 21:15 0.0185 +SCS_24h_Type_I_1in 21:30 0.0185 +SCS_24h_Type_I_1in 21:45 0.0185 +SCS_24h_Type_I_1in 22:00 0.0185 +SCS_24h_Type_I_1in 22:15 0.0185 +SCS_24h_Type_I_1in 22:30 0.0185 +SCS_24h_Type_I_1in 22:45 0.0185 +SCS_24h_Type_I_1in 23:00 0.0185 +SCS_24h_Type_I_1in 23:15 0.0185 +SCS_24h_Type_I_1in 23:30 0.0185 +SCS_24h_Type_I_1in 23:45 0.0185 +SCS_24h_Type_I_1in 24:00 0 [REPORT] ;;Reporting Options INPUT YES CONTROLS YES -SUBCATCHMENTS ALL +SUBCATCHMENTS NONE NODES ALL -LINKS ALL +LINKS NONE [TAGS] @@ -253,45 +257,50 @@ DIMENSIONS -100.036 -181.979 708.126 213.879 Units Feet [COORDINATES] -;;Node X-Coord Y-Coord +;;Node X-Coord Y-Coord ;;-------------- ------------------ ------------------ -J1 0.000 0.000 -J3 459.058 -113.145 -1 -77.021 -78.321 -2 -84.988 43.833 -3 -18.600 -71.239 -4 -67.284 -37.603 -5 -56.662 15.507 -J4 671.391 -163.985 -J2 238.750 -53.332 +J3 459.058 -113.145 +1 -77.021 -78.321 +2 -84.988 43.833 +3 -18.600 -71.239 +4 -67.284 -37.603 +5 -56.662 15.507 +J2 238.750 -53.332 +J4 671.391 -163.985 +J1 0.000 0.000 [VERTICES] -;;Link X-Coord Y-Coord +;;Link X-Coord Y-Coord ;;-------------- ------------------ ------------------ -C2.1 295.636 -159.756 -C2.1 360.253 -181.886 -4 -23.911 -51.766 -5 -85.873 19.933 
+C2.1 295.636 -159.756 +C2.1 360.253 -181.886 +4 -23.911 -51.766 +5 -85.873 19.933 [Polygons] -;;Subcatchment X-Coord Y-Coord +;;Subcatchment X-Coord Y-Coord ;;-------------- ------------------ ------------------ -S1 110.154 195.885 -S1 110.154 47.351 -S1 -56.323 42.367 -S1 -63.301 181.928 -S1 110.154 195.885 -S2 394.261 131.088 -S2 410.211 -20.436 -S2 245.728 -19.439 -S2 235.759 110.154 -S2 394.261 131.088 -S3 660.425 55.326 -S3 657.435 -104.173 -S3 519.867 -96.198 -S3 509.898 50.342 -S3 660.425 55.326 +S1 110.154 195.885 +S1 110.154 47.351 +S1 -56.323 42.367 +S1 -63.301 181.928 +S1 110.154 195.885 +S2 394.261 131.088 +S2 410.211 -20.436 +S2 245.728 -19.439 +S2 235.759 110.154 +S2 394.261 131.088 +S3 660.425 55.326 +S3 657.435 -104.173 +S3 519.867 -96.198 +S3 509.898 50.342 +S3 660.425 55.326 +S4 -63.523 -116.383 +S4 -82.996 -174.805 +S4 -154.695 -168.608 +S4 -148.499 -126.120 [SYMBOLS] -;;Gage X-Coord Y-Coord +;;Gage X-Coord Y-Coord ;;-------------- ------------------ ------------------ + diff --git a/swmmio/tests/data/model_full_features_network.rpt b/swmmio/tests/data/model_full_features_network.rpt new file mode 100644 index 0000000..bdcc549 --- /dev/null +++ b/swmmio/tests/data/model_full_features_network.rpt @@ -0,0 +1,352 @@ + + EPA STORM WATER MANAGEMENT MODEL - VERSION 5.1 (Build 5.1.010) + -------------------------------------------------------------- + + + ************* + Element Count + ************* + Number of rain gages ...... 1 + Number of subcatchments ... 4 + Number of nodes ........... 9 + Number of links ........... 9 + Number of pollutants ...... 0 + Number of land uses ....... 0 + + + **************** + Raingage Summary + **************** + Data Recording + Name Data Source Type Interval + ------------------------------------------------------------------------ + SCS_24h_Type_I_1in SCS_24h_Type_I_1in INTENSITY 15 min. + + + ******************** + Subcatchment Summary + ******************** + Name Area Width %Imperv %Slope Rain Gage Outlet + ----------------------------------------------------------------------------------------------------------- + S1 3.00 500.00 30.00 0.5000 SCS_24h_Type_I_1in J1 + S2 2.00 500.00 100.00 0.5000 SCS_24h_Type_I_1in J2 + S3 3.00 500.00 100.00 0.5000 SCS_24h_Type_I_1in J3 + S4 20.00 500.00 25.00 0.5000 SCS_24h_Type_I_1in 1 + + + ************ + Node Summary + ************ + Invert Max. Ponded External + Name Type Elev. Depth Area Inflow + ------------------------------------------------------------------------------- + J3 JUNCTION 6.55 15.00 0.0 Yes + 1 JUNCTION 17.00 1.00 0.0 + 2 JUNCTION 17.00 1.00 0.0 + 3 JUNCTION 16.50 1.00 0.0 + 4 JUNCTION 16.00 1.00 0.0 + 5 JUNCTION 15.00 1.00 0.0 + J2 JUNCTION 13.00 15.00 0.0 Yes + J4 OUTFALL 0.00 0.00 0.0 + J1 STORAGE 13.39 15.00 0.0 Yes + + + ************ + Link Summary + ************ + Name From Node To Node Type Length %Slope Roughness + --------------------------------------------------------------------------------------------- + C1:C2 J1 J2 CONDUIT 244.6 0.1602 0.0100 + C2.1 J2 J3 CONDUIT 666.0 0.9690 0.0100 + 1 1 4 CONDUIT 400.0 0.2500 0.0100 + 2 4 5 CONDUIT 400.0 0.2500 0.0100 + 3 5 J1 CONDUIT 400.0 0.4020 0.0100 + 4 3 4 CONDUIT 400.0 0.1250 0.0100 + 5 2 5 CONDUIT 400.0 0.5000 0.0100 + C2 J2 J3 TYPE4 PUMP + C3 J3 J4 WEIR + + + ********************* + Cross Section Summary + ********************* + Full Full Hyd. Max. No. of Full + Conduit Shape Depth Area Rad. 
Width Barrels Flow + --------------------------------------------------------------------------------------- + C1:C2 CIRCULAR 1.00 0.79 0.25 1.00 1 1.85 + C2.1 CIRCULAR 1.00 0.79 0.25 1.00 1 4.56 + 1 CIRCULAR 1.00 0.79 0.25 1.00 1 2.32 + 2 CIRCULAR 1.00 0.79 0.25 1.00 1 2.32 + 3 CIRCULAR 1.00 0.79 0.25 1.00 1 2.94 + 4 CIRCULAR 1.00 0.79 0.25 1.00 1 1.64 + 5 CIRCULAR 1.00 0.79 0.25 1.00 1 3.28 + + + + ********************************************************* + NOTE: The summary statistics displayed in this report are + based on results found at every computational time step, + not just on results from each reporting time step. + ********************************************************* + + **************** + Analysis Options + **************** + Flow Units ............... CFS + Process Models: + Rainfall/Runoff ........ YES + RDII ................... NO + Snowmelt ............... NO + Groundwater ............ NO + Flow Routing ........... YES + Ponding Allowed ........ NO + Water Quality .......... NO + Infiltration Method ...... HORTON + Flow Routing Method ...... DYNWAVE + Starting Date ............ NOV-01-2015 00:00:00 + Ending Date .............. NOV-04-2015 00:00:00 + Antecedent Dry Days ...... 0.0 + Report Time Step ......... 00:01:00 + Wet Time Step ............ 00:05:00 + Dry Time Step ............ 00:05:00 + Routing Time Step ........ 1.00 sec + Variable Time Step ....... YES + Maximum Trials ........... 8 + Number of Threads ........ 1 + Head Tolerance ........... 0.005000 ft + + + ********************* + Control Actions Taken + ********************* + + + ************************** Volume Depth + Runoff Quantity Continuity acre-feet inches + ************************** --------- ------- + Total Precipitation ...... 2.333 1.000 + Evaporation Loss ......... 0.000 0.000 + Infiltration Loss ........ 1.425 0.611 + Surface Runoff ........... 0.875 0.375 + Final Storage ............ 0.034 0.015 + Continuity Error (%) ..... -0.038 + + + ************************** Volume Volume + Flow Routing Continuity acre-feet 10^6 gal + ************************** --------- --------- + Dry Weather Inflow ....... 0.000 0.000 + Wet Weather Inflow ....... 0.875 0.285 + Groundwater Inflow ....... 0.000 0.000 + RDII Inflow .............. 0.000 0.000 + External Inflow .......... 17.851 5.817 + External Outflow ......... 18.701 6.094 + Flooding Loss ............ 0.000 0.000 + Evaporation Loss ......... 0.000 0.000 + Exfiltration Loss ........ 0.000 0.000 + Initial Stored Volume .... 0.000 0.000 + Final Stored Volume ...... 0.025 0.008 + Continuity Error (%) ..... -0.006 + + + *************************** + Time-Step Critical Elements + *************************** + None + + + ******************************** + Highest Flow Instability Indexes + ******************************** + All links are stable. 
+ + + ************************* + Routing Time Step Summary + ************************* + Minimum Time Step : 0.50 sec + Average Time Step : 1.00 sec + Maximum Time Step : 1.00 sec + Percent in Steady State : 0.00 + Average Iterations per Step : 2.00 + Percent Not Converging : 0.00 + + + *************************** + Subcatchment Runoff Summary + *************************** + + -------------------------------------------------------------------------------------------------------- + Total Total Total Total Total Total Peak Runoff + Precip Runon Evap Infil Runoff Runoff Runoff Coeff + Subcatchment in in in in in 10^6 gal CFS + -------------------------------------------------------------------------------------------------------- + S1 1.00 0.00 0.00 0.70 0.29 0.02 0.55 0.289 + S2 1.00 0.00 0.00 0.00 0.96 0.05 1.19 0.964 + S3 1.00 0.00 0.00 0.00 0.96 0.08 1.71 0.963 + S4 1.00 0.00 0.00 0.75 0.24 0.13 2.61 0.241 + + + ****************** + Node Depth Summary + ****************** + + --------------------------------------------------------------------------------- + Average Maximum Maximum Time of Max Reported + Depth Depth HGL Occurrence Max Depth + Node Type Feet Feet Feet days hr:min Feet + --------------------------------------------------------------------------------- + J3 JUNCTION 0.96 1.64 8.19 0 10:00 1.64 + 1 JUNCTION 0.07 0.94 17.94 0 10:00 0.93 + 2 JUNCTION 0.00 0.00 17.00 0 00:00 0.00 + 3 JUNCTION 0.00 0.39 16.89 0 10:03 0.39 + 4 JUNCTION 0.07 0.87 16.87 0 10:02 0.87 + 5 JUNCTION 0.06 1.00 16.00 0 10:09 1.00 + J2 JUNCTION 0.00 0.00 13.00 0 00:00 0.00 + J4 OUTFALL 0.00 0.00 0.00 0 00:00 0.00 + J1 STORAGE 0.70 2.18 15.57 0 10:19 2.18 + + + ******************* + Node Inflow Summary + ******************* + + ------------------------------------------------------------------------------------------------- + Maximum Maximum Lateral Total Flow + Lateral Total Time of Max Inflow Inflow Balance + Inflow Inflow Occurrence Volume Volume Error + Node Type CFS CFS days hr:min 10^6 gal 10^6 gal Percent + ------------------------------------------------------------------------------------------------- + J3 JUNCTION 2.71 7.04 0 10:00 2.02 6.1 0.032 + 1 JUNCTION 2.61 2.61 0 10:00 0.131 0.131 0.013 + 2 JUNCTION 0.00 0.00 0 00:00 0 0 0.000 gal + 3 JUNCTION 0.00 0.13 0 09:54 0 0.000536 1.265 + 4 JUNCTION 0.00 2.54 0 10:00 0 0.131 0.014 + 5 JUNCTION 0.00 2.38 0 10:03 0 0.131 -0.031 + J2 JUNCTION 2.19 4.33 0 10:00 1.99 4.08 0.000 + J4 OUTFALL 0.00 7.00 0 10:00 0 6.09 0.000 + J1 STORAGE 1.55 3.52 0 09:59 1.96 2.09 0.049 + + + ********************** + Node Surcharge Summary + ********************** + + Surcharging occurs when water rises above the top of the highest conduit. + --------------------------------------------------------------------- + Max. Height Min. Depth + Hours Above Crown Below Rim + Node Type Surcharged Feet Feet + --------------------------------------------------------------------- + 5 JUNCTION 0.02 0.000 0.000 + J1 STORAGE 1.56 1.182 12.818 + + + ********************* + Node Flooding Summary + ********************* + + Flooding refers to all water that overflows a node, whether it ponds or not. 
+ -------------------------------------------------------------------------- + Total Maximum + Maximum Time of Max Flood Ponded + Hours Rate Occurrence Volume Depth + Node Flooded CFS days hr:min 10^6 gal Feet + -------------------------------------------------------------------------- + 5 0.02 0.07 0 10:09 0.000 0.000 + + + ********************** + Storage Volume Summary + ********************** + + -------------------------------------------------------------------------------------------------- + Average Avg Evap Exfil Maximum Max Time of Max Maximum + Volume Pcnt Pcnt Pcnt Volume Pcnt Occurrence Outflow + Storage Unit 1000 ft3 Full Loss Loss 1000 ft3 Full days hr:min CFS + -------------------------------------------------------------------------------------------------- + J1 0.704 5 0 0 2.182 15 0 10:19 2.45 + + + *********************** + Outfall Loading Summary + *********************** + + ----------------------------------------------------------- + Flow Avg Max Total + Freq Flow Flow Volume + Outfall Node Pcnt CFS CFS 10^6 gal + ----------------------------------------------------------- + J4 100.00 3.14 7.00 6.094 + ----------------------------------------------------------- + System 100.00 3.14 7.00 6.094 + + + ******************** + Link Flow Summary + ******************** + + ----------------------------------------------------------------------------- + Maximum Time of Max Maximum Max/ Max/ + |Flow| Occurrence |Veloc| Full Full + Link Type CFS days hr:min ft/sec Flow Depth + ----------------------------------------------------------------------------- + C1:C2 CONDUIT 2.45 0 10:19 6.23 1.32 0.50 + C2.1 CONDUIT 0.00 0 00:00 0.00 0.00 0.50 + 1 CONDUIT 2.54 0 10:00 3.48 1.10 0.89 + 2 CONDUIT 2.38 0 10:03 3.64 1.03 0.85 + 3 CONDUIT 1.97 0 09:59 2.92 0.67 1.00 + 4 CONDUIT 0.16 0 10:05 0.44 0.10 0.63 + 5 CONDUIT 0.00 0 00:00 0.00 0.00 0.50 + C2 PUMP 4.33 0 10:00 0.22 + C3 WEIR 7.00 0 10:00 0.33 + + + *************************** + Flow Classification Summary + *************************** + + ------------------------------------------------------------------------------------- + Adjusted ---------- Fraction of Time in Flow Class ---------- + /Actual Up Down Sub Sup Up Down Norm Inlet + Conduit Length Dry Dry Dry Crit Crit Crit Crit Ltd Ctrl + ------------------------------------------------------------------------------------- + C1:C2 1.00 0.00 0.00 0.00 0.00 1.00 0.00 0.00 0.00 0.00 + C2.1 1.00 0.00 1.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 + 1 1.00 0.00 0.00 0.00 1.00 0.00 0.00 0.00 0.97 0.00 + 2 1.00 0.00 0.00 0.00 0.99 0.01 0.00 0.00 0.01 0.00 + 3 1.00 0.00 0.00 0.00 1.00 0.00 0.00 0.00 0.99 0.00 + 4 1.00 0.00 0.88 0.00 0.12 0.00 0.00 0.00 0.86 0.00 + 5 1.00 0.00 1.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 + + + ************************* + Conduit Surcharge Summary + ************************* + + ---------------------------------------------------------------------------- + Hours Hours + --------- Hours Full -------- Above Full Capacity + Conduit Both Ends Upstream Dnstream Normal Flow Limited + ---------------------------------------------------------------------------- + C1:C2 0.01 1.56 0.01 1.44 0.01 + 1 0.01 0.01 0.01 0.10 0.01 + 2 0.01 0.01 0.02 0.06 0.01 + 3 0.02 0.02 1.56 0.01 0.01 + + + *************** + Pumping Summary + *************** + + --------------------------------------------------------------------------------------------------------- + Min Avg Max Total Power % Time Off + Percent Number of Flow Flow Flow Volume Usage Pump Curve + Pump Utilized Start-Ups CFS CFS CFS 
10^6 gal Kw-hr Low High + --------------------------------------------------------------------------------------------------------- + C2 100.00 1 0.00 2.10 4.33 4.078 70.26 0.0 0.0 + + + Analysis begun on: Thu Dec 20 18:27:11 2018 + Analysis ended on: Thu Dec 20 18:27:12 2018 + Total elapsed time: 00:00:01 \ No newline at end of file diff --git a/swmmio/tests/test_dataframes.py b/swmmio/tests/test_dataframes.py index 9c0c428..d8ffaa6 100644 --- a/swmmio/tests/test_dataframes.py +++ b/swmmio/tests/test_dataframes.py @@ -1,6 +1,27 @@ from swmmio.tests.data import (MODEL_FULL_FEATURES_PATH, MODEL_FULL_FEATURES__NET_PATH, MODEL_BROWARD_COUNTY_PATH, MODEL_XSECTION_ALT_01) -from swmmio import swmmio +import swmmio + + +def test_create_dataframeRPT(): + m = swmmio.Model(MODEL_FULL_FEATURES__NET_PATH) + + depth_summ = swmmio.create_dataframeRPT(m.rpt.path, "Node Depth Summary") + flood_summ = swmmio.create_dataframeRPT(m.rpt.path, "Node Flooding Summary") + inflo_summ = swmmio.create_dataframeRPT(m.rpt.path, "Node Inflow Summary") + + print ('\n', depth_summ) + print (inflo_summ) + print (flood_summ) + + assert(inflo_summ.loc['J3', 'TotalInflowV'] == 6.1) + assert(inflo_summ.loc['J1', 'MaxTotalInflow'] == 3.52) + + assert(depth_summ.loc['J3', 'MaxNodeDepth'] == 1.64) + assert(depth_summ.loc['4', 'MaxNodeDepth'] == 0.87) + + # need to ensure indicies are strings always + assert(flood_summ.loc[5, 'TotalFloodVol'] == 0) def test_conduits_dataframe(): From b0abce148d54c0c9f115aa9b8990b7c00c2df631 Mon Sep 17 00:00:00 2001 From: Adam Erispaha Date: Thu, 20 Dec 2018 19:09:27 -0500 Subject: [PATCH 04/17] updated test to reflect test model --- swmmio/tests/test_dataframes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/swmmio/tests/test_dataframes.py b/swmmio/tests/test_dataframes.py index d8ffaa6..2db2216 100644 --- a/swmmio/tests/test_dataframes.py +++ b/swmmio/tests/test_dataframes.py @@ -52,4 +52,4 @@ def test_model_to_networkx(): assert(G['J2']['J3']['C2.1']['Length'] == 666) assert(G['J1']['J2']['C1:C2']['Length'] == 244.63) - assert(round(G.node['J2']['InvertElev'], 3) == 13.392) + assert(round(G.node['J2']['InvertElev'], 3) == 13.0) From 727038e268a0e36249dcf98ccaf85acc4861287c Mon Sep 17 00:00:00 2001 From: Adam Erispaha Date: Sat, 9 Mar 2019 23:57:32 -0500 Subject: [PATCH 05/17] drafted new ModelSection object, general encapsulation of sections in core.py --- .gitignore | 3 + swmmio/__init__.py | 2 +- swmmio/core.py | 605 +++++++++++++++++++++++++++ swmmio/defs/section_headers.json | 5 + swmmio/elements.py | 74 ++++ swmmio/swmmio.py | 609 ---------------------------- swmmio/tests/test_dataframes.py | 9 +- swmmio/tests/test_model_elements.py | 33 ++ swmmio/utils/dataframes.py | 4 + swmmio/utils/functions.py | 10 +- 10 files changed, 737 insertions(+), 617 deletions(-) create mode 100644 swmmio/core.py create mode 100644 swmmio/defs/section_headers.json create mode 100644 swmmio/elements.py delete mode 100644 swmmio/swmmio.py create mode 100644 swmmio/tests/test_model_elements.py diff --git a/.gitignore b/.gitignore index d526ad9..f889e66 100644 --- a/.gitignore +++ b/.gitignore @@ -14,3 +14,6 @@ __pycache__/ .cache/ .pytest_cache/ _build/ + +# IDE Stuff +*.idea/ diff --git a/swmmio/__init__.py b/swmmio/__init__.py index 7cfefc3..fc0d992 100644 --- a/swmmio/__init__.py +++ b/swmmio/__init__.py @@ -7,5 +7,5 @@ __copyright__ = 'Copyright (c) 2016' __licence__ = '' -from .swmmio import * +from .core import * from swmmio.utils.dataframes import create_dataframeBI, 
create_dataframeRPT, create_dataframeINP
diff --git a/swmmio/core.py b/swmmio/core.py
new file mode 100644
index 0000000..a2da0cb
--- /dev/null
+++ b/swmmio/core.py
@@ -0,0 +1,605 @@
+#!/usr/bin/env python
+# coding:utf-8
+import re
+from time import ctime
+import pandas as pd
+import glob
+import math
+from swmmio.utils import spatial
+from swmmio.utils import functions
+from swmmio.utils.dataframes import create_dataframeINP, create_dataframeRPT, get_link_coords
+from swmmio.defs.config import *
+import warnings
+
+
+class Model(object):
+
+    def __init__(self, in_file_path):
+
+        """
+        Class representing a complete SWMM model incorporating its INP and RPT
+        files and data
+
+        initialize a swmmio.Model object by pointing it to a directory containing
+        a single INP (and optionally an RPT file with matching filename) or by
+        pointing it directly to an .inp file.
+        """
+        self.crs = None
+        inp_path = None
+        if os.path.isdir(in_file_path):
+            # a directory was passed in
+            inps_in_dir = glob.glob1(in_file_path, "*.inp")
+            if len(inps_in_dir) == 1:
+                # there is only one INP in this directory -> good.
+                inp_path = os.path.join(in_file_path, inps_in_dir[0])
+
+        elif os.path.splitext(in_file_path)[1] == '.inp':
+            # an inp was passed in
+            inp_path = in_file_path
+
+        if inp_path:
+            wd = os.path.dirname(inp_path)  # working dir
+            name = os.path.splitext(os.path.basename(inp_path))[0]
+            self.name = name
+            self.inp = inp(inp_path)  # inp object
+            self.rpt = None  # until we can confirm it initializes properly
+            self.bbox = None  # to remember how the model data was clipped
+            self.scenario = ''  # self._get_scenario()
+
+            # try to initialize a companion RPT object
+            rpt_path = os.path.join(wd, name + '.rpt')
+            if os.path.exists(rpt_path):
+                try:
+                    self.rpt = rpt(rpt_path)
+                except Exception as e:
+                    print('{}.rpt failed to initialize\n{}'.format(name, e))
+
+        self._nodes_df = None
+        self._conduits_df = None
+        self._orifices_df = None
+        self._weirs_df = None
+        self._pumps_df = None
+        self._subcatchments_df = None
+        self._network = None
+
+    def rpt_is_valid(self, verbose=False):
+        """
+        Return true if the .rpt file exists and has a revision date more
+        recent than the .inp file. If the inp has a modified date later than
+        the rpt, assume that the rpt should be regenerated
+        """
+
+        if self.rpt is None:
+            if verbose:
+                print('{} does not have an rpt file'.format(self.name))
+            return False
+
+        # check if the rpt has ERRORS output from SWMM
+        with open(self.rpt.path) as f:
+            # jump to 500 bytes before the end of file
+            f.seek(self.rpt.file_size - 500)
+            for line in f:
+                spl = line.split()
+                if len(spl) > 0 and spl[0] == 'ERROR':
+                    # return false at first "ERROR" occurrence
+                    return False
+
+        rpt_mod_time = os.path.getmtime(self.rpt.path)
+        inp_mod_time = os.path.getmtime(self.inp.path)
+
+        if verbose:
+            print("{}.rpt: modified {}".format(self.name, ctime(rpt_mod_time)))
+            print("{}.inp: modified {}".format(self.name, ctime(inp_mod_time)))
+
+        if inp_mod_time > rpt_mod_time:
+            # inp datetime modified greater than rpt datetime modified
+            return False
+        else:
+            return True
+
+    def to_map(self, filename=None, inproj='epsg:2272'):
+        '''
+        To be removed in v0.4.0. Use swmmio.reporting.visualize.create_map()
+        '''
+
+        def wrn():
+            w = '''to_map is no longer supported! 
Use + swmmio.reporting.visualize.create_map() instead''' + warnings.warn(w, DeprecationWarning) + + with warnings.catch_warnings(): + warnings.simplefilter("always") + wrn() + + def conduits(self): + + """ + collect all useful and available data related model conduits and + organize in one dataframe. + """ + + # check if this has been done already and return that data accordingly + if self._conduits_df is not None: + return self._conduits_df + + # parse out the main objects of this model + inp = self.inp + rpt = self.rpt + + # create dataframes of relevant sections from the INP + conduits_df = create_dataframeINP(inp.path, "[CONDUITS]", comment_cols=False) + xsections_df = create_dataframeINP(inp.path, "[XSECTIONS]", comment_cols=False) + conduits_df = conduits_df.join(xsections_df) + coords_df = create_dataframeINP(inp.path, "[COORDINATES]") # .drop_duplicates() + + if rpt: + # create a dictionary holding data from an rpt file, if provided + link_flow_df = create_dataframeRPT(rpt.path, "Link Flow Summary") + conduits_df = conduits_df.join(link_flow_df) + + # add conduit coordinates + # the xys.map() junk is to unpack a nested list + verts = create_dataframeINP(inp.path, '[VERTICES]') + xys = conduits_df.apply(lambda r: get_link_coords(r, coords_df, verts), axis=1) + df = conduits_df.assign(coords=xys.map(lambda x: x[0])) + + # add conduit up/down inverts and calculate slope + elevs = self.nodes()[['InvertElev']] + df = pd.merge(df, elevs, left_on='InletNode', right_index=True, how='left') + df = df.rename(index=str, columns={"InvertElev": "InletNodeInvert"}) + df = pd.merge(df, elevs, left_on='OutletNode', right_index=True, how='left') + df = df.rename(index=str, columns={"InvertElev": "OutletNodeInvert"}) + df['UpstreamInvert'] = df.InletNodeInvert + df.InletOffset + df['DownstreamInvert'] = df.OutletNodeInvert + df.OutletOffset + df['SlopeFtPerFt'] = (df.UpstreamInvert - df.DownstreamInvert) / df.Length + + df.InletNode = df.InletNode.astype(str) + df.OutletNode = df.OutletNode.astype(str) + + self._conduits_df = df + + return df + + def orifices(self): + + """ + collect all useful and available data related model orifices and + organize in one dataframe. + """ + + # check if this has been done already and return that data accordingly + if self._orifices_df is not None: + return self._orifices_df + + # parse out the main objects of this model + inp = self.inp + rpt = self.rpt + + # create dataframes of relevant sections from the INP + orifices_df = create_dataframeINP(inp.path, "[ORIFICES]", comment_cols=False) + if orifices_df.empty: + return pd.DataFrame() + + coords_df = create_dataframeINP(inp.path, "[COORDINATES]") + + # add conduit coordinates + verts = create_dataframeINP(inp.path, '[VERTICES]') + xys = orifices_df.apply(lambda r: get_link_coords(r, coords_df, verts), axis=1) + df = orifices_df.assign(coords=xys.map(lambda x: x[0])) + df.InletNode = df.InletNode.astype(str) + df.OutletNode = df.OutletNode.astype(str) + self._orifices_df = df + + return df + + def weirs(self): + + """ + collect all useful and available data related model weirs and + organize in one dataframe. 
+ """ + + # check if this has been done already and return that data accordingly + if self._weirs_df is not None: + return self._weirs_df + + # parse out the main objects of this model + inp = self.inp + rpt = self.rpt + + # create dataframes of relevant sections from the INP + weirs_df = create_dataframeINP(inp.path, "[WEIRS]") + if weirs_df.empty: + return pd.DataFrame() + + weirs_df = weirs_df[['InletNode', 'OutletNode', 'WeirType', 'CrestHeight']] + coords_df = create_dataframeINP(inp.path, "[COORDINATES]") # .drop_duplicates() + + # add conduit coordinates + # the xys.map() junk is to unpack a nested list + verts = create_dataframeINP(inp.path, '[VERTICES]') + xys = weirs_df.apply(lambda r: get_link_coords(r, coords_df, verts), axis=1) + df = weirs_df.assign(coords=xys.map(lambda x: x[0])) + df.InletNode = df.InletNode.astype(str) + df.OutletNode = df.OutletNode.astype(str) + + self._weirs_df = df + + return df + + def pumps(self): + + """ + collect all useful and available data related model pumps and + organize in one dataframe. + """ + + # check if this has been done already and return that data accordingly + if self._pumps_df is not None: + return self._pumps_df + + # parse out the main objects of this model + inp = self.inp + rpt = self.rpt + + # create dataframes of relevant sections from the INP + pumps_df = create_dataframeINP(inp.path, "[PUMPS]", comment_cols=False) + if pumps_df.empty: + return pd.DataFrame() + + coords_df = create_dataframeINP(inp.path, "[COORDINATES]") # .drop_duplicates() + + # add conduit coordinates + verts = create_dataframeINP(inp.path, '[VERTICES]') + xys = pumps_df.apply(lambda r: get_link_coords(r, coords_df, verts), axis=1) + df = pumps_df.assign(coords=xys.map(lambda x: x[0])) + df.InletNode = df.InletNode.astype(str) + df.OutletNode = df.OutletNode.astype(str) + + self._pumps_df = df + + return df + + def nodes(self, bbox=None, subset=None): + + """ + collect all useful and available data related model nodes and organize + in one dataframe. 
+        """
+
+        # check if this has been done already and return that data accordingly
+        if self._nodes_df is not None and bbox == self.bbox:
+            return self._nodes_df
+
+        # parse out the main objects of this model
+        inp = self.inp
+        rpt = self.rpt
+
+        # create dataframes of relevant sections from the INP
+        juncs_df = create_dataframeINP(inp.path, "[JUNCTIONS]")
+        outfalls_df = create_dataframeINP(inp.path, "[OUTFALLS]")
+        storage_df = create_dataframeINP(inp.path, "[STORAGE]")
+        coords_df = create_dataframeINP(inp.path, "[COORDINATES]")
+
+        # concatenate the DFs and keep only relevant cols
+        all_nodes = pd.concat([juncs_df, outfalls_df, storage_df])
+        cols = ['InvertElev', 'MaxDepth', 'SurchargeDepth', 'PondedArea']
+        all_nodes = all_nodes[cols]
+
+        if rpt:
+            # add results data if a rpt file was found
+            depth_summ = create_dataframeRPT(rpt.path, "Node Depth Summary")
+            flood_summ = create_dataframeRPT(rpt.path, "Node Flooding Summary")
+
+            # join the rpt data (index on depth df, suffixes for common cols)
+            rpt_df = depth_summ.join(flood_summ, lsuffix='_depth', rsuffix='_flood')
+            all_nodes = all_nodes.join(rpt_df)  # join to the all_nodes df
+
+        all_nodes = all_nodes.join(coords_df[['X', 'Y']])
+
+        def nodexy(row):
+            if math.isnan(row.X) or math.isnan(row.Y):
+                return None
+            else:
+                return [(row.X, row.Y)]
+
+        xys = all_nodes.apply(lambda r: nodexy(r), axis=1)
+        all_nodes = all_nodes.assign(coords=xys)
+        all_nodes = all_nodes.rename(index=str)
+        self._nodes_df = all_nodes
+
+        return all_nodes
+
+    def subcatchments(self):
+        """
+        collect all useful and available data related to subcatchments and organize
+        in one dataframe.
+        """
+        subs = create_dataframeINP(self.inp.path, "[SUBCATCHMENTS]")
+        subs = subs.drop([';', 'Comment', 'Origin'], axis=1)
+
+        if self.rpt:
+            flw = create_dataframeRPT(self.rpt.path, 'Subcatchment Runoff Summary')
+            subs = subs.join(flw)
+
+            # more accurate runoff calculations
+            subs['RunoffAcFt'] = subs.TotalRunoffIn / 12.0 * subs.Area
+            subs['RunoffMGAccurate'] = subs.RunoffAcFt / 3.06888785
+
+        self._subcatchments_df = subs
+
+        return subs
+
+    def node(self, node, conduit=None):
+        '''
+        To be removed in v0.4.0
+        '''
+
+        def wrn():
+            w = "Deprecated. Use model.nodes().loc['{}'] instead".format(node)
+            warnings.warn(w, DeprecationWarning)
+            return self.nodes().loc[node]
+
+        with warnings.catch_warnings():
+            warnings.simplefilter("always")
+            wrn()
+
+    @property
+    def network(self):
+        """
+        Networkx MultiDiGraph representation of the model
+        :return: Networkx MultiDiGraph representation of model
+        :rtype: networkx.MultiDiGraph
+        """
+
+        if self._network is None:
+            G = functions.model_to_networkx(self, drop_cycles=False)
+            self._network = G
+
+        return self._network
+
+    def to_geojson(self, target_path=None):
+        """
+        Return a GeoJSON representation of the entire model
+        :param target_path: target path of geojson (optional)
+        :return: GeoJSON representation of model
+        """
+        raise NotImplementedError
+
+    def export_to_shapefile(self, shpdir, prj=None):
+        """
+        export the model data into a shapefile. element_type dictates which type
+        of data will be included. 
+ + default projection is PA State Plane - untested on other cases + """ + + # CREATE THE CONDUIT shp + conds = self.conduits() + conds_path = os.path.join(shpdir, self.inp.name + '_conduits.shp') + spatial.write_shapefile(conds, conds_path, prj=prj) + + # CREATE THE NODE shp + nodes = self.nodes() + nodes_path = os.path.join(shpdir, self.inp.name + '_nodes.shp') + spatial.write_shapefile(nodes, nodes_path, geomtype='point', prj=prj) + + +class SWMMIOFile(object): + defaultSection = "Link Flow Summary" + + def __init__(self, file_path): + + # file name and path variables + self.path = file_path + self.name = os.path.splitext(os.path.basename(file_path))[0] + self.dir = os.path.dirname(file_path) + self.file_size = os.path.getsize(file_path) + + def findByteRangeOfSection(self, startStr): + ''' + returns the start and end "byte" location of substrings in a text file + ''' + + with open(self.path) as f: + start = None + end = None + l = 0 # line bytes index + for line in f: + + if start and line.strip() == "" and (l - start) > 100: + # LOGIC: if start exists (was found) and the current line + # length is 3 or less (length of /n ) and we're more than + # 100 bytes from the start location then we are at the first + # "blank" line after our start section (aka the end of the + # section) + end = l + break + + if (startStr in line) and (not start): + start = l + + # increment length (bytes?) of current position + l += len(line) + len("\n") + + return [start, end] + + +class rpt(SWMMIOFile): + ''' + An accessible SWMM .rpt object + ''' + + def __init__(self, filePath): + + SWMMIOFile.__init__(self, filePath) + + with open(filePath) as f: + for line in f: + if "Starting Date" in line: + simulationStart = line.split(".. ")[1].replace("\n", "") + if "Ending Date" in line: + simulationEnd = line.split(".. ")[1].replace("\n", "") + if "Report Time Step ........." 
in line: + timeStepMin = int(line.split(":")[1].replace("\n", "")) + break + + self.simulationStart = simulationStart + self.simulationEnd = simulationEnd + self.timeStepMin = timeStepMin + + # grab the date of analysis + with open(filePath) as f: + f.seek(self.file_size - 500) # jump to 500 bytes before the end of file + for line in f: + if "Analysis begun on" in line: + date = line.split("Analysis begun on: ")[1].replace("\n", "") + + self.dateOfAnalysis = date + self.elementByteLocations = {"Link Results": {}, "Node Results": {}} + + def returnDataAtDTime(self, id, dtime, sectionTitle="Link Results", startByte=0): + ''' + return data from time series in RPT file + ''' + + byteLocDict = self.elementByteLocations[sectionTitle] + if byteLocDict: + startByte = byteLocDict[id] + + elif startByte == 0: + startByte = self.findByteRangeOfSection(sectionTitle)[0] + print('startByte ' + str(startByte)) + + with open(self.path) as f: + + f.seek(startByte) # jump to general area of file if we know it + subsectionFound = False + + for line in f: + if id in line: subsectionFound = True + + if subsectionFound and dtime in line: + line = ' '.join(re.findall('\"[^\"]*\"|\S+', line)) + rowdata = line.replace("\n", "").split(" ") + return rowdata + + +class inp(SWMMIOFile): + + # creates an accessible SWMM .inp object + # make sure INP has been saved in the GUI before using this + + def __init__(self, file_path): + self._conduits_df = None + self._junctions_df = None + self._outfalls_df = None + + SWMMIOFile.__init__(self, file_path) # run the superclass init + + self._sections = [self._conduits_df, self._junctions_df, self._outfalls_df] + + def save(self, target_path=None): + ''' + Save the inp file to disk. File will be overwritten unless a target_path + is provided + ''' + from swmmio.utils.modify_model import replace_inp_section + target_path = target_path if target_path is not None else self.path + + for section in self._sections: + if section is not None: + replace_inp_section() + + @property + def conduits(self): + """ + Get/set conduits section of the INP file. + + :return: Conduits section of the INP file + :rtype: pandas.DataFrame + + Examples: + + >>> import swmmio + >>> from swmmio.tests.data import MODEL_FULL_FEATURES__NET_PATH + >>> model = swmmio.Model(MODEL_FULL_FEATURES__NET_PATH) + >>> model.inp.conduits[['InletNode', 'OutletNode', 'Length', 'ManningN']] + InletNode OutletNode Length ManningN + Name + C1:C2 J1 J2 244.63 0.01 + C2.1 J2 J3 666.00 0.01 + 1 1 4 400.00 0.01 + 2 4 5 400.00 0.01 + 3 5 J1 400.00 0.01 + 4 3 4 400.00 0.01 + 5 2 5 400.00 0.01 + """ + if self._conduits_df is None: + self._conduits_df = create_dataframeINP(self.path, "[CONDUITS]", comment_cols=False) + return self._conduits_df + + @conduits.setter + def conduits(self, df): + """Set inp.conduits DataFrame.""" + self._conduits_df = df + + @property + def junctions(self): + """ + Get/set junctions section of the INP file. 
+ + :return: junctions section of the INP file + :rtype: pandas.DataFrame + + Examples: + + >>> import swmmio + >>> from swmmio.tests.data import MODEL_FULL_FEATURES__NET_PATH + >>> model = swmmio.Model(MODEL_FULL_FEATURES__NET_PATH) + >>> model.inp.junctions + InvertElev MaxDepth InitDepth SurchargeDepth PondedArea + Name + J3 6.547 15 0 0 0 + 1 17.000 0 0 0 0 + 2 17.000 0 0 0 0 + 3 16.500 0 0 0 0 + 4 16.000 0 0 0 0 + 5 15.000 0 0 0 0 + J2 13.000 15 0 0 0 + """ + if self._junctions_df is None: + self._junctions_df = create_dataframeINP(self.path, "[JUNCTIONS]", comment_cols=False) + return self._junctions_df + + @junctions.setter + def junctions(self, df): + """Set inp.junctions DataFrame.""" + self._junctions_df = df + + @property + def outfalls(self): + """ + Get/set outfalls section of the INP file. + + :return: outfalls section of the INP file + :rtype: pandas.DataFrame + + Examples: + + >>> import swmmio + >>> from swmmio.tests.data import MODEL_FULL_FEATURES__NET_PATH + >>> model = swmmio.Model(MODEL_FULL_FEATURES__NET_PATH) + >>> model.inp.outfalls + InvertElev OutfallType StageOrTimeseries TideGate + Name + J4 0 FREE NO NaN + """ + if self._outfalls_df is None: + self._outfalls_df = create_dataframeINP(self.path, "[OUTFALLS]", comment_cols=False) + return self._outfalls_df + + @outfalls.setter + def outfalls(self, df): + """Set inp.outfalls DataFrame.""" + self._outfalls_df = df diff --git a/swmmio/defs/section_headers.json b/swmmio/defs/section_headers.json new file mode 100644 index 0000000..0b66b14 --- /dev/null +++ b/swmmio/defs/section_headers.json @@ -0,0 +1,5 @@ +{ + "junctions": ["Name", "Elevation", "MaxDepth", "InitDepth", "SurDepth", "Aponded"], + "outfalls": ["Name", "Elevation", "Type", "Stage Data", "Gated", "Route To"], + "conduits": ["Name", "InletNode", "OutletNode", "Length", "ManningN", "InletOffset", "OutletOffset", "InitFlow", "MaxFlow"] +} \ No newline at end of file diff --git a/swmmio/elements.py b/swmmio/elements.py new file mode 100644 index 0000000..8269e9d --- /dev/null +++ b/swmmio/elements.py @@ -0,0 +1,74 @@ +""" +Objects encapsulating model elements +""" +import swmmio +from swmmio.utils.dataframes import create_dataframeINP, create_dataframeRPT, get_link_coords +from swmmio.tests.data import MODEL_FULL_FEATURES__NET_PATH +import pandas as pd + + +class ModelSection(object): + def __init__(self, model, section_name): + """ + Base class of a group of model elements. + :param model: swmmio.Model object + :param section_name: name of section of model + """ + self.model = model + self.section_name = section_name + + # def to_geojson(self, target_path=None): + # """ + # Return a GeoJSON representation of the group + # :param target_path: target of GeoJSON representation of elements + # :return: GeoJSON representation of elements + # """ + + def __call__(self, data=None): + + """ + collect all useful and available data related to the conduits and + organize in one dataframe. 
+ >>> model = swmmio.Model(MODEL_FULL_FEATURES__NET_PATH) + >>> conduits_section = ModelSection(model, 'conduits') + >>> conduits_section() + """ + + # parse out the main objects of this model + inp = self.model.inp + rpt = self.model.rpt + + # create dataframes of relevant sections from the INP + conduits_df = create_dataframeINP(inp.path, "[CONDUITS]", comment_cols=False) + xsections_df = create_dataframeINP(inp.path, "[XSECTIONS]", comment_cols=False) + conduits_df = conduits_df.join(xsections_df) + coords_df = create_dataframeINP(inp.path, "[COORDINATES]") # .drop_duplicates() + + if rpt: + # create a dictionary holding data from an rpt file, if provided + link_flow_df = create_dataframeRPT(rpt.path, "Link Flow Summary") + conduits_df = conduits_df.join(link_flow_df) + + # add conduit coordinates + # the xys.map() junk is to unpack a nested list + verts = create_dataframeINP(inp.path, '[VERTICES]') + xys = conduits_df.apply(lambda r: get_link_coords(r, coords_df, verts), axis=1) + df = conduits_df.assign(coords=xys.map(lambda x: x[0])) + + # add conduit up/down inverts and calculate slope + elevs = self.model.nodes()[['InvertElev']] + df = pd.merge(df, elevs, left_on='InletNode', right_index=True, how='left') + df = df.rename(index=str, columns={"InvertElev": "InletNodeInvert"}) + df = pd.merge(df, elevs, left_on='OutletNode', right_index=True, how='left') + df = df.rename(index=str, columns={"InvertElev": "OutletNodeInvert"}) + df['UpstreamInvert'] = df.InletNodeInvert + df.InletOffset + df['DownstreamInvert'] = df.OutletNodeInvert + df.OutletOffset + df['SlopeFtPerFt'] = (df.UpstreamInvert - df.DownstreamInvert) / df.Length + + df.InletNode = df.InletNode.astype(str) + df.OutletNode = df.OutletNode.astype(str) + + self.model._conduits_df = df + + return df + diff --git a/swmmio/swmmio.py b/swmmio/swmmio.py deleted file mode 100644 index c41d3b9..0000000 --- a/swmmio/swmmio.py +++ /dev/null @@ -1,609 +0,0 @@ -#!/usr/bin/env python -#coding:utf-8 -import re -import os -from time import ctime -import pandas as pd -import glob -import math -from .utils import spatial -from .utils import functions -from .utils import text as txt -from .utils.dataframes import create_dataframeINP, create_dataframeRPT, get_link_coords -from .defs.config import * -import warnings - -class Model(object): - - def __init__(self, in_file_path): - - """ - Class representing a complete SWMM model incorporating its INP and RPT - files and data - - initialize a swmmio.Model object by pointing it to a directory containing - a single INP (and optionally an RPT file with matching filename) or by - pointing it directly to an .inp file. - """ - - inp_path = None - if os.path.isdir(in_file_path): - #a directory was passed in - inps_in_dir = glob.glob1(in_file_path, "*.inp") - if len(inps_in_dir) == 1: - #there is only one INP in this directory -> good. 
- inp_path = os.path.join(in_file_path, inps_in_dir[0]) - - elif os.path.splitext(in_file_path)[1] == '.inp': - #an inp was passed in - inp_path = in_file_path - - if inp_path: - wd = os.path.dirname(inp_path) #working dir - name = os.path.splitext(os.path.basename(inp_path))[0] - self.name = name - self.inp = inp(inp_path) #inp object - self.rpt = None #until we can confirm it initializes properly - self.bbox = None #to remember how the model data was clipped - self.scenario = '' #self._get_scenario() - - #try to initialize a companion RPT object - rpt_path = os.path.join(wd, name + '.rpt') - if os.path.exists(rpt_path): - try: - self.rpt = rpt(rpt_path) - except: - print('{}.rpt failed to initialize'.format(name)) - - self._nodes_df = None - self._conduits_df = None - self._orifices_df = None - self._weirs_df = None - self._pumps_df = None - self._subcatchments_df = None - self._network = None - - def rpt_is_valid(self , verbose=False): - """ - Return true if the .rpt file exists and has a revision date more - recent than the .inp file. If the inp has an modified date later than - the rpt, assume that the rpt should be regenerated - """ - - if self.rpt is None: - if verbose: - print('{} does not have an rpt file'.format(self.name)) - return False - - #check if the rpt has ERRORS output from SWMM - with open (self.rpt.path) as f: - #jump to 500 bytes before the end of file - f.seek(self.rpt.file_size - 500) - for line in f: - spl = line.split() - if len(spl) > 0 and spl[0]=='ERROR': - #return false at first "ERROR" occurence - return False - - rpt_mod_time = os.path.getmtime(self.rpt.path) - inp_mod_time = os.path.getmtime(self.inp.path) - - if verbose: - print("{}.rpt: modified {}".format(self.name, ctime(rpt_mod_time))) - print("{}.inp: modified {}".format(self.name, ctime(inp_mod_time))) - - if inp_mod_time > rpt_mod_time: - #inp datetime modified greater than rpt datetime modified - return False - else: - return True - - def to_map(self, filename=None, inproj='epsg:2272'): - ''' - To be removed in v0.4.0. Use swmmio.reporting.visualize.create_map() - ''' - def wrn(): - w = '''to_map is no longer supported! Use - swmmio.reporting.visualize.create_map() instead''' - warnings.warn(w, DeprecationWarning) - - with warnings.catch_warnings(): - warnings.simplefilter("always") - wrn() - - # def conduits(self): - # return self.conduits - - # @property - def conduits(self): - - """ - collect all useful and available data related model conduits and - organize in one dataframe. 
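# A minimal usage sketch of the conduits() accessor documented above, assuming
# the bundled test model referenced throughout this patch series; the frame is
# the join of the [CONDUITS] and [XSECTIONS] sections built below, with
# "Link Flow Summary" results appended when a matching .rpt file is found.
import swmmio
from swmmio.tests.data import MODEL_FULL_FEATURES__NET_PATH

m = swmmio.Model(MODEL_FULL_FEATURES__NET_PATH)
conduits = m.conduits()                      # one row per conduit, indexed by Name
print(conduits[['Length', 'Geom1', 'SlopeFtPerFt']].head())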
- """ - - #check if this has been done already and return that data accordingly - if self._conduits_df is not None: - return self._conduits_df - - #parse out the main objects of this model - inp = self.inp - rpt = self.rpt - - #create dataframes of relevant sections from the INP - conduits_df = create_dataframeINP(inp.path, "[CONDUITS]", comment_cols=False) - xsections_df = create_dataframeINP(inp.path, "[XSECTIONS]", comment_cols=False) - conduits_df = conduits_df.join(xsections_df) - coords_df = create_dataframeINP(inp.path, "[COORDINATES]")#.drop_duplicates() - - if rpt: - #create a dictionary holding data from an rpt file, if provided - link_flow_df = create_dataframeRPT(rpt.path, "Link Flow Summary") - conduits_df = conduits_df.join(link_flow_df) - - #add conduit coordinates - #the xys.map() junk is to unpack a nested list - verts = create_dataframeINP(inp.path, '[VERTICES]') - xys = conduits_df.apply(lambda r: get_link_coords(r,coords_df,verts), axis=1) - df = conduits_df.assign(coords=xys.map(lambda x: x[0])) - - #add conduit up/down inverts and calculate slope - elevs = self.nodes()[['InvertElev']] - df = pd.merge(df, elevs, left_on='InletNode', right_index=True, how='left') - df = df.rename(index=str, columns={"InvertElev": "InletNodeInvert"}) - df = pd.merge(df, elevs, left_on='OutletNode', right_index=True, how='left') - df = df.rename(index=str, columns={"InvertElev": "OutletNodeInvert"}) - df['UpstreamInvert'] = df.InletNodeInvert + df.InletOffset - df['DownstreamInvert'] = df.OutletNodeInvert + df.OutletOffset - df['SlopeFtPerFt'] = (df.UpstreamInvert - df.DownstreamInvert) / df.Length - - df.InletNode = df.InletNode.astype(str) - df.OutletNode = df.OutletNode.astype(str) - - self._conduits_df = df - - return df - - def orifices(self): - - """ - collect all useful and available data related model orifices and - organize in one dataframe. - """ - - #check if this has been done already and return that data accordingly - if self._orifices_df is not None: - return self._orifices_df - - #parse out the main objects of this model - inp = self.inp - rpt = self.rpt - - #create dataframes of relevant sections from the INP - orifices_df = create_dataframeINP(inp.path, "[ORIFICES]", comment_cols=False) - if orifices_df.empty: - return pd.DataFrame() - - coords_df = create_dataframeINP(inp.path, "[COORDINATES]") - - #add conduit coordinates - verts = create_dataframeINP(inp.path, '[VERTICES]') - xys = orifices_df.apply(lambda r: get_link_coords(r,coords_df,verts), axis=1) - df = orifices_df.assign(coords=xys.map(lambda x: x[0])) - df.InletNode = df.InletNode.astype(str) - df.OutletNode = df.OutletNode.astype(str) - self._orifices_df = df - - return df - - def weirs(self): - - """ - collect all useful and available data related model weirs and - organize in one dataframe. 
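# A short sketch of the weirs()/orifices() accessors, assuming the same test
# model; both fall back to an empty DataFrame when the corresponding INP
# section is absent, so an emptiness check is the expected guard before using
# the columns selected below.
import swmmio
from swmmio.tests.data import MODEL_FULL_FEATURES__NET_PATH

m = swmmio.Model(MODEL_FULL_FEATURES__NET_PATH)
weirs = m.weirs()
if weirs.empty:
    print('no [WEIRS] section in this INP')
else:
    print(weirs[['InletNode', 'OutletNode', 'CrestHeight']])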
- """ - - #check if this has been done already and return that data accordingly - if self._weirs_df is not None: - return self._weirs_df - - #parse out the main objects of this model - inp = self.inp - rpt = self.rpt - - #create dataframes of relevant sections from the INP - weirs_df = create_dataframeINP(inp.path, "[WEIRS]") - if weirs_df.empty: - return pd.DataFrame() - - weirs_df = weirs_df[['InletNode', 'OutletNode', 'WeirType', 'CrestHeight']] - coords_df = create_dataframeINP(inp.path, "[COORDINATES]")#.drop_duplicates() - - #add conduit coordinates - #the xys.map() junk is to unpack a nested list - verts = create_dataframeINP(inp.path, '[VERTICES]') - xys = weirs_df.apply(lambda r: get_link_coords(r,coords_df,verts), axis=1) - df = weirs_df.assign(coords=xys.map(lambda x: x[0])) - df.InletNode = df.InletNode.astype(str) - df.OutletNode = df.OutletNode.astype(str) - - self._weirs_df = df - - return df - - def pumps(self): - - """ - collect all useful and available data related model pumps and - organize in one dataframe. - """ - - #check if this has been done already and return that data accordingly - if self._pumps_df is not None: - return self._pumps_df - - #parse out the main objects of this model - inp = self.inp - rpt = self.rpt - - #create dataframes of relevant sections from the INP - pumps_df = create_dataframeINP(inp.path, "[PUMPS]", comment_cols=False) - if pumps_df.empty: - return pd.DataFrame() - - coords_df = create_dataframeINP(inp.path, "[COORDINATES]")#.drop_duplicates() - - #add conduit coordinates - verts = create_dataframeINP(inp.path, '[VERTICES]') - xys = pumps_df.apply(lambda r: get_link_coords(r,coords_df,verts), axis=1) - df = pumps_df.assign(coords=xys.map(lambda x: x[0])) - df.InletNode = df.InletNode.astype(str) - df.OutletNode = df.OutletNode.astype(str) - - self._pumps_df = df - - return df - - def nodes(self, bbox=None, subset=None): - - """ - collect all useful and available data related model nodes and organize - in one dataframe. 
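# Hedged sketch of nodes(): junctions, outfalls and storage units are
# concatenated into one frame indexed by node name, with depth/flooding
# summaries joined in when an RPT is available. Node 'J2' and the column
# names come from the test network used elsewhere in this series.
import swmmio
from swmmio.tests.data import MODEL_FULL_FEATURES__NET_PATH

m = swmmio.Model(MODEL_FULL_FEATURES__NET_PATH)
nodes = m.nodes()
print(nodes.loc['J2', 'InvertElev'])         # 13.0 in the test network
print(nodes[['MaxDepth', 'PondedArea']].head())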
- """ - - #check if this has been done already and return that data accordingly - if self._nodes_df is not None and bbox==self.bbox: - return self._nodes_df - - #parse out the main objects of this model - inp = self.inp - rpt = self.rpt - - #create dataframes of relevant sections from the INP - juncs_df = create_dataframeINP(inp.path, "[JUNCTIONS]") - outfalls_df = create_dataframeINP(inp.path, "[OUTFALLS]") - storage_df = create_dataframeINP(inp.path, "[STORAGE]") - coords_df = create_dataframeINP(inp.path, "[COORDINATES]") - - #concatenate the DFs and keep only relevant cols - all_nodes = pd.concat([juncs_df, outfalls_df, storage_df]) - cols =['InvertElev', 'MaxDepth', 'SurchargeDepth', 'PondedArea'] - all_nodes = all_nodes[cols] - - if rpt: - #add results data if a rpt file was found - depth_summ = create_dataframeRPT(rpt.path, "Node Depth Summary") - flood_summ = create_dataframeRPT(rpt.path, "Node Flooding Summary") - - #join the rpt data (index on depth df, suffixes for common cols) - rpt_df = depth_summ.join(flood_summ,lsuffix='_depth',rsuffix='_flood') - all_nodes = all_nodes.join(rpt_df) #join to the all_nodes df - - all_nodes = all_nodes.join(coords_df[['X', 'Y']]) - def nodexy(row): - if math.isnan(row.X) or math.isnan(row.Y): - return None - else: - return [(row.X, row.Y)] - - xys = all_nodes.apply(lambda r: nodexy(r), axis=1) - all_nodes = all_nodes.assign(coords = xys) - all_nodes = all_nodes.rename(index=str) - self._nodes_df = all_nodes - - return all_nodes - - def subcatchments(self): - """ - collect all useful and available data related subcatchments and organize - in one dataframe. - """ - subs = create_dataframeINP(self.inp.path, "[SUBCATCHMENTS]") - subs = subs.drop([';', 'Comment', 'Origin'], axis=1) - - if self.rpt: - flw = create_dataframeRPT(self.rpt.path, 'Subcatchment Runoff Summary') - subs = subs.join(flw) - - #more accurate runoff calculations - subs['RunoffAcFt'] = subs.TotalRunoffIn/ 12.0 * subs.Area - subs['RunoffMGAccurate'] = subs.RunoffAcFt / 3.06888785 - - self._subcatchments_df = subs - - return subs - - - def node(self, node, conduit=None): - ''' - To be removed in v0.4.0 - ''' - def wrn(): - w = "Depreciated. Use model.nodes().loc['{}'] instead".format(node) - warnings.warn(w, DeprecationWarning) - return self.nodes().loc[node] - - with warnings.catch_warnings(): - warnings.simplefilter("always") - wrn() - - @property - def network(self): - ''' - Networkx MultiDiGraph representation of the model - ''' - if self._network is None: - G = functions.model_to_networkx(self, drop_cycles=False) - self._network = G - - return self._network - - - def export_to_shapefile(self, shpdir, prj=None): - """ - export the model data into a shapefile. element_type dictates which type - of data will be included. 
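# Usage sketch of the shapefile export described above; the output folder is
# hypothetical, and when no .prj is passed the packaged default projection is
# copied next to the generated <name>_conduits.shp and <name>_nodes.shp files.
import os
import swmmio
from swmmio.tests.data import MODEL_FULL_FEATURES__NET_PATH

m = swmmio.Model(MODEL_FULL_FEATURES__NET_PATH)
os.makedirs('shp_out', exist_ok=True)        # hypothetical target directory
m.export_to_shapefile('shp_out')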
- - default projection is PA State Plane - untested on other cases - """ - - #CREATE THE CONDUIT shp - conds = self.conduits() - conds_path = os.path.join(shpdir, self.inp.name + '_conduits.shp') - spatial.write_shapefile(conds, conds_path, prj=prj) - - #CREATE THE NODE shp - nodes = self.nodes() - nodes_path = os.path.join(shpdir, self.inp.name + '_nodes.shp') - spatial.write_shapefile(nodes, nodes_path, geomtype='point', prj=prj) - - -class SWMMIOFile(object): - - defaultSection = "Link Flow Summary" - - def __init__(self, file_path): - - #file name and path variables - self.path = file_path - self.name = os.path.splitext(os.path.basename(file_path))[0] - self.dir = os.path.dirname(file_path) - self.file_size = os.path.getsize(file_path) - - - def findByteRangeOfSection(self, startStr): - ''' - returns the start and end "byte" location of substrings in a text file - ''' - - with open(self.path) as f: - start = None - end = None - l = 0 #line bytes index - for line in f: - - if start and line.strip() == "" and (l - start) > 100: - # LOGIC: if start exists (was found) and the current line - # length is 3 or less (length of /n ) and we're more than - # 100 bytes from the start location then we are at the first - # "blank" line after our start section (aka the end of the - # section) - end = l - break - - if (startStr in line) and (not start): - start = l - - #increment length (bytes?) of current position - l += len(line) + len("\n") - - return [start, end] - - -class rpt(SWMMIOFile): - - ''' - An accessible SWMM .rpt object - ''' - def __init__(self, filePath): - - SWMMIOFile.__init__(self, filePath) - - with open (filePath) as f: - for line in f: - if "Starting Date" in line: - simulationStart = line.split(".. ")[1].replace("\n", "") - if "Ending Date" in line: - simulationEnd = line.split(".. ")[1].replace("\n", "") - if "Report Time Step ........." in line: - timeStepMin = int(line.split(":")[1].replace("\n", "")) - break - - self.simulationStart = simulationStart - self.simulationEnd = simulationEnd - self.timeStepMin = timeStepMin - - #grab the date of analysis - with open (filePath) as f: - f.seek(self.file_size - 500) #jump to 500 bytes before the end of file - for line in f: - if "Analysis begun on" in line: - date = line.split("Analysis begun on: ")[1].replace("\n", "") - - self.dateOfAnalysis = date - self.elementByteLocations = {"Link Results":{}, "Node Results":{}} - - - def returnDataAtDTime(self, id, dtime, sectionTitle="Link Results", startByte=0): - ''' - return data from time series in RPT file - ''' - - byteLocDict = self.elementByteLocations[sectionTitle] - if byteLocDict: - startByte = byteLocDict[id] - - elif startByte == 0: - startByte = self.findByteRangeOfSection(sectionTitle)[0] - print('startByte ' + str(startByte)) - - with open(self.path) as f: - - f.seek(startByte) #jump to general area of file if we know it - subsectionFound = False - - for line in f: - if id in line: subsectionFound = True - - if subsectionFound and dtime in line: - line = ' '.join(re.findall('\"[^\"]*\"|\S+', line)) - rowdata = line.replace("\n", "").split(" ") - return rowdata - -class inp(SWMMIOFile): - - #creates an accessible SWMM .inp object - #make sure INP has been saved in the GUI before using this - - def __init__(self, filePath): - self._conduits_df = None - self._junctions_df = None - self._outfalls_df = None - #is this class necessary anymore? 
- SWMMIOFile.__init__(self, filePath) #run the superclass init - - self._sections = [self._conduits_df, self._junctions_df, self._outfalls_df] - - def save(self, target_path=None): - ''' - Save the inp file to disk. File will be overwritten unless a target_path - is provided - ''' - from swmmio.utils.modify_model import replace_inp_section - target_path = target_path if target_path is not None else self.path - - for section in self._sections: - if section is not None: - replace_inp_section() - - @property - def conduits(self): - """ - Get/set conduits section of the INP file. - - :return: Conduits section of the INP file - :rtype: pandas.DataFrame - - Examples: - - >>> import swmmio - >>> from swmmio.tests.data import MODEL_FULL_FEATURES__NET_PATH - >>> model = swmmio.Model(MODEL_FULL_FEATURES__NET_PATH) - >>> model.conduits - ... - ... InletNode OutletNode Length ManningN InletOffset OutletOffset \ - ... Name - ... C1:C2 J1 J2 244.63 0.01 0 0 - ... C2.1 J2 J3 666.00 0.01 0 0 - ... 1 1 4 400.00 0.01 0 0 - ... 2 4 5 400.00 0.01 0 0 - ... 3 5 J1 400.00 0.01 0 0 - ... 4 3 4 400.00 0.01 0 0 - ... 5 2 5 400.00 0.01 0 0 - ... InitFlow MaxFlow - ... Name - ... C1:C2 0 0 - ... C2.1 0 0 - ... 1 0 0 - ... 2 0 0 - ... 3 0 0 - ... 4 0 0 - ... 5 0 0 - """ - if self._conduits_df is None: - self._conduits_df = create_dataframeINP(self.path, "[CONDUITS]", comment_cols=False) - return self._conduits_df - @conduits.setter - def conduits(self, df): - """Set inp.conduits DataFrame.""" - self._conduits_df = df - - @property - def junctions(self): - """ - Get/set junctions section of the INP file. - - :return: junctions section of the INP file - :rtype: pandas.DataFrame - - Examples: - - >>> import swmmio - >>> from swmmio.tests.data import MODEL_FULL_FEATURES__NET_PATH - >>> model = swmmio.Model(MODEL_FULL_FEATURES__NET_PATH) - >>> model.junctions - ... - ... InvertElev MaxDepth InitDepth SurchargeDepth PondedArea - Name - J1 20.728 15 0 0 0 - J3 6.547 15 0 0 0 - 1 0.000 0 0 0 0 - 2 0.000 0 0 0 0 - 3 0.000 0 0 0 0 - 4 0.000 0 0 0 0 - 5 0.000 0 0 0 0 - """ - if self._junctions_df is None: - self._junctions_df = create_dataframeINP(self.path, "[JUNCTIONS]", comment_cols=False) - return self._junctions_df - @junctions.setter - def junctions(self, df): - """Set inp.junctions DataFrame.""" - self._junctions_df = df - - @property - def outfalls(self): - """ - Get/set outfalls section of the INP file. - - :return: outfalls section of the INP file - :rtype: pandas.DataFrame - - Examples: - - >>> import swmmio - >>> from swmmio.tests.data import MODEL_FULL_FEATURES__NET_PATH - >>> model = swmmio.Model(MODEL_FULL_FEATURES__NET_PATH) - >>> model.outfalls - ... 
- InvertElev OutfallType StageOrTimeseries TideGate - Name - J4 0 FREE NO NaN - """ - if self._outfalls_df is None: - self._outfalls_df = create_dataframeINP(self.path, "[OUTFALLS]", comment_cols=False) - return self._outfalls_df - @outfalls.setter - def outfalls(self, df): - """Set inp.outfalls DataFrame.""" - self._outfalls_df = df diff --git a/swmmio/tests/test_dataframes.py b/swmmio/tests/test_dataframes.py index 2db2216..0922e79 100644 --- a/swmmio/tests/test_dataframes.py +++ b/swmmio/tests/test_dataframes.py @@ -20,18 +20,17 @@ def test_create_dataframeRPT(): assert(depth_summ.loc['J3', 'MaxNodeDepth'] == 1.64) assert(depth_summ.loc['4', 'MaxNodeDepth'] == 0.87) - # need to ensure indicies are strings always + # need to ensure indices are strings always assert(flood_summ.loc[5, 'TotalFloodVol'] == 0) def test_conduits_dataframe(): - m = swmmio.Model(MODEL_FULL_FEATURES_PATH) - conduits = m.conduits() + conduits = 2 assert(list(conduits.index) == ['C1:C2']) -def test_nodes_dataframe(): +def test_nodes_dataframe(): m = swmmio.Model(MODEL_XSECTION_ALT_01) nodes = m.nodes() @@ -45,8 +44,8 @@ def test_nodes_dataframe(): assert(nodes.loc['dummy_node4', 'MaxDepth'] == 12.59314) assert(nodes.loc['dummy_node5', 'PondedArea'] == 73511) -def test_model_to_networkx(): +def test_model_to_networkx(): m = swmmio.Model(MODEL_FULL_FEATURES__NET_PATH) G = m.network diff --git a/swmmio/tests/test_model_elements.py b/swmmio/tests/test_model_elements.py new file mode 100644 index 0000000..df43110 --- /dev/null +++ b/swmmio/tests/test_model_elements.py @@ -0,0 +1,33 @@ +from swmmio.tests.data import MODEL_FULL_FEATURES__NET_PATH +from swmmio import Model +from swmmio.elements import ModelSection +from swmmio.utils import functions +import pytest + + +@pytest.fixture +def test_model(): + return Model(MODEL_FULL_FEATURES__NET_PATH) + + +def test_model_section(test_model): + group = ModelSection(test_model, 'junctions') + print(group) + + bayside = Model(MODEL_FULL_FEATURES__NET_PATH) + + a = bayside.inp.junctions[1] + # print(a) + tsb_ids = [1213, 13131, 232131, 12313] + tsbs = bayside.conduits(data=['MaxDepth, MaxQ', 'geometry']) + + +def test_complete_headers(test_model): + headers = functions.complete_inp_headers(test_model.inp.path) + sections_in_inp = [ + '[TITLE]', '[OPTIONS]', '[EVAPORATION]', '[RAINGAGES]', '[SUBCATCHMENTS]', '[SUBAREAS]', '[INFILTRATION]', + '[JUNCTIONS]', '[OUTFALLS]', '[STORAGE]', '[CONDUITS]', '[PUMPS]', '[WEIRS]', '[XSECTIONS]', '[INFLOWS]', + '[CURVES]', '[TIMESERIES]', '[REPORT]', '[TAGS]', '[MAP]', '[COORDINATES]', '[VERTICES]', '[Polygons]', + '[SYMBOLS]' + ] + assert (all(section in headers['headers'] for section in sections_in_inp)) diff --git a/swmmio/utils/dataframes.py b/swmmio/utils/dataframes.py index 61a381f..eaf2441 100644 --- a/swmmio/utils/dataframes.py +++ b/swmmio/utils/dataframes.py @@ -3,6 +3,7 @@ import pandas as pd import os + def create_dataframeBI(bi_path, section='[CONDUITS]'): """ given a path to a biuld instructions file, create a dataframe of data in a @@ -20,6 +21,7 @@ def create_dataframeBI(bi_path, section='[CONDUITS]'): return df + def create_dataframeINP(inp_path, section='[CONDUITS]', ignore_comments=True, comment_str=';', comment_cols=True): """ @@ -66,6 +68,7 @@ def create_dataframeINP(inp_path, section='[CONDUITS]', ignore_comments=True, return df.rename(index=str) + def get_link_coords(row, nodexys, verticies): """for use in an df.apply, to get coordinates of a conduit/link """ @@ -96,6 +99,7 @@ def get_link_coords(row, nodexys, verticies): 
return [res] #nest in a list to force a series to be returned in a df.apply + def create_dataframeRPT(rpt_path, section='Link Flow Summary', element_id=None): """ given a path to an RPT file, create a dataframe of data in the given diff --git a/swmmio/utils/functions.py b/swmmio/utils/functions.py index 63bd7fd..ef0fb6f 100644 --- a/swmmio/utils/functions.py +++ b/swmmio/utils/functions.py @@ -2,6 +2,7 @@ from collections import deque import pandas as pd + def random_alphanumeric(n=6): import random chars = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz' @@ -75,6 +76,8 @@ def multidigraph_from_edges(edges, source, target): return G + +# Todo: use an OrderedDict instead of a dict and a "order" list def complete_inp_headers (inpfilepath): """ creates a dictionary with all the headers found in an INP file @@ -88,7 +91,7 @@ def complete_inp_headers (inpfilepath): header section keys and their respective cleaned column headers 'order' -> an array of section headers found in the INP file - that perserves the original order + that preserves the original order """ foundheaders= {} order = [] @@ -105,6 +108,7 @@ def complete_inp_headers (inpfilepath): return {'headers':foundheaders, 'order':order} + def complete_rpt_headers (rptfilepath): """ creates a dictionary with all the headers found in an RPT file @@ -146,6 +150,7 @@ def complete_rpt_headers (rptfilepath): return {'headers':foundheaders, 'order':order} + def merge_dicts(*dict_args): ''' Given any number of dicts, shallow copy and merge into a new dict, @@ -157,10 +162,11 @@ def merge_dicts(*dict_args): result.update(dictionary) return result + def trace_from_node(conduits, startnode, mode='up', stopnode=None): """ - trace up and down a SWMM model given a start node and optionally an + trace up and down a SWMM model given a start node and optionally a stop node. 
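# Hedged sketch of trace_from_node() as documented above: given the conduits
# frame it walks the network from a start node (mode='up' by default),
# optionally stopping at stopnode. 'J2' is a node in the bundled test network;
# the exact structure of the returned trace is left to the implementation.
import swmmio
from swmmio.tests.data import MODEL_FULL_FEATURES__NET_PATH
from swmmio.utils.functions import trace_from_node

m = swmmio.Model(MODEL_FULL_FEATURES__NET_PATH)
upstream_trace = trace_from_node(m.conduits(), 'J2', mode='up')
print(upstream_trace)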
""" From a0f8b555ec074b7462e3338d93d453b7d12d6d30 Mon Sep 17 00:00:00 2001 From: Adam Erispaha Date: Sat, 9 Mar 2019 23:58:52 -0500 Subject: [PATCH 06/17] pep8 --- swmmio/core.py | 6 ++-- swmmio/tests/test_dataframes.py | 40 +++++++++++------------ swmmio/utils/dataframes.py | 58 ++++++++++++++++----------------- swmmio/utils/functions.py | 16 ++++----- 4 files changed, 60 insertions(+), 60 deletions(-) diff --git a/swmmio/core.py b/swmmio/core.py index a2da0cb..f0ccf6b 100644 --- a/swmmio/core.py +++ b/swmmio/core.py @@ -559,8 +559,8 @@ def junctions(self): >>> model = swmmio.Model(MODEL_FULL_FEATURES__NET_PATH) >>> model.inp.junctions InvertElev MaxDepth InitDepth SurchargeDepth PondedArea - Name - J3 6.547 15 0 0 0 + Name + J3 6.547 15 0 0 0 1 17.000 0 0 0 0 2 17.000 0 0 0 0 3 16.500 0 0 0 0 @@ -591,7 +591,7 @@ def outfalls(self): >>> from swmmio.tests.data import MODEL_FULL_FEATURES__NET_PATH >>> model = swmmio.Model(MODEL_FULL_FEATURES__NET_PATH) >>> model.inp.outfalls - InvertElev OutfallType StageOrTimeseries TideGate + InvertElev OutfallType StageOrTimeseries TideGate Name J4 0 FREE NO NaN """ diff --git a/swmmio/tests/test_dataframes.py b/swmmio/tests/test_dataframes.py index 0922e79..96a2a6e 100644 --- a/swmmio/tests/test_dataframes.py +++ b/swmmio/tests/test_dataframes.py @@ -10,45 +10,45 @@ def test_create_dataframeRPT(): flood_summ = swmmio.create_dataframeRPT(m.rpt.path, "Node Flooding Summary") inflo_summ = swmmio.create_dataframeRPT(m.rpt.path, "Node Inflow Summary") - print ('\n', depth_summ) - print (inflo_summ) - print (flood_summ) + print('\n', depth_summ) + print(inflo_summ) + print(flood_summ) - assert(inflo_summ.loc['J3', 'TotalInflowV'] == 6.1) - assert(inflo_summ.loc['J1', 'MaxTotalInflow'] == 3.52) + assert (inflo_summ.loc['J3', 'TotalInflowV'] == 6.1) + assert (inflo_summ.loc['J1', 'MaxTotalInflow'] == 3.52) - assert(depth_summ.loc['J3', 'MaxNodeDepth'] == 1.64) - assert(depth_summ.loc['4', 'MaxNodeDepth'] == 0.87) + assert (depth_summ.loc['J3', 'MaxNodeDepth'] == 1.64) + assert (depth_summ.loc['4', 'MaxNodeDepth'] == 0.87) # need to ensure indices are strings always - assert(flood_summ.loc[5, 'TotalFloodVol'] == 0) + assert (flood_summ.loc[5, 'TotalFloodVol'] == 0) def test_conduits_dataframe(): m = swmmio.Model(MODEL_FULL_FEATURES_PATH) conduits = 2 - assert(list(conduits.index) == ['C1:C2']) + assert (list(conduits.index) == ['C1:C2']) def test_nodes_dataframe(): m = swmmio.Model(MODEL_XSECTION_ALT_01) nodes = m.nodes() - node_ids_01 = ['dummy_node1','dummy_node2','dummy_node3','dummy_node4', - 'dummy_node5','dummy_node6','dummy_outfall'] + node_ids_01 = ['dummy_node1', 'dummy_node2', 'dummy_node3', 'dummy_node4', + 'dummy_node5', 'dummy_node6', 'dummy_outfall'] - assert(list(nodes.index) == node_ids_01) - assert(nodes.loc['dummy_node1', 'InvertElev'] == -10.99) - assert(nodes.loc['dummy_node2', 'MaxDepth'] == 20) - assert(nodes.loc['dummy_node3', 'X'] == -4205.457) - assert(nodes.loc['dummy_node4', 'MaxDepth'] == 12.59314) - assert(nodes.loc['dummy_node5', 'PondedArea'] == 73511) + assert (list(nodes.index) == node_ids_01) + assert (nodes.loc['dummy_node1', 'InvertElev'] == -10.99) + assert (nodes.loc['dummy_node2', 'MaxDepth'] == 20) + assert (nodes.loc['dummy_node3', 'X'] == -4205.457) + assert (nodes.loc['dummy_node4', 'MaxDepth'] == 12.59314) + assert (nodes.loc['dummy_node5', 'PondedArea'] == 73511) def test_model_to_networkx(): m = swmmio.Model(MODEL_FULL_FEATURES__NET_PATH) G = m.network - assert(G['J2']['J3']['C2.1']['Length'] == 666) - 
assert(G['J1']['J2']['C1:C2']['Length'] == 244.63) - assert(round(G.node['J2']['InvertElev'], 3) == 13.0) + assert (G['J2']['J3']['C2.1']['Length'] == 666) + assert (G['J1']['J2']['C1:C2']['Length'] == 244.63) + assert (round(G.node['J2']['InvertElev'], 3) == 13.0) diff --git a/swmmio/utils/dataframes.py b/swmmio/utils/dataframes.py index eaf2441..c9564e2 100644 --- a/swmmio/utils/dataframes.py +++ b/swmmio/utils/dataframes.py @@ -15,31 +15,31 @@ def create_dataframeBI(bi_path, section='[CONDUITS]'): headerdefs=headerdefs, skipheaders=True) df = pd.read_table(tempfilepath, header=None, delim_whitespace=True, - skiprows=[0], index_col=0, names = headerlist, comment=None) + skiprows=[0], index_col=0, names=headerlist, comment=None) - os.remove(tempfilepath) #clean up + os.remove(tempfilepath) # clean up return df def create_dataframeINP(inp_path, section='[CONDUITS]', ignore_comments=True, - comment_str=';', comment_cols=True): + comment_str=';', comment_cols=True): """ given a path to an INP file, create a dataframe of data in the given section. """ - #find all the headers and their defs (section title with cleaned one-liner column headers) + # find all the headers and their defs (section title with cleaned one-liner column headers) headerdefs = funcs.complete_inp_headers(inp_path) - #create temp file with section isolated from inp file + # create temp file with section isolated from inp file tempfilepath = txt.extract_section_from_inp(inp_path, section, headerdefs=headerdefs, ignore_comments=ignore_comments) if ignore_comments: comment_str = None if not tempfilepath: - #if this head (section) was not found in the textfile, return a - #blank dataframe with the appropriate schema + # if this head (section) was not found in the textfile, return a + # blank dataframe with the appropriate schema print('header "{}" not found in "{}"'.format(section, inp_path)) print('returning empty dataframe') headerlist = headerdefs['headers'].get(section, 'blob').split() + [';', 'Comment', 'Origin'] @@ -47,21 +47,21 @@ def create_dataframeINP(inp_path, section='[CONDUITS]', ignore_comments=True, return blank_df if headerdefs['headers'][section] == 'blob': - #return the whole row, without specifc col headers + # return the whole row, without specifc col headers df = pd.read_table(tempfilepath, delim_whitespace=False, comment=comment_str) - elif section == '[CURVES]' or section =='[TIMESERIES]': - #return the whole row, without specifc col headers - df = pd.read_table(tempfilepath, delim_whitespace=False)#, index_col=0)#, skiprows=[0]) + elif section == '[CURVES]' or section == '[TIMESERIES]': + # return the whole row, without specifc col headers + df = pd.read_table(tempfilepath, delim_whitespace=False) # , index_col=0)#, skiprows=[0]) else: - #this section header is recognized and will be organized into known columns + # this section header is recognized and will be organized into known columns headerlist = headerdefs['headers'][section].split() if comment_cols: headerlist = headerlist + [';', 'Comment', 'Origin'] df = pd.read_table(tempfilepath, header=None, delim_whitespace=True, skiprows=[0], - index_col=0, names = headerlist, comment=comment_str) + index_col=0, names=headerlist, comment=comment_str) if comment_cols: - #add new blank comment column after a semicolon column + # add new blank comment column after a semicolon column df[';'] = ';' os.remove(tempfilepath) @@ -72,9 +72,9 @@ def create_dataframeINP(inp_path, section='[CONDUITS]', ignore_comments=True, def get_link_coords(row, nodexys, verticies): 
"""for use in an df.apply, to get coordinates of a conduit/link """ - #cast IDs to string + # cast IDs to string inlet_id = str(row.InletNode) - outlet_id =str(row.OutletNode) + outlet_id = str(row.OutletNode) xys_str = nodexys.rename(index=str) x1 = round(xys_str.at[inlet_id, 'X'], 4) @@ -83,21 +83,21 @@ def get_link_coords(row, nodexys, verticies): y2 = round(xys_str.at[outlet_id, 'Y'], 4) if None in [x1, x2, y1, y2]: print(row.name, 'problem, no coords') - #grab any extra verts, place in between up/dwn nodes - res = [(x1, y1)] + # grab any extra verts, place in between up/dwn nodes + res = [(x1, y1)] if row.name in verticies.index: xs = verticies.loc[row.name, 'X'].tolist() ys = verticies.loc[row.name, 'Y'].tolist() if isinstance(xs, list) and isinstance(ys, list): - #if more than one vert for this link exists, arrays are returned - #from verticies.get_value(). it then needs to be zipped up + # if more than one vert for this link exists, arrays are returned + # from verticies.get_value(). it then needs to be zipped up res = res + list(zip(xs, ys)) else: res = res + [(xs, ys)] res = res + [(x2, y2)] - return [res] #nest in a list to force a series to be returned in a df.apply + return [res] # nest in a list to force a series to be returned in a df.apply def create_dataframeRPT(rpt_path, section='Link Flow Summary', element_id=None): @@ -106,9 +106,9 @@ def create_dataframeRPT(rpt_path, section='Link Flow Summary', element_id=None): section. """ - #find all the headers and their defs (section title with cleaned one-liner column headers) + # find all the headers and their defs (section title with cleaned one-liner column headers) headerdefs = funcs.complete_rpt_headers(rpt_path) - #create temp file with section isolated from rpt file + # create temp file with section isolated from rpt file tempfilepath = txt.extract_section_from_rpt(rpt_path, section, headerdefs=headerdefs, element_id=element_id) @@ -118,18 +118,18 @@ def create_dataframeRPT(rpt_path, section='Link Flow Summary', element_id=None): return None if headerdefs['headers'][section] == 'blob': - #return the whole row, without specifc col headers + # return the whole row, without specifc col headers df = pd.read_table(tempfilepath, delim_whitespace=False, comment=";") else: if element_id: - #we'retrying to pull a time series, parse the datetimes by - #concatenating the Date Time columns (cols 1,2) + # we'retrying to pull a time series, parse the datetimes by + # concatenating the Date Time columns (cols 1,2) df0 = pd.read_table(tempfilepath, delim_whitespace=True) - df = df0[df0.columns[2:]] #the data sans date time columns - df.index=pd.to_datetime(df0['Date'] + ' ' + df0['Time']) + df = df0[df0.columns[2:]] # the data sans date time columns + df.index = pd.to_datetime(df0['Date'] + ' ' + df0['Time']) df.index.name = "".join(df0.columns[:2]) else: - #this section header is recognized, will be organized into known cols + # this section header is recognized, will be organized into known cols df = pd.read_table(tempfilepath, delim_whitespace=True, index_col=0) os.remove(tempfilepath) diff --git a/swmmio/utils/functions.py b/swmmio/utils/functions.py index ef0fb6f..79c2351 100644 --- a/swmmio/utils/functions.py +++ b/swmmio/utils/functions.py @@ -78,7 +78,7 @@ def multidigraph_from_edges(edges, source, target): # Todo: use an OrderedDict instead of a dict and a "order" list -def complete_inp_headers (inpfilepath): +def complete_inp_headers(inpfilepath): """ creates a dictionary with all the headers found in an INP file (which varies 
based on what the user has defined in a given model) @@ -93,23 +93,23 @@ def complete_inp_headers (inpfilepath): an array of section headers found in the INP file that preserves the original order """ - foundheaders= {} + foundheaders = {} order = [] - #print inp_header_dict + # print inp_header_dict with open(inpfilepath) as f: for line in f: if '[' and ']' in line: h = line.strip() order.append(h) if h in inp_header_dict: - foundheaders.update({h:inp_header_dict[h]}) + foundheaders.update({h: inp_header_dict[h]}) else: - foundheaders.update({h:'blob'}) + foundheaders.update({h: 'blob'}) - return {'headers':foundheaders, 'order':order} + return {'headers': foundheaders, 'order': order} -def complete_rpt_headers (rptfilepath): +def complete_rpt_headers(rptfilepath): """ creates a dictionary with all the headers found in an RPT file (which varies based on what the user has defined in a given model) @@ -124,7 +124,7 @@ def complete_rpt_headers (rptfilepath): an array of section headers found in the RPT file that perserves the original order """ - foundheaders= {} + foundheaders = {} order = [] with open(rptfilepath) as f: buff3line = deque() From 06817faa1fd08b2f53f1c2e3f7daf3c7cc88f4c2 Mon Sep 17 00:00:00 2001 From: Adam Erispaha Date: Mon, 11 Mar 2019 23:06:04 -0400 Subject: [PATCH 07/17] draft set and change CRS of model --- swmmio/__init__.py | 1 + swmmio/core.py | 121 ++++++- swmmio/elements.py | 2 +- swmmio/tests/data/__init__.py | 3 + swmmio/tests/data/df_test_coordinates.csv | 5 + .../data/model_full_features_network_xy.inp | 309 ++++++++++++++++++ swmmio/tests/test_dataframes.py | 13 +- swmmio/tests/test_model_elements.py | 8 +- swmmio/utils/spatial.py | 118 +++++-- 9 files changed, 533 insertions(+), 47 deletions(-) create mode 100644 swmmio/tests/data/df_test_coordinates.csv create mode 100644 swmmio/tests/data/model_full_features_network_xy.inp diff --git a/swmmio/__init__.py b/swmmio/__init__.py index fc0d992..2d1f734 100644 --- a/swmmio/__init__.py +++ b/swmmio/__init__.py @@ -9,3 +9,4 @@ from .core import * from swmmio.utils.dataframes import create_dataframeBI, create_dataframeRPT, create_dataframeINP +from swmmio import * \ No newline at end of file diff --git a/swmmio/core.py b/swmmio/core.py index f0ccf6b..259d65f 100644 --- a/swmmio/core.py +++ b/swmmio/core.py @@ -9,9 +9,11 @@ from swmmio.utils import functions from swmmio.utils.dataframes import create_dataframeINP, create_dataframeRPT, get_link_coords from swmmio.defs.config import * +from swmmio.tests.data import MODEL_FULL_FEATURES__NET_PATH, MODEL_FULL_FEATURES_XY import warnings + class Model(object): def __init__(self, in_file_path): @@ -45,6 +47,7 @@ def __init__(self, in_file_path): self.rpt = None # until we can confirm it initializes properly self.bbox = None # to remember how the model data was clipped self.scenario = '' # self._get_scenario() + self.crs = None # coordinate reference system # try to initialize a companion RPT object rpt_path = os.path.join(wd, name + '.rpt') @@ -130,7 +133,7 @@ def conduits(self): conduits_df = create_dataframeINP(inp.path, "[CONDUITS]", comment_cols=False) xsections_df = create_dataframeINP(inp.path, "[XSECTIONS]", comment_cols=False) conduits_df = conduits_df.join(xsections_df) - coords_df = create_dataframeINP(inp.path, "[COORDINATES]") # .drop_duplicates() + coords_df = self.inp.coordinates if rpt: # create a dictionary holding data from an rpt file, if provided @@ -139,7 +142,7 @@ def conduits(self): # add conduit coordinates # the xys.map() junk is to unpack a nested 
list - verts = create_dataframeINP(inp.path, '[VERTICES]') + verts = self.inp.vertices xys = conduits_df.apply(lambda r: get_link_coords(r, coords_df, verts), axis=1) df = conduits_df.assign(coords=xys.map(lambda x: x[0])) @@ -180,10 +183,10 @@ def orifices(self): if orifices_df.empty: return pd.DataFrame() - coords_df = create_dataframeINP(inp.path, "[COORDINATES]") + coords_df = self.inp.coordinates # add conduit coordinates - verts = create_dataframeINP(inp.path, '[VERTICES]') + verts = self.inp.vertices xys = orifices_df.apply(lambda r: get_link_coords(r, coords_df, verts), axis=1) df = orifices_df.assign(coords=xys.map(lambda x: x[0])) df.InletNode = df.InletNode.astype(str) @@ -213,11 +216,11 @@ def weirs(self): return pd.DataFrame() weirs_df = weirs_df[['InletNode', 'OutletNode', 'WeirType', 'CrestHeight']] - coords_df = create_dataframeINP(inp.path, "[COORDINATES]") # .drop_duplicates() + coords_df = self.inp.coordinates # .drop_duplicates() # add conduit coordinates # the xys.map() junk is to unpack a nested list - verts = create_dataframeINP(inp.path, '[VERTICES]') + verts = self.inp.vertices xys = weirs_df.apply(lambda r: get_link_coords(r, coords_df, verts), axis=1) df = weirs_df.assign(coords=xys.map(lambda x: x[0])) df.InletNode = df.InletNode.astype(str) @@ -247,10 +250,10 @@ def pumps(self): if pumps_df.empty: return pd.DataFrame() - coords_df = create_dataframeINP(inp.path, "[COORDINATES]") # .drop_duplicates() + coords_df = self.inp.coordinates # add conduit coordinates - verts = create_dataframeINP(inp.path, '[VERTICES]') + verts = self.inp.vertices xys = pumps_df.apply(lambda r: get_link_coords(r, coords_df, verts), axis=1) df = pumps_df.assign(coords=xys.map(lambda x: x[0])) df.InletNode = df.InletNode.astype(str) @@ -279,7 +282,7 @@ def nodes(self, bbox=None, subset=None): juncs_df = create_dataframeINP(inp.path, "[JUNCTIONS]") outfalls_df = create_dataframeINP(inp.path, "[OUTFALLS]") storage_df = create_dataframeINP(inp.path, "[STORAGE]") - coords_df = create_dataframeINP(inp.path, "[COORDINATES]") + coords_df = self.inp.coordinates # concatenate the DFs and keep only relevant cols all_nodes = pd.concat([juncs_df, outfalls_df, storage_df]) @@ -317,6 +320,7 @@ def subcatchments(self): """ subs = create_dataframeINP(self.inp.path, "[SUBCATCHMENTS]") subs = subs.drop([';', 'Comment', 'Origin'], axis=1) + polygons_df = self.inp.polygons if self.rpt: flw = create_dataframeRPT(self.rpt.path, 'Subcatchment Runoff Summary') @@ -358,6 +362,51 @@ def network(self): return self._network + def to_crs(self, *args, **kwargs): + """ + Convert coordinate reference system of the model coordinates + :param target_crs: + :return: True + Example: + >>> import swmmio + >>> m = swmmio.Model(MODEL_FULL_FEATURES_XY) + >>> m.crs = "+init=EPSG:2272" + >>> m.to_crs("+init=EPSG:4326") # convert to WGS84 web mercator + >>> m.inp.coordinates + X Y + Name + J3 42.365958 -74.866424 + 1 42.368292 -74.870614 + 2 42.367916 -74.867615 + 3 42.368527 -74.869387 + 4 42.368089 -74.869024 + 5 42.367709 -74.868888 + J2 42.366748 -74.868458 + J4 42.365966 -74.864787 + J1 42.366968 -74.868861 + >>> m.inp.vertices + X Y + Name + C1:C2 42.366833 -74.868703 + C2.1 42.366271 -74.868034 + C2.1 42.365974 -74.867305 + """ + try: + import pyproj + except ImportError: + raise ImportError('pyproj module needed. 
get this package here: ', + 'https://pypi.python.org/pypi/pyproj') + + if self.crs is None: + raise AttributeError('CRS of model object not set') + + self.inp.coordinates = spatial.change_crs(self.inp.coordinates, self.crs, *args, **kwargs) + self.inp.polygons = spatial.change_crs(self.inp.polygons, self.crs, *args, **kwargs) + self.inp.vertices = spatial.change_crs(self.inp.vertices, self.crs, *args, **kwargs) + self.crs = args[0] + + + def to_geojson(self, target_path=None): """ Return a GeoJSON representation of the entire model @@ -494,10 +543,14 @@ def __init__(self, file_path): self._conduits_df = None self._junctions_df = None self._outfalls_df = None + self._coordinates_df = None + self._vertices_df = None + self._polygons_df = None SWMMIOFile.__init__(self, file_path) # run the superclass init - self._sections = [self._conduits_df, self._junctions_df, self._outfalls_df] + self._sections = [self._conduits_df, self._junctions_df, self._outfalls_df, + self._coordinates_df, self._vertices_df, self._polygons_df] def save(self, target_path=None): ''' @@ -603,3 +656,51 @@ def outfalls(self): def outfalls(self, df): """Set inp.outfalls DataFrame.""" self._outfalls_df = df + + @property + def coordinates(self): + """ + Get/set coordinates section of model + :return: dataframe of model coordinates + """ + if self._coordinates_df is not None: + return self._coordinates_df + self._coordinates_df = create_dataframeINP(self.path, "[COORDINATES]", comment_cols=False) + return self._coordinates_df + + @coordinates.setter + def coordinates(self, df): + """Set inp.coordinates DataFrame.""" + self._coordinates_df = df + + @property + def vertices(self): + """ + get/set vertices section of model + :return: dataframe of model coordinates + """ + if self._vertices_df is not None: + return self._vertices_df + self._vertices_df = create_dataframeINP(self.path, '[VERTICES]', comment_cols=False) + return self._vertices_df + + @vertices.setter + def vertices(self, df): + """Set inp.vertices DataFrame.""" + self._vertices_df = df + + @property + def polygons(self): + """ + get/set polygons section of model + :return: dataframe of model coordinates + """ + if self._polygons_df is not None: + return self._polygons_df + self._polygons_df = create_dataframeINP(self.path, '[Polygons]', comment_cols=False) + return self._polygons_df + + @polygons.setter + def polygons(self, df): + """Set inp.polygons DataFrame.""" + self._polygons_df = df diff --git a/swmmio/elements.py b/swmmio/elements.py index 8269e9d..a8004da 100644 --- a/swmmio/elements.py +++ b/swmmio/elements.py @@ -42,7 +42,7 @@ def __call__(self, data=None): conduits_df = create_dataframeINP(inp.path, "[CONDUITS]", comment_cols=False) xsections_df = create_dataframeINP(inp.path, "[XSECTIONS]", comment_cols=False) conduits_df = conduits_df.join(xsections_df) - coords_df = create_dataframeINP(inp.path, "[COORDINATES]") # .drop_duplicates() + coords_df = self.model._coordinates_df() if rpt: # create a dictionary holding data from an rpt file, if provided diff --git a/swmmio/tests/data/__init__.py b/swmmio/tests/data/__init__.py index d360bed..36d8384 100644 --- a/swmmio/tests/data/__init__.py +++ b/swmmio/tests/data/__init__.py @@ -14,6 +14,7 @@ # Test models paths MODEL_FULL_FEATURES_PATH = os.path.join(DATA_PATH, 'model_full_features.inp') +MODEL_FULL_FEATURES_XY = os.path.join(DATA_PATH, 'model_full_features_network_xy.inp') MODEL_FULL_FEATURES__NET_PATH = os.path.join(DATA_PATH, 'model_full_features_network.inp') MODEL_BROWARD_COUNTY_PATH = 
os.path.join(DATA_PATH, 'RUNOFF46_SW5.INP') @@ -22,3 +23,5 @@ MODEL_XSECTION_ALT_01 = os.path.join(DATA_PATH, 'alt_test1.inp') MODEL_XSECTION_ALT_02 = os.path.join(DATA_PATH, 'alt_test2.inp') MODEL_XSECTION_ALT_03 = os.path.join(DATA_PATH, 'alt_test3.inp') + +df_test_coordinates_csv = os.path.join(DATA_PATH, 'df_test_coordinates.csv') \ No newline at end of file diff --git a/swmmio/tests/data/df_test_coordinates.csv b/swmmio/tests/data/df_test_coordinates.csv new file mode 100644 index 0000000..e961cda --- /dev/null +++ b/swmmio/tests/data/df_test_coordinates.csv @@ -0,0 +1,5 @@ +Name,X,Y,;,Comment,Origin +J1,0.0,0.0,;,, +J3,459.05800000000005,-113.145,;,, +J4,671.3910000000001,-163.985,;,, +J2,238.75,-53.332,;,, diff --git a/swmmio/tests/data/model_full_features_network_xy.inp b/swmmio/tests/data/model_full_features_network_xy.inp new file mode 100644 index 0000000..e57b6cd --- /dev/null +++ b/swmmio/tests/data/model_full_features_network_xy.inp @@ -0,0 +1,309 @@ +[TITLE] +;;Project Title/Notes + +[OPTIONS] +;;Option Value +FLOW_UNITS CFS +INFILTRATION HORTON +FLOW_ROUTING DYNWAVE +LINK_OFFSETS DEPTH +MIN_SLOPE 0 +ALLOW_PONDING NO +SKIP_STEADY_STATE NO + +START_DATE 11/01/2015 +START_TIME 00:00:00 +REPORT_START_DATE 11/01/2015 +REPORT_START_TIME 00:00:00 +END_DATE 11/04/2015 +END_TIME 00:00:00 +SWEEP_START 01/01 +SWEEP_END 12/31 +DRY_DAYS 0 +REPORT_STEP 00:01:00 +WET_STEP 00:05:00 +DRY_STEP 00:05:00 +ROUTING_STEP 0:00:01 + +INERTIAL_DAMPING NONE +NORMAL_FLOW_LIMITED BOTH +FORCE_MAIN_EQUATION H-W +VARIABLE_STEP 0.75 +LENGTHENING_STEP 0 +MIN_SURFAREA 12.557 +MAX_TRIALS 8 +HEAD_TOLERANCE 0.005 +SYS_FLOW_TOL 5 +LAT_FLOW_TOL 5 +MINIMUM_STEP 0.5 +THREADS 1 + +[EVAPORATION] +;;Data Source Parameters +;;-------------- ---------------- +CONSTANT 0.0 +DRY_ONLY NO + +[RAINGAGES] +;;Name Format Interval SCF Source +;;-------------- --------- ------ ------ ---------- +SCS_24h_Type_I_1in INTENSITY 0:15 1.0 TIMESERIES SCS_24h_Type_I_1in + +[SUBCATCHMENTS] +;;Name Rain Gage Outlet Area %Imperv Width %Slope CurbLen SnowPack +;;-------------- ---------------- ---------------- -------- -------- -------- -------- -------- ---------------- +S1 SCS_24h_Type_I_1in J1 3 30 500 0.5 0 +S2 SCS_24h_Type_I_1in 2 2 100 500 0.5 0 +S3 SCS_24h_Type_I_1in j3 3 100 500 0.5 0 +S4 SCS_24h_Type_I_1in 1 20 25 500 0.5 0 + +[SUBAREAS] +;;Subcatchment N-Imperv N-Perv S-Imperv S-Perv PctZero RouteTo PctRouted +;;-------------- ---------- ---------- ---------- ---------- ---------- ---------- ---------- +S1 0.01 0.1 0.05 0.05 25 OUTLET +S2 0.01 0.1 0.05 0.05 25 OUTLET +S3 0.01 0.1 0.05 0.05 25 OUTLET +S4 0.01 0.1 0.05 0.05 25 OUTLET + +[INFILTRATION] +;;Subcatchment MaxRate MinRate Decay DryTime MaxInfil +;;-------------- ---------- ---------- ---------- ---------- ---------- +S1 3 0.5 4 7 0 +S2 3 0.5 4 7 0 +S3 3 0.5 4 7 0 +S4 3.0 0.5 4 7 0 + +[JUNCTIONS] +;;Name Elevation MaxDepth InitDepth SurDepth Aponded +;;-------------- ---------- ---------- ---------- ---------- ---------- +J3 6.547 15 0 0 0 +1 17 0 0 0 0 +2 17 0 0 0 0 +3 16.5 0 0 0 0 +4 16 0 0 0 0 +5 15 0 0 0 0 +J2 13.0 15 0 0 0 + +[OUTFALLS] +;;Name Elevation Type Stage Data Gated Route To +;;-------------- ---------- ---------- ---------------- -------- ---------------- +J4 0 FREE NO + +[STORAGE] +;;Name Elev. 
MaxDepth InitDepth Shape Curve Name/Params N/A Fevap Psi Ksat IMD +;;-------------- -------- ---------- ----------- ---------- ---------------------------- -------- -------- -------- -------- +J1 13.392 15 0 FUNCTIONAL 1000 0 0 0 0 + +[CONDUITS] +;;Name From Node To Node Length Roughness InOffset OutOffset InitFlow MaxFlow +;;-------------- ---------------- ---------------- ---------- ---------- ---------- ---------- ---------- ---------- +C1:C2 J1 J2 244.63 0.01 0 0 0 0 +C2.1 J2 J3 666 0.01 0 0 0 0 +1 1 4 400 0.01 0 0 0 0 +2 4 5 400 0.01 0 0 0 0 +3 5 J1 400 0.01 0 0 0 0 +4 3 4 400 0.01 0 0 0 0 +5 2 5 400 0.01 0 0 0 0 + +[PUMPS] +;;Name From Node To Node Pump Curve Status Sartup Shutoff +;;-------------- ---------------- ---------------- ---------------- ------ -------- -------- +C2 J2 J3 P1_Curve ON 0 0 + +[WEIRS] +;;Name From Node To Node Type CrestHt Qcoeff Gated EndCon EndCoeff Surcharge RoadWidth RoadSurf +;;-------------- ---------------- ---------------- ------------ ---------- ---------- -------- -------- ---------- ---------- ---------- ---------- +C3 J3 J4 TRANSVERSE 0 3.33 NO 0 0 NO + +[XSECTIONS] +;;Link Shape Geom1 Geom2 Geom3 Geom4 Barrels Culvert +;;-------------- ------------ ---------------- ---------- ---------- ---------- ---------- ---------- +C1:C2 CIRCULAR 1 0 0 0 1 +C2.1 CIRCULAR 1 0 0 0 1 +1 CIRCULAR 1 0 0 0 1 +2 CIRCULAR 1 0 0 0 1 +3 CIRCULAR 1 0 0 0 1 +4 CIRCULAR 1 0 0 0 1 +5 CIRCULAR 1 0 0 0 1 +C3 RECT_OPEN 5 1 0 0 + +[INFLOWS] +;;Node Constituent Time Series Type Mfactor Sfactor Baseline Pattern +;;-------------- ---------------- ---------------- -------- -------- -------- -------- -------- +J3 Flow "" FLOW 1.0 1 1 +J2 FLOW "" FLOW 1.0 1 1 +J1 FLOW "" FLOW 1.0 1 1 + +[CURVES] +;;Name Type X-Value Y-Value +;;-------------- ---------- ---------- ---------- +P1_Curve Pump4 0 10 +P1_Curve 5 20 + +[TIMESERIES] +;;Name Date Time Value +;;-------------- ---------- ---------- ---------- +;SCS_24h_Type_I_1in design storm, total rainfall = 1 in, rain units = in/hr. 
+SCS_24h_Type_I_1in 0:00 0.0175 +SCS_24h_Type_I_1in 0:15 0.0175 +SCS_24h_Type_I_1in 0:30 0.0175 +SCS_24h_Type_I_1in 0:45 0.0175 +SCS_24h_Type_I_1in 1:00 0.0175 +SCS_24h_Type_I_1in 1:15 0.0175 +SCS_24h_Type_I_1in 1:30 0.0175 +SCS_24h_Type_I_1in 1:45 0.0175 +SCS_24h_Type_I_1in 2:00 0.0205 +SCS_24h_Type_I_1in 2:15 0.0205 +SCS_24h_Type_I_1in 2:30 0.0205 +SCS_24h_Type_I_1in 2:45 0.0205 +SCS_24h_Type_I_1in 3:00 0.0205 +SCS_24h_Type_I_1in 3:15 0.0205 +SCS_24h_Type_I_1in 3:30 0.0205 +SCS_24h_Type_I_1in 3:45 0.0205 +SCS_24h_Type_I_1in 4:00 0.0245 +SCS_24h_Type_I_1in 4:15 0.0245 +SCS_24h_Type_I_1in 4:30 0.0245 +SCS_24h_Type_I_1in 4:45 0.0245 +SCS_24h_Type_I_1in 5:00 0.0245 +SCS_24h_Type_I_1in 5:15 0.0245 +SCS_24h_Type_I_1in 5:30 0.0245 +SCS_24h_Type_I_1in 5:45 0.0245 +SCS_24h_Type_I_1in 6:00 0.031 +SCS_24h_Type_I_1in 6:15 0.031 +SCS_24h_Type_I_1in 6:30 0.031 +SCS_24h_Type_I_1in 6:45 0.031 +SCS_24h_Type_I_1in 7:00 0.038 +SCS_24h_Type_I_1in 7:15 0.038 +SCS_24h_Type_I_1in 7:30 0.038 +SCS_24h_Type_I_1in 7:45 0.038 +SCS_24h_Type_I_1in 8:00 0.05 +SCS_24h_Type_I_1in 8:15 0.05 +SCS_24h_Type_I_1in 8:30 0.07 +SCS_24h_Type_I_1in 8:45 0.07 +SCS_24h_Type_I_1in 9:00 0.098 +SCS_24h_Type_I_1in 9:15 0.098 +SCS_24h_Type_I_1in 9:30 0.236 +SCS_24h_Type_I_1in 9:45 0.612 +SCS_24h_Type_I_1in 10:00 0.136 +SCS_24h_Type_I_1in 10:15 0.136 +SCS_24h_Type_I_1in 10:30 0.082 +SCS_24h_Type_I_1in 10:45 0.082 +SCS_24h_Type_I_1in 11:00 0.06 +SCS_24h_Type_I_1in 11:15 0.06 +SCS_24h_Type_I_1in 11:30 0.06 +SCS_24h_Type_I_1in 11:45 0.052 +SCS_24h_Type_I_1in 12:00 0.048 +SCS_24h_Type_I_1in 12:15 0.048 +SCS_24h_Type_I_1in 12:30 0.042 +SCS_24h_Type_I_1in 12:45 0.042 +SCS_24h_Type_I_1in 13:00 0.042 +SCS_24h_Type_I_1in 13:15 0.042 +SCS_24h_Type_I_1in 13:30 0.038 +SCS_24h_Type_I_1in 13:45 0.038 +SCS_24h_Type_I_1in 14:00 0.0315 +SCS_24h_Type_I_1in 14:15 0.0315 +SCS_24h_Type_I_1in 14:30 0.0315 +SCS_24h_Type_I_1in 14:45 0.0315 +SCS_24h_Type_I_1in 15:00 0.0315 +SCS_24h_Type_I_1in 15:15 0.0315 +SCS_24h_Type_I_1in 15:30 0.0315 +SCS_24h_Type_I_1in 15:45 0.0315 +SCS_24h_Type_I_1in 16:00 0.024 +SCS_24h_Type_I_1in 16:15 0.024 +SCS_24h_Type_I_1in 16:30 0.024 +SCS_24h_Type_I_1in 16:45 0.024 +SCS_24h_Type_I_1in 17:00 0.024 +SCS_24h_Type_I_1in 17:15 0.024 +SCS_24h_Type_I_1in 17:30 0.024 +SCS_24h_Type_I_1in 17:45 0.024 +SCS_24h_Type_I_1in 18:00 0.024 +SCS_24h_Type_I_1in 18:15 0.024 +SCS_24h_Type_I_1in 18:30 0.024 +SCS_24h_Type_I_1in 18:45 0.024 +SCS_24h_Type_I_1in 19:00 0.024 +SCS_24h_Type_I_1in 19:15 0.024 +SCS_24h_Type_I_1in 19:30 0.024 +SCS_24h_Type_I_1in 19:45 0.024 +SCS_24h_Type_I_1in 20:00 0.0185 +SCS_24h_Type_I_1in 20:15 0.0185 +SCS_24h_Type_I_1in 20:30 0.0185 +SCS_24h_Type_I_1in 20:45 0.0185 +SCS_24h_Type_I_1in 21:00 0.0185 +SCS_24h_Type_I_1in 21:15 0.0185 +SCS_24h_Type_I_1in 21:30 0.0185 +SCS_24h_Type_I_1in 21:45 0.0185 +SCS_24h_Type_I_1in 22:00 0.0185 +SCS_24h_Type_I_1in 22:15 0.0185 +SCS_24h_Type_I_1in 22:30 0.0185 +SCS_24h_Type_I_1in 22:45 0.0185 +SCS_24h_Type_I_1in 23:00 0.0185 +SCS_24h_Type_I_1in 23:15 0.0185 +SCS_24h_Type_I_1in 23:30 0.0185 +SCS_24h_Type_I_1in 23:45 0.0185 +SCS_24h_Type_I_1in 24:00 0 + +[REPORT] +;;Reporting Options +INPUT YES +CONTROLS YES +SUBCATCHMENTS NONE +NODES ALL +LINKS NONE + +[TAGS] + +[MAP] +DIMENSIONS 2746427.630 1117685.602 2748614.997 1119016.264 +Units Feet + +[COORDINATES] +;;Node X-Coord Y-Coord +;;-------------- ------------------ ------------------ +J3 2748073.306 1117746.087 +1 2746913.127 1118559.809 +2 2747728.148 1118449.164 +3 2747242.131 1118656.381 +4 2747345.325 1118499.807 +5 2747386.555 1118362.817 
+J2 2747514.212 1118016.207 +J4 2748515.571 1117763.466 +J1 2747402.678 1118092.704 + +[VERTICES] +;;Link X-Coord Y-Coord +;;-------------- ------------------ ------------------ +C1:C2 2747446.992 1118044.900 +C2.1 2747634.497 1117846.256 +C2.1 2747834.998 1117744.149 + +[Polygons] +;;Subcatchment X-Coord Y-Coord +;;-------------- ------------------ ------------------ +S1 2746954.580 1118179.306 +S1 2747306.004 1117998.644 +S1 2747284.350 1118423.694 +S1 2746979.947 1118463.291 +S2 2747840.565 1118007.306 +S2 2748063.299 1118230.039 +S2 2747405.616 1118921.132 +S2 2747078.940 1118955.779 +S2 2747430.364 1118537.535 +S2 2747492.234 1118089.594 +S3 2748103.514 1118193.536 +S3 2747848.608 1117921.306 +S3 2748088.665 1117782.716 +S3 2748242.104 1117782.716 +S3 2748511.859 1117807.464 +S4 2746913.127 1118252.931 +S4 2746942.825 1118500.413 +S4 2747192.781 1118468.241 +S4 2747168.033 1118599.406 +S4 2747088.839 1118755.319 +S4 2746741.127 1118938.456 +S4 2746527.056 1118812.240 +S4 2746551.804 1118480.615 + +[SYMBOLS] +;;Gage X-Coord Y-Coord +;;-------------- ------------------ ------------------ + diff --git a/swmmio/tests/test_dataframes.py b/swmmio/tests/test_dataframes.py index 96a2a6e..b1330a6 100644 --- a/swmmio/tests/test_dataframes.py +++ b/swmmio/tests/test_dataframes.py @@ -1,6 +1,8 @@ from swmmio.tests.data import (MODEL_FULL_FEATURES_PATH, MODEL_FULL_FEATURES__NET_PATH, - MODEL_BROWARD_COUNTY_PATH, MODEL_XSECTION_ALT_01) + MODEL_BROWARD_COUNTY_PATH, MODEL_XSECTION_ALT_01, df_test_coordinates_csv, + MODEL_FULL_FEATURES_XY) import swmmio +import pandas as pd def test_create_dataframeRPT(): @@ -52,3 +54,12 @@ def test_model_to_networkx(): assert (G['J2']['J3']['C2.1']['Length'] == 666) assert (G['J1']['J2']['C1:C2']['Length'] == 244.63) assert (round(G.node['J2']['InvertElev'], 3) == 13.0) + + +def test_coordinates(): + m = swmmio.Model(MODEL_FULL_FEATURES_XY) + coordinates = m.inp.coordinates + print(coordinates) + test_coords = pd.read_csv(df_test_coordinates_csv, index_col=0) + assert(coordinates.equals(None)) + diff --git a/swmmio/tests/test_model_elements.py b/swmmio/tests/test_model_elements.py index df43110..2f7138f 100644 --- a/swmmio/tests/test_model_elements.py +++ b/swmmio/tests/test_model_elements.py @@ -16,10 +16,10 @@ def test_model_section(test_model): bayside = Model(MODEL_FULL_FEATURES__NET_PATH) - a = bayside.inp.junctions[1] - # print(a) - tsb_ids = [1213, 13131, 232131, 12313] - tsbs = bayside.conduits(data=['MaxDepth, MaxQ', 'geometry']) + a = bayside.inp.junctions + print(a) + # tsb_ids = [1213, 13131, 232131, 12313] + # tsbs = bayside.conduits(data=['MaxDepth, MaxQ', 'geometry']) def test_complete_headers(test_model): diff --git a/swmmio/utils/spatial.py b/swmmio/utils/spatial.py index 6cfd5d4..9e8b373 100644 --- a/swmmio/utils/spatial.py +++ b/swmmio/utils/spatial.py @@ -1,35 +1,90 @@ from swmmio.defs.config import ROOT_DIR -import geojson +from swmmio.tests.data import MODEL_FULL_FEATURES_XY import json import pandas as pd from geojson import Point, LineString, Polygon, FeatureCollection, Feature import os, shutil -def write_geojson(df, filename=None, geomtype='linestring', inproj='epsg:2272'): +def change_crs(series, in_crs, to_crs): + """ + Change the projection of a series of coordinates + :param series: + :param in_crs: + :param to_proj: + :return: series of reprojected coordinates + >>> import swmmio + >>> m = swmmio.Model(MODEL_FULL_FEATURES_XY) + >>> proj4_str = '+proj=tmerc +lat_0=36.16666666666666 +lon_0=-94.5 +k=0.9999411764705882 +x_0=850000 +y_0=0 
+datum=NAD83 +units=us-ft +no_defs' #"+init=EPSG:102698" + >>> m.crs = proj4_str + >>> nodes = m.nodes() + >>> change_crs(nodes['coords'], proj4_str, "+init=EPSG:4326") + Name + J3 [(39.236286854940964, -94.64346373821752)] + 1 [(39.23851590020802, -94.64756446847099)] + 2 [(39.2382157223383, -94.64468629488778)] + 3 [(39.23878251491925, -94.64640342340165)] + 4 [(39.238353081411915, -94.64603818939938)] + 5 [(39.23797714290924, -94.64589184224722)] + J2 [(39.23702605103406, -94.64543916929885)] + J4 [(39.23633648359375, -94.64190240294558)] + J1 [(39.23723558954326, -94.64583338271147)] + Name: coords, dtype: object + """ + try: + import pyproj + except ImportError: + raise ImportError('pyproj module needed. get this package here: ', + 'https://pypi.python.org/pypi/pyproj') + + # SET UP THE TO AND FROM COORDINATE PROJECTION + in_proj = pyproj.Proj(in_crs, preserve_units=True) + to_proj = pyproj.Proj(to_crs)# to_crs) # google maps, etc + + # convert coords in coordinates, vertices, and polygons inp sections + # transform to the typical 'WGS84' coord system + def get_xys(xy_row): + # need to reverse to lat/long after conversion + return [pyproj.transform(in_proj, to_proj, x, y)[::-1] for x, y in xy_row] + + if isinstance(series, pd.Series): + return series.apply(lambda row: get_xys(row)) + if isinstance(series, pd.DataFrame): + zipped_coords = list(zip(series.X, series.Y)) + df = pd.DataFrame(data=get_xys(zipped_coords), columns=["X", "Y"], index=series.index) + return df + elif isinstance(series, (list, tuple)): + if isinstance(series[0], (list, tuple)): + return get_xys(series) + else: + return get_xys([series]) + - try: import pyproj +def write_geojson(df, filename=None, geomtype='linestring', inproj='epsg:2272'): + try: + import pyproj except ImportError: raise ImportError('pyproj module needed. get this package here: ', - 'https://pypi.python.org/pypi/pyproj') + 'https://pypi.python.org/pypi/pyproj') - #SET UP THE TO AND FROM COORDINATE PROJECTION + # SET UP THE TO AND FROM COORDINATE PROJECTION pa_plane = pyproj.Proj(init=inproj, preserve_units=True) - wgs = pyproj.Proj(proj='longlat', datum='WGS84', ellps='WGS84') #google maps, etc + wgs = pyproj.Proj(proj='longlat', datum='WGS84', ellps='WGS84') # google maps, etc - #CONVERT THE DF INTO JSON - df['Name'] = df.index #add a name column (we wont have the index) + # CONVERT THE DF INTO JSON + df['Name'] = df.index # add a name column (we wont have the index) records = json.loads(df.to_json(orient='records')) - #ITERATE THROUGH THE RECORDS AND CREATE GEOJSON OBJECTS + # ITERATE THROUGH THE RECORDS AND CREATE GEOJSON OBJECTS features = [] for rec in records: - coordinates =rec['coords'] - del rec['coords'] #delete the coords so they aren't in the properties + coordinates = rec['coords'] + del rec['coords'] # delete the coords so they aren't in the properties - #transform to the typical 'WGS84' coord system + # transform to the typical 'WGS84' coord system latlngs = [pyproj.transform(pa_plane, wgs, *xy) for xy in coordinates] + if geomtype == 'linestring': geometry = LineString(latlngs) elif geomtype == 'point': @@ -49,8 +104,8 @@ def write_geojson(df, filename=None, geomtype='linestring', inproj='epsg:2272'): else: return FeatureCollection(features) -def write_shapefile(df, filename, geomtype='line', prj=None): +def write_shapefile(df, filename, geomtype='line', prj=None): """ create a shapefile given a pandas Dataframe that has coordinate data in a column called 'coords'. 
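# Small sketch of write_shapefile() as described above, assuming the conduits
# frame produced by the model accessors (it already carries the nested
# 'coords' column the writer expects); the output filename is hypothetical and
# pyshp must be installed.
import swmmio
from swmmio.tests.data import MODEL_FULL_FEATURES__NET_PATH
from swmmio.utils.spatial import write_shapefile

m = swmmio.Model(MODEL_FULL_FEATURES__NET_PATH)
conduits = m.conduits()                      # includes a 'coords' column of (x, y) pairs
write_shapefile(conduits, 'conduits_out.shp', geomtype='line')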
@@ -59,7 +114,7 @@ def write_shapefile(df, filename, geomtype='line', prj=None): import shapefile df['Name'] = df.index - #create a shp file writer object of geom type 'point' + # create a shp file writer object of geom type 'point' if geomtype == 'point': w = shapefile.Writer(shapefile.POINT) elif geomtype == 'line': @@ -67,42 +122,43 @@ def write_shapefile(df, filename, geomtype='line', prj=None): elif geomtype == 'polygon': w = shapefile.Writer(shapefile.POLYGON) - #use the helper mode to ensure the # of records equals the # of shapes - #(shapefile are made up of shapes and records, and need both to be valid) + # use the helper mode to ensure the # of records equals the # of shapes + # (shapefile are made up of shapes and records, and need both to be valid) w.autoBalance = 1 - #add the fields + # add the fields for fieldname in df.columns: w.field(fieldname, "C") for k, row in df.iterrows(): w.record(*row.tolist()) - w.line(parts = [row.coords]) + w.line(parts=[row.coords]) w.save(filename) - #add projection data to the shapefile, + # add projection data to the shapefile, if prj is None: - #if not sepcified, the default, projection is used (PA StatePlane) + # if not sepcified, the default, projection is used (PA StatePlane) prj = os.path.join(ROOT_DIR, 'swmmio/defs/default.prj') prj_filepath = os.path.splitext(filename)[0] + '.prj' shutil.copy(prj, prj_filepath) + def read_shapefile(shp_path): - """ + """ Read a shapefile into a Pandas dataframe with a 'coords' column holding the geometry information. This uses the pyshp package """ - import shapefile + import shapefile - #read file, parse out the records and shapes - sf = shapefile.Reader(shp_path) - fields = [x[0] for x in sf.fields][1:] - records = sf.records() - shps = [s.points for s in sf.shapes()] + # read file, parse out the records and shapes + sf = shapefile.Reader(shp_path) + fields = [x[0] for x in sf.fields][1:] + records = sf.records() + shps = [s.points for s in sf.shapes()] - #write into a dataframe - df = pd.DataFrame(columns=fields, data=records) - df = df.assign(coords=shps) + # write into a dataframe + df = pd.DataFrame(columns=fields, data=records) + df = df.assign(coords=shps) - return df + return df From 6e56c21fa0ba18fa3aa3d61e09a9cbd0673a802a Mon Sep 17 00:00:00 2001 From: Adam Erispaha Date: Tue, 12 Mar 2019 01:31:01 -0400 Subject: [PATCH 08/17] allow long/lat orientation from pyproj --- swmmio/core.py | 12 ++++++++---- swmmio/utils/functions.py | 3 ++- swmmio/utils/spatial.py | 2 +- 3 files changed, 11 insertions(+), 6 deletions(-) diff --git a/swmmio/core.py b/swmmio/core.py index 259d65f..277294f 100644 --- a/swmmio/core.py +++ b/swmmio/core.py @@ -400,12 +400,16 @@ def to_crs(self, *args, **kwargs): if self.crs is None: raise AttributeError('CRS of model object not set') - self.inp.coordinates = spatial.change_crs(self.inp.coordinates, self.crs, *args, **kwargs) - self.inp.polygons = spatial.change_crs(self.inp.polygons, self.crs, *args, **kwargs) - self.inp.vertices = spatial.change_crs(self.inp.vertices, self.crs, *args, **kwargs) - self.crs = args[0] + if not self.inp.coordinates.empty: + self.inp.coordinates = spatial.change_crs(self.inp.coordinates, self.crs, *args, **kwargs) + + if not self.inp.vertices.empty: + self.inp.vertices = spatial.change_crs(self.inp.vertices, self.crs, *args, **kwargs) + if not self.inp.polygons.empty: + self.inp.polygons = spatial.change_crs(self.inp.polygons, self.crs, *args, **kwargs) + self.crs = args[0] def to_geojson(self, target_path=None): """ diff --git 
a/swmmio/utils/functions.py b/swmmio/utils/functions.py index 79c2351..e6e45e4 100644 --- a/swmmio/utils/functions.py +++ b/swmmio/utils/functions.py @@ -65,7 +65,7 @@ def multidigraph_from_edges(edges, source, target): G[u][v][k]['geometry'] = LineString(coords) for n, coords in G.nodes(data='coords'): if coords: - G.node[n]['geometry'] = Point(coords) + G.node[n]['geometry'] = Point(coords[0]) if drop_cycles: # remove cycles @@ -74,6 +74,7 @@ def multidigraph_from_edges(edges, source, target): print('cycles detected and removed: {}'.format(cycles)) G.remove_edges_from(cycles) + G.graph['crs'] = model.crs return G diff --git a/swmmio/utils/spatial.py b/swmmio/utils/spatial.py index 9e8b373..f3c1671 100644 --- a/swmmio/utils/spatial.py +++ b/swmmio/utils/spatial.py @@ -45,7 +45,7 @@ def change_crs(series, in_crs, to_crs): # transform to the typical 'WGS84' coord system def get_xys(xy_row): # need to reverse to lat/long after conversion - return [pyproj.transform(in_proj, to_proj, x, y)[::-1] for x, y in xy_row] + return [pyproj.transform(in_proj, to_proj, x, y) for x, y in xy_row] if isinstance(series, pd.Series): return series.apply(lambda row: get_xys(row)) From b5bf29162dfa1b233c7f3b9eb0ccb1d76edec318 Mon Sep 17 00:00:00 2001 From: Adam Erispaha Date: Tue, 12 Mar 2019 12:18:38 -0400 Subject: [PATCH 09/17] pass tests for lat/long --- swmmio/__init__.py | 8 +-- swmmio/core.py | 67 ++++++++--------------- swmmio/tests/data/__init__.py | 10 ++-- swmmio/tests/data/df_test_coordinates.csv | 15 +++-- swmmio/tests/test_dataframes.py | 11 ++-- swmmio/tests/test_version_control.py | 13 +++-- swmmio/utils/spatial.py | 12 ++-- 7 files changed, 64 insertions(+), 72 deletions(-) diff --git a/swmmio/__init__.py b/swmmio/__init__.py index 2d1f734..e84ee4a 100644 --- a/swmmio/__init__.py +++ b/swmmio/__init__.py @@ -1,3 +1,7 @@ +from swmmio import * +from swmmio.utils.dataframes import create_dataframeBI, create_dataframeRPT, create_dataframeINP +from .core import * +import swmmio.core as swmmio '''Python SWMM Input/Output Tools''' @@ -6,7 +10,3 @@ __author__ = 'Adam Erispaha' __copyright__ = 'Copyright (c) 2016' __licence__ = '' - -from .core import * -from swmmio.utils.dataframes import create_dataframeBI, create_dataframeRPT, create_dataframeINP -from swmmio import * \ No newline at end of file diff --git a/swmmio/core.py b/swmmio/core.py index 277294f..c1fb213 100644 --- a/swmmio/core.py +++ b/swmmio/core.py @@ -13,11 +13,9 @@ import warnings - class Model(object): - def __init__(self, in_file_path): - + def __init__(self, in_file_path, crs=None): """ Class representing a complete SWMM model incorporating its INP and RPT files and data @@ -47,7 +45,7 @@ def __init__(self, in_file_path): self.rpt = None # until we can confirm it initializes properly self.bbox = None # to remember how the model data was clipped self.scenario = '' # self._get_scenario() - self.crs = None # coordinate reference system + self.crs = crs # coordinate reference system # try to initialize a companion RPT object rpt_path = os.path.join(wd, name + '.rpt') @@ -115,7 +113,6 @@ def wrn(): wrn() def conduits(self): - """ collect all useful and available data related model conduits and organize in one dataframe. 
@@ -133,7 +130,6 @@ def conduits(self): conduits_df = create_dataframeINP(inp.path, "[CONDUITS]", comment_cols=False) xsections_df = create_dataframeINP(inp.path, "[XSECTIONS]", comment_cols=False) conduits_df = conduits_df.join(xsections_df) - coords_df = self.inp.coordinates if rpt: # create a dictionary holding data from an rpt file, if provided @@ -141,9 +137,7 @@ def conduits(self): conduits_df = conduits_df.join(link_flow_df) # add conduit coordinates - # the xys.map() junk is to unpack a nested list - verts = self.inp.vertices - xys = conduits_df.apply(lambda r: get_link_coords(r, coords_df, verts), axis=1) + xys = conduits_df.apply(lambda r: get_link_coords(r, self.inp.coordinates, self.inp.vertices), axis=1) df = conduits_df.assign(coords=xys.map(lambda x: x[0])) # add conduit up/down inverts and calculate slope @@ -164,7 +158,6 @@ def conduits(self): return df def orifices(self): - """ collect all useful and available data related model orifices and organize in one dataframe. @@ -183,11 +176,8 @@ def orifices(self): if orifices_df.empty: return pd.DataFrame() - coords_df = self.inp.coordinates - # add conduit coordinates - verts = self.inp.vertices - xys = orifices_df.apply(lambda r: get_link_coords(r, coords_df, verts), axis=1) + xys = orifices_df.apply(lambda r: get_link_coords(r, self.inp.coordinates, self.inp.vertices), axis=1) df = orifices_df.assign(coords=xys.map(lambda x: x[0])) df.InletNode = df.InletNode.astype(str) df.OutletNode = df.OutletNode.astype(str) @@ -196,7 +186,6 @@ def orifices(self): return df def weirs(self): - """ collect all useful and available data related model weirs and organize in one dataframe. @@ -216,12 +205,9 @@ def weirs(self): return pd.DataFrame() weirs_df = weirs_df[['InletNode', 'OutletNode', 'WeirType', 'CrestHeight']] - coords_df = self.inp.coordinates # .drop_duplicates() # add conduit coordinates - # the xys.map() junk is to unpack a nested list - verts = self.inp.vertices - xys = weirs_df.apply(lambda r: get_link_coords(r, coords_df, verts), axis=1) + xys = weirs_df.apply(lambda r: get_link_coords(r, self.inp.coordinates, self.inp.vertices), axis=1) df = weirs_df.assign(coords=xys.map(lambda x: x[0])) df.InletNode = df.InletNode.astype(str) df.OutletNode = df.OutletNode.astype(str) @@ -231,7 +217,6 @@ def weirs(self): return df def pumps(self): - """ collect all useful and available data related model pumps and organize in one dataframe. @@ -250,11 +235,8 @@ def pumps(self): if pumps_df.empty: return pd.DataFrame() - coords_df = self.inp.coordinates - # add conduit coordinates - verts = self.inp.vertices - xys = pumps_df.apply(lambda r: get_link_coords(r, coords_df, verts), axis=1) + xys = pumps_df.apply(lambda r: get_link_coords(r, self.inp.coordinates, self.inp.vertices), axis=1) df = pumps_df.assign(coords=xys.map(lambda x: x[0])) df.InletNode = df.InletNode.astype(str) df.OutletNode = df.OutletNode.astype(str) @@ -264,7 +246,6 @@ def pumps(self): return df def nodes(self, bbox=None, subset=None): - """ collect all useful and available data related model nodes and organize in one dataframe. 
@@ -282,7 +263,6 @@ def nodes(self, bbox=None, subset=None): juncs_df = create_dataframeINP(inp.path, "[JUNCTIONS]") outfalls_df = create_dataframeINP(inp.path, "[OUTFALLS]") storage_df = create_dataframeINP(inp.path, "[STORAGE]") - coords_df = self.inp.coordinates # concatenate the DFs and keep only relevant cols all_nodes = pd.concat([juncs_df, outfalls_df, storage_df]) @@ -295,10 +275,11 @@ def nodes(self, bbox=None, subset=None): flood_summ = create_dataframeRPT(rpt.path, "Node Flooding Summary") # join the rpt data (index on depth df, suffixes for common cols) - rpt_df = depth_summ.join(flood_summ, lsuffix='_depth', rsuffix='_flood') + rpt_df = depth_summ.join( + flood_summ, lsuffix='_depth', rsuffix='_flood') all_nodes = all_nodes.join(rpt_df) # join to the all_nodes df - all_nodes = all_nodes.join(coords_df[['X', 'Y']]) + all_nodes = all_nodes.join(self.inp.coordinates[['X', 'Y']]) def nodexy(row): if math.isnan(row.X) or math.isnan(row.Y): @@ -323,7 +304,8 @@ def subcatchments(self): polygons_df = self.inp.polygons if self.rpt: - flw = create_dataframeRPT(self.rpt.path, 'Subcatchment Runoff Summary') + flw = create_dataframeRPT( + self.rpt.path, 'Subcatchment Runoff Summary') subs = subs.join(flw) # more accurate runoff calculations @@ -369,27 +351,26 @@ def to_crs(self, *args, **kwargs): :return: True Example: >>> import swmmio - >>> m = swmmio.Model(MODEL_FULL_FEATURES_XY) - >>> m.crs = "+init=EPSG:2272" + >>> m = swmmio.Model(MODEL_FULL_FEATURES_XY, crs="+init=EPSG:2272") >>> m.to_crs("+init=EPSG:4326") # convert to WGS84 web mercator >>> m.inp.coordinates X Y Name - J3 42.365958 -74.866424 - 1 42.368292 -74.870614 - 2 42.367916 -74.867615 - 3 42.368527 -74.869387 - 4 42.368089 -74.869024 - 5 42.367709 -74.868888 - J2 42.366748 -74.868458 - J4 42.365966 -74.864787 - J1 42.366968 -74.868861 + J3 -74.866424 42.365958 + 1 -74.870614 42.368292 + 2 -74.867615 42.367916 + 3 -74.869387 42.368527 + 4 -74.869024 42.368089 + 5 -74.868888 42.367709 + J2 -74.868458 42.366748 + J4 -74.864787 42.365966 + J1 -74.868861 42.366968 >>> m.inp.vertices X Y Name - C1:C2 42.366833 -74.868703 - C2.1 42.366271 -74.868034 - C2.1 42.365974 -74.867305 + C1:C2 -74.868703 42.366833 + C2.1 -74.868034 42.366271 + C2.1 -74.867305 42.365974 """ try: import pyproj diff --git a/swmmio/tests/data/__init__.py b/swmmio/tests/data/__init__.py index 36d8384..0a348a4 100644 --- a/swmmio/tests/data/__init__.py +++ b/swmmio/tests/data/__init__.py @@ -14,14 +14,16 @@ # Test models paths MODEL_FULL_FEATURES_PATH = os.path.join(DATA_PATH, 'model_full_features.inp') -MODEL_FULL_FEATURES_XY = os.path.join(DATA_PATH, 'model_full_features_network_xy.inp') -MODEL_FULL_FEATURES__NET_PATH = os.path.join(DATA_PATH, 'model_full_features_network.inp') +MODEL_FULL_FEATURES_XY = os.path.join( + DATA_PATH, 'model_full_features_network_xy.inp') +MODEL_FULL_FEATURES__NET_PATH = os.path.join( + DATA_PATH, 'model_full_features_network.inp') MODEL_BROWARD_COUNTY_PATH = os.path.join(DATA_PATH, 'RUNOFF46_SW5.INP') -#version control test models +# version control test models MODEL_XSECTION_BASELINE = os.path.join(DATA_PATH, 'baseline_test.inp') MODEL_XSECTION_ALT_01 = os.path.join(DATA_PATH, 'alt_test1.inp') MODEL_XSECTION_ALT_02 = os.path.join(DATA_PATH, 'alt_test2.inp') MODEL_XSECTION_ALT_03 = os.path.join(DATA_PATH, 'alt_test3.inp') -df_test_coordinates_csv = os.path.join(DATA_PATH, 'df_test_coordinates.csv') \ No newline at end of file +df_test_coordinates_csv = os.path.join(DATA_PATH, 'df_test_coordinates.csv') diff --git 
a/swmmio/tests/data/df_test_coordinates.csv b/swmmio/tests/data/df_test_coordinates.csv index e961cda..59c521a 100644 --- a/swmmio/tests/data/df_test_coordinates.csv +++ b/swmmio/tests/data/df_test_coordinates.csv @@ -1,5 +1,10 @@ -Name,X,Y,;,Comment,Origin -J1,0.0,0.0,;,, -J3,459.05800000000005,-113.145,;,, -J4,671.3910000000001,-163.985,;,, -J2,238.75,-53.332,;,, +Name,X,Y +J3,2748073.306,1117746.087 +1,2746913.127,1118559.809 +2,2747728.148,1118449.164 +3,2747242.131,1118656.381 +4,2747345.325,1118499.807 +5,2747386.555,1118362.817 +J2,2747514.212,1118016.207 +J4,2748515.571,1117763.466 +J1,2747402.678,1118092.704 diff --git a/swmmio/tests/test_dataframes.py b/swmmio/tests/test_dataframes.py index b1330a6..a798307 100644 --- a/swmmio/tests/test_dataframes.py +++ b/swmmio/tests/test_dataframes.py @@ -9,7 +9,8 @@ def test_create_dataframeRPT(): m = swmmio.Model(MODEL_FULL_FEATURES__NET_PATH) depth_summ = swmmio.create_dataframeRPT(m.rpt.path, "Node Depth Summary") - flood_summ = swmmio.create_dataframeRPT(m.rpt.path, "Node Flooding Summary") + flood_summ = swmmio.create_dataframeRPT( + m.rpt.path, "Node Flooding Summary") inflo_summ = swmmio.create_dataframeRPT(m.rpt.path, "Node Inflow Summary") print('\n', depth_summ) @@ -28,7 +29,7 @@ def test_create_dataframeRPT(): def test_conduits_dataframe(): m = swmmio.Model(MODEL_FULL_FEATURES_PATH) - conduits = 2 + conduits = m.conduits() assert (list(conduits.index) == ['C1:C2']) @@ -59,7 +60,9 @@ def test_model_to_networkx(): def test_coordinates(): m = swmmio.Model(MODEL_FULL_FEATURES_XY) coordinates = m.inp.coordinates - print(coordinates) + # coordinates.to_csv(df_test_coordinates_csv) test_coords = pd.read_csv(df_test_coordinates_csv, index_col=0) - assert(coordinates.equals(None)) + assert(coordinates.equals(test_coords)) + + # change projection diff --git a/swmmio/tests/test_version_control.py b/swmmio/tests/test_version_control.py index 6ecb376..b19c80f 100644 --- a/swmmio/tests/test_version_control.py +++ b/swmmio/tests/test_version_control.py @@ -9,10 +9,10 @@ def test_complete_inp_headers(): headers = [ - '[TITLE]','[OPTIONS]','[EVAPORATION]','[JUNCTIONS]','[OUTFALLS]', - '[CONDUITS]','[XSECTIONS]','[DWF]','[REPORT]','[TAGS]','[MAP]', - '[COORDINATES]','[VERTICES]', - ] + '[TITLE]', '[OPTIONS]', '[EVAPORATION]', '[JUNCTIONS]', '[OUTFALLS]', + '[CONDUITS]', '[XSECTIONS]', '[DWF]', '[REPORT]', '[TAGS]', '[MAP]', + '[COORDINATES]', '[VERTICES]', + ] h1 = funcs.complete_inp_headers(MODEL_XSECTION_BASELINE) @@ -26,9 +26,10 @@ def test_create_inp_build_instructions(): MODEL_XSECTION_ALT_03, 'vc_dir', 'test_version_id', 'cool comments') - + latest_bi = vc_utils.newest_file('vc_dir') bi = inp.BuildInstructions(latest_bi) juncs = bi.instructions['[JUNCTIONS]'] - assert(all(j in juncs.altered.index for j in ['dummy_node1', 'dummy_node5'])) + assert(all(j in juncs.altered.index for j in [ + 'dummy_node1', 'dummy_node5'])) diff --git a/swmmio/utils/spatial.py b/swmmio/utils/spatial.py index f3c1671..93079e6 100644 --- a/swmmio/utils/spatial.py +++ b/swmmio/utils/spatial.py @@ -3,7 +3,8 @@ import json import pandas as pd from geojson import Point, LineString, Polygon, FeatureCollection, Feature -import os, shutil +import os +import shutil def change_crs(series, in_crs, to_crs): @@ -39,7 +40,7 @@ def change_crs(series, in_crs, to_crs): # SET UP THE TO AND FROM COORDINATE PROJECTION in_proj = pyproj.Proj(in_crs, preserve_units=True) - to_proj = pyproj.Proj(to_crs)# to_crs) # google maps, etc + to_proj = pyproj.Proj(to_crs) # convert coords in 
coordinates, vertices, and polygons inp sections # transform to the typical 'WGS84' coord system @@ -88,7 +89,6 @@ def write_geojson(df, filename=None, geomtype='linestring', inproj='epsg:2272'): if geomtype == 'linestring': geometry = LineString(latlngs) elif geomtype == 'point': - # lnglats = [(latlngs[0][1], latlngs[0][0])] #needs to be reversed. Why?? geometry = Point(latlngs) elif geomtype == 'polygon': geometry = Polygon([latlngs]) @@ -146,9 +146,9 @@ def write_shapefile(df, filename, geomtype='line', prj=None): def read_shapefile(shp_path): """ - Read a shapefile into a Pandas dataframe with a 'coords' column holding - the geometry information. This uses the pyshp package - """ + Read a shapefile into a Pandas dataframe with a 'coords' column holding + the geometry information. This uses the pyshp package + """ import shapefile # read file, parse out the records and shapes From 3457d294c46f8f5067dd824d5aa09088681b9463 Mon Sep 17 00:00:00 2001 From: Adam Erispaha Date: Tue, 12 Mar 2019 12:49:28 -0400 Subject: [PATCH 10/17] added links(0 model function, aggregating all edges in model into single DF --- swmmio/core.py | 14 ++++++++++++++ swmmio/tests/test_dataframes.py | 3 +++ 2 files changed, 17 insertions(+) diff --git a/swmmio/core.py b/swmmio/core.py index c1fb213..8fbad65 100644 --- a/swmmio/core.py +++ b/swmmio/core.py @@ -60,6 +60,7 @@ def __init__(self, in_file_path, crs=None): self._orifices_df = None self._weirs_df = None self._pumps_df = None + self._links_df = None self._subcatchments_df = None self._network = None @@ -245,6 +246,19 @@ def pumps(self): return df + def links(self): + """ + create a DataFrame containing all link objects in the model including conduits, pumps, weirs, and orifices. + :return: dataframe containing all link objects in the model + """ + if self._links_df is not None: + return self._links_df + + df = pd.concat([self.conduits(), self.orifices(), self.weirs(), self.pumps()]) + df['facilityid'] = df.index + self._links_df = df + return df + def nodes(self, bbox=None, subset=None): """ collect all useful and available data related model nodes and organize diff --git a/swmmio/tests/test_dataframes.py b/swmmio/tests/test_dataframes.py index a798307..3592855 100644 --- a/swmmio/tests/test_dataframes.py +++ b/swmmio/tests/test_dataframes.py @@ -56,6 +56,9 @@ def test_model_to_networkx(): assert (G['J1']['J2']['C1:C2']['Length'] == 244.63) assert (round(G.node['J2']['InvertElev'], 3) == 13.0) + links = m.links() + assert(len(links) == len(G.edges())) + def test_coordinates(): m = swmmio.Model(MODEL_FULL_FEATURES_XY) From fb9fd6d1a5598612c4b7ff3ad920e2685c7b969c Mon Sep 17 00:00:00 2001 From: Adam Erispaha Date: Tue, 12 Mar 2019 18:13:13 -0400 Subject: [PATCH 11/17] draft coords to geojson general func --- swmmio/elements.py | 5 +---- swmmio/utils/spatial.py | 37 ++++++++++++++++++++++++++++++++++++- 2 files changed, 37 insertions(+), 5 deletions(-) diff --git a/swmmio/elements.py b/swmmio/elements.py index a8004da..8878944 100644 --- a/swmmio/elements.py +++ b/swmmio/elements.py @@ -42,7 +42,6 @@ def __call__(self, data=None): conduits_df = create_dataframeINP(inp.path, "[CONDUITS]", comment_cols=False) xsections_df = create_dataframeINP(inp.path, "[XSECTIONS]", comment_cols=False) conduits_df = conduits_df.join(xsections_df) - coords_df = self.model._coordinates_df() if rpt: # create a dictionary holding data from an rpt file, if provided @@ -50,9 +49,7 @@ def __call__(self, data=None): conduits_df = conduits_df.join(link_flow_df) # add conduit coordinates 
- # the xys.map() junk is to unpack a nested list - verts = create_dataframeINP(inp.path, '[VERTICES]') - xys = conduits_df.apply(lambda r: get_link_coords(r, coords_df, verts), axis=1) + xys = conduits_df.apply(lambda r: get_link_coords(r, inp.coordinates, inp.vertices), axis=1) df = conduits_df.assign(coords=xys.map(lambda x: x[0])) # add conduit up/down inverts and calculate slope diff --git a/swmmio/utils/spatial.py b/swmmio/utils/spatial.py index 93079e6..44e7621 100644 --- a/swmmio/utils/spatial.py +++ b/swmmio/utils/spatial.py @@ -11,8 +11,8 @@ def change_crs(series, in_crs, to_crs): """ Change the projection of a series of coordinates :param series: + :param to_crs: :param in_crs: - :param to_proj: :return: series of reprojected coordinates >>> import swmmio >>> m = swmmio.Model(MODEL_FULL_FEATURES_XY) @@ -61,6 +61,41 @@ def get_xys(xy_row): return get_xys([series]) +def coords_series_to_geometry(coords, geomtype='linestring', format='geojson'): + """ + Convert a series of coords (list of list(s)) to a series of geometry objects. + :param coords: series of lists of xy coordinates + :param format: format of geometry objects to be created ('geojson', 'shapely') + :return: series of geometry objects + >>> import swmmio + >>> model = swmmio.Model(MODEL_FULL_FEATURES_XY) + >>> nodes = model.nodes() + >>> geoms = coords_series_to_geometry(nodes['coords'], geomtype='point') + >>> geoms.iloc[0] + {"coordinates": [2748073.3060000003, 1117746.087], "type": "Point"} + """ + + # detect whether LineString or Point should be used + geomtype = geomtype.lower() + if geomtype == 'linestring': + geoms = [LineString(latlngs) for latlngs in coords] + elif geomtype == 'point': + geoms = [Point(latlngs[0]) for latlngs in coords] + elif geomtype == 'polygon': + geoms = [Polygon([latlngs]) for latlngs in coords] + + if format.lower() == 'shape': + # convert to shapely objects + try: + from shapely.geometry import shape + except ImportError: + raise ImportError('shapely module needed. Install it via GeoPandas with conda: ', + 'conda install geopandas') + geoms = [shape(g) for g in geoms] + + return pd.Series(index=coords.index, name='geometry', data=geoms) + + def write_geojson(df, filename=None, geomtype='linestring', inproj='epsg:2272'): try: import pyproj From 45f1099b630f672e9d7656d2b997f50fc8049f8d Mon Sep 17 00:00:00 2001 From: Adam Erispaha Date: Wed, 13 Mar 2019 12:20:28 -0400 Subject: [PATCH 12/17] added draft ModelSection object to generalize process of returning a dataframe with model data --- swmmio/core.py | 38 ++++++++++----------- swmmio/defs/__init__.py | 9 +++++ swmmio/defs/section_headers.json | 10 +++++- swmmio/elements.py | 57 +++++++++++++++++--------------- swmmio/tests/test_dataframes.py | 39 ++++++++++++++++++++++ 5 files changed, 104 insertions(+), 49 deletions(-) diff --git a/swmmio/core.py b/swmmio/core.py index 8fbad65..8c32526 100644 --- a/swmmio/core.py +++ b/swmmio/core.py @@ -11,6 +11,8 @@ from swmmio.defs.config import * from swmmio.tests.data import MODEL_FULL_FEATURES__NET_PATH, MODEL_FULL_FEATURES_XY import warnings +import swmmio +from swmmio.elements import ModelSection class Model(object): @@ -217,34 +219,28 @@ def weirs(self): return df + @property def pumps(self): + """ collect all useful and available data related model pumps and organize in one dataframe. 
+ >>> import swmmio + >>> from swmmio.tests.data import MODEL_FULL_FEATURES_XY + >>> model = swmmio.Model(MODEL_FULL_FEATURES_XY) + >>> pumps = model.pumps() + >>> pumps[['PumpCurve', 'InitStatus']] + PumpCurve InitStatus + Name + C2 P1_Curve ON + >>> pumps = model.pumps.to_gdf() + >>> pumps """ - # check if this has been done already and return that data accordingly - if self._pumps_df is not None: - return self._pumps_df - - # parse out the main objects of this model - inp = self.inp - rpt = self.rpt - - # create dataframes of relevant sections from the INP - pumps_df = create_dataframeINP(inp.path, "[PUMPS]", comment_cols=False) - if pumps_df.empty: - return pd.DataFrame() + pumps_df = ModelSection(self, 'pumps') + self._pumps_df = pumps_df - # add conduit coordinates - xys = pumps_df.apply(lambda r: get_link_coords(r, self.inp.coordinates, self.inp.vertices), axis=1) - df = pumps_df.assign(coords=xys.map(lambda x: x[0])) - df.InletNode = df.InletNode.astype(str) - df.OutletNode = df.OutletNode.astype(str) - - self._pumps_df = df - - return df + return pumps_df def links(self): """ diff --git a/swmmio/defs/__init__.py b/swmmio/defs/__init__.py index e69de29..205b164 100644 --- a/swmmio/defs/__init__.py +++ b/swmmio/defs/__init__.py @@ -0,0 +1,9 @@ +# Standard library imports +import os +import json + +_DEFS_PATH = os.path.abspath(os.path.dirname(__file__)) +_HEADERS_JSON = os.path.join(_DEFS_PATH, 'section_headers.json') + +with open(_HEADERS_JSON, 'r') as f: + HEADERS = json.load(f) diff --git a/swmmio/defs/section_headers.json b/swmmio/defs/section_headers.json index 0b66b14..6a641e3 100644 --- a/swmmio/defs/section_headers.json +++ b/swmmio/defs/section_headers.json @@ -1,5 +1,13 @@ { "junctions": ["Name", "Elevation", "MaxDepth", "InitDepth", "SurDepth", "Aponded"], "outfalls": ["Name", "Elevation", "Type", "Stage Data", "Gated", "Route To"], - "conduits": ["Name", "InletNode", "OutletNode", "Length", "ManningN", "InletOffset", "OutletOffset", "InitFlow", "MaxFlow"] + "conduits": { + "inp_sections": ["[CONDUITS]", "[XSECTIONS]"], + "rpt_sections": ["Link Flow Summary"], + "columns": ["Name", "InletNode", "OutletNode", "Length", "ManningN", "InletOffset", "OutletOffset", "InitFlow", "MaxFlow"] + }, + "pumps": { + "inp_sections": ["[PUMPS]"], + "rpt_sections": ["Link Flow Summary"] + } } \ No newline at end of file diff --git a/swmmio/elements.py b/swmmio/elements.py index 8878944..bc19b7a 100644 --- a/swmmio/elements.py +++ b/swmmio/elements.py @@ -4,7 +4,8 @@ import swmmio from swmmio.utils.dataframes import create_dataframeINP, create_dataframeRPT, get_link_coords from swmmio.tests.data import MODEL_FULL_FEATURES__NET_PATH -import pandas as pd +from swmmio.defs import HEADERS +from swmmio.utils.spatial import coords_series_to_geometry class ModelSection(object): @@ -15,7 +16,10 @@ def __init__(self, model, section_name): :param section_name: name of section of model """ self.model = model + self.inp = self.model.inp + self.rpt = self.model.rpt self.section_name = section_name + self.config = HEADERS[section_name.lower()] # def to_geojson(self, target_path=None): # """ @@ -23,8 +27,20 @@ def __init__(self, model, section_name): # :param target_path: target of GeoJSON representation of elements # :return: GeoJSON representation of elements # """ + def to_gdf(self): + # uses GeoPandas + try: + import geopandas as gp + except ImportError: + raise ImportError('geopandas module needed. 
Install GeoPandas with conda: ', + 'conda install geopandas') - def __call__(self, data=None): + df = self.__call__() + df['geometry'] = coords_series_to_geometry(df['coords'], geomtype='linestring', format='shape') + df = df.drop(['coords'], axis=1) + return gp.GeoDataFrame(df, crs=self.model.crs) + + def __call__(self): """ collect all useful and available data related to the conduits and @@ -34,38 +50,25 @@ def __call__(self, data=None): >>> conduits_section() """ - # parse out the main objects of this model - inp = self.model.inp - rpt = self.model.rpt - # create dataframes of relevant sections from the INP - conduits_df = create_dataframeINP(inp.path, "[CONDUITS]", comment_cols=False) - xsections_df = create_dataframeINP(inp.path, "[XSECTIONS]", comment_cols=False) - conduits_df = conduits_df.join(xsections_df) + for ix, sect in enumerate(self.config['inp_sections']): + if ix == 0: + df = create_dataframeINP(self.inp.path, sect, comment_cols=False) + else: + df_other = create_dataframeINP(self.inp.path, sect, comment_cols=False) + df = df.join(df_other) - if rpt: - # create a dictionary holding data from an rpt file, if provided - link_flow_df = create_dataframeRPT(rpt.path, "Link Flow Summary") - conduits_df = conduits_df.join(link_flow_df) + if self.rpt: + for rpt_sect in self.config['rpt_sections']: + df = df.join(create_dataframeRPT(self.rpt.path, rpt_sect)) # add conduit coordinates - xys = conduits_df.apply(lambda r: get_link_coords(r, inp.coordinates, inp.vertices), axis=1) - df = conduits_df.assign(coords=xys.map(lambda x: x[0])) - - # add conduit up/down inverts and calculate slope - elevs = self.model.nodes()[['InvertElev']] - df = pd.merge(df, elevs, left_on='InletNode', right_index=True, how='left') - df = df.rename(index=str, columns={"InvertElev": "InletNodeInvert"}) - df = pd.merge(df, elevs, left_on='OutletNode', right_index=True, how='left') - df = df.rename(index=str, columns={"InvertElev": "OutletNodeInvert"}) - df['UpstreamInvert'] = df.InletNodeInvert + df.InletOffset - df['DownstreamInvert'] = df.OutletNodeInvert + df.OutletOffset - df['SlopeFtPerFt'] = (df.UpstreamInvert - df.DownstreamInvert) / df.Length + xys = df.apply(lambda r: get_link_coords(r, self.inp.coordinates, self.inp.vertices), axis=1) + df = df.assign(coords=xys.map(lambda x: x[0])) + # make inlet/outlet node IDs string type df.InletNode = df.InletNode.astype(str) df.OutletNode = df.OutletNode.astype(str) - self.model._conduits_df = df - return df diff --git a/swmmio/tests/test_dataframes.py b/swmmio/tests/test_dataframes.py index 3592855..8061dd3 100644 --- a/swmmio/tests/test_dataframes.py +++ b/swmmio/tests/test_dataframes.py @@ -2,6 +2,7 @@ MODEL_BROWARD_COUNTY_PATH, MODEL_XSECTION_ALT_01, df_test_coordinates_csv, MODEL_FULL_FEATURES_XY) import swmmio +from swmmio import create_dataframeINP import pandas as pd @@ -69,3 +70,41 @@ def test_coordinates(): assert(coordinates.equals(test_coords)) # change projection + + +def test_model_section(): + m = swmmio.Model(MODEL_FULL_FEATURES_XY) + + def pumps_old_method(model): + """ + collect all useful and available data related model pumps and + organize in one dataframe. 
+ """ + + # check if this has been done already and return that data accordingly + if model._pumps_df is not None: + return model._pumps_df + + # parse out the main objects of this model + inp = model.inp + rpt = model.rpt + + # create dataframes of relevant sections from the INP + pumps_df = create_dataframeINP(inp.path, "[PUMPS]", comment_cols=False) + if pumps_df.empty: + return pd.DataFrame() + + # add conduit coordinates + xys = pumps_df.apply(lambda r: swmmio.get_link_coords(r, inp.coordinates, inp.vertices), axis=1) + df = pumps_df.assign(coords=xys.map(lambda x: x[0])) + df.InletNode = df.InletNode.astype(str) + df.OutletNode = df.OutletNode.astype(str) + + model._pumps_df = df + + return df + + pumps_old_method = pumps_old_method(m) + pumps = m.pumps() + + assert(pumps_old_method.equals(pumps)) \ No newline at end of file From 49ffca0c59946428a33582da1a1e46d4b321fbcd Mon Sep 17 00:00:00 2001 From: Adam Erispaha Date: Fri, 15 Mar 2019 11:17:47 -0400 Subject: [PATCH 13/17] update version number --- swmmio/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/swmmio/__init__.py b/swmmio/__init__.py index e84ee4a..1044b5b 100644 --- a/swmmio/__init__.py +++ b/swmmio/__init__.py @@ -5,7 +5,7 @@ '''Python SWMM Input/Output Tools''' -VERSION_INFO = (0, 3, 3, 'dev') +VERSION_INFO = (0, 3, 3) __version__ = '.'.join(map(str, VERSION_INFO)) __author__ = 'Adam Erispaha' __copyright__ = 'Copyright (c) 2016' From 5260becaab3761735b6a2c2288d2133511a564d0 Mon Sep 17 00:00:00 2001 From: Adam Erispaha Date: Fri, 15 Mar 2019 11:19:12 -0400 Subject: [PATCH 14/17] include py7 in CI tests --- .travis.yml | 9 ++++++++- appveyor.yml | 1 + 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index eba1512..46e8f71 100644 --- a/.travis.yml +++ b/.travis.yml @@ -2,7 +2,14 @@ language: python python: - "2.7" - "3.6" - # - "3.7" + - "3.7" + +# Enable 3.7 without globally enabling sudo and dist: xenial for other build jobs +matrix: + include: + - python: 3.7 + dist: xenial + sudo: true # command to install dependencies install: - pip install -r requirements.txt diff --git a/appveyor.yml b/appveyor.yml index 9d4c5ee..037d606 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -14,6 +14,7 @@ environment: # isn't covered by this document) at the time of writing. 
- PYTHON: "C:\\Python27" - PYTHON: "C:\\Python36" + - PYTHON: "C:\\Python37" install: - "%PYTHON%\\python setup.py develop" From 3035aa25356f8f315cebee05b7e933d0e0403738 Mon Sep 17 00:00:00 2001 From: Adam Erispaha Date: Fri, 15 Mar 2019 11:35:47 -0400 Subject: [PATCH 15/17] pandas concat future warning, py7 travis fix --- .travis.yml | 1 - swmmio/core.py | 4 ++-- swmmio/reporting/reporting.py | 2 +- swmmio/reporting/visualize.py | 2 +- swmmio/utils/functions.py | 2 +- swmmio/version_control/inp.py | 8 ++++---- 6 files changed, 9 insertions(+), 10 deletions(-) diff --git a/.travis.yml b/.travis.yml index 46e8f71..c017b54 100644 --- a/.travis.yml +++ b/.travis.yml @@ -2,7 +2,6 @@ language: python python: - "2.7" - "3.6" - - "3.7" # Enable 3.7 without globally enabling sudo and dist: xenial for other build jobs matrix: diff --git a/swmmio/core.py b/swmmio/core.py index 8c32526..7a763b0 100644 --- a/swmmio/core.py +++ b/swmmio/core.py @@ -250,7 +250,7 @@ def links(self): if self._links_df is not None: return self._links_df - df = pd.concat([self.conduits(), self.orifices(), self.weirs(), self.pumps()]) + df = pd.concat([self.conduits(), self.orifices(), self.weirs(), self.pumps()], sort=True) df['facilityid'] = df.index self._links_df = df return df @@ -275,7 +275,7 @@ def nodes(self, bbox=None, subset=None): storage_df = create_dataframeINP(inp.path, "[STORAGE]") # concatenate the DFs and keep only relevant cols - all_nodes = pd.concat([juncs_df, outfalls_df, storage_df]) + all_nodes = pd.concat([juncs_df, outfalls_df, storage_df], sort=True) cols = ['InvertElev', 'MaxDepth', 'SurchargeDepth', 'PondedArea'] all_nodes = all_nodes[cols] diff --git a/swmmio/reporting/reporting.py b/swmmio/reporting/reporting.py index 2918c8a..c6ee60b 100644 --- a/swmmio/reporting/reporting.py +++ b/swmmio/reporting/reporting.py @@ -93,7 +93,7 @@ def __init__(self, baseline_report, alt_report, additional_costs=None, proposed_flooding = alt_report.parcel_flooding conduitdiff = INPDiff(basemodel, altmodel, '[CONDUITS]') - new_cond_ids = pd.concat([conduitdiff.added, conduitdiff.altered]).index + new_cond_ids = pd.concat([conduitdiff.added, conduitdiff.altered], sort=True).index self.baseline_report = baseline_report self.alt_report = alt_report diff --git a/swmmio/reporting/visualize.py b/swmmio/reporting/visualize.py index 3c21b31..70b1fac 100644 --- a/swmmio/reporting/visualize.py +++ b/swmmio/reporting/visualize.py @@ -21,7 +21,7 @@ def create_map(model1, model2=None, bbox=None, crs=None, filename=None, if model2 is not None: changes = INPDiff(model1, model2, section='[CONDUITS]') - df = pd.concat([changes.added, changes.altered]) + df = pd.concat([changes.added, changes.altered], sort=True) subset = df.index.tolist() # else: diff --git a/swmmio/utils/functions.py b/swmmio/utils/functions.py index e6e45e4..56e8c77 100644 --- a/swmmio/utils/functions.py +++ b/swmmio/utils/functions.py @@ -52,7 +52,7 @@ def multidigraph_from_edges(edges, source, target): nodes = nodes.join(flows) conduits = model.conduits() - links = pd.concat([conduits, model.orifices(), model.weirs(), model.pumps()]) + links = pd.concat([conduits, model.orifices(), model.weirs(), model.pumps()], sort=True) links['facilityid'] = links.index # create a nx.MultiDiGraph from the combined model links, add node data, set CRS diff --git a/swmmio/version_control/inp.py b/swmmio/version_control/inp.py index 375cd16..da847c1 100644 --- a/swmmio/version_control/inp.py +++ b/swmmio/version_control/inp.py @@ -86,7 +86,7 @@ def save(self, dir, filename): 
with open (filepath, 'w') as f: vc_utils.write_meta_data(f, self.metadata) for section, change_obj in self.instructions.items(): - section_df = pd.concat([change_obj.removed, change_obj.altered, change_obj.added]) + section_df = pd.concat([change_obj.removed, change_obj.altered, change_obj.added], sort=True) vc_utils.write_inp_section(f, allheaders=None, sectionheader=section, section_data=section_df, pad_top=False, na_fill='NaN') @@ -118,7 +118,7 @@ def build(self, baseline_dir, target_path): new_section = basedf.drop(remove_ids) #add elements - new_section = pd.concat([new_section, changes.altered, changes.added]) + new_section = pd.concat([new_section, changes.altered, changes.added], sort=True) else: #section is not well understood or is problematic, just blindly copy new_section = create_dataframeINP(basemodel.inp.path, section=section) @@ -149,7 +149,7 @@ def __init__(self, model1=None, model2=None, section='[JUNCTIONS]', build_instr_ #find where elements were changed (but kept with same ID) common_ids = df1.index.difference(removed_ids) #original - removed = in common #both dfs concatenated, with matched indices for each element - full_set = pd.concat([df1.loc[common_ids], df2.loc[common_ids]]) + full_set = pd.concat([df1.loc[common_ids], df2.loc[common_ids]], sort=True) #drop dupes on the set, all things that did not changed should have 1 row changes_with_dupes = full_set.drop_duplicates() #duplicate indicies are rows that have changes, isolate these @@ -307,7 +307,7 @@ def create_inp_build_instructions(inpA, inpB, path, filename, comments=''): if section not in problem_sections: #calculate the changes in the current section changes = INPDiff(modela, modelb, section) - data = pd.concat([changes.removed, changes.added, changes.altered]) + data = pd.concat([changes.removed, changes.added, changes.altered], sort=True) #vc_utils.write_excel_inp_section(excelwriter, allsections_a, section, data) vc_utils.write_inp_section(newf, allsections_a, section, data, pad_top=False, na_fill='NaN') #na fill fixes SNOWPACK blanks spaces issue From 3fe63712b4cf7f7479978038d3c3dc54c746f91a Mon Sep 17 00:00:00 2001 From: Adam Erispaha Date: Fri, 15 Mar 2019 11:48:28 -0400 Subject: [PATCH 16/17] reverted pandas sort kwarg --- README.md | 1 + appveyor.yml | 1 - swmmio/core.py | 4 ++-- swmmio/reporting/reporting.py | 2 +- swmmio/reporting/visualize.py | 2 +- swmmio/utils/functions.py | 2 +- swmmio/version_control/inp.py | 8 ++++---- 7 files changed, 10 insertions(+), 10 deletions(-) diff --git a/README.md b/README.md index ff867b7..806d7b6 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,7 @@ # SWMMIO [![Build status](https://ci.appveyor.com/api/projects/status/qywujm5w2wm0y2tv?svg=true)](https://ci.appveyor.com/project/aerispaha/swmmio) +[![Build Status](https://travis-ci.com/aerispaha/swmmio.svg?branch=master)](https://travis-ci.com/aerispaha/swmmio) ![Kool Picture](docs/img/impact_of_option.png?raw=true "Impact of Option") SWMMIO is a set of python tools aiming to provide a means for version control and visualizing results from the EPA Stormwater Management Model (SWMM). Command line tools are also provided for running models individually and in parallel via Python's `multiprocessing` module. These tools are being developed specifically for the application of flood risk management, though most functionality is applicable to SWMM modeling in general. 
diff --git a/appveyor.yml b/appveyor.yml index 037d606..9d4c5ee 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -14,7 +14,6 @@ environment: # isn't covered by this document) at the time of writing. - PYTHON: "C:\\Python27" - PYTHON: "C:\\Python36" - - PYTHON: "C:\\Python37" install: - "%PYTHON%\\python setup.py develop" diff --git a/swmmio/core.py b/swmmio/core.py index 7a763b0..8c32526 100644 --- a/swmmio/core.py +++ b/swmmio/core.py @@ -250,7 +250,7 @@ def links(self): if self._links_df is not None: return self._links_df - df = pd.concat([self.conduits(), self.orifices(), self.weirs(), self.pumps()], sort=True) + df = pd.concat([self.conduits(), self.orifices(), self.weirs(), self.pumps()]) df['facilityid'] = df.index self._links_df = df return df @@ -275,7 +275,7 @@ def nodes(self, bbox=None, subset=None): storage_df = create_dataframeINP(inp.path, "[STORAGE]") # concatenate the DFs and keep only relevant cols - all_nodes = pd.concat([juncs_df, outfalls_df, storage_df], sort=True) + all_nodes = pd.concat([juncs_df, outfalls_df, storage_df]) cols = ['InvertElev', 'MaxDepth', 'SurchargeDepth', 'PondedArea'] all_nodes = all_nodes[cols] diff --git a/swmmio/reporting/reporting.py b/swmmio/reporting/reporting.py index c6ee60b..2918c8a 100644 --- a/swmmio/reporting/reporting.py +++ b/swmmio/reporting/reporting.py @@ -93,7 +93,7 @@ def __init__(self, baseline_report, alt_report, additional_costs=None, proposed_flooding = alt_report.parcel_flooding conduitdiff = INPDiff(basemodel, altmodel, '[CONDUITS]') - new_cond_ids = pd.concat([conduitdiff.added, conduitdiff.altered], sort=True).index + new_cond_ids = pd.concat([conduitdiff.added, conduitdiff.altered]).index self.baseline_report = baseline_report self.alt_report = alt_report diff --git a/swmmio/reporting/visualize.py b/swmmio/reporting/visualize.py index 70b1fac..3c21b31 100644 --- a/swmmio/reporting/visualize.py +++ b/swmmio/reporting/visualize.py @@ -21,7 +21,7 @@ def create_map(model1, model2=None, bbox=None, crs=None, filename=None, if model2 is not None: changes = INPDiff(model1, model2, section='[CONDUITS]') - df = pd.concat([changes.added, changes.altered], sort=True) + df = pd.concat([changes.added, changes.altered]) subset = df.index.tolist() # else: diff --git a/swmmio/utils/functions.py b/swmmio/utils/functions.py index 56e8c77..e6e45e4 100644 --- a/swmmio/utils/functions.py +++ b/swmmio/utils/functions.py @@ -52,7 +52,7 @@ def multidigraph_from_edges(edges, source, target): nodes = nodes.join(flows) conduits = model.conduits() - links = pd.concat([conduits, model.orifices(), model.weirs(), model.pumps()], sort=True) + links = pd.concat([conduits, model.orifices(), model.weirs(), model.pumps()]) links['facilityid'] = links.index # create a nx.MultiDiGraph from the combined model links, add node data, set CRS diff --git a/swmmio/version_control/inp.py b/swmmio/version_control/inp.py index da847c1..375cd16 100644 --- a/swmmio/version_control/inp.py +++ b/swmmio/version_control/inp.py @@ -86,7 +86,7 @@ def save(self, dir, filename): with open (filepath, 'w') as f: vc_utils.write_meta_data(f, self.metadata) for section, change_obj in self.instructions.items(): - section_df = pd.concat([change_obj.removed, change_obj.altered, change_obj.added], sort=True) + section_df = pd.concat([change_obj.removed, change_obj.altered, change_obj.added]) vc_utils.write_inp_section(f, allheaders=None, sectionheader=section, section_data=section_df, pad_top=False, na_fill='NaN') @@ -118,7 +118,7 @@ def build(self, baseline_dir, target_path): new_section 
= basedf.drop(remove_ids) #add elements - new_section = pd.concat([new_section, changes.altered, changes.added], sort=True) + new_section = pd.concat([new_section, changes.altered, changes.added]) else: #section is not well understood or is problematic, just blindly copy new_section = create_dataframeINP(basemodel.inp.path, section=section) @@ -149,7 +149,7 @@ def __init__(self, model1=None, model2=None, section='[JUNCTIONS]', build_instr_ #find where elements were changed (but kept with same ID) common_ids = df1.index.difference(removed_ids) #original - removed = in common #both dfs concatenated, with matched indices for each element - full_set = pd.concat([df1.loc[common_ids], df2.loc[common_ids]], sort=True) + full_set = pd.concat([df1.loc[common_ids], df2.loc[common_ids]]) #drop dupes on the set, all things that did not changed should have 1 row changes_with_dupes = full_set.drop_duplicates() #duplicate indicies are rows that have changes, isolate these @@ -307,7 +307,7 @@ def create_inp_build_instructions(inpA, inpB, path, filename, comments=''): if section not in problem_sections: #calculate the changes in the current section changes = INPDiff(modela, modelb, section) - data = pd.concat([changes.removed, changes.added, changes.altered], sort=True) + data = pd.concat([changes.removed, changes.added, changes.altered]) #vc_utils.write_excel_inp_section(excelwriter, allsections_a, section, data) vc_utils.write_inp_section(newf, allsections_a, section, data, pad_top=False, na_fill='NaN') #na fill fixes SNOWPACK blanks spaces issue From de96ceb5f4ee0be6494fbc09765b5bd1e7c13331 Mon Sep 17 00:00:00 2001 From: Adam Erispaha Date: Fri, 15 Mar 2019 12:15:41 -0400 Subject: [PATCH 17/17] fixed v number --- swmmio/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/swmmio/__init__.py b/swmmio/__init__.py index 1044b5b..4373c28 100644 --- a/swmmio/__init__.py +++ b/swmmio/__init__.py @@ -5,7 +5,7 @@ '''Python SWMM Input/Output Tools''' -VERSION_INFO = (0, 3, 3) +VERSION_INFO = (0, 3, 4) __version__ = '.'.join(map(str, VERSION_INFO)) __author__ = 'Adam Erispaha' __copyright__ = 'Copyright (c) 2016'
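
The coordinate-reference-system handling added in patches 08 and 09 (a crs argument on Model, plus to_crs() reprojecting the [COORDINATES], [VERTICES], and [Polygons] sections through pyproj) can be exercised roughly as follows. This is a minimal sketch, not part of the patch series itself; it assumes pyproj is installed and uses the bundled MODEL_FULL_FEATURES_XY test model referenced above.

import swmmio
from swmmio.tests.data import MODEL_FULL_FEATURES_XY

# instantiate a model and declare its native CRS (PA/KS state-plane style proj string)
m = swmmio.Model(MODEL_FULL_FEATURES_XY, crs="+init=EPSG:2272")

# reproject the inp coordinate sections to WGS84 and inspect the result
m.to_crs("+init=EPSG:4326")
print(m.inp.coordinates.head())
print(m.inp.vertices.head())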
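
Likewise, the links() aggregation from patch 10 and the ModelSection accessor from patch 12 are used along these lines. This is a sketch only: ModelSection is explicitly described as a draft in the commit message, so the exact API may shift, and to_gdf() assumes geopandas (and shapely) are installed.

import swmmio
from swmmio.tests.data import MODEL_FULL_FEATURES_XY

m = swmmio.Model(MODEL_FULL_FEATURES_XY)

# conduits, orifices, weirs, and pumps concatenated into a single DataFrame
links = m.links()
print(links[['InletNode', 'OutletNode']].head())

# pumps is now a ModelSection property: call it for a DataFrame,
# or export a GeoDataFrame (requires geopandas)
pumps_df = m.pumps()
pumps_gdf = m.pumps.to_gdf()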