From d9b7d27185297aa613c618941e485d64081157a7 Mon Sep 17 00:00:00 2001 From: Adam Erispaha Date: Wed, 6 Jun 2018 19:02:39 -0400 Subject: [PATCH 01/12] merged master with 2to3, changed Pillow requirement --- setup.py | 5 +- swmmio/swmmio.py.bak | 592 -------------------------------------- swmmio/utils/functions.py | 12 +- 3 files changed, 9 insertions(+), 600 deletions(-) delete mode 100644 swmmio/swmmio.py.bak diff --git a/setup.py b/setup.py index 26065e2..58e2343 100644 --- a/setup.py +++ b/setup.py @@ -6,12 +6,13 @@ def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() -VERSION = '0.2.0' +VERSION = '0.2.1' AUTHOR_NAME = 'Adam Erispaha' AUTHOR_EMAIL = 'aerispaha@gmail.com' install_requires = [ - 'pillow==3.0.0', + # 'pillow==3.0.0', + 'Pillow', 'numpy', 'pandas', 'pyshp', diff --git a/swmmio/swmmio.py.bak b/swmmio/swmmio.py.bak deleted file mode 100644 index 6f16a7b..0000000 --- a/swmmio/swmmio.py.bak +++ /dev/null @@ -1,592 +0,0 @@ -#!/usr/bin/env python -#coding:utf-8 -import re -import os -from time import ctime -import pandas as pd -from .utils import functions, spatial -import glob -import math -import geojson -from .utils import text as txt -from .utils.dataframes import create_dataframeINP, create_dataframeRPT, get_link_coords -from definitions import * - -class Model(object): - - #Class representing a complete SWMM model incorporating its INP and RPT - #files and data - - def __init__(self, in_file_path): - - #can init with a directory containing files, or the specific inp file - """ - initialize a swmmio.Model object by pointing it to a directory containing - a single INP (and optionally an RPT file with matching filename) or by - pointing it directly to an .inp file. 
- """ - - inp_path = None - if os.path.isdir(in_file_path): - #a directory was passed in - #print 'is dir = {}'.format(in_file_path) - inps_in_dir = glob.glob1(in_file_path, "*.inp") - if len(inps_in_dir) == 1: - #there is only one INP in this directory -> good. - inp_path = os.path.join(in_file_path, inps_in_dir[0]) - #print 'only 1 inp found = {}'.format(inp_path) - - elif os.path.splitext(in_file_path)[1] == '.inp': - #an inp was passed in - inp_path = in_file_path - #print 'is inp path = {}'.format(in_file_path) - - if inp_path: - wd = os.path.dirname(inp_path) #working dir - name = os.path.splitext(os.path.basename(inp_path))[0] #basename - self.name = name - self.inp = inp(inp_path) #inp object - self.rpt = None #until we can confirm it initializes properly - #slots to hold processed data - self.organized_node_data = None - self.organized_conduit_data = None - self.bbox = None #to remember how the model data was clipped - self.scenario = self._get_scenario() - - #try to initialize a companion RPT object - rpt_path = os.path.join(wd, name + '.rpt') - if os.path.exists(rpt_path): - try: - self.rpt = rpt(rpt_path) - except: - print '{}.rpt failed to initialize'.format(name) - - self._nodes_df = None - self._conduits_df = None - self._orifices_df = None - self._weirs_df = None - self._pumps_df = None - self._subcatchments_df = None - - def rpt_is_valid(self , verbose=False): - """Return true if the .rpt file exists and has a revision date more - recent than the .inp file. 
If the inp has an modified date later than - the rpt, assume that the rpt should be regenerated""" - - if self.rpt is None: - if verbose: - print '{} does not have an rpt file'.format(self.name) - return False - - - #check if the rpt has ERRORS output from SWMM - with open (self.rpt.path) as f: - #jump to 500 bytes before the end of file - f.seek(self.rpt.file_size - 500) - for line in f: - spl = line.split() - if len(spl) > 0 and spl[0]=='ERROR': - #return false at first "ERROR" occurence - return False - - rpt_mod_time = os.path.getmtime(self.rpt.path) - inp_mod_time = os.path.getmtime(self.inp.path) - - if verbose: - print "{}.rpt: modified {}".format(self.name, ctime(rpt_mod_time)) - print "{}.inp: modified {}".format(self.name, ctime(inp_mod_time)) - - if inp_mod_time > rpt_mod_time: - #inp datetime modified greater than rpt datetime modified - return False - else: - return True - - def to_map(self, filename=None, inproj='epsg:2272'): - - conds = self.conduits() - nodes = self.nodes() - try: - import pyproj - except ImportError: - raise ImportError('pyproj module needed. 
get this package here: https://pypi.python.org/pypi/pyproj') - - #SET UP THE TO AND FROM COORDINATE PROJECTION - pa_plane = pyproj.Proj(init=inproj, preserve_units=True) - wgs = pyproj.Proj(proj='longlat', datum='WGS84', ellps='WGS84') #google maps, etc - - #get center point - c = ((nodes.X.max() + nodes.X.min())/2 , (nodes.Y.max() + nodes.Y.min())/2) - c = pyproj.transform(pa_plane, wgs, c[0], c[1]) - bbox = [(nodes.X.min(), nodes.Y.min()), - (nodes.X.max(), nodes.Y.max())] - bbox = [pyproj.transform(pa_plane, wgs, *xy) for xy in bbox] - - - geo_conduits = spatial.write_geojson(conds) - geo_nodes = spatial.write_geojson(nodes, geomtype='point') - - if filename is None: - filename = os.path.join(self.inp.dir, self.inp.name + '.html') - - with open(BETTER_BASEMAP_PATH, 'r') as bm: - with open(filename, 'wb') as newmap: - for line in bm: - if '//INSERT GEOJSON HERE ~~~~~' in line: - newmap.write('conduits = {};\n'.format(geojson.dumps(geo_conduits))) - newmap.write('nodes = {};\n'.format(geojson.dumps(geo_nodes))) - newmap.write('parcels = {};\n'.format(0)) - - if 'center: [-75.148946, 39.921685],' in line: - newmap.write('center:[{}, {}],\n'.format(c[0], c[1])) - if '//INSERT BBOX HERE' in line: - newmap.write('map.fitBounds([[{}, {}], [{}, {}]]);\n'.format(bbox[0][0], bbox[0][1], bbox[1][0], bbox[1][1])) - - else: - newmap.write(line) - - def _get_scenario(self): - """get a descrition of the model scenario by reading the raingage data""" - rg = create_dataframeINP(self.inp.path, '[RAINGAGES]') - storms = rg.DataSourceName.unique() - if len(storms) > 1: - return ', '.join(storms[:3]) + '...' - else: - return '{}'.format(storms[0]) - - def conduits(self): - - """ - collect all useful and available data related model conduits and - organize in one dataframe. 
- """ - - #check if this has been done already and return that data accordingly - if self._conduits_df is not None: - return self._conduits_df - - #parse out the main objects of this model - inp = self.inp - rpt = self.rpt - - #create dataframes of relevant sections from the INP - conduits_df = create_dataframeINP(inp.path, "[CONDUITS]", comment_cols=False) - xsections_df = create_dataframeINP(inp.path, "[XSECTIONS]", comment_cols=False) - conduits_df = conduits_df.join(xsections_df) - coords_df = create_dataframeINP(inp.path, "[COORDINATES]").drop_duplicates() - - if rpt: - #create a dictionary holding data from an rpt file, if provided - link_flow_df = create_dataframeRPT(rpt.path, "Link Flow Summary") - conduits_df = conduits_df.join(link_flow_df) - - #add conduit coordinates - #the xys.map() junk is to unpack a nested list - verts = create_dataframeINP(inp.path, '[VERTICES]') - xys = conduits_df.apply(lambda r: get_link_coords(r,coords_df,verts), axis=1) - df = conduits_df.assign(coords=xys.map(lambda x: x[0])) - - #add conduit up/down inverts and calculate slope - elevs = self.nodes()[['InvertElev']] - df = pd.merge(df, elevs, left_on='InletNode', right_index=True, how='left') - df = df.rename(index=str, columns={"InvertElev": "InletNodeInvert"}) - df = pd.merge(df, elevs, left_on='OutletNode', right_index=True, how='left') - df = df.rename(index=str, columns={"InvertElev": "OutletNodeInvert"}) - df['UpstreamInvert'] = df.InletNodeInvert + df.InletOffset - df['DownstreamInvert'] = df.OutletNodeInvert + df.OutletOffset - df['SlopeFtPerFt'] = (df.UpstreamInvert - df.DownstreamInvert) / df.Length - - self._conduits_df = df - - return df - - def orifices(self): - - """ - collect all useful and available data related model orifices and - organize in one dataframe. 
- """ - - #check if this has been done already and return that data accordingly - if self._orifices_df is not None: - return self._orifices_df - - #parse out the main objects of this model - inp = self.inp - rpt = self.rpt - - #create dataframes of relevant sections from the INP - orifices_df = create_dataframeINP(inp.path, "[ORIFICES]", comment_cols=False) - coords_df = create_dataframeINP(inp.path, "[COORDINATES]").drop_duplicates() - - #add conduit coordinates - #the xys.map() junk is to unpack a nested list - verts = create_dataframeINP(inp.path, '[VERTICES]') - xys = orifices_df.apply(lambda r: get_link_coords(r,coords_df,verts), axis=1) - df = orifices_df.assign(coords=xys.map(lambda x: x[0])) - - self._orifices_df = df - - return df - - def weirs(self): - - """ - collect all useful and available data related model weirs and - organize in one dataframe. - """ - - #check if this has been done already and return that data accordingly - if self._weirs_df is not None: - return self._weirs_df - - #parse out the main objects of this model - inp = self.inp - rpt = self.rpt - - #create dataframes of relevant sections from the INP - #BUG why can't comment_cols=False work here? - weirs_df = create_dataframeINP(inp.path, "[WEIRS]")[['InletNode', 'OutletNode', 'WeirType', 'CrestHeight']] - coords_df = create_dataframeINP(inp.path, "[COORDINATES]").drop_duplicates() - - #add conduit coordinates - #the xys.map() junk is to unpack a nested list - verts = create_dataframeINP(inp.path, '[VERTICES]') - xys = weirs_df.apply(lambda r: get_link_coords(r,coords_df,verts), axis=1) - df = weirs_df.assign(coords=xys.map(lambda x: x[0])) - - self._weirs_df = df - - return df - - def pumps(self): - - """ - collect all useful and available data related model pumps and - organize in one dataframe. 
- """ - - #check if this has been done already and return that data accordingly - if self._pumps_df is not None: - return self._pumps_df - - #parse out the main objects of this model - inp = self.inp - rpt = self.rpt - - #create dataframes of relevant sections from the INP - pumps_df = create_dataframeINP(inp.path, "[PUMPS]", comment_cols=False) - coords_df = create_dataframeINP(inp.path, "[COORDINATES]").drop_duplicates() - - #add conduit coordinates - verts = create_dataframeINP(inp.path, '[VERTICES]') - xys = pumps_df.apply(lambda r: get_link_coords(r,coords_df,verts), axis=1) - df = pumps_df.assign(coords=xys.map(lambda x: x[0])) - - self._pumps_df = df - - return df - - def nodes(self, bbox=None, subset=None): - - """ - collect all useful and available data related model nodes and organize - in one dataframe. - """ - - #check if this has been done already and return that data accordingly - if self._nodes_df is not None and bbox==self.bbox: - return self._nodes_df - - #parse out the main objects of this model - inp = self.inp - rpt = self.rpt - - #create dataframes of relevant sections from the INP - juncs_df = create_dataframeINP(inp.path, "[JUNCTIONS]") - outfalls_df = create_dataframeINP(inp.path, "[OUTFALLS]") - storage_df = create_dataframeINP(inp.path, "[STORAGE]") - coords_df = create_dataframeINP(inp.path, "[COORDINATES]") - - #concatenate the DFs and keep only relevant cols - all_nodes = pd.concat([juncs_df, outfalls_df, storage_df]) - cols =['InvertElev', 'MaxDepth', 'SurchargeDepth', 'PondedArea'] - all_nodes = all_nodes[cols] - - if rpt: - #add results data if a rpt file was found - depth_summ = create_dataframeRPT(rpt.path, "Node Depth Summary") - flood_summ = create_dataframeRPT(rpt.path, "Node Flooding Summary") - - #join the rpt data (index on depth df, suffixes for common cols) - rpt_df = depth_summ.join(flood_summ,lsuffix='_depth',rsuffix='_flood') - all_nodes = all_nodes.join(rpt_df) #join to the all_nodes df - - all_nodes = 
all_nodes.join(coords_df[['X', 'Y']]) - def nodexy(row): - if math.isnan(row.X) or math.isnan(row.Y): - return None - else: - return [(row.X, row.Y)] - - xys = all_nodes.apply(lambda r: nodexy(r), axis=1) - all_nodes = all_nodes.assign(coords = xys) - - self._nodes_df = all_nodes - - return all_nodes - - def subcatchments(self): - """ - collect all useful and available data related subcatchments and organize - in one dataframe. - """ - subs = create_dataframeINP(self.inp.path, "[SUBCATCHMENTS]") - subs = subs.drop([';', 'Comment', 'Origin'], axis=1) - - if self.rpt: - flw = create_dataframeRPT(self.rpt.path, 'Subcatchment Runoff Summary') - subs = subs.join(flw) - - #more accurate runoff calculations - subs['RunoffAcFt'] = subs.TotalRunoffIn/ 12.0 * subs.Area - subs['RunoffMGAccurate'] = subs.RunoffAcFt / 3.06888785 - - self._subcatchments_df = subs - - return subs - - - def node(self, node, conduit=None): - - """ - DEPRECIATED/NOT SUPPORTED: organizeNodeData() - - method for provide information about specific model elements - returns a node object given its ID""" - if not self.organized_node_data: - self.organized_node_data = self.organizeNodeData() - - n = self.organized_node_data['node_objects'][node] - subcats_inp = self.inp.createDictionary("[SUBCATCHMENTS]") - subcats_rpt = self.rpt.createDictionary('Subcatchment Runoff Summary') - - n.nodes_upstream = functions.trace_from_node(self, node, mode='up')['nodes'] - n.subcats_direct = [k for k,v in subcats_inp.items() if v[1]==node] - n.subcats_upstream = [k for k,v in subcats_inp.items() if v[1] in n.nodes_upstream] - - - n.drainage_area_direct = sum([float(x) for x in [v[2] for k,v in subcats_inp.items() if k in n.subcats_direct]]) - n.drainage_area_upstream = sum([float(x) for x in [v[2] for k,v in subcats_inp.items() if k in n.subcats_upstream]]) - - n.runoff_upstream_mg = sum([float(x) for x in [v[5] for k,v - in subcats_rpt.items() if k in n.subcats_upstream]]) - n.runoff_upstream_cf = 
n.runoff_upstream_mg*1000000/7.48 - return n - - - def export_to_shapefile(self, shpdir, prj=None): - """ - export the model data into a shapefile. element_type dictates which type - of data will be included. - - default projection is PA State Plane - untested on other cases - """ - - #CREATE THE CONDUIT shp - conds = self.conduits() - conds_path = os.path.join(shpdir, self.inp.name + '_conduits.shp') - spatial.write_shapefile(conds, conds_path, prj=prj) - - #CREATE THE NODE shp - nodes = self.nodes() - nodes_path = os.path.join(shpdir, self.inp.name + '_nodes.shp') - spatial.write_shapefile(nodes, nodes_path, geomtype='point', prj=prj) - - -class SWMMIOFile(object): - - defaultSection = "Link Flow Summary" - - def __init__(self, file_path): - - #file name and path variables - self.path = file_path - self.name = os.path.splitext(os.path.basename(file_path))[0] - self.dir = os.path.dirname(file_path) - self.file_size = os.path.getsize(file_path) - - - def findByteRangeOfSection(self, startStr): - - #returns the start and end "byte" location of substrings in a text file - - with open(self.path) as f: - start = None - end = None - l = 0 #line bytes index - for line in f: - - #if start and len(line) <= 3 and (l - start) > 100: - if start and line.strip() == "" and (l - start) > 100: - #LOGIC ^ if start exists (was found) and the current line length is 3 or - #less (length of /n ) and we're more than 100 bytes from the start location - #then we are at the first "blank" line after our start section (aka the end of the section) - end = l - break - - if (startStr in line) and (not start): - start = l - - l += len(line) + len("\n") #increment length (bytes?) of current position - - return [start, end] - - def createDictionary (self, sectionTitle = defaultSection): - - """ - Help info about this method. 
- """ - - #preppedTempFilePath = self.readSectionAndCleanHeaders(sectionTitle) #pull relevant section and clean headers - preppedTempFilePath = txt.extract_section_from_file(self.path, sectionTitle) - if not preppedTempFilePath: - return None #if nothing was found, do nothing - - passedHeaders = False - - with open(preppedTempFilePath) as file: - the_dict = {} - for line in file: - - if len(line) <=3 and not ";" in line: break - if not passedHeaders: - passedHeaders = True - continue - - #check if line is commented out (having a semicolon before anything else) and skip accordingly - if ";" == line.replace(" ", "")[0]: - continue #omit this entire line - - line = line.split(";")[0] #don't look at anything to right of a semicolon (aka a comment) - - line = ' '.join(re.findall('\"[^\"]*\"|\S+', line)) - rowdata = line.replace("\n", "").split(" ") - the_dict[str(rowdata[0])] = rowdata[1:] #create dictionary row with key and array of remaing stuff on line as the value - - os.remove(preppedTempFilePath) - - return the_dict - -class rpt(SWMMIOFile): - - #creates an accessible SWMM .rpt object, inherits from SWMMIO object - defaultImageDir = r"P:\Tools\Pipe Capacity Graphics\Scripts\image" - def __init__(self, filePath): - - SWMMIOFile.__init__(self, filePath) #run the superclass init - - with open (filePath) as f: - for line in f: - if "Starting Date" in line: - simulationStart = line.split(".. ")[1].replace("\n", "") - if "Ending Date" in line: - simulationEnd = line.split(".. ")[1].replace("\n", "") - if "Report Time Step ........." 
in line: - timeStepMin = int(line.split(":")[1].replace("\n", "")) - break - - self.simulationStart = simulationStart - self.simulationEnd = simulationEnd - self.timeStepMin = timeStepMin - - #grab the date of analysis - with open (filePath) as f: - f.seek(self.file_size - 500) #jump to 500 bytes before the end of file - for line in f: - if "Analysis begun on" in line: - date = line.split("Analysis begun on: ")[1].replace("\n", "") - - self.dateOfAnalysis = date - - #assign the header list - #self.headerList = swmm_headers.rptHeaderList - self.byteLocDict = None #populated if necessary elsewhere (LEGACY, can prob remove) - self.elementByteLocations = {"Link Results":{}, "Node Results":{}} #populated if necessary elsewhere - - def createByteLocDict (self, sectionTitle = "Link Results"): - - #method creates a dictionary with Key = to SWMM element ID and - #Value as the starting byte location of its time series in the rpt file - #for rapidly accessing large rpt files - - #create set of other headers that are not the desired one, use to find end of section - possibleNextSections = set(['Link Results', 'Node Results', 'Subcatchment Results']) - set([sectionTitle]) - - print possibleNextSections - - startByte = self.findByteRangeOfSection(sectionTitle)[0] #+ len('\n ************') #move past the first asterisks - - id_byteDict = {} - with open(self.path) as f: - - f.seek(startByte) #jump to general area of file if we know it - l = startByte - for line in f: - - #if "<<<" in line and ">>>" in line: - if "<<<" and ">>>" in line: #cr - #found the begining of a link's section - lineCleaned = ' '.join(re.findall('\"[^\"]*\"|\S+', line)) - rowdata = lineCleaned.replace("\n", "").split(" ") - - #add to the dict - id_byteDict.update({rowdata[2]:l}) - - if any(header in line for header in possibleNextSections): - #checks if line includes any of the other headers, - #if so, we found next section, stop building dict - break - - l += len(line) + len("\n") #increment length (bytes) 
of current position - - self.byteLocDict = id_byteDict - self.elementByteLocations.update({sectionTitle:id_byteDict}) - return id_byteDict - - def returnDataAtDTime(self, id, dtime, sectionTitle="Link Results", startByte=0): - - #this is a slow ass function, when the file is big - can we improve this? - byteLocDict = self.elementByteLocations[sectionTitle] - if byteLocDict: - startByte = byteLocDict[id] - - elif startByte == 0: - startByte = self.findByteRangeOfSection(sectionTitle)[0] - print 'startByte ' + str(startByte) - - with open(self.path) as f: - - f.seek(startByte) #jump to general area of file if we know it - subsectionFound = False - - for line in f: - if id in line: subsectionFound = True - - if subsectionFound and dtime in line: - line = ' '.join(re.findall('\"[^\"]*\"|\S+', line)) - rowdata = line.replace("\n", "").split(" ") - return rowdata - -class inp(SWMMIOFile): - - #creates an accessible SWMM .inp object - #make sure INP has been saved in the GUI before using this - - def __init__(self, filePath): - #is this class necessary anymore? - SWMMIOFile.__init__(self, filePath) #run the superclass init - - - -#end diff --git a/swmmio/utils/functions.py b/swmmio/utils/functions.py index a25e5ed..9ad8eb4 100644 --- a/swmmio/utils/functions.py +++ b/swmmio/utils/functions.py @@ -79,15 +79,15 @@ def complete_rpt_headers (rptfilepath): return {'headers':foundheaders, 'order':order} def merge_dicts(*dict_args): - ''' - Given any number of dicts, shallow copy and merge into a new dict, - precedence goes to key value pairs in latter dicts. - ''' - result = {} + ''' + Given any number of dicts, shallow copy and merge into a new dict, + precedence goes to key value pairs in latter dicts. 
+ ''' + result = {} for dictionary in dict_args: if dictionary: result.update(dictionary) - return result + return result def trace_from_node(conduits, startnode, mode='up', stopnode=None): From f88b4e9b9786d6d20b3f8cbd6db69ab8328d2256 Mon Sep 17 00:00:00 2001 From: Adam Erispaha Date: Wed, 6 Jun 2018 22:26:06 -0400 Subject: [PATCH 02/12] converted entire package to python3 --- swmmio/__main__.py | 16 ++++++++-------- swmmio/graphics/animate.py | 22 +++++++++++----------- swmmio/graphics/drawing.py | 10 +++++----- swmmio/graphics/options.py | 2 +- swmmio/reporting/batch.py | 8 ++++---- swmmio/reporting/reporting.py | 2 +- swmmio/reporting/utils.py | 2 +- swmmio/reporting/visualize.py | 2 +- swmmio/run_models/run.py | 8 ++++---- swmmio/utils/dataframes.py | 10 +++++----- swmmio/utils/functions.py | 4 ++-- swmmio/utils/text.py | 2 +- swmmio/vendor/images2gif.py | 15 ++++++++------- swmmio/version_control/inp.py | 10 +++++----- swmmio/version_control/tests/validate.py | 6 +++--- swmmio/version_control/utils.py | 4 ++-- swmmio/version_control/version_control.py | 18 +++++++++--------- tests/tests.py | 2 +- 18 files changed, 72 insertions(+), 71 deletions(-) diff --git a/swmmio/__main__.py b/swmmio/__main__.py index 8364a79..8fe72d8 100644 --- a/swmmio/__main__.py +++ b/swmmio/__main__.py @@ -1,6 +1,6 @@ from .run_models.run import run_simple, run_hot_start_sequence from .run_models import start_pool -from swmmio import Model +from .swmmio import Model from itertools import chain import os import argparse @@ -21,23 +21,23 @@ if args.model_to_run is not None: models_paths = [os.path.join(wd, f) for f in args.model_to_run] - print 'Adding models to queue:\n\t{}'.format('\n\t'.join(models_paths)) + print('Adding models to queue:\n\t{}'.format('\n\t'.join(models_paths))) #run the models in series (one after the other) - map(run_simple, models_paths) + list(map(run_simple, models_paths)) # run_simple(args.model_to_run) elif args.hotstart_model_to_run is not None: 
models_paths = [os.path.join(wd, f) for f in args.hotstart_model_to_run] - print 'hotstart_model_to_run the model: {}'.format(args.hotstart_model_to_run) + print('hotstart_model_to_run the model: {}'.format(args.hotstart_model_to_run)) # m = Model(args.hotstart_model_to_run) # run_hot_start_sequence(m)#args.hotstart_model_to_run) - map(run_hot_start_sequence, models_paths) + list(map(run_hot_start_sequence, models_paths)) elif args.start_pool is not None: models_dirs = [os.path.join(wd, f) for f in args.start_pool] - print 'Searching for models in:\n\t{}'.format('\n\t'.join(models_dirs)) + print('Searching for models in:\n\t{}'.format('\n\t'.join(models_dirs))) #combine the segments and options (combinations) into one iterable inp_paths = [] for root, dirs, files in chain.from_iterable(os.walk(path) for path in models_dirs): @@ -50,8 +50,8 @@ #call the main() function in start_pool.py start_pool.main(inp_paths, args.cores_left) - print "swmmio has completed running {} models".format(len(inp_paths)) + print("swmmio has completed running {} models".format(len(inp_paths))) else: - print 'you need to pass in some args' + print('you need to pass in some args') diff --git a/swmmio/graphics/animate.py b/swmmio/graphics/animate.py index 24813d9..260dd1b 100644 --- a/swmmio/graphics/animate.py +++ b/swmmio/graphics/animate.py @@ -22,7 +22,7 @@ def animateModel(model, startDtime=None, endDtime=None, **kwargs): """ #unpack and update the options ops = du.default_draw_options() - for key, value in kwargs.iteritems(): + for key, value in kwargs.items(): ops.update({key:value}) #return ops width = ops['width'] @@ -63,8 +63,8 @@ def animateModel(model, startDtime=None, endDtime=None, **kwargs): if userStartDT < simStartDT or userEndDT > simEndDT or timeStepMod != 0 or userEndDT < userStartDT: #user has entered fault date times either by not being within the #availble data in the rpt or by starting at something that doesn't fit the timestep - print "PROBLEM WITH DATETIME ENTERED. 
Make sure it fits within data and start time rest on factor of timestep in minutes." - print "userStartDT = ", userStartDT, "\nuserEndDT = ", userEndDT, "\nsimStartDT = ", simStartDT, "\nsimEndDT = ", simEndDT, "\nTIMESTEP = ", rpt.timeStepMin + print("PROBLEM WITH DATETIME ENTERED. Make sure it fits within data and start time rest on factor of timestep in minutes.") + print("userStartDT = ", userStartDT, "\nuserEndDT = ", userEndDT, "\nsimStartDT = ", simStartDT, "\nsimEndDT = ", simEndDT, "\nTIMESTEP = ", rpt.timeStepMin) return None currentT = datetime.strptime(startDtime, "%b-%d-%Y %H:%M:%S") #SWMM dtime format needed @@ -81,7 +81,7 @@ def animateModel(model, startDtime=None, endDtime=None, **kwargs): if not os.path.isfile(byteLocDictionaryFName): #this is a heavy operation, allow a few minutes - print "generating byte dictionary..." + print("generating byte dictionary...") #conduitByteLocationDict = rpt.createByteLocDict("Link Results") rpt.createByteLocDict("Link Results") rpt.createByteLocDict("Node Results") @@ -96,7 +96,7 @@ def animateModel(model, startDtime=None, endDtime=None, **kwargs): rpt.elementByteLocations = pickle.load( open(byteLocDictionaryFName, 'r') ) #rpt.byteLocDict = conduitByteLocationDict - print "Started Drawing at " + strftime("%b-%d-%Y %H:%M:%S") + print("Started Drawing at " + strftime("%b-%d-%Y %H:%M:%S")) log = "Started Drawing at " + strftime("%b-%d-%Y %H:%M:%S") + "\n\nErrors:\n\n" drawCount = 0 conduitErrorCount = 0 @@ -116,7 +116,7 @@ def animateModel(model, startDtime=None, endDtime=None, **kwargs): #DRAW THE CONDUITS if ops['conduitSymb']: - for id, conduit in conduitDicts.iteritems(): + for id, conduit in conduitDicts.items(): #coordPair = coordPairDict['coordinates'] if conduit.coordinates: #this prevents draws if no flow is supplied (RDII and such) @@ -125,11 +125,11 @@ def animateModel(model, startDtime=None, endDtime=None, **kwargs): drawCount += 1 - if drawCount > 0 and drawCount % 2000 == 0: print str(drawCount) + " 
pipes drawn - simulation time = " + currentTstr + if drawCount > 0 and drawCount % 2000 == 0: print(str(drawCount) + " pipes drawn - simulation time = " + currentTstr) #DRAW THE NODES if ops['nodeSymb']: - for id, node in nodeDicts.iteritems(): + for id, node in nodeDicts.items(): if node.coordinates: #this prevents draws if no flow is supplied (RDII and such) su.drawNode(node, nodeDict, draw, rpt=rpt, dTime=currentTstr, options=ops['nodeSymb'], xplier=xplier) drawCount += 1 @@ -153,7 +153,7 @@ def animateModel(model, startDtime=None, endDtime=None, **kwargs): imgPath = os.path.join(tempImgDir, image) frames.append(Image.open(imgPath)) - print "building gif with " + str(len(glob.glob1(tempImgDir, "*.png"))) + " frames..." + print("building gif with " + str(len(glob.glob1(tempImgDir, "*.png"))) + " frames...") if not imgName: imgName = inp.name gifFile = os.path.join(imgDir, imgName) + ".gif" frameDuration = 1.0 / float(ops['fps']) @@ -165,7 +165,7 @@ def animateModel(model, startDtime=None, endDtime=None, **kwargs): with open(os.path.join(imgDir, "log.txt"), 'w') as logFile: logFile.write(log) - print "Draw Count =" + str(drawCount) - print "Video saved to:\n\t" + gifFile + print("Draw Count =" + str(drawCount)) + print("Video saved to:\n\t" + gifFile) os.startfile(gifFile)#this doesn't seem to work diff --git a/swmmio/graphics/drawing.py b/swmmio/graphics/drawing.py index aea04ed..b014a71 100644 --- a/swmmio/graphics/drawing.py +++ b/swmmio/graphics/drawing.py @@ -136,7 +136,7 @@ def annotate_streets(df, img, text_col): #confirm font file location if not os.path.exists(config.font_file): - print 'Error loading defautl font. Check your config.font_file' + print('Error loading defautl font. Check your config.font_file') return None unique_sts = df[text_col].unique() @@ -244,13 +244,13 @@ def _annotateMap (canvas, model, model2=None, currentTstr = None, options=None, #Buid the title and files list (handle 1 or two input models) #this is hideous, or elegant? 
files = title = results_string = symbology_string = annotationTxt = "" - files = '\n'.join([m.rpt.path for m in filter(None, [model, model2])]) - title = ' to '.join([m.inp.name for m in filter(None, [model, model2])]) - symbology_string = ', '.join([s['title'] for s in filter(None, [nodeSymb, conduitSymb, parcelSymb])]) + files = '\n'.join([m.rpt.path for m in [_f for _f in [model, model2] if _f]]) + title = ' to '.join([m.inp.name for m in [_f for _f in [model, model2] if _f]]) + symbology_string = ', '.join([s['title'] for s in [_f for _f in [nodeSymb, conduitSymb, parcelSymb] if _f]]) title += "\n" + symbology_string #collect results - for result, value in results.iteritems(): + for result, value in results.items(): results_string += '\n' + result + ": " + str(value) #compile the annotation text diff --git a/swmmio/graphics/options.py b/swmmio/graphics/options.py index 95bccaf..dc6f54f 100644 --- a/swmmio/graphics/options.py +++ b/swmmio/graphics/options.py @@ -1,5 +1,5 @@ from definitions import PARCEL_FEATURES, GEODATABASE -from constants import * +from .constants import * font_file = r"C:\Data\Code\Fonts\Raleway-Regular.ttf" basemap_options = { diff --git a/swmmio/reporting/batch.py b/swmmio/reporting/batch.py index 79f3186..cc9b4bd 100644 --- a/swmmio/reporting/batch.py +++ b/swmmio/reporting/batch.py @@ -44,7 +44,7 @@ def batch_reports(project_dir, results_file, if '.inp' in f: inp_path = os.path.join(path,f) alt = Model(inp_path) - print 'reporting on {}'.format(alt.name) + print('reporting on {}'.format(alt.name)) #generate the reports frpt = reporting.FloodReport(alt, parcel_node_join_df) impact_rpt = reporting.ComparisonReport(baserpt, frpt, @@ -97,7 +97,7 @@ def batch_cost_estimates(baseline_dir, segments_dir, options_dir, results_file, costsdf = functions.estimate_cost_of_new_conduits(baseline, alt, supplemental_cost_data) cost_estimate = costsdf.TotalCostEstimate.sum() / math.pow(10, 6) - print '{}: ${}M'.format(alt.name, round(cost_estimate,1)) + 
print('{}: ${}M'.format(alt.name, round(cost_estimate,1))) model_id = os.path.splitext(f)[0] with open(results_file, 'a') as res: @@ -131,13 +131,13 @@ def batch_post_process(options_dir, baseline_dir, log_dir, bbox=None, overwrite= current_dir = os.path.join(options_dir, folder) report_dir = os.path.join(current_dir, REPORT_DIR_NAME) if not overwrite and os.path.exists(report_dir): - print 'skipping {}'.format(folder) + print('skipping {}'.format(folder)) continue else: #generate the report current_model = Model(current_dir) - print 'Generating report for {}'.format(current_model.inp.name) + print('Generating report for {}'.format(current_model.inp.name)) #reporting.generate_figures(baseline, current_model, bbox=bbox, imgDir=report_dir, verbose=True) report = reporting.Report(baseline, current_model) report.write(report_dir) diff --git a/swmmio/reporting/reporting.py b/swmmio/reporting/reporting.py index fafbe56..ca9af49 100644 --- a/swmmio/reporting/reporting.py +++ b/swmmio/reporting/reporting.py @@ -262,7 +262,7 @@ def generate_figures(self, rpt_dir, parcel_shp_df, bbox=d68d70): def __str__(self): """print friendly""" - catz = filter(None, self.flood_comparison.Category.unique()) + catz = [_f for _f in self.flood_comparison.Category.unique() if _f] a = ['{}: {}'.format(c, self.impact[c]) for c in catz] files = [self.baseline_report.model.inp.path, self.alt_report.model.inp.path] diff --git a/swmmio/reporting/utils.py b/swmmio/reporting/utils.py index b277da3..261d734 100644 --- a/swmmio/reporting/utils.py +++ b/swmmio/reporting/utils.py @@ -23,7 +23,7 @@ def insert_in_file_2(key, string, newfile): #start writing that thing key = '{}{}{}'.format('{{', key, '}}') #Django style - print key + print(key) with open(newfile, 'r') as newmap: for line in newmap: if key in line: diff --git a/swmmio/reporting/visualize.py b/swmmio/reporting/visualize.py index a023105..70feb9b 100644 --- a/swmmio/reporting/visualize.py +++ b/swmmio/reporting/visualize.py @@ -39,7 +39,7 @@ 
def create_map(model1, model2=None, bbox=None, crs=None, filename=None, geometries = [] #array of features #collect the links - for k,v in model2.list_objects('conduit', bbox, subset=subset).items(): + for k,v in list(model2.list_objects('conduit', bbox, subset=subset).items()): props = { 'MaxQPercent':v.maxQpercent, 'id':v.id, diff --git a/swmmio/run_models/run.py b/swmmio/run_models/run.py index 47aad14..a40720c 100644 --- a/swmmio/run_models/run.py +++ b/swmmio/run_models/run.py @@ -16,7 +16,7 @@ def run_simple(inp_path, swmm_eng=SWMM_ENGINE_PATH): """ run a model once as is. """ - print 'running {} with {}'.format(inp_path, swmm_eng) + print('running {} with {}'.format(inp_path, swmm_eng)) #inp_path = model.inp.path rpt_path = os.path.splitext(inp_path)[0] + '.rpt' @@ -32,7 +32,7 @@ def run_hot_start_sequence(inp_path, swmm_eng=SWMM_ENGINE_PATH): # if not os.path.exists(hotstart1) and not os.path.exists(hotstart2): #create new model inp with params to save hotstart1 - print 'create new model inp with params to save hotstart1' + print('create new model inp with params to save hotstart1') s = pd.Series(['SAVE HOTSTART "{}"'.format(hotstart1)]) hot1_df = pd.DataFrame(s, columns=['[FILES]']) model = replace_inp_section(model.inp.path, '[FILES]', hot1_df) @@ -42,7 +42,7 @@ def run_hot_start_sequence(inp_path, swmm_eng=SWMM_ENGINE_PATH): # if os.path.exists(hotstart1) and not os.path.exists(hotstart2): #create new model inp with params to use hotstart1 and save hotstart2 - print 'with params to use hotstart1 and save hotstart2' + print('with params to use hotstart1 and save hotstart2') s = pd.Series(['USE HOTSTART "{}"'.format(hotstart1), 'SAVE HOTSTART "{}"'.format(hotstart2)]) hot2_df = pd.DataFrame(s, columns=['[FILES]']) model = replace_inp_section(model.inp.path, '[FILES]', hot2_df) @@ -50,7 +50,7 @@ def run_hot_start_sequence(inp_path, swmm_eng=SWMM_ENGINE_PATH): # if os.path.exists(hotstart2): #create new model inp with params to use hotstart2 and not save 
anything - print 'params to use hotstart2 and not save anything' + print('params to use hotstart2 and not save anything') s = pd.Series(['USE HOTSTART "{}"'.format(hotstart2)]) hot3_df = pd.DataFrame(s, columns=['[FILES]']) diff --git a/swmmio/utils/dataframes.py b/swmmio/utils/dataframes.py index f82b3a7..96e706f 100644 --- a/swmmio/utils/dataframes.py +++ b/swmmio/utils/dataframes.py @@ -38,8 +38,8 @@ def create_dataframeINP(inp_path, section='[CONDUITS]', ignore_comments=True, if not tempfilepath: #if this head (section) was not found in the textfile, return a #blank dataframe with the appropriate schema - print 'header "{}" not found in "{}"'.format(section, inp_path) - print 'returning empty dataframe' + print('header "{}" not found in "{}"'.format(section, inp_path)) + print('returning empty dataframe') headerlist = headerdefs['headers'].get(section, 'blob').split() + [';', 'Comment', 'Origin'] blank_df = pd.DataFrame(data=None, columns=headerlist).set_index(headerlist[0]) return blank_df @@ -73,7 +73,7 @@ def get_link_coords(row, nodexys, verticies): x2 = round(nodexys.at[row.OutletNode, 'X'], 4) y2 = round(nodexys.at[row.OutletNode, 'Y'], 4) if None in [x1, x2, y1, y2]: - print row.name, 'problem, no coords' + print(row.name, 'problem, no coords') #grab any extra verts, place in between up/dwn nodes res = [(x1, y1)] if row.name in verticies.index: @@ -82,7 +82,7 @@ def get_link_coords(row, nodexys, verticies): if isinstance(xs, list) and isinstance(ys, list): #if more than one vert for this link exists, arrays are returned #from verticies.get_value(). 
it then needs to be zipped up - res = res + zip(xs, ys) + res = res + list(zip(xs, ys)) else: res = res + [(xs, ys)] @@ -104,7 +104,7 @@ def create_dataframeRPT(rpt_path, section='Link Flow Summary', element_id=None): element_id=element_id) if not tempfilepath: - print 'header "{}" not found in "{}"'.format(section, rpt_path) + print('header "{}" not found in "{}"'.format(section, rpt_path)) return None if headerdefs['headers'][section] == 'blob': diff --git a/swmmio/utils/functions.py b/swmmio/utils/functions.py index 9ad8eb4..45993b5 100644 --- a/swmmio/utils/functions.py +++ b/swmmio/utils/functions.py @@ -119,7 +119,7 @@ def trace(node_id): trace(data.OutletNode) #kickoff the trace - print "Starting trace {} from {}".format(mode, startnode) + print(("Starting trace {} from {}".format(mode, startnode))) trace(startnode) - print "Traced {0} nodes from {1}".format(len(traced_nodes), startnode) + print(("Traced {0} nodes from {1}".format(len(traced_nodes), startnode))) return {'nodes':traced_nodes, 'conduits':traced_conduits} diff --git a/swmmio/utils/text.py b/swmmio/utils/text.py index db47b29..0ea7764 100644 --- a/swmmio/utils/text.py +++ b/swmmio/utils/text.py @@ -211,7 +211,7 @@ def extract_section_from_rpt(filepath, sectionheader, element_id=None, cleanhead elem_start_string = ' '.join(["<<<", sectionheader.split()[0], element_id, ">>>"]) if element_id and elem_start_string in line: - print 'element_id found: {}'.format(line) + print('element_id found: {}'.format(line)) #if we should look for an element_id and it #is in the current line elementstartfound = True diff --git a/swmmio/vendor/images2gif.py b/swmmio/vendor/images2gif.py index 092f35c..484b035 100644 --- a/swmmio/vendor/images2gif.py +++ b/swmmio/vendor/images2gif.py @@ -587,7 +587,7 @@ def geta(self, alpha, rad): except KeyError: length = rad*2-1 mid = length/2 - q = np.array(range(mid-1,-1,-1)+range(-1,mid)) + q = np.array(list(range(mid-1,-1,-1))+list(range(-1,mid))) a = alpha*(rad*rad - 
q*q)/(rad*rad) a[mid] = 0 self.a_s[(alpha, rad)] = a @@ -670,7 +670,7 @@ def learn(self): if rad <= 1: rad = 0 - print "Beginning 1D learning: samplepixels =",samplepixels," rad =", rad + print("Beginning 1D learning: samplepixels =",samplepixels," rad =", rad) step = 0 pos = 0 @@ -689,7 +689,7 @@ def learn(self): if i%100 == 99: tmp = '\b'*len(printed_string) printed_string = str((i+1)*100/samplepixels)+"%\n" - print tmp + printed_string, + print(tmp + printed_string, end=' ') p = self.pixels[pos] r = (p >> 16) & 0xff g = (p >> 8) & 0xff @@ -717,7 +717,7 @@ def learn(self): rad = biasRadius >> self.RADIUSBIASSHIFT if rad <= 1: rad = 0 - print "Finished 1D learning: final alpha =",(1.0*alpha)/self.INITALPHA,"!" + print("Finished 1D learning: final alpha =",(1.0*alpha)/self.INITALPHA,"!") def fix(self): for i in range(self.NETSIZE): @@ -782,7 +782,7 @@ def quantize(self, image): if cKDTree: return self.quantize_with_scipy(image) else: - print 'Scipy not available, falling back to slower version.' 
+ print('Scipy not available, falling back to slower version.') return self.quantize_without_scipy(image) @@ -794,7 +794,7 @@ def quantize_with_scipy(self, image): kdtree = cKDTree(self.colormap[:,:3],leafsize=10) result = kdtree.query(px2) colorindex = result[1] - print "Distance:", (result[0].sum()/(w*h)) + print("Distance:", (result[0].sum()/(w*h))) px2[:] = self.colormap[colorindex,:3] return Image.fromarray(px).convert("RGB").quantize(palette=self.paletteImage()) @@ -818,7 +818,8 @@ def quantize_without_scipy(self, image): px[i,j,0],px[i,j,1],px[i,j,2] = val return Image.fromarray(px).convert("RGB").quantize(palette=self.paletteImage()) - def convert(self, (r, g, b)): + def convert(self, xxx_todo_changeme): + (r, g, b) = xxx_todo_changeme i = self.inxsearch(r, g, b) return self.colormap[i,:3] diff --git a/swmmio/version_control/inp.py b/swmmio/version_control/inp.py index b59a5ed..ebaef1b 100644 --- a/swmmio/version_control/inp.py +++ b/swmmio/version_control/inp.py @@ -9,7 +9,7 @@ import sys from copy import deepcopy if sys.version_info[0] < 3: - from StringIO import StringIO + from io import StringIO else: from io import StringIO problem_sections = ['[CURVES]', '[TIMESERIES]', '[RDII]', '[HYDROGRAPHS]'] @@ -45,7 +45,7 @@ def __init__(self, build_instr_file=None): def __add__(self, other): bi = BuildInstructions() - for section, change_obj in self.instructions.iteritems(): + for section, change_obj in self.instructions.items(): if section in other.instructions: new_change = change_obj + other.instructions[section] bi.instructions[section] = new_change @@ -53,7 +53,7 @@ def __add__(self, other): #section doesn't exist in other, maintain current instructions bi.instructions[section] = change_obj - for section, change_obj in other.instructions.iteritems(): + for section, change_obj in other.instructions.items(): if section not in self.instructions: bi.instructions[section] = change_obj @@ -85,7 +85,7 @@ def save(self, dir, filename): filepath = os.path.join(dir, 
filename) with open (filepath, 'w') as f: vc_utils.write_meta_data(f, self.metadata) - for section, change_obj in self.instructions.iteritems(): + for section, change_obj in self.instructions.items(): section_df = pd.concat([change_obj.removed, change_obj.altered, change_obj.added]) vc_utils.write_inp_section(f, allheaders=None, sectionheader=section, section_data=section_df, pad_top=False, na_fill='NaN') @@ -243,7 +243,7 @@ def generate_inp_from_diffs(basemodel, inpdiffs, target_dir): #instructions applied with open (newinp, 'w') as f: for section in allheaders['order']: - print section + print(section) if section not in problem_sections and allheaders['headers'][section] != 'blob': #check if a changes from baseline spreadheet exists, and use this #information if available to create the changes array diff --git a/swmmio/version_control/tests/validate.py b/swmmio/version_control/tests/validate.py index 9dd9dc4..ca4591c 100644 --- a/swmmio/version_control/tests/validate.py +++ b/swmmio/version_control/tests/validate.py @@ -10,7 +10,7 @@ def search_for_duplicates(inp_path, verbose = False): """ headers = funcs.complete_inp_headers(inp_path)['headers'] dups_found = False - for header, cols, in headers.iteritems(): + for header, cols, in headers.items(): if cols != 'blob': df = dataframes.create_dataframeINP(inp_path, section=header) @@ -18,11 +18,11 @@ def search_for_duplicates(inp_path, verbose = False): n_unique = len(elements.unique()) #number of unique elements n_total = len(elements) #total number of elements if verbose: - print '{} -> (uniques, total) -> ({}, {})'.format(header, n_unique , n_total) + print('{} -> (uniques, total) -> ({}, {})'.format(header, n_unique , n_total)) if n_unique != n_total: dups = ', '.join(df[df.index.duplicated()].index.unique().tolist()) - print 'duplicate found in {}\nsection: {}\n{}'.format(inp_path, header, dups) + print('duplicate found in {}\nsection: {}\n{}'.format(inp_path, header, dups)) dups_found = True return dups_found 
diff --git a/swmmio/version_control/utils.py b/swmmio/version_control/utils.py index 06be24b..f845646 100644 --- a/swmmio/version_control/utils.py +++ b/swmmio/version_control/utils.py @@ -120,11 +120,11 @@ def bi_is_current(build_instr_file): #parents = baseline.update(alternatives) # print meta['Parent Models']['Baseline'] # print alternatives - for inp, revisiondate in baseline.iteritems(): + for inp, revisiondate in baseline.items(): if modification_date(inp) != revisiondate: return False - for inp, revisiondate in alternatives.iteritems(): + for inp, revisiondate in alternatives.items(): if modification_date(inp) != revisiondate: return False diff --git a/swmmio/version_control/version_control.py b/swmmio/version_control/version_control.py index f49f8a7..aab35e4 100644 --- a/swmmio/version_control/version_control.py +++ b/swmmio/version_control/version_control.py @@ -52,7 +52,7 @@ def propagate_changes_from_baseline(baseline_dir, alternatives_dir, combi_dir, bi.metadata['Parent Models']['Baseline'] = {baseinp:vc_utils.modification_date(baseinp)} bi.metadata['Log'].update({version_id:comments}) bi.save(vc_directory, version_id+'.txt') - print 'rebuilding {} with changes to baseline'.format(model.name) + print('rebuilding {} with changes to baseline'.format(model.name)) bi.build(baseline_dir, model.inp.path) #overwrite old inp @@ -92,14 +92,14 @@ def create_combinations(baseline_dir, rsn_dir, combi_dir, version_id='', #identify all scenarios (cartesian product of sets of IPs between each RSN) #then isolate child scenarios with atleast 2 parents (sets with one parent #are already modeled as IPs within the RSNs) - all_scenarios = [filter(None, s) for s in itertools.product(*IPs)] + all_scenarios = [[_f for _f in s if _f] for s in itertools.product(*IPs)] child_scenarios = [s for s in all_scenarios if len(s) > 1] #notify user of what was initially found - str_IPs = '\n'.join([', '.join(filter(None, i)) for i in IPs]) - print ('Found {} implementation phases among 
{} networks:\n{}\n' + str_IPs = '\n'.join([', '.join([_f for _f in i if _f]) for i in IPs]) + print(('Found {} implementation phases among {} networks:\n{}\n' 'This yeilds {} combined scenarios ({} total)'.format(len(IP_dirs), - len(RSN_dirs),str_IPs,len(child_scenarios),len(all_scenarios) - 1)) + len(RSN_dirs),str_IPs,len(child_scenarios),len(all_scenarios) - 1))) # ========================================================================== # UPDATE/CREATE THE PARENT MODEL BUILD INSTRUCTIONS @@ -109,7 +109,7 @@ def create_combinations(baseline_dir, rsn_dir, combi_dir, version_id='', vc_dir = os.path.join(ip_dir, 'vc') if not os.path.exists(vc_dir): - print 'creating new build instructions for {}'.format(ip_model.name) + print('creating new build instructions for {}'.format(ip_model.name)) inp.create_inp_build_instructions(baseinp, ip_model.inp.path, vc_dir, version_id, comments) @@ -120,7 +120,7 @@ def create_combinations(baseline_dir, rsn_dir, combi_dir, version_id='', if not vc_utils.bi_is_current(latest_bi): #revision date of the alt doesn't match the newest build #instructions for this 'imp_level', so we should refresh it - print 'updating build instructions for {}'.format(ip_model.name) + print('updating build instructions for {}'.format(ip_model.name)) inp.create_inp_build_instructions(baseinp, ip_model.inp.path, vc_dir, version_id, comments) @@ -144,7 +144,7 @@ def create_combinations(baseline_dir, rsn_dir, combi_dir, version_id='', os.mkdir(new_dir) newinppath = os.path.join(new_dir, newcombi + '.inp') - print 'creating new child model: {}'.format(newcombi) + print('creating new child model: {}'.format(newcombi)) new_build_instructions = sum(build_instrcts) new_build_instructions.save(vc_dir, version_id+'.txt') new_build_instructions.build(baseline_dir, newinppath) @@ -157,7 +157,7 @@ def create_combinations(baseline_dir, rsn_dir, combi_dir, version_id='', if not vc_utils.bi_is_current(latest_bi): #revision date of the alt doesn't match the newest build 
#instructions for this 'imp_level', so we should refresh it - print 'updating child build instructions for {}'.format(newcombi) + print('updating child build instructions for {}'.format(newcombi)) newinppath = os.path.join(new_dir, newcombi + '.inp') new_build_instructions = sum(build_instrcts) new_build_instructions.save(vc_dir, version_id+'.txt') diff --git a/tests/tests.py b/tests/tests.py index 168fa1d..3bb4cee 100644 --- a/tests/tests.py +++ b/tests/tests.py @@ -11,7 +11,7 @@ def test(self): #print f.read(1000) text = f.read() for hpair in swmm_headers.inpHeaderList: - print hpair[0][:200] + print(hpair[0][:200]) matched_an_inp_header = False #hpair[0] if hpair[0] in text: From 5fbddf0401958911843d840f4f1e2c3cf0748a3b Mon Sep 17 00:00:00 2001 From: Adam Erispaha Date: Wed, 6 Jun 2018 23:37:04 -0400 Subject: [PATCH 03/12] moved definitions.py into module --- definitions.py => swmmio/definitions.py | 0 swmmio/graphics/__init__.py | 2 +- swmmio/graphics/options.py | 2 +- swmmio/graphics/swmm_graphics.py | 2 +- swmmio/reporting/batch.py | 2 +- swmmio/reporting/reporting.py | 2 +- swmmio/reporting/serialize.py | 2 +- swmmio/reporting/utils.py | 2 +- swmmio/reporting/visualize.py | 2 +- swmmio/run_models/run.py | 2 +- swmmio/swmmio.py | 2 +- swmmio/utils/spatial.py | 2 +- 12 files changed, 11 insertions(+), 11 deletions(-) rename definitions.py => swmmio/definitions.py (100%) diff --git a/definitions.py b/swmmio/definitions.py similarity index 100% rename from definitions.py rename to swmmio/definitions.py diff --git a/swmmio/graphics/__init__.py b/swmmio/graphics/__init__.py index 09d5542..ceb4fab 100644 --- a/swmmio/graphics/__init__.py +++ b/swmmio/graphics/__init__.py @@ -1,4 +1,4 @@ -from definitions import FONT_PATH +from swmmio.definitions import FONT_PATH class _dotdict(dict): """dot.notation access to dictionary attributes""" diff --git a/swmmio/graphics/options.py b/swmmio/graphics/options.py index dc6f54f..a78b2d8 100644 --- a/swmmio/graphics/options.py 
+++ b/swmmio/graphics/options.py @@ -1,4 +1,4 @@ -from definitions import PARCEL_FEATURES, GEODATABASE +from swmmio.definitions import PARCEL_FEATURES, GEODATABASE from .constants import * font_file = r"C:\Data\Code\Fonts\Raleway-Regular.ttf" diff --git a/swmmio/graphics/swmm_graphics.py b/swmmio/graphics/swmm_graphics.py index faa8a56..a1e4161 100644 --- a/swmmio/graphics/swmm_graphics.py +++ b/swmmio/graphics/swmm_graphics.py @@ -1,5 +1,5 @@ #graphical functions for SWMM files -from definitions import * +from swmmio.definitions import * from swmmio.damage import parcels as pdamage from swmmio.graphics import config, options from swmmio.graphics.constants import * #constants diff --git a/swmmio/reporting/batch.py b/swmmio/reporting/batch.py index cc9b4bd..b69abc6 100644 --- a/swmmio/reporting/batch.py +++ b/swmmio/reporting/batch.py @@ -9,7 +9,7 @@ import shutil import math from itertools import chain -from definitions import * +from swmmio.definitions import * import pandas as pd diff --git a/swmmio/reporting/reporting.py b/swmmio/reporting/reporting.py index ca9af49..7fea0b0 100644 --- a/swmmio/reporting/reporting.py +++ b/swmmio/reporting/reporting.py @@ -14,7 +14,7 @@ import os import math import pandas as pd -from definitions import * +from swmmio.definitions import * import json, geojson import shutil diff --git a/swmmio/reporting/serialize.py b/swmmio/reporting/serialize.py index b00771c..3b53f4a 100644 --- a/swmmio/reporting/serialize.py +++ b/swmmio/reporting/serialize.py @@ -5,7 +5,7 @@ from swmmio.utils import spatial from swmmio.graphics import swmm_graphics as sg from swmmio.reporting.reporting import FloodReport -from definitions import * +from swmmio.definitions import * import geojson diff --git a/swmmio/reporting/utils.py b/swmmio/reporting/utils.py index 261d734..e6ea963 100644 --- a/swmmio/reporting/utils.py +++ b/swmmio/reporting/utils.py @@ -1,4 +1,4 @@ -from definitions import * +from swmmio.definitions import * import shutil #THIS STUFF IS 
INCOMPLETE/ MAYBE BROKEN diff --git a/swmmio/reporting/visualize.py b/swmmio/reporting/visualize.py index 70feb9b..dcbfdf4 100644 --- a/swmmio/reporting/visualize.py +++ b/swmmio/reporting/visualize.py @@ -1,7 +1,7 @@ import os, shutil import pandas as pd from swmmio.version_control.inp import INPDiff -from definitions import * +from swmmio.definitions import * def create_map(model1, model2=None, bbox=None, crs=None, filename=None, diff --git a/swmmio/run_models/run.py b/swmmio/run_models/run.py index a40720c..9725717 100644 --- a/swmmio/run_models/run.py +++ b/swmmio/run_models/run.py @@ -6,7 +6,7 @@ from swmmio.utils import dataframes from swmmio.run_models import defs from swmmio.swmmio import Model -from definitions import SWMM_ENGINE_PATH +from swmmio.definitions import SWMM_ENGINE_PATH #path to the SWMM5 Engine diff --git a/swmmio/swmmio.py b/swmmio/swmmio.py index 19c4c6f..c5bddfc 100644 --- a/swmmio/swmmio.py +++ b/swmmio/swmmio.py @@ -10,7 +10,7 @@ import geojson from .utils import text as txt from .utils.dataframes import create_dataframeINP, create_dataframeRPT, get_link_coords -from definitions import * +from swmmio.definitions import * class Model(object): diff --git a/swmmio/utils/spatial.py b/swmmio/utils/spatial.py index 2d19e4f..0a8cd9c 100644 --- a/swmmio/utils/spatial.py +++ b/swmmio/utils/spatial.py @@ -1,4 +1,4 @@ -from definitions import ROOT_DIR +from swmmio.definitions import ROOT_DIR import geojson import json import pandas as pd From 94c471a8bd816c3093f398e6675c1f9ed965d0d7 Mon Sep 17 00:00:00 2001 From: Adam Erispaha Date: Sun, 24 Jun 2018 17:51:45 -0400 Subject: [PATCH 04/12] added slot for TAGs, moved defnitions.py --- swmmio/definitions.py | 6 +++--- swmmio/defs/sectionheaders.py | 1 + swmmio/swmmio.py | 2 +- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/swmmio/definitions.py b/swmmio/definitions.py index 2e5870d..36ccbc9 100644 --- a/swmmio/definitions.py +++ b/swmmio/definitions.py @@ -19,8 +19,8 @@ REPORT_DIR_NAME = 
r'Report' #path to the basemap file used to create custom basemaps -BASEMAP_PATH = os.path.join(ROOT_DIR,'swmmio','reporting','basemaps','index.html') -BETTER_BASEMAP_PATH = os.path.join(ROOT_DIR,'swmmio','reporting','basemaps','mapbox_base.html') +BASEMAP_PATH = os.path.join(ROOT_DIR,'reporting','basemaps','index.html') +BETTER_BASEMAP_PATH = os.path.join(ROOT_DIR,'reporting','basemaps','mapbox_base.html') #path to the basemap file used to create custom basemaps -FONT_PATH = os.path.join(ROOT_DIR,'swmmio','graphics','fonts','Verdana.ttf') +FONT_PATH = os.path.join(ROOT_DIR,'graphics','fonts','Verdana.ttf') diff --git a/swmmio/defs/sectionheaders.py b/swmmio/defs/sectionheaders.py index fe52c1e..b94baf5 100644 --- a/swmmio/defs/sectionheaders.py +++ b/swmmio/defs/sectionheaders.py @@ -25,6 +25,7 @@ '[INFILTRATION]':'Subcatchment Suction HydCon IMDmax', '[Polygons]':'Name X Y', '[REPORT]':'Param Status', + '[TAGS]':'ElementType Name Tag', #'[CURVES]':'Name Type X-Value Y-Value', #'[TIMESERIES]':'Name Date Time Value' } diff --git a/swmmio/swmmio.py b/swmmio/swmmio.py index c5bddfc..6d4a885 100644 --- a/swmmio/swmmio.py +++ b/swmmio/swmmio.py @@ -130,7 +130,7 @@ def to_map(self, filename=None, inproj='epsg:2272'): filename = os.path.join(self.inp.dir, self.inp.name + '.html') with open(BETTER_BASEMAP_PATH, 'r') as bm: - with open(filename, 'wb') as newmap: + with open(filename, 'w') as newmap: for line in bm: if '//INSERT GEOJSON HERE ~~~~~' in line: newmap.write('conduits = {};\n'.format(geojson.dumps(geo_conduits))) From 0275b82fb0dcdc4fc69581f176d413a332953c7e Mon Sep 17 00:00:00 2001 From: Adam Erispaha Date: Thu, 12 Jul 2018 19:57:33 -0400 Subject: [PATCH 05/12] rpt dataframe support for Storage Volume Summary --- swmmio/defs/sectionheaders.py | 1 + 1 file changed, 1 insertion(+) diff --git a/swmmio/defs/sectionheaders.py b/swmmio/defs/sectionheaders.py index b94baf5..dad30a1 100644 --- a/swmmio/defs/sectionheaders.py +++ b/swmmio/defs/sectionheaders.py @@ -42,6 
+42,7 @@ 'Node Flooding Summary':'Name HoursFlooded MaxQ MaxDay MaxHr TotalFloodVol MaximumPondDepth', 'Node Inflow Summary':'Name Type MaxLatInflow MaxTotalInflow MaxDay MaxHr LatInflowV TotalInflowV FlowBalErrorPerc XXX', 'Node Surcharge Summary':'Name Type HourSurcharged MaxHeightAboveCrown MinDepthBelowRim', + 'Storage Volume Summary':'Name AvgVolume AvgPctFull EvapPctLoss ExfilPctLoss MaxVolume MaxPctFull MaxDay MaxFullHr MaxOutflow', 'Node Depth Summary':'Name Type AvgDepth MaxNodeDepth MaxHGL MaxDay MaxHr', 'Link Flow Summary':'Name Type MaxQ MaxDay MaxHr MaxV MaxQPerc MaxDPerc', 'Subcatchment Results': 'Date Time PrecipInchPerHour LossesInchPerHr RunoffCFS', From caa2b61318b727a010a0cdf965a60989ae5b63dd Mon Sep 17 00:00:00 2001 From: Adam Erispaha Date: Thu, 30 Aug 2018 15:50:07 -0400 Subject: [PATCH 06/12] cast lookup ID to string in get_link_coords() --- swmmio/utils/dataframes.py | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/swmmio/utils/dataframes.py b/swmmio/utils/dataframes.py index 96e706f..f2fe539 100644 --- a/swmmio/utils/dataframes.py +++ b/swmmio/utils/dataframes.py @@ -63,15 +63,21 @@ def create_dataframeINP(inp_path, section='[CONDUITS]', ignore_comments=True, df[';'] = ';' os.remove(tempfilepath) - + return df def get_link_coords(row, nodexys, verticies): """for use in an df.apply, to get coordinates of a conduit/link """ - x1 = round(nodexys.at[row.InletNode, 'X'], 4) - y1 = round(nodexys.at[row.InletNode, 'Y'], 4) - x2 = round(nodexys.at[row.OutletNode, 'X'], 4) - y2 = round(nodexys.at[row.OutletNode, 'Y'], 4) + + #cast IDs to string + inlet_id = str(row.InletNode) + outlet_id =str(row.OutletNode) + xys_str = nodexys.rename(index=str) + + x1 = round(xys_str.at[inlet_id, 'X'], 4) + y1 = round(xys_str.at[inlet_id, 'Y'], 4) + x2 = round(xys_str.at[outlet_id, 'X'], 4) + y2 = round(xys_str.at[outlet_id, 'Y'], 4) if None in [x1, x2, y1, y2]: print(row.name, 'problem, no coords') #grab any extra verts, place in 
between up/dwn nodes From 12f9631e4e1609a994f3777e913a9414f7a3634d Mon Sep 17 00:00:00 2001 From: Adam Erispaha Date: Wed, 10 Oct 2018 20:29:53 -0400 Subject: [PATCH 07/12] cast element IDs as strings, return empty DF when pumps or orifices dont exist in model --- swmmio/swmmio.py | 25 +++++++++++++++++++++---- swmmio/utils/dataframes.py | 4 ++-- 2 files changed, 23 insertions(+), 6 deletions(-) diff --git a/swmmio/swmmio.py b/swmmio/swmmio.py index 6d4a885..8142497 100644 --- a/swmmio/swmmio.py +++ b/swmmio/swmmio.py @@ -199,6 +199,9 @@ def conduits(self): df['DownstreamInvert'] = df.OutletNodeInvert + df.OutletOffset df['SlopeFtPerFt'] = (df.UpstreamInvert - df.DownstreamInvert) / df.Length + df.InletNode = df.InletNode.astype(str) + df.OutletNode = df.OutletNode.astype(str) + self._conduits_df = df return df @@ -220,6 +223,9 @@ def orifices(self): #create dataframes of relevant sections from the INP orifices_df = create_dataframeINP(inp.path, "[ORIFICES]", comment_cols=False) + if orifices_df.empty: + return pd.DataFrame() + coords_df = create_dataframeINP(inp.path, "[COORDINATES]")#.drop_duplicates() #add conduit coordinates @@ -227,7 +233,8 @@ def orifices(self): verts = create_dataframeINP(inp.path, '[VERTICES]') xys = orifices_df.apply(lambda r: get_link_coords(r,coords_df,verts), axis=1) df = orifices_df.assign(coords=xys.map(lambda x: x[0])) - + df.InletNode = df.InletNode.astype(str) + df.OutletNode = df.OutletNode.astype(str) self._orifices_df = df return df @@ -248,8 +255,11 @@ def weirs(self): rpt = self.rpt #create dataframes of relevant sections from the INP - #BUG why can't comment_cols=False work here? 
- weirs_df = create_dataframeINP(inp.path, "[WEIRS]")[['InletNode', 'OutletNode', 'WeirType', 'CrestHeight']] + weirs_df = create_dataframeINP(inp.path, "[WEIRS]")#[['InletNode', 'OutletNode', 'WeirType', 'CrestHeight']] + if weirs_df.empty: + return pd.DataFrame() + + weirs_df = weirs_df[['InletNode', 'OutletNode', 'WeirType', 'CrestHeight']] coords_df = create_dataframeINP(inp.path, "[COORDINATES]")#.drop_duplicates() #add conduit coordinates @@ -257,6 +267,8 @@ def weirs(self): verts = create_dataframeINP(inp.path, '[VERTICES]') xys = weirs_df.apply(lambda r: get_link_coords(r,coords_df,verts), axis=1) df = weirs_df.assign(coords=xys.map(lambda x: x[0])) + df.InletNode = df.InletNode.astype(str) + df.OutletNode = df.OutletNode.astype(str) self._weirs_df = df @@ -279,12 +291,17 @@ def pumps(self): #create dataframes of relevant sections from the INP pumps_df = create_dataframeINP(inp.path, "[PUMPS]", comment_cols=False) + if pumps_df.empty: + return pd.DataFrame() + coords_df = create_dataframeINP(inp.path, "[COORDINATES]")#.drop_duplicates() #add conduit coordinates verts = create_dataframeINP(inp.path, '[VERTICES]') xys = pumps_df.apply(lambda r: get_link_coords(r,coords_df,verts), axis=1) df = pumps_df.assign(coords=xys.map(lambda x: x[0])) + df.InletNode = df.InletNode.astype(str) + df.OutletNode = df.OutletNode.astype(str) self._pumps_df = df @@ -334,7 +351,7 @@ def nodexy(row): xys = all_nodes.apply(lambda r: nodexy(r), axis=1) all_nodes = all_nodes.assign(coords = xys) - + all_nodes = all_nodes.rename(index=str) self._nodes_df = all_nodes return all_nodes diff --git a/swmmio/utils/dataframes.py b/swmmio/utils/dataframes.py index f2fe539..1e9272e 100644 --- a/swmmio/utils/dataframes.py +++ b/swmmio/utils/dataframes.py @@ -63,8 +63,8 @@ def create_dataframeINP(inp_path, section='[CONDUITS]', ignore_comments=True, df[';'] = ';' os.remove(tempfilepath) - - return df + + return df.rename(index=str) def get_link_coords(row, nodexys, verticies): """for use in an 
df.apply, to get coordinates of a conduit/link """ From bbfb994325d48b49ae8667d18c450e20359c5be3 Mon Sep 17 00:00:00 2001 From: Adam Erispaha Date: Wed, 10 Oct 2018 23:55:53 -0400 Subject: [PATCH 08/12] py2 and py3 CI checks --- .travis.yml | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 6f3488d..eba1512 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,7 +1,7 @@ language: python python: - "2.7" - # - "3.6" + - "3.6" # - "3.7" # command to install dependencies install: diff --git a/setup.py b/setup.py index 055a776..2d0c031 100644 --- a/setup.py +++ b/setup.py @@ -6,7 +6,7 @@ def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() -VERSION = '0.2.1' +VERSION = '0.3.0' AUTHOR_NAME = 'Adam Erispaha' AUTHOR_EMAIL = 'aerispaha@gmail.com' From f1b008a9ff5ee9c52143ecf6f3ac8c41bba036cb Mon Sep 17 00:00:00 2001 From: Adam Erispaha Date: Thu, 11 Oct 2018 01:06:17 -0400 Subject: [PATCH 09/12] added version control tests --- swmmio/tests/data/__init__.py | 6 ++ swmmio/tests/data/alt_test1.inp | 116 +++++++++++++++++++++++++++ swmmio/tests/data/alt_test2.inp | 116 +++++++++++++++++++++++++++ swmmio/tests/data/alt_test3.inp | 115 ++++++++++++++++++++++++++ swmmio/tests/data/baseline_test.inp | 116 +++++++++++++++++++++++++++ swmmio/tests/test_version_control.py | 34 ++++++++ 6 files changed, 503 insertions(+) create mode 100644 swmmio/tests/data/alt_test1.inp create mode 100644 swmmio/tests/data/alt_test2.inp create mode 100644 swmmio/tests/data/alt_test3.inp create mode 100644 swmmio/tests/data/baseline_test.inp create mode 100644 swmmio/tests/test_version_control.py diff --git a/swmmio/tests/data/__init__.py b/swmmio/tests/data/__init__.py index 1c2291b..da8106d 100644 --- a/swmmio/tests/data/__init__.py +++ b/swmmio/tests/data/__init__.py @@ -15,3 +15,9 @@ # Test models paths MODEL_FULL_FEATURES_PATH = os.path.join(DATA_PATH, 'model_full_features.inp') MODEL_BROWARD_COUNTY_PATH = 
os.path.join(DATA_PATH, 'RUNOFF46_SW5.INP') + +#version control test models +MODEL_XSECTION_BASELINE = os.path.join(DATA_PATH, 'baseline_test.inp') +MODEL_XSECTION_ALT_01 = os.path.join(DATA_PATH, 'alt_test1.inp') +MODEL_XSECTION_ALT_02 = os.path.join(DATA_PATH, 'alt_test2.inp') +MODEL_XSECTION_ALT_03 = os.path.join(DATA_PATH, 'alt_test3.inp') diff --git a/swmmio/tests/data/alt_test1.inp b/swmmio/tests/data/alt_test1.inp new file mode 100644 index 0000000..d99e88a --- /dev/null +++ b/swmmio/tests/data/alt_test1.inp @@ -0,0 +1,116 @@ +[TITLE] +;;Project Title/Notes + +[OPTIONS] +;;Option Value +FLOW_UNITS CFS +INFILTRATION HORTON +FLOW_ROUTING KINWAVE +LINK_OFFSETS DEPTH +MIN_SLOPE 0 +ALLOW_PONDING NO +SKIP_STEADY_STATE NO + +START_DATE 03/05/2018 +START_TIME 00:00:00 +REPORT_START_DATE 03/05/2018 +REPORT_START_TIME 00:00:00 +END_DATE 03/05/2018 +END_TIME 06:00:00 +SWEEP_START 1/1 +SWEEP_END 12/31 +DRY_DAYS 0 +REPORT_STEP 00:15:00 +WET_STEP 00:05:00 +DRY_STEP 01:00:00 +ROUTING_STEP 0:00:30 + +INERTIAL_DAMPING PARTIAL +NORMAL_FLOW_LIMITED BOTH +FORCE_MAIN_EQUATION H-W +VARIABLE_STEP 0.75 +LENGTHENING_STEP 0 +MIN_SURFAREA 0 +MAX_TRIALS 0 +HEAD_TOLERANCE 0 +SYS_FLOW_TOL 5 +LAT_FLOW_TOL 5 +MINIMUM_STEP 0.5 +THREADS 1 + +[EVAPORATION] +;;Data Source Parameters +;;-------------- ---------------- +CONSTANT 0.0 +DRY_ONLY NO + +[JUNCTIONS] +;;Name Elevation MaxDepth InitDepth SurDepth Aponded +;;-------------- ---------- ---------- ---------- ---------- ---------- +dummy_node1 -10.99 30 0 0 0 +dummy_node2 -9.24 20 0 0 0 +dummy_node3 -7.76 20 0 0 0 +dummy_node4 -6.98 12.59314 0 0 177885 +dummy_node5 -6.96 13.05439 0 0 73511 +dummy_node6 -6.8 13.27183 0 0 0 + +[OUTFALLS] +;;Name Elevation Type Stage Data Gated Route To +;;-------------- ---------- ---------- ---------------- -------- ---------------- +dummy_outfall -11 FREE YES + +[CONDUITS] +;;Name From Node To Node Length Roughness InOffset OutOffset InitFlow MaxFlow +;;-------------- ---------------- ---------------- 
---------- ---------- ---------- ---------- ---------- ---------- +outfall_pipe dummy_node1 dummy_outfall 200 0.013 0 0 0 0 +pipe1 dummy_node2 dummy_node1 1675 0.013 0 0 0 0 +pipe2 dummy_node3 dummy_node2 400 0.01 0 0 0 0 +pipe3 dummy_node4 dummy_node3 594 0.013 0 0 0 0 +pipe4 dummy_node5 dummy_node4 400 0.013 0 0 0 0 +pipe5 dummy_node6 dummy_node5 188 0.013 0 0 0 0 + +[XSECTIONS] +;;Link Shape Geom1 Geom2 Geom3 Geom4 Barrels Culvert +;;-------------- ------------ ---------------- ---------- ---------- ---------- ---------- ---------- +outfall_pipe RECT_CLOSED 7 14 0 0 1 +pipe1 RECT_TRIANGULAR 7 14 1.5 0 1 +pipe2 RECT_TRIANGULAR 7 14 1.5 0 1 +pipe3 CIRCULAR 8 0 0 0 1 +pipe4 CIRCULAR 6.5 0 0 0 1 +pipe5 RECT_TRIANGULAR 6.5 13 1.434756791 0 1 + +[DWF] +;;Node Constituent Baseline Patterns +;;-------------- ---------------- ---------- ---------- +dummy_node2 FLOW 0.000275704 "" "" +dummy_node6 FLOW 0.008150676 "" "" + +[REPORT] +;;Reporting Options +INPUT NO +CONTROLS NO +SUBCATCHMENTS ALL +NODES ALL +LINKS ALL + +[TAGS] + +[MAP] +DIMENSIONS 0.000 0.000 10000.000 10000.000 +Units None + +[COORDINATES] +;;Node X-Coord Y-Coord +;;-------------- ------------------ ------------------ +dummy_node1 2054.575 6051.364 +dummy_node2 -2937.400 7704.655 +dummy_node3 -4205.457 9695.024 +dummy_node4 -6163.724 12857.143 +dummy_node5 -9871.589 13723.917 +dummy_node6 -12712.681 14927.769 +dummy_outfall 4927.769 5280.899 + +[VERTICES] +;;Link X-Coord Y-Coord +;;-------------- ------------------ ------------------ + diff --git a/swmmio/tests/data/alt_test2.inp b/swmmio/tests/data/alt_test2.inp new file mode 100644 index 0000000..436872b --- /dev/null +++ b/swmmio/tests/data/alt_test2.inp @@ -0,0 +1,116 @@ +[TITLE] +;;Project Title/Notes + +[OPTIONS] +;;Option Value +FLOW_UNITS CFS +INFILTRATION HORTON +FLOW_ROUTING KINWAVE +LINK_OFFSETS DEPTH +MIN_SLOPE 0 +ALLOW_PONDING NO +SKIP_STEADY_STATE NO + +START_DATE 03/05/2018 +START_TIME 00:00:00 +REPORT_START_DATE 03/05/2018 +REPORT_START_TIME 
00:00:00 +END_DATE 03/05/2018 +END_TIME 06:00:00 +SWEEP_START 1/1 +SWEEP_END 12/31 +DRY_DAYS 0 +REPORT_STEP 00:15:00 +WET_STEP 00:05:00 +DRY_STEP 01:00:00 +ROUTING_STEP 0:00:30 + +INERTIAL_DAMPING PARTIAL +NORMAL_FLOW_LIMITED BOTH +FORCE_MAIN_EQUATION H-W +VARIABLE_STEP 0.75 +LENGTHENING_STEP 0 +MIN_SURFAREA 0 +MAX_TRIALS 0 +HEAD_TOLERANCE 0 +SYS_FLOW_TOL 5 +LAT_FLOW_TOL 5 +MINIMUM_STEP 0.5 +THREADS 1 + +[EVAPORATION] +;;Data Source Parameters +;;-------------- ---------------- +CONSTANT 0.0 +DRY_ONLY NO + +[JUNCTIONS] +;;Name Elevation MaxDepth InitDepth SurDepth Aponded +;;-------------- ---------- ---------- ---------- ---------- ---------- +dummy_node1 -10.99 30 0 0 0 +dummy_node2 -9.24 20 0 0 0 +dummy_node3 -7.76 20 0 0 0 +dummy_node4 -6.98 12.59314 0 0 177885 +dummy_node5 -6.96 13.05439 0 0 73511 +dummy_node6 -6.8 13.27183 0 0 0 + +[OUTFALLS] +;;Name Elevation Type Stage Data Gated Route To +;;-------------- ---------- ---------- ---------------- -------- ---------------- +dummy_outfall -11 FREE YES + +[CONDUITS] +;;Name From Node To Node Length Roughness InOffset OutOffset InitFlow MaxFlow +;;-------------- ---------------- ---------------- ---------- ---------- ---------- ---------- ---------- ---------- +outfall_pipe dummy_node1 dummy_outfall 200 0.013 0 0 0 0 +pipe1 dummy_node2 dummy_node1 1675 0.013 0 0 0 0 +pipe2 dummy_node3 dummy_node2 400 0.01 0 0 0 0 +pipe3 dummy_node4 dummy_node3 594 0.013 0 0 0 0 +pipe4 dummy_node5 dummy_node4 400 0.013 0 0 0 0 +pipe5 dummy_node6 dummy_node5 188 0.013 0 0 0 0 + +[XSECTIONS] +;;Link Shape Geom1 Geom2 Geom3 Geom4 Barrels Culvert +;;-------------- ------------ ---------------- ---------- ---------- ---------- ---------- ---------- +outfall_pipe RECT_CLOSED 7 14 0 0 1 +pipe1 RECT_TRIANGULAR 7 14 1.5 0 1 +pipe2 RECT_TRIANGULAR 7 14 1.5 0 1 +pipe3 RECT_TRIANGULAR 7 14 1.5 0 1 +pipe4 RECT_TRIANGULAR 6.5 13 1.434756791 0 1 +pipe5 RECT_TRIANGULAR 6.5 13 1.434756791 0 1 + +[DWF] +;;Node Constituent Baseline Patterns 
+;;-------------- ---------------- ---------- ---------- +dummy_node2 FLOW 0.000275704 "" "" "" +dummy_node6 FLOW 0.008150676 "" "" "" + +[REPORT] +;;Reporting Options +INPUT NO +CONTROLS NO +SUBCATCHMENTS ALL +NODES ALL +LINKS ALL + +[TAGS] + +[MAP] +DIMENSIONS 0.000 0.000 10000.000 10000.000 +Units None + +[COORDINATES] +;;Node X-Coord Y-Coord +;;-------------- ------------------ ------------------ +dummy_node1 2054.575 6051.364 +dummy_node2 -2937.400 7704.655 +dummy_node3 -4205.457 9695.024 +dummy_node4 -6163.724 12857.143 +dummy_node5 -9871.589 13723.917 +dummy_node6 -12712.681 14927.769 +dummy_outfall 4927.769 5280.899 + +[VERTICES] +;;Link X-Coord Y-Coord +;;-------------- ------------------ ------------------ + diff --git a/swmmio/tests/data/alt_test3.inp b/swmmio/tests/data/alt_test3.inp new file mode 100644 index 0000000..50bdea1 --- /dev/null +++ b/swmmio/tests/data/alt_test3.inp @@ -0,0 +1,115 @@ +[TITLE] +;;Project Title/Notes + +[OPTIONS] +;;Option Value +FLOW_UNITS CFS +INFILTRATION HORTON +FLOW_ROUTING KINWAVE +LINK_OFFSETS DEPTH +MIN_SLOPE 0 +ALLOW_PONDING NO +SKIP_STEADY_STATE NO + +START_DATE 03/05/2018 +START_TIME 00:00:00 +REPORT_START_DATE 03/05/2018 +REPORT_START_TIME 00:00:00 +END_DATE 03/05/2018 +END_TIME 06:00:00 +SWEEP_START 1/1 +SWEEP_END 12/31 +DRY_DAYS 0 +REPORT_STEP 00:15:00 +WET_STEP 00:05:00 +DRY_STEP 01:00:00 +ROUTING_STEP 0:00:30 + +INERTIAL_DAMPING PARTIAL +NORMAL_FLOW_LIMITED BOTH +FORCE_MAIN_EQUATION H-W +VARIABLE_STEP 0.75 +LENGTHENING_STEP 0 +MIN_SURFAREA 0 +MAX_TRIALS 0 +HEAD_TOLERANCE 0 +SYS_FLOW_TOL 5 +LAT_FLOW_TOL 5 +MINIMUM_STEP 0.5 +THREADS 1 + +[EVAPORATION] +;;Data Source Parameters +;;-------------- ---------------- +CONSTANT 0.0 +DRY_ONLY NO + +[JUNCTIONS] +;;Name Elevation MaxDepth InitDepth SurDepth Aponded +;;-------------- ---------- ---------- ---------- ---------- ---------- +dummy_node1 -15 30 0 0 0 +dummy_node2 -9.24 20 0 0 0 +dummy_node3 -7.76 20 0 0 0 +dummy_node4 -6.98 12.59314 0 0 177885 +dummy_node5 
-6.96 15 0 0 73511 +dummy_node6 -6.8 13.27183 0 0 0 + +[OUTFALLS] +;;Name Elevation Type Stage Data Gated Route To +;;-------------- ---------- ---------- ---------------- -------- ---------------- +dummy_outfall -11 FREE YES + +[CONDUITS] +;;Name From Node To Node Length Roughness InOffset OutOffset InitFlow MaxFlow +;;-------------- ---------------- ---------------- ---------- ---------- ---------- ---------- ---------- ---------- +outfall_pipe dummy_node1 dummy_outfall 200 0.013 0 0 0 0 +pipe1 dummy_node2 dummy_node1 1675 0.013 0 0 0 0 +pipe2 dummy_node3 dummy_node2 400 0.01 0 0 0 0 +pipe3 dummy_node4 dummy_node3 594 0.013 0 0 0 0 +pipe4 dummy_node5 dummy_node4 400 0.013 0 0 0 0 +pipe5 dummy_node6 dummy_node5 666 0.013 0 0 0 0 + +[XSECTIONS] +;;Link Shape Geom1 Geom2 Geom3 Geom4 Barrels Culvert +;;-------------- ------------ ---------------- ---------- ---------- ---------- ---------- ---------- +outfall_pipe RECT_CLOSED 7 14 0 0 1 +pipe1 RECT_TRIANGULAR 7 14 1.5 0 1 +pipe2 RECT_TRIANGULAR 7 14 1.5 0 1 +pipe3 RECT_TRIANGULAR 7 14 1.5 0 1 +pipe4 RECT_TRIANGULAR 6.5 13 1.434756791 0 1 +pipe5 RECT_TRIANGULAR 6.5 13 1.434756791 0 1 + +[DWF] +;;Node Constituent Baseline Patterns +;;-------------- ---------------- ---------- ---------- +dummy_node2 FLOW 0.000275704 "" "" "" +dummy_node6 FLOW 0.008150676 "" "" "" + +[REPORT] +;;Reporting Options +INPUT NO +CONTROLS NO +SUBCATCHMENTS ALL +NODES ALL +LINKS ALL + +[TAGS] + +[MAP] +DIMENSIONS 0.000 0.000 10000.000 10000.000 +Units None + +[COORDINATES] +;;Node X-Coord Y-Coord +;;-------------- ------------------ ------------------ +dummy_node1 2054.575 6051.364 +dummy_node2 -2937.400 7704.655 +dummy_node3 -4205.457 9695.024 +dummy_node4 -6163.724 12857.143 +dummy_node5 -9871.589 13723.917 +dummy_node6 -12712.681 14927.769 +dummy_outfall 4927.769 5280.899 + +[VERTICES] +;;Link X-Coord Y-Coord +;;-------------- ------------------ ------------------ diff --git a/swmmio/tests/data/baseline_test.inp 
b/swmmio/tests/data/baseline_test.inp new file mode 100644 index 0000000..db508f5 --- /dev/null +++ b/swmmio/tests/data/baseline_test.inp @@ -0,0 +1,116 @@ +[TITLE] +;;Project Title/Notes + +[OPTIONS] +;;Option Value +FLOW_UNITS CFS +INFILTRATION HORTON +FLOW_ROUTING KINWAVE +LINK_OFFSETS DEPTH +MIN_SLOPE 0 +ALLOW_PONDING NO +SKIP_STEADY_STATE NO + +START_DATE 03/05/2018 +START_TIME 00:00:00 +REPORT_START_DATE 03/05/2018 +REPORT_START_TIME 00:00:00 +END_DATE 03/05/2018 +END_TIME 06:00:00 +SWEEP_START 1/1 +SWEEP_END 12/31 +DRY_DAYS 0 +REPORT_STEP 00:15:00 +WET_STEP 00:05:00 +DRY_STEP 01:00:00 +ROUTING_STEP 0:00:30 + +INERTIAL_DAMPING PARTIAL +NORMAL_FLOW_LIMITED BOTH +FORCE_MAIN_EQUATION H-W +VARIABLE_STEP 0.75 +LENGTHENING_STEP 0 +MIN_SURFAREA 0 +MAX_TRIALS 0 +HEAD_TOLERANCE 0 +SYS_FLOW_TOL 5 +LAT_FLOW_TOL 5 +MINIMUM_STEP 0.5 +THREADS 1 + +[EVAPORATION] +;;Data Source Parameters +;;-------------- ---------------- +CONSTANT 0.0 +DRY_ONLY NO + +[JUNCTIONS] +;;Name Elevation MaxDepth InitDepth SurDepth Aponded +;;-------------- ---------- ---------- ---------- ---------- ---------- +dummy_node1 -10.99 30 0 0 0 +dummy_node2 -9.24 20 0 0 0 +dummy_node3 -7.76 20 0 0 0 +dummy_node4 -6.98 12.59314 0 0 177885 +dummy_node5 -6.96 13.05439 0 0 73511 +dummy_node6 -6.8 13.27183 0 0 0 + +[OUTFALLS] +;;Name Elevation Type Stage Data Gated Route To +;;-------------- ---------- ---------- ---------------- -------- ---------------- +dummy_outfall -11 FREE YES + +[CONDUITS] +;;Name From Node To Node Length Roughness InOffset OutOffset InitFlow MaxFlow +;;-------------- ---------------- ---------------- ---------- ---------- ---------- ---------- ---------- ---------- +outfall_pipe dummy_node1 dummy_outfall 200 0.013 0 0 0 0 +pipe1 dummy_node2 dummy_node1 1675 0.013 0 0 0 0 +pipe2 dummy_node3 dummy_node2 400 0.01 0 0 0 0 +pipe3 dummy_node4 dummy_node3 594 0.013 0 0 0 0 +pipe4 dummy_node5 dummy_node4 400 0.013 0 0 0 0 +pipe5 dummy_node6 dummy_node5 188 0.013 0 0 0 0 + +[XSECTIONS] 
+;;Link Shape Geom1 Geom2 Geom3 Geom4 Barrels Culvert +;;-------------- ------------ ---------------- ---------- ---------- ---------- ---------- ---------- +outfall_pipe RECT_CLOSED 7 14 0 0 1 +pipe1 RECT_TRIANGULAR 7 14 1.5 0 1 +pipe2 RECT_TRIANGULAR 7 14 1.5 0 1 +pipe3 RECT_TRIANGULAR 7 14 1.5 0 1 +pipe4 CIRCULAR 6.5 0 0 0 1 +pipe5 RECT_TRIANGULAR 6.5 13 1.434756791 0 1 + +[DWF] +;;Node Constituent Baseline Patterns +;;-------------- ---------------- ---------- ---------- +dummy_node2 FLOW 0.000275704 "" "" "" +dummy_node6 FLOW 0.008150676 "" "" "" + +[REPORT] +;;Reporting Options +INPUT NO +CONTROLS NO +SUBCATCHMENTS ALL +NODES ALL +LINKS ALL + +[TAGS] + +[MAP] +DIMENSIONS 0.000 0.000 10000.000 10000.000 +Units None + +[COORDINATES] +;;Node X-Coord Y-Coord +;;-------------- ------------------ ------------------ +dummy_node1 2054.575 6051.364 +dummy_node2 -2937.400 7704.655 +dummy_node3 -4205.457 9695.024 +dummy_node4 -6163.724 12857.143 +dummy_node5 -9871.589 13723.917 +dummy_node6 -12712.681 14927.769 +dummy_outfall 4927.769 5280.899 + +[VERTICES] +;;Link X-Coord Y-Coord +;;-------------- ------------------ ------------------ + diff --git a/swmmio/tests/test_version_control.py b/swmmio/tests/test_version_control.py new file mode 100644 index 0000000..6ecb376 --- /dev/null +++ b/swmmio/tests/test_version_control.py @@ -0,0 +1,34 @@ +from swmmio.tests.data import (MODEL_XSECTION_BASELINE, MODEL_XSECTION_ALT_01, + MODEL_XSECTION_ALT_02, MODEL_XSECTION_ALT_03) +from swmmio import swmmio +from swmmio.version_control import utils as vc_utils +from swmmio.version_control import inp +from swmmio.utils import functions as funcs + + +def test_complete_inp_headers(): + + headers = [ + '[TITLE]','[OPTIONS]','[EVAPORATION]','[JUNCTIONS]','[OUTFALLS]', + '[CONDUITS]','[XSECTIONS]','[DWF]','[REPORT]','[TAGS]','[MAP]', + '[COORDINATES]','[VERTICES]', + ] + + h1 = funcs.complete_inp_headers(MODEL_XSECTION_BASELINE) + + assert(all(h in h1['headers'] for h in headers)) + 
assert(h1['order'] == headers) + + +def test_create_inp_build_instructions(): + + inp.create_inp_build_instructions(MODEL_XSECTION_BASELINE, + MODEL_XSECTION_ALT_03, + 'vc_dir', + 'test_version_id', 'cool comments') + + latest_bi = vc_utils.newest_file('vc_dir') + bi = inp.BuildInstructions(latest_bi) + + juncs = bi.instructions['[JUNCTIONS]'] + assert(all(j in juncs.altered.index for j in ['dummy_node1', 'dummy_node5'])) From 2ac2c4c97ed90047d320e9ed495806255a07c4ea Mon Sep 17 00:00:00 2001 From: Adam Erispaha Date: Fri, 12 Oct 2018 17:28:43 -0400 Subject: [PATCH 10/12] appveyor support --- appveyor.yml | 38 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) create mode 100644 appveyor.yml diff --git a/appveyor.yml b/appveyor.yml new file mode 100644 index 0000000..9d4c5ee --- /dev/null +++ b/appveyor.yml @@ -0,0 +1,38 @@ +#matrix: +# fast_finish: true + +#branches: +# only: +# - master +# - /dev-.*/ + +environment: + matrix: + # For Python versions available on Appveyor, see + # http://www.appveyor.com/docs/installed-software#python + # The list here is complete (excluding Python 2.6, which + # isn't covered by this document) at the time of writing. + - PYTHON: "C:\\Python27" + - PYTHON: "C:\\Python36" + +install: + - "%PYTHON%\\python setup.py develop" + - "%PYTHON%\\python.exe -m pip install -r %APPVEYOR_BUILD_FOLDER%\\requirements.txt -q" + +build: off + +test_script: + + - "%PYTHON%\\Scripts\\pytest %APPVEYOR_BUILD_FOLDER%" + + # Asserting pep8 formatting checks (using autopep8 tool) + # - ps: | + # $output = C:\\Python36\\Scripts\\autopep8 -d --recursive . 
+ # if($output) + # { + # echo $output; + # $host.SetShouldExit(1) + # Write-Host "autopep8 failed: + # Please run this command locally: + # 'autopep8 -i -a -r .'" + # } From aef001714ff869af64d57c90b3e1743d541dcf16 Mon Sep 17 00:00:00 2001 From: Adam Erispaha Date: Fri, 12 Oct 2018 17:34:39 -0400 Subject: [PATCH 11/12] changed Pillow v number in requirements --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 20feba6..f787d97 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,7 @@ # Build dependencies #python pytest -pillow==3.0.0 +pillow numpy pandas pyshp From c6e11b52558fa5f3b5ace960892f04befd614367 Mon Sep 17 00:00:00 2001 From: Adam Erispaha Date: Fri, 12 Oct 2018 17:51:12 -0400 Subject: [PATCH 12/12] build status badge --- README.md | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 19f4f70..6fb33b4 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,7 @@ # SWMMIO + +[![Build status](https://ci.appveyor.com/api/projects/status/qywujm5w2wm0y2tv?svg=true)](https://ci.appveyor.com/project/aerispaha/swmmio) + ![Kool Picture](docs/img/impact_of_option.png?raw=true "Impact of Option") SWMMIO is a set of python tools aiming to provide a means for version control and visualizing results from the EPA Stormwater Management Model (SWMM). Command line tools are also provided for running models individually and in parallel via Python's `multiprocessing` module. These tools are being developed specifically for the application of flood risk management, though most functionality is applicable to SWMM modeling in general.
@@ -8,7 +11,7 @@ SWMMIO functions primarily by interfacing with .inp and .rpt (input and report) ### Dependencies -* [pillow](http://python-pillow.org/): 3.0.0 +* [pillow](http://python-pillow.org/) * [matplotlib](http://matplotlib.org/) * [numpy](http://www.numpy.org/) * [pandas](https://github.com/pydata/pandas) @@ -19,10 +22,6 @@ SWMMIO functions primarily by interfacing with .inp and .rpt (input and report) Before installation, it's recommended to first activate a [virtualenv](https://github.com/pypa/virtualenv) to not crowd your system's package library. If you don't use any of the dependencies listed above, this step is less important. SWMMIO can be installed via pip in your command line: ```bash -#on Windows: -python -m pip install swmmio - -#on Unix-type systems, i do this: pip install swmmio ```