diff --git a/.gitignore b/.gitignore index d24bac6..7e9b36d 100644 --- a/.gitignore +++ b/.gitignore @@ -4,6 +4,7 @@ private.py private/ # Setuptools distribution folder. /dist/ +/build/ # Python egg metadata, regenerated from source files by setuptools. /*.egg-info notes/ diff --git a/.travis.yml b/.travis.yml index 6f3488d..eba1512 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,7 +1,7 @@ language: python python: - "2.7" - # - "3.6" + - "3.6" # - "3.7" # command to install dependencies install: diff --git a/README.md b/README.md index 19f4f70..6fb33b4 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,7 @@ # SWMMIO + +[![Build status](https://ci.appveyor.com/api/projects/status/qywujm5w2wm0y2tv?svg=true)](https://ci.appveyor.com/project/aerispaha/swmmio) + ![Kool Picture](docs/img/impact_of_option.png?raw=true "Impact of Option") SWMMIO is a set of python tools aiming to provide a means for version control and visualizing results from the EPA Stormwater Management Model (SWMM). Command line tools are also provided for running models individually and in parallel via Python's `multiprocessing` module. These tools are being developed specifically for the application of flood risk management, though most functionality is applicable to SWMM modeling in general. @@ -8,7 +11,7 @@ SWMMIO functions primarily by interfacing with .inp and .rpt (input and report) ### Dependencies -* [pillow](http://python-pillow.org/): 3.0.0 +* [pillow](http://python-pillow.org/) * [matplotlib](http://matplotlib.org/) * [numpy](http://www.numpy.org/) * [pandas](https://github.com/pydata/pandas) @@ -19,10 +22,6 @@ SWMMIO functions primarily by interfacing with .inp and .rpt (input and report) Before installation, it's recommended to first activate a [virtualenv](https://github.com/pypa/virtualenv) to not crowd your system's package library. If you don't use any of the dependencies listed above, this step is less important. 
SWMMIO can be installed via pip in your command line: ```bash -#on Windows: -python -m pip install swmmio - -#on Unix-type systems, i do this: pip install swmmio ``` diff --git a/appveyor.yml b/appveyor.yml new file mode 100644 index 0000000..9d4c5ee --- /dev/null +++ b/appveyor.yml @@ -0,0 +1,38 @@ +#matrix: +# fast_finish: true + +#branches: +# only: +# - master +# - /dev-.*/ + +environment: + matrix: + # For Python versions available on Appveyor, see + # http://www.appveyor.com/docs/installed-software#python + # The list here is complete (excluding Python 2.6, which + # isn't covered by this document) at the time of writing. + - PYTHON: "C:\\Python27" + - PYTHON: "C:\\Python36" + +install: + - "%PYTHON%\\python setup.py develop" + - "%PYTHON%\\python.exe -m pip install -r %APPVEYOR_BUILD_FOLDER%\\requirements.txt -q" + +build: off + +test_script: + + - "%PYTHON%\\Scripts\\pytest %APPVEYOR_BUILD_FOLDER%" + + # Asserting pep8 formatting checks (using autopep8 tool) + # - ps: | + # $output = C:\\Python36\\Scripts\\autopep8 -d --recursive . 
+ # if($output) + # { + # echo $output; + # $host.SetShouldExit(1) + # Write-Host "autopep8 failed: + # Please this command locally: + # 'autopep8 -i -a -r .'" + # } diff --git a/requirements.txt b/requirements.txt index 20feba6..f787d97 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,7 @@ # Build dependencies #python pytest -pillow==3.0.0 +pillow numpy pandas pyshp diff --git a/setup.py b/setup.py index c09e2fc..2d0c031 100644 --- a/setup.py +++ b/setup.py @@ -6,12 +6,13 @@ def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() -VERSION = '0.2.1' +VERSION = '0.3.0' AUTHOR_NAME = 'Adam Erispaha' AUTHOR_EMAIL = 'aerispaha@gmail.com' install_requires = [ - 'pillow==3.0.0', + # 'pillow==3.0.0', + 'Pillow', 'numpy', 'pandas', 'pyshp', diff --git a/swmmio/__main__.py b/swmmio/__main__.py index 8364a79..8fe72d8 100644 --- a/swmmio/__main__.py +++ b/swmmio/__main__.py @@ -1,6 +1,6 @@ from .run_models.run import run_simple, run_hot_start_sequence from .run_models import start_pool -from swmmio import Model +from .swmmio import Model from itertools import chain import os import argparse @@ -21,23 +21,23 @@ if args.model_to_run is not None: models_paths = [os.path.join(wd, f) for f in args.model_to_run] - print 'Adding models to queue:\n\t{}'.format('\n\t'.join(models_paths)) + print('Adding models to queue:\n\t{}'.format('\n\t'.join(models_paths))) #run the models in series (one after the other) - map(run_simple, models_paths) + list(map(run_simple, models_paths)) # run_simple(args.model_to_run) elif args.hotstart_model_to_run is not None: models_paths = [os.path.join(wd, f) for f in args.hotstart_model_to_run] - print 'hotstart_model_to_run the model: {}'.format(args.hotstart_model_to_run) + print('hotstart_model_to_run the model: {}'.format(args.hotstart_model_to_run)) # m = Model(args.hotstart_model_to_run) # run_hot_start_sequence(m)#args.hotstart_model_to_run) - map(run_hot_start_sequence, models_paths) + 
list(map(run_hot_start_sequence, models_paths)) elif args.start_pool is not None: models_dirs = [os.path.join(wd, f) for f in args.start_pool] - print 'Searching for models in:\n\t{}'.format('\n\t'.join(models_dirs)) + print('Searching for models in:\n\t{}'.format('\n\t'.join(models_dirs))) #combine the segments and options (combinations) into one iterable inp_paths = [] for root, dirs, files in chain.from_iterable(os.walk(path) for path in models_dirs): @@ -50,8 +50,8 @@ #call the main() function in start_pool.py start_pool.main(inp_paths, args.cores_left) - print "swmmio has completed running {} models".format(len(inp_paths)) + print("swmmio has completed running {} models".format(len(inp_paths))) else: - print 'you need to pass in some args' + print('you need to pass in some args') diff --git a/swmmio/defs/sectionheaders.py b/swmmio/defs/sectionheaders.py index fe52c1e..dad30a1 100644 --- a/swmmio/defs/sectionheaders.py +++ b/swmmio/defs/sectionheaders.py @@ -25,6 +25,7 @@ '[INFILTRATION]':'Subcatchment Suction HydCon IMDmax', '[Polygons]':'Name X Y', '[REPORT]':'Param Status', + '[TAGS]':'ElementType Name Tag', #'[CURVES]':'Name Type X-Value Y-Value', #'[TIMESERIES]':'Name Date Time Value' } @@ -41,6 +42,7 @@ 'Node Flooding Summary':'Name HoursFlooded MaxQ MaxDay MaxHr TotalFloodVol MaximumPondDepth', 'Node Inflow Summary':'Name Type MaxLatInflow MaxTotalInflow MaxDay MaxHr LatInflowV TotalInflowV FlowBalErrorPerc XXX', 'Node Surcharge Summary':'Name Type HourSurcharged MaxHeightAboveCrown MinDepthBelowRim', + 'Storage Volume Summary':'Name AvgVolume AvgPctFull EvapPctLoss ExfilPctLoss MaxVolume MaxPctFull MaxDay MaxFullHr MaxOutflow', 'Node Depth Summary':'Name Type AvgDepth MaxNodeDepth MaxHGL MaxDay MaxHr', 'Link Flow Summary':'Name Type MaxQ MaxDay MaxHr MaxV MaxQPerc MaxDPerc', 'Subcatchment Results': 'Date Time PrecipInchPerHour LossesInchPerHr RunoffCFS', diff --git a/swmmio/graphics/animate.py b/swmmio/graphics/animate.py index 24813d9..260dd1b 100644 
--- a/swmmio/graphics/animate.py +++ b/swmmio/graphics/animate.py @@ -22,7 +22,7 @@ def animateModel(model, startDtime=None, endDtime=None, **kwargs): """ #unpack and update the options ops = du.default_draw_options() - for key, value in kwargs.iteritems(): + for key, value in kwargs.items(): ops.update({key:value}) #return ops width = ops['width'] @@ -63,8 +63,8 @@ def animateModel(model, startDtime=None, endDtime=None, **kwargs): if userStartDT < simStartDT or userEndDT > simEndDT or timeStepMod != 0 or userEndDT < userStartDT: #user has entered fault date times either by not being within the #availble data in the rpt or by starting at something that doesn't fit the timestep - print "PROBLEM WITH DATETIME ENTERED. Make sure it fits within data and start time rest on factor of timestep in minutes." - print "userStartDT = ", userStartDT, "\nuserEndDT = ", userEndDT, "\nsimStartDT = ", simStartDT, "\nsimEndDT = ", simEndDT, "\nTIMESTEP = ", rpt.timeStepMin + print("PROBLEM WITH DATETIME ENTERED. Make sure it fits within data and start time rest on factor of timestep in minutes.") + print("userStartDT = ", userStartDT, "\nuserEndDT = ", userEndDT, "\nsimStartDT = ", simStartDT, "\nsimEndDT = ", simEndDT, "\nTIMESTEP = ", rpt.timeStepMin) return None currentT = datetime.strptime(startDtime, "%b-%d-%Y %H:%M:%S") #SWMM dtime format needed @@ -81,7 +81,7 @@ def animateModel(model, startDtime=None, endDtime=None, **kwargs): if not os.path.isfile(byteLocDictionaryFName): #this is a heavy operation, allow a few minutes - print "generating byte dictionary..." 
+ print("generating byte dictionary...") #conduitByteLocationDict = rpt.createByteLocDict("Link Results") rpt.createByteLocDict("Link Results") rpt.createByteLocDict("Node Results") @@ -96,7 +96,7 @@ def animateModel(model, startDtime=None, endDtime=None, **kwargs): rpt.elementByteLocations = pickle.load( open(byteLocDictionaryFName, 'r') ) #rpt.byteLocDict = conduitByteLocationDict - print "Started Drawing at " + strftime("%b-%d-%Y %H:%M:%S") + print("Started Drawing at " + strftime("%b-%d-%Y %H:%M:%S")) log = "Started Drawing at " + strftime("%b-%d-%Y %H:%M:%S") + "\n\nErrors:\n\n" drawCount = 0 conduitErrorCount = 0 @@ -116,7 +116,7 @@ def animateModel(model, startDtime=None, endDtime=None, **kwargs): #DRAW THE CONDUITS if ops['conduitSymb']: - for id, conduit in conduitDicts.iteritems(): + for id, conduit in conduitDicts.items(): #coordPair = coordPairDict['coordinates'] if conduit.coordinates: #this prevents draws if no flow is supplied (RDII and such) @@ -125,11 +125,11 @@ def animateModel(model, startDtime=None, endDtime=None, **kwargs): drawCount += 1 - if drawCount > 0 and drawCount % 2000 == 0: print str(drawCount) + " pipes drawn - simulation time = " + currentTstr + if drawCount > 0 and drawCount % 2000 == 0: print(str(drawCount) + " pipes drawn - simulation time = " + currentTstr) #DRAW THE NODES if ops['nodeSymb']: - for id, node in nodeDicts.iteritems(): + for id, node in nodeDicts.items(): if node.coordinates: #this prevents draws if no flow is supplied (RDII and such) su.drawNode(node, nodeDict, draw, rpt=rpt, dTime=currentTstr, options=ops['nodeSymb'], xplier=xplier) drawCount += 1 @@ -153,7 +153,7 @@ def animateModel(model, startDtime=None, endDtime=None, **kwargs): imgPath = os.path.join(tempImgDir, image) frames.append(Image.open(imgPath)) - print "building gif with " + str(len(glob.glob1(tempImgDir, "*.png"))) + " frames..." 
+ print("building gif with " + str(len(glob.glob1(tempImgDir, "*.png"))) + " frames...") if not imgName: imgName = inp.name gifFile = os.path.join(imgDir, imgName) + ".gif" frameDuration = 1.0 / float(ops['fps']) @@ -165,7 +165,7 @@ def animateModel(model, startDtime=None, endDtime=None, **kwargs): with open(os.path.join(imgDir, "log.txt"), 'w') as logFile: logFile.write(log) - print "Draw Count =" + str(drawCount) - print "Video saved to:\n\t" + gifFile + print("Draw Count =" + str(drawCount)) + print("Video saved to:\n\t" + gifFile) os.startfile(gifFile)#this doesn't seem to work diff --git a/swmmio/graphics/drawing.py b/swmmio/graphics/drawing.py index aea04ed..b014a71 100644 --- a/swmmio/graphics/drawing.py +++ b/swmmio/graphics/drawing.py @@ -136,7 +136,7 @@ def annotate_streets(df, img, text_col): #confirm font file location if not os.path.exists(config.font_file): - print 'Error loading defautl font. Check your config.font_file' + print('Error loading default font. Check your config.font_file') return None unique_sts = df[text_col].unique() @@ -244,13 +244,13 @@ def _annotateMap (canvas, model, model2=None, currentTstr = None, options=None, #Buid the title and files list (handle 1 or two input models) #this is hideous, or elegant? 
files = title = results_string = symbology_string = annotationTxt = "" - files = '\n'.join([m.rpt.path for m in filter(None, [model, model2])]) - title = ' to '.join([m.inp.name for m in filter(None, [model, model2])]) - symbology_string = ', '.join([s['title'] for s in filter(None, [nodeSymb, conduitSymb, parcelSymb])]) + files = '\n'.join([m.rpt.path for m in [_f for _f in [model, model2] if _f]]) + title = ' to '.join([m.inp.name for m in [_f for _f in [model, model2] if _f]]) + symbology_string = ', '.join([s['title'] for s in [_f for _f in [nodeSymb, conduitSymb, parcelSymb] if _f]]) title += "\n" + symbology_string #collect results - for result, value in results.iteritems(): + for result, value in results.items(): results_string += '\n' + result + ": " + str(value) #compile the annotation text diff --git a/swmmio/reporting/batch.py b/swmmio/reporting/batch.py index fad01df..aea313f 100644 --- a/swmmio/reporting/batch.py +++ b/swmmio/reporting/batch.py @@ -44,7 +44,7 @@ def batch_reports(project_dir, results_file, if '.inp' in f: inp_path = os.path.join(path,f) alt = Model(inp_path) - print 'reporting on {}'.format(alt.name) + print('reporting on {}'.format(alt.name)) #generate the reports frpt = reporting.FloodReport(alt, parcel_node_join_df) impact_rpt = reporting.ComparisonReport(baserpt, frpt, @@ -97,7 +97,7 @@ def batch_cost_estimates(baseline_dir, segments_dir, options_dir, results_file, costsdf = functions.estimate_cost_of_new_conduits(baseline, alt, supplemental_cost_data) cost_estimate = costsdf.TotalCostEstimate.sum() / math.pow(10, 6) - print '{}: ${}M'.format(alt.name, round(cost_estimate,1)) + print('{}: ${}M'.format(alt.name, round(cost_estimate,1))) model_id = os.path.splitext(f)[0] with open(results_file, 'a') as res: @@ -131,13 +131,13 @@ def batch_post_process(options_dir, baseline_dir, log_dir, bbox=None, overwrite= current_dir = os.path.join(options_dir, folder) report_dir = os.path.join(current_dir, REPORT_DIR_NAME) if not overwrite and 
os.path.exists(report_dir): - print 'skipping {}'.format(folder) + print('skipping {}'.format(folder)) continue else: #generate the report current_model = Model(current_dir) - print 'Generating report for {}'.format(current_model.inp.name) + print('Generating report for {}'.format(current_model.inp.name)) #reporting.generate_figures(baseline, current_model, bbox=bbox, imgDir=report_dir, verbose=True) report = reporting.Report(baseline, current_model) report.write(report_dir) diff --git a/swmmio/reporting/reporting.py b/swmmio/reporting/reporting.py index d1a9ee2..44d606b 100644 --- a/swmmio/reporting/reporting.py +++ b/swmmio/reporting/reporting.py @@ -262,7 +262,7 @@ def generate_figures(self, rpt_dir, parcel_shp_df, bbox=d68d70): def __str__(self): """print friendly""" - catz = filter(None, self.flood_comparison.Category.unique()) + catz = [_f for _f in self.flood_comparison.Category.unique() if _f] a = ['{}: {}'.format(c, self.impact[c]) for c in catz] files = [self.baseline_report.model.inp.path, self.alt_report.model.inp.path] diff --git a/swmmio/reporting/utils.py b/swmmio/reporting/utils.py index 71bf8e5..da5d3e0 100644 --- a/swmmio/reporting/utils.py +++ b/swmmio/reporting/utils.py @@ -23,7 +23,7 @@ def insert_in_file_2(key, string, newfile): #start writing that thing key = '{}{}{}'.format('{{', key, '}}') #Django style - print key + print(key) with open(newfile, 'r') as newmap: for line in newmap: if key in line: diff --git a/swmmio/reporting/visualize.py b/swmmio/reporting/visualize.py index 0a6b067..3c21b31 100644 
--- a/swmmio/reporting/visualize.py +++ b/swmmio/reporting/visualize.py @@ -39,7 +39,7 @@ def create_map(model1, model2=None, bbox=None, crs=None, filename=None, geometries = [] #array of features #collect the links - for k,v in model2.list_objects('conduit', bbox, subset=subset).items(): + for k,v in list(model2.list_objects('conduit', bbox, subset=subset).items()): props = { 'MaxQPercent':v.maxQpercent, 'id':v.id, diff --git a/swmmio/run_models/run.py b/swmmio/run_models/run.py index 3114044..356c77d 100644 --- a/swmmio/run_models/run.py +++ b/swmmio/run_models/run.py @@ -16,7 +16,7 @@ def run_simple(inp_path, swmm_eng=SWMM_ENGINE_PATH): """ run a model once as is. """ - print 'running {} with {}'.format(inp_path, swmm_eng) + print('running {} with {}'.format(inp_path, swmm_eng)) #inp_path = model.inp.path rpt_path = os.path.splitext(inp_path)[0] + '.rpt' @@ -32,7 +32,7 @@ def run_hot_start_sequence(inp_path, swmm_eng=SWMM_ENGINE_PATH): # if not os.path.exists(hotstart1) and not os.path.exists(hotstart2): #create new model inp with params to save hotstart1 - print 'create new model inp with params to save hotstart1' + print('create new model inp with params to save hotstart1') s = pd.Series(['SAVE HOTSTART "{}"'.format(hotstart1)]) hot1_df = pd.DataFrame(s, columns=['[FILES]']) model = replace_inp_section(model.inp.path, '[FILES]', hot1_df) @@ -42,7 +42,7 @@ def run_hot_start_sequence(inp_path, swmm_eng=SWMM_ENGINE_PATH): # if os.path.exists(hotstart1) and not os.path.exists(hotstart2): #create new model inp with params to use hotstart1 and save hotstart2 - print 'with params to use hotstart1 and save hotstart2' + print('with params to use hotstart1 and save hotstart2') s = pd.Series(['USE HOTSTART "{}"'.format(hotstart1), 'SAVE HOTSTART "{}"'.format(hotstart2)]) hot2_df = pd.DataFrame(s, columns=['[FILES]']) model = replace_inp_section(model.inp.path, '[FILES]', hot2_df) @@ -50,7 +50,7 @@ def run_hot_start_sequence(inp_path, swmm_eng=SWMM_ENGINE_PATH): # if 
os.path.exists(hotstart2): #create new model inp with params to use hotstart2 and not save anything - print 'params to use hotstart2 and not save anything' + print('params to use hotstart2 and not save anything') s = pd.Series(['USE HOTSTART "{}"'.format(hotstart2)]) hot3_df = pd.DataFrame(s, columns=['[FILES]']) diff --git a/swmmio/swmmio.py b/swmmio/swmmio.py index d4b47c0..a0c68d1 100644 --- a/swmmio/swmmio.py +++ b/swmmio/swmmio.py @@ -130,7 +130,7 @@ def to_map(self, filename=None, inproj='epsg:2272'): filename = os.path.join(self.inp.dir, self.inp.name + '.html') with open(BETTER_BASEMAP_PATH, 'r') as bm: - with open(filename, 'wb') as newmap: + with open(filename, 'w') as newmap: for line in bm: if '//INSERT GEOJSON HERE ~~~~~' in line: newmap.write('conduits = {};\n'.format(geojson.dumps(geo_conduits))) @@ -199,6 +199,9 @@ def conduits(self): df['DownstreamInvert'] = df.OutletNodeInvert + df.OutletOffset df['SlopeFtPerFt'] = (df.UpstreamInvert - df.DownstreamInvert) / df.Length + df.InletNode = df.InletNode.astype(str) + df.OutletNode = df.OutletNode.astype(str) + self._conduits_df = df return df @@ -220,6 +223,9 @@ def orifices(self): #create dataframes of relevant sections from the INP orifices_df = create_dataframeINP(inp.path, "[ORIFICES]", comment_cols=False) + if orifices_df.empty: + return pd.DataFrame() + coords_df = create_dataframeINP(inp.path, "[COORDINATES]")#.drop_duplicates() #add conduit coordinates @@ -227,7 +233,8 @@ def orifices(self): verts = create_dataframeINP(inp.path, '[VERTICES]') xys = orifices_df.apply(lambda r: get_link_coords(r,coords_df,verts), axis=1) df = orifices_df.assign(coords=xys.map(lambda x: x[0])) - + df.InletNode = df.InletNode.astype(str) + df.OutletNode = df.OutletNode.astype(str) self._orifices_df = df return df @@ -248,8 +255,11 @@ def weirs(self): rpt = self.rpt #create dataframes of relevant sections from the INP - #BUG why can't comment_cols=False work here? 
- weirs_df = create_dataframeINP(inp.path, "[WEIRS]")[['InletNode', 'OutletNode', 'WeirType', 'CrestHeight']] + weirs_df = create_dataframeINP(inp.path, "[WEIRS]")#[['InletNode', 'OutletNode', 'WeirType', 'CrestHeight']] + if weirs_df.empty: + return pd.DataFrame() + + weirs_df = weirs_df[['InletNode', 'OutletNode', 'WeirType', 'CrestHeight']] coords_df = create_dataframeINP(inp.path, "[COORDINATES]")#.drop_duplicates() #add conduit coordinates @@ -257,6 +267,8 @@ def weirs(self): verts = create_dataframeINP(inp.path, '[VERTICES]') xys = weirs_df.apply(lambda r: get_link_coords(r,coords_df,verts), axis=1) df = weirs_df.assign(coords=xys.map(lambda x: x[0])) + df.InletNode = df.InletNode.astype(str) + df.OutletNode = df.OutletNode.astype(str) self._weirs_df = df @@ -279,12 +291,17 @@ def pumps(self): #create dataframes of relevant sections from the INP pumps_df = create_dataframeINP(inp.path, "[PUMPS]", comment_cols=False) + if pumps_df.empty: + return pd.DataFrame() + coords_df = create_dataframeINP(inp.path, "[COORDINATES]")#.drop_duplicates() #add conduit coordinates verts = create_dataframeINP(inp.path, '[VERTICES]') xys = pumps_df.apply(lambda r: get_link_coords(r,coords_df,verts), axis=1) df = pumps_df.assign(coords=xys.map(lambda x: x[0])) + df.InletNode = df.InletNode.astype(str) + df.OutletNode = df.OutletNode.astype(str) self._pumps_df = df @@ -334,7 +351,7 @@ def nodexy(row): xys = all_nodes.apply(lambda r: nodexy(r), axis=1) all_nodes = all_nodes.assign(coords = xys) - + all_nodes = all_nodes.rename(index=str) self._nodes_df = all_nodes return all_nodes diff --git a/swmmio/tests/data/__init__.py b/swmmio/tests/data/__init__.py index 1c2291b..da8106d 100644 --- a/swmmio/tests/data/__init__.py +++ b/swmmio/tests/data/__init__.py @@ -15,3 +15,9 @@ # Test models paths MODEL_FULL_FEATURES_PATH = os.path.join(DATA_PATH, 'model_full_features.inp') MODEL_BROWARD_COUNTY_PATH = os.path.join(DATA_PATH, 'RUNOFF46_SW5.INP') + +#version control test models 
+MODEL_XSECTION_BASELINE = os.path.join(DATA_PATH, 'baseline_test.inp') +MODEL_XSECTION_ALT_01 = os.path.join(DATA_PATH, 'alt_test1.inp') +MODEL_XSECTION_ALT_02 = os.path.join(DATA_PATH, 'alt_test2.inp') +MODEL_XSECTION_ALT_03 = os.path.join(DATA_PATH, 'alt_test3.inp') diff --git a/swmmio/tests/data/alt_test1.inp b/swmmio/tests/data/alt_test1.inp new file mode 100644 index 0000000..d99e88a --- /dev/null +++ b/swmmio/tests/data/alt_test1.inp @@ -0,0 +1,116 @@ +[TITLE] +;;Project Title/Notes + +[OPTIONS] +;;Option Value +FLOW_UNITS CFS +INFILTRATION HORTON +FLOW_ROUTING KINWAVE +LINK_OFFSETS DEPTH +MIN_SLOPE 0 +ALLOW_PONDING NO +SKIP_STEADY_STATE NO + +START_DATE 03/05/2018 +START_TIME 00:00:00 +REPORT_START_DATE 03/05/2018 +REPORT_START_TIME 00:00:00 +END_DATE 03/05/2018 +END_TIME 06:00:00 +SWEEP_START 1/1 +SWEEP_END 12/31 +DRY_DAYS 0 +REPORT_STEP 00:15:00 +WET_STEP 00:05:00 +DRY_STEP 01:00:00 +ROUTING_STEP 0:00:30 + +INERTIAL_DAMPING PARTIAL +NORMAL_FLOW_LIMITED BOTH +FORCE_MAIN_EQUATION H-W +VARIABLE_STEP 0.75 +LENGTHENING_STEP 0 +MIN_SURFAREA 0 +MAX_TRIALS 0 +HEAD_TOLERANCE 0 +SYS_FLOW_TOL 5 +LAT_FLOW_TOL 5 +MINIMUM_STEP 0.5 +THREADS 1 + +[EVAPORATION] +;;Data Source Parameters +;;-------------- ---------------- +CONSTANT 0.0 +DRY_ONLY NO + +[JUNCTIONS] +;;Name Elevation MaxDepth InitDepth SurDepth Aponded +;;-------------- ---------- ---------- ---------- ---------- ---------- +dummy_node1 -10.99 30 0 0 0 +dummy_node2 -9.24 20 0 0 0 +dummy_node3 -7.76 20 0 0 0 +dummy_node4 -6.98 12.59314 0 0 177885 +dummy_node5 -6.96 13.05439 0 0 73511 +dummy_node6 -6.8 13.27183 0 0 0 + +[OUTFALLS] +;;Name Elevation Type Stage Data Gated Route To +;;-------------- ---------- ---------- ---------------- -------- ---------------- +dummy_outfall -11 FREE YES + +[CONDUITS] +;;Name From Node To Node Length Roughness InOffset OutOffset InitFlow MaxFlow +;;-------------- ---------------- ---------------- ---------- ---------- ---------- ---------- ---------- ---------- +outfall_pipe 
dummy_node1 dummy_outfall 200 0.013 0 0 0 0 +pipe1 dummy_node2 dummy_node1 1675 0.013 0 0 0 0 +pipe2 dummy_node3 dummy_node2 400 0.01 0 0 0 0 +pipe3 dummy_node4 dummy_node3 594 0.013 0 0 0 0 +pipe4 dummy_node5 dummy_node4 400 0.013 0 0 0 0 +pipe5 dummy_node6 dummy_node5 188 0.013 0 0 0 0 + +[XSECTIONS] +;;Link Shape Geom1 Geom2 Geom3 Geom4 Barrels Culvert +;;-------------- ------------ ---------------- ---------- ---------- ---------- ---------- ---------- +outfall_pipe RECT_CLOSED 7 14 0 0 1 +pipe1 RECT_TRIANGULAR 7 14 1.5 0 1 +pipe2 RECT_TRIANGULAR 7 14 1.5 0 1 +pipe3 CIRCULAR 8 0 0 0 1 +pipe4 CIRCULAR 6.5 0 0 0 1 +pipe5 RECT_TRIANGULAR 6.5 13 1.434756791 0 1 + +[DWF] +;;Node Constituent Baseline Patterns +;;-------------- ---------------- ---------- ---------- +dummy_node2 FLOW 0.000275704 "" "" +dummy_node6 FLOW 0.008150676 "" "" + +[REPORT] +;;Reporting Options +INPUT NO +CONTROLS NO +SUBCATCHMENTS ALL +NODES ALL +LINKS ALL + +[TAGS] + +[MAP] +DIMENSIONS 0.000 0.000 10000.000 10000.000 +Units None + +[COORDINATES] +;;Node X-Coord Y-Coord +;;-------------- ------------------ ------------------ +dummy_node1 2054.575 6051.364 +dummy_node2 -2937.400 7704.655 +dummy_node3 -4205.457 9695.024 +dummy_node4 -6163.724 12857.143 +dummy_node5 -9871.589 13723.917 +dummy_node6 -12712.681 14927.769 +dummy_outfall 4927.769 5280.899 + +[VERTICES] +;;Link X-Coord Y-Coord +;;-------------- ------------------ ------------------ + diff --git a/swmmio/tests/data/alt_test2.inp b/swmmio/tests/data/alt_test2.inp new file mode 100644 index 0000000..436872b --- /dev/null +++ b/swmmio/tests/data/alt_test2.inp @@ -0,0 +1,116 @@ +[TITLE] +;;Project Title/Notes + +[OPTIONS] +;;Option Value +FLOW_UNITS CFS +INFILTRATION HORTON +FLOW_ROUTING KINWAVE +LINK_OFFSETS DEPTH +MIN_SLOPE 0 +ALLOW_PONDING NO +SKIP_STEADY_STATE NO + +START_DATE 03/05/2018 +START_TIME 00:00:00 +REPORT_START_DATE 03/05/2018 +REPORT_START_TIME 00:00:00 +END_DATE 03/05/2018 +END_TIME 06:00:00 +SWEEP_START 1/1 +SWEEP_END 
12/31 +DRY_DAYS 0 +REPORT_STEP 00:15:00 +WET_STEP 00:05:00 +DRY_STEP 01:00:00 +ROUTING_STEP 0:00:30 + +INERTIAL_DAMPING PARTIAL +NORMAL_FLOW_LIMITED BOTH +FORCE_MAIN_EQUATION H-W +VARIABLE_STEP 0.75 +LENGTHENING_STEP 0 +MIN_SURFAREA 0 +MAX_TRIALS 0 +HEAD_TOLERANCE 0 +SYS_FLOW_TOL 5 +LAT_FLOW_TOL 5 +MINIMUM_STEP 0.5 +THREADS 1 + +[EVAPORATION] +;;Data Source Parameters +;;-------------- ---------------- +CONSTANT 0.0 +DRY_ONLY NO + +[JUNCTIONS] +;;Name Elevation MaxDepth InitDepth SurDepth Aponded +;;-------------- ---------- ---------- ---------- ---------- ---------- +dummy_node1 -10.99 30 0 0 0 +dummy_node2 -9.24 20 0 0 0 +dummy_node3 -7.76 20 0 0 0 +dummy_node4 -6.98 12.59314 0 0 177885 +dummy_node5 -6.96 13.05439 0 0 73511 +dummy_node6 -6.8 13.27183 0 0 0 + +[OUTFALLS] +;;Name Elevation Type Stage Data Gated Route To +;;-------------- ---------- ---------- ---------------- -------- ---------------- +dummy_outfall -11 FREE YES + +[CONDUITS] +;;Name From Node To Node Length Roughness InOffset OutOffset InitFlow MaxFlow +;;-------------- ---------------- ---------------- ---------- ---------- ---------- ---------- ---------- ---------- +outfall_pipe dummy_node1 dummy_outfall 200 0.013 0 0 0 0 +pipe1 dummy_node2 dummy_node1 1675 0.013 0 0 0 0 +pipe2 dummy_node3 dummy_node2 400 0.01 0 0 0 0 +pipe3 dummy_node4 dummy_node3 594 0.013 0 0 0 0 +pipe4 dummy_node5 dummy_node4 400 0.013 0 0 0 0 +pipe5 dummy_node6 dummy_node5 188 0.013 0 0 0 0 + +[XSECTIONS] +;;Link Shape Geom1 Geom2 Geom3 Geom4 Barrels Culvert +;;-------------- ------------ ---------------- ---------- ---------- ---------- ---------- ---------- +outfall_pipe RECT_CLOSED 7 14 0 0 1 +pipe1 RECT_TRIANGULAR 7 14 1.5 0 1 +pipe2 RECT_TRIANGULAR 7 14 1.5 0 1 +pipe3 RECT_TRIANGULAR 7 14 1.5 0 1 +pipe4 RECT_TRIANGULAR 6.5 13 1.434756791 0 1 +pipe5 RECT_TRIANGULAR 6.5 13 1.434756791 0 1 + +[DWF] +;;Node Constituent Baseline Patterns +;;-------------- ---------------- ---------- ---------- +dummy_node2 FLOW 
0.000275704 "" "" "" +dummy_node6 FLOW 0.008150676 "" "" "" + +[REPORT] +;;Reporting Options +INPUT NO +CONTROLS NO +SUBCATCHMENTS ALL +NODES ALL +LINKS ALL + +[TAGS] + +[MAP] +DIMENSIONS 0.000 0.000 10000.000 10000.000 +Units None + +[COORDINATES] +;;Node X-Coord Y-Coord +;;-------------- ------------------ ------------------ +dummy_node1 2054.575 6051.364 +dummy_node2 -2937.400 7704.655 +dummy_node3 -4205.457 9695.024 +dummy_node4 -6163.724 12857.143 +dummy_node5 -9871.589 13723.917 +dummy_node6 -12712.681 14927.769 +dummy_outfall 4927.769 5280.899 + +[VERTICES] +;;Link X-Coord Y-Coord +;;-------------- ------------------ ------------------ + diff --git a/swmmio/tests/data/alt_test3.inp b/swmmio/tests/data/alt_test3.inp new file mode 100644 index 0000000..50bdea1 --- /dev/null +++ b/swmmio/tests/data/alt_test3.inp @@ -0,0 +1,115 @@ +[TITLE] +;;Project Title/Notes + +[OPTIONS] +;;Option Value +FLOW_UNITS CFS +INFILTRATION HORTON +FLOW_ROUTING KINWAVE +LINK_OFFSETS DEPTH +MIN_SLOPE 0 +ALLOW_PONDING NO +SKIP_STEADY_STATE NO + +START_DATE 03/05/2018 +START_TIME 00:00:00 +REPORT_START_DATE 03/05/2018 +REPORT_START_TIME 00:00:00 +END_DATE 03/05/2018 +END_TIME 06:00:00 +SWEEP_START 1/1 +SWEEP_END 12/31 +DRY_DAYS 0 +REPORT_STEP 00:15:00 +WET_STEP 00:05:00 +DRY_STEP 01:00:00 +ROUTING_STEP 0:00:30 + +INERTIAL_DAMPING PARTIAL +NORMAL_FLOW_LIMITED BOTH +FORCE_MAIN_EQUATION H-W +VARIABLE_STEP 0.75 +LENGTHENING_STEP 0 +MIN_SURFAREA 0 +MAX_TRIALS 0 +HEAD_TOLERANCE 0 +SYS_FLOW_TOL 5 +LAT_FLOW_TOL 5 +MINIMUM_STEP 0.5 +THREADS 1 + +[EVAPORATION] +;;Data Source Parameters +;;-------------- ---------------- +CONSTANT 0.0 +DRY_ONLY NO + +[JUNCTIONS] +;;Name Elevation MaxDepth InitDepth SurDepth Aponded +;;-------------- ---------- ---------- ---------- ---------- ---------- +dummy_node1 -15 30 0 0 0 +dummy_node2 -9.24 20 0 0 0 +dummy_node3 -7.76 20 0 0 0 +dummy_node4 -6.98 12.59314 0 0 177885 +dummy_node5 -6.96 15 0 0 73511 +dummy_node6 -6.8 13.27183 0 0 0 + +[OUTFALLS] +;;Name 
Elevation Type Stage Data Gated Route To +;;-------------- ---------- ---------- ---------------- -------- ---------------- +dummy_outfall -11 FREE YES + +[CONDUITS] +;;Name From Node To Node Length Roughness InOffset OutOffset InitFlow MaxFlow +;;-------------- ---------------- ---------------- ---------- ---------- ---------- ---------- ---------- ---------- +outfall_pipe dummy_node1 dummy_outfall 200 0.013 0 0 0 0 +pipe1 dummy_node2 dummy_node1 1675 0.013 0 0 0 0 +pipe2 dummy_node3 dummy_node2 400 0.01 0 0 0 0 +pipe3 dummy_node4 dummy_node3 594 0.013 0 0 0 0 +pipe4 dummy_node5 dummy_node4 400 0.013 0 0 0 0 +pipe5 dummy_node6 dummy_node5 666 0.013 0 0 0 0 + +[XSECTIONS] +;;Link Shape Geom1 Geom2 Geom3 Geom4 Barrels Culvert +;;-------------- ------------ ---------------- ---------- ---------- ---------- ---------- ---------- +outfall_pipe RECT_CLOSED 7 14 0 0 1 +pipe1 RECT_TRIANGULAR 7 14 1.5 0 1 +pipe2 RECT_TRIANGULAR 7 14 1.5 0 1 +pipe3 RECT_TRIANGULAR 7 14 1.5 0 1 +pipe4 RECT_TRIANGULAR 6.5 13 1.434756791 0 1 +pipe5 RECT_TRIANGULAR 6.5 13 1.434756791 0 1 + +[DWF] +;;Node Constituent Baseline Patterns +;;-------------- ---------------- ---------- ---------- +dummy_node2 FLOW 0.000275704 "" "" "" +dummy_node6 FLOW 0.008150676 "" "" "" + +[REPORT] +;;Reporting Options +INPUT NO +CONTROLS NO +SUBCATCHMENTS ALL +NODES ALL +LINKS ALL + +[TAGS] + +[MAP] +DIMENSIONS 0.000 0.000 10000.000 10000.000 +Units None + +[COORDINATES] +;;Node X-Coord Y-Coord +;;-------------- ------------------ ------------------ +dummy_node1 2054.575 6051.364 +dummy_node2 -2937.400 7704.655 +dummy_node3 -4205.457 9695.024 +dummy_node4 -6163.724 12857.143 +dummy_node5 -9871.589 13723.917 +dummy_node6 -12712.681 14927.769 +dummy_outfall 4927.769 5280.899 + +[VERTICES] +;;Link X-Coord Y-Coord +;;-------------- ------------------ ------------------ diff --git a/swmmio/tests/data/baseline_test.inp b/swmmio/tests/data/baseline_test.inp new file mode 100644 index 0000000..db508f5 --- /dev/null +++ 
b/swmmio/tests/data/baseline_test.inp @@ -0,0 +1,116 @@ +[TITLE] +;;Project Title/Notes + +[OPTIONS] +;;Option Value +FLOW_UNITS CFS +INFILTRATION HORTON +FLOW_ROUTING KINWAVE +LINK_OFFSETS DEPTH +MIN_SLOPE 0 +ALLOW_PONDING NO +SKIP_STEADY_STATE NO + +START_DATE 03/05/2018 +START_TIME 00:00:00 +REPORT_START_DATE 03/05/2018 +REPORT_START_TIME 00:00:00 +END_DATE 03/05/2018 +END_TIME 06:00:00 +SWEEP_START 1/1 +SWEEP_END 12/31 +DRY_DAYS 0 +REPORT_STEP 00:15:00 +WET_STEP 00:05:00 +DRY_STEP 01:00:00 +ROUTING_STEP 0:00:30 + +INERTIAL_DAMPING PARTIAL +NORMAL_FLOW_LIMITED BOTH +FORCE_MAIN_EQUATION H-W +VARIABLE_STEP 0.75 +LENGTHENING_STEP 0 +MIN_SURFAREA 0 +MAX_TRIALS 0 +HEAD_TOLERANCE 0 +SYS_FLOW_TOL 5 +LAT_FLOW_TOL 5 +MINIMUM_STEP 0.5 +THREADS 1 + +[EVAPORATION] +;;Data Source Parameters +;;-------------- ---------------- +CONSTANT 0.0 +DRY_ONLY NO + +[JUNCTIONS] +;;Name Elevation MaxDepth InitDepth SurDepth Aponded +;;-------------- ---------- ---------- ---------- ---------- ---------- +dummy_node1 -10.99 30 0 0 0 +dummy_node2 -9.24 20 0 0 0 +dummy_node3 -7.76 20 0 0 0 +dummy_node4 -6.98 12.59314 0 0 177885 +dummy_node5 -6.96 13.05439 0 0 73511 +dummy_node6 -6.8 13.27183 0 0 0 + +[OUTFALLS] +;;Name Elevation Type Stage Data Gated Route To +;;-------------- ---------- ---------- ---------------- -------- ---------------- +dummy_outfall -11 FREE YES + +[CONDUITS] +;;Name From Node To Node Length Roughness InOffset OutOffset InitFlow MaxFlow +;;-------------- ---------------- ---------------- ---------- ---------- ---------- ---------- ---------- ---------- +outfall_pipe dummy_node1 dummy_outfall 200 0.013 0 0 0 0 +pipe1 dummy_node2 dummy_node1 1675 0.013 0 0 0 0 +pipe2 dummy_node3 dummy_node2 400 0.01 0 0 0 0 +pipe3 dummy_node4 dummy_node3 594 0.013 0 0 0 0 +pipe4 dummy_node5 dummy_node4 400 0.013 0 0 0 0 +pipe5 dummy_node6 dummy_node5 188 0.013 0 0 0 0 + +[XSECTIONS] +;;Link Shape Geom1 Geom2 Geom3 Geom4 Barrels Culvert +;;-------------- ------------ ---------------- 
---------- ---------- ---------- ---------- ---------- +outfall_pipe RECT_CLOSED 7 14 0 0 1 +pipe1 RECT_TRIANGULAR 7 14 1.5 0 1 +pipe2 RECT_TRIANGULAR 7 14 1.5 0 1 +pipe3 RECT_TRIANGULAR 7 14 1.5 0 1 +pipe4 CIRCULAR 6.5 0 0 0 1 +pipe5 RECT_TRIANGULAR 6.5 13 1.434756791 0 1 + +[DWF] +;;Node Constituent Baseline Patterns +;;-------------- ---------------- ---------- ---------- +dummy_node2 FLOW 0.000275704 "" "" "" +dummy_node6 FLOW 0.008150676 "" "" "" + +[REPORT] +;;Reporting Options +INPUT NO +CONTROLS NO +SUBCATCHMENTS ALL +NODES ALL +LINKS ALL + +[TAGS] + +[MAP] +DIMENSIONS 0.000 0.000 10000.000 10000.000 +Units None + +[COORDINATES] +;;Node X-Coord Y-Coord +;;-------------- ------------------ ------------------ +dummy_node1 2054.575 6051.364 +dummy_node2 -2937.400 7704.655 +dummy_node3 -4205.457 9695.024 +dummy_node4 -6163.724 12857.143 +dummy_node5 -9871.589 13723.917 +dummy_node6 -12712.681 14927.769 +dummy_outfall 4927.769 5280.899 + +[VERTICES] +;;Link X-Coord Y-Coord +;;-------------- ------------------ ------------------ + diff --git a/swmmio/tests/test_version_control.py b/swmmio/tests/test_version_control.py new file mode 100644 index 0000000..6ecb376 --- /dev/null +++ b/swmmio/tests/test_version_control.py @@ -0,0 +1,34 @@ +from swmmio.tests.data import (MODEL_XSECTION_BASELINE, MODEL_XSECTION_ALT_01, + MODEL_XSECTION_ALT_02, MODEL_XSECTION_ALT_03) +from swmmio import swmmio +from swmmio.version_control import utils as vc_utils +from swmmio.version_control import inp +from swmmio.utils import functions as funcs + + +def test_complete_inp_headers(): + + headers = [ + '[TITLE]','[OPTIONS]','[EVAPORATION]','[JUNCTIONS]','[OUTFALLS]', + '[CONDUITS]','[XSECTIONS]','[DWF]','[REPORT]','[TAGS]','[MAP]', + '[COORDINATES]','[VERTICES]', + ] + + h1 = funcs.complete_inp_headers(MODEL_XSECTION_BASELINE) + + assert(all(h in h1['headers'] for h in headers)) + assert(h1['order'] == headers) + + +def test_create_inp_build_instructions(): + + 
inp.create_inp_build_instructions(MODEL_XSECTION_BASELINE, + MODEL_XSECTION_ALT_03, + 'vc_dir', + 'test_version_id', 'cool comments') + + latest_bi = vc_utils.newest_file('vc_dir') + bi = inp.BuildInstructions(latest_bi) + + juncs = bi.instructions['[JUNCTIONS]'] + assert(all(j in juncs.altered.index for j in ['dummy_node1', 'dummy_node5'])) diff --git a/swmmio/utils/dataframes.py b/swmmio/utils/dataframes.py index f82b3a7..1e9272e 100644 --- a/swmmio/utils/dataframes.py +++ b/swmmio/utils/dataframes.py @@ -38,8 +38,8 @@ def create_dataframeINP(inp_path, section='[CONDUITS]', ignore_comments=True, if not tempfilepath: #if this head (section) was not found in the textfile, return a #blank dataframe with the appropriate schema - print 'header "{}" not found in "{}"'.format(section, inp_path) - print 'returning empty dataframe' + print('header "{}" not found in "{}"'.format(section, inp_path)) + print('returning empty dataframe') headerlist = headerdefs['headers'].get(section, 'blob').split() + [';', 'Comment', 'Origin'] blank_df = pd.DataFrame(data=None, columns=headerlist).set_index(headerlist[0]) return blank_df @@ -64,16 +64,22 @@ def create_dataframeINP(inp_path, section='[CONDUITS]', ignore_comments=True, os.remove(tempfilepath) - return df + return df.rename(index=str) def get_link_coords(row, nodexys, verticies): """for use in an df.apply, to get coordinates of a conduit/link """ - x1 = round(nodexys.at[row.InletNode, 'X'], 4) - y1 = round(nodexys.at[row.InletNode, 'Y'], 4) - x2 = round(nodexys.at[row.OutletNode, 'X'], 4) - y2 = round(nodexys.at[row.OutletNode, 'Y'], 4) + + #cast IDs to string + inlet_id = str(row.InletNode) + outlet_id =str(row.OutletNode) + xys_str = nodexys.rename(index=str) + + x1 = round(xys_str.at[inlet_id, 'X'], 4) + y1 = round(xys_str.at[inlet_id, 'Y'], 4) + x2 = round(xys_str.at[outlet_id, 'X'], 4) + y2 = round(xys_str.at[outlet_id, 'Y'], 4) if None in [x1, x2, y1, y2]: - print row.name, 'problem, no coords' + print(row.name, 
'problem, no coords') #grab any extra verts, place in between up/dwn nodes res = [(x1, y1)] if row.name in verticies.index: @@ -82,7 +88,7 @@ def get_link_coords(row, nodexys, verticies): if isinstance(xs, list) and isinstance(ys, list): #if more than one vert for this link exists, arrays are returned #from verticies.get_value(). it then needs to be zipped up - res = res + zip(xs, ys) + res = res + list(zip(xs, ys)) else: res = res + [(xs, ys)] @@ -104,7 +110,7 @@ def create_dataframeRPT(rpt_path, section='Link Flow Summary', element_id=None): element_id=element_id) if not tempfilepath: - print 'header "{}" not found in "{}"'.format(section, rpt_path) + print('header "{}" not found in "{}"'.format(section, rpt_path)) return None if headerdefs['headers'][section] == 'blob': diff --git a/swmmio/utils/functions.py b/swmmio/utils/functions.py index be5142f..a25dd84 100644 --- a/swmmio/utils/functions.py +++ b/swmmio/utils/functions.py @@ -119,7 +119,7 @@ def trace(node_id): trace(data.OutletNode) #kickoff the trace - print "Starting trace {} from {}".format(mode, startnode) + print ("Starting trace {} from {}".format(mode, startnode)) trace(startnode) - print "Traced {0} nodes from {1}".format(len(traced_nodes), startnode) + print ("Traced {0} nodes from {1}".format(len(traced_nodes), startnode)) return {'nodes':traced_nodes, 'conduits':traced_conduits} diff --git a/swmmio/utils/text.py b/swmmio/utils/text.py index db47b29..0ea7764 100644 --- a/swmmio/utils/text.py +++ b/swmmio/utils/text.py @@ -211,7 +211,7 @@ def extract_section_from_rpt(filepath, sectionheader, element_id=None, cleanhead elem_start_string = ' '.join(["<<<", sectionheader.split()[0], element_id, ">>>"]) if element_id and elem_start_string in line: - print 'element_id found: {}'.format(line) + print('element_id found: {}'.format(line)) #if we should look for an element_id and it #is in the current line elementstartfound = True diff --git a/swmmio/vendor/images2gif.py b/swmmio/vendor/images2gif.py 
index 092f35c..484b035 100644 --- a/swmmio/vendor/images2gif.py +++ b/swmmio/vendor/images2gif.py @@ -587,7 +587,7 @@ def geta(self, alpha, rad): except KeyError: length = rad*2-1 mid = length/2 - q = np.array(range(mid-1,-1,-1)+range(-1,mid)) + q = np.array(list(range(mid-1,-1,-1))+list(range(-1,mid))) a = alpha*(rad*rad - q*q)/(rad*rad) a[mid] = 0 self.a_s[(alpha, rad)] = a @@ -670,7 +670,7 @@ def learn(self): if rad <= 1: rad = 0 - print "Beginning 1D learning: samplepixels =",samplepixels," rad =", rad + print("Beginning 1D learning: samplepixels =",samplepixels," rad =", rad) step = 0 pos = 0 @@ -689,7 +689,7 @@ def learn(self): if i%100 == 99: tmp = '\b'*len(printed_string) printed_string = str((i+1)*100/samplepixels)+"%\n" - print tmp + printed_string, + print(tmp + printed_string, end=' ') p = self.pixels[pos] r = (p >> 16) & 0xff g = (p >> 8) & 0xff @@ -717,7 +717,7 @@ def learn(self): rad = biasRadius >> self.RADIUSBIASSHIFT if rad <= 1: rad = 0 - print "Finished 1D learning: final alpha =",(1.0*alpha)/self.INITALPHA,"!" + print("Finished 1D learning: final alpha =",(1.0*alpha)/self.INITALPHA,"!") def fix(self): for i in range(self.NETSIZE): @@ -782,7 +782,7 @@ def quantize(self, image): if cKDTree: return self.quantize_with_scipy(image) else: - print 'Scipy not available, falling back to slower version.' 
+ print('Scipy not available, falling back to slower version.') return self.quantize_without_scipy(image) @@ -794,7 +794,7 @@ def quantize_with_scipy(self, image): kdtree = cKDTree(self.colormap[:,:3],leafsize=10) result = kdtree.query(px2) colorindex = result[1] - print "Distance:", (result[0].sum()/(w*h)) + print("Distance:", (result[0].sum()/(w*h))) px2[:] = self.colormap[colorindex,:3] return Image.fromarray(px).convert("RGB").quantize(palette=self.paletteImage()) @@ -818,7 +818,8 @@ def quantize_without_scipy(self, image): px[i,j,0],px[i,j,1],px[i,j,2] = val return Image.fromarray(px).convert("RGB").quantize(palette=self.paletteImage()) - def convert(self, (r, g, b)): + def convert(self, xxx_todo_changeme): + (r, g, b) = xxx_todo_changeme i = self.inxsearch(r, g, b) return self.colormap[i,:3] diff --git a/swmmio/version_control/inp.py b/swmmio/version_control/inp.py index b59a5ed..ebaef1b 100644 --- a/swmmio/version_control/inp.py +++ b/swmmio/version_control/inp.py @@ -9,7 +9,7 @@ import sys from copy import deepcopy if sys.version_info[0] < 3: - from StringIO import StringIO + from io import StringIO else: from io import StringIO problem_sections = ['[CURVES]', '[TIMESERIES]', '[RDII]', '[HYDROGRAPHS]'] @@ -45,7 +45,7 @@ def __init__(self, build_instr_file=None): def __add__(self, other): bi = BuildInstructions() - for section, change_obj in self.instructions.iteritems(): + for section, change_obj in self.instructions.items(): if section in other.instructions: new_change = change_obj + other.instructions[section] bi.instructions[section] = new_change @@ -53,7 +53,7 @@ def __add__(self, other): #section doesn't exist in other, maintain current instructions bi.instructions[section] = change_obj - for section, change_obj in other.instructions.iteritems(): + for section, change_obj in other.instructions.items(): if section not in self.instructions: bi.instructions[section] = change_obj @@ -85,7 +85,7 @@ def save(self, dir, filename): filepath = os.path.join(dir, 
filename) with open (filepath, 'w') as f: vc_utils.write_meta_data(f, self.metadata) - for section, change_obj in self.instructions.iteritems(): + for section, change_obj in self.instructions.items(): section_df = pd.concat([change_obj.removed, change_obj.altered, change_obj.added]) vc_utils.write_inp_section(f, allheaders=None, sectionheader=section, section_data=section_df, pad_top=False, na_fill='NaN') @@ -243,7 +243,7 @@ def generate_inp_from_diffs(basemodel, inpdiffs, target_dir): #instructions applied with open (newinp, 'w') as f: for section in allheaders['order']: - print section + print(section) if section not in problem_sections and allheaders['headers'][section] != 'blob': #check if a changes from baseline spreadheet exists, and use this #information if available to create the changes array diff --git a/swmmio/version_control/tests/validate.py b/swmmio/version_control/tests/validate.py index 9dd9dc4..ca4591c 100644 --- a/swmmio/version_control/tests/validate.py +++ b/swmmio/version_control/tests/validate.py @@ -10,7 +10,7 @@ def search_for_duplicates(inp_path, verbose = False): """ headers = funcs.complete_inp_headers(inp_path)['headers'] dups_found = False - for header, cols, in headers.iteritems(): + for header, cols, in headers.items(): if cols != 'blob': df = dataframes.create_dataframeINP(inp_path, section=header) @@ -18,11 +18,11 @@ def search_for_duplicates(inp_path, verbose = False): n_unique = len(elements.unique()) #number of unique elements n_total = len(elements) #total number of elements if verbose: - print '{} -> (uniques, total) -> ({}, {})'.format(header, n_unique , n_total) + print('{} -> (uniques, total) -> ({}, {})'.format(header, n_unique , n_total)) if n_unique != n_total: dups = ', '.join(df[df.index.duplicated()].index.unique().tolist()) - print 'duplicate found in {}\nsection: {}\n{}'.format(inp_path, header, dups) + print('duplicate found in {}\nsection: {}\n{}'.format(inp_path, header, dups)) dups_found = True return dups_found 
diff --git a/swmmio/version_control/utils.py b/swmmio/version_control/utils.py index 06be24b..f845646 100644 --- a/swmmio/version_control/utils.py +++ b/swmmio/version_control/utils.py @@ -120,11 +120,11 @@ def bi_is_current(build_instr_file): #parents = baseline.update(alternatives) # print meta['Parent Models']['Baseline'] # print alternatives - for inp, revisiondate in baseline.iteritems(): + for inp, revisiondate in baseline.items(): if modification_date(inp) != revisiondate: return False - for inp, revisiondate in alternatives.iteritems(): + for inp, revisiondate in alternatives.items(): if modification_date(inp) != revisiondate: return False diff --git a/swmmio/version_control/version_control.py b/swmmio/version_control/version_control.py index f49f8a7..aab35e4 100644 --- a/swmmio/version_control/version_control.py +++ b/swmmio/version_control/version_control.py @@ -52,7 +52,7 @@ def propagate_changes_from_baseline(baseline_dir, alternatives_dir, combi_dir, bi.metadata['Parent Models']['Baseline'] = {baseinp:vc_utils.modification_date(baseinp)} bi.metadata['Log'].update({version_id:comments}) bi.save(vc_directory, version_id+'.txt') - print 'rebuilding {} with changes to baseline'.format(model.name) + print('rebuilding {} with changes to baseline'.format(model.name)) bi.build(baseline_dir, model.inp.path) #overwrite old inp @@ -92,14 +92,14 @@ def create_combinations(baseline_dir, rsn_dir, combi_dir, version_id='', #identify all scenarios (cartesian product of sets of IPs between each RSN) #then isolate child scenarios with atleast 2 parents (sets with one parent #are already modeled as IPs within the RSNs) - all_scenarios = [filter(None, s) for s in itertools.product(*IPs)] + all_scenarios = [[_f for _f in s if _f] for s in itertools.product(*IPs)] child_scenarios = [s for s in all_scenarios if len(s) > 1] #notify user of what was initially found - str_IPs = '\n'.join([', '.join(filter(None, i)) for i in IPs]) - print ('Found {} implementation phases among 
{} networks:\n{}\n' + str_IPs = '\n'.join([', '.join([_f for _f in i if _f]) for i in IPs]) + print(('Found {} implementation phases among {} networks:\n{}\n' 'This yeilds {} combined scenarios ({} total)'.format(len(IP_dirs), - len(RSN_dirs),str_IPs,len(child_scenarios),len(all_scenarios) - 1)) + len(RSN_dirs),str_IPs,len(child_scenarios),len(all_scenarios) - 1))) # ========================================================================== # UPDATE/CREATE THE PARENT MODEL BUILD INSTRUCTIONS @@ -109,7 +109,7 @@ def create_combinations(baseline_dir, rsn_dir, combi_dir, version_id='', vc_dir = os.path.join(ip_dir, 'vc') if not os.path.exists(vc_dir): - print 'creating new build instructions for {}'.format(ip_model.name) + print('creating new build instructions for {}'.format(ip_model.name)) inp.create_inp_build_instructions(baseinp, ip_model.inp.path, vc_dir, version_id, comments) @@ -120,7 +120,7 @@ def create_combinations(baseline_dir, rsn_dir, combi_dir, version_id='', if not vc_utils.bi_is_current(latest_bi): #revision date of the alt doesn't match the newest build #instructions for this 'imp_level', so we should refresh it - print 'updating build instructions for {}'.format(ip_model.name) + print('updating build instructions for {}'.format(ip_model.name)) inp.create_inp_build_instructions(baseinp, ip_model.inp.path, vc_dir, version_id, comments) @@ -144,7 +144,7 @@ def create_combinations(baseline_dir, rsn_dir, combi_dir, version_id='', os.mkdir(new_dir) newinppath = os.path.join(new_dir, newcombi + '.inp') - print 'creating new child model: {}'.format(newcombi) + print('creating new child model: {}'.format(newcombi)) new_build_instructions = sum(build_instrcts) new_build_instructions.save(vc_dir, version_id+'.txt') new_build_instructions.build(baseline_dir, newinppath) @@ -157,7 +157,7 @@ def create_combinations(baseline_dir, rsn_dir, combi_dir, version_id='', if not vc_utils.bi_is_current(latest_bi): #revision date of the alt doesn't match the newest build 
#instructions for this 'imp_level', so we should refresh it - print 'updating child build instructions for {}'.format(newcombi) + print('updating child build instructions for {}'.format(newcombi)) newinppath = os.path.join(new_dir, newcombi + '.inp') new_build_instructions = sum(build_instrcts) new_build_instructions.save(vc_dir, version_id+'.txt')