
Commit

Read and display some model infos #232
glebbelov committed Mar 12, 2024
1 parent 864fc07 commit b84688f
Showing 8 changed files with 280 additions and 11 deletions.
5 changes: 4 additions & 1 deletion include/mp/flat/constr_algebraic.h
@@ -7,6 +7,7 @@

#include <string>
#include <cmath>
#include <cfloat>

#include "mp/flat/constr_base.h"
#include "mp/flat/expr_quadratic.h"
@@ -247,7 +248,9 @@ inline void WriteJSON(JSONW jw,
/// Write alg con range
inline void WriteJSON(JSONW jw,
const AlgConRange& acr) {
jw << acr.lb() << acr.ub();
jw
<< (acr.lb() < -DBL_MAX ? -DBL_MAX : acr.lb())
<< (acr.ub() > DBL_MAX ? DBL_MAX : acr.ub());
}

/// Write alg con rhs
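Both WriteJSON overloads now clamp infinite bounds to ±DBL_MAX before writing them, presumably because standard JSON has no literal for infinities. A minimal Python sketch of the same clamping idea (the clamp helper is illustrative, not part of this commit):

import json, sys

DBL_MAX = sys.float_info.max  # same value as C's DBL_MAX

def clamp(x: float) -> float:
    # Clamp infinities (and anything beyond DBL_MAX) to finite IEEE limits
    return max(-DBL_MAX, min(DBL_MAX, x))

# A one-sided range such as (-inf, 10] is then emitted as [-DBL_MAX, 10]:
print(json.dumps({"bounds": [clamp(float("-inf")), clamp(10.0)]}))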
5 changes: 4 additions & 1 deletion include/mp/flat/converter_model.h
@@ -5,6 +5,7 @@
#include <vector>
#include <algorithm>
#include <cmath>
#include <cfloat>

#include "mp/flat/obj_std.h"
#include "mp/flat/constr_std.h"
@@ -89,7 +90,9 @@ class FlatModel
jw["VAR_index"] = i_actual;
if (var_names_storage_.size() > i)
jw["name"] = var_names_[i];
jw["bounds"] << lbs[i] << ubs[i];
jw["bounds"]
<< (lbs[i] < -DBL_MAX ? -DBL_MAX : lbs[i])
<< (ubs[i] > DBL_MAX ? DBL_MAX : ubs[i]);
jw["type"] = (int)types[i];
jw["is_from_nl"] = (int)is_var_original(i_actual);
}
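The variable loop above emits one JSONL record per variable with its index, optional name, clamped bounds, type code and NL-origin flag. An illustrative record (all field values are invented):

import json

var_record = {
    "VAR_index": 0,
    "name": "x",
    "bounds": [0.0, 1.7976931348623157e+308],  # upper bound clamped to DBL_MAX
    "type": 1,
    "is_from_nl": 1,
}
print(json.dumps(var_record))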
45 changes: 36 additions & 9 deletions support/modelexplore/modelexplore.py
@@ -1,26 +1,53 @@

import streamlit as st

# To work in st 1.30.1, see
# https://discuss.streamlit.io/t/axioserror-request-failed-with-status-code-403/38112/13
input_file = st.file_uploader("Model file (JSONL)")
from scripts.python.explorer import ModelExplorer
from scripts.python.modelreader import ReadExplorerModel
from scripts.python.matcher import MatchSubmodel

# To work with local files in st 1.30.1, see
# https://discuss.streamlit.io/t/axioserror-request-failed-with-status-code-403/38112/13.
# The corresponding settings should not be used on a server.
uploader = st.file_uploader("Model file (JSONL)")

left_column, right_column = st.columns(2)

# You can use a column just like st.sidebar:
srch = left_column.text_input('Search pattern:')

# Or even better, call Streamlit functions inside a "with" block:
fwd = right_column.checkbox('Add descendants', disabled=True)
bwd = right_column.checkbox('Add ancestors', disabled=True)

if input_file is not None:
explorer = ModelExplorer()

# Cache the reading function
@st.cache_data
def ReadModel(uploader):
    return ReadExplorerModel(uploader)

# Cache the matching function?
# @st.cache_data Need cacheable Model.
def MatchSelection(m, srch, fwd, bwd):
    return MatchSubmodel(m, srch, fwd, bwd)

# Write dictionary of entries
def WriteDict(d):
    for k, v in d.items():
        with st.expander("""### """ + k):
            st.write(v)

# Or even better, call Streamlit functions inside a "with" block:
if uploader is not None:
    model = ReadModel(uploader)
    subm1, subm2 = MatchSelection(model, srch, fwd, bwd)
    bytes1_data = subm1.GetData()
    bytes2_data = subm2.GetData()
    with left_column:
        bytes_data = input_file.read()
        st.write("NL model")
        st.write(bytes_data)
        st.write("""## NL model""")
        WriteDict(bytes1_data)
    with right_column:
        st.write("Flat model")
        st.write("""## Flat model""")
        WriteDict(bytes2_data)
else:
    with left_column:
        st.write("No file selected.")
23 changes: 23 additions & 0 deletions support/modelexplore/scripts/python/graph.py
@@ -0,0 +1,23 @@
# This Python file uses the following encoding: utf-8

# if __name__ == "__main__":
# pass

class DiGraph:
    """
    A simple digraph or a wrapper around some graph library
    """

    def __init__(self):
        self._nodes = []
        self._arcs = []

    def AddNode(self, data=None):
        self._nodes.append(data)
        return len(self._nodes)-1

    def GetNode(self, idx):
        return self._nodes[idx]

    def ToText(self):
        return str(self._nodes)
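DiGraph stores node payloads only for now (the _arcs list is never filled in this commit). A quick usage sketch, assuming it is run from support/modelexplore so the scripts package resolves:

from scripts.python.graph import DiGraph

g = DiGraph()
i = g.AddNode({"name": "x"})   # AddNode returns the index of the new node: 0
j = g.AddNode({"name": "C1"})  # 1
assert g.GetNode(i)["name"] == "x"
print(g.ToText())              # plain string form of the node list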
28 changes: 28 additions & 0 deletions support/modelexplore/scripts/python/matcher.py
@@ -0,0 +1,28 @@
# This Python file uses the following encoding: utf-8

# if __name__ == "__main__":
# pass

from scripts.python.model import Model
from scripts.python.modelview import ModelView

class Matcher:
    """
    Selects a submodel
    """

    def __init__(self):
        self.data = None


def MatchSubmodel(m: Model, patt: str, fwd: bool, bwd: bool):
    """
    Match a submodel containing the \a pattern,
    optionally extended by forward/backward
    reformulation graph search
    """
    mv1 = ModelView()
    mv2 = ModelView()
    mv1.SetData(m.MatchOrigModel(patt))
    mv2.SetData(m.MatchFinalModel(patt))
    return mv1, mv2
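Matcher itself is still a stub; the module-level MatchSubmodel() does the work and ignores the fwd/bwd flags, matching the disabled checkboxes in the UI. A hypothetical call (the variable record is made up):

from scripts.python.model import Model
from scripts.python.matcher import MatchSubmodel

m = Model()                    # normally produced by ReadExplorerModel()
m.UpdateVar(0, {"printed": "x in [0, 10]", "is_from_nl": 1})
mv_orig, mv_flat = MatchSubmodel(m, "x", fwd=False, bwd=False)
print(mv_orig.GetData()["NL Variables"])   # "\n\nx in [0, 10]"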
115 changes: 115 additions & 0 deletions support/modelexplore/scripts/python/model.py
@@ -0,0 +1,115 @@
# This Python file uses the following encoding: utf-8

# if __name__ == "__main__":
# pass

import streamlit as st

from scripts.python.graph import DiGraph

class Model:
    """
    An optimization model with conversion
    graph
    """

    def __init__(self):
        self._graph = DiGraph()  ## Underlying graph

        self._vars = []          ## Pointers to various parts of the graph
        self._cons_NL_all = []
        self._cons_NL = {        ## NL + SOS
            "All": [],
            #"Linear": [],
            #"Logical": [],
            "SOS": []}
        self._cons_Flat = {}
        self._cons_Flat_Group = {}
        self._objs_NL = []
        self._objs = []

    def UpdateVar(self, idx, data):
        self._updateNodeData(self._vars, idx, data)

    def UpdateNLObj(self, idx, data):
        self._updateNodeData(self._objs_NL, idx, data)

    def UpdateFlatObj(self, idx, data):
        self._updateNodeData(self._objs, idx, data)

    def UpdateNLCon(self, type, idx, data):
        if "nonlin"==type:
            self._updateNodeData(self._cons_NL["All"], idx, data)
        elif "lin"==type:
            self._updateNodeData(self._cons_NL["All"], idx, data)
        else: # "logical"==type:
            self._updateNodeData(self._cons_NL["All"], idx, data)

    def UpdateFlatConGroup(self, type, data):
        self._cons_Flat_Group[type] = data

    def UpdateFlatCon(self, type, idx, data):
        if type not in self._cons_Flat:
            self._cons_Flat[type] = []
        self._updateNodeData(self._cons_Flat[type], idx, data)

    def _updateNodeData(self, specnodecnt, idx, data):
        data1, upd = self._updateItemData(specnodecnt, idx, data)
        if (not upd):
            idx = self._graph.AddNode(data1)
            data1["node_index"] = idx

    def _updateItemData(self, specnodecnt, idx, data):
        if len(specnodecnt)<=idx:
            specnodecnt.insert(idx, {})
        if (specnodecnt[idx] is None):  ## No such item
            specnodecnt[idx] = {}
        ifEmpty = 0==len(specnodecnt[idx])
        self._updateMap(specnodecnt[idx], data)
        return specnodecnt[idx], ifEmpty

    def _updateMap(self, data1, data2):
        data1.update(data2)

    # Match keyword to the original model
    def MatchOrigModel(self, keyw):
        result = {}
        result["NL Variables"] = self._matchRecords(self._vars, keyw, "is_from_nl")
        result["NL Objectives"] = self._matchRecords(self._objs_NL, keyw)
        result["NL Constraints"] \
            = self._matchRecords(self._cons_NL.get("All"), keyw)
        #result["NL Nonlinear Constraints"] \
        #    = self._matchRecords(self._cons_NL.get("Nonlinear"), keyw)
        #result["NL Linear Constraints"] \
        #    = self._matchRecords(self._cons_NL.get("Linear"), keyw)
        #result["NL Logical Constraints"] \
        #    = self._matchRecords(self._cons_NL.get("Logical"), keyw)
        #result["NL SOS Constraints"] \
        #    = self._matchRecords(self._cons_NL.get("SOS"), keyw)
        return result

    # Match keyword to the final model
    def MatchFinalModel(self, keyw):
        result = {}
        result["Variables"] = self._matchRecords(self._vars, keyw)
        result["Objectives"] = self._matchRecords(self._objs, keyw)
        for ct, cv in self._cons_Flat.items():
            result["Constraints '" + ct + "'"] \
                = self._matchRecords(self._cons_Flat[ct], keyw)
        return result

    # Add records containing the keyword
    # @return a string of Markdown paragraphs
    def _matchRecords(self, cnt, keyw, keyNeed1=None):
        result = ""
        if cnt is None:
            return result
        for i in cnt:
            pr = str(i)  ## TODO printed form
            if "printed" in i:
                pr = i["printed"]
            if (""==keyw or keyw in pr) \
               and (keyNeed1==None \
                    or (keyNeed1 in i and 1==i[keyNeed1])):
                result = result + "\n\n" + pr  ## Markdown
        return result
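The matching itself happens in _matchRecords(): a record is kept if its printed form contains the keyword (an empty keyword matches everything) and, when keyNeed1 is given, if that field equals 1. A small illustration with invented records:

from scripts.python.model import Model

recs = [
    {"printed": "C1: x + 2*y <= 5", "is_from_nl": 1},
    {"printed": "C2: max(x, y) >= 1"},
]
m = Model()
print(m._matchRecords(recs, "max"))             # keeps only C2
print(m._matchRecords(recs, "", "is_from_nl"))  # keeps only records flagged is_from_nl == 1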
48 changes: 48 additions & 0 deletions support/modelexplore/scripts/python/modelreader.py
@@ -0,0 +1,48 @@
# This Python file uses the following encoding: utf-8

# if __name__ == "__main__":
# pass

import streamlit as st

import json
from scripts.python.model import Model

class ModelReader:
    """
    Model reader
    """

    def __init__(self):
        self._model = Model()

    def ReadModel(self, uploader):
        for line in uploader:
            # removing the new line characters
            self._processLine(line.rstrip())
        return self._model

    # Process next line
    def _processLine(self, line: str):
        values = json.loads(line)
        self._addDataChunk(values)

    # Add data chunk as a JSON-like object
    def _addDataChunk(self, chunk):
        if "VAR_index" in chunk:
            self._model.UpdateVar(chunk["VAR_index"], chunk)
        elif "NL_OBJECTIVE_index" in chunk:
            self._model.UpdateNLObj(chunk["NL_OBJECTIVE_index"], chunk)
        elif "NL_CON_TYPE" in chunk:
            self._model.UpdateNLCon(chunk["NL_CON_TYPE"], chunk["index"], chunk)
        elif "OBJECTIVE_index" in chunk:
            self._model.UpdateFlatObj(chunk["OBJECTIVE_index"], chunk)
        elif "CON_GROUP" in chunk:
            self._model.UpdateFlatConGroup(chunk["CON_TYPE"], chunk)
        elif "CON_TYPE" in chunk:
            self._model.UpdateFlatCon(chunk["CON_TYPE"], chunk["index"], chunk)


def ReadExplorerModel(uploader):
    mr = ModelReader()
    return mr.ReadModel(uploader)
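Each JSONL line is routed by its key: VAR_index, NL_OBJECTIVE_index, NL_CON_TYPE, OBJECTIVE_index, CON_GROUP or CON_TYPE. A sketch feeding the reader two hand-written records (the field values and the "_quad" type name are invented):

import io
from scripts.python.modelreader import ReadExplorerModel

jsonl = io.BytesIO(
    b'{"VAR_index": 0, "name": "x", "bounds": [0, 10], "type": 1, "is_from_nl": 1}\n'
    b'{"CON_TYPE": "_quad", "index": 0, "printed": "x*x + y <= 3"}\n'
)
# The first line goes to Model.UpdateVar(0, ...), the second to UpdateFlatCon("_quad", 0, ...).
model = ReadExplorerModel(jsonl)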
22 changes: 22 additions & 0 deletions support/modelexplore/scripts/python/modelview.py
@@ -0,0 +1,22 @@
# This Python file uses the following encoding: utf-8

# if __name__ == "__main__":
# pass

class ModelView:
    """
    A view of a (sub) model
    """

    def __init__(self):
        self._data = None

        self._vars = {"Variables": []}
        self._cons = {"Constraints": []}
        self._objs = {"Objectives": []}

    def SetData(self, data):
        self._data = data

    def GetData(self):
        return self._data
