Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Submit python model #426

Open
wants to merge 22 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 8 additions & 0 deletions dash-pipeline/python_model/__byte_counter.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
from __vars import *

class byte_counter:
    """Array of per-index byte counters for packet accounting."""

    def __init__(self, size):
        # One zeroed accumulator per counter index.
        self.counters = [0 for _ in range(size)]

    def count(self, index):
        # Add the current packet's size (global pkt_in from __vars) to the slot.
        self.counters[index] = self.counters[index] + pkt_in.get_pkt_size()
2 changes: 2 additions & 0 deletions dash-pipeline/python_model/__dash_enum.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
class dash_enum:
    """Placeholder base class for DASH enum types (no members defined here)."""
19 changes: 19 additions & 0 deletions dash-pipeline/python_model/__id_map.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@

# Registry mapping generated numeric ids to the objects they identify.
id_map = {}

def _search_obj(search_obj):
    """Return the id already assigned to *search_obj* (identity match), else None."""
    for assigned_id, registered in id_map.items():
        if registered is search_obj:
            return assigned_id
    return None

# Next id to hand out; ids are assigned sequentially from 0.
_id = 0

def generate_id(obj):
    """Return a stable numeric id for *obj*, registering it on first sight."""
    global _id
    existing = _search_obj(obj)
    if existing is not None:
        return existing
    new_id = _id
    id_map[new_id] = obj
    _id += 1
    return new_id
60 changes: 60 additions & 0 deletions dash-pipeline/python_model/__jsonize.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@

def _write_str(file, str_value):
    """Write *str_value* to *file* as a JSON string literal.

    Backslashes and double quotes are escaped so the emitted JSON stays
    well-formed even when the value contains those characters (the
    original wrote the string raw, producing invalid JSON for such input).
    """
    escaped = str_value.replace("\\", "\\\\").replace('"', '\\"')
    file.write('"' + escaped + '"')

def _write_int(file, int_value):
    """Write *int_value* to *file* in decimal notation."""
    file.write(f"{int_value}")

def _write_bool(file, bool_value):
    """Write *bool_value* to *file* as a JSON boolean literal.

    JSON booleans are lowercase ("true"/"false"); the original wrote
    str(bool_value), which emits "True"/"False" and is not valid JSON.
    """
    file.write("true" if bool_value else "false")

def _write_dict(file, dict_value, level):
    """Serialize *dict_value* as a JSON object, one key per line.

    *level* controls the indentation depth (one space per level).
    """
    file.write("{\n")
    indent = " " * level
    last = len(dict_value) - 1
    for pos, (key, val) in enumerate(dict_value.items()):
        file.write(indent + '"' + key + '": ')
        _write_value(file, val, level + 1)
        # Comma after every entry except the last.
        file.write(",\n" if pos < last else "\n")
    file.write(" " * (level - 1) + "}")

def _write_list(file, list_value, level):
    """Serialize *list_value* as a JSON array, one element per line.

    *level* controls the indentation depth (one space per level).
    """
    file.write("[\n")
    indent = " " * level
    last = len(list_value) - 1
    for pos, element in enumerate(list_value):
        file.write(indent)
        _write_value(file, element, level + 1)
        # Comma after every element except the last.
        file.write(",\n" if pos < last else "\n")
    file.write(" " * (level - 1) + "]")

def _write_value(file, value, level):
    """Dispatch serialization of *value* based on its exact type.

    Dispatch is on type(value) (not isinstance), so e.g. bool values are
    routed to _write_bool rather than _write_int even though bool is a
    subclass of int. Unsupported types raise ValueError.
    """
    writers = {
        str: lambda: _write_str(file, value),
        dict: lambda: _write_dict(file, value, level),
        list: lambda: _write_list(file, value, level),
        int: lambda: _write_int(file, value),
        bool: lambda: _write_bool(file, value),
    }
    writer = writers.get(type(value))
    if writer is None:
        raise ValueError("Type not supported")
    writer()

def jsonize(file_name, value):
    """Serialize *value* to the file at *file_name* as JSON-like text.

    Uses a context manager so the file is closed even if serialization
    raises (the original leaked the handle on error).
    """
    with open(file_name, "w") as file:
        _write_value(file, value, 1)
24 changes: 24 additions & 0 deletions dash-pipeline/python_model/__main.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
from __packet_in import *
from dash_parser import *
from __vars import *
from dash_pipeline import *

def run_pipeline(pkt_bytes):
    """Run one packet through the DASH software pipeline.

    Parses *pkt_bytes*, applies the match-action pipeline, then deparses
    the (possibly rewritten) headers and re-attaches the unparsed payload.

    Raises ValueError if the parser rejects the packet or the pipeline
    drops it. Returns the final packet as bytes.
    """
    # Rebind the shared global packet/header state for a fresh run.
    pkt_in.reset()
    hdr.reset()
    pkt_in.set_data(pkt_bytes)
    state = dash_parser(pkt_in, hdr)
    if state == State.reject:
        raise ValueError("Parser rejected the packet")
    # Seed the metadata fields the pipeline reads before apply().
    standard_metadata.ingress_port = 0
    meta.dropped = False
    meta.appliance_id = 0
    meta.encap_data.original_overlay_sip = 0
    meta.encap_data.original_overlay_dip = 0
    apply()
    if is_dropped(standard_metadata):
        raise ValueError("Pipeline dropped the packet")
    # Serialize headers, then append the bits the parser did not consume.
    pkt_out = packet_out()
    dash_deparser(pkt_out, hdr)
    final_pkt = pkt_out.data + pkt_in.get_unparsed_slice()
    return final_pkt.tobytes()
30 changes: 30 additions & 0 deletions dash-pipeline/python_model/__model_apis.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@

class Ternary:
    """Ternary match field: *value* compared under *mask* (string-encoded)."""
    value: str
    mask: str

class LPM:
    """Longest-prefix match field: *value* plus its prefix length."""
    value: str
    prefix_len: int

class Range:
    """Range match field with *first* and *last* bounds (string-encoded)."""
    first: str
    last: str

class Value:
    """One table-key value; which member is consulted depends on the
    key's match kind (exact/ternary/prefix/range/list variants)."""
    exact: str
    ternary: Ternary
    prefix: LPM
    range: Range
    ternary_list: list[Ternary]
    range_list: list[Range]

class InsertRequest:
    """Table insert request; *table* and *action* are numeric ids."""
    table : int
    values: list[Value]
    action: int
    params: list[str]
    priority: int

def insert_entry(insertRequest: InsertRequest):
    """Stub entry point for table insertion; no implementation here."""
    pass
148 changes: 148 additions & 0 deletions dash-pipeline/python_model/__model_server.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,148 @@
from scapy.all import *
from __main import *
from __id_map import *
import socketserver
import json

class InsertRequest:
    """Deserialized table-insert request (mirrors the JSON wire format)."""

    class Value:
        """One key value; which member is meaningful depends on the
        key's match kind in the target table's schema."""

        class Ternary:
            # value matched under mask (string-encoded numbers)
            value : str
            mask : str

        class LPM:
            # prefix value plus prefix length
            value : str
            prefix_len : int

        class Range:
            # first/last bounds of the range
            first : str
            last : str

        exact : str
        ternary : Ternary
        prefix : LPM
        range : Range
        ternary_list : list[Ternary]
        range_list : list[Range]

    table : int  # numeric table id (resolved through id_map)
    values : list[Value]
    action : int  # numeric action id (resolved through id_map)
    params : list[str]
    priority : int


def insert_request_to_table_entry(insertRequest: InsertRequest, key_format: list):
    """Convert a decoded InsertRequest into a table Entry.

    *key_format* lists the match kind (EXACT / TERNARY / LIST / RANGE /
    RANGE_LIST / LPM) of each key field, in the same order as
    insertRequest.values.  Numeric strings are parsed with base
    auto-detection via int(s, 0) (so "0x..." hex is accepted).

    Raises ValueError for an unrecognized match kind (previously such a
    key was silently skipped, leaving entry.values misaligned with the
    key schema).
    """
    entry = Entry()

    entry.values = []
    for idx, val in enumerate(insertRequest.values):
        kind = key_format[idx]
        if kind is EXACT:
            entry.values.append(int(val.exact, 0))
        elif kind is TERNARY:
            ternary = Entry.Ternary()
            ternary.value = int(val.ternary.value, 0)
            ternary.mask = int(val.ternary.mask, 0)
            entry.values.append(ternary)
        elif kind is LIST:
            ternary_list = []
            for t in val.ternary_list:
                ternary = Entry.Ternary()
                ternary.value = int(t.value, 0)
                ternary.mask = int(t.mask, 0)
                ternary_list.append(ternary)
            entry.values.append(ternary_list)
        elif kind is RANGE:
            # Renamed from "range" to avoid shadowing the builtin.
            value_range = Entry.Range()
            value_range.first = int(val.range.first, 0)
            value_range.last = int(val.range.last, 0)
            entry.values.append(value_range)
        elif kind is RANGE_LIST:
            range_list = []
            for r in val.range_list:
                value_range = Entry.Range()
                value_range.first = int(r.first, 0)
                value_range.last = int(r.last, 0)
                range_list.append(value_range)
            entry.values.append(range_list)
        elif kind is LPM:
            lpm = Entry.LPM()
            lpm.value = int(val.prefix.value, 0)
            lpm.prefix_len = val.prefix.prefix_len
            entry.values.append(lpm)
        else:
            raise ValueError(f"Unsupported key format: {kind!r}")

    # Resolve the numeric action id back to the registered action object.
    entry.action = id_map[insertRequest.action]

    entry.params = [int(param_str, 0) for param_str in insertRequest.params]

    entry.priority = insertRequest.priority
    return entry

def table_insert_api(insertRequest: InsertRequest):
    """Insert the request into its target table, resolving the table id via id_map."""
    target_table = id_map[insertRequest.table]
    key_kinds = list(target_table.key.values())
    target_table.insert(insert_request_to_table_entry(insertRequest, key_kinds))

def json_obj_to_insert_request(json_obj):
    """Deserialize a JSON request dict into an InsertRequest object."""

    def parse_ternary(node):
        # Build a Value.Ternary from a {"value", "mask"} dict.
        ternary = InsertRequest.Value.Ternary()
        ternary.value = node["value"]
        ternary.mask = node["mask"]
        return ternary

    def parse_range(node):
        # Build a Value.Range from a {"first", "last"} dict.
        rng = InsertRequest.Value.Range()
        rng.first = node["first"]
        rng.last = node["last"]
        return rng

    insertRequest = InsertRequest()
    insertRequest.table = json_obj["table"]

    insertRequest.values = []
    for value_in_json in json_obj["values"]:
        value = InsertRequest.Value()
        value.exact = value_in_json["exact"]
        value.ternary = parse_ternary(value_in_json["ternary"])

        value.prefix = InsertRequest.Value.LPM()
        value.prefix.value = value_in_json["prefix"]["value"]
        value.prefix.prefix_len = value_in_json["prefix"]["prefix_len"]

        value.range = parse_range(value_in_json["range"])
        value.ternary_list = [parse_ternary(t) for t in value_in_json["ternary_list"]]
        value.range_list = [parse_range(r) for r in value_in_json["range_list"]]

        insertRequest.values.append(value)

    insertRequest.action = json_obj["action"]
    insertRequest.params = list(json_obj["params"])
    insertRequest.priority = json_obj["priority"]
    return insertRequest

class ModelTCPHandler(socketserver.BaseRequestHandler):
    """Handles one model-API TCP connection.

    Wire format for api_id 0 (table insert): 1 byte api id, 8 ASCII hex
    characters giving the JSON payload size, then the JSON payload.
    Replies with a single zero byte on success.
    """

    def _recv_exact(self, size):
        """Read exactly *size* bytes from the socket.

        socket.recv(n) may return fewer than n bytes on a stream socket,
        so the original single recv() calls could truncate the size
        header or a large JSON payload; loop until complete.
        """
        buf = bytearray()
        while len(buf) < size:
            chunk = self.request.recv(size - len(buf))
            if not chunk:
                raise ConnectionError("peer closed connection mid-message")
            buf.extend(chunk)
        return bytes(buf)

    def handle(self):
        api_id = self._recv_exact(1)[0]
        if api_id == 0:
            json_buf_size = int(self._recv_exact(8).decode("ascii"), 16)
            json_obj = json.loads(self._recv_exact(json_buf_size))
            insertRequest = json_obj_to_insert_request(json_obj)
            table_insert_api(insertRequest)
            self.request.sendall(b'\x00')

# Model API endpoint: clients connect over TCP on localhost:46500.
HOST, PORT = "localhost", 46500

# NOTE(review): this runs at import time — the module is meant to be
# executed as the server entry point, not imported by other modules.
with socketserver.TCPServer((HOST, PORT), ModelTCPHandler) as server:
    # Activate the server; this will keep running until you
    # interrupt the program with Ctrl-C
    server.serve_forever()
32 changes: 32 additions & 0 deletions dash-pipeline/python_model/__packet_in.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
from inspect import *
from bitarray import *
from bitarray.util import *

class packet_in:
def __init__(self):
self.reset()

def reset(self):
self.data = bitarray(endian="big")
self.index = 0

def set_data(self, data: bytes):
self.data.frombytes(data)

def extract(self, hdr_type):
hdr = hdr_type()
annotations = get_annotations(hdr_type)
for k in annotations:
width = annotations[k].__metadata__[0]
if self.index + width > len(self.data):
return None
value = ba2int(self.data[self.index : self.index + width])
setattr(hdr, k, value)
self.index += width
return hdr

def get_pkt_size(self):
return int(len(self.data) / 8)

def get_unparsed_slice(self):
return self.data[self.index:]
15 changes: 15 additions & 0 deletions dash-pipeline/python_model/__packet_out.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
from inspect import *
from bitarray import *
from bitarray.util import *

class packet_out:
def __init__(self):
self.data = bitarray(endian="big")

def emit(self, hdr):
if hdr:
annotations = get_annotations(type(hdr))
for k in annotations:
width = annotations[k].__metadata__[0]
value = getattr(hdr, k)
self.data.extend(int2ba(value, width))
10 changes: 10 additions & 0 deletions dash-pipeline/python_model/__sai_keys.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@

# String constants naming the annotation keys used in SAI code-generation
# metadata; each value is the literal key string looked up elsewhere.
API_NAME = "api_name"
SAI_KEY_NAME = "sai_key_name"
DEFAULT_ONLY = "default_only"
ISOBJECT = "isobject"
TYPE = "type"
ISRESOURCETYPE = "isresourcetype"
OBJECTS = "objects"
ISREADONLY = "isreadonly"
SKIPATTR = "skipattr"
13 changes: 13 additions & 0 deletions dash-pipeline/python_model/__standard_metadata.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
class standard_metadata_t:
    """Per-packet metadata; egress_spec selects the output port."""
    egress_spec : int

# Reserved port number meaning "drop this packet".
_DROP_PORT = 511

def mark_to_drop(standard_metadata: standard_metadata_t):
    """Flag the packet for dropping by routing it to the reserved drop port."""
    standard_metadata.egress_spec = _DROP_PORT

def is_dropped(standard_metadata: standard_metadata_t):
    """Return True when the packet has been routed to the drop port."""
    dropped = standard_metadata.egress_spec == _DROP_PORT
    return dropped

def NoAction():
    """No-op action; intentionally does nothing."""
    pass
Loading