Merge pull request #1 from jezsadler/padding
Explicit padding support
jezsadler authored Nov 13, 2023
2 parents 7d660c4 + abf7a4d commit 149bc30
Showing 2 changed files with 40 additions and 16 deletions.
23 changes: 12 additions & 11 deletions src/omlt/io/onnx_parser.py
@@ -368,16 +368,18 @@ def _consume_conv_nodes(self, node, next_nodes):
             raise ValueError(
                 f"{node} has multiple groups ({attr['group']}). This is not supported."
             )
-        if "pads" in attr and np.any(attr["pads"]):
-            raise ValueError(
-                f"{node} has non-zero pads ({attr['pads']}). This is not supported."
-            )
+        if "pads" in attr:
+            pads = attr["pads"]
+        else:
+            pads = 2*(len(input_output_size)-1)*[0]
 
         # generate new nodes for the node output
-        padding = 0
+        padding = [
+            pads[i] + pads[i + len(input_output_size)-1]
+            for i in range(len(input_output_size)-1)]
         output_size = [out_channels]
-        for w, k, s in zip(input_output_size[1:], kernel_shape, strides):
-            new_w = int((w - k + 2 * padding) / s) + 1
+        for w, k, s, p in zip(input_output_size[1:], kernel_shape, strides, padding):
+            new_w = int((w - k + p) / s) + 1
             output_size.append(new_w)
 
         activation = "linear"
@@ -401,6 +403,7 @@ def _consume_conv_nodes(self, node, next_nodes):
             output_size,
             strides,
             weights,
+            pads=pads,
             activation=activation,
             input_index_mapper=transformer,
         )
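For reference, the output-size arithmetic introduced above can be checked in isolation. The following standalone sketch (not part of the commit; the concrete sizes are invented) mirrors the parser's formula `new_w = int((w - k + p) / s) + 1`, where the per-dimension padding `p` sums the ONNX begin and end pad values:

```python
# Standalone check of the padded output-size formula used in _consume_conv_nodes.
# ONNX stores pads as all "begin" values followed by all "end" values,
# so the total padding for spatial dimension i is pads[i] + pads[i + n_spatial].
input_output_size = [3, 28, 28]   # channels, rows, cols (illustrative)
kernel_shape = [5, 5]
strides = [1, 1]
pads = [2, 2, 2, 2]               # two pixels of padding on every side
out_channels = 16

n_spatial = len(input_output_size) - 1
padding = [pads[i] + pads[i + n_spatial] for i in range(n_spatial)]

output_size = [out_channels]
for w, k, s, p in zip(input_output_size[1:], kernel_shape, strides, padding):
    output_size.append(int((w - k + p) / s) + 1)

print(output_size)  # [16, 28, 28] -- "same"-style padding for a 5x5 kernel at stride 1
```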
@@ -467,17 +470,14 @@ def _consume_pool_nodes(self, node, next_nodes):
         kernel_depth = attr["kernel_shape"][0]
         kernel_shape = attr["kernel_shape"][1:]
         strides = attr["strides"] if "strides" in attr else [1] * len(kernel_shape)
+        pads = attr["pads"] if "pads" in attr else None
 
         # check only kernel shape, stride, storage order are set
         # everything else is not supported
         if "dilations" in attr and attr["dilations"] != [1, 1]:
             raise ValueError(
                 f"{node.name} has non-identity dilations ({attr['dilations']}). This is not supported."
             )
-        if "pads" in attr and np.any(attr["pads"]):
-            raise ValueError(
-                f"{node.name} has non-zero pads ({attr['pads']}). This is not supported."
-            )
         if ("auto_pad" in attr) and (attr["auto_pad"] != "NOTSET"):
             raise ValueError(
                 f"{node.name} has autopad set ({attr['auto_pad']}). This is not supported."
@@ -519,6 +519,7 @@ def _consume_pool_nodes(self, node, next_nodes):
             pool_func_name,
             tuple(kernel_shape),
             kernel_depth,
+            pads=pads,
             activation=activation,
             input_index_mapper=transformer,
         )
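As background (not part of the diff), the `pads` value read by both `_consume_conv_nodes` and `_consume_pool_nodes` comes straight from the node's ONNX attributes. A node built as below carries it in the begin-values-then-end-values order defined by the ONNX operator spec; the node construction here is only illustrative:

```python
# Illustrative only: what the "pads" attribute looks like on an ONNX Conv node.
import onnx

node = onnx.helper.make_node(
    "Conv",
    inputs=["x", "w"],
    outputs=["y"],
    kernel_shape=[3, 3],
    strides=[1, 1],
    pads=[1, 1, 1, 1],  # begin values for each spatial axis, then end values
)
attrs = {a.name: onnx.helper.get_attribute_value(a) for a in node.attribute}
print(attrs["pads"])  # [1, 1, 1, 1]
```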
33 changes: 28 additions & 5 deletions src/omlt/neuralnet/layer.py
@@ -225,6 +225,8 @@ class Layer2D(Layer):
         the size of the output.
     strides : matrix-like
         stride of the kernel.
+    pads : matrix-like
+        Padding for the kernel. Given as [left, bottom, right, top]
     activation : str or None
         activation function name
     input_index_mapper : IndexMapper or None
@@ -237,6 +239,7 @@ def __init__(
         output_size,
         strides,
         *,
+        pads=None,
         activation=None,
         input_index_mapper=None,
     ):
@@ -247,12 +250,21 @@ def __init__(
             input_index_mapper=input_index_mapper,
         )
         self.__strides = strides
+        if pads is None:
+            self.__pads = [0, 0, 0, 0]
+        else:
+            self.__pads = pads
 
     @property
     def strides(self):
         """Return the stride of the layer"""
         return self.__strides
+
+    @property
+    def pads(self):
+        """Return the padding of the layer"""
+        return self.__pads
 
     @property
     def kernel_shape(self):
         """Return the shape of the kernel"""
@@ -280,12 +292,14 @@ def kernel_index_with_input_indexes(self, out_d, out_r, out_c):
         kernel_d = self.kernel_depth
         [kernel_r, kernel_c] = self.kernel_shape
         [rows_stride, cols_stride] = self.__strides
+        [pads_row, pads_col] = self.__pads[:2]
         start_in_d = 0
-        start_in_r = out_r * rows_stride
-        start_in_c = out_c * cols_stride
-        mapper = lambda x: x
-        if self.input_index_mapper is not None:
-            mapper = self.input_index_mapper
+        start_in_r = out_r * rows_stride - pads_row
+        start_in_c = out_c * cols_stride - pads_col
+        # Defined but never used:
+        # mapper = lambda x: x
+        # if self.input_index_mapper is not None:
+        #     mapper = self.input_index_mapper
 
         for k_d in range(kernel_d):
             for k_r in range(kernel_r):
@@ -299,6 +313,7 @@ def kernel_index_with_input_indexes(self, out_d, out_r, out_c):
                     # even though we loop over ALL kernel indexes.
                     if not all(
                         input_index[i] < self.input_size[i]
+                        and input_index[i] >= 0
                         for i in range(len(input_index))
                     ):
                         continue
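To see what the changed index math does, here is a self-contained mimic (depth dimension dropped, values invented; this is not the library code) of how a padded layer maps an output position back to input indexes and skips positions that fall in the padding region:

```python
# Standalone mimic of the updated kernel_index_with_input_indexes logic.
def kernel_index_with_input_indexes(out_r, out_c, kernel_shape, strides, pads, input_size):
    kernel_r, kernel_c = kernel_shape
    rows_stride, cols_stride = strides
    pads_row, pads_col = pads[:2]
    # padding shifts the window start to the "left" and "top" of the input
    start_in_r = out_r * rows_stride - pads_row
    start_in_c = out_c * cols_stride - pads_col
    for k_r in range(kernel_r):
        for k_c in range(kernel_c):
            input_index = (start_in_r + k_r, start_in_c + k_c)
            # skip indexes that land in the (implicit zero) padding region
            if not all(0 <= input_index[i] < input_size[i] for i in range(2)):
                continue
            yield (k_r, k_c), input_index

# Output pixel (0, 0) of a 3x3 kernel with one pixel of padding only
# touches the 2x2 corner of the input; padded positions are skipped.
print(list(kernel_index_with_input_indexes(
    0, 0, kernel_shape=(3, 3), strides=(1, 1), pads=[1, 1, 1, 1], input_size=(4, 4))))
# [((1, 1), (0, 0)), ((1, 2), (0, 1)), ((2, 1), (1, 0)), ((2, 2), (1, 1))]
```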
@@ -345,6 +360,8 @@ class PoolingLayer2D(Layer2D):
         the size of the output.
     strides : matrix-like
         stride of the kernel.
+    pads : matrix-like
+        Padding for the kernel. Given as [left, bottom, right, top]
     pool_func : str
         name of function used to pool values in a kernel to a single value.
     transpose : bool
@@ -367,13 +384,15 @@ def __init__(
         kernel_shape,
         kernel_depth,
         *,
+        pads=None,
         activation=None,
         input_index_mapper=None,
     ):
         super().__init__(
             input_size,
             output_size,
             strides,
+            pads=pads,
             activation=activation,
             input_index_mapper=input_index_mapper,
         )
@@ -421,6 +440,8 @@ class ConvLayer2D(Layer2D):
         stride of the cross-correlation kernel.
     kernel : matrix-like
         the cross-correlation kernel.
+    pads : matrix-like
+        Padding for the kernel. Given as [left, bottom, right, top]
     activation : str or None
         activation function name
     input_index_mapper : IndexMapper or None
@@ -434,13 +455,15 @@ def __init__(
         strides,
         kernel,
         *,
+        pads=None,
         activation=None,
         input_index_mapper=None,
     ):
         super().__init__(
             input_size,
             output_size,
             strides,
+            pads=pads,
             activation=activation,
             input_index_mapper=input_index_mapper,
         )
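Finally, a hedged usage sketch of the new keyword on the public classes. The shapes, the `[depth, rows, cols]` size convention, and the random placeholder kernel below are assumptions for illustration, not taken from the commit or its tests:

```python
# Illustrative construction of a ConvLayer2D with explicit padding.
import numpy as np
from omlt.neuralnet.layer import ConvLayer2D

kernel = np.random.uniform(size=(4, 1, 3, 3))  # assumed (out_channels, in_channels, rows, cols)
layer = ConvLayer2D(
    input_size=[1, 8, 8],
    output_size=[4, 8, 8],   # 3x3 kernel, stride 1, one pixel of padding per side
    strides=[1, 1],
    kernel=kernel,
    pads=[1, 1, 1, 1],
)
print(layer.pads)            # [1, 1, 1, 1]
```

Omitting `pads` keeps the previous behaviour, since `Layer2D` falls back to `[0, 0, 0, 0]` when the argument is `None`.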
