Add `LCNet` and fix model serialization (#10)

* Merge `apply_depthwise_separation_block`
* Add `LCNet`
* Speed up gpu test
* Cleanup
* Update `add_model_to_registry`
* Fix model serialization
Commit ce979af (1 parent: 7a0f2e7), showing 22 changed files with 776 additions and 201 deletions.
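Of these changes, the serialization fix is the one users see most directly: kimm models are ordinary Keras models, so the save/load round-trip through the standard Keras API is what such a fix would target. Below is a hedged sketch of that round-trip; the `LCNet100` constructor name and its `weights=None` argument are assumptions for illustration and are not shown in this diff.

```python
import keras
import kimm

# Constructor name and weights argument are assumed for illustration;
# substitute whichever kimm model class you actually use.
model = kimm.models.LCNet100(weights=None)
model.save("lcnet.keras")  # serialize architecture and weights
restored = keras.models.load_model("lcnet.keras")  # round-trip should succeed after the fix
```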
```diff
@@ -1,6 +1,9 @@
 from kimm.blocks.base_block import apply_activation
 from kimm.blocks.base_block import apply_conv2d_block
 from kimm.blocks.base_block import apply_se_block
+from kimm.blocks.depthwise_separation_block import (
+    apply_depthwise_separation_block,
+)
 from kimm.blocks.inverted_residual_block import apply_inverted_residual_block
 from kimm.blocks.transformer_block import apply_mlp_block
 from kimm.blocks.transformer_block import apply_transformer_block
```
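The three added lines re-export the merged block at the package level. Assuming this hunk is the package `__init__` (the import paths suggest so), downstream code can then pull the block straight from `kimm.blocks`:

```python
from kimm.blocks import apply_depthwise_separation_block
```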
@@ -0,0 +1,59 @@

```python
from keras import layers

from kimm.blocks.base_block import apply_conv2d_block
from kimm.blocks.base_block import apply_se_block


def apply_depthwise_separation_block(
    inputs,
    output_channels,
    depthwise_kernel_size=3,
    pointwise_kernel_size=1,
    strides=1,
    se_ratio=0.0,
    activation="swish",
    se_activation="relu",
    se_gate_activation="sigmoid",
    se_make_divisible_number=None,
    pw_activation=None,
    skip=True,
    bn_epsilon=1e-5,
    padding=None,
    name="depthwise_separation_block",
):
    input_channels = inputs.shape[-1]
    has_skip = skip and (strides == 1 and input_channels == output_channels)

    x = inputs
    x = apply_conv2d_block(
        x,
        kernel_size=depthwise_kernel_size,
        strides=strides,
        activation=activation,
        use_depthwise=True,
        bn_epsilon=bn_epsilon,
        padding=padding,
        name=f"{name}_conv_dw",
    )
    if se_ratio > 0:
        x = apply_se_block(
            x,
            se_ratio,
            activation=se_activation,
            gate_activation=se_gate_activation,
            make_divisible_number=se_make_divisible_number,
            name=f"{name}_se",
        )
    x = apply_conv2d_block(
        x,
        output_channels,
        pointwise_kernel_size,
        1,
        activation=pw_activation,
        bn_epsilon=bn_epsilon,
        padding=padding,
        name=f"{name}_conv_pw",
    )
    if has_skip:
        x = layers.Add()([x, inputs])
    return x
```
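For orientation, here is a minimal sketch of how such a block is typically wired into a functional Keras model; the input shape, channel counts, and block names are arbitrary choices for illustration, not values from this commit.

```python
import keras
from keras import layers

from kimm.blocks import apply_depthwise_separation_block

inputs = layers.Input(shape=(32, 32, 16))
# strides=1 and input channels == output_channels, so the residual skip is active
x = apply_depthwise_separation_block(inputs, output_channels=16, name="block1")
# strided block with squeeze-and-excitation: downsamples, so no skip connection
x = apply_depthwise_separation_block(
    x, output_channels=32, strides=2, se_ratio=0.25, name="block2"
)
model = keras.Model(inputs, x)
model.summary()
```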