From b568c58e3f2cbeeb3ac49e79c62c45a5f60ef60e Mon Sep 17 00:00:00 2001 From: Tanay Topac Date: Fri, 23 Oct 2020 12:12:28 -0700 Subject: [PATCH 01/24] fixed 2D kernels in Conv1D --- keras_resnet/models/_1d.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/keras_resnet/models/_1d.py b/keras_resnet/models/_1d.py index e7485c1..ec426b1 100644 --- a/keras_resnet/models/_1d.py +++ b/keras_resnet/models/_1d.py @@ -74,10 +74,10 @@ def __init__( numerical_names = [True] * len(blocks) x = keras.layers.ZeroPadding1D(padding=3, name="padding_conv1")(inputs) - x = keras.layers.Conv1D(64, (7, 7), strides=(2, 2), use_bias=False, name="conv1")(x) + x = keras.layers.Conv1D(64, 7, strides=2, use_bias=False, name="conv1")(x) x = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn_conv1")(x) x = keras.layers.Activation("relu", name="conv1_relu")(x) - x = keras.layers.MaxPooling1D((3, 3), strides=(2, 2), padding="same", name="pool1")(x) + x = keras.layers.MaxPooling1D(3, strides=2, padding="same", name="pool1")(x) features = 64 From f1de4268fddb488abbe7056674c3871eb2201f60 Mon Sep 17 00:00:00 2001 From: Tanay Topac Date: Fri, 23 Oct 2020 13:07:21 -0700 Subject: [PATCH 02/24] fixed the axis definition for 1D. 
--- keras_resnet/models/_1d.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/keras_resnet/models/_1d.py b/keras_resnet/models/_1d.py index ec426b1..1f468a1 100644 --- a/keras_resnet/models/_1d.py +++ b/keras_resnet/models/_1d.py @@ -66,7 +66,7 @@ def __init__( **kwargs ): if keras.backend.image_data_format() == "channels_last": - axis = 3 + axis = -1 else: axis = 1 From ccfe7766e7fe95cf60e0ccaf67ff5fd320361ba0 Mon Sep 17 00:00:00 2001 From: Tanay Topac Date: Tue, 27 Oct 2020 14:46:46 -0700 Subject: [PATCH 03/24] added call function --- keras_resnet/models/_1d.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/keras_resnet/models/_1d.py b/keras_resnet/models/_1d.py index 1f468a1..2b73902 100644 --- a/keras_resnet/models/_1d.py +++ b/keras_resnet/models/_1d.py @@ -103,11 +103,15 @@ def __init__( x = keras.layers.GlobalAveragePooling1D(name="pool5")(x) x = keras.layers.Dense(classes, activation="softmax", name="fc1000")(x) + self.ret = x super(ResNet1D, self).__init__(inputs=inputs, outputs=x, *args, **kwargs) else: # Else output each stages features + self.ret = outputs super(ResNet1D, self).__init__(inputs=inputs, outputs=outputs, *args, **kwargs) + def call(self): + return self.ret class ResNet1D18(ResNet1D): """ @@ -152,6 +156,9 @@ def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_b **kwargs ) + def call (self): + return super(ResNet1D18, self).call() + class ResNet1D34(ResNet1D): """ From ffb71f56360aa60fe5b7023e94954a30861e9991 Mon Sep 17 00:00:00 2001 From: Tanay Topac Date: Tue, 27 Oct 2020 14:58:06 -0700 Subject: [PATCH 04/24] added inputs --- keras_resnet/models/_1d.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/keras_resnet/models/_1d.py b/keras_resnet/models/_1d.py index 2b73902..f9d404a 100644 --- a/keras_resnet/models/_1d.py +++ b/keras_resnet/models/_1d.py @@ -110,7 +110,7 @@ def __init__( self.ret = outputs super(ResNet1D, self).__init__(inputs=inputs, outputs=outputs, 
*args, **kwargs) - def call(self): + def call(self, inputs): return self.ret class ResNet1D18(ResNet1D): @@ -157,7 +157,7 @@ def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_b ) def call (self): - return super(ResNet1D18, self).call() + return super(ResNet1D18, self).call(None) class ResNet1D34(ResNet1D): From 9d9e3b70d2ecaaf4dfa68850582b02b682d47ecb Mon Sep 17 00:00:00 2001 From: Tanay Topac Date: Tue, 27 Oct 2020 15:06:38 -0700 Subject: [PATCH 05/24] added inputs to call --- keras_resnet/models/_1d.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/keras_resnet/models/_1d.py b/keras_resnet/models/_1d.py index f9d404a..f4eda8e 100644 --- a/keras_resnet/models/_1d.py +++ b/keras_resnet/models/_1d.py @@ -156,8 +156,8 @@ def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_b **kwargs ) - def call (self): - return super(ResNet1D18, self).call(None) + def call (self, inputs): + return super(ResNet1D18, self).call(inputs) class ResNet1D34(ResNet1D): From 79c93c40070ba0cadad047d3328ce4680bf78789 Mon Sep 17 00:00:00 2001 From: Tanay Topac Date: Tue, 27 Oct 2020 15:07:40 -0700 Subject: [PATCH 06/24] added logging --- keras_resnet/models/_1d.py | 1 + 1 file changed, 1 insertion(+) diff --git a/keras_resnet/models/_1d.py b/keras_resnet/models/_1d.py index f4eda8e..d2c1136 100644 --- a/keras_resnet/models/_1d.py +++ b/keras_resnet/models/_1d.py @@ -157,6 +157,7 @@ def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_b ) def call (self, inputs): + print ("added inputs") return super(ResNet1D18, self).call(inputs) From d1e66c170796510ea04f0c385555f05449c522f1 Mon Sep 17 00:00:00 2001 From: Tanay Topac Date: Tue, 27 Oct 2020 15:08:59 -0700 Subject: [PATCH 07/24] changed logging --- keras_resnet/models/_1d.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/keras_resnet/models/_1d.py b/keras_resnet/models/_1d.py index d2c1136..1d6f3ea 100644 --- 
a/keras_resnet/models/_1d.py +++ b/keras_resnet/models/_1d.py @@ -144,7 +144,8 @@ class ResNet1D18(ResNet1D): def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_bn=False, *args, **kwargs): if blocks is None: blocks = [2, 2, 2, 2] - + + print ("added inputs") super(ResNet1D18, self).__init__( inputs, blocks, @@ -157,7 +158,6 @@ def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_b ) def call (self, inputs): - print ("added inputs") return super(ResNet1D18, self).call(inputs) From 1eb1c6999e30517ed6cc6d163915b69d1aaccb40 Mon Sep 17 00:00:00 2001 From: Tanay Topac Date: Tue, 27 Oct 2020 15:24:21 -0700 Subject: [PATCH 08/24] added args, kwargs --- keras_resnet/models/_1d.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/keras_resnet/models/_1d.py b/keras_resnet/models/_1d.py index 1d6f3ea..e5dc1fb 100644 --- a/keras_resnet/models/_1d.py +++ b/keras_resnet/models/_1d.py @@ -110,7 +110,8 @@ def __init__( self.ret = outputs super(ResNet1D, self).__init__(inputs=inputs, outputs=outputs, *args, **kwargs) - def call(self, inputs): + + def call(self, inputs, *args, **kwargs): return self.ret class ResNet1D18(ResNet1D): @@ -145,7 +146,6 @@ def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_b if blocks is None: blocks = [2, 2, 2, 2] - print ("added inputs") super(ResNet1D18, self).__init__( inputs, blocks, @@ -157,7 +157,7 @@ def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_b **kwargs ) - def call (self, inputs): + def call (self, inputs, *args, **kwargs): return super(ResNet1D18, self).call(inputs) From 880de079d30672aae1360b51c445add7a8bc190a Mon Sep 17 00:00:00 2001 From: Tanay Topac Date: Tue, 27 Oct 2020 15:33:12 -0700 Subject: [PATCH 09/24] modifications --- keras_resnet/models/_1d.py | 39 ++++++++++++++++++++++++++++++++++---- 1 file changed, 35 insertions(+), 4 deletions(-) diff --git a/keras_resnet/models/_1d.py 
b/keras_resnet/models/_1d.py index e5dc1fb..abfa62c 100644 --- a/keras_resnet/models/_1d.py +++ b/keras_resnet/models/_1d.py @@ -111,8 +111,27 @@ def __init__( super(ResNet1D, self).__init__(inputs=inputs, outputs=outputs, *args, **kwargs) - def call(self, inputs, *args, **kwargs): - return self.ret + def call(self, + inputs, + blocks, + block, + include_top=True, + classes=1000, + freeze_bn=True, + numerical_names=None, + *args, + **kwargs): + + mymodel = ResNet1D(inputs, + blocks, + block, + include_top, + classes, + freeze_bn, + *args, + **kwargs) + + return mymodel.ret class ResNet1D18(ResNet1D): """ @@ -157,8 +176,20 @@ def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_b **kwargs ) - def call (self, inputs, *args, **kwargs): - return super(ResNet1D18, self).call(inputs) + def call (self, inputs, blocks=None, include_top=True, classes=1000, freeze_bn=False, *args, **kwargs): + if blocks is None: + blocks = [2, 2, 2, 2] + + return super(ResNet1D18, self).call( + inputs, + blocks, + block=keras_resnet.blocks.basic_1d, + include_top=include_top, + classes=classes, + freeze_bn=freeze_bn, + *args, + **kwargs + ) class ResNet1D34(ResNet1D): From 5a0866c5f56081d2661c16f9d9df4466380ed45e Mon Sep 17 00:00:00 2001 From: Tanay Topac Date: Tue, 27 Oct 2020 15:39:57 -0700 Subject: [PATCH 10/24] removed extras --- keras_resnet/models/_1d.py | 15 +++------------ 1 file changed, 3 insertions(+), 12 deletions(-) diff --git a/keras_resnet/models/_1d.py b/keras_resnet/models/_1d.py index abfa62c..c681ad5 100644 --- a/keras_resnet/models/_1d.py +++ b/keras_resnet/models/_1d.py @@ -111,27 +111,18 @@ def __init__( super(ResNet1D, self).__init__(inputs=inputs, outputs=outputs, *args, **kwargs) - def call(self, + def call( + self, inputs, blocks, block, include_top=True, classes=1000, freeze_bn=True, - numerical_names=None, *args, **kwargs): - mymodel = ResNet1D(inputs, - blocks, - block, - include_top, - classes, - freeze_bn, - *args, - **kwargs) - - 
return mymodel.ret + return self.ret class ResNet1D18(ResNet1D): """ From abc22f473eb95e24a601302e93db306add7ab124 Mon Sep 17 00:00:00 2001 From: Tanay Topac Date: Tue, 27 Oct 2020 15:43:46 -0700 Subject: [PATCH 11/24] debug log --- keras_resnet/models/_1d.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/keras_resnet/models/_1d.py b/keras_resnet/models/_1d.py index c681ad5..443c05f 100644 --- a/keras_resnet/models/_1d.py +++ b/keras_resnet/models/_1d.py @@ -121,6 +121,8 @@ def call( freeze_bn=True, *args, **kwargs): + + print (inputs) return self.ret From aa9bdf15fb93ae03cf9080592867e090f7287cec Mon Sep 17 00:00:00 2001 From: Tanay Topac Date: Tue, 27 Oct 2020 15:45:20 -0700 Subject: [PATCH 12/24] remove args kwargs --- keras_resnet/models/_1d.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/keras_resnet/models/_1d.py b/keras_resnet/models/_1d.py index 443c05f..9ea5dee 100644 --- a/keras_resnet/models/_1d.py +++ b/keras_resnet/models/_1d.py @@ -118,9 +118,8 @@ def call( block, include_top=True, classes=1000, - freeze_bn=True, - *args, - **kwargs): + freeze_bn=True + ): print (inputs) @@ -179,9 +178,7 @@ def call (self, inputs, blocks=None, include_top=True, classes=1000, freeze_bn=F block=keras_resnet.blocks.basic_1d, include_top=include_top, classes=classes, - freeze_bn=freeze_bn, - *args, - **kwargs + freeze_bn=freeze_bn ) From 08da51723682b7ea0342cce2f17d45c70ff58e22 Mon Sep 17 00:00:00 2001 From: Tanay Topac Date: Wed, 28 Oct 2020 10:54:34 -0700 Subject: [PATCH 13/24] added call method to _1d.py --- keras_resnet/layers/_batch_normalization.py | 2 - keras_resnet/models/_1d.py | 98 ++++++++++----------- 2 files changed, 45 insertions(+), 55 deletions(-) diff --git a/keras_resnet/layers/_batch_normalization.py b/keras_resnet/layers/_batch_normalization.py index 1946a3f..6c7aae4 100644 --- a/keras_resnet/layers/_batch_normalization.py +++ b/keras_resnet/layers/_batch_normalization.py @@ -1,6 +1,4 @@ import keras - - class 
BatchNormalization(keras.layers.BatchNormalization): """ Identical to keras.layers.BatchNormalization, but adds the option to freeze parameters. diff --git a/keras_resnet/models/_1d.py b/keras_resnet/models/_1d.py index 9ea5dee..5f376fe 100644 --- a/keras_resnet/models/_1d.py +++ b/keras_resnet/models/_1d.py @@ -55,7 +55,6 @@ class ResNet1D(keras.Model): """ def __init__( self, - inputs, blocks, block, include_top=True, @@ -65,65 +64,69 @@ def __init__( *args, **kwargs ): + super(ResNet1D, self).__init__(*args, **kwargs) + self.classes = classes + self.include_top = include_top + if keras.backend.image_data_format() == "channels_last": axis = -1 else: axis = 1 if numerical_names is None: - numerical_names = [True] * len(blocks) + self.numerical_names = [True] * len(blocks) - x = keras.layers.ZeroPadding1D(padding=3, name="padding_conv1")(inputs) - x = keras.layers.Conv1D(64, 7, strides=2, use_bias=False, name="conv1")(x) - x = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn_conv1")(x) - x = keras.layers.Activation("relu", name="conv1_relu")(x) - x = keras.layers.MaxPooling1D(3, strides=2, padding="same", name="pool1")(x) + self.zeropad1 = keras.layers.ZeroPadding1D(padding=3, name="padding_conv1") + self.conv1 = keras.layers.Conv1D(64, 7, strides=2, use_bias=False, name="conv1") + self.rnbn1 = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn_conv1") + self.relu1 = keras.layers.Activation("relu", name="conv1_relu") + self.maxpool1 = keras.layers.MaxPooling1D(3, strides=2, padding="same", name="pool1") features = 64 - - outputs = [] + self.layers = [] + self.iterations = [] for stage_id, iterations in enumerate(blocks): + self.iterations.append(iterations) for block_id in range(iterations): - x = block( + self.layers.append (block( features, stage_id, block_id, numerical_name=(block_id > 0 and numerical_names[stage_id]), freeze_bn=freeze_bn - )(x) - + )) features *= 2 - 
outputs.append(x) - - if include_top: - assert classes > 0 - - x = keras.layers.GlobalAveragePooling1D(name="pool5")(x) - x = keras.layers.Dense(classes, activation="softmax", name="fc1000")(x) - - self.ret = x - super(ResNet1D, self).__init__(inputs=inputs, outputs=x, *args, **kwargs) - else: - # Else output each stages features - self.ret = outputs - super(ResNet1D, self).__init__(inputs=inputs, outputs=outputs, *args, **kwargs) + self.glopoollast = keras.layers.GlobalAveragePooling1D(name="pool5") + self.fclast = keras.layers.Dense(classes, activation="softmax", name="fc1000") - def call( - self, - inputs, - blocks, - block, - include_top=True, - classes=1000, - freeze_bn=True - ): - - print (inputs) + def call(self, inputs): + x = self.zeropad1(inputs) + x = self.conv1(x) + x = self.rnbn1(x) + x = self.relu1(x) + x = self.maxpool1(x) + + outputs = [] + i = 0 + while len(self.layers) > 0: + x = self.layers[0](x) + self.layers.pop() + i += 1 + if i == self.iterations[0]: + outputs.append(x) + self.iterations.pop() + i = 0 + + if self.include_top: + assert self.classes > 0 + x = self.glopoollast(x) + return self.fclast(x) + else: + return outputs - return self.ret class ResNet1D18(ResNet1D): """ @@ -145,7 +148,7 @@ class ResNet1D18(ResNet1D): >>> import keras_resnet.models - >>> shape, classes = (224, 224, 3), 1000 + >>> shape, classes = (224, 3), 1000 >>> x = keras.layers.Input(shape) @@ -153,12 +156,11 @@ class ResNet1D18(ResNet1D): >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) """ - def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_bn=False, *args, **kwargs): + def __init__(self, blocks=None, include_top=True, classes=1000, freeze_bn=False, *args, **kwargs): if blocks is None: blocks = [2, 2, 2, 2] super(ResNet1D18, self).__init__( - inputs, blocks, block=keras_resnet.blocks.basic_1d, include_top=include_top, @@ -168,18 +170,8 @@ def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_b 
**kwargs ) - def call (self, inputs, blocks=None, include_top=True, classes=1000, freeze_bn=False, *args, **kwargs): - if blocks is None: - blocks = [2, 2, 2, 2] - - return super(ResNet1D18, self).call( - inputs, - blocks, - block=keras_resnet.blocks.basic_1d, - include_top=include_top, - classes=classes, - freeze_bn=freeze_bn - ) + def call (self, inputs): + return super(ResNet1D18, self).call(inputs) class ResNet1D34(ResNet1D): From 35510fa9cec7f51c6b28778e62b8028b2a476e57 Mon Sep 17 00:00:00 2001 From: Tanay Topac Date: Wed, 28 Oct 2020 12:04:31 -0700 Subject: [PATCH 14/24] fix name conflict --- keras_resnet/models/_1d.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/keras_resnet/models/_1d.py b/keras_resnet/models/_1d.py index 5f376fe..66eec55 100644 --- a/keras_resnet/models/_1d.py +++ b/keras_resnet/models/_1d.py @@ -83,13 +83,13 @@ def __init__( self.maxpool1 = keras.layers.MaxPooling1D(3, strides=2, padding="same", name="pool1") features = 64 - self.layers = [] + self.lyrs = [] self.iterations = [] for stage_id, iterations in enumerate(blocks): self.iterations.append(iterations) for block_id in range(iterations): - self.layers.append (block( + self.lyrs.append (block( features, stage_id, block_id, @@ -111,9 +111,9 @@ def call(self, inputs): outputs = [] i = 0 - while len(self.layers) > 0: - x = self.layers[0](x) - self.layers.pop() + while len(self.lyrs) > 0: + x = self.lyrs[0](x) + self.lyrs.pop() i += 1 if i == self.iterations[0]: outputs.append(x) From 9f2bc5f2e4b0573580a75a3571f9c11bbb5bfdd6 Mon Sep 17 00:00:00 2001 From: Tanay Topac Date: Wed, 28 Oct 2020 15:00:16 -0700 Subject: [PATCH 15/24] added call method to _1d. 
explicit definition of inputs in call method for BN --- keras_resnet/layers/_batch_normalization.py | 4 +- keras_resnet/models/_1d.py | 106 ++++++-------------- 2 files changed, 30 insertions(+), 80 deletions(-) diff --git a/keras_resnet/layers/_batch_normalization.py b/keras_resnet/layers/_batch_normalization.py index 6c7aae4..5cce406 100644 --- a/keras_resnet/layers/_batch_normalization.py +++ b/keras_resnet/layers/_batch_normalization.py @@ -10,11 +10,11 @@ def __init__(self, freeze, *args, **kwargs): # set to non-trainable if freeze is true self.trainable = not self.freeze - def call(self, *args, **kwargs): + def call(self, inputs, *args, **kwargs): # Force test mode if frozen, otherwise use default behaviour (i.e., training=None). if self.freeze: kwargs['training'] = False - return super(BatchNormalization, self).call(*args, **kwargs) + return super(BatchNormalization, self).call(inputs, *args, **kwargs) def get_config(self): config = super(BatchNormalization, self).get_config() diff --git a/keras_resnet/models/_1d.py b/keras_resnet/models/_1d.py index 66eec55..ca19528 100644 --- a/keras_resnet/models/_1d.py +++ b/keras_resnet/models/_1d.py @@ -62,7 +62,7 @@ def __init__( freeze_bn=True, numerical_names=None, *args, - **kwargs + **kwargs, ): super(ResNet1D, self).__init__(*args, **kwargs) self.classes = classes @@ -74,7 +74,7 @@ def __init__( axis = 1 if numerical_names is None: - self.numerical_names = [True] * len(blocks) + numerical_names = [True] * len(blocks) self.zeropad1 = keras.layers.ZeroPadding1D(padding=3, name="padding_conv1") self.conv1 = keras.layers.Conv1D(64, 7, strides=2, use_bias=False, name="conv1") @@ -84,10 +84,10 @@ def __init__( features = 64 self.lyrs = [] - self.iterations = [] + self.iters = [] for stage_id, iterations in enumerate(blocks): - self.iterations.append(iterations) + self.iters.append(iterations) for block_id in range(iterations): self.lyrs.append (block( features, @@ -115,9 +115,9 @@ def call(self, inputs): x = 
self.lyrs[0](x) self.lyrs.pop() i += 1 - if i == self.iterations[0]: + if i == self.iters[0]: outputs.append(x) - self.iterations.pop() + self.iters.pop() i = 0 if self.include_top: @@ -126,7 +126,7 @@ def call(self, inputs): return self.fclast(x) else: return outputs - + class ResNet1D18(ResNet1D): """ @@ -155,7 +155,8 @@ class ResNet1D18(ResNet1D): >>> model = keras_resnet.models.ResNet18(x, classes=classes) >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) - """ + """ + def __init__(self, blocks=None, include_top=True, classes=1000, freeze_bn=False, *args, **kwargs): if blocks is None: blocks = [2, 2, 2, 2] @@ -177,37 +178,24 @@ def call (self, inputs): class ResNet1D34(ResNet1D): """ Constructs a `keras.models.Model` according to the ResNet34 specifications. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) - :param blocks: the network’s residual architecture - :param include_top: if true, includes classification layers - :param classes: number of classes to classify (include_top must be true) - :param freeze_bn: if true, freezes BatchNormalization layers (ie. 
no updates are done in these layers) - :return model: ResNet model with encoding output (if `include_top=False`) or classification output (if `include_top=True`) - Usage: - >>> import keras_resnet.models - >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) - >>> model = keras_resnet.models.ResNet34(x, classes=classes) - >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) """ - def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_bn=False, *args, **kwargs): + def __init__(self, blocks=None, include_top=True, classes=1000, freeze_bn=False, *args, **kwargs): if blocks is None: blocks = [3, 4, 6, 3] super(ResNet1D34, self).__init__( - inputs, blocks, block=keras_resnet.blocks.basic_1d, include_top=include_top, @@ -217,43 +205,33 @@ def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_b **kwargs ) + def call (self, inputs): + return super(ResNet1D34, self).call(inputs) + class ResNet1D50(ResNet1D): """ Constructs a `keras.models.Model` according to the ResNet50 specifications. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) - :param blocks: the network’s residual architecture - :param include_top: if true, includes classification layers - :param classes: number of classes to classify (include_top must be true) - :param freeze_bn: if true, freezes BatchNormalization layers (ie. 
no updates are done in these layers) - :return model: ResNet model with encoding output (if `include_top=False`) or classification output (if `include_top=True`) - Usage: - >>> import keras_resnet.models - >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) - >>> model = keras_resnet.models.ResNet50(x) - >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) """ - def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_bn=False, *args, **kwargs): + def __init__(self, blocks=None, include_top=True, classes=1000, freeze_bn=False, *args, **kwargs): if blocks is None: blocks = [3, 4, 6, 3] numerical_names = [False, False, False, False] super(ResNet1D50, self).__init__( - inputs, blocks, numerical_names=numerical_names, block=keras_resnet.blocks.bottleneck_1d, @@ -264,43 +242,33 @@ def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_b **kwargs ) + def call (self, inputs): + return super(ResNet1D50, self).call(inputs) + class ResNet1D101(ResNet1D): """ Constructs a `keras.models.Model` according to the ResNet101 specifications. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) - :param blocks: the network’s residual architecture - :param include_top: if true, includes classification layers - :param classes: number of classes to classify (include_top must be true) - :param freeze_bn: if true, freezes BatchNormalization layers (ie. 
no updates are done in these layers) - :return model: ResNet model with encoding output (if `include_top=False`) or classification output (if `include_top=True`) - Usage: - >>> import keras_resnet.models - >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) - >>> model = keras_resnet.models.ResNet101(x, classes=classes) - >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) """ - def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_bn=False, *args, **kwargs): + def __init__(self, blocks=None, include_top=True, classes=1000, freeze_bn=False, *args, **kwargs): if blocks is None: blocks = [3, 4, 23, 3] numerical_names = [False, True, True, False] super(ResNet1D101, self).__init__( - inputs, blocks, numerical_names=numerical_names, block=keras_resnet.blocks.bottleneck_1d, @@ -311,43 +279,33 @@ def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_b **kwargs ) + def call (self, inputs): + return super(ResNet1D101, self).call(inputs) + class ResNet1D152(ResNet1D): """ Constructs a `keras.models.Model` according to the ResNet152 specifications. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) - :param blocks: the network’s residual architecture - :param include_top: if true, includes classification layers - :param classes: number of classes to classify (include_top must be true) - :param freeze_bn: if true, freezes BatchNormalization layers (ie. 
no updates are done in these layers) - :return model: ResNet model with encoding output (if `include_top=False`) or classification output (if `include_top=True`) - Usage: - >>> import keras_resnet.models - >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) - >>> model = keras_resnet.models.ResNet152(x, classes=classes) - >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) """ - def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_bn=False, *args, **kwargs): + def __init__(self, blocks=None, include_top=True, classes=1000, freeze_bn=False, *args, **kwargs): if blocks is None: blocks = [3, 8, 36, 3] numerical_names = [False, True, True, False] super(ResNet1D152, self).__init__( - inputs, blocks, numerical_names=numerical_names, block=keras_resnet.blocks.bottleneck_1d, @@ -358,43 +316,32 @@ def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_b **kwargs ) + def call (self, inputs): + return super(ResNet1D152, self).call(inputs) class ResNet1D200(ResNet1D): """ Constructs a `keras.models.Model` according to the ResNet200 specifications. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) - :param blocks: the network’s residual architecture - :param include_top: if true, includes classification layers - :param classes: number of classes to classify (include_top must be true) - :param freeze_bn: if true, freezes BatchNormalization layers (ie. 
no updates are done in these layers) - :return model: ResNet model with encoding output (if `include_top=False`) or classification output (if `include_top=True`) - Usage: - >>> import keras_resnet.models - >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) - >>> model = keras_resnet.models.ResNet200(x, classes=classes) - >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) """ - def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_bn=False, *args, **kwargs): + def __init__(self, blocks=None, include_top=True, classes=1000, freeze_bn=False, *args, **kwargs): if blocks is None: blocks = [3, 24, 36, 3] numerical_names = [False, True, True, False] super(ResNet1D200, self).__init__( - inputs, blocks, numerical_names=numerical_names, block=keras_resnet.blocks.bottleneck_1d, @@ -404,3 +351,6 @@ def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_b *args, **kwargs ) + + def call (self, inputs): + return super(ResNet1D200, self).call(inputs) \ No newline at end of file From 7f1e94658148c45682ba8f6057b96b5363dc0437 Mon Sep 17 00:00:00 2001 From: Tanay Topac Date: Wed, 28 Oct 2020 15:26:51 -0700 Subject: [PATCH 16/24] fixed invalid *args call due to missing inputs in custom bn layer --- keras_resnet/layers/_batch_normalization.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/keras_resnet/layers/_batch_normalization.py b/keras_resnet/layers/_batch_normalization.py index 1946a3f..d547c38 100644 --- a/keras_resnet/layers/_batch_normalization.py +++ b/keras_resnet/layers/_batch_normalization.py @@ -12,11 +12,11 @@ def __init__(self, freeze, *args, **kwargs): # set to non-trainable if freeze is true self.trainable = not self.freeze - def call(self, *args, **kwargs): + def call(self, inputs, *args, **kwargs): # Force test mode if frozen, otherwise use default behaviour (i.e., training=None). 
if self.freeze: kwargs['training'] = False - return super(BatchNormalization, self).call(*args, **kwargs) + return super(BatchNormalization, self).call(inputs, *args, **kwargs) def get_config(self): config = super(BatchNormalization, self).get_config() From f20537ba5ae9af064175a16eb824fd72182cadd2 Mon Sep 17 00:00:00 2001 From: Tanay Topac Date: Fri, 11 Dec 2020 17:14:35 -0800 Subject: [PATCH 17/24] replaced keras with tensorflow.keras for Apple M1 support. --- README.rst | 8 +- keras_resnet/benchmarks/__init__.py | 24 ++--- keras_resnet/blocks/_1d.py | 43 ++++---- keras_resnet/blocks/_2d.py | 42 ++++---- keras_resnet/blocks/_3d.py | 42 ++++---- keras_resnet/blocks/_time_distributed_2d.py | 56 +++++----- keras_resnet/classifiers/_2d.py | 68 ++++++------ keras_resnet/layers/_batch_normalization.py | 6 +- keras_resnet/metrics.py | 6 +- keras_resnet/models/_1d.py | 72 +++++++------ keras_resnet/models/_2d.py | 64 +++++------ keras_resnet/models/_3d.py | 66 ++++++------ keras_resnet/models/_feature_pyramid_2d.py | 46 ++++---- keras_resnet/models/_time_distributed_2d.py | 112 ++++++++++---------- tests/conftest.py | 4 +- tools/export-caffe-weights.py | 0 tools/import-caffe-weights.py | 10 +- 17 files changed, 335 insertions(+), 334 deletions(-) mode change 100755 => 100644 tools/export-caffe-weights.py mode change 100755 => 100644 tools/import-caffe-weights.py diff --git a/README.rst b/README.rst index ac8ed7b..c38352a 100644 --- a/README.rst +++ b/README.rst @@ -12,21 +12,21 @@ A tantalizing preview of Keras-ResNet simplicity: .. 
code-block:: python - >>> import keras + >>> import tensorflow.keras >>> import keras_resnet.models >>> shape, classes = (32, 32, 3), 10 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet50(x, classes=classes) >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) - >>> (training_x, training_y), (_, _) = keras.datasets.cifar10.load_data() + >>> (training_x, training_y), (_, _) = tensorflow.keras.datasets.cifar10.load_data() - >>> training_y = keras.utils.np_utils.to_categorical(training_y) + >>> training_y = tensorflow.keras.utils.np_utils.to_categorical(training_y) >>> model.fit(training_x, training_y) diff --git a/keras_resnet/benchmarks/__init__.py b/keras_resnet/benchmarks/__init__.py index 415aa58..5d79fd3 100644 --- a/keras_resnet/benchmarks/__init__.py +++ b/keras_resnet/benchmarks/__init__.py @@ -1,8 +1,8 @@ import os.path import click -import keras -import keras.preprocessing.image +import tensorflow.keras +import tensorflow.tensorflow.keras.preprocessing.image import numpy import pkg_resources import sklearn.model_selection @@ -12,9 +12,9 @@ import keras_resnet.models _benchmarks = { - "CIFAR-10": keras.datasets.cifar10, - "CIFAR-100": keras.datasets.cifar100, - "MNIST": keras.datasets.mnist + "CIFAR-10": tensorflow.tensorflow.keras.datasets.cifar10, + "CIFAR-100": tensorflow.tensorflow.keras.datasets.cifar100, + "MNIST": tensorflow.tensorflow.keras.datasets.mnist } @@ -65,7 +65,7 @@ def __main__(benchmark, device, name): session = tensorflow.Session(config=configuration) - keras.backend.set_session(session) + tensorflow.keras.backend.set_session(session) (training_x, training_y), _ = _benchmarks[benchmark].load_data() @@ -74,14 +74,14 @@ def __main__(benchmark, device, name): if benchmark is "MNIST": training_x = numpy.expand_dims(training_x, -1) - training_y = keras.utils.np_utils.to_categorical(training_y) + training_y = 
tensorflow.keras.utils.np_utils.to_categorical(training_y) training_x, validation_x, training_y, validation_y = sklearn.model_selection.train_test_split( training_x, training_y ) - generator = keras.preprocessing.image.ImageDataGenerator( + generator = tensorflow.keras.preprocessing.image.ImageDataGenerator( horizontal_flip=True ) @@ -93,7 +93,7 @@ def __main__(benchmark, device, name): batch_size=256 ) - validation_data = keras.preprocessing.image.ImageDataGenerator() + validation_data = tensorflow.keras.preprocessing.image.ImageDataGenerator() validation_data.fit(validation_x) @@ -105,7 +105,7 @@ def __main__(benchmark, device, name): shape, classes = training_x.shape[1:], training_y.shape[-1] - x = keras.layers.Input(shape) + x = tensorflow.keras.layers.Input(shape) model = _names[name](inputs=x, classes=classes) @@ -120,13 +120,13 @@ def __main__(benchmark, device, name): pathname = pkg_resources.resource_filename("keras_resnet", pathname) - model_checkpoint = keras.callbacks.ModelCheckpoint(pathname) + model_checkpoint = tensorflow.keras.callbacks.ModelCheckpoint(pathname) pathname = os.path.join("data", "logs", benchmark, "{}.csv".format(name)) pathname = pkg_resources.resource_filename("keras_resnet", pathname) - csv_logger = keras.callbacks.CSVLogger(pathname) + csv_logger = tensorflow.keras.callbacks.CSVLogger(pathname) callbacks = [ csv_logger, diff --git a/keras_resnet/blocks/_1d.py b/keras_resnet/blocks/_1d.py index f220396..1386a78 100644 --- a/keras_resnet/blocks/_1d.py +++ b/keras_resnet/blocks/_1d.py @@ -6,9 +6,8 @@ This module implements a number of popular one-dimensional residual blocks. 
""" - -import keras.layers -import keras.regularizers +import tensorflow.tensorflow.keras.layers +import tensorflow.tensorflow.keras.regularizers import keras_resnet.layers @@ -55,7 +54,7 @@ def basic_1d( else: stride = 2 - if keras.backend.image_data_format() == "channels_last": + if tensorflow.keras.backend.image_data_format() == "channels_last": axis = -1 else: axis = 1 @@ -68,12 +67,12 @@ def basic_1d( stage_char = str(stage + 2) def f(x): - y = keras.layers.ZeroPadding1D( + y = tensorflow.keras.layers.ZeroPadding1D( padding=1, name="padding{}{}_branch2a".format(stage_char, block_char) )(x) - y = keras.layers.Conv1D( + y = tensorflow.keras.layers.Conv1D( filters, kernel_size, strides=stride, @@ -89,17 +88,17 @@ def f(x): name="bn{}{}_branch2a".format(stage_char, block_char) )(y) - y = keras.layers.Activation( + y = tensorflow.keras.layers.Activation( "relu", name="res{}{}_branch2a_relu".format(stage_char, block_char) )(y) - y = keras.layers.ZeroPadding1D( + y = tensorflow.keras.layers.ZeroPadding1D( padding=1, name="padding{}{}_branch2b".format(stage_char, block_char) )(y) - y = keras.layers.Conv1D( + y = tensorflow.keras.layers.Conv1D( filters, kernel_size, use_bias=False, @@ -115,7 +114,7 @@ def f(x): )(y) if block == 0: - shortcut = keras.layers.Conv1D( + shortcut = tensorflow.keras.layers.Conv1D( filters, 1, strides=stride, @@ -133,11 +132,11 @@ def f(x): else: shortcut = x - y = keras.layers.Add( + y = tensorflow.keras.layers.Add( name="res{}{}".format(stage_char, block_char) )([y, shortcut]) - y = keras.layers.Activation( + y = tensorflow.keras.layers.Activation( "relu", name="res{}{}_relu".format(stage_char, block_char) )(y) @@ -182,7 +181,7 @@ def bottleneck_1d( if stride is None: stride = 1 if block != 0 or stage == 0 else 2 - if keras.backend.image_data_format() == "channels_last": + if tensorflow.keras.backend.image_data_format() == "channels_last": axis = -1 else: axis = 1 @@ -195,7 +194,7 @@ def bottleneck_1d( stage_char = str(stage + 2) def f(x): 
- y = keras.layers.Conv1D( + y = tensorflow.keras.layers.Conv1D( filters, 1, strides=stride, @@ -211,17 +210,17 @@ def f(x): name="bn{}{}_branch2a".format(stage_char, block_char) )(y) - y = keras.layers.Activation( + y = tensorflow.keras.layers.Activation( "relu", name="res{}{}_branch2a_relu".format(stage_char, block_char) )(y) - y = keras.layers.ZeroPadding1D( + y = tensorflow.keras.layers.ZeroPadding1D( padding=1, name="padding{}{}_branch2b".format(stage_char, block_char) )(y) - y = keras.layers.Conv1D( + y = tensorflow.keras.layers.Conv1D( filters, kernel_size, use_bias=False, @@ -236,12 +235,12 @@ def f(x): name="bn{}{}_branch2b".format(stage_char, block_char) )(y) - y = keras.layers.Activation( + y = tensorflow.keras.layers.Activation( "relu", name="res{}{}_branch2b_relu".format(stage_char, block_char) )(y) - y = keras.layers.Conv1D( + y = tensorflow.keras.layers.Conv1D( filters * 4, 1, use_bias=False, @@ -257,7 +256,7 @@ def f(x): )(y) if block == 0: - shortcut = keras.layers.Conv1D( + shortcut = tensorflow.keras.layers.Conv1D( filters * 4, 1, strides=stride, @@ -275,11 +274,11 @@ def f(x): else: shortcut = x - y = keras.layers.Add( + y = tensorflow.keras.layers.Add( name="res{}{}".format(stage_char, block_char) )([y, shortcut]) - y = keras.layers.Activation( + y = tensorflow.keras.layers.Activation( "relu", name="res{}{}_relu".format(stage_char, block_char) )(y) diff --git a/keras_resnet/blocks/_2d.py b/keras_resnet/blocks/_2d.py index ce774fb..366bba3 100644 --- a/keras_resnet/blocks/_2d.py +++ b/keras_resnet/blocks/_2d.py @@ -7,8 +7,8 @@ This module implements a number of popular two-dimensional residual blocks. 
""" -import keras.layers -import keras.regularizers +import tensorflow.tensorflow.keras.layers +import tensorflow.tensorflow.keras.regularizers import keras_resnet.layers @@ -55,7 +55,7 @@ def basic_2d( else: stride = 2 - if keras.backend.image_data_format() == "channels_last": + if tensorflow.keras.backend.image_data_format() == "channels_last": axis = 3 else: axis = 1 @@ -68,30 +68,30 @@ def basic_2d( stage_char = str(stage + 2) def f(x): - y = keras.layers.ZeroPadding2D(padding=1, name="padding{}{}_branch2a".format(stage_char, block_char))(x) + y = tensorflow.keras.layers.ZeroPadding2D(padding=1, name="padding{}{}_branch2a".format(stage_char, block_char))(x) - y = keras.layers.Conv2D(filters, kernel_size, strides=stride, use_bias=False, name="res{}{}_branch2a".format(stage_char, block_char), **parameters)(y) + y = tensorflow.keras.layers.Conv2D(filters, kernel_size, strides=stride, use_bias=False, name="res{}{}_branch2a".format(stage_char, block_char), **parameters)(y) y = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch2a".format(stage_char, block_char))(y) - y = keras.layers.Activation("relu", name="res{}{}_branch2a_relu".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.Activation("relu", name="res{}{}_branch2a_relu".format(stage_char, block_char))(y) - y = keras.layers.ZeroPadding2D(padding=1, name="padding{}{}_branch2b".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.ZeroPadding2D(padding=1, name="padding{}{}_branch2b".format(stage_char, block_char))(y) - y = keras.layers.Conv2D(filters, kernel_size, use_bias=False, name="res{}{}_branch2b".format(stage_char, block_char), **parameters)(y) + y = tensorflow.keras.layers.Conv2D(filters, kernel_size, use_bias=False, name="res{}{}_branch2b".format(stage_char, block_char), **parameters)(y) y = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch2b".format(stage_char, block_char))(y) 
if block == 0: - shortcut = keras.layers.Conv2D(filters, (1, 1), strides=stride, use_bias=False, name="res{}{}_branch1".format(stage_char, block_char), **parameters)(x) + shortcut = tensorflow.keras.layers.Conv2D(filters, (1, 1), strides=stride, use_bias=False, name="res{}{}_branch1".format(stage_char, block_char), **parameters)(x) shortcut = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch1".format(stage_char, block_char))(shortcut) else: shortcut = x - y = keras.layers.Add(name="res{}{}".format(stage_char, block_char))([y, shortcut]) + y = tensorflow.keras.layers.Add(name="res{}{}".format(stage_char, block_char))([y, shortcut]) - y = keras.layers.Activation("relu", name="res{}{}_relu".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.Activation("relu", name="res{}{}_relu".format(stage_char, block_char))(y) return y @@ -136,7 +136,7 @@ def bottleneck_2d( else: stride = 2 - if keras.backend.image_data_format() == "channels_last": + if tensorflow.keras.backend.image_data_format() == "channels_last": axis = 3 else: axis = 1 @@ -149,34 +149,34 @@ def bottleneck_2d( stage_char = str(stage + 2) def f(x): - y = keras.layers.Conv2D(filters, (1, 1), strides=stride, use_bias=False, name="res{}{}_branch2a".format(stage_char, block_char), **parameters)(x) + y = tensorflow.keras.layers.Conv2D(filters, (1, 1), strides=stride, use_bias=False, name="res{}{}_branch2a".format(stage_char, block_char), **parameters)(x) y = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch2a".format(stage_char, block_char))(y) - y = keras.layers.Activation("relu", name="res{}{}_branch2a_relu".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.Activation("relu", name="res{}{}_branch2a_relu".format(stage_char, block_char))(y) - y = keras.layers.ZeroPadding2D(padding=1, name="padding{}{}_branch2b".format(stage_char, block_char))(y) + y = 
tensorflow.keras.layers.ZeroPadding2D(padding=1, name="padding{}{}_branch2b".format(stage_char, block_char))(y) - y = keras.layers.Conv2D(filters, kernel_size, use_bias=False, name="res{}{}_branch2b".format(stage_char, block_char), **parameters)(y) + y = tensorflow.keras.layers.Conv2D(filters, kernel_size, use_bias=False, name="res{}{}_branch2b".format(stage_char, block_char), **parameters)(y) y = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch2b".format(stage_char, block_char))(y) - y = keras.layers.Activation("relu", name="res{}{}_branch2b_relu".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.Activation("relu", name="res{}{}_branch2b_relu".format(stage_char, block_char))(y) - y = keras.layers.Conv2D(filters * 4, (1, 1), use_bias=False, name="res{}{}_branch2c".format(stage_char, block_char), **parameters)(y) + y = tensorflow.keras.layers.Conv2D(filters * 4, (1, 1), use_bias=False, name="res{}{}_branch2c".format(stage_char, block_char), **parameters)(y) y = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch2c".format(stage_char, block_char))(y) if block == 0: - shortcut = keras.layers.Conv2D(filters * 4, (1, 1), strides=stride, use_bias=False, name="res{}{}_branch1".format(stage_char, block_char), **parameters)(x) + shortcut = tensorflow.keras.layers.Conv2D(filters * 4, (1, 1), strides=stride, use_bias=False, name="res{}{}_branch1".format(stage_char, block_char), **parameters)(x) shortcut = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch1".format(stage_char, block_char))(shortcut) else: shortcut = x - y = keras.layers.Add(name="res{}{}".format(stage_char, block_char))([y, shortcut]) + y = tensorflow.keras.layers.Add(name="res{}{}".format(stage_char, block_char))([y, shortcut]) - y = keras.layers.Activation("relu", name="res{}{}_relu".format(stage_char, block_char))(y) + y = 
tensorflow.keras.layers.Activation("relu", name="res{}{}_relu".format(stage_char, block_char))(y) return y diff --git a/keras_resnet/blocks/_3d.py b/keras_resnet/blocks/_3d.py index ba11f6e..941a2e1 100644 --- a/keras_resnet/blocks/_3d.py +++ b/keras_resnet/blocks/_3d.py @@ -7,8 +7,8 @@ This module implements a number of popular three-dimensional residual blocks. """ -import keras.layers -import keras.regularizers +import tensorflow.keras.layers +import tensorflow.keras.regularizers import keras_resnet.layers @@ -55,7 +55,7 @@ def basic_3d( else: stride = 2 - if keras.backend.image_data_format() == "channels_last": + if tensorflow.keras.backend.image_data_format() == "channels_last": axis = 3 else: axis = 1 @@ -68,30 +68,30 @@ def basic_3d( stage_char = str(stage + 2) def f(x): - y = keras.layers.ZeroPadding3D(padding=1, name="padding{}{}_branch2a".format(stage_char, block_char))(x) + y = tensorflow.keras.layers.ZeroPadding3D(padding=1, name="padding{}{}_branch2a".format(stage_char, block_char))(x) - y = keras.layers.Conv3D(filters, kernel_size, strides=stride, use_bias=False, name="res{}{}_branch2a".format(stage_char, block_char), **parameters)(y) + y = tensorflow.keras.layers.Conv3D(filters, kernel_size, strides=stride, use_bias=False, name="res{}{}_branch2a".format(stage_char, block_char), **parameters)(y) y = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch2a".format(stage_char, block_char))(y) - y = keras.layers.Activation("relu", name="res{}{}_branch2a_relu".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.Activation("relu", name="res{}{}_branch2a_relu".format(stage_char, block_char))(y) - y = keras.layers.ZeroPadding3D(padding=1, name="padding{}{}_branch2b".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.ZeroPadding3D(padding=1, name="padding{}{}_branch2b".format(stage_char, block_char))(y) - y = keras.layers.Conv3D(filters, kernel_size, 
use_bias=False, name="res{}{}_branch2b".format(stage_char, block_char), **parameters)(y) + y = tensorflow.keras.layers.Conv3D(filters, kernel_size, use_bias=False, name="res{}{}_branch2b".format(stage_char, block_char), **parameters)(y) y = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch2b".format(stage_char, block_char))(y) if block == 0: - shortcut = keras.layers.Conv3D(filters, (1, 1), strides=stride, use_bias=False, name="res{}{}_branch1".format(stage_char, block_char), **parameters)(x) + shortcut = tensorflow.keras.layers.Conv3D(filters, (1, 1), strides=stride, use_bias=False, name="res{}{}_branch1".format(stage_char, block_char), **parameters)(x) shortcut = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch1".format(stage_char, block_char))(shortcut) else: shortcut = x - y = keras.layers.Add(name="res{}{}".format(stage_char, block_char))([y, shortcut]) + y = tensorflow.keras.layers.Add(name="res{}{}".format(stage_char, block_char))([y, shortcut]) - y = keras.layers.Activation("relu", name="res{}{}_relu".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.Activation("relu", name="res{}{}_relu".format(stage_char, block_char))(y) return y @@ -136,7 +136,7 @@ def bottleneck_3d( else: stride = 2 - if keras.backend.image_data_format() == "channels_last": + if tensorflow.keras.backend.image_data_format() == "channels_last": axis = 3 else: axis = 1 @@ -149,34 +149,34 @@ def bottleneck_3d( stage_char = str(stage + 2) def f(x): - y = keras.layers.Conv3D(filters, (1, 1), strides=stride, use_bias=False, name="res{}{}_branch2a".format(stage_char, block_char), **parameters)(x) + y = tensorflow.keras.layers.Conv3D(filters, (1, 1), strides=stride, use_bias=False, name="res{}{}_branch2a".format(stage_char, block_char), **parameters)(x) y = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, 
name="bn{}{}_branch2a".format(stage_char, block_char))(y) - y = keras.layers.Activation("relu", name="res{}{}_branch2a_relu".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.Activation("relu", name="res{}{}_branch2a_relu".format(stage_char, block_char))(y) - y = keras.layers.ZeroPadding3D(padding=1, name="padding{}{}_branch2b".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.ZeroPadding3D(padding=1, name="padding{}{}_branch2b".format(stage_char, block_char))(y) - y = keras.layers.Conv3D(filters, kernel_size, use_bias=False, name="res{}{}_branch2b".format(stage_char, block_char), **parameters)(y) + y = tensorflow.keras.layers.Conv3D(filters, kernel_size, use_bias=False, name="res{}{}_branch2b".format(stage_char, block_char), **parameters)(y) y = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch2b".format(stage_char, block_char))(y) - y = keras.layers.Activation("relu", name="res{}{}_branch2b_relu".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.Activation("relu", name="res{}{}_branch2b_relu".format(stage_char, block_char))(y) - y = keras.layers.Conv3D(filters * 4, (1, 1), use_bias=False, name="res{}{}_branch2c".format(stage_char, block_char), **parameters)(y) + y = tensorflow.keras.layers.Conv3D(filters * 4, (1, 1), use_bias=False, name="res{}{}_branch2c".format(stage_char, block_char), **parameters)(y) y = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch2c".format(stage_char, block_char))(y) if block == 0: - shortcut = keras.layers.Conv3D(filters * 4, (1, 1), strides=stride, use_bias=False, name="res{}{}_branch1".format(stage_char, block_char), **parameters)(x) + shortcut = tensorflow.keras.layers.Conv3D(filters * 4, (1, 1), strides=stride, use_bias=False, name="res{}{}_branch1".format(stage_char, block_char), **parameters)(x) shortcut = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, 
freeze=freeze_bn, name="bn{}{}_branch1".format(stage_char, block_char))(shortcut) else: shortcut = x - y = keras.layers.Add(name="res{}{}".format(stage_char, block_char))([y, shortcut]) + y = tensorflow.keras.layers.Add(name="res{}{}".format(stage_char, block_char))([y, shortcut]) - y = keras.layers.Activation("relu", name="res{}{}_relu".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.Activation("relu", name="res{}{}_relu".format(stage_char, block_char))(y) return y diff --git a/keras_resnet/blocks/_time_distributed_2d.py b/keras_resnet/blocks/_time_distributed_2d.py index 31f5bcd..0162e46 100644 --- a/keras_resnet/blocks/_time_distributed_2d.py +++ b/keras_resnet/blocks/_time_distributed_2d.py @@ -7,8 +7,8 @@ This module implements a number of popular time distributed two-dimensional residual blocks. """ -import keras.layers -import keras.regularizers +import tensorflow.keras.layers +import tensorflow.keras.regularizers import keras_resnet.layers @@ -57,7 +57,7 @@ def time_distributed_basic_2d( else: stride = 2 - if keras.backend.image_data_format() == "channels_last": + if tensorflow.keras.backend.image_data_format() == "channels_last": axis = 3 else: axis = 1 @@ -70,30 +70,30 @@ def time_distributed_basic_2d( stage_char = str(stage + 2) def f(x): - y = keras.layers.TimeDistributed(keras.layers.ZeroPadding2D(padding=1), name="padding{}{}_branch2a".format(stage_char, block_char))(x) + y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.ZeroPadding2D(padding=1), name="padding{}{}_branch2a".format(stage_char, block_char))(x) - y = keras.layers.TimeDistributed(keras.layers.Conv2D(filters, kernel_size, strides=stride, use_bias=False, **parameters), name="res{}{}_branch2a".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Conv2D(filters, kernel_size, strides=stride, use_bias=False, **parameters), name="res{}{}_branch2a".format(stage_char, block_char))(y) - y = 
keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch2a".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch2a".format(stage_char, block_char))(y) - y = keras.layers.TimeDistributed(keras.layers.Activation("relu"), name="res{}{}_branch2a_relu".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Activation("relu"), name="res{}{}_branch2a_relu".format(stage_char, block_char))(y) - y = keras.layers.TimeDistributed(keras.layers.ZeroPadding2D(padding=1), name="padding{}{}_branch2b".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.ZeroPadding2D(padding=1), name="padding{}{}_branch2b".format(stage_char, block_char))(y) - y = keras.layers.TimeDistributed(keras.layers.Conv2D(filters, kernel_size, use_bias=False, **parameters), name="res{}{}_branch2b".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Conv2D(filters, kernel_size, use_bias=False, **parameters), name="res{}{}_branch2b".format(stage_char, block_char))(y) - y = keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch2b".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch2b".format(stage_char, block_char))(y) if block == 0: - shortcut = keras.layers.TimeDistributed(keras.layers.Conv2D(filters, (1, 1), strides=stride, use_bias=False, **parameters), name="res{}{}_branch1".format(stage_char, block_char))(x) + shortcut = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Conv2D(filters, (1, 1), strides=stride, use_bias=False, 
**parameters), name="res{}{}_branch1".format(stage_char, block_char))(x) - shortcut = keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch1".format(stage_char, block_char))(shortcut) + shortcut = tensorflow.keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch1".format(stage_char, block_char))(shortcut) else: shortcut = x - y = keras.layers.Add(name="res{}{}".format(stage_char, block_char))([y, shortcut]) + y = tensorflow.keras.layers.Add(name="res{}{}".format(stage_char, block_char))([y, shortcut]) - y = keras.layers.TimeDistributed(keras.layers.Activation("relu"), name="res{}{}_relu".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Activation("relu"), name="res{}{}_relu".format(stage_char, block_char))(y) return y @@ -140,7 +140,7 @@ def time_distributed_bottleneck_2d( else: stride = 2 - if keras.backend.image_data_format() == "channels_last": + if tensorflow.keras.backend.image_data_format() == "channels_last": axis = 3 else: axis = 1 @@ -153,34 +153,34 @@ def time_distributed_bottleneck_2d( stage_char = str(stage + 2) def f(x): - y = keras.layers.TimeDistributed(keras.layers.Conv2D(filters, (1, 1), strides=stride, use_bias=False, **parameters), name="res{}{}_branch2a".format(stage_char, block_char))(x) + y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Conv2D(filters, (1, 1), strides=stride, use_bias=False, **parameters), name="res{}{}_branch2a".format(stage_char, block_char))(x) - y = keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch2a".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch2a".format(stage_char, 
block_char))(y) - y = keras.layers.TimeDistributed(keras.layers.Activation("relu"), name="res{}{}_branch2a_relu".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Activation("relu"), name="res{}{}_branch2a_relu".format(stage_char, block_char))(y) - y = keras.layers.TimeDistributed(keras.layers.ZeroPadding2D(padding=1), name="padding{}{}_branch2b".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.ZeroPadding2D(padding=1), name="padding{}{}_branch2b".format(stage_char, block_char))(y) - y = keras.layers.TimeDistributed(keras.layers.Conv2D(filters, kernel_size, use_bias=False, **parameters), name="res{}{}_branch2b".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Conv2D(filters, kernel_size, use_bias=False, **parameters), name="res{}{}_branch2b".format(stage_char, block_char))(y) - y = keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch2b".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch2b".format(stage_char, block_char))(y) - y = keras.layers.TimeDistributed(keras.layers.Activation("relu"), name="res{}{}_branch2b_relu".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Activation("relu"), name="res{}{}_branch2b_relu".format(stage_char, block_char))(y) - y = keras.layers.TimeDistributed(keras.layers.Conv2D(filters * 4, (1, 1), use_bias=False, **parameters), name="res{}{}_branch2c".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Conv2D(filters * 4, (1, 1), use_bias=False, **parameters), name="res{}{}_branch2c".format(stage_char, block_char))(y) - y = 
keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch2c".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch2c".format(stage_char, block_char))(y) if block == 0: - shortcut = keras.layers.TimeDistributed(keras.layers.Conv2D(filters * 4, (1, 1), strides=stride, use_bias=False, **parameters), name="res{}{}_branch1".format(stage_char, block_char))(x) + shortcut = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Conv2D(filters * 4, (1, 1), strides=stride, use_bias=False, **parameters), name="res{}{}_branch1".format(stage_char, block_char))(x) - shortcut = keras.layers.TimeDistributed(keras.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch1".format(stage_char, block_char))(shortcut) + shortcut = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch1".format(stage_char, block_char))(shortcut) else: shortcut = x - y = keras.layers.Add(name="res{}{}".format(stage_char, block_char))([y, shortcut]) + y = tensorflow.keras.layers.Add(name="res{}{}".format(stage_char, block_char))([y, shortcut]) - y = keras.layers.TimeDistributed(keras.layers.Activation("relu"), name="res{}{}_relu".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Activation("relu"), name="res{}{}_relu".format(stage_char, block_char))(y) return y diff --git a/keras_resnet/classifiers/_2d.py b/keras_resnet/classifiers/_2d.py index b9ac7a3..17496af 100644 --- a/keras_resnet/classifiers/_2d.py +++ b/keras_resnet/classifiers/_2d.py @@ -7,19 +7,19 @@ This module implements popular residual two-dimensional classifiers. 
""" -import keras.backend -import keras.layers -import keras.models -import keras.regularizers +import tensorflow.tensorflow.keras.backend +import tensorflow.tensorflow.keras.layers +import tensorflow.tensorflow.keras.models +import tensorflow.tensorflow.keras.regularizers import keras_resnet.models -class ResNet18(keras.models.Model): +class ResNet18(tensorflow.keras.models.Model): """ A :class:`ResNet18 ` object. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) Usage: @@ -27,7 +27,7 @@ class ResNet18(keras.models.Model): >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> model = keras_resnet.classifiers.ResNet18(x) @@ -36,18 +36,18 @@ class ResNet18(keras.models.Model): def __init__(self, inputs, classes): outputs = keras_resnet.models.ResNet18(inputs) - outputs = keras.layers.Flatten()(outputs.output) + outputs = tensorflow.keras.layers.Flatten()(outputs.output) - outputs = keras.layers.Dense(classes, activation="softmax")(outputs) + outputs = tensorflow.keras.layers.Dense(classes, activation="softmax")(outputs) super(ResNet18, self).__init__(inputs, outputs) -class ResNet34(keras.models.Model): +class ResNet34(tensorflow.keras.models.Model): """ A :class:`ResNet34 ` object. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. 
an instance of `tensorflow.keras.layers.Input`) Usage: @@ -55,7 +55,7 @@ class ResNet34(keras.models.Model): >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> model = keras_resnet.classifiers.ResNet34(x) @@ -64,18 +64,18 @@ class ResNet34(keras.models.Model): def __init__(self, inputs, classes): outputs = keras_resnet.models.ResNet34(inputs) - outputs = keras.layers.Flatten()(outputs.output) + outputs = tensorflow.keras.layers.Flatten()(outputs.output) - outputs = keras.layers.Dense(classes, activation="softmax")(outputs) + outputs = tensorflow.keras.layers.Dense(classes, activation="softmax")(outputs) super(ResNet34, self).__init__(inputs, outputs) -class ResNet50(keras.models.Model): +class ResNet50(tensorflow.keras.models.Model): """ A :class:`ResNet50 ` object. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) Usage: @@ -83,7 +83,7 @@ class ResNet50(keras.models.Model): >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> model = keras_resnet.classifiers.ResNet50(x) @@ -92,18 +92,18 @@ class ResNet50(keras.models.Model): def __init__(self, inputs, classes): outputs = keras_resnet.models.ResNet50(inputs) - outputs = keras.layers.Flatten()(outputs.output) + outputs = tensorflow.keras.layers.Flatten()(outputs.output) - outputs = keras.layers.Dense(classes, activation="softmax")(outputs) + outputs = tensorflow.keras.layers.Dense(classes, activation="softmax")(outputs) super(ResNet50, self).__init__(inputs, outputs) -class ResNet101(keras.models.Model): +class ResNet101(tensorflow.keras.models.Model): """ A :class:`ResNet101 ` object. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. 
an instance of `tensorflow.keras.layers.Input`) Usage: @@ -111,7 +111,7 @@ class ResNet101(keras.models.Model): >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> model = keras_resnet.classifiers.ResNet101(x) @@ -120,18 +120,18 @@ class ResNet101(keras.models.Model): def __init__(self, inputs, classes): outputs = keras_resnet.models.ResNet101(inputs) - outputs = keras.layers.Flatten()(outputs.output) + outputs = tensorflow.keras.layers.Flatten()(outputs.output) - outputs = keras.layers.Dense(classes, activation="softmax")(outputs) + outputs = tensorflow.keras.layers.Dense(classes, activation="softmax")(outputs) super(ResNet101, self).__init__(inputs, outputs) -class ResNet152(keras.models.Model): +class ResNet152(tensorflow.keras.models.Model): """ A :class:`ResNet152 ` object. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) Usage: @@ -139,7 +139,7 @@ class ResNet152(keras.models.Model): >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> model = keras_resnet.classifiers.ResNet152(x) @@ -149,18 +149,18 @@ class ResNet152(keras.models.Model): def __init__(self, inputs, classes): outputs = keras_resnet.models.ResNet152(inputs) - outputs = keras.layers.Flatten()(outputs.output) + outputs = tensorflow.keras.layers.Flatten()(outputs.output) - outputs = keras.layers.Dense(classes, activation="softmax")(outputs) + outputs = tensorflow.keras.layers.Dense(classes, activation="softmax")(outputs) super(ResNet152, self).__init__(inputs, outputs) -class ResNet200(keras.models.Model): +class ResNet200(tensorflow.keras.models.Model): """ A :class:`ResNet200 ` object. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. 
an instance of `tensorflow.keras.layers.Input`) Usage: @@ -168,7 +168,7 @@ class ResNet200(keras.models.Model): >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> model = keras_resnet.classifiers.ResNet200(x) @@ -177,8 +177,8 @@ class ResNet200(keras.models.Model): def __init__(self, inputs, classes): outputs = keras_resnet.models.ResNet200(inputs) - outputs = keras.layers.Flatten()(outputs.output) + outputs = tensorflow.keras.layers.Flatten()(outputs.output) - outputs = keras.layers.Dense(classes, activation="softmax")(outputs) + outputs = tensorflow.keras.layers.Dense(classes, activation="softmax")(outputs) super(ResNet200, self).__init__(inputs, outputs) diff --git a/keras_resnet/layers/_batch_normalization.py b/keras_resnet/layers/_batch_normalization.py index 5cce406..b8367b4 100644 --- a/keras_resnet/layers/_batch_normalization.py +++ b/keras_resnet/layers/_batch_normalization.py @@ -1,7 +1,7 @@ -import keras -class BatchNormalization(keras.layers.BatchNormalization): +import tensorflow.keras +class BatchNormalization(tensorflow.keras.layers.BatchNormalization): """ - Identical to keras.layers.BatchNormalization, but adds the option to freeze parameters. + Identical to tensorflow.keras.layers.BatchNormalization, but adds the option to freeze parameters. 
""" def __init__(self, freeze, *args, **kwargs): self.freeze = freeze diff --git a/keras_resnet/metrics.py b/keras_resnet/metrics.py index f642619..a40c246 100644 --- a/keras_resnet/metrics.py +++ b/keras_resnet/metrics.py @@ -1,9 +1,9 @@ -import keras.metrics +import tensorflow.tensorflow.keras.metrics def top_1_categorical_error(y_true, y_pred): - return 1.0 - keras.metrics.top_k_categorical_accuracy(y_true, y_pred, 1) + return 1.0 - tensorflow.tensorflow.keras.metrics.top_k_categorical_accuracy(y_true, y_pred, 1) def top_5_categorical_error(y_true, y_pred): - return 1.0 - keras.metrics.top_k_categorical_accuracy(y_true, y_pred, 5) + return 1.0 - tensorflow.tensorflow.keras.metrics.top_k_categorical_accuracy(y_true, y_pred, 5) diff --git a/keras_resnet/models/_1d.py b/keras_resnet/models/_1d.py index ca19528..6f345c9 100644 --- a/keras_resnet/models/_1d.py +++ b/keras_resnet/models/_1d.py @@ -7,20 +7,20 @@ This module implements popular one-dimensional residual models. """ -import keras.backend -import keras.layers -import keras.models -import keras.regularizers +import tensorflow.tensorflow.keras.backend +import tensorflow.tensorflow.keras.layers +import tensorflow.tensorflow.keras.models +import tensorflow.tensorflow.keras.regularizers import keras_resnet.blocks import keras_resnet.layers -class ResNet1D(keras.Model): +class ResNet1D(tensorflow.keras.Model): """ - Constructs a `keras.models.Model` object using the given block count. + Constructs a `tensorflow.keras.models.Model` object using the given block count. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. 
an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture @@ -43,7 +43,7 @@ class ResNet1D(keras.Model): >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> blocks = [2, 2, 2, 2] @@ -68,7 +68,7 @@ def __init__( self.classes = classes self.include_top = include_top - if keras.backend.image_data_format() == "channels_last": + if tensorflow.keras.backend.image_data_format() == "channels_last": axis = -1 else: axis = 1 @@ -76,11 +76,11 @@ def __init__( if numerical_names is None: numerical_names = [True] * len(blocks) - self.zeropad1 = keras.layers.ZeroPadding1D(padding=3, name="padding_conv1") - self.conv1 = keras.layers.Conv1D(64, 7, strides=2, use_bias=False, name="conv1") + self.zeropad1 = tensorflow.keras.layers.ZeroPadding1D(padding=3, name="padding_conv1") + self.conv1 = tensorflow.keras.layers.Conv1D(64, 7, strides=2, use_bias=False, name="conv1") self.rnbn1 = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn_conv1") - self.relu1 = keras.layers.Activation("relu", name="conv1_relu") - self.maxpool1 = keras.layers.MaxPooling1D(3, strides=2, padding="same", name="pool1") + self.relu1 = tensorflow.keras.layers.Activation("relu", name="conv1_relu") + self.maxpool1 = tensorflow.keras.layers.MaxPooling1D(3, strides=2, padding="same", name="pool1") features = 64 self.lyrs = [] @@ -89,17 +89,19 @@ def __init__( for stage_id, iterations in enumerate(blocks): self.iters.append(iterations) for block_id in range(iterations): - self.lyrs.append (block( + lyr = block( features, stage_id, block_id, numerical_name=(block_id > 0 and numerical_names[stage_id]), freeze_bn=freeze_bn - )) + ) + self.lyrs.append (lyr) + self.layers.append (lyr) features *= 2 - self.glopoollast = keras.layers.GlobalAveragePooling1D(name="pool5") - self.fclast = keras.layers.Dense(classes, activation="softmax", name="fc1000") + self.glopoollast = 
tensorflow.keras.layers.GlobalAveragePooling1D(name="pool5") + self.fclast = tensorflow.keras.layers.Dense(classes, activation="softmax", name="fc1000") def call(self, inputs): @@ -130,9 +132,9 @@ def call(self, inputs): class ResNet1D18(ResNet1D): """ - Constructs a `keras.models.Model` according to the ResNet18 specifications. + Constructs a `tensorflow.keras.models.Model` according to the ResNet18 specifications. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture @@ -150,7 +152,7 @@ class ResNet1D18(ResNet1D): >>> shape, classes = (224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet18(x, classes=classes) @@ -177,8 +179,8 @@ def call (self, inputs): class ResNet1D34(ResNet1D): """ - Constructs a `keras.models.Model` according to the ResNet34 specifications. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + Constructs a `tensorflow.keras.models.Model` according to the ResNet34 specifications. + :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture :param include_top: if true, includes classification layers :param classes: number of classes to classify (include_top must be true) @@ -187,7 +189,7 @@ class ResNet1D34(ResNet1D): Usage: >>> import keras_resnet.models >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet34(x, classes=classes) >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) """ @@ -211,8 +213,8 @@ def call (self, inputs): class ResNet1D50(ResNet1D): """ - Constructs a `keras.models.Model` according to the ResNet50 specifications. - :param inputs: input tensor (e.g. 
an instance of `keras.layers.Input`) + Constructs a `tensorflow.keras.models.Model` according to the ResNet50 specifications. + :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture :param include_top: if true, includes classification layers :param classes: number of classes to classify (include_top must be true) @@ -221,7 +223,7 @@ class ResNet1D50(ResNet1D): Usage: >>> import keras_resnet.models >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet50(x) >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) """ @@ -248,8 +250,8 @@ def call (self, inputs): class ResNet1D101(ResNet1D): """ - Constructs a `keras.models.Model` according to the ResNet101 specifications. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + Constructs a `tensorflow.keras.models.Model` according to the ResNet101 specifications. + :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture :param include_top: if true, includes classification layers :param classes: number of classes to classify (include_top must be true) @@ -258,7 +260,7 @@ class ResNet1D101(ResNet1D): Usage: >>> import keras_resnet.models >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet101(x, classes=classes) >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) """ @@ -285,8 +287,8 @@ def call (self, inputs): class ResNet1D152(ResNet1D): """ - Constructs a `keras.models.Model` according to the ResNet152 specifications. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + Constructs a `tensorflow.keras.models.Model` according to the ResNet152 specifications. 
+ :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture :param include_top: if true, includes classification layers :param classes: number of classes to classify (include_top must be true) @@ -295,7 +297,7 @@ class ResNet1D152(ResNet1D): Usage: >>> import keras_resnet.models >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet152(x, classes=classes) >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) """ @@ -321,8 +323,8 @@ def call (self, inputs): class ResNet1D200(ResNet1D): """ - Constructs a `keras.models.Model` according to the ResNet200 specifications. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + Constructs a `tensorflow.keras.models.Model` according to the ResNet200 specifications. + :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture :param include_top: if true, includes classification layers :param classes: number of classes to classify (include_top must be true) @@ -331,7 +333,7 @@ class ResNet1D200(ResNet1D): Usage: >>> import keras_resnet.models >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet200(x, classes=classes) >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) """ diff --git a/keras_resnet/models/_2d.py b/keras_resnet/models/_2d.py index 8d8f874..998c0e0 100644 --- a/keras_resnet/models/_2d.py +++ b/keras_resnet/models/_2d.py @@ -7,20 +7,20 @@ This module implements popular two-dimensional residual models. 
""" -import keras.backend -import keras.layers -import keras.models -import keras.regularizers +import tensorflow.tensorflow.keras.backend +import tensorflow.tensorflow.keras.layers +import tensorflow.tensorflow.keras.models +import tensorflow.tensorflow.keras.regularizers import keras_resnet.blocks import keras_resnet.layers -class ResNet2D(keras.Model): +class ResNet2D(tensorflow.keras.Model): """ - Constructs a `keras.models.Model` object using the given block count. + Constructs a `tensorflow.keras.models.Model` object using the given block count. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture @@ -43,7 +43,7 @@ class ResNet2D(keras.Model): >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> blocks = [2, 2, 2, 2] @@ -65,7 +65,7 @@ def __init__( *args, **kwargs ): - if keras.backend.image_data_format() == "channels_last": + if tensorflow.keras.backend.image_data_format() == "channels_last": axis = 3 else: axis = 1 @@ -73,10 +73,10 @@ def __init__( if numerical_names is None: numerical_names = [True] * len(blocks) - x = keras.layers.Conv2D(64, (7, 7), strides=(2, 2), use_bias=False, name="conv1", padding="same")(inputs) + x = tensorflow.keras.layers.Conv2D(64, (7, 7), strides=(2, 2), use_bias=False, name="conv1", padding="same")(inputs) x = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn_conv1")(x) - x = keras.layers.Activation("relu", name="conv1_relu")(x) - x = keras.layers.MaxPooling2D((3, 3), strides=(2, 2), padding="same", name="pool1")(x) + x = tensorflow.keras.layers.Activation("relu", name="conv1_relu")(x) + x = tensorflow.keras.layers.MaxPooling2D((3, 3), strides=(2, 2), padding="same", name="pool1")(x) features = 64 @@ -99,8 +99,8 @@ def __init__( if include_top: assert classes > 
0 - x = keras.layers.GlobalAveragePooling2D(name="pool5")(x) - x = keras.layers.Dense(classes, activation="softmax", name="fc1000")(x) + x = tensorflow.keras.layers.GlobalAveragePooling2D(name="pool5")(x) + x = tensorflow.keras.layers.Dense(classes, activation="softmax", name="fc1000")(x) super(ResNet2D, self).__init__(inputs=inputs, outputs=x, *args, **kwargs) else: @@ -110,9 +110,9 @@ def __init__( class ResNet2D18(ResNet2D): """ - Constructs a `keras.models.Model` according to the ResNet18 specifications. + Constructs a `tensorflow.keras.models.Model` according to the ResNet18 specifications. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture @@ -130,7 +130,7 @@ class ResNet2D18(ResNet2D): >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet18(x, classes=classes) @@ -154,9 +154,9 @@ def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_b class ResNet2D34(ResNet2D): """ - Constructs a `keras.models.Model` according to the ResNet34 specifications. + Constructs a `tensorflow.keras.models.Model` according to the ResNet34 specifications. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture @@ -174,7 +174,7 @@ class ResNet2D34(ResNet2D): >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet34(x, classes=classes) @@ -198,9 +198,9 @@ def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_b class ResNet2D50(ResNet2D): """ - Constructs a `keras.models.Model` according to the ResNet50 specifications. 
+ Constructs a `tensorflow.keras.models.Model` according to the ResNet50 specifications. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture @@ -218,7 +218,7 @@ class ResNet2D50(ResNet2D): >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet50(x) @@ -245,9 +245,9 @@ def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_b class ResNet2D101(ResNet2D): """ - Constructs a `keras.models.Model` according to the ResNet101 specifications. + Constructs a `tensorflow.keras.models.Model` according to the ResNet101 specifications. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture @@ -265,7 +265,7 @@ class ResNet2D101(ResNet2D): >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet101(x, classes=classes) @@ -292,9 +292,9 @@ def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_b class ResNet2D152(ResNet2D): """ - Constructs a `keras.models.Model` according to the ResNet152 specifications. + Constructs a `tensorflow.keras.models.Model` according to the ResNet152 specifications. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. 
an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture @@ -312,7 +312,7 @@ class ResNet2D152(ResNet2D): >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet152(x, classes=classes) @@ -339,9 +339,9 @@ def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_b class ResNet2D200(ResNet2D): """ - Constructs a `keras.models.Model` according to the ResNet200 specifications. + Constructs a `tensorflow.keras.models.Model` according to the ResNet200 specifications. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture @@ -359,7 +359,7 @@ class ResNet2D200(ResNet2D): >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet200(x, classes=classes) diff --git a/keras_resnet/models/_3d.py b/keras_resnet/models/_3d.py index cd98a50..142dc15 100644 --- a/keras_resnet/models/_3d.py +++ b/keras_resnet/models/_3d.py @@ -7,20 +7,20 @@ This module implements popular three-dimensional residual models. """ -import keras.backend -import keras.layers -import keras.models -import keras.regularizers +import tensorflow.keras.backend +import tensorflow.keras.layers +import tensorflow.keras.models +import tensorflow.keras.regularizers import keras_resnet.blocks import keras_resnet.layers -class ResNet3D(keras.Model): """ - Constructs a `keras.models.Model` object using the given block count. + Constructs a `tensorflow.keras.models.Model` object using the given block count. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. 
an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture @@ -43,7 +43,7 @@ class ResNet3D(keras.Model): >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> blocks = [2, 2, 2, 2] @@ -65,7 +65,7 @@ def __init__( *args, **kwargs ): - if keras.backend.image_data_format() == "channels_last": + if tensorflow.keras.backend.image_data_format() == "channels_last": axis = 3 else: axis = 1 @@ -73,11 +73,11 @@ def __init__( if numerical_names is None: numerical_names = [True] * len(blocks) - x = keras.layers.ZeroPadding3D(padding=3, name="padding_conv1")(inputs) - x = keras.layers.Conv3D(64, (7, 7), strides=(2, 2), use_bias=False, name="conv1")(x) + x = tensorflow.keras.layers.ZeroPadding3D(padding=3, name="padding_conv1")(inputs) + x = tensorflow.keras.layers.Conv3D(64, (7, 7), strides=(2, 2), use_bias=False, name="conv1")(x) x = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn_conv1")(x) - x = keras.layers.Activation("relu", name="conv1_relu")(x) - x = keras.layers.MaxPooling3D((3, 3), strides=(2, 2), padding="same", name="pool1")(x) + x = tensorflow.keras.layers.Activation("relu", name="conv1_relu")(x) + x = tensorflow.keras.layers.MaxPooling3D((3, 3), strides=(2, 2), padding="same", name="pool1")(x) features = 64 @@ -100,8 +100,8 @@ def __init__( if include_top: assert classes > 0 - x = keras.layers.GlobalAveragePooling3D(name="pool5")(x) - x = keras.layers.Dense(classes, activation="softmax", name="fc1000")(x) + x = tensorflow.keras.layers.GlobalAveragePooling3D(name="pool5")(x) + x = tensorflow.keras.layers.Dense(classes, activation="softmax", name="fc1000")(x) super(ResNet3D, self).__init__(inputs=inputs, outputs=x, *args, **kwargs) else: @@ -111,9 +111,9 @@ def __init__( class ResNet3D18(ResNet3D): """ - Constructs a `keras.models.Model` according to the ResNet18 specifications. 
+ Constructs a `tensorflow.keras.models.Model` according to the ResNet18 specifications. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture @@ -131,7 +131,7 @@ class ResNet3D18(ResNet3D): >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet18(x, classes=classes) @@ -155,9 +155,9 @@ def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_b class ResNet3D34(ResNet3D): """ - Constructs a `keras.models.Model` according to the ResNet34 specifications. + Constructs a `tensorflow.keras.models.Model` according to the ResNet34 specifications. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture @@ -175,7 +175,7 @@ class ResNet3D34(ResNet3D): >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet34(x, classes=classes) @@ -199,9 +199,9 @@ def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_b class ResNet3D50(ResNet3D): """ - Constructs a `keras.models.Model` according to the ResNet50 specifications. + Constructs a `tensorflow.keras.models.Model` according to the ResNet50 specifications. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. 
an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture @@ -219,7 +219,7 @@ class ResNet3D50(ResNet3D): >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet50(x) @@ -246,9 +246,9 @@ def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_b class ResNet3D101(ResNet3D): """ - Constructs a `keras.models.Model` according to the ResNet101 specifications. + Constructs a `tensorflow.keras.models.Model` according to the ResNet101 specifications. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture @@ -266,7 +266,7 @@ class ResNet3D101(ResNet3D): >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet101(x, classes=classes) @@ -293,9 +293,9 @@ def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_b class ResNet3D152(ResNet3D): """ - Constructs a `keras.models.Model` according to the ResNet152 specifications. + Constructs a `tensorflow.keras.models.Model` according to the ResNet152 specifications. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. 
an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture @@ -313,7 +313,7 @@ class ResNet3D152(ResNet3D): >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet152(x, classes=classes) @@ -340,9 +340,9 @@ def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_b class ResNet3D200(ResNet3D): """ - Constructs a `keras.models.Model` according to the ResNet200 specifications. + Constructs a `tensorflow.keras.models.Model` according to the ResNet200 specifications. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture @@ -360,7 +360,7 @@ class ResNet3D200(ResNet3D): >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet200(x, classes=classes) diff --git a/keras_resnet/models/_feature_pyramid_2d.py b/keras_resnet/models/_feature_pyramid_2d.py index fe3ce1a..b4cd6e5 100644 --- a/keras_resnet/models/_feature_pyramid_2d.py +++ b/keras_resnet/models/_feature_pyramid_2d.py @@ -7,16 +7,16 @@ This module implements popular two-dimensional feature pyramid networks (FPNs). 
""" -import keras.backend -import keras.layers -import keras.models -import keras.regularizers +import tensorflow.tensorflow.keras.backend +import tensorflow.tensorflow.keras.layers +import tensorflow.tensorflow.keras.models +import tensorflow.tensorflow.keras.regularizers import keras_resnet.blocks import keras_resnet.layers -class FPN2D(keras.Model): +class FPN2D(tensorflow.keras.Model): def __init__( self, inputs, @@ -27,7 +27,7 @@ def __init__( *args, **kwargs ): - if keras.backend.image_data_format() == "channels_last": + if tensorflow.keras.backend.image_data_format() == "channels_last": axis = 3 else: axis = 1 @@ -35,10 +35,10 @@ def __init__( if numerical_names is None: numerical_names = [True] * len(blocks) - x = keras.layers.Conv2D(64, (7, 7), strides=(2, 2), use_bias=False, name="conv1", padding="same")(inputs) + x = tensorflow.keras.layers.Conv2D(64, (7, 7), strides=(2, 2), use_bias=False, name="conv1", padding="same")(inputs) x = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn_conv1")(x) - x = keras.layers.Activation("relu", name="conv1_relu")(x) - x = keras.layers.MaxPooling2D((3, 3), strides=(2, 2), padding="same", name="pool1")(x) + x = tensorflow.keras.layers.Activation("relu", name="conv1_relu")(x) + x = tensorflow.keras.layers.MaxPooling2D((3, 3), strides=(2, 2), padding="same", name="pool1")(x) features = 64 @@ -60,7 +60,7 @@ def __init__( c2, c3, c4, c5 = outputs - pyramid_5 = keras.layers.Conv2D( + pyramid_5 = tensorflow.keras.layers.Conv2D( filters=256, kernel_size=1, strides=1, @@ -68,13 +68,13 @@ def __init__( name="c5_reduced" )(c5) - upsampled_p5 = keras.layers.UpSampling2D( + upsampled_p5 = tensorflow.keras.layers.UpSampling2D( interpolation="bilinear", name="p5_upsampled", size=(2, 2) )(pyramid_5) - pyramid_4 = keras.layers.Conv2D( + pyramid_4 = tensorflow.keras.layers.Conv2D( filters=256, kernel_size=1, strides=1, @@ -82,17 +82,17 @@ def __init__( name="c4_reduced" )(c4) - pyramid_4 = 
keras.layers.Add( + pyramid_4 = tensorflow.keras.layers.Add( name="p4_merged" )([upsampled_p5, pyramid_4]) - upsampled_p4 = keras.layers.UpSampling2D( + upsampled_p4 = tensorflow.keras.layers.UpSampling2D( interpolation="bilinear", name="p4_upsampled", size=(2, 2) )(pyramid_4) - pyramid_4 = keras.layers.Conv2D( + pyramid_4 = tensorflow.keras.layers.Conv2D( filters=256, kernel_size=3, strides=1, @@ -100,7 +100,7 @@ def __init__( name="p4" )(pyramid_4) - pyramid_3 = keras.layers.Conv2D( + pyramid_3 = tensorflow.keras.layers.Conv2D( filters=256, kernel_size=1, strides=1, @@ -108,17 +108,17 @@ def __init__( name="c3_reduced" )(c3) - pyramid_3 = keras.layers.Add( + pyramid_3 = tensorflow.keras.layers.Add( name="p3_merged" )([upsampled_p4, pyramid_3]) - upsampled_p3 = keras.layers.UpSampling2D( + upsampled_p3 = tensorflow.keras.layers.UpSampling2D( interpolation="bilinear", name="p3_upsampled", size=(2, 2) )(pyramid_3) - pyramid_3 = keras.layers.Conv2D( + pyramid_3 = tensorflow.keras.layers.Conv2D( filters=256, kernel_size=3, strides=1, @@ -126,7 +126,7 @@ def __init__( name="p3" )(pyramid_3) - pyramid_2 = keras.layers.Conv2D( + pyramid_2 = tensorflow.keras.layers.Conv2D( filters=256, kernel_size=1, strides=1, @@ -134,11 +134,11 @@ def __init__( name="c2_reduced" )(c2) - pyramid_2 = keras.layers.Add( + pyramid_2 = tensorflow.keras.layers.Add( name="p2_merged" )([upsampled_p3, pyramid_2]) - pyramid_2 = keras.layers.Conv2D( + pyramid_2 = tensorflow.keras.layers.Conv2D( filters=256, kernel_size=3, strides=1, @@ -146,7 +146,7 @@ def __init__( name="p2" )(pyramid_2) - pyramid_6 = keras.layers.MaxPooling2D(strides=2, name="p6")(pyramid_5) + pyramid_6 = tensorflow.keras.layers.MaxPooling2D(strides=2, name="p6")(pyramid_5) outputs = [ pyramid_2, diff --git a/keras_resnet/models/_time_distributed_2d.py b/keras_resnet/models/_time_distributed_2d.py index bb4947d..9370e42 100644 --- a/keras_resnet/models/_time_distributed_2d.py +++ b/keras_resnet/models/_time_distributed_2d.py @@ 
-7,10 +7,10 @@ This module implements popular time distributed two-dimensional residual networks. """ -import keras.backend -import keras.layers -import keras.models -import keras.regularizers +import tensorflow.keras.backend +import tensorflow.keras.layers +import tensorflow.keras.models +import tensorflow.keras.regularizers import keras_resnet.blocks import keras_resnet.layers @@ -18,9 +18,9 @@ def TimeDistributedResNet(inputs, blocks, block, include_top=True, classes=1000, freeze_bn=True, *args, **kwargs): """ - Constructs a time distributed `keras.models.Model` object using the given block count. + Constructs a time distributed `tensorflow.keras.models.Model` object using the given block count. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture @@ -41,7 +41,7 @@ def TimeDistributedResNet(inputs, blocks, block, include_top=True, classes=1000, >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> blocks = [2, 2, 2, 2] @@ -49,24 +49,24 @@ def TimeDistributedResNet(inputs, blocks, block, include_top=True, classes=1000, >>> y = keras_resnet.models.TimeDistributedResNet(x, classes, blocks, blocks) - >>> y = keras.layers.TimeDistributed(keras.layers.Flatten())(y.output) + >>> y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Flatten())(y.output) - >>> y = keras.layers.TimeDistributed(keras.layers.Dense(classes, activation="softmax"))(y) + >>> y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Dense(classes, activation="softmax"))(y) - >>> model = keras.models.Model(x, y) + >>> model = tensorflow.keras.models.Model(x, y) >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) """ - if keras.backend.image_data_format() == "channels_last": + if 
tensorflow.keras.backend.image_data_format() == "channels_last": axis = 3 else: axis = 1 - x = keras.layers.TimeDistributed(keras.layers.ZeroPadding2D(padding=3), name="padding_conv1")(inputs) - x = keras.layers.TimeDistributed(keras.layers.Conv2D(64, (7, 7), strides=(2, 2), use_bias=False), name="conv1")(x) - x = keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn_conv1")(x) - x = keras.layers.TimeDistributed(keras.layers.Activation("relu"), name="conv1_relu")(x) - x = keras.layers.TimeDistributed(keras.layers.MaxPooling2D((3, 3), strides=(2, 2), padding="same"), name="pool1")(x) + x = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.ZeroPadding2D(padding=3), name="padding_conv1")(inputs) + x = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Conv2D(64, (7, 7), strides=(2, 2), use_bias=False), name="conv1")(x) + x = tensorflow.keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn_conv1")(x) + x = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Activation("relu"), name="conv1_relu")(x) + x = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.MaxPooling2D((3, 3), strides=(2, 2), padding="same"), name="pool1")(x) features = 64 @@ -82,20 +82,20 @@ def TimeDistributedResNet(inputs, blocks, block, include_top=True, classes=1000, if include_top: assert classes > 0 - x = keras.layers.TimeDistributed(keras.layers.GlobalAveragePooling2D(), name="pool5")(x) - x = keras.layers.TimeDistributed(keras.layers.Dense(classes, activation="softmax"), name="fc1000")(x) + x = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.GlobalAveragePooling2D(), name="pool5")(x) + x = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Dense(classes, activation="softmax"), name="fc1000")(x) - return keras.models.Model(inputs=inputs, outputs=x, *args, **kwargs) + return 
tensorflow.keras.models.Model(inputs=inputs, outputs=x, *args, **kwargs) else: # Else output each stages features - return keras.models.Model(inputs=inputs, outputs=outputs, *args, **kwargs) + return tensorflow.keras.models.Model(inputs=inputs, outputs=outputs, *args, **kwargs) def TimeDistributedResNet18(inputs, blocks=None, include_top=True, classes=1000, *args, **kwargs): """ - Constructs a time distributed `keras.models.Model` according to the ResNet18 specifications. + Constructs a time distributed `tensorflow.keras.models.Model` according to the ResNet18 specifications. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture @@ -111,15 +111,15 @@ def TimeDistributedResNet18(inputs, blocks=None, include_top=True, classes=1000, >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> y = keras_resnet.models.TimeDistributedResNet18(x) - >>> y = keras.layers.TimeDistributed(keras.layers.Flatten())(y.output) + >>> y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Flatten())(y.output) - >>> y = keras.layers.TimeDistributed(keras.layers.Dense(classes, activation="softmax"))(y) + >>> y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Dense(classes, activation="softmax"))(y) - >>> model = keras.models.Model(x, y) + >>> model = tensorflow.keras.models.Model(x, y) >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) """ @@ -131,9 +131,9 @@ def TimeDistributedResNet18(inputs, blocks=None, include_top=True, classes=1000, def TimeDistributedResNet34(inputs, blocks=None, include_top=True, classes=1000, *args, **kwargs): """ - Constructs a time distributed `keras.models.Model` according to the ResNet34 specifications. 
+ Constructs a time distributed `tensorflow.keras.models.Model` according to the ResNet34 specifications. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture @@ -149,15 +149,15 @@ def TimeDistributedResNet34(inputs, blocks=None, include_top=True, classes=1000, >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> y = keras_resnet.models.TimeDistributedResNet34(x) - >>> y = keras.layers.TimeDistributed(keras.layers.Flatten())(y.output) + >>> y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Flatten())(y.output) - >>> y = keras.layers.TimeDistributed(keras.layers.Dense(classes, activation="softmax"))(y) + >>> y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Dense(classes, activation="softmax"))(y) - >>> model = keras.models.Model(x, y) + >>> model = tensorflow.keras.models.Model(x, y) >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) """ @@ -169,9 +169,9 @@ def TimeDistributedResNet34(inputs, blocks=None, include_top=True, classes=1000, def TimeDistributedResNet50(inputs, blocks=None, include_top=True, classes=1000, *args, **kwargs): """ - Constructs a time distributed `keras.models.Model` according to the ResNet50 specifications. + Constructs a time distributed `tensorflow.keras.models.Model` according to the ResNet50 specifications. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. 
an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture @@ -185,15 +185,15 @@ def TimeDistributedResNet50(inputs, blocks=None, include_top=True, classes=1000, >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> y = keras_resnet.models.TimeDistributedResNet50(x) - >>> y = keras.layers.TimeDistributed(keras.layers.Flatten())(y.output) + >>> y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Flatten())(y.output) - >>> y = keras.layers.TimeDistributed(keras.layers.Dense(classes, activation="softmax"))(y) + >>> y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Dense(classes, activation="softmax"))(y) - >>> model = keras.models.Model(x, y) + >>> model = tensorflow.keras.models.Model(x, y) >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) """ @@ -205,9 +205,9 @@ def TimeDistributedResNet50(inputs, blocks=None, include_top=True, classes=1000, def TimeDistributedResNet101(inputs, blocks=None, include_top=True, classes=1000, *args, **kwargs): """ - Constructs a time distributed `keras.models.Model` according to the ResNet101 specifications. + Constructs a time distributed `tensorflow.keras.models.Model` according to the ResNet101 specifications. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. 
an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture @@ -223,15 +223,15 @@ def TimeDistributedResNet101(inputs, blocks=None, include_top=True, classes=1000 >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> y = keras_resnet.models.TimeDistributedResNet101(x) - >>> y = keras.layers.TimeDistributed(keras.layers.Flatten())(y.output) + >>> y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Flatten())(y.output) - >>> y = keras.layers.TimeDistributed(keras.layers.Dense(classes, activation="softmax"))(y) + >>> y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Dense(classes, activation="softmax"))(y) - >>> model = keras.models.Model(x, y) + >>> model = tensorflow.keras.models.Model(x, y) >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) """ @@ -243,9 +243,9 @@ def TimeDistributedResNet101(inputs, blocks=None, include_top=True, classes=1000 def TimeDistributedResNet152(inputs, blocks=None, include_top=True, classes=1000, *args, **kwargs): """ - Constructs a time distributed `keras.models.Model` according to the ResNet152 specifications. + Constructs a time distributed `tensorflow.keras.models.Model` according to the ResNet152 specifications. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. 
an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture @@ -261,15 +261,15 @@ def TimeDistributedResNet152(inputs, blocks=None, include_top=True, classes=1000 >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> y = keras_resnet.models.TimeDistributedResNet152(x) - >>> y = keras.layers.TimeDistributed(keras.layers.Flatten())(y.output) + >>> y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Flatten())(y.output) - >>> y = keras.layers.TimeDistributed(keras.layers.Dense(classes, activation="softmax"))(y) + >>> y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Dense(classes, activation="softmax"))(y) - >>> model = keras.models.Model(x, y) + >>> model = tensorflow.keras.models.Model(x, y) >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) """ @@ -281,9 +281,9 @@ def TimeDistributedResNet152(inputs, blocks=None, include_top=True, classes=1000 def TimeDistributedResNet200(inputs, blocks=None, include_top=True, classes=1000, *args, **kwargs): """ - Constructs a time distributed `keras.models.Model` according to the ResNet200 specifications. + Constructs a time distributed `tensorflow.keras.models.Model` according to the ResNet200 specifications. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. 
an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture @@ -299,15 +299,15 @@ def TimeDistributedResNet200(inputs, blocks=None, include_top=True, classes=1000 >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> y = keras_resnet.models.TimeDistributedResNet200(x) - >>> y = keras.layers.TimeDistributed(keras.layers.Flatten())(y.output) + >>> y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Flatten())(y.output) - >>> y = keras.layers.TimeDistributed(keras.layers.Dense(classes, activation="softmax"))(y) + >>> y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Dense(classes, activation="softmax"))(y) - >>> model = keras.models.Model(x, y) + >>> model = tensorflow.keras.models.Model(x, y) >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) """ diff --git a/tests/conftest.py b/tests/conftest.py index ebfd7b8..01de718 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,4 +1,4 @@ -import keras.layers +import tensorflow.tensorflow.keras.layers import pytest @@ -6,4 +6,4 @@ def x(): shape = (224, 224, 3) - return keras.layers.Input(shape) + return tensorflow.keras.layers.Input(shape) diff --git a/tools/export-caffe-weights.py b/tools/export-caffe-weights.py old mode 100755 new mode 100644 diff --git a/tools/import-caffe-weights.py b/tools/import-caffe-weights.py old mode 100755 new mode 100644 index dd84351..c56b9b2 --- a/tools/import-caffe-weights.py +++ b/tools/import-caffe-weights.py @@ -1,7 +1,7 @@ #!/usr/bin/env python import keras_resnet.models -import keras +import tensorflow.keras import h5py import argparse @@ -21,7 +21,7 @@ def create_model(resnet): if resnet not in valid: raise ValueError("Invalid resnet argument (valid: {}) : '{}'".format(valid, resnet)) - image = keras.layers.Input((None, None, 3)) + image = tensorflow.keras.layers.Input((None, None, 3)) if resnet == "resnet50": return 
keras_resnet.models.ResNet50(image) elif resnet == "resnet101": @@ -50,11 +50,11 @@ def parse_args(): # port each layer for index, l in enumerate(model.layers): - if isinstance(l, keras.layers.Conv2D): + if isinstance(l, tensorflow.keras.layers.Conv2D): l.set_weights([convert_conv_weights(weights.get(l.name).get("0"))]) - elif isinstance(l, keras.layers.Dense): + elif isinstance(l, tensorflow.keras.layers.Dense): l.set_weights(convert_dense_weights(weights.get(l.name).get("0"), weights.get(l.name).get("1"))) - elif isinstance(l, keras.layers.BatchNormalization): + elif isinstance(l, tensorflow.keras.layers.BatchNormalization): scale_name = l.name.replace("bn", "scale") bn_weights = weights.get(l.name) scale_weights = weights.get(scale_name) From 4cf287cf09e8cbbb6940ecaafec8d908a37d4eb1 Mon Sep 17 00:00:00 2001 From: Tanay Topac Date: Tue, 16 Feb 2021 19:16:49 -0800 Subject: [PATCH 18/24] fixed tensorflow.tensorflow imports --- keras_resnet/blocks/_1d.py | 4 ++-- keras_resnet/blocks/_2d.py | 4 ++-- keras_resnet/blocks/_3d.py | 4 ++-- keras_resnet/blocks/_time_distributed_2d.py | 4 ++-- keras_resnet/models/_1d.py | 8 ++++---- 5 files changed, 12 insertions(+), 12 deletions(-) diff --git a/keras_resnet/blocks/_1d.py b/keras_resnet/blocks/_1d.py index 1386a78..beac875 100644 --- a/keras_resnet/blocks/_1d.py +++ b/keras_resnet/blocks/_1d.py @@ -6,8 +6,8 @@ This module implements a number of popular one-dimensional residual blocks. """ -import tensorflow.tensorflow.keras.layers -import tensorflow.tensorflow.keras.regularizers +import tensorflow.keras.layers +import tensorflow.keras.regularizers import keras_resnet.layers diff --git a/keras_resnet/blocks/_2d.py b/keras_resnet/blocks/_2d.py index 366bba3..e2b6a36 100644 --- a/keras_resnet/blocks/_2d.py +++ b/keras_resnet/blocks/_2d.py @@ -7,8 +7,8 @@ This module implements a number of popular two-dimensional residual blocks. 
""" -import tensorflow.tensorflow.keras.layers -import tensorflow.tensorflow.keras.regularizers +import tensorflow.keras.layers +import tensorflow.keras.regularizers import keras_resnet.layers diff --git a/keras_resnet/blocks/_3d.py b/keras_resnet/blocks/_3d.py index 941a2e1..0255131 100644 --- a/keras_resnet/blocks/_3d.py +++ b/keras_resnet/blocks/_3d.py @@ -7,8 +7,8 @@ This module implements a number of popular three-dimensional residual blocks. """ -import tensorflow.tensorflow.keras.layers -import tensorflow.tensorflow.keras.regularizers +import tensorflow.keras.layers +import tensorflow.keras.regularizers import keras_resnet.layers diff --git a/keras_resnet/blocks/_time_distributed_2d.py b/keras_resnet/blocks/_time_distributed_2d.py index 0162e46..dc1a2fb 100644 --- a/keras_resnet/blocks/_time_distributed_2d.py +++ b/keras_resnet/blocks/_time_distributed_2d.py @@ -7,8 +7,8 @@ This module implements a number of popular time distributed two-dimensional residual blocks. """ -import tensorflow.tensorflow.keras.layers -import tensorflow.tensorflow.keras.regularizers +import tensorflow.keras.layers +import tensorflow.keras.regularizers import keras_resnet.layers diff --git a/keras_resnet/models/_1d.py b/keras_resnet/models/_1d.py index 6f345c9..7b9bf34 100644 --- a/keras_resnet/models/_1d.py +++ b/keras_resnet/models/_1d.py @@ -7,10 +7,10 @@ This module implements popular one-dimensional residual models. 
""" -import tensorflow.tensorflow.keras.backend -import tensorflow.tensorflow.keras.layers -import tensorflow.tensorflow.keras.models -import tensorflow.tensorflow.keras.regularizers +import tensorflow.keras.backend +import tensorflow.keras.layers +import tensorflow.keras.models +import tensorflow.keras.regularizers import keras_resnet.blocks import keras_resnet.layers From 02341580c9050d1f4818905974b9bc846e8f473e Mon Sep 17 00:00:00 2001 From: Tanay Topac Date: Tue, 16 Feb 2021 19:17:07 -0800 Subject: [PATCH 19/24] fixed tensorflow.tensorflow bug 2 --- keras_resnet/benchmarks/__init__.py | 8 ++++---- keras_resnet/classifiers/_2d.py | 8 ++++---- keras_resnet/metrics.py | 6 +++--- keras_resnet/models/_2d.py | 8 ++++---- keras_resnet/models/_3d.py | 8 ++++---- keras_resnet/models/_feature_pyramid_2d.py | 8 ++++---- keras_resnet/models/_time_distributed_2d.py | 8 ++++---- tests/conftest.py | 2 +- 8 files changed, 28 insertions(+), 28 deletions(-) diff --git a/keras_resnet/benchmarks/__init__.py b/keras_resnet/benchmarks/__init__.py index 5d79fd3..0cdedd1 100644 --- a/keras_resnet/benchmarks/__init__.py +++ b/keras_resnet/benchmarks/__init__.py @@ -2,7 +2,7 @@ import click import tensorflow.keras -import tensorflow.tensorflow.keras.preprocessing.image +import tensorflow.keras.preprocessing.image import numpy import pkg_resources import sklearn.model_selection @@ -12,9 +12,9 @@ import keras_resnet.models _benchmarks = { - "CIFAR-10": tensorflow.tensorflow.keras.datasets.cifar10, - "CIFAR-100": tensorflow.tensorflow.keras.datasets.cifar100, - "MNIST": tensorflow.tensorflow.keras.datasets.mnist + "CIFAR-10": tensorflow.keras.datasets.cifar10, + "CIFAR-100": tensorflow.keras.datasets.cifar100, + "MNIST": tensorflow.keras.datasets.mnist } diff --git a/keras_resnet/classifiers/_2d.py b/keras_resnet/classifiers/_2d.py index 17496af..46d2a75 100644 --- a/keras_resnet/classifiers/_2d.py +++ b/keras_resnet/classifiers/_2d.py @@ -7,10 +7,10 @@ This module implements popular 
residual two-dimensional classifiers. """ -import tensorflow.tensorflow.keras.backend -import tensorflow.tensorflow.keras.layers -import tensorflow.tensorflow.keras.models -import tensorflow.tensorflow.keras.regularizers +import tensorflow.keras.backend +import tensorflow.keras.layers +import tensorflow.keras.models +import tensorflow.keras.regularizers import keras_resnet.models diff --git a/keras_resnet/metrics.py b/keras_resnet/metrics.py index a40c246..1b1a343 100644 --- a/keras_resnet/metrics.py +++ b/keras_resnet/metrics.py @@ -1,9 +1,9 @@ -import tensorflow.tensorflow.keras.metrics +import tensorflow.keras.metrics def top_1_categorical_error(y_true, y_pred): - return 1.0 - tensorflow.tensorflow.keras.metrics.top_k_categorical_accuracy(y_true, y_pred, 1) + return 1.0 - tensorflow.keras.metrics.top_k_categorical_accuracy(y_true, y_pred, 1) def top_5_categorical_error(y_true, y_pred): - return 1.0 - tensorflow.tensorflow.keras.metrics.top_k_categorical_accuracy(y_true, y_pred, 5) + return 1.0 - tensorflow.keras.metrics.top_k_categorical_accuracy(y_true, y_pred, 5) diff --git a/keras_resnet/models/_2d.py b/keras_resnet/models/_2d.py index 998c0e0..e9e490b 100644 --- a/keras_resnet/models/_2d.py +++ b/keras_resnet/models/_2d.py @@ -7,10 +7,10 @@ This module implements popular two-dimensional residual models. """ -import tensorflow.tensorflow.keras.backend -import tensorflow.tensorflow.keras.layers -import tensorflow.tensorflow.keras.models -import tensorflow.tensorflow.keras.regularizers +import tensorflow.keras.backend +import tensorflow.keras.layers +import tensorflow.keras.models +import tensorflow.keras.regularizers import keras_resnet.blocks import keras_resnet.layers diff --git a/keras_resnet/models/_3d.py b/keras_resnet/models/_3d.py index 142dc15..0bc95e5 100644 --- a/keras_resnet/models/_3d.py +++ b/keras_resnet/models/_3d.py @@ -7,10 +7,10 @@ This module implements popular three-dimensional residual models. 
""" -import tensorflow.tensorflow.keras.backend -import tensorflow.tensorflow.keras.layers -import tensorflow.tensorflow.keras.models -import tensorflow.tensorflow.keras.regularizers +import tensorflow.keras.backend +import tensorflow.keras.layers +import tensorflow.keras.models +import tensorflow.keras.regularizers import keras_resnet.blocks import keras_resnet.layers diff --git a/keras_resnet/models/_feature_pyramid_2d.py b/keras_resnet/models/_feature_pyramid_2d.py index b4cd6e5..2ec67d5 100644 --- a/keras_resnet/models/_feature_pyramid_2d.py +++ b/keras_resnet/models/_feature_pyramid_2d.py @@ -7,10 +7,10 @@ This module implements popular two-dimensional feature pyramid networks (FPNs). """ -import tensorflow.tensorflow.keras.backend -import tensorflow.tensorflow.keras.layers -import tensorflow.tensorflow.keras.models -import tensorflow.tensorflow.keras.regularizers +import tensorflow.keras.backend +import tensorflow.keras.layers +import tensorflow.keras.models +import tensorflow.keras.regularizers import keras_resnet.blocks import keras_resnet.layers diff --git a/keras_resnet/models/_time_distributed_2d.py b/keras_resnet/models/_time_distributed_2d.py index 9370e42..a952d70 100644 --- a/keras_resnet/models/_time_distributed_2d.py +++ b/keras_resnet/models/_time_distributed_2d.py @@ -7,10 +7,10 @@ This module implements popular time distributed two-dimensional residual networks. 
""" -import tensorflow.tensorflow.keras.backend -import tensorflow.tensorflow.keras.layers -import tensorflow.tensorflow.keras.models -import tensorflow.tensorflow.keras.regularizers +import tensorflow.keras.backend +import tensorflow.keras.layers +import tensorflow.keras.models +import tensorflow.keras.regularizers import keras_resnet.blocks import keras_resnet.layers diff --git a/tests/conftest.py b/tests/conftest.py index 01de718..e3ddd8a 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,4 +1,4 @@ -import tensorflow.tensorflow.keras.layers +import tensorflow.keras.layers import pytest From 1f2418e8ed984cc80a99289ec34d24332cf962d8 Mon Sep 17 00:00:00 2001 From: Tanay Topac Date: Tue, 9 Mar 2021 17:05:53 -0800 Subject: [PATCH 20/24] fixed syntax error --- .vscode/settings.json | 3 +++ keras_resnet/models/_1d.py | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) create mode 100644 .vscode/settings.json diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000..0e324d6 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,3 @@ +{ + "python.pythonPath": "/Users/tanay/miniforge3/envs/python38/bin/python" +} \ No newline at end of file diff --git a/keras_resnet/models/_1d.py b/keras_resnet/models/_1d.py index 7b9bf34..7d3f108 100644 --- a/keras_resnet/models/_1d.py +++ b/keras_resnet/models/_1d.py @@ -62,7 +62,7 @@ def __init__( freeze_bn=True, numerical_names=None, *args, - **kwargs, + **kwargs ): super(ResNet1D, self).__init__(*args, **kwargs) self.classes = classes From 6a3eb530de3e76b77f5251638fb949ce07cac0a0 Mon Sep 17 00:00:00 2001 From: tackoo Date: Thu, 15 Apr 2021 08:53:03 -0700 Subject: [PATCH 21/24] updated gitignore --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index d56fc37..33475f2 100644 --- a/.gitignore +++ b/.gitignore @@ -49,3 +49,4 @@ sdist/ target/ var/ venv/ +.vscode \ No newline at end of file From e896128c84bb5ef641929ccc1802564939efdeae Mon Sep 17 
00:00:00 2001 From: Tanay Topac Date: Thu, 15 Apr 2021 08:53:48 -0700 Subject: [PATCH 22/24] Delete .vscode directory --- .vscode/settings.json | 3 --- 1 file changed, 3 deletions(-) delete mode 100644 .vscode/settings.json diff --git a/.vscode/settings.json b/.vscode/settings.json deleted file mode 100644 index 0e324d6..0000000 --- a/.vscode/settings.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "python.pythonPath": "/Users/tanay/miniforge3/envs/python38/bin/python" -} \ No newline at end of file From d3b4ac5d3f325b13d73fc42242e91e6bae6e9711 Mon Sep 17 00:00:00 2001 From: tackoo Date: Thu, 15 Apr 2021 08:58:52 -0700 Subject: [PATCH 23/24] remove tf references to preserve compatibility --- README.rst | 8 +- keras_resnet/benchmarks/__init__.py | 24 ++--- keras_resnet/blocks/_1d.py | 42 ++++---- keras_resnet/blocks/_2d.py | 42 ++++---- keras_resnet/blocks/_3d.py | 42 ++++---- keras_resnet/blocks/_time_distributed_2d.py | 56 +++++----- keras_resnet/classifiers/_2d.py | 68 ++++++------ keras_resnet/layers/_batch_normalization.py | 6 +- keras_resnet/metrics.py | 6 +- keras_resnet/models/_1d.py | 66 ++++++------ keras_resnet/models/_2d.py | 64 +++++------ keras_resnet/models/_3d.py | 66 ++++++------ keras_resnet/models/_feature_pyramid_2d.py | 46 ++++---- keras_resnet/models/_time_distributed_2d.py | 112 ++++++++++---------- tests/conftest.py | 4 +- tools/import-caffe-weights.py | 10 +- 16 files changed, 331 insertions(+), 331 deletions(-) diff --git a/README.rst b/README.rst index c38352a..ac8ed7b 100644 --- a/README.rst +++ b/README.rst @@ -12,21 +12,21 @@ A tantalizing preview of Keras-ResNet simplicity: .. 
code-block:: python - >>> import tensorflow.keras + >>> import keras >>> import keras_resnet.models >>> shape, classes = (32, 32, 3), 10 - >>> x = tensorflow.keras.layers.Input(shape) + >>> x = keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet50(x, classes=classes) >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) - >>> (training_x, training_y), (_, _) = tensorflow.keras.datasets.cifar10.load_data() + >>> (training_x, training_y), (_, _) = keras.datasets.cifar10.load_data() - >>> training_y = tensorflow.keras.utils.np_utils.to_categorical(training_y) + >>> training_y = keras.utils.np_utils.to_categorical(training_y) >>> model.fit(training_x, training_y) diff --git a/keras_resnet/benchmarks/__init__.py b/keras_resnet/benchmarks/__init__.py index 0cdedd1..415aa58 100644 --- a/keras_resnet/benchmarks/__init__.py +++ b/keras_resnet/benchmarks/__init__.py @@ -1,8 +1,8 @@ import os.path import click -import tensorflow.keras -import tensorflow.keras.preprocessing.image +import keras +import keras.preprocessing.image import numpy import pkg_resources import sklearn.model_selection @@ -12,9 +12,9 @@ import keras_resnet.models _benchmarks = { - "CIFAR-10": tensorflow.keras.datasets.cifar10, - "CIFAR-100": tensorflow.keras.datasets.cifar100, - "MNIST": tensorflow.keras.datasets.mnist + "CIFAR-10": keras.datasets.cifar10, + "CIFAR-100": keras.datasets.cifar100, + "MNIST": keras.datasets.mnist } @@ -65,7 +65,7 @@ def __main__(benchmark, device, name): session = tensorflow.Session(config=configuration) - tensorflow.keras.backend.set_session(session) + keras.backend.set_session(session) (training_x, training_y), _ = _benchmarks[benchmark].load_data() @@ -74,14 +74,14 @@ def __main__(benchmark, device, name): if benchmark is "MNIST": training_x = numpy.expand_dims(training_x, -1) - training_y = tensorflow.keras.utils.np_utils.to_categorical(training_y) + training_y = keras.utils.np_utils.to_categorical(training_y) training_x, validation_x, 
training_y, validation_y = sklearn.model_selection.train_test_split( training_x, training_y ) - generator = tensorflow.keras.preprocessing.image.ImageDataGenerator( + generator = keras.preprocessing.image.ImageDataGenerator( horizontal_flip=True ) @@ -93,7 +93,7 @@ def __main__(benchmark, device, name): batch_size=256 ) - validation_data = tensorflow.keras.preprocessing.image.ImageDataGenerator() + validation_data = keras.preprocessing.image.ImageDataGenerator() validation_data.fit(validation_x) @@ -105,7 +105,7 @@ def __main__(benchmark, device, name): shape, classes = training_x.shape[1:], training_y.shape[-1] - x = tensorflow.keras.layers.Input(shape) + x = keras.layers.Input(shape) model = _names[name](inputs=x, classes=classes) @@ -120,13 +120,13 @@ def __main__(benchmark, device, name): pathname = pkg_resources.resource_filename("keras_resnet", pathname) - model_checkpoint = tensorflow.keras.callbacks.ModelCheckpoint(pathname) + model_checkpoint = keras.callbacks.ModelCheckpoint(pathname) pathname = os.path.join("data", "logs", benchmark, "{}.csv".format(name)) pathname = pkg_resources.resource_filename("keras_resnet", pathname) - csv_logger = tensorflow.keras.callbacks.CSVLogger(pathname) + csv_logger = keras.callbacks.CSVLogger(pathname) callbacks = [ csv_logger, diff --git a/keras_resnet/blocks/_1d.py b/keras_resnet/blocks/_1d.py index beac875..c9a0321 100644 --- a/keras_resnet/blocks/_1d.py +++ b/keras_resnet/blocks/_1d.py @@ -6,8 +6,8 @@ This module implements a number of popular one-dimensional residual blocks. 
""" -import tensorflow.keras.layers -import tensorflow.keras.regularizers +import keras.layers +import keras.regularizers import keras_resnet.layers @@ -54,7 +54,7 @@ def basic_1d( else: stride = 2 - if tensorflow.keras.backend.image_data_format() == "channels_last": + if keras.backend.image_data_format() == "channels_last": axis = -1 else: axis = 1 @@ -67,12 +67,12 @@ def basic_1d( stage_char = str(stage + 2) def f(x): - y = tensorflow.keras.layers.ZeroPadding1D( + y = keras.layers.ZeroPadding1D( padding=1, name="padding{}{}_branch2a".format(stage_char, block_char) )(x) - y = tensorflow.keras.layers.Conv1D( + y = keras.layers.Conv1D( filters, kernel_size, strides=stride, @@ -88,17 +88,17 @@ def f(x): name="bn{}{}_branch2a".format(stage_char, block_char) )(y) - y = tensorflow.keras.layers.Activation( + y = keras.layers.Activation( "relu", name="res{}{}_branch2a_relu".format(stage_char, block_char) )(y) - y = tensorflow.keras.layers.ZeroPadding1D( + y = keras.layers.ZeroPadding1D( padding=1, name="padding{}{}_branch2b".format(stage_char, block_char) )(y) - y = tensorflow.keras.layers.Conv1D( + y = keras.layers.Conv1D( filters, kernel_size, use_bias=False, @@ -114,7 +114,7 @@ def f(x): )(y) if block == 0: - shortcut = tensorflow.keras.layers.Conv1D( + shortcut = keras.layers.Conv1D( filters, 1, strides=stride, @@ -132,11 +132,11 @@ def f(x): else: shortcut = x - y = tensorflow.keras.layers.Add( + y = keras.layers.Add( name="res{}{}".format(stage_char, block_char) )([y, shortcut]) - y = tensorflow.keras.layers.Activation( + y = keras.layers.Activation( "relu", name="res{}{}_relu".format(stage_char, block_char) )(y) @@ -181,7 +181,7 @@ def bottleneck_1d( if stride is None: stride = 1 if block != 0 or stage == 0 else 2 - if tensorflow.keras.backend.image_data_format() == "channels_last": + if keras.backend.image_data_format() == "channels_last": axis = -1 else: axis = 1 @@ -194,7 +194,7 @@ def bottleneck_1d( stage_char = str(stage + 2) def f(x): - y = 
tensorflow.keras.layers.Conv1D( + y = keras.layers.Conv1D( filters, 1, strides=stride, @@ -210,17 +210,17 @@ def f(x): name="bn{}{}_branch2a".format(stage_char, block_char) )(y) - y = tensorflow.keras.layers.Activation( + y = keras.layers.Activation( "relu", name="res{}{}_branch2a_relu".format(stage_char, block_char) )(y) - y = tensorflow.keras.layers.ZeroPadding1D( + y = keras.layers.ZeroPadding1D( padding=1, name="padding{}{}_branch2b".format(stage_char, block_char) )(y) - y = tensorflow.keras.layers.Conv1D( + y = keras.layers.Conv1D( filters, kernel_size, use_bias=False, @@ -235,12 +235,12 @@ def f(x): name="bn{}{}_branch2b".format(stage_char, block_char) )(y) - y = tensorflow.keras.layers.Activation( + y = keras.layers.Activation( "relu", name="res{}{}_branch2b_relu".format(stage_char, block_char) )(y) - y = tensorflow.keras.layers.Conv1D( + y = keras.layers.Conv1D( filters * 4, 1, use_bias=False, @@ -256,7 +256,7 @@ def f(x): )(y) if block == 0: - shortcut = tensorflow.keras.layers.Conv1D( + shortcut = keras.layers.Conv1D( filters * 4, 1, strides=stride, @@ -274,11 +274,11 @@ def f(x): else: shortcut = x - y = tensorflow.keras.layers.Add( + y = keras.layers.Add( name="res{}{}".format(stage_char, block_char) )([y, shortcut]) - y = tensorflow.keras.layers.Activation( + y = keras.layers.Activation( "relu", name="res{}{}_relu".format(stage_char, block_char) )(y) diff --git a/keras_resnet/blocks/_2d.py b/keras_resnet/blocks/_2d.py index e2b6a36..ce774fb 100644 --- a/keras_resnet/blocks/_2d.py +++ b/keras_resnet/blocks/_2d.py @@ -7,8 +7,8 @@ This module implements a number of popular two-dimensional residual blocks. 
""" -import tensorflow.keras.layers -import tensorflow.keras.regularizers +import keras.layers +import keras.regularizers import keras_resnet.layers @@ -55,7 +55,7 @@ def basic_2d( else: stride = 2 - if tensorflow.keras.backend.image_data_format() == "channels_last": + if keras.backend.image_data_format() == "channels_last": axis = 3 else: axis = 1 @@ -68,30 +68,30 @@ def basic_2d( stage_char = str(stage + 2) def f(x): - y = tensorflow.keras.layers.ZeroPadding2D(padding=1, name="padding{}{}_branch2a".format(stage_char, block_char))(x) + y = keras.layers.ZeroPadding2D(padding=1, name="padding{}{}_branch2a".format(stage_char, block_char))(x) - y = tensorflow.keras.layers.Conv2D(filters, kernel_size, strides=stride, use_bias=False, name="res{}{}_branch2a".format(stage_char, block_char), **parameters)(y) + y = keras.layers.Conv2D(filters, kernel_size, strides=stride, use_bias=False, name="res{}{}_branch2a".format(stage_char, block_char), **parameters)(y) y = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch2a".format(stage_char, block_char))(y) - y = tensorflow.keras.layers.Activation("relu", name="res{}{}_branch2a_relu".format(stage_char, block_char))(y) + y = keras.layers.Activation("relu", name="res{}{}_branch2a_relu".format(stage_char, block_char))(y) - y = tensorflow.keras.layers.ZeroPadding2D(padding=1, name="padding{}{}_branch2b".format(stage_char, block_char))(y) + y = keras.layers.ZeroPadding2D(padding=1, name="padding{}{}_branch2b".format(stage_char, block_char))(y) - y = tensorflow.keras.layers.Conv2D(filters, kernel_size, use_bias=False, name="res{}{}_branch2b".format(stage_char, block_char), **parameters)(y) + y = keras.layers.Conv2D(filters, kernel_size, use_bias=False, name="res{}{}_branch2b".format(stage_char, block_char), **parameters)(y) y = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch2b".format(stage_char, block_char))(y) if block == 0: - 
shortcut = tensorflow.keras.layers.Conv2D(filters, (1, 1), strides=stride, use_bias=False, name="res{}{}_branch1".format(stage_char, block_char), **parameters)(x) + shortcut = keras.layers.Conv2D(filters, (1, 1), strides=stride, use_bias=False, name="res{}{}_branch1".format(stage_char, block_char), **parameters)(x) shortcut = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch1".format(stage_char, block_char))(shortcut) else: shortcut = x - y = tensorflow.keras.layers.Add(name="res{}{}".format(stage_char, block_char))([y, shortcut]) + y = keras.layers.Add(name="res{}{}".format(stage_char, block_char))([y, shortcut]) - y = tensorflow.keras.layers.Activation("relu", name="res{}{}_relu".format(stage_char, block_char))(y) + y = keras.layers.Activation("relu", name="res{}{}_relu".format(stage_char, block_char))(y) return y @@ -136,7 +136,7 @@ def bottleneck_2d( else: stride = 2 - if tensorflow.keras.backend.image_data_format() == "channels_last": + if keras.backend.image_data_format() == "channels_last": axis = 3 else: axis = 1 @@ -149,34 +149,34 @@ def bottleneck_2d( stage_char = str(stage + 2) def f(x): - y = tensorflow.keras.layers.Conv2D(filters, (1, 1), strides=stride, use_bias=False, name="res{}{}_branch2a".format(stage_char, block_char), **parameters)(x) + y = keras.layers.Conv2D(filters, (1, 1), strides=stride, use_bias=False, name="res{}{}_branch2a".format(stage_char, block_char), **parameters)(x) y = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch2a".format(stage_char, block_char))(y) - y = tensorflow.keras.layers.Activation("relu", name="res{}{}_branch2a_relu".format(stage_char, block_char))(y) + y = keras.layers.Activation("relu", name="res{}{}_branch2a_relu".format(stage_char, block_char))(y) - y = tensorflow.keras.layers.ZeroPadding2D(padding=1, name="padding{}{}_branch2b".format(stage_char, block_char))(y) + y = keras.layers.ZeroPadding2D(padding=1, 
name="padding{}{}_branch2b".format(stage_char, block_char))(y) - y = tensorflow.keras.layers.Conv2D(filters, kernel_size, use_bias=False, name="res{}{}_branch2b".format(stage_char, block_char), **parameters)(y) + y = keras.layers.Conv2D(filters, kernel_size, use_bias=False, name="res{}{}_branch2b".format(stage_char, block_char), **parameters)(y) y = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch2b".format(stage_char, block_char))(y) - y = tensorflow.keras.layers.Activation("relu", name="res{}{}_branch2b_relu".format(stage_char, block_char))(y) + y = keras.layers.Activation("relu", name="res{}{}_branch2b_relu".format(stage_char, block_char))(y) - y = tensorflow.keras.layers.Conv2D(filters * 4, (1, 1), use_bias=False, name="res{}{}_branch2c".format(stage_char, block_char), **parameters)(y) + y = keras.layers.Conv2D(filters * 4, (1, 1), use_bias=False, name="res{}{}_branch2c".format(stage_char, block_char), **parameters)(y) y = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch2c".format(stage_char, block_char))(y) if block == 0: - shortcut = tensorflow.keras.layers.Conv2D(filters * 4, (1, 1), strides=stride, use_bias=False, name="res{}{}_branch1".format(stage_char, block_char), **parameters)(x) + shortcut = keras.layers.Conv2D(filters * 4, (1, 1), strides=stride, use_bias=False, name="res{}{}_branch1".format(stage_char, block_char), **parameters)(x) shortcut = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch1".format(stage_char, block_char))(shortcut) else: shortcut = x - y = tensorflow.keras.layers.Add(name="res{}{}".format(stage_char, block_char))([y, shortcut]) + y = keras.layers.Add(name="res{}{}".format(stage_char, block_char))([y, shortcut]) - y = tensorflow.keras.layers.Activation("relu", name="res{}{}_relu".format(stage_char, block_char))(y) + y = keras.layers.Activation("relu", 
name="res{}{}_relu".format(stage_char, block_char))(y) return y diff --git a/keras_resnet/blocks/_3d.py b/keras_resnet/blocks/_3d.py index 0255131..ba11f6e 100644 --- a/keras_resnet/blocks/_3d.py +++ b/keras_resnet/blocks/_3d.py @@ -7,8 +7,8 @@ This module implements a number of popular three-dimensional residual blocks. """ -import tensorflow.keras.layers -import tensorflow.keras.regularizers +import keras.layers +import keras.regularizers import keras_resnet.layers @@ -55,7 +55,7 @@ def basic_3d( else: stride = 2 - if tensorflow.keras.backend.image_data_format() == "channels_last": + if keras.backend.image_data_format() == "channels_last": axis = 3 else: axis = 1 @@ -68,30 +68,30 @@ def basic_3d( stage_char = str(stage + 2) def f(x): - y = tensorflow.keras.layers.ZeroPadding3D(padding=1, name="padding{}{}_branch2a".format(stage_char, block_char))(x) + y = keras.layers.ZeroPadding3D(padding=1, name="padding{}{}_branch2a".format(stage_char, block_char))(x) - y = tensorflow.keras.layers.Conv3D(filters, kernel_size, strides=stride, use_bias=False, name="res{}{}_branch2a".format(stage_char, block_char), **parameters)(y) + y = keras.layers.Conv3D(filters, kernel_size, strides=stride, use_bias=False, name="res{}{}_branch2a".format(stage_char, block_char), **parameters)(y) y = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch2a".format(stage_char, block_char))(y) - y = tensorflow.keras.layers.Activation("relu", name="res{}{}_branch2a_relu".format(stage_char, block_char))(y) + y = keras.layers.Activation("relu", name="res{}{}_branch2a_relu".format(stage_char, block_char))(y) - y = tensorflow.keras.layers.ZeroPadding3D(padding=1, name="padding{}{}_branch2b".format(stage_char, block_char))(y) + y = keras.layers.ZeroPadding3D(padding=1, name="padding{}{}_branch2b".format(stage_char, block_char))(y) - y = tensorflow.keras.layers.Conv3D(filters, kernel_size, use_bias=False, name="res{}{}_branch2b".format(stage_char, 
block_char), **parameters)(y) + y = keras.layers.Conv3D(filters, kernel_size, use_bias=False, name="res{}{}_branch2b".format(stage_char, block_char), **parameters)(y) y = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch2b".format(stage_char, block_char))(y) if block == 0: - shortcut = tensorflow.keras.layers.Conv3D(filters, (1, 1), strides=stride, use_bias=False, name="res{}{}_branch1".format(stage_char, block_char), **parameters)(x) + shortcut = keras.layers.Conv3D(filters, (1, 1), strides=stride, use_bias=False, name="res{}{}_branch1".format(stage_char, block_char), **parameters)(x) shortcut = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch1".format(stage_char, block_char))(shortcut) else: shortcut = x - y = tensorflow.keras.layers.Add(name="res{}{}".format(stage_char, block_char))([y, shortcut]) + y = keras.layers.Add(name="res{}{}".format(stage_char, block_char))([y, shortcut]) - y = tensorflow.keras.layers.Activation("relu", name="res{}{}_relu".format(stage_char, block_char))(y) + y = keras.layers.Activation("relu", name="res{}{}_relu".format(stage_char, block_char))(y) return y @@ -136,7 +136,7 @@ def bottleneck_3d( else: stride = 2 - if tensorflow.keras.backend.image_data_format() == "channels_last": + if keras.backend.image_data_format() == "channels_last": axis = 3 else: axis = 1 @@ -149,34 +149,34 @@ def bottleneck_3d( stage_char = str(stage + 2) def f(x): - y = tensorflow.keras.layers.Conv3D(filters, (1, 1), strides=stride, use_bias=False, name="res{}{}_branch2a".format(stage_char, block_char), **parameters)(x) + y = keras.layers.Conv3D(filters, (1, 1), strides=stride, use_bias=False, name="res{}{}_branch2a".format(stage_char, block_char), **parameters)(x) y = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch2a".format(stage_char, block_char))(y) - y = tensorflow.keras.layers.Activation("relu", 
name="res{}{}_branch2a_relu".format(stage_char, block_char))(y) + y = keras.layers.Activation("relu", name="res{}{}_branch2a_relu".format(stage_char, block_char))(y) - y = tensorflow.keras.layers.ZeroPadding3D(padding=1, name="padding{}{}_branch2b".format(stage_char, block_char))(y) + y = keras.layers.ZeroPadding3D(padding=1, name="padding{}{}_branch2b".format(stage_char, block_char))(y) - y = tensorflow.keras.layers.Conv3D(filters, kernel_size, use_bias=False, name="res{}{}_branch2b".format(stage_char, block_char), **parameters)(y) + y = keras.layers.Conv3D(filters, kernel_size, use_bias=False, name="res{}{}_branch2b".format(stage_char, block_char), **parameters)(y) y = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch2b".format(stage_char, block_char))(y) - y = tensorflow.keras.layers.Activation("relu", name="res{}{}_branch2b_relu".format(stage_char, block_char))(y) + y = keras.layers.Activation("relu", name="res{}{}_branch2b_relu".format(stage_char, block_char))(y) - y = tensorflow.keras.layers.Conv3D(filters * 4, (1, 1), use_bias=False, name="res{}{}_branch2c".format(stage_char, block_char), **parameters)(y) + y = keras.layers.Conv3D(filters * 4, (1, 1), use_bias=False, name="res{}{}_branch2c".format(stage_char, block_char), **parameters)(y) y = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch2c".format(stage_char, block_char))(y) if block == 0: - shortcut = tensorflow.keras.layers.Conv3D(filters * 4, (1, 1), strides=stride, use_bias=False, name="res{}{}_branch1".format(stage_char, block_char), **parameters)(x) + shortcut = keras.layers.Conv3D(filters * 4, (1, 1), strides=stride, use_bias=False, name="res{}{}_branch1".format(stage_char, block_char), **parameters)(x) shortcut = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch1".format(stage_char, block_char))(shortcut) else: shortcut = x - y = 
tensorflow.keras.layers.Add(name="res{}{}".format(stage_char, block_char))([y, shortcut]) + y = keras.layers.Add(name="res{}{}".format(stage_char, block_char))([y, shortcut]) - y = tensorflow.keras.layers.Activation("relu", name="res{}{}_relu".format(stage_char, block_char))(y) + y = keras.layers.Activation("relu", name="res{}{}_relu".format(stage_char, block_char))(y) return y diff --git a/keras_resnet/blocks/_time_distributed_2d.py b/keras_resnet/blocks/_time_distributed_2d.py index dc1a2fb..31f5bcd 100644 --- a/keras_resnet/blocks/_time_distributed_2d.py +++ b/keras_resnet/blocks/_time_distributed_2d.py @@ -7,8 +7,8 @@ This module implements a number of popular time distributed two-dimensional residual blocks. """ -import tensorflow.keras.layers -import tensorflow.keras.regularizers +import keras.layers +import keras.regularizers import keras_resnet.layers @@ -57,7 +57,7 @@ def time_distributed_basic_2d( else: stride = 2 - if tensorflow.keras.backend.image_data_format() == "channels_last": + if keras.backend.image_data_format() == "channels_last": axis = 3 else: axis = 1 @@ -70,30 +70,30 @@ def time_distributed_basic_2d( stage_char = str(stage + 2) def f(x): - y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.ZeroPadding2D(padding=1), name="padding{}{}_branch2a".format(stage_char, block_char))(x) + y = keras.layers.TimeDistributed(keras.layers.ZeroPadding2D(padding=1), name="padding{}{}_branch2a".format(stage_char, block_char))(x) - y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Conv2D(filters, kernel_size, strides=stride, use_bias=False, **parameters), name="res{}{}_branch2a".format(stage_char, block_char))(y) + y = keras.layers.TimeDistributed(keras.layers.Conv2D(filters, kernel_size, strides=stride, use_bias=False, **parameters), name="res{}{}_branch2a".format(stage_char, block_char))(y) - y = tensorflow.keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), 
name="bn{}{}_branch2a".format(stage_char, block_char))(y) + y = keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch2a".format(stage_char, block_char))(y) - y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Activation("relu"), name="res{}{}_branch2a_relu".format(stage_char, block_char))(y) + y = keras.layers.TimeDistributed(keras.layers.Activation("relu"), name="res{}{}_branch2a_relu".format(stage_char, block_char))(y) - y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.ZeroPadding2D(padding=1), name="padding{}{}_branch2b".format(stage_char, block_char))(y) + y = keras.layers.TimeDistributed(keras.layers.ZeroPadding2D(padding=1), name="padding{}{}_branch2b".format(stage_char, block_char))(y) - y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Conv2D(filters, kernel_size, use_bias=False, **parameters), name="res{}{}_branch2b".format(stage_char, block_char))(y) + y = keras.layers.TimeDistributed(keras.layers.Conv2D(filters, kernel_size, use_bias=False, **parameters), name="res{}{}_branch2b".format(stage_char, block_char))(y) - y = tensorflow.keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch2b".format(stage_char, block_char))(y) + y = keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch2b".format(stage_char, block_char))(y) if block == 0: - shortcut = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Conv2D(filters, (1, 1), strides=stride, use_bias=False, **parameters), name="res{}{}_branch1".format(stage_char, block_char))(x) + shortcut = keras.layers.TimeDistributed(keras.layers.Conv2D(filters, (1, 1), strides=stride, use_bias=False, **parameters), name="res{}{}_branch1".format(stage_char, block_char))(x) - shortcut = 
tensorflow.keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch1".format(stage_char, block_char))(shortcut) + shortcut = keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch1".format(stage_char, block_char))(shortcut) else: shortcut = x - y = tensorflow.keras.layers.Add(name="res{}{}".format(stage_char, block_char))([y, shortcut]) + y = keras.layers.Add(name="res{}{}".format(stage_char, block_char))([y, shortcut]) - y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Activation("relu"), name="res{}{}_relu".format(stage_char, block_char))(y) + y = keras.layers.TimeDistributed(keras.layers.Activation("relu"), name="res{}{}_relu".format(stage_char, block_char))(y) return y @@ -140,7 +140,7 @@ def time_distributed_bottleneck_2d( else: stride = 2 - if tensorflow.keras.backend.image_data_format() == "channels_last": + if keras.backend.image_data_format() == "channels_last": axis = 3 else: axis = 1 @@ -153,34 +153,34 @@ def time_distributed_bottleneck_2d( stage_char = str(stage + 2) def f(x): - y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Conv2D(filters, (1, 1), strides=stride, use_bias=False, **parameters), name="res{}{}_branch2a".format(stage_char, block_char))(x) + y = keras.layers.TimeDistributed(keras.layers.Conv2D(filters, (1, 1), strides=stride, use_bias=False, **parameters), name="res{}{}_branch2a".format(stage_char, block_char))(x) - y = tensorflow.keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch2a".format(stage_char, block_char))(y) + y = keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch2a".format(stage_char, block_char))(y) - y = 
tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Activation("relu"), name="res{}{}_branch2a_relu".format(stage_char, block_char))(y) + y = keras.layers.TimeDistributed(keras.layers.Activation("relu"), name="res{}{}_branch2a_relu".format(stage_char, block_char))(y) - y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.ZeroPadding2D(padding=1), name="padding{}{}_branch2b".format(stage_char, block_char))(y) + y = keras.layers.TimeDistributed(keras.layers.ZeroPadding2D(padding=1), name="padding{}{}_branch2b".format(stage_char, block_char))(y) - y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Conv2D(filters, kernel_size, use_bias=False, **parameters), name="res{}{}_branch2b".format(stage_char, block_char))(y) + y = keras.layers.TimeDistributed(keras.layers.Conv2D(filters, kernel_size, use_bias=False, **parameters), name="res{}{}_branch2b".format(stage_char, block_char))(y) - y = tensorflow.keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch2b".format(stage_char, block_char))(y) + y = keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch2b".format(stage_char, block_char))(y) - y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Activation("relu"), name="res{}{}_branch2b_relu".format(stage_char, block_char))(y) + y = keras.layers.TimeDistributed(keras.layers.Activation("relu"), name="res{}{}_branch2b_relu".format(stage_char, block_char))(y) - y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Conv2D(filters * 4, (1, 1), use_bias=False, **parameters), name="res{}{}_branch2c".format(stage_char, block_char))(y) + y = keras.layers.TimeDistributed(keras.layers.Conv2D(filters * 4, (1, 1), use_bias=False, **parameters), name="res{}{}_branch2c".format(stage_char, block_char))(y) - y = 
tensorflow.keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch2c".format(stage_char, block_char))(y) + y = keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch2c".format(stage_char, block_char))(y) if block == 0: - shortcut = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Conv2D(filters * 4, (1, 1), strides=stride, use_bias=False, **parameters), name="res{}{}_branch1".format(stage_char, block_char))(x) + shortcut = keras.layers.TimeDistributed(keras.layers.Conv2D(filters * 4, (1, 1), strides=stride, use_bias=False, **parameters), name="res{}{}_branch1".format(stage_char, block_char))(x) - shortcut = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch1".format(stage_char, block_char))(shortcut) + shortcut = keras.layers.TimeDistributed(keras.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch1".format(stage_char, block_char))(shortcut) else: shortcut = x - y = tensorflow.keras.layers.Add(name="res{}{}".format(stage_char, block_char))([y, shortcut]) + y = keras.layers.Add(name="res{}{}".format(stage_char, block_char))([y, shortcut]) - y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Activation("relu"), name="res{}{}_relu".format(stage_char, block_char))(y) + y = keras.layers.TimeDistributed(keras.layers.Activation("relu"), name="res{}{}_relu".format(stage_char, block_char))(y) return y diff --git a/keras_resnet/classifiers/_2d.py b/keras_resnet/classifiers/_2d.py index 46d2a75..b9ac7a3 100644 --- a/keras_resnet/classifiers/_2d.py +++ b/keras_resnet/classifiers/_2d.py @@ -7,19 +7,19 @@ This module implements popular residual two-dimensional classifiers. 
""" -import tensorflow.keras.backend -import tensorflow.keras.layers -import tensorflow.keras.models -import tensorflow.keras.regularizers +import keras.backend +import keras.layers +import keras.models +import keras.regularizers import keras_resnet.models -class ResNet18(tensorflow.keras.models.Model): +class ResNet18(keras.models.Model): """ A :class:`ResNet18 ` object. - :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) Usage: @@ -27,7 +27,7 @@ class ResNet18(tensorflow.keras.models.Model): >>> shape, classes = (224, 224, 3), 1000 - >>> x = tensorflow.keras.layers.Input(shape) + >>> x = keras.layers.Input(shape) >>> model = keras_resnet.classifiers.ResNet18(x) @@ -36,18 +36,18 @@ class ResNet18(tensorflow.keras.models.Model): def __init__(self, inputs, classes): outputs = keras_resnet.models.ResNet18(inputs) - outputs = tensorflow.keras.layers.Flatten()(outputs.output) + outputs = keras.layers.Flatten()(outputs.output) - outputs = tensorflow.keras.layers.Dense(classes, activation="softmax")(outputs) + outputs = keras.layers.Dense(classes, activation="softmax")(outputs) super(ResNet18, self).__init__(inputs, outputs) -class ResNet34(tensorflow.keras.models.Model): +class ResNet34(keras.models.Model): """ A :class:`ResNet34 ` object. - :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) + :param inputs: input tensor (e.g. 
an instance of `keras.layers.Input`) Usage: @@ -55,7 +55,7 @@ class ResNet34(tensorflow.keras.models.Model): >>> shape, classes = (224, 224, 3), 1000 - >>> x = tensorflow.keras.layers.Input(shape) + >>> x = keras.layers.Input(shape) >>> model = keras_resnet.classifiers.ResNet34(x) @@ -64,18 +64,18 @@ class ResNet34(tensorflow.keras.models.Model): def __init__(self, inputs, classes): outputs = keras_resnet.models.ResNet34(inputs) - outputs = tensorflow.keras.layers.Flatten()(outputs.output) + outputs = keras.layers.Flatten()(outputs.output) - outputs = tensorflow.keras.layers.Dense(classes, activation="softmax")(outputs) + outputs = keras.layers.Dense(classes, activation="softmax")(outputs) super(ResNet34, self).__init__(inputs, outputs) -class ResNet50(tensorflow.keras.models.Model): +class ResNet50(keras.models.Model): """ A :class:`ResNet50 ` object. - :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) Usage: @@ -83,7 +83,7 @@ class ResNet50(tensorflow.keras.models.Model): >>> shape, classes = (224, 224, 3), 1000 - >>> x = tensorflow.keras.layers.Input(shape) + >>> x = keras.layers.Input(shape) >>> model = keras_resnet.classifiers.ResNet50(x) @@ -92,18 +92,18 @@ class ResNet50(tensorflow.keras.models.Model): def __init__(self, inputs, classes): outputs = keras_resnet.models.ResNet50(inputs) - outputs = tensorflow.keras.layers.Flatten()(outputs.output) + outputs = keras.layers.Flatten()(outputs.output) - outputs = tensorflow.keras.layers.Dense(classes, activation="softmax")(outputs) + outputs = keras.layers.Dense(classes, activation="softmax")(outputs) super(ResNet50, self).__init__(inputs, outputs) -class ResNet101(tensorflow.keras.models.Model): +class ResNet101(keras.models.Model): """ A :class:`ResNet101 ` object. - :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) + :param inputs: input tensor (e.g. 
an instance of `keras.layers.Input`) Usage: @@ -111,7 +111,7 @@ class ResNet101(tensorflow.keras.models.Model): >>> shape, classes = (224, 224, 3), 1000 - >>> x = tensorflow.keras.layers.Input(shape) + >>> x = keras.layers.Input(shape) >>> model = keras_resnet.classifiers.ResNet101(x) @@ -120,18 +120,18 @@ class ResNet101(tensorflow.keras.models.Model): def __init__(self, inputs, classes): outputs = keras_resnet.models.ResNet101(inputs) - outputs = tensorflow.keras.layers.Flatten()(outputs.output) + outputs = keras.layers.Flatten()(outputs.output) - outputs = tensorflow.keras.layers.Dense(classes, activation="softmax")(outputs) + outputs = keras.layers.Dense(classes, activation="softmax")(outputs) super(ResNet101, self).__init__(inputs, outputs) -class ResNet152(tensorflow.keras.models.Model): +class ResNet152(keras.models.Model): """ A :class:`ResNet152 ` object. - :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) Usage: @@ -139,7 +139,7 @@ class ResNet152(tensorflow.keras.models.Model): >>> shape, classes = (224, 224, 3), 1000 - >>> x = tensorflow.keras.layers.Input(shape) + >>> x = keras.layers.Input(shape) >>> model = keras_resnet.classifiers.ResNet152(x) @@ -149,18 +149,18 @@ class ResNet152(tensorflow.keras.models.Model): def __init__(self, inputs, classes): outputs = keras_resnet.models.ResNet152(inputs) - outputs = tensorflow.keras.layers.Flatten()(outputs.output) + outputs = keras.layers.Flatten()(outputs.output) - outputs = tensorflow.keras.layers.Dense(classes, activation="softmax")(outputs) + outputs = keras.layers.Dense(classes, activation="softmax")(outputs) super(ResNet152, self).__init__(inputs, outputs) -class ResNet200(tensorflow.keras.models.Model): +class ResNet200(keras.models.Model): """ A :class:`ResNet200 ` object. - :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) + :param inputs: input tensor (e.g. 
an instance of `keras.layers.Input`) Usage: @@ -168,7 +168,7 @@ class ResNet200(tensorflow.keras.models.Model): >>> shape, classes = (224, 224, 3), 1000 - >>> x = tensorflow.keras.layers.Input(shape) + >>> x = keras.layers.Input(shape) >>> model = keras_resnet.classifiers.ResNet200(x) @@ -177,8 +177,8 @@ class ResNet200(tensorflow.keras.models.Model): def __init__(self, inputs, classes): outputs = keras_resnet.models.ResNet200(inputs) - outputs = tensorflow.keras.layers.Flatten()(outputs.output) + outputs = keras.layers.Flatten()(outputs.output) - outputs = tensorflow.keras.layers.Dense(classes, activation="softmax")(outputs) + outputs = keras.layers.Dense(classes, activation="softmax")(outputs) super(ResNet200, self).__init__(inputs, outputs) diff --git a/keras_resnet/layers/_batch_normalization.py b/keras_resnet/layers/_batch_normalization.py index b8367b4..5cce406 100644 --- a/keras_resnet/layers/_batch_normalization.py +++ b/keras_resnet/layers/_batch_normalization.py @@ -1,7 +1,7 @@ -import tensorflow.keras -class BatchNormalization(tensorflow.keras.layers.BatchNormalization): +import keras +class BatchNormalization(keras.layers.BatchNormalization): """ - Identical to tensorflow.keras.layers.BatchNormalization, but adds the option to freeze parameters. + Identical to keras.layers.BatchNormalization, but adds the option to freeze parameters. 
""" def __init__(self, freeze, *args, **kwargs): self.freeze = freeze diff --git a/keras_resnet/metrics.py b/keras_resnet/metrics.py index 1b1a343..f642619 100644 --- a/keras_resnet/metrics.py +++ b/keras_resnet/metrics.py @@ -1,9 +1,9 @@ -import tensorflow.keras.metrics +import keras.metrics def top_1_categorical_error(y_true, y_pred): - return 1.0 - tensorflow.keras.metrics.top_k_categorical_accuracy(y_true, y_pred, 1) + return 1.0 - keras.metrics.top_k_categorical_accuracy(y_true, y_pred, 1) def top_5_categorical_error(y_true, y_pred): - return 1.0 - tensorflow.keras.metrics.top_k_categorical_accuracy(y_true, y_pred, 5) + return 1.0 - keras.metrics.top_k_categorical_accuracy(y_true, y_pred, 5) diff --git a/keras_resnet/models/_1d.py b/keras_resnet/models/_1d.py index 7d3f108..479e18b 100644 --- a/keras_resnet/models/_1d.py +++ b/keras_resnet/models/_1d.py @@ -7,20 +7,20 @@ This module implements popular one-dimensional residual models. """ -import tensorflow.keras.backend -import tensorflow.keras.layers -import tensorflow.keras.models -import tensorflow.keras.regularizers +import keras.backend +import keras.layers +import keras.models +import keras.regularizers import keras_resnet.blocks import keras_resnet.layers -class ResNet1D(tensorflow.keras.Model): +class ResNet1D(keras.Model): """ - Constructs a `tensorflow.keras.models.Model` object using the given block count. + Constructs a `keras.models.Model` object using the given block count. - :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) + :param inputs: input tensor (e.g. 
an instance of `keras.layers.Input`) :param blocks: the network’s residual architecture @@ -43,7 +43,7 @@ class ResNet1D(tensorflow.keras.Model): >>> shape, classes = (224, 224, 3), 1000 - >>> x = tensorflow.keras.layers.Input(shape) + >>> x = keras.layers.Input(shape) >>> blocks = [2, 2, 2, 2] @@ -68,7 +68,7 @@ def __init__( self.classes = classes self.include_top = include_top - if tensorflow.keras.backend.image_data_format() == "channels_last": + if keras.backend.image_data_format() == "channels_last": axis = -1 else: axis = 1 @@ -76,11 +76,11 @@ def __init__( if numerical_names is None: numerical_names = [True] * len(blocks) - self.zeropad1 = tensorflow.keras.layers.ZeroPadding1D(padding=3, name="padding_conv1") - self.conv1 = tensorflow.keras.layers.Conv1D(64, 7, strides=2, use_bias=False, name="conv1") + self.zeropad1 = keras.layers.ZeroPadding1D(padding=3, name="padding_conv1") + self.conv1 = keras.layers.Conv1D(64, 7, strides=2, use_bias=False, name="conv1") self.rnbn1 = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn_conv1") - self.relu1 = tensorflow.keras.layers.Activation("relu", name="conv1_relu") - self.maxpool1 = tensorflow.keras.layers.MaxPooling1D(3, strides=2, padding="same", name="pool1") + self.relu1 = keras.layers.Activation("relu", name="conv1_relu") + self.maxpool1 = keras.layers.MaxPooling1D(3, strides=2, padding="same", name="pool1") features = 64 self.lyrs = [] @@ -100,8 +100,8 @@ def __init__( self.layers.append (lyr) features *= 2 - self.glopoollast = tensorflow.keras.layers.GlobalAveragePooling1D(name="pool5") - self.fclast = tensorflow.keras.layers.Dense(classes, activation="softmax", name="fc1000") + self.glopoollast = keras.layers.GlobalAveragePooling1D(name="pool5") + self.fclast = keras.layers.Dense(classes, activation="softmax", name="fc1000") def call(self, inputs): @@ -132,9 +132,9 @@ def call(self, inputs): class ResNet1D18(ResNet1D): """ - Constructs a `tensorflow.keras.models.Model` 
according to the ResNet18 specifications. + Constructs a `keras.models.Model` according to the ResNet18 specifications. - :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) :param blocks: the network’s residual architecture @@ -152,7 +152,7 @@ class ResNet1D18(ResNet1D): >>> shape, classes = (224, 3), 1000 - >>> x = tensorflow.keras.layers.Input(shape) + >>> x = keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet18(x, classes=classes) @@ -179,8 +179,8 @@ def call (self, inputs): class ResNet1D34(ResNet1D): """ - Constructs a `tensorflow.keras.models.Model` according to the ResNet34 specifications. - :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) + Constructs a `keras.models.Model` according to the ResNet34 specifications. + :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) :param blocks: the network’s residual architecture :param include_top: if true, includes classification layers :param classes: number of classes to classify (include_top must be true) @@ -189,7 +189,7 @@ class ResNet1D34(ResNet1D): Usage: >>> import keras_resnet.models >>> shape, classes = (224, 224, 3), 1000 - >>> x = tensorflow.keras.layers.Input(shape) + >>> x = keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet34(x, classes=classes) >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) """ @@ -213,8 +213,8 @@ def call (self, inputs): class ResNet1D50(ResNet1D): """ - Constructs a `tensorflow.keras.models.Model` according to the ResNet50 specifications. - :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) + Constructs a `keras.models.Model` according to the ResNet50 specifications. + :param inputs: input tensor (e.g. 
an instance of `keras.layers.Input`) :param blocks: the network’s residual architecture :param include_top: if true, includes classification layers :param classes: number of classes to classify (include_top must be true) @@ -223,7 +223,7 @@ class ResNet1D50(ResNet1D): Usage: >>> import keras_resnet.models >>> shape, classes = (224, 224, 3), 1000 - >>> x = tensorflow.keras.layers.Input(shape) + >>> x = keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet50(x) >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) """ @@ -250,8 +250,8 @@ def call (self, inputs): class ResNet1D101(ResNet1D): """ - Constructs a `tensorflow.keras.models.Model` according to the ResNet101 specifications. - :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) + Constructs a `keras.models.Model` according to the ResNet101 specifications. + :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) :param blocks: the network’s residual architecture :param include_top: if true, includes classification layers :param classes: number of classes to classify (include_top must be true) @@ -260,7 +260,7 @@ class ResNet1D101(ResNet1D): Usage: >>> import keras_resnet.models >>> shape, classes = (224, 224, 3), 1000 - >>> x = tensorflow.keras.layers.Input(shape) + >>> x = keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet101(x, classes=classes) >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) """ @@ -287,8 +287,8 @@ def call (self, inputs): class ResNet1D152(ResNet1D): """ - Constructs a `tensorflow.keras.models.Model` according to the ResNet152 specifications. - :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) + Constructs a `keras.models.Model` according to the ResNet152 specifications. + :param inputs: input tensor (e.g. 
an instance of `keras.layers.Input`) :param blocks: the network’s residual architecture :param include_top: if true, includes classification layers :param classes: number of classes to classify (include_top must be true) @@ -297,7 +297,7 @@ class ResNet1D152(ResNet1D): Usage: >>> import keras_resnet.models >>> shape, classes = (224, 224, 3), 1000 - >>> x = tensorflow.keras.layers.Input(shape) + >>> x = keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet152(x, classes=classes) >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) """ @@ -323,8 +323,8 @@ def call (self, inputs): class ResNet1D200(ResNet1D): """ - Constructs a `tensorflow.keras.models.Model` according to the ResNet200 specifications. - :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) + Constructs a `keras.models.Model` according to the ResNet200 specifications. + :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) :param blocks: the network’s residual architecture :param include_top: if true, includes classification layers :param classes: number of classes to classify (include_top must be true) @@ -333,7 +333,7 @@ class ResNet1D200(ResNet1D): Usage: >>> import keras_resnet.models >>> shape, classes = (224, 224, 3), 1000 - >>> x = tensorflow.keras.layers.Input(shape) + >>> x = keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet200(x, classes=classes) >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) """ diff --git a/keras_resnet/models/_2d.py b/keras_resnet/models/_2d.py index e9e490b..8d8f874 100644 --- a/keras_resnet/models/_2d.py +++ b/keras_resnet/models/_2d.py @@ -7,20 +7,20 @@ This module implements popular two-dimensional residual models. 
""" -import tensorflow.keras.backend -import tensorflow.keras.layers -import tensorflow.keras.models -import tensorflow.keras.regularizers +import keras.backend +import keras.layers +import keras.models +import keras.regularizers import keras_resnet.blocks import keras_resnet.layers -class ResNet2D(tensorflow.keras.Model): +class ResNet2D(keras.Model): """ - Constructs a `tensorflow.keras.models.Model` object using the given block count. + Constructs a `keras.models.Model` object using the given block count. - :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) :param blocks: the network’s residual architecture @@ -43,7 +43,7 @@ class ResNet2D(tensorflow.keras.Model): >>> shape, classes = (224, 224, 3), 1000 - >>> x = tensorflow.keras.layers.Input(shape) + >>> x = keras.layers.Input(shape) >>> blocks = [2, 2, 2, 2] @@ -65,7 +65,7 @@ def __init__( *args, **kwargs ): - if tensorflow.keras.backend.image_data_format() == "channels_last": + if keras.backend.image_data_format() == "channels_last": axis = 3 else: axis = 1 @@ -73,10 +73,10 @@ def __init__( if numerical_names is None: numerical_names = [True] * len(blocks) - x = tensorflow.keras.layers.Conv2D(64, (7, 7), strides=(2, 2), use_bias=False, name="conv1", padding="same")(inputs) + x = keras.layers.Conv2D(64, (7, 7), strides=(2, 2), use_bias=False, name="conv1", padding="same")(inputs) x = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn_conv1")(x) - x = tensorflow.keras.layers.Activation("relu", name="conv1_relu")(x) - x = tensorflow.keras.layers.MaxPooling2D((3, 3), strides=(2, 2), padding="same", name="pool1")(x) + x = keras.layers.Activation("relu", name="conv1_relu")(x) + x = keras.layers.MaxPooling2D((3, 3), strides=(2, 2), padding="same", name="pool1")(x) features = 64 @@ -99,8 +99,8 @@ def __init__( if include_top: assert classes > 0 - x = 
tensorflow.keras.layers.GlobalAveragePooling2D(name="pool5")(x) - x = tensorflow.keras.layers.Dense(classes, activation="softmax", name="fc1000")(x) + x = keras.layers.GlobalAveragePooling2D(name="pool5")(x) + x = keras.layers.Dense(classes, activation="softmax", name="fc1000")(x) super(ResNet2D, self).__init__(inputs=inputs, outputs=x, *args, **kwargs) else: @@ -110,9 +110,9 @@ def __init__( class ResNet2D18(ResNet2D): """ - Constructs a `tensorflow.keras.models.Model` according to the ResNet18 specifications. + Constructs a `keras.models.Model` according to the ResNet18 specifications. - :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) :param blocks: the network’s residual architecture @@ -130,7 +130,7 @@ class ResNet2D18(ResNet2D): >>> shape, classes = (224, 224, 3), 1000 - >>> x = tensorflow.keras.layers.Input(shape) + >>> x = keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet18(x, classes=classes) @@ -154,9 +154,9 @@ def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_b class ResNet2D34(ResNet2D): """ - Constructs a `tensorflow.keras.models.Model` according to the ResNet34 specifications. + Constructs a `keras.models.Model` according to the ResNet34 specifications. - :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) :param blocks: the network’s residual architecture @@ -174,7 +174,7 @@ class ResNet2D34(ResNet2D): >>> shape, classes = (224, 224, 3), 1000 - >>> x = tensorflow.keras.layers.Input(shape) + >>> x = keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet34(x, classes=classes) @@ -198,9 +198,9 @@ def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_b class ResNet2D50(ResNet2D): """ - Constructs a `tensorflow.keras.models.Model` according to the ResNet50 specifications. 
+ Constructs a `keras.models.Model` according to the ResNet50 specifications. - :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) :param blocks: the network’s residual architecture @@ -218,7 +218,7 @@ class ResNet2D50(ResNet2D): >>> shape, classes = (224, 224, 3), 1000 - >>> x = tensorflow.keras.layers.Input(shape) + >>> x = keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet50(x) @@ -245,9 +245,9 @@ def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_b class ResNet2D101(ResNet2D): """ - Constructs a `tensorflow.keras.models.Model` according to the ResNet101 specifications. + Constructs a `keras.models.Model` according to the ResNet101 specifications. - :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) :param blocks: the network’s residual architecture @@ -265,7 +265,7 @@ class ResNet2D101(ResNet2D): >>> shape, classes = (224, 224, 3), 1000 - >>> x = tensorflow.keras.layers.Input(shape) + >>> x = keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet101(x, classes=classes) @@ -292,9 +292,9 @@ def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_b class ResNet2D152(ResNet2D): """ - Constructs a `tensorflow.keras.models.Model` according to the ResNet152 specifications. + Constructs a `keras.models.Model` according to the ResNet152 specifications. - :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) + :param inputs: input tensor (e.g. 
an instance of `keras.layers.Input`) :param blocks: the network’s residual architecture @@ -312,7 +312,7 @@ class ResNet2D152(ResNet2D): >>> shape, classes = (224, 224, 3), 1000 - >>> x = tensorflow.keras.layers.Input(shape) + >>> x = keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet152(x, classes=classes) @@ -339,9 +339,9 @@ def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_b class ResNet2D200(ResNet2D): """ - Constructs a `tensorflow.keras.models.Model` according to the ResNet200 specifications. + Constructs a `keras.models.Model` according to the ResNet200 specifications. - :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) :param blocks: the network’s residual architecture @@ -359,7 +359,7 @@ class ResNet2D200(ResNet2D): >>> shape, classes = (224, 224, 3), 1000 - >>> x = tensorflow.keras.layers.Input(shape) + >>> x = keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet200(x, classes=classes) diff --git a/keras_resnet/models/_3d.py b/keras_resnet/models/_3d.py index 0bc95e5..cd98a50 100644 --- a/keras_resnet/models/_3d.py +++ b/keras_resnet/models/_3d.py @@ -7,20 +7,20 @@ This module implements popular three-dimensional residual models. """ -import tensorflow.keras.backend -import tensorflow.keras.layers -import tensorflow.keras.models -import tensorflow.keras.regularizers +import keras.backend +import keras.layers +import keras.models +import keras.regularizers import keras_resnet.blocks import keras_resnet.layers -class ResNet3D(tensorflow.keras.Model): +class ResNet3D(keras.Model): """ - Constructs a `tensorflow.keras.models.Model` object using the given block count. + Constructs a `keras.models.Model` object using the given block count. - :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) + :param inputs: input tensor (e.g. 
an instance of `keras.layers.Input`) :param blocks: the network’s residual architecture @@ -43,7 +43,7 @@ class ResNet3D(tensorflow.keras.Model): >>> shape, classes = (224, 224, 3), 1000 - >>> x = tensorflow.keras.layers.Input(shape) + >>> x = keras.layers.Input(shape) >>> blocks = [2, 2, 2, 2] @@ -65,7 +65,7 @@ def __init__( *args, **kwargs ): - if tensorflow.keras.backend.image_data_format() == "channels_last": + if keras.backend.image_data_format() == "channels_last": axis = 3 else: axis = 1 @@ -73,11 +73,11 @@ def __init__( if numerical_names is None: numerical_names = [True] * len(blocks) - x = tensorflow.keras.layers.ZeroPadding3D(padding=3, name="padding_conv1")(inputs) - x = tensorflow.keras.layers.Conv3D(64, (7, 7), strides=(2, 2), use_bias=False, name="conv1")(x) + x = keras.layers.ZeroPadding3D(padding=3, name="padding_conv1")(inputs) + x = keras.layers.Conv3D(64, (7, 7), strides=(2, 2), use_bias=False, name="conv1")(x) x = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn_conv1")(x) - x = tensorflow.keras.layers.Activation("relu", name="conv1_relu")(x) - x = tensorflow.keras.layers.MaxPooling3D((3, 3), strides=(2, 2), padding="same", name="pool1")(x) + x = keras.layers.Activation("relu", name="conv1_relu")(x) + x = keras.layers.MaxPooling3D((3, 3), strides=(2, 2), padding="same", name="pool1")(x) features = 64 @@ -100,8 +100,8 @@ def __init__( if include_top: assert classes > 0 - x = tensorflow.keras.layers.GlobalAveragePooling3D(name="pool5")(x) - x = tensorflow.keras.layers.Dense(classes, activation="softmax", name="fc1000")(x) + x = keras.layers.GlobalAveragePooling3D(name="pool5")(x) + x = keras.layers.Dense(classes, activation="softmax", name="fc1000")(x) super(ResNet3D, self).__init__(inputs=inputs, outputs=x, *args, **kwargs) else: @@ -111,9 +111,9 @@ def __init__( class ResNet3D18(ResNet3D): """ - Constructs a `tensorflow.keras.models.Model` according to the ResNet18 specifications. 
+ Constructs a `keras.models.Model` according to the ResNet18 specifications. - :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) :param blocks: the network’s residual architecture @@ -131,7 +131,7 @@ class ResNet3D18(ResNet3D): >>> shape, classes = (224, 224, 3), 1000 - >>> x = tensorflow.keras.layers.Input(shape) + >>> x = keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet18(x, classes=classes) @@ -155,9 +155,9 @@ def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_b class ResNet3D34(ResNet3D): """ - Constructs a `tensorflow.keras.models.Model` according to the ResNet34 specifications. + Constructs a `keras.models.Model` according to the ResNet34 specifications. - :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) :param blocks: the network’s residual architecture @@ -175,7 +175,7 @@ class ResNet3D34(ResNet3D): >>> shape, classes = (224, 224, 3), 1000 - >>> x = tensorflow.keras.layers.Input(shape) + >>> x = keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet34(x, classes=classes) @@ -199,9 +199,9 @@ def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_b class ResNet3D50(ResNet3D): """ - Constructs a `tensorflow.keras.models.Model` according to the ResNet50 specifications. + Constructs a `keras.models.Model` according to the ResNet50 specifications. - :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) + :param inputs: input tensor (e.g. 
an instance of `keras.layers.Input`) :param blocks: the network’s residual architecture @@ -219,7 +219,7 @@ class ResNet3D50(ResNet3D): >>> shape, classes = (224, 224, 3), 1000 - >>> x = tensorflow.keras.layers.Input(shape) + >>> x = keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet50(x) @@ -246,9 +246,9 @@ def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_b class ResNet3D101(ResNet3D): """ - Constructs a `tensorflow.keras.models.Model` according to the ResNet101 specifications. + Constructs a `keras.models.Model` according to the ResNet101 specifications. - :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) :param blocks: the network’s residual architecture @@ -266,7 +266,7 @@ class ResNet3D101(ResNet3D): >>> shape, classes = (224, 224, 3), 1000 - >>> x = tensorflow.keras.layers.Input(shape) + >>> x = keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet101(x, classes=classes) @@ -293,9 +293,9 @@ def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_b class ResNet3D152(ResNet3D): """ - Constructs a `tensorflow.keras.models.Model` according to the ResNet152 specifications. + Constructs a `keras.models.Model` according to the ResNet152 specifications. - :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) + :param inputs: input tensor (e.g. 
an instance of `keras.layers.Input`) :param blocks: the network’s residual architecture @@ -313,7 +313,7 @@ class ResNet3D152(ResNet3D): >>> shape, classes = (224, 224, 3), 1000 - >>> x = tensorflow.keras.layers.Input(shape) + >>> x = keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet152(x, classes=classes) @@ -340,9 +340,9 @@ def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_b class ResNet3D200(ResNet3D): """ - Constructs a `tensorflow.keras.models.Model` according to the ResNet200 specifications. + Constructs a `keras.models.Model` according to the ResNet200 specifications. - :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) :param blocks: the network’s residual architecture @@ -360,7 +360,7 @@ class ResNet3D200(ResNet3D): >>> shape, classes = (224, 224, 3), 1000 - >>> x = tensorflow.keras.layers.Input(shape) + >>> x = keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet200(x, classes=classes) diff --git a/keras_resnet/models/_feature_pyramid_2d.py b/keras_resnet/models/_feature_pyramid_2d.py index 2ec67d5..fe3ce1a 100644 --- a/keras_resnet/models/_feature_pyramid_2d.py +++ b/keras_resnet/models/_feature_pyramid_2d.py @@ -7,16 +7,16 @@ This module implements popular two-dimensional feature pyramid networks (FPNs). 
""" -import tensorflow.keras.backend -import tensorflow.keras.layers -import tensorflow.keras.models -import tensorflow.keras.regularizers +import keras.backend +import keras.layers +import keras.models +import keras.regularizers import keras_resnet.blocks import keras_resnet.layers -class FPN2D(tensorflow.keras.Model): +class FPN2D(keras.Model): def __init__( self, inputs, @@ -27,7 +27,7 @@ def __init__( *args, **kwargs ): - if tensorflow.keras.backend.image_data_format() == "channels_last": + if keras.backend.image_data_format() == "channels_last": axis = 3 else: axis = 1 @@ -35,10 +35,10 @@ def __init__( if numerical_names is None: numerical_names = [True] * len(blocks) - x = tensorflow.keras.layers.Conv2D(64, (7, 7), strides=(2, 2), use_bias=False, name="conv1", padding="same")(inputs) + x = keras.layers.Conv2D(64, (7, 7), strides=(2, 2), use_bias=False, name="conv1", padding="same")(inputs) x = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn_conv1")(x) - x = tensorflow.keras.layers.Activation("relu", name="conv1_relu")(x) - x = tensorflow.keras.layers.MaxPooling2D((3, 3), strides=(2, 2), padding="same", name="pool1")(x) + x = keras.layers.Activation("relu", name="conv1_relu")(x) + x = keras.layers.MaxPooling2D((3, 3), strides=(2, 2), padding="same", name="pool1")(x) features = 64 @@ -60,7 +60,7 @@ def __init__( c2, c3, c4, c5 = outputs - pyramid_5 = tensorflow.keras.layers.Conv2D( + pyramid_5 = keras.layers.Conv2D( filters=256, kernel_size=1, strides=1, @@ -68,13 +68,13 @@ def __init__( name="c5_reduced" )(c5) - upsampled_p5 = tensorflow.keras.layers.UpSampling2D( + upsampled_p5 = keras.layers.UpSampling2D( interpolation="bilinear", name="p5_upsampled", size=(2, 2) )(pyramid_5) - pyramid_4 = tensorflow.keras.layers.Conv2D( + pyramid_4 = keras.layers.Conv2D( filters=256, kernel_size=1, strides=1, @@ -82,17 +82,17 @@ def __init__( name="c4_reduced" )(c4) - pyramid_4 = tensorflow.keras.layers.Add( + pyramid_4 = 
keras.layers.Add( name="p4_merged" )([upsampled_p5, pyramid_4]) - upsampled_p4 = tensorflow.keras.layers.UpSampling2D( + upsampled_p4 = keras.layers.UpSampling2D( interpolation="bilinear", name="p4_upsampled", size=(2, 2) )(pyramid_4) - pyramid_4 = tensorflow.keras.layers.Conv2D( + pyramid_4 = keras.layers.Conv2D( filters=256, kernel_size=3, strides=1, @@ -100,7 +100,7 @@ def __init__( name="p4" )(pyramid_4) - pyramid_3 = tensorflow.keras.layers.Conv2D( + pyramid_3 = keras.layers.Conv2D( filters=256, kernel_size=1, strides=1, @@ -108,17 +108,17 @@ def __init__( name="c3_reduced" )(c3) - pyramid_3 = tensorflow.keras.layers.Add( + pyramid_3 = keras.layers.Add( name="p3_merged" )([upsampled_p4, pyramid_3]) - upsampled_p3 = tensorflow.keras.layers.UpSampling2D( + upsampled_p3 = keras.layers.UpSampling2D( interpolation="bilinear", name="p3_upsampled", size=(2, 2) )(pyramid_3) - pyramid_3 = tensorflow.keras.layers.Conv2D( + pyramid_3 = keras.layers.Conv2D( filters=256, kernel_size=3, strides=1, @@ -126,7 +126,7 @@ def __init__( name="p3" )(pyramid_3) - pyramid_2 = tensorflow.keras.layers.Conv2D( + pyramid_2 = keras.layers.Conv2D( filters=256, kernel_size=1, strides=1, @@ -134,11 +134,11 @@ def __init__( name="c2_reduced" )(c2) - pyramid_2 = tensorflow.keras.layers.Add( + pyramid_2 = keras.layers.Add( name="p2_merged" )([upsampled_p3, pyramid_2]) - pyramid_2 = tensorflow.keras.layers.Conv2D( + pyramid_2 = keras.layers.Conv2D( filters=256, kernel_size=3, strides=1, @@ -146,7 +146,7 @@ def __init__( name="p2" )(pyramid_2) - pyramid_6 = tensorflow.keras.layers.MaxPooling2D(strides=2, name="p6")(pyramid_5) + pyramid_6 = keras.layers.MaxPooling2D(strides=2, name="p6")(pyramid_5) outputs = [ pyramid_2, diff --git a/keras_resnet/models/_time_distributed_2d.py b/keras_resnet/models/_time_distributed_2d.py index a952d70..bb4947d 100644 --- a/keras_resnet/models/_time_distributed_2d.py +++ b/keras_resnet/models/_time_distributed_2d.py @@ -7,10 +7,10 @@ This module implements 
popular time distributed two-dimensional residual networks. """ -import tensorflow.keras.backend -import tensorflow.keras.layers -import tensorflow.keras.models -import tensorflow.keras.regularizers +import keras.backend +import keras.layers +import keras.models +import keras.regularizers import keras_resnet.blocks import keras_resnet.layers @@ -18,9 +18,9 @@ def TimeDistributedResNet(inputs, blocks, block, include_top=True, classes=1000, freeze_bn=True, *args, **kwargs): """ - Constructs a time distributed `tensorflow.keras.models.Model` object using the given block count. + Constructs a time distributed `keras.models.Model` object using the given block count. - :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) :param blocks: the network’s residual architecture @@ -41,7 +41,7 @@ def TimeDistributedResNet(inputs, blocks, block, include_top=True, classes=1000, >>> shape, classes = (224, 224, 3), 1000 - >>> x = tensorflow.keras.layers.Input(shape) + >>> x = keras.layers.Input(shape) >>> blocks = [2, 2, 2, 2] @@ -49,24 +49,24 @@ def TimeDistributedResNet(inputs, blocks, block, include_top=True, classes=1000, >>> y = keras_resnet.models.TimeDistributedResNet(x, classes, blocks, blocks) - >>> y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Flatten())(y.output) + >>> y = keras.layers.TimeDistributed(keras.layers.Flatten())(y.output) - >>> y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Dense(classes, activation="softmax"))(y) + >>> y = keras.layers.TimeDistributed(keras.layers.Dense(classes, activation="softmax"))(y) - >>> model = tensorflow.keras.models.Model(x, y) + >>> model = keras.models.Model(x, y) >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) """ - if tensorflow.keras.backend.image_data_format() == "channels_last": + if keras.backend.image_data_format() == "channels_last": axis = 3 else: axis = 1 - x 
= tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.ZeroPadding2D(padding=3), name="padding_conv1")(inputs) - x = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Conv2D(64, (7, 7), strides=(2, 2), use_bias=False), name="conv1")(x) - x = tensorflow.keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn_conv1")(x) - x = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Activation("relu"), name="conv1_relu")(x) - x = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.MaxPooling2D((3, 3), strides=(2, 2), padding="same"), name="pool1")(x) + x = keras.layers.TimeDistributed(keras.layers.ZeroPadding2D(padding=3), name="padding_conv1")(inputs) + x = keras.layers.TimeDistributed(keras.layers.Conv2D(64, (7, 7), strides=(2, 2), use_bias=False), name="conv1")(x) + x = keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn_conv1")(x) + x = keras.layers.TimeDistributed(keras.layers.Activation("relu"), name="conv1_relu")(x) + x = keras.layers.TimeDistributed(keras.layers.MaxPooling2D((3, 3), strides=(2, 2), padding="same"), name="pool1")(x) features = 64 @@ -82,20 +82,20 @@ def TimeDistributedResNet(inputs, blocks, block, include_top=True, classes=1000, if include_top: assert classes > 0 - x = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.GlobalAveragePooling2D(), name="pool5")(x) - x = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Dense(classes, activation="softmax"), name="fc1000")(x) + x = keras.layers.TimeDistributed(keras.layers.GlobalAveragePooling2D(), name="pool5")(x) + x = keras.layers.TimeDistributed(keras.layers.Dense(classes, activation="softmax"), name="fc1000")(x) - return tensorflow.keras.models.Model(inputs=inputs, outputs=x, *args, **kwargs) + return keras.models.Model(inputs=inputs, outputs=x, *args, **kwargs) else: # Else output each stages 
features - return tensorflow.keras.models.Model(inputs=inputs, outputs=outputs, *args, **kwargs) + return keras.models.Model(inputs=inputs, outputs=outputs, *args, **kwargs) def TimeDistributedResNet18(inputs, blocks=None, include_top=True, classes=1000, *args, **kwargs): """ - Constructs a time distributed `tensorflow.keras.models.Model` according to the ResNet18 specifications. + Constructs a time distributed `keras.models.Model` according to the ResNet18 specifications. - :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) :param blocks: the network’s residual architecture @@ -111,15 +111,15 @@ def TimeDistributedResNet18(inputs, blocks=None, include_top=True, classes=1000, >>> shape, classes = (224, 224, 3), 1000 - >>> x = tensorflow.keras.layers.Input(shape) + >>> x = keras.layers.Input(shape) >>> y = keras_resnet.models.TimeDistributedResNet18(x) - >>> y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Flatten())(y.output) + >>> y = keras.layers.TimeDistributed(keras.layers.Flatten())(y.output) - >>> y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Dense(classes, activation="softmax"))(y) + >>> y = keras.layers.TimeDistributed(keras.layers.Dense(classes, activation="softmax"))(y) - >>> model = tensorflow.keras.models.Model(x, y) + >>> model = keras.models.Model(x, y) >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) """ @@ -131,9 +131,9 @@ def TimeDistributedResNet18(inputs, blocks=None, include_top=True, classes=1000, def TimeDistributedResNet34(inputs, blocks=None, include_top=True, classes=1000, *args, **kwargs): """ - Constructs a time distributed `tensorflow.keras.models.Model` according to the ResNet34 specifications. + Constructs a time distributed `keras.models.Model` according to the ResNet34 specifications. - :param inputs: input tensor (e.g. 
an instance of `tensorflow.keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) :param blocks: the network’s residual architecture @@ -149,15 +149,15 @@ def TimeDistributedResNet34(inputs, blocks=None, include_top=True, classes=1000, >>> shape, classes = (224, 224, 3), 1000 - >>> x = tensorflow.keras.layers.Input(shape) + >>> x = keras.layers.Input(shape) >>> y = keras_resnet.models.TimeDistributedResNet34(x) - >>> y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Flatten())(y.output) + >>> y = keras.layers.TimeDistributed(keras.layers.Flatten())(y.output) - >>> y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Dense(classes, activation="softmax"))(y) + >>> y = keras.layers.TimeDistributed(keras.layers.Dense(classes, activation="softmax"))(y) - >>> model = tensorflow.keras.models.Model(x, y) + >>> model = keras.models.Model(x, y) >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) """ @@ -169,9 +169,9 @@ def TimeDistributedResNet34(inputs, blocks=None, include_top=True, classes=1000, def TimeDistributedResNet50(inputs, blocks=None, include_top=True, classes=1000, *args, **kwargs): """ - Constructs a time distributed `tensorflow.keras.models.Model` according to the ResNet50 specifications. + Constructs a time distributed `keras.models.Model` according to the ResNet50 specifications. - :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) + :param inputs: input tensor (e.g. 
an instance of `keras.layers.Input`) :param blocks: the network’s residual architecture @@ -185,15 +185,15 @@ def TimeDistributedResNet50(inputs, blocks=None, include_top=True, classes=1000, >>> shape, classes = (224, 224, 3), 1000 - >>> x = tensorflow.keras.layers.Input(shape) + >>> x = keras.layers.Input(shape) >>> y = keras_resnet.models.TimeDistributedResNet50(x) - >>> y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Flatten())(y.output) + >>> y = keras.layers.TimeDistributed(keras.layers.Flatten())(y.output) - >>> y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Dense(classes, activation="softmax"))(y) + >>> y = keras.layers.TimeDistributed(keras.layers.Dense(classes, activation="softmax"))(y) - >>> model = tensorflow.keras.models.Model(x, y) + >>> model = keras.models.Model(x, y) >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) """ @@ -205,9 +205,9 @@ def TimeDistributedResNet50(inputs, blocks=None, include_top=True, classes=1000, def TimeDistributedResNet101(inputs, blocks=None, include_top=True, classes=1000, *args, **kwargs): """ - Constructs a time distributed `tensorflow.keras.models.Model` according to the ResNet101 specifications. + Constructs a time distributed `keras.models.Model` according to the ResNet101 specifications. - :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) + :param inputs: input tensor (e.g. 
an instance of `keras.layers.Input`) :param blocks: the network’s residual architecture @@ -223,15 +223,15 @@ def TimeDistributedResNet101(inputs, blocks=None, include_top=True, classes=1000 >>> shape, classes = (224, 224, 3), 1000 - >>> x = tensorflow.keras.layers.Input(shape) + >>> x = keras.layers.Input(shape) >>> y = keras_resnet.models.TimeDistributedResNet101(x) - >>> y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Flatten())(y.output) + >>> y = keras.layers.TimeDistributed(keras.layers.Flatten())(y.output) - >>> y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Dense(classes, activation="softmax"))(y) + >>> y = keras.layers.TimeDistributed(keras.layers.Dense(classes, activation="softmax"))(y) - >>> model = tensorflow.keras.models.Model(x, y) + >>> model = keras.models.Model(x, y) >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) """ @@ -243,9 +243,9 @@ def TimeDistributedResNet101(inputs, blocks=None, include_top=True, classes=1000 def TimeDistributedResNet152(inputs, blocks=None, include_top=True, classes=1000, *args, **kwargs): """ - Constructs a time distributed `tensorflow.keras.models.Model` according to the ResNet152 specifications. + Constructs a time distributed `keras.models.Model` according to the ResNet152 specifications. - :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) + :param inputs: input tensor (e.g. 
an instance of `keras.layers.Input`) :param blocks: the network’s residual architecture @@ -261,15 +261,15 @@ def TimeDistributedResNet152(inputs, blocks=None, include_top=True, classes=1000 >>> shape, classes = (224, 224, 3), 1000 - >>> x = tensorflow.keras.layers.Input(shape) + >>> x = keras.layers.Input(shape) >>> y = keras_resnet.models.TimeDistributedResNet152(x) - >>> y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Flatten())(y.output) + >>> y = keras.layers.TimeDistributed(keras.layers.Flatten())(y.output) - >>> y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Dense(classes, activation="softmax"))(y) + >>> y = keras.layers.TimeDistributed(keras.layers.Dense(classes, activation="softmax"))(y) - >>> model = tensorflow.keras.models.Model(x, y) + >>> model = keras.models.Model(x, y) >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) """ @@ -281,9 +281,9 @@ def TimeDistributedResNet152(inputs, blocks=None, include_top=True, classes=1000 def TimeDistributedResNet200(inputs, blocks=None, include_top=True, classes=1000, *args, **kwargs): """ - Constructs a time distributed `tensorflow.keras.models.Model` according to the ResNet200 specifications. + Constructs a time distributed `keras.models.Model` according to the ResNet200 specifications. - :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) + :param inputs: input tensor (e.g. 
an instance of `keras.layers.Input`) :param blocks: the network’s residual architecture @@ -299,15 +299,15 @@ def TimeDistributedResNet200(inputs, blocks=None, include_top=True, classes=1000 >>> shape, classes = (224, 224, 3), 1000 - >>> x = tensorflow.keras.layers.Input(shape) + >>> x = keras.layers.Input(shape) >>> y = keras_resnet.models.TimeDistributedResNet200(x) - >>> y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Flatten())(y.output) + >>> y = keras.layers.TimeDistributed(keras.layers.Flatten())(y.output) - >>> y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Dense(classes, activation="softmax"))(y) + >>> y = keras.layers.TimeDistributed(keras.layers.Dense(classes, activation="softmax"))(y) - >>> model = tensorflow.keras.models.Model(x, y) + >>> model = keras.models.Model(x, y) >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) """ diff --git a/tests/conftest.py b/tests/conftest.py index e3ddd8a..ebfd7b8 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,4 +1,4 @@ -import tensorflow.keras.layers +import keras.layers import pytest @@ -6,4 +6,4 @@ def x(): shape = (224, 224, 3) - return tensorflow.keras.layers.Input(shape) + return keras.layers.Input(shape) diff --git a/tools/import-caffe-weights.py b/tools/import-caffe-weights.py index c56b9b2..dd84351 100644 --- a/tools/import-caffe-weights.py +++ b/tools/import-caffe-weights.py @@ -1,7 +1,7 @@ #!/usr/bin/env python import keras_resnet.models -import tensorflow.keras +import keras import h5py import argparse @@ -21,7 +21,7 @@ def create_model(resnet): if resnet not in valid: raise ValueError("Invalid resnet argument (valid: {}) : '{}'".format(valid, resnet)) - image = tensorflow.keras.layers.Input((None, None, 3)) + image = keras.layers.Input((None, None, 3)) if resnet == "resnet50": return keras_resnet.models.ResNet50(image) elif resnet == "resnet101": @@ -50,11 +50,11 @@ def parse_args(): # port each layer for index, l in 
enumerate(model.layers): - if isinstance(l, tensorflow.keras.layers.Conv2D): + if isinstance(l, keras.layers.Conv2D): l.set_weights([convert_conv_weights(weights.get(l.name).get("0"))]) - elif isinstance(l, tensorflow.keras.layers.Dense): + elif isinstance(l, keras.layers.Dense): l.set_weights(convert_dense_weights(weights.get(l.name).get("0"), weights.get(l.name).get("1"))) - elif isinstance(l, tensorflow.keras.layers.BatchNormalization): + elif isinstance(l, keras.layers.BatchNormalization): scale_name = l.name.replace("bn", "scale") bn_weights = weights.get(l.name) scale_weights = weights.get(scale_name) From d6bd81978c3637feea5b3deb6c25ed2a712e32e9 Mon Sep 17 00:00:00 2001 From: tackoo Date: Fri, 4 Jun 2021 13:19:52 -0700 Subject: [PATCH 24/24] minor fix for allowing 1D's other than 18 --- keras_resnet/models/_1d.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/keras_resnet/models/_1d.py b/keras_resnet/models/_1d.py index 479e18b..8a90f49 100644 --- a/keras_resnet/models/_1d.py +++ b/keras_resnet/models/_1d.py @@ -117,10 +117,13 @@ def call(self, inputs): x = self.lyrs[0](x) self.lyrs.pop() i += 1 - if i == self.iters[0]: - outputs.append(x) - self.iters.pop() - i = 0 + try: + if i == self.iters[0]: + outputs.append(x) + self.iters.pop() + i = 0 + except: + pass if self.include_top: assert self.classes > 0