diff --git a/keras/src/layers/activations/softmax.py b/keras/src/layers/activations/softmax.py
index c1fee581a89..90884e50a86 100644
--- a/keras/src/layers/activations/softmax.py
+++ b/keras/src/layers/activations/softmax.py
@@ -46,7 +46,6 @@ def __init__(self, axis=-1, **kwargs):
         super().__init__(**kwargs)
         self.supports_masking = True
         self.axis = axis
-        self.built = True
 
     def call(self, inputs, mask=None):
         if mask is not None:
diff --git a/keras/src/testing/test_case.py b/keras/src/testing/test_case.py
index 9651eab71c0..351f58bcb91 100644
--- a/keras/src/testing/test_case.py
+++ b/keras/src/testing/test_case.py
@@ -271,6 +271,33 @@ def run_layer_test(
             lambda _: "float32", input_shape
         )
 
+        # Estimate the actual number of weights, variables, and seed
+        # generators when the expected counts are not set. A layer that
+        # uses composition should build each sublayer manually.
+        if input_data is not None or input_shape is not None:
+            if input_data is None:
+                input_data = create_eager_tensors(
+                    input_shape, input_dtype, input_sparse
+                )
+            layer = layer_cls(**init_kwargs)
+            if isinstance(input_data, dict):
+                layer(**input_data, **call_kwargs)
+            else:
+                layer(input_data, **call_kwargs)
+
+            if expected_num_trainable_weights is None:
+                expected_num_trainable_weights = len(layer.trainable_weights)
+            if expected_num_non_trainable_weights is None:
+                expected_num_non_trainable_weights = len(
+                    layer.non_trainable_weights
+                )
+            if expected_num_non_trainable_variables is None:
+                expected_num_non_trainable_variables = len(
+                    layer.non_trainable_variables
+                )
+            if expected_num_seed_generators is None:
+                expected_num_seed_generators = len(get_seed_generators(layer))
+
         # Serialization test.
         layer = layer_cls(**init_kwargs)
         self.run_class_serialization_test(layer, custom_objects)