diff --git a/kimm/models/ghostnet.py b/kimm/models/ghostnet.py
index e4c4bd8..92c1534 100644
--- a/kimm/models/ghostnet.py
+++ b/kimm/models/ghostnet.py
@@ -1,4 +1,5 @@
 import typing
+import warnings
 
 import keras
 from keras import backend
@@ -398,7 +399,7 @@ def __init__(
         dropout_rate: float = 0.2,
         classes: int = 1000,
         classifier_activation: str = "softmax",
-        weights: typing.Optional[str] = None,
+        weights: typing.Optional[str] = "imagenet",
         name: typing.Optional[str] = None,
         **kwargs,
     ):
@@ -409,6 +410,10 @@
         )
         kwargs = self.fix_config(kwargs)
         if len(getattr(self, "available_weights", [])) == 0:
+            warnings.warn(
+                f"{self.__class__.__name__} doesn't have pretrained weights "
+                f"for '{weights}'."
+            )
             weights = None
         super().__init__(
             width=self.width,
diff --git a/kimm/models/mobilenet_v3.py b/kimm/models/mobilenet_v3.py
index d604b14..1013b24 100644
--- a/kimm/models/mobilenet_v3.py
+++ b/kimm/models/mobilenet_v3.py
@@ -1,5 +1,6 @@
 import math
 import typing
+import warnings
 
 import keras
 from keras import layers
@@ -335,6 +336,10 @@ def __init__(
         if hasattr(self, "padding"):
             kwargs["padding"] = self.padding
         if len(getattr(self, "available_weights", [])) == 0:
+            warnings.warn(
+                f"{self.__class__.__name__} doesn't have pretrained weights "
+                f"for '{weights}'."
+            )
             weights = None
         super().__init__(
             width=self.width,
diff --git a/kimm/models/vision_transformer.py b/kimm/models/vision_transformer.py
index 52e73b6..0295676 100644
--- a/kimm/models/vision_transformer.py
+++ b/kimm/models/vision_transformer.py
@@ -1,4 +1,5 @@
 import typing
+import warnings
 
 import keras
 from keras import backend
@@ -176,6 +177,10 @@
         )
         kwargs = self.fix_config(kwargs)
         if len(getattr(self, "available_weights", [])) == 0:
+            warnings.warn(
+                f"{self.__class__.__name__} doesn't have pretrained weights "
+                f"for '{weights}'."
+            )
             weights = None
         super().__init__(
             patch_size=self.patch_size,
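
For reference, a minimal standalone sketch of the weights-handling pattern these hunks introduce: warn when a model class has no entries in available_weights, then fall back to weights=None. The class names below are hypothetical stand-ins for illustration, not kimm's actual model hierarchy:

import typing
import warnings


class BaseModel:
    """Stand-in for the kimm base model; only the weights plumbing is modeled."""

    def __init__(self, weights: typing.Optional[str] = None, **kwargs):
        self.weights = weights


class VariantWithoutWeights(BaseModel):
    # A variant with no released checkpoints.
    available_weights = []

    def __init__(
        self,
        weights: typing.Optional[str] = "imagenet",  # new default in this diff
        **kwargs,
    ):
        if len(getattr(self, "available_weights", [])) == 0:
            # Warn instead of failing silently, then fall back to random init.
            warnings.warn(
                f"{self.__class__.__name__} doesn't have pretrained weights "
                f"for '{weights}'."
            )
            weights = None
        super().__init__(weights=weights, **kwargs)


if __name__ == "__main__":
    VariantWithoutWeights()
    # UserWarning: VariantWithoutWeights doesn't have pretrained weights for 'imagenet'.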