Skip to content

Commit

Permalink
Throw when pretrained weights not available and pretrained=True (principle of least surprise).
Browse files Browse the repository at this point in the history
  • Loading branch information
rwightman committed May 10, 2023
1 parent 8ce9a2c commit ff2464e
Showing 1 changed file with 3 additions and 4 deletions.
7 changes: 3 additions & 4 deletions timm/models/_builder.py
Original file line number Diff line number Diff line change
Expand Up @@ -152,8 +152,7 @@ def load_pretrained(
"""
pretrained_cfg = pretrained_cfg or getattr(model, 'pretrained_cfg', None)
if not pretrained_cfg:
_logger.warning("Invalid pretrained config, cannot load weights.")
return
raise RuntimeError("Invalid pretrained config, cannot load weights. Use `pretrained=False` for random init.")

load_from, pretrained_loc = _resolve_pretrained_source(pretrained_cfg)
if load_from == 'state_dict':
Expand Down Expand Up @@ -186,8 +185,8 @@ def load_pretrained(
else:
state_dict = load_state_dict_from_hf(pretrained_loc)
else:
_logger.warning("No pretrained weights exist or were found for this model. Using random initialization.")
return
model_name = pretrained_cfg.get('architecture', 'this model')
raise RuntimeError(f"No pretrained weights exist for {model_name}. Use `pretrained=False` for random init.")

if filter_fn is not None:
try:
Expand Down

0 comments on commit ff2464e

Please sign in to comment.