Skip to content

Commit

Permalink
Fix type hint of StaticEmbedding.__init__ (#3196)
Browse files · Browse the repository at this point in the history
  • Loading branch information
altescy authored Jan 25, 2025
1 parent dfef1d6 commit db6ce94
Showing 1 changed file with 2 additions and 2 deletions.
4 changes: 2 additions & 2 deletions sentence_transformers/models/StaticEmbedding.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ class StaticEmbedding(nn.Module):
def __init__(
self,
tokenizer: Tokenizer | PreTrainedTokenizerFast,
embedding_weights: np.array | torch.Tensor | None = None,
embedding_weights: np.ndarray | torch.Tensor | None = None,
embedding_dim: int | None = None,
**kwargs,
) -> None:
Expand All @@ -30,7 +30,7 @@ def __init__(
Args:
tokenizer (Tokenizer | PreTrainedTokenizerFast): The tokenizer to be used. Must be a fast tokenizer
from ``transformers`` or ``tokenizers``.
embedding_weights (np.array | torch.Tensor | None, optional): Pre-trained embedding weights.
embedding_weights (np.ndarray | torch.Tensor | None, optional): Pre-trained embedding weights.
Defaults to None.
embedding_dim (int | None, optional): Dimension of the embeddings. Required if embedding_weights
is not provided. Defaults to None.
Expand Down

0 comments on commit db6ce94

Please sign in to comment.