
Fix typo
lewtun committed Feb 20, 2025
1 parent 3767378 commit e89a987
Showing 2 changed files with 4 additions and 4 deletions.
4 changes: 2 additions & 2 deletions src/lighteval/models/sglang/sglang_model.py
@@ -216,14 +216,14 @@ def greedy_until(
             if max_new_tokens is not None:
                 if context_size + max_new_tokens > self.max_length:
                     logger.warning(
-                        f"{context_size + max_new_tokens=} which is greather than {self.max_length=}. Truncating context to {self.max_length - max_new_tokens} tokens."
+                        f"{context_size + max_new_tokens=} which is greater than {self.max_length=}. Truncating context to {self.max_length - max_new_tokens} tokens."
                     )
                     context_size = self.max_length - max_new_tokens
                     inputs = [input[-context_size:] for input in inputs]
             else:
                 if context_size > self.max_length:
                     logger.warning(
-                        f"{context_size=} which is greather than {self.max_length=}. Truncating context to {self.max_length} tokens."
+                        f"{context_size=} which is greater than {self.max_length=}. Truncating context to {self.max_length} tokens."
                     )
                     context_size = self.max_length
                     inputs = [input[-context_size:] for input in inputs]
4 changes: 2 additions & 2 deletions src/lighteval/models/vllm/vllm_model.py
@@ -266,7 +266,7 @@ def greedy_until(
             if max_new_tokens is not None:
                 if context_size + max_new_tokens > self.max_length:
                     logger.warning(
-                        f"{context_size + max_new_tokens=} which is greather than {self.max_length=}. Truncating context to {self.max_length - max_new_tokens} tokens."
+                        f"{context_size + max_new_tokens=} which is greater than {self.max_length=}. Truncating context to {self.max_length - max_new_tokens} tokens."
                     )
                     context_size = self.max_length - max_new_tokens
                     if context_size < 0:
@@ -278,7 +278,7 @@ def greedy_until(
             else:
                 if context_size > self.max_length:
                     logger.warning(
-                        f"{context_size=} which is greather than {self.max_length=}. Truncating context to {self.max_length} tokens."
+                        f"{context_size=} which is greater than {self.max_length=}. Truncating context to {self.max_length} tokens."
                     )
                     context_size = self.max_length
                     inputs = [input[-context_size:] for input in inputs]
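For readers skimming the diff: the code touched by this typo fix left-truncates tokenized prompts that would overflow the model's maximum sequence length, reserving room for generation when max_new_tokens is set. Below is a minimal standalone sketch of that truncation rule, assuming the behavior shown in the hunks above; the function name, logging setup, and token lists are hypothetical and not part of lighteval's API.

import logging

logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger(__name__)


def truncate_contexts(inputs, context_size, max_length, max_new_tokens=None):
    """Left-truncate tokenized prompts so prompt + generation fits in max_length.

    Illustrative sketch of the rule in the hunks above; names and signature are made up.
    inputs: list of token-id lists, context_size: current prompt length in tokens.
    """
    if max_new_tokens is not None:
        # Reserve max_new_tokens for generation; truncate the prompt to what remains.
        if context_size + max_new_tokens > max_length:
            logger.warning(
                f"{context_size + max_new_tokens=} which is greater than {max_length=}. "
                f"Truncating context to {max_length - max_new_tokens} tokens."
            )
            context_size = max_length - max_new_tokens
            if context_size < 0:
                raise ValueError("max_new_tokens alone exceeds the model's max length.")
            inputs = [prompt[-context_size:] for prompt in inputs]
    else:
        # No generation budget given: only cap the prompt itself at max_length.
        if context_size > max_length:
            logger.warning(
                f"{context_size=} which is greater than {max_length=}. Truncating context to {max_length} tokens."
            )
            context_size = max_length
            inputs = [prompt[-context_size:] for prompt in inputs]
    return inputs, context_size


# Example: a 10-token prompt with an 8-token model limit and 4 tokens reserved for
# generation keeps only the last 4 prompt tokens.
prompts, size = truncate_contexts([list(range(10))], context_size=10, max_length=8, max_new_tokens=4)
print(prompts, size)  # [[6, 7, 8, 9]] 4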
