Skip to content

Commit

Permalink
Fix json benchmark (#2043)
Browse files — browse the repository at this point in the history
  • Loading branch information
merrymercy authored Nov 15, 2024
1 parent 954f4e6 commit c29b98e
Show file tree
Hide file tree
Showing 2 changed files with 11 additions and 2 deletions.
12 changes: 10 additions & 2 deletions benchmark/json_schema/bench_sglang.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,8 @@
from datasets import load_dataset

import sglang as sgl
from sglang.global_config import global_config
from sglang.srt.hf_transformers_utils import get_tokenizer
from sglang.test.test_utils import (
add_common_sglang_args_and_parse,
select_sglang_backend,
Expand Down Expand Up @@ -103,19 +105,25 @@ def bench_schema(args):
print(e)
indexs.append(i)

assert len(indexs) == 0, f"Invalid json outputs: {indexs}"
return states, latency


def main(args):
    """Run the JSON-schema benchmark and report latency/throughput.

    Runs ``bench_schema(args)``, counts output tokens with the backend's
    tokenizer, prints latency and throughput, then dumps the raw states and
    the per-sample JSON outputs to disk.
    """
    states, latency = bench_schema(args)

    # Count output tokens to derive throughput (token/s).
    tokenizer = get_tokenizer(
        global_config.default_backend.get_server_args()["tokenizer_path"]
    )
    output_jsons = [state["json_output"] for state in states]
    num_output_tokens = sum(len(tokenizer.encode(x)) for x in output_jsons)
    print(f"Latency: {latency:.3f}")
    print(f"Output throughput: {num_output_tokens / latency:.3f} token/s")
    print(f"#output tokens: {num_output_tokens}")

    # Write results.
    dump_state_text(f"tmp_output_{args.backend}.txt", states)
    # One JSON object per line, so the file is JSON Lines (.jsonl), not a
    # single JSON document — this resolves the diff's .json -> .jsonl rename.
    with open(f"{args.backend}.jsonl", "w") as fout:
        for state in states:
            fout.write(state["json_output"] + "\n")

Expand Down
1 change: 1 addition & 0 deletions python/sglang/srt/server.py
Original file line number Diff line number Diff line change
Expand Up @@ -139,6 +139,7 @@ async def get_model_info():
"""Get the model information."""
result = {
"model_path": tokenizer_manager.model_path,
"tokenizer_path": tokenizer_manager.server_args.tokenizer_path,
"is_generation": tokenizer_manager.is_generation,
}
return result
Expand Down

0 comments on commit c29b98e

Please sign in to comment.