From 9534579ee9ab23d3d3dd05ea048f8e4730b44891 Mon Sep 17 00:00:00 2001
From: jiqing-feng
Date: Thu, 12 Dec 2024 12:21:00 +0000
Subject: [PATCH] update readme

Signed-off-by: jiqing-feng
---
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index b3879ef380..0cd317c78d 100644
--- a/README.md
+++ b/README.md
@@ -168,7 +168,7 @@ To load your IPEX model, you can just replace your `AutoModelForXxx` class with
 
   model_id = "gpt2"
 - model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.bfloat16)
-+ model = IPEXModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.bfloat16, export=True)
++ model = IPEXModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.bfloat16)
   tokenizer = AutoTokenizer.from_pretrained(model_id)
   pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)
   results = pipe("He's a dreadful magician and")
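
Note: for reference, a minimal self-contained sketch of the README snippet as it reads after this change, assuming the `optimum.intel` import path for `IPEXModelForCausalLM` (the import lines are outside this hunk and are not part of the patch):

```python
import torch
from transformers import AutoTokenizer, pipeline
from optimum.intel import IPEXModelForCausalLM  # assumed import path; not shown in this hunk

model_id = "gpt2"
# Load the model through the IPEX wrapper; `export=True` is no longer passed.
model = IPEXModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.bfloat16)
tokenizer = AutoTokenizer.from_pretrained(model_id)
pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)
results = pipe("He's a dreadful magician and")
```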