From 35cf1d289a0f15e056c1dae5d9967ba027b3acec Mon Sep 17 00:00:00 2001
From: jiqing-feng
Date: Thu, 12 Dec 2024 16:59:07 +0800
Subject: [PATCH] Update readme (#1066)

Signed-off-by: jiqing-feng
---
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index b3879ef380..0cd317c78d 100644
--- a/README.md
+++ b/README.md
@@ -168,7 +168,7 @@ To load your IPEX model, you can just replace your `AutoModelForXxx` class with
 
   model_id = "gpt2"
 - model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.bfloat16)
-+ model = IPEXModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.bfloat16, export=True)
++ model = IPEXModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.bfloat16)
   tokenizer = AutoTokenizer.from_pretrained(model_id)
   pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)
   results = pipe("He's a dreadful magician and")
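
For reference, the README example reads roughly as follows once this patch is applied. This is a minimal sketch, assuming optimum-intel is installed with IPEX support and the gpt2 weights are available for download; the only substantive change from the previous README snippet is that export=True is no longer passed to from_pretrained.

    import torch
    from transformers import AutoTokenizer, pipeline
    from optimum.intel import IPEXModelForCausalLM

    model_id = "gpt2"
    # Load through optimum-intel's IPEX wrapper instead of transformers'
    # AutoModelForCausalLM; per this patch, no export=True argument is used.
    model = IPEXModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.bfloat16)
    tokenizer = AutoTokenizer.from_pretrained(model_id)

    # Run a standard transformers text-generation pipeline on the IPEX model.
    pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)
    results = pipe("He's a dreadful magician and")
    print(results)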