From d2ec511b0565fbb30e3d2804eb3cee0c2c19bdc3 Mon Sep 17 00:00:00 2001
From: Yuwen Hu
Date: Thu, 30 May 2024 13:36:23 +0800
Subject: [PATCH] Small doc updates for llm cpu

---
 docs/docs/examples/llm/ipex_llm.ipynb | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/docs/docs/examples/llm/ipex_llm.ipynb b/docs/docs/examples/llm/ipex_llm.ipynb
index bc7d88829e928..c500e80dda1ba 100644
--- a/docs/docs/examples/llm/ipex_llm.ipynb
+++ b/docs/docs/examples/llm/ipex_llm.ipynb
@@ -10,7 +10,9 @@
     "\n",
     "This example goes over how to use LlamaIndex to interact with [`ipex-llm`](https://github.com/intel-analytics/ipex-llm/) for text generation and chat on CPU. \n",
     "\n",
-    "For more examples and usage, refer to [Examples](https://github.com/run-llama/llama_index/tree/main/llama-index-integrations/llms/llama-index-llms-ipex-llm/examples)."
+    "> **Note**\n",
+    ">\n",
+    "> See [the IpexLLM examples](https://github.com/run-llama/llama_index/tree/main/llama-index-integrations/llms/llama-index-llms-ipex-llm/examples) for complete example code of `IpexLLM`. To run the examples on an Intel CPU, specify `-d 'cpu'` as a command argument."
    ]
   },
   {
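For context, the note added above points readers at example scripts that take a `-d 'cpu'` device argument. A minimal sketch of what CPU usage of this integration roughly looks like is shown below; it assumes the `IpexLLM.from_model_id` constructor and `device_map` parameter exposed by `llama-index-llms-ipex-llm`, and the model id is only illustrative.

```python
# Hypothetical sketch: loading an ipex-llm backed model on CPU through the
# llama-index-llms-ipex-llm integration. The model/tokenizer ids below are
# placeholders; substitute whichever Hugging Face model the examples use.
from llama_index.llms.ipex_llm import IpexLLM

llm = IpexLLM.from_model_id(
    model_name="HuggingFaceH4/zephyr-7b-alpha",      # illustrative model id
    tokenizer_name="HuggingFaceH4/zephyr-7b-alpha",
    context_window=512,
    max_new_tokens=128,
    device_map="cpu",  # presumably what the examples' `-d 'cpu'` flag selects
)

# Standard LlamaIndex completion call once the LLM is constructed.
print(llm.complete("What is IPEX-LLM?").text)
```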