diff --git a/docs/docs/examples/llm/ipex_llm.ipynb b/docs/docs/examples/llm/ipex_llm.ipynb
index bc7d88829e9281..c500e80dda1ba6 100644
--- a/docs/docs/examples/llm/ipex_llm.ipynb
+++ b/docs/docs/examples/llm/ipex_llm.ipynb
@@ -10,7 +10,9 @@
     "\n",
     "This example goes over how to use LlamaIndex to interact with [`ipex-llm`](https://github.com/intel-analytics/ipex-llm/) for text generation and chat on CPU. \n",
     "\n",
-    "For more examples and usage, refer to [Examples](https://github.com/run-llama/llama_index/tree/main/llama-index-integrations/llms/llama-index-llms-ipex-llm/examples)."
+    "> **Note**\n",
+    ">\n",
+    "> Please refer to [here](https://github.com/run-llama/llama_index/tree/main/llama-index-integrations/llms/llama-index-llms-ipex-llm/examples) for full examples of `IpexLLM`. Note that to run on an Intel CPU, you should specify `-d 'cpu'` in the command argument when running the examples."
    ]
   },
   {
diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-ipex-llm/pyproject.toml b/llama-index-integrations/embeddings/llama-index-embeddings-ipex-llm/pyproject.toml
index 3db75ee5d7fa52..fa59eadeda87c7 100644
--- a/llama-index-integrations/embeddings/llama-index-embeddings-ipex-llm/pyproject.toml
+++ b/llama-index-integrations/embeddings/llama-index-embeddings-ipex-llm/pyproject.toml
@@ -30,20 +30,21 @@ license = "MIT"
 name = "llama-index-embeddings-ipex-llm"
 packages = [{include = "llama_index/"}]
 readme = "README.md"
-version = "0.1.2"
+version = "0.1.3"
 
 [tool.poetry.dependencies]
 python = ">=3.9,<4.0"
 llama-index-core = "^0.10.0"
-ipex-llm = {allow-prereleases = true, extras = ["llama-index"], version = ">=2.1.0b20240514"}
+ipex-llm = {allow-prereleases = true, extras = ["llama-index"], version = ">=2.1.0b20240529"}
 torch = {optional = true, source = "ipex-xpu-src-us", version = "2.1.0a0"}
 torchvision = {optional = true, source = "ipex-xpu-src-us", version = "0.16.0a0"}
 intel_extension_for_pytorch = {optional = true, source = "ipex-xpu-src-us", version = "2.1.10+xpu"}
 bigdl-core-xe-21 = {optional = true, version = "*"}
-bigdl-core-xe-esimd-21 = {optional = true, version = "*"}
+bigdl-core-xe-batch-21 = {optional = true, version = "*"}
+bigdl-core-xe-addons-21 = {optional = true, version = "*"}
 
 [tool.poetry.extras]
-xpu = ["bigdl-core-xe-21", "bigdl-core-xe-esimd-21", "intel_extension_for_pytorch", "torch", "torchvision"]
+xpu = ["bigdl-core-xe-21", "bigdl-core-xe-addons-21", "bigdl-core-xe-batch-21", "intel_extension_for_pytorch", "torch", "torchvision"]
 
 [tool.poetry.group.dev.dependencies]
 black = {extras = ["jupyter"], version = "<=23.9.1,>=23.7.0"}
diff --git a/llama-index-integrations/llms/llama-index-llms-ipex-llm/pyproject.toml b/llama-index-integrations/llms/llama-index-llms-ipex-llm/pyproject.toml
index b02b8f6d05361f..c61f063f74e78a 100644
--- a/llama-index-integrations/llms/llama-index-llms-ipex-llm/pyproject.toml
+++ b/llama-index-integrations/llms/llama-index-llms-ipex-llm/pyproject.toml
@@ -30,20 +30,21 @@ license = "MIT"
 name = "llama-index-llms-ipex-llm"
 packages = [{include = "llama_index/"}]
 readme = "README.md"
-version = "0.1.3"
+version = "0.1.4"
 
 [tool.poetry.dependencies]
 python = ">=3.9,<4.0"
 llama-index-core = "^0.10.0"
-ipex-llm = {allow-prereleases = true, extras = ["llama-index"], version = ">=2.1.0b20240514"}
+ipex-llm = {allow-prereleases = true, extras = ["llama-index"], version = ">=2.1.0b20240529"}
 torch = {optional = true, source = "ipex-xpu-src-us", version = "2.1.0a0"}
 torchvision = {optional = true, source = "ipex-xpu-src-us", version = "0.16.0a0"}
 intel_extension_for_pytorch = {optional = true, source = "ipex-xpu-src-us", version = "2.1.10+xpu"}
 bigdl-core-xe-21 = {optional = true, version = "*"}
-bigdl-core-xe-esimd-21 = {optional = true, version = "*"}
+bigdl-core-xe-batch-21 = {optional = true, version = "*"}
+bigdl-core-xe-addons-21 = {optional = true, version = "*"}
 
 [tool.poetry.extras]
-xpu = ["bigdl-core-xe-21", "bigdl-core-xe-esimd-21", "intel_extension_for_pytorch", "torch", "torchvision"]
+xpu = ["bigdl-core-xe-21", "bigdl-core-xe-addons-21", "bigdl-core-xe-batch-21", "intel_extension_for_pytorch", "torch", "torchvision"]
 
 [tool.poetry.group.dev.dependencies]
 black = {extras = ["jupyter"], version = "<=23.9.1,>=23.7.0"}