From d8bf0a4930179eac032ffcd695494e9da0c79027 Mon Sep 17 00:00:00 2001 From: Jiarui Fang Date: Thu, 5 Dec 2024 20:45:07 +0800 Subject: [PATCH] diffusers as optional during install (#383) --- README.md | 11 +++++++---- setup.py | 10 +++++----- 2 files changed, 12 insertions(+), 9 deletions(-) diff --git a/README.md b/README.md index b0a6f50..a695c67 100644 --- a/README.md +++ b/README.md @@ -189,18 +189,21 @@ Currently, if you need the parallel version of ComfyUI, please fill in this [app ### 1. Install from pip +We set diffusers as an optional installation requirement. +First, if you only use the USP interface, you don't need to install diffusers. Second, different models have different version requirements for diffusers - for example, the latest models may require diffusers to be installed from its main branch. + ``` pip install xfuser -# Or optionally, with flash_attn -pip install "xfuser[flash_attn]" +# Or optionally, with diffusers +pip install "xfuser[diffusers]" ``` ### 2. Install from source ``` pip install -e . 
-# Or optionally, with flash_attn -pip install -e ".[flash_attn]" +# Or optionally, with diffusers +pip install -e ".[diffusers]" ``` Note that we use two self-maintained packages: diff --git a/setup.py b/setup.py index 8823d8c..c049b49 100644 --- a/setup.py +++ b/setup.py @@ -28,7 +28,6 @@ def get_cuda_version(): install_requires=[ "torch>=2.1.0", "accelerate>=0.33.0", - "diffusers@git+https://github.com/huggingface/diffusers", # NOTE: diffusers>=0.32.0.dev is necessary for CogVideoX and Flux "transformers>=4.39.1", "sentencepiece>=0.1.99", "beautifulsoup4>=4.12.3", @@ -39,12 +38,13 @@ def get_cuda_version(): "opencv-python", "imageio", "imageio-ffmpeg", - "optimum-quanto" + "optimum-quanto", + "flash_attn>=2.6.3" ], extras_require={ - "flash_attn": [ - "flash_attn>=2.6.3", - ], + "diffusers": [ + "diffusers>=0.31.0", # NOTE: diffusers>=0.32.0.dev is necessary for CogVideoX and Flux + ] }, url="https://github.com/xdit-project/xDiT.", description="xDiT: A Scalable Inference Engine for Diffusion Transformers (DiTs) on multi-GPU Clusters",