Skip to content

Commit

Permalink
Added Docker support with wrapper
Browse files Browse the repository at this point in the history
  • Loading branch information
aug2uag committed Nov 26, 2024
1 parent 805da85 commit d2b3346
Show file tree
Hide file tree
Showing 3 changed files with 290 additions and 0 deletions.
108 changes: 108 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -85,3 +85,111 @@ $ python -m flux.api --prompt="A beautiful beach" save outputs/api
# open the image directly
$ python -m flux.api --prompt="A beautiful beach" image show
```

## Docker Usage

We provide Docker support for both local model inference and API access. This makes it easy to run FLUX without installing dependencies directly on your system.

### Installation

1. Build the Docker image:
```bash
# Clone the repository
git clone https://github.com/black-forest-labs/flux
cd flux

# For Apple Silicon (M1/M2/M3)
docker build --platform linux/arm64 -t flux-project -f docker/Dockerfile .

# For Intel/AMD with NVIDIA GPU
docker build --platform linux/amd64 -t flux-project -f docker/Dockerfile .
```

2. Install the CLI tool:
```bash
# Make the script executable
chmod +x docker/flux-cli.sh

# Option 1: Create a symbolic link (recommended)
sudo ln -s "$(pwd)/docker/flux-cli.sh" /usr/local/bin/flux-cli

# Option 2: Copy the script (alternative)
sudo cp docker/flux-cli.sh /usr/local/bin/flux-cli

# Verify installation
flux-cli --help
```

#### Apple Silicon Macs

```bash
# API usage (recommended for M-series Macs)
flux-cli --api-key "your-api-key" \
--prompt "A beautiful sunset" \
--output sunset.jpg

# Local model usage
flux-cli --local \
--model flux.1-schnell \
--prompt "A beautiful forest" \
--output forest.jpg
```

#### NVIDIA GPU Systems

```bash
# API usage
flux-cli --api-key "your-api-key" \
--prompt "A beautiful sunset" \
--output sunset.jpg

# Local model usage with GPU acceleration
flux-cli --local \
--model flux.1-schnell \
--prompt "A beautiful forest" \
--output forest.jpg \
--gpu
```

### Output Formats

The CLI supports multiple output formats:

```bash
# Save to file (default)
flux-cli --api-key "your-key" --prompt "prompt" --output image.jpg

# Get URL (API mode only)
flux-cli --api-key "your-key" --prompt "prompt" --format url

# Display image directly
flux-cli --api-key "your-key" --prompt "prompt" --format image
```

### Directory Structure

FLUX CLI uses the following directory structure by default:
```
~/.flux/
├── models/ # Cache for downloaded models
└── outputs/ # Default location for generated images
```

You can customize these locations using environment variables:
```bash
export FLUX_HOME=/path/to/flux/data # Base directory
export FLUX_OUTPUTS=/path/to/outputs # Output directory
export FLUX_MODELS=/path/to/models # Models directory
```

The CLI can be run from any directory and supports both absolute and relative output paths. (The examples below assume `BFL_API_KEY` is set in your environment; otherwise add `--api-key` or `--local`.)
```bash
# Save to current directory
flux-cli --prompt "A sunset" --output ./sunset.jpg

# Save to specific location
flux-cli --prompt "A forest" --output /path/to/images/forest.jpg

# Save to default outputs directory
flux-cli --prompt "A beach" --output beach.jpg
```
58 changes: 58 additions & 0 deletions docker/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
# ----------------------------------------------------------------------------
# Multi-platform image for FLUX.
# Builder stage installs PyTorch per architecture (default wheels on arm64,
# CUDA 11.8 wheels elsewhere) into a venv; the runtime stage copies the venv.
# ----------------------------------------------------------------------------
FROM --platform=$TARGETPLATFORM ubuntu:22.04 AS base

# Build-time architecture info supplied by buildx.
ARG TARGETPLATFORM
ARG BUILDPLATFORM

# ---- Builder stage: Python toolchain + dependencies ------------------------
FROM base AS python-deps
# Fix: ARGs declared in a parent stage are NOT inherited by child stages;
# without re-declaring TARGETPLATFORM here, the arch check below always saw
# an empty value and took the CUDA branch, even on arm64.
ARG TARGETPLATFORM
ENV PYTHONUNBUFFERED=1 \
    DEBIAN_FRONTEND=noninteractive

# Install Python 3.10 and build prerequisites.
RUN apt-get update && apt-get install -y \
    python3.10 \
    python3.10-venv \
    python3-pip \
    git \
    && rm -rf /var/lib/apt/lists/*

# Create venv and put it first on PATH so `pip` below targets it.
RUN python3.10 -m venv /opt/venv
ENV PATH="/opt/venv/bin:$PATH"

WORKDIR /app
COPY requirements.txt .

# PyTorch wheels differ per architecture: default index for arm64
# (Apple Silicon, CPU/MPS), CUDA 11.8 index for everything else.
RUN if [ "$TARGETPLATFORM" = "linux/arm64" ]; then \
    pip install --no-cache-dir torch torchvision torchaudio; \
    else \
    pip install --no-cache-dir torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cu118; \
    fi

RUN pip install --no-cache-dir -r requirements.txt

# ---- Runtime stage ---------------------------------------------------------
FROM base
ENV DEBIAN_FRONTEND=noninteractive

# Fix: the venv's python3.10 is a symlink to the system interpreter, so the
# runtime stage must install python3.10 too — the original final image had
# no Python at all, breaking the venv, the editable install, and ENTRYPOINT.
RUN apt-get update && apt-get install -y \
    python3.10 \
    && rm -rf /var/lib/apt/lists/*

COPY --from=python-deps /opt/venv /opt/venv
ENV PATH="/opt/venv/bin:$PATH"

WORKDIR /app
COPY . .

# Install the package itself (deps already satisfied by the venv).
RUN pip install -e ".[all]"

# Mount points for generated images and the model cache.
VOLUME ["/app/outputs", "/app/models"]

# Point torch/huggingface caches at the mounted models volume.
ENV TORCH_HOME=/app/models
ENV HF_HOME=/app/models

# Default command; the CLI wrapper appends flux.api arguments.
ENTRYPOINT ["python3", "-m", "flux.api"]
124 changes: 124 additions & 0 deletions docker/flux-cli.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,124 @@
#!/bin/bash

# Enhanced CLI wrapper for FLUX Docker implementation
# Supports both local model inference and API access

set -e

# ---- Defaults --------------------------------------------------------------
USE_LOCAL=false                # run the bundled model instead of the API
API_KEY="${BFL_API_KEY:-}"     # picked up from the environment when present
MODEL="flux.1-pro"
PROMPT=""
OUTPUT="flux-output.jpg"
OUTPUT_FORMAT="save"           # one of: save | url | image
GPU_SUPPORT=""                 # set to "--gpus all" by the --gpu flag

# ---- Working directories ---------------------------------------------------
# Remember where the user invoked us so relative --output paths resolve there.
WORK_DIR="$(pwd)"
FLUX_HOME="${FLUX_HOME:-$HOME/.flux}"
FLUX_OUTPUTS="${FLUX_OUTPUTS:-$FLUX_HOME/outputs}"
FLUX_MODELS="${FLUX_MODELS:-$FLUX_HOME/models}"

# Ensure the cache/output directories exist before mounting them into docker.
mkdir -p "$FLUX_OUTPUTS" "$FLUX_MODELS"

#######################################
# Print CLI usage/help text to stdout.
# Globals:   FLUX_HOME (read; expanded into the env-var default descriptions)
# Arguments: none
# Outputs:   multi-line help text on stdout
# Returns:   0
#######################################
usage() {
# NOTE: the EOF delimiter is intentionally unquoted so that $0 and
# $FLUX_HOME are expanded at call time inside the here-doc.
cat << EOF
Usage: $0 [options]
Options:
--local Use local model instead of API
--api-key KEY API key for remote usage
--model NAME Model name to use (default: flux.1-pro)
--prompt TEXT Prompt for image generation
--output PATH Output path (default: flux-output.jpg)
--format FORMAT Output format: save|url|image (default: save)
--gpu Enable GPU support
-h, --help Show this help message
Environment variables:
FLUX_HOME Base directory for FLUX data (default: ~/.flux)
FLUX_OUTPUTS Output directory (default: $FLUX_HOME/outputs)
FLUX_MODELS Models directory (default: $FLUX_HOME/models)
BFL_API_KEY API key (can be set instead of --api-key)
Examples:
$0 --prompt "A beautiful sunset" --output sunset.jpg
$0 --local --model flux.1-schnell --prompt "A forest" --gpu
EOF
}

#######################################
# Parse command-line options into the script's global configuration
# variables (USE_LOCAL, API_KEY, MODEL, PROMPT, OUTPUT, OUTPUT_FORMAT,
# GPU_SUPPORT).
# Arguments: the script's "$@"
# Outputs:   error text on stderr for unknown or incomplete options
# Returns:   0 on success; exits 1 on a bad option, 0 after --help
#######################################
parse_args() {
    while [[ "$#" -gt 0 ]]; do
        case $1 in
            --local) USE_LOCAL=true ;;
            --api-key|--model|--prompt|--output|--format)
                # These options require a value; the original read "$2"
                # unchecked, which silently assigned an empty string (or
                # died on a bare shift) when the value was missing.
                if [[ "$#" -lt 2 ]]; then
                    echo "Error: $1 requires an argument" >&2
                    usage
                    exit 1
                fi
                case $1 in
                    --api-key) API_KEY="$2" ;;
                    --model) MODEL="$2" ;;
                    --prompt) PROMPT="$2" ;;
                    --output) OUTPUT="$2" ;;
                    --format) OUTPUT_FORMAT="$2" ;;
                esac
                shift  # consume the option's value
                ;;
            --gpu) GPU_SUPPORT="--gpus all" ;;
            -h|--help) usage; exit 0 ;;
            *) echo "Unknown parameter: $1" >&2; usage; exit 1 ;;
        esac
        shift  # consume the option itself
    done
}

parse_args "$@"

# ---- Validate required arguments -------------------------------------------
# A prompt is always required, in both local and API mode.
# (Fix: diagnostics now go to stderr instead of stdout, so they cannot be
# mistaken for program output when the CLI is used in pipelines.)
if [ -z "$PROMPT" ]; then
    echo "Error: --prompt is required" >&2
    usage
    exit 1
fi

# MODEL has a default value, so this guard only fires if it was explicitly
# cleared; kept as a safety net for local mode.
if [ "$USE_LOCAL" = true ] && [ -z "$MODEL" ]; then
    echo "Error: --model is required when using local mode" >&2
    usage
    exit 1
fi

# API mode needs a key, either via --api-key or the BFL_API_KEY environment.
if [ "$USE_LOCAL" = false ] && [ -z "$API_KEY" ]; then
    echo "Error: --api-key is required when using API mode" >&2
    usage
    exit 1
fi

# ---- Resolve the output path -----------------------------------------------
# Turn OUTPUT into an absolute FINAL_OUTPUT and ensure its parent directory
# (OUTPUT_DIR) exists. Relative paths resolve against the directory the CLI
# was invoked from (WORK_DIR), not the script's location.
case "$OUTPUT" in
    /*) FINAL_OUTPUT="$OUTPUT" ;;              # already absolute
    *)  FINAL_OUTPUT="$WORK_DIR/$OUTPUT" ;;    # relative to invocation dir
esac
OUTPUT_DIR="$(dirname "$FINAL_OUTPUT")"

# Create the destination directory up-front so the docker volume mount
# below has something to bind to.
mkdir -p "$OUTPUT_DIR"

#######################################
# Assemble the docker-run argument list into the global DOCKER_ARGS array.
# (Fix: the original built a flat string with unquoted $FLUX_OUTPUTS /
# $FLUX_MODELS / $OUTPUT_DIR / $API_KEY expansions, so any path containing
# a space broke the volume mounts. An array keeps each argument intact.)
# Globals (read): GPU_SUPPORT FLUX_OUTPUTS FLUX_MODELS OUTPUT_DIR
#                 USE_LOCAL API_KEY
# Globals (set):  DOCKER_ARGS
# Returns:        0
#######################################
build_docker_args() {
    DOCKER_ARGS=(run --rm)

    if [ -n "$GPU_SUPPORT" ]; then
        # Intentionally unquoted: GPU_SUPPORT holds space-separated docker
        # flags ("--gpus all") that must stay separate words.
        # shellcheck disable=SC2206
        DOCKER_ARGS+=($GPU_SUPPORT)
    fi

    DOCKER_ARGS+=(
        -v "$FLUX_OUTPUTS:/app/outputs"
        -v "$FLUX_MODELS:/app/models"
        -v "$OUTPUT_DIR:/app/current"
    )

    if [ "$USE_LOCAL" = false ]; then
        # NOTE(review): the key is visible via `ps`/`docker inspect`;
        # consider --env-file for sensitive deployments.
        DOCKER_ARGS+=(-e "BFL_API_KEY=$API_KEY")
    fi
}

build_docker_args

# ---- Run the container -----------------------------------------------------
# The container writes into /app/current, which is bind-mounted to the
# resolved output directory on the host.
if [ "$USE_LOCAL" = true ]; then
    docker "${DOCKER_ARGS[@]}" flux-project \
        --model "$MODEL" \
        --prompt "$PROMPT" \
        "$OUTPUT_FORMAT" "/app/current/$(basename "$OUTPUT")"
else
    docker "${DOCKER_ARGS[@]}" flux-project \
        --prompt "$PROMPT" \
        "$OUTPUT_FORMAT" "/app/current/$(basename "$OUTPUT")"
fi

echo "Output saved to: $FINAL_OUTPUT"

0 comments on commit d2b3346

Please sign in to comment.