diff --git a/web_ui/src/pages/DocumentationPage.tsx b/web_ui/src/pages/DocumentationPage.tsx
new file mode 100644
index 0000000..e301fdf
--- /dev/null
+++ b/web_ui/src/pages/DocumentationPage.tsx
@@ -0,0 +1,459 @@
+import { Book, ChevronLeft, Menu, X } from "lucide-react";
+import React, { useEffect, useState } from "react";
+import { MarkdownRenderer } from "../components/MarkdownRenderer";
+
+interface DocSection {
+ id: string;
+ title: string;
+ content: string;
+}
+
+export const DocumentationPage: React.FC = () => {
+ const [selectedSection, setSelectedSection] =
+ useState("getting-started");
+ const [sidebarOpen, setSidebarOpen] = useState(false);
+
+ // Documentation sections with embedded content
+ const docSections: DocSection[] = [
+ {
+ id: "getting-started",
+ title: "Getting Started",
+ content: `# Getting Started with LLM Canvas
+
+LLM Canvas is a powerful visualization tool for complex LLM conversation flows. Create branching conversation trees, explore different response paths, and visualize tool interactions through an intuitive web interface.
+
+## 🚀 Quick Start
+
+### Installation
+
+\`\`\`bash
+pip install llm-canvas
+\`\`\`
+
+### Start Local Server
+
+\`\`\`bash
+# Start the local server
+llm-canvas server --port 8000
+
+# Server starts at http://localhost:8000
+# Create and view your canvases in the web interface
+\`\`\`
+
+### Basic Usage
+
+\`\`\`python
+from llm_canvas import canvas_client
+
+# Create a new canvas
+canvas = canvas_client.create_canvas(
+ title="My First Canvas",
+ description="A canvas for exploring LLM interactions"
+)
+
+# Get main branch
+main_branch = canvas.checkout(name="main", create_if_not_exists=True)
+
+# Create and commit messages
+user_message = {
+ "role": "user",
+ "content": [{"type": "text", "text": "Hello Canvas"}]
+}
+main_branch.commit_message(user_message)
+
+assistant_message = {
+ "role": "assistant",
+ "content": [{"type": "text", "text": "Hello! How can I help you today?"}]
+}
+main_branch.commit_message(assistant_message)
+\`\`\`
+
+## 🌟 Key Features
+
+- **🌳 Branching Conversations**: Create and explore multiple conversation paths from any message
+- **🔧 Tool Call Visualization**: See how your LLM uses tools with clear input/output flows
+- **📦 Zero Dependencies**: Self-contained with built-in web UI
+- **🔒 Privacy First**: Local deployment keeps your data private
+- **🎨 Interactive Canvas**: Infinite canvas for complex conversation flows`,
+ },
+ {
+ id: "canvas-management",
+ title: "Canvas Management",
+ content: `# Canvas Management Guide
+
+## Overview
+
+The **Canvas** is the central concept in LLM Canvas. It represents a conversation workspace that can contain multiple messages, branches, and tool calls. Unlike traditional linear chat logs, a canvas allows for complex, non-linear interactions with sophisticated branching and history management.
+
+## Key Concepts
+
+Canvas management is inspired by Git version control, so if you already know Git these concepts will feel natural (the short sketch after this list shows them in combination):
+
+- **Canvas**: A workspace for your LLM conversations that can contain multiple branches. Each canvas starts with a default branch called 'main'
+- **Branch**: A linear chat history within a canvas. All messages are committed to branches, not directly to the canvas
+- **Commit Message**: Adding a new message to the current working branch
+- **Merge Branch**: Combining conversation paths from multiple branches into a single branch
+
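+The sketch below is illustrative and uses only the calls documented in this guide (\`create_canvas\`, \`checkout\`, \`commit_message\`, \`merge\`); each step is covered in detail in the sections that follow:
+
+\`\`\`python
+from llm_canvas import canvas_client
+
+# Every canvas starts with a default "main" branch
+canvas = canvas_client.create_canvas(
+    title="Concepts Demo",
+    description="A Git-style canvas workflow"
+)
+
+# Messages are committed to a branch, never directly to the canvas
+main_branch = canvas.checkout(name="main", create_if_not_exists=True)
+main_branch.commit_message({
+    "role": "user",
+    "content": [{"type": "text", "text": "Hi there"}]
+})
+
+# Explore an alternative path on its own branch, then merge it back
+alt_branch = canvas.checkout(name="alternative", create_if_not_exists=True)
+alt_branch.commit_message({
+    "role": "assistant",
+    "content": [{"type": "text", "text": "Hello! Let's explore."}]
+})
+main_branch.merge(alt_branch, strategy="concat")
+\`\`\`
+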
+## Creating a New Canvas
+
+To start working with LLM Canvas, create a new canvas using the \`create_canvas\` method:
+
+\`\`\`python
+from llm_canvas import canvas_client
+
+# Create a canvas with title and description
+canvas = canvas_client.create_canvas(
+ title="My First Canvas",
+ description="A canvas for exploring LLM interactions"
+)
+\`\`\`
+
+## Working with Branches
+
+Before adding messages to your canvas, you need to understand that **all messages are committed to branches, not directly to the canvas**. A canvas can contain multiple branches, and you must have an active working branch to commit messages.
+
+### Creating and Switching to a Branch
+
+\`\`\`python
+# Create and checkout the main branch
+main_branch = canvas.checkout(name="main", create_if_not_exists=True)
+
+# Create a new branch from current position
+feature_branch = canvas.checkout(name="feature", create_if_not_exists=True)
+\`\`\`
+
+### Adding Messages to Your Current Branch
+
+\`\`\`python
+# Commit a user message
+user_message = {
+ "role": "user",
+ "content": [{"type": "text", "text": "What is machine learning?"}]
+}
+main_branch.commit_message(user_message)
+
+# Commit an assistant response
+assistant_message = {
+ "role": "assistant",
+ "content": [{"type": "text", "text": "Machine learning is a subset of artificial intelligence..."}]
+}
+main_branch.commit_message(assistant_message)
+\`\`\`
+
+## Managing Your Canvas
+
+### Switching Between Branches
+
+\`\`\`python
+# Switch to different branches
+main_branch = canvas.checkout(name="main")
+feature_branch = canvas.checkout(name="feature")
+\`\`\`
+
+### Merging Branches
+
+\`\`\`python
+# Merge feature branch into main
+main_branch.merge(feature_branch, strategy="concat")
+\`\`\``,
+ },
+ {
+ id: "message-types",
+ title: "Message Types",
+ content: `# Supported Message Types
+
+LLM Canvas supports rich message content that goes beyond simple text strings. Messages can contain various types of blocks to represent complex interactions including tool usage.
+
+## Message Structure
+
+Each message in LLM Canvas follows this structure:
+
+\`\`\`python
+from typing import Literal, TypedDict, Union
+
+# MessageBlock is the library's union of the block types described below
+class Message(TypedDict):
+    content: Union[str, list[MessageBlock]]
+    role: Literal["user", "assistant", "system"]
+\`\`\`
+
+## Content Types
+
+**Simple Text Content:**
+
+\`\`\`python
+message = {
+ "content": "Hello, world!",
+ "role": "user"
+}
+\`\`\`
+
+**Rich Content with Blocks:**
+Messages can contain a list of different block types for more complex interactions:
+
+\`\`\`python
+message = {
+ "content": [
+ {"type": "text", "text": "Please analyze this image:"},
+ {"type": "image_url", "image_url": {"url": "data:image/jpeg;base64,..."}}
+ ],
+ "role": "user"
+}
+\`\`\`
+
+## Supported Block Types
+
+### Text Block
+The most basic content type for text messages:
+
+\`\`\`python
+{
+ "type": "text",
+ "text": "Your text content here"
+}
+\`\`\`
+
+### Image URL Block
+For including images in messages:
+
+\`\`\`python
+{
+ "type": "image_url",
+ "image_url": {
+ "url": "https://example.com/image.jpg",
+ "detail": "high" # optional: "low", "high", "auto"
+ }
+}
+\`\`\`
+
+### Tool Call Block
+Represents a request to call a specific tool:
+
+\`\`\`python
+{
+ "type": "tool_call",
+ "id": "call_123",
+ "function": {
+ "name": "get_weather",
+ "arguments": '{"location": "New York"}'
+ }
+}
+\`\`\`
+
+### Tool Response Block
+Contains the result of a tool execution:
+
+\`\`\`python
+{
+ "type": "tool_response",
+ "tool_call_id": "call_123",
+ "content": "The weather in New York is sunny, 72°F"
+}
+\`\`\`
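+
+One way to represent a complete tool exchange is to pair the call and its result by \`tool_call_id\` across two messages. The sketch below is illustrative; in particular, carrying the tool result in a \`user\` message is an assumption based on the roles allowed by the \`Message\` type above:
+
+\`\`\`python
+# Assistant message requesting a tool call
+tool_request = {
+    "role": "assistant",
+    "content": [{
+        "type": "tool_call",
+        "id": "call_123",
+        "function": {"name": "get_weather", "arguments": '{"location": "New York"}'}
+    }]
+}
+
+# Follow-up message carrying the matching result
+# (role here is an assumption; the Message type allows user/assistant/system)
+tool_result = {
+    "role": "user",
+    "content": [{
+        "type": "tool_response",
+        "tool_call_id": "call_123",
+        "content": "The weather in New York is sunny, 72°F"
+    }]
+}
+\`\`\`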
+
+## Example: Complex Message with Multiple Blocks
+
+\`\`\`python
+complex_message = {
+ "role": "assistant",
+ "content": [
+ {"type": "text", "text": "I'll help you get the weather information."},
+ {
+ "type": "tool_call",
+ "id": "call_weather_123",
+ "function": {
+ "name": "get_current_weather",
+ "arguments": '{"location": "New York, NY"}'
+ }
+ },
+ {"type": "text", "text": "Let me check the current weather for you."}
+ ]
+}
+\`\`\``,
+ },
+ {
+ id: "server-setup",
+ title: "Server Setup",
+ content: `# Start the LLM Canvas Local Server
+
+LLM Canvas offers a **free & open source** local server that provides complete visualization capabilities for Large Language Model conversations. The local deployment runs entirely in your environment, giving you full privacy control while offering all core features.
+
+## Important Limitation
+
+⚠️ **No Data Persistence** - The local server uses session-based storage only:
+
+- Data is lost when the server restarts
+- No backup or recovery mechanisms
+- No cross-session data retention
+
+*For permanent data storage and cross-device access, consider our cloud-based plans.*
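+
+If the local server fits your needs, one simple pattern is to rebuild canvases programmatically whenever the server starts. This is a minimal sketch that assumes you store conversations yourself (the \`saved_conversations\` shape is hypothetical) and reuses only the client calls from the Getting Started section:
+
+\`\`\`python
+from llm_canvas import canvas_client
+
+def rebuild_canvases(saved_conversations):
+    """Recreate canvases from data you persist yourself (e.g. JSON files)."""
+    for convo in saved_conversations:
+        canvas = canvas_client.create_canvas(
+            title=convo["title"],
+            description=convo.get("description", "")
+        )
+        branch = canvas.checkout(name="main", create_if_not_exists=True)
+        for message in convo["messages"]:
+            branch.commit_message(message)
+\`\`\`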
+
+## Installation
+
+The local server requires Python and pip. To install it, run:
+
+\`\`\`bash
+pip install llm-canvas[server]
+\`\`\`
+
+## Running the Server
+
+Start the local server with the following command:
+
+\`\`\`bash
+llm-canvas server --port 8000 --log-level info
+\`\`\`
+
+### Command Options
+
+- \`--port\`: Port number for the server (default: 8000)
+- \`--host\`: Host address to bind to (default: 127.0.0.1)
+- \`--log-level\`: Logging level (debug, info, warning, error)
+- \`--cors\`: Enable CORS for cross-origin requests
+- \`--dev\`: Enable development mode with auto-reload
+
+### Example with Custom Configuration
+
+\`\`\`bash
+# Start server on different port with CORS enabled
+llm-canvas server --port 9000 --host 0.0.0.0 --cors --log-level debug
+\`\`\`
+
+## Accessing the Web Interface
+
+Once the server is running, you can access the web interface at:
+
+- **Local**: http://localhost:8000
+- **Custom**: http://your-host:your-port
+
+The web interface provides:
+- Canvas gallery view
+- Interactive conversation visualization
+- Real-time updates via Server-Sent Events
+- Branch and merge operations
+- Tool call visualization
+
+## Development Mode
+
+During development, you can run the server in development mode:
+
+\`\`\`bash
+llm-canvas server --dev --port 8000
+\`\`\`
+
+This enables:
+- Auto-reload on code changes
+- Enhanced debugging output
+- CORS enabled by default
+- Development-friendly error messages
+
+## Configuration
+
+The server can be configured through environment variables:
+
+\`\`\`bash
+export LLM_CANVAS_PORT=8000
+export LLM_CANVAS_HOST=127.0.0.1
+export LLM_CANVAS_LOG_LEVEL=info
+
+llm-canvas server
+\`\`\``,
+ },
+ ];
+
+ // Close sidebar when section changes on mobile
+ useEffect(() => {
+ setSidebarOpen(false);
+ }, [selectedSection]);
+
+ const currentSection = docSections.find(
+ section => section.id === selectedSection
+ );
+
+ return (
+