From 51f9fd9993b90beb76255789dcda02e8f09c7373 Mon Sep 17 00:00:00 2001
From: Nick Taylor
Date: Thu, 29 May 2025 23:09:00 -0400
Subject: [PATCH 1/3] fix: now reasoning output is rendered in the UI

---
 src/components/Chat.tsx             | 39 +++++++++++++++++
 src/components/ChatMessage.tsx      | 32 +-------------
 src/components/MarkdownContent.tsx  | 41 +++++++++++++++++
 src/components/ReasoningMessage.tsx | 58 ++++++++++++++++++++++++
 src/lib/streaming.ts                | 68 +++++++++++++++++++++++++++++
 src/routes/api/chat.ts              |  7 +++
 6 files changed, 215 insertions(+), 30 deletions(-)
 create mode 100644 src/components/MarkdownContent.tsx
 create mode 100644 src/components/ReasoningMessage.tsx

diff --git a/src/components/Chat.tsx b/src/components/Chat.tsx
index 5728d6a..b56bc6f 100644
--- a/src/components/Chat.tsx
+++ b/src/components/Chat.tsx
@@ -7,6 +7,7 @@ import { generateMessageId } from '../mcp/client'
 import type { Message } from 'ai'
 import { type Servers } from '../lib/schemas'
 import { ToolCallMessage } from './ToolCallMessage'
+import { ReasoningMessage } from './ReasoningMessage'
 import { useModel } from '../contexts/ModelContext'
 import { useUser } from '../contexts/UserContext'
@@ -23,6 +24,15 @@ type StreamEvent =
       arguments?: unknown
     }
   | { type: 'user'; id: string; content: string }
+  | {
+      type: 'reasoning'
+      effort: string
+      summary: string | null
+      model?: string
+      serviceTier?: string
+      temperature?: number
+      topP?: number
+    }

 export function Chat() {
   const messagesEndRef = useRef<HTMLDivElement>(null)
@@ -75,6 +85,22 @@ export function Chat() {
         try {
           const toolState = JSON.parse(line.slice(2))

+          if (toolState.type === 'reasoning') {
+            setStreamBuffer((prev) => [
+              ...prev,
+              {
+                type: 'reasoning',
+                effort: toolState.effort,
+                summary: toolState.summary,
+                model: toolState.model,
+                serviceTier: toolState.serviceTier,
+                temperature: toolState.temperature,
+                topP: toolState.topP,
+              },
+            ])
+            return
+          }
+
           if ('delta' in toolState) {
             try {
               toolState.delta =
@@ -214,6 +240,19 @@ export function Chat() {
               args={event}
             />
           )
+        } else if ('type' in event && event.type === 'reasoning') {
+          return (
+            <ReasoningMessage
+              key={`reasoning-${i}`}
+              effort={event.effort}
+              summary={event.summary}
+              model={event.model}
+              serviceTier={event.serviceTier}
+              temperature={event.temperature}
+              topP={event.topP}
+              isLoading={isLoading}
+            />
+          )
         } else if ('type' in event && event.type === 'assistant') {
           const assistantEvent = event as Extract<
             StreamEvent,
diff --git a/src/components/ChatMessage.tsx b/src/components/ChatMessage.tsx
index db7cfa4..dd221bb 100644
--- a/src/components/ChatMessage.tsx
+++ b/src/components/ChatMessage.tsx
@@ -2,7 +2,7 @@ import { cn } from '../lib/utils'
 import type { Message } from '../mcp/client'
 import { formatTimestamp } from '../lib/utils'
 import { Bot, User, CheckCircle2, Clock, AlertCircle } from 'lucide-react'
-import ReactMarkdown from 'react-markdown'
+import { MarkdownContent } from './MarkdownContent'

 type ChatMessageProps = {
   message: Message
@@ -49,35 +49,7 @@ export function ChatMessage({ message, isLoading }: ChatMessageProps) {
         )}
       >
-            <ReactMarkdown
-              components={{
-                p: ({ node, ...props }) => (
-                  <p {...props} />
-                ),
-                code: ({ node, ...props }) => (
-                  <code {...props} />
-                ),
-                a: ({ href, children, ...props }) => (
-                  <a href={href} {...props}>
-                    {children}
-                  </a>
-                ),
-              }}
-            >
-              {message.content}
-            </ReactMarkdown>
+            <MarkdownContent content={message.content} />
           </div>
diff --git a/src/components/MarkdownContent.tsx b/src/components/MarkdownContent.tsx
new file mode 100644
index 0000000..53465e9
--- /dev/null
+++ b/src/components/MarkdownContent.tsx
@@ -0,0 +1,41 @@
+import ReactMarkdown from 'react-markdown'
+
+type MarkdownContentProps = {
+  content: string
+}
+
+export function MarkdownContent({ content }: MarkdownContentProps) {
+  return (
+    <div>
+      <ReactMarkdown
+        components={{
+          p: ({ node, ...props }) => (
+            <p {...props} />
+          ),
+          code: ({ node, ...props }) => (
+            <code {...props} />
+          ),
+          a: ({ href, children, ...props }) => (
+            <a href={href} {...props}>
+              {children}
+            </a>
+          ),
+        }}
+      >
+        {content}
+      </ReactMarkdown>
+    </div>
+  )
+}
diff --git a/src/components/ReasoningMessage.tsx b/src/components/ReasoningMessage.tsx
new file mode 100644
index 0000000..9c1c989
--- /dev/null
+++ b/src/components/ReasoningMessage.tsx
@@ -0,0 +1,58 @@
+import { Brain } from 'lucide-react'
+import { cn } from '../lib/utils'
+import { MarkdownContent } from './MarkdownContent'
+
+type ReasoningMessageProps = {
+  effort: string
+  summary: string | null
+  model?: string
+  serviceTier?: string
+  temperature?: number
+  topP?: number
+  isLoading?: boolean
+}
+
+export function ReasoningMessage({
+  effort,
+  summary,
+  model,
+  serviceTier,
+  temperature,
+  topP,
+  isLoading,
+}: ReasoningMessageProps) {
+  return (
+    <div>
+      <div>
+        <Brain />
+      </div>
+      <div>
+        <div>
+          <div>Reasoning</div>
+          <div>
+            {effort && <div>Effort: {effort}</div>}
+            {summary && (
+              <MarkdownContent content={summary} />
+            )}
+            {model && <div>Model: {model}</div>}
+            {serviceTier && <div>Service Tier: {serviceTier}</div>}
+            <div>
+              {temperature !== undefined && (
+                <div>Temperature: {temperature}</div>
+              )}
+              {topP !== undefined && <div>Top P: {topP}</div>}
+            </div>
+          </div>
+        </div>
+      </div>
+    </div>
+  )
+}
diff --git a/src/lib/streaming.ts b/src/lib/streaming.ts
index 3e76e15..dcb972e 100644
--- a/src/lib/streaming.ts
+++ b/src/lib/streaming.ts
@@ -21,6 +21,7 @@ export function streamText(
   }

   let buffer = ''
+  let reasoningSummaryBuffer = ''

   const flush = () => {
     if (buffer) {
@@ -75,6 +76,48 @@
           }
         }
         break
+
+      case 'response.content_part.added':
+      case 'response.content_part.done':
+        if (chunk.part?.type === 'output_text' && chunk.part.text) {
+          buffer += chunk.part.text
+          flush()
+        }
+        break
+
+      case 'response.reasoning.delta':
+        if (typeof chunk.delta === 'string') {
+          controller.enqueue(
+            encoder.encode(
+              `t:${JSON.stringify({
+                type: 'reasoning',
+                effort: chunk.effort,
+                summary: chunk.delta,
+                model: chunk.model,
+                serviceTier: chunk.service_tier,
+                temperature: chunk.temperature,
+                topP: chunk.top_p,
+              })}\n`,
+            ),
+          )
+        }
+        break
+
+      case 'response.created':
+      case 'response.in_progress':
+        if (chunk.response?.reasoning) {
+          controller.enqueue(
+            encoder.encode(
+              `t:${JSON.stringify({
+                type: 'reasoning',
+                effort: chunk.response.reasoning.effort,
+                summary: chunk.response.reasoning.summary,
+              })}\n`,
+            ),
+          )
+        }
+        break
+
       case 'response.mcp_call.failed':
         console.error('[TOOL CALL FAILED]', chunk)
@@ -163,6 +206,31 @@
         }
         break

+      case 'response.reasoning_summary_text.delta':
+        if (typeof chunk.delta === 'string') {
+          reasoningSummaryBuffer += chunk.delta
+        }
+        break
+
+      case 'response.reasoning_summary_text.done':
+        if (reasoningSummaryBuffer) {
+          controller.enqueue(
+            encoder.encode(
+              `t:${JSON.stringify({
+                type: 'reasoning',
+                effort: chunk.effort,
+                summary: reasoningSummaryBuffer,
+                model: chunk.model,
+                serviceTier: chunk.service_tier,
+                temperature: chunk.temperature,
+                topP: chunk.top_p,
+              })}\n`,
+            ),
+          )
+          reasoningSummaryBuffer = ''
+        }
+        break
+
       default:
         break
     }
diff --git a/src/routes/api/chat.ts b/src/routes/api/chat.ts
index a2d935f..a2a5082 100644
--- a/src/routes/api/chat.ts
+++ b/src/routes/api/chat.ts
@@ -88,6 +88,13 @@ export const ServerRoute = createServerFileRoute('/api/chat').methods({
       input,
       stream: true,
       user: userId,
+      ...(model.startsWith('o3') || model.startsWith('o4')
+        ? {
+            reasoning: {
+              summary: 'detailed',
+            },
+          }
+        : {}),
     })

     return streamText(answer)

From 9033f42e11c2aab7c61688335724616e2dbe09e4 Mon Sep 17 00:00:00 2001
From: Nick Taylor
Date: Fri, 30 May 2025 05:32:47 -0400
Subject: [PATCH 2/3] Update Chat.tsx

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
---
 src/components/Chat.tsx | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/components/Chat.tsx b/src/components/Chat.tsx
index b56bc6f..e7259ff 100644
--- a/src/components/Chat.tsx
+++ b/src/components/Chat.tsx
@@ -243,7 +243,7 @@ export function Chat() {
         } else if ('type' in event && event.type === 'reasoning') {
           return (
             <ReasoningMessage

From: Nick Taylor
Date: Fri, 30 May 2025 13:04:04 -0400
Subject: [PATCH 3/3] now reasoning deltas stream in

---
 src/components/Chat.tsx | 64 +++++++++++++++++++++++++++++++++++
 src/lib/streaming.ts    | 27 ++++++++++-------
 2 files changed, 81 insertions(+), 10 deletions(-)

diff --git a/src/components/Chat.tsx b/src/components/Chat.tsx
index e7259ff..5671f2d 100644
--- a/src/components/Chat.tsx
+++ b/src/components/Chat.tsx
@@ -32,6 +32,7 @@ type StreamEvent =
       serviceTier?: string
       temperature?: number
       topP?: number
+      done?: boolean
     }

 export function Chat() {
@@ -85,6 +86,68 @@ export function Chat() {
         try {
           const toolState = JSON.parse(line.slice(2))

+          // Handle reasoning summary streaming
+          if (toolState.type === 'reasoning_summary_delta') {
+            setStreamBuffer((prev) => {
+              // Find the last reasoning message
+              const last = prev[prev.length - 1]
+              if (last && last.type === 'reasoning' && !last.done) {
+                // Append delta to summary
+                return [
+                  ...prev.slice(0, -1),
+                  {
+                    ...last,
+                    summary: (last.summary || '') + toolState.delta,
+                    effort: toolState.effort || last.effort,
+                    model: toolState.model || last.model,
+                    serviceTier: toolState.serviceTier || last.serviceTier,
+                    temperature: toolState.temperature ?? last.temperature,
+                    topP: toolState.topP ?? last.topP,
+                  },
+                ]
+              } else {
+                // Start a new reasoning message
+                return [
+                  ...prev,
+                  {
+                    type: 'reasoning',
+                    summary: toolState.delta,
+                    effort: toolState.effort || '',
+                    model: toolState.model,
+                    serviceTier: toolState.serviceTier,
+                    temperature: toolState.temperature,
+                    topP: toolState.topP,
+                    done: false,
+                  },
+                ]
+              }
+            })
+            return
+          }
+
+          if (toolState.type === 'reasoning_summary_done') {
+            setStreamBuffer((prev) => {
+              // Mark the last reasoning message as done
+              const last = prev[prev.length - 1]
+              if (last && last.type === 'reasoning' && !last.done) {
+                return [
+                  ...prev.slice(0, -1),
+                  {
+                    ...last,
+                    done: true,
+                    effort: toolState.effort || last.effort,
+                    model: toolState.model || last.model,
+                    serviceTier: toolState.serviceTier || last.serviceTier,
+                    temperature: toolState.temperature ?? last.temperature,
+                    topP: toolState.topP ?? last.topP,
+                  },
+                ]
+              }
+              return prev
+            })
+            return
+          }
+
           if (toolState.type === 'reasoning') {
             setStreamBuffer((prev) => [
               ...prev,
               {
                 type: 'reasoning',
                 effort: toolState.effort,
                 summary: toolState.summary,
                 model: toolState.model,
                 serviceTier: toolState.serviceTier,
                 temperature: toolState.temperature,
                 topP: toolState.topP,
               },
             ])
             return
           }

+          // Tool call fallback (for other tool types)
           if ('delta' in toolState) {
             try {
               toolState.delta =
diff --git a/src/lib/streaming.ts b/src/lib/streaming.ts
index dcb972e..ffa7491 100644
--- a/src/lib/streaming.ts
+++ b/src/lib/streaming.ts
@@ -21,7 +21,6 @@ export function streamText(
   }

   let buffer = ''
-  let reasoningSummaryBuffer = ''

   const flush = () => {
     if (buffer) {
@@ -208,18 +207,12 @@
       case 'response.reasoning_summary_text.delta':
         if (typeof chunk.delta === 'string') {
-          reasoningSummaryBuffer += chunk.delta
-        }
-        break
-
-      case 'response.reasoning_summary_text.done':
-        if (reasoningSummaryBuffer) {
           controller.enqueue(
             encoder.encode(
               `t:${JSON.stringify({
-                type: 'reasoning',
+                type: 'reasoning_summary_delta',
+                delta: chunk.delta,
                 effort: chunk.effort,
-                summary: reasoningSummaryBuffer,
                 model: chunk.model,
                 serviceTier: chunk.service_tier,
                 temperature: chunk.temperature,
                 topP: chunk.top_p,
               })}\n`,
             ),
           )
-          reasoningSummaryBuffer = ''
         }
         break

+      case 'response.reasoning_summary_text.done':
+        controller.enqueue(
+          encoder.encode(
+            `t:${JSON.stringify({
+              type: 'reasoning_summary_done',
+              effort: chunk.effort,
+              model: chunk.model,
+              serviceTier: chunk.service_tier,
+              temperature: chunk.temperature,
+              topP: chunk.top_p,
+            })}\n`,
+          ),
+        )
+        break
+
       default:
         break
     }