fix: now reasoning output is rendered in the UI #29

Open · wants to merge 3 commits into main

103 changes: 103 additions & 0 deletions src/components/Chat.tsx
@@ -7,6 +7,7 @@ import { generateMessageId } from '../mcp/client'
import type { Message } from 'ai'
import { type Servers } from '../lib/schemas'
import { ToolCallMessage } from './ToolCallMessage'
import { ReasoningMessage } from './ReasoningMessage'
import { useModel } from '../contexts/ModelContext'
import { useUser } from '../contexts/UserContext'

@@ -23,6 +24,16 @@ type StreamEvent =
arguments?: unknown
}
| { type: 'user'; id: string; content: string }
| {
type: 'reasoning'
effort: string
summary: string | null
model?: string
serviceTier?: string
temperature?: number
topP?: number
done?: boolean
}

export function Chat() {
const messagesEndRef = useRef<HTMLDivElement>(null)
@@ -75,6 +86,85 @@ export function Chat() {
try {
const toolState = JSON.parse(line.slice(2))

// Handle reasoning summary streaming
if (toolState.type === 'reasoning_summary_delta') {
setStreamBuffer((prev) => {
// Find the last reasoning message
const last = prev[prev.length - 1]
if (last && last.type === 'reasoning' && !last.done) {
// Append delta to summary
return [
...prev.slice(0, -1),
{
...last,
summary: (last.summary || '') + toolState.delta,
effort: toolState.effort || last.effort,
model: toolState.model || last.model,
serviceTier: toolState.serviceTier || last.serviceTier,
temperature: toolState.temperature ?? last.temperature,
topP: toolState.topP ?? last.topP,
},
]
} else {
// Start a new reasoning message
return [
...prev,
{
type: 'reasoning',
summary: toolState.delta,
effort: toolState.effort || '',
model: toolState.model,
serviceTier: toolState.serviceTier,
temperature: toolState.temperature,
topP: toolState.topP,
done: false,
},
]
}
})
return
}

if (toolState.type === 'reasoning_summary_done') {
setStreamBuffer((prev) => {
// Mark the last reasoning message as done
const last = prev[prev.length - 1]
if (last && last.type === 'reasoning' && !last.done) {
return [
...prev.slice(0, -1),
{
...last,
done: true,
effort: toolState.effort || last.effort,
model: toolState.model || last.model,
serviceTier: toolState.serviceTier || last.serviceTier,
temperature: toolState.temperature ?? last.temperature,
topP: toolState.topP ?? last.topP,
},
]
}
return prev
})
return
}

if (toolState.type === 'reasoning') {
setStreamBuffer((prev) => [
...prev,
{
type: 'reasoning',
effort: toolState.effort,
summary: toolState.summary,
model: toolState.model,
serviceTier: toolState.serviceTier,
temperature: toolState.temperature,
topP: toolState.topP,
},
])
return
}

// Tool call fallback (for other tool types)
if ('delta' in toolState) {
try {
toolState.delta =
@@ -214,6 +304,19 @@ export function Chat() {
args={event}
/>
)
} else if ('type' in event && event.type === 'reasoning') {
return (
<ReasoningMessage
key={`reasoning-${idx}-${event.effort}-${event.summary || ''}`}
effort={event.effort}
summary={event.summary}
model={event.model}
serviceTier={event.serviceTier}
temperature={event.temperature}
topP={event.topP}
isLoading={streaming && idx === renderEvents.length - 1}
/>
)
} else if ('type' in event && event.type === 'assistant') {
const assistantEvent = event as Extract<
StreamEvent,
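For context on the handler above: the server (see src/lib/streaming.ts below) writes each event as a `t:`-prefixed JSON line, and the client folds successive `reasoning_summary_delta` events into a single in-progress reasoning entry at the tail of the stream buffer. A minimal standalone sketch of that accumulation step; the `ReasoningEvent` type and `applyDelta` name are hypothetical, for illustration only:

```typescript
// Hypothetical sketch of the delta-accumulation logic in Chat.tsx above.
type ReasoningEvent = {
  type: 'reasoning'
  effort: string
  summary: string | null
  done?: boolean
}

function applyDelta(
  buffer: ReasoningEvent[],
  delta: string,
  effort?: string,
): ReasoningEvent[] {
  const last = buffer[buffer.length - 1]
  if (last && !last.done) {
    // Still streaming: append the delta to the open reasoning entry.
    return [
      ...buffer.slice(0, -1),
      { ...last, summary: (last.summary ?? '') + delta, effort: effort ?? last.effort },
    ]
  }
  // No open entry (or the last one is done): start a new reasoning message.
  return [...buffer, { type: 'reasoning', summary: delta, effort: effort ?? '', done: false }]
}
```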
32 changes: 2 additions & 30 deletions src/components/ChatMessage.tsx
@@ -2,7 +2,7 @@ import { cn } from '../lib/utils'
import type { Message } from '../mcp/client'
import { formatTimestamp } from '../lib/utils'
import { Bot, User, CheckCircle2, Clock, AlertCircle } from 'lucide-react'
import ReactMarkdown from 'react-markdown'
import { MarkdownContent } from './MarkdownContent'

type ChatMessageProps = {
message: Message
@@ -49,35 +49,7 @@ export function ChatMessage({ message, isLoading }: ChatMessageProps) {
)}
>
<div className="prose prose-sm dark:prose-invert max-w-none break-words break-all whitespace-pre-wrap">
<ReactMarkdown
components={{
pre: ({ node, ...props }) => (
<pre
className="overflow-x-auto whitespace-pre-wrap break-words break-all"
{...props}
/>
),
code: ({ node, ...props }) => (
<code
className="break-words break-all whitespace-pre-wrap"
{...props}
/>
),
a: ({ href, children, ...props }) => (
<a
href={href}
className="break-words break-all"
target="_blank"
rel="noopener noreferrer"
{...props}
>
{children}
</a>
),
}}
>
{message.content}
</ReactMarkdown>
<MarkdownContent content={message.content} />
</div>
</div>

41 changes: 41 additions & 0 deletions src/components/MarkdownContent.tsx
@@ -0,0 +1,41 @@
import ReactMarkdown from 'react-markdown'

type MarkdownContentProps = {
content: string
}

export function MarkdownContent({ content }: MarkdownContentProps) {
return (
<div className="prose prose-sm dark:prose-invert max-w-none break-words break-all whitespace-pre-wrap">
<ReactMarkdown
components={{
pre: ({ node, ...props }) => (
<pre
className="overflow-x-auto whitespace-pre-wrap break-words break-all"
{...props}
/>
),
code: ({ node, ...props }) => (
<code
className="break-words break-all whitespace-pre-wrap"
{...props}
/>
),
a: ({ href, children, ...props }) => (
<a
href={href}
className="break-words break-all"
target="_blank"
rel="noopener noreferrer"
{...props}
>
{children}
</a>
),
}}
>
{content}
</ReactMarkdown>
</div>
)
}
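Since MarkdownContent is a new export, a hypothetical call site (not part of this PR) looks like:

```tsx
import { MarkdownContent } from './MarkdownContent'

// Hypothetical call site: any markdown string gets the same overflow-safe
// pre/code/link handling that ChatMessage previously configured inline.
export function Example() {
  return <MarkdownContent content={'**Done.** See [the docs](https://example.com).'} />
}
```

Extracting this component lets ChatMessage and ReasoningMessage share one markdown configuration instead of duplicating the ReactMarkdown overrides.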
58 changes: 58 additions & 0 deletions src/components/ReasoningMessage.tsx
@@ -0,0 +1,58 @@
import { Brain } from 'lucide-react'
import { cn } from '../lib/utils'
import { MarkdownContent } from './MarkdownContent'

type ReasoningMessageProps = {
effort: string
summary: string | null
model?: string
serviceTier?: string
temperature?: number
topP?: number
isLoading?: boolean
}

export function ReasoningMessage({
effort,
summary,
model,
serviceTier,
temperature,
topP,
isLoading,
}: ReasoningMessageProps) {
return (
<div className="flex w-full max-w-full gap-2 py-2 animate-in fade-in justify-start">
<div
className={cn(
'flex h-8 w-8 shrink-0 select-none items-center justify-center rounded-md bg-purple-100 text-purple-600 dark:bg-purple-900 dark:text-purple-300',
isLoading && 'animate-[pulse_1.5s_ease-in-out_infinite] opacity-80',
)}
>
<Brain className="h-5 w-5" />
</div>

<div className="flex flex-col space-y-1 items-start w-full sm:w-[85%] md:w-[75%] lg:w-[65%]">
<div className="rounded-2xl px-4 py-2 text-sm w-full bg-purple-50 text-purple-900 dark:bg-purple-950 dark:text-purple-100">
<div className="font-medium mb-1">Reasoning</div>
<div className="text-xs space-y-1">
{effort && <div>Effort: {effort}</div>}
{summary && (
<div className="prose prose-sm dark:prose-invert max-w-none break-words break-all whitespace-pre-wrap">
<MarkdownContent content={summary} />
</div>
)}
{model && <div>Model: {model}</div>}
{serviceTier && <div>Service Tier: {serviceTier}</div>}
<div className="flex gap-4">
{temperature !== undefined && (
<div>Temperature: {temperature}</div>
)}
{topP !== undefined && <div>Top P: {topP}</div>}
</div>
</div>
</div>
</div>
</div>
)
}
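A hypothetical render of the component with illustrative prop values (none of these come from the PR; the real props arrive from the stream buffer):

```tsx
import { ReasoningMessage } from './ReasoningMessage'

// Illustrative values only, to show the expected prop shapes.
export function Example() {
  return (
    <ReasoningMessage
      effort="medium"
      summary="Compared both endpoints and picked the streaming variant."
      model="o4-mini"
      temperature={1}
      isLoading={false}
    />
  )
}
```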
75 changes: 75 additions & 0 deletions src/lib/streaming.ts
@@ -75,6 +75,48 @@ export function streamText(
}
}
break

case 'response.content_part.added':
case 'response.content_part.done':
if (chunk.part?.type === 'output_text' && chunk.part.text) {
buffer += chunk.part.text
flush()
}
break

case 'response.reasoning.delta':
if (typeof chunk.delta === 'string') {
controller.enqueue(
encoder.encode(
`t:${JSON.stringify({
type: 'reasoning',
effort: chunk.effort,
summary: chunk.delta,
model: chunk.model,
serviceTier: chunk.service_tier,
temperature: chunk.temperature,
topP: chunk.top_p,
})}\n`,
),
)
}
break

case 'response.created':
case 'response.in_progress':
if (chunk.response?.reasoning) {
controller.enqueue(
encoder.encode(
`t:${JSON.stringify({
type: 'reasoning',
effort: chunk.response.reasoning.effort,
summary: chunk.response.reasoning.summary,
})}\n`,
),
)
}
break

case 'response.mcp_call.failed':
console.error('[TOOL CALL FAILED]', chunk)

@@ -163,6 +205,39 @@ export function streamText(
}
break

case 'response.reasoning_summary_text.delta':
if (typeof chunk.delta === 'string') {
controller.enqueue(
encoder.encode(
`t:${JSON.stringify({
type: 'reasoning_summary_delta',
delta: chunk.delta,
effort: chunk.effort,
model: chunk.model,
serviceTier: chunk.service_tier,
temperature: chunk.temperature,
topP: chunk.top_p,
})}\n`,
),
)
}
break

case 'response.reasoning_summary_text.done':
controller.enqueue(
encoder.encode(
`t:${JSON.stringify({
type: 'reasoning_summary_done',
effort: chunk.effort,
model: chunk.model,
serviceTier: chunk.service_tier,
temperature: chunk.temperature,
topP: chunk.top_p,
})}\n`,
),
)
break

default:
break
}
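On the wire, each enqueued event is one newline-delimited, `t:`-prefixed JSON record. With illustrative payloads, a reasoning summary stream might look like:

```
t:{"type":"reasoning_summary_delta","delta":"Weighing the two","effort":"medium"}
t:{"type":"reasoning_summary_delta","delta":" retry strategies...","effort":"medium"}
t:{"type":"reasoning_summary_done","effort":"medium"}
```

The `reasoning_summary_delta` records concatenate into the summary on the client, and the `reasoning_summary_done` record flips `done` so the next delta opens a fresh reasoning entry.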
7 changes: 7 additions & 0 deletions src/routes/api/chat.ts
@@ -88,6 +88,13 @@ export const ServerRoute = createServerFileRoute('/api/chat').methods({
input,
stream: true,
user: userId,
...(model.startsWith('o3') || model.startsWith('o4')
? {
reasoning: {
summary: 'detailed',
},
}
: {}),
})

return streamText(answer)
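With the conditional spread resolved, the effective request for an o3/o4-family model is (sketch; `openai`, `model`, `input`, and `userId` are assumed from the surrounding route, and `reasoning.summary` follows the OpenAI Responses API):

```typescript
// Sketch of the effective call for a reasoning-capable model.
const answer = await openai.responses.create({
  model, // e.g. 'o4-mini'
  input,
  stream: true,
  user: userId,
  // Attached only when the model id starts with 'o3' or 'o4'.
  reasoning: { summary: 'detailed' },
})
```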