From 5a4f3255c72e7dcf95c91d911def8763cf31148a Mon Sep 17 00:00:00 2001 From: Francisco Moretti Date: Sat, 22 Jun 2024 12:01:02 +0100 Subject: [PATCH 1/4] draft of vercel ai streaming --- .../learn/002-get-started/001-vercel-ai.mdx | 59 +++++++++++++++---- 1 file changed, 48 insertions(+), 11 deletions(-) diff --git a/docs/docs/learn/002-get-started/001-vercel-ai.mdx b/docs/docs/learn/002-get-started/001-vercel-ai.mdx index 4211aa22..8c721091 100644 --- a/docs/docs/learn/002-get-started/001-vercel-ai.mdx +++ b/docs/docs/learn/002-get-started/001-vercel-ai.mdx @@ -147,8 +147,7 @@ Then add the following code: ```tsx import { openai } from '@ai-sdk/openai'; -import { generateText } from 'ai'; -import {NextResponse} from 'next/server'; +import { streamText } from 'ai'; // Allow streaming responses up to 30 seconds export const maxDuration = 30; @@ -156,15 +155,19 @@ export const maxDuration = 30; export async function POST(req: Request) { const { prompt } = await req.json(); - const result = await generateText({ + const result = await streamText({ model: openai('gpt-4-turbo'), messages: [{ role: 'system', content: prompt, - }] + }], + async onFinish({ text, toolCalls, toolResults, usage, finishReason }) { + // implement your own logic here, e.g. 
for storing messages + // or recording token usage + }, }); - return NextResponse.json({ reply: result.responseMessages[0].content[0].text }); + return result.toTextStreamResponse(); } ``` @@ -182,24 +185,58 @@ and provide a user message input: ```tsx 'use client'; -import {AiChat, ChatAdapter} from '@nlux/react'; +import {AiChat, ChatAdapter, StreamingAdapterObserver} from '@nlux/react'; import '@nlux/themes/nova.css'; export default function Chat() { - const chatAdapter: ChatAdapter = { batchText: async (prompt: string) => { + const chatAdapter: ChatAdapter = { + + streamText: async (prompt: string, observer: StreamingAdapterObserver) => { const response = await fetch('/api/chat', { method: 'POST', body: JSON.stringify({prompt: prompt}), headers: {'Content-Type': 'application/json'}, }); - const {reply} = await response.json(); - return reply; - }}; + if (response.status !== 200) { + observer.error(new Error('Failed to connect to the server')); + return; + } + + if (!response.body) { + return; + } + + // Read a stream of server-sent events + // and feed them to the observer as they are being generated + const reader = response.body.getReader(); + const textDecoder = new TextDecoder(); + + while (true) { + const { value, done } = await reader.read(); + if (done) { + break; + } + + const content = textDecoder.decode(value); + console.log(content, value) + if (content) { + observer.next(content); + } + } + + observer.complete(); + } + } return (
- +
); From a725c59c98cd3c8dc8e8d35e25124261af879497 Mon Sep 17 00:00:00 2001 From: Francisco Moretti Date: Sat, 22 Jun 2024 12:05:35 +0100 Subject: [PATCH 2/4] small step update --- docs/docs/learn/002-get-started/001-vercel-ai.mdx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/docs/learn/002-get-started/001-vercel-ai.mdx b/docs/docs/learn/002-get-started/001-vercel-ai.mdx index 8c721091..5f04aa68 100644 --- a/docs/docs/learn/002-get-started/001-vercel-ai.mdx +++ b/docs/docs/learn/002-get-started/001-vercel-ai.mdx @@ -249,7 +249,7 @@ Let's take a look at what is happening in this code: This tells the Next.js framework that **this file is intended to run on the client-side**. 1. We import `AiChat` from `@nlux/react` and the default theme `@nlux/themes/nova.css`. 2. We define a `chatAdapter` object that implements the interface `ChatAdapter`.
- It contains on method `batchText` that handles chat responses generated in a single batch. + It contains one method `streamText` that handles streaming chat responses. 3. We render the `` component with the `chatAdapter` object. --- From 6229173a63b44344b47cf1a65fa7accb0cdf89f8 Mon Sep 17 00:00:00 2001 From: Francisco Moretti Date: Sat, 22 Jun 2024 12:14:22 +0100 Subject: [PATCH 3/4] removed console log --- docs/docs/learn/002-get-started/001-vercel-ai.mdx | 1 - 1 file changed, 1 deletion(-) diff --git a/docs/docs/learn/002-get-started/001-vercel-ai.mdx b/docs/docs/learn/002-get-started/001-vercel-ai.mdx index 5f04aa68..139b5bb8 100644 --- a/docs/docs/learn/002-get-started/001-vercel-ai.mdx +++ b/docs/docs/learn/002-get-started/001-vercel-ai.mdx @@ -218,7 +218,6 @@ export default function Chat() { } const content = textDecoder.decode(value); - console.log(content, value) if (content) { observer.next(content); } From fff46d6bc9b222dd0e6936e9a4998a2dedc0e669 Mon Sep 17 00:00:00 2001 From: Francisco Moretti Date: Sat, 22 Jun 2024 12:15:31 +0100 Subject: [PATCH 4/4] remove experiment code --- docs/docs/learn/002-get-started/001-vercel-ai.mdx | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/docs/docs/learn/002-get-started/001-vercel-ai.mdx b/docs/docs/learn/002-get-started/001-vercel-ai.mdx index 139b5bb8..3c6d5829 100644 --- a/docs/docs/learn/002-get-started/001-vercel-ai.mdx +++ b/docs/docs/learn/002-get-started/001-vercel-ai.mdx @@ -230,12 +230,7 @@ export default function Chat() { return (
- +
);