Commit 080cb4c (1 parent: b7935c0)
3 files changed: 22 additions, 4 deletions
File 1 of 3 — the Home page component:

@@ -50,7 +50,7 @@ export default function Home() {
     const controller = new AbortController();
     const body: RequestBody = { messages: newMessages, model, apiKey };
 
-    const response = await fetch("/api/openai", {
+    const response = await fetch("/api/chat", {
       method: "POST",
       headers: {
         "Content-Type": "application/json",
@@ -86,8 +86,8 @@ export default function Home() {
   };
 
   return (
-    <main className="container flex-1 w-full flex flex-wrapq">
-      <div className="flex border flex-col justify-between w-full md:w-1/2">
+    <main className="container flex-1 w-full flex flex-wrap">
+      <div className="flex border md:border-r-0 flex-col justify-between w-full md:w-1/2">
        <div className="">
          <div className="">
            {messages.map((message) => {
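For context (not shown in this commit's diff): the response returned by the fetch above is a stream, so the client reads it incrementally rather than awaiting a single JSON payload. A minimal sketch of how the body of the /api/chat response could be consumed, assuming a plain-text stream; the updateLastMessage helper is a hypothetical placeholder, not repository code:

// Hypothetical continuation after the fetch above: read the streamed body
// chunk by chunk and accumulate the assistant's reply as it arrives.
if (!response.ok || !response.body) {
  throw new Error("Request to /api/chat failed");
}

const reader = response.body.getReader();
const decoder = new TextDecoder();
let assistantText = "";

while (true) {
  const { done, value } = await reader.read();
  if (done) break;
  assistantText += decoder.decode(value, { stream: true });
  // A real UI would update state incrementally here, e.g.
  // updateLastMessage(assistantText); // hypothetical helper
}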
File 2 of 3 — the Next.js config (next.config.js):

@@ -3,7 +3,6 @@ const nextConfig = {
   experimental: {
     appDir: true,
   },
-  runtime: "experimental-edge",
 };
 
 module.exports = nextConfig;
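For context: with the global runtime: "experimental-edge" flag removed from the config above, the edge runtime is opted into per route instead. The new API handler in the next file does this with a per-file config export, e.g.:

// Per-route opt-in to the edge runtime; this replaces the removed global
// next.config.js setting and matches the export in the new /api/chat handler.
export const config = {
  runtime: "edge",
};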
File 3 of 3 — the new /api/chat API route (new file, 19 lines added):

@@ -0,0 +1,19 @@
+import { OpenAIStream } from "@/lib/utils";
+import { type RequestBody } from "@/types/type";
+
+export const config = {
+  runtime: "edge",
+};
+
+export default async function chat(req: Request) {
+  try {
+    const { messages, model, apiKey } = (await req.json()) as RequestBody;
+
+    const stream = await OpenAIStream(messages, model, apiKey);
+
+    return new Response(stream);
+  } catch (error) {
+    console.error(error);
+    return new Response("Error", { status: 500 });
+  }
+}
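The OpenAIStream helper imported from "@/lib/utils" is not changed (or shown) in this commit. A minimal sketch of what such a helper might look like, assuming it calls the OpenAI chat completions endpoint with stream: true and re-emits the text deltas as a ReadableStream of UTF-8 bytes; the ChatMessage type and the SSE parsing details below are assumptions, not repository code:

// Assumed message shape; the repository's own types live in "@/types/type".
type ChatMessage = { role: "system" | "user" | "assistant"; content: string };

export async function OpenAIStream(
  messages: ChatMessage[],
  model: string,
  apiKey: string
): Promise<ReadableStream<Uint8Array>> {
  const res = await fetch("https://api.openai.com/v1/chat/completions", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${apiKey}`,
    },
    body: JSON.stringify({ model, messages, stream: true }),
  });

  if (!res.ok || !res.body) {
    throw new Error(`OpenAI request failed with status ${res.status}`);
  }

  const reader = res.body.getReader();
  const decoder = new TextDecoder();
  const encoder = new TextEncoder();

  return new ReadableStream<Uint8Array>({
    async start(controller) {
      let buffer = "";
      while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        buffer += decoder.decode(value, { stream: true });
        // The API streams server-sent events separated by blank lines.
        const events = buffer.split("\n\n");
        buffer = events.pop() ?? "";
        for (const event of events) {
          const data = event.replace(/^data: /, "").trim();
          if (!data || data === "[DONE]") continue;
          try {
            const json = JSON.parse(data);
            const text = json.choices?.[0]?.delta?.content ?? "";
            if (text) controller.enqueue(encoder.encode(text));
          } catch {
            // Ignore malformed or partial frames in this sketch.
          }
        }
      }
      controller.close();
    },
  });
}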