Shadow chat gpt
import type { NextApiRequest, NextApiResponse } from 'next';
import type { Document } from 'langchain/document';
import { OpenAIEmbeddings } from 'langchain/embeddings/openai';
import { PineconeStore } from 'langchain/vectorstores/pinecone';
import { makeChain } from '@/utils/makechain';
import { pinecone } from '@/utils/pinecone-client';
import { PINECONE_INDEX_NAME, PINECONE_NAME_SPACE } from '@/config/pinecone';
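
// POST handler: takes a question and chat history from the request body,
// retrieves matching documents from the Pinecone index, and answers with
// the chain built in makeChain, returning the answer and source documents.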
export default async function handler(
  req: NextApiRequest,
  res: NextApiResponse,
) {
  // Only accept POST requests; reject other methods before reading the body
  if (req.method !== 'POST') {
    res.status(405).json({ error: 'Method not allowed' });
    return;
  }

  const { question, history } = req.body;
  console.log('question', question);
  console.log('history', history);

  if (!question) {
    return res.status(400).json({ message: 'No question in the request' });
  }
// OpenAI recommends replacing newlines with spaces for best results
const sanitizedQuestion = question.trim().replaceAll('\n', ' ');
try {
const index = pinecone.Index(PINECONE_INDEX_NAME);
    /* Create the vector store backed by the existing Pinecone index */
const vectorStore = await PineconeStore.fromExistingIndex(
new OpenAIEmbeddings({}),
{
pineconeIndex: index,
textKey: 'text',
namespace: PINECONE_NAME_SPACE, //namespace comes from your config folder
},
);
// Use a callback to get intermediate sources from the middle of the chain
let resolveWithDocuments: (value: Document[]) => void;
const documentPromise = new Promise<Document[]>((resolve) => {
resolveWithDocuments = resolve;
});
const retriever = vectorStore.asRetriever({
callbacks: [
{
handleRetrieverEnd(documents) {
resolveWithDocuments(documents);
},
},
],
});
    // Create the conversational chain over the retriever
    const chain = makeChain(retriever);
    // Flatten the chat history into alternating Human/Assistant lines
    const pastMessages = (history || [])
      .map((message: [string, string]) => {
        return [`Human: ${message[0]}`, `Assistant: ${message[1]}`].join('\n');
      })
      .join('\n');
console.log(pastMessages);
    // Ask the question, passing the flattened chat history to the chain
const response = await chain.invoke({
question: sanitizedQuestion,
chat_history: pastMessages,
});
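    // The retriever callback resolved this promise while the chain ran,
    // so the source documents are available by the time invoke returns.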
const sourceDocuments = await documentPromise;
console.log('response', response);
res.status(200).json({ text: response, sourceDocuments });
} catch (error: any) {
    console.error('error', error);
res.status(500).json({ error: error.message || 'Something went wrong' });
}
}
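
For context, here is a minimal client-side sketch of how this handler might be called, assuming the file is deployed as a Next.js API route at a hypothetical path of /api/chat (the actual route path is not shown in this file). The request and response shapes mirror what the handler reads from req.body and writes back.

// Hypothetical client helper; '/api/chat' is an assumed route path.
async function askQuestion(
  question: string,
  history: [string, string][],
): Promise<{ text: string; sourceDocuments: unknown[] }> {
  const res = await fetch('/api/chat', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    // The handler expects { question, history } in the JSON body
    body: JSON.stringify({ question, history }),
  });
  if (!res.ok) {
    throw new Error(`Request failed with status ${res.status}`);
  }
  // The handler responds with { text, sourceDocuments }
  return res.json();
}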