Skip to content

Commit

Permalink
Merged sessions, resolved conflicts
Browse files Browse the repository at this point in the history
  • Loading branch information
Krzysztof Filipów committed Apr 11, 2024
2 parents c82af1b + d0b58f2 commit 5ac7124
Show file tree
Hide file tree
Showing 6 changed files with 62 additions and 15 deletions.
9 changes: 9 additions & 0 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
},
"homepage": "https://github.com/senacor/chatbot-poc-backend#readme",
"dependencies": {
"@isaacs/ttlcache": "^1.4.1",
"cors": "^2.8.5",
"dotenv": "^16.4.5",
"express": "^4.18.3",
Expand Down
14 changes: 10 additions & 4 deletions src/chat/get-init.ts
Original file line number Diff line number Diff line change
@@ -1,12 +1,15 @@
import { z } from "zod";
import { makeGetEndpoint } from "../middleware/validation/makeGetEndpoint.js";
import { addMessages, getUserVisibleMessages } from "../util/messageStore.js";
import { IDENTITY_HEADER } from "./index.js";

//TODO: Rework type inference of fileReader
export const init = makeGetEndpoint(z.any(), async (_request, response) => {

return response
.status(200)
.send([
const identity = _request.header(IDENTITY_HEADER);
if (!identity) {
return response.status(400).send(`Missing ${IDENTITY_HEADER} header.`)
}
addMessages(identity, [
{
role: 'system',
content: "Ask the user to provide the content of the file they want to chat about. Talk only about the content of the provided file."
Expand All @@ -16,4 +19,7 @@ export const init = makeGetEndpoint(z.any(), async (_request, response) => {
content: "Hallo! Bitte laden Sie die Datei hoch, über die Sie chatten möchten."
}
]);
return response
.status(200)
.send(getUserVisibleMessages(identity));
});
2 changes: 2 additions & 0 deletions src/chat/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ import fileUpload from "./upload-file.js";
const PROMPT_FILE_NAME = "Prompt_Baufinanzierung.docx"
const OPENAI_MODEL: ChatCompletionCreateParamsBase["model"] = "gpt-3.5-turbo";
const RESPONSE_FORMAT: ChatCompletionCreateParams.ResponseFormat = {type: "text"};
const IDENTITY_HEADER = 'X-Identity';


const chatRouter = express.Router();
Expand All @@ -29,6 +30,7 @@ export {
PROMPT_FILE_NAME,
OPENAI_MODEL,
RESPONSE_FORMAT,
IDENTITY_HEADER,
chatRouter,
openai,
}
29 changes: 18 additions & 11 deletions src/chat/post-new-message.ts
Original file line number Diff line number Diff line change
@@ -1,19 +1,17 @@
import { z } from "zod";
import { makePostEndpoint } from "../middleware/validation/makePostEndpoint.js";
import { OPENAI_MODEL, RESPONSE_FORMAT, openai } from "./index.js";
import { ChatCompletionMessageParam } from "openai/resources/index.js";
import { IDENTITY_HEADER, OPENAI_MODEL, RESPONSE_FORMAT, openai } from "./index.js";
import {ChatCompletionMessageParam } from "openai/resources/index.js";
import { addMessage, getMessages, getUserVisibleMessages } from "../util/messageStore.js";

const ChatCompletionRole = z.union([z.literal('user'), z.literal('system'), z.literal('assistant')]);
export type ChatCompletionRole = z.infer<typeof ChatCompletionRole>;

const MessageHistory = z.object({
messages: z.array(
const MessageHistory =
z.object({
role: ChatCompletionRole,
content: z.string(),
})
).nonempty()
});
});
type MessageHistory = z.infer<typeof MessageHistory>;

const makeApiRequest = async (messages: ChatCompletionMessageParam[]) => {
Expand All @@ -24,17 +22,26 @@ const makeApiRequest = async (messages: ChatCompletionMessageParam[]) => {
});
}

const processMessages = async (messages: ChatCompletionMessageParam[], response: any) => {
const processMessages = async (messages: ChatCompletionMessageParam[], response: any, identity: string) => {
const completion = await makeApiRequest(messages);
const chatResponse = completion.choices[0];
console.log(chatResponse);
if(!chatResponse){
return response.status(500).send("Got no response from the bot");
}
return response.status(200).send(chatResponse.message);
addMessage(identity, chatResponse.message);
return response.status(200).send(getUserVisibleMessages(identity));

}


export const newMessage = makePostEndpoint(MessageHistory, async (request, response) => {
const messages = request.body.messages;
processMessages(messages, response);
const message = request.body;
const identity = request.header(IDENTITY_HEADER);
if (!identity) {
return response.status(400).send(`Missing ${IDENTITY_HEADER} header.`)
}
addMessage(identity, message);
const messages = getMessages(identity) ?? []; //TODO handle no messages
return processMessages(messages, response, identity);
});
22 changes: 22 additions & 0 deletions src/util/messageStore.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
import { ChatCompletionMessageParam } from "openai/resources";
import TTLCache from '@isaacs/ttlcache';

// In-memory per-identity chat history, keyed by the X-Identity header value.
// NOTE(review): per @isaacs/ttlcache semantics, each entry expires 50 minutes
// (50 * 60 * 1000 ms) after it was last set, and at most 10000 sessions are
// retained at once — confirm these limits match product expectations.
const sessionStore = new TTLCache<string, ChatCompletionMessageParam[]>({ max: 10000, ttl: 50 * 60 * 1000 });

/**
 * Appends one message to the stored history for the given session key.
 * Starts a fresh history when none exists (or the previous one expired).
 * Stores a new array rather than mutating the old one, so references
 * previously handed out by getMessages are left untouched.
 */
export const addMessage = (key: string, message: ChatCompletionMessageParam) => {
  const existing = sessionStore.get(key) ?? [];
  sessionStore.set(key, existing.concat(message));
}

/**
 * Appends a batch of messages to the stored history for the given session
 * key, creating the history when none exists (or the previous one expired).
 * A new array is stored so earlier getMessages results are not mutated.
 */
export const addMessages = (key: string, messages: ChatCompletionMessageParam[]) => {
  const existing = sessionStore.get(key) ?? [];
  sessionStore.set(key, [...existing, ...messages]);
}

/**
 * Returns the full stored history for the given session key, or undefined
 * when no session exists (or it has expired).
 */
export const getMessages = (key: string) => sessionStore.get(key);

/**
 * Returns the subset of the session's history that should be shown to the
 * end user: all 'user' messages, plus 'assistant' messages that carry no
 * tool_calls. Returns undefined when no session exists for the key (or it
 * has expired) — same as getMessages.
 *
 * Fix: the original mixed `||` and `&&` without parentheses; `&&` binds
 * tighter, so the effective grouping was user || (assistant && !tool_calls).
 * The parentheses below make that (intended) grouping explicit.
 */
export const getUserVisibleMessages = (key: string) => {
  return getMessages(key)?.filter(message =>
    message.role === 'user' ||
    (message.role === 'assistant' && !message.tool_calls)
  );
}

0 comments on commit 5ac7124

Please sign in to comment.