diff --git a/package-lock.json b/package-lock.json
index 14a384c..b48cde8 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -13,7 +13,8 @@
         "dotenv": "^16.4.5",
         "express": "^4.18.3",
         "officeparser": "^4.0.8",
-        "openai": "^4.29.2"
+        "openai": "^4.29.2",
+        "zod": "^3.22.4"
       },
       "devDependencies": {
         "@tsconfig/strictest": "^2.0.3",
@@ -2820,6 +2821,14 @@
       "engines": {
         "node": ">=6"
       }
+    },
+    "node_modules/zod": {
+      "version": "3.22.4",
+      "resolved": "https://registry.npmjs.org/zod/-/zod-3.22.4.tgz",
+      "integrity": "sha512-iC+8Io04lddc+mVqQ9AZ7OQ2MrUKGN+oIQyq1vemgt46jwCwLfhq7/pwnBnNXXXZb8VTVLKwp9EDkx+ryxIWmg==",
+      "funding": {
+        "url": "https://github.com/sponsors/colinhacks"
+      }
     }
   }
 }
diff --git a/package.json b/package.json
index f933dae..7e40036 100644
--- a/package.json
+++ b/package.json
@@ -2,12 +2,12 @@
   "name": "chatbot-poc-backend",
   "version": "1.0.0",
   "description": "",
-  "main": "./dist/index.js",
+  "main": "./dist/app.js",
   "type": "module",
   "scripts": {
-    "dev": "nodemon -r dotenv/config --exec node --loader ts-node/esm src/index.ts",
+    "dev": "nodemon -r dotenv/config --exec node --loader ts-node/esm src/app.ts",
     "build": "rimraf ./dist && tsc",
-    "start": "npm run build && node dist/index.js"
+    "start": "npm run build && node dist/app.js"
   },
   "repository": {
     "type": "git",
@@ -24,7 +24,8 @@
     "dotenv": "^16.4.5",
     "express": "^4.18.3",
     "officeparser": "^4.0.8",
-    "openai": "^4.29.2"
+    "openai": "^4.29.2",
+    "zod": "^3.22.4"
   },
   "devDependencies": {
     "@tsconfig/strictest": "^2.0.3",
diff --git a/prompts/json-format-prompt.docx b/prompts/json-format-prompt.docx
new file mode 100644
index 0000000..9154913
Binary files /dev/null and b/prompts/json-format-prompt.docx differ
diff --git a/src/app.ts b/src/app.ts
new file mode 100644
index 0000000..ea57206
--- /dev/null
+++ b/src/app.ts
@@ -0,0 +1,14 @@
+import "dotenv/config";
+import express from "express";
+import { registerRoutes } from "./index.js";
+
+const app = express();
+const port = process.env["PORT"] || 3000;
+
+app.listen(port, () =>{
+    console.log(`Server is running at http://localhost:${port}`);
+    registerRoutes();
+})
+
+
+export { app };
\ No newline at end of file
diff --git a/src/chat/get-init.ts b/src/chat/get-init.ts
new file mode 100644
index 0000000..5bde397
--- /dev/null
+++ b/src/chat/get-init.ts
@@ -0,0 +1,59 @@
+import { z } from "zod";
+import { makeGetEndpoint } from "../middleware/validation/makeGetEndpoint.js";
+import { fileReader, parseFileReaderResponse } from "../util/fileReader.js";
+import { OPENAI_MODEL, PROMPT_FILE_NAME, RESPONSE_FORMAT, openai } from "./index.js";
+
+//TODO: Rework type inference of fileReader
+export const init = makeGetEndpoint(z.any(), async (_request, response) => {
+    let messages: string[] = []
+    const promptFile = await fileReader(PROMPT_FILE_NAME);
+    let jsonPrompt;
+
+    if(RESPONSE_FORMAT.type === "json_object"){
+
+        jsonPrompt = await fileReader('json-format-prompt.docx');
+        if(parseFileReaderResponse(jsonPrompt)){
+            messages.push(jsonPrompt.content);
+        }
+        else{
+            return response.status(500).send({
+                status: 500,
+                message: jsonPrompt.error
+            })
+        }
+    }
+
+    if(parseFileReaderResponse(promptFile)){
+        messages.push(promptFile.content);
+    }
+    else{
+        return response.status(500).send({
+            status: 500,
+            message: promptFile.error
+        })
+    }
+
+
+    const completion = await openai.chat.completions.create({
+        messages: messages.map(message => ({role: "system", content: message})),
+        model: OPENAI_MODEL,
+        response_format: RESPONSE_FORMAT
+    });
+    console.log(completion.choices[0]?.message);
+    return response
+        .status(200)
+        .send([
+            {
+                role: "system",
+                content: promptFile.content
+            },
+            {
+                role: "system",
+                content: jsonPrompt?.content !== undefined ? jsonPrompt.content : ""
+            },
+            {
+                role: "assistant",
+                content: "Hi, ich bin der virtuelle Assistent von Qonto und kann alles rund um Qonto und unsere Leistungen beantworten. Bitte fragen Sie mich etwas"
+            }
+        ]);
+});
\ No newline at end of file
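Note on how the new GET /chat/init endpoint is meant to be consumed: it answers with the seed message history (the system prompt, an optional JSON-format prompt, and a canned greeting) that the client is expected to send back on later /newMessage calls. A minimal client-side sketch, assuming the default port 3000 and the '/chat' prefix registered in src/index.ts; ChatMessage and fetchInitialHistory are illustrative names, not part of this change:

type ChatMessage = { role: "system" | "assistant" | "user"; content: string };

const fetchInitialHistory = async (): Promise<ChatMessage[]> => {
    // GET /chat/init returns the three seed messages built in src/chat/get-init.ts
    const response = await fetch("http://localhost:3000/chat/init");
    if (!response.ok) {
        throw new Error(`init failed with status ${response.status}`);
    }
    return (await response.json()) as ChatMessage[];
};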
diff --git a/src/chat/index.ts b/src/chat/index.ts
index 25f9696..8a92256 100644
--- a/src/chat/index.ts
+++ b/src/chat/index.ts
@@ -1,59 +1,32 @@
-import express, { Request, Response} from "express";
+import express from "express";
 import "dotenv/config";
 import OpenAI from "openai";
-import { ChatCompletionCreateParamsBase } from "openai/resources/chat/completions.js";
-import { fileReader, hasErrors } from "../util/fileReader.js";
+import { ChatCompletionCreateParams, ChatCompletionCreateParamsBase } from "openai/resources/chat/completions.js";
+import { init } from "./get-init.js";
+import { newMessage } from "./post-new-message.js";
 
+/**
+ * Change the prompt file, the model or the response format here
+ */
 const PROMPT_FILE_NAME = "Chatbot Qonto.docx"
 const OPENAI_MODEL: ChatCompletionCreateParamsBase["model"] = "gpt-3.5-turbo";
+const RESPONSE_FORMAT: ChatCompletionCreateParams.ResponseFormat = {type: "text"};
 
-export const chatRouter = express.Router();
-
-export const openai = new OpenAI();
-
-
-chatRouter.post('/newMessage', async (request: Request, response: Response) => {
-    const messages = request.body.messages;
-    const completion = await openai.chat.completions.create({
-        messages: messages,
-        model: OPENAI_MODEL,
-    });
-    const choice = completion.choices[0];
-    if(choice === undefined){
-        response.status(500).send({
-            status: 500,
-            message: "Expected an answer from the bot but got none."
-        })
-    }
-    else{
-        response.status(200).send(choice.message);
-    }
-});
-
-chatRouter.get('/init', async (_request: Request, response: Response) => {
-    const fileContent = await fileReader(PROMPT_FILE_NAME);
-    if(hasErrors(fileContent)){
-        console.log(fileContent.error.stack);
-        response.status(500).json({
-            status: 500,
-            message: 'Something went wrong'
-        });
-    }
-    else{
-        await openai.chat.completions.create({
-            messages: [{role: "system", content: fileContent.data}],
-            model: OPENAI_MODEL,
-        });
-        response.status(200).send([
-            {
-                role: "system",
-                content: fileContent.data
-            },
-            {
-                role: "assistant",
-                content: "Hi, ich bin der virtuelle Assistent von Qonto und kann alles rund um Qonto und unsere Leistungen beantworten. Bitte fragen Sie mich etwas"
-            }
-        ]);
-    }
-});
+const chatRouter = express.Router();
+const openai = new OpenAI();
+
+
+chatRouter.post('/newMessage', newMessage);
+chatRouter.get("/test")
+
+chatRouter.get('/init', init);
+
+
+export {
+    PROMPT_FILE_NAME,
+    OPENAI_MODEL,
+    RESPONSE_FORMAT,
+    chatRouter,
+    openai,
+}
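The constants exported above are the single switch for the rest of the router. As an illustration (not part of this diff), putting the bot into JSON mode is a one-line change to RESPONSE_FORMAT; src/chat/get-init.ts then also reads prompts/json-format-prompt.docx and prepends it as an extra system message, presumably because the chat completions API expects the prompt to mention JSON when json_object is requested:

import { ChatCompletionCreateParams } from "openai/resources/chat/completions.js";

// Hypothetical variant of the shared config: switch from plain-text to JSON answers.
const RESPONSE_FORMAT: ChatCompletionCreateParams.ResponseFormat = { type: "json_object" };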
diff --git a/src/chat/post-new-message.ts b/src/chat/post-new-message.ts
new file mode 100644
index 0000000..6a1445e
--- /dev/null
+++ b/src/chat/post-new-message.ts
@@ -0,0 +1,31 @@
+import { z } from "zod";
+import { makePostEndpoint } from "../middleware/validation/makePostEndpoint.js";
+import { OPENAI_MODEL, RESPONSE_FORMAT, openai } from "./index.js";
+
+const ChatCompletionRole = z.union([z.literal('user'), z.literal('system'), z.literal('assistant')]);
+export type ChatCompletionRole = z.infer<typeof ChatCompletionRole>;
+
+const MessageHistory = z.object({
+    messages: z.array(
+        z.object({
+            role: ChatCompletionRole,
+            content: z.string(),
+        })
+    ).nonempty()
+});
+type MessageHistory = z.infer<typeof MessageHistory>;
+
+export const newMessage = makePostEndpoint(MessageHistory, async (request, response) => {
+    const messages = request.body.messages;
+    const completion = await openai.chat.completions.create({
+        messages,
+        model: OPENAI_MODEL,
+        response_format: RESPONSE_FORMAT
+    });
+    const chatResponse = completion.choices[0];
+    if(!chatResponse){
+        return response.status(500).send("Got no response from the bot");
+    }
+    console.log(chatResponse);
+    return response.status(200).send(chatResponse.message);
+});
diff --git a/src/index.ts b/src/index.ts
index ff9a7af..97b3d75 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -1,10 +1,7 @@
-import "dotenv/config";
-import express, { Request, Response } from "express";
-import cors from "cors";
+import express from "express";
 import { chatRouter } from "./chat/index.js";
-
-const app = express();
-const port = process.env["PORT"] || 3000;
+import { app } from "./app.js";
+import cors from "cors";
 
 
 const corsOptions = {
@@ -12,15 +9,13 @@ const corsOptions = {
     optionsSuccessStatus: 204
 }
 
-app.options('*', cors(corsOptions));
-app.use(cors(corsOptions));
-app.use(express.json());
-
-app.use('/chat', chatRouter);
-app.get("/", (_: Request, response: Response) => {
-    response.status(200).json({message: "Hello World, running in Container"});
-})
+export const registerRoutes = () => {
+    app.options('*', cors(corsOptions));
+    app.use(cors(corsOptions));
+
+
+    app.use(express.json());
+
+    app.use('/chat', chatRouter);
+}
 
-app.listen(port, () =>{
-    console.log(`Server is running at http://localhost:${port}`);
-})
diff --git a/src/middleware/validation/makeGetEndpoint.ts b/src/middleware/validation/makeGetEndpoint.ts
new file mode 100644
index 0000000..b52ae28
--- /dev/null
+++ b/src/middleware/validation/makeGetEndpoint.ts
@@ -0,0 +1,22 @@
+import { Request, Response } from "express"
+import { z } from "zod"
+
+export const makeGetEndpoint = (
+    schema: z.Schema,
+    callback: (
+        req: Request,
+        res: Response
+    ) => void
+) => (req:Request, res:Response) => {
+
+    const bodyValidation = schema.safeParse(req.query);
+    if(!bodyValidation.success){
+        return res
+            .status(400)
+            .send({
+                status: 400,
+                message: bodyValidation.error.message
+            });
+    }
+    return callback(req as any, res);
+}
\ No newline at end of file
diff --git a/src/middleware/validation/makePostEndpoint.ts b/src/middleware/validation/makePostEndpoint.ts
new file mode 100644
index 0000000..7c5628f
--- /dev/null
+++ b/src/middleware/validation/makePostEndpoint.ts
@@ -0,0 +1,22 @@
+import { Request, Response } from "express"
+import { z } from "zod"
+
+export const makePostEndpoint = (
+    schema: z.Schema,
+    callback: (
+        req: Request,
+        res: Response
+    ) => void
+) => (req:Request, res:Response) => {
+
+    const bodyValidation = schema.safeParse(req.body);
+    if(!bodyValidation.success){
+        return res
+            .status(400)
+            .send({
+                status: 400,
+                message: bodyValidation.error.message
+            });
+    }
+    return callback(req, res);
+}
\ No newline at end of file
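To make the intent of the two validation factories concrete, here is a sketch of how a further endpoint could be declared against makePostEndpoint; the Feedback schema, the handler name and the import path are hypothetical and not part of this change. Bodies that fail the schema are rejected with a 400 carrying the zod error message; bodies that pass are handed to the callback unchanged:

import { z } from "zod";
import { makePostEndpoint } from "./middleware/validation/makePostEndpoint.js";

// Hypothetical request body schema.
const Feedback = z.object({
    rating: z.number().int().min(1).max(5),
    comment: z.string().optional(),
});

// Hypothetical handler; wire it up with e.g. router.post('/feedback', postFeedback).
export const postFeedback = makePostEndpoint(Feedback, async (request, response) => {
    return response.status(200).send({ received: request.body.rating });
});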
"express" +import { z } from "zod" + +export const makePostEndpoint = ( + schema: z.Schema, + callback: ( + req: Request, + res: Response + ) => void +) => (req:Request, res:Response) => { + + const bodyValidation = schema.safeParse(req.body); + if(!bodyValidation.success){ + return res + .status(400) + .send({ + status: 400, + message: bodyValidation.error.message + }); + } + return callback(req, res); +} \ No newline at end of file diff --git a/src/models/message.ts b/src/models/message.ts deleted file mode 100644 index d441dad..0000000 --- a/src/models/message.ts +++ /dev/null @@ -1,4 +0,0 @@ -interface Message{ - content: string; - role: "user" | "system"; -} \ No newline at end of file diff --git a/src/util/fileReader.ts b/src/util/fileReader.ts index d07d7a6..ae63358 100644 --- a/src/util/fileReader.ts +++ b/src/util/fileReader.ts @@ -1,35 +1,51 @@ //@ts-ignore import officeParser from 'officeparser'; +import { z } from 'zod'; -export const fileReader = async (fileName:string): Promise<{data: string, error: undefined} | {data: undefined, error: Error}> => { +const FileReaderSuccess = z.object({ + content: z.string(), + error: z.undefined() +}); + +const FileReaderError = z.object({ + content: z.undefined(), + error: z.string(), +}) + +export type FileReaderSuccess = z.infer; +export type FileReaderError = z.infer; + +const stringParser = z.string(); + +export const fileReader = async (fileName:string): Promise => { try{ const data = await officeParser.parseOfficeAsync(`${process.cwd()}/prompts/${fileName}`); - if(!data){ - throw Error('File read but no prompt data found'); + const validatedData = stringParser.safeParse(data); + if(!validatedData.success){ + return { + content: undefined, + error: `Error reading the file ${fileName}.\n${validatedData.error.message}`, + }; } return { - data, + content: data, error: undefined }; } catch(error){ if(error instanceof Error){ return { - data: undefined, - error + content: undefined, + error: `Unexpected error reading the file ${fileName}.\n${error.stack}`, }; } - console.log(error); return { - data: undefined, - error: new Error(`Something went wrong reading the prompt file. Original error: ${error}`) + content: undefined, + error: `Unexpected error reading prompt file ${fileName}`, }; } } - -export const hasErrors = ( - fileObj: {data: string, error: undefined} | {data: undefined, error: Error} - ): fileObj is {data: undefined, error: Error} => { - return fileObj.error !== undefined +export const parseFileReaderResponse = (data: FileReaderSuccess | FileReaderError): data is FileReaderSuccess => { + return FileReaderSuccess.safeParse(data).success; } \ No newline at end of file