diff --git a/biome.json b/biome.json index 0d93874..5436c99 100644 --- a/biome.json +++ b/biome.json @@ -22,6 +22,7 @@ "javascript": { "formatter": { "quoteStyle": "double" - } + }, + "globals": ["__HTTP_LOGGING_ENABLED__"] } } diff --git a/docs/CONTRIBUTING.md b/docs/CONTRIBUTING.md index 3957c61..9435370 100644 --- a/docs/CONTRIBUTING.md +++ b/docs/CONTRIBUTING.md @@ -43,6 +43,23 @@ Please ensure that the dev-production separation still works when adding new fun In particular, when adding new top-level buttons, make them distinguishable for dev and production versions (see [webpack-config](../webpack.config.js) to see how this is currently handled for the manifest.json). +### Log request-response pairs + +For tests it might be useful to have the full request-response interaction with an LLM. +For this we added an extra development mode which can be enabled by running +```shell +npm run with-http-logging +``` +instead of `npm start`. + +With this, each request that is made to an LLM will open a download modal in Thunderbird where you download a json +file which contains the request you made (minus the Authorization header) and the response by the backend. + +This request-response json can be reused in tests to mock the backend. +See also [mockResponses](../src/__tests__/mockResponses). + +Make sure that the content of these examples is safe for publication (See also: Test mails down below). 
+ ### Build the plugin locally - Build the addon package: diff --git a/manifest.json b/manifest.json index e69ff92..0112acd 100644 --- a/manifest.json +++ b/manifest.json @@ -11,7 +11,7 @@ "update_url": "https://raw.githubusercontent.com/TNG/tb-llm-composer/refs/heads/main/updates.json" } }, - "permissions": ["menus", "compose", "storage", "notifications", "messagesRead", "accountsRead"], + "permissions": ["menus", "compose", "storage", "notifications", "messagesRead", "accountsRead", "downloads"], "icons": { "64": "icons/icon-64px.png", "32": "icons/icon-32px.png", diff --git a/package.json b/package.json index 0e0f8c0..dedac91 100644 --- a/package.json +++ b/package.json @@ -5,6 +5,7 @@ "main": "background.js", "scripts": { "start": "webpack --watch", + "with-http-logging": "webpack --env HTTP_LOGGING=true --watch", "build": "webpack --mode production", "zip": "run-script-os", "zip:darwin:linux": "cd build && zip -r ../llm-thunderbird.xpi ./*", diff --git a/src/__tests__/mockResponses/README.md b/src/__tests__/mockResponses/README.md new file mode 100644 index 0000000..24d01b4 --- /dev/null +++ b/src/__tests__/mockResponses/README.md @@ -0,0 +1,32 @@ +# Mock LLM responses + +Real-world mock request-response pairs of interactions with the LLM. + +The format is inspired by [mock-server](https://www.mock-server.com/mock_server/creating_expectations.html), however, +the request matching should mostly be done based on the request body since, for example, the path is configurable. +This enables easy copy-pasting examples from wherever you get your examples from. 
+ +A very basic example of a json in this folder looks like this: +```json +{ + "httpRequest": { + "method": "POST", + "path": "/login", + "body": { + "username": "foo", + "password": "bar" + } + }, + "httpResponse": { + "statusCode": 302, + "headers": { + "Location": [ + "https://www.mock-server.com" + ] + }, + "cookies": { + "sessionId": "2By8LOhBmaW5nZXJwcmludCIlMDAzMW" + } + } +} +``` diff --git a/src/llmConnection.ts b/src/llmConnection.ts index 32a8ec1..b70b96c 100644 --- a/src/llmConnection.ts +++ b/src/llmConnection.ts @@ -1,3 +1,4 @@ +import { logRequestResponseToFile } from "./logRequestResponseToFile"; import { getPluginOptions, type LlmParameters } from "./optionsParams"; export enum LlmRoles { @@ -84,6 +85,8 @@ export async function sendContentToLlm( return callLlmApi(options.model, requestBody, abortSignal, options.api_token); } +export type LlmResponseBodyType = string | LlmTextCompletionResponse | TgiErrorResponse; + async function callLlmApi( url: string, requestBody: LlmApiRequestBody, @@ -98,20 +101,32 @@ async function callLlmApi( } console.log(`LLM-CONNECTION: Sending request to LLM: POST ${url} with body:\n`, JSON.stringify(requestBody)); - const response = await fetch(url, { + const fetchOptions = { signal: signal, method: "POST", headers: headers, body: JSON.stringify(requestBody), - }); + }; + const response = await fetch(url, fetchOptions); + const responseBody = await safeParseBody(response); + if (process.env.NODE_ENV === "development" && __HTTP_LOGGING_ENABLED__) { + logRequestResponseToFile(url, fetchOptions, requestBody, response, responseBody); + } if (!response.ok) { - const errorResponseBody = await response.text(); - throw Error(`LLM-CONNECTION: Error response from ${url}: ${errorResponseBody}`); + throw Error(`LLM-CONNECTION: Error response from ${url}: ${JSON.stringify(responseBody)}`); } - const responseBody = (await response.json()) as LlmTextCompletionResponse | TgiErrorResponse; console.log("LLM-CONNECTION: LLM responded 
with:", response.status, responseBody); + return responseBody as LlmTextCompletionResponse | TgiErrorResponse; +} - - return responseBody; +} - +async function safeParseBody(response: Response): Promise<LlmResponseBodyType> { + const responseBody = await response.text(); + try { + return JSON.parse(responseBody); + } catch (e) { + console.warn("Could not parse response body", responseBody, e); + return responseBody; + } } export function isLlmTextCompletionResponse(response: LlmTextCompletionResponse | TgiErrorResponse) { diff --git a/src/logRequestResponseToFile.ts b/src/logRequestResponseToFile.ts new file mode 100644 index 0000000..bb1a585 --- /dev/null +++ b/src/logRequestResponseToFile.ts @@ -0,0 +1,42 @@ +import type { LlmResponseBodyType } from "./llmConnection"; + +declare global { + const __HTTP_LOGGING_ENABLED__: boolean; +} + +export function logRequestResponseToFile( + url: string, + requestOptions: RequestInit, + requestBody: object, + response: Response, + responseBody: LlmResponseBodyType, +) { + const headers = { ...requestOptions.headers } as { [key: string]: string }; + headers.Authorization = "***"; + const json = { + request: { + url: new URL(url).pathname, + method: requestOptions.method, + headers: headers, + body: requestBody, + }, + response: { + status: response.status, + statusText: response.statusText, + headers: response.headers, + body: responseBody, + }, + }; + const date = new Date(); + const blob = new Blob([JSON.stringify(json, null, 2)], { type: "text/plain;charset=utf-8" }); + browser.downloads + .download({ + url: URL.createObjectURL(blob), + filename: + `request-response-${date.getFullYear()}${date.getMonth() + 1}${date.getDate()}-` + + `${date.getHours()}${date.getMinutes()}${date.getSeconds()}.json`, + }) + .catch((e) => { + console.warn("Failed to trigger download of request-response", e); + }); +} diff --git a/webpack.config.js b/webpack.config.js index 924283c..bfd1984 100644 --- a/webpack.config.js +++ b/webpack.config.js @@ -1,3 +1,4 @@ +const { 
DefinePlugin } = require("webpack"); const CopyWebpackPlugin = require("copy-webpack-plugin"); const path = require("node:path"); const TerserPlugin = require("terser-webpack-plugin"); @@ -62,6 +63,9 @@ module.exports = (_env, argv) => { }, ], }), + new DefinePlugin({ + __HTTP_LOGGING_ENABLED__: _env.HTTP_LOGGING === "true", + }), ], optimization: { minimize: isProductionMode,