diff --git a/CHANGELOG.md b/CHANGELOG.md
index 2ccf93c..6067dfe 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,11 @@
 # Changelog
 
+## 0.14.0
+
+- Update `openai` version
+- Fix tool call issues for the Vercel AI SDK integration
+- Remove `useChatStream` (moved to a separate repo)
+
 ## 0.13.9
 
 - Fix for browser runtime
diff --git a/README.md b/README.md
index da9d975..aabf4b1 100644
--- a/README.md
+++ b/README.md
@@ -36,7 +36,7 @@ const rawCompletion = await lt.chat.completions.create({
   prompt: "",
   doNotRecord: false, // true will ensure logs do not contain any info about payloads. You can still see the request in the logs, but you cannot see the variables etc.
   metadata: {
-    "custom-field": 1,
+    "custom-field": "1",
   },
 })
 ```
@@ -319,75 +319,3 @@ runner.on("chunk", (chunk: ChatCompletionChunk) => {
   // NOTE: chunk here is always a proper JSON even with parts of the message
 })
 ```
-
-## useChatStream React hook
-
-You can leverage our React hook to handle AI streams more easily. We have developed a hook called `useChatStream`, which can be imported from `langtail/react/useChatStream`.
-
-Here's an example:
-
-```ts
-// NOTE: your FE code
-import { useChatStream } from "langtail/react/useChatStream";
-
-function YourComponent() {
-  const { isLoading, messages, send } = useChatStream({
-    fetcher: (message) =>
-      fetch(`/api/langtail`, {
-        method: "POST",
-        body: JSON.stringify({ messages: [message] }),
-        headers: {
-          "Content-Type": "application/json",
-        },
-      }).then((res) => res.body),
-    onToolCall: async (toolCall: ChatCompletionMessageToolCall, fullMessage) => {
-      if (toolCall.function.name === "weather") {
-        return "Sunny 22 degrees"
-      }
-
-      return "Unknown data"
-    }
-  });
-
-  useEffect(() => {
-    // Call send wherever you like with any content
-    send({ role: 'user', content: "Can you hear me?" })
-  }, [])
-
-  // NOTE: the `messages` array is updated within React, providing you with a live stream of the messages
-  return (
-    <>
-      {messages.map((message) => (
-        <div>
-          {message.role}: {message.content}
-        </div>
-      ))}
-    </>
-  )
-}
-```
-
-```ts
-// NOTE: your Next.js BE code, assuming that this is the route /api/langtail
-import { Langtail } from "langtail"
-import { NextRequest } from "next/server"
-
-export const runtime = "edge"
-
-// Route handler that invokes the prompt and streams the completion back
-export const lt = new Langtail({
-  apiKey: process.env.LANGTAIL_API_KEY ?? "",
-})
-
-export async function POST(request: NextRequest) {
-  const messages = (await request.json()).messages
-
-  const result = await lt.prompts.invoke({
-    prompt: "weather",
-    messages,
-    stream: true,
-  })
-
-  return new Response(result.toReadableStream())
-}
-```
diff --git a/package.json b/package.json
index e509b0d..7db3405 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "langtail",
-  "version": "0.13.9",
+  "version": "0.14.0",
   "description": "",
   "main": "./Langtail.js",
   "packageManager": "pnpm@8.15.6",
@@ -35,17 +35,12 @@
   ],
   "license": "MIT",
   "devDependencies": {
-    "@testing-library/dom": "^10.0.0",
-    "@testing-library/react": "^16.0.0",
     "@types/node": "^20.12.11",
-    "@types/react": "^18.3.3",
     "copyfiles": "^2.4.1",
     "fs-extra": "^11.2.0",
     "jsdom": "^24.1.0",
     "nock": "14.0.0-beta.5",
     "prettier": "^3.2.5",
-    "react": "18.2.0",
-    "react-dom": "^18.0.0",
     "tsup": "^8.0.2",
     "typescript": "^5.4.5",
     "vitest": "^1.6.0"
@@ -83,11 +78,6 @@
       "import": "./stream/index.mjs",
       "types": "./stream/index.d.ts"
     },
-    "./react/useChatStream": {
-      "require": "./react/useChatStream.js",
-      "import": "./react/useChatStream.mjs",
-      "types": "./react/useChatStream.d.ts"
-    },
     "./vercel-ai": {
       "require": "./vercel-ai/index.js",
       "import": "./vercel-ai/index.mjs",
       "types": "./vercel-ai/index.d.ts"
@@ -99,18 +89,15 @@
       "types": "./customTypes.d.ts"
     }
   },
-  "peerDependencies": {
-    "react": ">=18.2.0"
-  },
   "dependencies": {
-    "@ai-sdk/provider": "^0.0.24",
-    "@ai-sdk/provider-utils": "^1.0.20",
+    "@ai-sdk/provider": "^1.0.1",
+    "@ai-sdk/provider-utils": "^2.0.3",
     "@langtail/handlebars-evalless": "^0.1.2",
     "commander": "^12.1.0",
     "date-fns": "^3.6.0",
     "dotenv-flow": "^4.1.0",
     "json-schema-to-zod": "^2.1.0",
-    "openai": "^4.43.0",
+    "openai": "^4.76.0",
     "query-string": "^7.1.3",
     "zod": "^3.23.8"
   },
@@ -131,8 +118,7 @@
     "src/vercel-ai/index.ts",
     "src/bin/entry.ts",
     "src/schemas.ts",
-    "src/stream/index.ts",
-    "src/react/useChatStream.ts"
+    "src/stream/index.ts"
   ],
   "external": [
     "dotenv-flow",
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 6ae1468..e7acde6 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -6,11 +6,11 @@ settings:
 
 dependencies:
   '@ai-sdk/provider':
-    specifier: ^0.0.24
-    version: 0.0.24
+    specifier: ^1.0.1
+    version: 1.0.1
   '@ai-sdk/provider-utils':
-    specifier: ^1.0.20
-    version: 1.0.20(zod@3.23.8)
+    specifier: ^2.0.3
+    version: 2.0.3(zod@3.23.8)
   '@langtail/handlebars-evalless':
     specifier: ^0.1.2
     version: 0.1.2
@@ -27,8 +27,8 @@ dependencies:
     specifier: ^2.1.0
     version: 2.1.0
   openai:
-    specifier: ^4.43.0
-    version: 4.43.0
+    specifier: ^4.76.0
+    version: 4.76.0(zod@3.23.8)
   query-string:
     specifier: ^7.1.3
     version: 7.1.3
@@ -37,18 +37,9 @@ dependencies:
     version: 3.23.8
 
 devDependencies:
-  '@testing-library/dom':
-    specifier: ^10.0.0
-    version: 10.1.0
-  '@testing-library/react':
-    specifier: ^16.0.0
-    version: 16.0.0(@testing-library/dom@10.1.0)(@types/react@18.3.3)(react-dom@18.3.1)(react@18.2.0)
   '@types/node':
     specifier: ^20.12.11
     version: 20.12.11
-  '@types/react':
-    specifier: ^18.3.3
-    version: 18.3.3
   copyfiles:
     specifier: ^2.4.1
     version: 2.4.1
@@ -64,12 +55,6 @@ devDependencies:
   prettier:
     specifier: ^3.2.5
     version: 3.2.5
-  react:
-    specifier: 18.2.0
-    version: 18.2.0
-  react-dom:
-    specifier: 
^18.0.0 - version: 18.3.1(react@18.2.0) tsup: specifier: ^8.0.2 version: 8.0.2(typescript@5.4.5) @@ -82,8 +67,8 @@ devDependencies: packages: - /@ai-sdk/provider-utils@1.0.20(zod@3.23.8): - resolution: {integrity: sha512-ngg/RGpnA00eNOWEtXHenpX1MsM2QshQh4QJFjUfwcqHpM5kTfG7je7Rc3HcEDP+OkRVv2GF+X4fC1Vfcnl8Ow==} + /@ai-sdk/provider-utils@2.0.3(zod@3.23.8): + resolution: {integrity: sha512-Cyk7GlFEse2jQ4I3FWYuZ1Zhr5w1mD9SHMJTYm/in1rd7r89nmEoQiOy3h8YV2ZvTa2/6aR10xZ4M0k4B3BluA==} engines: {node: '>=18'} peerDependencies: zod: ^3.0.0 @@ -91,50 +76,20 @@ packages: zod: optional: true dependencies: - '@ai-sdk/provider': 0.0.24 - eventsource-parser: 1.1.2 - nanoid: 3.3.6 + '@ai-sdk/provider': 1.0.1 + eventsource-parser: 3.0.0 + nanoid: 3.3.7 secure-json-parse: 2.7.0 zod: 3.23.8 dev: false - /@ai-sdk/provider@0.0.24: - resolution: {integrity: sha512-XMsNGJdGO+L0cxhhegtqZ8+T6nn4EoShS819OvCgI2kLbYTIvk0GWFGD0AXJmxkxs3DrpsJxKAFukFR7bvTkgQ==} + /@ai-sdk/provider@1.0.1: + resolution: {integrity: sha512-mV+3iNDkzUsZ0pR2jG0sVzU6xtQY5DtSCBy3JFycLp6PwjyLw/iodfL3MwdmMCRJWgs3dadcHejRnMvF9nGTBg==} engines: {node: '>=18'} dependencies: json-schema: 0.4.0 dev: false - /@babel/code-frame@7.24.7: - resolution: {integrity: sha512-BcYH1CVJBO9tvyIZ2jVeXgSIMvGZ2FDRvDdOIVQyuklNKSsx+eppDEBq/g47Ayw+RqNFE+URvOShmf+f/qwAlA==} - engines: {node: '>=6.9.0'} - dependencies: - '@babel/highlight': 7.24.7 - picocolors: 1.0.0 - dev: true - - /@babel/helper-validator-identifier@7.24.7: - resolution: {integrity: sha512-rR+PBcQ1SMQDDyF6X0wxtG8QyLCgUB0eRAGguqRLfkCA87l7yAP7ehq8SNj96OOGTO8OBV70KhuFYcIkHXOg0w==} - engines: {node: '>=6.9.0'} - dev: true - - /@babel/highlight@7.24.7: - resolution: {integrity: sha512-EStJpq4OuY8xYfhGVXngigBJRWxftKX9ksiGDnmlY3o7B/V7KIAc9X4oiK87uPJSc/vs5L869bem5fhZa8caZw==} - engines: {node: '>=6.9.0'} - dependencies: - '@babel/helper-validator-identifier': 7.24.7 - chalk: 2.4.2 - js-tokens: 4.0.0 - picocolors: 1.0.0 - dev: true - - /@babel/runtime@7.24.7: - resolution: {integrity: sha512-UwgBRMjJP+xv857DCngvqXI3Iq6J4v0wXmwc6sapg+zyhbwmQX67LUEFrkK5tbyJ30jGuG3ZvWpBiB9LCy1kWw==} - engines: {node: '>=6.9.0'} - dependencies: - regenerator-runtime: 0.14.1 - dev: true - /@esbuild/aix-ppc64@0.19.12: resolution: {integrity: sha512-bmoCYyWdEL3wDQIVbcyzRyeKLgk2WtWLTWz1ZIAZF/EGbNOwSA6ew3PftJ1PqMiOOGu0OyFMzG53L0zqIpPeNA==} engines: {node: '>=12'} @@ -536,46 +491,6 @@ packages: resolution: {integrity: sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==} dev: true - /@testing-library/dom@10.1.0: - resolution: {integrity: sha512-wdsYKy5zupPyLCW2Je5DLHSxSfbIp6h80WoHOQc+RPtmPGA52O9x5MJEkv92Sjonpq+poOAtUKhh1kBGAXBrNA==} - engines: {node: '>=18'} - dependencies: - '@babel/code-frame': 7.24.7 - '@babel/runtime': 7.24.7 - '@types/aria-query': 5.0.4 - aria-query: 5.3.0 - chalk: 4.1.2 - dom-accessibility-api: 0.5.16 - lz-string: 1.5.0 - pretty-format: 27.5.1 - dev: true - - /@testing-library/react@16.0.0(@testing-library/dom@10.1.0)(@types/react@18.3.3)(react-dom@18.3.1)(react@18.2.0): - resolution: {integrity: sha512-guuxUKRWQ+FgNX0h0NS0FIq3Q3uLtWVpBzcLOggmfMoUpgBnzBzvLLd4fbm6yS8ydJd94cIfY4yP9qUQjM2KwQ==} - engines: {node: '>=18'} - peerDependencies: - '@testing-library/dom': ^10.0.0 - '@types/react': ^18.0.0 - '@types/react-dom': ^18.0.0 - react: ^18.0.0 - react-dom: ^18.0.0 - peerDependenciesMeta: - '@types/react': - optional: true - '@types/react-dom': - optional: true - dependencies: - '@babel/runtime': 7.24.7 - '@testing-library/dom': 10.1.0 - '@types/react': 18.3.3 - react: 18.2.0 - 
react-dom: 18.3.1(react@18.2.0) - dev: true - - /@types/aria-query@5.0.4: - resolution: {integrity: sha512-rfT93uj5s0PRL7EzccGMs3brplhcrghnDoV26NqKhCAS1hVo+WdNsPvE/yb6ilfr5hi2MEk6d5EWJTKdxg8jVw==} - dev: true - /@types/estree@1.0.5: resolution: {integrity: sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==} dev: true @@ -598,17 +513,6 @@ packages: dependencies: undici-types: 5.26.5 - /@types/prop-types@15.7.12: - resolution: {integrity: sha512-5zvhXYtRNRluoE/jAp4GVsSduVUzNWKkOZrCDBWYtE7biZywwdC2AcEzg+cSMLFRfVgeAFqpfNabiPjxFddV1Q==} - dev: true - - /@types/react@18.3.3: - resolution: {integrity: sha512-hti/R0pS0q1/xx+TsI73XIqk26eBsISZ2R0wUijXIngRK9R/e7Xw/cXVxQK7R5JjW+SV4zGcn5hXjudkN/pLIw==} - dependencies: - '@types/prop-types': 15.7.12 - csstype: 3.1.3 - dev: true - /@vitest/expect@1.6.0: resolution: {integrity: sha512-ixEvFVQjycy/oNgHjqsL6AZCDduC+tflRluaHIzKIsdbzkLn2U/iBnVeJwB6HsIjQBdfMR8Z0tRxKUsvFJEeWQ==} dependencies: @@ -692,13 +596,6 @@ packages: engines: {node: '>=12'} dev: true - /ansi-styles@3.2.1: - resolution: {integrity: sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==} - engines: {node: '>=4'} - dependencies: - color-convert: 1.9.3 - dev: true - /ansi-styles@4.3.0: resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==} engines: {node: '>=8'} @@ -728,12 +625,6 @@ packages: picomatch: 2.3.1 dev: true - /aria-query@5.3.0: - resolution: {integrity: sha512-b0P0sZPKtyu8HkeRAfCq0IfURZK+SuwMjY1UXGBU27wpAiTwQAIlq56IbIO+ytk/JjS1fMR14ee5WBBfKi5J6A==} - dependencies: - dequal: 2.0.3 - dev: true - /array-union@2.1.0: resolution: {integrity: sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==} engines: {node: '>=8'} @@ -803,23 +694,6 @@ packages: type-detect: 4.0.8 dev: true - /chalk@2.4.2: - resolution: {integrity: sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==} - engines: {node: '>=4'} - dependencies: - ansi-styles: 3.2.1 - escape-string-regexp: 1.0.5 - supports-color: 5.5.0 - dev: true - - /chalk@4.1.2: - resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==} - engines: {node: '>=10'} - dependencies: - ansi-styles: 4.3.0 - supports-color: 7.2.0 - dev: true - /check-error@1.0.3: resolution: {integrity: sha512-iKEoDYaRmd1mxM90a2OEfWhjsjPpYPuQ+lMYsoxB126+t8fw7ySEO48nmDg5COTjxDI65/Y2OWpeEHk3ZOe8zg==} dependencies: @@ -849,12 +723,6 @@ packages: wrap-ansi: 7.0.0 dev: true - /color-convert@1.9.3: - resolution: {integrity: sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==} - dependencies: - color-name: 1.1.3 - dev: true - /color-convert@2.0.1: resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==} engines: {node: '>=7.0.0'} @@ -862,10 +730,6 @@ packages: color-name: 1.1.4 dev: true - /color-name@1.1.3: - resolution: {integrity: sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==} - dev: true - /color-name@1.1.4: resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} dev: true @@ -923,10 +787,6 @@ packages: rrweb-cssom: 0.6.0 dev: true - /csstype@3.1.3: - resolution: {integrity: 
sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==} - dev: true - /data-urls@5.0.0: resolution: {integrity: sha512-ZYP5VBHshaDAiVZxjbRVcFJpc+4xGgT0bK3vzy1HLN8jTO975HEbuYzZJcHoQEY5K1a0z8YayJkyVETa08eNTg==} engines: {node: '>=18'} @@ -971,11 +831,6 @@ packages: resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} engines: {node: '>=0.4.0'} - /dequal@2.0.3: - resolution: {integrity: sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==} - engines: {node: '>=6'} - dev: true - /diff-sequences@29.6.3: resolution: {integrity: sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} @@ -988,10 +843,6 @@ packages: path-type: 4.0.0 dev: true - /dom-accessibility-api@0.5.16: - resolution: {integrity: sha512-X7BJ2yElsnOJ30pZF4uIIDfBEVgF4XEBxL9Bxhy6dnrm5hkzqmsWHGTiHqRiITNhMyFLyAiWndIJP7Z1NTteDg==} - dev: true - /dotenv-flow@4.1.0: resolution: {integrity: sha512-0cwP9jpQBQfyHwvE0cRhraZMkdV45TQedA8AAUZMsFzvmLcQyc1HPv+oX0OOYwLFjIlvgVepQ+WuQHbqDaHJZg==} engines: {node: '>= 12.0.0'} @@ -1057,11 +908,6 @@ packages: engines: {node: '>=6'} dev: true - /escape-string-regexp@1.0.5: - resolution: {integrity: sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==} - engines: {node: '>=0.8.0'} - dev: true - /estree-walker@3.0.3: resolution: {integrity: sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==} dependencies: @@ -1073,9 +919,9 @@ packages: engines: {node: '>=6'} dev: false - /eventsource-parser@1.1.2: - resolution: {integrity: sha512-v0eOBUbiaFojBu2s2NPBfYUoRR9GjcDNvCXVaqEf5vVfpIAh9f8RCo4vXTP8c63QRKCFwoLpMpTdPwwhEKVgzA==} - engines: {node: '>=14.18'} + /eventsource-parser@3.0.0: + resolution: {integrity: sha512-T1C0XCUimhxVQzW4zFipdx0SficT651NnkR0ZSH3yQwh+mFMdLfgjABVi4YtMTtaL4s168593DaoaRLMqryavA==} + engines: {node: '>=18.0.0'} dev: false /execa@5.1.1: @@ -1265,16 +1111,6 @@ packages: uglify-js: 3.19.3 dev: false - /has-flag@3.0.0: - resolution: {integrity: sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==} - engines: {node: '>=4'} - dev: true - - /has-flag@4.0.0: - resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==} - engines: {node: '>=8'} - dev: true - /html-encoding-sniffer@4.0.0: resolution: {integrity: sha512-Y22oTqIU4uuPgEemfz7NDJz6OeKf12Lsu+QC+s3BVpda64lTiMYCyGwg5ki4vFxkMwQdeZDl2adZoqUgdFuTgQ==} engines: {node: '>=18'} @@ -1411,10 +1247,6 @@ packages: engines: {node: '>=10'} dev: true - /js-tokens@4.0.0: - resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==} - dev: true - /js-tokens@8.0.3: resolution: {integrity: sha512-UfJMcSJc+SEXEl9lH/VLHSZbThQyLpw1vLO1Lb+j4RWDvG3N2f7yj3PVQA3cmkTBNldJ9eFnM+xEXxHIXrYiJw==} dev: true @@ -1506,13 +1338,6 @@ packages: resolution: {integrity: sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA==} dev: true - /loose-envify@1.4.0: - resolution: {integrity: sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==} - hasBin: true - dependencies: - js-tokens: 4.0.0 - dev: true - /loupe@2.3.7: resolution: {integrity: 
sha512-zSMINGVYkdpYSOBmLi0D1Uo7JU9nVdQKrHxC8eYlV+9YKK9WePqAlL7lSlorG/U2Fw1w0hTBmaa/jrQ3UbPHtA==} dependencies: @@ -1524,11 +1349,6 @@ packages: engines: {node: 14 || >=16.14} dev: true - /lz-string@1.5.0: - resolution: {integrity: sha512-h5bgJWpxJNswbU7qCrV0tIKQCaS3blPDrqKWx+QxzuzL1zGUzij9XCWLrSLsJPu5t+eWA/ycetzYAO5IOMcWAQ==} - hasBin: true - dev: true - /magic-string@0.30.8: resolution: {integrity: sha512-ISQTe55T2ao7XtlAStud6qwYPZjE4GK1S/BeVPus4jrq6JuOnQ00YKQC581RWhR122W7msZV263KzVeLoqidyQ==} engines: {node: '>=12'} @@ -1621,17 +1441,10 @@ packages: thenify-all: 1.6.0 dev: true - /nanoid@3.3.6: - resolution: {integrity: sha512-BGcqMMJuToF7i1rt+2PWSNVnWIkGCU78jBG3RxO/bZlnZPK2Cmi2QaffxGO/2RvWi9sL+FAiRiXMgsyxQ1DIDA==} - engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} - hasBin: true - dev: false - /nanoid@3.3.7: resolution: {integrity: sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==} engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} hasBin: true - dev: true /neo-async@2.6.2: resolution: {integrity: sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==} @@ -1717,9 +1530,14 @@ packages: mimic-fn: 4.0.0 dev: true - /openai@4.43.0: - resolution: {integrity: sha512-4SMUB/XiqnO5IrEcdzEGGTcHoeXq7D/k82v36zoqSitrMUjenZXGH5JysIH7aF7Wr+gjvq0dT2mV6wLVKA7Seg==} + /openai@4.76.0(zod@3.23.8): + resolution: {integrity: sha512-QBGIetjX1C9xDp5XGa/3mPnfKI9BgAe2xHQX6PmO98wuW9qQaurBaumcYptQWc9LHZZq7cH/Y1Rjnsr6uUDdVw==} hasBin: true + peerDependencies: + zod: ^3.23.8 + peerDependenciesMeta: + zod: + optional: true dependencies: '@types/node': 18.19.24 '@types/node-fetch': 2.6.11 @@ -1728,7 +1546,7 @@ packages: form-data-encoder: 1.7.2 formdata-node: 4.4.1 node-fetch: 2.7.0 - web-streams-polyfill: 3.3.3 + zod: 3.23.8 transitivePeerDependencies: - encoding dev: false @@ -1834,15 +1652,6 @@ packages: engines: {node: '>=14'} hasBin: true - /pretty-format@27.5.1: - resolution: {integrity: sha512-Qb1gy5OrP5+zDf2Bvnzdl3jsTf1qXVMazbvCoKhtKqVs4/YK4ozX4gKQJJVyNe+cajNPn0KoC0MC3FUmaHWEmQ==} - engines: {node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0} - dependencies: - ansi-regex: 5.0.1 - ansi-styles: 5.2.0 - react-is: 17.0.2 - dev: true - /pretty-format@29.7.0: resolution: {integrity: sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} @@ -1888,31 +1697,10 @@ packages: resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} dev: true - /react-dom@18.3.1(react@18.2.0): - resolution: {integrity: sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw==} - peerDependencies: - react: ^18.3.1 - dependencies: - loose-envify: 1.4.0 - react: 18.2.0 - scheduler: 0.23.2 - dev: true - - /react-is@17.0.2: - resolution: {integrity: sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==} - dev: true - /react-is@18.2.0: resolution: {integrity: sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w==} dev: true - /react@18.2.0: - resolution: {integrity: sha512-/3IjMdb2L9QbBdWiW5e3P2/npwMBaU9mHCSCUzNln0ZCYbcfTsGbTJrU/kGemdH2IWmB2ioZ+zkxtmq6g09fGQ==} - engines: {node: '>=0.10.0'} - dependencies: - loose-envify: 1.4.0 - dev: true - /readable-stream@1.0.34: resolution: {integrity: 
sha512-ok1qVCJuRkNmvebYikljxJA/UEsKwLl2nI1OmaqAu4/UE+h0wKCHok4XkL/gvi39OacXvw59RJUOFUkDib2rHg==} dependencies: @@ -1941,10 +1729,6 @@ packages: picomatch: 2.3.1 dev: true - /regenerator-runtime@0.14.1: - resolution: {integrity: sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==} - dev: true - /require-directory@2.1.1: resolution: {integrity: sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==} engines: {node: '>=0.10.0'} @@ -2016,12 +1800,6 @@ packages: xmlchars: 2.2.0 dev: true - /scheduler@0.23.2: - resolution: {integrity: sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ==} - dependencies: - loose-envify: 1.4.0 - dev: true - /secure-json-parse@2.7.0: resolution: {integrity: sha512-6aU+Rwsezw7VR8/nyvKTx8QpWH9FrcYiXXlqC4z5d5XQBDRqtbfsRjnwGyqbi3gddNtWHuEk9OANUotL26qKUw==} dev: false @@ -2163,20 +1941,6 @@ packages: ts-interface-checker: 0.1.13 dev: true - /supports-color@5.5.0: - resolution: {integrity: sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==} - engines: {node: '>=4'} - dependencies: - has-flag: 3.0.0 - dev: true - - /supports-color@7.2.0: - resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==} - engines: {node: '>=8'} - dependencies: - has-flag: 4.0.0 - dev: true - /symbol-tree@3.2.4: resolution: {integrity: sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw==} dev: true @@ -2473,11 +2237,6 @@ packages: xml-name-validator: 5.0.0 dev: true - /web-streams-polyfill@3.3.3: - resolution: {integrity: sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw==} - engines: {node: '>= 8'} - dev: false - /web-streams-polyfill@4.0.0-beta.3: resolution: {integrity: sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug==} engines: {node: '>= 14'} diff --git a/src/LangtailPrompts.spec.ts b/src/LangtailPrompts.spec.ts index 07c8ab5..74db6cb 100644 --- a/src/LangtailPrompts.spec.ts +++ b/src/LangtailPrompts.spec.ts @@ -130,7 +130,7 @@ describe.skipIf(!liveTesting)( project: "ci-tests-project", fetch: async (url, init) => { expect(init?.headers?.["x-langtail-do-not-record"]).toBe("true") - expect(init?.headers?.["x-langtail-metadata-custom-field"]).toBe(1) + expect(init?.headers?.["x-langtail-metadata-custom-field"]).toBe("1") return { ok: true, @@ -157,7 +157,7 @@ describe.skipIf(!liveTesting)( }, doNotRecord: true, metadata: { - "custom-field": 1, + "custom-field": "1", }, }) diff --git a/src/bin/generateTools.ts b/src/bin/generateTools.ts index 05cd9bb..e7b143f 100644 --- a/src/bin/generateTools.ts +++ b/src/bin/generateTools.ts @@ -3,7 +3,7 @@ import fs from 'fs'; import { LangtailPrompts } from '../LangtailPrompts'; import jsonSchemaToZod from 'json-schema-to-zod'; import SDK_VERSION from '../version' -import { askUserToConfirm, dirExists, getApiKey, prepareOutputFilePath } from './utils'; +import { askUserToConfirm, dirExists, getApiKey, getBaseUrl, prepareOutputFilePath } from './utils'; import { Environment, PromptOptions, PromptSlug, Version } from '../types'; @@ -90,7 +90,8 @@ const generateTools = async ({ out }: GenerateToolsOptions) => { } const langtailPrompts = new LangtailPrompts({ - apiKey: getApiKey() + apiKey: getApiKey(), + baseURL: getBaseUrl() }); const deployments = await langtailPrompts.listDeployments(); diff 
--git a/src/bin/generateTypes.ts b/src/bin/generateTypes.ts index 76d8b31..da41918 100644 --- a/src/bin/generateTypes.ts +++ b/src/bin/generateTypes.ts @@ -1,7 +1,7 @@ import fs from 'fs'; import path from 'path'; import { LangtailEnvironment, LangtailPrompts } from "../LangtailPrompts"; -import { dirExists, getApiKey, prepareOutputFilePath } from "./utils"; +import { dirExists, getApiKey, getBaseUrl, prepareOutputFilePath } from "./utils"; import SDK_VERSION from '../version' import { Environment, PromptOptions, PromptSlug, Version } from '../types'; @@ -84,7 +84,8 @@ const generateTypes = async ({ out }: GenerateTypesOptions) => { } const langtailPrompts = new LangtailPrompts({ - apiKey: getApiKey() + apiKey: getApiKey(), + baseURL: getBaseUrl() }); const deployments = await langtailPrompts.listDeployments(); diff --git a/src/bin/utils.ts b/src/bin/utils.ts index 5adbcb0..a6be15f 100644 --- a/src/bin/utils.ts +++ b/src/bin/utils.ts @@ -3,6 +3,15 @@ import path from 'path'; import readline from 'readline'; +export const getBaseUrl = (): string => { + let baseUrl = process.env.LANGTAIL_BASE_URL; + if (!baseUrl) { + baseUrl = "https://api.langtail.com"; + } + return baseUrl; +} + + export const getApiKey = (): string => { const apiKey = process.env.LANGTAIL_API_KEY; if (!apiKey) { diff --git a/src/openai.spec.ts b/src/openai.spec.ts index 334a68e..c669b56 100644 --- a/src/openai.spec.ts +++ b/src/openai.spec.ts @@ -19,7 +19,7 @@ describe("Langtail Proxy", () => { model: "gpt-3.5-turbo", doNotRecord: false, metadata: { - "custom-field": 1, + "custom-field": "1", }, }) let partCount = 0 @@ -67,7 +67,7 @@ describe("Langtail Proxy", () => { model: "gpt-3.5-turbo", doNotRecord: true, metadata: { - "custom-field": 1, + "custom-field": "1", }, }) diff --git a/src/react/useChatStream.test.ts b/src/react/useChatStream.test.ts deleted file mode 100644 index 580cbf7..0000000 --- a/src/react/useChatStream.test.ts +++ /dev/null @@ -1,1411 +0,0 @@ - -import { describe, expect, it, vi } from "vitest" -import { renderHook, act } from "@testing-library/react" -import { type ChatMessage, useChatStream } from "./useChatStream" -import { EventEmitter } from "stream" -import { JSDOM } from 'jsdom' - -class DataEventListener extends EventEmitter { - addEventListener(event: string, listener: (...args: any[]) => void) { - this.on(event, listener) - } - - removeEventListener(event: string, listener: (...args: any[]) => void) { - this.off(event, listener) - } - - dispatchEvent(event: string, ...args: any[]) { - this.emit(event, ...args) - } - -} - -const toolCallData = `{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}],"usage":null} -{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[{"index":0,"delta":{"content":"Under"},"logprobs":null,"finish_reason":null}],"usage":null} -{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[{"index":0,"delta":{"content":"stood"},"logprobs":null,"finish_reason":null}],"usage":null} 
-{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}],"usage":null} -{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[{"index":0,"delta":{"content":" I"},"logprobs":null,"finish_reason":null}],"usage":null} -{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[{"index":0,"delta":{"content":" will"},"logprobs":null,"finish_reason":null}],"usage":null} -{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[{"index":0,"delta":{"content":" get"},"logprobs":null,"finish_reason":null}],"usage":null} -{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[{"index":0,"delta":{"content":" the"},"logprobs":null,"finish_reason":null}],"usage":null} -{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[{"index":0,"delta":{"content":" current"},"logprobs":null,"finish_reason":null}],"usage":null} -{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[{"index":0,"delta":{"content":" weather"},"logprobs":null,"finish_reason":null}],"usage":null} -{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[{"index":0,"delta":{"content":" for"},"logprobs":null,"finish_reason":null}],"usage":null} -{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[{"index":0,"delta":{"content":" Prague"},"logprobs":null,"finish_reason":null}],"usage":null} -{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}],"usage":null} -{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[{"index":0,"delta":{"content":" Czech"},"logprobs":null,"finish_reason":null}],"usage":null} -{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[{"index":0,"delta":{"content":" Republic"},"logprobs":null,"finish_reason":null}],"usage":null} -{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}],"usage":null} 
-{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[{"index":0,"delta":{"tool_calls":[{"index":0, "id":"call_tNW2f79DhRvuuwrslSYt3yVT","type":"function", "function":{ "name": "get_weather", "arguments":"{\\""}}]},"logprobs":null,"finish_reason":null}],"usage":null} -{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"location"}}]},"logprobs":null,"finish_reason":null}],"usage":null} -{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\\":\\""}}]},"logprobs":null,"finish_reason":null}],"usage":null} -{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"Pr"}}]},"logprobs":null,"finish_reason":null}],"usage":null} -{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"ague"}}]},"logprobs":null,"finish_reason":null}],"usage":null} -{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":","}}]},"logprobs":null,"finish_reason":null}],"usage":null} -{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" Czech"}}]},"logprobs":null,"finish_reason":null}],"usage":null} -{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" Republic"}}]},"logprobs":null,"finish_reason":null}],"usage":null} -{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\\"}"}}]},"logprobs":null,"finish_reason":null}],"usage":null} -{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"tool_calls"}],"usage":null} -{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[],"usage":{"prompt_tokens":284,"completion_tokens":34,"total_tokens":318}} -` - -describe("useAIStream", () => { - beforeAll(() => { - const dom = new JSDOM() - global.window = dom.window - global.document = 
dom.window.document - }) - - afterAll(() => { - // @ts-expect-error: setting up jsdom for react hooks - global.window = undefined - // @ts-expect-error: setting up jsdom for react hooks - global.document = undefined - }) - - describe("public API", () => { - it("type test: passing one message to send()", async () => { - const mockedStream = new ReadableStream() - - const createReadableStream = vi.fn((parameter) => { - expect(parameter).toEqual({ role: 'user', content: 'hello' }) - return Promise.resolve(mockedStream) - }) - - const { result } = renderHook(() => useChatStream({ fetcher: createReadableStream })) - - act(() => { - result.current.send({ role: 'user', content: 'hello' }) - }) - - await vi.waitFor(() => { - expect(createReadableStream).toHaveBeenCalledTimes(1) - }) - }) - - it("should pass paramters of send() to the createReadableStream()", async () => { - const mockedStream = new ReadableStream() - - const createReadableStream = vi.fn((parameter: string) => { - expect(parameter).toEqual(expectedParameter) - return Promise.resolve(mockedStream) - }) - - const expectedParameter = "hello" - const { result } = renderHook(() => useChatStream({ fetcher: createReadableStream })) - - act(() => { - result.current.send(expectedParameter) - }) - - await vi.waitFor(() => { - expect(createReadableStream).toHaveBeenCalledTimes(1) - }) - }) - - it("should call createReadableStream() as many times as send() is called", async () => { - const createReadableStream = vi.fn(() => { - return Promise.resolve(new ReadableStream()) - }) - - const { result } = renderHook(() => useChatStream({ fetcher: createReadableStream })) - - act(() => { - result.current.send('') - result.current.send('') - result.current.send('') - }) - - await vi.waitFor(() => { - expect(createReadableStream).toHaveBeenCalledTimes(3) - }) - }) - - it("should call trigger that stream was aborted when abort() is called", async () => { - const abortAgent = vi.fn() - function createMockReadadbleStream(dataEmitter: DataEventListener) { - return new ReadableStream({ - start(controller) { - dataEmitter.addEventListener('data', (data: string) => { - controller.enqueue(data) - }) - dataEmitter.addEventListener('close', (data: string) => { - controller.close(); - }) - }, - }); - } - - const dataEmitter = new DataEventListener() - - const stream = createMockReadadbleStream(dataEmitter) - - const createReadableStream = vi.fn(() => - Promise.resolve(stream) - ) - - const { result } = renderHook(() => - useChatStream({ - fetcher: createReadableStream, - onAbort: abortAgent - }), - ) - - act(() => { - result.current.send('user input') - dataEmitter.dispatchEvent('data', - `${JSON.stringify({ "id": "chatcmpl-9a0ckk5rq36dtBFE2ail2G2AZbk9s", "object": "chat.completion.chunk", "created": 1718369234, "model": "gpt-4o-2024-05-13", "system_fingerprint": "fp_319be4768e", "choices": [{ "index": 0, "delta": { "role": "assistant", "content": "hello" }, "logprobs": null, "finish_reason": null }], "usage": null })}\n - ${JSON.stringify({ "id": "chatcmpl-9a0ckk5rq36dtBFE2ail2G2AZbk9s", "object": "chat.completion.chunk", "created": 1718369234, "model": "gpt-4o-2024-05-13", "system_fingerprint": "fp_319be4768e", "choices": [{ "index": 0, "delta": { "role": "assistant", "content": "." 
}, "logprobs": null, "finish_reason": "stop" }], "usage": null })}\n` - ) - dataEmitter.dispatchEvent('close') - result.current.abort() - }) - - await vi.waitFor(() => { - expect(abortAgent).toHaveBeenCalledTimes(1) - }) - }) - - it("should change loading state to false when aborted", async () => { - const abortAgent = vi.fn() - function createMockReadadbleStream(dataEmitter: DataEventListener) { - return new ReadableStream({ - start(controller) { - dataEmitter.addEventListener('data', (data: string) => { - controller.enqueue(data) - }) - dataEmitter.addEventListener('close', (data: string) => { - controller.close(); - }) - }, - }); - } - - const dataEmitter = new DataEventListener() - - const stream = createMockReadadbleStream(dataEmitter) - - const createReadableStream = vi.fn(() => - Promise.resolve(stream) - ) - - const { result } = renderHook(() => - useChatStream({ - fetcher: createReadableStream, - onAbort: abortAgent - }), - ) - - act(() => { - result.current.send('user input') - dataEmitter.dispatchEvent('data', - `${JSON.stringify({ "id": "chatcmpl-9a0ckk5rq36dtBFE2ail2G2AZbk9s", "object": "chat.completion.chunk", "created": 1718369234, "model": "gpt-4o-2024-05-13", "system_fingerprint": "fp_319be4768e", "choices": [{ "index": 0, "delta": { "role": "assistant", "content": "hello" }, "logprobs": null, "finish_reason": null }], "usage": null })}\n - ${JSON.stringify({ "id": "chatcmpl-9a0ckk5rq36dtBFE2ail2G2AZbk9s", "object": "chat.completion.chunk", "created": 1718369234, "model": "gpt-4o-2024-05-13", "system_fingerprint": "fp_319be4768e", "choices": [{ "index": 0, "delta": { "role": "assistant", "content": "." }, "logprobs": null, "finish_reason": "stop" }], "usage": null })}\n` - ) - dataEmitter.dispatchEvent('close') - result.current.abort() - }) - - await vi.waitFor(() => { - expect(abortAgent).toHaveBeenCalledTimes(1) - expect(result.current.isLoading).to.be.false - }) - }) - - it("should call onError callback when an error occurs", async () => { - const createReadableStream = vi.fn(() => { - return Promise.reject(new Error('threw by test')) - }) - - const onError = vi.fn() - - - const { result } = renderHook(() => useChatStream({ fetcher: createReadableStream, onError })) - - act(() => { - result.current.send('') - }) - - await vi.waitFor(() => { - expect(onError).toHaveBeenCalledTimes(1) - }) - }) - - it("should change the loading state to false when error occurs", async () => { - const createReadableStream = vi.fn(() => { - return Promise.reject(new Error('threw by test')) - }) - - const onError = vi.fn() - - - const { result } = renderHook(() => useChatStream({ fetcher: createReadableStream, onError })) - - act(() => { - result.current.send('') - }) - - await vi.waitFor(() => { - expect(onError).toHaveBeenCalledTimes(1) - expect(result.current.isLoading).to.be.false - }) - }) - - describe("return values", () => { - it("should return isLoading when request is running", async () => { - const createReadableStream = vi.fn(() => - Promise.resolve(new ReadableStream()) - ) - - const { result } = renderHook(() => - useChatStream({ - fetcher: createReadableStream, - }), - ) - - act(() => { - expect(result.current.isLoading).toBe(false) - result.current.send('') - }) - - await vi.waitFor(() => { - expect(result.current.isLoading).toBe(true) - }) - }) - - it("should return isLoading: false when request finishes", async () => { - const createReadableStream = vi.fn(() => - Promise.resolve(new ReadableStream()) - ) - - const { result } = renderHook(() => - useChatStream({ - fetcher: 
createReadableStream, - }), - ) - - await act(async () => { - expect(result.current.isLoading).toBe(false) - result.current.send('') - await vi.waitFor(() => { - expect(result.current.isLoading).toBe(false) - }) - }) - }) - - it("should return and error when request fails", async () => { - const createReadableStream = vi.fn(() => { - return Promise.reject(new Error('threw by test')) - }) - - const { result } = renderHook(() => - useChatStream({ - fetcher: createReadableStream, - }), - ) - - act(() => { - result.current.send('') - }) - - await vi.waitFor(() => { - expect(result.current.error?.message).toEqual('threw by test') - }) - }) - - describe("messages", () => { - it("should fill messages with user data", async () => { - const createReadableStream = vi.fn(() => - Promise.resolve(new ReadableStream()) - ) - - const { result } = renderHook(() => - useChatStream({ - fetcher: createReadableStream, - }), - ) - - act(() => { - result.current.send('user input') - }) - - await vi.waitFor(() => { - expect(result.current.messages).toEqual([{ role: 'user', content: 'user input' }]) - }) - }) - - it("should add another message to the initial one", async () => { - function createMockReadadbleStream(dataEmitter: DataEventListener) { - return new ReadableStream({ - start(controller) { - dataEmitter.addEventListener('data', (data: string) => { - controller.enqueue(data) - controller.close(); - }) - }, - }); - } - - const dataEmitter = new DataEventListener() - - const stream = createMockReadadbleStream(dataEmitter) - - const createReadableStream = vi.fn(() => - Promise.resolve(stream) - ) - - const { result } = renderHook(() => - useChatStream({ - fetcher: createReadableStream, - }), - ) - - act(() => { - result.current.send('user input') - dataEmitter.dispatchEvent('data', - `${JSON.stringify({ "id": "chatcmpl-9a0ckk5rq36dtBFE2ail2G2AZbk9s", "object": "chat.completion.chunk", "created": 1718369234, "model": "gpt-4o-2024-05-13", "system_fingerprint": "fp_319be4768e", "choices": [{ "index": 0, "delta": { "role": "assistant", "content": "hello" }, "logprobs": null, "finish_reason": null }], "usage": null })}\n - ${JSON.stringify({ "id": "chatcmpl-9a0ckk5rq36dtBFE2ail2G2AZbk9s", "object": "chat.completion.chunk", "created": 1718369234, "model": "gpt-4o-2024-05-13", "system_fingerprint": "fp_319be4768e", "choices": [{ "index": 0, "delta": { "role": "assistant", "content": "." }, "logprobs": null, "finish_reason": "stop" }], "usage": null })}\n` - ) - }) - - await vi.waitFor(() => { - expect(result.current.messages).toEqual([ - { role: 'user', content: 'user input' }, - { role: 'assistant', content: 'hello.' 
}, - ]) - }) - }) - - it("should complete 2 messages", async () => { - function createMockReadadbleStream(dataEmitter: DataEventListener) { - return new ReadableStream({ - start(controller) { - dataEmitter.addEventListener('data', (data: string) => { - controller.enqueue(data) - }) - dataEmitter.addEventListener('close', (data: string) => { - controller.close(); - }) - }, - }); - } - - const dataEmitter = new DataEventListener() - - const stream = createMockReadadbleStream(dataEmitter) - - const createReadableStream = vi.fn(() => - Promise.resolve(stream) - ) - - const { result } = renderHook(() => - useChatStream({ - fetcher: createReadableStream, - }), - ) - - act(() => { - result.current.send('user input') - dataEmitter.dispatchEvent('data', - `${JSON.stringify({ "id": "chatcmpl-9a0ckk5rq36dtBFE2ail2G2AZbk9s", "object": "chat.completion.chunk", "created": 1718369234, "model": "gpt-4o-2024-05-13", "system_fingerprint": "fp_319be4768e", "choices": [{ "index": 0, "delta": { "role": "assistant", "content": "hello" }, "logprobs": null, "finish_reason": null }], "usage": null })}\n - ${JSON.stringify({ "id": "chatcmpl-9a0ckk5rq36dtBFE2ail2G2AZbk9s", "object": "chat.completion.chunk", "created": 1718369234, "model": "gpt-4o-2024-05-13", "system_fingerprint": "fp_319be4768e", "choices": [{ "index": 0, "delta": { "role": "assistant", "content": "!" }, "logprobs": null, "finish_reason": 'stop' }], "usage": null })}\n` - ) - dataEmitter.dispatchEvent('data', - `${JSON.stringify({ "id": "chatcmpl-123", "object": "chat.completion.chunk", "created": 1718369234, "model": "gpt-4o-2024-05-13", "system_fingerprint": "fp_319be4768e", "choices": [{ "index": 0, "delta": { "role": "assistant", "content": " And this " }, "logprobs": null, "finish_reason": null }], "usage": null })}\n - ${JSON.stringify({ "id": "chatcmpl-123", "object": "chat.completion.chunk", "created": 1718369234, "model": "gpt-4o-2024-05-13", "system_fingerprint": "fp_319be4768e", "choices": [{ "index": 0, "delta": { "role": "assistant", "content": "is my" }, "logprobs": null, "finish_reason": null }], "usage": null })}\n - ${JSON.stringify({ "id": "chatcmpl-123", "object": "chat.completion.chunk", "created": 1718369234, "model": "gpt-4o-2024-05-13", "system_fingerprint": "fp_319be4768e", "choices": [{ "index": 0, "delta": { "role": "assistant", "content": " end." }, "logprobs": null, "finish_reason": 'stop' }], "usage": null })}\n` - ) - - dataEmitter.dispatchEvent('close') - }) - - await vi.waitFor(() => { - expect(result.current.messages).toEqual([ - { role: 'user', content: 'user input' }, - { role: 'assistant', content: 'hello!' }, - { role: 'assistant', content: ' And this is my end.' 
}, - ]) - }) - }) - - it("should return message with a tool call", async () => { - function createMockReadadbleStream(dataEmitter: DataEventListener) { - return new ReadableStream({ - start(controller) { - dataEmitter.addEventListener('data', (data: string) => { - controller.enqueue(data) - controller.close(); - }) - }, - }); - } - - const dataEmitter = new DataEventListener() - - const stream = createMockReadadbleStream(dataEmitter) - - const createReadableStream = vi.fn(() => - Promise.resolve(stream) - ) - - const { result } = renderHook(() => - useChatStream({ - fetcher: createReadableStream, - }), - ) - - act(() => { - result.current.send('user input') - dataEmitter.dispatchEvent('data', - `{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}],"usage":null}\n -{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[{"index":0,"delta":{"content":"Understood, I will get the current weather for Prague, Czech Republic."},"logprobs":null,"finish_reason":null}],"usage":null}\n -{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[{"index":0,"delta":{"tool_calls":[{"index":0, "id":"call_tNW2f79DhRvuuwrslSYt3yVT", "type": "function", "function":{"name":"get_weather", "arguments":"{\\"location\\":\\"Prague, Czech Republic\\"}"}}]},"logprobs":null,"finish_reason":null}],"usage":null}\n -{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"tool_calls"}],"usage":null}\n -{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[],"usage":{"prompt_tokens":284,"completion_tokens":34,"total_tokens":318}}\n\n` - ) - }) - - await vi.waitFor(() => { - expect(result.current.messages).toEqual([ - { role: 'user', content: 'user input' }, - { - "content": "Understood, I will get the current weather for Prague, Czech Republic.", - "role": "assistant", - "tool_calls": [ - { - "function": { - "arguments": "{\"location\":\"Prague, Czech Republic\"}", - "name": "get_weather", - }, - "id": "call_tNW2f79DhRvuuwrslSYt3yVT", - "type": "function", - }, - ] - }, - ]) - }) - }) - }) - }) - - describe('too calls', () => { - it("should trigger a tool call", async () => { - function createMockReadadbleStream(dataEmitter: DataEventListener) { - return new ReadableStream({ - start(controller) { - dataEmitter.addEventListener('data', (data: string) => { - controller.enqueue(data) - }) - dataEmitter.addEventListener('close', (data: string) => { - controller.close(); - }) - }, - }); - } - - const dataEmitter = new DataEventListener() - - const stream = createMockReadadbleStream(dataEmitter) - - let ran = false - const createReadableStream = vi.fn(() => { - // NOTE: run this only once - if (ran) { - return Promise.reject('Error in tools!') - } - - ran = true - return Promise.resolve(stream) - }) - - const onToolCall = vi.fn(() => Promise.reject('Error in tools!')) - - const { result 
} = renderHook(() => - useChatStream({ - fetcher: createReadableStream, - onToolCall - }), - ) - - act(() => { - result.current.send('user input') - dataEmitter.dispatchEvent('data', toolCallData) - dataEmitter.dispatchEvent('close') - }) - - await vi.waitFor(() => { - expect(onToolCall).toBeCalledWith( - { - "function": { - "arguments": "{\"location\":\"Prague, Czech Republic\"}", - "name": "get_weather", - }, - "id": "call_tNW2f79DhRvuuwrslSYt3yVT", - "type": "function", - }, - { - "content": "Understood, I will get the current weather for Prague, Czech Republic.", - "role": "assistant", - "tool_calls": [ - { - "function": { - "arguments": "{\"location\":\"Prague, Czech Republic\"}", - "name": "get_weather", - }, - "id": "call_tNW2f79DhRvuuwrslSYt3yVT", - "type": "function", - }, - ], - } - ) - result.current.abort() - }) - }) - - - it("should ensure that the content isn't nullish", async () => { - function createMockReadadbleStream(dataEmitter: DataEventListener) { - return new ReadableStream({ - start(controller) { - dataEmitter.addEventListener('data', (data: string) => { - controller.enqueue(data) - controller.close(); - }) - }, - }); - } - - const dataEmitter = new DataEventListener() - - const stream = createMockReadadbleStream(dataEmitter) - - let ran = false - const createReadableStream = vi.fn(() => { - // NOTE: run this only once - if (ran) { - return Promise.reject('Error in tools!') - } - - ran = true - return Promise.resolve(stream) - }) - - const onToolCall = () => Promise.resolve('Result in test') - - const { result } = renderHook(() => - useChatStream({ - fetcher: createReadableStream, - onToolCall - }), - ) - - act(() => { - result.current.send('user input') - dataEmitter.dispatchEvent('data', - `{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[{"index":0,"delta":{"role":"assistant","content":null},"logprobs":null,"finish_reason":null}],"usage":null}\n -{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[{"index":0,"delta":{"content": null},"logprobs":null,"finish_reason":null}],"usage":null}\n -{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[{"index":0,"delta":{"tool_calls":[{"index":0, "id":"call_tNW2f79DhRvuuwrslSYt3yVT", "type": "function", "function":{"name":"get_weather", "arguments":"{\\"location\\":\\"Prague, Czech Republic\\"}"}}]},"logprobs":null,"finish_reason":null}],"usage":null}\n -{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"tool_calls"}],"usage":null}\n -{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[],"usage":{"prompt_tokens":284,"completion_tokens":34,"total_tokens":318}}\n\n` - ) - }) - - await vi.waitFor(() => { - expect(result.current.messages).toEqual([ - { role: 'user', content: 'user input' }, - { - "content": "", - "role": "assistant", - "tool_calls": [ - { - "function": { - "arguments": "{\"location\":\"Prague, Czech Republic\"}", - "name": "get_weather", - }, 
- "id": "call_tNW2f79DhRvuuwrslSYt3yVT", - "type": "function", - }, - ] - }, - { - "content": "Result in test", - "role": "tool", - "tool_call_id": "call_tNW2f79DhRvuuwrslSYt3yVT", - } - ]) - }) - }) - - it("should pass tool call result to the messages", async () => { - function createMockReadadbleStream(dataEmitter: DataEventListener) { - return new ReadableStream({ - start(controller) { - dataEmitter.addEventListener('data', (data: string) => { - controller.enqueue(data) - controller.close(); - }) - }, - }); - } - - const dataEmitter = new DataEventListener() - - const stream = createMockReadadbleStream(dataEmitter) - - let ran = false - const createReadableStream = vi.fn(() => { - // NOTE: run this only once - if (ran) { - return Promise.reject('Error in tools!') - } - - ran = true - return Promise.resolve(stream) - }) - - const onToolCall = () => Promise.resolve('Result in test') - - const { result } = renderHook(() => - useChatStream({ - fetcher: createReadableStream, - onToolCall - }), - ) - - act(() => { - result.current.send('user input') - dataEmitter.dispatchEvent('data', - `{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[{"index":0,"delta":{"role":"assistant","content":"U"},"logprobs":null,"finish_reason":null}],"usage":null}\n -{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[{"index":0,"delta":{"content":"nderstood, I will get the current weather for Prague, Czech Republic."},"logprobs":null,"finish_reason":null}],"usage":null}\n -{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[{"index":0,"delta":{"tool_calls":[{"index":0, "id":"call_tNW2f79DhRvuuwrslSYt3yVT", "type": "function", "function":{"name":"get_weather", "arguments":"{\\"location\\":\\"Prague, Czech Republic\\"}"}}]},"logprobs":null,"finish_reason":null}],"usage":null}\n -{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"tool_calls"}],"usage":null}\n -{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[],"usage":{"prompt_tokens":284,"completion_tokens":34,"total_tokens":318}}\n\n` - ) - }) - - await vi.waitFor(() => { - expect(result.current.messages).toEqual([ - { role: 'user', content: 'user input' }, - { - "content": "Understood, I will get the current weather for Prague, Czech Republic.", - "role": "assistant", - "tool_calls": [ - { - "function": { - "arguments": "{\"location\":\"Prague, Czech Republic\"}", - "name": "get_weather", - }, - "id": "call_tNW2f79DhRvuuwrslSYt3yVT", - "type": "function", - }, - ] - }, - { - "content": "Result in test", - "role": "tool", - "tool_call_id": "call_tNW2f79DhRvuuwrslSYt3yVT", - } - ]) - }) - }) - - - it("should assemble streamed message ending with a tool call in the complete messages", async () => { - function createMockReadadbleStream(dataEmitter: DataEventListener) { - return new ReadableStream({ - start(controller) { - dataEmitter.addEventListener('data', (data: string) => { - 
controller.enqueue(data) - controller.close(); - }) - }, - }); - } - - const dataEmitter = new DataEventListener() - - const stream = createMockReadadbleStream(dataEmitter) - - let ran = false - const createReadableStream = vi.fn(() => { - // NOTE: run this only once - if (ran) { - return Promise.reject('Error in tools!') - } - - ran = true - return Promise.resolve(stream) - }) - - const onToolCall = () => Promise.resolve('Result in test') - - const { result } = renderHook(() => - useChatStream({ - fetcher: createReadableStream, - onToolCall - }), - ) - - act(() => { - result.current.send('user input') - dataEmitter.dispatchEvent('data', - `{"id":"chatcmpl-AO3VbbywyEeMZvsHfHvTpZZRqP14K","object":"chat.completion.chunk","created":1730296723,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_90354628f2","choices":[{"index":0,"delta":{"role":"assistant","content":"","refusal":null},"logprobs":null,"finish_reason":null}],"usage":null}\n - {"id":"chatcmpl-AO3VbbywyEeMZvsHfHvTpZZRqP14K","object":"chat.completion.chunk","created":1730296723,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_90354628f2","choices":[{"index":0,"delta":{"content":"Sure"},"logprobs":null,"finish_reason":null}],"usage":null}\n - {"id":"chatcmpl-AO3VbbywyEeMZvsHfHvTpZZRqP14K","object":"chat.completion.chunk","created":1730296723,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_90354628f2","choices":[{"index":0,"delta":{"content":"!"},"logprobs":null,"finish_reason":null}],"usage":null}\n - {"id":"chatcmpl-AO3VbbywyEeMZvsHfHvTpZZRqP14K","object":"chat.completion.chunk","created":1730296723,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_90354628f2","choices":[{"index":0,"delta":{"content":" I'll"},"logprobs":null,"finish_reason":null}],"usage":null}\n - {"id":"chatcmpl-AO3VbbywyEeMZvsHfHvTpZZRqP14K","object":"chat.completion.chunk","created":1730296723,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_90354628f2","choices":[{"index":0,"delta":{"content":" generate a joke for you"},"logprobs":null,"finish_reason":null}],"usage":null}\n - {"id":"chatcmpl-AO3VbbywyEeMZvsHfHvTpZZRqP14K","object":"chat.completion.chunk","created":1730296723,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_90354628f2","choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}],"usage":null}\n - {"id":"chatcmpl-AO3VbbywyEeMZvsHfHvTpZZRqP14K","object":"chat.completion.chunk","model":"gpt-4o-2024-08-06","created":1730296723,"system_fingerprint":"fp_90354628f2","choices":[{"logprobs":null,"index":0,"finish_reason":"tool_calls","delta":{"content":null,"role":"assistant","tool_calls":[{"id":"call_SdFeFRJQTfJvZynvs6KgrN6t","type":"function","function":{"name":"generate_theme_joke","arguments":"{\\"theme\\":\\"Dad\\"}"}}]}}],"usage":{"prompt_tokens":125,"completion_tokens":43,"total_tokens":168,"prompt_tokens_details":{"cached_tokens":0},"completion_tokens_details":{"reasoning_tokens":0}}}\n\n` - ) - }) - - await vi.waitFor(() => { - expect(result.current.messages).toEqual([ - { role: 'user', content: 'user input' }, - { - "content": "Sure! 
I'll generate a joke for you.", - "refusal": null, - "role": "assistant", - "tool_calls": [ - { - "id": "call_SdFeFRJQTfJvZynvs6KgrN6t", - "type": "function", - "function": { - "name": "generate_theme_joke", - "arguments": "{\"theme\":\"Dad\"}" - } - } - ] - } - ]) - }) - }) - - - it("should properly add tool calls to the streamed messages", async () => { - function createMockReadadbleStream(dataEmitter: DataEventListener) { - return new ReadableStream({ - start(controller) { - dataEmitter.addEventListener('data', (data: string) => { - controller.enqueue(data) - }) - - dataEmitter.addEventListener('close', (data: string) => { - controller.close(); - }) - }, - }); - } - - const dataEmitter = new DataEventListener() - - const stream = createMockReadadbleStream(dataEmitter) - - let ran = false - const createReadableStream = vi.fn(() => { - // NOTE: run this only once - if (ran) { - return Promise.reject('Error in tools!') - } - - ran = true - return Promise.resolve(stream) - }) - - const onToolCall = () => Promise.resolve('Result in test') - - const { result } = renderHook(() => - useChatStream({ - fetcher: createReadableStream, - onToolCall - }), - ) - - act(() => { - result.current.send('user input') - dataEmitter.dispatchEvent('data', - `${[`{"id":"msg_01PNZ2n8jrVj1iiELVjqcw3E","object":"chat.completion.chunk","created":1730296723,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_90354628f2","choices":[{"index":0,"delta":{"role":"assistant","content":"","refusal":null},"logprobs":null,"finish_reason":null}],"usage":null}`, - `{ "id": "msg_01PNZ2n8jrVj1iiELVjqcw3E", "object": "chat.completion.chunk", "created": 1730364993, "model": "claude-3-sonnet-20240229", "system_fingerprint": null, "choices": [{ "index": 0, "delta": { "content": "Sure" }, "logprobs": null, "finish_reason": null }] }`, - `{ "id": "msg_01PNZ2n8jrVj1iiELVjqcw3E", "object": "chat.completion.chunk", "created": 1730364993, "model": "claude-3-sonnet-20240229", "system_fingerprint": null, "choices": [{ "index": 0, "delta": { "content": ", let" }, "logprobs": null, "finish_reason": null }] }`, - `{ "id": "msg_01PNZ2n8jrVj1iiELVjqcw3E", "object": "chat.completion.chunk", "created": 1730364993, "model": "claude-3-sonnet-20240229", "system_fingerprint": null, "choices": [{ "index": 0, "delta": { "content": " me generate" }, "logprobs": null, "finish_reason": null }] }`, - `{ "id": "msg_01PNZ2n8jrVj1iiELVjqcw3E", "object": "chat.completion.chunk", "created": 1730364993, "model": "claude-3-sonnet-20240229", "system_fingerprint": null, "choices": [{ "index": 0, "delta": { "content": " a dad joke for" }, "logprobs": null, "finish_reason": null }] }`, - `{ "id": "msg_01PNZ2n8jrVj1iiELVjqcw3E", "object": "chat.completion.chunk", "created": 1730364993, "model": "claude-3-sonnet-20240229", "system_fingerprint": null, "choices": [{ "index": 0, "delta": { "content": " you:" }, "logprobs": null, "finish_reason": null }] }`, - `${JSON.stringify({ "object": "chat.completion.chunk", "id": "msg_01PNZ2n8jrVj1iiELVjqcw3E", "model": "claude-3-sonnet-20240229", "created": 1730364994, "system_fingerprint": null, "choices": [{ "logprobs": null, "index": 0, "finish_reason": "tool_use", "delta": { "content": null, "role": "assistant", "tool_calls": [{ "id": "toolu_01B1GTdvhAEB29KubfFpUbFm", "type": "function", "function": { "name": "generate_dad_jokes", "arguments": "{\"theme\": \"general\"}" } }] } }] })}`].join("\n")}\n` - ) - dataEmitter.dispatchEvent('data', - [ - 
`{"object":"langtail.tool.handled","id":"msg_01PNZ2n8jrVj1iiELVjqcw3E-langtail-tool-handled-toolu_01B1GTdvhAEB29KubfFpUbFm","model":"claude-3-sonnet-20240229","created":1730364994,"system_fingerprint":null,"choices":[{"logprobs":null,"index":0,"finish_reason":"tool_calls_handled","delta":{"role":"tool","tool_call_id":"toolu_01B1GTdvhAEB29KubfFpUbFm","content":"Someone messed up number of floors in the elevator. It was wrong on so many levels."}}]}`, - `{"id":"msg_01G6x1ceDQcQGgoASaH8VM7g","object":"chat.completion.chunk","created":1730364995,"model":"claude-3-sonnet-20240229","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}`, - `{"id":"msg_01G6x1ceDQcQGgoASaH8VM7g","object":"chat.completion.chunk","created":1730364995,"model":"claude-3-sonnet-20240229","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"There"},"logprobs":null,"finish_reason":null}]}`, - `{"id":"msg_01G6x1ceDQcQGgoASaH8VM7g","object":"chat.completion.chunk","created":1730364995,"model":"claude-3-sonnet-20240229","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"'s"},"logprobs":null,"finish_reason":null}]}`, - `{"id":"msg_01G6x1ceDQcQGgoASaH8VM7g","object":"chat.completion.chunk","created":1730364995,"model":"claude-3-sonnet-20240229","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":" a"},"logprobs":null,"finish_reason":null}]}`, - `{"id":"msg_01G6x1ceDQcQGgoASaH8VM7g","object":"chat.completion.chunk","created":1730364995,"model":"claude-3-sonnet-20240229","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":" classic"},"logprobs":null,"finish_reason":null}]}`, - `{"id":"msg_01G6x1ceDQcQGgoASaH8VM7g","object":"chat.completion.chunk","created":1730364995,"model":"claude-3-sonnet-20240229","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":" da"},"logprobs":null,"finish_reason":null}]}`, - `{"id":"msg_01G6x1ceDQcQGgoASaH8VM7g","object":"chat.completion.chunk","created":1730364995,"model":"claude-3-sonnet-20240229","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"d joke for"},"logprobs":null,"finish_reason":null}]}`, - `{"id":"msg_01G6x1ceDQcQGgoASaH8VM7g","object":"chat.completion.chunk","created":1730364995,"model":"claude-3-sonnet-20240229","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":" you! 
Let"},"logprobs":null,"finish_reason":null}]}`, - `{"id":"msg_01G6x1ceDQcQGgoASaH8VM7g","object":"chat.completion.chunk","created":1730364995,"model":"claude-3-sonnet-20240229","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":" me know if you"},"logprobs":null,"finish_reason":null}]}`, - `{ "id": "msg_01G6x1ceDQcQGgoASaH8VM7g", "object": "chat.completion.chunk", "created": 1730364995, "model": "claude-3-sonnet-20240229", "system_fingerprint": null, "choices": [{ "index": 0, "delta": { "content": "'" }, "logprobs": null, "finish_reason": null }] }`, - `{"id":"msg_01G6x1ceDQcQGgoASaH8VM7g","object":"chat.completion.chunk","created":1730364996,"model":"claude-3-sonnet-20240229","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"d like another"},"logprobs":null,"finish_reason":null}]}`, - `{"id":"msg_01G6x1ceDQcQGgoASaH8VM7g","object":"chat.completion.chunk","created":1730364996,"model":"claude-3-sonnet-20240229","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":" one on"},"logprobs":null,"finish_reason":null}]}`, - `{"id":"msg_01G6x1ceDQcQGgoASaH8VM7g","object":"chat.completion.chunk","created":1730364996,"model":"claude-3-sonnet-20240229","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":" a different theme"},"logprobs":null,"finish_reason":null}]}`, - `{"id":"msg_01G6x1ceDQcQGgoASaH8VM7g","object":"chat.completion.chunk","created":1730364996,"model":"claude-3-sonnet-20240229","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]}`, - `{"id":"msg_01G6x1ceDQcQGgoASaH8VM7g","object":"chat.completion.chunk","created":1730364996,"model":"claude-3-sonnet-20240229","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":""},"logprobs":null,"finish_reason":"end_turn"}]}\n`].join("\n") - - - ) - dataEmitter.dispatchEvent('close') - }) - - await vi.waitFor(() => { - expect(result.current.messages).toEqual([ - { role: 'user', content: 'user input' }, - { - "content": "Sure, let me generate a dad joke for you:", - "refusal": null, - "role": "assistant", - "tool_calls": [{ "id": "toolu_01B1GTdvhAEB29KubfFpUbFm", "type": "function", "function": { "name": "generate_dad_jokes", "arguments": "{\"theme\": \"general\"}" } }] - }, - { - "content": "Someone messed up number of floors in the elevator. It was wrong on so many levels.", - "role": "tool", - "tool_call_id": "toolu_01B1GTdvhAEB29KubfFpUbFm", - }, - { - "content": "There's a classic dad joke for you! 
Let me know if you'd like another one on a different theme.", - "role": "assistant", - } - ]) - }) - }) - - it("should request AI completion with tool call reults", async () => { - function createMockReadadbleStream(dataEmitter: DataEventListener) { - return new ReadableStream({ - start(controller) { - dataEmitter.addEventListener('data', (data: string) => { - controller.enqueue(data) - }) - dataEmitter.addEventListener('close', (data: string) => { - controller.close(); - }) - }, - }); - } - - const dataEmitter = new DataEventListener() - - const stream = createMockReadadbleStream(dataEmitter) - - let ran = false - const createReadableStream = vi.fn(() => { - // NOTE: run this only once - if (ran) { - return Promise.reject('Error in tools!') - } - - ran = true - return Promise.resolve(stream) - }) - - const onToolCall = vi.fn(() => { - return Promise.resolve('Tool result in test!') - }) - - const { result } = renderHook(() => - useChatStream({ - fetcher: createReadableStream, - onToolCall - }), - ) - - act(() => { - result.current.send('user input') - dataEmitter.dispatchEvent('data', toolCallData) - dataEmitter.dispatchEvent('close') - }) - - await vi.waitFor(() => { - expect(createReadableStream.mock.lastCall?.at(0)).toEqual( - [ - { role: 'user', content: 'user input' }, - { - role: 'assistant', - content: 'Understood, I will get the current weather for Prague, Czech Republic.', - tool_calls: [{ - "function": { - "arguments": "{\"location\":\"Prague, Czech Republic\"}", - "name": "get_weather", - }, - "id": "call_tNW2f79DhRvuuwrslSYt3yVT", - "type": "function", - }] - }, - { - role: 'tool', - tool_call_id: 'call_tNW2f79DhRvuuwrslSYt3yVT', - content: 'Tool result in test!' - } - ] - ) - result.current.abort() - }) - }) - - it("should pass through tool call arguments to tool result", async () => { - function createMockReadadbleStream(dataEmitter: DataEventListener) { - return new ReadableStream({ - start(controller) { - dataEmitter.addEventListener('data', (data: string) => { - controller.enqueue(data) - }) - dataEmitter.addEventListener('close', (data: string) => { - controller.close(); - }) - }, - }); - } - - const dataEmitter = new DataEventListener() - - const stream = createMockReadadbleStream(dataEmitter) - - let ran = false - const createReadableStream = vi.fn(() => { - // NOTE: run this only once - if (ran) { - return Promise.reject('Error in tools!') - } - - ran = true - return Promise.resolve(stream) - }) - - const onToolCall = vi.fn((call) => { - return Promise.resolve(`call: ${call.id}, function: ${call.function.name}, arguments: ${JSON.parse(call.function.arguments).location}`) - }) - - const { result } = renderHook(() => - useChatStream({ - fetcher: createReadableStream, - onToolCall - }), - ) - - act(() => { - result.current.send('user input') - dataEmitter.dispatchEvent('data', toolCallData) - dataEmitter.dispatchEvent('close') - }) - - await vi.waitFor(() => { - expect(createReadableStream.mock.lastCall?.at(0)).toEqual([ - { role: 'user', content: 'user input' }, - { - role: 'assistant', - content: 'Understood, I will get the current weather for Prague, Czech Republic.', - tool_calls: [{ - "function": { - "arguments": "{\"location\":\"Prague, Czech Republic\"}", - "name": "get_weather", - }, - "id": "call_tNW2f79DhRvuuwrslSYt3yVT", - "type": "function", - }] - }, - { - role: 'tool', - tool_call_id: 'call_tNW2f79DhRvuuwrslSYt3yVT', - content: "call: call_tNW2f79DhRvuuwrslSYt3yVT, function: get_weather, arguments: Prague, Czech Republic", - }] - - ) - result.current.abort() 
- }) - }) - - - it("should it have the results from the AI in messages prop after tool result response", async () => { - function createMockReadadbleStream(dataEmitter: DataEventListener) { - return new ReadableStream({ - start(controller) { - dataEmitter.addEventListener('data', (data: string) => { - controller.enqueue(data) - }) - dataEmitter.addEventListener('close', (data: string) => { - controller.close(); - }) - }, - }); - } - - const queryDataEmitter = new DataEventListener() - const toolQueryEmitter = new DataEventListener() - - const queryStream = createMockReadadbleStream(queryDataEmitter) - const toolQueryStream = createMockReadadbleStream(toolQueryEmitter) - - let userQueryRequest = false - const createReadableStream = vi.fn((parms) => { - if (!userQueryRequest) { - userQueryRequest = true - return Promise.resolve(queryStream) - } - - return Promise.resolve(toolQueryStream) - }) - - - const onToolCall = vi.fn(() => { - return Promise.resolve(`Cloudy.`) - }) - - const { result } = renderHook(() => - useChatStream({ - fetcher: createReadableStream, - onToolCall, - }), - ) - - act(() => { - result.current.send('user input') - queryDataEmitter.dispatchEvent('data', - `{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[{"index":0,"delta":{"role":"assistant","content":"U"},"logprobs":null,"finish_reason":null}],"usage":null}\n -{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[{"index":0,"delta":{"content":"nderstood, I will get the current weather for Prague, Czech Republic."},"logprobs":null,"finish_reason":null}],"usage":null}\n -{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[{"index":0,"delta":{"tool_calls":[{"index":0, "id":"call_tNW2f79DhRvuuwrslSYt3yVT", "type": "function", "function":{"name":"get_weather", "arguments":"{\\"location\\":\\"Prague, Czech Republic\\"}"}}]},"logprobs":null,"finish_reason":null}],"usage":null}\n -{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"tool_calls"}],"usage":null}\n -{"id":"chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow","object":"chat.completion.chunk","created":1718443487,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_319be4768e","choices":[],"usage":{"prompt_tokens":284,"completion_tokens":34,"total_tokens":318}}\n\n` - ) - queryDataEmitter.dispatchEvent('close') - }) - - await vi.waitFor(() => { - expect(result.current.messages).toEqual([ - { role: 'user', content: 'user input' }, - { - "content": "Understood, I will get the current weather for Prague, Czech Republic.", - "role": "assistant", - "tool_calls": [ - { - "function": { - "arguments": "{\"location\":\"Prague, Czech Republic\"}", - "name": "get_weather", - }, - "id": "call_tNW2f79DhRvuuwrslSYt3yVT", - "type": "function", - }, - ] - }, - { - "content": "Cloudy.", - "role": "tool", - "tool_call_id": "call_tNW2f79DhRvuuwrslSYt3yVT", - } - ]) - }) - - await act(() => { - toolQueryEmitter.dispatchEvent('data', - `${JSON.stringify({ "id": "chatcmpl-9boHC3d9Nn2u2bdmE46H0nbEoknpv", "object": "chat.completion.chunk", "created": 
1718798426, "model": "gpt-4o-2024-05-13", "system_fingerprint": "fp_f4e629d0a5", "choices": [{ "index": 0, "delta": { "role": "assistant", "content": "" }, "logprobs": null, "finish_reason": null }], "usage": null })}\n - ${JSON.stringify({ "id": "chatcmpl-9boHC3d9Nn2u2bdmE46H0nbEoknpv", "object": "chat.completion.chunk", "created": 1718798426, "model": "gpt-4o-2024-05-13", "system_fingerprint": "fp_f4e629d0a5", "choices": [{ "index": 0, "delta": { "content": "The weather in Prague is cloudy." }, "logprobs": null, "finish_reason": null }], "usage": null })}\n - ${JSON.stringify({ "id": "chatcmpl-9boHC3d9Nn2u2bdmE46H0nbEoknpv", "object": "chat.completion.chunk", "created": 1718798426, "model": "gpt-4o-2024-05-13", "system_fingerprint": "fp_f4e629d0a5", "choices": [{ "index": 0, "delta": {}, "logprobs": null, "finish_reason": "stop" }], "usage": null })}\n - ${JSON.stringify({ "id": "chatcmpl-9boHC3d9Nn2u2bdmE46H0nbEoknpv", "object": "chat.completion.chunk", "created": 1718798426, "model": "gpt-4o-2024-05-13", "system_fingerprint": "fp_f4e629d0a5", "choices": [], "usage": { "prompt_tokens": 360, "completion_tokens": 20, "total_tokens": 380 } })}\n` - ) - - toolQueryEmitter.dispatchEvent('close') - }) - - - await vi.waitFor(() => { - expect(result.current.messages).toEqual([ - { role: 'user', content: 'user input' }, - { - "content": "Understood, I will get the current weather for Prague, Czech Republic.", - "role": "assistant", - "tool_calls": [ - { - "function": { - "arguments": "{\"location\":\"Prague, Czech Republic\"}", - "name": "get_weather", - }, - "id": "call_tNW2f79DhRvuuwrslSYt3yVT", - "type": "function", - }, - ] - }, - { - "content": "Cloudy.", - "role": "tool", - "tool_call_id": "call_tNW2f79DhRvuuwrslSYt3yVT", - }, - { - "content": "The weather in Prague is cloudy.", - "role": "assistant", - } - ]) - }) - }) - - - it("should handle complete 'message' in choice instead of 'delta' without crashing", async () => { - function createMockReadadbleStream(dataEmitter: DataEventListener) { - return new ReadableStream({ - start(controller) { - dataEmitter.addEventListener('data', (data: string) => { - controller.enqueue(data) - }) - dataEmitter.addEventListener('close', (data: string) => { - controller.close(); - }) - }, - }); - } - const dataEmitter = new DataEventListener() - const stream = createMockReadadbleStream(dataEmitter) - const createReadableStream = vi.fn(() => Promise.resolve(stream)) - - const { result } = renderHook(() => - useChatStream({ - fetcher: createReadableStream, - }), - ) - - act(() => { - result.current.send([{ role: 'user', content: 'Test message' }]) - dataEmitter.dispatchEvent('data', JSON.stringify({ - id: "chatcmpl-123", - object: "chat.completion", - created: 1718443487, - model: "gpt-4-0613", - choices: [{ - index: 0, - message: { - role: "assistant", - content: "This is a complete message." - }, - finish_reason: "stop" - }] - })) - dataEmitter.dispatchEvent('close') - }) - - await vi.waitFor(() => { - expect(result.current.messages).toEqual([ - { role: 'user', content: 'Test message' }, - { role: 'assistant', content: 'This is a complete message.' 
} - ]) - }) - }) - - it("should keep the role 'tool' when the message is complete", async () => { - function createMockReadadbleStream(dataEmitter: DataEventListener) { - return new ReadableStream({ - start(controller) { - dataEmitter.addEventListener('data', (data: string) => { - controller.enqueue(data) - }) - dataEmitter.addEventListener('close', (data: string) => { - controller.close(); - }) - }, - }); - } - const dataEmitter = new DataEventListener() - const stream = createMockReadadbleStream(dataEmitter) - const createReadableStream = vi.fn(() => Promise.resolve(stream)) - - const { result } = renderHook(() => - useChatStream({ - fetcher: createReadableStream, - }), - ) - - await act(() => { - result.current.send([{ role: 'user', content: 'Test message' }]) - dataEmitter.dispatchEvent('data', `${JSON.stringify({ - "object": "langtail.tool.handled", - "id": "chatcmpl-A8SBTY8If8s8yBP7HRj5tfd4uLDZC-langtail-tool-handled-call_uTSra7MsEkzrL8VffnRtHsdW", - "model": "gpt-4o-2024-05-13", - "created": 1726578327, - "system_fingerprint": "fp_a5d11b2ef2", - "choices": [{ "logprobs": null, "index": 0, "finish_reason": "tool_calls_handled", "delta": { "role": "tool", "tool_call_id": "call_uTSra7MsEkzrL8VffnRtHsdW", "content": "\n This is the joke: Why do Czechs always carry a pencil and paper?\n\nIn case they need to draw a red line somewhere!", "handled_tool_result": true } }], - "usage": { "prompt_tokens": 100, "completion_tokens": 17, "total_tokens": 117, "completion_tokens_details": { "reasoning_tokens": 0 } } - })}\n\n`) - - return act(() => { - dataEmitter.dispatchEvent('data', `${JSON.stringify({ - id: "chatcmpl-125", - object: "chat.completion.chunk", - created: 1718443487, - model: "gpt-4-0613", - choices: [{ - index: 0, - message: { - role: "assistant", - content: "End of assistant message!" - }, - finish_reason: "stop" - }] - })}\n\n`) - - dataEmitter.dispatchEvent('close') - }) - }) - - - await vi.waitFor(() => { - expect(result.current.messages).toEqual([ - { role: 'user', content: 'Test message' }, { - "content": ` - This is the joke: Why do Czechs always carry a pencil and paper? - -In case they need to draw a red line somewhere!`, - "handled_tool_result": true, - "role": "tool", - "tool_call_id": "call_uTSra7MsEkzrL8VffnRtHsdW", - }, - { role: 'assistant', content: "End of assistant message!" } - ]) - }) - }) - }) - - - describe("image support", () => { - it("should should accept the user message containing an image without any problems", async () => { - function createMockReadadbleStream(dataEmitter: DataEventListener) { - return new ReadableStream({ - start(controller) { - dataEmitter.addEventListener('data', (data: string) => { - controller.enqueue(data) - controller.close(); - }) - }, - }); - } - - const dataEmitter = new DataEventListener() - - const stream = createMockReadadbleStream(dataEmitter) - - const createReadableStream = vi.fn(() => - Promise.resolve(stream) - ) - - const { result } = renderHook(() => - useChatStream({ - fetcher: createReadableStream, - }), - ) - - act(() => { - result.current.send({ - role: "user", - content: [ - { - type: "image_url", - image_url: { - detail: "auto", - url: 'https://example.com/image.jpg' - } - } - ] - }) - dataEmitter.dispatchEvent('data', - `{ "id": "chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow", "object": "chat.completion.chunk", "created": 1718443487, "model": "gpt-4o-2024-05-13", "system_fingerprint": "fp_319be4768e", "choices": [{ "index": 0, "delta": { "role": "assistant", "content": "Ahoj." 
}, "logprobs": null, "finish_reason": "stop" }], "usage": null }\n`) - }) - - await vi.waitFor(() => { - expect(result.current.messages).toEqual([ - { - role: 'user', content: [ - { - "image_url": { - "detail": "auto", - "url": "https://example.com/image.jpg", - }, - "type": "image_url", - }, - ] - }, - { - "content": "Ahoj.", - "role": "assistant", - }, - ]) - }) - }) - - it("should should accept the user message containing an image and message without any problems", async () => { - function createMockReadadbleStream(dataEmitter: DataEventListener) { - return new ReadableStream({ - start(controller) { - dataEmitter.addEventListener('data', (data: string) => { - controller.enqueue(data) - controller.close(); - }) - }, - }); - } - - const dataEmitter = new DataEventListener() - - const stream = createMockReadadbleStream(dataEmitter) - - const createReadableStream = vi.fn(() => - Promise.resolve(stream) - ) - - const { result } = renderHook(() => - useChatStream({ - fetcher: createReadableStream, - }), - ) - - act(() => { - result.current.send({ - role: "user", - content: [ - { - type: "image_url", - image_url: { - detail: "auto", - url: 'https://example.com/image.jpg' - } - }, - { - type: "text", - text: "Čau!" - } - ] - }) - dataEmitter.dispatchEvent('data', - `{ "id": "chatcmpl-9aJwNzlnvn1jG845CJe2QZH6AKcow", "object": "chat.completion.chunk", "created": 1718443487, "model": "gpt-4o-2024-05-13", "system_fingerprint": "fp_319be4768e", "choices": [{ "index": 0, "delta": { "role": "assistant", "content": "Ahoj." }, "logprobs": null, "finish_reason": "stop" }], "usage": null }\n`) - }) - - await vi.waitFor(() => { - expect(result.current.messages).toEqual([ - { - role: 'user', content: [ - { - "image_url": { - "detail": "auto", - "url": "https://example.com/image.jpg", - }, - "type": "image_url", - }, - { - "text": "Čau!", - "type": "text", - } - ] - }, - { - "content": "Ahoj.", - "role": "assistant", - }, - ]) - }) - }) - }) - }) -}) diff --git a/src/react/useChatStream.ts b/src/react/useChatStream.ts deleted file mode 100644 index ba91c59..0000000 --- a/src/react/useChatStream.ts +++ /dev/null @@ -1,432 +0,0 @@ -import { - ChatCompletion, - ChatCompletionAssistantMessageParam, - ChatCompletionMessageToolCall, - ChatCompletionChunk, - ChatCompletionMessageParam, - ChatCompletionMessage, -} from "openai/resources" -import { chatStreamToRunner, type ChatCompletionStream } from "../stream" -import { useRef, useState } from "react" - -const defaultReturn = { - abort: () => { }, -} - -export type ChatMessage = - | { - role: "user" | "assistant" | "system" - content: string | null - tool_calls?: ChatCompletionMessageToolCall[] - } - | { - role: "tool" - tool_call_id: string - content: string | null - tool_calls?: ChatCompletionMessageToolCall[] - } | { - role: "assistant" | "user" | "system" | "tool" - content: [ - { - type: "image_url", - image_url: { - detail: "auto", - url: string, - }, - }, - ] | [ - { - type: "image_url", - image_url: { - detail: "auto", - url: string, - }, - }, - { - type: "text", - text: string, - }, - ] - tool_calls?: ChatCompletionMessageToolCall[] - } - -function addDeltaToolCalls(message: ChatCompletionMessage | ChatCompletion.Choice | ChatMessage): ChatMessage { - const result = { - ...("message" in message ? message.message : message), - ...("delta" in message && message.delta && typeof message.delta === 'object' && "tool_calls" in message.delta ? 
{ tool_calls: message.delta.tool_calls as ChatCompletionMessageToolCall[] } : {}), - } - - return result -} - -export function mapAIMessagesToChatCompletions( - messages: (ChatCompletion | ChatMessage)[], -): ChatMessage[] { - return messages.flatMap((message) => { - if ("id" in message && "choices" in message) { - return message.choices.map((choice) => { - return addDeltaToolCalls(choice) - }) - } - - return [addDeltaToolCalls(message)] - }) -} - -export function combineAIMessageChunkWithCompleteMessages( - messages: (ChatCompletion | ChatMessage)[], - chunk: ChatCompletionChunk, -): (ChatCompletion | ChatMessage)[] { - const messageId = chunk.id - - const contentMessage = chunk.choices.find((choice) => { - const lookForContentIn = choice.delta || ("message" in choice && choice.message) || {} - return "content" in lookForContentIn - }) - - if (!contentMessage) { - return messages - } - - const existingMessageToComplete = messages.find((message) => { - return "id" in message && message.id === messageId - }) - - const choicesDeltas = chunk.choices.filter((choice) => { - return "delta" in choice && choice.delta // NOTE: delte can be null - }) - - if (!existingMessageToComplete && choicesDeltas.length > 0) { - return [ - ...messages, - { - id: chunk.id, - created: chunk.created, - model: chunk.model, - object: 'chat.completion', - choices: choicesDeltas.map((choice) => { - const messageChoice: ChatCompletion.Choice = { - finish_reason: 'length' as const, - index: choice.index, - logprobs: null, - delta: choice.delta, - message: { - ...choice.delta, - content: choice.delta.content ?? "", - // @ts-expect-error - mantain the original role here - role: choice.delta.role ?? "assistant" as const, - }, - } - - return messageChoice - }), - }, - ] - } - - return messages.map((message) => { - if ("id" in message && message.id === messageId) { - const chunkChoices = chunk.choices.reduce< - Map - >((acc, choice) => { - acc.set(choice.index, choice) - return acc - }, new Map()) - - return { - ...message, - choices: message.choices.map((choice) => { - const chunkChoice = chunkChoices.get(choice.index) - - if (!chunkChoice) { - return choice - } - - return { - ...choice, - ...{ - ...chunkChoice, - finish_reason: chunkChoice.finish_reason ?? 'length' as const, - }, - message: { - ...choice.message, - content: - (choice.message?.content ?? "") + - (chunkChoice.delta.content ?? ""), - }, - } - }), - } - } - return message - }) -} - -function normalizeMessage(message: ChatCompletionMessage, currentMessage?: ChatCompletionChunk) { - const toolCalls = (message.tool_calls && message.tool_calls.length === 0 && currentMessage?.choices?.some(choice => (("delta" in choice) && "tool_calls" in choice.delta) && choice.delta?.tool_calls) - ? currentMessage?.choices?.flatMap(choice => (("delta" in choice) && "tool_calls" in choice.delta) && Array.isArray(choice.delta?.tool_calls) ? choice.delta?.tool_calls : []) - : message.tool_calls ?? []) as ChatCompletionMessageToolCall[] - return { - ...message, - // NOTE: ensure that message isn't null or undefined - content: message.content ?? "", - ...(toolCalls.length > 0 ? 
{ tool_calls: toolCalls } : {}), - } -} - -function parameterToMessage( - parameter: ChatMessage | ChatMessage[] | string, -): ChatMessage[] { - if (Array.isArray(parameter)) { - return parameter - } - - if (typeof parameter === "string") { - return [ - { - role: "user", - content: parameter, - }, - ] - } - - return [parameter] -} - -export function useChatStream< - P extends ChatMessage[] | ChatMessage | string, - O extends Record = Record, ->( - options: { - messageMode?: 'append' | 'replace' - fetcher: ( - paramters: P, - optional: O | undefined, - abortController: AbortController, - ) => Promise | null>, - onText?: (contentDelta: string) => void - onToolCall?: ( - toolCall: ChatCompletionMessageToolCall, - message: ChatCompletionAssistantMessageParam, - ) => Promise | string - onAbort?: () => void - onChunk?: (chunk: ChatCompletionChunk) => void - onError?: (error: Error) => void - onStart?: () => void - onEnd?: (finalAIMessages: ChatMessage[]) => void - onMessagesChange?: (messages: ChatMessage[]) => void - } = { - fetcher: () => Promise.resolve(null), - }, -): { - isLoading: boolean - error: null | Error - messages: ChatMessage[] - addMessages: (additionalMessages: ChatCompletion[]) => void - setMessages: (additionalMessages: ChatCompletion[]) => void - abort: () => void - send: (parameters: P, optional?: O | undefined) => void -} { - const messagesRef = useRef<(ChatCompletion | ChatMessage)[]>([]) - const runnerRef = useRef( - defaultReturn, - ) - const toolRunRef = useRef(false) - const abortControllerRef = useRef(null) - const [isLoading, setIsLoading] = useState(false) - const [messages, setMessages] = useState<(ChatCompletion | ChatMessage)[]>([]) - const [error, setError] = useState(null) - const generatingRef = useRef(false) - const endedRef = useRef(false) - const errorRef = useRef(null) - const messageMode = options.messageMode ?? 
'append' - - function setIsLoadingState(generating: boolean) { - generatingRef.current = generating - setIsLoading(generatingRef.current) - } - - function setMessagesState(messages: (ChatCompletion | ChatMessage)[]) { - messagesRef.current = messages - setMessages(messagesRef.current) - } - - function setErrorState(error: Error | null) { - errorRef.current = error - setError(errorRef.current) - } - - - function addMessages( - messages: (ChatCompletion | ChatMessage)[], - ): (ChatCompletion | ChatMessage)[] { - messagesRef.current = messagesRef.current.concat(messages) - setMessagesState(messagesRef.current) - - return messagesRef.current - } - - return { - error, - messages: mapAIMessagesToChatCompletions(messages), - isLoading, - addMessages, - setMessages: (messages: ChatCompletion[]) => { - setMessagesState(messages) - }, - abort: () => { - setIsLoadingState(false) - runnerRef.current.abort() - abortControllerRef.current?.abort() - }, - send: function send(parameter: P, optional?: O) { - const abortController = new AbortController() - abortControllerRef.current = abortController - - switch (messageMode) { - case 'replace': - setMessagesState(parameterToMessage(parameter)) - case 'append': - addMessages(parameterToMessage(parameter)) - } - - setIsLoadingState(true) - return options.fetcher(parameter, optional, abortController).then( - (maybeStream) => { - if (!maybeStream) { - setIsLoadingState(false) - return () => { } - } - - - const onConnect = () => { - if (endedRef.current) { - options.onStart?.() - } - } - - const onFinalChatCompletion = (finalMessage: ChatCompletion) => { - // NOTE: for some reason, tool_calls are empty in finalMessage, that's why we keep them through storing the finalized message - const finalizedMessage = messagesRef.current.find((currentMessage) => { - return "id" in currentMessage && currentMessage.id === finalMessage.id - }) - - messagesRef.current = messagesRef.current - .filter( - (currentMessage) => - !("id" in currentMessage) || - currentMessage.id !== finalMessage.id, - ) - .concat(finalMessage.choices.flatMap((choice) => normalizeMessage(choice.message, finalizedMessage as unknown as (ChatCompletionChunk | undefined)))) - - - const userChatMessages = mapAIMessagesToChatCompletions( - messagesRef.current, - ) - options.onMessagesChange?.(userChatMessages) - setMessagesState(messagesRef.current) - } - - const onChunk = (chunk: ChatCompletionChunk) => { - options.onChunk?.(chunk) - - const combinedMessages = combineAIMessageChunkWithCompleteMessages( - messagesRef.current, - chunk, - ) - const mappedAiToChatCompletions = - mapAIMessagesToChatCompletions(combinedMessages) - options.onMessagesChange?.(mappedAiToChatCompletions) - - setMessagesState(combinedMessages) - } - - const onContent = (delta: string) => { - options.onText?.(delta) - } - - const onMessage = (message: ChatCompletionMessageParam) => { - const { onToolCall } = options - if ( - message.role === "assistant" && - message.tool_calls && - onToolCall - ) { - toolRunRef.current = true - Promise.all( - message.tool_calls.map((toolCall) => - Promise.resolve(onToolCall(toolCall, message)).then( - (content) => ({ - role: "tool" as const, - tool_call_id: toolCall.id, - content, - }) - ), - ), - ).then((toolMessages) => { - const nextMessages = [...addMessages(toolMessages)] - messagesRef.current = [] - return send(nextMessages as P, optional) - }, (error) => { - setErrorState(error) - options.onError?.(error) - }) - } - } - - const onEnd = () => { - abortControllerRef.current = null - if 
(toolRunRef.current) { - toolRunRef.current = false - return - } - - endedRef.current = true - setIsLoadingState(false) - options.onEnd?.(mapAIMessagesToChatCompletions(messagesRef.current)) - } - - - const runner = chatStreamToRunner(maybeStream) - - const destroyRunner = () => { - runner.off("connect", onConnect) - runner.off("finalChatCompletion", onFinalChatCompletion) - runner.off("content", onContent) - runner.off("message", onMessage) - runner.off("chunk", onChunk) - runner.off("end", onEnd) - runner.off("abort", onAbort) - - runnerRef.current = defaultReturn - } - - const onAbort = () => { - options.onAbort?.() - destroyRunner() - } - - runner.on("connect", onConnect) - runner.on("content", onContent) - runner.on("chunk", onChunk) - runner.on("finalChatCompletion", onFinalChatCompletion) - runner.on("message", onMessage) - runner.on("end", onEnd) - runner.on("abort", onAbort) - - runnerRef.current = runner - if (abortControllerRef.current?.signal.aborted) { - runner.abort() - setIsLoadingState(false) - runnerRef.current = { abort: () => { } } - return - } - }, (error) => { - setIsLoadingState(false) - options.onError?.(error) - setErrorState(error) - }, - ) - }, - } -} diff --git a/src/schemas.ts b/src/schemas.ts index f9440f5..1ff96a9 100644 --- a/src/schemas.ts +++ b/src/schemas.ts @@ -8,7 +8,7 @@ import type { export interface ILangtailExtraProps { doNotRecord?: boolean threadId?: string - metadata?: Record + metadata?: Record } export type ChatCompletionsCreateParams = diff --git a/src/stream/index.ts b/src/stream/index.ts deleted file mode 100644 index 77362a4..0000000 --- a/src/stream/index.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { ChatCompletionStream } from 'openai/lib/ChatCompletionStream'; -import { AssistantStream } from 'openai/lib/AssistantStream'; -import { Stream } from "openai/streaming" - - -export const chatStreamToRunner = (stream: ReadableStream): ChatCompletionStream => { - return ChatCompletionStream.fromReadableStream(stream); -} -export const assistantStreamToRunner = (stream: ReadableStream): AssistantStream => { - return AssistantStream.fromReadableStream(stream); -} - -export const readableStreamFromSSEResponse = (reponse: Response, controller?: AbortController): ReadableStream => { - return Stream.fromSSEResponse(reponse, controller || new AbortController()).toReadableStream(); -} - -export type { - ChatCompletionStream, - AssistantStream, - Stream -} diff --git a/src/vercel-ai/get-response-metadata.ts b/src/vercel-ai/get-response-metadata.ts new file mode 100644 index 0000000..fdce2f5 --- /dev/null +++ b/src/vercel-ai/get-response-metadata.ts @@ -0,0 +1,15 @@ +export function getResponseMetadata({ + id, + model, + created, +}: { + id?: string | undefined | null; + created?: number | undefined | null; + model?: string | undefined | null; +}) { + return { + id: id ?? undefined, + modelId: model ?? undefined, + timestamp: created != null ? new Date(created * 1000) : undefined, + }; +} \ No newline at end of file diff --git a/src/vercel-ai/langtail-chat-settings.ts b/src/vercel-ai/langtail-chat-settings.ts index ff328ef..bdcf27e 100644 --- a/src/vercel-ai/langtail-chat-settings.ts +++ b/src/vercel-ai/langtail-chat-settings.ts @@ -15,4 +15,5 @@ type LangtailChatSettingsBase

= u
 
 export type LangtailChatSettings<P extends PromptSlug, E extends Environment<P>

= undefined, V extends Version = undefined> = LangtailChatSettingsBase & ILangtailExtraProps & OpenAiBodyType & { variables?: Variables + structuredOutputs?: boolean } \ No newline at end of file diff --git a/src/vercel-ai/langtail-language-model.ts b/src/vercel-ai/langtail-language-model.ts index 22072ab..3c564ca 100644 --- a/src/vercel-ai/langtail-language-model.ts +++ b/src/vercel-ai/langtail-language-model.ts @@ -1,8 +1,10 @@ import { InvalidResponseDataError, LanguageModelV1, + LanguageModelV1CallWarning, LanguageModelV1FinishReason, LanguageModelV1LogProbs, + LanguageModelV1ProviderMetadata, LanguageModelV1StreamPart, } from '@ai-sdk/provider'; import { @@ -10,20 +12,22 @@ import { createEventSourceResponseHandler, createJsonResponseHandler, generateId, - isParseableJson, + isParsableJson, postJsonToApi, } from '@ai-sdk/provider-utils'; import { z } from 'zod'; import { convertToOpenAIChatMessages } from './convert-to-openai-chat-messages'; import { mapOpenAIFinishReason } from './map-openai-finish-reason'; import { LangtailChatSettings } from './langtail-chat-settings'; -import { openaiFailedResponseHandler } from './openai-error'; +import { openaiErrorDataSchema, openaiFailedResponseHandler } from './openai-error'; import { mapOpenAIChatLogProbsOutput } from './map-openai-chat-logprobs'; import { LangtailPrompts } from '../Langtail'; import { ChatCompletionCreateParamsBase } from 'openai/resources/chat/completions'; import { FunctionParameters } from 'openai/resources'; import type { PromptSlug, Environment, Version, LangtailEnvironment } from '../types'; import { ILangtailExtraProps } from '../schemas'; +import { getResponseMetadata } from './get-response-metadata'; +import { prepareTools } from './openai-prepare-tools'; type LangtailChatConfig = { provider: string; @@ -37,7 +41,7 @@ const MODEL_IN_LANGTAIL = 'langtail'; export class LangtailChatLanguageModel

<P extends PromptSlug, E extends Environment<P> = undefined, V extends Version<P, E> = undefined> implements LanguageModelV1 {
   readonly specificationVersion: 'v1' = 'v1';
-  readonly defaultObjectGenerationMode = 'tool';
+  readonly supportsImageUrls = true;
 
   readonly modelId: string;
   readonly promptId: P;
@@ -65,6 +69,19 @@ export class LangtailChatLanguageModel

-  }: Parameters<LanguageModelV1['doGenerate']>[0]): ChatCompletionCreateParamsBase & ILangtailExtraProps {
+    providerMetadata,
+  }: Parameters<LanguageModelV1['doGenerate']>[0]) {
     const type = mode.type;
 
+    const warnings: LanguageModelV1CallWarning[] = [];
+
+    if (topK != null) {
+      warnings.push({
+        type: 'unsupported-setting',
+        setting: 'topK',
+      });
+    }
+
+    if (
+      responseFormat?.type === 'json' &&
+      responseFormat.schema != null &&
+      !this.supportsStructuredOutputs
+    ) {
+      warnings.push({
+        type: 'unsupported-setting',
+        setting: 'responseFormat',
+        details:
+          'JSON response format schema is only supported with structuredOutputs',
+      });
+    }
+
     const baseArgs = {
       model: this.modelId,
@@ -101,6 +145,7 @@ export class LangtailChatLanguageModel

({ - type: 'function', - function: { - name: tool.name, - description: tool.description ?? "", - parameters: tool.parameters as FunctionParameters, - }, - })), + args: { + ...baseArgs, + tools, + tool_choice, + functions, + function_call, + }, + warnings: [...warnings, ...toolWarnings], }; } case 'object-json': { return { - ...baseArgs, - response_format: { type: 'json_object' }, + args: { + ...baseArgs, + response_format: + this.supportsStructuredOutputs && mode.schema != null + ? { + type: 'json_schema', + json_schema: { + schema: mode.schema, + strict: true, + name: mode.name ?? 'response', + description: mode.description, + }, + } + : { type: 'json_object' }, + }, + warnings, }; } case 'object-tool': { return { - ...baseArgs, - tool_choice: { type: 'function', function: { name: mode.tool.name } }, - tools: [ - { + args: { + ...baseArgs, + tool_choice: { type: 'function', - function: { - name: mode.tool.name, - description: mode.tool.description ?? "", - parameters: mode.tool.parameters as FunctionParameters, - }, + function: { name: mode.tool.name }, }, - ], + tools: [ + { + type: 'function', + function: { + name: mode.tool.name, + description: mode.tool.description, + parameters: mode.tool.parameters, + strict: this.supportsStructuredOutputs ? true : undefined, + }, + }, + ], + }, + warnings, }; } @@ -165,7 +258,7 @@ export class LangtailChatLanguageModel

   async doGenerate(
     options: Parameters<LanguageModelV1['doGenerate']>[0],
   ): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>> {
-    const args = this.getArgs(options);
+    const { args, warnings } = this.getArgs(options);
 
     const body = {
       ...args,
@@ -179,7 +272,7 @@ export class LangtailChatLanguageModel

-      toolCalls: choice.message.tool_calls?.map(toolCall => ({
-        toolCallType: 'function',
-        toolCallId: toolCall.id ?? generateId(),
-        toolName: toolCall.function.name,
-        args: toolCall.function.arguments!,
-      })),
+      toolCalls:
+        choice.message.tool_calls?.map(toolCall => ({
+          toolCallType: 'function',
+          toolCallId: toolCall.id ?? generateId(),
+          toolName: toolCall.function.name,
+          args: toolCall.function.arguments!,
+        })),
       finishReason: mapOpenAIFinishReason(choice.finish_reason),
       usage: {
-        promptTokens: response.usage.prompt_tokens,
-        completionTokens: response.usage.completion_tokens,
+        promptTokens: response.usage?.prompt_tokens ?? NaN,
+        completionTokens: response.usage?.completion_tokens ?? NaN,
       },
       rawCall: { rawPrompt, rawSettings },
       rawResponse: { headers: responseHeaders },
-      warnings: [],
+      request: { body: JSON.stringify(body) },
+      response: getResponseMetadata(response),
+      warnings,
       logprobs: mapOpenAIChatLogProbsOutput(choice.logprobs),
+      providerMetadata,
     };
   }
 
   async doStream(
     options: Parameters<LanguageModelV1['doStream']>[0],
   ): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>> {
-    const args = this.getArgs(options);
+    const { args, warnings } = this.getArgs(options);
 
     const body = {
       ...args,
@@ -238,15 +351,21 @@ export class LangtailChatLanguageModel

= []; - let finishReason: LanguageModelV1FinishReason = 'other'; - let usage: { promptTokens: number; completionTokens: number } = { - promptTokens: Number.NaN, - completionTokens: Number.NaN, + let finishReason: LanguageModelV1FinishReason = 'unknown'; + let usage: { + promptTokens: number | undefined; + completionTokens: number | undefined; + } = { + promptTokens: undefined, + completionTokens: undefined, }; let logprobs: LanguageModelV1LogProbs; + let isFirstChunk = true; + let providerMetadata: LanguageModelV1ProviderMetadata | undefined; return { stream: response.pipeThrough( new TransformStream< @@ -254,18 +373,56 @@ export class LangtailChatLanguageModel

({ transform(chunk, controller) { + // handle failed chunk parsing / validation: if (!chunk.success) { + finishReason = 'error'; controller.enqueue({ type: 'error', error: chunk.error }); return; } const value = chunk.value; + // handle error chunks: + if ('error' in value) { + finishReason = 'error'; + controller.enqueue({ type: 'error', error: value.error }); + return; + } + + if (isFirstChunk) { + isFirstChunk = false; + + controller.enqueue({ + type: 'response-metadata', + ...getResponseMetadata(value), + }); + } + if (value.usage != null) { usage = { - promptTokens: value.usage.prompt_tokens, - completionTokens: value.usage.completion_tokens, + promptTokens: value.usage.prompt_tokens ?? undefined, + completionTokens: value.usage.completion_tokens ?? undefined, }; + + const { + completion_tokens_details: completionTokenDetails, + prompt_tokens_details: promptTokenDetails, + } = value.usage; + + if ( + completionTokenDetails?.reasoning_tokens != null || + promptTokenDetails?.cached_tokens != null + ) { + providerMetadata = { openai: {} }; + if (completionTokenDetails?.reasoning_tokens != null) { + providerMetadata.openai.reasoningTokens = + completionTokenDetails?.reasoning_tokens; + } + if (promptTokenDetails?.cached_tokens != null) { + providerMetadata.openai.cachedPromptTokens = + promptTokenDetails?.cached_tokens; + } + } } const choice = value.choices[0]; @@ -295,8 +452,10 @@ export class LangtailChatLanguageModel

0) { + controller.enqueue({ + type: 'tool-call-delta', + toolCallType: 'function', + toolCallId: toolCall.id, + toolName: toolCall.function.name, + argsTextDelta: toolCall.function.arguments, + }); + } + + // check if tool call is complete + // (some providers send the full tool call in one chunk): + if (isParsableJson(toolCall.function.arguments)) { + controller.enqueue({ + type: 'tool-call', + toolCallType: 'function', + toolCallId: toolCall.id ?? generateId(), + toolName: toolCall.function.name, + args: toolCall.function.arguments, + }); + toolCall.hasFinished = true; + } + } + continue; } - // existing tool call, merge + // existing tool call, merge if not finished const toolCall = toolCalls[index]; + if (toolCall.hasFinished) { + continue; + } if (toolCallDelta.function?.arguments != null) { toolCall.function!.arguments += @@ -353,20 +547,19 @@ export class LangtailChatLanguageModel

; +export type OpenAIErrorData = z.infer; export const openaiFailedResponseHandler = createJsonErrorResponseHandler({ - errorSchema: openAIErrorDataSchema, + errorSchema: openaiErrorDataSchema, errorToMessage: data => data.error.message, -}); +}); \ No newline at end of file diff --git a/src/vercel-ai/openai-prepare-tools.ts b/src/vercel-ai/openai-prepare-tools.ts new file mode 100644 index 0000000..b00a59e --- /dev/null +++ b/src/vercel-ai/openai-prepare-tools.ts @@ -0,0 +1,161 @@ +import { + JSONSchema7, + LanguageModelV1, + LanguageModelV1CallWarning, + UnsupportedFunctionalityError, +} from '@ai-sdk/provider'; + +export function prepareTools({ + mode, + useLegacyFunctionCalling = false, + structuredOutputs, +}: { + mode: Parameters[0]['mode'] & { + type: 'regular'; + }; + useLegacyFunctionCalling: boolean | undefined; + structuredOutputs: boolean; +}): { + tools?: { + type: 'function'; + function: { + name: string; + description: string | undefined; + parameters: JSONSchema7; + strict?: boolean; + }; + }[]; + tool_choice?: + | 'auto' + | 'none' + | 'required' + | { type: 'function'; function: { name: string } }; + + // legacy support + functions?: { + name: string; + description: string | undefined; + parameters: JSONSchema7; + }[]; + function_call?: { name: string }; + + toolWarnings: LanguageModelV1CallWarning[]; +} { + // when the tools array is empty, change it to undefined to prevent errors: + const tools = mode.tools?.length ? mode.tools : undefined; + + const toolWarnings: LanguageModelV1CallWarning[] = []; + + if (tools == null) { + return { tools: undefined, tool_choice: undefined, toolWarnings }; + } + + const toolChoice = mode.toolChoice; + + if (useLegacyFunctionCalling) { + const openaiFunctions: Array<{ + name: string; + description: string | undefined; + parameters: JSONSchema7; + }> = []; + + for (const tool of tools) { + if (tool.type === 'provider-defined') { + toolWarnings.push({ type: 'unsupported-tool', tool }); + } else { + openaiFunctions.push({ + name: tool.name, + description: tool.description, + parameters: tool.parameters, + }); + } + } + + if (toolChoice == null) { + return { + functions: openaiFunctions, + function_call: undefined, + toolWarnings, + }; + } + + const type = toolChoice.type; + + switch (type) { + case 'auto': + case 'none': + case undefined: + return { + functions: openaiFunctions, + function_call: undefined, + toolWarnings, + }; + case 'required': + throw new UnsupportedFunctionalityError({ + functionality: 'useLegacyFunctionCalling and toolChoice: required', + }); + default: + return { + functions: openaiFunctions, + function_call: { name: toolChoice.toolName }, + toolWarnings, + }; + } + } + + const openaiTools: Array<{ + type: 'function'; + function: { + name: string; + description: string | undefined; + parameters: JSONSchema7; + strict: boolean | undefined; + }; + }> = []; + + for (const tool of tools) { + if (tool.type === 'provider-defined') { + toolWarnings.push({ type: 'unsupported-tool', tool }); + } else { + openaiTools.push({ + type: 'function', + function: { + name: tool.name, + description: tool.description, + parameters: tool.parameters, + strict: structuredOutputs ? 
true : undefined, + }, + }); + } + } + + if (toolChoice == null) { + return { tools: openaiTools, tool_choice: undefined, toolWarnings }; + } + + const type = toolChoice.type; + + switch (type) { + case 'auto': + case 'none': + case 'required': + return { tools: openaiTools, tool_choice: type, toolWarnings }; + case 'tool': + return { + tools: openaiTools, + tool_choice: { + type: 'function', + function: { + name: toolChoice.toolName, + }, + }, + toolWarnings, + }; + default: { + const _exhaustiveCheck: never = type; + throw new UnsupportedFunctionalityError({ + functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`, + }); + } + } +} \ No newline at end of file
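
For context on how the Vercel AI SDK integration consumes this new helper, the sketch below calls `prepareTools` with a hypothetical `get_weather` tool (echoing the tool used in the removed `useChatStream` tests). It is a minimal illustration, not part of the patch: the relative import reflects the file's location inside `src/vercel-ai/` (it is an internal module, not a public export), and the `mode` object is normally assembled by the AI SDK itself rather than written by hand.

```ts
import { prepareTools } from './openai-prepare-tools';

// Hypothetical input: a "regular" generation mode with one function tool.
const { tools, tool_choice, toolWarnings } = prepareTools({
  mode: {
    type: 'regular',
    tools: [
      {
        type: 'function',
        name: 'get_weather',
        description: 'Get the current weather for a location',
        parameters: {
          type: 'object',
          properties: { location: { type: 'string' } },
          required: ['location'],
        },
      },
    ],
    // force the model to call the weather tool
    toolChoice: { type: 'tool', toolName: 'get_weather' },
  },
  useLegacyFunctionCalling: false,
  structuredOutputs: true, // each function is emitted with strict: true
});

// tools        -> [{ type: 'function', function: { name: 'get_weather', ..., strict: true } }]
// tool_choice  -> { type: 'function', function: { name: 'get_weather' } }
// toolWarnings -> [] (only provider-defined tools produce warnings)
```

Setting `structuredOutputs: true` marks every function definition `strict: true`, which lines up with the new `structuredOutputs` flag in `LangtailChatSettings` and with the `json_schema` response-format branch added to `getArgs`, so strict schemas are applied consistently to both tool definitions and JSON outputs.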