diff --git a/.changeset/funny-paws-visit.md b/.changeset/funny-paws-visit.md new file mode 100644 index 0000000..df6329f --- /dev/null +++ b/.changeset/funny-paws-visit.md @@ -0,0 +1,5 @@ +--- +"@google/generative-ai": patch +--- + +Make sure the chat API does not send an empty text request after encountering any server error that returns an empty response. This fixes issues #124 and #286. diff --git a/src/methods/chat-session.test.ts b/src/methods/chat-session.test.ts index 52354e5..c098210 100644 --- a/src/methods/chat-session.test.ts +++ b/src/methods/chat-session.test.ts @@ -22,6 +22,8 @@ import * as chaiAsPromised from "chai-as-promised"; import * as generateContentMethods from "./generate-content"; import { GenerateContentStreamResult } from "../../types"; import { ChatSession } from "./chat-session"; +import { getMockResponse } from "../../test-utils/mock-response"; +import * as request from "../requests/request"; use(sinonChai); use(chaiAsPromised); @@ -45,6 +47,15 @@ describe("ChatSession", () => { ); }); }); + describe("sendMessageRecitationErrorNotAddingResponseToHistory()", () => { + it("generateContent errors should be catchable", async () => { + const mockResponse = getMockResponse("unary-failure-citations.json"); + stub(request, "makeModelRequest").resolves(mockResponse as Response); + const chatSession = new ChatSession("MY_API_KEY", "a-model"); + await chatSession.sendMessage("hello"); + expect((await chatSession.getHistory()).length).equals(0); + }); + }); describe("sendMessageStream()", () => { it("generateContentStream errors should be catchable", async () => { const clock = useFakeTimers(); diff --git a/src/methods/chat-session.ts b/src/methods/chat-session.ts index 3c2664d..f926b66 100644 --- a/src/methods/chat-session.ts +++ b/src/methods/chat-session.ts @@ -110,7 +110,8 @@ export class ChatSession { .then((result) => { if ( result.response.candidates && - result.response.candidates.length > 0 + result.response.candidates.length > 0 && + 
result.response.candidates[0]?.content !== undefined ) { this._history.push(newContent); const responseContent: Content = { @@ -179,7 +180,11 @@ export class ChatSession { }) .then((streamResult) => streamResult.response) .then((response) => { - if (response.candidates && response.candidates.length > 0) { + if ( + response.candidates && + response.candidates.length > 0 && + response.candidates[0]?.content !== undefined + ) { this._history.push(newContent); const responseContent = { ...response.candidates[0].content }; // Response seems to come back without a role set. diff --git a/test-utils/mock-responses/unary-failure-citations.json b/test-utils/mock-responses/unary-failure-citations.json new file mode 100644 index 0000000..a73346d --- /dev/null +++ b/test-utils/mock-responses/unary-failure-citations.json @@ -0,0 +1,13 @@ +{ + "candidates": [ + { + "finishReason": "RECITATION", + "index": 0 + } + ], + "usageMetadata": { + "promptTokenCount": 18, + "totalTokenCount": 18 + }, + "modelVersion": "gemini-1.5-flash-001" +} \ No newline at end of file