chore: add system test for streaming
PiperOrigin-RevId: 592356730
sararob authored and copybara-github committed Dec 19, 2023
1 parent 3713903 commit 5fd33ff
Showing 1 changed file with 26 additions and 1 deletion.
system_test/end_to_end_sample_test.ts: 26 additions & 1 deletion
@@ -63,6 +63,10 @@ const generativeTextModel = vertex_ai.preview.getGenerativeModel({
  },
});

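// This model is created without a max_output_tokens limit, so the streamed
// response to a long prompt can arrive in multiple chunks (used by the
// streaming test below).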
const textModelNoOutputLimit = vertex_ai.preview.getGenerativeModel({
  model: 'gemini-pro',
});

const generativeVisionModel = vertex_ai.preview.getGenerativeModel({
  model: 'gemini-pro-vision',
});
@@ -158,7 +162,7 @@ describe('generateContentStream', () => {

describe('sendMessageStream', () => {
  beforeEach(() => {
-    jasmine.DEFAULT_TIMEOUT_INTERVAL = 10000;
+    jasmine.DEFAULT_TIMEOUT_INTERVAL = 30000;
  });
  it('should return a stream and populate history', async () => {
    const chat = generativeTextModel.startChat({});
@@ -177,6 +181,27 @@ describe('sendMessageStream', () => {
    );
    expect(chat.history.length).toBe(2);
  });
  it('should return chunks as they come in', async () => {
    const chat = textModelNoOutputLimit.startChat({});
    const chatInput1 = 'Tell me a story in 1000 words';
    const result1 = await chat.sendMessageStream(chatInput1);
    let firstChunkTimestamp = 0;
    let aggregatedResultTimestamp = 0;

    // To verify that streaming works correctly, we check that the aggregated
    // result arrives more than two seconds after the first chunk.
    const streamThreshold = 2000; // milliseconds

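    // Consume the stream, recording when the first chunk arrives.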
    for await (const item of result1.stream) {
      if (firstChunkTimestamp === 0) {
        firstChunkTimestamp = Date.now();
      }
    }
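    // result1.response resolves with the aggregated result only after the
    // stream has finished.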
    await result1.response;
    aggregatedResultTimestamp = Date.now();
    expect(aggregatedResultTimestamp - firstChunkTimestamp)
      .toBeGreaterThan(streamThreshold);
  });
});

describe('countTokens', () => {
