Skip to content

Commit

Permalink
upgrade gemini-1.0 to gemini-1.5
Browse files Browse the repository at this point in the history
  • Loading branch information
VictorS67 committed Feb 5, 2025
1 parent 24477ed commit 5dbee5f
Show file tree
Hide file tree
Showing 7 changed files with 169 additions and 87 deletions.
1 change: 1 addition & 0 deletions packages/core/jest.global.env.ts
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
process.env.OPENAI_API_KEY = "you_should_get_this_api_from_openai";
process.env.DEEPSEEK_API_KEY = "you_should_get_this_api_from_deepseek";
process.env.MOONSHOT_API_KEY = "you_should_get_this_api_from_moonshot";
process.env.GOOGLE_API_KEY = "you_should_get_this_api_from_google";
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ export type GeminiChatParams =
* @example
* ```typescript
* const geminiAI = new GeminiChat({
* modelName: 'gemini-pro',
* modelName: 'gemini-1.5-pro',
* googleApiKey: 'your-api-key',
* });
* const message = new HumanMessage("Hello, world!");
Expand Down Expand Up @@ -103,9 +103,9 @@ export class GeminiChat<
}

/**
* ID of the model to use. `gemini-pro` or `gemini-pro-vision`
* ID of the model to use.
*/
modelName = 'gemini-pro';
modelName = 'gemini-1.5-flash';

/**
* The temperature is used for sampling during the response generation,
Expand All @@ -120,9 +120,6 @@ export class GeminiChat<
*
* Range: 0.0 - 1.0
*
* Default for gemini-pro: 0.9
*
* Default for gemini-pro-vision: 0.4
*/
temperature = 0.9;

Expand Down Expand Up @@ -159,9 +156,6 @@ export class GeminiChat<
*
* Range: 1-2048
*
* Default for gemini-pro: 2048
*
* Default for gemini-pro-vision: 4096
*/
maxOutputTokens = 2048;

Expand All @@ -186,9 +180,6 @@ export class GeminiChat<
*
* Range: 1 - 40
*
* Default for gemini-pro: none
*
* Default for gemini-pro-vision: 32
*/
topK: number;

Expand Down Expand Up @@ -251,7 +242,7 @@ export class GeminiChat<

if (!checkModelForGemini(this.modelName)) {
throw new Error(
'model is not valid for Gemini, please change it to `gemini-pro` or `gemini-pro-vision`'
'model is not valid for Gemini, please check official model versions website.'
);
}

Expand All @@ -260,23 +251,9 @@ export class GeminiChat<
this.temperature = 0.4;
this.topK = fields?.topK ?? 32;
this.maxOutputTokens = fields?.maxOutputTokens ?? 4096;

if (this.maxOutputTokens > 4096) {
console.warn(
'gemini-pro-vision does not support output token larger than 4096, now using 4096 as maxOutputTokens.'
);
this.maxOutputTokens = 4096;
}
} else {
this.topK = fields?.topK ?? 1;
this.maxOutputTokens = fields?.maxOutputTokens ?? 2048;

if (this.maxOutputTokens > 2048) {
console.warn(
'gemini-pro does not support output token larger than 2048, now using 2048 as maxOutputTokens.'
);
this.maxOutputTokens = 2048;
}
}

this.temperature = fields?.temperature ?? this.temperature;
Expand Down Expand Up @@ -363,7 +340,7 @@ export class GeminiChat<
* @example
* ```typescript
* const geminiChat = new GeminiChat({
* modelName: 'gemini-pro',
* modelName: 'gemini-1.5-pro',
* googleApiKey: 'your-api-key',
* });
* const messages = [new HumanMessage({ content: "Hello, world!" })];
Expand All @@ -383,7 +360,7 @@ export class GeminiChat<
)
) {
throw new Error(
'Message contains Image input but modality is not enabled for gemini-pro, please change it to gemini-pro-vision.'
'Message contains Image input but modality is not enabled for text model, please change it to vision model.'
);
}

Expand Down Expand Up @@ -619,7 +596,7 @@ export class GeminiChat<
* @example
* ```typescript
* const geminiChat = new GeminiChat({
* modelName: 'gemini-pro',
* modelName: 'gemini-1.5-pro',
* googleApiKey: 'your-api-key',
* });
*
Expand Down Expand Up @@ -719,19 +696,35 @@ export class GeminiChat<
* @returns The number of tokens in the model's context size.
* @example
* ```typescript
* const contextSize = GeminiChat.getModelContextSize('gemini-pro-vision');
* const contextSize = GeminiChat.getModelContextSize('gemini-1.0-pro-vision');
* console.log(contextSize); // Outputs: 16384
* ```
*/
static getModelContextSize(
modelName: 'gemini-pro' | 'gemini-pro-vision'
): number {
switch (modelName) {
case 'gemini-pro':
return 32768;
case 'gemini-pro-vision':
return 16384;
static getModelContextSize(modelName: string): number {
  // Reject names that are not recognized Gemini models at all.
  if (!checkModelForGemini(modelName)) {
    throw new Error(
      'model is not valid for Gemini, please check official model versions website.'
    );
  }

  // Check the more specific 'pro-vision' substring BEFORE the generic
  // '1.0-pro' / 'gemini-pro' checks: 'gemini-1.0-pro-vision' and
  // 'gemini-pro-vision' contain both substrings, and matching the generic
  // branch first would wrongly return 32760 instead of the documented 16384.
  if (modelName.includes('pro-vision')) {
    return 16384;
  }

  // All current flash variants (flash-thinking, 2.0-flash, 1.5-flash)
  // share a 1,048,576-token input context window.
  if (
    modelName.includes('flash-thinking') ||
    modelName.includes('2.0-flash') ||
    modelName.includes('1.5-flash')
  ) {
    return 1048576;
  }

  if (modelName.includes('1.5-pro')) {
    return 2097152;
  }

  if (modelName.includes('1.0-pro') || modelName.includes('gemini-pro')) {
    return 32760;
  }

  // Recognized by checkModelForGemini but with no published input limit.
  throw new Error("no official clarification on model's max input.");
}

/**
Expand All @@ -742,7 +735,7 @@ export class GeminiChat<
* @example
* ```typescript
* const geminiChat = new GeminiChat({
* modelName: 'gemini-pro',
* modelName: 'gemini-1.5-pro',
* googleApiKey: 'your-api-key',
* });
* const tokens = await geminiChat.getNumTokensInGenerations(generations);
Expand All @@ -766,7 +759,7 @@ export class GeminiChat<
* @example
* ```typescript
* const geminiChat = new GeminiChat({
* modelName: 'gemini-pro',
* modelName: 'gemini-1.5-pro',
* googleApiKey: 'your-api-key',
* });
* const messages = [new HumanMessage({ content: "Hello, world!" })];
Expand All @@ -790,7 +783,7 @@ export class GeminiChat<
* @example
* ```typescript
* const geminiChat = new GeminiChat({
* modelName: 'gemini-pro',
* modelName: 'gemini-1.5-pro',
* googleApiKey: 'your-api-key',
* });
* const contents = [
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,79 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`test Gemini simple text 1`] = `
{
"generations": [
{
"info": {
"citationMetadata": undefined,
"completion": 0,
"finishMessage": undefined,
"finishReason": "STOP",
"safetyRatings": undefined,
},
"output": "Elara wasn't expecting a magical backpack. She'd expected, at best, a slightly less-torn one for her first year at the prestigious Whispering Woods Academy for Arcane Arts. Instead, her grandmother, a woman whose smile lines mapped a lifetime of whispered secrets and potent spells, had presented her with a worn leather satchel, smelling faintly of cinnamon and ozone.

"This," Nana Elara said, her voice low and husky, "is a rucksack of possibilities. It holds more than meets the eye."

At first, it seemed ordinary. Elara crammed it with her spellbooks (which were alarmingly heavy), a chipped cauldron, and a half-eaten bag of enchanted gingerbread. But that evening, as she unpacked in her dorm room, she discovered the truth. She reached for her quill, expecting it to be nestled among her other supplies, but it wasn't there. Then, she felt a tickle in her pocket. Her quill, feather perfectly smooth, lay nestled beside a perfectly ripe plum.

The backpack was a bottomless marvel. It seemed to anticipate her needs. Missing parchment? A fresh stack appeared. Need a specific herb for a potion? It materialized, perfectly preserved. Low on ink? A vial of shimmering, self-replenishing ink magically surfaced. Even her perpetually empty snack bag – a source of constant frustration – was constantly refilled with Nana Elara’s delectable gingerbread.

However, the backpack had a mischievous streak. It wasn't just anticipating her needs; it seemed to have a sense of humor, albeit a rather chaotic one. Once, during a particularly challenging Charms lesson, Elara needed a specific type of glittering beetle wing. The backpack provided it…along with a small, giggling swarm of identical beetles that buzzed around the professor's head. Another time, needing a quiet place to study, she found herself magically transported – backpack and all – to the top of a giant oak, overlooking the entire academy.

This presented its challenges. During a crucial potion-making exam, the backpack decided to "help" by producing an alarming array of ingredients – including a squawking rubber chicken and a live, rather indignant, gnome – that delayed her significantly. Yet, it also provided unexpected solutions. When her rival, the notoriously skilled Zephyr, accidentally set her cauldron ablaze, the backpack conjured a miniature rain cloud to extinguish the flames, leaving Zephyr sputtering and Elara triumphant.

By the end of the year, Elara had learned to work with her unpredictable companion. She understood that the backpack wasn't just a magical tool; it was a partner, a chaotic, unpredictable yet ultimately helpful one. It was a testament to her grandmother's love, a reminder that sometimes, the most unexpected gifts hold the greatest magic, even if they occasionally lead to a little controlled chaos. And as for the gnome? He became a surprisingly helpful study buddy.
",
},
],
"llmOutput": {
"tokenUsage": {
"completionTokens": 617,
"promptTokens": 8,
"totalTokens": 625,
},
},
}
`;

exports[`test Gemini text 1`] = `
{
"generations": [
{
"info": {
"citationMetadata": undefined,
"completion": 0,
"finishMessage": undefined,
"finishReason": "STOP",
"safetyRatings": [
{
"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT",
"probability": "NEGLIGIBLE",
},
{
"category": "HARM_CATEGORY_HATE_SPEECH",
"probability": "NEGLIGIBLE",
},
{
"category": "HARM_CATEGORY_HARASSMENT",
"probability": "NEGLIGIBLE",
},
{
"category": "HARM_CATEGORY_DANGEROUS_CONTENT",
"probability": "NEGLIGIBLE",
},
],
},
"output": "AI: I am Gemini, a multi-modal AI language model developed by Google. I am designed to help and inform people to the best of my abilities.",
},
],
"llmOutput": {
"tokenUsage": {
"completionTokens": 33,
"promptTokens": 9,
"totalTokens": 42,
},
},
}
`;
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ const GOOGLE_API_KEY = process.env.GOOGLE_API_KEY;
test('test GeminiChat text', async () => {
const gemini = new GeminiChat({
googleApiKey: GOOGLE_API_KEY,
modelName: 'gemini-pro',
modelName: 'gemini-1.5-pro',
});

const llmResult = await gemini.invoke([
Expand All @@ -24,7 +24,7 @@ test('test GeminiChat text', async () => {
test('test GeminiChat vision', async () => {
const gemini = new GeminiChat({
googleApiKey: GOOGLE_API_KEY,
modelName: 'gemini-pro-vision',
modelName: 'gemini-1.0-pro-vision',
});

const filePath: string = path.resolve(
Expand Down Expand Up @@ -54,7 +54,7 @@ test('test GeminiChat vision', async () => {
test('test GeminiChat sexual violation', async () => {
const gemini = new GeminiChat({
googleApiKey: GOOGLE_API_KEY,
modelName: 'gemini-pro',
modelName: 'gemini-1.5-flash',
});

expect(
Expand All @@ -73,5 +73,5 @@ test('test GeminiChat sexual violation', async () => {
],
}
)
).rejects.toThrow('The message is blocked because of SAFETY');
});
).rejects.toThrow('No candidates from Gemini response.');
});
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ const GOOGLE_API_KEY = process.env.GOOGLE_API_KEY;
test('test Gemini simple text', async () => {
const gemini = new Gemini({
googleApiKey: GOOGLE_API_KEY,
modelName: 'gemini-pro',
modelName: 'gemini-1.5-flash',
streaming: false,
});

Expand All @@ -23,7 +23,7 @@ test('test Gemini simple text', async () => {
test('test Gemini text', async () => {
const gemini = new Gemini({
googleApiKey: GOOGLE_API_KEY,
modelName: 'gemini-pro',
modelName: 'gemini-1.0-pro',
});

const llmResult = await gemini.invoke([
Expand All @@ -36,7 +36,7 @@ test('test Gemini text', async () => {
test('test Gemini sexual violation', async () => {
const gemini = new Gemini({
googleApiKey: GOOGLE_API_KEY,
modelName: 'gemini-pro',
modelName: 'gemini-1.5-flash',
});

expect(
Expand All @@ -51,5 +51,5 @@ test('test Gemini sexual violation', async () => {
],
}
)
).rejects.toThrow('The message is blocked because of SAFETY');
).rejects.toThrow('No candidates from Gemini response.');
});
Loading

0 comments on commit 5dbee5f

Please sign in to comment.