Skip to content

Commit

Permalink
add gemini
Browse files Browse the repository at this point in the history
  • Loading branch information
VictorS67 committed Dec 28, 2023
1 parent 41fb099 commit 71e8ffa
Show file tree
Hide file tree
Showing 18 changed files with 2,416 additions and 95 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -47,4 +47,4 @@ jobs:
- name: Install dependencies
run: yarn install --immutable --network-timeout 500000
- name: Test
run: yarn workspace encre-core run test:single
run: yarn workspace encre-core run test
5 changes: 4 additions & 1 deletion packages/core/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
"clean": "rimraf build/ && node scripts/create-entrypoints.js pre",
"lint": "eslint src/**/*.ts",
"lint-fix": "eslint --fix src/**/*.ts",
"test": "jest --testPathIgnorePatterns=\\.int\\.test.ts --testTimeout 30000 --maxWorkers=50%",
"test": "jest --testPathIgnorePatterns=\\.int\\.test.ts --testTimeout 30000 --maxWorkers=50% --silent",
"test:single": "yarn run jest --config jest.config.cjs --testTimeout 100000 --silent"
},
"packageManager": "[email protected]",
Expand Down Expand Up @@ -47,10 +47,13 @@
"typedoc": "^0.25.2"
},
"dependencies": {
"@google-cloud/aiplatform": "^3.9.0",
"@google/generative-ai": "^0.1.3",
"@types/object-hash": "^3.0.5",
"@types/pdf-parse": "^1.1.2",
"camelcase": "^8.0.0",
"decamelize": "^6.0.0",
"google-auth-library": "^9.4.1",
"js-tiktoken": "^1.0.7",
"langchain": "^0.0.149",
"nanoid": "^5.0.2",
Expand Down
Original file line number Diff line number Diff line change
@@ -1,26 +1,28 @@
import { TiktokenModel } from 'js-tiktoken/lite';
import { BaseCache } from '../../../../cache/base.js';
import { MemoryCache } from '../../../../cache/index.js';
import { type CallableConfig } from '../../../../record/callable.js';
import { BaseCache } from '../../../cache/base.js';
import { MemoryCache } from '../../../cache/index.js';
import { type CallableConfig } from '../../../record/callable.js';
import {
AsyncCallError,
AsyncCaller,
baseFailedAttemptHandler,
type AsyncCallerParams,
} from '../../../../utils/asyncCaller.js';
} from '../../../utils/asyncCaller.js';
import {
encodingForModel,
getNumTokens,
getTiktokenModel,
} from '../../../../utils/tokenizer.js';
import { BaseEvent, BaseEventParams } from '../../../base.js';
} from '../../../utils/tokenizer.js';
import { BaseEvent, BaseEventParams } from '../../base.js';
import {
type BaseMessageLike,
convertMessageLikeToMessage,
BaseMessage,
} from '../../../input/load/msgs/base.js';
import { BasePrompt, StringPrompt } from '../../../input/load/prompts/base.js';
import { ChatPrompt } from '../../../input/load/prompts/chat.js';
import { Generation } from '../../../output/provide/generation.js';
import { LLMResult } from '../../../output/provide/llmresult.js';
} from '../../input/load/msgs/base.js';
import { BasePrompt, StringPrompt } from '../../input/load/prompts/base.js';
import { ChatPrompt } from '../../input/load/prompts/chat.js';
import { Generation } from '../../output/provide/generation.js';
import { LLMResult } from '../../output/provide/llmresult.js';

export interface BaseLMCallOptions extends BaseEventParams {
/**
Expand Down Expand Up @@ -92,7 +94,9 @@ export abstract class BaseLM<
this.cache = undefined;
}

this.caller = new AsyncCaller(params ?? {});
this.caller = new AsyncCaller(
(params ?? {}) && { onFailedAttempt: this._failedAttemptHandler }
);
}

/**
Expand Down Expand Up @@ -193,6 +197,10 @@ export abstract class BaseLM<

return llmStrKey;
}

/**
 * Hook invoked when an async call attempt fails; delegates to the shared
 * `baseFailedAttemptHandler`. Subclasses may override to customize retry
 * behavior.
 *
 * NOTE(review): `e` is cast to `AsyncCallError` without a runtime check —
 * assumes the caller only ever passes errors produced by `AsyncCaller`;
 * confirm before relying on `AsyncCallError`-specific fields downstream.
 */
protected _failedAttemptHandler(e: Error) {
  const callError = e as AsyncCallError;
  baseFailedAttemptHandler(callError);
}
}

export interface BaseLLMCallOptions extends BaseLMCallOptions {}
Expand Down
8 changes: 8 additions & 0 deletions packages/core/src/events/inference/chat/index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
/**
 * Interface for tracking token usage in LLM/Chat LM API Calls.
 *
 * All fields are optional because not every provider reports every
 * counter in its response.
 */
export interface TokenUsage {
  /** Tokens generated by the model in the completion/response. */
  completionTokens?: number;
  /** Tokens consumed by the input prompt. */
  promptTokens?: number;
  /** Total tokens for the call (typically prompt + completion). */
  totalTokens?: number;
}
Loading

0 comments on commit 71e8ffa

Please sign in to comment.