This repository has been archived by the owner on Oct 10, 2024. It is now read-only.

Release 0.2.0 #74

Merged
merged 1 commit on May 8, 2024
5 changes: 1 addition & 4 deletions .github/workflows/build_publish.yaml
@@ -14,7 +14,6 @@ on:
pull_request:

jobs:

lint_and_test:
runs-on: ubuntu-latest

@@ -76,6 +75,4 @@ jobs:
run: |
echo "//registry.npmjs.org/:_authToken=${{ secrets.NPM_TOKEN }}" >> .npmrc
npm version ${{ github.ref_name }}
sed -i 's/VERSION = '\''0.0.1'\''/VERSION = '\''${{ github.ref_name }}'\''/g' src/client.js
npm publish
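
The new sed step stamps the release tag into src/client.js at publish time, so the committed source keeps the 0.0.1 placeholder and only tagged builds carry a real version string. A minimal Node sketch of the same substitution (a hypothetical stand-in for the sed command, assuming client.js declares VERSION = '0.0.1' exactly as the pattern expects):

    // Hypothetical equivalent of the workflow's sed step, for illustration only.
    // Assumes GITHUB_REF_NAME carries the tag name, as it does inside GitHub Actions.
    import { readFileSync, writeFileSync } from "node:fs";

    const tag = process.env.GITHUB_REF_NAME ?? "0.0.0";
    const path = "src/client.js";
    const source = readFileSync(path, "utf8");
    // Same literal pattern the sed command rewrites: VERSION = '0.0.1'
    writeFileSync(path, source.replace(/VERSION = '0\.0\.1'/g, `VERSION = '${tag}'`));
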
4 changes: 2 additions & 2 deletions package-lock.json

Some generated files are not rendered by default.

6 changes: 4 additions & 2 deletions package.json
@@ -1,6 +1,6 @@
{
"name": "@mistralai/mistralai",
"version": "0.0.1",
"version": "0.2.0",
"description": "",
"author": "[email protected]",
"license": "ISC",
@@ -12,7 +12,9 @@
"test": "node --experimental-vm-modules node_modules/.bin/jest"
},
"jest": {
"testPathIgnorePatterns": ["examples"]
"testPathIgnorePatterns": [
"examples"
]
},
"repository": {
"type": "git",
333 changes: 172 additions & 161 deletions src/client.d.ts
@@ -1,162 +1,173 @@
(Removed lines: the previous client.d.ts declarations, identical in content to the new version below but formatted with single quotes, unwrapped signatures, and no trailing semicolons on class members.)
declare module "@mistralai/mistralai" {
export interface ModelPermission {
id: string;
object: "model_permission";
created: number;
allow_create_engine: boolean;
allow_sampling: boolean;
allow_logprobs: boolean;
allow_search_indices: boolean;
allow_view: boolean;
allow_fine_tuning: boolean;
organization: string;
group: string | null;
is_blocking: boolean;
}

export interface Model {
id: string;
object: "model";
created: number;
owned_by: string;
root: string | null;
parent: string | null;
permission: ModelPermission[];
}

export interface ListModelsResponse {
object: "list";
data: Model[];
}

export interface Function {
name: string;
description: string;
parameters: object;
}

export interface FunctionCall {
name: string;
arguments: string;
}

export interface ToolCalls {
id: string;
function: FunctionCall;
}

export interface ResponseFormat {
type: "json_object";
}

export interface TokenUsage {
prompt_tokens: number;
completion_tokens: number;
total_tokens: number;
}

export interface ChatCompletionResponseChoice {
index: number;
message: {
role: string;
content: string;
tool_calls: null | ToolCalls[];
};
finish_reason: string;
}

export interface ChatCompletionResponseChunkChoice {
index: number;
delta: {
role?: string;
content?: string;
tool_calls?: ToolCalls[];
};
finish_reason: string;
}

export interface ChatCompletionResponse {
id: string;
object: "chat.completion";
created: number;
model: string;
choices: ChatCompletionResponseChoice[];
usage: TokenUsage;
}

export interface ChatCompletionResponseChunk {
id: string;
object: "chat.completion.chunk";
created: number;
model: string;
choices: ChatCompletionResponseChunkChoice[];
usage: TokenUsage | null;
}

export interface Embedding {
id: string;
object: "embedding";
embedding: number[];
}

export interface EmbeddingResponse {
id: string;
object: "list";
data: Embedding[];
model: string;
usage: TokenUsage;
}

export interface Message {
role: string;
content: string | string[];
}

export interface Tool {
type: "function";
function: Function;
}

export interface ChatRequest {
model: string;
messages: Array<Message>;
tools?: Array<Tool>;
temperature?: number;
maxTokens?: number;
topP?: number;
randomSeed?: number;
/**
* @deprecated use safePrompt instead
*/
safeMode?: boolean;
safePrompt?: boolean;
toolChoice?: "auto" | "any" | "none";
responseFormat?: ResponseFormat;
}

export interface ChatRequestOptions {
signal?: AbortSignal;
}

class MistralClient {
apiKey: string;
endpoint: string;
maxRetries: number;
timeout: number;

constructor(
apiKey?: string,
endpoint?: string,
maxRetries?: number,
timeout?: number
);

listModels(): Promise<ListModelsResponse>;

chat(
request: ChatRequest,
options?: ChatRequestOptions
): Promise<ChatCompletionResponse>;

chatStream(
request: ChatRequest,
options?: ChatRequestOptions
): AsyncGenerator<ChatCompletionResponseChunk, void>;

embeddings(options: {
model: string;
input: string | string[];
}): Promise<EmbeddingResponse>;
}

export default MistralClient;
}
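
These declarations define the client's whole public surface: a default-exported MistralClient with listModels, chat, chatStream, and embeddings. A short usage sketch written against them; the environment variable and model names are illustrative assumptions, not part of this changeset:

    // Usage sketch against the declarations above (runs as an ES module,
    // which permits top-level await). Model names are illustrative.
    import MistralClient from "@mistralai/mistralai";

    const client = new MistralClient(process.env.MISTRAL_API_KEY);

    // Blocking completion: resolves to a ChatCompletionResponse.
    const response = await client.chat({
      model: "mistral-small-latest",
      messages: [{ role: "user", content: "Say hello." }],
    });
    console.log(response.choices[0].message.content);

    // Streaming: chatStream yields ChatCompletionResponseChunk values, and
    // ChatRequestOptions.signal allows cancelling the request mid-stream.
    const controller = new AbortController();
    for await (const chunk of client.chatStream(
      { model: "mistral-small-latest", messages: [{ role: "user", content: "Count to 10." }] },
      { signal: controller.signal },
    )) {
      process.stdout.write(chunk.choices[0].delta.content ?? "");
    }

    // Embeddings accept a single string or a batch.
    const embeddings = await client.embeddings({
      model: "mistral-embed",
      input: ["first text", "second text"],
    });
    console.log(embeddings.data[0].embedding.length);
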