diff --git a/src/client.js b/src/client.js
index d821b55..8b26b0f 100644
--- a/src/client.js
+++ b/src/client.js
@@ -1,8 +1,12 @@
 let isNode = false;
+if (typeof window === 'undefined' || typeof globalThis.fetch === 'undefined') {
+  globalThis.fetch = (await import('node-fetch')).default;
+  isNode = true;
+}
 
 async function initializeFetch() {
-  if (typeof globalThis.fetch === "undefined") {
-    const nodeFetch = await import("node-fetch");
+  if (typeof globalThis.fetch === 'undefined') {
+    const nodeFetch = await import('node-fetch');
     fetch = nodeFetch.default;
     isNode = true;
   } else {
@@ -13,7 +17,7 @@ async function initializeFetch() {
 initializeFetch();
 
 const RETRY_STATUS_CODES = [429, 500, 502, 503, 504];
-const ENDPOINT = "https://api.mistral.ai";
+const ENDPOINT = 'https://api.mistral.ai';
 
 /**
  * MistralAPIError
@@ -27,9 +31,9 @@ class MistralAPIError extends Error {
    */
   constructor(message) {
     super(message);
-    this.name = "MistralAPIError";
+    this.name = 'MistralAPIError';
   }
-}
+};
 
 /**
  * MistralClient
@@ -48,7 +52,7 @@ class MistralClient {
     apiKey = process.env.MISTRAL_API_KEY,
     endpoint = ENDPOINT,
     maxRetries = 5,
-    timeout = 120
+    timeout = 120,
   ) {
     this.endpoint = endpoint;
     this.apiKey = apiKey;
@@ -64,16 +68,16 @@ class MistralClient {
    * @param {*} request
    * @return {Promise<*>}
    */
-  _request = async function (method, path, request) {
+  _request = async function(method, path, request) {
     const url = `${this.endpoint}/${path}`;
     const options = {
       method: method,
       headers: {
-        Accept: "application/json",
-        "Content-Type": "application/json",
-        Authorization: `Bearer ${this.apiKey}`,
+        'Accept': 'application/json',
+        'Content-Type': 'application/json',
+        'Authorization': `Bearer ${this.apiKey}`,
       },
-      body: method !== "get" ? JSON.stringify(request) : null,
+      body: method !== 'get' ? JSON.stringify(request) : null,
       timeout: this.timeout * 1000,
     };
 
@@ -93,11 +97,11 @@ class MistralClient {
       const decoder = new TextDecoder();
       while (true) {
         // Read from the stream
-        const { done, value } = await reader.read();
+        const {done, value} = await reader.read();
         // Exit if we're done
         if (done) return;
         // Else yield the chunk
-        yield decoder.decode(value, { stream: true });
+        yield decoder.decode(value, {stream: true});
       }
     } finally {
       reader.releaseLock();
@@ -112,31 +116,31 @@ class MistralClient {
           console.debug(
             `Retrying request on response status: ${response.status}`,
             `Response: ${await response.text()}`,
-            `Attempt: ${attempts + 1}`
+            `Attempt: ${attempts + 1}`,
           );
           // eslint-disable-next-line max-len
           await new Promise((resolve) =>
-            setTimeout(resolve, Math.pow(2, attempts + 1) * 500)
+            setTimeout(resolve, Math.pow(2, (attempts + 1)) * 500),
          );
         } else {
           throw new MistralAPIError(
             `HTTP error! status: ${response.status} ` +
-            `Response: \n${await response.text()}`
+            `Response: \n${await response.text()}`,
           );
         }
       } catch (error) {
         console.error(`Request failed: ${error.message}`);
-        if (error.name === "MistralAPIError") {
+        if (error.name === 'MistralAPIError') {
           throw error;
         }
 
         if (attempts === this.maxRetries - 1) throw error;
         // eslint-disable-next-line max-len
         await new Promise((resolve) =>
-          setTimeout(resolve, Math.pow(2, attempts + 1) * 500)
+          setTimeout(resolve, Math.pow(2, (attempts + 1)) * 500),
         );
       }
     }
 
-    throw new Error("Max retries reached");
+    throw new Error('Max retries reached');
   };
@@ -151,7 +155,7 @@ class MistralClient {
    * @param {*} safeMode
    * @return {Promise}
    */
-  _makeChatCompletionRequest = function (
+  _makeChatCompletionRequest = function(
     model,
     messages,
     temperature,
@@ -159,7 +163,7 @@ class MistralClient {
     topP,
     randomSeed,
     stream,
-    safeMode
+    safeMode,
   ) {
     return {
       model: model,
@@ -177,8 +181,8 @@ class MistralClient {
   /**
    * Returns a list of the available models
    * @return {Promise}
    */
-  listModels = async function () {
-    const response = await this._request("get", "v1/models");
+  listModels = async function() {
+    const response = await this._request('get', 'v1/models');
     return response;
   };
@@ -194,7 +198,7 @@ class MistralClient {
    * @param {*} safeMode whether to use safe mode, e.g. true
    * @return {Promise}
    */
-  chat = async function ({
+  chat = async function({
     model,
     messages,
     temperature,
@@ -211,12 +215,12 @@ class MistralClient {
       topP,
       randomSeed,
       false,
-      safeMode
+      safeMode,
     );
     const response = await this._request(
-      "post",
-      "v1/chat/completions",
-      request
+      'post',
+      'v1/chat/completions',
+      request,
     );
     return response;
   };
@@ -250,25 +254,25 @@ class MistralClient {
       topP,
       randomSeed,
       true,
-      safeMode
+      safeMode,
     );
     const response = await this._request(
-      "post",
-      "v1/chat/completions",
-      request
+      'post',
+      'v1/chat/completions',
+      request,
     );
 
-    let buffer = "";
+    let buffer = '';
     for await (const chunk of response) {
       buffer += chunk;
       let firstNewline;
-      while ((firstNewline = buffer.indexOf("\n")) !== -1) {
+      while ((firstNewline = buffer.indexOf('\n')) !== -1) {
         const chunkLine = buffer.substring(0, firstNewline);
         buffer = buffer.substring(firstNewline + 1);
-        if (chunkLine.startsWith("data:")) {
+        if (chunkLine.startsWith('data:')) {
           const json = chunkLine.substring(6).trim();
-          if (json !== "[DONE]") {
+          if (json !== '[DONE]') {
             yield JSON.parse(json);
           }
         }
       }
@@ -284,12 +288,12 @@ class MistralClient {
    * e.g. ['What is the best French cheese?']
    * @return {Promise}
    */
-  embeddings = async function ({ model, input }) {
+  embeddings = async function({model, input}) {
     const request = {
       model: model,
       input: input,
     };
-    const response = await this._request("post", "v1/embeddings", request);
+    const response = await this._request('post', 'v1/embeddings', request);
     return response;
   };
 }
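
For reviewers, a minimal usage sketch of the client after this change. It is not part of the patch, and it leans on assumptions that fall outside the hunks shown: that the module's default export is `MistralClient`, that the streaming generator whose body is diffed above is exposed as `chatStream`, and that the snippet runs as an ES module (the new top-level `await` in the fetch polyfill block requires ESM anyway).

```js
// Usage sketch only -- not part of the diff. `MistralClient` as default
// export and the `chatStream` name are assumptions; neither declaration
// appears in the hunks above.
import MistralClient from './src/client.js';

// apiKey defaults to process.env.MISTRAL_API_KEY (see the constructor hunk).
const client = new MistralClient();

// Non-streaming: POSTs to v1/chat/completions and resolves with parsed JSON.
const response = await client.chat({
  model: 'mistral-tiny',
  messages: [{role: 'user', content: 'What is the best French cheese?'}],
});
console.log(response.choices[0].message.content);

// Streaming: the generator diffed above buffers the SSE body, splits it on
// newlines, strips the `data:` prefix, and yields each parsed chunk until
// the `[DONE]` sentinel. Chunks follow the `choices[].delta` convention.
const stream = client.chatStream({
  model: 'mistral-tiny',
  messages: [{role: 'user', content: 'What is the best French cheese?'}],
});
for await (const chunk of stream) {
  process.stdout.write(chunk.choices?.[0]?.delta?.content ?? '');
}

// Embeddings: a single input or a batch, via v1/embeddings.
const embeddings = await client.embeddings({
  model: 'mistral-embed',
  input: ['What is the best French cheese?'],
});
console.log(embeddings.data?.[0]?.embedding?.length);
```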