Skip to content
This repository has been archived by the owner on Oct 10, 2024. It is now read-only.

Commit

Permalink
don't override global fetch & fix tests
Browse files Browse the repository at this point in the history
  • Loading branch information
AntoineAtMistral committed Apr 26, 2024
1 parent 0d5af64 commit 67a6d8d
Show file tree
Hide file tree
Showing 2 changed files with 18 additions and 16 deletions.
16 changes: 9 additions & 7 deletions src/client.js
Original file line number Diff line number Diff line change
Expand Up @@ -9,17 +9,19 @@ const ENDPOINT = 'https://api.mistral.ai';
* @return {Promise<void>}
*/
/**
 * Resolve the fetch implementation to use without clobbering the global
 * `fetch` binding: in browsers (and Node versions that ship a native
 * fetch) reuse `globalThis.fetch`; otherwise lazily load `node-fetch`.
 * The chosen implementation is published on `globalThis.mistralFetch`
 * so the client never overrides the platform's own `fetch`.
 *
 * Side effects: sets the module-level `isNode` flag (declared earlier in
 * this file) and assigns `globalThis.mistralFetch`.
 *
 * @return {Promise<void>}
 */
async function initializeFetch() {
  // `window` is only defined in browsers; its absence means Node.
  if (typeof window === 'undefined') {
    isNode = true;
  }

  if (typeof globalThis.fetch === 'undefined') {
    // No native fetch (older Node): fall back to the node-fetch package.
    const nodeFetch = await import('node-fetch');
    globalThis.mistralFetch = nodeFetch.default;
  } else {
    // Native fetch available — reuse it rather than replacing it.
    globalThis.mistralFetch = globalThis.fetch;
  }
}

// Top-level await ensures mistralFetch is ready before the client is used.
await initializeFetch();

/**
* MistralAPIError
Expand Down Expand Up @@ -90,7 +92,7 @@ class MistralClient {

for (let attempts = 0; attempts < this.maxRetries; attempts++) {
try {
const response = await fetch(url, options);
const response = await globalThis.mistralFetch(url, options);

if (response.ok) {
if (request?.stream) {
Expand Down
18 changes: 9 additions & 9 deletions tests/client.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ describe('Mistral Client', () => {
it('should return a chat response object', async() => {
// Mock the fetch function
const mockResponse = mockChatResponsePayload();
globalThis.fetch = mockFetch(200, mockResponse);
globalThis.mistralFetch = mockFetch(200, mockResponse);

const response = await client.chat({
model: 'mistral-small',
Expand All @@ -37,7 +37,7 @@ describe('Mistral Client', () => {
it('should return a chat response object if safeMode is set', async() => {
// Mock the fetch function
const mockResponse = mockChatResponsePayload();
globalThis.fetch = mockFetch(200, mockResponse);
globalThis.mistralFetch = mockFetch(200, mockResponse);

const response = await client.chat({
model: 'mistral-small',
Expand All @@ -55,7 +55,7 @@ describe('Mistral Client', () => {
it('should return a chat response object if safePrompt is set', async() => {
// Mock the fetch function
const mockResponse = mockChatResponsePayload();
globalThis.fetch = mockFetch(200, mockResponse);
globalThis.mistralFetch = mockFetch(200, mockResponse);

const response = await client.chat({
model: 'mistral-small',
Expand All @@ -75,7 +75,7 @@ describe('Mistral Client', () => {
it('should return parsed, streamed response', async() => {
// Mock the fetch function
const mockResponse = mockChatResponseStreamingPayload();
globalThis.fetch = mockFetchStream(200, mockResponse);
globalThis.mistralFetch = mockFetchStream(200, mockResponse);

const response = await client.chatStream({
model: 'mistral-small',
Expand All @@ -98,7 +98,7 @@ describe('Mistral Client', () => {
it('should return parsed, streamed response with safeMode', async() => {
// Mock the fetch function
const mockResponse = mockChatResponseStreamingPayload();
globalThis.fetch = mockFetchStream(200, mockResponse);
globalThis.mistralFetch = mockFetchStream(200, mockResponse);

const response = await client.chatStream({
model: 'mistral-small',
Expand All @@ -122,7 +122,7 @@ describe('Mistral Client', () => {
it('should return parsed, streamed response with safePrompt', async() => {
// Mock the fetch function
const mockResponse = mockChatResponseStreamingPayload();
globalThis.fetch = mockFetchStream(200, mockResponse);
globalThis.mistralFetch = mockFetchStream(200, mockResponse);

const response = await client.chatStream({
model: 'mistral-small',
Expand All @@ -148,7 +148,7 @@ describe('Mistral Client', () => {
it('should return embeddings', async() => {
// Mock the fetch function
const mockResponse = mockEmbeddingResponsePayload();
globalThis.fetch = mockFetch(200, mockResponse);
globalThis.mistralFetch = mockFetch(200, mockResponse);

const response = await client.embeddings(mockEmbeddingRequest);
expect(response).toEqual(mockResponse);
Expand All @@ -159,7 +159,7 @@ describe('Mistral Client', () => {
it('should return batched embeddings', async() => {
// Mock the fetch function
const mockResponse = mockEmbeddingResponsePayload(10);
globalThis.fetch = mockFetch(200, mockResponse);
globalThis.mistralFetch = mockFetch(200, mockResponse);

const response = await client.embeddings(mockEmbeddingRequest);
expect(response).toEqual(mockResponse);
Expand All @@ -170,7 +170,7 @@ describe('Mistral Client', () => {
it('should return a list of models', async() => {
// Mock the fetch function
const mockResponse = mockListModels();
globalThis.fetch = mockFetch(200, mockResponse);
globalThis.mistralFetch = mockFetch(200, mockResponse);

const response = await client.listModels();
expect(response).toEqual(mockResponse);
Expand Down

0 comments on commit 67a6d8d

Please sign in to comment.