From 56680e4d37c9fcdbb2b23d4c9ecd0e41eaf5ec6c Mon Sep 17 00:00:00 2001
From: stainless-bot
Date: Fri, 8 Mar 2024 07:23:06 +0000
Subject: [PATCH] feat: OpenAPI spec update via Stainless API

---
 CONTRIBUTING.md                   |  4 +---
 README.md                         | 25 ++++++++++++---------
 src/core.ts                       | 16 -------------
 src/lib/chat_completions_ext.ts   |  4 ++--
 src/resources/chat/completions.ts | 37 +++----------------------------
 5 files changed, 21 insertions(+), 65 deletions(-)

diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 0b403a9..4ed35f6 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -42,9 +42,7 @@ If you’d like to use the repository from source, you can either install from g
 To install via git:
 
 ```bash
-npm install --save git+ssh://git@github.com:groq/groq-typescript.git
-# or
-yarn add git+ssh://git@github.com:groq/groq-typescript.git
+npm install git+ssh://git@github.com:groq/groq-typescript.git
 ```
 
 Alternatively, to link a local copy of the repo:
diff --git a/README.md b/README.md
index de94f4a..9669449 100644
--- a/README.md
+++ b/README.md
@@ -9,10 +9,7 @@ The REST API documentation can be found [on console.groq.com](https://console.gr
 ## Installation
 
 ```sh
-# install from NPM
-npm install --save groq-sdk
-# or
-yarn add groq-sdk
+npm install groq-sdk
 ```
 
 ## Usage
@@ -80,7 +77,7 @@ async function main() {
       ],
       model: 'mixtral-8x7b-32768',
     })
-    .catch((err) => {
+    .catch(async (err) => {
       if (err instanceof Groq.APIError) {
         console.log(err.status); // 400
         console.log(err.name); // BadRequestError
@@ -233,7 +230,7 @@ If you would like to disable or customize this behavior, for example to use the
 
 ```ts
 import http from 'http';
-import HttpsProxyAgent from 'https-proxy-agent';
+import { HttpsProxyAgent } from 'https-proxy-agent';
 
 // Configure the default for all requests:
 const groq = new Groq({
@@ -241,10 +238,18 @@ const groq = new Groq({
 });
 
 // Override per-request:
-await groq.chat.completions.create({ messages: [{ role: 'system', content: 'You are a helpful assisstant.' }, { role: 'user', content: 'Explain the importance of low latency LLMs' }], model: 'mixtral-8x7b-32768' }, {
-  baseURL: 'http://localhost:8080/test-api',
-  httpAgent: new http.Agent({ keepAlive: false }),
-})
+await groq.chat.completions.create(
+  {
+    messages: [
+      { role: 'system', content: 'You are a helpful assisstant.' },
+      { role: 'user', content: 'Explain the importance of low latency LLMs' },
+    ],
+    model: 'mixtral-8x7b-32768',
+  },
+  {
+    httpAgent: new http.Agent({ keepAlive: false }),
+  },
+);
 ```
 
 ## Semantic Versioning
diff --git a/src/core.ts b/src/core.ts
index 4e476b7..6875045 100644
--- a/src/core.ts
+++ b/src/core.ts
@@ -1,5 +1,4 @@
 import { VERSION } from './version';
-import { Stream } from './lib/streaming';
 import {
   GroqError,
   APIError,
@@ -39,19 +38,6 @@ type APIResponseProps = {
 
 async function defaultParseResponse<T>(props: APIResponseProps): Promise<T> {
   const { response } = props;
-  if (props.options.stream) {
-    debug('response', response.status, response.url, response.headers, response.body);
-
-    // Note: there is an invariant here that isn't represented in the type system
-    // that if you set `stream: true` the response type must also be `Stream<T>`
-
-    if (props.options.__streamClass) {
-      return props.options.__streamClass.fromSSEResponse(response, props.controller) as any;
-    }
-
-    return Stream.fromSSEResponse(response, props.controller) as any;
-  }
-
   // fetch refuses to read the body when the status code is 204.
   if (response.status === 204) {
     return null as T;
@@ -750,7 +736,6 @@ export type RequestOptions<Req = unknown | Record<string, unknown> | Readable> =
   idempotencyKey?: string;
 
   __binaryResponse?: boolean | undefined;
-  __streamClass?: typeof Stream;
 };
 
 // This is required so that we can determine if a given object matches the RequestOptions
@@ -771,7 +756,6 @@ const requestOptionsKeys: KeysEnum<RequestOptions> = {
   idempotencyKey: true,
 
   __binaryResponse: true,
-  __streamClass: true,
 };
 
 export const isRequestOptions = (obj: unknown): obj is RequestOptions => {
diff --git a/src/lib/chat_completions_ext.ts b/src/lib/chat_completions_ext.ts
index b8c175d..a617a18 100644
--- a/src/lib/chat_completions_ext.ts
+++ b/src/lib/chat_completions_ext.ts
@@ -1,5 +1,5 @@
 // Manually curated models for streaming chat completions.
-import { ChatCompletion } from '../resources/chat'
+import { ChatCompletion } from '../resources/chat';
 
 export interface ChatCompletionChunk {
   id: string;
@@ -74,7 +74,7 @@ export namespace ChatCompletionChunk {
     id?: string;
     usage?: ChatCompletion.Usage;
     error?: string;
-  }
+  };
 }
 
 export interface ChatCompletionTokenLogprob {
diff --git a/src/resources/chat/completions.ts b/src/resources/chat/completions.ts
index 7bf78a4..81b6bc8 100644
--- a/src/resources/chat/completions.ts
+++ b/src/resources/chat/completions.ts
@@ -3,32 +3,13 @@
 import * as Core from 'groq-sdk/core';
 import { APIResource } from 'groq-sdk/resource';
 import * as CompletionsAPI from 'groq-sdk/resources/chat/completions';
-import { Stream } from 'groq-sdk/lib/streaming';
-import { ChatCompletionChunk } from 'groq-sdk/lib/chat_completions_ext';
 
 export class Completions extends APIResource {
   /**
    * Creates a completion for a chat prompt
    */
-  create(
-    body: ChatCompletionCreateParamsNonStreaming,
-    options?: Core.RequestOptions,
-  ): Core.APIPromise<ChatCompletion>;
-  create(
-    body: ChatCompletionCreateParamsStreaming,
-    options?: Core.RequestOptions,
-  ): Core.APIPromise<Stream<ChatCompletionChunk>>;
-  create(
-    body: ChatCompletionCreateParamsBase,
-    options?: Core.RequestOptions,
-  ): Core.APIPromise<Stream<ChatCompletionChunk> | ChatCompletion>;
-  create(
-    body: ChatCompletionCreateParams,
-    options?: Core.RequestOptions,
-  ): Core.APIPromise<ChatCompletion> | Core.APIPromise<Stream<ChatCompletionChunk>> {
-    return this._client.post('/openai/v1/chat/completions', { body, ...options, stream: body.stream ?? false }) as
-      | Core.APIPromise<ChatCompletion>
-      | Core.APIPromise<Stream<ChatCompletionChunk>>;
+  create(body: CompletionCreateParams, options?: Core.RequestOptions): Core.APIPromise<ChatCompletion> {
+    return this._client.post('/openai/v1/chat/completions', { body, ...options });
   }
 }
 
@@ -130,7 +111,7 @@ export namespace ChatCompletion {
   }
 }
 
-export interface ChatCompletionCreateParamsBase {
+export interface CompletionCreateParams {
   messages: Array<CompletionCreateParams.Message>;
 
   model: string;
@@ -254,15 +235,3 @@ export namespace Completions {
   export import ChatCompletion = CompletionsAPI.ChatCompletion;
   export import CompletionCreateParams = CompletionsAPI.CompletionCreateParams;
 }
-
-export interface ChatCompletionCreateParamsNonStreaming extends ChatCompletionCreateParamsBase {
-  stream?: false;
-}
-
-export interface ChatCompletionCreateParamsStreaming extends ChatCompletionCreateParamsBase {
-  stream: true;
-}
-
-export type ChatCompletionCreateParams =
-  | ChatCompletionCreateParamsNonStreaming
-  | ChatCompletionCreateParamsStreaming;