diff --git a/.stats.yml b/.stats.yml
index fc5085b..7ae90a8 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,2 +1,2 @@
-configured_endpoints: 6
+configured_endpoints: 7
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/groqcloud%2Fgroqcloud-c28de228634e737a173375583a09eef5e0d7fa81fcdf7090d14d194e6ef4fdc5.yml
diff --git a/api.md b/api.md
index f25ec91..e1db3f3 100644
--- a/api.md
+++ b/api.md
@@ -1,3 +1,13 @@
+# Embeddings
+
+Types:
+
+- EmbeddingCreateResponse
+
+Methods:
+
+- client.embeddings.create({ ...params }) -> EmbeddingCreateResponse
+
# Chat
## Completions
diff --git a/src/index.ts b/src/index.ts
index b4a3dc6..95bad1e 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -116,6 +116,7 @@ export class Groq extends Core.APIClient {
this.apiKey = apiKey;
}
+ embeddings: API.Embeddings = new API.Embeddings(this);
chat: API.Chat = new API.Chat(this);
audio: API.Audio = new API.Audio(this);
models: API.Models = new API.Models(this);
@@ -177,6 +178,10 @@ export import fileFromPath = Uploads.fileFromPath;
export namespace Groq {
export import RequestOptions = Core.RequestOptions;
+ export import Embeddings = API.Embeddings;
+ export import EmbeddingCreateResponse = API.EmbeddingCreateResponse;
+ export import EmbeddingCreateParams = API.EmbeddingCreateParams;
+
export import Chat = API.Chat;
export import Audio = API.Audio;
diff --git a/src/resources/embeddings.ts b/src/resources/embeddings.ts
new file mode 100644
index 0000000..cd9adbe
--- /dev/null
+++ b/src/resources/embeddings.ts
@@ -0,0 +1,113 @@
+// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+import * as Core from '../core';
+import { APIResource } from '../resource';
+import * as EmbeddingsAPI from './embeddings';
+
+export class Embeddings extends APIResource {
+ /**
+ * Creates an embedding vector representing the input text.
+ */
+ create(
+ body: EmbeddingCreateParams,
+ options?: Core.RequestOptions,
+  ): Core.APIPromise<EmbeddingCreateResponse> {
+ return this._client.post('/openai/v1/embeddings', { body, ...options });
+ }
+}
+
+export interface EmbeddingCreateResponse {
+ /**
+ * The list of embeddings generated by the model.
+ */
+  data: Array<EmbeddingCreateResponse.Data>;
+
+ /**
+ * The name of the model used to generate the embedding.
+ */
+ model: string;
+
+ /**
+ * The object type, which is always "list".
+ */
+ object: 'list';
+
+ /**
+ * The usage information for the request.
+ */
+ usage: EmbeddingCreateResponse.Usage;
+}
+
+export namespace EmbeddingCreateResponse {
+ /**
+ * Represents an embedding vector returned by embedding endpoint.
+ */
+ export interface Data {
+ /**
+ * The embedding vector, which is a list of floats. The length of vector depends on
+ * the model as listed in the [embedding guide](/docs/guides/embeddings).
+ */
+    embedding: Array<number> | string;
+
+ /**
+ * The index of the embedding in the list of embeddings.
+ */
+ index: number;
+
+ /**
+ * The object type, which is always "embedding".
+ */
+ object: 'embedding';
+ }
+
+ /**
+ * The usage information for the request.
+ */
+ export interface Usage {
+ /**
+ * The number of tokens used by the prompt.
+ */
+ prompt_tokens: number;
+
+ /**
+ * The total number of tokens used by the request.
+ */
+ total_tokens: number;
+ }
+}
+
+export interface EmbeddingCreateParams {
+ /**
+ * Input text to embed, encoded as a string or array of tokens. To embed multiple
+ * inputs in a single request, pass an array of strings or array of token arrays.
+ * The input must not exceed the max input tokens for the model, cannot be an empty
+ * string, and any array must be 2048 dimensions or less.
+ */
+  input: string | Array<string> | Array<number> | Array<Array<number>>;
+
+ /**
+ * ID of the model to use.
+ */
+ model: string;
+
+ /**
+ * The number of dimensions to return the embeddings in.
+ */
+ dimensions?: number;
+
+ /**
+ * The format to return the embeddings in.
+ */
+ encoding_format?: 'float' | 'base64';
+
+ /**
+ * A unique identifier representing your end-user, which can help us monitor and
+ * detect abuse.
+ */
+ user?: string | null;
+}
+
+export namespace Embeddings {
+ export import EmbeddingCreateResponse = EmbeddingsAPI.EmbeddingCreateResponse;
+ export import EmbeddingCreateParams = EmbeddingsAPI.EmbeddingCreateParams;
+}
diff --git a/src/resources/index.ts b/src/resources/index.ts
index 3d61ce9..417fb8b 100644
--- a/src/resources/index.ts
+++ b/src/resources/index.ts
@@ -1,5 +1,6 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
export { Chat } from './chat/chat';
+export { EmbeddingCreateResponse, EmbeddingCreateParams, Embeddings } from './embeddings';
export { Model, ModelList, Models } from './models';
export { Translation, Audio } from './audio/audio';
diff --git a/tests/api-resources/embeddings.test.ts b/tests/api-resources/embeddings.test.ts
new file mode 100644
index 0000000..0528069
--- /dev/null
+++ b/tests/api-resources/embeddings.test.ts
@@ -0,0 +1,35 @@
+// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+import Groq from 'groq-sdk';
+import { Response } from 'node-fetch';
+
+const groq = new Groq({
+ apiKey: 'My API Key',
+ baseURL: process.env['TEST_API_BASE_URL'] ?? 'http://127.0.0.1:4010',
+});
+
+describe('resource embeddings', () => {
+ test('create: only required params', async () => {
+ const responsePromise = groq.embeddings.create({
+ input: 'The quick brown fox jumped over the lazy dog',
+ model: 'nomic-embed-text-v1.5',
+ });
+ const rawResponse = await responsePromise.asResponse();
+ expect(rawResponse).toBeInstanceOf(Response);
+ const response = await responsePromise;
+ expect(response).not.toBeInstanceOf(Response);
+ const dataAndResponse = await responsePromise.withResponse();
+ expect(dataAndResponse.data).toBe(response);
+ expect(dataAndResponse.response).toBe(rawResponse);
+ });
+
+ test('create: required and optional params', async () => {
+ const response = await groq.embeddings.create({
+ input: 'The quick brown fox jumped over the lazy dog',
+ model: 'nomic-embed-text-v1.5',
+ dimensions: 1,
+ encoding_format: 'float',
+ user: 'string',
+ });
+ });
+});