diff --git a/src/apis/images.ts b/src/apis/images.ts
index 9792c3b..57164ec 100644
--- a/src/apis/images.ts
+++ b/src/apis/images.ts
@@ -8,7 +8,7 @@ import OpenAI from "openai";
 
 export interface ImagesBody {
   prompt: string;
-  model: (string & {}) | "dall-e-2" | "dall-e-3";
+  model?: (string & {}) | "dall-e-2" | "dall-e-3";
   n?: number | null;
   quality?: "standard" | "hd";
   response_format?: "url" | "b64_json" | null;
diff --git a/tests/assistants/openai.test.ts b/tests/assistants/openai.test.ts
new file mode 100644
index 0000000..52718fe
--- /dev/null
+++ b/tests/assistants/openai.test.ts
@@ -0,0 +1,26 @@
+import { config } from 'dotenv';
+import { Portkey } from 'portkey-ai';
+
+config({ override: true })
+const client = new Portkey({
+  apiKey: process.env["PORTKEY_API_KEY"] ?? "",
+  baseURL: "https://api.portkeydev.com/v1",
+  provider: "openai",
+  virtualKey: process.env["OPENAI_VIRTUAL_KEY"] ?? ""
+});
+
+describe('OpenAI Assistants APIs', () => {
+  test('assistant: create: documentation', async () => {
+    const myAssistant = await client.beta.assistants.create({
+      instructions:
+        "You are a personal math tutor. When asked a question, write and run Python code to answer the question.",
+      name: "Math Tutor",
+      tools: [{ type: "code_interpreter" }],
+      model: "gpt-4",
+    });
+    expect(myAssistant).toBeDefined();
+    expect(myAssistant.tools).toBeDefined();
+    expect(myAssistant.tools.length).toBeGreaterThan(0);
+  });
+
+});
\ No newline at end of file
diff --git a/tests/images/image.png b/tests/images/image.png
new file mode 100644
index 0000000..aea7d92
Binary files /dev/null and b/tests/images/image.png differ
diff --git a/tests/images/imageMask.png b/tests/images/imageMask.png
new file mode 100644
index 0000000..e231148
Binary files /dev/null and b/tests/images/imageMask.png differ
diff --git a/tests/images/openai.test.ts b/tests/images/openai.test.ts
new file mode 100644
index 0000000..9c37a78
--- /dev/null
+++ b/tests/images/openai.test.ts
@@ -0,0 +1,134 @@
+import { config } from "dotenv";
+import { Portkey } from "portkey-ai";
+import fs from "fs";
+import path from "path";
+
+config({ override: true });
+const client = new Portkey({
+  apiKey: process.env["PORTKEY_API_KEY"] ?? "",
+  baseURL: "https://api.portkeydev.com/v1",
+  provider: "openai",
+  virtualKey: process.env["OPENAI_VIRTUAL_KEY"] ?? ""
+});
+
+describe("Openai Images APIs", () => {
+  test("generate: only required params", async () => {
+    const response = await client.images.generate({
+      prompt: "A cute baby sea otter",
+    });
+    expect(response).toBeDefined();
+    expect(response.data).toBeDefined();
+    expect(response.data.length).toBeGreaterThan(0);
+  }, 120000);
+
+  test("generate: only required params with model", async () => {
+    const response = await client.images.generate({
+      model: "dall-e-3",
+      prompt: "A cute baby sea otter",
+    });
+    expect(response).toBeDefined();
+    expect(response.data).toBeDefined();
+    expect(response.data.length).toBeGreaterThan(0);
+  }, 120000);
+
+  test("createVariation: only required params", async () => {
+    const imagePath = path.join(__dirname, 'image.png');
+    const response = await client.images.createVariation({
+      image: fs.createReadStream(imagePath),
+    });
+    expect(response).toBeDefined();
+    expect(response.data).toBeDefined();
+    expect(response.data.length).toBeGreaterThan(0);
+  }, 120000);
+
+  test("edit: only required params", async () => {
+    const imagePath = path.join(__dirname, 'image.png');
+    const imageMaskPath = path.join(__dirname, 'imageMask.png');
+    const response = await client.images.edit({
+      image: fs.createReadStream(imagePath),
+      mask: fs.createReadStream(imageMaskPath),
+      prompt:"A cute baby sea otter wearing a beret"
+    });
+    expect(response).toBeDefined();
+    expect(response.data).toBeDefined();
+    expect(response.data.length).toBeGreaterThan(0);
+  }, 120000);
+
+
+  // test('model: gpt-4-1106-preview', async () => {
+  //   const completion = await client.chat.completions.create({ model: 'gpt-4-1106-preview', messages: [{ "role": "user", "content": "Say this is a test" }] });
+  //   expect(completion).toBeDefined();
+  //   expect(completion.choices).toBeDefined();
+  //   expect(completion.choices.length).toBeGreaterThan(0);
+  // });
+
+  // test('model: gpt-4', async () => {
+  //   const completion = await client.chat.completions.create({ model: 'gpt-4', messages: [{ "role": "user", "content": "Say this is a test" }] });
+  //   expect(completion).toBeDefined();
+  //   expect(completion.choices).toBeDefined();
+  //   expect(completion.choices.length).toBeGreaterThan(0);
+  // });
+
+  // test('model: gpt-4-0314', async () => {
+  //   const completion = await client.chat.completions.create({ model: 'gpt-4-0314', messages: [{ "role": "user", "content": "Say this is a test" }] });
+  //   expect(completion).toBeDefined();
+  //   expect(completion.choices).toBeDefined();
+  //   expect(completion.choices.length).toBeGreaterThan(0);
+  // });
+
+  // test('model: gpt-4-32k', async () => {
+  //   const completion = await client.chat.completions.create({ model: 'gpt-4-32k', messages: [{ "role": "user", "content": "Say this is a test" }] });
+  //   expect(completion).toBeDefined();
+  //   expect(completion.choices).toBeDefined();
+  //   expect(completion.choices.length).toBeGreaterThan(0);
+  // });
+
+  // test('model: gpt-4-32k-0314', async () => {
+  //   const completion = await client.chat.completions.create({ model: 'gpt-4-32k-0314', messages: [{ "role": "user", "content": "Say this is a test" }] });
+  //   expect(completion).toBeDefined();
+  //   expect(completion.choices).toBeDefined();
+  //   expect(completion.choices.length).toBeGreaterThan(0);
+  // });
+
+  // test('model: gpt-4-0613', async () => {
+  //   const completion = await client.chat.completions.create({ model: 'gpt-4-0613', messages: [{ "role": "user", "content": "Say this is a test" }] });
+  //   expect(completion).toBeDefined();
+  //   expect(completion.choices).toBeDefined();
+  //   expect(completion.choices.length).toBeGreaterThan(0);
+  // });
+
+  // test('model: gpt-3.5-turbo-0613', async () => {
+  //   const completion = await client.chat.completions.create({ model: 'gpt-3.5-turbo-0613', messages: [{ "role": "user", "content": "Say this is a test" }] });
+  //   expect(completion).toBeDefined();
+  //   expect(completion.choices).toBeDefined();
+  //   expect(completion.choices.length).toBeGreaterThan(0);
+  // });
+
+  // test('model: gpt-3.5-turbo-0301', async () => {
+  //   const completion = await client.chat.completions.create({ model: 'gpt-3.5-turbo-0301', messages: [{ "role": "user", "content": "Say this is a test" }] });
+  //   expect(completion).toBeDefined();
+  //   expect(completion.choices).toBeDefined();
+  //   expect(completion.choices.length).toBeGreaterThan(0);
+  // });
+
+  // test('model: gpt-3.5-turbo-1106', async () => {
+  //   const completion = await client.chat.completions.create({ model: 'gpt-3.5-turbo-1106', messages: [{ "role": "user", "content": "Say this is a test" }] });
+  //   expect(completion).toBeDefined();
+  //   expect(completion.choices).toBeDefined();
+  //   expect(completion.choices.length).toBeGreaterThan(0);
+  // });
+
+  // test('model: gpt-3.5-turbo-16k', async () => {
+  //   const completion = await client.chat.completions.create({ model: 'gpt-3.5-turbo-16k', messages: [{ "role": "user", "content": "Say this is a test" }] });
+  //   expect(completion).toBeDefined();
+  //   expect(completion.choices).toBeDefined();
+  //   expect(completion.choices.length).toBeGreaterThan(0);
+  // });
+
+  // test('model: gpt-3.5-turbo', async () => {
+  //   const completion = await client.chat.completions.create({ model: 'gpt-3.5-turbo', messages: [{ "role": "user", "content": "Say this is a test" }] });
+  //   expect(completion).toBeDefined();
+  //   expect(completion.choices).toBeDefined();
+  //   expect(completion.choices.length).toBeGreaterThan(0);
+  // });
+});