
Commit

feat: added and tested few test cases
csgulati09 committed Feb 24, 2024
1 parent 56ea656 commit e469a69
Showing 5 changed files with 161 additions and 1 deletion.
2 changes: 1 addition & 1 deletion src/apis/images.ts
@@ -8,7 +8,7 @@ import OpenAI from "openai";

 export interface ImagesBody {
   prompt: string;
-  model: (string & {}) | "dall-e-2" | "dall-e-3";
+  model?: (string & {}) | "dall-e-2" | "dall-e-3";
   n?: number | null;
   quality?: "standard" | "hd";
   response_format?: "url" | "b64_json" | null;
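This change makes `model` optional on the ImagesBody request type, so callers of `client.images.generate` can omit it and let the request fall back to the provider's default model (dall-e-2 in OpenAI's case); the new image tests later in this commit exercise exactly that path. A minimal sketch of what this enables at the call site, not part of the commit and assuming a Portkey client configured the same way as in the test files below:

import { Portkey } from "portkey-ai";

// Sketch only: assumes PORTKEY_API_KEY and OPENAI_VIRTUAL_KEY are set, as in the tests.
const client = new Portkey({
  apiKey: process.env["PORTKEY_API_KEY"] ?? "",
  provider: "openai",
  virtualKey: process.env["OPENAI_VIRTUAL_KEY"] ?? "",
});

async function sketch() {
  // `model` can now be omitted entirely; the provider-side default applies.
  const byDefault = await client.images.generate({ prompt: "A cute baby sea otter" });
  // An explicit model still type-checks against the "dall-e-2" | "dall-e-3" union.
  const explicit = await client.images.generate({ model: "dall-e-3", prompt: "A cute baby sea otter" });
  return { byDefault, explicit };
}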
26 changes: 26 additions & 0 deletions tests/assistants/openai.test.ts
@@ -0,0 +1,26 @@
import { config } from 'dotenv';
import { Portkey } from 'portkey-ai';

config({ override: true })
const client = new Portkey({
  apiKey: process.env["PORTKEY_API_KEY"] ?? "",
  baseURL: "https://api.portkeydev.com/v1",
  provider: "openai",
  virtualKey: process.env["OPENAI_VIRTUAL_KEY"] ?? ""
});

describe('OpenAI Assistants APIs', () => {
  test('assistant: create: documentation', async () => {
    const myAssistant = await client.beta.assistants.create({
      instructions:
        "You are a personal math tutor. When asked a question, write and run Python code to answer the question.",
      name: "Math Tutor",
      tools: [{ type: "code_interpreter" }],
      model: "gpt-4",
    });
    expect(myAssistant).toBeDefined();
    expect(myAssistant.tools).toBeDefined();
    expect(myAssistant.tools.length).toBeGreaterThan(0);
  });

});
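Both new test files build a live Portkey client from environment variables loaded via dotenv, and the image tests below pass 120000 as Jest's per-test timeout (in milliseconds) because the image endpoints are slow. One optional hardening step, not part of this commit and with illustrative names, is to skip the live suites when credentials are missing rather than letting them fail on an unauthenticated request:

// Sketch, not in the commit: guard live-API suites behind the required credentials.
const hasCreds = Boolean(
  process.env["PORTKEY_API_KEY"] && process.env["OPENAI_VIRTUAL_KEY"]
);
// Jest allows swapping in describe.skip at definition time, so the suite is
// reported as skipped instead of failing when the env vars are absent.
const describeWithCreds = hasCreds ? describe : describe.skip;

describeWithCreds('OpenAI Assistants APIs', () => {
  // ...same tests as above...
});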
Binary file added tests/images/image.png
Binary file added tests/images/imageMask.png
134 changes: 134 additions & 0 deletions tests/images/openai.test.ts
@@ -0,0 +1,134 @@
import { config } from "dotenv";
import { Portkey } from "portkey-ai";
import fs from "fs";
import path from "path";

config({ override: true });
const client = new Portkey({
apiKey: process.env["PORTKEY_API_KEY"] ?? "",
baseURL: "https://api.portkeydev.com/v1",
provider: "openai",
virtualKey: process.env["OPENAI_VIRTUAL_KEY"] ?? ""
});

describe("Openai Images APIs", () => {
test("generate: only required params", async () => {
const response = await client.images.generate({
prompt: "A cute baby sea otter",
});
expect(response).toBeDefined();
expect(response.data).toBeDefined();
expect(response.data.length).toBeGreaterThan(0);
}, 120000);

test("generate: only required params with model", async () => {
const response = await client.images.generate({
model: "dall-e-3",
prompt: "A cute baby sea otter",
});
expect(response).toBeDefined();
expect(response.data).toBeDefined();
expect(response.data.length).toBeGreaterThan(0);
}, 120000);

test("createVariation: only required params", async () => {
const imagePath = path.join(__dirname, 'image.png');
const response = await client.images.createVariation({
image: fs.createReadStream(imagePath),
});
expect(response).toBeDefined();
expect(response.data).toBeDefined();
expect(response.data.length).toBeGreaterThan(0);
}, 120000);

test("edit: only required params", async () => {
const imagePath = path.join(__dirname, 'image.png');
const imageMaskPath = path.join(__dirname, 'imageMask.png');
const response = await client.images.edit({
image: fs.createReadStream(imagePath),
mask: fs.createReadStream(imageMaskPath),
prompt:"A cute baby sea otter wearing a beret"
});
expect(response).toBeDefined();
expect(response.data).toBeDefined();
expect(response.data.length).toBeGreaterThan(0);
}, 120000);


  // test('model: gpt-4-1106-preview', async () => {
  //   const completion = await client.chat.completions.create({ model: 'gpt-4-1106-preview', messages: [{ "role": "user", "content": "Say this is a test" }] });
  //   expect(completion).toBeDefined();
  //   expect(completion.choices).toBeDefined();
  //   expect(completion.choices.length).toBeGreaterThan(0);
  // });

  // test('model: gpt-4', async () => {
  //   const completion = await client.chat.completions.create({ model: 'gpt-4', messages: [{ "role": "user", "content": "Say this is a test" }] });
  //   expect(completion).toBeDefined();
  //   expect(completion.choices).toBeDefined();
  //   expect(completion.choices.length).toBeGreaterThan(0);
  // });

  // test('model: gpt-4-0314', async () => {
  //   const completion = await client.chat.completions.create({ model: 'gpt-4-0314', messages: [{ "role": "user", "content": "Say this is a test" }] });
  //   expect(completion).toBeDefined();
  //   expect(completion.choices).toBeDefined();
  //   expect(completion.choices.length).toBeGreaterThan(0);
  // });

  // test('model: gpt-4-32k', async () => {
  //   const completion = await client.chat.completions.create({ model: 'gpt-4-32k', messages: [{ "role": "user", "content": "Say this is a test" }] });
  //   expect(completion).toBeDefined();
  //   expect(completion.choices).toBeDefined();
  //   expect(completion.choices.length).toBeGreaterThan(0);
  // });

  // test('model: gpt-4-32k-0314', async () => {
  //   const completion = await client.chat.completions.create({ model: 'gpt-4-32k-0314', messages: [{ "role": "user", "content": "Say this is a test" }] });
  //   expect(completion).toBeDefined();
  //   expect(completion.choices).toBeDefined();
  //   expect(completion.choices.length).toBeGreaterThan(0);
  // });

  // test('model: gpt-4-0613', async () => {
  //   const completion = await client.chat.completions.create({ model: 'gpt-4-0613', messages: [{ "role": "user", "content": "Say this is a test" }] });
  //   expect(completion).toBeDefined();
  //   expect(completion.choices).toBeDefined();
  //   expect(completion.choices.length).toBeGreaterThan(0);
  // });

  // test('model: gpt-3.5-turbo-0613', async () => {
  //   const completion = await client.chat.completions.create({ model: 'gpt-3.5-turbo-0613', messages: [{ "role": "user", "content": "Say this is a test" }] });
  //   expect(completion).toBeDefined();
  //   expect(completion.choices).toBeDefined();
  //   expect(completion.choices.length).toBeGreaterThan(0);
  // });

  // test('model: gpt-3.5-turbo-0301', async () => {
  //   const completion = await client.chat.completions.create({ model: 'gpt-3.5-turbo-0301', messages: [{ "role": "user", "content": "Say this is a test" }] });
  //   expect(completion).toBeDefined();
  //   expect(completion.choices).toBeDefined();
  //   expect(completion.choices.length).toBeGreaterThan(0);
  // });

  // test('model: gpt-3.5-turbo-1106', async () => {
  //   const completion = await client.chat.completions.create({ model: 'gpt-3.5-turbo-1106', messages: [{ "role": "user", "content": "Say this is a test" }] });
  //   expect(completion).toBeDefined();
  //   expect(completion.choices).toBeDefined();
  //   expect(completion.choices.length).toBeGreaterThan(0);
  // });

  // test('model: gpt-3.5-turbo-16k', async () => {
  //   const completion = await client.chat.completions.create({ model: 'gpt-3.5-turbo-16k', messages: [{ "role": "user", "content": "Say this is a test" }] });
  //   expect(completion).toBeDefined();
  //   expect(completion.choices).toBeDefined();
  //   expect(completion.choices.length).toBeGreaterThan(0);
  // });

  // test('model: gpt-3.5-turbo', async () => {
  //   const completion = await client.chat.completions.create({ model: 'gpt-3.5-turbo', messages: [{ "role": "user", "content": "Say this is a test" }] });
  //   expect(completion).toBeDefined();
  //   expect(completion.choices).toBeDefined();
  //   expect(completion.choices.length).toBeGreaterThan(0);
  // });
});
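The createVariation and edit tests depend on the two PNG fixtures added in this commit (tests/images/image.png and tests/images/imageMask.png); OpenAI documents these endpoints as expecting square PNG files under 4 MB, with the edit mask matching the source image's dimensions. If a fixture ever needs to be regenerated, one approach, a sketch rather than anything in this commit, is to request base64 output from the generate endpoint and write it to disk:

import fs from "fs";
import path from "path";
import { Portkey } from "portkey-ai";

// Sketch only: recreates tests/images/image.png via the generate endpoint.
// Assumes the same environment variables used by the test files above.
const client = new Portkey({
  apiKey: process.env["PORTKEY_API_KEY"] ?? "",
  provider: "openai",
  virtualKey: process.env["OPENAI_VIRTUAL_KEY"] ?? "",
});

async function writeFixture() {
  const response = await client.images.generate({
    prompt: "A cute baby sea otter",
    response_format: "b64_json", // declared on ImagesBody above
  });
  const b64 = response.data[0]?.b64_json;
  if (b64) {
    fs.writeFileSync(path.join(__dirname, "image.png"), Buffer.from(b64, "base64"));
  }
}

writeFixture().catch(console.error);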
