
Commit

fix: start script removed, comments deleted, embedding interface changed, BASE URL changed
csgulati09 committed Mar 16, 2024
1 parent 5a4c375 commit 20aeba7
Showing 11 changed files with 13 additions and 92 deletions.
3 changes: 1 addition & 2 deletions package.json
@@ -10,8 +10,7 @@
 "test": "jest",
 "build": "bash ./build",
 "lint": "eslint --ext ts,js .",
-"fix": "eslint --fix --ext ts,js .",
-"start": "npm run build && npm pack && cd /Users/chandeep/Documents/Workspace/Portkey/Hands-On/revamp-Node && npm i /Users/chandeep/Documents/Workspace/Portkey/SDK/portkey-node-sdk/dist"
+"fix": "eslint --fix --ext ts,js ."
 },
 "imports": {
 "portkey-ai": ".",
2 changes: 1 addition & 1 deletion src/_types/portkeyConstructs.ts
@@ -10,7 +10,7 @@ export interface Function {
 }
 
 export interface ModelParams {
-model: string;
+model?: string;
 suffix?: string;
 max_tokens?: number;
 temperature?: number;
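
Making `model` optional on `ModelParams` means request bodies built on this interface no longer have to name a model in code, for example when the model is supplied by a Portkey config or virtual key instead. A minimal TypeScript sketch of what the change permits (the interface is abridged and the field values are illustrative, not part of this commit):

```ts
// Abridged from src/_types/portkeyConstructs.ts after this commit.
interface ModelParams {
  model?: string;
  suffix?: string;
  max_tokens?: number;
  temperature?: number;
}

// Now type-checks without `model`, e.g. when the model is pinned
// upstream by a Portkey config or virtual key rather than per request.
const params: ModelParams = { max_tokens: 64, temperature: 0 };

// Before this commit `model: string` was required, so the object
// above would have been a compile error.
```
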
7 changes: 3 additions & 4 deletions src/apis/embeddings.ts
@@ -7,11 +7,10 @@ import { overrideConfig } from "../utils";
 import { createHeaders } from "./createHeaders";
 
 export interface EmbeddingsBody extends ModelParams {
-input: string | Array<string> | Array<number> | Array<Array<number>>;
-model: (string & {}) | 'text-embedding-ada-002' | 'text-embedding-3-small' | 'text-embedding-3-large';
+input: string;
+model?: string;
 dimensions?: number;
-encoding_format?: 'float' | 'base64';
-user?: string;
+encoding_format?: string;
 }
 
 export type EmbeddingsResponse = Record<string, any> & APIResponseType
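
With `EmbeddingsBody` narrowed to a plain-string `input` and loosely typed `model`/`encoding_format`, an embeddings request against the updated interface would look roughly like the sketch below. It assumes `client.embeddings.create` is the SDK entry point (mirroring the other client surfaces used in these tests); the model name and input text are placeholders.

```ts
import { Portkey } from "portkey-ai";

const client = new Portkey({
  apiKey: process.env["PORTKEY_API_KEY"] ?? "",
  virtualKey: process.env["OPENAI_VIRTUAL_KEY"] ?? "",
});

async function embed() {
  // `input` is now a single string; `model` and `encoding_format` are
  // optional plain strings rather than literal-union types.
  const response = await client.embeddings.create({
    input: "Say this is a test",
    model: "text-embedding-ada-002", // placeholder model name
    encoding_format: "float",
  });
  console.log(response.data?.[0]);
}

embed().catch(console.error);
```
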
2 changes: 1 addition & 1 deletion tests/assistants/openai.test.ts
@@ -4,7 +4,7 @@ import { Portkey } from 'portkey-ai';
 config({ override: true })
 const client = new Portkey({
 apiKey: process.env["PORTKEY_API_KEY"] ?? "",
-baseURL: "https://api.portkeydev.com/v1",
+baseURL: "https://api.portkey.ai/v1",
 provider: "openai",
 virtualKey: process.env["OPENAI_VIRTUAL_KEY"] ?? ""
 });
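
The same one-line substitution is applied to every test client below: the dev gateway https://api.portkeydev.com/v1 is replaced by the production gateway https://api.portkey.ai/v1. Pulled together as a runnable sketch (assuming `config` comes from dotenv, as in these tests; the model name is a placeholder):

```ts
import { config } from "dotenv";
import { Portkey } from "portkey-ai";

config({ override: true });

// Test clients now point at the production gateway.
const client = new Portkey({
  apiKey: process.env["PORTKEY_API_KEY"] ?? "",
  baseURL: "https://api.portkey.ai/v1",
  provider: "openai",
  virtualKey: process.env["OPENAI_VIRTUAL_KEY"] ?? "",
});

async function smokeTest() {
  const completion = await client.chat.completions.create({
    model: "gpt-3.5-turbo", // placeholder; any model the virtual key can serve
    messages: [{ role: "user", content: "Say this is a test" }],
  });
  console.log(completion.choices?.[0]);
}

smokeTest().catch(console.error);
```
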
2 changes: 1 addition & 1 deletion tests/chat/anthropic.test.ts
@@ -4,7 +4,7 @@ import { Portkey } from 'portkey-ai';
 config({ override: true })
 const client = new Portkey({
 apiKey: process.env["PORTKEY_API_KEY"] ?? "",
-baseURL: "https://api.portkeydev.com/v1",
+baseURL: "https://api.portkey.ai/v1",
 provider: "openai",
 virtualKey: process.env["ANTHROPIC_VIRTUAL_KEY"] ?? ""
 });
2 changes: 1 addition & 1 deletion tests/chat/anyscale.test.ts
@@ -4,7 +4,7 @@ import { Portkey } from 'portkey-ai';
 config({ override: true })
 const client = new Portkey({
 apiKey: process.env["PORTKEY_API_KEY"] ?? "",
-baseURL: "https://api.portkeydev.com/v1",
+baseURL: "https://api.portkey.ai/v1",
 provider: "openai",
 virtualKey: process.env["ANYSCALE_VIRTUAL_KEY"] ?? ""
 });
2 changes: 1 addition & 1 deletion tests/chat/openai.test.ts
@@ -4,7 +4,7 @@ import { Portkey } from 'portkey-ai';
 config({ override: true })
 const client = new Portkey({
 apiKey: process.env["PORTKEY_API_KEY"] ?? "",
-baseURL: "https://api.portkeydev.com/v1",
+baseURL: "https://api.portkey.ai/v1",
 provider: "openai",
 virtualKey: process.env["OPENAI_VIRTUAL_KEY"] ?? ""
 });
2 changes: 1 addition & 1 deletion tests/completion/anthropic.test.ts
@@ -4,7 +4,7 @@ import { Portkey } from 'portkey-ai';
 config({ override: true })
 const client = new Portkey({
 apiKey: process.env["PORTKEY_API_KEY"] ?? "",
-baseURL: "https://api.portkeydev.com/v1",
+baseURL: "https://api.portkey.ai/v1",
 provider: "openai",
 virtualKey: process.env["ANTHROPIC_VIRTUAL_KEY"] ?? ""
 });
2 changes: 1 addition & 1 deletion tests/completion/anyscale.test.ts
@@ -4,7 +4,7 @@ import { Portkey } from 'portkey-ai';
 config({ override: true })
 const client = new Portkey({
 apiKey: process.env["PORTKEY_API_KEY"] ?? "",
-baseURL: "https://api.portkeydev.com/v1",
+baseURL: "https://api.portkey.ai/v1",
 provider: "openai",
 virtualKey: process.env["ANYSCALE_VIRTUAL_KEY"] ?? ""
 });
2 changes: 1 addition & 1 deletion tests/completion/openai.test.ts
@@ -4,7 +4,7 @@ import { Portkey } from 'portkey-ai';
 config({ override: true })
 const client = new Portkey({
 apiKey: process.env["PORTKEY_API_KEY"] ?? "",
-baseURL: "https://api.portkeydev.com/v1",
+baseURL: "https://api.portkey.ai/v1",
 provider: "openai",
 virtualKey: process.env["OPENAI_VIRTUAL_KEY"] ?? ""
 });
79 changes: 1 addition & 78 deletions tests/images/openai.test.ts
@@ -6,7 +6,7 @@ import path from "path";
 config({ override: true });
 const client = new Portkey({
 apiKey: process.env["PORTKEY_API_KEY"] ?? "",
-baseURL: "https://api.portkeydev.com/v1",
+baseURL: "https://api.portkey.ai/v1",
 provider: "openai",
 virtualKey: process.env["OPENAI_VIRTUAL_KEY"] ?? ""
 });
@@ -54,81 +54,4 @@ describe("Openai Images APIs", () => {
 expect(response.data.length).toBeGreaterThan(0);
 }, 120000);
 
-
-// test('model: gpt-4-1106-preview', async () => {
-// const completion = await client.chat.completions.create({ model: 'gpt-4-1106-preview', messages: [{ "role": "user", "content": "Say this is a test" }] });
-// expect(completion).toBeDefined();
-// expect(completion.choices).toBeDefined();
-// expect(completion.choices.length).toBeGreaterThan(0);
-// });
-
-// test('model: gpt-4', async () => {
-// const completion = await client.chat.completions.create({ model: 'gpt-4', messages: [{ "role": "user", "content": "Say this is a test" }] });
-// expect(completion).toBeDefined();
-// expect(completion.choices).toBeDefined();
-// expect(completion.choices.length).toBeGreaterThan(0);
-// });
-
-// test('model: gpt-4-0314', async () => {
-// const completion = await client.chat.completions.create({ model: 'gpt-4-0314', messages: [{ "role": "user", "content": "Say this is a test" }] });
-// expect(completion).toBeDefined();
-// expect(completion.choices).toBeDefined();
-// expect(completion.choices.length).toBeGreaterThan(0);
-// });
-
-// test('model: gpt-4-32k', async () => {
-// const completion = await client.chat.completions.create({ model: 'gpt-4-32k', messages: [{ "role": "user", "content": "Say this is a test" }] });
-// expect(completion).toBeDefined();
-// expect(completion.choices).toBeDefined();
-// expect(completion.choices.length).toBeGreaterThan(0);
-// });
-
-// test('model: gpt-4-32k-0314', async () => {
-// const completion = await client.chat.completions.create({ model: 'gpt-4-32k-0314', messages: [{ "role": "user", "content": "Say this is a test" }] });
-// expect(completion).toBeDefined();
-// expect(completion.choices).toBeDefined();
-// expect(completion.choices.length).toBeGreaterThan(0);
-// });
-
-// test('model: gpt-4-0613', async () => {
-// const completion = await client.chat.completions.create({ model: 'gpt-4-0613', messages: [{ "role": "user", "content": "Say this is a test" }] });
-// expect(completion).toBeDefined();
-// expect(completion.choices).toBeDefined();
-// expect(completion.choices.length).toBeGreaterThan(0);
-// });
-
-// test('model: gpt-3.5-turbo-0613', async () => {
-// const completion = await client.chat.completions.create({ model: 'gpt-3.5-turbo-0613', messages: [{ "role": "user", "content": "Say this is a test" }] });
-// expect(completion).toBeDefined();
-// expect(completion.choices).toBeDefined();
-// expect(completion.choices.length).toBeGreaterThan(0);
-// });
-
-// test('model: gpt-3.5-turbo-0301', async () => {
-// const completion = await client.chat.completions.create({ model: 'gpt-3.5-turbo-0301', messages: [{ "role": "user", "content": "Say this is a test" }] });
-// expect(completion).toBeDefined();
-// expect(completion.choices).toBeDefined();
-// expect(completion.choices.length).toBeGreaterThan(0);
-// });
-
-// test('model: gpt-3.5-turbo-1106', async () => {
-// const completion = await client.chat.completions.create({ model: 'gpt-3.5-turbo-1106', messages: [{ "role": "user", "content": "Say this is a test" }] });
-// expect(completion).toBeDefined();
-// expect(completion.choices).toBeDefined();
-// expect(completion.choices.length).toBeGreaterThan(0);
-// });
-
-// test('model: gpt-3.5-turbo-16k', async () => {
-// const completion = await client.chat.completions.create({ model: 'gpt-3.5-turbo-16k', messages: [{ "role": "user", "content": "Say this is a test" }] });
-// expect(completion).toBeDefined();
-// expect(completion.choices).toBeDefined();
-// expect(completion.choices.length).toBeGreaterThan(0);
-// });
-
-// test('model: gpt-3.5-turbo', async () => {
-// const completion = await client.chat.completions.create({ model: 'gpt-3.5-turbo', messages: [{ "role": "user", "content": "Say this is a test" }] });
-// expect(completion).toBeDefined();
-// expect(completion.choices).toBeDefined();
-// expect(completion.choices.length).toBeGreaterThan(0);
-// });
 });
