diff --git a/package-lock.json b/package-lock.json index 80d66f1..51bd6fe 100644 --- a/package-lock.json +++ b/package-lock.json @@ -10,7 +10,8 @@ "license": "MIT", "dependencies": { "agentkeepalive": "^4.5.0", - "dotenv": "^16.3.1" + "dotenv": "^16.3.1", + "openai": "^4.28.4" }, "devDependencies": { "@babel/core": "^7.23.3", @@ -2721,11 +2722,19 @@ "version": "20.8.9", "resolved": "https://registry.npmjs.org/@types/node/-/node-20.8.9.tgz", "integrity": "sha512-UzykFsT3FhHb1h7yD4CA4YhBHq545JC0YnEz41xkipN88eKQtL6rSgocL5tbAP6Ola9Izm/Aw4Ora8He4x0BHg==", - "dev": true, "dependencies": { "undici-types": "~5.26.4" } }, + "node_modules/@types/node-fetch": { + "version": "2.6.11", + "resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.11.tgz", + "integrity": "sha512-24xFj9R5+rfQJLRyM56qh+wnVSYhyXC2tkoBndtY0U+vubqNsYXGjufB2nn8Q6gt0LrARwL6UBtMCSVCwl4B1g==", + "dependencies": { + "@types/node": "*", + "form-data": "^4.0.0" + } + }, "node_modules/@types/semver": { "version": "7.5.2", "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.2.tgz", @@ -2947,6 +2956,17 @@ "integrity": "sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==", "dev": true }, + "node_modules/abort-controller": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", + "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", + "dependencies": { + "event-target-shim": "^5.0.0" + }, + "engines": { + "node": ">=6.5" + } + }, "node_modules/acorn": { "version": "8.10.0", "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.10.0.tgz", @@ -3089,6 +3109,11 @@ "node": ">=8" } }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" + }, "node_modules/babel-jest": { "version": "29.7.0", "resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-29.7.0.tgz", @@ -3259,6 +3284,11 @@ "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", "dev": true }, + "node_modules/base-64": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/base-64/-/base-64-0.1.0.tgz", + "integrity": "sha512-Y5gU45svrR5tI2Vt/X9GPd3L0HNIKzGu202EjxrXMpuc2V2CiKgemAbUUsqYmZJvPtCXoUKjNZwBJzsNScUbXA==" + }, "node_modules/brace-expansion": { "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", @@ -3403,6 +3433,14 @@ "node": ">=10" } }, + "node_modules/charenc": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/charenc/-/charenc-0.0.2.tgz", + "integrity": "sha512-yrLQ/yVUFXkzg7EDQsPieE/53+0RlaWTs+wBrvW36cyilJ2SaDWfl4Yj7MtLTXleV9uEKefbAGUPv2/iWSooRA==", + "engines": { + "node": "*" + } + }, "node_modules/ci-info": { "version": "3.9.0", "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.9.0.tgz", @@ -3472,6 +3510,17 @@ "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dependencies": { + "delayed-stream": "~1.0.0" + }, + 
"engines": { + "node": ">= 0.8" + } + }, "node_modules/concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", @@ -3538,6 +3587,14 @@ "node": ">= 8" } }, + "node_modules/crypt": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/crypt/-/crypt-0.0.2.tgz", + "integrity": "sha512-mCxBlsHFYh9C+HVpiEacem8FEBnMXgU9gy4zmNC+SXAZNB/1idgp/aulFJ4FgCi7GPEVbfyng092GqL2k2rmow==", + "engines": { + "node": "*" + } + }, "node_modules/debug": { "version": "4.3.4", "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", @@ -3584,6 +3641,14 @@ "node": ">=0.10.0" } }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "engines": { + "node": ">=0.4.0" + } + }, "node_modules/detect-newline": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz", @@ -3611,6 +3676,15 @@ "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, + "node_modules/digest-fetch": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/digest-fetch/-/digest-fetch-1.3.0.tgz", + "integrity": "sha512-CGJuv6iKNM7QyZlM2T3sPAdZWd/p9zQiRNS9G+9COUCwzWFTs0Xp8NF5iePx7wtvhDykReiRRrSeNb4oMmB8lA==", + "dependencies": { + "base-64": "^0.1.0", + "md5": "^2.3.0" + } + }, "node_modules/dir-glob": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", @@ -3958,6 +4032,14 @@ "node": ">=0.10.0" } }, + "node_modules/event-target-shim": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", + "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==", + "engines": { + "node": ">=6" + } + }, "node_modules/execa": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", @@ -4136,6 +4218,44 @@ "integrity": "sha512-36yxDn5H7OFZQla0/jFJmbIKTdZAQHngCedGxiMmpNfEZM0sdEeT+WczLQrjK6D7o2aiyLYDnkw0R3JK0Qv1RQ==", "dev": true }, + "node_modules/form-data": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", + "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/form-data-encoder": { + "version": "1.7.2", + "resolved": "https://registry.npmjs.org/form-data-encoder/-/form-data-encoder-1.7.2.tgz", + "integrity": "sha512-qfqtYan3rxrnCk1VYaA4H+Ms9xdpPqvLZa6xmMgFvhO32x7/3J/ExcTd6qpxM0vH2GdMI+poehyBZvqfMTto8A==" + }, + "node_modules/formdata-node": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/formdata-node/-/formdata-node-4.4.1.tgz", + "integrity": "sha512-0iirZp3uVDjVGt9p49aTaqjk84TrglENEDuqfdlZQ1roC9CWlPk6Avf8EEnZNcAqPonwkG35x4n3ww/1THYAeQ==", + "dependencies": { + "node-domexception": "1.0.0", + "web-streams-polyfill": "4.0.0-beta.3" + }, + "engines": { + "node": ">= 12.20" + } + }, + "node_modules/formdata-node/node_modules/web-streams-polyfill": { + "version": "4.0.0-beta.3", + "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-4.0.0-beta.3.tgz", + "integrity": 
"sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug==", + "engines": { + "node": ">= 14" + } + }, "node_modules/fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", @@ -4402,6 +4522,11 @@ "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", "dev": true }, + "node_modules/is-buffer": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz", + "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==" + }, "node_modules/is-core-module": { "version": "2.13.1", "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.13.1.tgz", @@ -5294,6 +5419,16 @@ "tmpl": "1.0.5" } }, + "node_modules/md5": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/md5/-/md5-2.3.0.tgz", + "integrity": "sha512-T1GITYmFaKuO91vxyoQMFETst+O71VUPEU3ze5GNzDm0OWdP8v1ziTaAEPUr/3kLsY3Sftgz242A1SetQiDL7g==", + "dependencies": { + "charenc": "0.0.2", + "crypt": "0.0.2", + "is-buffer": "~1.1.6" + } + }, "node_modules/merge-stream": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", @@ -5322,6 +5457,25 @@ "node": ">=8.6" } }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, "node_modules/mimic-fn": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", @@ -5360,6 +5514,43 @@ "integrity": "sha512-Tj+HTDSJJKaZnfiuw+iaF9skdPpTo2GtEly5JHnWV/hfv2Qj/9RKsGISQtLh2ox3l5EAGw487hnBee0sIJ6v2g==", "dev": true }, + "node_modules/node-domexception": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz", + "integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/jimmywarting" + }, + { + "type": "github", + "url": "https://paypal.me/jimmywarting" + } + ], + "engines": { + "node": ">=10.5.0" + } + }, + "node_modules/node-fetch": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", + "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "engines": { + "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + } + }, "node_modules/node-int64": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/node-int64/-/node-int64-0.4.0.tgz", @@ -5417,6 +5608,33 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/openai": { + "version": "4.28.4", + "resolved": "https://registry.npmjs.org/openai/-/openai-4.28.4.tgz", + "integrity": 
"sha512-RNIwx4MT/F0zyizGcwS+bXKLzJ8QE9IOyigDG/ttnwB220d58bYjYFp0qjvGwEFBO6+pvFVIDABZPGDl46RFsg==", + "dependencies": { + "@types/node": "^18.11.18", + "@types/node-fetch": "^2.6.4", + "abort-controller": "^3.0.0", + "agentkeepalive": "^4.2.1", + "digest-fetch": "^1.3.0", + "form-data-encoder": "1.7.2", + "formdata-node": "^4.3.2", + "node-fetch": "^2.6.7", + "web-streams-polyfill": "^3.2.1" + }, + "bin": { + "openai": "bin/cli" + } + }, + "node_modules/openai/node_modules/@types/node": { + "version": "18.19.17", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.17.tgz", + "integrity": "sha512-SzyGKgwPzuWp2SHhlpXKzCX0pIOfcI4V2eF37nNBJOhwlegQ83omtVQ1XxZpDE06V/d6AQvfQdPfnw0tRC//Ng==", + "dependencies": { + "undici-types": "~5.26.4" + } + }, "node_modules/optionator": { "version": "0.9.3", "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.3.tgz", @@ -6207,6 +6425,11 @@ "node": ">=8.0" } }, + "node_modules/tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" + }, "node_modules/ts-jest": { "version": "29.1.1", "resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-29.1.1.tgz", @@ -6363,8 +6586,7 @@ "node_modules/undici-types": { "version": "5.26.5", "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", - "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", - "dev": true + "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==" }, "node_modules/unicode-canonical-property-names-ecmascript": { "version": "2.0.0", @@ -6474,6 +6696,28 @@ "makeerror": "1.0.12" } }, + "node_modules/web-streams-polyfill": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.3.3.tgz", + "integrity": "sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw==", + "engines": { + "node": ">= 8" + } + }, + "node_modules/webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" + }, + "node_modules/whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, "node_modules/which": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", diff --git a/package.json b/package.json index 690b806..a070e38 100644 --- a/package.json +++ b/package.json @@ -41,6 +41,7 @@ }, "dependencies": { "agentkeepalive": "^4.5.0", - "dotenv": "^16.3.1" + "dotenv": "^16.3.1", + "openai": "^4.28.4" } } diff --git a/src/apis/assistants.ts b/src/apis/assistants.ts new file mode 100644 index 0000000..f9a8eb5 --- /dev/null +++ b/src/apis/assistants.ts @@ -0,0 +1,291 @@ +import { ApiClientInterface } from "../_types/generalTypes"; +import { ApiResource } from "../apiResource"; +import { RequestOptions } from "../baseClient"; +import { OPEN_AI_API_KEY } from "../constants"; +import { defaultHeadersBuilder, finalResponse, overrideConfig } from "../utils"; +import { 
createHeaders } from "./createHeaders"; +import OpenAI from "openai"; + +export interface AssistantCreateParams { + model: string; + description?: string | null; + file_ids?: Array; + instructions?: string | null; + metadata?: unknown | null; + name?: string | null; + tools?: Array; +} + +export interface FileCreateParams { + file_id: string; +} + +export interface FileListParams extends CursorPageParams { + before?: string; + order?: string; +} + +export interface CursorPageParams { + after?: string; + limit?: number; +} + +export interface AssistantListParams extends CursorPageParams { + before?: string; + order?: string; +} + +export interface AssistantUpdateParams { + description?: string | null; + file_ids?: Array; + instructions?: string | null; + metadata?: unknown | null; + model?: string; + name?: string | null; + tools?: Array; +} + + +export class Assistants extends ApiResource { + + files: Files; + + constructor(client:any) { + super(client); + this.files = new Files(client); + } + + async create( + _body: AssistantCreateParams, + params?: ApiClientInterface, + opts?: RequestOptions + ): Promise { + const body: AssistantCreateParams = _body; + if (params) { + const config = overrideConfig(this.client.config, params.config); + this.client.customHeaders = { + ...this.client.customHeaders, + ...createHeaders({ ...params, config }), + }; + } + + const OAIclient = new OpenAI({ + apiKey: OPEN_AI_API_KEY, + baseURL: this.client.baseURL, + defaultHeaders: defaultHeadersBuilder(this.client), + }); + + const result = await OAIclient.beta.assistants.create(body, opts).withResponse(); + + return finalResponse(result); + } + + async list( + _query?: AssistantListParams, + params?: ApiClientInterface, + opts?: RequestOptions + ): Promise { + const query: AssistantListParams | undefined = _query; + if (params) { + const config = overrideConfig(this.client.config, params.config); + this.client.customHeaders = { + ...this.client.customHeaders, + ...createHeaders({ ...params, config }), + }; + } + + const OAIclient = new OpenAI({ + apiKey: OPEN_AI_API_KEY, + baseURL: this.client.baseURL, + defaultHeaders: defaultHeadersBuilder(this.client), + }); + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + const result = await OAIclient.beta.assistants.list(query, opts).withResponse(); + + return finalResponse(result); + } + + async retrieve( + assistantId: string, + params?: ApiClientInterface, + opts?: RequestOptions + ): Promise { + if (params) { + const config = overrideConfig(this.client.config, params.config); + this.client.customHeaders = { + ...this.client.customHeaders, + ...createHeaders({ ...params, config }), + }; + } + + const OAIclient = new OpenAI({ + apiKey: OPEN_AI_API_KEY, + baseURL: this.client.baseURL, + defaultHeaders: defaultHeadersBuilder(this.client), + }); + + const result = await OAIclient.beta.assistants.retrieve(assistantId, opts).withResponse(); + + return finalResponse(result); + } + + async update( + assistantId: string, + _body: AssistantUpdateParams, + params?: ApiClientInterface, + opts?: RequestOptions + ): Promise { + const body: AssistantUpdateParams = _body; + if (params) { + const config = overrideConfig(this.client.config, params.config); + this.client.customHeaders = { + ...this.client.customHeaders, + ...createHeaders({ ...params, config }), + }; + } + + const OAIclient = new OpenAI({ + apiKey: OPEN_AI_API_KEY, + baseURL: this.client.baseURL, + defaultHeaders: defaultHeadersBuilder(this.client), + }); + + const result = await 
OAIclient.beta.assistants.update(assistantId, body, opts).withResponse(); + + return finalResponse(result); + } + + async del( + assistantId: string, + params?: ApiClientInterface, + opts?: RequestOptions + ): Promise { + if (params) { + const config = overrideConfig(this.client.config, params.config); + this.client.customHeaders = { + ...this.client.customHeaders, + ...createHeaders({ ...params, config }), + }; + } + + const OAIclient = new OpenAI({ + apiKey: OPEN_AI_API_KEY, + baseURL: this.client.baseURL, + defaultHeaders: defaultHeadersBuilder(this.client), + }); + + const result = await OAIclient.beta.assistants.del(assistantId, opts).withResponse(); + + return finalResponse(result); + } + +} + +export class Files extends ApiResource{ + + async create( + assistantId: string, + _body: FileCreateParams, + params?: ApiClientInterface, + opts?: RequestOptions + ): Promise { + const body: FileCreateParams = _body; + if (params) { + const config = overrideConfig(this.client.config, params.config); + this.client.customHeaders = { + ...this.client.customHeaders, + ...createHeaders({ ...params, config }), + }; + } + + + const OAIclient = new OpenAI({ + apiKey: OPEN_AI_API_KEY, + baseURL: this.client.baseURL, + defaultHeaders: defaultHeadersBuilder(this.client), + }); + + const result = await OAIclient.beta.assistants.files.create(assistantId, body, opts).withResponse(); + + return finalResponse(result); + } + + async list( + assistantId: string, + _query?: FileListParams, + params?: ApiClientInterface, + opts?: RequestOptions + ): Promise { + const query: FileListParams | undefined = _query; + if (params) { + const config = overrideConfig(this.client.config, params.config); + this.client.customHeaders = { + ...this.client.customHeaders, + ...createHeaders({ ...params, config }), + }; + } + + const OAIclient = new OpenAI({ + apiKey: OPEN_AI_API_KEY, + baseURL: this.client.baseURL, + defaultHeaders: defaultHeadersBuilder(this.client), + }); + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + const result = await OAIclient.beta.assistants.files.list(assistantId, query, opts).withResponse(); + + return finalResponse(result); + } + + async retrieve( + assistantId: string, + fileId: string, + params?: ApiClientInterface, + opts?: RequestOptions + ): Promise { + if (params) { + const config = overrideConfig(this.client.config, params.config); + this.client.customHeaders = { + ...this.client.customHeaders, + ...createHeaders({ ...params, config }), + }; + } + + const OAIclient = new OpenAI({ + apiKey: OPEN_AI_API_KEY, + baseURL: this.client.baseURL, + defaultHeaders: defaultHeadersBuilder(this.client), + }); + + const result = await OAIclient.beta.assistants.files.retrieve(assistantId, fileId, opts).withResponse(); + + return finalResponse(result); + } + + async del( + assistantId: string, + fileId: string, + params?: ApiClientInterface, + opts?: RequestOptions + ): Promise { + if (params) { + const config = overrideConfig(this.client.config, params.config); + this.client.customHeaders = { + ...this.client.customHeaders, + ...createHeaders({ ...params, config }), + }; + } + + const OAIclient = new OpenAI({ + apiKey: OPEN_AI_API_KEY, + baseURL: this.client.baseURL, + defaultHeaders: defaultHeadersBuilder(this.client), + }); + + const result = await OAIclient.beta.assistants.files.del(assistantId, fileId, opts).withResponse(); + + return finalResponse(result); + } + +} \ No newline at end of file diff --git a/src/apis/chatCompletions.ts b/src/apis/chatCompletions.ts index 
ac70776..a878f92 100644 --- a/src/apis/chatCompletions.ts +++ b/src/apis/chatCompletions.ts @@ -38,6 +38,8 @@ class ChatCompletions extends ApiResource { const config = overrideConfig(this.client.config, params.config) this.client.customHeaders = { ...this.client.customHeaders, ...createHeaders({ ...params, config }) } } + + const stream = _body.stream ?? false return this.post(CHAT_COMPLETE_API, { body, ...opts, stream }) as | APIPromise diff --git a/src/apis/completions.ts b/src/apis/completions.ts index 34c0c01..81e5e1f 100644 --- a/src/apis/completions.ts +++ b/src/apis/completions.ts @@ -7,7 +7,6 @@ import { Stream } from "../streaming"; import { overrideConfig } from "../utils"; import { createHeaders } from "./createHeaders"; - export class Completions extends ApiResource { create( _body: CompletionsBodyNonStreaming, @@ -36,6 +35,7 @@ export class Completions extends ApiResource { this.client.customHeaders = { ...this.client.customHeaders, ...createHeaders({ ...params, config }) } } const stream = _body.stream ?? false + this.client.responseHeaders return this.post(TEXT_COMPLETE_API, { body, ...opts, stream }) as | APIPromise @@ -45,7 +45,7 @@ export class Completions extends ApiResource { export interface CompletionsBodyBase extends ModelParams { - prompt?: string; + prompt: string; } export interface CompletionsBodyStreaming extends CompletionsBodyBase { diff --git a/src/apis/embeddings.ts b/src/apis/embeddings.ts index 079198c..51cd0ae 100644 --- a/src/apis/embeddings.ts +++ b/src/apis/embeddings.ts @@ -30,5 +30,3 @@ export class Embeddings extends ApiResource { return response } } - - diff --git a/src/apis/files.ts b/src/apis/files.ts new file mode 100644 index 0000000..dd9b69b --- /dev/null +++ b/src/apis/files.ts @@ -0,0 +1,156 @@ +import { ApiClientInterface } from "../_types/generalTypes"; +import { ApiResource } from "../apiResource"; +import { RequestOptions } from "../baseClient"; +import { OPEN_AI_API_KEY } from "../constants"; +import { defaultHeadersBuilder, finalResponse, overrideConfig } from "../utils"; +import { createHeaders } from "./createHeaders"; +import OpenAI from "openai"; + +export class MainFiles extends ApiResource { + + async create( + _body: FileCreateParams, + params?: ApiClientInterface, + opts?: RequestOptions + ): Promise { + const body: FileCreateParams = _body; + if (params) { + const config = overrideConfig(this.client.config, params.config); + this.client.customHeaders = { + ...this.client.customHeaders, + ...createHeaders({ ...params, config }), + }; + } + + const OAIclient = new OpenAI({ + apiKey: OPEN_AI_API_KEY, + baseURL: this.client.baseURL, + defaultHeaders: defaultHeadersBuilder(this.client), + }); + + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + const result = await OAIclient.files.create(body, opts).withResponse(); + + return finalResponse(result); + } + + async list( + _query?: FileListParams, + params?: ApiClientInterface, + opts?: RequestOptions + ): Promise { + const query: FileListParams | undefined = _query; + if (params) { + const config = overrideConfig(this.client.config, params.config); + this.client.customHeaders = { + ...this.client.customHeaders, + ...createHeaders({ ...params, config }), + }; + } + + const OAIclient = new OpenAI({ + apiKey: OPEN_AI_API_KEY, + baseURL: this.client.baseURL, + defaultHeaders: defaultHeadersBuilder(this.client), + }); + + const result = await OAIclient.files.list(query, opts).withResponse(); + + return finalResponse(result); + } + + async retrieve( + 
fileId: string, + params?: ApiClientInterface, + opts?: RequestOptions + ): Promise<any> { + if (params) { + const config = overrideConfig(this.client.config, params.config); + this.client.customHeaders = { + ...this.client.customHeaders, + ...createHeaders({ ...params, config }), + }; + } + + const OAIclient = new OpenAI({ + apiKey: OPEN_AI_API_KEY, + baseURL: this.client.baseURL, + defaultHeaders: defaultHeadersBuilder(this.client), + }); + + const result = await OAIclient.files.retrieve(fileId, opts).withResponse(); + + return finalResponse(result); + } + + async del( + fileId: string, + params?: ApiClientInterface, + opts?: RequestOptions + ): Promise<any> { + if (params) { + const config = overrideConfig(this.client.config, params.config); + this.client.customHeaders = { + ...this.client.customHeaders, + ...createHeaders({ ...params, config }), + }; + } + + const OAIclient = new OpenAI({ + apiKey: OPEN_AI_API_KEY, + baseURL: this.client.baseURL, + defaultHeaders: defaultHeadersBuilder(this.client), + }); + + const result = await OAIclient.files.del(fileId, opts).withResponse(); + + return finalResponse(result); + } + + async retrieveContent( + fileId: string, + params?: ApiClientInterface, + opts?: RequestOptions + ): Promise<any> { + if (params) { + const config = overrideConfig(this.client.config, params.config); + this.client.customHeaders = { + ...this.client.customHeaders, + ...createHeaders({ ...params, config }), + }; + } + + const OAIclient = new OpenAI({ + apiKey: OPEN_AI_API_KEY, + baseURL: this.client.baseURL, + defaultHeaders: defaultHeadersBuilder(this.client), + }); + + const result = await OAIclient.files.content(fileId, opts).withResponse(); + + return finalResponse(result); + } + +} + + +export interface FileCreateParams { + file: any; + purpose?: string; +} + +export interface FileObject { + id: string; + bytes?: number; + created_at?: number; + filename?: string; + object?: string; + purpose?: string; + status?: string; + status_details?: string; +} + +export interface FileListParams { + purpose?: string; + } \ No newline at end of file diff --git a/src/apis/images.ts b/src/apis/images.ts new file mode 100644 index 0000000..784cf27 --- /dev/null +++ b/src/apis/images.ts @@ -0,0 +1,130 @@ +import { ApiClientInterface } from "../_types/generalTypes"; +import { ApiResource } from "../apiResource"; +import { RequestOptions } from "../baseClient"; +import { OPEN_AI_API_KEY } from "../constants"; +import { defaultHeadersBuilder, finalResponse, overrideConfig } from "../utils"; +import { createHeaders } from "./createHeaders"; +import OpenAI from "openai"; + +export interface ImagesBody { + prompt: string; + model?: string; + n?: number | null; + quality?: string; + response_format?: string | null; + size?: string | null; + style?: string | null; + user?: string; +} + +export interface ImageEditParams { + image: any; + prompt: string; + mask?: any; + model?: string | null; + n?: number | null; + response_format?: string | null; + size?: string | null; + user?: string; +} + +export interface ImageCreateVariationParams { + image: any; + model?: string | null; + n?: number | null; + response_format?: string | null; + size?: string | null; + user?: string; +} + +export interface ImagesResponse { + created: number; + + data: Array<Image>; +} +export interface Image { + b64_json?: string; + revised_prompt?: string; + url?: string; +} + +export class Images extends ApiResource { + async generate( + _body: ImagesBody, + params?: ApiClientInterface, + opts?: RequestOptions + ): Promise<any> { + const body: 
ImagesBody = _body; + if (params) { + const config = overrideConfig(this.client.config, params.config); + this.client.customHeaders = { + ...this.client.customHeaders, + ...createHeaders({ ...params, config }), + }; + } + + const OAIclient = new OpenAI({ + apiKey: OPEN_AI_API_KEY, + baseURL: this.client.baseURL, + defaultHeaders: defaultHeadersBuilder(this.client), + }); + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + const result = await OAIclient.images.generate(body, opts).withResponse(); + + return finalResponse(result); + } + + async edit( + _body: ImageEditParams, + params?: ApiClientInterface, + opts?: RequestOptions + ): Promise { + const body: ImageEditParams = _body; + if (params) { + const config = overrideConfig(this.client.config, params.config); + this.client.customHeaders = { + ...this.client.customHeaders, + ...createHeaders({ ...params, config }), + }; + } + + const OAIclient = new OpenAI({ + apiKey: OPEN_AI_API_KEY, + baseURL: this.client.baseURL, + defaultHeaders: defaultHeadersBuilder(this.client), + }); + + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + const result = await OAIclient.images.edit(body, opts).withResponse(); + + return finalResponse(result); + } + + async createVariation( + _body: ImageCreateVariationParams, + params?: ApiClientInterface, + opts?: RequestOptions + ): Promise { + const body: ImageCreateVariationParams = _body; + if (params) { + const config = overrideConfig(this.client.config, params.config); + this.client.customHeaders = { + ...this.client.customHeaders, + ...createHeaders({ ...params, config }), + }; + } + + const OAIclient = new OpenAI({ + apiKey: OPEN_AI_API_KEY, + baseURL: this.client.baseURL, + defaultHeaders: defaultHeadersBuilder(this.client), + }); + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + const result = await OAIclient.images.createVariation(body, opts).withResponse(); + + return finalResponse(result); + } +} diff --git a/src/apis/index.ts b/src/apis/index.ts index c0b4cf2..ab53264 100644 --- a/src/apis/index.ts +++ b/src/apis/index.ts @@ -5,4 +5,9 @@ export { Feedback } from "./feedback"; export { Generations, Prompt } from "./generations"; export { postMethod } from "./postMethod"; export { Embeddings } from "./embeddings"; +export { Images } from "./images"; +export { Assistants } from "./assistants"; +export { Threads } from "./threads"; +export { MainFiles } from "./files"; +export { Models } from "./models"; diff --git a/src/apis/models.ts b/src/apis/models.ts new file mode 100644 index 0000000..8831a52 --- /dev/null +++ b/src/apis/models.ts @@ -0,0 +1,80 @@ +import { ApiClientInterface } from "../_types/generalTypes"; +import { ApiResource } from "../apiResource"; +import { RequestOptions } from "../baseClient"; +import { OPEN_AI_API_KEY } from "../constants"; +import { defaultHeadersBuilder, finalResponse, overrideConfig } from "../utils"; +import { createHeaders } from "./createHeaders"; +import OpenAI from "openai"; + +export class Models extends ApiResource { + async list( + params?: ApiClientInterface, + opts?: RequestOptions + ): Promise { + if (params) { + const config = overrideConfig(this.client.config, params.config); + this.client.customHeaders = { + ...this.client.customHeaders, + ...createHeaders({ ...params, config }), + }; + } + + const OAIclient = new OpenAI({ + apiKey: OPEN_AI_API_KEY, + baseURL: this.client.baseURL, + defaultHeaders: defaultHeadersBuilder(this.client) + }); + + const result = await 
OAIclient.models.list(opts).withResponse(); + + return finalResponse(result); + } + + async retrieve( + model: string, + params?: ApiClientInterface, + opts?: RequestOptions + ): Promise { + if (params) { + const config = overrideConfig(this.client.config, params.config); + this.client.customHeaders = { + ...this.client.customHeaders, + ...createHeaders({ ...params, config }), + }; + } + + const OAIclient = new OpenAI({ + apiKey: OPEN_AI_API_KEY, + baseURL: this.client.baseURL, + defaultHeaders: defaultHeadersBuilder(this.client), + }); + + const result = await OAIclient.models.retrieve(model, opts).withResponse(); + + return finalResponse(result); + } + + async del( + model: string, + params?: ApiClientInterface, + opts?: RequestOptions + ): Promise { + if (params) { + const config = overrideConfig(this.client.config, params.config); + this.client.customHeaders = { + ...this.client.customHeaders, + ...createHeaders({ ...params, config }), + }; + } + + const OAIclient = new OpenAI({ + apiKey: OPEN_AI_API_KEY, + baseURL: this.client.baseURL, + defaultHeaders: defaultHeadersBuilder(this.client), + }); + + const result = await OAIclient.models.del(model, opts).withResponse(); + + return finalResponse(result); + } +} diff --git a/src/apis/threads.ts b/src/apis/threads.ts new file mode 100644 index 0000000..3e9d6ee --- /dev/null +++ b/src/apis/threads.ts @@ -0,0 +1,640 @@ +import { ApiClientInterface } from "../_types/generalTypes"; +import { ApiResource } from "../apiResource"; +import { RequestOptions } from "../baseClient"; +import { OPEN_AI_API_KEY } from "../constants"; +import { defaultHeadersBuilder, finalResponse, overrideConfig } from "../utils"; +import { createHeaders } from "./createHeaders"; +import OpenAI from "openai"; + + +export class Threads extends ApiResource { + + messages: Messages; + runs: Runs + + constructor(client:any) { + super(client); + this.messages = new Messages(client); + this.runs = new Runs(client); + } + + async create( + _body: ThreadCreateParams, + params?: ApiClientInterface, + opts?: RequestOptions + ): Promise { + const body: ThreadCreateParams = _body; + if (params) { + const config = overrideConfig(this.client.config, params.config); + this.client.customHeaders = { + ...this.client.customHeaders, + ...createHeaders({ ...params, config }), + }; + } + + const OAIclient = new OpenAI({ + apiKey: OPEN_AI_API_KEY, + baseURL: this.client.baseURL, + defaultHeaders: defaultHeadersBuilder(this.client), + }); + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + const result = await OAIclient.beta.threads.create(body, opts).withResponse(); + + return finalResponse(result); + } + + async retrieve( + threadId: string, + params?: ApiClientInterface, + opts?: RequestOptions + ): Promise { + if (params) { + const config = overrideConfig(this.client.config, params.config); + this.client.customHeaders = { + ...this.client.customHeaders, + ...createHeaders({ ...params, config }), + }; + } + + const OAIclient = new OpenAI({ + apiKey: OPEN_AI_API_KEY, + baseURL: this.client.baseURL, + defaultHeaders: defaultHeadersBuilder(this.client), + }); + + const result = await OAIclient.beta.threads.retrieve(threadId, opts).withResponse(); + + return finalResponse(result); + } + + async update( + threadId: string, + _body: ThreadUpdateParams, + params?: ApiClientInterface, + opts?: RequestOptions + ): Promise { + const body: ThreadUpdateParams = _body; + if (params) { + const config = overrideConfig(this.client.config, params.config); + 
this.client.customHeaders = { + ...this.client.customHeaders, + ...createHeaders({ ...params, config }), + }; + } + + const OAIclient = new OpenAI({ + apiKey: OPEN_AI_API_KEY, + baseURL: this.client.baseURL, + defaultHeaders: defaultHeadersBuilder(this.client), + }); + + const result = await OAIclient.beta.threads.update(threadId, body, opts).withResponse(); + + return finalResponse(result); + } + + async del( + threadId: string, + params?: ApiClientInterface, + opts?: RequestOptions + ): Promise { + if (params) { + const config = overrideConfig(this.client.config, params.config); + this.client.customHeaders = { + ...this.client.customHeaders, + ...createHeaders({ ...params, config }), + }; + } + + const OAIclient = new OpenAI({ + apiKey: OPEN_AI_API_KEY, + baseURL: this.client.baseURL, + defaultHeaders: defaultHeadersBuilder(this.client), + }); + + const result = await OAIclient.beta.threads.del(threadId, opts).withResponse(); + + return finalResponse(result); + } + + async createAndRun( + _body: ThreadCreateAndRunParams, + params?: ApiClientInterface, + opts?: RequestOptions + ): Promise { + const body: ThreadCreateAndRunParams = _body; + if (params) { + const config = overrideConfig(this.client.config, params.config); + this.client.customHeaders = { + ...this.client.customHeaders, + ...createHeaders({ ...params, config }), + }; + } + + const OAIclient = new OpenAI({ + apiKey: OPEN_AI_API_KEY, + baseURL: this.client.baseURL, + defaultHeaders: defaultHeadersBuilder(this.client), + }); + + const result = await OAIclient.beta.threads.createAndRun(body, opts).withResponse(); + + return finalResponse(result); + } + +} + + +export class Messages extends ApiResource{ + + files: Files; + + constructor(client:any) { + super(client); + this.files = new Files(client); + } + + async create( + threadId: string, + _body: MessageCreateParams, + params?: ApiClientInterface, + opts?: RequestOptions + ): Promise { + const body: MessageCreateParams = _body; + if (params) { + const config = overrideConfig(this.client.config, params.config); + this.client.customHeaders = { + ...this.client.customHeaders, + ...createHeaders({ ...params, config }), + }; + } + + const OAIclient = new OpenAI({ + apiKey: OPEN_AI_API_KEY, + baseURL: this.client.baseURL, + defaultHeaders: defaultHeadersBuilder(this.client), + }); + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + const result = await OAIclient.beta.threads.messages.create(threadId, body, opts).withResponse(); + + return finalResponse(result); + } + + async list( + threadId: string, + _query?: MessageListParams, + params?: ApiClientInterface, + opts?: RequestOptions + ): Promise { + const query: MessageListParams | undefined = _query; + if (params) { + const config = overrideConfig(this.client.config, params.config); + this.client.customHeaders = { + ...this.client.customHeaders, + ...createHeaders({ ...params, config }), + }; + } + + const OAIclient = new OpenAI({ + apiKey: OPEN_AI_API_KEY, + baseURL: this.client.baseURL, + defaultHeaders: defaultHeadersBuilder(this.client), + }); + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + const result = await OAIclient.beta.threads.messages.list(threadId, query, opts).withResponse(); + + return finalResponse(result); + } + + async retrieve( + threadId: string, + messageId: string, + params?: ApiClientInterface, + opts?: RequestOptions + ): Promise { + if (params) { + const config = overrideConfig(this.client.config, params.config); + this.client.customHeaders = 
{ + ...this.client.customHeaders, + ...createHeaders({ ...params, config }), + }; + } + + const OAIclient = new OpenAI({ + apiKey: OPEN_AI_API_KEY, + baseURL: this.client.baseURL, + defaultHeaders: defaultHeadersBuilder(this.client), + }); + + const result = await OAIclient.beta.threads.messages.retrieve(threadId, messageId, opts).withResponse(); + + return finalResponse(result); + + } + + async update( + threadId: string, + messageId: string, + _body: MessageUpdateParams, + params?: ApiClientInterface, + opts?: RequestOptions + ): Promise { + const body: MessageUpdateParams = _body; + if (params) { + const config = overrideConfig(this.client.config, params.config); + this.client.customHeaders = { + ...this.client.customHeaders, + ...createHeaders({ ...params, config }), + }; + } + + const OAIclient = new OpenAI({ + apiKey: OPEN_AI_API_KEY, + baseURL: this.client.baseURL, + defaultHeaders: defaultHeadersBuilder(this.client), + }); + + const result = await OAIclient.beta.threads.messages.update(threadId, messageId, body, opts).withResponse(); + + return finalResponse(result); + } + + +} + +export class Files extends ApiResource{ + + async list( + threadId: string, + messageId: string, + _query?: FileListParams, + params?: ApiClientInterface, + opts?: RequestOptions + ): Promise { + const query: FileListParams | undefined = _query; + if (params) { + const config = overrideConfig(this.client.config, params.config); + this.client.customHeaders = { + ...this.client.customHeaders, + ...createHeaders({ ...params, config }), + }; + } + + const OAIclient = new OpenAI({ + apiKey: OPEN_AI_API_KEY, + baseURL: this.client.baseURL, + defaultHeaders: defaultHeadersBuilder(this.client), + }); + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + const result = await OAIclient.beta.threads.messages.files.list(threadId, messageId, query, opts).withResponse(); + + return finalResponse(result); + } + + async retrieve( + threadId: string, + messageId: string, + fileId: string, + params?: ApiClientInterface, + opts?: RequestOptions + ): Promise { + if (params) { + const config = overrideConfig(this.client.config, params.config); + this.client.customHeaders = { + ...this.client.customHeaders, + ...createHeaders({ ...params, config }), + }; + } + + const OAIclient = new OpenAI({ + apiKey: OPEN_AI_API_KEY, + baseURL: this.client.baseURL, + defaultHeaders: defaultHeadersBuilder(this.client), + }); + + const result = await OAIclient.beta.threads.messages.files.retrieve(threadId, messageId, fileId, opts).withResponse(); + + return finalResponse(result); + } + +} + + +export class Runs extends ApiResource{ + + steps: Steps; + + constructor(client:any) { + super(client); + this.steps = new Steps(client); + } + + async create( + threadId: string, + _body: RunCreateParams, + params?: ApiClientInterface, + opts?: RequestOptions + ): Promise { + const body: RunCreateParams = _body; + if (params) { + const config = overrideConfig(this.client.config, params.config); + this.client.customHeaders = { + ...this.client.customHeaders, + ...createHeaders({ ...params, config }), + }; + } + + const OAIclient = new OpenAI({ + apiKey: OPEN_AI_API_KEY, + baseURL: this.client.baseURL, + defaultHeaders: defaultHeadersBuilder(this.client), + }); + + const result = await OAIclient.beta.threads.runs.create(threadId, body, opts).withResponse(); + + return finalResponse(result); + } + + async list( + threadId: string, + _query?: RunListParams, + params?: ApiClientInterface, + opts?: RequestOptions + ): Promise { + 
const query: RunListParams | undefined = _query; + if (params) { + const config = overrideConfig(this.client.config, params.config); + this.client.customHeaders = { + ...this.client.customHeaders, + ...createHeaders({ ...params, config }), + }; + } + + const OAIclient = new OpenAI({ + apiKey: OPEN_AI_API_KEY, + baseURL: this.client.baseURL, + defaultHeaders: defaultHeadersBuilder(this.client), + }); + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + const result = await OAIclient.beta.threads.runs.list(threadId, query, opts).withResponse(); + + return finalResponse(result); + } + + async retrieve( + threadId: string, + runId: string, + params?: ApiClientInterface, + opts?: RequestOptions + ): Promise { + if (params) { + const config = overrideConfig(this.client.config, params.config); + this.client.customHeaders = { + ...this.client.customHeaders, + ...createHeaders({ ...params, config }), + }; + } + + const OAIclient = new OpenAI({ + apiKey: OPEN_AI_API_KEY, + baseURL: this.client.baseURL, + defaultHeaders: defaultHeadersBuilder(this.client), + }); + + const result = await OAIclient.beta.threads.runs.retrieve(threadId, runId, opts).withResponse(); + + return finalResponse(result); + } + + async update( + threadId: string, + runId: string, + _body: RunUpdateParams, + params?: ApiClientInterface, + opts?: RequestOptions + ): Promise { + const body: RunUpdateParams = _body; + if (params) { + const config = overrideConfig(this.client.config, params.config); + this.client.customHeaders = { + ...this.client.customHeaders, + ...createHeaders({ ...params, config }), + }; + } + + const OAIclient = new OpenAI({ + apiKey: OPEN_AI_API_KEY, + baseURL: this.client.baseURL, + defaultHeaders: defaultHeadersBuilder(this.client), + }); + + const result = await OAIclient.beta.threads.runs.update(threadId, runId, body, opts).withResponse(); + + return finalResponse(result); + } + + async submitToolOutputs( + threadId: string, + runId: string, + _body: RunSubmitToolOutputsParams, + params?: ApiClientInterface, + opts?: RequestOptions + ): Promise { + const body: RunSubmitToolOutputsParams = _body; + if (params) { + const config = overrideConfig(this.client.config, params.config); + this.client.customHeaders = { + ...this.client.customHeaders, + ...createHeaders({ ...params, config }), + }; + } + + const OAIclient = new OpenAI({ + apiKey: OPEN_AI_API_KEY, + baseURL: this.client.baseURL, + defaultHeaders: defaultHeadersBuilder(this.client), + }); + + const result = await OAIclient.beta.threads.runs.submitToolOutputs(threadId, runId, body, opts).withResponse(); + + return finalResponse(result); + } + + async cancel( + threadId: string, + runId: string, + params?: ApiClientInterface, + opts?: RequestOptions + ): Promise { + if (params) { + const config = overrideConfig(this.client.config, params.config); + this.client.customHeaders = { + ...this.client.customHeaders, + ...createHeaders({ ...params, config }), + }; + } + + const OAIclient = new OpenAI({ + apiKey: OPEN_AI_API_KEY, + baseURL: this.client.baseURL, + defaultHeaders: defaultHeadersBuilder(this.client), + }); + + const result = await OAIclient.beta.threads.runs.cancel(threadId, runId, opts).withResponse(); + + return finalResponse(result); + } + +} + +export class Steps extends ApiResource{ + + async list( + threadId: string, + runId: string, + _query?: StepListParams, + params?: ApiClientInterface, + opts?: RequestOptions + ): Promise { + const query: StepListParams | undefined = _query; + if (params) { + const config = 
overrideConfig(this.client.config, params.config); + this.client.customHeaders = { + ...this.client.customHeaders, + ...createHeaders({ ...params, config }), + }; + } + + const OAIclient = new OpenAI({ + apiKey: OPEN_AI_API_KEY, + baseURL: this.client.baseURL, + defaultHeaders: defaultHeadersBuilder(this.client), + }); + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + const result = await OAIclient.beta.threads.runs.steps.list(threadId, runId, query, opts).withResponse(); + + return finalResponse(result); + } + + async retrieve( + threadId: string, + runId: string, + stepId: string, + params?: ApiClientInterface, + opts?: RequestOptions + ): Promise<any> { + if (params) { + const config = overrideConfig(this.client.config, params.config); + this.client.customHeaders = { + ...this.client.customHeaders, + ...createHeaders({ ...params, config }), + }; + } + + const OAIclient = new OpenAI({ + apiKey: OPEN_AI_API_KEY, + baseURL: this.client.baseURL, + defaultHeaders: defaultHeadersBuilder(this.client), + }); + + const result = await OAIclient.beta.threads.runs.steps.retrieve(threadId, runId, stepId, opts).withResponse(); + + return finalResponse(result); + } + +} + + + +export interface ThreadCreateParams { + messages?: Array<Message>; + metadata?: unknown | null; +} + + +export interface Message { + content: string; + role: string; + file_ids?: Array<string>; + metadata?: unknown | null; +} + + +export interface ThreadUpdateParams { + metadata?: unknown | null; +} + +export interface MessageCreateParams { + content: string; + role: string; + file_ids?: Array<string>; + metadata?: unknown | null; +} + +export interface MessageListParams extends CursorPageParams { + order?: string; +} + +export interface CursorPageParams { + after?: string; + + limit?: number; +} + +export interface FileListParams extends CursorPageParams { + before?: string; + order?: string; +} + +export interface MessageUpdateParams { + metadata?: unknown | null; +} + +export interface RunCreateParams { + assistant_id: string; + additional_instructions?: string | null; + instructions?: string | null; + metadata?: unknown | null; + model?: string | null; + tools?: Array<any> | null; +} + +export interface ThreadCreateAndRunParams { + + assistant_id: string; + instructions?: string | null; + metadata?: unknown | null; + model?: string | null; + thread?: any; + tools?: Array<any> | null; +} + +export interface RunListParams extends CursorPageParams { + before?: string; + order?: string; +} + +export interface StepListParams extends CursorPageParams { + before?: string; + order?: string; +} + +export interface RunUpdateParams { + metadata?: unknown | null; +} + +export interface RunSubmitToolOutputsParams { + tool_outputs: Array<ToolOutput>; +} + + +export interface ToolOutput { + output?: string; + tool_call_id?: string; +} diff --git a/src/baseClient.ts b/src/baseClient.ts index 5985e6a..db692b2 100644 --- a/src/baseClient.ts +++ b/src/baseClient.ts @@ -117,12 +117,14 @@ export abstract class ApiClient { baseURL: string; customHeaders: Record<string, string> responseHeaders: Record<string, string> + portkeyHeaders: Record<string, string> private fetch: Fetch; constructor({ apiKey, baseURL, config, virtualKey, traceID, metadata, provider, Authorization, cacheForceRefresh }: ApiClientInterface) { this.apiKey = apiKey ?? ""; this.baseURL = baseURL ?? 
""; this.customHeaders = createHeaders({ apiKey, config, virtualKey, traceID, metadata, provider, Authorization, cacheForceRefresh }) + this.portkeyHeaders = this.defaultHeaders() this.fetch = fetch; this.responseHeaders = {} } diff --git a/src/client.ts b/src/client.ts index 1d03631..01bc499 100644 --- a/src/client.ts +++ b/src/client.ts @@ -1,5 +1,4 @@ import { ApiClientInterface } from "./_types/generalTypes"; -import * as Types from "./_types/portkeyConstructs"; import * as API from "./apis"; import { PostBodyParams } from "./apis/postMethod"; import { ApiClient, RequestOptions } from "./baseClient"; @@ -37,8 +36,9 @@ export class Portkey extends ApiClient { traceID, metadata, Authorization, - cacheForceRefresh + cacheForceRefresh, }); + this.apiKey = apiKey; if (!this.apiKey) { throw castToError(MISSING_API_KEY_ERROR_MESSAGE) @@ -56,6 +56,17 @@ export class Portkey extends ApiClient { chat = new API.Chat(this); generations = new API.Generations(this); prompts = new API.Prompt(this); + feedback = new API.Feedback(this); + embeddings = new API.Embeddings(this); + images = new API.Images(this); + files = new API.MainFiles(this); + models = new API.Models(this); + beta = { + assistants: new API.Assistants(this), + threads: new API.Threads(this) + }; + + post = ( url: string, _body: PostBodyParams, @@ -64,6 +75,5 @@ export class Portkey extends ApiClient { ) => { return new API.postMethod(this).create(url, _body, params, opts) }; - feedback = new API.Feedback(this); - embeddings = new API.Embeddings(this); + } \ No newline at end of file diff --git a/src/constants.ts b/src/constants.ts index 9fb6f24..7d0aed1 100644 --- a/src/constants.ts +++ b/src/constants.ts @@ -31,6 +31,8 @@ export const PORTKEY_GATEWAY_URL = PORTKEY_BASE_URL export const PORTKEY_API_KEY_ENV = "PORTKEY_API_KEY" export const PORTKEY_PROXY_ENV = "PORTKEY_PROXY" +export const OPEN_AI_API_KEY = "DUMMY-KEY" + // API routes export const CHAT_COMPLETE_API = "/chat/completions" export const TEXT_COMPLETE_API = "/completions" diff --git a/src/utils.ts b/src/utils.ts index 270f77c..8ce3e9b 100644 --- a/src/utils.ts +++ b/src/utils.ts @@ -1,4 +1,5 @@ import { PORTKEY_HEADER_PREFIX } from "./constants"; +import { createResponseHeaders } from "./streaming"; type PlatformProperties = { "x-portkey-runtime"?: string, @@ -81,4 +82,40 @@ export const parseBody = (data: Record | undefined | null): Rec parsedData[k] = v } return parsedData +} + +export function finalResponse(response:any) { + + const headers = portkeyHeaders(response.response.headers); + const json = { + ...response.data?.body || response.data, + getHeaders: () => headers + } + return json +} + +export function portkeyHeaders(headers:any) { + + const parsedHeaders = createResponseHeaders(headers); + const prefix = PORTKEY_HEADER_PREFIX + const filteredHeaders = Object.entries(parsedHeaders) + .filter(([key, _]) => key.startsWith(prefix)) + .map(([key, value]) => [key.replace(prefix, ''), value]) + + return Object.fromEntries(filteredHeaders) +} + +export function defaultHeadersBuilder(client: any){ + + const customHeaders = client.customHeaders + const portkeyHeaders = client.portkeyHeaders + + // Logic to add Bearer only if it is not present. 
+ // Else it would be added every time a request is made + if (Object.prototype.hasOwnProperty.call(customHeaders, "authorization") && !customHeaders["authorization"].startsWith("Bearer")){ + client.customHeaders["authorization"] = + "Bearer " + client.customHeaders["authorization"]; + } + + return {...customHeaders, ...portkeyHeaders} } \ No newline at end of file diff --git a/tests/assistants/openai.test.ts b/tests/assistants/openai.test.ts new file mode 100644 index 0000000..c59805f --- /dev/null +++ b/tests/assistants/openai.test.ts @@ -0,0 +1,24 @@ +import { config } from 'dotenv'; +import { Portkey } from 'portkey-ai'; + +config({ override: true }) +const client = new Portkey({ + apiKey: process.env["PORTKEY_API_KEY"] ?? "", + virtualKey: process.env["OPENAI_VIRTUAL_KEY"] ?? "" +}); + +describe('OpenAI Assistants APIs', () => { + test('assistant: create: documentation', async () => { + const myAssistant = await client.beta.assistants.create({ + instructions: + "You are a personal math tutor. When asked a question, write and run Python code to answer the question.", + name: "Math Tutor", + tools: [{ type: "code_interpreter" }], + model: "gpt-4", + }); + expect(myAssistant).toBeDefined(); + expect(myAssistant.tools).toBeDefined(); + expect(myAssistant.tools.length).toBeGreaterThan(0); + }); + +}); \ No newline at end of file diff --git a/tests/chat/anthropic.test.ts b/tests/chat/anthropic.test.ts index b8584dd..7347226 --- a/tests/chat/anthropic.test.ts +++ b/tests/chat/anthropic.test.ts @@ -4,101 +4,49 @@ import { Portkey } from 'portkey-ai'; config({ override: true }) const client = new Portkey({ apiKey: process.env["PORTKEY_API_KEY"] ?? "", - baseURL: "https://api.portkeydev.com/v1", - provider: "openai", virtualKey: process.env["ANTHROPIC_VIRTUAL_KEY"] ?? 
"" }); describe('Anthropic ChatCompletions APIs', () => { - test('model: claude-instant-1.2', async () => { - const completion = await client.chat.completions.create({ model: 'claude-instant-1.2', messages: [{ "role": "user", "content": "Say this is a test" }], max_tokens: 275 }); - expect(completion).toBeDefined(); - expect(completion.choices).toBeDefined(); - expect(completion.choices.length).toBeGreaterThan(0); - }); - - test('model: claude-1', async () => { - const completion = await client.chat.completions.create({ model: 'claude-1', messages: [{ "role": "user", "content": "Say this is a test" }], max_tokens: 275 }); - expect(completion).toBeDefined(); - expect(completion.choices).toBeDefined(); - expect(completion.choices.length).toBeGreaterThan(0); - }); - - test('model: claude-1-100k', async () => { - const completion = await client.chat.completions.create({ model: 'claude-1-100k', messages: [{ "role": "user", "content": "Say this is a test" }], max_tokens: 275 }); - expect(completion).toBeDefined(); - expect(completion.choices).toBeDefined(); - expect(completion.choices.length).toBeGreaterThan(0); - }); - - test('model: claude-instant-1', async () => { - const completion = await client.chat.completions.create({ model: 'claude-instant-1', messages: [{ "role": "user", "content": "Say this is a test" }], max_tokens: 275 }); - expect(completion).toBeDefined(); - expect(completion.choices).toBeDefined(); - expect(completion.choices.length).toBeGreaterThan(0); - }); - - test('model: claude-instant-1-100k', async () => { - const completion = await client.chat.completions.create({ model: 'claude-instant-1-100k', messages: [{ "role": "user", "content": "Say this is a test" }], max_tokens: 275 }); - expect(completion).toBeDefined(); - expect(completion.choices).toBeDefined(); - expect(completion.choices.length).toBeGreaterThan(0); - }); - - test('model: claude-1.3', async () => { - const completion = await client.chat.completions.create({ model: 'claude-1.3', messages: [{ "role": "user", "content": "Say this is a test" }], max_tokens: 275 }); - expect(completion).toBeDefined(); - expect(completion.choices).toBeDefined(); - expect(completion.choices.length).toBeGreaterThan(0); - }); - - test('model: claude-1.3-100k', async () => { - const completion = await client.chat.completions.create({ model: 'claude-1.3-100k', messages: [{ "role": "user", "content": "Say this is a test" }], max_tokens: 275 }); - expect(completion).toBeDefined(); - expect(completion.choices).toBeDefined(); - expect(completion.choices.length).toBeGreaterThan(0); - }); - - test('model: claude-1.2', async () => { - const completion = await client.chat.completions.create({ model: 'claude-1.2', messages: [{ "role": "user", "content": "Say this is a test" }], max_tokens: 275 }); + test('model: claude-3-opus-20240229', async () => { + const completion = await client.chat.completions.create({ model: 'claude-3-opus-20240229', messages: [{ "role": "user", "content": "Say this is a test" }], max_tokens: 275 }); expect(completion).toBeDefined(); expect(completion.choices).toBeDefined(); expect(completion.choices.length).toBeGreaterThan(0); }); - test('model: claude-1.0', async () => { - const completion = await client.chat.completions.create({ model: 'claude-1.0', messages: [{ "role": "user", "content": "Say this is a test" }], max_tokens: 275 }); + test('model: claude-3-sonnet-20240229', async () => { + const completion = await client.chat.completions.create({ model: 'claude-3-sonnet-20240229', messages: [{ "role": "user", 
"content": "Say this is a test" }], max_tokens: 275 }); expect(completion).toBeDefined(); expect(completion.choices).toBeDefined(); expect(completion.choices.length).toBeGreaterThan(0); }); - test('model: claude-instant-1.1', async () => { - const completion = await client.chat.completions.create({ model: 'claude-instant-1.1', messages: [{ "role": "user", "content": "Say this is a test" }], max_tokens: 275 }); + test('model: claude-3-haiku-20240307', async () => { + const completion = await client.chat.completions.create({ model: 'claude-3-haiku-20240307', messages: [{ "role": "user", "content": "Say this is a test" }], max_tokens: 275 }); expect(completion).toBeDefined(); expect(completion.choices).toBeDefined(); expect(completion.choices.length).toBeGreaterThan(0); }); - test('model: claude-instant-1.1-100k', async () => { - const completion = await client.chat.completions.create({ model: 'claude-instant-1.1-100k', messages: [{ "role": "user", "content": "Say this is a test" }], max_tokens: 275 }); + test('model: claude-2.1', async () => { + const completion = await client.chat.completions.create({ model: 'claude-2.1', messages: [{ "role": "user", "content": "Say this is a test" }], max_tokens: 275 }); expect(completion).toBeDefined(); expect(completion.choices).toBeDefined(); expect(completion.choices.length).toBeGreaterThan(0); }); - test('model: claude-instant-1.0', async () => { - const completion = await client.chat.completions.create({ model: 'claude-instant-1.0', messages: [{ "role": "user", "content": "Say this is a test" }], max_tokens: 275 }); + test('model: claude-2.0', async () => { + const completion = await client.chat.completions.create({ model: 'claude-2.0', messages: [{ "role": "user", "content": "Say this is a test" }], max_tokens: 275 }); expect(completion).toBeDefined(); expect(completion.choices).toBeDefined(); expect(completion.choices.length).toBeGreaterThan(0); }); - test('model: claude-2', async () => { - const completion = await client.chat.completions.create({ model: 'claude-2', messages: [{ "role": "user", "content": "Say this is a test" }], max_tokens: 275 }); + test('model: claude-instant-1.2', async () => { + const completion = await client.chat.completions.create({ model: 'claude-instant-1.2', messages: [{ "role": "user", "content": "Say this is a test" }], max_tokens: 275 }); expect(completion).toBeDefined(); expect(completion.choices).toBeDefined(); expect(completion.choices.length).toBeGreaterThan(0); }); - }); \ No newline at end of file diff --git a/tests/chat/anyscale.test.ts b/tests/chat/anyscale.test.ts index 2883a3b..3ba056e 100644 --- a/tests/chat/anyscale.test.ts +++ b/tests/chat/anyscale.test.ts @@ -4,8 +4,6 @@ import { Portkey } from 'portkey-ai'; config({ override: true }) const client = new Portkey({ apiKey: process.env["PORTKEY_API_KEY"] ?? "", - baseURL: "https://api.portkeydev.com/v1", - provider: "openai", virtualKey: process.env["ANYSCALE_VIRTUAL_KEY"] ?? "" }); diff --git a/tests/chat/openai.test.ts b/tests/chat/openai.test.ts index 7063ff2..20a7120 100644 --- a/tests/chat/openai.test.ts +++ b/tests/chat/openai.test.ts @@ -4,49 +4,40 @@ import { Portkey } from 'portkey-ai'; config({ override: true }) const client = new Portkey({ apiKey: process.env["PORTKEY_API_KEY"] ?? "", - baseURL: "https://api.portkeydev.com/v1", - provider: "openai", virtualKey: process.env["OPENAI_VIRTUAL_KEY"] ?? 
"" }); describe('Openai ChatCompletions APIs', () => { - test('model: gpt-4-32k-0613', async () => { - const completion = await client.chat.completions.create({ model: 'gpt-4-32k-0613', messages: [{ "role": "user", "content": "Say this is a test" }] }); + test('model: gpt-4-0125-preview', async () => { + const completion = await client.chat.completions.create({ model: 'gpt-4-0125-preview', messages: [{ "role": "user", "content": "Say this is a test" }] }); expect(completion).toBeDefined(); expect(completion.choices).toBeDefined(); expect(completion.choices.length).toBeGreaterThan(0); }); - test('model: gpt-4-1106-preview', async () => { - const completion = await client.chat.completions.create({ model: 'gpt-4-1106-preview', messages: [{ "role": "user", "content": "Say this is a test" }] }); - expect(completion).toBeDefined(); - expect(completion.choices).toBeDefined(); - expect(completion.choices.length).toBeGreaterThan(0); - }); - - test('model: gpt-4', async () => { - const completion = await client.chat.completions.create({ model: 'gpt-4', messages: [{ "role": "user", "content": "Say this is a test" }] }); + test('model: gpt-4-turbo-preview', async () => { + const completion = await client.chat.completions.create({ model: 'gpt-4-turbo-preview', messages: [{ "role": "user", "content": "Say this is a test" }] }); expect(completion).toBeDefined(); expect(completion.choices).toBeDefined(); expect(completion.choices.length).toBeGreaterThan(0); }); - test('model: gpt-4-0314', async () => { - const completion = await client.chat.completions.create({ model: 'gpt-4-0314', messages: [{ "role": "user", "content": "Say this is a test" }] }); + test('model: gpt-4-1106-preview', async () => { + const completion = await client.chat.completions.create({ model: 'gpt-4-1106-preview', messages: [{ "role": "user", "content": "Say this is a test" }] }); expect(completion).toBeDefined(); expect(completion.choices).toBeDefined(); expect(completion.choices.length).toBeGreaterThan(0); }); - test('model: gpt-4-32k', async () => { - const completion = await client.chat.completions.create({ model: 'gpt-4-32k', messages: [{ "role": "user", "content": "Say this is a test" }] }); + test('model: gpt-4-vision-preview', async () => { + const completion = await client.chat.completions.create({ model: 'gpt-4-vision-preview', messages: [{ "role": "user", "content": "Say this is a test" }] }); expect(completion).toBeDefined(); expect(completion.choices).toBeDefined(); expect(completion.choices.length).toBeGreaterThan(0); }); - test('model: gpt-4-32k-0314', async () => { - const completion = await client.chat.completions.create({ model: 'gpt-4-32k-0314', messages: [{ "role": "user", "content": "Say this is a test" }] }); + test('model: gpt-4', async () => { + const completion = await client.chat.completions.create({ model: 'gpt-4', messages: [{ "role": "user", "content": "Say this is a test" }] }); expect(completion).toBeDefined(); expect(completion.choices).toBeDefined(); expect(completion.choices.length).toBeGreaterThan(0); @@ -59,15 +50,15 @@ describe('Openai ChatCompletions APIs', () => { expect(completion.choices.length).toBeGreaterThan(0); }); - test('model: gpt-3.5-turbo-0613', async () => { - const completion = await client.chat.completions.create({ model: 'gpt-3.5-turbo-0613', messages: [{ "role": "user", "content": "Say this is a test" }] }); + test('model: gpt-3.5-turbo', async () => { + const completion = await client.chat.completions.create({ model: 'gpt-3.5-turbo', messages: [{ "role": "user", "content": "Say 
diff --git a/tests/chat/openai.test.ts b/tests/chat/openai.test.ts
index 7063ff2..20a7120 100644
--- a/tests/chat/openai.test.ts
+++ b/tests/chat/openai.test.ts
@@ -4,49 +4,40 @@ import { Portkey } from 'portkey-ai';
 config({ override: true })
 const client = new Portkey({
     apiKey: process.env["PORTKEY_API_KEY"] ?? "",
-    baseURL: "https://api.portkeydev.com/v1",
-    provider: "openai",
     virtualKey: process.env["OPENAI_VIRTUAL_KEY"] ?? ""
 });
 
 describe('Openai ChatCompletions APIs', () => {
-    test('model: gpt-4-32k-0613', async () => {
-        const completion = await client.chat.completions.create({ model: 'gpt-4-32k-0613', messages: [{ "role": "user", "content": "Say this is a test" }] });
+    test('model: gpt-4-0125-preview', async () => {
+        const completion = await client.chat.completions.create({ model: 'gpt-4-0125-preview', messages: [{ "role": "user", "content": "Say this is a test" }] });
         expect(completion).toBeDefined();
         expect(completion.choices).toBeDefined();
         expect(completion.choices.length).toBeGreaterThan(0);
     });
 
-    test('model: gpt-4-1106-preview', async () => {
-        const completion = await client.chat.completions.create({ model: 'gpt-4-1106-preview', messages: [{ "role": "user", "content": "Say this is a test" }] });
-        expect(completion).toBeDefined();
-        expect(completion.choices).toBeDefined();
-        expect(completion.choices.length).toBeGreaterThan(0);
-    });
-
-    test('model: gpt-4', async () => {
-        const completion = await client.chat.completions.create({ model: 'gpt-4', messages: [{ "role": "user", "content": "Say this is a test" }] });
+    test('model: gpt-4-turbo-preview', async () => {
+        const completion = await client.chat.completions.create({ model: 'gpt-4-turbo-preview', messages: [{ "role": "user", "content": "Say this is a test" }] });
         expect(completion).toBeDefined();
         expect(completion.choices).toBeDefined();
         expect(completion.choices.length).toBeGreaterThan(0);
     });
 
-    test('model: gpt-4-0314', async () => {
-        const completion = await client.chat.completions.create({ model: 'gpt-4-0314', messages: [{ "role": "user", "content": "Say this is a test" }] });
+    test('model: gpt-4-1106-preview', async () => {
+        const completion = await client.chat.completions.create({ model: 'gpt-4-1106-preview', messages: [{ "role": "user", "content": "Say this is a test" }] });
         expect(completion).toBeDefined();
         expect(completion.choices).toBeDefined();
         expect(completion.choices.length).toBeGreaterThan(0);
     });
 
-    test('model: gpt-4-32k', async () => {
-        const completion = await client.chat.completions.create({ model: 'gpt-4-32k', messages: [{ "role": "user", "content": "Say this is a test" }] });
+    test('model: gpt-4-vision-preview', async () => {
+        const completion = await client.chat.completions.create({ model: 'gpt-4-vision-preview', messages: [{ "role": "user", "content": "Say this is a test" }] });
         expect(completion).toBeDefined();
         expect(completion.choices).toBeDefined();
         expect(completion.choices.length).toBeGreaterThan(0);
     });
 
-    test('model: gpt-4-32k-0314', async () => {
-        const completion = await client.chat.completions.create({ model: 'gpt-4-32k-0314', messages: [{ "role": "user", "content": "Say this is a test" }] });
+    test('model: gpt-4', async () => {
+        const completion = await client.chat.completions.create({ model: 'gpt-4', messages: [{ "role": "user", "content": "Say this is a test" }] });
         expect(completion).toBeDefined();
         expect(completion.choices).toBeDefined();
         expect(completion.choices.length).toBeGreaterThan(0);
@@ -59,15 +50,15 @@ describe('Openai ChatCompletions APIs', () => {
         expect(completion.choices.length).toBeGreaterThan(0);
     });
 
-    test('model: gpt-3.5-turbo-0613', async () => {
-        const completion = await client.chat.completions.create({ model: 'gpt-3.5-turbo-0613', messages: [{ "role": "user", "content": "Say this is a test" }] });
+    test('model: gpt-3.5-turbo', async () => {
+        const completion = await client.chat.completions.create({ model: 'gpt-3.5-turbo', messages: [{ "role": "user", "content": "Say this is a test" }] });
         expect(completion).toBeDefined();
         expect(completion.choices).toBeDefined();
         expect(completion.choices.length).toBeGreaterThan(0);
     });
 
-    test('model: gpt-3.5-turbo-0301', async () => {
-        const completion = await client.chat.completions.create({ model: 'gpt-3.5-turbo-0301', messages: [{ "role": "user", "content": "Say this is a test" }] });
+    test('model: gpt-3.5-turbo-0125', async () => {
+        const completion = await client.chat.completions.create({ model: 'gpt-3.5-turbo-0125', messages: [{ "role": "user", "content": "Say this is a test" }] });
         expect(completion).toBeDefined();
         expect(completion.choices).toBeDefined();
         expect(completion.choices.length).toBeGreaterThan(0);
@@ -79,19 +70,4 @@ describe('Openai ChatCompletions APIs', () => {
         expect(completion.choices).toBeDefined();
         expect(completion.choices.length).toBeGreaterThan(0);
     });
-
-    test('model: gpt-3.5-turbo-16k', async () => {
-        const completion = await client.chat.completions.create({ model: 'gpt-3.5-turbo-16k', messages: [{ "role": "user", "content": "Say this is a test" }] });
-        expect(completion).toBeDefined();
-        expect(completion.choices).toBeDefined();
-        expect(completion.choices.length).toBeGreaterThan(0);
-    });
-
-    test('model: gpt-3.5-turbo', async () => {
-        const completion = await client.chat.completions.create({ model: 'gpt-3.5-turbo', messages: [{ "role": "user", "content": "Say this is a test" }] });
-        expect(completion).toBeDefined();
-        expect(completion.choices).toBeDefined();
-        expect(completion.choices.length).toBeGreaterThan(0);
-    });
-
 });
\ No newline at end of file
"" }); describe('Anthropic ChatCompletions APIs', () => { - test('model: claude-instant-1.2', async () => { - const completion = await client.chat.completions.create({ model: 'claude-instant-1.2', messages: [{ "role": "user", "content": "Say this is a test" }], max_tokens: 275 }); - expect(completion).toBeDefined(); - expect(completion.choices).toBeDefined(); - expect(completion.choices.length).toBeGreaterThan(0); - }); - - test('model: claude-1', async () => { - const completion = await client.chat.completions.create({ model: 'claude-1', messages: [{ "role": "user", "content": "Say this is a test" }], max_tokens: 275 }); - expect(completion).toBeDefined(); - expect(completion.choices).toBeDefined(); - expect(completion.choices.length).toBeGreaterThan(0); - }); - - test('model: claude-1-100k', async () => { - const completion = await client.chat.completions.create({ model: 'claude-1-100k', messages: [{ "role": "user", "content": "Say this is a test" }], max_tokens: 275 }); - expect(completion).toBeDefined(); - expect(completion.choices).toBeDefined(); - expect(completion.choices.length).toBeGreaterThan(0); - }); - - test('model: claude-instant-1', async () => { - const completion = await client.chat.completions.create({ model: 'claude-instant-1', messages: [{ "role": "user", "content": "Say this is a test" }], max_tokens: 275 }); - expect(completion).toBeDefined(); - expect(completion.choices).toBeDefined(); - expect(completion.choices.length).toBeGreaterThan(0); - }); - - test('model: claude-instant-1-100k', async () => { - const completion = await client.chat.completions.create({ model: 'claude-instant-1-100k', messages: [{ "role": "user", "content": "Say this is a test" }], max_tokens: 275 }); + test('model: claude-2.1', async () => { + const completion = await client.chat.completions.create({ model: 'claude-2.1', messages: [{ "role": "user", "content": "Say this is a test" }], max_tokens: 275 }); expect(completion).toBeDefined(); expect(completion.choices).toBeDefined(); expect(completion.choices.length).toBeGreaterThan(0); }); - test('model: claude-1.3', async () => { - const completion = await client.chat.completions.create({ model: 'claude-1.3', messages: [{ "role": "user", "content": "Say this is a test" }], max_tokens: 275 }); + test('model: claude-2.0', async () => { + const completion = await client.chat.completions.create({ model: 'claude-2.0', messages: [{ "role": "user", "content": "Say this is a test" }], max_tokens: 275 }); expect(completion).toBeDefined(); expect(completion.choices).toBeDefined(); expect(completion.choices.length).toBeGreaterThan(0); }); - test('model: claude-1.3-100k', async () => { - const completion = await client.chat.completions.create({ model: 'claude-1.3-100k', messages: [{ "role": "user", "content": "Say this is a test" }], max_tokens: 275 }); - expect(completion).toBeDefined(); - expect(completion.choices).toBeDefined(); - expect(completion.choices.length).toBeGreaterThan(0); - }); - - test('model: claude-1.2', async () => { - const completion = await client.chat.completions.create({ model: 'claude-1.2', messages: [{ "role": "user", "content": "Say this is a test" }], max_tokens: 275 }); - expect(completion).toBeDefined(); - expect(completion.choices).toBeDefined(); - expect(completion.choices.length).toBeGreaterThan(0); - }); - - test('model: claude-1.0', async () => { - const completion = await client.chat.completions.create({ model: 'claude-1.0', messages: [{ "role": "user", "content": "Say this is a test" }], max_tokens: 275 }); - 
-        expect(completion).toBeDefined();
-        expect(completion.choices).toBeDefined();
-        expect(completion.choices.length).toBeGreaterThan(0);
-    });
-
-    test('model: claude-instant-1.1', async () => {
-        const completion = await client.chat.completions.create({ model: 'claude-instant-1.1', messages: [{ "role": "user", "content": "Say this is a test" }], max_tokens: 275 });
-        expect(completion).toBeDefined();
-        expect(completion.choices).toBeDefined();
-        expect(completion.choices.length).toBeGreaterThan(0);
-    });
-
-    test('model: claude-instant-1.1-100k', async () => {
-        const completion = await client.chat.completions.create({ model: 'claude-instant-1.1-100k', messages: [{ "role": "user", "content": "Say this is a test" }], max_tokens: 275 });
-        expect(completion).toBeDefined();
-        expect(completion.choices).toBeDefined();
-        expect(completion.choices.length).toBeGreaterThan(0);
-    });
-
-    test('model: claude-instant-1.0', async () => {
-        const completion = await client.chat.completions.create({ model: 'claude-instant-1.0', messages: [{ "role": "user", "content": "Say this is a test" }], max_tokens: 275 });
-        expect(completion).toBeDefined();
-        expect(completion.choices).toBeDefined();
-        expect(completion.choices.length).toBeGreaterThan(0);
-    });
-
-    test('model: claude-2', async () => {
-        const completion = await client.chat.completions.create({ model: 'claude-2', messages: [{ "role": "user", "content": "Say this is a test" }], max_tokens: 275 });
+    test('model: claude-instant-1.2', async () => {
+        const completion = await client.chat.completions.create({ model: 'claude-instant-1.2', messages: [{ "role": "user", "content": "Say this is a test" }], max_tokens: 275 });
         expect(completion).toBeDefined();
         expect(completion.choices).toBeDefined();
         expect(completion.choices.length).toBeGreaterThan(0);
     });
-
 });
\ No newline at end of file
diff --git a/tests/completion/anyscale.test.ts b/tests/completion/anyscale.test.ts
index 2883a3b..3ba056e 100644
--- a/tests/completion/anyscale.test.ts
+++ b/tests/completion/anyscale.test.ts
@@ -4,8 +4,6 @@ import { Portkey } from 'portkey-ai';
 config({ override: true })
 const client = new Portkey({
     apiKey: process.env["PORTKEY_API_KEY"] ?? "",
-    baseURL: "https://api.portkeydev.com/v1",
-    provider: "openai",
     virtualKey: process.env["ANYSCALE_VIRTUAL_KEY"] ?? ""
 });
diff --git a/tests/completion/openai.test.ts b/tests/completion/openai.test.ts
index d9e876b..0664694 100644
--- a/tests/completion/openai.test.ts
+++ b/tests/completion/openai.test.ts
@@ -4,8 +4,6 @@ import { Portkey } from 'portkey-ai';
 config({ override: true })
 const client = new Portkey({
     apiKey: process.env["PORTKEY_API_KEY"] ?? "",
-    baseURL: "https://api.portkeydev.com/v1",
-    provider: "openai",
     virtualKey: process.env["OPENAI_VIRTUAL_KEY"] ?? ""
 });
@@ -16,61 +14,4 @@ describe('Completions APIs', () => {
         expect(completion.choices).toBeDefined();
         expect(completion.choices.length).toBeGreaterThan(0);
     });
-
-    test('model: text-davinci-003', async () => {
-        const completion = await client.completions.create({ model: 'text-davinci-003', prompt: 'This is a test.' });
-        expect(completion).toBeDefined();
-        expect(completion.choices).toBeDefined();
-        expect(completion.choices.length).toBeGreaterThan(0);
-    });
-
-    test('model: text-davinci-002', async () => {
-        const completion = await client.completions.create({ model: 'text-davinci-002', prompt: 'This is a test.' });
-        expect(completion).toBeDefined();
-        expect(completion.choices).toBeDefined();
-        expect(completion.choices.length).toBeGreaterThan(0);
-    });
-
-    test('model: text-curie-001', async () => {
-        const completion = await client.completions.create({ model: 'text-curie-001', prompt: 'This is a test.' });
-        expect(completion).toBeDefined();
-        expect(completion.choices).toBeDefined();
-        expect(completion.choices.length).toBeGreaterThan(0);
-    });
-
-    test('model: text-babbage-001', async () => {
-        const completion = await client.completions.create({ model: 'text-babbage-001', prompt: 'This is a test.' });
-        expect(completion).toBeDefined();
-        expect(completion.choices).toBeDefined();
-        expect(completion.choices.length).toBeGreaterThan(0);
-    });
-
-    test('model: text-ada-001', async () => {
-        const completion = await client.completions.create({ model: 'text-ada-001', prompt: 'This is a test.' });
-        expect(completion).toBeDefined();
-        expect(completion.choices).toBeDefined();
-        expect(completion.choices.length).toBeGreaterThan(0);
-    });
-
-    test('model: babbage-002', async () => {
-        const completion = await client.completions.create({ model: 'babbage-002', prompt: 'This is a test.' });
-        expect(completion).toBeDefined();
-        expect(completion.choices).toBeDefined();
-        expect(completion.choices.length).toBeGreaterThan(0);
-    });
-
-    test('model: davinci-002', async () => {
-        const completion = await client.completions.create({ model: 'davinci-002', prompt: 'This is a test.' });
-        expect(completion).toBeDefined();
-        expect(completion.choices).toBeDefined();
-        expect(completion.choices.length).toBeGreaterThan(0);
-    });
-
-    test('model: text-davinci-001', async () => {
-        const completion = await client.completions.create({ model: 'text-davinci-001', prompt: 'This is a test.' });
-        expect(completion).toBeDefined();
-        expect(completion.choices).toBeDefined();
-        expect(completion.choices.length).toBeGreaterThan(0);
-    });
-
 });
\ No newline at end of file
diff --git a/tests/images/image.png b/tests/images/image.png
new file mode 100644
index 0000000..aea7d92
Binary files /dev/null and b/tests/images/image.png differ
diff --git a/tests/images/imageMask.png b/tests/images/imageMask.png
new file mode 100644
index 0000000..e231148
Binary files /dev/null and b/tests/images/imageMask.png differ
"" +}); + +describe("Openai Images APIs", () => { + test("generate: only required params", async () => { + const response = await client.images.generate({ + prompt: "A cute baby sea otter", + }); + expect(response).toBeDefined(); + expect(response.data).toBeDefined(); + expect(response.data.length).toBeGreaterThan(0); + }, 120000); + + test("generate: only required params with model", async () => { + const response = await client.images.generate({ + model: "dall-e-3", + prompt: "A cute baby sea otter", + }); + expect(response).toBeDefined(); + expect(response.data).toBeDefined(); + expect(response.data.length).toBeGreaterThan(0); + }, 120000); + + test("createVariation: only required params", async () => { + const imagePath = path.join(__dirname, 'image.png'); + const response = await client.images.createVariation({ + image: fs.createReadStream(imagePath), + }); + expect(response).toBeDefined(); + expect(response.data).toBeDefined(); + expect(response.data.length).toBeGreaterThan(0); + }, 120000); + + test("edit: only required params", async () => { + const imagePath = path.join(__dirname, 'image.png'); + const imageMaskPath = path.join(__dirname, 'imageMask.png'); + const response = await client.images.edit({ + image: fs.createReadStream(imagePath), + mask: fs.createReadStream(imageMaskPath), + prompt:"A cute baby sea otter wearing a beret" + }); + expect(response).toBeDefined(); + expect(response.data).toBeDefined(); + expect(response.data.length).toBeGreaterThan(0); + }, 120000); + +});