
Commit

updating examples with zod schema function (#364)
Shyam-Raghuwanshi authored May 29, 2024
1 parent ea4b1e3 commit ca4ec1b
Showing 109 changed files with 1,913 additions and 2,777 deletions.
6 changes: 3 additions & 3 deletions JS/edgechains/arakoodev/package.json
@@ -1,6 +1,6 @@
{
"name": "arakoodev",
"version": "1.0.9",
"version": "1.0.10",
"main": "dist/index.js",
"files": [
"dist"
@@ -26,7 +26,6 @@
"@hono/node-server": "^1.11.0",
"@supabase/supabase-js": "^2.42.3",
"@types/dotenv": "^8.2.0",
"axios": "^1.6.2",
"axios-retry": "^4.1.0",
"cheerio": "^1.0.0-rc.12",
"cors": "^2.8.5",
@@ -49,7 +48,8 @@
"typeorm": "^0.3.20",
"vitest": "^1.5.1",
"web-streams-polyfill": "^4.0.0",
"youtube-transcript": "^1.2.1"
"youtube-transcript": "^1.2.1",
"zod-to-ts": "^1.2.0"
},
"keywords": [],
"author": "",
148 changes: 116 additions & 32 deletions JS/edgechains/arakoodev/src/openai/src/lib/endpoints/OpenAiEndpoint.ts
@@ -1,38 +1,52 @@
import axios from "axios";
import { config } from "dotenv";
+ import { printNode, zodToTs } from "zod-to-ts";
+ import { z } from "zod";
config();
const openAI_url = "https://api.openai.com/v1/chat/completions";

+ type role = "user" | "assistant" | "system";
+
interface OpenAIConstructionOptions {
apiKey?: string;
}

interface messageOption {
- role: string;
+ role: role;
content: string;
name?: string;
- }
+ }[]

interface OpenAIChatOptions {
model?: string;
- role?: string;
+ role?: role;
max_tokens?: number;
temperature?: number;
prompt?: string;
- messages?: messageOption[];
+ messages?: messageOption;
}

interface chatWithFunctionOptions {
model?: string;
- role?: string;
+ role?: role;
max_tokens?: number;
temperature?: number;
prompt?: string;
functions?: object | Array<object>;
- messages?: messageOption[];
+ messages?: messageOption;
function_call?: string;
}

+ interface ZodSchemaResponseOptions<S extends z.ZodTypeAny> {
+ model?: string;
+ role?: role
+ max_tokens?: number;
+ temperature?: number;
+ prompt?: string;
+ messages?: messageOption;
+ schema: S;
+ }
+
interface chatWithFunctionReturnOptions {
content: string;
function_call: {
@@ -57,14 +71,12 @@ export class OpenAI {
openAI_url,
{
model: chatOptions.model || "gpt-3.5-turbo",
- messages: chatOptions.prompt
- ? [
- {
- role: chatOptions.role || "user",
- content: chatOptions.prompt,
- },
- ]
- : chatOptions.messages,
+ messages: chatOptions.prompt ? [
+ {
+ role: chatOptions.role || "user",
+ content: chatOptions.prompt,
+ },
+ ] : chatOptions.messages,
max_tokens: chatOptions.max_tokens || 256,
temperature: chatOptions.temperature || 0.7,
},
@@ -73,14 +85,17 @@
Authorization: "Bearer " + this.apiKey,
"content-type": "application/json",
},
- }
+ },
)
.then((response) => {
return response.data.choices;
})
.catch((error) => {
if (error.response) {
- console.log("Server responded with status code:", error.response.status);
+ console.log(
+ "Server responded with status code:",
+ error.response.status,
+ );
console.log("Response data:", error.response.data);
} else if (error.request) {
console.log("No response received:", error);
@@ -91,40 +106,39 @@
return responce[0].message;
}

- async chatWithFunction(
- chatOptions: chatWithFunctionOptions
- ): Promise<chatWithFunctionReturnOptions> {
+ async chatWithFunction(chatOptions: chatWithFunctionOptions): Promise<chatWithFunctionReturnOptions> {
const responce = await axios
.post(
openAI_url,
{
model: chatOptions.model || "gpt-3.5-turbo",
- messages: chatOptions.prompt
- ? [
- {
- role: chatOptions.role || "user",
- content: chatOptions.prompt,
- },
- ]
- : chatOptions.messages,
+ messages: chatOptions.prompt ? [
+ {
+ role: chatOptions.role || "user",
+ content: chatOptions.prompt,
+ },
+ ] : chatOptions.messages,
max_tokens: chatOptions.max_tokens || 256,
temperature: chatOptions.temperature || 0.7,
functions: chatOptions.functions,
- function_call: chatOptions.function_call || "auto",
+ function_call: chatOptions.function_call || "auto"
},
{
headers: {
Authorization: "Bearer " + this.apiKey,
"content-type": "application/json",
},
- }
+ },
)
.then((response) => {
return response.data.choices;
})
.catch((error) => {
if (error.response) {
- console.log("Server responded with status code:", error.response.status);
+ console.log(
+ "Server responded with status code:",
+ error.response.status,
+ );
console.log("Response data:", error.response.data);
} else if (error.request) {
console.log("No response received:", error);
@@ -148,14 +162,17 @@
Authorization: `Bearer ${this.apiKey}`,
"content-type": "application/json",
},
- }
+ },
)
.then((response) => {
return response.data.data;
})
.catch((error) => {
if (error.response) {
- console.log("Server responded with status code:", error.response.status);
+ console.log(
+ "Server responded with status code:",
+ error.response.status,
+ );
console.log("Response data:", error.response.data);
} else if (error.request) {
console.log("No response received:", error.request);
@@ -165,4 +182,71 @@
});
return response;
}

+ async zodSchemaResponse<S extends z.ZodTypeAny>(chatOptions: ZodSchemaResponseOptions<S>): Promise<S> {
+
+ const { node } = zodToTs(chatOptions.schema, 'User')
+
+ const content = `
+ Analyze the text enclosed in triple backticks below. Your task is to fill in the data as described, and respond only with a JSON object that strictly conforms to the following TypeScript schema. Do not include any additional text or explanations outside of the JSON object, as this will cause parsing errors.
+ Schema:
+ \`\`\`
+ ${printNode(node)}
+ \`\`\`
+ User Prompt:
+ \`\`\`
+ ${chatOptions.prompt || "No prompt provided."}
+ \`\`\`
+ `;
+
+ const response = await axios
+ .post(
+ openAI_url,
+ {
+ model: chatOptions.model || "gpt-3.5-turbo",
+ messages: chatOptions.prompt
+ ? [
+ {
+ role: chatOptions.role || "user",
+ content,
+ },
+ ]
+ : [
+ {
+ role: chatOptions?.messages?.role || "user",
+ content,
+ },
+ ],
+ max_tokens: chatOptions.max_tokens || 256,
+ temperature: chatOptions.temperature || 0.7,
+ },
+ {
+ headers: {
+ Authorization: "Bearer " + this.apiKey,
+ "content-type": "application/json",
+ },
+ }
+ )
+ .then((response) => {
+ return response.data.choices[0].message.content
+ })
+ .catch((error) => {
+ if (error.response) {
+ console.log("Server responded with status code:", error.response.status);
+ console.log("Response data:", error.response.data);
+ } else if (error.request) {
+ console.log("No response received:", error);
+ } else {
+ console.log("Error creating request:", error.message);
+ }
+ });
+ if (typeof response === "string") {
+ return chatOptions.schema.parse(JSON.parse(response));
+ } else {
+ throw Error("response must be a string");
+ }
+ }
+
}
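
For reference, a minimal usage sketch of the new zodSchemaResponse method added above. This is not part of the commit: the import path and the { apiKey } constructor argument are assumptions inferred from the OpenAIConstructionOptions interface shown in this diff, and the schema and prompt are hypothetical.

// Hypothetical usage sketch — import path and constructor shape are assumed.
import { z } from "zod";
import { OpenAI } from "arakoodev/openai";

const client = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });

// Describe the expected response shape with zod. zodSchemaResponse renders the
// schema to a TypeScript type via zod-to-ts, embeds it in the prompt, and
// validates the model's JSON reply with schema.parse().
const userSchema = z.object({
    name: z.string(),
    age: z.number(),
});

const user = await client.zodSchemaResponse({
    prompt: "Alice is 30 years old.",
    schema: userSchema,
});

console.log(user); // e.g. { name: "Alice", age: 30 }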
2 changes: 0 additions & 2 deletions JS/edgechains/examples/Test01/.env

This file was deleted.

8 changes: 0 additions & 8 deletions JS/edgechains/examples/Test01/.gitignore

This file was deleted.

58 changes: 0 additions & 58 deletions JS/edgechains/examples/Test01/htmljs.ts

This file was deleted.

11 changes: 0 additions & 11 deletions JS/edgechains/examples/Test01/ormconfig.json

This file was deleted.
