Skip to content

Commit

Permalink
Merge branch 'ts' into updating-package-name
Browse files Browse the repository at this point in the history
  • Loading branch information
Shyam-Raghuwanshi authored May 29, 2024
2 parents 8daae10 + f4460c0 commit 3b8d9c5
Show file tree
Hide file tree
Showing 106 changed files with 2,060 additions and 2,534 deletions.
14 changes: 9 additions & 5 deletions Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
[workspace]
members = [
"JS/wasm/crates/apis",
"JS/wasm/crates/arakoo-core",
"JS/wasm/crates/cli",
"JS/wasm/crates/serve",
Expand All @@ -13,15 +12,20 @@ edition = "2021"
version = "0.0.1"

[workspace.dependencies]
wizer = "4.0.0"
wasmtime = "16"
wasmtime-wasi = "16"
wasi-common = "16"
wizer = "6.0.0"
wasmtime = { features = ["async"], version = "19" }
wasmtime-wasi = "19"
javy = { version = "2.1.0" }
anyhow = "1.0.79"
once_cell = "1.19.0"
serde = { version = "1", features = ["derive"] }
serde_json = "1"
serde_bytes = "0.11"
http = "1.1.0"
reqwest = { version = "0.12.4", features = [
"blocking","json"
] }

[profile.release]
lto = true
opt-level = 's'
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,8 @@ interface messageOption {
role: role;
content: string;
name?: string;
}[]
}
[];

interface OpenAIChatOptions {
model?: string;
Expand All @@ -39,7 +40,7 @@ interface chatWithFunctionOptions {

interface ZodSchemaResponseOptions<S extends z.ZodTypeAny> {
model?: string;
role?: role
role?: role;
max_tokens?: number;
temperature?: number;
prompt?: string;
Expand Down Expand Up @@ -71,12 +72,14 @@ export class OpenAI {
openAI_url,
{
model: chatOptions.model || "gpt-3.5-turbo",
messages: chatOptions.prompt ? [
{
role: chatOptions.role || "user",
content: chatOptions.prompt,
},
] : chatOptions.messages,
messages: chatOptions.prompt
? [
{
role: chatOptions.role || "user",
content: chatOptions.prompt,
},
]
: chatOptions.messages,
max_tokens: chatOptions.max_tokens || 256,
temperature: chatOptions.temperature || 0.7,
},
Expand All @@ -85,17 +88,14 @@ export class OpenAI {
Authorization: "Bearer " + this.apiKey,
"content-type": "application/json",
},
},
}
)
.then((response) => {
return response.data.choices;
})
.catch((error) => {
if (error.response) {
console.log(
"Server responded with status code:",
error.response.status,
);
console.log("Server responded with status code:", error.response.status);
console.log("Response data:", error.response.data);
} else if (error.request) {
console.log("No response received:", error);
Expand All @@ -106,39 +106,40 @@ export class OpenAI {
return responce[0].message;
}

async chatWithFunction(chatOptions: chatWithFunctionOptions): Promise<chatWithFunctionReturnOptions> {
async chatWithFunction(
chatOptions: chatWithFunctionOptions
): Promise<chatWithFunctionReturnOptions> {
const responce = await axios
.post(
openAI_url,
{
model: chatOptions.model || "gpt-3.5-turbo",
messages: chatOptions.prompt ? [
{
role: chatOptions.role || "user",
content: chatOptions.prompt,
},
] : chatOptions.messages,
messages: chatOptions.prompt
? [
{
role: chatOptions.role || "user",
content: chatOptions.prompt,
},
]
: chatOptions.messages,
max_tokens: chatOptions.max_tokens || 256,
temperature: chatOptions.temperature || 0.7,
functions: chatOptions.functions,
function_call: chatOptions.function_call || "auto"
function_call: chatOptions.function_call || "auto",
},
{
headers: {
Authorization: "Bearer " + this.apiKey,
"content-type": "application/json",
},
},
}
)
.then((response) => {
return response.data.choices;
})
.catch((error) => {
if (error.response) {
console.log(
"Server responded with status code:",
error.response.status,
);
console.log("Server responded with status code:", error.response.status);
console.log("Response data:", error.response.data);
} else if (error.request) {
console.log("No response received:", error);
Expand All @@ -162,17 +163,14 @@ export class OpenAI {
Authorization: `Bearer ${this.apiKey}`,
"content-type": "application/json",
},
},
}
)
.then((response) => {
return response.data.data;
})
.catch((error) => {
if (error.response) {
console.log(
"Server responded with status code:",
error.response.status,
);
console.log("Server responded with status code:", error.response.status);
console.log("Response data:", error.response.data);
} else if (error.request) {
console.log("No response received:", error.request);
Expand All @@ -183,9 +181,10 @@ export class OpenAI {
return response;
}

async zodSchemaResponse<S extends z.ZodTypeAny>(chatOptions: ZodSchemaResponseOptions<S>): Promise<S> {

const { node } = zodToTs(chatOptions.schema, 'User')
async zodSchemaResponse<S extends z.ZodTypeAny>(
chatOptions: ZodSchemaResponseOptions<S>
): Promise<S> {
const { node } = zodToTs(chatOptions.schema, "User");

const content = `
Analyze the text enclosed in triple backticks below. Your task is to fill in the data as described, and respond only with a JSON object that strictly conforms to the following TypeScript schema. Do not include any additional text or explanations outside of the JSON object, as this will cause parsing errors.
Expand All @@ -208,17 +207,17 @@ export class OpenAI {
model: chatOptions.model || "gpt-3.5-turbo",
messages: chatOptions.prompt
? [
{
role: chatOptions.role || "user",
content,
},
]
{
role: chatOptions.role || "user",
content,
},
]
: [
{
role: chatOptions?.messages?.role || "user",
content,
},
],
{
role: chatOptions?.messages?.role || "user",
content,
},
],
max_tokens: chatOptions.max_tokens || 256,
temperature: chatOptions.temperature || 0.7,
},
Expand All @@ -230,7 +229,7 @@ export class OpenAI {
}
)
.then((response) => {
return response.data.choices[0].message.content
return response.data.choices[0].message.content;
})
.catch((error) => {
if (error.response) {
Expand All @@ -248,5 +247,4 @@ export class OpenAI {
throw Error("response must be a string");
}
}

}
2 changes: 1 addition & 1 deletion JS/edgechains/examples/chat-with-llm/dist/index.js
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import { ArakooServer } from "@arakoodev/edgechains.js/arakooserver";
import Jsonnet from "@arakoodev/jsonnet";
//@ts-ignore
import createClient from 'sync-rpc';
import createClient from "sync-rpc";
import { fileURLToPath } from "url";
import path from "path";
const server = new ArakooServer();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ const secretsPath = path.join(__dirname, "../../jsonnet/secrets.jsonnet");
const openAIApiKey = JSON.parse(jsonnet.evaluateFile(secretsPath)).openai_api_key;
const openai = new OpenAI({ apiKey: openAIApiKey });
const schema = zod_1.z.object({
answer: zod_1.z.string().describe("The answer to the question")
answer: zod_1.z.string().describe("The answer to the question"),
});
// ** Example schema for a horse object, You can unComment the following code if you want to test the complex schema based answer**
// const genderOrStage = ["mare", "stallion", "gelding", "foal"]; // Example values
Expand Down Expand Up @@ -38,8 +38,7 @@ function openAICall() {
return openai.zodSchemaResponse({ prompt, schema: schema }).then((res) => {
return JSON.stringify(res);
});
}
catch (error) {
} catch (error) {
return error;
}
};
Expand Down
3 changes: 2 additions & 1 deletion JS/edgechains/examples/chat-with-llm/package.json
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
{
"name": "chat-with-youtube-video",
"name": "own-chat-gpt",
"version": "1.0.0",
"description": "",
"main": "index.js",
Expand All @@ -20,3 +20,4 @@
"@types/node": "^20.12.12"
}
}

14 changes: 6 additions & 8 deletions JS/edgechains/examples/chat-with-llm/readme.md
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
## Video
## Video

```
https://youtu.be/fq3BpdduO2g
```
Expand All @@ -13,13 +14,12 @@
npm install
```


## Configuration

1 Add OpenAiApi key in secrets.jsonnet
```bash
1 Add OpenAiApi key in secrets.jsonnet

```bash
local OPENAI_API_KEY = "sk-****";
```

## Usage

Expand All @@ -31,11 +31,9 @@

2. Hit the `POST` endpoint with basic question `http://localhost:3000/chat`.


```bash
body = {
"question":"hi"
}
```

1 change: 1 addition & 0 deletions JS/edgechains/examples/chat-with-llm/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -25,3 +25,4 @@ app.post("/chat", async (c: any) => {

server.listen(3000)


18 changes: 8 additions & 10 deletions JS/edgechains/examples/chat-with-llm/src/lib/generateResponse.cts
Original file line number Diff line number Diff line change
Expand Up @@ -5,13 +5,13 @@ const Jsonnet = require("@arakoodev/jsonnet");
const jsonnet = new Jsonnet();

const secretsPath = path.join(__dirname, "../../jsonnet/secrets.jsonnet");
const openAIApiKey = JSON.parse(jsonnet.evaluateFile(secretsPath)).openai_api_key
const openAIApiKey = JSON.parse(jsonnet.evaluateFile(secretsPath)).openai_api_key;

const openai = new OpenAI({ apiKey: openAIApiKey })
const openai = new OpenAI({ apiKey: openAIApiKey });

const schema = z.object({
answer: z.string().describe("The answer to the question")
})
answer: z.string().describe("The answer to the question"),
});

// ** Example schema for a horse object, You can unComment the following code if you want to test the complex schema based answer**
// const genderOrStage = ["mare", "stallion", "gelding", "foal"]; // Example values
Expand All @@ -37,17 +37,15 @@ const schema = z.object({
// });

function openAICall() {

return function (prompt: string) {
try {
return openai.zodSchemaResponse({ prompt, schema: schema }).then((res: any) => {
return JSON.stringify(res)
})
return JSON.stringify(res);
});
} catch (error) {
return error;
}
}
};
}


module.exports = openAICall;
module.exports = openAICall;
26 changes: 13 additions & 13 deletions JS/edgechains/examples/chat-with-llm/tsconfig.json
Original file line number Diff line number Diff line change
@@ -1,14 +1,14 @@
{
"compilerOptions": {
"target": "ES2022",
"moduleResolution": "NodeNext",
"module": "NodeNext",
"rootDir": "./src",
"outDir": "./dist",
"esModuleInterop": true,
"forceConsistentCasingInFileNames": true,
"strict": true,
"skipLibCheck": true
},
"exclude": ["./**/*.test.ts", "vitest.config.ts"]
}
"compilerOptions": {
"target": "ES2022",
"moduleResolution": "NodeNext",
"module": "NodeNext",
"rootDir": "./src",
"outDir": "./dist",
"esModuleInterop": true,
"forceConsistentCasingInFileNames": true,
"strict": true,
"skipLibCheck": true
},
"exclude": ["./**/*.test.ts", "vitest.config.ts"]
}
Loading

0 comments on commit 3b8d9c5

Please sign in to comment.