diff --git a/JS/edgechains/arakoodev/package.json b/JS/edgechains/arakoodev/package.json index a2f18702c..f0296a13d 100644 --- a/JS/edgechains/arakoodev/package.json +++ b/JS/edgechains/arakoodev/package.json @@ -1,6 +1,6 @@ { "name": "arakoodev", - "version": "1.0.9", + "version": "1.0.10", "main": "dist/index.js", "files": [ "dist" @@ -26,7 +26,6 @@ "@hono/node-server": "^1.11.0", "@supabase/supabase-js": "^2.42.3", "@types/dotenv": "^8.2.0", - "axios": "^1.6.2", "axios-retry": "^4.1.0", "cheerio": "^1.0.0-rc.12", "cors": "^2.8.5", @@ -49,7 +48,8 @@ "typeorm": "^0.3.20", "vitest": "^1.5.1", "web-streams-polyfill": "^4.0.0", - "youtube-transcript": "^1.2.1" + "youtube-transcript": "^1.2.1", + "zod-to-ts": "^1.2.0" }, "keywords": [], "author": "", diff --git a/JS/edgechains/arakoodev/src/openai/src/lib/endpoints/OpenAiEndpoint.ts b/JS/edgechains/arakoodev/src/openai/src/lib/endpoints/OpenAiEndpoint.ts index b025d9f4f..c2777eb6a 100644 --- a/JS/edgechains/arakoodev/src/openai/src/lib/endpoints/OpenAiEndpoint.ts +++ b/JS/edgechains/arakoodev/src/openai/src/lib/endpoints/OpenAiEndpoint.ts @@ -1,38 +1,52 @@ import axios from "axios"; import { config } from "dotenv"; +import { printNode, zodToTs } from "zod-to-ts"; +import { z } from "zod"; config(); const openAI_url = "https://api.openai.com/v1/chat/completions"; +type role = "user" | "assistant" | "system"; + interface OpenAIConstructionOptions { apiKey?: string; } interface messageOption { - role: string; + role: role; content: string; name?: string; -} +}[] interface OpenAIChatOptions { model?: string; - role?: string; + role?: role; max_tokens?: number; temperature?: number; prompt?: string; - messages?: messageOption[]; + messages?: messageOption; } interface chatWithFunctionOptions { model?: string; - role?: string; + role?: role; max_tokens?: number; temperature?: number; prompt?: string; functions?: object | Array; - messages?: messageOption[]; + messages?: messageOption; function_call?: string; } +interface ZodSchemaResponseOptions { + model?: string; + role?: role + max_tokens?: number; + temperature?: number; + prompt?: string; + messages?: messageOption; + schema: S; +} + interface chatWithFunctionReturnOptions { content: string; function_call: { @@ -57,14 +71,12 @@ export class OpenAI { openAI_url, { model: chatOptions.model || "gpt-3.5-turbo", - messages: chatOptions.prompt - ? [ - { - role: chatOptions.role || "user", - content: chatOptions.prompt, - }, - ] - : chatOptions.messages, + messages: chatOptions.prompt ? 
[ + { + role: chatOptions.role || "user", + content: chatOptions.prompt, + }, + ] : chatOptions.messages, max_tokens: chatOptions.max_tokens || 256, temperature: chatOptions.temperature || 0.7, }, @@ -73,14 +85,17 @@ export class OpenAI { Authorization: "Bearer " + this.apiKey, "content-type": "application/json", }, - } + }, ) .then((response) => { return response.data.choices; }) .catch((error) => { if (error.response) { - console.log("Server responded with status code:", error.response.status); + console.log( + "Server responded with status code:", + error.response.status, + ); console.log("Response data:", error.response.data); } else if (error.request) { console.log("No response received:", error); @@ -91,40 +106,39 @@ export class OpenAI { return responce[0].message; } - async chatWithFunction( - chatOptions: chatWithFunctionOptions - ): Promise { + async chatWithFunction(chatOptions: chatWithFunctionOptions): Promise { const responce = await axios .post( openAI_url, { model: chatOptions.model || "gpt-3.5-turbo", - messages: chatOptions.prompt - ? [ - { - role: chatOptions.role || "user", - content: chatOptions.prompt, - }, - ] - : chatOptions.messages, + messages: chatOptions.prompt ? [ + { + role: chatOptions.role || "user", + content: chatOptions.prompt, + }, + ] : chatOptions.messages, max_tokens: chatOptions.max_tokens || 256, temperature: chatOptions.temperature || 0.7, functions: chatOptions.functions, - function_call: chatOptions.function_call || "auto", + function_call: chatOptions.function_call || "auto" }, { headers: { Authorization: "Bearer " + this.apiKey, "content-type": "application/json", }, - } + }, ) .then((response) => { return response.data.choices; }) .catch((error) => { if (error.response) { - console.log("Server responded with status code:", error.response.status); + console.log( + "Server responded with status code:", + error.response.status, + ); console.log("Response data:", error.response.data); } else if (error.request) { console.log("No response received:", error); @@ -148,14 +162,17 @@ export class OpenAI { Authorization: `Bearer ${this.apiKey}`, "content-type": "application/json", }, - } + }, ) .then((response) => { return response.data.data; }) .catch((error) => { if (error.response) { - console.log("Server responded with status code:", error.response.status); + console.log( + "Server responded with status code:", + error.response.status, + ); console.log("Response data:", error.response.data); } else if (error.request) { console.log("No response received:", error.request); @@ -165,4 +182,71 @@ export class OpenAI { }); return response; } + + async zodSchemaResponse(chatOptions: ZodSchemaResponseOptions): Promise { + + const { node } = zodToTs(chatOptions.schema, 'User') + + const content = ` + Analyze the text enclosed in triple backticks below. Your task is to fill in the data as described, and respond only with a JSON object that strictly conforms to the following TypeScript schema. Do not include any additional text or explanations outside of the JSON object, as this will cause parsing errors. + + Schema: + \`\`\` + ${printNode(node)} + \`\`\` + + User Prompt: + \`\`\` + ${chatOptions.prompt || "No prompt provided."} + \`\`\` + `; + + const response = await axios + .post( + openAI_url, + { + model: chatOptions.model || "gpt-3.5-turbo", + messages: chatOptions.prompt + ? 
[ + { + role: chatOptions.role || "user", + content, + }, + ] + : [ + { + role: chatOptions?.messages?.role || "user", + content, + }, + ], + max_tokens: chatOptions.max_tokens || 256, + temperature: chatOptions.temperature || 0.7, + }, + { + headers: { + Authorization: "Bearer " + this.apiKey, + "content-type": "application/json", + }, + } + ) + .then((response) => { + return response.data.choices[0].message.content + }) + .catch((error) => { + if (error.response) { + console.log("Server responded with status code:", error.response.status); + console.log("Response data:", error.response.data); + } else if (error.request) { + console.log("No response received:", error); + } else { + console.log("Error creating request:", error.message); + } + }); + if (typeof response === "string") { + return chatOptions.schema.parse(JSON.parse(response)); + } else { + throw Error("response must be a string"); + } + } + } diff --git a/JS/edgechains/examples/Test01/.env b/JS/edgechains/examples/Test01/.env deleted file mode 100644 index 976725b05..000000000 --- a/JS/edgechains/examples/Test01/.env +++ /dev/null @@ -1,2 +0,0 @@ -OPENAI_API_KEY= - OPENAI_ORG_ID= diff --git a/JS/edgechains/examples/Test01/.gitignore b/JS/edgechains/examples/Test01/.gitignore deleted file mode 100644 index 4299fdb39..000000000 --- a/JS/edgechains/examples/Test01/.gitignore +++ /dev/null @@ -1,8 +0,0 @@ -# standard exclusions - node_modules - - # build artifacts - dist - - # environment files - .env diff --git a/JS/edgechains/examples/Test01/htmljs.ts b/JS/edgechains/examples/Test01/htmljs.ts deleted file mode 100644 index ae460e4de..000000000 --- a/JS/edgechains/examples/Test01/htmljs.ts +++ /dev/null @@ -1,58 +0,0 @@ -import { html } from "hono/html"; - -// These functions form the basis of the html.js framework and will be moved to a separate lib - -export const view = (viewToRender) => { - return async (c) => { - const newBody = await viewToRender({ context: c }); - return c.html(newBody); - }; -}; - -export const rootLayout = (layoutToApply) => { - return async (c, next) => { - await next(); - if (c.req.header("HX-Request") !== "true") { - // Req is a normal request, so we render the whole page which means adding the root layout - const curBody = await c.res.text(); - c.res = undefined; // To overwrite res, set it to undefined before setting new value https://github.com/honojs/hono/pull/970 released in https://github.com/honojs/hono/releases/tag/v3.1.0 - const newBody = await layoutToApply({ context: c, children: html(curBody) }); - c.res = c.html(newBody); - } - // Else do nothing and let the original response be sent - }; -}; - -export const layout = (layoutToApply) => { - return async (c, next) => { - await next(); - if ( - (c.req.header("HX-Request") === "true" && - (c.req.header("HX-Boosted") === "true" || !c.req.header("HX-Target"))) || - c.req.header("HX-Request") !== "true" - ) { - // Req is regular req or boosted link, so we apply layouts - const curBody = await c.res.text(); - c.res = undefined; // To overwrite res, set it to undefined before setting new value https://github.com/honojs/hono/pull/970 released in https://github.com/honojs/hono/releases/tag/v3.1.0 - const newBody = await layoutToApply({ context: c, children: html(curBody) }); - c.res = c.html(newBody); - } - // Else do nothing and let the original response be sent, which will be a partial update applied to the page with hx-target - }; -}; - -export const Link: any = ({ to, "hx-target": hxTarget, class: className, children }) => { - if (hxTarget) { - 
return html`${children}`; - } else { - return html`${children}`; - } -}; diff --git a/JS/edgechains/examples/Test01/ormconfig.json b/JS/edgechains/examples/Test01/ormconfig.json deleted file mode 100644 index 21819c00f..000000000 --- a/JS/edgechains/examples/Test01/ormconfig.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "type": "postgres", - "host": "db.rmzqtepwnzoxgkkzjctt.supabase.co", - "port": 5432, - "username": "postgres", - "password": "xaX0MYcf1YiJlChK", - "database": "postgres", - "entities": ["dist/entities/**/*.js"], - "synchronize": false, - "logging": false -} diff --git a/JS/edgechains/examples/Test01/package.json b/JS/edgechains/examples/Test01/package.json deleted file mode 100644 index b1c31117e..000000000 --- a/JS/edgechains/examples/Test01/package.json +++ /dev/null @@ -1,55 +0,0 @@ -{ - "name": "example", - "version": "1.0.0", - "description": "", - "main": "dist/index.js", - "type": "module", - "scripts": { - "build": "rm -rf dist && node esbuild.build.js", - "start": "node --experimental-wasm-modules ./src/index.js", - "lint": "eslint --ignore-path .eslintignore --ext .js,.ts", - "format": "prettier --ignore-path .gitignore --write \"**/*.+(js|ts|json)\"", - "test": "npx jest" - }, - "jest": { - "setupFiles": [ - "/setupTests.js" - ] - }, - "keywords": [], - "author": "", - "license": "ISC", - "dependencies": { - "@arakoodev/jsonnet": "^0.1.2", - "@hono/node-server": "^1.2.0", - "@types/dotenv": "^8.2.0", - "hono": "^3.9.2", - "pg": "^8.11.3", - "reflect-metadata": "^0.1.13", - "tsc": "^2.0.4", - "typescript": "^5.3.2" - }, - "devDependencies": { - "@arakoodev/edgechains.js": "0.1.22", - "@types/jest": "^29.5.8", - "@types/node": "^20.9.4", - "@typescript-eslint/eslint-plugin": "^6.11.0", - "@typescript-eslint/parser": "^6.11.0", - "axios": "^1.6.2", - "dotenv": "^16.3.1", - "dts-bundle-generator": "^8.1.2", - "eslint": "^8.54.0", - "eslint-config-prettier": "^9.0.0", - "eslint-config-standard-with-typescript": "^40.0.0", - "eslint-plugin-import": "^2.29.0", - "eslint-plugin-n": "^16.3.1", - "eslint-plugin-promise": "^6.1.1", - "jest": "^29.7.0", - "prettier": "^3.1.0", - "react": "^18.2.0", - "ts-jest": "^29.1.1", - "tsx": "^3.12.2", - "typeorm": "^0.3.17", - "typescript": "^5.0.2" - } -} diff --git a/JS/edgechains/examples/Test01/src/index.ts b/JS/edgechains/examples/Test01/src/index.ts deleted file mode 100644 index 188077192..000000000 --- a/JS/edgechains/examples/Test01/src/index.ts +++ /dev/null @@ -1,16 +0,0 @@ -import "dotenv/config"; -import { serve } from "@hono/node-server"; -import { Hono } from "hono"; -import { HydeSearchRouter } from "./routes/hydeSearch.route.js"; -import { view } from "../htmljs.js"; -import ExampleLayout from "./layouts/ExampleLayout.js"; - -const app = new Hono(); - -app.route("/", HydeSearchRouter); - -app.get("/", view(ExampleLayout)); - -serve(app, () => { - console.log("server running on port 3000"); -}); diff --git a/JS/edgechains/examples/Test01/src/jsonnet/hyde.jsonnet b/JS/edgechains/examples/Test01/src/jsonnet/hyde.jsonnet deleted file mode 100644 index 77881790f..000000000 --- a/JS/edgechains/examples/Test01/src/jsonnet/hyde.jsonnet +++ /dev/null @@ -1,18 +0,0 @@ -//Replace the {} in the prompt template with the query -local updateQueryPrompt(promptTemplate, query) = - local updatedPrompt = std.strReplace(promptTemplate,'{}',query + "\n"); - updatedPrompt; - -//To replace the time in the system prompt -local updateTimePrompt(promptTemplate, time) = - local updatedPrompt =std.strReplace(promptTemplate,'{time}', time ); - 
updatedPrompt; - -local promptTemplate = std.extVar("promptTemplate"); -local time = std.extVar("time"); -local query = std.extVar("query"); -local updatedQueryPrompt = updateQueryPrompt(promptTemplate, query); -local updatedPrompt = updateTimePrompt(updatedQueryPrompt, time); -{ - "prompt": updatedPrompt -} \ No newline at end of file diff --git a/JS/edgechains/examples/Test01/src/jsonnet/prompts.jsonnet b/JS/edgechains/examples/Test01/src/jsonnet/prompts.jsonnet deleted file mode 100644 index 10ff2da84..000000000 --- a/JS/edgechains/examples/Test01/src/jsonnet/prompts.jsonnet +++ /dev/null @@ -1,103 +0,0 @@ -local WEB_SEARCH = ||| - Please write a passage to answer the question. - Question: {} - Passage: - |||; - -local SCIFACT = ||| - Please write a scientific paper passage to support/refute the claim. - Claim: {} - Passage: - |||; - -local ARGUANA = ||| - Please write a counter argument for the passage. - Passage: {} - Counter Argument: - |||; - -local TREC_COVID = ||| - Please write a scientific paper passage to answer the question. - Question: {} - Passage: - |||; - -local FIQA = ||| - Please write a financial article passage to answer the question. - Question: {} - Passage: - |||; - -local DBPEDIA_ENTITY = ||| - Please write a passage to answer the question. - Question: {} - Passage: - |||; - -local TREC_NEWS = ||| - Please write a news passage about the topic. - Topic: {} - Passage: - |||; - -local MR_TYDI = ||| - Please write a passage in {} to answer the question in detail. - Question: {} - Passage: - |||; -local CHUNK_SUMMARY = ||| - Summarize the following text to replace the original text with all important information left as it is. - Do not replace abbreviations with it's full forms. - {} - Summary: - |||; - -local ANS_PROMPT_SYSTEM = ||| - You are an AI assistant whose name is DoMIno. - - Its responses must not be vague, accusatory, rude, controversial, off-topic, or defensive. - - It should avoid giving subjective opinions but rely on objective facts or phrases like \"in this context a human might say...\", \"some people might think...\", etc. - - It can provide additional relevant details to answer in-depth and comprehensively covering mutiple aspects. - - It must provide an answer based solely on the provided sources below and not prior knowledge. It should ignore whether the question is singular or plural and just focus on the subject of the question. - - If the documents do not provide any context refuse to answer do not create an answer for the query without documents. - - If the full form of any abbreviation is unknown leave it as an abbreviation. Do not try to guess or infer the full form of the abrreviation. But do answer the query using the abbreviation without expanding it. - - If it doesn't know the answer, it must just say that it doesn't know and never try to make up an answer. However, if you are asked terms like highest, lowest, minimum, maximum and if you cannot find an exact answer, then you should mention that and still give an answer without the constraints of highest, lowest, minimum, maximum. - Below are multiple sources of information which are numbered. Please discard the sources of information that are not relevant for the question. Only use the ones that are relevant: - ---------------- - {} - |||; -local ANS_PROMPT_USER = ||| - Question: {} - Helpful Answer: - |||; -local SUMMARY = ||| - Do not expand on abbreviations and leave them as is in the reply. Please generate 5 different responses in bullet points for the question. 
- Please write a summary to answer the question in detail: - Question: {} - Passage: - |||; -local DATE_EXTRACTION = ||| - Extract the date of the document from the given chunk in the following format Month DD, YYYY. - Only give date in the answer, don't write any sentence or full stop: - {} - |||; -local TITLE_EXTRACTION = ||| - Extract the title of the document from the given chunk: - {} - |||; - -{ - "web_search": WEB_SEARCH, - "scifact": SCIFACT, - "arguana": ARGUANA, - "trec_covid": TREC_COVID, - "fiqa": FIQA, - "dbpedia_entity": DBPEDIA_ENTITY, - "trec_news": TREC_NEWS, - "mrqa_tydi": MR_TYDI, - "chunk_summary": CHUNK_SUMMARY, - "ans_prompt_system": ANS_PROMPT_SYSTEM, - "ans_prompt_user": ANS_PROMPT_USER, - "summary": SUMMARY, - "date_extraction": DATE_EXTRACTION, - "title_extraction": TITLE_EXTRACTION -} \ No newline at end of file diff --git a/JS/edgechains/examples/Test01/src/layouts/ExampleLayout.ts b/JS/edgechains/examples/Test01/src/layouts/ExampleLayout.ts deleted file mode 100644 index 90df407b0..000000000 --- a/JS/edgechains/examples/Test01/src/layouts/ExampleLayout.ts +++ /dev/null @@ -1,282 +0,0 @@ -import { html } from "hono/html"; -import { FC } from "hono/jsx"; - -const ExampleLayout: FC = (props) => html` - - - - - - Document - - - - - - - - -
-                <!-- markup elided (stripped in extraction): page layout and RRF search form — "Enter these details for RRF search:" with inputs for Metadata Table Name, OrderRRF, Text-BaseWeight, Text-FineTuneWeight (0.35), Similarity-BaseWeight, Similarity-FineTuneWeight (0.4), Date-BaseWeight, Date-FineTuneWeight (0.75) — plus a search results container -->
- - - - - - - -`; - -export default ExampleLayout; diff --git a/JS/edgechains/examples/Test01/src/routes/hydeSearch.route.ts b/JS/edgechains/examples/Test01/src/routes/hydeSearch.route.ts deleted file mode 100644 index 51b376524..000000000 --- a/JS/edgechains/examples/Test01/src/routes/hydeSearch.route.ts +++ /dev/null @@ -1,76 +0,0 @@ -import { Hono } from "hono"; -import { hydeSearchAdaEmbedding } from "../service/HydeSearchService.js"; -import { HydeFragmentData } from "../types/HydeFragmentData.js"; -const HydeSearchRouter = new Hono(); - -HydeSearchRouter.get("/search", async (c) => { - const query = await c.req.query(); - const arkRequest = { - topK: parseInt(query.topK ?? "5"), - metadataTable: query.metadataTable, - query: query.query, - textWeight: { - baseWeight: query.textBaseWeight, - fineTuneWeight: query.textFineTuneWeight, - }, - similarityWeight: { - baseWeight: query.similarityBaseWeight, - fineTuneWeight: query.similarityFineTuneWeight, - }, - dateWeight: { - baseWeight: query.dateBaseWeight, - fineTuneWeight: query.dateFineTuneWeight, - }, - orderRRF: query.orderRRF, - }; - const answer = await hydeSearchAdaEmbedding( - arkRequest, - process.env.OPENAI_API_KEY!, - process.env.OPENAI_ORG_ID! - ); - const final_answer = answer.finalAnswer; - const responses = answer.wordEmbeddings; - const data: HydeFragmentData = { responses, final_answer }; - return c.html(` - -
-            <!-- markup elided (stripped in extraction): response fragment rendering ${data.final_answer} and, for each item in data.responses, its rawText or metadata along with optional filename, titleMetadata, and documentDate -->
- - `); -}); - -export { HydeSearchRouter }; diff --git a/JS/edgechains/examples/Test01/src/service/HydeSearchService.ts b/JS/edgechains/examples/Test01/src/service/HydeSearchService.ts deleted file mode 100644 index 46996d364..000000000 --- a/JS/edgechains/examples/Test01/src/service/HydeSearchService.ts +++ /dev/null @@ -1,132 +0,0 @@ -import Jsonnet from "@arakoodev/jsonnet"; -import { OpenAiEndpoint } from "@arakoodev/edgechains.js"; -import { PostgresClient } from "@arakoodev/edgechains.js"; -import type { ArkRequest } from "@arakoodev/edgechains.js"; -import * as path from "path"; - -enum PostgresDistanceMetric { - COSINE = "COSINE", - IP = "IP", - L2 = "L2", -} - -async function hydeSearchAdaEmbedding(arkRequest: ArkRequest, apiKey: string, orgId: string) { - try { - const gpt3endpoint = new OpenAiEndpoint( - "https://api.openai.com/v1/chat/completions", - apiKey, - orgId, - "gpt-3.5-turbo", - "user", - parseInt("0.7") - ); - // Get required params from API... - const table = "ada_hyde_prod"; - const namespace = "360_docs"; - const query = arkRequest.query; - const topK = Number(arkRequest.topK); - - // - const jsonnet = new Jsonnet(); - - const promptPath = path.join(__dirname, "../src/jsonnet/prompts.jsonnet"); - const hydePath = path.join(__dirname, "../src/jsonnet/hyde.jsonnet"); - // Load Jsonnet to extract args.. - const promptLoader = await jsonnet.evaluateFile(promptPath); - - // Getting ${summary} basePrompt - const promptTemplate = JSON.parse(promptLoader).summary; - // Getting the updated promptTemplate with query - let hydeLoader = await jsonnet - .extString("promptTemplate", promptTemplate) - .extString("time", "") - .extString("query", query) - .evaluateFile(hydePath); - - // Get concatenated prompt - const prompt = JSON.parse(hydeLoader).prompt; - - // Block and get the response from GPT3 - const gptResponse = await gpt3endpoint.gptFn(prompt); - - // Chain 1 ==> Get Gpt3Response & split - const gpt3Responses = gptResponse.split("\n"); - - // Chain 2 ==> Get Embeddings from OpenAI using Each Response - const embeddingsListChain: Promise = Promise.all( - gpt3Responses.map(async (resp) => { - const embedding = await gpt3endpoint.embeddings(resp); - return embedding; - }) - ); - - // Chain 5 ==> Query via EmbeddingChain - const dbClient = new PostgresClient( - await embeddingsListChain, - PostgresDistanceMetric.IP, - topK, - 20, - table, - namespace, - arkRequest, - 15 - ); - - const queryResult = await dbClient.dbQuery(); - - // Chain 6 ==> Create Prompt using Embeddings - const retrievedDocs: string[] = []; - - for (const embeddings of queryResult) { - retrievedDocs.push( - `${embeddings.raw_text}\n score:${embeddings.score}\n filename:${embeddings.filename}\n` - ); - } - - if (retrievedDocs.join("").length > 4096) { - retrievedDocs.length = 4096; - } - - const currentTime = new Date().toLocaleString(); - const formattedTime = currentTime; - - // System prompt - const ansPromptSystem = JSON.parse(promptLoader).ans_prompt_system; - - hydeLoader = await jsonnet - .extString(promptTemplate, ansPromptSystem) - .extString("time", formattedTime) - .extString("qeury", retrievedDocs.join("")) - .evaluateFile(hydePath); - - const finalPromptSystem = JSON.parse(hydeLoader).prompt; - - // User prompt - const ansPromptUser = JSON.parse(promptLoader).ans_prompt_user; - - hydeLoader = await jsonnet - .extString(promptTemplate, ansPromptUser) - .extString("qeury", query) - .evaluateFile(hydePath); - const finalPromptUser = JSON.parse(hydeLoader).prompt; - - const chatMessages = [ - { 
role: "system", content: finalPromptSystem }, - { role: "user", content: finalPromptUser }, - ]; - - const finalAnswer = await gpt3endpoint.gptFnChat(chatMessages); - - const response = { - wordEmbeddings: queryResult, - finalAnswer: finalAnswer, - }; - return response; - } catch (error) { - // Handle errors here - console.error(error); - throw error; - } -} - -export { hydeSearchAdaEmbedding }; diff --git a/JS/edgechains/examples/Test01/src/testGeneration/TestGenerator.ts b/JS/edgechains/examples/Test01/src/testGeneration/TestGenerator.ts deleted file mode 100644 index 833f5a6f9..000000000 --- a/JS/edgechains/examples/Test01/src/testGeneration/TestGenerator.ts +++ /dev/null @@ -1,66 +0,0 @@ -import Jsonnet from "@arakoodev/jsonnet"; -import * as path from "path"; -import { OpenAiEndpoint } from "@arakoodev/edgechains.js"; - -const jsonnet = new Jsonnet(); -const promptPath = path.join(process.cwd(), "./src/testGeneration/prompts.jsonnet"); -const testGeneratorPath = path.join(process.cwd(), "./src/testGeneration/testGenerator.jsonnet"); - -const gpt3endpoint = new OpenAiEndpoint( - "https://api.openai.com/v1/chat/completions", - "", - "", - "gpt-3.5-turbo", - "user", - 0.7 -); - -const classText = - "public class ChatMessage {\n" + - " String role;\n" + - " String content;\n\n" + - " public ChatMessage(String role, String content) {\n" + - " this.role = role;\n" + - " this.content = content;\n" + - " }\n\n" + - " public ChatMessage() {}\n\n" + - " public String getRole() {\n" + - " return role;\n" + - " }\n\n" + - " public String getContent() {\n" + - " return content;\n" + - " }\n\n" + - " public void setContent(String content) {\n" + - " this.content = content;\n" + - " }\n\n" + - " @Override\n" + - " public String toString() {\n" + - ' return "ChatMessage{" + "role=\'" + role + "\', content=\'" + content + "\'}";\n' + - " }\n" + - "}"; -export async function getContent() { - try { - var prompt = await jsonnet.evaluateFile(promptPath); - - const testPrompt = await jsonnet - .extString("promptTemplate", JSON.parse(prompt).prompt) - .extString("test_class", classText) - .extString("test_package", "JUnit") - .evaluateFile(testGeneratorPath); - - var responce = await gpt3endpoint.gptFnTestGenerator(JSON.parse(testPrompt).prompt); - - console.log("First Response.......\n \n" + responce); - var finalResponse = responce; - - responce += JSON.parse(prompt).promptPlan; - - finalResponse += await gpt3endpoint.gptFnTestGenerator(responce); - - console.log("Final Response.......\n\n"); - - return finalResponse; - } catch (error) { - console.log(error); - } -} diff --git a/JS/edgechains/examples/Test01/src/testGeneration/hyde.jsonnet b/JS/edgechains/examples/Test01/src/testGeneration/hyde.jsonnet deleted file mode 100644 index 77881790f..000000000 --- a/JS/edgechains/examples/Test01/src/testGeneration/hyde.jsonnet +++ /dev/null @@ -1,18 +0,0 @@ -//Replace the {} in the prompt template with the query -local updateQueryPrompt(promptTemplate, query) = - local updatedPrompt = std.strReplace(promptTemplate,'{}',query + "\n"); - updatedPrompt; - -//To replace the time in the system prompt -local updateTimePrompt(promptTemplate, time) = - local updatedPrompt =std.strReplace(promptTemplate,'{time}', time ); - updatedPrompt; - -local promptTemplate = std.extVar("promptTemplate"); -local time = std.extVar("time"); -local query = std.extVar("query"); -local updatedQueryPrompt = updateQueryPrompt(promptTemplate, query); -local updatedPrompt = updateTimePrompt(updatedQueryPrompt, time); -{ - "prompt": 
updatedPrompt -} \ No newline at end of file diff --git a/JS/edgechains/examples/Test01/src/testGeneration/prompts.jsonnet b/JS/edgechains/examples/Test01/src/testGeneration/prompts.jsonnet deleted file mode 100644 index 10ff2da84..000000000 --- a/JS/edgechains/examples/Test01/src/testGeneration/prompts.jsonnet +++ /dev/null @@ -1,103 +0,0 @@ -local WEB_SEARCH = ||| - Please write a passage to answer the question. - Question: {} - Passage: - |||; - -local SCIFACT = ||| - Please write a scientific paper passage to support/refute the claim. - Claim: {} - Passage: - |||; - -local ARGUANA = ||| - Please write a counter argument for the passage. - Passage: {} - Counter Argument: - |||; - -local TREC_COVID = ||| - Please write a scientific paper passage to answer the question. - Question: {} - Passage: - |||; - -local FIQA = ||| - Please write a financial article passage to answer the question. - Question: {} - Passage: - |||; - -local DBPEDIA_ENTITY = ||| - Please write a passage to answer the question. - Question: {} - Passage: - |||; - -local TREC_NEWS = ||| - Please write a news passage about the topic. - Topic: {} - Passage: - |||; - -local MR_TYDI = ||| - Please write a passage in {} to answer the question in detail. - Question: {} - Passage: - |||; -local CHUNK_SUMMARY = ||| - Summarize the following text to replace the original text with all important information left as it is. - Do not replace abbreviations with it's full forms. - {} - Summary: - |||; - -local ANS_PROMPT_SYSTEM = ||| - You are an AI assistant whose name is DoMIno. - - Its responses must not be vague, accusatory, rude, controversial, off-topic, or defensive. - - It should avoid giving subjective opinions but rely on objective facts or phrases like \"in this context a human might say...\", \"some people might think...\", etc. - - It can provide additional relevant details to answer in-depth and comprehensively covering mutiple aspects. - - It must provide an answer based solely on the provided sources below and not prior knowledge. It should ignore whether the question is singular or plural and just focus on the subject of the question. - - If the documents do not provide any context refuse to answer do not create an answer for the query without documents. - - If the full form of any abbreviation is unknown leave it as an abbreviation. Do not try to guess or infer the full form of the abrreviation. But do answer the query using the abbreviation without expanding it. - - If it doesn't know the answer, it must just say that it doesn't know and never try to make up an answer. However, if you are asked terms like highest, lowest, minimum, maximum and if you cannot find an exact answer, then you should mention that and still give an answer without the constraints of highest, lowest, minimum, maximum. - Below are multiple sources of information which are numbered. Please discard the sources of information that are not relevant for the question. Only use the ones that are relevant: - ---------------- - {} - |||; -local ANS_PROMPT_USER = ||| - Question: {} - Helpful Answer: - |||; -local SUMMARY = ||| - Do not expand on abbreviations and leave them as is in the reply. Please generate 5 different responses in bullet points for the question. - Please write a summary to answer the question in detail: - Question: {} - Passage: - |||; -local DATE_EXTRACTION = ||| - Extract the date of the document from the given chunk in the following format Month DD, YYYY. 
- Only give date in the answer, don't write any sentence or full stop: - {} - |||; -local TITLE_EXTRACTION = ||| - Extract the title of the document from the given chunk: - {} - |||; - -{ - "web_search": WEB_SEARCH, - "scifact": SCIFACT, - "arguana": ARGUANA, - "trec_covid": TREC_COVID, - "fiqa": FIQA, - "dbpedia_entity": DBPEDIA_ENTITY, - "trec_news": TREC_NEWS, - "mrqa_tydi": MR_TYDI, - "chunk_summary": CHUNK_SUMMARY, - "ans_prompt_system": ANS_PROMPT_SYSTEM, - "ans_prompt_user": ANS_PROMPT_USER, - "summary": SUMMARY, - "date_extraction": DATE_EXTRACTION, - "title_extraction": TITLE_EXTRACTION -} \ No newline at end of file diff --git a/JS/edgechains/examples/Test01/src/types/HydeFragmentData.ts b/JS/edgechains/examples/Test01/src/types/HydeFragmentData.ts deleted file mode 100644 index e807409b3..000000000 --- a/JS/edgechains/examples/Test01/src/types/HydeFragmentData.ts +++ /dev/null @@ -1,10 +0,0 @@ -export interface HydeFragmentData { - responses: Array<{ - rawText?: string; - metadata?: string; - filename?: string; - titleMetadata?: string; - documentDate?: string; - }>; - final_answer?: string; -} diff --git a/JS/edgechains/examples/Test01/tsconfig.json b/JS/edgechains/examples/Test01/tsconfig.json deleted file mode 100644 index f370e670f..000000000 --- a/JS/edgechains/examples/Test01/tsconfig.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "compilerOptions": { - "types": ["dotenv/config", "jest", "node"], - "target": "ES2022", - "module": "NodeNext", - "esModuleInterop": true, - "forceConsistentCasingInFileNames": true, - "strict": true, - "skipLibCheck": true, - "jsx": "react-jsx", - "jsxImportSource": "hono/jsx", - "noImplicitAny": false, - "moduleResolution": "NodeNext", - "declaration": true - }, - "include": ["src/**/*.ts", "dist/**/*.d.ts"] -} diff --git a/JS/edgechains/examples/chat-with-llm/dist/index.js b/JS/edgechains/examples/chat-with-llm/dist/index.js new file mode 100644 index 000000000..f45d910a3 --- /dev/null +++ b/JS/edgechains/examples/chat-with-llm/dist/index.js @@ -0,0 +1,19 @@ +import { ArakooServer } from "arakoodev/arakooserver"; +import Jsonnet from "@arakoodev/jsonnet"; +//@ts-ignore +import createClient from 'sync-rpc'; +import { fileURLToPath } from "url"; +import path from "path"; +const server = new ArakooServer(); +const app = server.createApp(); +const jsonnet = new Jsonnet(); +const __dirname = fileURLToPath(import.meta.url); +const openAICall = createClient(path.join(__dirname, "../lib/generateResponse.cjs")); +app.post("/chat", async (c) => { + const { question } = await c.req.json(); + jsonnet.extString("question", question || ""); + jsonnet.javascriptCallback("openAICall", openAICall); + let response = jsonnet.evaluateFile(path.join(__dirname, "../../jsonnet/main.jsonnet")); + return c.json(JSON.parse(response)); +}); +server.listen(3000); diff --git a/JS/edgechains/examples/chat-with-llm/dist/lib/generateResponse.cjs b/JS/edgechains/examples/chat-with-llm/dist/lib/generateResponse.cjs new file mode 100644 index 000000000..c2b4221f4 --- /dev/null +++ b/JS/edgechains/examples/chat-with-llm/dist/lib/generateResponse.cjs @@ -0,0 +1,47 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const path = require("path"); +const { OpenAI } = require("arakoodev/openai"); +const zod_1 = require("zod"); +const Jsonnet = require("@arakoodev/jsonnet"); +const jsonnet = new Jsonnet(); +const secretsPath = path.join(__dirname, "../../jsonnet/secrets.jsonnet"); +const openAIApiKey = 
JSON.parse(jsonnet.evaluateFile(secretsPath)).openai_api_key; +const openai = new OpenAI({ apiKey: openAIApiKey }); +const schema = zod_1.z.object({ + answer: zod_1.z.string().describe("The answer to the question") +}); +// ** Example schema for a horse object, You can unComment the following code if you want to test the complex schema based answer** +// const genderOrStage = ["mare", "stallion", "gelding", "foal"]; // Example values +// const breed = ["arabian", "thoroughbred", "quarter horse", "appaloosa"]; // Example values +// const schema = z.object({ +// isAdvertisingSaleOfHorse: z +// .boolean() +// .describe("Whether the text is advertising the sale of a horse."), +// genderOrStage: z +// .enum(genderOrStage as any) +// .nullable() +// .describe(`The gender or stage of the horse, which MUST be one of ${genderOrStage.join(", ")}.`), +// age: z.number().nullable().describe("The age of the horse in years."), +// height: z.number().nullable().describe("The height of the horse in hands."), +// breed: z +// .enum(breed as any) +// .describe(`The breed of the horse, which MUST be one of ${breed.join(", ")}.`), +// gaited: z.boolean().describe("Whether the horse is gaited or not."), +// temperament: z +// .enum(["alpha", "other"]) +// .describe('The temperament of the horse, either "alpha" indicating it is top of the herd, or "other".'), +// }); +function openAICall() { + return function (prompt) { + try { + return openai.zodSchemaResponse({ prompt, schema: schema }).then((res) => { + return JSON.stringify(res); + }); + } + catch (error) { + return error; + } + }; +} +module.exports = openAICall; diff --git a/JS/edgechains/examples/chat-with-llm/jsonnet/main.jsonnet b/JS/edgechains/examples/chat-with-llm/jsonnet/main.jsonnet new file mode 100644 index 000000000..2fd656649 --- /dev/null +++ b/JS/edgechains/examples/chat-with-llm/jsonnet/main.jsonnet @@ -0,0 +1,16 @@ + +local promptTemplate = ||| + You are a helpful assistant that can answer questions based on given question + Answer the following question: {question} + |||; + + +local UserQuestion = std.extVar("question"); + +local promptWithQuestion = std.strReplace(promptTemplate,'{question}', UserQuestion + "\n"); + +local main() = + local response = arakoo.native("openAICall")(promptWithQuestion); + response; + +main() \ No newline at end of file diff --git a/JS/edgechains/examples/chat-with-llm/jsonnet/secrets.jsonnet b/JS/edgechains/examples/chat-with-llm/jsonnet/secrets.jsonnet new file mode 100644 index 000000000..978cacd33 --- /dev/null +++ b/JS/edgechains/examples/chat-with-llm/jsonnet/secrets.jsonnet @@ -0,0 +1,7 @@ + +local OPENAI_API_KEY = "sk-proj-***"; + +{ + "openai_api_key":OPENAI_API_KEY, +} + diff --git a/JS/edgechains/examples/chat-with-llm/package.json b/JS/edgechains/examples/chat-with-llm/package.json new file mode 100644 index 000000000..7a0cf0226 --- /dev/null +++ b/JS/edgechains/examples/chat-with-llm/package.json @@ -0,0 +1,22 @@ +{ + "name": "chat-with-youtube-video", + "version": "1.0.0", + "description": "", + "main": "index.js", + "type": "module", + "keywords": [], + "author": "", + "scripts": { + "start": "tsc && node --experimental-wasm-modules ./dist/index.js" + }, + "license": "ISC", + "dependencies": { + "@arakoodev/jsonnet": "^0.2.1", + "arakoodev": "^1.0.10", + "sync-rpc": "^1.3.6", + "zod": "^3.23.8" + }, + "devDependencies": { + "@types/node": "^20.12.12" + } +} diff --git a/JS/edgechains/examples/chat-with-llm/readme.md b/JS/edgechains/examples/chat-with-llm/readme.md new file mode 100644 index 
000000000..25cff9f54 --- /dev/null +++ b/JS/edgechains/examples/chat-with-llm/readme.md @@ -0,0 +1,41 @@ +## Video + ``` + https://drive.google.com/file/d/1y0tGZx9HnjZpWlMmJW-6Yt1fW9lXpxFD/view + ``` + +# Chat with LLM Example + +## Installation + +1. Install the required dependencies: + + ```bash + npm install + ``` + + +## Configuration + +1 Add OpenAiApi key in secrets.jsonnet + ```bash + local OPENAI_API_KEY = "sk-****"; + ``` + +## Usage + +1. Start the server: + + ```bash + npm run start + ``` + +2. Hit the `POST` endpoint with basic question `http://localhost:3000/chat`. + + + ```bash + + body = { + "question":"hi" + } + ``` + diff --git a/JS/edgechains/examples/chat-with-llm/src/index.ts b/JS/edgechains/examples/chat-with-llm/src/index.ts new file mode 100644 index 000000000..11a88ff01 --- /dev/null +++ b/JS/edgechains/examples/chat-with-llm/src/index.ts @@ -0,0 +1,27 @@ + +import { ArakooServer } from "arakoodev/arakooserver"; +import Jsonnet from "@arakoodev/jsonnet"; +//@ts-ignore +import createClient from 'sync-rpc'; + +import { fileURLToPath } from "url" +import path from "path"; +const server = new ArakooServer(); + +const app = server.createApp(); + +const jsonnet = new Jsonnet(); +const __dirname = fileURLToPath(import.meta.url); + +const openAICall = createClient(path.join(__dirname, "../lib/generateResponse.cjs")); + +app.post("/chat", async (c: any) => { + const { question } = await c.req.json(); + jsonnet.extString("question", question || ""); + jsonnet.javascriptCallback("openAICall", openAICall); + let response = jsonnet.evaluateFile(path.join(__dirname, "../../jsonnet/main.jsonnet")); + return c.json(JSON.parse(response)); +}); + +server.listen(3000) + diff --git a/JS/edgechains/examples/chat-with-llm/src/lib/generateResponse.cts b/JS/edgechains/examples/chat-with-llm/src/lib/generateResponse.cts new file mode 100644 index 000000000..bac42a8df --- /dev/null +++ b/JS/edgechains/examples/chat-with-llm/src/lib/generateResponse.cts @@ -0,0 +1,53 @@ +const path = require("path"); +const { OpenAI } = require("arakoodev/openai"); +import { z } from "zod"; +const Jsonnet = require("@arakoodev/jsonnet"); +const jsonnet = new Jsonnet(); + +const secretsPath = path.join(__dirname, "../../jsonnet/secrets.jsonnet"); +const openAIApiKey = JSON.parse(jsonnet.evaluateFile(secretsPath)).openai_api_key + +const openai = new OpenAI({ apiKey: openAIApiKey }) + +const schema = z.object({ + answer: z.string().describe("The answer to the question") +}) + +// ** Example schema for a horse object, You can unComment the following code if you want to test the complex schema based answer** +// const genderOrStage = ["mare", "stallion", "gelding", "foal"]; // Example values +// const breed = ["arabian", "thoroughbred", "quarter horse", "appaloosa"]; // Example values + +// const schema = z.object({ +// isAdvertisingSaleOfHorse: z +// .boolean() +// .describe("Whether the text is advertising the sale of a horse."), +// genderOrStage: z +// .enum(genderOrStage as any) +// .nullable() +// .describe(`The gender or stage of the horse, which MUST be one of ${genderOrStage.join(", ")}.`), +// age: z.number().nullable().describe("The age of the horse in years."), +// height: z.number().nullable().describe("The height of the horse in hands."), +// breed: z +// .enum(breed as any) +// .describe(`The breed of the horse, which MUST be one of ${breed.join(", ")}.`), +// gaited: z.boolean().describe("Whether the horse is gaited or not."), +// temperament: z +// .enum(["alpha", "other"]) +// .describe('The 
temperament of the horse, either "alpha" indicating it is top of the herd, or "other".'), +// }); + +function openAICall() { + + return function (prompt: string) { + try { + return openai.zodSchemaResponse({ prompt, schema: schema }).then((res: any) => { + return JSON.stringify(res) + }) + } catch (error) { + return error; + } + } +} + + +module.exports = openAICall; \ No newline at end of file diff --git a/JS/edgechains/examples/chat-with-llm/tsconfig.json b/JS/edgechains/examples/chat-with-llm/tsconfig.json new file mode 100644 index 000000000..3837eea6c --- /dev/null +++ b/JS/edgechains/examples/chat-with-llm/tsconfig.json @@ -0,0 +1,14 @@ +{ + "compilerOptions": { + "target": "ES2022", + "moduleResolution": "NodeNext", + "module": "NodeNext", + "rootDir": "./src", + "outDir": "./dist", + "esModuleInterop": true, + "forceConsistentCasingInFileNames": true, + "strict": true, + "skipLibCheck": true + }, + "exclude": ["./**/*.test.ts", "vitest.config.ts"] +} \ No newline at end of file diff --git a/JS/edgechains/examples/chat-with-pdf/package.json b/JS/edgechains/examples/chat-with-pdf/package.json index b8acb751b..1e2c6ec9c 100644 --- a/JS/edgechains/examples/chat-with-pdf/package.json +++ b/JS/edgechains/examples/chat-with-pdf/package.json @@ -15,9 +15,10 @@ "dependencies": { "@arakoodev/jsonnet": "^0.2.0", "@babel/preset-env": "^7.24.4", - "arakoodev": "^1.0.7", + "arakoodev": "^1.0.10", "cli-spinner": "^0.2.10", - "regenerator-runtime": "^0.14.1" + "regenerator-runtime": "^0.14.1", + "zod": "^3.23.8" }, "devDependencies": { "@types/cli-spinner": "^0.2.3", diff --git a/JS/edgechains/examples/chat-with-pdf/readme.md b/JS/edgechains/examples/chat-with-pdf/readme.md new file mode 100644 index 000000000..f6ae65827 --- /dev/null +++ b/JS/edgechains/examples/chat-with-pdf/readme.md @@ -0,0 +1,101 @@ +--- + +# Setup +1. Clone the repo into a public GitHub repository (or fork [https://github.com/arakoodev/EdgeChains/fork](https://github.com/arakoodev/EdgeChains/fork)). + +``` + git clone https://github.com/arakoodev/EdgeChains/ +``` + +2. Go to the project folder +``` + cd EdgeChains +``` + +# Run the ChatWithPdf example + +This section provides instructions for developers on how to utilize the chat with PDF feature. By following these steps, you can integrate the functionality seamlessly into your projects. + +--- + +1. Go to the ChatWithPdfExample +``` + cd JS/edgechains/examples/chat-with-pdf/ +``` + +2. Install packages with npm + +``` + npm install +``` + +3. Setup you secrets in `secrets.jsonnet` + +``` + local SUPABASE_API_KEY = "your supabase api key here"; + + + local OPENAI_API_KEY = "your openai api key here"; + + local SUPABASE_URL = "your supabase url here"; + + { + "supabase_api_key":SUPABASE_API_KEY, + "supabase_url":SUPABASE_URL, + "openai_api_key":OPENAI_API_KEY, + } + +``` + +4. Database Configuration + +- Ensure that you have a PostgreSQL Vector database set up at [Supabase](https://supabase.com/vector). +- Go to the SQL Editor tab in Supabase. +- Create a new query using the New Query button. +- Paste the following query into the editor and run it using the Run button in the bottom right corner. 
+ +``` +create table if not exists documents ( + id bigint primary key generated always as identity, + content text, + embedding vector (1536) + ); + +create or replace function public.match_documents ( + query_embedding vector(1536), + similarity_threshold float, + match_count int +) +returns table ( + id bigint, + content text, + similarity float +) +language sql +as $$ + select + id, + content, + 1- (documents.embedding <=> query_embedding) as similarity + from documents + where 1 - (documents.embedding <=> query_embedding) > similarity_threshold + order by documents.embedding <=> query_embedding + limit match_count; + $$; + +``` + +- You should see a success message in the Result tab. +![image](https://github.com/Shyam-Raghuwanshi/EdgeChains/assets/94217498/052d9a15-838f-4e68-9888-072cecb78a13) + +5. Run the example + +``` + npm run start +``` + +- Then you can run the ChatWithPdf example using npm run start and continue chatting with the example.pdf. + +⚠️👉Remember: Comment out the InsertToSupabase function if you are running the code again; otherwise, the PDF data will be pushed again to the Supabase vector data. + +--- diff --git a/JS/edgechains/examples/chat-with-pdf/src/routes/chat.ts b/JS/edgechains/examples/chat-with-pdf/src/routes/chat.ts index cd1d11704..e502778ea 100644 --- a/JS/edgechains/examples/chat-with-pdf/src/routes/chat.ts +++ b/JS/edgechains/examples/chat-with-pdf/src/routes/chat.ts @@ -1,12 +1,14 @@ import path from "path"; import fs from "fs"; import { fileURLToPath } from "url"; -import { ChatOpenAi } from "arakoodev/openai"; +import { OpenAI } from "arakoodev/openai"; import { Supabase } from "arakoodev/vector-db"; import { PdfLoader } from "arakoodev/document-loader"; import { TextSplitter } from "arakoodev/splitter"; import { ArakooServer } from "arakoodev/arakooserver"; -import { Spinner } from "cli-spinner"; +import { Spinner } from "cli-spinner" +import { z } from "zod"; + const server = new ArakooServer(); const __dirname = fileURLToPath(import.meta.url); @@ -43,8 +45,8 @@ const supabase = new Supabase(supabaseUrl, supabaseApiKey); const client = supabase.createClient(); -const llm = new ChatOpenAi({ - openAIApiKey: openAIApiKey, +const llm = new OpenAI({ + apiKey: openAIApiKey, }); async function getEmbeddings(content) { @@ -84,7 +86,11 @@ async function InsertToSupabase(content) { } } // this should run only once for uploding pdf data to supabase then you can continue with the chatbot functionality -// await InsertToSupabase(splitedDocs); +await InsertToSupabase(splitedDocs); + +const chatSchema = z.object({ + answer: z.string().describe("The answer to the question") +}) ChatRouter.get("/", async (c) => { const searchStr = c.req.query("question").toLowerCase(); @@ -115,6 +121,6 @@ ChatRouter.get("/", async (c) => { .evaluateFile(InterPath); const prompt = JSON.parse(InterLoader).prompt; - const res = await llm.generateResponse(prompt); - return c.json({ res }); + const res = await llm.zodSchemaResponse({ schema: chatSchema, prompt: prompt }) + return c.json(res); }); diff --git a/JS/edgechains/examples/getWeather-or-time-function-calling/dist/backup.js b/JS/edgechains/examples/getWeather-or-time-function-calling/dist/backup.js new file mode 100644 index 000000000..b0c3014fa --- /dev/null +++ b/JS/edgechains/examples/getWeather-or-time-function-calling/dist/backup.js @@ -0,0 +1,223 @@ +// import { OpenAI } from "openai"; +// import readlineSync from "readline-sync"; +// import axios from "axios"; +// import moment from "moment-timezone"; +// import { 
ChatOpenAi } from "arakoodev/openai" +export {}; +// const openai = new OpenAI({ apiKey: "sk-proj-wOByN9rS9LorcpZlyjhYT3BlbkFJYq5aMCDtdXY5MMISVuze" }) +// async function lookupTime(location: string) { +// try { +// const response = await axios.get(`http://worldtimeapi.org/api/timezone/${location}`); +// const { datetime } = response.data; +// const dateTime = moment.tz(datetime, location).format("h:mmA"); +// const timeResponse = `The current time in ${location} is ${dateTime}.`; +// return timeResponse; +// } catch (error) { +// console.error(error); +// } +// } +// async function lookupWeather(location: string) { +// const options = { +// method: 'GET', +// url: 'https://weatherapi-com.p.rapidapi.com/forecast.json', +// params: { +// q: location, +// days: '3' +// }, +// headers: { +// 'X-RapidAPI-Key': "e8c7527ecdmshdd2b467ed60092cp1803cfjsn2a4dcb102fc4", +// ' X-RapidAPI-Host': 'weatherapi-com.p.rapidapi.com' +// } +// }; +// try { +// const response = await axios.request(options); +// let weather = response.data; +// const weatherForecast = `Location: ${weather.location.name} \ +// Current Temperature: ${weather.current.temp_c} \ +// Condition: ${weather.current.condition.text}. \ +// Low Today: ${weather.forecast.forecastday[0].day.mintemp_c} \ +// High Today: ${weather.forecast.forecastday[0].day.maxtemp_c}`; +// console.log({ weatherForecast }); +// return weatherForecast; +// } catch (error) { +// console.error(error); +// return "No forecast found"; +// } +// } +// const user_input = readlineSync.question("Your input: "); +// try { +// const completion = await openai.chat.completions.create({ +// model: "gpt-3.5-turbo-0613", +// messages: [{ role: "user", content: user_input }], +// functions: [ +// { +// name: "lookupTime", +// description: "get the current time in a given location", +// parameters: { +// type: "object", // specify that the parameter is an object +// properties: { +// location: { +// type: "string", // specify the parameter type as a string +// description: "The location, e.g. Beijing, China. But it should be written in a timezone name like Asia/Shanghai" +// } +// }, +// required: ["location"] // specify that the location parameter is required +// } +// }, +// { +// name: "lookupWeather", +// description: "get the weather forecast in a given location", +// parameters: { +// type: "object", // specify that the parameter is an object +// properties: { +// location: { +// type: "string", // specify the parameter type as a string +// description: "The location, e.g. Beijing, China. 
But it should be written in a city, state, country" +// } +// }, +// required: ["location"] // specify that the location parameter is required +// } +// } +// ], +// function_call: "auto" +// }); +// const completionResponse = completion.choices[0].message; +// if (!completionResponse.content) { +// const functionCallName = completionResponse?.function_call?.name; +// if (functionCallName === "lookupTime") { +// const completionArguments = JSON.parse(completionResponse?.function_call?.arguments!); +// const completion_text = await lookupTime(completionArguments.location); +// console.log(completion_text); +// } else if (functionCallName === "lookupWeather") { +// const completionArguments = JSON.parse(completionResponse?.function_call?.arguments!); +// const completion_text = await lookupWeather(completionArguments.location); +// try { +// const completion = await openai.chat.completions.create({ +// model: "gpt-3.5-turbo-0613", +// messages: [{ role: "user", content: "Summarize the following input." + completion_text }] +// }); +// const completionResponse = completion.choices[0].message.content; +// console.log(completionResponse); +// } catch (error) { +// console.log(error) +// } +// } +// } else { +// const completion_text = completion.choices[0].message.content; +// console.log(completion_text); +// } +// } catch (error) { +// console.error(error); +// } +//////////////////////////////////////////////////////////////// +// import readlineSync from "readline-sync"; +// import axios from "axios"; +// import moment from "moment-timezone"; +// import { OpenAI } from "arakoodev/openai"; +// const openai = new OpenAI({ +// apiKey: "sk-proj-wOByN9rS9LorcpZlyjhYT3BlbkFJYq5aMCDtdXY5MMISVuze" +// }) +// async function lookupTime(location: string) { +// try { +// const response = await axios.get(`http://worldtimeapi.org/api/timezone/${location}`); +// const { datetime } = response.data; +// const dateTime = moment.tz(datetime, location).format("h:mmA"); +// const timeResponse = `The current time in ${location} is ${dateTime}.`; +// return timeResponse; +// } catch (error) { +// console.error(error); +// } +// } +// async function lookupWeather(location: string) { +// const options = { +// method: 'GET', +// url: 'https://weatherapi-com.p.rapidapi.com/forecast.json', +// params: { +// q: location, +// days: '3' +// }, +// headers: { +// 'X-RapidAPI-Key': "e8c7527ecdmshdd2b467ed60092cp1803cfjsn2a4dcb102fc4", +// ' X-RapidAPI-Host': 'weatherapi-com.p.rapidapi.com' +// } +// }; +// try { +// const response = await axios.request(options); +// let weather = response.data; +// const weatherForecast = `Location: ${weather.location.name} \ +// Current Temperature: ${weather.current.temp_c} \ +// Condition: ${weather.current.condition.text}. \ +// Low Today: ${weather.forecast.forecastday[0].day.mintemp_c} \ +// High Today: ${weather.forecast.forecastday[0].day.maxtemp_c}`; +// return weatherForecast; +// } catch (error) { +// console.error(error); +// return "No forecast found"; +// } +// } +// const user_input = readlineSync.question("Your input: "); +// const functions = [ +// { +// name: "lookupTime", +// description: "get the current time in a given location", +// parameters: { +// type: "object", // specify that the parameter is an object +// properties: { +// location: { +// type: "string", // specify the parameter type as a string +// description: "The location, e.g. Beijing, China. 
But it should be written in a timezone name like Asia/Shanghai" +// } +// }, +// required: ["location"] // specify that the location parameter is required +// } +// }, +// { +// name: "lookupWeather", +// description: "get the weather forecast in a given location", +// parameters: { +// type: "object", // specify that the parameter is an object +// properties: { +// location: { +// type: "string", // specify the parameter type as a string +// description: "The location, e.g. Beijing, China. But it should be written in a city, state, country" +// } +// }, +// required: ["location"] // specify that the location parameter is required +// } +// } +// ] +// try { +// const completion = await openai.chatWithFunction({ +// model: "gpt-3.5-turbo-0613", +// messages: [{ role: "user", content: user_input }], +// functions, +// function_call: "auto" +// }) +// const completionResponse = completion; +// if (!completionResponse.content) { +// const functionCallName = completionResponse?.function_call?.name; +// if (functionCallName === "lookupTime") { +// const completionArguments = JSON.parse(completionResponse?.function_call?.arguments!); +// const completion_text = await lookupTime(completionArguments.location); +// console.log(completion_text); +// } else if (functionCallName === "lookupWeather") { +// const completionArguments = JSON.parse(completionResponse?.function_call?.arguments!); +// const completion_text = await lookupWeather(completionArguments.location); +// try { +// const completion = await openai.chat({ +// model: "gpt-3.5-turbo-0613", +// messages: [{ role: "user", content: "Summarize the following input." + completion_text }] +// }) +// const completionResponse = completion.content; +// console.log(completionResponse); +// } catch (error) { +// console.log(error) +// } +// } +// } else { +// const completion_text = completion.content; +// console.log(completion_text); +// } +// } catch (error) { +// console.error(error); +// } diff --git a/JS/edgechains/examples/getWeather-or-time-function-calling/dist/index.js b/JS/edgechains/examples/getWeather-or-time-function-calling/dist/index.js new file mode 100644 index 000000000..2dfa9181b --- /dev/null +++ b/JS/edgechains/examples/getWeather-or-time-function-calling/dist/index.js @@ -0,0 +1,25 @@ +import { ArakooServer } from "arakoodev/arakooserver"; +import { fileURLToPath } from "url"; +import Jsonnet from "@arakoodev/jsonnet"; +import path from "path"; +//@ts-ignore +import createClient from "sync-rpc"; +const server = new ArakooServer(); +const app = server.createApp(); +const jsonnet = new Jsonnet(); +const __dirname = fileURLToPath(import.meta.url); +const openAIChat = createClient(path.join(__dirname, "../lib/openAIChat.cjs")); +const openAIFunction = createClient(path.join(__dirname, "../lib/openAIFunction.cjs")); +const lookupTime = createClient(path.join(__dirname, "../lib/lookupTime.cjs")); +const lookupWeather = createClient(path.join(__dirname, "../lib/lookupWeather.cjs")); +app.get("/", async (c) => { + const { question } = c.req.query(); + jsonnet.extString("user_input", question); + jsonnet.javascriptCallback("lookupTime", lookupTime); + jsonnet.javascriptCallback("lookupWeather", lookupWeather); + jsonnet.javascriptCallback("openAIChat", openAIChat); + jsonnet.javascriptCallback("openAIFunction", openAIFunction); + const response = jsonnet.evaluateFile(path.join(__dirname, "../../jsonnet/main.jsonnet")); + return c.json(response); +}); +server.listen(3000); diff --git 
a/JS/edgechains/examples/getWeather-or-time-function-calling/dist/lib/lookupTime.cjs b/JS/edgechains/examples/getWeather-or-time-function-calling/dist/lib/lookupTime.cjs new file mode 100644 index 000000000..c0304b511 --- /dev/null +++ b/JS/edgechains/examples/getWeather-or-time-function-calling/dist/lib/lookupTime.cjs @@ -0,0 +1,23 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const axios = require("axios"); +const moment = require("moment-timezone"); +function lookupTime() { + return ((location) => { + try { + const response = axios.get(`http://worldtimeapi.org/api/timezone/${location}`).then((response) => { + const { datetime } = response.data; + const dateTime = moment.tz(datetime, location).format("h:mmA"); + const timeResponse = `The current time in ${location} is ${dateTime}.`; + return timeResponse; + }).catch((error) => { + console.error(error); + }); + return response; + } + catch (error) { + console.error(error); + } + }); +} +module.exports = lookupTime; diff --git a/JS/edgechains/examples/getWeather-or-time-function-calling/dist/lib/lookupWeather.cjs b/JS/edgechains/examples/getWeather-or-time-function-calling/dist/lib/lookupWeather.cjs new file mode 100644 index 000000000..461131edb --- /dev/null +++ b/JS/edgechains/examples/getWeather-or-time-function-calling/dist/lib/lookupWeather.cjs @@ -0,0 +1,42 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const axios = require('axios'); +const path = require('path'); +const Jsonnet = require("@arakoodev/jsonnet"); +const jsonnet = new Jsonnet(); +const secretsPath = path.join(__dirname, "../../jsonnet/secrets.jsonnet"); +const rapidAPI_Key = JSON.parse(jsonnet.evaluateFile(secretsPath)).rapid_api_key; +function lookupWeather() { + return ((location) => { + const options = { + url: 'https://weatherapi-com.p.rapidapi.com/forecast.json', + params: { + q: location, + days: '3' + }, + headers: { + 'X-RapidAPI-Key': rapidAPI_Key, + ' X-RapidAPI-Host': 'weatherapi-com.p.rapidapi.com' + } + }; + try { + const response = axios.request(options).then((response) => { + const weather = response.data; + const weatherForecast = `Location: ${weather.location.name} \ + Current Temperature: ${weather.current.temp_c} \ + Condition: ${weather.current.condition.text}. 
\ + Low Today: ${weather.forecast.forecastday[0].day.mintemp_c} \ + High Today: ${weather.forecast.forecastday[0].day.maxtemp_c}`; + return weatherForecast; + }).catch((error) => { + console.error(error); + }); + return response; + } + catch (error) { + console.error(error); + return "No forecast found"; + } + }); +} +module.exports = lookupWeather; diff --git a/JS/edgechains/examples/getWeather-or-time-function-calling/dist/lib/lookupWheather.cjs b/JS/edgechains/examples/getWeather-or-time-function-calling/dist/lib/lookupWheather.cjs new file mode 100644 index 000000000..f58728634 --- /dev/null +++ b/JS/edgechains/examples/getWeather-or-time-function-calling/dist/lib/lookupWheather.cjs @@ -0,0 +1,38 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const axios = require('axios'); +function lookupWeather() { + return ((location) => { + const options = { + method: 'GET', + url: 'https://weatherapi-com.p.rapidapi.com/forecast.json', + params: { + q: location, + days: '3' + }, + headers: { + 'X-RapidAPI-Key': "e8c7527ecdmshdd2b467ed60092cp1803cfjsn2a4dcb102fc4", + ' X-RapidAPI-Host': 'weatherapi-com.p.rapidapi.com' + } + }; + try { + const response = axios.request(options).then((response) => { + return response; + }).catch((error) => { + console.error(error); + }); + let weather = response.data; + const weatherForecast = `Location: ${weather.location.name} \ + Current Temperature: ${weather.current.temp_c} \ + Condition: ${weather.current.condition.text}. \ + Low Today: ${weather.forecast.forecastday[0].day.mintemp_c} \ + High Today: ${weather.forecast.forecastday[0].day.maxtemp_c}`; + return weatherForecast; + } + catch (error) { + console.error(error); + return "No forecast found"; + } + }); +} +module.exports = lookupWeather; diff --git a/JS/edgechains/examples/getWeather-or-time-function-calling/dist/lib/openAIChat.cjs b/JS/edgechains/examples/getWeather-or-time-function-calling/dist/lib/openAIChat.cjs new file mode 100644 index 000000000..19d913877 --- /dev/null +++ b/JS/edgechains/examples/getWeather-or-time-function-calling/dist/lib/openAIChat.cjs @@ -0,0 +1,30 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const { OpenAI } = require("arakoodev/openai"); +const path = require('path'); +const Jsonnet = require("@arakoodev/jsonnet"); +const jsonnet = new Jsonnet(); +const secretsPath = path.join(__dirname, "../../jsonnet/secrets.jsonnet"); +const apiKey = JSON.parse(jsonnet.evaluateFile(secretsPath)).openai_api_key; +const openai = new OpenAI({ + apiKey +}); +function openAIChat() { + return ((prompt) => { + try { + const completion = openai.chat({ + model: "gpt-3.5-turbo-0613", + messages: [{ role: "user", content: "Summarize the following input." 
+ prompt }] + }).then((completion) => { + return JSON.stringify(completion); + }).catch((error) => { + console.error(error); + }); + return completion; + } + catch (error) { + console.error(error); + } + }); +} +module.exports = openAIChat; diff --git a/JS/edgechains/examples/getWeather-or-time-function-calling/dist/lib/openAIFunction.cjs b/JS/edgechains/examples/getWeather-or-time-function-calling/dist/lib/openAIFunction.cjs new file mode 100644 index 000000000..2fddac055 --- /dev/null +++ b/JS/edgechains/examples/getWeather-or-time-function-calling/dist/lib/openAIFunction.cjs @@ -0,0 +1,32 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const { OpenAI } = require("arakoodev/openai"); +const path = require('path'); +const Jsonnet = require("@arakoodev/jsonnet"); +const jsonnet = new Jsonnet(); +const secretsPath = path.join(__dirname, "../../jsonnet/secrets.jsonnet"); +const apiKey = JSON.parse(jsonnet.evaluateFile(secretsPath)).openai_api_key; +const openai = new OpenAI({ + apiKey +}); +function openAIFunction() { + return (({ prompt, functions }) => { + try { + const completion = openai.chatWithFunction({ + model: "gpt-3.5-turbo-0613", + messages: [{ role: "user", content: prompt }], + functions, + function_call: "auto" + }).then((completion) => { + return JSON.stringify(completion); + }).catch((error) => { + console.error(error); + }); + return completion; + } + catch (error) { + console.error(error); + } + }); +} +module.exports = openAIFunction; diff --git a/JS/edgechains/examples/getWeather-or-time-function-calling/dist/weather.js b/JS/edgechains/examples/getWeather-or-time-function-calling/dist/weather.js new file mode 100644 index 000000000..213691f78 --- /dev/null +++ b/JS/edgechains/examples/getWeather-or-time-function-calling/dist/weather.js @@ -0,0 +1,119 @@ +var _a, _b, _c; +import readlineSync from "readline-sync"; +import axios from "axios"; +import moment from "moment-timezone"; +import { OpenAI } from "arakoodev/openai"; +const openai = new OpenAI({ + apiKey: "sk-proj-wOByN9rS9LorcpZlyjhYT3BlbkFJYq5aMCDtdXY5MMISVuze" +}); +async function lookupTime(location) { + try { + const response = await axios.get(`http://worldtimeapi.org/api/timezone/${location}`); + const { datetime } = response.data; + const dateTime = moment.tz(datetime, location).format("h:mmA"); + const timeResponse = `The current time in ${location} is ${dateTime}.`; + return timeResponse; + } + catch (error) { + console.error(error); + } +} +async function lookupWeather(location) { + const options = { + method: 'GET', + url: 'https://weatherapi-com.p.rapidapi.com/forecast.json', + params: { + q: location, + days: '3' + }, + headers: { + 'X-RapidAPI-Key': "e8c7527ecdmshdd2b467ed60092cp1803cfjsn2a4dcb102fc4", + ' X-RapidAPI-Host': 'weatherapi-com.p.rapidapi.com' + } + }; + try { + const response = await axios.request(options); + let weather = response.data; + const weatherForecast = `Location: ${weather.location.name} \ + Current Temperature: ${weather.current.temp_c} \ + Condition: ${weather.current.condition.text}. 
\ + Low Today: ${weather.forecast.forecastday[0].day.mintemp_c} \ + High Today: ${weather.forecast.forecastday[0].day.maxtemp_c}`; + console.log({ weatherForecast }); + return weatherForecast; + } + catch (error) { + console.error(error); + return "No forecast found"; + } +} +const user_input = readlineSync.question("Your input: "); +const functions = [ + { + name: "lookupTime", + description: "get the current time in a given location", + parameters: { + type: "object", // specify that the parameter is an object + properties: { + location: { + type: "string", // specify the parameter type as a string + description: "The location, e.g. Beijing, China. But it should be written in a timezone name like Asia/Shanghai" + } + }, + required: ["location"] // specify that the location parameter is required + } + }, + { + name: "lookupWeather", + description: "get the weather forecast in a given location", + parameters: { + type: "object", // specify that the parameter is an object + properties: { + location: { + type: "string", // specify the parameter type as a string + description: "The location, e.g. Beijing, China. But it should be written in a city, state, country" + } + }, + required: ["location"] // specify that the location parameter is required + } + } +]; +try { + const completion = await openai.chatWithFunction({ + model: "gpt-3.5-turbo-0613", + messages: [{ role: "user", content: user_input }], + functions, + function_call: "auto" + }); + const completionResponse = completion; + if (!completionResponse.content) { + const functionCallName = (_a = completionResponse === null || completionResponse === void 0 ? void 0 : completionResponse.function_call) === null || _a === void 0 ? void 0 : _a.name; + if (functionCallName === "lookupTime") { + const completionArguments = JSON.parse((_b = completionResponse === null || completionResponse === void 0 ? void 0 : completionResponse.function_call) === null || _b === void 0 ? void 0 : _b.arguments); + const completion_text = await lookupTime(completionArguments.location); + console.log(completion_text); + } + else if (functionCallName === "lookupWeather") { + const completionArguments = JSON.parse((_c = completionResponse === null || completionResponse === void 0 ? void 0 : completionResponse.function_call) === null || _c === void 0 ? void 0 : _c.arguments); + const completion_text = await lookupWeather(completionArguments.location); + try { + const completion = await openai.chat({ + model: "gpt-3.5-turbo-0613", + messages: [{ role: "user", content: "Summarize the following input." + completion_text }] + }); + const completionResponse = completion.content; + console.log(completionResponse); + } + catch (error) { + console.log(error); + } + } + } + else { + const completion_text = completion.content; + console.log(completion_text); + } +} +catch (error) { + console.error(error); +} diff --git a/JS/edgechains/examples/getWeather-or-time-function-calling/jsonnet/main.jsonnet b/JS/edgechains/examples/getWeather-or-time-function-calling/jsonnet/main.jsonnet new file mode 100644 index 000000000..c1adfa790 --- /dev/null +++ b/JS/edgechains/examples/getWeather-or-time-function-calling/jsonnet/main.jsonnet @@ -0,0 +1,61 @@ +local functions = [ + { + name: "lookupTime", + description: "get the current time in a given location", + parameters: { + type: "object", // specify that the parameter is an object + properties: { + location: { + type: "string", // specify the parameter type as a string + description: "The location, e.g. Beijing, China. 
But it should be written in a timezone name like Asia/Shanghai" + } + }, + required: ["location"] // specify that the location parameter is required + } + }, + { + name: "lookupWeather", + description: "get the weather forecast in a given location", + parameters: { + type: "object", // specify that the parameter is an object + properties: { + location: { + type: "string", // specify the parameter type as a string + description: "The location, e.g. Beijing, China. But it should be written in a city, state, country" + } + }, + required: ["location"] // specify that the location parameter is required + } + } +]; + + +local user_input = std.extVar("user_input"); + +local completionResponse = std.parseJson(arakoo.native("openAIFunction")({prompt:user_input, functions:functions})); + +// { +// role: 'assistant', +// content: null, +// function_call: { +// name: 'lookupWeather', +// arguments: '{\n "location": "Paris, France"\n}' +// } +// } + +if(completionResponse.content == null || completionResponse.content == "null") then + local functionCallName = completionResponse.function_call.name; + if(functionCallName == "lookupTime") then + local completionArguments = completionResponse.function_call.arguments; + local completion_text = arakoo.native("lookupTime")(std.parseJson(completionArguments).location); + completion_text + else if(functionCallName == "lookupWeather") then + local completionArguments = completionResponse.function_call.arguments; + local completion_text = arakoo.native("lookupWeather")(std.parseJson(completionArguments).location); + + local completion = std.parseJson(arakoo.native("openAIChat")(completion_text)); + local completionResponse = completion.content; + completionResponse +else + local completion_text = completionResponse.content; + completion_text diff --git a/JS/edgechains/examples/getWeather-or-time-function-calling/jsonnet/secrets.jsonnet b/JS/edgechains/examples/getWeather-or-time-function-calling/jsonnet/secrets.jsonnet new file mode 100644 index 000000000..090cd6e67 --- /dev/null +++ b/JS/edgechains/examples/getWeather-or-time-function-calling/jsonnet/secrets.jsonnet @@ -0,0 +1,9 @@ + +local OPENAI_API_KEY = "sk-proj-***"; +local Rapid_API_Key = "e8c***"; + +{ + "rapid_api_key":Rapid_API_Key, + "openai_api_key":OPENAI_API_KEY, +} + diff --git a/JS/edgechains/examples/getWeather-or-time-function-calling/package.json b/JS/edgechains/examples/getWeather-or-time-function-calling/package.json new file mode 100644 index 000000000..4e4305290 --- /dev/null +++ b/JS/edgechains/examples/getWeather-or-time-function-calling/package.json @@ -0,0 +1,28 @@ +{ + "name": "function-calling", + "version": "1.0.0", + "description": "", + "main": "index.js", + "type": "module", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1", + "start": "tsc && node --experimental-wasm-modules ./dist/index.js" + }, + "keywords": [], + "author": "", + "license": "ISC", + "dependencies": { + "@arakoodev/jsonnet": "^0.2.1", + "@sendgrid/mail": "^8.1.3", + "arakoodev": "^1.0.9", + "axios": "^1.7.1", + "moment-timezone": "^0.5.45", + "openai": "^4.47.1", + "readline-sync": "^1.4.10", + "resend": "^3.2.0", + "sync-rpc": "^1.3.6" + }, + "devDependencies": { + "@types/readline-sync": "^1.4.8" + } +} diff --git a/JS/edgechains/examples/getWeather-or-time-function-calling/readme.md b/JS/edgechains/examples/getWeather-or-time-function-calling/readme.md new file mode 100644 index 000000000..f53b38dca --- /dev/null +++ b/JS/edgechains/examples/getWeather-or-time-function-calling/readme.md 
@@ -0,0 +1,48 @@ +## Video + ``` + https://drive.google.com/file/d/1amBXmUZVyWR6l-MaYuE9h6zAJyvIYNIp/view + ``` + +## Installation + +1. Install the required dependencies: + + ```bash + npm install + ``` + +## Configuration + +1 Add OpenAiApi key in secrets.jsonnet + + ```bash + local OPENAI_API_KEY = "sk-****"; + ``` + +2 Add RapidAPIKey key in secrets.jsonnet + + + ```bash + local Rapid_API_Key = "e8c7527e*****"; + ``` + +## Usage + +1. Start the server: + + ```bash + npm run start + ``` + +2. Hit the `GET` endpoint. + + + ```bash + http://localhost:3000?question=Paris temperature + ``` + or + + ```bash + http://localhost:3000?question=Paris time + ``` + diff --git a/JS/edgechains/examples/getWeather-or-time-function-calling/src/index.ts b/JS/edgechains/examples/getWeather-or-time-function-calling/src/index.ts new file mode 100644 index 000000000..2d2ae446f --- /dev/null +++ b/JS/edgechains/examples/getWeather-or-time-function-calling/src/index.ts @@ -0,0 +1,33 @@ +import { ArakooServer } from "arakoodev/arakooserver"; +import { fileURLToPath } from "url" +import Jsonnet from "@arakoodev/jsonnet" +import path from "path" + +//@ts-ignore +import createClient from "sync-rpc" + +const server = new ArakooServer(); +const app = server.createApp(); +const jsonnet = new Jsonnet(); + +const __dirname = fileURLToPath(import.meta.url); + +const openAIChat = createClient(path.join(__dirname, "../lib/openAIChat.cjs")); +const openAIFunction = createClient(path.join(__dirname, "../lib/openAIFunction.cjs")); +const lookupTime = createClient(path.join(__dirname, "../lib/lookupTime.cjs")); +const lookupWeather = createClient(path.join(__dirname, "../lib/lookupWeather.cjs")); + + +app.get("/", async (c) => { + const { question } = c.req.query(); + jsonnet.extString("user_input", question); + jsonnet.javascriptCallback("lookupTime", lookupTime); + jsonnet.javascriptCallback("lookupWeather", lookupWeather); + jsonnet.javascriptCallback("openAIChat", openAIChat); + jsonnet.javascriptCallback("openAIFunction", openAIFunction); + const response = jsonnet.evaluateFile(path.join(__dirname, "../../jsonnet/main.jsonnet")) + return c.json(response); +}); + + +server.listen(3000) diff --git a/JS/edgechains/examples/getWeather-or-time-function-calling/src/lib/lookupTime.cts b/JS/edgechains/examples/getWeather-or-time-function-calling/src/lib/lookupTime.cts new file mode 100644 index 000000000..ed94d3964 --- /dev/null +++ b/JS/edgechains/examples/getWeather-or-time-function-calling/src/lib/lookupTime.cts @@ -0,0 +1,23 @@ +const axios = require("axios"); +const moment = require("moment-timezone"); + +function lookupTime() { + return ((location: string) => { + try { + const response = axios.get(`http://worldtimeapi.org/api/timezone/${location}`).then((response: any) => { + const { datetime } = response.data; + const dateTime = moment.tz(datetime, location).format("h:mmA"); + const timeResponse = `The current time in ${location} is ${dateTime}.`; + return timeResponse; + }).catch((error: any) => { + console.error(error); + }) + + return response; + } catch (error) { + console.error(error); + } + }) +} + +module.exports = lookupTime; \ No newline at end of file diff --git a/JS/edgechains/examples/getWeather-or-time-function-calling/src/lib/lookupWeather.cts b/JS/edgechains/examples/getWeather-or-time-function-calling/src/lib/lookupWeather.cts new file mode 100644 index 000000000..74f2ec087 --- /dev/null +++ b/JS/edgechains/examples/getWeather-or-time-function-calling/src/lib/lookupWeather.cts @@ -0,0 +1,42 @@ +const 
axios = require('axios'); +const path = require('path'); +const Jsonnet = require("@arakoodev/jsonnet"); +const jsonnet = new Jsonnet(); + +const secretsPath = path.join(__dirname, "../../jsonnet/secrets.jsonnet"); +const rapidAPI_Key = JSON.parse(jsonnet.evaluateFile(secretsPath)).rapid_api_key + +function lookupWeather() { + return ((location: string) => { + const options = { + url: 'https://weatherapi-com.p.rapidapi.com/forecast.json', + params: { + q: location, + days: '3' + }, + headers: { + 'X-RapidAPI-Key': rapidAPI_Key, + ' X-RapidAPI-Host': 'weatherapi-com.p.rapidapi.com' + } + }; + try { + const response = axios.request(options).then((response: any) => { + const weather = response.data; + const weatherForecast = `Location: ${weather.location.name} \ + Current Temperature: ${weather.current.temp_c} \ + Condition: ${weather.current.condition.text}. \ + Low Today: ${weather.forecast.forecastday[0].day.mintemp_c} \ + High Today: ${weather.forecast.forecastday[0].day.maxtemp_c}`; + return weatherForecast + }).catch((error: any) => { + console.error(error); + }); + return response; + } catch (error) { + console.error(error); + return "No forecast found"; + } + }) +} + +module.exports = lookupWeather; \ No newline at end of file diff --git a/JS/edgechains/examples/getWeather-or-time-function-calling/src/lib/openAIChat.cts b/JS/edgechains/examples/getWeather-or-time-function-calling/src/lib/openAIChat.cts new file mode 100644 index 000000000..534745767 --- /dev/null +++ b/JS/edgechains/examples/getWeather-or-time-function-calling/src/lib/openAIChat.cts @@ -0,0 +1,34 @@ +const { OpenAI } = require("arakoodev/openai"); + +const path = require('path'); +const Jsonnet = require("@arakoodev/jsonnet"); +const jsonnet = new Jsonnet(); + +const secretsPath = path.join(__dirname, "../../jsonnet/secrets.jsonnet"); +const apiKey = JSON.parse(jsonnet.evaluateFile(secretsPath)).openai_api_key + +const openai = new OpenAI({ + apiKey +}) + +function openAIChat() { + + return ((prompt: string) => { + try { + const completion = openai.chat({ + model: "gpt-3.5-turbo-0613", + messages: [{ role: "user", content: "Summarize the following input." 
+ prompt }] + }).then((completion: any) => { + return JSON.stringify(completion); + } + ).catch((error: any) => { + console.error(error); + }) + return completion + } catch (error) { + console.error(error); + } + }) +} + +module.exports = openAIChat; \ No newline at end of file diff --git a/JS/edgechains/examples/getWeather-or-time-function-calling/src/lib/openAIFunction.cts b/JS/edgechains/examples/getWeather-or-time-function-calling/src/lib/openAIFunction.cts new file mode 100644 index 000000000..95bc2f4db --- /dev/null +++ b/JS/edgechains/examples/getWeather-or-time-function-calling/src/lib/openAIFunction.cts @@ -0,0 +1,41 @@ +const { OpenAI } = require("arakoodev/openai"); + +const path = require('path'); +const Jsonnet = require("@arakoodev/jsonnet"); +const jsonnet = new Jsonnet(); + +const secretsPath = path.join(__dirname, "../../jsonnet/secrets.jsonnet"); +const apiKey = JSON.parse(jsonnet.evaluateFile(secretsPath)).openai_api_key + +const openai = new OpenAI({ + apiKey +}) + +interface messageOption { + prompt: string; + functions: object | Array; +} + +function openAIFunction() { + + return (({ prompt, functions }: messageOption) => { + try { + const completion = openai.chatWithFunction({ + model: "gpt-3.5-turbo-0613", + messages: [{ role: "user", content: prompt }], + functions, + function_call: "auto" + }).then((completion: any) => { + return JSON.stringify(completion); + } + ).catch((error: any) => { + console.error(error); + }) + return completion + } catch (error) { + console.error(error); + } + }) +} + +module.exports = openAIFunction; \ No newline at end of file diff --git a/JS/edgechains/examples/getWeather-or-time-function-calling/tsconfig.json b/JS/edgechains/examples/getWeather-or-time-function-calling/tsconfig.json new file mode 100644 index 000000000..2c9d200a8 --- /dev/null +++ b/JS/edgechains/examples/getWeather-or-time-function-calling/tsconfig.json @@ -0,0 +1,14 @@ +{ + "compilerOptions": { + "target": "ES2017", + "moduleResolution": "NodeNext", + "module": "NodeNext", + "rootDir": "src", + "outDir": "./dist", + "esModuleInterop": true, + "forceConsistentCasingInFileNames": true, + "strict": true, + "skipLibCheck": true + }, + "exclude": ["./jsonnet"] +} \ No newline at end of file diff --git a/JS/edgechains/examples/hydeSearch/.gitignore b/JS/edgechains/examples/hydeSearch/.gitignore deleted file mode 100644 index d9cd34b5a..000000000 --- a/JS/edgechains/examples/hydeSearch/.gitignore +++ /dev/null @@ -1,6 +0,0 @@ -# standard exclusions -node_modules -# build artifacts -dist -# environment files -.env \ No newline at end of file diff --git a/JS/edgechains/examples/hydeSearch/README.md b/JS/edgechains/examples/hydeSearch/README.md deleted file mode 100644 index 81cc59336..000000000 --- a/JS/edgechains/examples/hydeSearch/README.md +++ /dev/null @@ -1,13 +0,0 @@ -## How to run hydeSearch Example:- - -- Use the following command in the root directory to run this example: - - `npm i` - - `npm run build` - - `npm start` - -- You will see that server will running on port `3000`. - -- Type `localhost:3000` in your favorite browser to use this example. 
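Returning to the getWeather-or-time-function-calling example added above: a minimal, hedged sketch of how its route is exercised once `npm run start` has the server listening on port 3000. The path, port, and `question` query parameter come from `src/index.ts` and the readme in this diff; the `fetch` wrapper and the example question string are illustrative only.

```typescript
// Sketch only — exercises the GET "/" route from src/index.ts above.
// The handler forwards `question` to jsonnet/main.jsonnet as the `user_input`
// ext var, which dispatches to the lookupTime / lookupWeather / openAIFunction /
// openAIChat callbacks registered via sync-rpc.
const question = "Paris time"; // example input (also shown in the readme)
const res = await fetch(`http://localhost:3000/?question=${encodeURIComponent(question)}`);
console.log(await res.json()); // the evaluated main.jsonnet result, returned via c.json()
```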
diff --git a/JS/edgechains/examples/hydeSearch/esbuild.build.js b/JS/edgechains/examples/hydeSearch/esbuild.build.js deleted file mode 100644 index aca1def07..000000000 --- a/JS/edgechains/examples/hydeSearch/esbuild.build.js +++ /dev/null @@ -1,50 +0,0 @@ -import { build } from "esbuild"; -import { resolve, join } from "path"; -import { existsSync, mkdirSync, promises } from "fs"; - -import { fileURLToPath } from "url"; -import { dirname } from "path"; - -const __filename = fileURLToPath(import.meta.url); -const __dirname = dirname(__filename); - -const outputDir = resolve(__dirname, "dist"); - -if (!existsSync(outputDir)) { - mkdirSync(outputDir); -} - -const distPath = join(process.cwd(), "dist"); - -promises.mkdir(distPath, { recursive: true }); - -build({ - entryPoints: ["./src/index.ts"], - bundle: true, - minify: true, - platform: "node", - outfile: "./dist/index.js", - tsconfig: "./tsconfig.json", - target: "node21.1.0", - external: [ - "express", - "tsx", - "typescript", - "typeorm", - "react", - "react-dom", - "pg", - "jsdom", - "hono", - "@hanazuki/node-jsonnet", - "@arakoodev/jsonnet", - "readline/promises", - ], - format: "esm", - loader: { - ".html": "text", - ".css": "css", - ".jsonnet": "text", - ".wasm": "file", - }, -}).catch(() => process.exit(1)); diff --git a/JS/edgechains/examples/hydeSearch/htmljs.d.ts b/JS/edgechains/examples/hydeSearch/htmljs.d.ts deleted file mode 100644 index 6ea96c3fd..000000000 --- a/JS/edgechains/examples/hydeSearch/htmljs.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -export declare const view: (viewToRender: any) => (c: any) => Promise; -export declare const rootLayout: (layoutToApply: any) => (c: any, next: any) => Promise; -export declare const layout: (layoutToApply: any) => (c: any, next: any) => Promise; -export declare const Link: any; diff --git a/JS/edgechains/examples/hydeSearch/htmljs.js b/JS/edgechains/examples/hydeSearch/htmljs.js deleted file mode 100644 index d52af79ca..000000000 --- a/JS/edgechains/examples/hydeSearch/htmljs.js +++ /dev/null @@ -1,53 +0,0 @@ -import { html } from "hono/html"; -// These functions form the basis of the html.js framework and will be moved to a separate lib -export const view = (viewToRender) => { - return async (c) => { - const newBody = await viewToRender({ context: c }); - return c.html(newBody); - }; -}; -export const rootLayout = (layoutToApply) => { - return async (c, next) => { - await next(); - if (c.req.header("HX-Request") !== "true") { - // Req is a normal request, so we render the whole page which means adding the root layout - const curBody = await c.res.text(); - c.res = undefined; // To overwrite res, set it to undefined before setting new value https://github.com/honojs/hono/pull/970 released in https://github.com/honojs/hono/releases/tag/v3.1.0 - const newBody = await layoutToApply({ context: c, children: html(curBody) }); - c.res = c.html(newBody); - } - // Else do nothing and let the original response be sent - }; -}; -export const layout = (layoutToApply) => { - return async (c, next) => { - await next(); - if ( - (c.req.header("HX-Request") === "true" && - (c.req.header("HX-Boosted") === "true" || !c.req.header("HX-Target"))) || - c.req.header("HX-Request") !== "true" - ) { - // Req is regular req or boosted link, so we apply layouts - const curBody = await c.res.text(); - c.res = undefined; // To overwrite res, set it to undefined before setting new value https://github.com/honojs/hono/pull/970 released in https://github.com/honojs/hono/releases/tag/v3.1.0 - const newBody = await 
layoutToApply({ context: c, children: html(curBody) }); - c.res = c.html(newBody); - } - // Else do nothing and let the original response be sent, which will be a partial update applied to the page with hx-target - }; -}; -export const Link = ({ to, "hx-target": hxTarget, class: className, children }) => { - if (hxTarget) { - return html`${children}`; - } else { - return html`${children}`; - } -}; diff --git a/JS/edgechains/examples/hydeSearch/htmljs.ts b/JS/edgechains/examples/hydeSearch/htmljs.ts deleted file mode 100644 index ae460e4de..000000000 --- a/JS/edgechains/examples/hydeSearch/htmljs.ts +++ /dev/null @@ -1,58 +0,0 @@ -import { html } from "hono/html"; - -// These functions form the basis of the html.js framework and will be moved to a separate lib - -export const view = (viewToRender) => { - return async (c) => { - const newBody = await viewToRender({ context: c }); - return c.html(newBody); - }; -}; - -export const rootLayout = (layoutToApply) => { - return async (c, next) => { - await next(); - if (c.req.header("HX-Request") !== "true") { - // Req is a normal request, so we render the whole page which means adding the root layout - const curBody = await c.res.text(); - c.res = undefined; // To overwrite res, set it to undefined before setting new value https://github.com/honojs/hono/pull/970 released in https://github.com/honojs/hono/releases/tag/v3.1.0 - const newBody = await layoutToApply({ context: c, children: html(curBody) }); - c.res = c.html(newBody); - } - // Else do nothing and let the original response be sent - }; -}; - -export const layout = (layoutToApply) => { - return async (c, next) => { - await next(); - if ( - (c.req.header("HX-Request") === "true" && - (c.req.header("HX-Boosted") === "true" || !c.req.header("HX-Target"))) || - c.req.header("HX-Request") !== "true" - ) { - // Req is regular req or boosted link, so we apply layouts - const curBody = await c.res.text(); - c.res = undefined; // To overwrite res, set it to undefined before setting new value https://github.com/honojs/hono/pull/970 released in https://github.com/honojs/hono/releases/tag/v3.1.0 - const newBody = await layoutToApply({ context: c, children: html(curBody) }); - c.res = c.html(newBody); - } - // Else do nothing and let the original response be sent, which will be a partial update applied to the page with hx-target - }; -}; - -export const Link: any = ({ to, "hx-target": hxTarget, class: className, children }) => { - if (hxTarget) { - return html`${children}`; - } else { - return html`${children}`; - } -}; diff --git a/JS/edgechains/examples/hydeSearch/jsonnet/hyde.jsonnet b/JS/edgechains/examples/hydeSearch/jsonnet/hyde.jsonnet deleted file mode 100644 index 77881790f..000000000 --- a/JS/edgechains/examples/hydeSearch/jsonnet/hyde.jsonnet +++ /dev/null @@ -1,18 +0,0 @@ -//Replace the {} in the prompt template with the query -local updateQueryPrompt(promptTemplate, query) = - local updatedPrompt = std.strReplace(promptTemplate,'{}',query + "\n"); - updatedPrompt; - -//To replace the time in the system prompt -local updateTimePrompt(promptTemplate, time) = - local updatedPrompt =std.strReplace(promptTemplate,'{time}', time ); - updatedPrompt; - -local promptTemplate = std.extVar("promptTemplate"); -local time = std.extVar("time"); -local query = std.extVar("query"); -local updatedQueryPrompt = updateQueryPrompt(promptTemplate, query); -local updatedPrompt = updateTimePrompt(updatedQueryPrompt, time); -{ - "prompt": updatedPrompt -} \ No newline at end of file diff --git 
a/JS/edgechains/examples/hydeSearch/jsonnet/prompts.jsonnet b/JS/edgechains/examples/hydeSearch/jsonnet/prompts.jsonnet deleted file mode 100644 index 10ff2da84..000000000 --- a/JS/edgechains/examples/hydeSearch/jsonnet/prompts.jsonnet +++ /dev/null @@ -1,103 +0,0 @@ -local WEB_SEARCH = ||| - Please write a passage to answer the question. - Question: {} - Passage: - |||; - -local SCIFACT = ||| - Please write a scientific paper passage to support/refute the claim. - Claim: {} - Passage: - |||; - -local ARGUANA = ||| - Please write a counter argument for the passage. - Passage: {} - Counter Argument: - |||; - -local TREC_COVID = ||| - Please write a scientific paper passage to answer the question. - Question: {} - Passage: - |||; - -local FIQA = ||| - Please write a financial article passage to answer the question. - Question: {} - Passage: - |||; - -local DBPEDIA_ENTITY = ||| - Please write a passage to answer the question. - Question: {} - Passage: - |||; - -local TREC_NEWS = ||| - Please write a news passage about the topic. - Topic: {} - Passage: - |||; - -local MR_TYDI = ||| - Please write a passage in {} to answer the question in detail. - Question: {} - Passage: - |||; -local CHUNK_SUMMARY = ||| - Summarize the following text to replace the original text with all important information left as it is. - Do not replace abbreviations with it's full forms. - {} - Summary: - |||; - -local ANS_PROMPT_SYSTEM = ||| - You are an AI assistant whose name is DoMIno. - - Its responses must not be vague, accusatory, rude, controversial, off-topic, or defensive. - - It should avoid giving subjective opinions but rely on objective facts or phrases like \"in this context a human might say...\", \"some people might think...\", etc. - - It can provide additional relevant details to answer in-depth and comprehensively covering mutiple aspects. - - It must provide an answer based solely on the provided sources below and not prior knowledge. It should ignore whether the question is singular or plural and just focus on the subject of the question. - - If the documents do not provide any context refuse to answer do not create an answer for the query without documents. - - If the full form of any abbreviation is unknown leave it as an abbreviation. Do not try to guess or infer the full form of the abrreviation. But do answer the query using the abbreviation without expanding it. - - If it doesn't know the answer, it must just say that it doesn't know and never try to make up an answer. However, if you are asked terms like highest, lowest, minimum, maximum and if you cannot find an exact answer, then you should mention that and still give an answer without the constraints of highest, lowest, minimum, maximum. - Below are multiple sources of information which are numbered. Please discard the sources of information that are not relevant for the question. Only use the ones that are relevant: - ---------------- - {} - |||; -local ANS_PROMPT_USER = ||| - Question: {} - Helpful Answer: - |||; -local SUMMARY = ||| - Do not expand on abbreviations and leave them as is in the reply. Please generate 5 different responses in bullet points for the question. - Please write a summary to answer the question in detail: - Question: {} - Passage: - |||; -local DATE_EXTRACTION = ||| - Extract the date of the document from the given chunk in the following format Month DD, YYYY. 
- Only give date in the answer, don't write any sentence or full stop: - {} - |||; -local TITLE_EXTRACTION = ||| - Extract the title of the document from the given chunk: - {} - |||; - -{ - "web_search": WEB_SEARCH, - "scifact": SCIFACT, - "arguana": ARGUANA, - "trec_covid": TREC_COVID, - "fiqa": FIQA, - "dbpedia_entity": DBPEDIA_ENTITY, - "trec_news": TREC_NEWS, - "mrqa_tydi": MR_TYDI, - "chunk_summary": CHUNK_SUMMARY, - "ans_prompt_system": ANS_PROMPT_SYSTEM, - "ans_prompt_user": ANS_PROMPT_USER, - "summary": SUMMARY, - "date_extraction": DATE_EXTRACTION, - "title_extraction": TITLE_EXTRACTION -} \ No newline at end of file diff --git a/JS/edgechains/examples/hydeSearch/ormconfig.json b/JS/edgechains/examples/hydeSearch/ormconfig.json deleted file mode 100644 index 21819c00f..000000000 --- a/JS/edgechains/examples/hydeSearch/ormconfig.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "type": "postgres", - "host": "db.rmzqtepwnzoxgkkzjctt.supabase.co", - "port": 5432, - "username": "postgres", - "password": "xaX0MYcf1YiJlChK", - "database": "postgres", - "entities": ["dist/entities/**/*.js"], - "synchronize": false, - "logging": false -} diff --git a/JS/edgechains/examples/hydeSearch/package.json b/JS/edgechains/examples/hydeSearch/package.json deleted file mode 100644 index 16ad7d403..000000000 --- a/JS/edgechains/examples/hydeSearch/package.json +++ /dev/null @@ -1,50 +0,0 @@ -{ - "name": "example", - "version": "1.0.0", - "description": "", - "type": "module", - "main": "dist/index.js", - "scripts": { - "build": "rm -rf dist && node esbuild.build.js", - "start": "node --experimental-wasm-modules src/index.js", - "lint": "eslint --ignore-path .eslintignore --ext .js,.ts", - "format": "prettier --ignore-path .gitignore --write \"**/*.+(js|ts|json)\"", - "test": "npx jest" - }, - "keywords": [], - "author": "", - "license": "ISC", - "dependencies": { - "@hono/node-server": "^1.2.0", - "@types/dotenv": "^8.2.0", - "hono": "^3.9.2", - "pg": "^8.11.3", - "reflect-metadata": "^0.1.13", - "tsc": "^2.0.4", - "@arakoodev/jsonnet": "0.1.2" - }, - "devDependencies": { - "@arakoodev/edgechains.js": "^0.1.22", - "@hanazuki/node-jsonnet": "^2.1.0", - "@types/jest": "^29.5.8", - "@types/node": "^20.9.4", - "@typescript-eslint/eslint-plugin": "^6.11.0", - "@typescript-eslint/parser": "^6.11.0", - "axios": "^1.6.2", - "dotenv": "^16.3.1", - "dts-bundle-generator": "^8.1.2", - "eslint": "^8.54.0", - "eslint-config-prettier": "^9.0.0", - "eslint-config-standard-with-typescript": "^40.0.0", - "eslint-plugin-import": "^2.29.0", - "eslint-plugin-n": "^16.3.1", - "eslint-plugin-promise": "^6.1.1", - "jest": "^29.7.0", - "prettier": "^3.1.0", - "react": "^18.2.0", - "ts-jest": "^29.1.1", - "tsx": "^3.12.2", - "typeorm": "^0.3.17", - "typescript": "^5.3.3" - } -} diff --git a/JS/edgechains/examples/hydeSearch/src/ExampleLayout.d.ts b/JS/edgechains/examples/hydeSearch/src/ExampleLayout.d.ts deleted file mode 100644 index bc18f3b4a..000000000 --- a/JS/edgechains/examples/hydeSearch/src/ExampleLayout.d.ts +++ /dev/null @@ -1,3 +0,0 @@ -import { FC } from "hono/jsx"; -declare const ExampleLayout: FC; -export default ExampleLayout; diff --git a/JS/edgechains/examples/hydeSearch/src/ExampleLayout.js b/JS/edgechains/examples/hydeSearch/src/ExampleLayout.js deleted file mode 100644 index 7f0c9cd3b..000000000 --- a/JS/edgechains/examples/hydeSearch/src/ExampleLayout.js +++ /dev/null @@ -1,279 +0,0 @@ -import { html } from "hono/html"; -const ExampleLayout = (props) => html` - - - - - - Document - - - - - - - - -
-    [HTML markup not preserved: layout page with an "Enter these details for RRF search:" form — fields Metadata Table Name, OrderRRF, Text-BaseWeight, Text-FineTuneWeight (0.35), Similarity-BaseWeight, Similarity-FineTuneWeight (0.4), Date-BaseWeight, Date-FineTuneWeight (0.75)]
- - - - - - - -`; -export default ExampleLayout; diff --git a/JS/edgechains/examples/hydeSearch/src/ExampleLayout.ts b/JS/edgechains/examples/hydeSearch/src/ExampleLayout.ts deleted file mode 100644 index 90df407b0..000000000 --- a/JS/edgechains/examples/hydeSearch/src/ExampleLayout.ts +++ /dev/null @@ -1,282 +0,0 @@ -import { html } from "hono/html"; -import { FC } from "hono/jsx"; - -const ExampleLayout: FC = (props) => html` - - - - - - Document - - - - - - - - -
-    [HTML markup not preserved: same layout page as ExampleLayout.js — "Enter these details for RRF search:" form with fields Metadata Table Name, OrderRRF, Text-BaseWeight, Text-FineTuneWeight (0.35), Similarity-BaseWeight, Similarity-FineTuneWeight (0.4), Date-BaseWeight, Date-FineTuneWeight (0.75)]
- - - - - - - -`; - -export default ExampleLayout; diff --git a/JS/edgechains/examples/hydeSearch/src/HydeSearch.d.ts b/JS/edgechains/examples/hydeSearch/src/HydeSearch.d.ts deleted file mode 100644 index 737bdf50f..000000000 --- a/JS/edgechains/examples/hydeSearch/src/HydeSearch.d.ts +++ /dev/null @@ -1,23 +0,0 @@ -import type { ArkRequest } from "@arakoodev/edgechains.js"; -import { Hono } from "hono"; -declare const HydeSearchRouter: Hono; -export interface HydeFragmentData { - responses: Array<{ - rawText?: string; - metadata?: string; - filename?: string; - titleMetadata?: string; - documentDate?: string; - }>; - final_answer?: string; -} -declare function hydeSearchAdaEmbedding( - arkRequest: ArkRequest, - apiKey: string, - orgId: string -): Promise<{ - wordEmbeddings: any; - finalAnswer: any; -}>; -export { hydeSearchAdaEmbedding }; -export { HydeSearchRouter }; diff --git a/JS/edgechains/examples/hydeSearch/src/HydeSearch.js b/JS/edgechains/examples/hydeSearch/src/HydeSearch.js deleted file mode 100644 index 3e2529cd9..000000000 --- a/JS/edgechains/examples/hydeSearch/src/HydeSearch.js +++ /dev/null @@ -1,187 +0,0 @@ -import Jsonnet from "@arakoodev/jsonnet"; -import { OpenAiEndpoint } from "@arakoodev/edgechains.js"; -import { PostgresClient } from "@arakoodev/edgechains.js"; -import * as path from "path"; -import { Hono } from "hono"; -import { fileURLToPath } from "url"; - -const __filename = fileURLToPath(import.meta.url); -const __dirname = path.dirname(__filename); -const HydeSearchRouter = new Hono(); - -var PostgresDistanceMetric; -(function (PostgresDistanceMetric) { - PostgresDistanceMetric["COSINE"] = "COSINE"; - PostgresDistanceMetric["IP"] = "IP"; - PostgresDistanceMetric["L2"] = "L2"; -})(PostgresDistanceMetric || (PostgresDistanceMetric = {})); - -async function hydeSearchAdaEmbedding(arkRequest, apiKey, orgId) { - try { - const gpt3endpoint = new OpenAiEndpoint( - "https://api.openai.com/v1/chat/completions", - apiKey, - orgId, - "gpt-3.5-turbo", - "user", - parseInt("0.7") - ); - // Get required params from API... - const table = "ada_hyde_prod"; - const namespace = "360_docs"; - const query = arkRequest.query; - const topK = Number(arkRequest.topK); - // - const jsonnet = new Jsonnet(); - - const promptPath = path.join(__dirname, "../jsonnet/prompts.jsonnet"); - const hydePath = path.join(__dirname, "../jsonnet/hyde.jsonnet"); - // Load Jsonnet to extract args.. 
- const promptLoader = jsonnet.evaluateFile(promptPath); - // Getting ${summary} basePrompt - const promptTemplate = JSON.parse(promptLoader).summary; - // Getting the updated promptTemplate with query - let hydeLoader = jsonnet - .extString("promptTemplate", promptTemplate) - .extString("time", "") - .extString("query", query) - .evaluateFile(hydePath); - // Get concatenated prompt - const prompt = JSON.parse(hydeLoader).prompt; - // Block and get the response from GPT3 - const gptResponse = await gpt3endpoint.gptFn(prompt); - // Chain 1 ==> Get Gpt3Response & split - const gpt3Responses = gptResponse.split("\n"); - // Chain 2 ==> Get Embeddings from OpenAI using Each Response - const embeddingsListChain = Promise.all( - gpt3Responses.map(async (resp) => { - const embedding = await gpt3endpoint.embeddings(resp); - return embedding; - }) - ); - // Chain 5 ==> Query via EmbeddingChain - const dbClient = new PostgresClient( - await embeddingsListChain, - PostgresDistanceMetric.IP, - topK, - 20, - table, - namespace, - arkRequest, - 15 - ); - const queryResult = await dbClient.dbQuery(); - // Chain 6 ==> Create Prompt using Embeddings - const retrievedDocs = []; - for (const embeddings of queryResult) { - retrievedDocs.push( - `${embeddings.raw_text}\n score:${embeddings.score}\n filename:${embeddings.filename}\n` - ); - } - if (retrievedDocs.join("").length > 4096) { - retrievedDocs.length = 4096; - } - const currentTime = new Date().toLocaleString(); - const formattedTime = currentTime; - // System prompt - const ansPromptSystem = JSON.parse(promptLoader).ans_prompt_system; - hydeLoader = jsonnet - .extString(promptTemplate, ansPromptSystem) - .extString("time", formattedTime) - .extString("qeury", retrievedDocs.join("")) - .evaluateFile(hydePath); - const finalPromptSystem = JSON.parse(hydeLoader).prompt; - // User prompt - const ansPromptUser = JSON.parse(promptLoader).ans_prompt_user; - hydeLoader = jsonnet - .extString(promptTemplate, ansPromptUser) - .extString("qeury", query) - .evaluateFile(hydePath); - const finalPromptUser = JSON.parse(hydeLoader).prompt; - const chatMessages = [ - { role: "system", content: finalPromptSystem }, - { role: "user", content: finalPromptUser }, - ]; - const finalAnswer = await gpt3endpoint.gptFnChat(chatMessages); - const response = { - wordEmbeddings: queryResult, - finalAnswer: finalAnswer, - }; - return response; - } catch (error) { - // Handle errors here - console.error(error); - throw error; - } -} -export { hydeSearchAdaEmbedding }; -HydeSearchRouter.get("/search", async (c) => { - const query = c.req.query(); - const arkRequest = { - topK: parseInt(query.topK ?? "5"), - metadataTable: query.metadataTable, - query: query.query, - textWeight: { - baseWeight: query.textBaseWeight, - fineTuneWeight: query.textFineTuneWeight, - }, - similarityWeight: { - baseWeight: query.similarityBaseWeight, - fineTuneWeight: query.similarityFineTuneWeight, - }, - dateWeight: { - baseWeight: query.dateBaseWeight, - fineTuneWeight: query.dateFineTuneWeight, - }, - orderRRF: query.orderRRF, - }; - const answer = await hydeSearchAdaEmbedding( - arkRequest, - process.env.OPENAI_API_KEY, - process.env.OPENAI_ORG_ID - ); - const final_answer = answer.finalAnswer; - const responses = answer.wordEmbeddings; - const data = { responses, final_answer }; - return c.html(` - -
-    [HTML markup not preserved: response fragment rendering ${data.final_answer}, then ${data.responses.map((item) => ...)} showing item.rawText or item.metadata, plus item.filename, item.titleMetadata, and item.documentDate when present]
- - `); -}); -export { HydeSearchRouter }; diff --git a/JS/edgechains/examples/hydeSearch/src/HydeSearch.test.d.ts b/JS/edgechains/examples/hydeSearch/src/HydeSearch.test.d.ts deleted file mode 100644 index cb0ff5c3b..000000000 --- a/JS/edgechains/examples/hydeSearch/src/HydeSearch.test.d.ts +++ /dev/null @@ -1 +0,0 @@ -export {}; diff --git a/JS/edgechains/examples/hydeSearch/src/HydeSearch.test.js b/JS/edgechains/examples/hydeSearch/src/HydeSearch.test.js deleted file mode 100644 index cadd982c2..000000000 --- a/JS/edgechains/examples/hydeSearch/src/HydeSearch.test.js +++ /dev/null @@ -1,34 +0,0 @@ -import dotenv from "dotenv"; -import { hydeSearchAdaEmbedding } from "./HydeSearch.js"; -dotenv.config({ path: ".env" }); -describe("Hyde Search", () => { - it("should return a response", async () => { - const arkRequest = { - topK: 5, - metadataTable: "title_metadata", - query: "tell me the top 5 programming languages currently", - textWeight: { - baseWeight: "1.0", - fineTuneWeight: "0.35", - }, - similarityWeight: { - baseWeight: "1.5", - fineTuneWeight: "0.40", - }, - dateWeight: { - baseWeight: "1.25", - fineTuneWeight: "0.75", - }, - orderRRF: "default", - }; - expect( - ( - await hydeSearchAdaEmbedding( - arkRequest, - process.env.OPENAI_API_KEY, - process.env.OPENAI_ORG_ID - ) - ).finalAnswer - ).toContain("Java"); - }, 30000); -}); diff --git a/JS/edgechains/examples/hydeSearch/src/HydeSearch.test.ts b/JS/edgechains/examples/hydeSearch/src/HydeSearch.test.ts deleted file mode 100644 index 511ce1f0b..000000000 --- a/JS/edgechains/examples/hydeSearch/src/HydeSearch.test.ts +++ /dev/null @@ -1,36 +0,0 @@ -import { ArkRequest } from "@arakoodev/edgechains.js"; -import dotenv from "dotenv"; -import { hydeSearchAdaEmbedding } from "./HydeSearch.js"; - -dotenv.config({ path: ".env" }); -describe("Hyde Search", () => { - it("should return a response", async () => { - const arkRequest: ArkRequest = { - topK: 5, - metadataTable: "title_metadata", - query: "tell me the top 5 programming languages currently", - textWeight: { - baseWeight: "1.0", - fineTuneWeight: "0.35", - }, - similarityWeight: { - baseWeight: "1.5", - fineTuneWeight: "0.40", - }, - dateWeight: { - baseWeight: "1.25", - fineTuneWeight: "0.75", - }, - orderRRF: "default", - }; - expect( - ( - await hydeSearchAdaEmbedding( - arkRequest, - process.env.OPENAI_API_KEY!, - process.env.OPENAI_ORG_ID! 
- ) - ).finalAnswer - ).toContain("Java"); - }, 30000); -}); diff --git a/JS/edgechains/examples/hydeSearch/src/HydeSearch.ts b/JS/edgechains/examples/hydeSearch/src/HydeSearch.ts deleted file mode 100644 index 94633bac3..000000000 --- a/JS/edgechains/examples/hydeSearch/src/HydeSearch.ts +++ /dev/null @@ -1,217 +0,0 @@ -import Jsonnet from "@arakoodev/jsonnet"; -import { OpenAiEndpoint } from "@arakoodev/edgechains.js"; -import { PostgresClient } from "@arakoodev/edgechains.js"; -import type { ArkRequest } from "@arakoodev/edgechains.js"; -import * as path from "path"; -import { Hono } from "hono"; -const HydeSearchRouter = new Hono(); - -enum PostgresDistanceMetric { - COSINE = "COSINE", - IP = "IP", - L2 = "L2", -} - -export interface HydeFragmentData { - responses: Array<{ - rawText?: string; - metadata?: string; - filename?: string; - titleMetadata?: string; - documentDate?: string; - }>; - final_answer?: string; -} - -async function hydeSearchAdaEmbedding(arkRequest: ArkRequest, apiKey: string, orgId: string) { - try { - const gpt3endpoint = new OpenAiEndpoint( - "https://api.openai.com/v1/chat/completions", - apiKey, - orgId, - "gpt-3.5-turbo", - "user", - parseInt("0.7") - ); - // Get required params from API... - const table = "ada_hyde_prod"; - const namespace = "360_docs"; - const query = arkRequest.query; - const topK = Number(arkRequest.topK); - - // - const jsonnet = new Jsonnet(); - - const promptPath = path.join(__dirname, "../jsonnet/prompts.jsonnet"); - const hydePath = path.join(__dirname, "../jsonnet/hyde.jsonnet"); - // Load Jsonnet to extract args.. - const promptLoader = jsonnet.evaluateFile(promptPath); - - // Getting ${summary} basePrompt - const promptTemplate = JSON.parse(promptLoader).summary; - // Getting the updated promptTemplate with query - let hydeLoader = jsonnet - .extString("promptTemplate", promptTemplate) - .extString("time", "") - .extString("query", query) - .evaluateFile(hydePath); - - // Get concatenated prompt - const prompt = JSON.parse(hydeLoader).prompt; - - // Block and get the response from GPT3 - const gptResponse = await gpt3endpoint.gptFn(prompt); - - // Chain 1 ==> Get Gpt3Response & split - const gpt3Responses = gptResponse.split("\n"); - - // Chain 2 ==> Get Embeddings from OpenAI using Each Response - const embeddingsListChain: Promise = Promise.all( - gpt3Responses.map(async (resp) => { - const embedding = await gpt3endpoint.embeddings(resp); - return embedding; - }) - ); - - // Chain 5 ==> Query via EmbeddingChain - const dbClient = new PostgresClient( - await embeddingsListChain, - PostgresDistanceMetric.IP, - topK, - 20, - table, - namespace, - arkRequest, - 15 - ); - - const queryResult = await dbClient.dbQuery(); - - // Chain 6 ==> Create Prompt using Embeddings - const retrievedDocs: string[] = []; - - for (const embeddings of queryResult) { - retrievedDocs.push( - `${embeddings.raw_text}\n score:${embeddings.score}\n filename:${embeddings.filename}\n` - ); - } - - if (retrievedDocs.join("").length > 4096) { - retrievedDocs.length = 4096; - } - - const currentTime = new Date().toLocaleString(); - const formattedTime = currentTime; - - // System prompt - const ansPromptSystem = JSON.parse(promptLoader).ans_prompt_system; - - hydeLoader = jsonnet - .extString(promptTemplate, ansPromptSystem) - .extString("time", formattedTime) - .extString("qeury", retrievedDocs.join("")) - .evaluateFile(hydePath); - - const finalPromptSystem = JSON.parse(hydeLoader).prompt; - - // User prompt - const ansPromptUser = 
JSON.parse(promptLoader).ans_prompt_user; - - hydeLoader = jsonnet - .extString(promptTemplate, ansPromptUser) - .extString("qeury", query) - .evaluateFile(hydePath); - const finalPromptUser = JSON.parse(hydeLoader).prompt; - - const chatMessages = [ - { role: "system", content: finalPromptSystem }, - { role: "user", content: finalPromptUser }, - ]; - - const finalAnswer = await gpt3endpoint.gptFnChat(chatMessages); - - const response = { - wordEmbeddings: queryResult, - finalAnswer: finalAnswer, - }; - return response; - } catch (error) { - // Handle errors here - console.error(error); - throw error; - } -} - -export { hydeSearchAdaEmbedding }; - -HydeSearchRouter.get("/search", async (c) => { - const query = c.req.query(); - const arkRequest = { - topK: parseInt(query.topK ?? "5"), - metadataTable: query.metadataTable, - query: query.query, - textWeight: { - baseWeight: query.textBaseWeight, - fineTuneWeight: query.textFineTuneWeight, - }, - similarityWeight: { - baseWeight: query.similarityBaseWeight, - fineTuneWeight: query.similarityFineTuneWeight, - }, - dateWeight: { - baseWeight: query.dateBaseWeight, - fineTuneWeight: query.dateFineTuneWeight, - }, - orderRRF: query.orderRRF, - }; - const answer = await hydeSearchAdaEmbedding( - arkRequest, - process.env.OPENAI_API_KEY!, - process.env.OPENAI_ORG_ID! - ); - const final_answer = answer.finalAnswer; - const responses = answer.wordEmbeddings; - const data: HydeFragmentData = { responses, final_answer }; - return c.html(` - -
-    [HTML markup not preserved: same response fragment as HydeSearch.js — renders ${data.final_answer}, then ${data.responses.map((item) => ...)} showing item.rawText or item.metadata, plus item.filename, item.titleMetadata, and item.documentDate when present]
- - `); -}); - -export { HydeSearchRouter }; diff --git a/JS/edgechains/examples/hydeSearch/src/index.d.ts b/JS/edgechains/examples/hydeSearch/src/index.d.ts deleted file mode 100644 index 3d0d62ecd..000000000 --- a/JS/edgechains/examples/hydeSearch/src/index.d.ts +++ /dev/null @@ -1 +0,0 @@ -import "dotenv/config"; diff --git a/JS/edgechains/examples/hydeSearch/src/index.js b/JS/edgechains/examples/hydeSearch/src/index.js deleted file mode 100644 index 0609071b1..000000000 --- a/JS/edgechains/examples/hydeSearch/src/index.js +++ /dev/null @@ -1,12 +0,0 @@ -import "dotenv/config"; -import { serve } from "@hono/node-server"; -import { Hono } from "hono"; -import { HydeSearchRouter } from "./HydeSearch.js"; -import { view } from "../htmljs.js"; -import ExampleLayout from "./ExampleLayout.js"; -const app = new Hono(); -app.route("/", HydeSearchRouter); -app.get("/", view(ExampleLayout)); -serve(app, () => { - console.log("server running on port 3000"); -}); diff --git a/JS/edgechains/examples/hydeSearch/src/index.ts b/JS/edgechains/examples/hydeSearch/src/index.ts deleted file mode 100644 index 12e7d5f6b..000000000 --- a/JS/edgechains/examples/hydeSearch/src/index.ts +++ /dev/null @@ -1,16 +0,0 @@ -import "dotenv/config"; -import { serve } from "@hono/node-server"; -import { Hono } from "hono"; -import { HydeSearchRouter } from "./HydeSearch.js"; -import { view } from "../htmljs.js"; -import ExampleLayout from "./ExampleLayout.js"; - -const app = new Hono(); - -app.route("/", HydeSearchRouter); - -app.get("/", view(ExampleLayout)); - -serve(app, () => { - console.log("server running on port 3000"); -}); diff --git a/JS/edgechains/examples/hydeSearch/tsconfig.json b/JS/edgechains/examples/hydeSearch/tsconfig.json deleted file mode 100644 index f370e670f..000000000 --- a/JS/edgechains/examples/hydeSearch/tsconfig.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "compilerOptions": { - "types": ["dotenv/config", "jest", "node"], - "target": "ES2022", - "module": "NodeNext", - "esModuleInterop": true, - "forceConsistentCasingInFileNames": true, - "strict": true, - "skipLibCheck": true, - "jsx": "react-jsx", - "jsxImportSource": "hono/jsx", - "noImplicitAny": false, - "moduleResolution": "NodeNext", - "declaration": true - }, - "include": ["src/**/*.ts", "dist/**/*.d.ts"] -} diff --git a/JS/edgechains/examples/language-translater/backend/dist/index.js b/JS/edgechains/examples/language-translater/backend/dist/index.js new file mode 100644 index 000000000..ab5a220e8 --- /dev/null +++ b/JS/edgechains/examples/language-translater/backend/dist/index.js @@ -0,0 +1,21 @@ +import { ArakooServer } from "arakoodev/arakooserver"; +import Jsonnet from "@arakoodev/jsonnet"; +//@ts-ignore +import createClient from 'sync-rpc'; +import { fileURLToPath } from "url"; +import path from "path"; +const server = new ArakooServer(); +const app = server.createApp(); +server.useCors(); +const jsonnet = new Jsonnet(); +const __dirname = fileURLToPath(import.meta.url); +const openAICall = createClient(path.join(__dirname, "../lib/generateResponse.cjs")); +app.post("/translate", async (c) => { + const { language, text } = await c.req.json(); + jsonnet.extString("language", language || ""); + jsonnet.extString("text", text || ""); + jsonnet.javascriptCallback("openAICall", openAICall); + let response = jsonnet.evaluateFile(path.join(__dirname, "../../jsonnet/main.jsonnet")); + return c.json(response); +}); +server.listen(3000); diff --git a/JS/edgechains/examples/language-translater/backend/dist/lib/generateResponse.cjs 
b/JS/edgechains/examples/language-translater/backend/dist/lib/generateResponse.cjs new file mode 100644 index 000000000..02f0cc0b9 --- /dev/null +++ b/JS/edgechains/examples/language-translater/backend/dist/lib/generateResponse.cjs @@ -0,0 +1,26 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const zod_1 = require("zod"); +const path = require("path"); +const { OpenAI } = require("arakoodev/openai"); +const Jsonnet = require("@arakoodev/jsonnet"); +const jsonnet = new Jsonnet(); +const secretsPath = path.join(__dirname, "../../jsonnet/secrets.jsonnet"); +const openAIApiKey = JSON.parse(jsonnet.evaluateFile(secretsPath)).openai_api_key; +const openai = new OpenAI({ apiKey: openAIApiKey }); +const schema = zod_1.z.object({ + answer: zod_1.z.string().describe("The answer to the question") +}); +function openAICall() { + return function (prompt) { + try { + return openai.zodSchemaResponse({ prompt, schema: schema }).then((res) => { + return JSON.stringify(res); + }); + } + catch (error) { + return error; + } + }; +} +module.exports = openAICall; diff --git a/JS/edgechains/examples/language-translater/backend/jsonnet/main.jsonnet b/JS/edgechains/examples/language-translater/backend/jsonnet/main.jsonnet new file mode 100644 index 000000000..02c124a1f --- /dev/null +++ b/JS/edgechains/examples/language-translater/backend/jsonnet/main.jsonnet @@ -0,0 +1,17 @@ + +local promptTemplate = ||| + You are a helpful assistant that translate this text {text} into this language {language}. + |||; + + +local text = std.extVar("text"); +local language = std.extVar("language"); + +local promptWithtext = std.strReplace(promptTemplate,'{text}', text + "\n"); +local finalPrompt = std.strReplace(promptWithtext,'{language}', language + "\n"); + +local main() = + local response = arakoo.native("openAICall")(finalPrompt); + response; + +main() \ No newline at end of file diff --git a/JS/edgechains/examples/language-translater/backend/jsonnet/secrets.jsonnet b/JS/edgechains/examples/language-translater/backend/jsonnet/secrets.jsonnet new file mode 100644 index 000000000..978cacd33 --- /dev/null +++ b/JS/edgechains/examples/language-translater/backend/jsonnet/secrets.jsonnet @@ -0,0 +1,7 @@ + +local OPENAI_API_KEY = "sk-proj-***"; + +{ + "openai_api_key":OPENAI_API_KEY, +} + diff --git a/JS/edgechains/examples/language-translater/backend/package.json b/JS/edgechains/examples/language-translater/backend/package.json new file mode 100644 index 000000000..9f9b8a6d5 --- /dev/null +++ b/JS/edgechains/examples/language-translater/backend/package.json @@ -0,0 +1,22 @@ +{ + "name": "language-translater", + "version": "1.0.0", + "description": "", + "main": "index.js", + "type": "module", + "keywords": [], + "author": "", + "scripts": { + "start": "tsc && node --experimental-wasm-modules ./dist/index.js" + }, + "license": "ISC", + "dependencies": { + "@arakoodev/jsonnet": "^0.2.1", + "hono": "^4.3.7", + "sync-rpc": "^1.3.6", + "zod": "^3.23.8" + }, + "devDependencies": { + "@types/cors": "^2.8.17" + } +} diff --git a/JS/edgechains/examples/language-translater/backend/src/index.ts b/JS/edgechains/examples/language-translater/backend/src/index.ts new file mode 100644 index 000000000..ae7717abd --- /dev/null +++ b/JS/edgechains/examples/language-translater/backend/src/index.ts @@ -0,0 +1,29 @@ + +import { ArakooServer } from "arakoodev/arakooserver"; +import Jsonnet from "@arakoodev/jsonnet"; +//@ts-ignore +import createClient from 'sync-rpc'; +import { fileURLToPath } from "url" +import path 
from "path"; +const server = new ArakooServer(); + +const app = server.createApp(); + +server.useCors() + +const jsonnet = new Jsonnet(); +const __dirname = fileURLToPath(import.meta.url); + +const openAICall = createClient(path.join(__dirname, "../lib/generateResponse.cjs")); + +app.post("/translate", async (c: any) => { + const { language, text } = await c.req.json(); + jsonnet.extString("language", language || ""); + jsonnet.extString("text", text || ""); + jsonnet.javascriptCallback("openAICall", openAICall); + let response = jsonnet.evaluateFile(path.join(__dirname, "../../jsonnet/main.jsonnet")); + return c.json(response); +}); + +server.listen(3000) + diff --git a/JS/edgechains/examples/language-translater/backend/src/lib/generateResponse.cts b/JS/edgechains/examples/language-translater/backend/src/lib/generateResponse.cts new file mode 100644 index 000000000..98b189a86 --- /dev/null +++ b/JS/edgechains/examples/language-translater/backend/src/lib/generateResponse.cts @@ -0,0 +1,31 @@ +import { z } from "zod"; +const path = require("path"); +const { OpenAI } = require("arakoodev/openai"); + +const Jsonnet = require("@arakoodev/jsonnet"); +const jsonnet = new Jsonnet(); +const secretsPath = path.join(__dirname, "../../jsonnet/secrets.jsonnet"); +const openAIApiKey = JSON.parse(jsonnet.evaluateFile(secretsPath)).openai_api_key + +const openai = new OpenAI({ apiKey: openAIApiKey }) + +const schema = z.object({ + answer: z.string().describe("The answer to the question") +}) + +function openAICall() { + + return function (prompt: string) { + try { + return openai.zodSchemaResponse({ prompt, schema: schema }).then((res: any) => { + return JSON.stringify(res) + }) + + } catch (error) { + return error; + } + } +} + + +module.exports = openAICall; \ No newline at end of file diff --git a/JS/edgechains/examples/language-translater/backend/tsconfig.json b/JS/edgechains/examples/language-translater/backend/tsconfig.json new file mode 100644 index 000000000..d6f3a05e6 --- /dev/null +++ b/JS/edgechains/examples/language-translater/backend/tsconfig.json @@ -0,0 +1,14 @@ +{ + "compilerOptions": { + "target": "ES2022", + "moduleResolution": "NodeNext", + "module": "NodeNext", + "rootDir": "src", + "outDir": "./dist", + "esModuleInterop": true, + "forceConsistentCasingInFileNames": true, + "strict": true, + "skipLibCheck": true + }, + "exclude": ["./jsonnet"] +} \ No newline at end of file diff --git a/JS/edgechains/examples/language-translater/frontend/index.html b/JS/edgechains/examples/language-translater/frontend/index.html new file mode 100644 index 000000000..9e9c637ad --- /dev/null +++ b/JS/edgechains/examples/language-translater/frontend/index.html @@ -0,0 +1,30 @@ + + + + + + Language Translator + + + +
+    <body>
+        <h1>Language Translator</h1>
+        <div>
+            <label for="language">Language</label>
+            <input id="language" type="text" placeholder="e.g. hindi" />
+        </div>
+        <div>
+            <label for="text">Text</label>
+            <textarea id="text" placeholder="Enter the text to translate"></textarea>
+        </div>
+        <button id="translateButton">Translate</button>
+        <div id="translatedText"></div>
+        <script src="translator.js"></script>
+ + + + diff --git a/JS/edgechains/examples/language-translater/frontend/translator.js b/JS/edgechains/examples/language-translater/frontend/translator.js new file mode 100644 index 000000000..d30b8e79f --- /dev/null +++ b/JS/edgechains/examples/language-translater/frontend/translator.js @@ -0,0 +1,29 @@ +document.getElementById('translateButton').addEventListener('click', async () => { + const language = document.getElementById('language').value; + const text = document.getElementById('text').value; + const translatedTextDiv = document.getElementById('translatedText'); + + const endpoint = 'http://localhost:3000/translate'; + + try { + const response = await fetch(endpoint, { + method: 'POST', + headers: { + 'Content-Type': 'application/json' + }, + body: JSON.stringify({ + language: language, + text: text + }) + }); + + if (response.ok) { + const data = await response.json(); + translatedTextDiv.textContent = data; + } else { + translatedTextDiv.textContent = 'Error: Unable to translate text.'; + } + } catch (error) { + translatedTextDiv.textContent = 'Error: Unable to connect to the translation service.'; + } +}); diff --git a/JS/edgechains/examples/language-translater/readme.md b/JS/edgechains/examples/language-translater/readme.md new file mode 100644 index 000000000..c92e67c3b --- /dev/null +++ b/JS/edgechains/examples/language-translater/readme.md @@ -0,0 +1,51 @@ +## Video + ``` + https://drive.google.com/file/d/1QclGlD6mDEN9PMDEXAMkdZb0wY5uew8p/view + ``` + +# Project Name + +This is a Language Translate app + +## Backend + +### Configuration + +1 Add OpenAiApi key in secrets.jsonnet + + ``` + local OPENAI_API_KEY = "sk-****"; + ``` + +## Usage + +1. Start the server: + + ```bash + cd backend + npm run start + ``` + +### Run With frontend + +1. Open the frontend/index.html file with live server + +2. Hit the endpoint in your browser if you're running the app from our examples + + ```bash + http://localhost:5500/JS/edgechains/examples/language-translater/frontend/ + ``` + + +### Run With endpoint + +1. Hit the `POST` endpoint. 
+ +```bash +http://localhost:3000/translate + +body={ + "language":"hindi", + "text":"hi, how are you" +} +``` \ No newline at end of file diff --git a/JS/edgechains/examples/react-chain/jsonnet/react-chain.jsonnet b/JS/edgechains/examples/react-chain/jsonnet/react-chain.jsonnet index 216f38d85..db39de2fd 100644 --- a/JS/edgechains/examples/react-chain/jsonnet/react-chain.jsonnet +++ b/JS/edgechains/examples/react-chain/jsonnet/react-chain.jsonnet @@ -113,6 +113,7 @@ local promptTemplate = ||| Question: {} |||; + local UserQuestion = std.extVar("question"); diff --git a/JS/edgechains/examples/react-chain/jsonnet/secrets.jsonnet b/JS/edgechains/examples/react-chain/jsonnet/secrets.jsonnet index 407f1b1b5..aa979aece 100644 --- a/JS/edgechains/examples/react-chain/jsonnet/secrets.jsonnet +++ b/JS/edgechains/examples/react-chain/jsonnet/secrets.jsonnet @@ -1,5 +1,5 @@ -local OPENAI_API_KEY = "sk-***"; +local OPENAI_API_KEY = "sk-**"; { "openai_api_key":OPENAI_API_KEY, diff --git a/JS/edgechains/examples/react-chain/package.json b/JS/edgechains/examples/react-chain/package.json index 84843be74..4ee0d10eb 100644 --- a/JS/edgechains/examples/react-chain/package.json +++ b/JS/edgechains/examples/react-chain/package.json @@ -13,8 +13,7 @@ "license": "ISC", "dependencies": { "@arakoodev/jsonnet": "^0.2.1", - "arakoodev": "^1.0.8", - "axios": "^1.6.8", + "arakoodev": "^1.0.10", "sync-rpc": "^1.3.6" }, "devDependencies": { diff --git a/JS/edgechains/examples/react-chain/readme.md b/JS/edgechains/examples/react-chain/readme.md new file mode 100644 index 000000000..0b909dd3f --- /dev/null +++ b/JS/edgechains/examples/react-chain/readme.md @@ -0,0 +1,34 @@ +# React Chain Example + +This is an example project that demonstrates the usage of React Chain. + + +## Configuration + +1 Add OpenAiApi key in secrets.jsonnet + + ```bash + local OPENAI_API_KEY = "sk-****"; + ``` + +## Installation + +1. Install the dependencies: + + ```bash + npm install + ``` + +## Usage + +1. Start the server: + + ```bash + npm run start + ``` + +2. Hit the `GET` endpoint with question + + ```bash + http://localhost:5000?question=Author David Chanoff has collaborated with a U.S. Navy admiral who served as the ambassador to the United Kingdom under which President? 
+ ``` diff --git a/JS/edgechains/examples/react-chain/src/lib/generateResponse.cts b/JS/edgechains/examples/react-chain/src/lib/generateResponse.cts index d1264111c..2d96e2a70 100644 --- a/JS/edgechains/examples/react-chain/src/lib/generateResponse.cts +++ b/JS/edgechains/examples/react-chain/src/lib/generateResponse.cts @@ -1,5 +1,5 @@ const path = require("path"); -const { ChatOpenAi } = require("arakoodev/openai"); +const { OpenAI } = require("arakoodev/openai"); const Jsonnet = require("@arakoodev/jsonnet"); const jsonnet = new Jsonnet(); @@ -7,16 +7,16 @@ const jsonnet = new Jsonnet(); const secretsPath = path.join(__dirname, "../../jsonnet/secrets.jsonnet"); const openAIApiKey = JSON.parse(jsonnet.evaluateFile(secretsPath)).openai_api_key; -const openai = new ChatOpenAi({ - openAIApiKey, +const openai = new OpenAI({ + apiKey: openAIApiKey, temperature: 0, }); function openAICall() { return function (prompt: string) { try { - return openai.generateResponse(prompt).then((res: any) => { - return res; + return openai.chat({ prompt }).then((res: any) => { + return res.content; }); } catch (error) { return error; diff --git a/JS/edgechains/examples/react-chain/src/routes/react-chain.ts b/JS/edgechains/examples/react-chain/src/routes/react-chain.ts index cc222d83a..8733dd248 100644 --- a/JS/edgechains/examples/react-chain/src/routes/react-chain.ts +++ b/JS/edgechains/examples/react-chain/src/routes/react-chain.ts @@ -26,7 +26,7 @@ ReactChainRouter.get("/", async (c: any) => { let response = jsonnet .javascriptCallback("callWikipediaApi", callWikipediaApi) .evaluateFile(path.join(__dirname, "../../../jsonnet/react-chain.jsonnet")); - + console.log({ response }) return c.json(response); } catch (error) { return c.json({ diff --git a/JS/edgechains/examples/summarize-page/dist/index.js b/JS/edgechains/examples/summarize-page/dist/index.js new file mode 100644 index 000000000..92fa1d857 --- /dev/null +++ b/JS/edgechains/examples/summarize-page/dist/index.js @@ -0,0 +1,21 @@ +import { ArakooServer } from "arakoodev/arakooserver"; +import Jsonnet from "@arakoodev/jsonnet"; +//@ts-ignore +import createClient from 'sync-rpc'; +import { fileURLToPath } from "url"; +import path from "path"; +const server = new ArakooServer(); +const app = server.createApp(); +const jsonnet = new Jsonnet(); +const __dirname = fileURLToPath(import.meta.url); +const openAICall = createClient(path.join(__dirname, "../lib/generateResponse.cjs")); +const getPageContent = createClient(path.join(__dirname, "../lib/getDataFromUrl.cjs")); +app.get("/", async (c) => { + const pageUrl = c.req.query("pageUrl"); + jsonnet.extString("pageUrl", pageUrl || ""); + jsonnet.javascriptCallback("openAICall", openAICall); + jsonnet.javascriptCallback("getPageContent", getPageContent); + let response = jsonnet.evaluateFile(path.join(__dirname, "../../jsonnet/main.jsonnet")); + return c.json(response); +}); +server.listen(3000); diff --git a/JS/edgechains/examples/summarize-page/dist/lib/generateResponse.cjs b/JS/edgechains/examples/summarize-page/dist/lib/generateResponse.cjs new file mode 100644 index 000000000..b2201b8f8 --- /dev/null +++ b/JS/edgechains/examples/summarize-page/dist/lib/generateResponse.cjs @@ -0,0 +1,30 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const path = require("path"); +const { OpenAI } = require("arakoodev/openai"); +const zod_1 = require("zod"); +const Jsonnet = require("@arakoodev/jsonnet"); +const jsonnet = new Jsonnet(); +const secretsPath = path.join(__dirname, 
"../../jsonnet/secrets.jsonnet"); +const openAIApiKey = JSON.parse(jsonnet.evaluateFile(secretsPath)).openai_api_key; +const schema = zod_1.z.object({ + answer: zod_1.z.string().describe("The answer to the question") +}); +const openai = new OpenAI({ + apiKey: openAIApiKey, + temperature: 0, +}); +function openAICall() { + return function (prompt) { + try { + return openai.zodSchemaResponse({ prompt, schema: schema }).then((res) => { + console.log({ res }); + return JSON.stringify(res); + }); + } + catch (error) { + return error; + } + }; +} +module.exports = openAICall; diff --git a/JS/edgechains/examples/summarize-page/dist/lib/getDataFromUrl.cjs b/JS/edgechains/examples/summarize-page/dist/lib/getDataFromUrl.cjs new file mode 100644 index 000000000..25cec9ac5 --- /dev/null +++ b/JS/edgechains/examples/summarize-page/dist/lib/getDataFromUrl.cjs @@ -0,0 +1,12 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const { WebScraper } = require("arakoodev/scraper"); +const scraper = new WebScraper(); +function getPageContent() { + return (url) => { + return scraper.getContent(url).then((res) => { + return res; + }); + }; +} +module.exports = getPageContent; diff --git a/JS/edgechains/examples/summarize-page/jsonnet/main.jsonnet b/JS/edgechains/examples/summarize-page/jsonnet/main.jsonnet new file mode 100644 index 000000000..d8d047804 --- /dev/null +++ b/JS/edgechains/examples/summarize-page/jsonnet/main.jsonnet @@ -0,0 +1,24 @@ +local promptTemplate = ||| + You are a helpful AI assistant capable of providing detailed summaries. Your task is to summarize the given content thoroughly and in a verbose manner. + + Content to be summarized: + ------- + {content} + -------- + Given answer should look like this: The Summary of the content is: [Your Summary]. + Please remember to provide a thorough and detailed summary. Don't give incomplete summary or just a few words. Specially focus when the summary ends the end should not be abrupt. 
+ + |||; + + +local pageUrl = std.extVar("pageUrl"); +local getPageContent(pageUrl) = + local pageContent = arakoo.native("getPageContent")(pageUrl); + local promptWithPageContent = std.strReplace(promptTemplate,'{content}', pageContent + "\n"); + promptWithPageContent; + +local main(prompt) = + local response = arakoo.native("openAICall")(prompt); + response; + +main(getPageContent(pageUrl)) \ No newline at end of file diff --git a/JS/edgechains/examples/summarize-page/jsonnet/secrets.jsonnet b/JS/edgechains/examples/summarize-page/jsonnet/secrets.jsonnet new file mode 100644 index 000000000..978cacd33 --- /dev/null +++ b/JS/edgechains/examples/summarize-page/jsonnet/secrets.jsonnet @@ -0,0 +1,7 @@ + +local OPENAI_API_KEY = "sk-proj-***"; + +{ + "openai_api_key":OPENAI_API_KEY, +} + diff --git a/JS/edgechains/examples/summarize-page/package.json b/JS/edgechains/examples/summarize-page/package.json new file mode 100644 index 000000000..e5bb7a8e8 --- /dev/null +++ b/JS/edgechains/examples/summarize-page/package.json @@ -0,0 +1,22 @@ +{ + "name": "summarize-page", + "version": "1.0.0", + "description": "", + "main": "index.js", + "type": "module", + "keywords": [], + "author": "", + "scripts": { + "start": "tsc && node --experimental-wasm-modules ./dist/index.js" + }, + "license": "ISC", + "dependencies": { + "@arakoodev/jsonnet": "^0.2.1", + "arakoodev": "^1.0.10", + "sync-rpc": "^1.3.6", + "zod": "^3.23.8" + }, + "devDependencies": { + "@types/request": "^2.48.12" + } +} diff --git a/JS/edgechains/examples/summarize-page/readme.md b/JS/edgechains/examples/summarize-page/readme.md new file mode 100644 index 000000000..3e90ce024 --- /dev/null +++ b/JS/edgechains/examples/summarize-page/readme.md @@ -0,0 +1,37 @@ +## Video + ``` + https://drive.google.com/file/d/14t8B_A6MGgDE5E2j3YPM4DnO3r_WOUun/view + ``` + + +## Installation + +1. Install the required dependencies: + + ```bash + npm install + ``` + +## Configuration + +1. Add your OpenAI API key in secrets.jsonnet + ```bash + local OPENAI_API_KEY = "sk-****"; + ``` + +## Usage + +1. Start the server: + + ```bash + npm run start + ``` + +2. Hit the `GET` endpoint. 
+ + + ```bash + + http://localhost:3000/?pageUrl=https://en.wikipedia.org/wiki/Association_football + ``` + diff --git a/JS/edgechains/examples/summarize-page/src/index.ts b/JS/edgechains/examples/summarize-page/src/index.ts new file mode 100644 index 000000000..a35eb40c3 --- /dev/null +++ b/JS/edgechains/examples/summarize-page/src/index.ts @@ -0,0 +1,30 @@ + +import { ArakooServer } from "arakoodev/arakooserver"; +import Jsonnet from "@arakoodev/jsonnet"; +//@ts-ignore +import createClient from 'sync-rpc'; + +import { fileURLToPath } from "url" +import path from "path"; +const server = new ArakooServer(); + +const app = server.createApp(); + +const jsonnet = new Jsonnet(); +const __dirname = fileURLToPath(import.meta.url); + +const openAICall = createClient(path.join(__dirname, "../lib/generateResponse.cjs")); +const getPageContent = createClient(path.join(__dirname, "../lib/getDataFromUrl.cjs")); + + +app.get("/", async (c: any) => { + const pageUrl = c.req.query("pageUrl"); + jsonnet.extString("pageUrl", pageUrl || ""); + jsonnet.javascriptCallback("openAICall", openAICall); + jsonnet.javascriptCallback("getPageContent", getPageContent); + let response = jsonnet.evaluateFile(path.join(__dirname, "../../jsonnet/main.jsonnet")); + return c.json(response); +}); + +server.listen(3000) + diff --git a/JS/edgechains/examples/summarize-page/src/lib/generateResponse.cts b/JS/edgechains/examples/summarize-page/src/lib/generateResponse.cts new file mode 100644 index 000000000..49a446c83 --- /dev/null +++ b/JS/edgechains/examples/summarize-page/src/lib/generateResponse.cts @@ -0,0 +1,35 @@ +const path = require("path"); +const { OpenAI } = require("arakoodev/openai"); +import { z } from "zod"; +const Jsonnet = require("@arakoodev/jsonnet"); +const jsonnet = new Jsonnet(); + +const secretsPath = path.join(__dirname, "../../jsonnet/secrets.jsonnet"); +const openAIApiKey = JSON.parse(jsonnet.evaluateFile(secretsPath)).openai_api_key + +const schema = z.object({ + answer: z.string().describe("The answer to the question") +}) + +const openai = new OpenAI({ + apiKey: openAIApiKey, + temperature: 0, +}) + +function openAICall() { + + return function (prompt: string) { + try { + return openai.zodSchemaResponse({ prompt, schema: schema }).then((res: any) => { + console.log({ res }) + return JSON.stringify(res) + }) + + } catch (error) { + return error; + } + } +} + + +module.exports = openAICall; \ No newline at end of file diff --git a/JS/edgechains/examples/summarize-page/src/lib/getDataFromUrl.cts b/JS/edgechains/examples/summarize-page/src/lib/getDataFromUrl.cts new file mode 100644 index 000000000..552a3b6c5 --- /dev/null +++ b/JS/edgechains/examples/summarize-page/src/lib/getDataFromUrl.cts @@ -0,0 +1,14 @@ + +const { WebScraper } = require("arakoodev/scraper") + +const scraper = new WebScraper(); + +function getPageContent() { + return (url: string) => { + return scraper.getContent(url).then((res: any) => { + return res + }) + } +} + +module.exports = getPageContent; \ No newline at end of file diff --git a/JS/edgechains/examples/summarize-page/tsconfig.json b/JS/edgechains/examples/summarize-page/tsconfig.json new file mode 100644 index 000000000..54ff1e2dd --- /dev/null +++ b/JS/edgechains/examples/summarize-page/tsconfig.json @@ -0,0 +1,14 @@ +{ + "compilerOptions": { + "target": "ES2022", + "moduleResolution": "NodeNext", + "module": "NodeNext", + "rootDir": "./src", + "outDir": "./dist", + "esModuleInterop": true, + "forceConsistentCasingInFileNames": true, + "strict": true, + "skipLibCheck": 
true + }, + "exclude": ["./**/*.test.ts", "vitest.config.ts", "./jsonnet/**/*"] +} \ No newline at end of file diff --git a/JS/edgechains/examples/wikiExample/.gitignore b/JS/edgechains/examples/wikiExample/.gitignore deleted file mode 100644 index 03c61f409..000000000 --- a/JS/edgechains/examples/wikiExample/.gitignore +++ /dev/null @@ -1,8 +0,0 @@ -# standard exclusions -node_modules - -# build artifacts -dist - -# environment files -.env diff --git a/JS/edgechains/examples/wikiExample/README.md b/JS/edgechains/examples/wikiExample/README.md deleted file mode 100644 index 9b4660be9..000000000 --- a/JS/edgechains/examples/wikiExample/README.md +++ /dev/null @@ -1,15 +0,0 @@ -## How to run WikiExample:- - -- Use the following command in the root directory to run this example: - - `npm i` - - `npm run build` - - `npm start` - -- This example searches on Wikipedia on given input by you and then it will ask to GPT to summarize the result of Wikipedia in `30` bullet points. - -- To use this example make a `POST` request call on `http://localhost:3000/wiki-summary`. -- Request body: - `{"input": "Your Search Topic"}` diff --git a/JS/edgechains/examples/wikiExample/esbuild.build.js b/JS/edgechains/examples/wikiExample/esbuild.build.js deleted file mode 100644 index 8da795c0e..000000000 --- a/JS/edgechains/examples/wikiExample/esbuild.build.js +++ /dev/null @@ -1,45 +0,0 @@ -const esbuild = require("esbuild"); -const path = require("path"); -const fs = require("fs"); -const { execSync } = require("child_process"); - -const outputDir = path.resolve(__dirname, "dist"); - -if (!fs.existsSync(outputDir)) { - fs.mkdirSync(outputDir); -} - -const distPath = path.join(process.cwd(), "dist"); - -fs.promises.mkdir(distPath, { recursive: true }); - -esbuild - .build({ - entryPoints: ["./src/index.ts"], - bundle: true, - minify: true, - platform: "node", - outfile: "./dist/index.js", - tsconfig: "./tsconfig.json", - target: "node21.1.0", - external: [ - "express", - "tsx", - "typescript", - "typeorm", - "react", - "react-dom", - "pg", - "jsdom", - "hono", - "@hanazuki/node-jsonnet", - "readline/promises", - ], - format: "cjs", - loader: { - ".html": "text", - ".css": "css", - ".jsonnet": "text", - }, - }) - .catch(() => process.exit(1)); diff --git a/JS/edgechains/examples/wikiExample/ormconfig.json b/JS/edgechains/examples/wikiExample/ormconfig.json deleted file mode 100644 index 21819c00f..000000000 --- a/JS/edgechains/examples/wikiExample/ormconfig.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "type": "postgres", - "host": "db.rmzqtepwnzoxgkkzjctt.supabase.co", - "port": 5432, - "username": "postgres", - "password": "xaX0MYcf1YiJlChK", - "database": "postgres", - "entities": ["dist/entities/**/*.js"], - "synchronize": false, - "logging": false -} diff --git a/JS/edgechains/examples/wikiExample/package.json b/JS/edgechains/examples/wikiExample/package.json deleted file mode 100644 index 0f8e4658a..000000000 --- a/JS/edgechains/examples/wikiExample/package.json +++ /dev/null @@ -1,49 +0,0 @@ -{ - "name": "example", - "version": "1.0.0", - "description": "", - "main": "dist/index.js", - "scripts": { - "build": "rm -rf dist && tsc", - "start": "node --experimental-wasm-modules dist/index.js", - "lint": "eslint --ignore-path .eslintignore --ext .js,.ts", - "format": "prettier --ignore-path .gitignore --write \"**/*.+(js|ts|json)\"", - "test": "npx jest" - }, - "keywords": [], - "author": "", - "license": "ISC", - "dependencies": { - "@arakoodev/jsonnet": "0.1.2", - "@hono/node-server": "^1.2.0", - "@types/dotenv": 
"^8.2.0", - "hono": "^3.9.2", - "pg": "^8.11.3", - "reflect-metadata": "^0.1.13", - "tsc": "^2.0.4", - "typescript": "^5.3.3" - }, - "devDependencies": { - "@arakoodev/edgechains.js": "0.1.22", - "@types/jest": "^29.5.8", - "@types/node": "^20.9.4", - "@typescript-eslint/eslint-plugin": "^6.11.0", - "@typescript-eslint/parser": "^6.11.0", - "axios": "^1.6.2", - "dotenv": "^16.3.1", - "dts-bundle-generator": "^8.1.2", - "eslint": "^8.54.0", - "eslint-config-prettier": "^9.0.0", - "eslint-config-standard-with-typescript": "^40.0.0", - "eslint-plugin-import": "^2.29.0", - "eslint-plugin-n": "^16.3.1", - "eslint-plugin-promise": "^6.1.1", - "jest": "^29.7.0", - "prettier": "^3.1.0", - "react": "^18.2.0", - "ts-jest": "^29.1.1", - "tsx": "^3.12.2", - "typeorm": "^0.3.17", - "typescript": "^5.0.2" - } -} diff --git a/JS/edgechains/examples/wikiExample/src/WikiExample.test.ts b/JS/edgechains/examples/wikiExample/src/WikiExample.test.ts deleted file mode 100644 index 86552f3db..000000000 --- a/JS/edgechains/examples/wikiExample/src/WikiExample.test.ts +++ /dev/null @@ -1,7 +0,0 @@ -const wikiSummary = require("./WikiExample"); - -describe("Wiki Search", () => { - it("should return a response", async () => { - expect(await wikiSummary("Barak Obama")).toContain("Barak Obama"); - }, 30000); -}); diff --git a/JS/edgechains/examples/wikiExample/src/WikiExample.ts b/JS/edgechains/examples/wikiExample/src/WikiExample.ts deleted file mode 100644 index b92809791..000000000 --- a/JS/edgechains/examples/wikiExample/src/WikiExample.ts +++ /dev/null @@ -1,78 +0,0 @@ -import { OpenAiEndpoint } from "@arakoodev/edgechains.js"; -import * as path from "path"; -import { Hono } from "hono"; -import axios from "axios"; - -const getJsonnet = async () => { - let jsonnet = await import("@arakoodev/jsonnet"); - return jsonnet.default; -}; - -export const WikiRouter = new Hono(); - -WikiRouter.post("/wiki-summary", async (c) => { - const query = await c.req.json(); - const summary = await wikiSummary(query.input); - - return c.json({ message: summary }, 200); -}); - -const wikiJsonnetPath = path.join(__dirname, "../src/wiki.jsonnet"); - -const gpt3endpoint = new OpenAiEndpoint( - "https://api.openai.com/v1/chat/completions", - process.env.OPENAI_API_KEY!, - "", - "gpt-3.5-turbo", - "user", - parseInt("0.7") -); - -export async function wikiSummary(input: string) { - const Jsonnet = await getJsonnet(); - const jsonnet = new Jsonnet(); - const wikiResponse = await axios - .post( - "https://en.wikipedia.org/w/api.php", - {}, - { - params: { - action: "query", - prop: "extracts", - format: "json", - titles: input, - explaintext: "", - }, - headers: { - "content-type": "application/x-www-form-urlencoded", - Accept: "application/json", - }, - } - ) - .then(function (response) { - return Object.values(response.data.query.pages); - }) - .catch(function (error) { - if (error.response) { - console.log("Server responded with status code:", error.response.status); - console.log("Response data:", error.response.data); - } else if (error.request) { - console.log("No response received:", error.request); - } else { - console.log("Error creating request:", error.message); - } - }); - - const wikiJsonnet = await jsonnet - .extString("keepMaxTokens", "true") - .extString("maxTokens", "4096") - .extString("keepContext", "true") - .extString("context", wikiResponse[0].extract) - .evaluateFile(wikiJsonnetPath); - - const gpt3Response = await gpt3endpoint.gptFn(JSON.parse(wikiJsonnet).prompt); - - console.log("Summary from GPT: \n\n\n" + 
gpt3Response); - - return gpt3Response; -} diff --git a/JS/edgechains/examples/wikiExample/src/index.ts b/JS/edgechains/examples/wikiExample/src/index.ts deleted file mode 100644 index cdab1d916..000000000 --- a/JS/edgechains/examples/wikiExample/src/index.ts +++ /dev/null @@ -1,12 +0,0 @@ -import "dotenv/config"; -import { serve } from "@hono/node-server"; -import { Hono } from "hono"; -import { WikiRouter } from "./WikiExample"; - -const app = new Hono(); - -app.route("/", WikiRouter); - -serve(app, () => { - console.log("server running on port 3000"); -}); diff --git a/JS/edgechains/examples/wikiExample/src/wiki.jsonnet b/JS/edgechains/examples/wikiExample/src/wiki.jsonnet deleted file mode 100644 index 5eb6c16c6..000000000 --- a/JS/edgechains/examples/wikiExample/src/wiki.jsonnet +++ /dev/null @@ -1,32 +0,0 @@ -local stringToBool(s) = - if s == "true" then true - else false; - -local keepMaxTokens = stringToBool(std.extVar("keepMaxTokens")); -local maxTokens = if keepMaxTokens == "true" then std.parseInt(std.extVar("maxTokens")) else 5120; - -local preset = ||| - You are a Summary Generator Bot. For any question other than summarizing the data, you should tell that you cannot answer it. - You should detect the language and the characters the user is writing in, and reply in the same character set and language. - - You should follow the following template while answering the user: - - ``` - 1. - - 2. - - ... - ``` - Now, given the data, create a 30-bullet point summary of: - |||; -local keepContext = std.extVar("keepContext"); -local context = if keepContext == "true" then std.extVar("context") else ""; - - -local prompt = std.join("\n", [preset, context]); -{ - "maxTokens": maxTokens, - "typeOfKeepContext": std.type(keepContext), - "preset" : preset, - "context": context, - "prompt": if(std.length(prompt) > maxTokens) then std.substr(prompt, 0, maxTokens) else prompt -} \ No newline at end of file diff --git a/JS/edgechains/examples/wikiExample/tsconfig.json b/JS/edgechains/examples/wikiExample/tsconfig.json deleted file mode 100644 index 4ad142370..000000000 --- a/JS/edgechains/examples/wikiExample/tsconfig.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "compilerOptions": { - "types": ["dotenv/config", "jest", "node"], - "target": "ES2022", - "module": "NodeNext", - "esModuleInterop": true, - "forceConsistentCasingInFileNames": true, - "strict": true, - "skipLibCheck": true, - "jsx": "react-jsx", - "jsxImportSource": "hono/jsx", - "noImplicitAny": false, - "moduleResolution": "NodeNext", - "declaration": true, - "outDir": "dist" - }, - "include": ["src/**/*.ts", "dist/**/*.d.ts"] -}
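The readmes above only list the endpoint URLs and a raw request body, so here is a minimal way to exercise the two new servers from the command line. This is an illustrative sketch, not part of the change itself: it assumes the corresponding backend has been started with `npm run start`, and since both the language-translater and summarize-page servers bind port 3000, they should be run one at a time.

```bash
# Translate text via the language-translater backend (POST /translate)
curl -X POST http://localhost:3000/translate \
  -H "Content-Type: application/json" \
  -d '{"language": "hindi", "text": "hi, how are you"}'

# Summarize a web page via the summarize-page server (GET /?pageUrl=...)
curl "http://localhost:3000/?pageUrl=https://en.wikipedia.org/wiki/Association_football"
```

Both routes return whatever the evaluated jsonnet produces, serialized by `c.json`; in these examples that should be the zod-validated `{ "answer": ... }` payload built by `openAICall`.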