Commit
adding openai-function-and-web-scrapper (#362)
Shyam-Raghuwanshi authored May 23, 2024
1 parent 71e6ece commit c95ec39
Showing 11 changed files with 155 additions and 116 deletions.
10 changes: 7 additions & 3 deletions JS/edgechains/arakoodev/package.json
@@ -1,6 +1,6 @@
{
"name": "arakoodev",
"version": "1.0.7",
"version": "1.0.9",
"main": "dist/index.js",
"files": [
"dist"
@@ -12,7 +12,7 @@
"./splitter": "./dist/splitter/src/index.js",
"./arakooserver": "./dist/arakooserver/src/index.js",
"./db": "./dist/db/src/index.js",
"./middleware": "./dist/middleware/src/index.js"
"./scraper": "./dist/scraper/src/index.js"
},
"scripts": {
"build": "rm -rf dist && tsc -b",
@@ -28,24 +28,28 @@
"@types/dotenv": "^8.2.0",
"axios": "^1.6.2",
"axios-retry": "^4.1.0",
"cheerio": "^1.0.0-rc.12",
"cors": "^2.8.5",
"dotenv": "^16.4.5",
"dts-bundle-generator": "^9.3.1",
"esbuild": "^0.20.2",
"eventsource-parser": "^1.1.2",
"faiss-node": "^0.5.1",
"hono": "^4.2.2",
"jest-environment-jsdom": "^29.7.0",
"node-fetch": "^3.3.2",
"pdf-parse": "^1.1.1",
"pg": "^8.11.5",
"prettier": "^3.2.5",
"regenerator-runtime": "^0.14.1",
"request": "^2.88.2",
"retry": "^0.13.1",
"text-encoding": "^0.7.0",
"ts-node": "^10.9.2",
"typeorm": "^0.3.20",
"vitest": "^1.5.1",
"web-streams-polyfill": "^4.0.0"
"web-streams-polyfill": "^4.0.0",
"youtube-transcript": "^1.2.1"
},
"keywords": [],
"author": "",
17 changes: 13 additions & 4 deletions JS/edgechains/arakoodev/src/arakooserver/src/lib/hono/hono.ts
@@ -1,11 +1,20 @@
// src/arakooServer.ts
import { serve } from "@hono/node-server";
import { Hono } from "hono";
import { stream } from "hono/streaming";
import { cors } from "hono/cors";

export class ArakooServer {
app = new Hono();
Stream = stream;
app: Hono;

constructor() {
this.app = new Hono();
}

useCors(allowedEndpoints?:string, options?: any) {
this.app.use(allowedEndpoints || "*", cors(options));
}

createApp() {
createApp(): Hono {
return this.app;
}

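The refactored `ArakooServer` now builds its `Hono` instance in the constructor and adds an opt-in `useCors` helper that defaults to all routes. A minimal usage sketch, assuming the package re-exports `ArakooServer` from its `arakooserver` entry point; the route below is illustrative and not part of the commit:

```ts
import { ArakooServer } from "arakoodev/arakooserver";

const server = new ArakooServer();
const app = server.createApp();

// Apply CORS to every route; pass a path pattern and options to narrow it.
server.useCors();

// Illustrative route, not part of the commit.
app.get("/health", (c) => c.json({ ok: true }));
```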
1 change: 1 addition & 0 deletions JS/edgechains/arakoodev/src/document-loader/src/index.ts
Original file line number Diff line number Diff line change
@@ -1 +1,2 @@
export { PdfLoader } from "./lib/pdf-loader/pdfLoader.js";
export { YoutubeLoader } from "./lib/youtube-video-transcript-loader/youtubeLoader.js"
16 changes: 16 additions & 0 deletions JS/edgechains/arakoodev/src/document-loader/src/lib/youtube-video-transcript-loader/youtubeLoader.ts
@@ -0,0 +1,16 @@
import { YoutubeTranscript } from "youtube-transcript"

export class YoutubeLoader {
private videoUrl: string;
private transcript: YoutubeTranscript[] | null;

constructor(videoUrl: string) {
this.videoUrl = videoUrl;
this.transcript = null;
}

async loadTranscript() {
this.transcript = await YoutubeTranscript.fetchTranscript(this.videoUrl);
return this.transcript.map((t: any) => t.text).join(" ");
}
}
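A hedged usage sketch of the new loader; the import path assumes a `document-loader` export in the package manifest (not visible in this hunk), and the video URL is illustrative:

```ts
import { YoutubeLoader } from "arakoodev/document-loader";

// loadTranscript() fetches the transcript segments and joins their text into one string.
const loader = new YoutubeLoader("https://www.youtube.com/watch?v=jNQXAC9IVRw");
const transcript = await loader.loadTranscript();
console.log(transcript.slice(0, 200));
```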
1 change: 0 additions & 1 deletion JS/edgechains/arakoodev/src/middleware/src/index.ts

This file was deleted.

4 changes: 0 additions & 4 deletions JS/edgechains/arakoodev/src/middleware/src/lib/cores/cors.ts

This file was deleted.

12 changes: 0 additions & 12 deletions JS/edgechains/arakoodev/src/middleware/src/tests/cors/cors.test.ts

This file was deleted.

2 changes: 1 addition & 1 deletion JS/edgechains/arakoodev/src/openai/src/index.ts
@@ -1,4 +1,4 @@
import "dotenv/config";

export { ChatOpenAi } from "./lib/endpoints/OpenAiEndpoint.js";
export { OpenAI } from "./lib/endpoints/OpenAiEndpoint.js";
export { Stream } from "./lib/streaming/OpenAiStreaming.js";
181 changes: 90 additions & 91 deletions JS/edgechains/arakoodev/src/openai/src/lib/endpoints/OpenAiEndpoint.ts
@@ -3,172 +3,171 @@ import { config } from "dotenv";
config();
const openAI_url = "https://api.openai.com/v1/chat/completions";

interface ChatOpenAiOptions {
url?: string;
openAIApiKey?: string;
orgId?: string;
interface OpenAIConstructionOptions {
apiKey?: string;
}

interface messageOption {
role: string;
content: string;
name?: string;
}

interface OpenAIChatOptions {
model?: string;
role?: string;
max_tokens: number;
max_tokens?: number;
temperature?: number;
prompt?: string;
messages?: messageOption[];
}

export class ChatOpenAi {
url: string;
openAIApiKey: string;
orgId: string;
model: string;
role: string;
max_tokens: number;
temperature: number;
interface chatWithFunctionOptions {
model?: string;
role?: string;
max_tokens?: number;
temperature?: number;
prompt?: string;
functions?: object | Array<object>;
messages?: messageOption[];
function_call?: string;
}

interface chatWithFunctionReturnOptions {
content: string;
function_call: {
name: string;
arguments: string;
};
}

interface OpenAIChatReturnOptions {
content: string;
}

constructor(options: ChatOpenAiOptions) {
this.url = options.url || openAI_url;
this.max_tokens = options.max_tokens || 256;
this.openAIApiKey = options.openAIApiKey || process.env.OPENAI_API_KEY!;
this.orgId = options.orgId || "";
this.model = options.model || "gpt-3.5-turbo";
this.role = options.role || "user";
this.temperature = options.temperature || 0.5;
export class OpenAI {
apiKey: string;
constructor(options: OpenAIConstructionOptions) {
this.apiKey = options.apiKey || process.env.OPENAI_API_KEY || "";
}

async generateResponse(prompt: string): Promise<string> {
async chat(chatOptions: OpenAIChatOptions): Promise<OpenAIChatReturnOptions> {
const responce = await axios
.post(
openAI_url,
{
model: this.model,
messages: [
model: chatOptions.model || "gpt-3.5-turbo",
messages: chatOptions.prompt ? [
{
role: this.role,
content: prompt,
role: chatOptions.role || "user",
content: chatOptions.prompt,
},
],
max_tokens: this.max_tokens,
temperature: this.temperature,
] : chatOptions.messages,
max_tokens: chatOptions.max_tokens || 256,
temperature: chatOptions.temperature || 0.7,
},
{
headers: {
Authorization: "Bearer " + this.openAIApiKey,
Authorization: "Bearer " + this.apiKey,
"content-type": "application/json",
},
}
},
)
.then((response) => {
return response.data.choices;
})
.catch((error) => {
if (error.response) {
console.log("Server responded with status code:", error.response.status);
console.log(
"Server responded with status code:",
error.response.status,
);
console.log("Response data:", error.response.data);
} else if (error.request) {
console.log("No response received:", error);
} else {
console.log("Error creating request:", error.message);
}
});
return responce[0].message.content;
return responce[0].message;
}

async generateEmbeddings(resp): Promise<any> {
const response = await axios
async chatWithFunction(chatOptions: chatWithFunctionOptions): Promise<chatWithFunctionReturnOptions> {
const responce = await axios
.post(
"https://api.openai.com/v1/embeddings",
openAI_url,
{
model: "text-embedding-ada-002",
input: resp,
model: chatOptions.model || "gpt-3.5-turbo",
messages: chatOptions.prompt ? [
{
role: chatOptions.role || "user",
content: chatOptions.prompt,
},
] : chatOptions.messages,
max_tokens: chatOptions.max_tokens || 256,
temperature: chatOptions.temperature || 0.7,
functions: chatOptions.functions,
function_call: chatOptions.function_call || "auto"
},
{
headers: {
Authorization: `Bearer ${this.openAIApiKey}`,
Authorization: "Bearer " + this.apiKey,
"content-type": "application/json",
},
}
},
)
.then((response) => {
return response.data.data;
return response.data.choices;
})
.catch((error) => {
if (error.response) {
console.log("Server responded with status code:", error.response.status);
console.log(
"Server responded with status code:",
error.response.status,
);
console.log("Response data:", error.response.data);
} else if (error.request) {
console.log("No response received:", error.request);
console.log("No response received:", error);
} else {
console.log("Error creating request:", error.message);
}
});
return response;
return responce[0].message;
}

async chatWithAI(chatMessages: any) {
async generateEmbeddings(resp): Promise<any> {
const response = await axios
.post(
openAI_url,
"https://api.openai.com/v1/embeddings",
{
model: this.model,
messages: chatMessages,
temperature: this.temperature,
model: "text-embedding-ada-002",
input: resp,
},
{
headers: {
Authorization: "Bearer " + this.openAIApiKey,
Authorization: `Bearer ${this.apiKey}`,
"content-type": "application/json",
},
}
},
)
.then((response) => {
return response.data.choices;
return response.data.data;
})
.catch((error) => {
console.log({ error });
if (error.response) {
console.log("Server responded with status code:", error.response.status);
console.log(
"Server responded with status code:",
error.response.status,
);
console.log("Response data:", error.response.data);
} else if (error.request) {
console.log("No response received:", error.request);
} else {
console.log("Error creating request:", error.message);
}
});

return response;
}

async testResponseGeneration(prompt: string): Promise<string> {
const responce = await axios
.post(
openAI_url,
{
model: this.model,
messages: [
{
role: this.role,
content: prompt,
},
],
temperature: this.temperature,
},
{
headers: {
Authorization: "Bearer " + this.openAIApiKey,
"content-type": "application/json",
},
}
)
.then(function (response) {
return response.data.choices;
})
.catch(function (error) {
if (error.response) {
console.log("Server responded with status code:", error.response.status);
console.log("Response data:", error.response.data);
} else if (error.request) {
console.log("No response received:", error.request);
} else {
console.log("Error creating request:", error.message);
}
});
return responce[0].message.content;
}
}
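The refactor replaces the constructor-configured `ChatOpenAi` class with a leaner `OpenAI` class that takes only an API key up front and accepts per-call options. A hedged usage sketch — the import path, the default model, and the function schema below are illustrative assumptions, not part of the diff:

```ts
import { OpenAI } from "arakoodev/openai";

const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });

// Plain chat: pass either a prompt string or a full messages array.
const reply = await openai.chat({ prompt: "Summarize EdgeChains in one sentence." });
console.log(reply.content);

// Function calling: the schema is a hypothetical example for illustration only.
const fnReply = await openai.chatWithFunction({
    prompt: "What is the weather in Paris?",
    functions: [
        {
            name: "get_weather",
            description: "Get the current weather for a city",
            parameters: {
                type: "object",
                properties: { city: { type: "string" } },
                required: ["city"],
            },
        },
    ],
});
console.log(fnReply.function_call?.name, fnReply.function_call?.arguments);
```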

2 changes: 2 additions & 0 deletions JS/edgechains/arakoodev/src/scraper/src/index.ts
@@ -0,0 +1,2 @@
export { WebScraper } from "./lib/webScraper";
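The `WebScraper` implementation itself is not rendered in this view, so its API cannot be taken from the diff; given the newly added `cheerio` dependency, the snippet below is only a generic cheerio-plus-axios scraping sketch with hypothetical names, not the actual `WebScraper` class:

```ts
import axios from "axios";
import * as cheerio from "cheerio";

// Generic scraping sketch for illustration; the function name is hypothetical.
async function scrapePageTitle(url: string): Promise<string> {
    const { data: html } = await axios.get<string>(url);
    const $ = cheerio.load(html);
    return $("title").text().trim();
}
```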
