
Commit

feat: calling openai api
linonetwo committed Nov 20, 2024
1 parent a5cb577 commit b59740c
Showing 4 changed files with 169 additions and 26 deletions.
3 changes: 2 additions & 1 deletion deno.json
@@ -1,11 +1,12 @@
 {
   "tasks": {
-    "dev": "deno run --watch --allow-read --allow-net main.ts"
+    "dev": "deno run --watch --allow-read --allow-net --env-file main.ts"
   },
   "imports": {
     "@std/assert": "jsr:@std/assert@1",
     "@std/fs": "jsr:@std/fs@^1.0.5",
     "@std/path": "jsr:@std/path@^1.0.8",
+    "openai": "npm:openai@^4.72.0",
     "tiddlywiki": "npm:tiddlywiki@^5.3.6",
     "tw5-typed": "npm:tw5-typed@^0.5.14"
   },
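
Note: the new --env-file flag makes deno run load variables from a local .env file into Deno.env before main.ts starts, which is how the DeepSeEK key reaches the code below. A minimal sketch, assuming a .env file next to deno.json (the file contents and error message are illustrative):

// .env is assumed to contain a line like DEEPSEEK_API_KEY=sk-... (placeholder)
// With `deno run --env-file`, it is loaded before this code runs.
const apiKey = Deno.env.get("DEEPSEEK_API_KEY");
if (!apiKey) {
  throw new Error("DEEPSEEK_API_KEY is not set; check your .env file");
}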
128 changes: 126 additions & 2 deletions deno.lock

Some generated files are not rendered by default.

55 changes: 39 additions & 16 deletions generateChatML.ts
@@ -1,6 +1,12 @@
 import { join } from "@std/path";
-import "tw5-typed";
+import type {} from "tw5-typed";
 import { TiddlyWiki } from "tiddlywiki";
+import OpenAI from "openai";
+
+const openai = new OpenAI({
+  apiKey: Deno.env.get("DEEPSEEK_API_KEY"),
+  baseURL: "https://api.deepseek.com",
+});
 
 const wikiInstance = TiddlyWiki();
 const wikiPath = join(Deno.cwd(), "wiki");
@@ -15,18 +21,23 @@ export async function generateChatML(tidContent: string): Promise<string> {
     .filter((tiddler) => tiddler !== undefined)
     .filter((tiddler) => tiddler.fields.prompt !== undefined);
 
-  const chatmlContents = await Promise.all(dataPromptTiddlers.map(async (tiddler) => {
-    const prompt = tiddler.fields.prompt as string;
-    const AIOutput = await callLLM(prompt, tidContent);
-    const chatmlContent = wikiInstance.wiki.renderTiddler("text/plain", tiddler.fields.title, {
-      variables: {
-        prompt,
-        AIOutput,
-      }
-    })
-    return chatmlContent;
-  }))
-
+  const chatmlContents = await Promise.all(
+    dataPromptTiddlers.map(async (tiddler) => {
+      const prompt = tiddler.fields.prompt as string;
+      const AIOutput = await callLLM(prompt, tidContent);
+      const chatmlContent = wikiInstance.wiki.renderTiddler(
+        "text/plain",
+        tiddler.fields.title,
+        {
+          variables: {
+            prompt,
+            AIOutput,
+          },
+        },
+      );
+      return chatmlContent;
+    }),
+  );
 
   // Merge all messages
   return chatmlContents.join("");
@@ -36,7 +47,19 @@ async function callLLM(
   systemPrompt: string,
   userPrompt: string,
 ): Promise<string> {
-  // Logic for calling the LLM API, extracted into a replaceable function
-  // ...implementation...
-  return ""; // Return the assistant's reply
+  console.log("Calling LLM with systemPrompt:", systemPrompt);
+  console.log("Calling LLM with userPrompt:", userPrompt);
+
+  const completion = await openai.chat.completions.create({
+    model: "deepseek-chat",
+    messages: [
+      { role: "system", content: systemPrompt },
+      { role: "user", content: userPrompt },
+    ],
+    stream: false,
+  });
+
+  const assistantMessage = completion.choices[0]?.message?.content || "";
+  console.log("Received assistantMessage:", assistantMessage);
+  return assistantMessage;
 }
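
Note: pointing the openai npm client at DeepSeek's OpenAI-compatible endpoint via baseURL means chat.completions.create works unchanged. A hypothetical usage sketch of the exported function (the .tid path is illustrative):

import { generateChatML } from "./generateChatML.ts";

// Read one .tid file and render it through every prompt tiddler,
// producing the concatenated ChatML text.
const tidContent = await Deno.readTextFile("example.tid");
console.log(await generateChatML(tidContent));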
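
The call is deliberately non-streaming (stream: false), keeping callLLM a plain string-returning function. A hedged sketch of a streaming variant, assuming the openai v4 SDK's async-iterable stream interface (callLLMStreaming is a hypothetical name; openai is the client defined above):

async function callLLMStreaming(
  systemPrompt: string,
  userPrompt: string,
): Promise<string> {
  const stream = await openai.chat.completions.create({
    model: "deepseek-chat",
    messages: [
      { role: "system", content: systemPrompt },
      { role: "user", content: userPrompt },
    ],
    stream: true, // each chunk carries an incremental delta
  });
  let text = "";
  for await (const chunk of stream) {
    text += chunk.choices[0]?.delta?.content ?? "";
  }
  return text;
}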
9 changes: 2 additions & 7 deletions main.ts
@@ -34,13 +34,8 @@ async function readTidFilesAndCreateChatML(folderPath: string) {
   await processFolder(folderPath);
 }
 
-async function callLLM(systemPrompt: string, userPrompt: string): Promise<string> {
-  // Logic for calling the LLM API, extracted into a replaceable function
-  // ...implementation...
-  return ''; // Return the assistant's reply
-}
-
+const subFolder = "core/wiki/config/ui";
 if (import.meta.main) {
-  const folderPath = join(Deno.cwd(), '..', 'TiddlyWiki5');
+  const folderPath = join(Deno.cwd(), '..', 'TiddlyWiki5', ...subFolder.split('/'));
   readTidFilesAndCreateChatML(folderPath);
 }
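
Note: subFolder narrows the scan from the whole TiddlyWiki5 checkout to one directory, and split('/') presumably hands join each segment separately so @std/path applies the platform's separator between them. A minimal sketch of the resulting path construction:

import { join } from "@std/path";

const subFolder = "core/wiki/config/ui";
const segments = subFolder.split("/"); // ["core", "wiki", "config", "ui"]
// Same as join(Deno.cwd(), "..", "TiddlyWiki5", "core", "wiki", "config", "ui")
console.log(join(Deno.cwd(), "..", "TiddlyWiki5", ...segments));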
