Skip to content

feat: node prompt version #4141

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 5 commits into from
Mar 19, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
80 changes: 63 additions & 17 deletions packages/global/core/ai/prompt/AIChat.ts
Original file line number Diff line number Diff line change
@@ -1,54 +1,70 @@
import { PromptTemplateItem } from '../type.d';
import { i18nT } from '../../../../web/i18n/utils';
import { getPromptByVersion } from './agent';

export const Prompt_QuoteTemplateList: PromptTemplateItem[] = [
{
title: i18nT('app:template.standard_template'),
desc: i18nT('app:template.standard_template_des'),
value: `{
value: {
['4.9.0']: `{
"sourceName": "{{source}}",
"updateTime": "{{updateTime}}",
"content": "{{q}}\n{{a}}"
}
`
}
},
{
title: i18nT('app:template.qa_template'),
desc: i18nT('app:template.qa_template_des'),
value: `<Question>
value: {
['4.9.0']: `<Question>
{{q}}
</Question>
<Answer>
{{a}}
</Answer>`
}
},
{
title: i18nT('app:template.standard_strict'),
desc: i18nT('app:template.standard_strict_des'),
value: `{
value: {
['4.9.0']: `{
"sourceName": "{{source}}",
"updateTime": "{{updateTime}}",
"content": "{{q}}\n{{a}}"
}
`
}
},
{
title: i18nT('app:template.hard_strict'),
desc: i18nT('app:template.hard_strict_des'),
value: `<Question>
value: {
['4.9.0']: `<Question>
{{q}}
</Question>
<Answer>
{{a}}
</Answer>`
}
}
];

/**
 * Resolve the quote template text for a given app version.
 * Uses the first (standard) template's version map and delegates
 * version matching/fallback to getPromptByVersion.
 */
export const getQuoteTemplate = (version?: string) => {
  const standardTemplateMap = Prompt_QuoteTemplateList[0].value;
  return getPromptByVersion(version, standardTemplateMap);
};

export const Prompt_userQuotePromptList: PromptTemplateItem[] = [
{
title: i18nT('app:template.standard_template'),
desc: '',
value: `使用 <Reference></Reference> 标记中的内容作为本次对话的参考:
value: {
['4.9.0']: `使用 <Reference></Reference> 标记中的内容作为本次对话的参考:

<Reference>
{{quote}}
Expand All @@ -62,11 +78,13 @@ export const Prompt_userQuotePromptList: PromptTemplateItem[] = [
- 使用与问题相同的语言回答。

问题:"""{{question}}"""`
}
},
{
title: i18nT('app:template.qa_template'),
desc: '',
value: `使用 <QA></QA> 标记中的问答对进行回答。
value: {
['4.9.0']: `使用 <QA></QA> 标记中的问答对进行回答。

<QA>
{{quote}}
Expand All @@ -79,11 +97,13 @@ export const Prompt_userQuotePromptList: PromptTemplateItem[] = [
- 避免提及你是从 QA 获取的知识,只需要回复答案。

问题:"""{{question}}"""`
}
},
{
title: i18nT('app:template.standard_strict'),
desc: '',
value: `忘记你已有的知识,仅使用 <Reference></Reference> 标记中的内容作为本次对话的参考:
value: {
['4.9.0']: `忘记你已有的知识,仅使用 <Reference></Reference> 标记中的内容作为本次对话的参考:

<Reference>
{{quote}}
Expand All @@ -101,11 +121,13 @@ export const Prompt_userQuotePromptList: PromptTemplateItem[] = [
- 使用与问题相同的语言回答。

问题:"""{{question}}"""`
}
},
{
title: i18nT('app:template.hard_strict'),
desc: '',
value: `忘记你已有的知识,仅使用 <QA></QA> 标记中的问答对进行回答。
value: {
['4.9.0']: `忘记你已有的知识,仅使用 <QA></QA> 标记中的问答对进行回答。

<QA>
{{quote}}
Expand All @@ -126,14 +148,16 @@ export const Prompt_userQuotePromptList: PromptTemplateItem[] = [
- 使用与问题相同的语言回答。

问题:"""{{question}}"""`
}
}
];

export const Prompt_systemQuotePromptList: PromptTemplateItem[] = [
{
title: i18nT('app:template.standard_template'),
desc: '',
value: `使用 <Reference></Reference> 标记中的内容作为本次对话的参考:
value: {
['4.9.0']: `使用 <Reference></Reference> 标记中的内容作为本次对话的参考:

<Reference>
{{quote}}
Expand All @@ -145,11 +169,13 @@ export const Prompt_systemQuotePromptList: PromptTemplateItem[] = [
- 保持答案与 <Reference></Reference> 中描述的一致。
- 使用 Markdown 语法优化回答格式。
- 使用与问题相同的语言回答。`
}
},
{
title: i18nT('app:template.qa_template'),
desc: '',
value: `使用 <QA></QA> 标记中的问答对进行回答。
value: {
['4.9.0']: `使用 <QA></QA> 标记中的问答对进行回答。

<QA>
{{quote}}
Expand All @@ -160,11 +186,13 @@ export const Prompt_systemQuotePromptList: PromptTemplateItem[] = [
- 回答的内容应尽可能与 <答案></答案> 中的内容一致。
- 如果没有相关的问答对,你需要澄清。
- 避免提及你是从 QA 获取的知识,只需要回复答案。`
}
},
{
title: i18nT('app:template.standard_strict'),
desc: '',
value: `忘记你已有的知识,仅使用 <Reference></Reference> 标记中的内容作为本次对话的参考:
value: {
['4.9.0']: `忘记你已有的知识,仅使用 <Reference></Reference> 标记中的内容作为本次对话的参考:

<Reference>
{{quote}}
Expand All @@ -180,11 +208,13 @@ export const Prompt_systemQuotePromptList: PromptTemplateItem[] = [
- 保持答案与 <Reference></Reference> 中描述的一致。
- 使用 Markdown 语法优化回答格式。
- 使用与问题相同的语言回答。`
}
},
{
title: i18nT('app:template.hard_strict'),
desc: '',
value: `忘记你已有的知识,仅使用 <QA></QA> 标记中的问答对进行回答。
value: {
['4.9.0']: `忘记你已有的知识,仅使用 <QA></QA> 标记中的问答对进行回答。

<QA>
{{quote}}
Expand All @@ -203,12 +233,28 @@ export const Prompt_systemQuotePromptList: PromptTemplateItem[] = [
- 避免提及你是从 QA 获取的知识,只需要回复答案。
- 使用 Markdown 语法优化回答格式。
- 使用与问题相同的语言回答。`
}
}
];

/**
 * Resolve the quote prompt for a given app version and message role.
 * Picks the role-specific template list, then delegates version
 * matching/fallback to getPromptByVersion using the first (standard) entry.
 */
export const getQuotePrompt = (version?: string, role: 'user' | 'system' = 'user') => {
  const templates = role === 'system' ? Prompt_systemQuotePromptList : Prompt_userQuotePromptList;
  return getPromptByVersion(version, templates[0].value);
};

// Document quote prompt.
// NOTE(review): the old `Prompt_DocumentQuote` constant was left behind as
// diff residue duplicating this prompt; it is removed here in favor of the
// versioned accessor below.
export const getDocumentQuotePrompt = (version: string) => {
  // Prompt text keyed by the app version that introduced it.
  const promptMap = {
    ['4.9.0']: `将 <FilesContent></FilesContent> 中的内容作为本次对话的参考:
<FilesContent>
{{quote}}
</FilesContent>
`
  };

  return getPromptByVersion(version, promptMap);
};
65 changes: 59 additions & 6 deletions packages/global/core/ai/prompt/agent.ts
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

抽离 getPromptByVersion

Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,29 @@ A2:
`
};

export const Prompt_ExtractJson = `你可以从 <对话记录></对话记录> 中提取指定 Json 信息,你仅需返回 Json 字符串,无需回答问题。
/**
 * Pick a prompt string out of a version-keyed map.
 *
 * Resolution order:
 * - no version given → the prompt for the newest version in the map;
 * - exact key match → that version's prompt;
 * - otherwise → the prompt for the oldest version in the map.
 *
 * Keys are expected to look like "major.minor.patch" and are compared
 * numerically, not lexicographically.
 */
export const getPromptByVersion = (version?: string, promptMap: Record<string, string> = {}) => {
  // Sort keys newest-first by numeric major/minor/patch comparison.
  const sortedVersions = Object.keys(promptMap).sort((left, right) => {
    const [leftMajor, leftMinor, leftPatch] = left.split('.').map(Number);
    const [rightMajor, rightMinor, rightPatch] = right.split('.').map(Number);

    if (leftMajor !== rightMajor) return rightMajor - leftMajor;
    if (leftMinor !== rightMinor) return rightMinor - leftMinor;
    return rightPatch - leftPatch;
  });

  // No explicit version requested → latest available prompt.
  if (!version) {
    return promptMap[sortedVersions[0]];
  }

  // Exact version match wins.
  if (version in promptMap) {
    return promptMap[version];
  }

  // Unknown version → fall back to the oldest prompt (last after the
  // descending sort).
  return promptMap[sortedVersions[sortedVersions.length - 1]];
};

export const getExtractJsonPrompt = (version?: string) => {
const promptMap: Record<string, string> = {
['4.8.1']: `你可以从 <对话记录></对话记录> 中提取指定 Json 信息,你仅需返回 Json 字符串,无需回答问题。
<提取要求>
{{description}}
</提取要求>
Expand All @@ -44,9 +66,31 @@ export const Prompt_ExtractJson = `你可以从 <对话记录></对话记录>
{{text}}
</对话记录>

提取的 json 字符串:`;
提取的 json 字符串:`
};

return getPromptByVersion(version, promptMap);
};

export const getExtractJsonToolPrompt = (version?: string) => {
const promptMap: Record<string, string> = {
['4.8.1']: `我正在执行一个函数,需要你提供一些参数,请以 JSON 字符串格式返回这些参数,要求:
"""
- {{description}}
- 不是每个参数都是必须生成的,如果没有合适的参数值,不要生成该参数,或返回空字符串。
- 需要结合前面的对话内容,一起生成合适的参数。
"""

本次输入内容: """{{content}}"""
`
};

export const Prompt_CQJson = `请帮我执行一个“问题分类”任务,将问题分类为以下几种类型之一:
return getPromptByVersion(version, promptMap);
};

export const getCQPrompt = (version?: string) => {
const promptMap: Record<string, string> = {
['4.8.1']: `请帮我执行一个"问题分类"任务,将问题分类为以下几种类型之一:

"""
{{typeList}}
Expand All @@ -64,14 +108,23 @@ export const Prompt_CQJson = `请帮我执行一个“问题分类”任务,

问题:"{{question}}"
类型ID=
`;
`
};

return getPromptByVersion(version, promptMap);
};

export const PROMPT_QUESTION_GUIDE = `You are an AI assistant tasked with predicting the user's next question based on the conversation history. Your goal is to generate 3 potential questions that will guide the user to continue the conversation. When generating these questions, adhere to the following rules:
/**
 * System prompt instructing the model to predict three short follow-up
 * questions from the conversation history. A stray diff-residue line (the
 * removed `PROMPT_QUESTION_GUIDE_FOOTER` constant) has been removed from
 * inside the function body.
 */
export const getQuestionGuidePrompt = () => {
  return `You are an AI assistant tasked with predicting the user's next question based on the conversation history. Your goal is to generate 3 potential questions that will guide the user to continue the conversation. When generating these questions, adhere to the following rules:

1. Use the same language as the user's last question in the conversation history.
2. Keep each question under 20 characters in length.

Analyze the conversation history provided to you and use it as context to generate relevant and engaging follow-up questions. Your predictions should be logical extensions of the current topic or related areas that the user might be interested in exploring further.

Remember to maintain consistency in tone and style with the existing conversation while providing diverse options for the user to choose from. Your goal is to keep the conversation flowing naturally and help the user delve deeper into the subject matter or explore related topics.`;
};

// Output-format contract appended after the question-guide prompt:
// forces the model to answer with a JSON-style array of three questions.
export const getQuestionGuideFooterPrompt = () =>
  `Please strictly follow the format rules: \nReturn questions in JSON format: ['Question 1', 'Question 2', 'Question 3']. Your output: `;
2 changes: 1 addition & 1 deletion packages/global/core/ai/type.d.ts
Original file line number Diff line number Diff line change
Expand Up @@ -80,5 +80,5 @@ export * from 'openai';
/**
 * A selectable prompt template. The stray duplicate `value: string;` member
 * (diff residue from the pre-versioning shape, which made the type invalid)
 * has been removed.
 */
export type PromptTemplateItem = {
  title: string;
  desc: string;
  // Prompt text keyed by app version, e.g. { '4.9.0': '…' }.
  value: Record<string, string>;
};
1 change: 0 additions & 1 deletion packages/global/core/app/constants.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
import { PROMPT_QUESTION_GUIDE } from '../ai/prompt/agent';
import {
AppTTSConfigType,
AppFileSelectConfigType,
Expand Down
7 changes: 0 additions & 7 deletions packages/global/core/workflow/template/input.ts
Original file line number Diff line number Diff line change
Expand Up @@ -76,13 +76,6 @@ export const Input_Template_Text_Quote: FlowNodeInputItemType = {
valueType: WorkflowIOValueTypeEnum.string
};

export const Input_Template_File_Link_Prompt: FlowNodeInputItemType = {
key: NodeInputKeyEnum.fileUrlList,
renderTypeList: [FlowNodeInputTypeEnum.reference, FlowNodeInputTypeEnum.input],
label: i18nT('app:file_quote_link'),
debugLabel: i18nT('app:file_quote_link'),
valueType: WorkflowIOValueTypeEnum.arrayString
};
export const Input_Template_File_Link: FlowNodeInputItemType = {
key: NodeInputKeyEnum.fileUrlList,
renderTypeList: [FlowNodeInputTypeEnum.reference],
Expand Down
4 changes: 2 additions & 2 deletions packages/global/core/workflow/template/system/aiChat/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ import {
Input_Template_History,
Input_Template_System_Prompt,
Input_Template_UserChatInput,
Input_Template_File_Link_Prompt
Input_Template_File_Link
} from '../../input';
import { chatNodeSystemPromptTip, systemPromptTip } from '../../tip';
import { getHandleConfig } from '../../utils';
Expand Down Expand Up @@ -129,7 +129,7 @@ export const AiChatModule: FlowNodeTemplateType = {
},
Input_Template_History,
Input_Template_Dataset_Quote,
Input_Template_File_Link_Prompt,
Input_Template_File_Link,
{ ...Input_Template_UserChatInput, toolDescription: i18nT('workflow:user_question') }
],
outputs: [
Expand Down
4 changes: 2 additions & 2 deletions packages/global/core/workflow/template/system/tools.ts
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ import { chatNodeSystemPromptTip, systemPromptTip } from '../tip';
import { LLMModelTypeEnum } from '../../../ai/constants';
import { getHandleConfig } from '../utils';
import { i18nT } from '../../../../../web/i18n/utils';
import { Input_Template_File_Link_Prompt } from '../input';
import { Input_Template_File_Link } from '../input';

export const ToolModule: FlowNodeTemplateType = {
id: FlowNodeTypeEnum.tools,
Expand Down Expand Up @@ -97,7 +97,7 @@ export const ToolModule: FlowNodeTemplateType = {
placeholder: chatNodeSystemPromptTip
},
Input_Template_History,
Input_Template_File_Link_Prompt,
Input_Template_File_Link,
Input_Template_UserChatInput
],
outputs: [
Expand Down
6 changes: 3 additions & 3 deletions packages/service/core/ai/functions/createQuestionGuide.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,8 @@ import { countGptMessagesTokens, countPromptTokens } from '../../../common/strin
import { loadRequestMessages } from '../../chat/utils';
import { llmCompletionsBodyFormat } from '../utils';
import {
PROMPT_QUESTION_GUIDE,
PROMPT_QUESTION_GUIDE_FOOTER
getQuestionGuideFooterPrompt,
getQuestionGuidePrompt
} from '@fastgpt/global/core/ai/prompt/agent';
import { addLog } from '../../../common/system/log';
import json5 from 'json5';
Expand All @@ -27,7 +27,7 @@ export async function createQuestionGuide({
...messages,
{
role: 'user',
content: `${customPrompt || PROMPT_QUESTION_GUIDE}\n${PROMPT_QUESTION_GUIDE_FOOTER}`
content: `${customPrompt || getQuestionGuidePrompt()}\n${getQuestionGuideFooterPrompt()}`
}
];
const requestMessages = await loadRequestMessages({
Expand Down
Loading
Loading