From a01c50efb4676b3ecde2c468f5d0a6b782e62558 Mon Sep 17 00:00:00 2001 From: tidgi Date: Sat, 29 Jun 2024 18:35:31 +0800 Subject: [PATCH] =?UTF-8?q?=E4=BD=BF=E7=94=A8=E5=A4=AA=E8=AE=B0=E6=A1=8C?= =?UTF-8?q?=E9=9D=A2=E7=89=88=E5=A4=87=E4=BB=BD?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ...$__plugins_linonetwo_tidgi-language-model.json | 15 --------------- 1 file changed, 15 deletions(-) delete mode 100644 tiddlers/$__plugins_linonetwo_tidgi-language-model.json diff --git a/tiddlers/$__plugins_linonetwo_tidgi-language-model.json b/tiddlers/$__plugins_linonetwo_tidgi-language-model.json deleted file mode 100644 index 15e6048..0000000 --- a/tiddlers/$__plugins_linonetwo_tidgi-language-model.json +++ /dev/null @@ -1,15 +0,0 @@ -[ - { - "author": "LinOnetwo", - "dependents": "", - "description": "Chat with TidGi's build-in language model service (LLama/Rwkv) in Tiddlywiki. A private, local and rooted ChatGPT AI.", - "list": "readme config tree", - "name": "TG AI", - "plugin-type": "plugin", - "text": "{\"tiddlers\":{\"$:/plugins/linonetwo/tidgi-language-model/config\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/config\",\"tags\":\"$:/tags/ControlPanel/SettingsTab\",\"caption\":\"<>\",\"text\":\"\\\\define lingo-base() $:/plugins/linonetwo/tidgi-language-model/language/\\n\\n<>\\n\\n---\\n\\n!! <>\\n\\n;<>\\n:<$select tiddler='$:/plugins/linonetwo/tidgi-language-model/configs/DefaultModelRunner'>\\n\\t\\n <>\\n;<>\\n:<$select tiddler='$:/plugins/linonetwo/tidgi-language-model/configs/DefaultSystemTemplate'>\\n\\t<$list filter=\\\"[all[shadows+tiddlers]tag[$:/tags/AI/SystemTemplate]]\\\">\\n\\t\\t\\n\\t\\n <>\\n\\n!! <>\\n\\n<>\\n\\n<>\\n\\n!! <>\\n\\n<>\\n\\n<>\\n\\n!! <>\\n\\n;<>\\n:<>\\n\"},\"$:/plugins/linonetwo/tidgi-language-model/configs/DefaultSystemPrompt\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/configs/DefaultSystemPrompt\",\"text\":\"You known much on TiddlyWiki. 
You should answer in wikitext format(`!` for title, and `#` for numeral list, `*` for unordered list.).\"},\"$:/plugins/linonetwo/tidgi-language-model/configs/DefaultModelRunner\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/configs/DefaultModelRunner\",\"text\":\"llama.cpp\"},\"$:/plugins/linonetwo/tidgi-language-model/configs/DefaultSystemTemplate\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/configs/DefaultSystemTemplate\",\"text\":\"$:/plugins/linonetwo/tidgi-language-model/prompts/openchat.jinja\"},\"$:/plugins/linonetwo/tidgi-language-model/WidgetParameter\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/WidgetParameter\",\"text\":\"\\\\define lingo-base() $:/plugins/linonetwo/tidgi-language-model/language/\\n\\n<>\\n\"},\"$:/plugins/linonetwo/tidgi-language-model/readme\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/readme\",\"text\":\"\\\\define lingo-base() $:/plugins/linonetwo/tidgi-language-model/language/\\n\\n<>\\n\"},\"$:/plugins/linonetwo/tidgi-language-model/tree\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/tree\",\"type\":\"text/vnd.tiddlywiki\",\"text\":\"<>\"},\"$:/plugins/linonetwo/tidgi-language-model/readme/types/ChatCompletionEvent\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/readme/types/ChatCompletionEvent\",\"text\":\"```typescript\\ninterface ChatCompletionEvent {\\n event: Event;\\n type: 'tidgi-chat';\\n name: 'completion-finish';\\n paramObject: {\\n id: string; // Chat id from OpenAI\\n created: Date; // Chat time\\n assistant: string; // Answer of ChatGPT\\n user: string; // Your question\\n };\\n widget: ChatGPTWidget;\\n historyTiddler: string;\\n}\\n```\"},\"$:/plugins/linonetwo/tidgi-language-model/docs/types/LLaMa\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/docs/types/LLaMa\",\"type\":\"application/typescript\",\"text\":\"type LLamaChatPromptOptions = {\\n onToken?: (tokens: Token[]) => void;\\n signal?: AbortSignal;\\n maxTokens?: number;\\n /**\\n * Temperature is a hyperparameter that controls the randomness of the generated text.\\n * It affects the probability distribution of the model's output tokens.\\n * A higher temperature (e.g., 1.5) makes the output more random and creative,\\n * while a lower temperature (e.g., 0.5) makes the output more focused, deterministic, and conservative.\\n * The suggested temperature is 0.8, which provides a balance between randomness and determinism.\\n * At the extreme, a temperature of 0 will always pick the most likely next token, leading to identical outputs in each run.\\n *\\n * Set to `0` to disable.\\n * Disabled by default (set to `0`).\\n */\\n temperature?: number;\\n /**\\n * Limits the model to consider only the K most likely next tokens for sampling at each step of sequence generation.\\n * An integer number between `1` and the size of the vocabulary.\\n * Set to `0` to disable (which uses the full vocabulary).\\n *\\n * Only relevant when `temperature` is set to a value greater than 0.\\n */\\n topK?: number;\\n /**\\n * Dynamically selects the smallest set of tokens whose cumulative probability exceeds the threshold P,\\n * and samples the next token only from this set.\\n * A float number between `0` and `1`.\\n * Set to `1` to disable.\\n *\\n * Only relevant when `temperature` is set to a value greater than `0`.\\n */\\n topP?: number;\\n grammar?: LlamaGrammar;\\n /**\\n * Trim whitespace from the end of the generated text\\n * Disabled by default.\\n */\\n trimWhitespaceSuffix?: boolean;\\n repeatPenalty?: false | 
LlamaChatSessionRepeatPenalty;\\n};\\ntype LlamaChatSessionRepeatPenalty = {\\n /**\\n * Number of recent tokens generated by the model to apply penalties to repetition of.\\n * Defaults to `64`.\\n */\\n lastTokens?: number;\\n punishTokensFilter?: (tokens: Token[]) => Token[];\\n /**\\n * Penalize new line tokens.\\n * Enabled by default.\\n */\\n penalizeNewLine?: boolean;\\n /**\\n * The relative amount to lower the probability of the tokens in `punishTokens` by\\n * Defaults to `1.1`.\\n * Set to `1` to disable.\\n */\\n penalty?: number;\\n /**\\n * For n time a token is in the `punishTokens` array, lower its probability by `n * frequencyPenalty`\\n * Disabled by default (`0`).\\n * Set to a value between `0` and `1` to enable.\\n */\\n frequencyPenalty?: number;\\n /**\\n * Lower the probability of all the tokens in the `punishTokens` array by `presencePenalty`\\n * Disabled by default (`0`).\\n * Set to a value between `0` and `1` to enable.\\n */\\n presencePenalty?: number;\\n};\"},\"$:/plugins/linonetwo/tidgi-language-model/language/en-GB/DefaultPromptTemplate\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/language/en-GB/DefaultPromptTemplate\",\"text\":\"<>\\n\\n<% if [] %>\\n<>\\n<% endif %>\"},\"$:/plugins/linonetwo/tidgi-language-model/language/en-GB/DefaultSystemPrompt\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/language/en-GB/DefaultSystemPrompt\",\"text\":\"You know Tiddlywiki. You should answer the questions in wikitext format\\n\\n* `!` indicates a title\\n* `#` indicates a ordered list item\\n* `*` for an unordered list item\\n* The rest is similar to markdown\\n\\nWhen using mermaid to draw mind maps, The generated mind map is wrapped in the form of\\n\\n$$$text/vnd.tiddlywiki.mermaid\\ngraph TD\\n A --> B\\n$$$\\n\\nYou will play the role of a knowledge management expert, generating sensible and professional answers.\"},\"$:/plugins/linonetwo/tidgi-language-model/language/en-GB/Readme\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/language/en-GB/Readme\",\"text\":\"\\\\define lingo-base() $:/plugins/linonetwo/tidgi-language-model/language/\\n\\n! <>\\n\\n<>\\n\\n!! Chatting in the sidebar\\n\\nYour sidebar will have an extra TG AI page, you can have a conversation directly, the history of the conversation will be saved in [[a state tiddler|$:/state/plugins/linonetwo/tidgi-language-model/side-bar-history]]. If you want to clear the history, just delete the tiddler pointed to by the `history` parameter, too remember that since this is a NodeJS wiki, it doesn't save the state entries, they are cleared after refresh, so please save them yourself as needed.\\n\\nTo get an answer from the AI, enter text into the textarea and hit enter or click the {{$:/core/images/add-comment}} \\\"Chat\\\" button on the right side of the textarea. If you want to enter multiple lines of text, you can use Shift + Enter to change lines. There is a grabber in the bottom right corner of the textarea to drag the textarea to the height you want.\\n\\nClicking the {{$:/core/images/import-button}} \\\"Attach Tiddler\\\" button on the left side of the textarea opens a input box at the top of the textarea. Enter the title of the tiddler or a filter expression here (both needs to satisfy the Tiddlywiki filter expression syntax), and keep the box open to import the content of that tiddler into the textarea. 
This allows you to perform various advanced operations such as:\\n\\n# Expand this tiddler\\n# Rewrite the content of this tiddler\\n# Translate the tiddler into Chinese\\n\\n!! Using Widgets\\n\\nLLaMa is actually a widget that allows you to customize the chatbot according to your needs:\\n\\n```html\\n<$tidgi-chat />\\n```\\n\\nRead [[readme/en-GB/Widget|$:/plugins/linonetwo/tidgi-language-model/readme/en-GB/Widget]] for more information.\\n\"},\"$:/plugins/linonetwo/tidgi-language-model/language/en-GB/Name\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/language/en-GB/Name\",\"text\":\"Tidgi Language Model\"},\"$:/plugins/linonetwo/tidgi-language-model/language/en-GB/Description\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/language/en-GB/Description\",\"text\":\"Chat with TidGi (Desktop only)'s build-in language model service (LLama/Rwkv) in Tiddlywiki. A private, local and rooted ChatGPT AI.\"},\"$:/plugins/linonetwo/tidgi-language-model/language/en-GB/Configs/Description\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/language/en-GB/Configs/Description\",\"text\":\"These settings let you customise the behaviour of \\\"Tidgi Language Model\\\" plugin.\"},\"$:/plugins/linonetwo/tidgi-language-model/language/en-GB/Configs/PluginConfigs/Caption\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/language/en-GB/Configs/PluginConfigs/Caption\",\"text\":\"Plugin Configs\"},\"$:/plugins/linonetwo/tidgi-language-model/language/en-GB/Configs/DefaultSystemPrompt/Caption\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/language/en-GB/Configs/DefaultSystemPrompt/Caption\",\"text\":\"Default System Prompt\"},\"$:/plugins/linonetwo/tidgi-language-model/language/en-GB/Configs/DefaultSystemPrompt/Description\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/language/en-GB/Configs/DefaultSystemPrompt/Description\",\"text\":\"Set a system prompt word for sidebar AI chats and other AI chat interfaces that are not configured with a system prompt word, and it will be spliced directly in front of the user-entered question.\"},\"$:/plugins/linonetwo/tidgi-language-model/language/en-GB/Configs/DefaultSystemPrompt/Editor\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/language/en-GB/Configs/DefaultSystemPrompt/Editor\",\"text\":\"<$edit-text tiddler=\\\"$:/plugins/linonetwo/tidgi-language-model/language/en-GB/DefaultSystemPrompt\\\" tabindex=-1 focus=false cancelPopups=\\\"yes\\\" fileDrop=no tag=\\\"textarea\\\" class=\\\"default-system-prompt-editor\\\" />\"},\"$:/plugins/linonetwo/tidgi-language-model/language/en-GB/Configs/DefaultPromptTemplate/Caption\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/language/en-GB/Configs/DefaultPromptTemplate/Caption\",\"text\":\"Default Prompt Template\"},\"$:/plugins/linonetwo/tidgi-language-model/language/en-GB/Configs/DefaultPromptTemplate/Description\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/language/en-GB/Configs/DefaultPromptTemplate/Description\",\"text\":\"A template that combines user input and custom context. 
Sections will be provided as wikitext variables.\"},\"$:/plugins/linonetwo/tidgi-language-model/language/en-GB/Configs/DefaultPromptTemplate/Editor\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/language/en-GB/Configs/DefaultPromptTemplate/Editor\",\"text\":\"<$edit-text tiddler=\\\"$:/plugins/linonetwo/tidgi-language-model/language/en-GB/DefaultPromptTemplate\\\" tabindex=-1 focus=false cancelPopups=\\\"yes\\\" fileDrop=no tag=\\\"textarea\\\" class=\\\"default-system-prompt-editor\\\" />\"},\"$:/plugins/linonetwo/tidgi-language-model/language/en-GB/Configs/DefaultSystemTemplate/Caption\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/language/en-GB/Configs/DefaultSystemTemplate/Caption\",\"text\":\"Default System Template\"},\"$:/plugins/linonetwo/tidgi-language-model/language/en-GB/Configs/DefaultSystemTemplate/Description\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/language/en-GB/Configs/DefaultSystemTemplate/Description\",\"text\":\"Combining system prompt words and user input, tiddlers need to be in JinJa format. Tiddlers with the `$:/tags/AI/SystemTemplate` heading will appear in this drop-down list. Select the system template that fits the currently used model for optimal generation.\"},\"$:/plugins/linonetwo/tidgi-language-model/language/en-GB/Configs/DefaultModelRunner/Caption\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/language/en-GB/Configs/DefaultModelRunner/Caption\",\"text\":\"Default Model Runner\"},\"$:/plugins/linonetwo/tidgi-language-model/language/en-GB/Configs/DefaultModelRunner/Description\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/language/en-GB/Configs/DefaultModelRunner/Description\",\"text\":\"Different models are supported by different runners. the LLaMa.CPP runner supports most of the GGUF models.\"},\"$:/plugins/linonetwo/tidgi-language-model/language/en-GB/Configs/TidGiConfigs/Caption\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/language/en-GB/Configs/TidGiConfigs/Caption\",\"text\":\"TidGi Configs\"},\"$:/plugins/linonetwo/tidgi-language-model/language/en-GB/Configs/TidGiConfigs/Timeout/Caption\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/language/en-GB/Configs/TidGiConfigs/Timeout/Caption\",\"text\":\"Timeout time (minutes)\"},\"$:/plugins/linonetwo/tidgi-language-model/language/en-GB/Configs/TidGiConfigs/Timeout/Description\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/language/en-GB/Configs/TidGiConfigs/Timeout/Description\",\"text\":\"Please go to \\\"Settings-Language Model-Timeout/Description\\\" to set this value. If the language model doesn't say anything after waiting for such a long time, the generation will be aborted to prevent it from getting into a dead loop. 
If the language model often aborts for no reason, you can increase this value appropriately.\"},\"$:/plugins/linonetwo/tidgi-language-model/language/en-GB/WidgetParameter\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/language/en-GB/WidgetParameter\",\"text\":\"\\\\define lingo-base() $:/plugins/linonetwo/tidgi-language-model/language/\\n\\nVarious optional parameters can also be added to customize the behavior.\\n\\n|!Attributes |!Explanation |\\n|history |Fill in an tiddler title for persistent storage of chat logs |\\n|scroll |If yes, the conversation record can be scrolled up and down, but the height must be specified in the outer layer of the widget, refer to the [[sidebar|$:/plugins/linonetwo/tidgi-language-model/side-bar]] writing |\\n|component |DOM tag type for microware, default is div |\\n|className |Class name of the widget for custom styles |\\n|readonly |If it is readonly, no dialog input box will appear, and it will be used for display only with the history parameter. |\\n|systemPrompt |System messages to customize the AI's behavior, such as \\\"You are an experienced lawyer\\\" |\\n|promptTemplate | <> |\\n\\nIn addition, the following LLaMa parameters are supported:\\n\\n[[$:/plugins/linonetwo/tidgi-language-model/docs/types/LLaMa]]\\n\\nIts specific usage can check the [[official documentation|https://withcatai.github.io/node-llama-cpp/api/classes/LlamaModel]].\\n\\nNow there is no multi-round dialogue, even in a micro-piece chat, but also a single round of dialogue, multi-round dialogue and so on the next version to engage.\\n\\n!! Advance\\n\\nIf you nest your own action in the widget, you can get the result of the answer when the conversation is completed, which requires that you know how to write a widget that supports actions. The output is stored in the `output-text` variable.\\n\\nAt the same time, you can also catch bubbling events of the widget when the conversation completes, as well as global events, both using `addEventListener` and `$tw.hooks.addHook` (the event name is `tidgi-chat`) respectively. The following is the type definition of the event load.\\n\\n{{$:/plugins/linonetwo/tidgi-language-model/readme/types/ChatCompletionEvent}}\\n\"},\"$:/plugins/linonetwo/tidgi-language-model/language/zh-Hans/DefaultPromptTemplate\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/language/zh-Hans/DefaultPromptTemplate\",\"text\":\"<>\\n\\n<% if [] %>\\n<>\\n<% endif %>\"},\"$:/plugins/linonetwo/tidgi-language-model/language/zh-Hans/DefaultSystemPrompt\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/language/zh-Hans/DefaultSystemPrompt\",\"text\":\"你了解太微(Tiddlywiki)。你应该用 wikitext 格式回答问题\\n\\n* `!`表示标题\\n* `#`表示数字列表项\\n* `*`表示无序列表项\\n* 其余类似 markdown\\n\\n使用 mermaid 绘制思维导图时,用以下形式包裹生成的思维导图\\n\\n$$$text/vnd.tiddlywiki.mermaid\\ngraph TD\\n A --> B\\n$$$\\n\\n你将扮演一个知识管理专家,生成合理且专业的回答。\"},\"$:/plugins/linonetwo/tidgi-language-model/language/zh-Hans/Readme\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/language/zh-Hans/Readme\",\"text\":\"\\\\define lingo-base() $:/plugins/linonetwo/tidgi-language-model/language/\\n\\n! <>\\n\\n<>\\n\\n!! 
在侧边栏聊天\\n\\n你的侧边栏会多出一个 TG AI 页面,可以直接进行对话,对话的历史会保存在 [[一个state条目里|$:/state/plugins/linonetwo/tidgi-language-model/side-bar-history]]。如要清除历史记录,则直接删除 `history` 参数指向的条目,太记因为是 NodeJS wiki,所以不会保存 state 条目,刷新后就清空,请按需自己保存。\\n\\n在对话框中输入文本,按下回车键或点击对话框右侧的 {{$:/core/images/add-comment}}「进行对话」按钮,就可以得到 AI 的回答。如果你想要输入多行文本,可以使用 Shift + 回车来换行。对话框右下角有一个抓手可以把对话框拖大到你想要的高度。\\n\\n点击对话框左侧的 {{$:/core/images/import-button}}「附加条目」按钮,可以在对话框上方打开一个输入筛选器的输入框,在这边输入你想要的条目标题或筛选器(需要符合筛选器语法),保持这个输入框开启,就可以将该条目的内容导入到对话框中,这样就可以实现各种高级功能例如:\\n\\n# 扩写这个条目内容\\n# 改写这个条目内容\\n# 翻译内容为英文\\n\\n!! 使用微件\\n\\nLLaMa 实际上是一个微件,你可以按照自己的需求定制聊天机器人:\\n\\n```html\\n<$tidgi-chat />\\n```\\n\\n详细参数和用法见[[readme/zh-Hans/Widget|$:/plugins/linonetwo/tidgi-language-model/readme/zh-Hans/Widget]]。\\n\\n\\n\"},\"$:/plugins/linonetwo/tidgi-language-model/language/zh-Hans/Name\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/language/zh-Hans/Name\",\"text\":\"太记语言模型\"},\"$:/plugins/linonetwo/tidgi-language-model/language/zh-Hans/Description\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/language/zh-Hans/Description\",\"text\":\"在太微中与太记(仅桌面版)内置的语言模型服务(LLama/Rwkv)聊天。一个私人的、本地的和完全解锁(root过)的 ChatGPT 人工智能。\"},\"$:/plugins/linonetwo/tidgi-language-model/language/zh-Hans/Configs/Description\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/language/zh-Hans/Configs/Description\",\"text\":\"通过这些设置,您可以自定义「太记语言模型」插件的行为。\"},\"$:/plugins/linonetwo/tidgi-language-model/language/zh-Hans/Configs/PluginConfigs/Caption\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/language/zh-Hans/Configs/PluginConfigs/Caption\",\"text\":\"插件设置\"},\"$:/plugins/linonetwo/tidgi-language-model/language/zh-Hans/Configs/DefaultSystemPrompt/Caption\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/language/zh-Hans/Configs/DefaultSystemPrompt/Caption\",\"text\":\"默认系统提示词\"},\"$:/plugins/linonetwo/tidgi-language-model/language/zh-Hans/Configs/DefaultSystemPrompt/Description\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/language/zh-Hans/Configs/DefaultSystemPrompt/Description\",\"text\":\"为侧边栏AI聊天和其它未配置系统提示词的AI聊天界面设置系统提示词,它将会被直接拼接在用户输入的问题前面。\"},\"$:/plugins/linonetwo/tidgi-language-model/language/zh-Hans/Configs/DefaultSystemPrompt/Editor\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/language/zh-Hans/Configs/DefaultSystemPrompt/Editor\",\"text\":\"<$edit-text tiddler=\\\"$:/plugins/linonetwo/tidgi-language-model/language/zh-Hans/DefaultSystemPrompt\\\" tabindex=-1 focus=false cancelPopups=\\\"yes\\\" fileDrop=no tag=\\\"textarea\\\" class=\\\"default-system-prompt-editor\\\" />\"},\"$:/plugins/linonetwo/tidgi-language-model/language/zh-Hans/Configs/DefaultPromptTemplate/Caption\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/language/zh-Hans/Configs/DefaultPromptTemplate/Caption\",\"text\":\"默认提示词模板\"},\"$:/plugins/linonetwo/tidgi-language-model/language/zh-Hans/Configs/DefaultPromptTemplate/Description\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/language/zh-Hans/Configs/DefaultPromptTemplate/Description\",\"text\":\"组合用户输入和自定义上下文的模板。各部分将以维基文本变量的形式提供。\"},\"$:/plugins/linonetwo/tidgi-language-model/language/zh-Hans/Configs/DefaultPromptTemplate/Editor\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/language/zh-Hans/Configs/DefaultPromptTemplate/Editor\",\"text\":\"<$edit-text tiddler=\\\"$:/plugins/linonetwo/tidgi-language-model/language/zh-Hans/DefaultPromptTemplate\\\" tabindex=-1 focus=false cancelPopups=\\\"yes\\\" fileDrop=no tag=\\\"textarea\\\" class=\\\"default-system-prompt-editor\\\" 
/>\"},\"$:/plugins/linonetwo/tidgi-language-model/language/zh-Hans/Configs/DefaultSystemTemplate/Caption\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/language/zh-Hans/Configs/DefaultSystemTemplate/Caption\",\"text\":\"默认系统模板\"},\"$:/plugins/linonetwo/tidgi-language-model/language/zh-Hans/Configs/DefaultSystemTemplate/Description\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/language/zh-Hans/Configs/DefaultSystemTemplate/Description\",\"text\":\"组合系统提示词和用户输入,条目的内容需要为 JinJa 格式。带有 `$:/tags/AI/SystemTemplate` 标题的条目会出现在这个下拉列表里。选择适合当前所用模型的系统模板以获得最佳生成效果。\"},\"$:/plugins/linonetwo/tidgi-language-model/language/zh-Hans/Configs/DefaultModelRunner/Caption\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/language/zh-Hans/Configs/DefaultModelRunner/Caption\",\"text\":\"默认使用的语言模型运行器\"},\"$:/plugins/linonetwo/tidgi-language-model/language/zh-Hans/Configs/DefaultModelRunner/Description\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/language/zh-Hans/Configs/DefaultModelRunner/Description\",\"text\":\"不同运行器支持不同的模型。LLaMa.CPP 运行器支持大部分 GGUF 模型。\"},\"$:/plugins/linonetwo/tidgi-language-model/language/zh-Hans/Configs/TidGiConfigs/Caption\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/language/zh-Hans/Configs/TidGiConfigs/Caption\",\"text\":\"太记的设置\"},\"$:/plugins/linonetwo/tidgi-language-model/language/zh-Hans/Configs/TidGiConfigs/Timeout/Caption\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/language/zh-Hans/Configs/TidGiConfigs/Timeout/Caption\",\"text\":\"超时时间(分钟)\"},\"$:/plugins/linonetwo/tidgi-language-model/language/zh-Hans/Configs/TidGiConfigs/Timeout/Description\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/language/zh-Hans/Configs/TidGiConfigs/Timeout/Description\",\"text\":\"请到太记的「设置-语言模型-超时时长」去设置这个值。等待这么长时间之后,如果语言模型还一言不发,就中止生成,防止陷入死循环。如果语言模型经常无缘无故中止生成,可以适当增加这个值。\"},\"$:/plugins/linonetwo/tidgi-language-model/language/zh-Hans/WidgetParameter\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/language/zh-Hans/WidgetParameter\",\"text\":\"\\\\define lingo-base() $:/plugins/linonetwo/tidgi-language-model/language/\\n\\n还可以添加各种可选参数来定制行为:\\n\\n|!参数 |!解释 |\\n|history |填写一个条目的标题,用于持久化存储聊天记录 |\\n|component |微件的DOM标签类型,默认为div |\\n|className |微件的类名,用于自定义样式 |\\n|readonly |如果为readonly,则不会出现对话输入框,配合history参数仅做展示用 |\\n|systemPrompt |系统消息,用于AI的行为,例如\\\"你是一个经验丰富的律师\\\" |\\n|promptTemplate | <> |\\n\\n除此之外,还支持如下 LLaMa 参数:\\n\\n[[$:/plugins/linonetwo/tidgi-language-model/docs/types/LLaMa]]\\n\\n其具体用法可以查看[[官方文档|https://withcatai.github.io/node-llama-cpp/api/classes/LlamaModel]]。\\n\\n现在还没有做多轮对话,即便是在一个微件里聊的,也都是单轮对话,多轮对话等下个版本再搞。\\n\\n!! 
高级\\n\\n如果在微件中嵌套自己的 action,就可以在对话完成时拿到回答的结果,这需要你知道该如何编写一个支持 action 的微件。输出的结果保存在 `output-text` 变量中。\\n\\n同时,在对话完成时你也可以捕获到微件的冒泡事件,以及全局事件,二者分别使用`addEventListener`和`$tw.hooks.addHook`(事件名称就是`tidgi-chat`)来实现。如下是事件负载的类型定义:\\n\\n{{$:/plugins/linonetwo/tidgi-language-model/readme/types/ChatCompletionEvent}}\\n\"},\"$:/plugins/linonetwo/tidgi-language-model/prompts/openchat.jinja\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/prompts/openchat.jinja\",\"text\":\"{% if messages[0]['role'] == 'system' %}\\n {% set loop_messages = messages[1:] %}\\n {% set system_message = messages[0]['content'].strip() + '<|end_of_turn|>' %}\\n{% else %}\\n {% set loop_messages = messages %}\\n {% set system_message = '' %}\\n{% endif %}\\n\\n{{ bos_token + system_message }}\\n{% for message in loop_messages %}\\n {% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}\\n {{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}\\n {% endif %}\\n \\n {{ 'GPT4 Correct ' + message['role'].title() + ': ' + message['content'] + '<|end_of_turn|>' }}\\n \\n {% if loop.last and message['role'] == 'user' and add_generation_prompt %}\\n {{ 'GPT4 Correct Assistant:' }}\\n {% endif %}\\n{% endfor %}\",\"type\":\"text/plain\",\"caption\":\"openchat.jinja\",\"tags\":\"$:/tags/AI/SystemTemplate\"},\"$:/plugins/linonetwo/tidgi-language-model/SidebarTab\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/SidebarTab\",\"type\":\"text/vnd.tiddlywiki\",\"tags\":\"$:/tags/SideBar\",\"caption\":\"<>\",\"text\":\"<$tidgi-chat history=\\\"$:/state/plugins/linonetwo/tidgi-language-model/side-bar-history\\\" scroll=\\\"yes\\\" temperature=\\\"0.5\\\" />\\n\"},\"$:/plugins/linonetwo/tidgi-language-model/tidgi-chat-widget.js\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/tidgi-chat-widget.js\",\"module-type\":\"widget\",\"type\":\"application/javascript\",\"Modern.TiddlyDev#Origin\":\"tidgi-chat-widget.ts\",\"text\":\"\\\"use strict\\\";var import_widget=require(\\\"$:/core/modules/widgets/widget.js\\\"),isChinese=()=>$tw.wiki.getTiddler(\\\"$:/language\\\").fields.text.includes(\\\"zh\\\"),getChatResultUserButton=t=>{const{zh:e,deleteButtonText:i,copyButtonText:n,editButtonText:o,assistant:s,onDelete:a,onEdit:r,user:l,attachment:d}=t;let u;void 0!==a&&(u=$tw.utils.domMaker(\\\"button\\\",{\\\"class\\\":\\\"delete-button\\\",innerHTML:i,attributes:{title:e?\\\"删除问题\\\":\\\"Delete question\\\"}})).addEventListener(\\\"click\\\",()=>{a()});var c=$tw.utils.domMaker(\\\"button\\\",{\\\"class\\\":\\\"copy-button\\\",innerHTML:n,attributes:{title:e?\\\"复制原文\\\":\\\"Copy raw text\\\"}});c.addEventListener(\\\"click\\\",()=>{$tw.utils.copyToClipboard(s)});let p;return void 0!==r&&(p=$tw.utils.domMaker(\\\"button\\\",{\\\"class\\\":\\\"edit-button\\\",innerHTML:o,attributes:{title:e?\\\"重新生成问题\\\":\\\"Regenerate question\\\"}})).addEventListener(\\\"click\\\",()=>{r(l,d)}),{deleteButton:u,copyButton:c,editButton:p}},renderConversation=({id:t,assistant:e,user:i,created:n,attachment:o},s,a,r,l,d,u)=>{var{deleteButton:c,copyButton:p,editButton:h}=getChatResultUserButton({zh:s,deleteButtonText:r,copyButtonText:l,editButtonText:a,assistant:e,onDelete:u,onEdit:d,user:i,attachment:o});return $tw.utils.domMaker(\\\"div\\\",{\\\"class\\\":\\\"chatgpt-conversation\\\",attributes:{\\\"chatgpt-conversation\\\":t},children:[$tw.utils.domMaker(\\\"div\\\",{\\\"class\\\":\\\"chatgpt-conversation-message 
chatgpt-conversation-assistant\\\",children:[$tw.utils.domMaker(\\\"p\\\",{innerHTML:$tw.wiki.renderText(\\\"text/html\\\",\\\"text/vnd.tiddlywiki\\\",e)}),p]}),$tw.utils.domMaker(\\\"div\\\",{\\\"class\\\":\\\"chatgpt-conversation-message chatgpt-conversation-user\\\",children:[$tw.utils.domMaker(\\\"div\\\",{\\\"class\\\":\\\"conversation-datetime\\\",text:new Date(n).toLocaleString()}),$tw.utils.domMaker(\\\"p\\\",{text:i}),...o?[$tw.utils.domMaker(\\\"pre\\\",{text:o})]:[],...void 0===c?[]:[c],...void 0===h?[]:[h]]})]})},renderChattingConversation=t=>{const{zh:o,user:s,cancelButtonText:e,conversations:a,onCancel:i,attachment:r,editButtonText:l,deleteButtonText:d,copyButtonText:u,onEdit:c}=t,n=$tw.utils.domMaker(\\\"pre\\\",{text:o?\\\"思考中...\\\":\\\"Thinking...\\\",style:{background:\\\"transparent\\\",marginTop:\\\"0\\\",marginBottom:\\\"0\\\",padding:\\\"0\\\",border:\\\"none\\\"}}),p=$tw.utils.domMaker(\\\"span\\\",{text:\\\"0%\\\",style:{marginLeft:\\\"0.5em\\\"}}),h=$tw.utils.domMaker(\\\"div\\\",{text:o?\\\"模型加载中\\\":\\\"Model Loading\\\",style:{display:\\\"none\\\",background:\\\"transparent\\\",marginTop:\\\"0\\\",marginBottom:\\\"0\\\",padding:\\\"0\\\",border:\\\"none\\\"},children:[p]});let g,m;void 0!==i&&(m=$tw.utils.domMaker(\\\"button\\\",{\\\"class\\\":\\\"cancel-button\\\",innerHTML:e,attributes:{title:o?\\\"中止生成\\\":\\\"Cancel generation\\\"}})).addEventListener(\\\"click\\\",()=>{i(g)});return{conversation:g=$tw.utils.domMaker(\\\"div\\\",{\\\"class\\\":\\\"chatgpt-conversation chatgpt-conversation-chating\\\",children:[$tw.utils.domMaker(\\\"div\\\",{\\\"class\\\":\\\"chatgpt-conversation-message chatgpt-conversation-assistant\\\",children:[$tw.utils.domMaker(\\\"p\\\",{children:[h,n]}),...void 0===m?[]:[m]]}),$tw.utils.domMaker(\\\"div\\\",{\\\"class\\\":\\\"chatgpt-conversation-message chatgpt-conversation-user\\\",children:[$tw.utils.domMaker(\\\"div\\\",{\\\"class\\\":\\\"conversation-datetime\\\",text:(new Date).toLocaleString()}),$tw.utils.domMaker(\\\"p\\\",{text:s}),...r?[$tw.utils.domMaker(\\\"pre\\\",{text:r})]:[]]})]}),answerBox:n,printError:t=>{g.remove();let e;var{deleteButton:i,editButton:n}=getChatResultUserButton({zh:o,deleteButtonText:d,copyButtonText:u,editButtonText:l,assistant:t,onDelete:()=>null==e?void 0:e.remove(),onEdit:c,user:s,attachment:r});e=$tw.utils.domMaker(\\\"div\\\",{\\\"class\\\":\\\"chatgpt-conversation chatgpt-conversation-error\\\",children:[$tw.utils.domMaker(\\\"div\\\",{\\\"class\\\":\\\"chatgpt-conversation-message chatgpt-conversation-assistant\\\",text:t}),$tw.utils.domMaker(\\\"div\\\",{\\\"class\\\":\\\"chatgpt-conversation-message chatgpt-conversation-user\\\",children:[$tw.utils.domMaker(\\\"p\\\",{text:s}),...r?[$tw.utils.domMaker(\\\"pre\\\",{text:r})]:[],...void 0===i?[]:[i],...void 0===n?[]:[n]]})]}),a.append(e)},updateProgress:t=>{var e;void 0!==t&&t<1?(n.style.display=\\\"none\\\",h.style.display=\\\"block\\\",e=\\\"\\\".concat((100*t).toFixed(2),\\\"%\\\"),p.innerText=e):(n.style.display=\\\"block\\\",h.style.display=\\\"none\\\")}}},historyManager=i=>({getHistory:()=>{let t=[];try{t=JSON.parse($tw.wiki.getTiddlerText(i)||\\\"[]\\\")}catch{}return t},setHistory:t=>{var e;$tw.wiki.addTiddler(new $tw.Tiddler(null!=(e=$tw.wiki.getTiddler(i))?e:{},{title:i,text:JSON.stringify(t),type:\\\"application/json\\\"}))}}),ChatGPTWidget=class extends 
import_widget.widget{constructor(){super(...arguments),this.containerNodeTag=\\\"div\\\",this.containerNodeClass=\\\"\\\",this.tmpHistoryTiddler=\\\"$:/temp/linonetwo/tidgi-language-model/history-\\\".concat(Date.now()),this.historyTiddler=this.tmpHistoryTiddler,this.chatButtonText=$tw.wiki.renderText(\\\"text/html\\\",\\\"text/vnd.tiddlywiki\\\",$tw.wiki.getTiddlerText(\\\"$:/core/images/add-comment\\\")),this.attachmentButtonText=$tw.wiki.renderText(\\\"text/html\\\",\\\"text/vnd.tiddlywiki\\\",$tw.wiki.getTiddlerText(\\\"$:/core/images/import-button\\\")),this.editButtonText=$tw.wiki.renderText(\\\"text/html\\\",\\\"text/vnd.tiddlywiki\\\",$tw.wiki.getTiddlerText(\\\"$:/core/images/edit-button\\\")),this.deleteButtonText=$tw.wiki.renderText(\\\"text/html\\\",\\\"text/vnd.tiddlywiki\\\",$tw.wiki.getTiddlerText(\\\"$:/core/images/delete-button\\\")),this.cancelButtonText=$tw.wiki.renderText(\\\"text/html\\\",\\\"text/vnd.tiddlywiki\\\",$tw.wiki.getTiddlerText(\\\"$:/core/images/cancel-button\\\")),this.copyButtonText=$tw.wiki.renderText(\\\"text/html\\\",\\\"text/vnd.tiddlywiki\\\",$tw.wiki.getTiddlerText(\\\"$:/core/images/copy-clipboard\\\")),this.scroll=!1,this.readonly=!1,this.runLanguageModelOptions={completionOptions:{prompt:\\\"Say Hello to me.\\\"},loadConfig:{},id:\\\"tidgi-chat-widget\\\"},this.runner=\\\"llama.cpp\\\",this.systemPrompt=\\\"\\\",this.promptTemplate=\\\"\\\"}initialise(t,e){super.initialise(t,e),this.computeAttributes()}execute(){this.containerNodeTag=this.getAttribute(\\\"component\\\",\\\"div\\\"),this.containerNodeClass=this.getAttribute(\\\"className\\\",\\\"\\\"),this.historyTiddler=this.getAttribute(\\\"history\\\",\\\"\\\")||this.tmpHistoryTiddler,this.scroll=\\\"yes\\\"===(null==(e=null==(t=this.getAttribute(\\\"scroll\\\"))?void 0:t.toLowerCase)?void 0:e.call(t)),this.readonly=\\\"yes\\\"===(null==(e=null==(t=this.getAttribute(\\\"readonly\\\"))?void 0:t.toLowerCase)?void 0:e.call(t));var t=$tw.wiki.getTiddlerText(\\\"$:/plugins/linonetwo/tidgi-language-model/configs/DefaultModelRunner\\\"),e=(this.runner=this.getAttribute(\\\"runner\\\",t||\\\"llama.cpp\\\"),Number(this.getAttribute(\\\"temperature\\\"))),t=Number(this.getAttribute(\\\"topP\\\")),i=Number.parseInt(this.getAttribute(\\\"maxTokens\\\"),10),i=(Number.isSafeInteger(i)&&0> USER: <> ASSISTANT:\\\"),$tw.wiki.getTiddlerText(\\\"$:/plugins/linonetwo/tidgi-language-model/configs/DefaultSystemTemplate\\\")),t=e?$tw.wiki.getTiddlerText(e):void 0;this.systemTemplate=this.getAttribute(\\\"systemTemplate\\\")||t,this.makeChildWidgets()}render(t,e){var i,n;void 0!==$tw.browser&&(this.execute(),i=$tw.utils.domMaker(\\\"div\\\",{\\\"class\\\":\\\"conversations\\\"}),n=$tw.utils.domMaker(this.containerNodeTag,{\\\"class\\\":\\\"tidgi-language-model-container \\\".concat(this.containerNodeClass),children:[i]}),t.insertBefore(n,e),this.domNodes.push(n),this.chat(n,i))}refresh(t){var e=this.computeAttributes();return 0<$tw.utils.count(e)||null!=(e=t[this.historyTiddler])&&e.deleted?(this.refreshSelf(),!0):this.refreshChildren(t)}chat(t,x){try{const y=isChinese(),{getHistory:k,setHistory:$}=historyManager(this.historyTiddler);let v;if(!this.readonly){const b=$tw.utils.domMaker(\\\"textarea\\\",{\\\"class\\\":\\\"chat-input\\\",attributes:{type:\\\"text\\\",placeholder:y?\\\"输入一个问题...\\\":\\\"Ask a 
question...\\\",autofocus:!0,rows:1}}),M=(v=(t,e)=>{b.value=t,B.value=null!=e?e:\\\"\\\"},$tw.utils.domMaker(\\\"button\\\",{\\\"class\\\":\\\"chat-button\\\",innerHTML:this.chatButtonText,attributes:{title:y?\\\"进行对话\\\":\\\"Chat\\\"}}));var e=$tw.utils.domMaker(\\\"button\\\",{\\\"class\\\":\\\"attachment-button\\\",innerHTML:this.attachmentButtonText,attributes:{title:y?\\\"附加条目\\\":\\\"Attach Tiddler\\\"}});const B=$tw.utils.domMaker(\\\"input\\\",{\\\"class\\\":\\\"attachment-input\\\",attributes:{type:\\\"text\\\",placeholder:y?\\\"填入条目标题或筛选器表达式\\\":\\\"Fill in Tiddler title or filter expression\\\",autofocus:!1,hidden:!0}});t.prepend($tw.utils.domMaker(\\\"div\\\",{\\\"class\\\":\\\"chat-box\\\",children:[e,b,M]}));t.prepend(B);let T=!1;const n=r=>{var t,e,i;if(!T){const l=b.value.trim();if(l){var n=B.hidden?\\\"\\\":$tw.wiki.filterTiddlers(B.value).map(t=>$tw.wiki.getTiddlerText(t)).join(\\\"\\\\n\\\\n\\\");b.value=\\\"\\\",T=!0,M.disabled=!0;const d=this.runner||\\\"llama.cpp\\\",u=String(Date.now());let s=\\\"\\\",a=0;const c=t=>{var e;const i={id:u,created:a,assistant:s,user:l,attachment:B.hidden?\\\"\\\":B.value},n=($([...k(),i]),t.remove(),renderConversation(i,y,this.editButtonText,this.deleteButtonText,this.copyButtonText,v,()=>{n.remove(),$(k().filter(({id:t})=>t!==i.id))}));x.prepend(n),this.setVariable(\\\"output-text\\\",s);var o={event:r,type:\\\"tidgi-chat\\\",name:\\\"completion-finish\\\",paramObject:{...i,created:new Date(1e3*i.created)},widget:this,historyTiddler:this.historyTiddler};null!=(e=this.invokeAction)&&e.call(this,this,o),this.dispatchEvent(o),$tw.hooks.invokeHook(\\\"tidgi-chat\\\",o),T=!1,M.disabled=!1};const{conversation:p,answerBox:h,printError:g,updateProgress:m}=renderChattingConversation({zh:y,user:l,attachment:n,onCancel:async t=>{await window.service.languageModel.abortLanguageModel(d,u),T=!1,M.disabled=!1,t.remove()},onEdit:v,conversations:x,editButtonText:this.editButtonText,deleteButtonText:this.deleteButtonText,copyButtonText:this.copyButtonText,cancelButtonText:this.cancelButtonText});x.prepend(p);n=$tw.wiki.renderText(\\\"text/plain-formatted\\\",\\\"text/vnd.tiddlywiki\\\",this.promptTemplate,{variables:{userInputText:l,attachment:n}});if(void 0!==(null==(t=null==(t=null==window?void 0:window.observables)?void 0:t.languageModel)?void 0:t.runLanguageModel$))try{const w=t=>{console.error(t),g(String(t)),T=!1,M.disabled=!1};let t;(t=\\\"llama.cpp\\\"===d?window.observables.languageModel.runLanguageModel$(d,{completionOptions:{...null==(e=this.runLanguageModelOptions)?void 0:e.completionOptions,prompt:n},sessionOptions:{systemPrompt:this.systemPrompt},templates:{template:this.systemTemplate},loadConfig:null==(i=this.runLanguageModelOptions)?void 0:i.loadConfig,id:u}):t).subscribe({next:t=>{var e;try{if(t.id!==u)return;\\\"type\\\"in t&&\\\"progress\\\"===t.type?m(t.percentage):\\\"token\\\"in t&&(s=\\\"\\\".concat(s).concat(null!=(e=t.token)?e:\\\"\\\"),h.textContent=\\\"\\\".concat(s,\\\"█\\\"),a=Date.now())}catch(i){w(i)}x.scrollTop=x.scrollHeight},error:w,complete:()=>{c(p)}})}catch(o){console.error(o),g(String(o))}}}};M.addEventListener(\\\"click\\\",n),e.addEventListener(\\\"click\\\",()=>{B.hidden=!B.hidden}),b.addEventListener(\\\"keydown\\\",t=>{t.isComposing||\\\"Enter\\\"!==t.code||t.shiftKey||(t.preventDefault(),n(t))})}for(const o of k()){const s=renderConversation(o,y,this.editButtonText,this.deleteButtonText,this.copyButtonText,v,this.readonly?void 
0:()=>{s.remove(),$(k().filter(({id:t})=>t!==o.id))});x.append(s)}}catch(i){console.error(i),t.textContent=String(i)}}};exports[\\\"tidgi-chat\\\"]=ChatGPTWidget;\"},\"$:/plugins/linonetwo/tidgi-language-model/tidgi-chat-widget.css\":{\"title\":\"$:/plugins/linonetwo/tidgi-language-model/tidgi-chat-widget.css\",\"tags\":[\"$:/tags/Stylesheet\"],\"type\":\"text/css\",\"Modern.TiddlyDev#Origin\":\"../../temp_stylePlugin:src/tidgi-language-model/style.less\",\"text\":\".tidgi-language-model-container{height:100%;width:100%;display:flex;padding:10px 0;flex-direction:column}.tidgi-language-model-container .conversations{width:100%;flex-grow:1;overflow-y:auto}.tidgi-language-model-container .chat-box{width:100%;display:flex;border:1.5px solid #888a;border-radius:5px;background:#8881}.tidgi-language-model-container .chat-input{flex-grow:1;font-size:16px;min-height:3.5em;padding:1em .5em;overflow:hidden;resize:vertical}.tidgi-language-model-container .chat-button{font-size:20px}.tidgi-language-model-container .chatgpt-conversation{display:flex;flex-direction:column}.tidgi-language-model-container .chatgpt-conversation-assistant{background-image:linear-gradient(0deg,#8883,#8883)}.tidgi-language-model-container .chatgpt-conversation-error .chatgpt-conversation-assistant{color:red}.tidgi-language-model-container .chatgpt-conversation-user{font-weight:750;padding-top:18px!important}.tidgi-language-model-container .chatgpt-conversation-message{padding:10px 20px;position:relative;min-height:3.5em}.tidgi-language-model-container .chatgpt-conversation-message .conversation-datetime{position:absolute;left:0;top:0;-webkit-user-select:none;-moz-user-select:none;user-select:none;opacity:.5;font-size:12px;font-weight:500px}.tidgi-language-model-container .chatgpt-conversation-message .edit-button{position:absolute;right:0;top:18px}.tidgi-language-model-container .chatgpt-conversation-message .cancel-button,.tidgi-language-model-container .chatgpt-conversation-message .copy-button{position:absolute;right:0;top:5px}.tidgi-language-model-container .chatgpt-conversation-message .delete-button{position:absolute;right:40px;top:18px}.default-system-prompt-editor{width:100%}\"}}}", - "title": "$:/plugins/linonetwo/tidgi-language-model", - "type": "application/json", - "version": "0.3.1", - "Modern.TiddlyDev#SHA256-Hashed": "af5f9a3e4dd3b445fd8e2173363e90bf7f720457107980dca47e9d699f877021" - } -] \ No newline at end of file
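
For reference, the readme tiddlers in the deleted plugin describe a global `tidgi-chat` hook that fires on `completion-finish` with a payload matching the `ChatCompletionEvent` interface shown above. A minimal sketch of a TiddlyWiki startup module that consumes that hook is given below; the tiddler title and module name are illustrative assumptions, while `$tw.hooks.addHook`, the `tidgi-chat` hook name, and the `paramObject` fields come from the readme and type definition in the deleted file.

```javascript
/*\
title: $:/plugins/example/log-tidgi-chat.js  (illustrative title, not part of the plugin)
type: application/javascript
module-type: startup
\*/
(function () {
  'use strict';

  // Sketch only: the module name is hypothetical; the hook name `tidgi-chat`
  // and the event shape follow the ChatCompletionEvent definition in the
  // plugin readme above.
  exports.name = 'log-tidgi-chat';
  exports.platforms = ['browser'];
  exports.synchronous = true;

  exports.startup = function () {
    $tw.hooks.addHook('tidgi-chat', function (event) {
      if (event.name === 'completion-finish') {
        // paramObject carries the finished question/answer pair.
        console.log('Q:', event.paramObject.user);
        console.log('A:', event.paramObject.assistant);
        console.log('history tiddler:', event.historyTiddler);
      }
      // $tw.hooks.invokeHook chains return values, so pass the event through.
      return event;
    });
  };
})();
```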