diff --git a/bun.lockb b/bun.lockb index 7221783..1212713 100644 Binary files a/bun.lockb and b/bun.lockb differ diff --git a/package.json b/package.json index e3f67ca..5d64161 100644 --- a/package.json +++ b/package.json @@ -44,10 +44,12 @@ "react": "18.2.0", "react-dom": "18.2.0", "react-i18next": "^14.1.0", + "react-icons": "^5.2.1", "react-markdown": "8.0.0", "react-router-dom": "6.10.0", "react-syntax-highlighter": "^15.5.0", "react-toastify": "^10.0.4", + "rehype-katex": "6.0.3", "rehype-mathjax": "4.0.3", "remark-gfm": "3.0.1", "remark-math": "5.1.1", diff --git a/src/assets/fonts/Arimo.ttf b/src/assets/fonts/Arimo.ttf new file mode 100644 index 0000000..2fec902 Binary files /dev/null and b/src/assets/fonts/Arimo.ttf differ diff --git a/src/assets/inter.ttf b/src/assets/inter.ttf deleted file mode 100644 index 937b1e9..0000000 Binary files a/src/assets/inter.ttf and /dev/null differ diff --git a/src/assets/onest.ttf b/src/assets/onest.ttf deleted file mode 100644 index 6c77d20..0000000 Binary files a/src/assets/onest.ttf and /dev/null differ diff --git a/src/assets/tailwind.css b/src/assets/tailwind.css index e64b648..e6703c6 100644 --- a/src/assets/tailwind.css +++ b/src/assets/tailwind.css @@ -1,77 +1,67 @@ @font-face { - font-family: "Inter"; - src: url("inter.ttf") format("truetype"); + font-family: "Arimo"; + src: url("fonts/Arimo.ttf"); + font-display: swap; } -@font-face { - font-family: "Onest"; - src: url("onest.ttf") format("truetype"); -} - -.inter { - font-family: "Inter", sans-serif !important; -} - -.onest { - font-family: "Onest", sans-serif !important; +.arimo { + font-family: "Arimo", sans-serif; + font-weight: 500; + font-style: normal; } @tailwind base; @tailwind components; @tailwind utilities; + + .ant-select-selection-search-input { border: none !important; box-shadow: none !important; } -.gradient-border { - --borderWidth: 3px; - position: relative; - border-radius: var(--borderWidth); +/* Hide scrollbar for Chrome, Safari and Opera */ +.no-scrollbar::-webkit-scrollbar { + display: none; } -.gradient-border:after { - content: ""; - position: absolute; - top: calc(-1 * var(--borderWidth)); - left: calc(-1 * var(--borderWidth)); - height: calc(100% + var(--borderWidth) * 2); - width: calc(100% + var(--borderWidth) * 2); - background: linear-gradient( - 60deg, - #f79533, - #f37055, - #ef4e7b, - #a166ab, - #5073b8, - #1098ad, - #07b39b, - #6fba82 - ); - border-radius: calc(2 * var(--borderWidth)); - z-index: -1; - animation: animatedgradient 3s ease alternate infinite; - background-size: 300% 300%; + +/* Hide scrollbar for IE, Edge and Firefox */ +.no-scrollbar { + -ms-overflow-style: none; /* IE and Edge */ + scrollbar-width: none; /* Firefox */ } -@keyframes animatedgradient { +@keyframes gradient-border { 0% { - background-position: 0% 50%; + border-image-source: linear-gradient( + 45deg, + #f79533, + #f37055, + #ef4e7b, + #a166ab + ); } 50% { - background-position: 100% 50%; + border-image-source: linear-gradient(45deg, #ef4e7b, #a166ab); + } + 74% { + border-image-source: linear-gradient(60deg, #5073b8, #1098ad); } 100% { - background-position: 0% 50%; + border-image-source: linear-gradient( + 45deg, + #f79533, + #f37055, + #ef4e7b, + #a166ab + ); } } -/* Hide scrollbar for Chrome, Safari and Opera */ -.no-scrollbar::-webkit-scrollbar { - display: none; -} -/* Hide scrollbar for IE, Edge and Firefox */ -.no-scrollbar { - -ms-overflow-style: none; /* IE and Edge */ - scrollbar-width: none; /* Firefox */ +.animated-gradient-border { + border: 3px solid; + 
border-image-slice: 1; + animation: gradient-border 3s infinite; + border-radius: 10px; } diff --git a/src/components/Common/Markdown.tsx b/src/components/Common/Markdown.tsx index 1f85ace..c736c59 100644 --- a/src/components/Common/Markdown.tsx +++ b/src/components/Common/Markdown.tsx @@ -1,13 +1,12 @@ import remarkGfm from "remark-gfm" import remarkMath from "remark-math" import ReactMarkdown from "react-markdown" + import "property-information" import React from "react" import { CodeBlock } from "./CodeBlock" - export default function Markdown({ message }: { message: string }) { - return ( + ) : ( {children} diff --git a/src/components/Common/Playground/WebSearch.tsx b/src/components/Common/Playground/WebSearch.tsx index 5eb352e..6143f9d 100644 --- a/src/components/Common/Playground/WebSearch.tsx +++ b/src/components/Common/Playground/WebSearch.tsx @@ -4,7 +4,7 @@ import { useTranslation } from "react-i18next" export const WebSearch = () => { const {t} = useTranslation('common') return ( -
+
diff --git a/src/components/Option/Playground/PlaygroundForm.tsx b/src/components/Option/Playground/PlaygroundForm.tsx index 1cd98c3..c3e97ec 100644 --- a/src/components/Option/Playground/PlaygroundForm.tsx +++ b/src/components/Option/Playground/PlaygroundForm.tsx @@ -4,7 +4,7 @@ import React from "react" import useDynamicTextareaSize from "~/hooks/useDynamicTextareaSize" import { toBase64 } from "~/libs/to-base64" import { useMessageOption } from "~/hooks/useMessageOption" -import { Checkbox, Dropdown, Select, Switch, Tooltip } from "antd" +import { Checkbox, Dropdown, Switch, Tooltip } from "antd" import { Image } from "antd" import { useWebUI } from "~/store/webui" import { defaultEmbeddingModelForRag } from "~/services/ollama" @@ -13,6 +13,7 @@ import { getVariable } from "~/utils/select-varaible" import { useTranslation } from "react-i18next" import { KnowledgeSelect } from "../Knowledge/KnowledgeSelect" import { useSpeechRecognition } from "@/hooks/useSpeechRecognition" +import { PiGlobe } from "react-icons/pi" type Props = { dropedFile: File | undefined @@ -250,19 +251,7 @@ export const PlaygroundForm = ({ dropedFile }: Props) => { {!selectedKnowledge && (
- - - + setWebSearch(e)} diff --git a/src/components/Sidepanel/Chat/body.tsx b/src/components/Sidepanel/Chat/body.tsx index 46daeb5..d9a2d8e 100644 --- a/src/components/Sidepanel/Chat/body.tsx +++ b/src/components/Sidepanel/Chat/body.tsx @@ -3,11 +3,19 @@ import { PlaygroundMessage } from "~/components/Common/Playground/Message" import { useMessage } from "~/hooks/useMessage" import { EmptySidePanel } from "../Chat/empty" import { useWebUI } from "@/store/webui" +import { MessageSourcePopup } from "@/components/Common/Playground/MessageSourcePopup" export const SidePanelBody = () => { - const { messages, streaming, regenerateLastMessage, editMessage } = - useMessage() + const { + messages, + streaming, + regenerateLastMessage, + editMessage, + isSearchingInternet + } = useMessage() const divRef = React.useRef(null) + const [isSourceOpen, setIsSourceOpen] = React.useState(false) + const [source, setSource] = React.useState(null) const { ttsEnabled } = useWebUI() React.useEffect(() => { if (divRef.current) { @@ -27,19 +35,26 @@ export const SidePanelBody = () => { currentMessageIndex={index} totalMessages={messages.length} onRengerate={regenerateLastMessage} + isProcessing={streaming} + isSearchingInternet={isSearchingInternet} + sources={message.sources} onEditFormSubmit={(value) => { editMessage(index, value, !message.isBot) }} - isProcessing={streaming} + onSourceClick={(data) => { + setSource(data) + setIsSourceOpen(true) + }} isTTSEnabled={ttsEnabled} /> ))} - {import.meta.env.BROWSER === "chrome" ? ( -
- ) : ( -
- )} +
+
) } diff --git a/src/components/Sidepanel/Chat/form.tsx b/src/components/Sidepanel/Chat/form.tsx index eaaa902..92fd1cc 100644 --- a/src/components/Sidepanel/Chat/form.tsx +++ b/src/components/Sidepanel/Chat/form.tsx @@ -4,13 +4,14 @@ import React from "react" import useDynamicTextareaSize from "~/hooks/useDynamicTextareaSize" import { useMessage } from "~/hooks/useMessage" import { toBase64 } from "~/libs/to-base64" -import { Checkbox, Dropdown, Image, Tooltip } from "antd" +import { Checkbox, Dropdown, Image, Switch, Tooltip } from "antd" import { useWebUI } from "~/store/webui" import { defaultEmbeddingModelForRag } from "~/services/ollama" import { ImageIcon, MicIcon, StopCircleIcon, X } from "lucide-react" import { useTranslation } from "react-i18next" import { ModelSelect } from "@/components/Common/ModelSelect" import { useSpeechRecognition } from "@/hooks/useSpeechRecognition" +import { PiGlobeX, PiGlobe } from "react-icons/pi" type Props = { dropedFile: File | undefined @@ -88,6 +89,13 @@ export const SidepanelForm = ({ dropedFile }: Props) => { return } } + if (webSearch) { + const defaultEM = await defaultEmbeddingModelForRag() + if (!defaultEM) { + form.setFieldError("message", t("formError.noEmbeddingModel")) + return + } + } form.reset() textAreaFocus() await sendMessage({ @@ -111,7 +119,9 @@ export const SidepanelForm = ({ dropedFile }: Props) => { speechToTextLanguage, stopStreamingRequest, streaming, - setChatMode + setChatMode, + webSearch, + setWebSearch } = useMessage() React.useEffect(() => { @@ -137,6 +147,30 @@ export const SidepanelForm = ({ dropedFile }: Props) => { } }) + React.useEffect(() => { + const handleDrop = (e: DragEvent) => { + e.preventDefault() + if (e.dataTransfer?.items) { + for (let i = 0; i < e.dataTransfer.items.length; i++) { + if (e.dataTransfer.items[i].type === "text/plain") { + e.dataTransfer.items[i].getAsString((text) => { + form.setFieldValue("message", text) + }) + } + } + } + } + const handleDragOver = (e: DragEvent) => { + e.preventDefault() + } + textareaRef.current?.addEventListener("drop", handleDrop) + textareaRef.current?.addEventListener("dragover", handleDragOver) + return () => { + textareaRef.current?.removeEventListener("drop", handleDrop) + textareaRef.current?.removeEventListener("dragover", handleDragOver) + } + }, []) + return (
{ return } } + if (webSearch) { + const defaultEM = await defaultEmbeddingModelForRag() + if (!defaultEM) { + form.setFieldError("message", t("formError.noEmbeddingModel")) + return + } + } await stopListening() form.reset() textAreaFocus() @@ -210,6 +251,20 @@ export const SidepanelForm = ({ dropedFile }: Props) => { {...form.getInputProps("message")} />
+ + + {browserSupportsSpeechRecognition && ( diff --git a/src/entries/options/App.tsx b/src/entries/options/App.tsx index d77b142..620b183 100644 --- a/src/entries/options/App.tsx +++ b/src/entries/options/App.tsx @@ -19,7 +19,7 @@ function IndexOption() { algorithm: mode === "dark" ? theme.darkAlgorithm : theme.defaultAlgorithm, token: { - fontFamily: i18n.language === "ru" ? "Onest" : "Inter" + fontFamily: "Arimo" } }} renderEmpty={() => ( diff --git a/src/entries/options/index.html b/src/entries/options/index.html index 2abb927..05423c3 100644 --- a/src/entries/options/index.html +++ b/src/entries/options/index.html @@ -2,7 +2,7 @@ Page Assist - A Web UI for Local AI Models - + diff --git a/src/entries/sidepanel/App.tsx b/src/entries/sidepanel/App.tsx index 7d1d3aa..0ee2e9d 100644 --- a/src/entries/sidepanel/App.tsx +++ b/src/entries/sidepanel/App.tsx @@ -20,7 +20,7 @@ function IndexSidepanel() { algorithm: mode === "dark" ? theme.darkAlgorithm : theme.defaultAlgorithm, token: { - fontFamily: i18n.language === "ru" ? "Onest" : "Inter" + fontFamily: "Arimo" } }} renderEmpty={() => ( diff --git a/src/entries/sidepanel/index.html b/src/entries/sidepanel/index.html index c7f91b9..1e7c405 100644 --- a/src/entries/sidepanel/index.html +++ b/src/entries/sidepanel/index.html @@ -2,7 +2,7 @@ Page Assist - A Web UI for Local AI Models - + diff --git a/src/hooks/useMessage.tsx b/src/hooks/useMessage.tsx index 8260795..0b20edf 100644 --- a/src/hooks/useMessage.tsx +++ b/src/hooks/useMessage.tsx @@ -2,11 +2,12 @@ import React from "react" import { cleanUrl } from "~/libs/clean-url" import { defaultEmbeddingModelForRag, + geWebSearchFollowUpPrompt, getOllamaURL, promptForRag, systemPromptForNonRag } from "~/services/ollama" -import { type Message } from "~/store/option" +import { useStoreMessageOption, type Message } from "~/store/option" import { useStoreMessage } from "~/store" import { HumanMessage, SystemMessage } from "@langchain/core/messages" import { getDataFromCurrentTab } from "~/libs/get-html" @@ -29,6 +30,7 @@ import { useStorage } from "@plasmohq/storage/hook" import { useStoreChatModelSettings } from "@/store/model" import { ChatOllama } from "@/models/ChatOllama" import { getAllDefaultModelSettings } from "@/services/model-settings" +import { getSystemPromptForWeb } from "@/web/web" export const useMessage = () => { const { @@ -42,6 +44,9 @@ export const useMessage = () => { const { t } = useTranslation("option") const [selectedModel, setSelectedModel] = useStorage("selectedModel") const currentChatModelSettings = useStoreChatModelSettings() + const { setIsSearchingInternet, webSearch, setWebSearch, isSearchingInternet } = + useStoreMessageOption() + const { history, setHistory, @@ -571,6 +576,249 @@ export const useMessage = () => { } } + const searchChatMode = async ( + message: string, + image: string, + isRegenerate: boolean, + messages: Message[], + history: ChatHistory, + signal: AbortSignal + ) => { + const url = await getOllamaURL() + setStreaming(true) + const userDefaultModelSettings = await getAllDefaultModelSettings() + if (image.length > 0) { + image = `data:image/jpeg;base64,${image.split(",")[1]}` + } + + const ollama = new ChatOllama({ + model: selectedModel!, + baseUrl: cleanUrl(url), + keepAlive: + currentChatModelSettings?.keepAlive ?? + userDefaultModelSettings?.keepAlive, + temperature: + currentChatModelSettings?.temperature ?? + userDefaultModelSettings?.temperature, + topK: currentChatModelSettings?.topK ?? 
userDefaultModelSettings?.topK, + topP: currentChatModelSettings?.topP ?? userDefaultModelSettings?.topP, + numCtx: + currentChatModelSettings?.numCtx ?? userDefaultModelSettings?.numCtx, + seed: currentChatModelSettings?.seed + }) + + let newMessage: Message[] = [] + let generateMessageId = generateID() + + if (!isRegenerate) { + newMessage = [ + ...messages, + { + isBot: false, + name: "You", + message, + sources: [], + images: [image] + }, + { + isBot: true, + name: selectedModel, + message: "▋", + sources: [], + id: generateMessageId + } + ] + } else { + newMessage = [ + ...messages, + { + isBot: true, + name: selectedModel, + message: "▋", + sources: [], + id: generateMessageId + } + ] + } + setMessages(newMessage) + let fullText = "" + let contentToSave = "" + + try { + setIsSearchingInternet(true) + + let query = message + + if (newMessage.length > 2) { + let questionPrompt = await geWebSearchFollowUpPrompt() + const lastTenMessages = newMessage.slice(-10) + lastTenMessages.pop() + const chat_history = lastTenMessages + .map((message) => { + return `${message.isBot ? "Assistant: " : "Human: "}${message.message}` + }) + .join("\n") + const promptForQuestion = questionPrompt + .replaceAll("{chat_history}", chat_history) + .replaceAll("{question}", message) + const questionOllama = new ChatOllama({ + model: selectedModel!, + baseUrl: cleanUrl(url), + keepAlive: + currentChatModelSettings?.keepAlive ?? + userDefaultModelSettings?.keepAlive, + temperature: + currentChatModelSettings?.temperature ?? + userDefaultModelSettings?.temperature, + topK: + currentChatModelSettings?.topK ?? userDefaultModelSettings?.topK, + topP: + currentChatModelSettings?.topP ?? userDefaultModelSettings?.topP, + numCtx: + currentChatModelSettings?.numCtx ?? + userDefaultModelSettings?.numCtx, + seed: currentChatModelSettings?.seed + }) + const response = await questionOllama.invoke(promptForQuestion) + query = response.content.toString() + } + + const { prompt, source } = await getSystemPromptForWeb(query) + setIsSearchingInternet(false) + + // message = message.trim().replaceAll("\n", " ") + + let humanMessage = new HumanMessage({ + content: [ + { + text: message, + type: "text" + } + ] + }) + if (image.length > 0) { + humanMessage = new HumanMessage({ + content: [ + { + text: message, + type: "text" + }, + { + image_url: image, + type: "image_url" + } + ] + }) + } + + const applicationChatHistory = generateHistory(history) + + if (prompt) { + applicationChatHistory.unshift( + new SystemMessage({ + content: [ + { + text: prompt, + type: "text" + } + ] + }) + ) + } + + const chunks = await ollama.stream( + [...applicationChatHistory, humanMessage], + { + signal: signal + } + ) + let count = 0 + for await (const chunk of chunks) { + contentToSave += chunk.content + fullText += chunk.content + if (count === 0) { + setIsProcessing(true) + } + setMessages((prev) => { + return prev.map((message) => { + if (message.id === generateMessageId) { + return { + ...message, + message: fullText + "▋" + } + } + return message + }) + }) + count++ + } + // update the message with the full text + setMessages((prev) => { + return prev.map((message) => { + if (message.id === generateMessageId) { + return { + ...message, + message: fullText, + sources: source + } + } + return message + }) + }) + + setHistory([ + ...history, + { + role: "user", + content: message, + image + }, + { + role: "assistant", + content: fullText + } + ]) + + await saveMessageOnSuccess({ + historyId, + setHistoryId, + isRegenerate, + selectedModel: 
selectedModel, + message, + image, + fullText, + source + }) + + setIsProcessing(false) + setStreaming(false) + } catch (e) { + const errorSave = await saveMessageOnError({ + e, + botMessage: fullText, + history, + historyId, + image, + selectedModel, + setHistory, + setHistoryId, + userMessage: message, + isRegenerating: isRegenerate + }) + + if (!errorSave) { + notification.error({ + message: t("error"), + description: e?.message || t("somethingWentWrong") + }) + } + setIsProcessing(false) + setStreaming(false) + } finally { + setAbortController(null) + } + } + const onSubmit = async ({ message, image, @@ -597,14 +845,25 @@ export const useMessage = () => { } if (chatMode === "normal") { - await normalChatMode( - message, - image, - isRegenerate, - chatHistory || messages, - memory || history, - signal - ) + if (webSearch) { + await searchChatMode( + message, + image, + isRegenerate || false, + messages, + memory || history, + signal + ) + } else { + await normalChatMode( + message, + image, + isRegenerate, + chatHistory || messages, + memory || history, + signal + ) + } } else { const newEmbeddingController = new AbortController() let embeddingSignal = newEmbeddingController.signal @@ -714,6 +973,9 @@ export const useMessage = () => { isEmbedding, speechToTextLanguage, setSpeechToTextLanguage, - regenerateLastMessage + regenerateLastMessage, + webSearch, + setWebSearch, + isSearchingInternet, } } diff --git a/src/routes/index.tsx b/src/routes/index.tsx index 86af217..2a4dd60 100644 --- a/src/routes/index.tsx +++ b/src/routes/index.tsx @@ -10,16 +10,13 @@ export const OptionRouting = () => { const { i18n } = useTranslation() return ( -
+
}> - {import.meta.env.BROWSER === "chrome" ? ( - - ) : ( - - )} + {import.meta.env.BROWSER === "chrome" ? ( + + ) : ( + + )}
) @@ -30,16 +27,13 @@ export const SidepanelRouting = () => { const { i18n } = useTranslation() return ( -
+
}> - {import.meta.env.BROWSER === "chrome" ? ( - - ) : ( - - )} + {import.meta.env.BROWSER === "chrome" ? ( + + ) : ( + + )}
) } diff --git a/src/services/ollama.ts b/src/services/ollama.ts index e1ce53b..be1da4f 100644 --- a/src/services/ollama.ts +++ b/src/services/ollama.ts @@ -13,11 +13,42 @@ const DEFAULT_RAG_QUESTION_PROMPT = const DEFAUTL_RAG_SYSTEM_PROMPT = `You are a helpful AI assistant. Use the following pieces of context to answer the question at the end. If you don't know the answer, just say you don't know. DO NOT try to make up an answer. If the question is not related to the context, politely respond that you are tuned to only answer questions that are related to the context. {context} Question: {question} Helpful answer:` -const DEFAULT_WEBSEARCH_PROMP = `You are a helpful assistant that can answer any questions. You can use the following search results in case you want to answer questions about anything in real-time. The current date and time are {current_date_time}. +const DEFAULT_WEBSEARCH_PROMP = `You are an AI model who is an expert at searching the web and answering users' queries. -Search results: +Generate a response that is informative and relevant to the user's query based on the provided search results. The current date and time are {current_date_time}. -{search_results}` +The \`search-results\` block provides knowledge from the web search results. You can use this information to generate a meaningful response. + + + {search_results} + +` + +const DEFAULT_WEBSEARCH_FOLLOWUP_PROMPT = `You will be given a follow-up question. You need to rephrase the follow-up question if needed so it is a standalone question that can be used by the AI model to search the internet. + +Example: + +Follow-up question: What are the symptoms of a heart attack? + +Rephrased question: Symptoms of a heart attack. + +Follow-up question: Where is the upcoming Olympics being held? + +Rephrased question: Location of the upcoming Olympics. + +Follow-up question: Taylor Swift's latest album? + +Rephrased question: Name of Taylor Swift's latest album. + + +Previous Conversation: + +{chat_history} + +Follow-up question: {question} + +Rephrased question: +` export const getOllamaURL = async () => { const ollamaURL = await storage.get("ollamaURL") @@ -289,7 +320,7 @@ export const setWebSearchPrompt = async (prompt: string) => { export const geWebSearchFollowUpPrompt = async () => { const prompt = await storage.get("webSearchFollowUpPrompt") if (!prompt || prompt.length === 0) { - return DEFAULT_RAG_QUESTION_PROMPT + return DEFAULT_WEBSEARCH_FOLLOWUP_PROMPT } return prompt } diff --git a/tailwind.config.js b/tailwind.config.js index f4b105c..a30235c 100644 --- a/tailwind.config.js +++ b/tailwind.config.js @@ -3,5 +3,5 @@ module.exports = { mode: "jit", darkMode: "class", content: ["./src/**/*.tsx"], - plugins: [require("@tailwindcss/forms"), require("@tailwindcss/typography"),] + plugins: [require("@tailwindcss/forms"), require("@tailwindcss/typography")] } diff --git a/wxt.config.ts b/wxt.config.ts index 0e89e7a..8d94b9f 100644 --- a/wxt.config.ts +++ b/wxt.config.ts @@ -48,7 +48,7 @@ export default defineConfig({ outDir: "build", manifest: { - version: "1.1.11", + version: "1.1.12", name: process.env.TARGET === "firefox" ? "Page Assist - A Web UI for Local AI Models"