From 2bcec1aabd86024b6a97248ac9974277f1b9ece8 Mon Sep 17 00:00:00 2001 From: Taichi Maeda Date: Wed, 17 Apr 2024 18:08:51 +0900 Subject: [PATCH 01/26] Move common type declarations to a new file --- src/api/cache.ts | 2 +- src/api/openai.ts | 31 ++++++------------------------- src/api/types.ts | 26 ++++++++++++++++++++++++++ src/chat/App.tsx | 2 +- src/chat/components/ChatItem.tsx | 2 +- src/main.ts | 3 ++- src/settings.ts | 5 ++--- 7 files changed, 39 insertions(+), 32 deletions(-) create mode 100644 src/api/types.ts diff --git a/src/api/cache.ts b/src/api/cache.ts index 163f2cb..4ed70d8 100644 --- a/src/api/cache.ts +++ b/src/api/cache.ts @@ -1,6 +1,6 @@ import { createHash } from 'crypto'; import Markpilot from 'src/main'; -import { APIClient, ChatMessage } from './openai'; +import { APIClient, ChatMessage } from './types'; export class MemoryCache implements APIClient { private store: Map = new Map(); diff --git a/src/api/openai.ts b/src/api/openai.ts index b1b5b44..8f32795 100644 --- a/src/api/openai.ts +++ b/src/api/openai.ts @@ -3,6 +3,12 @@ import { Notice } from 'obsidian'; import OpenAI from 'openai'; import Markpilot from 'src/main'; import { getThisMonthAsString, getTodayAsString } from 'src/utils'; +import { + APIClient, + ChatCompletionsModel, + ChatMessage, + CompletionsModel, +} from './types'; export const COMPLETIONS_MODELS = [ 'gpt-3.5-turbo-instruct', @@ -84,31 +90,6 @@ export const MODEL_OUTPUT_COSTS: Record< 'gpt-3.5-turbo-16k-0613': 1.5, }; -export type CompletionsModel = (typeof COMPLETIONS_MODELS)[number]; - -export type ChatCompletionsModel = (typeof CHAT_COMPLETIONS_MODELS)[number]; - -export type ChatRole = 'system' | 'assistant' | 'user'; - -export interface ChatMessage { - role: ChatRole; - content: string; -} - -export interface ChatHistory { - messages: ChatMessage[]; - response: string; -} - -export interface APIClient { - fetchChat(messages: ChatMessage[]): AsyncGenerator; - fetchCompletions( - language: string, - prefix: string, - 
suffix: string, - ): Promise; -} - export class OpenAIClient implements APIClient { constructor(private plugin: Markpilot) {} diff --git a/src/api/types.ts b/src/api/types.ts new file mode 100644 index 0000000..c8ef143 --- /dev/null +++ b/src/api/types.ts @@ -0,0 +1,26 @@ +import { CHAT_COMPLETIONS_MODELS, COMPLETIONS_MODELS } from './openai'; + +export type CompletionsModel = (typeof COMPLETIONS_MODELS)[number]; + +export type ChatCompletionsModel = (typeof CHAT_COMPLETIONS_MODELS)[number]; + +export type ChatRole = 'system' | 'assistant' | 'user'; + +export interface ChatMessage { + role: ChatRole; + content: string; +} + +export interface ChatHistory { + messages: ChatMessage[]; + response: string; +} + +export interface APIClient { + fetchChat(messages: ChatMessage[]): AsyncGenerator; + fetchCompletions( + language: string, + prefix: string, + suffix: string, + ): Promise; +} diff --git a/src/chat/App.tsx b/src/chat/App.tsx index a7a23fe..9e647f7 100644 --- a/src/chat/App.tsx +++ b/src/chat/App.tsx @@ -1,5 +1,5 @@ import { useEffect, useState } from 'react'; -import { ChatHistory, ChatRole } from 'src/api/openai'; +import { ChatHistory, ChatRole } from 'src/api/types'; import Markpilot from 'src/main'; import { ChatInput } from './components/ChatBox'; import { ChatItem } from './components/ChatItem'; diff --git a/src/chat/components/ChatItem.tsx b/src/chat/components/ChatItem.tsx index 760ce9f..9525f25 100644 --- a/src/chat/components/ChatItem.tsx +++ b/src/chat/components/ChatItem.tsx @@ -2,7 +2,7 @@ import { Bot, Copy, User } from 'lucide-react'; import ReactMarkdown from 'react-markdown'; import rehypeKatex from 'rehype-katex'; import remarkMath from 'remark-math'; -import { ChatMessage } from 'src/api/openai'; +import { ChatMessage } from 'src/api/types'; export function ChatItem({ message }: { message: ChatMessage }) { return ( diff --git a/src/main.ts b/src/main.ts index 443ef85..2bbd513 100644 --- a/src/main.ts +++ b/src/main.ts @@ -1,7 +1,8 @@ import { 
Extension } from '@codemirror/state'; import { addIcon, Notice, Plugin, setIcon } from 'obsidian'; import { MemoryCache } from './api/cache'; -import { APIClient, OpenAIClient } from './api/openai'; +import { OpenAIClient } from './api/openai'; +import { APIClient } from './api/types'; import { CHAT_VIEW_TYPE, ChatView } from './chat/view'; import { inlineCompletionsExtension } from './editor/extension'; import botOffIcon from './icons/bot-off.svg'; diff --git a/src/settings.ts b/src/settings.ts index 02b4dfb..8ddbc9d 100644 --- a/src/settings.ts +++ b/src/settings.ts @@ -1,12 +1,11 @@ import Chart from 'chart.js/auto'; import { App, Notice, PluginSettingTab, Setting } from 'obsidian'; +import { CHAT_COMPLETIONS_MODELS, COMPLETIONS_MODELS } from './api/openai'; import { - CHAT_COMPLETIONS_MODELS, ChatCompletionsModel, ChatHistory, - COMPLETIONS_MODELS, CompletionsModel, -} from './api/openai'; +} from './api/types'; import Markpilot from './main'; import { getDaysInCurrentMonth } from './utils'; From 1d50a28427e1293bf2dba05dbeb6db7c6548d6e3 Mon Sep 17 00:00:00 2001 From: Taichi Maeda Date: Thu, 18 Apr 2024 16:50:30 +0900 Subject: [PATCH 02/26] Rewrite API logic to support other providers --- src/api/cache.ts | 5 +- src/api/client.ts | 151 +++++++++++++++++++++++ src/api/openai.ts | 212 -------------------------------- src/api/provider.ts | 46 +++++++ src/api/types.ts | 15 --- src/api/usage.ts | 157 ++++++++++++++++++++++++ src/main.ts | 44 ++++--- src/settings.ts | 289 +++++++++++++++++++++++++++++++------------- src/utils.ts | 9 ++ 9 files changed, 596 insertions(+), 332 deletions(-) create mode 100644 src/api/client.ts delete mode 100644 src/api/openai.ts create mode 100644 src/api/provider.ts create mode 100644 src/api/usage.ts diff --git a/src/api/cache.ts b/src/api/cache.ts index 4ed70d8..e6b35d7 100644 --- a/src/api/cache.ts +++ b/src/api/cache.ts @@ -1,8 +1,9 @@ import { createHash } from 'crypto'; import Markpilot from 'src/main'; -import { APIClient, 
ChatMessage } from './types'; +import { APIClient } from './client'; +import { ChatMessage } from './types'; -export class MemoryCache implements APIClient { +export class MemoryCacheProxy implements APIClient { private store: Map = new Map(); constructor( diff --git a/src/api/client.ts b/src/api/client.ts new file mode 100644 index 0000000..6c97ea8 --- /dev/null +++ b/src/api/client.ts @@ -0,0 +1,151 @@ +import { getEncoding } from 'js-tiktoken'; +import { Notice } from 'obsidian'; +import OpenAI, { ClientOptions } from 'openai'; +import Markpilot from 'src/main'; +import { Provider } from './provider'; +import { ChatMessage } from './types'; +import { UsageTracker } from './usage'; + +export interface APIClient { + fetchChat(messages: ChatMessage[]): AsyncGenerator; + fetchCompletions( + language: string, + prefix: string, + suffix: string, + ): Promise; +} + +// TODO: +// Allow use of APIs that are not compatible with the OpenAI API standard. +export class BaseAPIClient implements APIClient { + constructor( + private tracker: UsageTracker, + private plugin: Markpilot, + ) {} + + getInstance(provider: Provider) { + const { settings } = this.plugin; + + const options: ClientOptions = { + apiKey: undefined, + baseURL: undefined, + dangerouslyAllowBrowser: true, + }; + switch (provider) { + case 'openai': + options.apiKey = settings.providers.openai.apiKey; + if (options.apiKey === undefined || !options.apiKey?.startsWith('sk')) { + new Notice('OpenAI API key is not set or invalid.'); + return; + } + break; + case 'openrouter': + options.baseURL = 'https://openrouter.ai/api/v1'; + options.apiKey = settings.providers.openrouter.apiKey; + if (options.apiKey === undefined || !options.apiKey?.startsWith('sk')) { + new Notice('OpenRouter API key is not set or invalid.'); + return; + } + break; + case 'ollama': + options.baseURL = 'http://localhost:11434/v1/'; + options.baseURL = settings.providers.ollama.apiUrl; + if (options.apiKey === undefined) { + new Notice('Ollama 
API URL is not set or invalid.'); + return; + } + break; + default: + throw new Error('Invalid API provider.'); + } + + return new OpenAI(options); + } + + async *fetchChat(messages: ChatMessage[]) { + const { settings } = this.plugin; + + const instance = this.getInstance(settings.chat.provider); + if (instance === undefined) { + return; + } + + try { + const stream = await instance.chat.completions.create({ + messages, + model: settings.chat.model, + max_tokens: settings.chat.maxTokens, + temperature: settings.chat.temperature, + top_p: 1, + n: 1, + stream: true, + }); + + const contents = []; + for await (const chunk of stream) { + const content = chunk.choices[0].delta.content ?? ''; + contents.push(content); + yield content; + } + + // Update usage cost estimates. + const enc = getEncoding('gpt2'); // Assume GPT-2 encoding + const inputMessage = messages + .map((message) => message.content) + .join('\n'); + const outputMessage = contents.join(''); + const inputTokens = enc.encode(inputMessage).length; + const outputTokens = enc.encode(outputMessage).length; + await this.tracker.add( + settings.chat.provider, + settings.chat.model, + inputTokens, + outputTokens, + ); + } catch (error) { + console.error(error); + new Notice( + 'Failed to fetch chat completions. Make sure your API key or API URL is correct.', + ); + } + } + + async fetchCompletions(language: string, prefix: string, suffix: string) { + const { settings } = this.plugin; + + const instance = this.getInstance(settings.chat.provider); + if (instance === undefined) { + return; + } + + try { + const completions = await instance.completions.create({ + prompt: `Continue the following code written in ${language} language:\n\n${prefix}`, + suffix, + model: settings.completions.model, + max_tokens: settings.completions.maxTokens, + temperature: settings.completions.temperature, + top_p: 1, + n: 1, + stop: ['\n\n\n'], + }); + + // Update usage cost estimates. 
+ const inputTokens = completions.usage?.prompt_tokens ?? 0; + const outputTokens = completions.usage?.completion_tokens ?? 0; + await this.tracker.add( + settings.completions.provider, + settings.completions.model, + inputTokens, + outputTokens, + ); + + return completions.choices[0].text; + } catch (error) { + console.error(error); + new Notice( + 'Failed to fetch completions. Make sure your API key or API URL is correct.', + ); + } + } +} diff --git a/src/api/openai.ts b/src/api/openai.ts deleted file mode 100644 index 8f32795..0000000 --- a/src/api/openai.ts +++ /dev/null @@ -1,212 +0,0 @@ -import { getEncoding } from 'js-tiktoken'; -import { Notice } from 'obsidian'; -import OpenAI from 'openai'; -import Markpilot from 'src/main'; -import { getThisMonthAsString, getTodayAsString } from 'src/utils'; -import { - APIClient, - ChatCompletionsModel, - ChatMessage, - CompletionsModel, -} from './types'; - -export const COMPLETIONS_MODELS = [ - 'gpt-3.5-turbo-instruct', - 'davinci-002', - 'babbage-002', -] as const; - -export const CHAT_COMPLETIONS_MODELS = [ - 'gpt-4-0125-preview', - 'gpt-4-turbo-preview', - 'gpt-4-1106-preview', - 'gpt-4-vision-preview', - 'gpt-4', - 'gpt-4-0314', - 'gpt-4-0613', - 'gpt-4-32k', - 'gpt-4-32k-0314', - 'gpt-4-32k-0613', - 'gpt-3.5-turbo', - 'gpt-3.5-turbo-16k', - 'gpt-3.5-turbo-0301', - 'gpt-3.5-turbo-0613', - 'gpt-3.5-turbo-1106', - 'gpt-3.5-turbo-0125', - 'gpt-3.5-turbo-16k-0613', -] as const; - -export const MODEL_INPUT_COSTS: Record< - | (typeof COMPLETIONS_MODELS)[number] - | (typeof CHAT_COMPLETIONS_MODELS)[number], - number -> = { - 'gpt-3.5-turbo-instruct': 1.5, - 'davinci-002': 12.0, - 'babbage-002': 1.6, - 'gpt-4-0125-preview': 10.0, - 'gpt-4-turbo-preview': 10.0, - 'gpt-4-1106-preview': 10.0, - 'gpt-4-vision-preview': 10.0, - 'gpt-4': 30.0, - 'gpt-4-0314': 30.0, - 'gpt-4-0613': 30.0, - 'gpt-4-32k': 60.0, - 'gpt-4-32k-0314': 60.0, - 'gpt-4-32k-0613': 60.0, - 'gpt-3.5-turbo': 0.5, - 'gpt-3.5-turbo-16k': 0.5, - 
'gpt-3.5-turbo-0301': 0.5, - 'gpt-3.5-turbo-0613': 0.5, - 'gpt-3.5-turbo-1106': 0.5, - 'gpt-3.5-turbo-0125': 0.5, - 'gpt-3.5-turbo-16k-0613': 0.5, -} as const; - -export const MODEL_OUTPUT_COSTS: Record< - | (typeof COMPLETIONS_MODELS)[number] - | (typeof CHAT_COMPLETIONS_MODELS)[number], - number -> = { - 'gpt-3.5-turbo-instruct': 2.0, - 'davinci-002': 12.0, - 'babbage-002': 1.6, - 'gpt-4-0125-preview': 30, - 'gpt-4-turbo-preview': 30, - 'gpt-4-1106-preview': 30, - 'gpt-4-vision-preview': 30, - 'gpt-4': 60, - 'gpt-4-0314': 60, - 'gpt-4-0613': 60, - 'gpt-4-32k': 120, - 'gpt-4-32k-0314': 120, - 'gpt-4-32k-0613': 120, - 'gpt-3.5-turbo': 1.5, - 'gpt-3.5-turbo-16k': 1.5, - 'gpt-3.5-turbo-0301': 1.5, - 'gpt-3.5-turbo-0613': 1.5, - 'gpt-3.5-turbo-1106': 1.5, - 'gpt-3.5-turbo-0125': 1.5, - 'gpt-3.5-turbo-16k-0613': 1.5, -}; - -export class OpenAIClient implements APIClient { - constructor(private plugin: Markpilot) {} - - get openai() { - const apiKey = this.plugin.settings.apiKey ?? ''; - return new OpenAI({ apiKey, dangerouslyAllowBrowser: true }); - } - - async *fetchChat(messages: ChatMessage[]) { - if (this.openai === undefined) { - return; - } - - const { settings } = this.plugin; - - const thisMonth = getThisMonthAsString(); - if (settings.usage.monthlyCosts[thisMonth] >= settings.usage.monthlyLimit) { - new Notice( - 'Monthly usage limit reached. Please increase the limit to keep on using the features.', - ); - return; - } - - try { - const stream = await this.openai.chat.completions.create({ - messages, - model: settings.chat.model, - max_tokens: settings.chat.maxTokens, - temperature: settings.chat.temperature, - top_p: 1, - n: 1, - stream: true, - }); - - const contents = []; - for await (const chunk of stream) { - const content = chunk.choices[0].delta.content ?? ''; - contents.push(content); - yield content; - } - - // Update usage cost estimates. 
- const enc = getEncoding('gpt2'); // Assume GPT-2 encoding - const inputMessage = messages - .map((message) => message.content) - .join('\n'); - const outputMessage = contents.join(''); - const inputTokens = enc.encode(inputMessage).length; - const outputTokens = enc.encode(outputMessage).length; - await this.updateUsage(settings.chat.model, inputTokens, outputTokens); - } catch (error) { - console.error(error); - new Notice('Failed to fetch chat completions.'); - } - } - - async fetchCompletions(language: string, prefix: string, suffix: string) { - if (this.openai === undefined) { - return; - } - - const { settings } = this.plugin; - - const thisMonth = getThisMonthAsString(); - if (settings.usage.monthlyCosts[thisMonth] >= settings.usage.monthlyLimit) { - new Notice( - 'Monthly usage limit reached. Please increase the limit to keep on using the features.', - ); - return; - } - - try { - const completions = await this.openai.completions.create({ - prompt: `Continue the following code written in ${language} language:\n\n${prefix}`, - suffix, - model: settings.completions.model, - max_tokens: settings.completions.maxTokens, - temperature: settings.completions.temperature, - top_p: 1, - n: 1, - stop: ['\n\n\n'], - }); - - // Update usage cost estimates. - const inputTokens = completions.usage?.prompt_tokens ?? 0; - const outputTokens = completions.usage?.completion_tokens ?? 
0; - await this.updateUsage( - settings.completions.model, - inputTokens, - outputTokens, - ); - - return completions.choices[0].text; - } catch (error) { - console.error(error); - new Notice('Failed to fetch completions.'); - } - } - - async updateUsage( - model: CompletionsModel | ChatCompletionsModel, - inputTokens: number, - outputTokens: number, - ) { - const { settings } = this.plugin; - const today = getTodayAsString(); - const thisMonth = getThisMonthAsString(); - if (settings.usage.dailyCosts[today] === undefined) { - settings.usage.dailyCosts[today] = 0; - } - const cost = - (inputTokens * MODEL_INPUT_COSTS[model] + - outputTokens * MODEL_OUTPUT_COSTS[model]) / - 1_000_000; - settings.usage.dailyCosts[today] += cost; - settings.usage.monthlyCosts[thisMonth] += cost; - - await this.plugin.saveSettings(); - } -} diff --git a/src/api/provider.ts b/src/api/provider.ts new file mode 100644 index 0000000..6927bef --- /dev/null +++ b/src/api/provider.ts @@ -0,0 +1,46 @@ +export type Provider = (typeof PROVIDERS)[number]; + +export type OpenAIModel = (typeof OPENAI_MODELS)[number]; + +export type OpenRouterModel = (typeof OPENROUTER_MODELS)[number]; + +export type OllamaModel = (typeof OLLAMA_MODELS)[number]; + +export type Model = OpenAIModel | OpenRouterModel | OllamaModel; + +export const PROVIDERS = ['openai', 'openrouter', 'ollama'] as const; + +export const OPENAI_MODELS = [ + 'gpt-3.5-turbo-instruct', + 'davinci-002', + 'babbage-002', + 'gpt-4-0125-preview', + 'gpt-4-turbo-preview', + 'gpt-4-1106-preview', + 'gpt-4-vision-preview', + 'gpt-4', + 'gpt-4-0314', + 'gpt-4-0613', + 'gpt-4-32k', + 'gpt-4-32k-0314', + 'gpt-4-32k-0613', + 'gpt-3.5-turbo', + 'gpt-3.5-turbo-16k', + 'gpt-3.5-turbo-0301', + 'gpt-3.5-turbo-0613', + 'gpt-3.5-turbo-1106', + 'gpt-3.5-turbo-0125', + 'gpt-3.5-turbo-16k-0613', +] as const; + +// TODO +export const OPENROUTER_MODELS = ['gpt-4']; + +// TODO +export const OLLAMA_MODELS = ['gpt-4']; + +export const MODELS = { + openai: 
OPENAI_MODELS, + openrouter: OPENROUTER_MODELS, + ollama: OLLAMA_MODELS, +}; diff --git a/src/api/types.ts b/src/api/types.ts index c8ef143..b7f4280 100644 --- a/src/api/types.ts +++ b/src/api/types.ts @@ -1,9 +1,3 @@ -import { CHAT_COMPLETIONS_MODELS, COMPLETIONS_MODELS } from './openai'; - -export type CompletionsModel = (typeof COMPLETIONS_MODELS)[number]; - -export type ChatCompletionsModel = (typeof CHAT_COMPLETIONS_MODELS)[number]; - export type ChatRole = 'system' | 'assistant' | 'user'; export interface ChatMessage { @@ -15,12 +9,3 @@ export interface ChatHistory { messages: ChatMessage[]; response: string; } - -export interface APIClient { - fetchChat(messages: ChatMessage[]): AsyncGenerator; - fetchCompletions( - language: string, - prefix: string, - suffix: string, - ): Promise; -} diff --git a/src/api/usage.ts b/src/api/usage.ts new file mode 100644 index 0000000..783f41d --- /dev/null +++ b/src/api/usage.ts @@ -0,0 +1,157 @@ +import { Notice } from 'obsidian'; +import Markpilot from 'src/main'; +import { getThisMonthAsString, getTodayAsString } from 'src/utils'; +import { APIClient } from './client'; +import { + Model, + OllamaModel, + OpenAIModel, + OpenRouterModel, + Provider, +} from './provider'; +import { ChatMessage } from './types'; + +const OPENAI_MODEL_INPUT_COSTS: Record = { + 'gpt-3.5-turbo-instruct': 1.5, + 'davinci-002': 12.0, + 'babbage-002': 1.6, + 'gpt-4-0125-preview': 10.0, + 'gpt-4-turbo-preview': 10.0, + 'gpt-4-1106-preview': 10.0, + 'gpt-4-vision-preview': 10.0, + 'gpt-4': 30.0, + 'gpt-4-0314': 30.0, + 'gpt-4-0613': 30.0, + 'gpt-4-32k': 60.0, + 'gpt-4-32k-0314': 60.0, + 'gpt-4-32k-0613': 60.0, + 'gpt-3.5-turbo': 0.5, + 'gpt-3.5-turbo-16k': 0.5, + 'gpt-3.5-turbo-0301': 0.5, + 'gpt-3.5-turbo-0613': 0.5, + 'gpt-3.5-turbo-1106': 0.5, + 'gpt-3.5-turbo-0125': 0.5, + 'gpt-3.5-turbo-16k-0613': 0.5, +} as const; + +const OPENAI_MODEL_OUTPUT_COSTS: Record = { + 'gpt-3.5-turbo-instruct': 2.0, + 'davinci-002': 12.0, + 'babbage-002': 1.6, + 
'gpt-4-0125-preview': 30, + 'gpt-4-turbo-preview': 30, + 'gpt-4-1106-preview': 30, + 'gpt-4-vision-preview': 30, + 'gpt-4': 60, + 'gpt-4-0314': 60, + 'gpt-4-0613': 60, + 'gpt-4-32k': 120, + 'gpt-4-32k-0314': 120, + 'gpt-4-32k-0613': 120, + 'gpt-3.5-turbo': 1.5, + 'gpt-3.5-turbo-16k': 1.5, + 'gpt-3.5-turbo-0301': 1.5, + 'gpt-3.5-turbo-0613': 1.5, + 'gpt-3.5-turbo-1106': 1.5, + 'gpt-3.5-turbo-0125': 1.5, + 'gpt-3.5-turbo-16k-0613': 1.5, +}; + +// TODO +const OPENROUTER_INPUT_COSTS: Record = { + 'gpt-4': 30, +}; + +// TODO +const OPENROUTER_OUTPUT_COSTS: Record = { + 'gpt-4': 60, +}; + +// TODO +const OLLAMA_INPUT_COSTS: Record = { + 'gpt-4': 0, +}; + +// TODO +const OLLAMA_OUTPUT_COSTS: Record = { + 'gpt-4': 0, +}; + +const INPUT_COSTS: Record> = { + openai: OPENAI_MODEL_INPUT_COSTS, + openrouter: OPENROUTER_INPUT_COSTS, + ollama: OLLAMA_INPUT_COSTS, +}; + +const OUTPUT_COSTS: Record> = { + openai: OPENAI_MODEL_OUTPUT_COSTS, + openrouter: OPENROUTER_OUTPUT_COSTS, + ollama: OLLAMA_OUTPUT_COSTS, +}; + +export class UsageTracker { + constructor(private plugin: Markpilot) {} + + async add( + provider: Provider, + model: Model | Model, + inputTokens: number, + outputTokens: number, + ) { + const { settings } = this.plugin; + + const today = getTodayAsString(); + const thisMonth = getThisMonthAsString(); + if (settings.usage.dailyCosts[today] === undefined) { + settings.usage.dailyCosts[today] = 0; + } + + const cost = + (inputTokens * INPUT_COSTS[provider][model] + + outputTokens * OUTPUT_COSTS[provider][model]) / + 1_000_000; + + settings.usage.dailyCosts[today] += cost; + settings.usage.monthlyCosts[thisMonth] += cost; + + await this.plugin.saveSettings(); + } +} + +export class UsageMonitorProxy implements APIClient { + constructor( + private client: APIClient, + private plugin: Markpilot, + ) {} + + hasReachedLimit() { + const { settings } = this.plugin; + + const thisMonth = getThisMonthAsString(); + return ( + settings.usage.monthlyCosts[thisMonth] >= 
settings.usage.monthlyLimit + ); + } + + async *fetchChat(messages: ChatMessage[]) { + if (this.hasReachedLimit()) { + new Notice( + 'Monthly usage limit reached. Please increase the limit to keep on using inline completions.', + ); + return; + } + + yield* this.client.fetchChat(messages); + } + + async fetchCompletions(language: string, prefix: string, suffix: string) { + if (this.hasReachedLimit()) { + new Notice( + 'Monthly usage limit reached. Please increase the limit to keep on using chat view.', + ); + return; + } + + return await this.client.fetchCompletions(language, prefix, suffix); + } +} diff --git a/src/main.ts b/src/main.ts index 2bbd513..334278e 100644 --- a/src/main.ts +++ b/src/main.ts @@ -1,8 +1,8 @@ import { Extension } from '@codemirror/state'; import { addIcon, Notice, Plugin, setIcon } from 'obsidian'; -import { MemoryCache } from './api/cache'; -import { OpenAIClient } from './api/openai'; -import { APIClient } from './api/types'; +import { MemoryCacheProxy } from './api/cache'; +import { APIClient, BaseAPIClient } from './api/client'; +import { UsageMonitorProxy, UsageTracker } from './api/usage'; import { CHAT_VIEW_TYPE, ChatView } from './chat/view'; import { inlineCompletionsExtension } from './editor/extension'; import botOffIcon from './icons/bot-off.svg'; @@ -13,8 +13,9 @@ import { } from './settings'; export default class Markpilot extends Plugin { - client: APIClient; settings: MarkpilotSettings; + + client: APIClient; view: ChatView; extensions: Extension[]; @@ -22,13 +23,18 @@ export default class Markpilot extends Plugin { await this.loadSettings(); this.addSettingTab(new MarkpilotSettingTab(this.app, this)); - // Initialize the OpenAI API client and - // register the editor extension and chat view. - const client = new OpenAIClient(this); - const cache = new MemoryCache(client, this); - this.client = cache; + // Initialize the OpenAI API client. 
+ const tracker = new UsageTracker(this); + const client = new BaseAPIClient(tracker, this); + const clientWithMonitor = new UsageMonitorProxy(client, this); + const clientWithCache = new MemoryCacheProxy(clientWithMonitor, this); + this.client = clientWithCache; + + // Register the editor extension. this.extensions = this.getEditorExtension(); this.registerEditorExtension(this.extensions); + + // Register the chat view. this.registerView(CHAT_VIEW_TYPE, (leaf) => { this.view = new ChatView(leaf, this); return this.view; @@ -37,16 +43,7 @@ export default class Markpilot extends Plugin { this.activateView(); } - // Notify the user if the OpenAI API key is not set. - if ( - (this.settings.completions.enabled || this.settings.chat.enabled) && - !this.settings.apiKey?.startsWith('sk') - ) { - new Notice( - 'OpenAI API key is not set. Please register it in the settings tab to use the features.', - ); - } - + // Register the ribbon actions and commands. this.registerRibbonActions(); this.registerCommands(); } @@ -87,6 +84,7 @@ export default class Markpilot extends Plugin { new Notice('Inline completions enabled.'); }, }); + this.addCommand({ id: 'disable-completions', name: 'Disable inline completions', @@ -96,6 +94,7 @@ export default class Markpilot extends Plugin { new Notice('Inline completions disabled.'); }, }); + this.addCommand({ id: 'toggle-completions', name: 'Toggle inline completions', @@ -107,6 +106,7 @@ export default class Markpilot extends Plugin { ); }, }); + this.addCommand({ id: 'enable-chat-view', name: 'Enable chat view', @@ -117,6 +117,7 @@ export default class Markpilot extends Plugin { new Notice('Chat view enabled.'); }, }); + this.addCommand({ id: 'disable-chat-view', name: 'Disable chat view', @@ -127,6 +128,7 @@ export default class Markpilot extends Plugin { new Notice('Chat view disabled.'); }, }); + this.addCommand({ id: 'toggle-chat-view', name: 'Toggle chat view', @@ -139,6 +141,7 @@ export default class Markpilot extends Plugin { ); }, 
}); + this.addCommand({ id: 'clear-chat-history', name: 'Clear chat history', @@ -152,6 +155,7 @@ export default class Markpilot extends Plugin { new Notice('Chat history cleared.'); }, }); + this.addCommand({ id: 'enable-cache', name: 'Enable cache', @@ -161,6 +165,7 @@ export default class Markpilot extends Plugin { new Notice('Cache enabled.'); }, }); + this.addCommand({ id: 'disable-cache', name: 'Disable cache', @@ -170,6 +175,7 @@ export default class Markpilot extends Plugin { new Notice('Cache disabled.'); }, }); + this.addCommand({ id: 'toggle-cache', name: 'Toggle cache', diff --git a/src/settings.ts b/src/settings.ts index 8ddbc9d..5a3b496 100644 --- a/src/settings.ts +++ b/src/settings.ts @@ -1,20 +1,28 @@ import Chart from 'chart.js/auto'; import { App, Notice, PluginSettingTab, Setting } from 'obsidian'; -import { CHAT_COMPLETIONS_MODELS, COMPLETIONS_MODELS } from './api/openai'; -import { - ChatCompletionsModel, - ChatHistory, - CompletionsModel, -} from './api/types'; +import { Model, MODELS, Provider, PROVIDERS } from './api/provider'; +import { ChatHistory } from './api/types'; import Markpilot from './main'; -import { getDaysInCurrentMonth } from './utils'; +import { getDaysInCurrentMonth, validateURL } from './utils'; export interface MarkpilotSettings { - apiKey: string | undefined; + version: string; + providers: { + openai: { + apiKey: string | undefined; + }; + openrouter: { + apiKey: string | undefined; + }; + ollama: { + apiUrl: string | undefined; + }; + }; completions: { enabled: boolean; - model: CompletionsModel; + provider: Provider; + model: Model; maxTokens: number; temperature: number; waitTime: number; @@ -24,7 +32,8 @@ export interface MarkpilotSettings { }; chat: { enabled: boolean; - model: ChatCompletionsModel; + provider: Provider; + model: Model; maxTokens: number; temperature: number; history: ChatHistory; @@ -40,12 +49,24 @@ export interface MarkpilotSettings { } export const DEFAULT_SETTINGS: MarkpilotSettings = { - 
apiKey: undefined, + version: '1.2.0', + providers: { + openai: { + apiKey: undefined, + }, + openrouter: { + apiKey: undefined, + }, + ollama: { + apiUrl: undefined, + }, + }, completions: { enabled: true, - model: 'gpt-3.5-turbo-instruct', + provider: 'openai', + model: 'gpt-3.5-turbo', maxTokens: 64, - temperature: 1, + temperature: 0, waitTime: 500, windowSize: 512, acceptKey: 'Tab', @@ -53,9 +74,10 @@ export const DEFAULT_SETTINGS: MarkpilotSettings = { }, chat: { enabled: true, - model: 'gpt-3.5-turbo-0125', + provider: 'openai', + model: 'gpt-3.5-turbo', maxTokens: 1024, - temperature: 0.1, + temperature: 0.5, history: { messages: [], response: '', @@ -87,19 +109,79 @@ export class MarkpilotSettingTab extends PluginSettingTab { const { plugin } = this; const { settings } = plugin; - new Setting(containerEl).setName('OpenAI').setHeading(); + /************************************************************/ + /* Providers */ + /************************************************************/ + + new Setting(containerEl).setName('Providers').setHeading(); new Setting(containerEl) - .setName('OpenAI API Key') - .setDesc('Enter your OpenAI API key to enable features.') + .setName('OpenAI API key') + .setDesc('Enter your OpenAI API key.') .addText((text) => - text.setValue(settings.apiKey ?? '').onChange(async (value) => { - settings.apiKey = value; - await plugin.saveSettings(); - new Notice('Suceessfully saved API key.'); + text + .setValue(settings.providers.openai.apiKey ?? '') + .onChange(async (value) => { + settings.providers.openai.apiKey = value; + await plugin.saveSettings(); + new Notice('Successfully saved OpenAI API key.'); + }), + ); + + new Setting(containerEl) + .setName('OpenRouter API key') + .setDesc('Enter your OpenRouter API key.') + .addText((text) => + text + .setValue(settings.providers.openrouter.apiKey ?? 
'') + .onChange(async (value) => { + settings.providers.openrouter.apiKey = value; + await plugin.saveSettings(); + new Notice('Successfully saved OpenRouter API key.'); + }), + ); + + new Setting(containerEl) + .setName('Ollama API URL') + .setDesc('Enter your Ollama API URL.') + .addText((text) => + text + .setValue(settings.providers.ollama.apiUrl ?? '') + .onChange(async (value) => { + if (validateURL(value)) { + new Notice('Invalid Ollama API URL.'); + return; + } + settings.providers.ollama.apiUrl = value; + await plugin.saveSettings(); + new Notice('Successfully saved Ollama API URL.'); + }), + ); + + new Setting(containerEl) + .setName('Test Ollama API connection') + .setDesc('Test the connection to the local Ollama API.') + .addButton((button) => + button.setButtonText('Test Connection').onClick(async () => { + const apiUrl = settings.providers.ollama.apiUrl; + if (apiUrl === undefined) { + new Notice('Ollama API URL is not set.'); + return; + } + // TODO + const response = await fetch(apiUrl); + if (response.ok) { + new Notice('Successfully connected to Ollama API.'); + } else { + new Notice('Failed to connect to Ollama API.'); + } }), ); + /************************************************************/ + /* Inline completions */ + /************************************************************/ + new Setting(containerEl).setName('Inline completions').setHeading(); new Setting(containerEl) @@ -114,49 +196,64 @@ export class MarkpilotSettingTab extends PluginSettingTab { this.display(); // Re-render settings tab }), ); + + new Setting(containerEl) + .setDisabled(!settings.chat.enabled) + .setName('Provider') + .setDesc('Select the provider for inline completions.') + .addDropdown((dropdown) => { + for (const option of PROVIDERS) { + dropdown.addOption(option, option); + } + dropdown.setValue(settings.completions.provider); + dropdown.onChange(async (value) => { + settings.completions.provider = value as Provider; + await plugin.saveSettings(); + 
this.display(); // Re-render settings tab + }); + }); + new Setting(containerEl) .setDisabled(!settings.completions.enabled) .setName('Model') .setDesc('Select the model for inline completions.') .addDropdown((dropdown) => { - for (const option of COMPLETIONS_MODELS) { + for (const option of MODELS[settings.completions.provider]) { dropdown.addOption(option, option); } dropdown.setValue(settings.completions.model); dropdown.onChange(async (value) => { - settings.completions.model = value as CompletionsModel; + settings.completions.model = value as Model; await plugin.saveSettings(); }); }); + new Setting(containerEl) .setDisabled(!settings.completions.enabled) .setName('Max tokens') .setDesc('Set the max tokens for inline completions.') - .addText((text) => - text - .setValue(settings.completions.maxTokens.toString()) + .addSlider((slider) => + slider + .setValue(settings.completions.maxTokens) + .setLimits(128, 8192, 128) + .setDynamicTooltip() .onChange(async (value) => { - const amount = parseInt(value); - if (isNaN(amount) || amount < 0) { - return; - } - settings.completions.maxTokens = amount; + settings.completions.maxTokens = value; await plugin.saveSettings(); }), ); + new Setting(containerEl) .setDisabled(!settings.completions.enabled) .setName('Temperature') .setDesc('Set the temperature for inline completions.') - .addText((text) => - text - .setValue(settings.completions.temperature.toString()) + .addSlider((slider) => + slider + .setValue(settings.completions.temperature) + .setLimits(0, 1, 0.01) + .setDynamicTooltip() .onChange(async (value) => { - const amount = parseFloat(value); - if (isNaN(amount) || amount < 0) { - return; - } - settings.completions.temperature = amount; + settings.completions.temperature = value; await plugin.saveSettings(); }), ); @@ -167,15 +264,13 @@ export class MarkpilotSettingTab extends PluginSettingTab { .setDesc( 'Time in milliseconds which it will wait for before fetching inline completions from the server.', ) - 
.addText((text) => - text - .setValue(settings.completions.waitTime.toString()) + .addSlider((slider) => + slider + .setValue(settings.completions.waitTime) + .setLimits(0, 1000, 100) + .setDynamicTooltip() .onChange(async (value) => { - const amount = parseFloat(value); - if (isNaN(amount) || amount < 0) { - return; - } - settings.completions.waitTime = amount; + settings.completions.waitTime = value; await plugin.saveSettings(); // Editor extension needs to be updated when settings are changed // because some fields e.g. `acceptKey` become stale and there is no way @@ -183,25 +278,25 @@ export class MarkpilotSettingTab extends PluginSettingTab { plugin.updateEditorExtension(); }), ); + new Setting(containerEl) .setDisabled(!settings.completions.enabled) .setName('Window size') .setDesc( 'Set the window size for inline completions. The window size the number of characters around the cursor used to obtain inline completions', ) - .addText((text) => - text - .setValue(settings.completions.windowSize.toString()) + .addSlider((slider) => + slider + .setValue(settings.completions.windowSize) + .setLimits(128, 8192, 128) + .setDynamicTooltip() .onChange(async (value) => { - const amount = parseInt(value); - if (isNaN(amount) || amount < 0) { - return; - } - settings.completions.windowSize = amount; + settings.completions.windowSize = value; await plugin.saveSettings(); plugin.updateEditorExtension(); }), ); + new Setting(containerEl) .setDisabled(!settings.completions.enabled) .setName('Accept key') @@ -217,6 +312,7 @@ export class MarkpilotSettingTab extends PluginSettingTab { plugin.updateEditorExtension(); }), ); + new Setting(containerEl) .setDisabled(!settings.completions.enabled) .setName('Reject key') @@ -233,6 +329,10 @@ export class MarkpilotSettingTab extends PluginSettingTab { }), ); + /************************************************************/ + /* Chat View */ + /************************************************************/ + new 
Setting(containerEl).setName('Chat view').setHeading(); new Setting(containerEl) @@ -250,53 +350,72 @@ export class MarkpilotSettingTab extends PluginSettingTab { this.display(); // Re-render settings tab }), ); + + new Setting(containerEl) + .setDisabled(!settings.chat.enabled) + .setName('Provider') + .setDesc('Select the provider for chat view.') + .addDropdown((dropdown) => { + for (const option of PROVIDERS) { + dropdown.addOption(option, option); + } + dropdown.setValue(settings.chat.provider); + dropdown.onChange(async (value) => { + settings.chat.provider = value as Provider; + await plugin.saveSettings(); + this.display(); // Re-render settings tab + }); + }); + new Setting(containerEl) .setDisabled(!settings.chat.enabled) .setName('Model') .setDesc('Select the model for GPT.') .addDropdown((dropdown) => { - for (const option of CHAT_COMPLETIONS_MODELS) { + for (const option of MODELS[settings.chat.provider]) { dropdown.addOption(option, option); } dropdown.setValue(settings.chat.model); dropdown.onChange(async (value) => { - settings.chat.model = value as ChatCompletionsModel; + settings.chat.model = value as Model; await plugin.saveSettings(); }); }); + new Setting(containerEl) .setDisabled(!settings.chat.enabled) .setName('Max tokens') .setDesc('Set the max tokens for chat view.') - .addText((text) => - text - .setValue(settings.chat.maxTokens.toString()) + .addSlider((slider) => + slider + .setValue(settings.chat.maxTokens) + .setLimits(128, 8192, 128) + .setDynamicTooltip() .onChange(async (value) => { - const amount = parseFloat(value); - if (isNaN(amount) || amount < 0) { - return; - } - settings.chat.maxTokens = amount; + settings.chat.maxTokens = value; await plugin.saveSettings(); }), ); + new Setting(containerEl) .setDisabled(!settings.chat.enabled) .setName('Temperature') .setDesc('Set the temperature for chat view.') - .addText((text) => - text - .setValue(settings.chat.temperature.toString()) + .addSlider((slider) => + slider + 
.setValue(settings.chat.temperature) + .setLimits(0, 1, 0.01) + .setDynamicTooltip() .onChange(async (value) => { - const amount = parseFloat(value); - if (isNaN(amount) || amount < 0) { - return; - } - settings.chat.temperature = amount; + settings.chat.temperature = value; await plugin.saveSettings(); }), ); + /************************************************************/ + /* Cache */ + /************************************************************/ + new Setting(containerEl).setName('Cache').setHeading(); new Setting(containerEl) @@ -312,6 +431,10 @@ export class MarkpilotSettingTab extends PluginSettingTab { }), ); + /************************************************************/ + /* Usage */ + /************************************************************/ + new Setting(containerEl).setName('Usage').setHeading(); new Setting(containerEl) @@ -319,15 +442,13 @@ export class MarkpilotSettingTab extends PluginSettingTab { .setDesc( 'Set the monthly limit for the usage costs (USD). When this limit is reached, the plugin will disable both inline completions and chat view', ) - .addText((text) => - text - .setValue(settings.usage.monthlyLimit.toString()) + .addSlider((slider) => + slider + .setValue(settings.usage.monthlyLimit) + .setLimits(0, 100, 1) + .setDynamicTooltip() .onChange(async (value) => { - const amount = parseFloat(value); - if (isNaN(amount) || amount < 0) { - return; - } - settings.usage.monthlyLimit = amount; + settings.usage.monthlyLimit = value; await plugin.saveSettings(); }), ); @@ -338,10 +459,10 @@ export class MarkpilotSettingTab extends PluginSettingTab { 'Below you can find the estimated usage of OpenAI API for inline completions and chat view this month', ); - this.renderGraph(); + this.showMonthlyCosts(); } - renderGraph() { + showMonthlyCosts() { const { plugin } = this; const { settings } = plugin; diff --git a/src/utils.ts b/src/utils.ts index 8fea7e2..74722de 100644 --- a/src/utils.ts +++ b/src/utils.ts @@ -2,6 +2,15 @@ export function 
uuid(): string { return crypto.randomUUID(); } +export function validateURL(url: string): boolean { + try { + new URL(url); + return true; + } catch { + return false; + } +} + // Debounce an async function by waiting for `wait` milliseconds before resolving. // If a new request is made before the timeout, the previous request is cancelled. /* eslint-disable @typescript-eslint/no-explicit-any */ From 420ac0ad2b6e6bb87490e2b19d372f8e839dd9f7 Mon Sep 17 00:00:00 2001 From: Taichi Maeda Date: Thu, 18 Apr 2024 17:33:44 +0900 Subject: [PATCH 03/26] Add feature to ignore files by name and tags --- package.json | 1 + src/api/client.ts | 2 +- src/api/provider.ts | 10 +-- src/api/usage.ts | 20 ++++-- src/main.ts | 23 ++++++- src/settings.ts | 144 +++++++++++++++++++++++++++++--------------- yarn.lock | 14 +++++ 7 files changed, 151 insertions(+), 63 deletions(-) diff --git a/package.json b/package.json index 5d6bca7..fecc3a4 100644 --- a/package.json +++ b/package.json @@ -37,6 +37,7 @@ "chart.js": "^4.4.2", "js-tiktoken": "^1.0.10", "lucide-react": "^0.363.0", + "minimatch": "^9.0.4", "openai": "^4.30.0", "react": "^18.2.0", "react-dom": "^18.2.0", diff --git a/src/api/client.ts b/src/api/client.ts index 6c97ea8..4cb42cd 100644 --- a/src/api/client.ts +++ b/src/api/client.ts @@ -113,7 +113,7 @@ export class BaseAPIClient implements APIClient { async fetchCompletions(language: string, prefix: string, suffix: string) { const { settings } = this.plugin; - const instance = this.getInstance(settings.chat.provider); + const instance = this.getInstance(settings.completions.provider); if (instance === undefined) { return; } diff --git a/src/api/provider.ts b/src/api/provider.ts index 6927bef..5f6af4e 100644 --- a/src/api/provider.ts +++ b/src/api/provider.ts @@ -33,11 +33,13 @@ export const OPENAI_MODELS = [ 'gpt-3.5-turbo-16k-0613', ] as const; -// TODO -export const OPENROUTER_MODELS = ['gpt-4']; +// TODO: +// This is a placeholder. 
+export const OPENROUTER_MODELS = ['gpt-4'] as const; -// TODO -export const OLLAMA_MODELS = ['gpt-4']; +// TODO: +// This is a placeholder. +export const OLLAMA_MODELS = ['gpt-4'] as const; export const MODELS = { openai: OPENAI_MODELS, diff --git a/src/api/usage.ts b/src/api/usage.ts index 783f41d..f6481fe 100644 --- a/src/api/usage.ts +++ b/src/api/usage.ts @@ -57,33 +57,41 @@ const OPENAI_MODEL_OUTPUT_COSTS: Record = { 'gpt-3.5-turbo-16k-0613': 1.5, }; -// TODO +// TODO: +// This is a placeholder. const OPENROUTER_INPUT_COSTS: Record = { 'gpt-4': 30, }; -// TODO +// TODO: +// This is a placeholder. const OPENROUTER_OUTPUT_COSTS: Record = { 'gpt-4': 60, }; -// TODO +// TODO: +// This is a placeholder. const OLLAMA_INPUT_COSTS: Record = { 'gpt-4': 0, }; -// TODO +// TODO: +// This is a placeholder. const OLLAMA_OUTPUT_COSTS: Record = { 'gpt-4': 0, }; -const INPUT_COSTS: Record> = { +// TODO: +// Replace `Record` to an appropriate type. +const INPUT_COSTS: Record> = { openai: OPENAI_MODEL_INPUT_COSTS, openrouter: OPENROUTER_INPUT_COSTS, ollama: OLLAMA_INPUT_COSTS, }; -const OUTPUT_COSTS: Record> = { +// TODO: +// Replace `Record` to an appropriate type. 
+const OUTPUT_COSTS: Record> = { openai: OPENAI_MODEL_OUTPUT_COSTS, openrouter: OPENROUTER_OUTPUT_COSTS, ollama: OLLAMA_OUTPUT_COSTS, diff --git a/src/main.ts b/src/main.ts index 334278e..c58efa5 100644 --- a/src/main.ts +++ b/src/main.ts @@ -1,5 +1,6 @@ import { Extension } from '@codemirror/state'; -import { addIcon, Notice, Plugin, setIcon } from 'obsidian'; +import { minimatch } from 'minimatch'; +import { addIcon, MarkdownView, Notice, Plugin, setIcon } from 'obsidian'; import { MemoryCacheProxy } from './api/cache'; import { APIClient, BaseAPIClient } from './api/client'; import { UsageMonitorProxy, UsageTracker } from './api/usage'; @@ -191,9 +192,25 @@ export default class Markpilot extends Plugin { getEditorExtension() { return inlineCompletionsExtension(async (...args) => { - if (this.settings.completions.enabled) { - return this.client.fetchCompletions(...args); + // TODO: + // Extract this logic to somewhere appropriate. + const view = this.app.workspace.getActiveViewOfType(MarkdownView); + const file = view?.file; + const content = view?.editor.getValue(); + const isIgnoredFile = this.settings.completions.ignoredFiles.some( + (pattern) => file?.path && minimatch(file?.path, pattern), + ); + const hasIgnoredTags = this.settings.completions.ignoredTags.some((tag) => + content?.includes(tag), + ); + if ( + isIgnoredFile || + hasIgnoredTags || + !this.settings.completions.enabled + ) { + return; } + return this.client.fetchCompletions(...args); }, this); } diff --git a/src/settings.ts b/src/settings.ts index 5a3b496..74de365 100644 --- a/src/settings.ts +++ b/src/settings.ts @@ -29,6 +29,8 @@ export interface MarkpilotSettings { windowSize: number; acceptKey: string; rejectKey: string; + ignoredFiles: string[]; + ignoredTags: string[]; }; chat: { enabled: boolean; @@ -64,13 +66,15 @@ export const DEFAULT_SETTINGS: MarkpilotSettings = { completions: { enabled: true, provider: 'openai', - model: 'gpt-3.5-turbo', + model: 'gpt-3.5-turbo-instruct', maxTokens: 
64, temperature: 0, waitTime: 500, windowSize: 512, acceptKey: 'Tab', rejectKey: 'Escape', + ignoredFiles: [], + ignoredTags: [], }, chat: { enabled: true, @@ -148,13 +152,8 @@ export class MarkpilotSettingTab extends PluginSettingTab { text .setValue(settings.providers.ollama.apiUrl ?? '') .onChange(async (value) => { - if (validateURL(value)) { - new Notice('Invalid Ollama API URL.'); - return; - } settings.providers.ollama.apiUrl = value; await plugin.saveSettings(); - new Notice('Successfully saved Ollama API URL.'); }), ); @@ -168,11 +167,16 @@ export class MarkpilotSettingTab extends PluginSettingTab { new Notice('Ollama API URL is not set.'); return; } - // TODO - const response = await fetch(apiUrl); - if (response.ok) { + if (!validateURL(apiUrl)) { + new Notice('Invalid Ollama API URL.'); + return; + } + // TODO: + // Properly implement logic for checking Ollama API status. + try { + await fetch(apiUrl); new Notice('Successfully connected to Ollama API.'); - } else { + } catch { new Notice('Failed to connect to Ollama API.'); } }), @@ -198,44 +202,48 @@ export class MarkpilotSettingTab extends PluginSettingTab { ); new Setting(containerEl) - .setDisabled(!settings.chat.enabled) .setName('Provider') .setDesc('Select the provider for inline completions.') .addDropdown((dropdown) => { for (const option of PROVIDERS) { dropdown.addOption(option, option); } - dropdown.setValue(settings.completions.provider); - dropdown.onChange(async (value) => { - settings.completions.provider = value as Provider; - await plugin.saveSettings(); - this.display(); // Re-render settings tab - }); + dropdown + .setDisabled(!settings.completions.enabled) + .setValue(settings.completions.provider) + .onChange(async (value) => { + settings.completions.provider = value as Provider; + await plugin.saveSettings(); + this.display(); // Re-render settings tab + }); }); new Setting(containerEl) - .setDisabled(!settings.completions.enabled) .setName('Model') .setDesc('Select the model for 
inline completions.') .addDropdown((dropdown) => { for (const option of MODELS[settings.completions.provider]) { dropdown.addOption(option, option); } - dropdown.setValue(settings.completions.model); - dropdown.onChange(async (value) => { - settings.completions.model = value as Model; - await plugin.saveSettings(); - }); + dropdown + .setDisabled(!settings.completions.enabled) + .setValue(settings.completions.model) + .onChange(async (value) => { + settings.completions.model = value as Model; + await plugin.saveSettings(); + }); }); new Setting(containerEl) - .setDisabled(!settings.completions.enabled) .setName('Max tokens') .setDesc('Set the max tokens for inline completions.') .addSlider((slider) => slider + .setDisabled(!settings.completions.enabled) .setValue(settings.completions.maxTokens) .setLimits(128, 8192, 128) + // TODO: + // Figure out how to add unit to the slider .setDynamicTooltip() .onChange(async (value) => { settings.completions.maxTokens = value; @@ -244,11 +252,11 @@ export class MarkpilotSettingTab extends PluginSettingTab { ); new Setting(containerEl) - .setDisabled(!settings.completions.enabled) .setName('Temperature') .setDesc('Set the temperature for inline completions.') .addSlider((slider) => slider + .setDisabled(!settings.completions.enabled) .setValue(settings.completions.temperature) .setLimits(0, 1, 0.01) .setDynamicTooltip() @@ -259,13 +267,13 @@ export class MarkpilotSettingTab extends PluginSettingTab { ); new Setting(containerEl) - .setDisabled(!settings.completions.enabled) .setName('Wait time') .setDesc( 'Time in milliseconds which it will wait for before fetching inline completions from the server.', ) .addSlider((slider) => slider + .setDisabled(!settings.completions.enabled) .setValue(settings.completions.waitTime) .setLimits(0, 1000, 100) .setDynamicTooltip() @@ -280,13 +288,13 @@ export class MarkpilotSettingTab extends PluginSettingTab { ); new Setting(containerEl) - .setDisabled(!settings.completions.enabled) 
.setName('Window size') .setDesc( 'Set the window size for inline completions. The window size the number of characters around the cursor used to obtain inline completions', ) .addSlider((slider) => slider + .setDisabled(!settings.completions.enabled) .setValue(settings.completions.windowSize) .setLimits(128, 8192, 128) .setDynamicTooltip() @@ -298,13 +306,13 @@ export class MarkpilotSettingTab extends PluginSettingTab { ); new Setting(containerEl) - .setDisabled(!settings.completions.enabled) .setName('Accept key') .setDesc( 'Set the key to accept inline completions. The list of available keys can be found at: https://developer.mozilla.org/en-US/docs/Web/API/UI_Events/Keyboard_event_key_values', ) .addText((text) => text + .setDisabled(!settings.completions.enabled) .setValue(settings.completions.acceptKey) .onChange(async (value) => { settings.completions.acceptKey = value; @@ -314,13 +322,13 @@ export class MarkpilotSettingTab extends PluginSettingTab { ); new Setting(containerEl) - .setDisabled(!settings.completions.enabled) .setName('Reject key') .setDesc( 'Set the key to reject inline completions. The list of available keys can be found at: https://developer.mozilla.org/en-US/docs/Web/API/UI_Events/Keyboard_event_key_values', ) .addText((text) => text + .setDisabled(!settings.completions.enabled) .setValue(settings.completions.rejectKey) .onChange(async (value) => { settings.completions.rejectKey = value; @@ -329,6 +337,39 @@ export class MarkpilotSettingTab extends PluginSettingTab { }), ); + new Setting(containerEl) + + .setName('Ignored files') + .setDesc( + 'Set the list of files to ignore inline completions. 
The completions will not be triggered in these files.', + ) + .addTextArea((text) => + text + .setDisabled(!settings.completions.enabled) + .setValue(settings.completions.ignoredFiles.join('\n')) + .setPlaceholder('myFile.md\nmyDirectory/**/*.md') + .onChange(async (value) => { + settings.completions.ignoredFiles = value.split('\n'); + await plugin.saveSettings(); + }), + ); + + new Setting(containerEl) + .setName('Ignored tags') + .setDesc( + 'Set the list of tags to ignore inline completions. The completions will not be triggered in these tags.', + ) + .addTextArea((text) => + text + .setDisabled(!settings.completions.enabled) + .setValue(settings.completions.ignoredTags.join('\n')) + .setPlaceholder('#myTag\n#myTag2') + .onChange(async (value) => { + settings.completions.ignoredTags = value.split('\n'); + await plugin.saveSettings(); + }), + ); + /************************************************************/ /* Chat View */ /************************************************************/ @@ -352,42 +393,44 @@ export class MarkpilotSettingTab extends PluginSettingTab { ); new Setting(containerEl) - .setDisabled(!settings.chat.enabled) .setName('Provider') .setDesc('Select the provider for chat view.') .addDropdown((dropdown) => { for (const option of PROVIDERS) { dropdown.addOption(option, option); } - dropdown.setValue(settings.chat.provider); - dropdown.onChange(async (value) => { - settings.chat.provider = value as Provider; - await plugin.saveSettings(); - this.display(); // Re-render settings tab - }); + dropdown + .setDisabled(!settings.chat.enabled) + .setValue(settings.chat.provider) + .onChange(async (value) => { + settings.chat.provider = value as Provider; + await plugin.saveSettings(); + this.display(); // Re-render settings tab + }); }); new Setting(containerEl) - .setDisabled(!settings.chat.enabled) .setName('Model') .setDesc('Select the model for GPT.') .addDropdown((dropdown) => { for (const option of MODELS[settings.chat.provider]) { 
dropdown.addOption(option, option); } - dropdown.setValue(settings.chat.model); - dropdown.onChange(async (value) => { - settings.chat.model = value as Model; - await plugin.saveSettings(); - }); + dropdown + .setDisabled(!settings.chat.enabled) + .setValue(settings.chat.model) + .onChange(async (value) => { + settings.chat.model = value as Model; + await plugin.saveSettings(); + }); }); new Setting(containerEl) - .setDisabled(!settings.chat.enabled) .setName('Max tokens') .setDesc('Set the max tokens for chat view.') .addSlider((slider) => slider + .setDisabled(!settings.chat.enabled) .setValue(settings.chat.maxTokens) .setLimits(128, 8192, 128) .setDynamicTooltip() @@ -398,11 +441,11 @@ export class MarkpilotSettingTab extends PluginSettingTab { ); new Setting(containerEl) - .setDisabled(!settings.chat.enabled) .setName('Temperature') .setDesc('Set the temperature for chat view.') .addSlider((slider) => slider + .setDisabled(!settings.chat.enabled) .setValue(settings.chat.temperature) .setLimits(0, 1, 0.01) .setDynamicTooltip() @@ -424,11 +467,14 @@ export class MarkpilotSettingTab extends PluginSettingTab { 'Turn this on to enable memory caching. 
The cached data will be invalided on startup.', ) .addToggle((toggle) => - toggle.setValue(settings.cache.enabled).onChange(async (value) => { - settings.cache.enabled = value; - await plugin.saveSettings(); - this.display(); // Re-render settings tab - }), + toggle + .setDisabled(!settings.completions.enabled) + .setValue(settings.cache.enabled) + .onChange(async (value) => { + settings.cache.enabled = value; + await plugin.saveSettings(); + this.display(); // Re-render settings tab + }), ); /************************************************************/ diff --git a/yarn.lock b/yarn.lock index 74f5900..4a12b2b 100644 --- a/yarn.lock +++ b/yarn.lock @@ -501,6 +501,13 @@ brace-expansion@^1.1.7: balanced-match "^1.0.0" concat-map "0.0.1" +brace-expansion@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-2.0.1.tgz#1edc459e0f0c548486ecf9fc99f2221364b9a0ae" + integrity sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA== + dependencies: + balanced-match "^1.0.0" + braces@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" @@ -1672,6 +1679,13 @@ minimatch@^3.0.5, minimatch@^3.1.1, minimatch@^3.1.2: dependencies: brace-expansion "^1.1.7" +minimatch@^9.0.4: + version "9.0.4" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-9.0.4.tgz#8e49c731d1749cbec05050ee5145147b32496a51" + integrity sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw== + dependencies: + brace-expansion "^2.0.1" + moment@2.29.4: version "2.29.4" resolved "https://registry.yarnpkg.com/moment/-/moment-2.29.4.tgz#3dbe052889fe7c1b2ed966fcb3a77328964ef108" From e3b3dfa540776c6b75132ec29468204848046980 Mon Sep 17 00:00:00 2001 From: Taichi Maeda Date: Fri, 19 Apr 2024 18:10:37 +0900 Subject: [PATCH 04/26] Update settings UI --- src/settings.ts | 83 
+++++++++++++++++++++++++++++++------------------ 1 file changed, 53 insertions(+), 30 deletions(-) diff --git a/src/settings.ts b/src/settings.ts index 74de365..10b1cf2 100644 --- a/src/settings.ts +++ b/src/settings.ts @@ -1,6 +1,12 @@ import Chart from 'chart.js/auto'; import { App, Notice, PluginSettingTab, Setting } from 'obsidian'; -import { Model, MODELS, Provider, PROVIDERS } from './api/provider'; +import { + DEFAULT_MODELS, + Model, + MODELS, + Provider, + PROVIDERS, +} from './api/provider'; import { ChatHistory } from './api/types'; import Markpilot from './main'; @@ -128,6 +134,11 @@ export class MarkpilotSettingTab extends PluginSettingTab { .onChange(async (value) => { settings.providers.openai.apiKey = value; await plugin.saveSettings(); + // NOTE: + // The API client needs to be updated when the API key, API URL or provider is changed, + // because these parameters are captured by the underlying library on initialization + // and become stale when the settings are changed. 
+ plugin.updateAPIClient(); new Notice('Successfully saved OpenAI API key.'); }), ); @@ -141,6 +152,7 @@ export class MarkpilotSettingTab extends PluginSettingTab { .onChange(async (value) => { settings.providers.openrouter.apiKey = value; await plugin.saveSettings(); + plugin.updateAPIClient(); new Notice('Successfully saved OpenRouter API key.'); }), ); @@ -154,6 +166,7 @@ export class MarkpilotSettingTab extends PluginSettingTab { .onChange(async (value) => { settings.providers.ollama.apiUrl = value; await plugin.saveSettings(); + plugin.updateAPIClient(); }), ); @@ -213,7 +226,9 @@ export class MarkpilotSettingTab extends PluginSettingTab { .setValue(settings.completions.provider) .onChange(async (value) => { settings.completions.provider = value as Provider; + settings.completions.model = DEFAULT_MODELS[value as Provider]; await plugin.saveSettings(); + plugin.updateAPIClient(); this.display(); // Re-render settings tab }); }); @@ -237,16 +252,15 @@ export class MarkpilotSettingTab extends PluginSettingTab { new Setting(containerEl) .setName('Max tokens') .setDesc('Set the max tokens for inline completions.') - .addSlider((slider) => - slider - .setDisabled(!settings.completions.enabled) - .setValue(settings.completions.maxTokens) - .setLimits(128, 8192, 128) - // TODO: - // Figure out how to add unit to the slider - .setDynamicTooltip() + .addText((text) => + text + .setValue(settings.completions.maxTokens.toString()) .onChange(async (value) => { - settings.completions.maxTokens = value; + const amount = parseInt(value); + if (isNaN(amount) || amount < 0) { + return; + } + settings.completions.maxTokens = amount; await plugin.saveSettings(); }), ); @@ -259,6 +273,8 @@ export class MarkpilotSettingTab extends PluginSettingTab { .setDisabled(!settings.completions.enabled) .setValue(settings.completions.temperature) .setLimits(0, 1, 0.01) + // TODO: + // Figure out how to add unit to the slider .setDynamicTooltip() .onChange(async (value) => { 
settings.completions.temperature = value; @@ -280,6 +296,7 @@ export class MarkpilotSettingTab extends PluginSettingTab { .onChange(async (value) => { settings.completions.waitTime = value; await plugin.saveSettings(); + // NOTE: // Editor extension needs to be updated when settings are changed // because some fields e.g. `acceptKey` become stale and there is no way // to make the extension query it on the fly. @@ -292,14 +309,15 @@ export class MarkpilotSettingTab extends PluginSettingTab { .setDesc( 'Set the window size for inline completions. The window size the number of characters around the cursor used to obtain inline completions', ) - .addSlider((slider) => - slider - .setDisabled(!settings.completions.enabled) - .setValue(settings.completions.windowSize) - .setLimits(128, 8192, 128) - .setDynamicTooltip() + .addText((text) => + text + .setValue(settings.completions.windowSize.toString()) .onChange(async (value) => { - settings.completions.windowSize = value; + const amount = parseInt(value); + if (isNaN(amount) || amount < 0) { + return; + } + settings.completions.windowSize = amount; await plugin.saveSettings(); plugin.updateEditorExtension(); }), @@ -404,7 +422,9 @@ export class MarkpilotSettingTab extends PluginSettingTab { .setValue(settings.chat.provider) .onChange(async (value) => { settings.chat.provider = value as Provider; + settings.chat.model = DEFAULT_MODELS[value as Provider]; await plugin.saveSettings(); + plugin.updateAPIClient(); this.display(); // Re-render settings tab }); }); @@ -428,14 +448,15 @@ export class MarkpilotSettingTab extends PluginSettingTab { new Setting(containerEl) .setName('Max tokens') .setDesc('Set the max tokens for chat view.') - .addSlider((slider) => - slider - .setDisabled(!settings.chat.enabled) - .setValue(settings.chat.maxTokens) - .setLimits(128, 8192, 128) - .setDynamicTooltip() + .addText((text) => + text + .setValue(settings.chat.maxTokens.toString()) .onChange(async (value) => { - settings.chat.maxTokens = 
value; + const amount = parseFloat(value); + if (isNaN(amount) || amount < 0) { + return; + } + settings.chat.maxTokens = amount; await plugin.saveSettings(); }), ); @@ -488,13 +509,15 @@ export class MarkpilotSettingTab extends PluginSettingTab { .setDesc( 'Set the monthly limit for the usage costs (USD). When this limit is reached, the plugin will disable both inline completions and chat view', ) - .addSlider((slider) => - slider - .setValue(settings.usage.monthlyLimit) - .setLimits(0, 100, 1) - .setDynamicTooltip() + .addText((text) => + text + .setValue(settings.usage.monthlyLimit.toString()) .onChange(async (value) => { - settings.usage.monthlyLimit = value; + const amount = parseFloat(value); + if (isNaN(amount) || amount < 0) { + return; + } + settings.usage.monthlyLimit = amount; await plugin.saveSettings(); }), ); From 52acd714d6263141f93e2688c6aefc415bc71cb7 Mon Sep 17 00:00:00 2001 From: Taichi Maeda Date: Fri, 19 Apr 2024 18:11:08 +0900 Subject: [PATCH 05/26] Use updated client for completions and chat --- src/api/client.ts | 161 ++++++++++++++++++++++++++++++-------------- src/api/provider.ts | 18 ++++- src/api/usage.ts | 25 +++---- src/chat/App.tsx | 4 +- src/main.ts | 89 +++++++++++++++++------- 5 files changed, 207 insertions(+), 90 deletions(-) diff --git a/src/api/client.ts b/src/api/client.ts index 4cb42cd..af69dbd 100644 --- a/src/api/client.ts +++ b/src/api/client.ts @@ -1,8 +1,8 @@ import { getEncoding } from 'js-tiktoken'; import { Notice } from 'obsidian'; -import OpenAI, { ClientOptions } from 'openai'; +import OpenAI from 'openai'; import Markpilot from 'src/main'; -import { Provider } from './provider'; +import { validateURL } from 'src/utils'; import { ChatMessage } from './types'; import { UsageTracker } from './usage'; @@ -15,63 +15,23 @@ export interface APIClient { ): Promise; } -// TODO: -// Allow use of APIs that are not compatible with the OpenAI API standard. 
-export class BaseAPIClient implements APIClient { +abstract class OpenAICompatibleAPIClient implements APIClient { constructor( - private tracker: UsageTracker, - private plugin: Markpilot, + protected tracker: UsageTracker, + protected plugin: Markpilot, ) {} - getInstance(provider: Provider) { - const { settings } = this.plugin; - - const options: ClientOptions = { - apiKey: undefined, - baseURL: undefined, - dangerouslyAllowBrowser: true, - }; - switch (provider) { - case 'openai': - options.apiKey = settings.providers.openai.apiKey; - if (options.apiKey === undefined || !options.apiKey?.startsWith('sk')) { - new Notice('OpenAI API key is not set or invalid.'); - return; - } - break; - case 'openrouter': - options.baseURL = 'https://openrouter.ai/api/v1'; - options.apiKey = settings.providers.openrouter.apiKey; - if (options.apiKey === undefined || !options.apiKey?.startsWith('sk')) { - new Notice('OpenRouter API key is not set or invalid.'); - return; - } - break; - case 'ollama': - options.baseURL = 'http://localhost:11434/v1/'; - options.baseURL = settings.providers.ollama.apiUrl; - if (options.apiKey === undefined) { - new Notice('Ollama API URL is not set or invalid.'); - return; - } - break; - default: - throw new Error('Invalid API provider.'); - } - - return new OpenAI(options); - } + abstract get openai(): OpenAI | undefined; async *fetchChat(messages: ChatMessage[]) { const { settings } = this.plugin; - const instance = this.getInstance(settings.chat.provider); - if (instance === undefined) { + if (this.openai === undefined) { return; } try { - const stream = await instance.chat.completions.create({ + const stream = await this.openai.chat.completions.create({ messages, model: settings.chat.model, max_tokens: settings.chat.maxTokens, @@ -113,13 +73,12 @@ export class BaseAPIClient implements APIClient { async fetchCompletions(language: string, prefix: string, suffix: string) { const { settings } = this.plugin; - const instance = 
this.getInstance(settings.completions.provider); - if (instance === undefined) { + if (this.openai === undefined) { return; } try { - const completions = await instance.completions.create({ + const completions = await this.openai.completions.create({ prompt: `Continue the following code written in ${language} language:\n\n${prefix}`, suffix, model: settings.completions.model, @@ -149,3 +108,103 @@ export class BaseAPIClient implements APIClient { } } } + +export class OpenAIAPIClient + extends OpenAICompatibleAPIClient + implements APIClient +{ + constructor(tracker: UsageTracker, plugin: Markpilot) { + super(tracker, plugin); + } + + get openai(): OpenAI | undefined { + const { settings } = this.plugin; + + const apiKey = settings.providers.openai.apiKey; + if (apiKey === undefined) { + new Notice('OpenAI API key is not set.'); + return; + } + if (!apiKey.startsWith('sk')) { + new Notice('OpenAI API key is invalid.'); + return; + } + + return new OpenAI({ + apiKey, + dangerouslyAllowBrowser: true, + }); + } +} + +export class OpenRouterAPIClient + extends OpenAICompatibleAPIClient + implements APIClient +{ + constructor(tracker: UsageTracker, plugin: Markpilot) { + super(tracker, plugin); + } + + get openai(): OpenAI | undefined { + const { settings } = this.plugin; + + const apiKey = settings.providers.openrouter.apiKey; + if (apiKey === undefined) { + new Notice('OpenRouter API key is not set.'); + return; + } + if (!apiKey.startsWith('sk')) { + new Notice('OpenRouter API key is invalid.'); + return; + } + + return new OpenAI({ + apiKey, + baseURL: 'https://openrouter.ai/api/v1', + dangerouslyAllowBrowser: true, + }); + } +} + +export class OllamaAPIClient + extends OpenAICompatibleAPIClient + implements APIClient +{ + constructor(tracker: UsageTracker, plugin: Markpilot) { + super(tracker, plugin); + } + + get openai(): OpenAI | undefined { + const { settings } = this.plugin; + + const apiUrl = settings.providers.ollama.apiUrl; + if (apiUrl === undefined) { + 
new Notice('Ollama API URL is not set.'); + return; + } + if (!validateURL(apiUrl)) { + new Notice('Ollama API URL is invalid.'); + return; + } + + return new OpenAI({ + baseURL: apiUrl, + dangerouslyAllowBrowser: true, + }); + } +} + +// TODO: +// Implement API client for Gemini. +export class GeminiAPIClient implements APIClient { + fetchChat(messages: ChatMessage[]): AsyncGenerator { + throw new Error('Method not implemented.'); + } + fetchCompletions( + language: string, + prefix: string, + suffix: string, + ): Promise { + throw new Error('Method not implemented.'); + } +} diff --git a/src/api/provider.ts b/src/api/provider.ts index 5f6af4e..808e5db 100644 --- a/src/api/provider.ts +++ b/src/api/provider.ts @@ -35,14 +35,28 @@ export const OPENAI_MODELS = [ // TODO: // This is a placeholder. -export const OPENROUTER_MODELS = ['gpt-4'] as const; +export const OPENROUTER_MODELS = [ + 'openai/gpt-3.5-turbo', + 'openai/gpt-4-turbo', +] as const; // TODO: // This is a placeholder. -export const OLLAMA_MODELS = ['gpt-4'] as const; +export const OLLAMA_MODELS = [ + 'llama2', + 'llama3', + 'codellama', + 'phind-codellama', +] as const; export const MODELS = { openai: OPENAI_MODELS, openrouter: OPENROUTER_MODELS, ollama: OLLAMA_MODELS, }; + +export const DEFAULT_MODELS: Record = { + openai: 'gpt-3.5-turbo', + openrouter: 'openai/gpt-3.5-turbo', + ollama: 'llama2', +}; diff --git a/src/api/usage.ts b/src/api/usage.ts index f6481fe..66af394 100644 --- a/src/api/usage.ts +++ b/src/api/usage.ts @@ -4,6 +4,7 @@ import { getThisMonthAsString, getTodayAsString } from 'src/utils'; import { APIClient } from './client'; import { Model, + OLLAMA_MODELS, OllamaModel, OpenAIModel, OpenRouterModel, @@ -60,26 +61,26 @@ const OPENAI_MODEL_OUTPUT_COSTS: Record = { // TODO: // This is a placeholder. const OPENROUTER_INPUT_COSTS: Record = { - 'gpt-4': 30, + 'openai/gpt-3.5-turbo': 0, + 'openai/gpt-4-turbo': 0, }; // TODO: // This is a placeholder. 
const OPENROUTER_OUTPUT_COSTS: Record = { - 'gpt-4': 60, + 'openai/gpt-3.5-turbo': 0, + 'openai/gpt-4-turbo': 0, }; -// TODO: -// This is a placeholder. -const OLLAMA_INPUT_COSTS: Record = { - 'gpt-4': 0, -}; +const OLLAMA_INPUT_COSTS = OLLAMA_MODELS.reduce( + (acc, model) => ({ ...acc, [model]: 0 }), + {}, +) as Record; -// TODO: -// This is a placeholder. -const OLLAMA_OUTPUT_COSTS: Record = { - 'gpt-4': 0, -}; +const OLLAMA_OUTPUT_COSTS: Record = OLLAMA_MODELS.reduce( + (acc, model) => ({ ...acc, [model]: 0 }), + {}, +) as Record; // TODO: // Replace `Record` to an appropriate type. diff --git a/src/chat/App.tsx b/src/chat/App.tsx index 9e647f7..1644bbb 100644 --- a/src/chat/App.tsx +++ b/src/chat/App.tsx @@ -42,12 +42,14 @@ export function App({ view, plugin }: { view: ChatView; plugin: Markpilot }) { useEffect(() => { if (turn === 'assistant') { (async () => { - for await (const chunk of plugin.client.fetchChat(history.messages)) { + const chunks = plugin.chatClient.fetchChat(history.messages); + for await (const chunk of chunks) { setHistory((history) => ({ ...history, response: history.response + chunk, })); } + setHistory((history) => ({ messages: [ ...history.messages, diff --git a/src/main.ts b/src/main.ts index c58efa5..5091162 100644 --- a/src/main.ts +++ b/src/main.ts @@ -1,8 +1,21 @@ import { Extension } from '@codemirror/state'; import { minimatch } from 'minimatch'; -import { addIcon, MarkdownView, Notice, Plugin, setIcon } from 'obsidian'; +import { + addIcon, + MarkdownView, + Notice, + Plugin, + setIcon, + WorkspaceLeaf, +} from 'obsidian'; import { MemoryCacheProxy } from './api/cache'; -import { APIClient, BaseAPIClient } from './api/client'; +import { + APIClient, + OllamaAPIClient, + OpenAIAPIClient, + OpenRouterAPIClient, +} from './api/client'; +import { Provider } from './api/provider'; import { UsageMonitorProxy, UsageTracker } from './api/usage'; import { CHAT_VIEW_TYPE, ChatView } from './chat/view'; import { inlineCompletionsExtension 
} from './editor/extension'; @@ -16,35 +29,28 @@ import { export default class Markpilot extends Plugin { settings: MarkpilotSettings; - client: APIClient; - view: ChatView; extensions: Extension[]; + completionsClient: APIClient; + chatClient: APIClient; + chatView: ChatView; async onload() { await this.loadSettings(); this.addSettingTab(new MarkpilotSettingTab(this.app, this)); - // Initialize the OpenAI API client. - const tracker = new UsageTracker(this); - const client = new BaseAPIClient(tracker, this); - const clientWithMonitor = new UsageMonitorProxy(client, this); - const clientWithCache = new MemoryCacheProxy(clientWithMonitor, this); - this.client = clientWithCache; + const { settings } = this; + this.completionsClient = this.createAPIClient( + settings.completions.provider, + ); + this.chatClient = this.createAPIClient(settings.chat.provider); - // Register the editor extension. - this.extensions = this.getEditorExtension(); + this.extensions = this.createEditorExtension(); this.registerEditorExtension(this.extensions); - - // Register the chat view. this.registerView(CHAT_VIEW_TYPE, (leaf) => { - this.view = new ChatView(leaf, this); - return this.view; + this.chatView = this.createChatView(leaf); + return this.chatView; }); - if (this.settings.chat.enabled) { - this.activateView(); - } - // Register the ribbon actions and commands. 
this.registerRibbonActions(); this.registerCommands(); } @@ -152,7 +158,7 @@ export default class Markpilot extends Plugin { response: '', }; this.saveSettings(); - this.view.clear?.(); + this.chatView.clear?.(); new Notice('Chat history cleared.'); }, }); @@ -190,7 +196,34 @@ export default class Markpilot extends Plugin { }); } - getEditorExtension() { + createAPIClient(provider: Provider) { + const tracker = new UsageTracker(this); + const client = (() => { + switch (provider) { + case 'openai': + return new OpenAIAPIClient(tracker, this); + case 'openrouter': + return new OpenRouterAPIClient(tracker, this); + case 'ollama': + return new OllamaAPIClient(tracker, this); + } + })(); + const clientWithMonitor = new UsageMonitorProxy(client, this); + const clientWithCache = new MemoryCacheProxy(clientWithMonitor, this); + + return clientWithCache; + } + + updateAPIClient() { + const { settings } = this; + + this.chatClient = this.createAPIClient(settings.chat.provider); + this.completionsClient = this.createAPIClient( + settings.completions.provider, + ); + } + + createEditorExtension() { return inlineCompletionsExtension(async (...args) => { // TODO: // Extract this logic to somewhere appropriate. 
@@ -210,7 +243,7 @@ export default class Markpilot extends Plugin { ) { return; } - return this.client.fetchCompletions(...args); + return this.completionsClient.fetchCompletions(...args); }, this); } @@ -220,11 +253,19 @@ export default class Markpilot extends Plugin { this.extensions.splice( 0, this.extensions.length, - ...this.getEditorExtension(), + ...this.createEditorExtension(), ); workspace.updateOptions(); } + createChatView(leaf: WorkspaceLeaf) { + const view = new ChatView(leaf, this); + if (this.settings.chat.enabled) { + this.activateView(); + } + return view; + } + async loadSettings() { this.settings = Object.assign({}, DEFAULT_SETTINGS, await this.loadData()); } From e8d6d7d751130fe202165d6a214e236cedf7a9b0 Mon Sep 17 00:00:00 2001 From: Taichi Maeda Date: Fri, 19 Apr 2024 18:33:47 +0900 Subject: [PATCH 06/26] Reorganise files --- src/api/client.ts | 202 +----------------- src/api/clients/gemini.ts | 18 ++ src/api/clients/ollama.ts | 35 +++ src/api/clients/openai-compatible.ts | 101 +++++++++ src/api/clients/openai.ts | 34 +++ src/api/clients/openrouter.ts | 35 +++ src/api/{usage.ts => costs.ts} | 78 ++----- src/api/models.ts | 60 ++++++ src/api/provider.ts | 63 +----- src/api/{cache.ts => proxies/memory-cache.ts} | 4 +- src/api/proxies/usage-monitor.ts | 43 ++++ src/chat/App.tsx | 2 +- src/chat/components/ChatItem.tsx | 2 +- src/main.ts | 17 +- src/settings.ts | 19 +- src/{api => }/types.ts | 0 16 files changed, 365 insertions(+), 348 deletions(-) create mode 100644 src/api/clients/gemini.ts create mode 100644 src/api/clients/ollama.ts create mode 100644 src/api/clients/openai-compatible.ts create mode 100644 src/api/clients/openai.ts create mode 100644 src/api/clients/openrouter.ts rename src/api/{usage.ts => costs.ts} (58%) create mode 100644 src/api/models.ts rename src/api/{cache.ts => proxies/memory-cache.ts} (95%) create mode 100644 src/api/proxies/usage-monitor.ts rename src/{api => }/types.ts (100%) diff --git a/src/api/client.ts 
b/src/api/client.ts index af69dbd..2aa860c 100644 --- a/src/api/client.ts +++ b/src/api/client.ts @@ -1,10 +1,4 @@ -import { getEncoding } from 'js-tiktoken'; -import { Notice } from 'obsidian'; -import OpenAI from 'openai'; -import Markpilot from 'src/main'; -import { validateURL } from 'src/utils'; -import { ChatMessage } from './types'; -import { UsageTracker } from './usage'; +import { ChatMessage } from '../types'; export interface APIClient { fetchChat(messages: ChatMessage[]): AsyncGenerator; @@ -14,197 +8,3 @@ export interface APIClient { suffix: string, ): Promise; } - -abstract class OpenAICompatibleAPIClient implements APIClient { - constructor( - protected tracker: UsageTracker, - protected plugin: Markpilot, - ) {} - - abstract get openai(): OpenAI | undefined; - - async *fetchChat(messages: ChatMessage[]) { - const { settings } = this.plugin; - - if (this.openai === undefined) { - return; - } - - try { - const stream = await this.openai.chat.completions.create({ - messages, - model: settings.chat.model, - max_tokens: settings.chat.maxTokens, - temperature: settings.chat.temperature, - top_p: 1, - n: 1, - stream: true, - }); - - const contents = []; - for await (const chunk of stream) { - const content = chunk.choices[0].delta.content ?? ''; - contents.push(content); - yield content; - } - - // Update usage cost estimates. - const enc = getEncoding('gpt2'); // Assume GPT-2 encoding - const inputMessage = messages - .map((message) => message.content) - .join('\n'); - const outputMessage = contents.join(''); - const inputTokens = enc.encode(inputMessage).length; - const outputTokens = enc.encode(outputMessage).length; - await this.tracker.add( - settings.chat.provider, - settings.chat.model, - inputTokens, - outputTokens, - ); - } catch (error) { - console.error(error); - new Notice( - 'Failed to fetch chat completions. 
Make sure your API key or API URL is correct.', - ); - } - } - - async fetchCompletions(language: string, prefix: string, suffix: string) { - const { settings } = this.plugin; - - if (this.openai === undefined) { - return; - } - - try { - const completions = await this.openai.completions.create({ - prompt: `Continue the following code written in ${language} language:\n\n${prefix}`, - suffix, - model: settings.completions.model, - max_tokens: settings.completions.maxTokens, - temperature: settings.completions.temperature, - top_p: 1, - n: 1, - stop: ['\n\n\n'], - }); - - // Update usage cost estimates. - const inputTokens = completions.usage?.prompt_tokens ?? 0; - const outputTokens = completions.usage?.completion_tokens ?? 0; - await this.tracker.add( - settings.completions.provider, - settings.completions.model, - inputTokens, - outputTokens, - ); - - return completions.choices[0].text; - } catch (error) { - console.error(error); - new Notice( - 'Failed to fetch completions. Make sure your API key or API URL is correct.', - ); - } - } -} - -export class OpenAIAPIClient - extends OpenAICompatibleAPIClient - implements APIClient -{ - constructor(tracker: UsageTracker, plugin: Markpilot) { - super(tracker, plugin); - } - - get openai(): OpenAI | undefined { - const { settings } = this.plugin; - - const apiKey = settings.providers.openai.apiKey; - if (apiKey === undefined) { - new Notice('OpenAI API key is not set.'); - return; - } - if (!apiKey.startsWith('sk')) { - new Notice('OpenAI API key is invalid.'); - return; - } - - return new OpenAI({ - apiKey, - dangerouslyAllowBrowser: true, - }); - } -} - -export class OpenRouterAPIClient - extends OpenAICompatibleAPIClient - implements APIClient -{ - constructor(tracker: UsageTracker, plugin: Markpilot) { - super(tracker, plugin); - } - - get openai(): OpenAI | undefined { - const { settings } = this.plugin; - - const apiKey = settings.providers.openrouter.apiKey; - if (apiKey === undefined) { - new Notice('OpenRouter 
API key is not set.'); - return; - } - if (!apiKey.startsWith('sk')) { - new Notice('OpenRouter API key is invalid.'); - return; - } - - return new OpenAI({ - apiKey, - baseURL: 'https://openrouter.ai/api/v1', - dangerouslyAllowBrowser: true, - }); - } -} - -export class OllamaAPIClient - extends OpenAICompatibleAPIClient - implements APIClient -{ - constructor(tracker: UsageTracker, plugin: Markpilot) { - super(tracker, plugin); - } - - get openai(): OpenAI | undefined { - const { settings } = this.plugin; - - const apiUrl = settings.providers.ollama.apiUrl; - if (apiUrl === undefined) { - new Notice('Ollama API URL is not set.'); - return; - } - if (!validateURL(apiUrl)) { - new Notice('Ollama API URL is invalid.'); - return; - } - - return new OpenAI({ - baseURL: apiUrl, - dangerouslyAllowBrowser: true, - }); - } -} - -// TODO: -// Implement API client for Gemini. -export class GeminiAPIClient implements APIClient { - fetchChat(messages: ChatMessage[]): AsyncGenerator { - throw new Error('Method not implemented.'); - } - fetchCompletions( - language: string, - prefix: string, - suffix: string, - ): Promise { - throw new Error('Method not implemented.'); - } -} diff --git a/src/api/clients/gemini.ts b/src/api/clients/gemini.ts new file mode 100644 index 0000000..3ab2efd --- /dev/null +++ b/src/api/clients/gemini.ts @@ -0,0 +1,18 @@ +import { ChatMessage } from '../../types'; +import { APIClient } from '../client'; + +// TODO: +// Implement API client for Gemini. 
+ +export class GeminiAPIClient implements APIClient { + fetchChat(messages: ChatMessage[]): AsyncGenerator { + throw new Error('Method not implemented.'); + } + fetchCompletions( + language: string, + prefix: string, + suffix: string, + ): Promise { + throw new Error('Method not implemented.'); + } +} diff --git a/src/api/clients/ollama.ts b/src/api/clients/ollama.ts new file mode 100644 index 0000000..c9ad90a --- /dev/null +++ b/src/api/clients/ollama.ts @@ -0,0 +1,35 @@ +import { Notice } from 'obsidian'; +import OpenAI from 'openai'; +import Markpilot from 'src/main'; +import { validateURL } from 'src/utils'; +import { APIClient } from '../client'; +import { CostsTracker } from '../costs'; +import { OpenAICompatibleAPIClient } from './openai-compatible'; + +export class OllamaAPIClient + extends OpenAICompatibleAPIClient + implements APIClient +{ + constructor(tracker: CostsTracker, plugin: Markpilot) { + super(tracker, plugin); + } + + get openai(): OpenAI | undefined { + const { settings } = this.plugin; + + const apiUrl = settings.providers.ollama.apiUrl; + if (apiUrl === undefined) { + new Notice('Ollama API URL is not set.'); + return; + } + if (!validateURL(apiUrl)) { + new Notice('Ollama API URL is invalid.'); + return; + } + + return new OpenAI({ + baseURL: apiUrl, + dangerouslyAllowBrowser: true, + }); + } +} diff --git a/src/api/clients/openai-compatible.ts b/src/api/clients/openai-compatible.ts new file mode 100644 index 0000000..e143c43 --- /dev/null +++ b/src/api/clients/openai-compatible.ts @@ -0,0 +1,101 @@ +import { getEncoding } from 'js-tiktoken'; +import { Notice } from 'obsidian'; +import OpenAI from 'openai'; +import Markpilot from 'src/main'; +import { ChatMessage } from '../../types'; +import { APIClient } from '../client'; +import { CostsTracker } from '../costs'; + +export abstract class OpenAICompatibleAPIClient implements APIClient { + constructor( + protected tracker: CostsTracker, + protected plugin: Markpilot, + ) {} + + abstract 
get openai(): OpenAI | undefined; + + async *fetchChat(messages: ChatMessage[]) { + const { settings } = this.plugin; + + if (this.openai === undefined) { + return; + } + + try { + const stream = await this.openai.chat.completions.create({ + messages, + model: settings.chat.model, + max_tokens: settings.chat.maxTokens, + temperature: settings.chat.temperature, + top_p: 1, + n: 1, + stream: true, + }); + + const contents = []; + for await (const chunk of stream) { + const content = chunk.choices[0].delta.content ?? ''; + contents.push(content); + yield content; + } + + // Update usage cost estimates. + const enc = getEncoding('gpt2'); // Assume GPT-2 encoding + const inputMessage = messages + .map((message) => message.content) + .join('\n'); + const outputMessage = contents.join(''); + const inputTokens = enc.encode(inputMessage).length; + const outputTokens = enc.encode(outputMessage).length; + await this.tracker.add( + settings.chat.provider, + settings.chat.model, + inputTokens, + outputTokens, + ); + } catch (error) { + console.error(error); + new Notice( + 'Failed to fetch chat completions. Make sure your API key or API URL is correct.', + ); + } + } + + async fetchCompletions(language: string, prefix: string, suffix: string) { + const { settings } = this.plugin; + + if (this.openai === undefined) { + return; + } + + try { + const completions = await this.openai.completions.create({ + prompt: `Continue the following code written in ${language} language:\n\n${prefix}`, + suffix, + model: settings.completions.model, + max_tokens: settings.completions.maxTokens, + temperature: settings.completions.temperature, + top_p: 1, + n: 1, + stop: ['\n\n\n'], + }); + + // Update usage cost estimates. + const inputTokens = completions.usage?.prompt_tokens ?? 0; + const outputTokens = completions.usage?.completion_tokens ?? 
0; + await this.tracker.add( + settings.completions.provider, + settings.completions.model, + inputTokens, + outputTokens, + ); + + return completions.choices[0].text; + } catch (error) { + console.error(error); + new Notice( + 'Failed to fetch completions. Make sure your API key or API URL is correct.', + ); + } + } +} diff --git a/src/api/clients/openai.ts b/src/api/clients/openai.ts new file mode 100644 index 0000000..bf27d65 --- /dev/null +++ b/src/api/clients/openai.ts @@ -0,0 +1,34 @@ +import { Notice } from 'obsidian'; +import OpenAI from 'openai'; +import Markpilot from 'src/main'; +import { APIClient } from '../client'; +import { CostsTracker } from '../costs'; +import { OpenAICompatibleAPIClient } from './openai-compatible'; + +export class OpenAIAPIClient + extends OpenAICompatibleAPIClient + implements APIClient +{ + constructor(tracker: CostsTracker, plugin: Markpilot) { + super(tracker, plugin); + } + + get openai(): OpenAI | undefined { + const { settings } = this.plugin; + + const apiKey = settings.providers.openai.apiKey; + if (apiKey === undefined) { + new Notice('OpenAI API key is not set.'); + return; + } + if (!apiKey.startsWith('sk')) { + new Notice('OpenAI API key is invalid.'); + return; + } + + return new OpenAI({ + apiKey, + dangerouslyAllowBrowser: true, + }); + } +} diff --git a/src/api/clients/openrouter.ts b/src/api/clients/openrouter.ts new file mode 100644 index 0000000..abba5fd --- /dev/null +++ b/src/api/clients/openrouter.ts @@ -0,0 +1,35 @@ +import { Notice } from 'obsidian'; +import OpenAI from 'openai'; +import Markpilot from 'src/main'; +import { APIClient } from '../client'; +import { CostsTracker } from '../costs'; +import { OpenAICompatibleAPIClient } from './openai-compatible'; + +export class OpenRouterAPIClient + extends OpenAICompatibleAPIClient + implements APIClient +{ + constructor(tracker: CostsTracker, plugin: Markpilot) { + super(tracker, plugin); + } + + get openai(): OpenAI | undefined { + const { settings } = 
this.plugin; + + const apiKey = settings.providers.openrouter.apiKey; + if (apiKey === undefined) { + new Notice('OpenRouter API key is not set.'); + return; + } + if (!apiKey.startsWith('sk')) { + new Notice('OpenRouter API key is invalid.'); + return; + } + + return new OpenAI({ + apiKey, + baseURL: 'https://openrouter.ai/api/v1', + dangerouslyAllowBrowser: true, + }); + } +} diff --git a/src/api/usage.ts b/src/api/costs.ts similarity index 58% rename from src/api/usage.ts rename to src/api/costs.ts index 66af394..0f5b19a 100644 --- a/src/api/usage.ts +++ b/src/api/costs.ts @@ -1,16 +1,7 @@ -import { Notice } from 'obsidian'; import Markpilot from 'src/main'; import { getThisMonthAsString, getTodayAsString } from 'src/utils'; -import { APIClient } from './client'; -import { - Model, - OLLAMA_MODELS, - OllamaModel, - OpenAIModel, - OpenRouterModel, - Provider, -} from './provider'; -import { ChatMessage } from './types'; +import { Model, OpenAIModel, OpenRouterModel } from './models'; +import { OFFLINE_PROVIDERS, OnlineProvider, Provider } from './provider'; const OPENAI_MODEL_INPUT_COSTS: Record = { 'gpt-3.5-turbo-instruct': 1.5, @@ -72,33 +63,21 @@ const OPENROUTER_OUTPUT_COSTS: Record = { 'openai/gpt-4-turbo': 0, }; -const OLLAMA_INPUT_COSTS = OLLAMA_MODELS.reduce( - (acc, model) => ({ ...acc, [model]: 0 }), - {}, -) as Record; - -const OLLAMA_OUTPUT_COSTS: Record = OLLAMA_MODELS.reduce( - (acc, model) => ({ ...acc, [model]: 0 }), - {}, -) as Record; - // TODO: // Replace `Record` to an appropriate type. -const INPUT_COSTS: Record> = { +const INPUT_COSTS: Record> = { openai: OPENAI_MODEL_INPUT_COSTS, openrouter: OPENROUTER_INPUT_COSTS, - ollama: OLLAMA_INPUT_COSTS, }; // TODO: // Replace `Record` to an appropriate type. 
-const OUTPUT_COSTS: Record> = { +const OUTPUT_COSTS: Record> = { openai: OPENAI_MODEL_OUTPUT_COSTS, openrouter: OPENROUTER_OUTPUT_COSTS, - ollama: OLLAMA_OUTPUT_COSTS, }; -export class UsageTracker { +export class CostsTracker { constructor(private plugin: Markpilot) {} async add( @@ -109,6 +88,11 @@ ) { const { settings } = this.plugin; + // No costs associated with offline providers. + if ((OFFLINE_PROVIDERS as readonly Provider[]).includes(provider)) { + return; + } + const today = getTodayAsString(); const thisMonth = getThisMonthAsString(); if (settings.usage.dailyCosts[today] === undefined) { @@ -116,8 +100,8 @@ } const cost = - (inputTokens * INPUT_COSTS[provider][model] + - outputTokens * OUTPUT_COSTS[provider][model]) / + (inputTokens * INPUT_COSTS[provider as OnlineProvider][model] + + outputTokens * OUTPUT_COSTS[provider as OnlineProvider][model]) / 1_000_000; settings.usage.dailyCosts[today] += cost; @@ -126,41 +110,3 @@ await this.plugin.saveSettings(); } } - -export class UsageMonitorProxy implements APIClient { - constructor( - private client: APIClient, - private plugin: Markpilot, - ) {} - - hasReachedLimit() { - const { settings } = this.plugin; - - const thisMonth = getThisMonthAsString(); - return ( - settings.usage.monthlyCosts[thisMonth] >= settings.usage.monthlyLimit - ); - } - - async *fetchChat(messages: ChatMessage[]) { - if (this.hasReachedLimit()) { - new Notice( - 'Monthly usage limit reached. 
Please increase the limit to keep on using chat view.', - ); - return; - } - - return await this.client.fetchCompletions(language, prefix, suffix); - } -} diff --git a/src/api/models.ts b/src/api/models.ts new file mode 100644 index 0000000..b205f06 --- /dev/null +++ b/src/api/models.ts @@ -0,0 +1,60 @@ +import { Provider } from './provider'; + +export type OpenAIModel = (typeof OPENAI_MODELS)[number]; + +export type OpenRouterModel = (typeof OPENROUTER_MODELS)[number]; + +export type OllamaModel = (typeof OLLAMA_MODELS)[number]; + +export type Model = OpenAIModel | OpenRouterModel | OllamaModel; + +export const OPENAI_MODELS = [ + 'gpt-3.5-turbo-instruct', + 'davinci-002', + 'babbage-002', + 'gpt-4-0125-preview', + 'gpt-4-turbo-preview', + 'gpt-4-1106-preview', + 'gpt-4-vision-preview', + 'gpt-4', + 'gpt-4-0314', + 'gpt-4-0613', + 'gpt-4-32k', + 'gpt-4-32k-0314', + 'gpt-4-32k-0613', + 'gpt-3.5-turbo', + 'gpt-3.5-turbo-16k', + 'gpt-3.5-turbo-0301', + 'gpt-3.5-turbo-0613', + 'gpt-3.5-turbo-1106', + 'gpt-3.5-turbo-0125', + 'gpt-3.5-turbo-16k-0613', +] as const; + +// TODO: +// This is a placeholder. +export const OPENROUTER_MODELS = [ + 'openai/gpt-3.5-turbo', + 'openai/gpt-4-turbo', +] as const; + +// TODO: +// This is a placeholder. 
+export const OLLAMA_MODELS = [ + 'llama2', + 'llama3', + 'codellama', + 'phind-codellama', +] as const; + +export const MODELS = { + openai: OPENAI_MODELS, + openrouter: OPENROUTER_MODELS, + ollama: OLLAMA_MODELS, +}; + +export const DEFAULT_MODELS: Record = { + openai: 'gpt-3.5-turbo', + openrouter: 'openai/gpt-3.5-turbo', + ollama: 'llama2', +}; diff --git a/src/api/provider.ts b/src/api/provider.ts index 808e5db..9b8fcb9 100644 --- a/src/api/provider.ts +++ b/src/api/provider.ts @@ -1,62 +1,13 @@ -export type Provider = (typeof PROVIDERS)[number]; +export type OnlineProvider = (typeof ONLINE_PROVIDERS)[number]; -export type OpenAIModel = (typeof OPENAI_MODELS)[number]; +export type OfflineProvider = (typeof OFFLINE_PROVIDERS)[number]; -export type OpenRouterModel = (typeof OPENROUTER_MODELS)[number]; +export type Provider = OnlineProvider | OfflineProvider; -export type OllamaModel = (typeof OLLAMA_MODELS)[number]; +export const ONLINE_PROVIDERS = ['openai', 'openrouter'] as const; -export type Model = OpenAIModel | OpenRouterModel | OllamaModel; +export const OFFLINE_PROVIDERS = ['ollama'] as const; -export const PROVIDERS = ['openai', 'openrouter', 'ollama'] as const; +export const PROVIDERS = [...ONLINE_PROVIDERS, ...OFFLINE_PROVIDERS] as const; -export const OPENAI_MODELS = [ - 'gpt-3.5-turbo-instruct', - 'davinci-002', - 'babbage-002', - 'gpt-4-0125-preview', - 'gpt-4-turbo-preview', - 'gpt-4-1106-preview', - 'gpt-4-vision-preview', - 'gpt-4', - 'gpt-4-0314', - 'gpt-4-0613', - 'gpt-4-32k', - 'gpt-4-32k-0314', - 'gpt-4-32k-0613', - 'gpt-3.5-turbo', - 'gpt-3.5-turbo-16k', - 'gpt-3.5-turbo-0301', - 'gpt-3.5-turbo-0613', - 'gpt-3.5-turbo-1106', - 'gpt-3.5-turbo-0125', - 'gpt-3.5-turbo-16k-0613', -] as const; - -// TODO: -// This is a placeholder. -export const OPENROUTER_MODELS = [ - 'openai/gpt-3.5-turbo', - 'openai/gpt-4-turbo', -] as const; - -// TODO: -// This is a placeholder. 
-export const OLLAMA_MODELS = [ - 'llama2', - 'llama3', - 'codellama', - 'phind-codellama', -] as const; - -export const MODELS = { - openai: OPENAI_MODELS, - openrouter: OPENROUTER_MODELS, - ollama: OLLAMA_MODELS, -}; - -export const DEFAULT_MODELS: Record = { - openai: 'gpt-3.5-turbo', - openrouter: 'openai/gpt-3.5-turbo', - ollama: 'llama2', -}; +export const DEFAULT_PROVIDER = 'openai' as Provider; diff --git a/src/api/cache.ts b/src/api/proxies/memory-cache.ts similarity index 95% rename from src/api/cache.ts rename to src/api/proxies/memory-cache.ts index e6b35d7..2b6af28 100644 --- a/src/api/cache.ts +++ b/src/api/proxies/memory-cache.ts @@ -1,7 +1,7 @@ import { createHash } from 'crypto'; import Markpilot from 'src/main'; -import { APIClient } from './client'; -import { ChatMessage } from './types'; +import { ChatMessage } from '../../types'; +import { APIClient } from '../client'; export class MemoryCacheProxy implements APIClient { private store: Map = new Map(); diff --git a/src/api/proxies/usage-monitor.ts b/src/api/proxies/usage-monitor.ts new file mode 100644 index 0000000..7cb5e0e --- /dev/null +++ b/src/api/proxies/usage-monitor.ts @@ -0,0 +1,43 @@ +import { Notice } from 'obsidian'; +import Markpilot from 'src/main'; +import { getThisMonthAsString } from 'src/utils'; +import { ChatMessage } from '../../types'; +import { APIClient } from '../client'; + +export class UsageMonitorProxy implements APIClient { + constructor( + private client: APIClient, + private plugin: Markpilot, + ) {} + + hasReachedLimit() { + const { settings } = this.plugin; + + const thisMonth = getThisMonthAsString(); + return ( + settings.usage.monthlyCosts[thisMonth] >= settings.usage.monthlyLimit + ); + } + + async *fetchChat(messages: ChatMessage[]) { + if (this.hasReachedLimit()) { + new Notice( + 'Monthly usage limit reached. 
Please increase the limit to keep on using chat view.', + ); + return; + } + + yield* this.client.fetchChat(messages); + } + + async fetchCompletions(language: string, prefix: string, suffix: string) { + if (this.hasReachedLimit()) { + new Notice( + 'Monthly usage limit reached. Please increase the limit to keep on using inline completions.', + ); + return; + } + + return await this.client.fetchCompletions(language, prefix, suffix); + } +} diff --git a/src/chat/App.tsx b/src/chat/App.tsx index 1644bbb..9e5ea03 100644 --- a/src/chat/App.tsx +++ b/src/chat/App.tsx @@ -1,6 +1,6 @@ import { useEffect, useState } from 'react'; -import { ChatHistory, ChatRole } from 'src/api/types'; import Markpilot from 'src/main'; +import { ChatHistory, ChatRole } from 'src/types'; import { ChatInput } from './components/ChatBox'; import { ChatItem } from './components/ChatItem'; import { ChatView } from './view'; diff --git a/src/chat/components/ChatItem.tsx b/src/chat/components/ChatItem.tsx index 9525f25..7d284e0 100644 --- a/src/chat/components/ChatItem.tsx +++ b/src/chat/components/ChatItem.tsx @@ -2,7 +2,7 @@ import { Bot, Copy, User } from 'lucide-react'; import ReactMarkdown from 'react-markdown'; import rehypeKatex from 'rehype-katex'; import remarkMath from 'remark-math'; -import { ChatMessage } from 'src/api/types'; +import { ChatMessage } from 'src/types'; export function ChatItem({ message }: { message: ChatMessage }) { return ( diff --git a/src/main.ts b/src/main.ts index 5091162..89f5a53 100644 --- a/src/main.ts +++ b/src/main.ts @@ -8,15 +8,14 @@ import { setIcon, WorkspaceLeaf, } from 'obsidian'; -import { MemoryCacheProxy } from './api/cache'; -import { - APIClient, - OllamaAPIClient, - OpenAIAPIClient, - OpenRouterAPIClient, -} from './api/client'; +import { APIClient } from './api/client'; +import { OllamaAPIClient } from './api/clients/ollama'; +import { OpenAIAPIClient } from './api/clients/openai'; +import { OpenRouterAPIClient } from './api/clients/openrouter';
+import { CostsTracker } from './api/costs'; import { Provider } from './api/provider'; -import { UsageMonitorProxy, UsageTracker } from './api/usage'; +import { MemoryCacheProxy } from './api/proxies/memory-cache'; +import { UsageMonitorProxy } from './api/proxies/usage-monitor'; import { CHAT_VIEW_TYPE, ChatView } from './chat/view'; import { inlineCompletionsExtension } from './editor/extension'; import botOffIcon from './icons/bot-off.svg'; @@ -197,7 +196,7 @@ export default class Markpilot extends Plugin { } createAPIClient(provider: Provider) { - const tracker = new UsageTracker(this); + const tracker = new CostsTracker(this); const client = (() => { switch (provider) { case 'openai': diff --git a/src/settings.ts b/src/settings.ts index 10b1cf2..7888f11 100644 --- a/src/settings.ts +++ b/src/settings.ts @@ -1,13 +1,8 @@ import Chart from 'chart.js/auto'; import { App, Notice, PluginSettingTab, Setting } from 'obsidian'; -import { - DEFAULT_MODELS, - Model, - MODELS, - Provider, - PROVIDERS, -} from './api/provider'; -import { ChatHistory } from './api/types'; +import { DEFAULT_MODELS, Model, MODELS } from './api/models'; +import { DEFAULT_PROVIDER, Provider, PROVIDERS } from './api/provider'; +import { ChatHistory } from './types'; import Markpilot from './main'; import { getDaysInCurrentMonth, validateURL } from './utils'; @@ -71,8 +66,8 @@ export const DEFAULT_SETTINGS: MarkpilotSettings = { }, completions: { enabled: true, - provider: 'openai', - model: 'gpt-3.5-turbo-instruct', + provider: DEFAULT_PROVIDER, + model: DEFAULT_MODELS[DEFAULT_PROVIDER], maxTokens: 64, temperature: 0, waitTime: 500, @@ -84,8 +79,8 @@ export const DEFAULT_SETTINGS: MarkpilotSettings = { }, chat: { enabled: true, - provider: 'openai', - model: 'gpt-3.5-turbo', + provider: DEFAULT_PROVIDER, + model: DEFAULT_MODELS[DEFAULT_PROVIDER], maxTokens: 1024, temperature: 0.5, history: { diff --git a/src/api/types.ts b/src/types.ts similarity index 100% rename from src/api/types.ts rename 
to src/types.ts From ba7b9795230abf2c62f4dc54a0d05a659c435061 Mon Sep 17 00:00:00 2001 From: Taichi Maeda Date: Fri, 19 Apr 2024 22:05:27 +0900 Subject: [PATCH 07/26] Add prototype for few shot prompting --- .../block-quote/example1/assistant.txt | 8 ++ src/api/prompts/block-quote/example1/user.md | 5 ++ src/api/prompts/block-quote/index.ts | 0 src/api/prompts/block-quote/params.json | 3 + .../prompts/code-block/example1/assistant.txt | 6 ++ src/api/prompts/code-block/example1/user.md | 4 + .../prompts/code-block/example2/assistant.txt | 14 ++++ src/api/prompts/code-block/example2/user.md | 10 +++ .../prompts/code-block/example3/assistant.txt | 7 ++ src/api/prompts/code-block/example3/user.md | 11 +++ src/api/prompts/code-block/index.ts | 8 ++ src/api/prompts/code-block/params.json | 3 + src/api/prompts/context.ts | 80 +++++++++++++++++++ .../prompts/heading/example1/assistant.txt | 6 ++ src/api/prompts/heading/example1/user.md | 11 +++ .../prompts/heading/example2/assistant.txt | 6 ++ src/api/prompts/heading/example2/user.md | 5 ++ src/api/prompts/heading/index.ts | 0 src/api/prompts/heading/params.json | 3 + src/api/prompts/index.ts | 21 +++++ .../prompts/list-item/example1/assistant.txt | 7 ++ src/api/prompts/list-item/example1/user.md | 7 ++ src/api/prompts/list-item/index.ts | 0 src/api/prompts/list-item/params.json | 3 + .../prompts/math-block/example1/assistant.txt | 7 ++ src/api/prompts/math-block/example1/user.md | 5 ++ .../prompts/math-block/example2/assistant.txt | 6 ++ src/api/prompts/math-block/example2/user.md | 9 +++ src/api/prompts/math-block/index.ts | 0 src/api/prompts/math-block/params.json | 3 + .../prompts/paragraph/example1/assistant.txt | 7 ++ src/api/prompts/paragraph/example1/user.md | 5 ++ src/api/prompts/paragraph/index.ts | 0 src/api/prompts/paragraph/params.json | 3 + src/api/prompts/system.txt | 10 +++ 35 files changed, 283 insertions(+) create mode 100644 src/api/prompts/block-quote/example1/assistant.txt create mode 100644 
src/api/prompts/block-quote/example1/user.md create mode 100644 src/api/prompts/block-quote/index.ts create mode 100644 src/api/prompts/block-quote/params.json create mode 100644 src/api/prompts/code-block/example1/assistant.txt create mode 100644 src/api/prompts/code-block/example1/user.md create mode 100644 src/api/prompts/code-block/example2/assistant.txt create mode 100644 src/api/prompts/code-block/example2/user.md create mode 100644 src/api/prompts/code-block/example3/assistant.txt create mode 100644 src/api/prompts/code-block/example3/user.md create mode 100644 src/api/prompts/code-block/index.ts create mode 100644 src/api/prompts/code-block/params.json create mode 100644 src/api/prompts/context.ts create mode 100644 src/api/prompts/heading/example1/assistant.txt create mode 100644 src/api/prompts/heading/example1/user.md create mode 100644 src/api/prompts/heading/example2/assistant.txt create mode 100644 src/api/prompts/heading/example2/user.md create mode 100644 src/api/prompts/heading/index.ts create mode 100644 src/api/prompts/heading/params.json create mode 100644 src/api/prompts/index.ts create mode 100644 src/api/prompts/list-item/example1/assistant.txt create mode 100644 src/api/prompts/list-item/example1/user.md create mode 100644 src/api/prompts/list-item/index.ts create mode 100644 src/api/prompts/list-item/params.json create mode 100644 src/api/prompts/math-block/example1/assistant.txt create mode 100644 src/api/prompts/math-block/example1/user.md create mode 100644 src/api/prompts/math-block/example2/assistant.txt create mode 100644 src/api/prompts/math-block/example2/user.md create mode 100644 src/api/prompts/math-block/index.ts create mode 100644 src/api/prompts/math-block/params.json create mode 100644 src/api/prompts/paragraph/example1/assistant.txt create mode 100644 src/api/prompts/paragraph/example1/user.md create mode 100644 src/api/prompts/paragraph/index.ts create mode 100644 src/api/prompts/paragraph/params.json create mode 100644 
src/api/prompts/system.txt diff --git a/src/api/prompts/block-quote/example1/assistant.txt b/src/api/prompts/block-quote/example1/assistant.txt new file mode 100644 index 0000000..aa7a54f --- /dev/null +++ b/src/api/prompts/block-quote/example1/assistant.txt @@ -0,0 +1,8 @@ + +English + +The quote is from Adam Smith's "The Wealth Of Nations". It is often used to describe the concept of the invisible hand in economics. + +led by an invisible hand to promote an end which was no part of his intention. +> +> The Wealth Of Nations, Book IV, Chapter V \ No newline at end of file diff --git a/src/api/prompts/block-quote/example1/user.md b/src/api/prompts/block-quote/example1/user.md new file mode 100644 index 0000000..b22d34f --- /dev/null +++ b/src/api/prompts/block-quote/example1/user.md @@ -0,0 +1,5 @@ +# Adam Smith's Invisible Hand + +Adam Smith, in his seminal work "The Wealth of Nations," coined the term "invisible hand" to describe the self-regulating nature of markets. + +> Every individual... neither intends to promote the public interest, nor knows how much he is promoting it... 
he intends only his own security; and by directing that industry in such a manner as its produce may be of the greatest value, he intends only his own gain, and he is in this, as in many other cases, diff --git a/src/api/prompts/block-quote/index.ts b/src/api/prompts/block-quote/index.ts new file mode 100644 index 0000000..e69de29 diff --git a/src/api/prompts/block-quote/params.json b/src/api/prompts/block-quote/params.json new file mode 100644 index 0000000..e5ca2a7 --- /dev/null +++ b/src/api/prompts/block-quote/params.json @@ -0,0 +1,3 @@ +{ + "stop": ["\n\n"] +} diff --git a/src/api/prompts/code-block/example1/assistant.txt b/src/api/prompts/code-block/example1/assistant.txt new file mode 100644 index 0000000..f77a351 --- /dev/null +++ b/src/api/prompts/code-block/example1/assistant.txt @@ -0,0 +1,6 @@ + +C + +This paragraph explains the strcmp function in C, which is used for comparing two strings. + +strcmp \ No newline at end of file diff --git a/src/api/prompts/code-block/example1/user.md b/src/api/prompts/code-block/example1/user.md new file mode 100644 index 0000000..593b472 --- /dev/null +++ b/src/api/prompts/code-block/example1/user.md @@ -0,0 +1,4 @@ +# String Comparison + +In C, `` function compares two strings and returns an integer value based on the comparison. +The function compares the two strings character by character until it finds a difference or reaches the end of one of the strings. diff --git a/src/api/prompts/code-block/example2/assistant.txt b/src/api/prompts/code-block/example2/assistant.txt new file mode 100644 index 0000000..5373806 --- /dev/null +++ b/src/api/prompts/code-block/example2/assistant.txt @@ -0,0 +1,14 @@ + +JavaScript + +This JavaScript function should sort an array of numbers in ascending order using the bubble sort algorithm. +The bubble sort algorithm works by comparing each element in the array with the element next to it, and swapping them if they are in the wrong order. 
+In this specific case the code should use 2 spaces for indentation. + +for (let i = 0; i < array.length; i++) { + for (let j = 0; j < array.length; j++) { + if (array[j] > array[j + 1]) { + [array[j], array[j + 1]] = [array[j + 1], array[j]]; + } + } +} \ No newline at end of file diff --git a/src/api/prompts/code-block/example2/user.md b/src/api/prompts/code-block/example2/user.md new file mode 100644 index 0000000..07013bf --- /dev/null +++ b/src/api/prompts/code-block/example2/user.md @@ -0,0 +1,10 @@ +# Bubble Sort + +Bubble sort is a simple sorting algorithm that repeatedly steps through the list, compares adjacent elements and swaps them if they are in the wrong order. + +```js +function bubbleSort(array) { + + return array; +} +``` diff --git a/src/api/prompts/code-block/example3/assistant.txt b/src/api/prompts/code-block/example3/assistant.txt new file mode 100644 index 0000000..999bc69 --- /dev/null +++ b/src/api/prompts/code-block/example3/assistant.txt @@ -0,0 +1,7 @@ + +Python + +This Python function should take an integer as input and determine if it is a prime number. +In this specific case the code should use 4 spaces for indentation. 
+ +n: int \ No newline at end of file diff --git a/src/api/prompts/code-block/example3/user.md b/src/api/prompts/code-block/example3/user.md new file mode 100644 index 0000000..41e1830 --- /dev/null +++ b/src/api/prompts/code-block/example3/user.md @@ -0,0 +1,11 @@ +```python +def is_prime() -> int: + if n < 2: + return False + i = 2 + while i * i <= n: + if n % i == 0: + return False + i += 1 + return True +``` diff --git a/src/api/prompts/code-block/index.ts b/src/api/prompts/code-block/index.ts new file mode 100644 index 0000000..19dc590 --- /dev/null +++ b/src/api/prompts/code-block/index.ts @@ -0,0 +1,8 @@ +import { FewShowExample } from '../example'; +import assistant from './assistant.txt'; +import user from './user.md'; + +export const CODE_BLOCK_EXAMPLE: FewShowExample = { + user, + assistant, +}; diff --git a/src/api/prompts/code-block/params.json b/src/api/prompts/code-block/params.json new file mode 100644 index 0000000..ce71a08 --- /dev/null +++ b/src/api/prompts/code-block/params.json @@ -0,0 +1,3 @@ +{ + "stop": ["```", "````"] +} diff --git a/src/api/prompts/context.ts b/src/api/prompts/context.ts new file mode 100644 index 0000000..64cfab7 --- /dev/null +++ b/src/api/prompts/context.ts @@ -0,0 +1,80 @@ +function generateRandomString(n: number): string { + let result = ''; + const characters = '0123456789abcdef'; + for (let i = 0; i < n; i++) { + const randomIndex = Math.floor(Math.random() * characters.length); + result += characters[randomIndex]; + } + return result; +} + +const UNIQUE_CURSOR = `${generateRandomString(16)}`; +const HEADER_REGEX = `^#+\\s.*${UNIQUE_CURSOR}.*$`; +const UNORDERED_LIST_REGEX = `^\\s*(-|\\*)\\s.*${UNIQUE_CURSOR}.*$`; +const TASK_LIST_REGEX = `^\\s*(-|[0-9]+\\.) 
+\\[.\\]\\s.*${UNIQUE_CURSOR}.*$`; +const BLOCK_QUOTES_REGEX = `^\\s*>.*${UNIQUE_CURSOR}.*$`; +const NUMBERED_LIST_REGEX = `^\\s*\\d+\\.\\s.*${UNIQUE_CURSOR}.*$`; +const MATH_BLOCK_REGEX = /\$\$[\s\S]*?\$\$/g; +const INLINE_MATH_BLOCK_REGEX = /\$[\s\S]*?\$/g; +const CODE_BLOCK_REGEX = /```[\s\S]*?```/g; +const INLINE_CODE_BLOCK_REGEX = /`.*`/g; + +export const CONTEXTS = [ + 'heading', + 'paragraph', + 'list-item', + 'block-quote', + 'math-block', + 'code-block', +] as const; + +export type Context = (typeof CONTEXTS)[number]; + +// TODO: +// Determine the language of code blocks and return it along with the context. +export function getContext(prefix: string, suffix: string): Context { + const text = prefix + UNIQUE_CURSOR + suffix; + if (new RegExp(HEADER_REGEX, 'gm').test(text)) { + return 'heading'; + } + if (new RegExp(BLOCK_QUOTES_REGEX, 'gm').test(text)) { + return 'block-quote'; + } + if ( + new RegExp(NUMBERED_LIST_REGEX, 'gm').test(text) || + new RegExp(UNORDERED_LIST_REGEX, 'gm').test(text) || + new RegExp(TASK_LIST_REGEX, 'gm').test(text) + ) { + return 'list-item'; + } + if ( + isCursorInRegexBlock(text, MATH_BLOCK_REGEX) || + isCursorInRegexBlock(text, INLINE_MATH_BLOCK_REGEX) + ) { + return 'math-block'; + } + + if ( + isCursorInRegexBlock(text, CODE_BLOCK_REGEX) || + isCursorInRegexBlock(text, INLINE_CODE_BLOCK_REGEX) + ) { + return 'code-block'; + } + + return 'paragraph'; +} + +function isCursorInRegexBlock(text: string, regex: RegExp): boolean { + const codeBlocks = extractBlocks(text, regex); + for (const block of codeBlocks) { + if (block.includes(UNIQUE_CURSOR)) { + return true; + } + } + return false; +} + +function extractBlocks(text: string, regex: RegExp) { + const codeBlocks = text.match(regex); + return codeBlocks ? 
codeBlocks.map((block) => block.trim()) : []; +} diff --git a/src/api/prompts/heading/example1/assistant.txt b/src/api/prompts/heading/example1/assistant.txt new file mode 100644 index 0000000..00f3dc0 --- /dev/null +++ b/src/api/prompts/heading/example1/assistant.txt @@ -0,0 +1,6 @@ + +English + +This is a list of the plays written by William Shakespeare, so the missing title "William Shakespeare" should be inserted and nothing after it. + +William Shakespeare \ No newline at end of file diff --git a/src/api/prompts/heading/example1/user.md b/src/api/prompts/heading/example1/user.md new file mode 100644 index 0000000..c8830c7 --- /dev/null +++ b/src/api/prompts/heading/example1/user.md @@ -0,0 +1,11 @@ +# + +## Plays + +### The Tragedy of Macbeth + +The Tragedy of Macbeth is a play by William Shakespeare. It is believed to have been written between 1603 and 1607. + +### Romeo and Juliet + +Romeo and Juliet is a play by William Shakespeare. It is one of his most famous works and is often considered one of the greatest love stories ever told. diff --git a/src/api/prompts/heading/example2/assistant.txt b/src/api/prompts/heading/example2/assistant.txt new file mode 100644 index 0000000..49fc36d --- /dev/null +++ b/src/api/prompts/heading/example2/assistant.txt @@ -0,0 +1,6 @@ + +English + +This paragraph summarizes the main points of the First Punic War. The title starts with "First" and ends with the year "264-241 BC", so the missing word "Punic War" should be inserted. + +Punic War \ No newline at end of file diff --git a/src/api/prompts/heading/example2/user.md b/src/api/prompts/heading/example2/user.md new file mode 100644 index 0000000..528a686 --- /dev/null +++ b/src/api/prompts/heading/example2/user.md @@ -0,0 +1,5 @@ +# Punic Wars + +## First <>, 264-241 BC + +The First Punic War was fought between Rome and Carthage from 264 to 241 BCE. It was the first of three major wars fought between the two powers. 
The war was fought over control of Sicily, a large island located to the south of Italy. The war ended with a Roman victory and the signing of a peace treaty that saw Carthage cede control of Sicily to Rome. diff --git a/src/api/prompts/heading/index.ts b/src/api/prompts/heading/index.ts new file mode 100644 index 0000000..e69de29 diff --git a/src/api/prompts/heading/params.json b/src/api/prompts/heading/params.json new file mode 100644 index 0000000..f29602f --- /dev/null +++ b/src/api/prompts/heading/params.json @@ -0,0 +1,3 @@ +{ + "stop": ["\n"] +} diff --git a/src/api/prompts/index.ts b/src/api/prompts/index.ts new file mode 100644 index 0000000..1b9a18d --- /dev/null +++ b/src/api/prompts/index.ts @@ -0,0 +1,21 @@ +import systemPrompt from './system.txt'; + +export interface FewShowExample { + user: string; + assistant: string; +} + +export class PromptGenerator { + private systemPrompt = systemPrompt; + + generate(prefix: string, suffix: string): string { + // TODO: + // 1. Determine the context from prefix and suffix. + // 2. Generate a prompt based on the context, with prefix and suffix trimmed according to window size. + const language = 'english'; + if (language) { + return this.systemPrompt.replace('{{LANGUAGE}}', language); + } + return ''; + } +} diff --git a/src/api/prompts/list-item/example1/assistant.txt b/src/api/prompts/list-item/example1/assistant.txt new file mode 100644 index 0000000..d6479ef --- /dev/null +++ b/src/api/prompts/list-item/example1/assistant.txt @@ -0,0 +1,7 @@ + +English + +This is a list of the ACID principles in database management. The "Consistency" and "Isolation" principles are missing and so must be inserted in a consistent format. + +Consistency: Transactions maintain database validity by transitioning it between consistent states. +- Isolation: Transactions execute independently, preventing interference between concurrent operations. 
\ No newline at end of file diff --git a/src/api/prompts/list-item/example1/user.md b/src/api/prompts/list-item/example1/user.md new file mode 100644 index 0000000..42bb874 --- /dev/null +++ b/src/api/prompts/list-item/example1/user.md @@ -0,0 +1,7 @@ +# The ACID Principle + +The ACID principle in database management ensures transaction reliability: + +- Atomicity: Transactions are all-or-nothing, guaranteeing data integrity. +- +- Durability: Committed transactions persist even through system failures. diff --git a/src/api/prompts/list-item/index.ts b/src/api/prompts/list-item/index.ts new file mode 100644 index 0000000..e69de29 diff --git a/src/api/prompts/list-item/params.json b/src/api/prompts/list-item/params.json new file mode 100644 index 0000000..e5ca2a7 --- /dev/null +++ b/src/api/prompts/list-item/params.json @@ -0,0 +1,3 @@ +{ + "stop": ["\n\n"] +} diff --git a/src/api/prompts/math-block/example1/assistant.txt b/src/api/prompts/math-block/example1/assistant.txt new file mode 100644 index 0000000..a2d766c --- /dev/null +++ b/src/api/prompts/math-block/example1/assistant.txt @@ -0,0 +1,7 @@ + +LaTeX + +This is an example of the contrapositive of a statement. +In this case, the original statement is "If P, then Q." The contrapositive is "If not Q, then not P.", which is written in LaTeX as $\neg Q\implies\neg P$. + +\neg Q\implies\neg P \ No newline at end of file diff --git a/src/api/prompts/math-block/example1/user.md b/src/api/prompts/math-block/example1/user.md new file mode 100644 index 0000000..fc64c5e --- /dev/null +++ b/src/api/prompts/math-block/example1/user.md @@ -0,0 +1,5 @@ +# Contraposition + +The contraposition of a statement is the statement formed by negating both the hypothesis and conclusion of the original statement and then interchanging them. + +For instance, the contraposition of a statement $P\implies Q$, is "". 
diff --git a/src/api/prompts/math-block/example2/assistant.txt b/src/api/prompts/math-block/example2/assistant.txt new file mode 100644 index 0000000..f3ce34f --- /dev/null +++ b/src/api/prompts/math-block/example2/assistant.txt @@ -0,0 +1,6 @@ + +LaTeX + +This is the formula for the standard deviation of a sample. It should be the LaTeX formula representing the square root of the average of the squared differences between each data point and the mean. + +\sqrt{\frac{\sum_{i=1}^{n}(x_i - \mu)^2}{n}} \ No newline at end of file diff --git a/src/api/prompts/math-block/example2/user.md b/src/api/prompts/math-block/example2/user.md new file mode 100644 index 0000000..3d73065 --- /dev/null +++ b/src/api/prompts/math-block/example2/user.md @@ -0,0 +1,9 @@ +# Standard Deviation + +The standard deviation is given by the formula: + +$$ +\sigma = +$$ + +where $\mu$ is the mean of the observations, $n$ is the number of observations and $x_i$ is the value of the $i$-th observation. diff --git a/src/api/prompts/math-block/index.ts b/src/api/prompts/math-block/index.ts new file mode 100644 index 0000000..e69de29 diff --git a/src/api/prompts/math-block/params.json b/src/api/prompts/math-block/params.json new file mode 100644 index 0000000..6de3036 --- /dev/null +++ b/src/api/prompts/math-block/params.json @@ -0,0 +1,3 @@ +{ + "stop": ["$ ", "$$"] +} diff --git a/src/api/prompts/paragraph/example1/assistant.txt b/src/api/prompts/paragraph/example1/assistant.txt new file mode 100644 index 0000000..5ab2f73 --- /dev/null +++ b/src/api/prompts/paragraph/example1/assistant.txt @@ -0,0 +1,7 @@ + +English, French + +This paragraph explains the passé composé. +It shows "Je vais à la gare" (I go to the station) as an example of the present tense. The corresponding passé composé should be "Je suis allé à la gare" (I went to the station). + +"Je suis allé à la gare" (I went to the station). 
\ No newline at end of file diff --git a/src/api/prompts/paragraph/example1/user.md b/src/api/prompts/paragraph/example1/user.md new file mode 100644 index 0000000..f8f51be --- /dev/null +++ b/src/api/prompts/paragraph/example1/user.md @@ -0,0 +1,5 @@ +# Passé Composé + +The passé composé is formed by using the auxiliary verb "avoir" or "etre" and the past participle of the main verb. The auxiliary verb is conjugated in the present tense, while the past participle remains unchanged. + +For example, the passé composé of "Je vais à la gare" (I go to the station) is diff --git a/src/api/prompts/paragraph/index.ts b/src/api/prompts/paragraph/index.ts new file mode 100644 index 0000000..e69de29 diff --git a/src/api/prompts/paragraph/params.json b/src/api/prompts/paragraph/params.json new file mode 100644 index 0000000..e5ca2a7 --- /dev/null +++ b/src/api/prompts/paragraph/params.json @@ -0,0 +1,3 @@ +{ + "stop": ["\n\n"] +} diff --git a/src/api/prompts/system.txt b/src/api/prompts/system.txt new file mode 100644 index 0000000..3d026d7 --- /dev/null +++ b/src/api/prompts/system.txt @@ -0,0 +1,10 @@ +Predict the most logical text written in the language {{LANGUAGE}} at the location of the . +Your answer can be either code, a single word, or multiple sentences, depending on the language . +Your answer cannot have any overlapping text directly surrounding the . +Your answer must have the following format: + +Here, you write the language of your answer, e.g. English, Chinese, Python, JSON etc. + +Here, you reason about the answer; use the 80/20 principle to be brief. 
+ +Here, you write the text that should be at the location of \ No newline at end of file From bb6c59096402cb05d5ec33e134bc98f54ace9d22 Mon Sep 17 00:00:00 2001 From: Taichi Maeda Date: Fri, 19 Apr 2024 22:06:34 +0900 Subject: [PATCH 08/26] Reorganise files --- src/api/clients/gemini.ts | 2 +- src/api/clients/ollama.ts | 2 +- src/api/clients/openai-compatible.ts | 2 +- src/api/clients/openai.ts | 2 +- src/api/clients/openrouter.ts | 2 +- src/api/{client.ts => index.ts} | 0 src/api/proxies/memory-cache.ts | 2 +- src/api/proxies/usage-monitor.ts | 2 +- src/index.d.ts | 10 ++++++++++ src/main.ts | 2 +- 10 files changed, 18 insertions(+), 8 deletions(-) rename src/api/{client.ts => index.ts} (100%) diff --git a/src/api/clients/gemini.ts b/src/api/clients/gemini.ts index 3ab2efd..9bf4fb9 100644 --- a/src/api/clients/gemini.ts +++ b/src/api/clients/gemini.ts @@ -1,5 +1,5 @@ +import { APIClient } from '..'; import { ChatMessage } from '../../types'; -import { APIClient } from '../client'; // TODO: // Implement API client for Gemini. 
diff --git a/src/api/clients/ollama.ts b/src/api/clients/ollama.ts index c9ad90a..05ce474 100644 --- a/src/api/clients/ollama.ts +++ b/src/api/clients/ollama.ts @@ -2,7 +2,7 @@ import { Notice } from 'obsidian'; import OpenAI from 'openai'; import Markpilot from 'src/main'; import { validateURL } from 'src/utils'; -import { APIClient } from '../client'; +import { APIClient } from '..'; import { CostsTracker } from '../costs'; import { OpenAICompatibleAPIClient } from './openai-compatible'; diff --git a/src/api/clients/openai-compatible.ts b/src/api/clients/openai-compatible.ts index e143c43..519a77c 100644 --- a/src/api/clients/openai-compatible.ts +++ b/src/api/clients/openai-compatible.ts @@ -2,8 +2,8 @@ import { getEncoding } from 'js-tiktoken'; import { Notice } from 'obsidian'; import OpenAI from 'openai'; import Markpilot from 'src/main'; +import { APIClient } from '..'; import { ChatMessage } from '../../types'; -import { APIClient } from '../client'; import { CostsTracker } from '../costs'; export abstract class OpenAICompatibleAPIClient implements APIClient { diff --git a/src/api/clients/openai.ts b/src/api/clients/openai.ts index bf27d65..cfc558b 100644 --- a/src/api/clients/openai.ts +++ b/src/api/clients/openai.ts @@ -1,7 +1,7 @@ import { Notice } from 'obsidian'; import OpenAI from 'openai'; import Markpilot from 'src/main'; -import { APIClient } from '../client'; +import { APIClient } from '..'; import { CostsTracker } from '../costs'; import { OpenAICompatibleAPIClient } from './openai-compatible'; diff --git a/src/api/clients/openrouter.ts b/src/api/clients/openrouter.ts index abba5fd..6be34fd 100644 --- a/src/api/clients/openrouter.ts +++ b/src/api/clients/openrouter.ts @@ -1,7 +1,7 @@ import { Notice } from 'obsidian'; import OpenAI from 'openai'; import Markpilot from 'src/main'; -import { APIClient } from '../client'; +import { APIClient } from '..'; import { CostsTracker } from '../costs'; import { OpenAICompatibleAPIClient } from 
'./openai-compatible'; diff --git a/src/api/client.ts b/src/api/index.ts similarity index 100% rename from src/api/client.ts rename to src/api/index.ts diff --git a/src/api/proxies/memory-cache.ts b/src/api/proxies/memory-cache.ts index 2b6af28..af4b8f3 100644 --- a/src/api/proxies/memory-cache.ts +++ b/src/api/proxies/memory-cache.ts @@ -1,7 +1,7 @@ import { createHash } from 'crypto'; import Markpilot from 'src/main'; +import { APIClient } from '..'; import { ChatMessage } from '../../types'; -import { APIClient } from '../client'; export class MemoryCacheProxy implements APIClient { private store: Map = new Map(); diff --git a/src/api/proxies/usage-monitor.ts b/src/api/proxies/usage-monitor.ts index 7cb5e0e..b9b3c6e 100644 --- a/src/api/proxies/usage-monitor.ts +++ b/src/api/proxies/usage-monitor.ts @@ -1,8 +1,8 @@ import { Notice } from 'obsidian'; import Markpilot from 'src/main'; import { getThisMonthAsString } from 'src/utils'; +import { APIClient } from '..'; import { ChatMessage } from '../../types'; -import { APIClient } from '../client'; export class UsageMonitorProxy implements APIClient { constructor( diff --git a/src/index.d.ts b/src/index.d.ts index cdb2b1a..9c536e7 100644 --- a/src/index.d.ts +++ b/src/index.d.ts @@ -2,3 +2,13 @@ declare module '*.svg' { const content: string; export default content; } + +declare module '*.md' { + const content: string; + export default content; +} + +declare module '*.txt' { + const content: string; + export default content; +} diff --git a/src/main.ts b/src/main.ts index 89f5a53..6c24ad8 100644 --- a/src/main.ts +++ b/src/main.ts @@ -8,7 +8,7 @@ import { setIcon, WorkspaceLeaf, } from 'obsidian'; -import { APIClient } from './api/client'; +import { APIClient } from './api'; import { OllamaAPIClient } from './api/clients/ollama'; import { OpenAIAPIClient } from './api/clients/openai'; import { OpenRouterAPIClient } from './api/clients/openrouter'; From 4d0d54b84fcf75aea91e2ed67759877d0a96ccd6 Mon Sep 17 00:00:00 
2001 From: Taichi Maeda Date: Sat, 20 Apr 2024 00:57:39 +0900 Subject: [PATCH 09/26] Pretty print provider names in settings --- src/api/provider.ts | 6 ++++++ src/settings.ts | 11 ++++++++--- 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/src/api/provider.ts b/src/api/provider.ts index 9b8fcb9..40d044f 100644 --- a/src/api/provider.ts +++ b/src/api/provider.ts @@ -10,4 +10,10 @@ export const OFFLINE_PROVIDERS = ['ollama'] as const; export const PROVIDERS = [...ONLINE_PROVIDERS, ...OFFLINE_PROVIDERS] as const; +export const PROVIDERS_NAMES: Record = { + openai: 'OpenAI', + openrouter: 'OpenRouter', + ollama: 'Ollama', +}; + export const DEFAULT_PROVIDER = 'openai' as Provider; diff --git a/src/settings.ts b/src/settings.ts index 7888f11..de48578 100644 --- a/src/settings.ts +++ b/src/settings.ts @@ -1,7 +1,12 @@ import Chart from 'chart.js/auto'; import { App, Notice, PluginSettingTab, Setting } from 'obsidian'; import { DEFAULT_MODELS, Model, MODELS } from './api/models'; -import { DEFAULT_PROVIDER, Provider, PROVIDERS } from './api/provider'; +import { + DEFAULT_PROVIDER, + Provider, + PROVIDERS, + PROVIDERS_NAMES, +} from './api/provider'; import { ChatHistory } from './types'; import Markpilot from './main'; @@ -214,7 +219,7 @@ export class MarkpilotSettingTab extends PluginSettingTab { .setDesc('Select the provider for inline completions.') .addDropdown((dropdown) => { for (const option of PROVIDERS) { - dropdown.addOption(option, option); + dropdown.addOption(option, PROVIDERS_NAMES[option]); } dropdown .setDisabled(!settings.completions.enabled) @@ -410,7 +415,7 @@ export class MarkpilotSettingTab extends PluginSettingTab { .setDesc('Select the provider for chat view.') .addDropdown((dropdown) => { for (const option of PROVIDERS) { - dropdown.addOption(option, option); + dropdown.addOption(option, PROVIDERS_NAMES[option]); } dropdown .setDisabled(!settings.chat.enabled) From 7883c3675f223c1c48665a2c9b0ce090239f5674 Mon Sep 17 00:00:00 2001 
From: Taichi Maeda Date: Sat, 20 Apr 2024 00:57:48 +0900 Subject: [PATCH 10/26] Refactor context detection logic --- src/api/prompts/context.ts | 74 ++++++++++++++++++-------------------- 1 file changed, 35 insertions(+), 39 deletions(-) diff --git a/src/api/prompts/context.ts b/src/api/prompts/context.ts index 64cfab7..4a2e6ef 100644 --- a/src/api/prompts/context.ts +++ b/src/api/prompts/context.ts @@ -1,22 +1,20 @@ -function generateRandomString(n: number): string { - let result = ''; - const characters = '0123456789abcdef'; - for (let i = 0; i < n; i++) { - const randomIndex = Math.floor(Math.random() * characters.length); - result += characters[randomIndex]; - } - return result; -} +// NOTE: +// This context detection module is heavily inspired by `j0rd1smit/obsidian-copilot-auto-completion`: +// https://github.com/j0rd1smit/obsidian-copilot-auto-completion/blob/32912133b3eea43b8bfca94258ce2ca55445b2ce/src/context_detection.ts + +// NOTE: +// Unicode character \uFFFF is not a valid character +// so we use it to represent the cursor position, assuming the user does not intentionally copy and paste it. +const CURSOR_CHAR = '\uFFFF'; -const UNIQUE_CURSOR = `${generateRandomString(16)}`; -const HEADER_REGEX = `^#+\\s.*${UNIQUE_CURSOR}.*$`; -const UNORDERED_LIST_REGEX = `^\\s*(-|\\*)\\s.*${UNIQUE_CURSOR}.*$`; -const TASK_LIST_REGEX = `^\\s*(-|[0-9]+\\.) +\\[.\\]\\s.*${UNIQUE_CURSOR}.*$`; -const BLOCK_QUOTES_REGEX = `^\\s*>.*${UNIQUE_CURSOR}.*$`; -const NUMBERED_LIST_REGEX = `^\\s*\\d+\\.\\s.*${UNIQUE_CURSOR}.*$`; +const HEADER_REGEX = /^#+\s.*\uFFFF.*$/gm; +const UNORDERED_LIST_REGEX = /^\s*(-|\*)\s.*\uFFFF.*$/gm; +const TASK_LIST_REGEX = /^\s*(-|[0-9]+\.) 
+\[.\]\s.*\uFFFF.*$/gm; +const BLOCK_QUOTES_REGEX = /^\s*>.*\uFFFF.*$/gm; +const NUMBERED_LIST_REGEX = /^\s*\d+\.\s.*\uFFFF.*$/gm; const MATH_BLOCK_REGEX = /\$\$[\s\S]*?\$\$/g; const INLINE_MATH_BLOCK_REGEX = /\$[\s\S]*?\$/g; -const CODE_BLOCK_REGEX = /```[\s\S]*?```/g; +const CODE_BLOCK_REGEX = /```(?.*)[\s\S]*?```/g; const INLINE_CODE_BLOCK_REGEX = /`.*`/g; export const CONTEXTS = [ @@ -30,33 +28,30 @@ export const CONTEXTS = [ export type Context = (typeof CONTEXTS)[number]; -// TODO: -// Determine the language of code blocks and return it along with the context. export function getContext(prefix: string, suffix: string): Context { - const text = prefix + UNIQUE_CURSOR + suffix; - if (new RegExp(HEADER_REGEX, 'gm').test(text)) { + const text = prefix + CURSOR_CHAR + suffix; + if (HEADER_REGEX.test(text)) { return 'heading'; } - if (new RegExp(BLOCK_QUOTES_REGEX, 'gm').test(text)) { + if (BLOCK_QUOTES_REGEX.test(text)) { return 'block-quote'; } if ( - new RegExp(NUMBERED_LIST_REGEX, 'gm').test(text) || - new RegExp(UNORDERED_LIST_REGEX, 'gm').test(text) || - new RegExp(TASK_LIST_REGEX, 'gm').test(text) + NUMBERED_LIST_REGEX.test(text) || + UNORDERED_LIST_REGEX.test(text) || + TASK_LIST_REGEX.test(text) ) { return 'list-item'; } if ( - isCursorInRegexBlock(text, MATH_BLOCK_REGEX) || - isCursorInRegexBlock(text, INLINE_MATH_BLOCK_REGEX) + isCursorInBlock(text, MATH_BLOCK_REGEX) || + isCursorInBlock(text, INLINE_MATH_BLOCK_REGEX) ) { return 'math-block'; } - if ( - isCursorInRegexBlock(text, CODE_BLOCK_REGEX) || - isCursorInRegexBlock(text, INLINE_CODE_BLOCK_REGEX) + isCursorInBlock(text, CODE_BLOCK_REGEX) || + isCursorInBlock(text, INLINE_CODE_BLOCK_REGEX) ) { return 'code-block'; } @@ -64,17 +59,18 @@ export function getContext(prefix: string, suffix: string): Context { return 'paragraph'; } -function isCursorInRegexBlock(text: string, regex: RegExp): boolean { - const codeBlocks = extractBlocks(text, regex); - for (const block of codeBlocks) { - if 
(block.includes(UNIQUE_CURSOR)) { - return true; - } +export function getLanguage(prefix: string, suffix: string): string { + const text = prefix + CURSOR_CHAR + suffix; + if (!isCursorInBlock(text, CODE_BLOCK_REGEX)) { + throw new Error('Cursor is not in a code block'); } - return false; + + const match = text.match(CODE_BLOCK_REGEX); + const language = match?.groups?.language ?? 'plaintext'; + return `${language}code-block`; } -function extractBlocks(text: string, regex: RegExp) { - const codeBlocks = text.match(regex); - return codeBlocks ? codeBlocks.map((block) => block.trim()) : []; +function isCursorInBlock(text: string, regex: RegExp): boolean { + const blocks = text.match(regex) ?? []; + return blocks.some((block) => block.includes(CURSOR_CHAR)); } From f8441a35e7c040cf4fa810c454f421158e5e8465 Mon Sep 17 00:00:00 2001 From: Taichi Maeda Date: Sat, 20 Apr 2024 02:05:31 +0900 Subject: [PATCH 11/26] Update context detection code --- src/api/clients/gemini.ts | 1 - src/api/clients/openai-compatible.ts | 11 ++-- src/api/index.ts | 6 +- src/api/prompts/block-quote/index.ts | 14 ++++ src/api/prompts/block-quote/params.json | 3 - src/api/prompts/block-quote/system.txt | 12 ++++ src/api/prompts/code-block/index.ts | 30 +++++++-- src/api/prompts/code-block/params.json | 3 - src/api/prompts/code-block/system.txt | 12 ++++ src/api/prompts/context.ts | 5 +- src/api/prompts/generator.ts | 62 ++++++++++++++++++ src/api/prompts/heading/index.ts | 20 ++++++ src/api/prompts/heading/params.json | 3 - src/api/prompts/heading/system.txt | 12 ++++ src/api/prompts/index.ts | 22 ++----- src/api/prompts/list-item/index.ts | 14 ++++ src/api/prompts/list-item/params.json | 3 - src/api/prompts/list-item/system.txt | 13 ++++ src/api/prompts/math-block/index.ts | 20 ++++++ src/api/prompts/math-block/params.json | 3 - src/api/prompts/math-block/system.txt | 13 ++++ src/api/prompts/paragraph/index.ts | 14 ++++ src/api/prompts/paragraph/params.json | 3 - 
src/api/prompts/paragraph/system.txt | 12 ++++ src/api/prompts/system.txt | 10 --- src/api/proxies/memory-cache.ts | 33 ++++------ src/api/proxies/usage-monitor.ts | 4 +- src/editor/extension.ts | 1 - src/editor/listener.ts | 86 +++---------------------- 29 files changed, 281 insertions(+), 164 deletions(-) delete mode 100644 src/api/prompts/block-quote/params.json create mode 100644 src/api/prompts/block-quote/system.txt delete mode 100644 src/api/prompts/code-block/params.json create mode 100644 src/api/prompts/code-block/system.txt create mode 100644 src/api/prompts/generator.ts delete mode 100644 src/api/prompts/heading/params.json create mode 100644 src/api/prompts/heading/system.txt delete mode 100644 src/api/prompts/list-item/params.json create mode 100644 src/api/prompts/list-item/system.txt delete mode 100644 src/api/prompts/math-block/params.json create mode 100644 src/api/prompts/math-block/system.txt delete mode 100644 src/api/prompts/paragraph/params.json create mode 100644 src/api/prompts/paragraph/system.txt delete mode 100644 src/api/prompts/system.txt diff --git a/src/api/clients/gemini.ts b/src/api/clients/gemini.ts index 9bf4fb9..4612a7e 100644 --- a/src/api/clients/gemini.ts +++ b/src/api/clients/gemini.ts @@ -9,7 +9,6 @@ export class GeminiAPIClient implements APIClient { throw new Error('Method not implemented.'); } fetchCompletions( - language: string, prefix: string, suffix: string, ): Promise { diff --git a/src/api/clients/openai-compatible.ts b/src/api/clients/openai-compatible.ts index 519a77c..2435853 100644 --- a/src/api/clients/openai-compatible.ts +++ b/src/api/clients/openai-compatible.ts @@ -61,7 +61,7 @@ export abstract class OpenAICompatibleAPIClient implements APIClient { } } - async fetchCompletions(language: string, prefix: string, suffix: string) { + async fetchCompletions(prefix: string, suffix: string) { const { settings } = this.plugin; if (this.openai === undefined) { @@ -69,9 +69,10 @@ export abstract class 
OpenAICompatibleAPIClient implements APIClient { } try { - const completions = await this.openai.completions.create({ - prompt: `Continue the following code written in ${language} language:\n\n${prefix}`, - suffix, + // TODO: + // Get messages from the prompt generator. + const completions = await this.openai.chat.completions.create({ + messages: [], model: settings.completions.model, max_tokens: settings.completions.maxTokens, temperature: settings.completions.temperature, @@ -90,7 +91,7 @@ export abstract class OpenAICompatibleAPIClient implements APIClient { outputTokens, ); - return completions.choices[0].text; + return completions.choices[0].message.content ?? undefined; } catch (error) { console.error(error); new Notice( diff --git a/src/api/index.ts b/src/api/index.ts index 2aa860c..9e99472 100644 --- a/src/api/index.ts +++ b/src/api/index.ts @@ -2,9 +2,5 @@ import { ChatMessage } from '../types'; export interface APIClient { fetchChat(messages: ChatMessage[]): AsyncGenerator; - fetchCompletions( - language: string, - prefix: string, - suffix: string, - ): Promise; + fetchCompletions(prefix: string, suffix: string): Promise; } diff --git a/src/api/prompts/block-quote/index.ts b/src/api/prompts/block-quote/index.ts index e69de29..8ff3d6c 100644 --- a/src/api/prompts/block-quote/index.ts +++ b/src/api/prompts/block-quote/index.ts @@ -0,0 +1,14 @@ +import { FewShotPrompt } from '..'; +import example1Assistant from './example1/assistant.txt'; +import example1User from './example1/user.md'; +import system from './system.txt'; + +export const BlockQuotePrompt: FewShotPrompt = { + system, + examples: [ + { + user: example1User, + assistant: example1Assistant, + }, + ], +}; diff --git a/src/api/prompts/block-quote/params.json b/src/api/prompts/block-quote/params.json deleted file mode 100644 index e5ca2a7..0000000 --- a/src/api/prompts/block-quote/params.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "stop": ["\n\n"] -} diff --git a/src/api/prompts/block-quote/system.txt 
b/src/api/prompts/block-quote/system.txt new file mode 100644 index 0000000..293b6ca --- /dev/null +++ b/src/api/prompts/block-quote/system.txt @@ -0,0 +1,12 @@ +Complete the most suitable text at the location of the . +The is located within a Markdown block quote. +Your answer must complete this quote in a way that fits the context of the surrounding text. +Your answer must be written in the same language as the surrounding text. +Your answer must not overlap with any text adjacent to the . +Your answer must have the following format: + +Here, you write the language of your response e.g. English, Chinese, TypeScript, Python. + +Here, you reason about the answer, using the 80/20 rule for clarity and conciseness. + +Here, you write the text that should be inserted at the location of the . \ No newline at end of file diff --git a/src/api/prompts/code-block/index.ts b/src/api/prompts/code-block/index.ts index 19dc590..4a82bba 100644 --- a/src/api/prompts/code-block/index.ts +++ b/src/api/prompts/code-block/index.ts @@ -1,8 +1,26 @@ -import { FewShowExample } from '../example'; -import assistant from './assistant.txt'; -import user from './user.md'; +import { FewShotPrompt } from '..'; +import example1Assistant from './example1/assistant.txt'; +import example1User from './example1/user.md'; +import example2Assistant from './example2/assistant.txt'; +import example2User from './example2/user.md'; +import example3Assistant from './example3/assistant.txt'; +import example3User from './example3/user.md'; +import system from './system.txt'; -export const CODE_BLOCK_EXAMPLE: FewShowExample = { - user, - assistant, +export const CodeBlockPrompt: FewShotPrompt = { + system, + examples: [ + { + user: example1User, + assistant: example1Assistant, + }, + { + user: example2User, + assistant: example2Assistant, + }, + { + user: example3User, + assistant: example3Assistant, + }, + ], }; diff --git a/src/api/prompts/code-block/params.json b/src/api/prompts/code-block/params.json 
deleted file mode 100644 index ce71a08..0000000 --- a/src/api/prompts/code-block/params.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "stop": ["```", "````"] -} diff --git a/src/api/prompts/code-block/system.txt b/src/api/prompts/code-block/system.txt new file mode 100644 index 0000000..eeb173e --- /dev/null +++ b/src/api/prompts/code-block/system.txt @@ -0,0 +1,12 @@ +Complete the most suitable text at the location of the . +The is located within a Markdown codeblock, written in the language {{LANGUAGE}}. +Your answer must complete this code block in the language {{LANGUAGE}}. +Your answer must not complete any text outside this code block. +Your answer must not overlap with any text adjacent to the . +Your answer must have the following format: + +Here, you write the language of your response e.g. English, Chinese, TypeScript, Python. + +Here, you reason about the answer, using the 80/20 rule for clarity and conciseness. + +Here, you write the text that should be inserted at the location of the . \ No newline at end of file diff --git a/src/api/prompts/context.ts b/src/api/prompts/context.ts index 4a2e6ef..90c9521 100644 --- a/src/api/prompts/context.ts +++ b/src/api/prompts/context.ts @@ -71,6 +71,9 @@ export function getLanguage(prefix: string, suffix: string): string { } function isCursorInBlock(text: string, regex: RegExp): boolean { - const blocks = text.match(regex) ?? 
[]; + const blocks = text.match(regex) as string[] | null; + if (blocks === null) { + return false; + } return blocks.some((block) => block.includes(CURSOR_CHAR)); } diff --git a/src/api/prompts/generator.ts b/src/api/prompts/generator.ts new file mode 100644 index 0000000..944579d --- /dev/null +++ b/src/api/prompts/generator.ts @@ -0,0 +1,62 @@ +import Markpilot from 'src/main'; +import { ChatMessage } from 'src/types'; +import { FewShotPrompt } from '.'; +import { BlockQuotePrompt } from './block-quote'; +import { CodeBlockPrompt } from './code-block'; +import { Context, getContext, getLanguage } from './context'; +import { HeadingPrompt } from './heading'; +import { ListItemPrompt } from './list-item'; +import { MathBlockPrompt } from './math-block'; +import { ParagraphPrompt } from './paragraph'; + +const PROMPTS: Record = { + heading: HeadingPrompt, + paragraph: ParagraphPrompt, + 'list-item': ListItemPrompt, + 'block-quote': BlockQuotePrompt, + 'math-block': MathBlockPrompt, + 'code-block': CodeBlockPrompt, +}; + +export class PromptGenerator { + constructor(private plugin: Markpilot) {} + + generate(prefix: string, suffix: string): ChatMessage[] { + const { settings } = this.plugin; + + const windowSize = settings.completions.windowSize; + const truncatedPrefix = prefix.slice( + prefix.length - windowSize / 2, + prefix.length, + ); + const truncatedSuffix = suffix.slice(0, windowSize / 2); + + const context = getContext(prefix, suffix); + const prompt = PROMPTS[context]; + if (context === 'code-block') { + const language = getLanguage(prefix, suffix); + prompt.system = prompt.system.replace('{{LANGUAGE}}', language); + } + + return [ + { + role: 'system', + content: prompt.system, + }, + ...prompt.examples.flatMap((example) => [ + { + role: 'user', + content: example.user, + }, + { + role: 'assistant', + content: example.assistant, + }, + ]), + { + role: 'user', + content: `${truncatedPrefix}${truncatedSuffix}`, + }, + ] as ChatMessage[]; + } +} diff --git 
a/src/api/prompts/heading/index.ts b/src/api/prompts/heading/index.ts index e69de29..c6291fb 100644 --- a/src/api/prompts/heading/index.ts +++ b/src/api/prompts/heading/index.ts @@ -0,0 +1,20 @@ +import { FewShotPrompt } from '..'; +import example1Assistant from './example1/assistant.txt'; +import example1User from './example1/user.md'; +import example2Assistant from './example2/assistant.txt'; +import example2User from './example2/user.md'; +import system from './system.txt'; + +export const HeadingPrompt: FewShotPrompt = { + system, + examples: [ + { + user: example1User, + assistant: example1Assistant, + }, + { + user: example2User, + assistant: example2Assistant, + }, + ], +}; diff --git a/src/api/prompts/heading/params.json b/src/api/prompts/heading/params.json deleted file mode 100644 index f29602f..0000000 --- a/src/api/prompts/heading/params.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "stop": ["\n"] -} diff --git a/src/api/prompts/heading/system.txt b/src/api/prompts/heading/system.txt new file mode 100644 index 0000000..4d9b1d7 --- /dev/null +++ b/src/api/prompts/heading/system.txt @@ -0,0 +1,12 @@ +Complete the most suitable text at the location of the . +The is located within a Markdown heading. +Your answer must complete the title for this heading that fits the context of the surrounding text. +Your answer must be written in the same language as the surrounding text. +Your answer must not overlap with any text adjacent to the . +Your answer must have the following format: + +Here, you write the language of your response e.g. English, Chinese, TypeScript, Python. + +Here, you reason about the answer, using the 80/20 rule for clarity and conciseness. + +Here, you write the text that should be inserted at the location of the . 
\ No newline at end of file diff --git a/src/api/prompts/index.ts b/src/api/prompts/index.ts index 1b9a18d..bab195c 100644 --- a/src/api/prompts/index.ts +++ b/src/api/prompts/index.ts @@ -1,21 +1,9 @@ -import systemPrompt from './system.txt'; +export interface FewShotPrompt { + system: string; + examples: FewShotExample[]; +} -export interface FewShowExample { +export interface FewShotExample { user: string; assistant: string; } - -export class PromptGenerator { - private systemPrompt = systemPrompt; - - generate(prefix: string, suffix: string): string { - // TODO: - // 1. Determine the context from prefix and suffix. - // 2. Generate a prompt based on the context, with prefix and suffix trimmed according to window size. - const language = 'english'; - if (language) { - return this.systemPrompt.replace('{{LANGUAGE}}', language); - } - return ''; - } -} diff --git a/src/api/prompts/list-item/index.ts b/src/api/prompts/list-item/index.ts index e69de29..3fc1fd3 100644 --- a/src/api/prompts/list-item/index.ts +++ b/src/api/prompts/list-item/index.ts @@ -0,0 +1,14 @@ +import { FewShotPrompt } from '..'; +import example1Assistant from './example1/assistant.txt'; +import example1User from './example1/user.md'; +import system from './system.txt'; + +export const ListItemPrompt: FewShotPrompt = { + system, + examples: [ + { + user: example1User, + assistant: example1Assistant, + }, + ], +}; diff --git a/src/api/prompts/list-item/params.json b/src/api/prompts/list-item/params.json deleted file mode 100644 index e5ca2a7..0000000 --- a/src/api/prompts/list-item/params.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "stop": ["\n\n"] -} diff --git a/src/api/prompts/list-item/system.txt b/src/api/prompts/list-item/system.txt new file mode 100644 index 0000000..a19f8d1 --- /dev/null +++ b/src/api/prompts/list-item/system.txt @@ -0,0 +1,13 @@ +Complete the most suitable text at the location of the . +The is located within a Markdown list item. 
+Your answer must complete one or multiple list items for this list that fits the context of the surrounding text. +Your answer must not complete any text that is not part of this list. +Your answer must be written in the same language as the surrounding text. +Your answer must not overlap with any text adjacent to the . +Your answer must have the following format: + +Here, you write the language of your response e.g. English, Chinese, TypeScript, Python. + +Here, you reason about the answer, using the 80/20 rule for clarity and conciseness. + +Here, you write the text that should be inserted at the location of the . \ No newline at end of file diff --git a/src/api/prompts/math-block/index.ts b/src/api/prompts/math-block/index.ts index e69de29..c6ad7eb 100644 --- a/src/api/prompts/math-block/index.ts +++ b/src/api/prompts/math-block/index.ts @@ -0,0 +1,20 @@ +import { FewShotPrompt } from '..'; +import example1Assistant from './example1/assistant.txt'; +import example1User from './example1/user.md'; +import example2Assistant from './example2/assistant.txt'; +import example2User from './example2/user.md'; +import system from './system.txt'; + +export const MathBlockPrompt: FewShotPrompt = { + system, + examples: [ + { + user: example1User, + assistant: example1Assistant, + }, + { + user: example2User, + assistant: example2Assistant, + }, + ], +}; diff --git a/src/api/prompts/math-block/params.json b/src/api/prompts/math-block/params.json deleted file mode 100644 index 6de3036..0000000 --- a/src/api/prompts/math-block/params.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "stop": ["$ ", "$$"] -} diff --git a/src/api/prompts/math-block/system.txt b/src/api/prompts/math-block/system.txt new file mode 100644 index 0000000..373980a --- /dev/null +++ b/src/api/prompts/math-block/system.txt @@ -0,0 +1,13 @@ +Complete the most suitable text at the location of the . +The is located within a Markdown math block. 
+Your answer must only contain LaTeX code that captures the math discussed in the surrounding text. +Your answer must not contain any text that is not part of the LaTeX code. +Your answer must be written in the same language as the surrounding text. +Your answer must not overlap with any text adjacent to the . +Your answer must have the following format: + +Here, you write the language of your response e.g. English, Chinese, TypeScript, Python. + +Here, you reason about the answer, using the 80/20 rule for clarity and conciseness. + +Here, you write the text that should be inserted at the location of the . \ No newline at end of file diff --git a/src/api/prompts/paragraph/index.ts b/src/api/prompts/paragraph/index.ts index e69de29..1e92634 100644 --- a/src/api/prompts/paragraph/index.ts +++ b/src/api/prompts/paragraph/index.ts @@ -0,0 +1,14 @@ +import { FewShotPrompt } from '..'; +import example1Assistant from './example1/assistant.txt'; +import example1User from './example1/user.md'; +import system from './system.txt'; + +export const ParagraphPrompt: FewShotPrompt = { + system, + examples: [ + { + user: example1User, + assistant: example1Assistant, + }, + ], +}; diff --git a/src/api/prompts/paragraph/params.json b/src/api/prompts/paragraph/params.json deleted file mode 100644 index e5ca2a7..0000000 --- a/src/api/prompts/paragraph/params.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "stop": ["\n\n"] -} diff --git a/src/api/prompts/paragraph/system.txt b/src/api/prompts/paragraph/system.txt new file mode 100644 index 0000000..3ea01c0 --- /dev/null +++ b/src/api/prompts/paragraph/system.txt @@ -0,0 +1,12 @@ +Complete the most suitable text at the location of the . +The is located within a Markdown paragraph. +Your answer must complete one or multiple sentences to this paragraph that fit the surrounding text. +Your answer must be written in the same language as the surrounding text. +Your answer must not overlap with any text adjacent to the . 
+Your answer must have the following format: + +Here, you write the language of your response e.g. English, Chinese, TypeScript, Python. + +Here, you reason about the answer, using the 80/20 rule for clarity and conciseness. + +Here, you write the text that should be inserted at the location of the . \ No newline at end of file diff --git a/src/api/prompts/system.txt b/src/api/prompts/system.txt deleted file mode 100644 index 3d026d7..0000000 --- a/src/api/prompts/system.txt +++ /dev/null @@ -1,10 +0,0 @@ -Predict the most logical text written in the language {{LANGUAGE}} at the location of the . -Your answer can be either code, a single word, or multiple sentences, depending on the language . -Your answer cannot have any overlapping text directly surrounding the . -Your answer must have the following format: - -Here, you write the language of your answer, e.g. English, Chinese, Python, JSON etc. - -Here, you reason about the answer; use the 80/20 principle to be brief. - -Here, you write the text that should be at the location of \ No newline at end of file diff --git a/src/api/proxies/memory-cache.ts b/src/api/proxies/memory-cache.ts index af4b8f3..37b4a05 100644 --- a/src/api/proxies/memory-cache.ts +++ b/src/api/proxies/memory-cache.ts @@ -16,33 +16,30 @@ export class MemoryCacheProxy implements APIClient { return this.client.fetchChat(messages); } - async fetchCompletions(language: string, prefix: string, suffix: string) { + async fetchCompletions(prefix: string, suffix: string) { const { settings } = this.plugin; if (!settings.cache.enabled) { - const completions = await this.client.fetchCompletions( - language, - prefix, - suffix, - ); + const completions = await this.client.fetchCompletions(prefix, suffix); return completions; } - // Extra whitespaces should not affect the completions. 
- const compactPrefix = prefix.replace(/\s\s+/g, ' '); - const compactSuffix = suffix.replace(/\s\s+/g, ' '); - // Use half the window size // because some characters may have overflowed due to extra whitespaces. const windowSize = settings.completions.windowSize / 2; - const truncatedPrefix = compactPrefix.slice( - compactPrefix.length - windowSize / 2, - compactPrefix.length, + const truncatedPrefix = prefix.slice( + prefix.length - windowSize / 2, + prefix.length, ); - const truncatedSuffix = compactSuffix.slice(0, windowSize / 2); + const truncatedSuffix = suffix.slice(0, windowSize / 2); + + // Extra whitespaces should not affect the completions. + // We remove them after truncating the prefix and suffix for efficiency. + const compactPrefix = truncatedPrefix.replace(/\s\s+/g, ' '); + const compactSuffix = truncatedSuffix.replace(/\s\s+/g, ' '); const hash = createHash('sha256') - .update(`${language} ${truncatedPrefix} ${truncatedSuffix} `, 'utf8') + .update(`${compactPrefix} ${compactSuffix} `, 'utf8') .digest('hex'); if (await this.store.has(hash)) { @@ -50,11 +47,7 @@ export class MemoryCacheProxy implements APIClient { return cache; } - const completions = await this.client.fetchCompletions( - language, - prefix, - suffix, - ); + const completions = await this.client.fetchCompletions(prefix, suffix); if (completions === undefined) { return undefined; } diff --git a/src/api/proxies/usage-monitor.ts b/src/api/proxies/usage-monitor.ts index b9b3c6e..de64e1b 100644 --- a/src/api/proxies/usage-monitor.ts +++ b/src/api/proxies/usage-monitor.ts @@ -30,7 +30,7 @@ export class UsageMonitorProxy implements APIClient { yield* this.client.fetchChat(messages); } - async fetchCompletions(language: string, prefix: string, suffix: string) { + async fetchCompletions(prefix: string, suffix: string) { if (this.hasReachedLimit()) { new Notice( 'Monthly usage limit reached. 
Please increase the limit to keep on using chat view.', @@ -38,6 +38,6 @@ export class UsageMonitorProxy implements APIClient { return; } - return await this.client.fetchCompletions(language, prefix, suffix); + return await this.client.fetchCompletions(prefix, suffix); } } diff --git a/src/editor/extension.ts b/src/editor/extension.ts index b10d683..53043b1 100644 --- a/src/editor/extension.ts +++ b/src/editor/extension.ts @@ -9,7 +9,6 @@ import { completionsStateField } from './state'; import { completionsRenderPlugin } from './view'; export type CompletionsFetcher = ( - language: string, prefix: string, suffix: string, ) => Promise; diff --git a/src/editor/listener.ts b/src/editor/listener.ts index 545a92b..5ab7443 100644 --- a/src/editor/listener.ts +++ b/src/editor/listener.ts @@ -1,12 +1,10 @@ -import { EditorState } from '@codemirror/state'; import { EditorView, ViewUpdate } from '@codemirror/view'; import { Notice } from 'obsidian'; import Markpilot from 'src/main'; import { CompletionsFetcher } from './extension'; -import { LanguageAlias, languagesAliases } from './languages'; import { setCompletionsEffect, unsetCompletionsEffect } from './state'; -function showCompletions(fetcher: CompletionsFetcher, plugin: Markpilot) { +function showCompletions(fetcher: CompletionsFetcher) { let lastHead = -1; let latestCompletionsId = 0; @@ -41,14 +39,13 @@ function showCompletions(fetcher: CompletionsFetcher, plugin: Markpilot) { const currentCompletionsId = ++latestCompletionsId; // Get the completions context with code blocks taken into account. - const { language, prefix, suffix } = getCompletionsContext(state, plugin); + const prefix = state.sliceDoc(0, head); + const suffix = state.sliceDoc(head, length); // Fetch completions from the server. 
- const completions = await fetcher(language, prefix, suffix).catch( - (error) => { - new Notice('Failed to fetch completions: ', error); - return undefined; - }, - ); + const completions = await fetcher(prefix, suffix).catch((error) => { + new Notice('Failed to fetch completions: ', error); + return undefined; + }); // if fetch has failed, ignore and return. if (completions === undefined) { return; @@ -65,74 +62,7 @@ function showCompletions(fetcher: CompletionsFetcher, plugin: Markpilot) { }; } -// NOTE: -// This is a bare-bone implementation -// because I was unable to find a parser that outputs an AST -// with the information indicating where each node spans. -function getCompletionsContext(state: EditorState, plugin: Markpilot) { - const head = state.selection.main.head; - const length = state.doc.length; - const prefix = state.sliceDoc(0, head); - const suffix = state.sliceDoc(head, length); - - const windowSize = plugin.settings.completions.windowSize; - const context = { - language: 'markdown', - prefix: prefix.slice(prefix.length - windowSize / 2, prefix.length), - suffix: suffix.slice(0, windowSize / 2), - }; - - // Pattern for the code block delimiter e.g. ```python or ``` - let pattern; - - let prefixChars = 0; - const prefixLines = prefix.split('\n').reverse(); - - for (const [i, line] of prefixLines.entries()) { - // Check if the line starts with a code block pattern. - const parts = /^(```|````|~~~|~~~~)/.exec(line); - if (parts !== null) { - pattern = parts[1]; - - // Check if the line ends with a language identifier. - const language = line.slice(pattern.length).trim(); - if (language === '') { - // Return default context as closing code block pattern is detected. - return context; - } else { - // Otherwise update the context with the language and prefix. 
- context.language = - languagesAliases[language as LanguageAlias] || language.toLowerCase(); - context.prefix = prefix.slice( - prefix.length - prefixChars, - prefix.length, - ); - break; - } - } else if (i === prefixLines.length - 1) { - // Return default context as no code block pattern is detected. - return context; - } - prefixChars += line.length + 1; - } - - let suffixChars = 0; - const suffixLines = suffix.split('\n'); - - for (const line of suffixLines) { - // Check if the line ends with the code block pattern detected above. - const parts = new RegExp(`^${pattern}\\s*$`).exec(line); - if (parts !== null) { - context.suffix = suffix.slice(0, suffixChars); - break; - } - suffixChars += line.length + 1; - } - - return context; -} - export const showCompletionsOnUpdate = ( fetcher: CompletionsFetcher, plugin: Markpilot, -) => EditorView.updateListener.of(showCompletions(fetcher, plugin)); +) => EditorView.updateListener.of(showCompletions(fetcher)); From 55dd63075cc85d21800007bbe206e1a5968b1b15 Mon Sep 17 00:00:00 2001 From: Taichi Maeda Date: Sat, 20 Apr 2024 02:08:47 +0900 Subject: [PATCH 12/26] Add logic for settings migrations --- src/main.ts | 3 + src/settings/index.ts | 1 + src/settings/migrators/1.1.0-1.2.0.ts | 33 +++++++++ src/settings/runner.ts | 29 ++++++++ src/settings/versions/1.1.0/index.ts | 30 ++++++++ src/settings/versions/1.1.0/openai.ts | 95 +++++++++++++++++++++++++ src/settings/versions/1.2.0/index.ts | 55 ++++++++++++++ src/settings/versions/1.2.0/models.ts | 60 ++++++++++++++++ src/settings/versions/1.2.0/provider.ts | 13 ++++ src/settings/versions/1.2.0/types.ts | 11 +++ src/utils.ts | 7 ++ 11 files changed, 337 insertions(+) create mode 100644 src/settings/index.ts create mode 100644 src/settings/migrators/1.1.0-1.2.0.ts create mode 100644 src/settings/runner.ts create mode 100644 src/settings/versions/1.1.0/index.ts create mode 100644 src/settings/versions/1.1.0/openai.ts create mode 100644 src/settings/versions/1.2.0/index.ts create 
mode 100644 src/settings/versions/1.2.0/models.ts create mode 100644 src/settings/versions/1.2.0/provider.ts create mode 100644 src/settings/versions/1.2.0/types.ts diff --git a/src/main.ts b/src/main.ts index 6c24ad8..c35fcfd 100644 --- a/src/main.ts +++ b/src/main.ts @@ -24,6 +24,7 @@ import { MarkpilotSettings, MarkpilotSettingTab, } from './settings'; +import { SettingsMigrationsRunner } from './settings/runner'; export default class Markpilot extends Plugin { settings: MarkpilotSettings; @@ -35,6 +36,8 @@ export default class Markpilot extends Plugin { async onload() { await this.loadSettings(); + const runner = new SettingsMigrationsRunner(this); + await runner.apply(); this.addSettingTab(new MarkpilotSettingTab(this.app, this)); const { settings } = this; diff --git a/src/settings/index.ts b/src/settings/index.ts new file mode 100644 index 0000000..123a6bc --- /dev/null +++ b/src/settings/index.ts @@ -0,0 +1 @@ +export type SettingsMigrator = (settings: object) => object; diff --git a/src/settings/migrators/1.1.0-1.2.0.ts b/src/settings/migrators/1.1.0-1.2.0.ts new file mode 100644 index 0000000..c99e949 --- /dev/null +++ b/src/settings/migrators/1.1.0-1.2.0.ts @@ -0,0 +1,33 @@ +import { SettingsMigrator } from '..'; +import { MarkpilotSettings1_1_0 } from '../versions/1.1.0'; +import { MarkpilotSettings1_2_0 } from '../versions/1.2.0'; + +export const migrateVersion1_1_0_toVersion1_2_0: SettingsMigrator = ( + settings: MarkpilotSettings1_1_0, +) => { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const newSettings: MarkpilotSettings1_2_0 = structuredClone(settings) as any; + newSettings.providers = { + openai: { + apiKey: settings.apiKey, + }, + openrouter: { + apiKey: undefined, + }, + ollama: { + apiUrl: undefined, + }, + }; + newSettings.completions.provider = 'openai'; + newSettings.completions.ignoredFiles = []; + newSettings.completions.ignoredTags = []; + newSettings.chat.provider = 'openai'; + // Update if default models are 
still selected. + if (settings.completions.model === 'gpt-3.5-turbo-instruct') { + newSettings.completions.model = 'gpt-3.5-turbo'; + } + if (settings.chat.model === 'gpt-3.5-turbo-0125') { + newSettings.chat.model = 'gpt-3.5-turbo'; + } + return newSettings; +}; diff --git a/src/settings/runner.ts b/src/settings/runner.ts new file mode 100644 index 0000000..2c2b8ab --- /dev/null +++ b/src/settings/runner.ts @@ -0,0 +1,29 @@ +import { SettingsMigrator } from '.'; +import Markpilot from '../main'; +import { migrateVersion1_1_0_toVersion1_2_0 } from './migrators/1.1.0-1.2.0'; + +export class SettingsMigrationsRunner { + migrators: Record = { + '1.1.0': migrateVersion1_1_0_toVersion1_2_0, + }; + + constructor(private plugin: Markpilot) {} + + async apply() { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + let settings = this.plugin.settings as any; + + while (true) { + // Settings versions and migrations were introduced from version 1.1.0. + const version = settings.version ?? '1.1.0'; + const migrator = this.migrators[version]; + if (migrator === undefined) { + break; + } + settings = migrator(settings); + } + + this.plugin.settings = settings; + await this.plugin.saveSettings(); + } +} diff --git a/src/settings/versions/1.1.0/index.ts b/src/settings/versions/1.1.0/index.ts new file mode 100644 index 0000000..7698133 --- /dev/null +++ b/src/settings/versions/1.1.0/index.ts @@ -0,0 +1,30 @@ +import { ChatCompletionsModel, ChatHistory, CompletionsModel } from './openai'; + +export interface MarkpilotSettings1_1_0 { + apiKey: string | undefined; + completions: { + enabled: boolean; + model: CompletionsModel; + maxTokens: number; + temperature: number; + waitTime: number; + windowSize: number; + acceptKey: string; + rejectKey: string; + }; + chat: { + enabled: boolean; + model: ChatCompletionsModel; + maxTokens: number; + temperature: number; + history: ChatHistory; + }; + cache: { + enabled: boolean; + }; + usage: { + dailyCosts: Record; // e.g. 
'2021-09-01' to 10.0 (USD) + monthlyCosts: Record; // e.g. '2021-09' to 100.0 (USD) + monthlyLimit: number; + }; +} diff --git a/src/settings/versions/1.1.0/openai.ts b/src/settings/versions/1.1.0/openai.ts new file mode 100644 index 0000000..8484fa5 --- /dev/null +++ b/src/settings/versions/1.1.0/openai.ts @@ -0,0 +1,95 @@ +export const COMPLETIONS_MODELS = [ + 'gpt-3.5-turbo-instruct', + 'davinci-002', + 'babbage-002', +] as const; + +export const CHAT_COMPLETIONS_MODELS = [ + 'gpt-4-0125-preview', + 'gpt-4-turbo-preview', + 'gpt-4-1106-preview', + 'gpt-4-vision-preview', + 'gpt-4', + 'gpt-4-0314', + 'gpt-4-0613', + 'gpt-4-32k', + 'gpt-4-32k-0314', + 'gpt-4-32k-0613', + 'gpt-3.5-turbo', + 'gpt-3.5-turbo-16k', + 'gpt-3.5-turbo-0301', + 'gpt-3.5-turbo-0613', + 'gpt-3.5-turbo-1106', + 'gpt-3.5-turbo-0125', + 'gpt-3.5-turbo-16k-0613', +] as const; + +export const MODEL_INPUT_COSTS: Record< + | (typeof COMPLETIONS_MODELS)[number] + | (typeof CHAT_COMPLETIONS_MODELS)[number], + number +> = { + 'gpt-3.5-turbo-instruct': 1.5, + 'davinci-002': 12.0, + 'babbage-002': 1.6, + 'gpt-4-0125-preview': 10.0, + 'gpt-4-turbo-preview': 10.0, + 'gpt-4-1106-preview': 10.0, + 'gpt-4-vision-preview': 10.0, + 'gpt-4': 30.0, + 'gpt-4-0314': 30.0, + 'gpt-4-0613': 30.0, + 'gpt-4-32k': 60.0, + 'gpt-4-32k-0314': 60.0, + 'gpt-4-32k-0613': 60.0, + 'gpt-3.5-turbo': 0.5, + 'gpt-3.5-turbo-16k': 0.5, + 'gpt-3.5-turbo-0301': 0.5, + 'gpt-3.5-turbo-0613': 0.5, + 'gpt-3.5-turbo-1106': 0.5, + 'gpt-3.5-turbo-0125': 0.5, + 'gpt-3.5-turbo-16k-0613': 0.5, +} as const; + +export const MODEL_OUTPUT_COSTS: Record< + | (typeof COMPLETIONS_MODELS)[number] + | (typeof CHAT_COMPLETIONS_MODELS)[number], + number +> = { + 'gpt-3.5-turbo-instruct': 2.0, + 'davinci-002': 12.0, + 'babbage-002': 1.6, + 'gpt-4-0125-preview': 30, + 'gpt-4-turbo-preview': 30, + 'gpt-4-1106-preview': 30, + 'gpt-4-vision-preview': 30, + 'gpt-4': 60, + 'gpt-4-0314': 60, + 'gpt-4-0613': 60, + 'gpt-4-32k': 120, + 'gpt-4-32k-0314': 120, + 
'gpt-4-32k-0613': 120, + 'gpt-3.5-turbo': 1.5, + 'gpt-3.5-turbo-16k': 1.5, + 'gpt-3.5-turbo-0301': 1.5, + 'gpt-3.5-turbo-0613': 1.5, + 'gpt-3.5-turbo-1106': 1.5, + 'gpt-3.5-turbo-0125': 1.5, + 'gpt-3.5-turbo-16k-0613': 1.5, +}; + +export type CompletionsModel = (typeof COMPLETIONS_MODELS)[number]; + +export type ChatCompletionsModel = (typeof CHAT_COMPLETIONS_MODELS)[number]; + +export type ChatRole = 'system' | 'assistant' | 'user'; + +export interface ChatMessage { + role: ChatRole; + content: string; +} + +export interface ChatHistory { + messages: ChatMessage[]; + response: string; +} diff --git a/src/settings/versions/1.2.0/index.ts b/src/settings/versions/1.2.0/index.ts new file mode 100644 index 0000000..5cd7133 --- /dev/null +++ b/src/settings/versions/1.2.0/index.ts @@ -0,0 +1,55 @@ +import { MarkpilotSettings } from 'src/settings'; +import { Equal, Expect } from 'src/utils'; +import { Model } from './models'; +import { Provider } from './provider'; +import { ChatHistory } from './types'; + +export interface MarkpilotSettings1_2_0 { + version: string; + providers: { + openai: { + apiKey: string | undefined; + }; + openrouter: { + apiKey: string | undefined; + }; + ollama: { + apiUrl: string | undefined; + }; + }; + completions: { + enabled: boolean; + provider: Provider; + model: Model; + maxTokens: number; + temperature: number; + waitTime: number; + windowSize: number; + acceptKey: string; + rejectKey: string; + ignoredFiles: string[]; + ignoredTags: string[]; + }; + chat: { + enabled: boolean; + provider: Provider; + model: Model; + maxTokens: number; + temperature: number; + history: ChatHistory; + }; + cache: { + enabled: boolean; + }; + usage: { + dailyCosts: Record; // e.g. '2021-09-01' to 10.0 (USD) + monthlyCosts: Record; // e.g. '2021-09' to 100.0 (USD) + monthlyLimit: number; + }; +} + +// Check the settings type in this version matches the current settings type. 
+// eslint-disable-next-line @typescript-eslint/no-unused-vars +type AssertEqualCurrentSettings = Expect< + Equal +>; diff --git a/src/settings/versions/1.2.0/models.ts b/src/settings/versions/1.2.0/models.ts new file mode 100644 index 0000000..b205f06 --- /dev/null +++ b/src/settings/versions/1.2.0/models.ts @@ -0,0 +1,60 @@ +import { Provider } from './provider'; + +export type OpenAIModel = (typeof OPENAI_MODELS)[number]; + +export type OpenRouterModel = (typeof OPENROUTER_MODELS)[number]; + +export type OllamaModel = (typeof OLLAMA_MODELS)[number]; + +export type Model = OpenAIModel | OpenRouterModel | OllamaModel; + +export const OPENAI_MODELS = [ + 'gpt-3.5-turbo-instruct', + 'davinci-002', + 'babbage-002', + 'gpt-4-0125-preview', + 'gpt-4-turbo-preview', + 'gpt-4-1106-preview', + 'gpt-4-vision-preview', + 'gpt-4', + 'gpt-4-0314', + 'gpt-4-0613', + 'gpt-4-32k', + 'gpt-4-32k-0314', + 'gpt-4-32k-0613', + 'gpt-3.5-turbo', + 'gpt-3.5-turbo-16k', + 'gpt-3.5-turbo-0301', + 'gpt-3.5-turbo-0613', + 'gpt-3.5-turbo-1106', + 'gpt-3.5-turbo-0125', + 'gpt-3.5-turbo-16k-0613', +] as const; + +// TODO: +// This is a placeholder. +export const OPENROUTER_MODELS = [ + 'openai/gpt-3.5-turbo', + 'openai/gpt-4-turbo', +] as const; + +// TODO: +// This is a placeholder. 
+export const OLLAMA_MODELS = [ + 'llama2', + 'llama3', + 'codellama', + 'phind-codellama', +] as const; + +export const MODELS = { + openai: OPENAI_MODELS, + openrouter: OPENROUTER_MODELS, + ollama: OLLAMA_MODELS, +}; + +export const DEFAULT_MODELS: Record = { + openai: 'gpt-3.5-turbo', + openrouter: 'openai/gpt-3.5-turbo', + ollama: 'llama2', +}; diff --git a/src/settings/versions/1.2.0/provider.ts b/src/settings/versions/1.2.0/provider.ts new file mode 100644 index 0000000..9b8fcb9 --- /dev/null +++ b/src/settings/versions/1.2.0/provider.ts @@ -0,0 +1,13 @@ +export type OnlineProvider = (typeof ONLINE_PROVIDERS)[number]; + +export type OfflineProvider = (typeof OFFLINE_PROVIDERS)[number]; + +export type Provider = OnlineProvider | OfflineProvider; + +export const ONLINE_PROVIDERS = ['openai', 'openrouter'] as const; + +export const OFFLINE_PROVIDERS = ['ollama'] as const; + +export const PROVIDERS = [...ONLINE_PROVIDERS, ...OFFLINE_PROVIDERS] as const; + +export const DEFAULT_PROVIDER = 'openai' as Provider; diff --git a/src/settings/versions/1.2.0/types.ts b/src/settings/versions/1.2.0/types.ts new file mode 100644 index 0000000..b7f4280 --- /dev/null +++ b/src/settings/versions/1.2.0/types.ts @@ -0,0 +1,11 @@ +export type ChatRole = 'system' | 'assistant' | 'user'; + +export interface ChatMessage { + role: ChatRole; + content: string; +} + +export interface ChatHistory { + messages: ChatMessage[]; + response: string; +} diff --git a/src/utils.ts b/src/utils.ts index 74722de..e7140ab 100644 --- a/src/utils.ts +++ b/src/utils.ts @@ -69,3 +69,10 @@ export function getDaysInCurrentMonth(): Date[] { } return dates; } + +// Utility types used for settings migration. +export type Expect = T; +export type Equal = + (() => T extends X ? 1 : 2) extends () => T extends Y ? 1 : 2 + ? 
true + : false; From 7c73dca2fb599836e36fd6632fc1c3150f197924 Mon Sep 17 00:00:00 2001 From: Taichi Maeda Date: Sat, 20 Apr 2024 02:20:34 +0900 Subject: [PATCH 13/26] Setup ESBuild loaders for .txt and .md files --- esbuild.config.mjs | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/esbuild.config.mjs b/esbuild.config.mjs index 8f9a756..990def7 100644 --- a/esbuild.config.mjs +++ b/esbuild.config.mjs @@ -39,8 +39,11 @@ const context = await esbuild.context({ sourcemap: prod ? false : "inline", treeShaking: true, outfile: "main.js", - // For loading custom icons: - loader: { ".svg": "text" }, + loader: { + ".txt": "text", + ".md": "text", + ".svg": "text", // For custom icons, + }, }); if (prod) { From b327646648fb2eab8e7b8cb66776b58b73465a45 Mon Sep 17 00:00:00 2001 From: Taichi Maeda Date: Sat, 20 Apr 2024 02:20:52 +0900 Subject: [PATCH 14/26] Add logic to generate prompt and parse response --- src/api/clients/ollama.ts | 9 +++++++-- src/api/clients/openai-compatible.ts | 13 +++++++++---- src/api/clients/openai.ts | 9 +++++++-- src/api/clients/openrouter.ts | 9 +++++++-- src/api/prompts/generator.ts | 5 +++++ src/main.ts | 8 +++++--- 6 files changed, 40 insertions(+), 13 deletions(-) diff --git a/src/api/clients/ollama.ts b/src/api/clients/ollama.ts index 05ce474..f3afd0d 100644 --- a/src/api/clients/ollama.ts +++ b/src/api/clients/ollama.ts @@ -4,14 +4,19 @@ import Markpilot from 'src/main'; import { validateURL } from 'src/utils'; import { APIClient } from '..'; import { CostsTracker } from '../costs'; +import { PromptGenerator } from '../prompts/generator'; import { OpenAICompatibleAPIClient } from './openai-compatible'; export class OllamaAPIClient extends OpenAICompatibleAPIClient implements APIClient { - constructor(tracker: CostsTracker, plugin: Markpilot) { - super(tracker, plugin); + constructor( + generator: PromptGenerator, + tracker: CostsTracker, + plugin: Markpilot, + ) { + super(generator, tracker, plugin); } get openai(): 
OpenAI | undefined { diff --git a/src/api/clients/openai-compatible.ts b/src/api/clients/openai-compatible.ts index 2435853..113796a 100644 --- a/src/api/clients/openai-compatible.ts +++ b/src/api/clients/openai-compatible.ts @@ -5,9 +5,11 @@ import Markpilot from 'src/main'; import { APIClient } from '..'; import { ChatMessage } from '../../types'; import { CostsTracker } from '../costs'; +import { PromptGenerator } from '../prompts/generator'; export abstract class OpenAICompatibleAPIClient implements APIClient { constructor( + protected generator: PromptGenerator, protected tracker: CostsTracker, protected plugin: Markpilot, ) {} @@ -69,10 +71,9 @@ export abstract class OpenAICompatibleAPIClient implements APIClient { } try { - // TODO: - // Get messages from the prompt generator. + const messages = this.generator.generate(prefix, suffix); const completions = await this.openai.chat.completions.create({ - messages: [], + messages, model: settings.completions.model, max_tokens: settings.completions.maxTokens, temperature: settings.completions.temperature, @@ -91,7 +92,11 @@ export abstract class OpenAICompatibleAPIClient implements APIClient { outputTokens, ); - return completions.choices[0].message.content ?? 
undefined; + const content = completions.choices[0].message.content; + if (content === null) { + return; + } + return this.generator.parse(content); } catch (error) { console.error(error); new Notice( diff --git a/src/api/clients/openai.ts b/src/api/clients/openai.ts index cfc558b..2d3cbdf 100644 --- a/src/api/clients/openai.ts +++ b/src/api/clients/openai.ts @@ -3,14 +3,19 @@ import OpenAI from 'openai'; import Markpilot from 'src/main'; import { APIClient } from '..'; import { CostsTracker } from '../costs'; +import { PromptGenerator } from '../prompts/generator'; import { OpenAICompatibleAPIClient } from './openai-compatible'; export class OpenAIAPIClient extends OpenAICompatibleAPIClient implements APIClient { - constructor(tracker: CostsTracker, plugin: Markpilot) { - super(tracker, plugin); + constructor( + generator: PromptGenerator, + tracker: CostsTracker, + plugin: Markpilot, + ) { + super(generator, tracker, plugin); } get openai(): OpenAI | undefined { diff --git a/src/api/clients/openrouter.ts b/src/api/clients/openrouter.ts index 6be34fd..1002b88 100644 --- a/src/api/clients/openrouter.ts +++ b/src/api/clients/openrouter.ts @@ -3,14 +3,19 @@ import OpenAI from 'openai'; import Markpilot from 'src/main'; import { APIClient } from '..'; import { CostsTracker } from '../costs'; +import { PromptGenerator } from '../prompts/generator'; import { OpenAICompatibleAPIClient } from './openai-compatible'; export class OpenRouterAPIClient extends OpenAICompatibleAPIClient implements APIClient { - constructor(tracker: CostsTracker, plugin: Markpilot) { - super(tracker, plugin); + constructor( + generator: PromptGenerator, + tracker: CostsTracker, + plugin: Markpilot, + ) { + super(generator, tracker, plugin); } get openai(): OpenAI | undefined { diff --git a/src/api/prompts/generator.ts b/src/api/prompts/generator.ts index 944579d..33556a7 100644 --- a/src/api/prompts/generator.ts +++ b/src/api/prompts/generator.ts @@ -59,4 +59,9 @@ export class PromptGenerator { 
}, ] as ChatMessage[]; } + + parse(content: string) { + const lines = content.split('\n'); + return lines.slice(lines.indexOf('') + 1).join('\n'); + } } diff --git a/src/main.ts b/src/main.ts index c35fcfd..0754f08 100644 --- a/src/main.ts +++ b/src/main.ts @@ -13,6 +13,7 @@ import { OllamaAPIClient } from './api/clients/ollama'; import { OpenAIAPIClient } from './api/clients/openai'; import { OpenRouterAPIClient } from './api/clients/openrouter'; import { CostsTracker } from './api/costs'; +import { PromptGenerator } from './api/prompts/generator'; import { Provider } from './api/provider'; import { MemoryCacheProxy } from './api/proxies/memory-cache'; import { UsageMonitorProxy } from './api/proxies/usage-monitor'; @@ -199,15 +200,16 @@ export default class Markpilot extends Plugin { } createAPIClient(provider: Provider) { + const generator = new PromptGenerator(this); const tracker = new CostsTracker(this); const client = (() => { switch (provider) { case 'openai': - return new OpenAIAPIClient(tracker, this); + return new OpenAIAPIClient(generator, tracker, this); case 'openrouter': - return new OpenRouterAPIClient(tracker, this); + return new OpenRouterAPIClient(generator, tracker, this); case 'ollama': - return new OllamaAPIClient(tracker, this); + return new OllamaAPIClient(generator, tracker, this); } })(); const clientWithMonitor = new UsageMonitorProxy(client, this); From af5bd6b8767ab3ea5e2bffa41452954085469491 Mon Sep 17 00:00:00 2001 From: Taichi Maeda Date: Sat, 20 Apr 2024 02:37:48 +0900 Subject: [PATCH 15/26] Fix bugs in settings migration --- src/main.ts | 12 +++++++++--- src/settings/migrators/1.1.0-1.2.0.ts | 4 +++- src/settings/runner.ts | 14 +++++++++++--- 3 files changed, 23 insertions(+), 7 deletions(-) diff --git a/src/main.ts b/src/main.ts index 0754f08..a8014e0 100644 --- a/src/main.ts +++ b/src/main.ts @@ -37,8 +37,6 @@ export default class Markpilot extends Plugin { async onload() { await this.loadSettings(); - const runner = new 
SettingsMigrationsRunner(this); - await runner.apply(); this.addSettingTab(new MarkpilotSettingTab(this.app, this)); const { settings } = this; @@ -271,7 +269,15 @@ export default class Markpilot extends Plugin { } async loadSettings() { - this.settings = Object.assign({}, DEFAULT_SETTINGS, await this.loadData()); + const data = await this.loadData(); + if (data === null) { + this.settings = DEFAULT_SETTINGS; + return; + } + + this.settings = data; + const runner = new SettingsMigrationsRunner(this); + await runner.apply(); } async saveSettings() { diff --git a/src/settings/migrators/1.1.0-1.2.0.ts b/src/settings/migrators/1.1.0-1.2.0.ts index c99e949..418d58d 100644 --- a/src/settings/migrators/1.1.0-1.2.0.ts +++ b/src/settings/migrators/1.1.0-1.2.0.ts @@ -5,11 +5,13 @@ import { MarkpilotSettings1_2_0 } from '../versions/1.2.0'; export const migrateVersion1_1_0_toVersion1_2_0: SettingsMigrator = ( settings: MarkpilotSettings1_1_0, ) => { + const apiKey = settings.apiKey as string; + delete settings.apiKey; // eslint-disable-next-line @typescript-eslint/no-explicit-any const newSettings: MarkpilotSettings1_2_0 = structuredClone(settings) as any; newSettings.providers = { openai: { - apiKey: settings.apiKey, + apiKey, }, openrouter: { apiKey: undefined, diff --git a/src/settings/runner.ts b/src/settings/runner.ts index 2c2b8ab..46e8296 100644 --- a/src/settings/runner.ts +++ b/src/settings/runner.ts @@ -3,9 +3,12 @@ import Markpilot from '../main'; import { migrateVersion1_1_0_toVersion1_2_0 } from './migrators/1.1.0-1.2.0'; export class SettingsMigrationsRunner { - migrators: Record = { + nextMigrators: Record = { '1.1.0': migrateVersion1_1_0_toVersion1_2_0, }; + nextVersions: Record = { + '1.1.0': '1.2.0', + }; constructor(private plugin: Markpilot) {} @@ -13,14 +16,19 @@ export class SettingsMigrationsRunner { // eslint-disable-next-line @typescript-eslint/no-explicit-any let settings = this.plugin.settings as any; - while (true) { + // NOTE: + // An infinite 
loop would also work because of the break statement + // but we take the safe path here. + const maxIterations = Object.keys(this.nextMigrators).length; + for (let i = 0; i < maxIterations + 1; i++) { // Settings versions and migrations were introduced from version 1.1.0. const version = settings.version ?? '1.1.0'; - const migrator = this.migrators[version]; + const migrator = this.nextMigrators[version]; if (migrator === undefined) { break; } settings = migrator(settings); + settings.version = this.nextVersions[version]; } this.plugin.settings = settings; From 0525a8d68d2bb4eec78261670cec8afd17b1b0c0 Mon Sep 17 00:00:00 2001 From: Taichi Maeda Date: Sat, 20 Apr 2024 02:42:41 +0900 Subject: [PATCH 16/26] Fix bugs in ribbon action display --- src/main.ts | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/src/main.ts b/src/main.ts index a8014e0..a76c2d7 100644 --- a/src/main.ts +++ b/src/main.ts @@ -52,21 +52,20 @@ export default class Markpilot extends Plugin { return this.chatView; }); + this.registerCustomIcons(); // Call before `registerRibbonActions()`. this.registerRibbonActions(); this.registerCommands(); } - registerRibbonActions() { - // Register custom icon. - // TODO: - // Remove once this PR gets merged: - // https://github.com/lucide-icons/lucide/pull/2079 + registerCustomIcons() { addIcon('bot-off', botOffIcon); + } + + registerRibbonActions() { + const { settings } = this; - // TODO: - // Extract duplicate logic when toggling features. const toggleCompletionsItem = this.addRibbonIcon( - 'bot', + settings.completions.enabled ? 
'bot' : 'bot-off', 'Toggle inline completions', () => { this.settings.completions.enabled = !this.settings.completions.enabled; From 0022218a236127e59e675f2191316cca1bf36219 Mon Sep 17 00:00:00 2001 From: Taichi Maeda Date: Sat, 20 Apr 2024 02:53:25 +0900 Subject: [PATCH 17/26] Refactor settings migration code in functional style --- src/settings/index.ts | 2 +- src/settings/migrators/1.1.0-1.2.0.ts | 46 ++++++++++++++++----------- src/settings/runner.ts | 12 +++---- 3 files changed, 33 insertions(+), 27 deletions(-) diff --git a/src/settings/index.ts b/src/settings/index.ts index 123a6bc..3b09f88 100644 --- a/src/settings/index.ts +++ b/src/settings/index.ts @@ -1 +1 @@ -export type SettingsMigrator = (settings: object) => object; +export type SettingsMigrator = (settings: From) => To; diff --git a/src/settings/migrators/1.1.0-1.2.0.ts b/src/settings/migrators/1.1.0-1.2.0.ts index 418d58d..93e2d0e 100644 --- a/src/settings/migrators/1.1.0-1.2.0.ts +++ b/src/settings/migrators/1.1.0-1.2.0.ts @@ -2,28 +2,38 @@ import { SettingsMigrator } from '..'; import { MarkpilotSettings1_1_0 } from '../versions/1.1.0'; import { MarkpilotSettings1_2_0 } from '../versions/1.2.0'; -export const migrateVersion1_1_0_toVersion1_2_0: SettingsMigrator = ( - settings: MarkpilotSettings1_1_0, -) => { - const apiKey = settings.apiKey as string; - delete settings.apiKey; - // eslint-disable-next-line @typescript-eslint/no-explicit-any - const newSettings: MarkpilotSettings1_2_0 = structuredClone(settings) as any; - newSettings.providers = { - openai: { - apiKey, +export const migrateVersion1_1_0_toVersion1_2_0: SettingsMigrator< + MarkpilotSettings1_1_0, + MarkpilotSettings1_2_0 +> = (settings) => { + const newSettings: MarkpilotSettings1_2_0 = { + version: '1.2.0', + providers: { + openai: { + apiKey: settings.apiKey, + }, + openrouter: { + apiKey: undefined, + }, + ollama: { + apiUrl: undefined, + }, }, - openrouter: { - apiKey: undefined, + completions: { + ...settings.completions, + 
provider: 'openai', + ignoredFiles: [], + ignoredTags: [], }, - ollama: { - apiUrl: undefined, + chat: { + ...settings.chat, + provider: 'openai', }, + cache: { + enabled: true, // Enable cache by default. + }, + usage: settings.usage, }; - newSettings.completions.provider = 'openai'; - newSettings.completions.ignoredFiles = []; - newSettings.completions.ignoredTags = []; - newSettings.chat.provider = 'openai'; // Update if default models are still selected. if (settings.completions.model === 'gpt-3.5-turbo-instruct') { newSettings.completions.model = 'gpt-3.5-turbo'; diff --git a/src/settings/runner.ts b/src/settings/runner.ts index 46e8296..8521cec 100644 --- a/src/settings/runner.ts +++ b/src/settings/runner.ts @@ -3,12 +3,9 @@ import Markpilot from '../main'; import { migrateVersion1_1_0_toVersion1_2_0 } from './migrators/1.1.0-1.2.0'; export class SettingsMigrationsRunner { - nextMigrators: Record = { + migrators: Record> = { '1.1.0': migrateVersion1_1_0_toVersion1_2_0, }; - nextVersions: Record = { - '1.1.0': '1.2.0', - }; constructor(private plugin: Markpilot) {} @@ -19,16 +16,15 @@ export class SettingsMigrationsRunner { // NOTE: // An infinite loop would also work because of the break statement // but we take the safe path here. - const maxIterations = Object.keys(this.nextMigrators).length; + const maxIterations = Object.keys(this.migrators).length; for (let i = 0; i < maxIterations + 1; i++) { // Settings versions and migrations were introduced from version 1.1.0. const version = settings.version ?? 
'1.1.0'; - const migrator = this.nextMigrators[version]; + const migrator = this.migrators[version]; if (migrator === undefined) { break; } - settings = migrator(settings); - settings.version = this.nextVersions[version]; + settings = migrator(structuredClone(settings)); } this.plugin.settings = settings; From f44e885b8ae6053844808df7ed356b9ad94e30eb Mon Sep 17 00:00:00 2001 From: Taichi Maeda Date: Sat, 20 Apr 2024 02:56:09 +0900 Subject: [PATCH 18/26] Set default accept key to Tab in new version --- src/settings/migrators/1.1.0-1.2.0.ts | 4 ++++ src/settings/runner.ts | 3 +++ 2 files changed, 7 insertions(+) diff --git a/src/settings/migrators/1.1.0-1.2.0.ts b/src/settings/migrators/1.1.0-1.2.0.ts index 93e2d0e..aa9f826 100644 --- a/src/settings/migrators/1.1.0-1.2.0.ts +++ b/src/settings/migrators/1.1.0-1.2.0.ts @@ -34,6 +34,10 @@ export const migrateVersion1_1_0_toVersion1_2_0: SettingsMigrator< }, usage: settings.usage, }; + // Update if default accept key is still selected. + if (settings.completions.acceptKey === 'Enter') { + newSettings.completions.acceptKey = 'Tab'; + } // Update if default models are still selected. 
if (settings.completions.model === 'gpt-3.5-turbo-instruct') { newSettings.completions.model = 'gpt-3.5-turbo'; diff --git a/src/settings/runner.ts b/src/settings/runner.ts index 8521cec..4d69fe8 100644 --- a/src/settings/runner.ts +++ b/src/settings/runner.ts @@ -25,6 +25,9 @@ export class SettingsMigrationsRunner { break; } settings = migrator(structuredClone(settings)); + if (settings.version === version) { + throw new Error('Settings migration did not update the version'); + } } this.plugin.settings = settings; From b5de7f554d7dde5888df72b62f61e49757b9ef17 Mon Sep 17 00:00:00 2001 From: Taichi Maeda Date: Sat, 20 Apr 2024 02:58:37 +0900 Subject: [PATCH 19/26] Reorgniase files --- src/api/clients/gemini.ts | 3 +- src/api/clients/openai-compatible.ts | 3 +- src/api/index.ts | 14 +- src/api/prompts/generator.ts | 2 +- src/api/proxies/memory-cache.ts | 3 +- src/api/proxies/usage-monitor.ts | 3 +- src/chat/App.tsx | 2 +- src/chat/components/ChatItem.tsx | 2 +- src/settings.ts | 578 ------------------------- src/settings/index.ts | 579 +++++++++++++++++++++++++- src/settings/migrators/1.1.0-1.2.0.ts | 2 +- src/settings/migrators/index.ts | 1 + src/settings/runner.ts | 2 +- src/types.ts | 11 - 14 files changed, 600 insertions(+), 605 deletions(-) delete mode 100644 src/settings.ts create mode 100644 src/settings/migrators/index.ts delete mode 100644 src/types.ts diff --git a/src/api/clients/gemini.ts b/src/api/clients/gemini.ts index 4612a7e..66ab527 100644 --- a/src/api/clients/gemini.ts +++ b/src/api/clients/gemini.ts @@ -1,5 +1,4 @@ -import { APIClient } from '..'; -import { ChatMessage } from '../../types'; +import { APIClient, ChatMessage } from '..'; // TODO: // Implement API client for Gemini. 
diff --git a/src/api/clients/openai-compatible.ts b/src/api/clients/openai-compatible.ts index 113796a..7da46b4 100644 --- a/src/api/clients/openai-compatible.ts +++ b/src/api/clients/openai-compatible.ts @@ -2,8 +2,7 @@ import { getEncoding } from 'js-tiktoken'; import { Notice } from 'obsidian'; import OpenAI from 'openai'; import Markpilot from 'src/main'; -import { APIClient } from '..'; -import { ChatMessage } from '../../types'; +import { APIClient, ChatMessage } from '..'; import { CostsTracker } from '../costs'; import { PromptGenerator } from '../prompts/generator'; diff --git a/src/api/index.ts b/src/api/index.ts index 9e99472..0fcf0b8 100644 --- a/src/api/index.ts +++ b/src/api/index.ts @@ -1,6 +1,16 @@ -import { ChatMessage } from '../types'; - export interface APIClient { fetchChat(messages: ChatMessage[]): AsyncGenerator; fetchCompletions(prefix: string, suffix: string): Promise; } + +export type ChatRole = 'system' | 'assistant' | 'user'; + +export interface ChatMessage { + role: ChatRole; + content: string; +} + +export interface ChatHistory { + messages: ChatMessage[]; + response: string; +} diff --git a/src/api/prompts/generator.ts b/src/api/prompts/generator.ts index 33556a7..1bdde3d 100644 --- a/src/api/prompts/generator.ts +++ b/src/api/prompts/generator.ts @@ -1,6 +1,6 @@ import Markpilot from 'src/main'; -import { ChatMessage } from 'src/types'; import { FewShotPrompt } from '.'; +import { ChatMessage } from '..'; import { BlockQuotePrompt } from './block-quote'; import { CodeBlockPrompt } from './code-block'; import { Context, getContext, getLanguage } from './context'; diff --git a/src/api/proxies/memory-cache.ts b/src/api/proxies/memory-cache.ts index 37b4a05..b79e869 100644 --- a/src/api/proxies/memory-cache.ts +++ b/src/api/proxies/memory-cache.ts @@ -1,7 +1,6 @@ import { createHash } from 'crypto'; import Markpilot from 'src/main'; -import { APIClient } from '..'; -import { ChatMessage } from '../../types'; +import { APIClient, 
ChatMessage } from '..'; export class MemoryCacheProxy implements APIClient { private store: Map = new Map(); diff --git a/src/api/proxies/usage-monitor.ts b/src/api/proxies/usage-monitor.ts index de64e1b..dfa603b 100644 --- a/src/api/proxies/usage-monitor.ts +++ b/src/api/proxies/usage-monitor.ts @@ -1,8 +1,7 @@ import { Notice } from 'obsidian'; import Markpilot from 'src/main'; import { getThisMonthAsString } from 'src/utils'; -import { APIClient } from '..'; -import { ChatMessage } from '../../types'; +import { APIClient, ChatMessage } from '..'; export class UsageMonitorProxy implements APIClient { constructor( diff --git a/src/chat/App.tsx b/src/chat/App.tsx index 9e5ea03..d746738 100644 --- a/src/chat/App.tsx +++ b/src/chat/App.tsx @@ -1,6 +1,6 @@ import { useEffect, useState } from 'react'; +import { ChatHistory, ChatRole } from 'src/api'; import Markpilot from 'src/main'; -import { ChatHistory, ChatRole } from 'src/types'; import { ChatInput } from './components/ChatBox'; import { ChatItem } from './components/ChatItem'; import { ChatView } from './view'; diff --git a/src/chat/components/ChatItem.tsx b/src/chat/components/ChatItem.tsx index 7d284e0..bfb79c5 100644 --- a/src/chat/components/ChatItem.tsx +++ b/src/chat/components/ChatItem.tsx @@ -2,7 +2,7 @@ import { Bot, Copy, User } from 'lucide-react'; import ReactMarkdown from 'react-markdown'; import rehypeKatex from 'rehype-katex'; import remarkMath from 'remark-math'; -import { ChatMessage } from 'src/types'; +import { ChatMessage } from 'src/api'; export function ChatItem({ message }: { message: ChatMessage }) { return ( diff --git a/src/settings.ts b/src/settings.ts deleted file mode 100644 index de48578..0000000 --- a/src/settings.ts +++ /dev/null @@ -1,578 +0,0 @@ -import Chart from 'chart.js/auto'; -import { App, Notice, PluginSettingTab, Setting } from 'obsidian'; -import { DEFAULT_MODELS, Model, MODELS } from './api/models'; -import { - DEFAULT_PROVIDER, - Provider, - PROVIDERS, - 
PROVIDERS_NAMES, -} from './api/provider'; -import { ChatHistory } from './types'; - -import Markpilot from './main'; -import { getDaysInCurrentMonth, validateURL } from './utils'; - -export interface MarkpilotSettings { - version: string; - providers: { - openai: { - apiKey: string | undefined; - }; - openrouter: { - apiKey: string | undefined; - }; - ollama: { - apiUrl: string | undefined; - }; - }; - completions: { - enabled: boolean; - provider: Provider; - model: Model; - maxTokens: number; - temperature: number; - waitTime: number; - windowSize: number; - acceptKey: string; - rejectKey: string; - ignoredFiles: string[]; - ignoredTags: string[]; - }; - chat: { - enabled: boolean; - provider: Provider; - model: Model; - maxTokens: number; - temperature: number; - history: ChatHistory; - }; - cache: { - enabled: boolean; - }; - usage: { - dailyCosts: Record; // e.g. '2021-09-01' to 10.0 (USD) - monthlyCosts: Record; // e.g. '2021-09' to 100.0 (USD) - monthlyLimit: number; - }; -} - -export const DEFAULT_SETTINGS: MarkpilotSettings = { - version: '1.2.0', - providers: { - openai: { - apiKey: undefined, - }, - openrouter: { - apiKey: undefined, - }, - ollama: { - apiUrl: undefined, - }, - }, - completions: { - enabled: true, - provider: DEFAULT_PROVIDER, - model: DEFAULT_MODELS[DEFAULT_PROVIDER], - maxTokens: 64, - temperature: 0, - waitTime: 500, - windowSize: 512, - acceptKey: 'Tab', - rejectKey: 'Escape', - ignoredFiles: [], - ignoredTags: [], - }, - chat: { - enabled: true, - provider: DEFAULT_PROVIDER, - model: DEFAULT_MODELS[DEFAULT_PROVIDER], - maxTokens: 1024, - temperature: 0.5, - history: { - messages: [], - response: '', - }, - }, - cache: { - enabled: false, - }, - usage: { - dailyCosts: {}, - monthlyCosts: {}, - monthlyLimit: 20, - }, -}; - -export class MarkpilotSettingTab extends PluginSettingTab { - constructor( - app: App, - private plugin: Markpilot, - ) { - super(app, plugin); - } - - async display() { - const { containerEl } = this; - - 
containerEl.empty(); - - const { plugin } = this; - const { settings } = plugin; - - /************************************************************/ - /* Providers */ - /************************************************************/ - - new Setting(containerEl).setName('Providers').setHeading(); - - new Setting(containerEl) - .setName('OpenAI API key') - .setDesc('Enter your OpenAI API key.') - .addText((text) => - text - .setValue(settings.providers.openai.apiKey ?? '') - .onChange(async (value) => { - settings.providers.openai.apiKey = value; - await plugin.saveSettings(); - // NOTE: - // The API client needs to be updated when the API key, API URL or provider is changed, - // because these parameters are captured by the underlying library on initialization - // and become stale when the settings are changed. - plugin.updateAPIClient(); - new Notice('Successfully saved OpenAI API key.'); - }), - ); - - new Setting(containerEl) - .setName('OpenRouter API key') - .setDesc('Enter your OpenRouter API key.') - .addText((text) => - text - .setValue(settings.providers.openrouter.apiKey ?? '') - .onChange(async (value) => { - settings.providers.openrouter.apiKey = value; - await plugin.saveSettings(); - plugin.updateAPIClient(); - new Notice('Successfully saved OpenRouter API key.'); - }), - ); - - new Setting(containerEl) - .setName('Ollama API URL') - .setDesc('Enter your Ollama API URL.') - .addText((text) => - text - .setValue(settings.providers.ollama.apiUrl ?? 
'') - .onChange(async (value) => { - settings.providers.ollama.apiUrl = value; - await plugin.saveSettings(); - plugin.updateAPIClient(); - }), - ); - - new Setting(containerEl) - .setName('Test Ollama API connection') - .setDesc('Test the connection to the local Ollama API.') - .addButton((button) => - button.setButtonText('Test Connection').onClick(async () => { - const apiUrl = settings.providers.ollama.apiUrl; - if (apiUrl === undefined) { - new Notice('Ollama API URL is not set.'); - return; - } - if (!validateURL(apiUrl)) { - new Notice('Invalid Ollama API URL.'); - return; - } - // TODO: - // Properly implement logic for checking Ollama API status. - try { - await fetch(apiUrl); - new Notice('Successfully connected to Ollama API.'); - } catch { - new Notice('Failed to connect to Ollama API.'); - } - }), - ); - - /************************************************************/ - /* Inline completions */ - /************************************************************/ - - new Setting(containerEl).setName('Inline completions').setHeading(); - - new Setting(containerEl) - .setName('Enable inline completions') - .setDesc('Turn this on to enable inline completions.') - .addToggle((toggle) => - toggle - .setValue(settings.completions.enabled) - .onChange(async (value) => { - settings.completions.enabled = value; - await plugin.saveSettings(); - this.display(); // Re-render settings tab - }), - ); - - new Setting(containerEl) - .setName('Provider') - .setDesc('Select the provider for inline completions.') - .addDropdown((dropdown) => { - for (const option of PROVIDERS) { - dropdown.addOption(option, PROVIDERS_NAMES[option]); - } - dropdown - .setDisabled(!settings.completions.enabled) - .setValue(settings.completions.provider) - .onChange(async (value) => { - settings.completions.provider = value as Provider; - settings.completions.model = DEFAULT_MODELS[value as Provider]; - await plugin.saveSettings(); - plugin.updateAPIClient(); - this.display(); // Re-render 
settings tab - }); - }); - - new Setting(containerEl) - .setName('Model') - .setDesc('Select the model for inline completions.') - .addDropdown((dropdown) => { - for (const option of MODELS[settings.completions.provider]) { - dropdown.addOption(option, option); - } - dropdown - .setDisabled(!settings.completions.enabled) - .setValue(settings.completions.model) - .onChange(async (value) => { - settings.completions.model = value as Model; - await plugin.saveSettings(); - }); - }); - - new Setting(containerEl) - .setName('Max tokens') - .setDesc('Set the max tokens for inline completions.') - .addText((text) => - text - .setValue(settings.completions.maxTokens.toString()) - .onChange(async (value) => { - const amount = parseInt(value); - if (isNaN(amount) || amount < 0) { - return; - } - settings.completions.maxTokens = amount; - await plugin.saveSettings(); - }), - ); - - new Setting(containerEl) - .setName('Temperature') - .setDesc('Set the temperature for inline completions.') - .addSlider((slider) => - slider - .setDisabled(!settings.completions.enabled) - .setValue(settings.completions.temperature) - .setLimits(0, 1, 0.01) - // TODO: - // Figure out how to add unit to the slider - .setDynamicTooltip() - .onChange(async (value) => { - settings.completions.temperature = value; - await plugin.saveSettings(); - }), - ); - - new Setting(containerEl) - .setName('Wait time') - .setDesc( - 'Time in milliseconds which it will wait for before fetching inline completions from the server.', - ) - .addSlider((slider) => - slider - .setDisabled(!settings.completions.enabled) - .setValue(settings.completions.waitTime) - .setLimits(0, 1000, 100) - .setDynamicTooltip() - .onChange(async (value) => { - settings.completions.waitTime = value; - await plugin.saveSettings(); - // NOTE: - // Editor extension needs to be updated when settings are changed - // because some fields e.g. `acceptKey` become stale and there is no way - // to make the extension query it on the fly. 
- plugin.updateEditorExtension(); - }), - ); - - new Setting(containerEl) - .setName('Window size') - .setDesc( - 'Set the window size for inline completions. The window size the number of characters around the cursor used to obtain inline completions', - ) - .addText((text) => - text - .setValue(settings.completions.windowSize.toString()) - .onChange(async (value) => { - const amount = parseInt(value); - if (isNaN(amount) || amount < 0) { - return; - } - settings.completions.windowSize = amount; - await plugin.saveSettings(); - plugin.updateEditorExtension(); - }), - ); - - new Setting(containerEl) - .setName('Accept key') - .setDesc( - 'Set the key to accept inline completions. The list of available keys can be found at: https://developer.mozilla.org/en-US/docs/Web/API/UI_Events/Keyboard_event_key_values', - ) - .addText((text) => - text - .setDisabled(!settings.completions.enabled) - .setValue(settings.completions.acceptKey) - .onChange(async (value) => { - settings.completions.acceptKey = value; - await plugin.saveSettings(); - plugin.updateEditorExtension(); - }), - ); - - new Setting(containerEl) - .setName('Reject key') - .setDesc( - 'Set the key to reject inline completions. The list of available keys can be found at: https://developer.mozilla.org/en-US/docs/Web/API/UI_Events/Keyboard_event_key_values', - ) - .addText((text) => - text - .setDisabled(!settings.completions.enabled) - .setValue(settings.completions.rejectKey) - .onChange(async (value) => { - settings.completions.rejectKey = value; - await plugin.saveSettings(); - plugin.updateEditorExtension(); - }), - ); - - new Setting(containerEl) - - .setName('Ignored files') - .setDesc( - 'Set the list of files to ignore inline completions. 
The completions will not be triggered in these files.', - ) - .addTextArea((text) => - text - .setDisabled(!settings.completions.enabled) - .setValue(settings.completions.ignoredFiles.join('\n')) - .setPlaceholder('myFile.md\nmyDirectory/**/*.md') - .onChange(async (value) => { - settings.completions.ignoredFiles = value.split('\n'); - await plugin.saveSettings(); - }), - ); - - new Setting(containerEl) - .setName('Ignored tags') - .setDesc( - 'Set the list of tags to ignore inline completions. The completions will not be triggered in these tags.', - ) - .addTextArea((text) => - text - .setDisabled(!settings.completions.enabled) - .setValue(settings.completions.ignoredTags.join('\n')) - .setPlaceholder('#myTag\n#myTag2') - .onChange(async (value) => { - settings.completions.ignoredTags = value.split('\n'); - await plugin.saveSettings(); - }), - ); - - /************************************************************/ - /* Chat View */ - /************************************************************/ - - new Setting(containerEl).setName('Chat view').setHeading(); - - new Setting(containerEl) - .setName('Enable chat view') - .setDesc('Turn this on to enable chat view.') - .addToggle((toggle) => - toggle.setValue(settings.chat.enabled).onChange(async (value) => { - settings.chat.enabled = value; - await plugin.saveSettings(); - if (value) { - plugin.activateView(); - } else { - plugin.deactivateView(); - } - this.display(); // Re-render settings tab - }), - ); - - new Setting(containerEl) - .setName('Provider') - .setDesc('Select the provider for chat view.') - .addDropdown((dropdown) => { - for (const option of PROVIDERS) { - dropdown.addOption(option, PROVIDERS_NAMES[option]); - } - dropdown - .setDisabled(!settings.chat.enabled) - .setValue(settings.chat.provider) - .onChange(async (value) => { - settings.chat.provider = value as Provider; - settings.chat.model = DEFAULT_MODELS[value as Provider]; - await plugin.saveSettings(); - plugin.updateAPIClient(); - 
this.display(); // Re-render settings tab - }); - }); - - new Setting(containerEl) - .setName('Model') - .setDesc('Select the model for GPT.') - .addDropdown((dropdown) => { - for (const option of MODELS[settings.chat.provider]) { - dropdown.addOption(option, option); - } - dropdown - .setDisabled(!settings.chat.enabled) - .setValue(settings.chat.model) - .onChange(async (value) => { - settings.chat.model = value as Model; - await plugin.saveSettings(); - }); - }); - - new Setting(containerEl) - .setName('Max tokens') - .setDesc('Set the max tokens for chat view.') - .addText((text) => - text - .setValue(settings.chat.maxTokens.toString()) - .onChange(async (value) => { - const amount = parseFloat(value); - if (isNaN(amount) || amount < 0) { - return; - } - settings.chat.maxTokens = amount; - await plugin.saveSettings(); - }), - ); - - new Setting(containerEl) - .setName('Temperature') - .setDesc('Set the temperature for chat view.') - .addSlider((slider) => - slider - .setDisabled(!settings.chat.enabled) - .setValue(settings.chat.temperature) - .setLimits(0, 1, 0.01) - .setDynamicTooltip() - .onChange(async (value) => { - settings.chat.temperature = value; - await plugin.saveSettings(); - }), - ); - - /************************************************************/ - /* Cache */ - /************************************************************/ - - new Setting(containerEl).setName('Cache').setHeading(); - - new Setting(containerEl) - .setName('Enable caching') - .setDesc( - 'Turn this on to enable memory caching. 
The cached data will be invalided on startup.', - ) - .addToggle((toggle) => - toggle - .setDisabled(!settings.completions.enabled) - .setValue(settings.cache.enabled) - .onChange(async (value) => { - settings.cache.enabled = value; - await plugin.saveSettings(); - this.display(); // Re-render settings tab - }), - ); - - /************************************************************/ - /* Usage */ - /************************************************************/ - - new Setting(containerEl).setName('Usage').setHeading(); - - new Setting(containerEl) - .setName('Monthly limit') - .setDesc( - 'Set the monthly limit for the usage costs (USD). When this limit is reached, the plugin will disable both inline completions and chat view', - ) - .addText((text) => - text - .setValue(settings.usage.monthlyLimit.toString()) - .onChange(async (value) => { - const amount = parseFloat(value); - if (isNaN(amount) || amount < 0) { - return; - } - settings.usage.monthlyLimit = amount; - await plugin.saveSettings(); - }), - ); - - new Setting(containerEl) - .setName('Monthly costs') - .setDesc( - 'Below you can find the estimated usage of OpenAI API for inline completions and chat view this month', - ); - - this.showMonthlyCosts(); - } - - showMonthlyCosts() { - const { plugin } = this; - const { settings } = plugin; - - const { containerEl } = this; - - const dates = getDaysInCurrentMonth(); - const data = dates.map((date) => ({ date, cost: 0 })); - for (const [day, cost] of Object.entries(settings.usage.dailyCosts)) { - const target = new Date(day + 'T00:00:00').toDateString(); - const index = dates.findIndex((date) => date.toDateString() === target); - if (index !== -1) { - data[index].cost = cost; - } - } - // Get the accent color from the theme - // using CSS variables provided by Obsidian: - // https://docs.obsidian.md/Reference/CSS+variables/Foundations/Colors#Accent+color - const style = getComputedStyle(containerEl); - const hue = style.getPropertyValue('--accent-h'); - const 
saturation = style.getPropertyValue('--accent-s'); - const lightness = style.getPropertyValue('--accent-l'); - const backgroundColor = `hsl(${hue}, ${saturation}, ${lightness})`; - new Chart(containerEl.createEl('canvas'), { - type: 'bar', - options: { - plugins: { - tooltip: { - callbacks: { label: (item) => `$${item.parsed.y}` }, - }, - }, - }, - data: { - labels: data.map((row) => row.date.toDateString()), - datasets: [ - { - label: 'OpenAI API', - data: data.map((row) => new Number(row.cost.toFixed(5))), - backgroundColor, - }, - ], - }, - }); - } -} diff --git a/src/settings/index.ts b/src/settings/index.ts index 3b09f88..69ea6c9 100644 --- a/src/settings/index.ts +++ b/src/settings/index.ts @@ -1 +1,578 @@ -export type SettingsMigrator = (settings: From) => To; +import Chart from 'chart.js/auto'; +import { App, Notice, PluginSettingTab, Setting } from 'obsidian'; +import { ChatHistory } from 'src/api'; +import { DEFAULT_MODELS, Model, MODELS } from '../api/models'; +import { + DEFAULT_PROVIDER, + Provider, + PROVIDERS, + PROVIDERS_NAMES, +} from '../api/provider'; + +import Markpilot from '../main'; +import { getDaysInCurrentMonth, validateURL } from '../utils'; + +export interface MarkpilotSettings { + version: string; + providers: { + openai: { + apiKey: string | undefined; + }; + openrouter: { + apiKey: string | undefined; + }; + ollama: { + apiUrl: string | undefined; + }; + }; + completions: { + enabled: boolean; + provider: Provider; + model: Model; + maxTokens: number; + temperature: number; + waitTime: number; + windowSize: number; + acceptKey: string; + rejectKey: string; + ignoredFiles: string[]; + ignoredTags: string[]; + }; + chat: { + enabled: boolean; + provider: Provider; + model: Model; + maxTokens: number; + temperature: number; + history: ChatHistory; + }; + cache: { + enabled: boolean; + }; + usage: { + dailyCosts: Record; // e.g. '2021-09-01' to 10.0 (USD) + monthlyCosts: Record; // e.g. 
'2021-09' to 100.0 (USD) + monthlyLimit: number; + }; +} + +export const DEFAULT_SETTINGS: MarkpilotSettings = { + version: '1.2.0', + providers: { + openai: { + apiKey: undefined, + }, + openrouter: { + apiKey: undefined, + }, + ollama: { + apiUrl: undefined, + }, + }, + completions: { + enabled: true, + provider: DEFAULT_PROVIDER, + model: DEFAULT_MODELS[DEFAULT_PROVIDER], + maxTokens: 64, + temperature: 0, + waitTime: 500, + windowSize: 512, + acceptKey: 'Tab', + rejectKey: 'Escape', + ignoredFiles: [], + ignoredTags: [], + }, + chat: { + enabled: true, + provider: DEFAULT_PROVIDER, + model: DEFAULT_MODELS[DEFAULT_PROVIDER], + maxTokens: 1024, + temperature: 0.5, + history: { + messages: [], + response: '', + }, + }, + cache: { + enabled: false, + }, + usage: { + dailyCosts: {}, + monthlyCosts: {}, + monthlyLimit: 20, + }, +}; + +export class MarkpilotSettingTab extends PluginSettingTab { + constructor( + app: App, + private plugin: Markpilot, + ) { + super(app, plugin); + } + + async display() { + const { containerEl } = this; + + containerEl.empty(); + + const { plugin } = this; + const { settings } = plugin; + + /************************************************************/ + /* Providers */ + /************************************************************/ + + new Setting(containerEl).setName('Providers').setHeading(); + + new Setting(containerEl) + .setName('OpenAI API key') + .setDesc('Enter your OpenAI API key.') + .addText((text) => + text + .setValue(settings.providers.openai.apiKey ?? '') + .onChange(async (value) => { + settings.providers.openai.apiKey = value; + await plugin.saveSettings(); + // NOTE: + // The API client needs to be updated when the API key, API URL or provider is changed, + // because these parameters are captured by the underlying library on initialization + // and become stale when the settings are changed. 
+ plugin.updateAPIClient(); + new Notice('Successfully saved OpenAI API key.'); + }), + ); + + new Setting(containerEl) + .setName('OpenRouter API key') + .setDesc('Enter your OpenRouter API key.') + .addText((text) => + text + .setValue(settings.providers.openrouter.apiKey ?? '') + .onChange(async (value) => { + settings.providers.openrouter.apiKey = value; + await plugin.saveSettings(); + plugin.updateAPIClient(); + new Notice('Successfully saved OpenRouter API key.'); + }), + ); + + new Setting(containerEl) + .setName('Ollama API URL') + .setDesc('Enter your Ollama API URL.') + .addText((text) => + text + .setValue(settings.providers.ollama.apiUrl ?? '') + .onChange(async (value) => { + settings.providers.ollama.apiUrl = value; + await plugin.saveSettings(); + plugin.updateAPIClient(); + }), + ); + + new Setting(containerEl) + .setName('Test Ollama API connection') + .setDesc('Test the connection to the local Ollama API.') + .addButton((button) => + button.setButtonText('Test Connection').onClick(async () => { + const apiUrl = settings.providers.ollama.apiUrl; + if (apiUrl === undefined) { + new Notice('Ollama API URL is not set.'); + return; + } + if (!validateURL(apiUrl)) { + new Notice('Invalid Ollama API URL.'); + return; + } + // TODO: + // Properly implement logic for checking Ollama API status. 
+ try { + await fetch(apiUrl); + new Notice('Successfully connected to Ollama API.'); + } catch { + new Notice('Failed to connect to Ollama API.'); + } + }), + ); + + /************************************************************/ + /* Inline completions */ + /************************************************************/ + + new Setting(containerEl).setName('Inline completions').setHeading(); + + new Setting(containerEl) + .setName('Enable inline completions') + .setDesc('Turn this on to enable inline completions.') + .addToggle((toggle) => + toggle + .setValue(settings.completions.enabled) + .onChange(async (value) => { + settings.completions.enabled = value; + await plugin.saveSettings(); + this.display(); // Re-render settings tab + }), + ); + + new Setting(containerEl) + .setName('Provider') + .setDesc('Select the provider for inline completions.') + .addDropdown((dropdown) => { + for (const option of PROVIDERS) { + dropdown.addOption(option, PROVIDERS_NAMES[option]); + } + dropdown + .setDisabled(!settings.completions.enabled) + .setValue(settings.completions.provider) + .onChange(async (value) => { + settings.completions.provider = value as Provider; + settings.completions.model = DEFAULT_MODELS[value as Provider]; + await plugin.saveSettings(); + plugin.updateAPIClient(); + this.display(); // Re-render settings tab + }); + }); + + new Setting(containerEl) + .setName('Model') + .setDesc('Select the model for inline completions.') + .addDropdown((dropdown) => { + for (const option of MODELS[settings.completions.provider]) { + dropdown.addOption(option, option); + } + dropdown + .setDisabled(!settings.completions.enabled) + .setValue(settings.completions.model) + .onChange(async (value) => { + settings.completions.model = value as Model; + await plugin.saveSettings(); + }); + }); + + new Setting(containerEl) + .setName('Max tokens') + .setDesc('Set the max tokens for inline completions.') + .addText((text) => + text + 
.setValue(settings.completions.maxTokens.toString()) + .onChange(async (value) => { + const amount = parseInt(value); + if (isNaN(amount) || amount < 0) { + return; + } + settings.completions.maxTokens = amount; + await plugin.saveSettings(); + }), + ); + + new Setting(containerEl) + .setName('Temperature') + .setDesc('Set the temperature for inline completions.') + .addSlider((slider) => + slider + .setDisabled(!settings.completions.enabled) + .setValue(settings.completions.temperature) + .setLimits(0, 1, 0.01) + // TODO: + // Figure out how to add unit to the slider + .setDynamicTooltip() + .onChange(async (value) => { + settings.completions.temperature = value; + await plugin.saveSettings(); + }), + ); + + new Setting(containerEl) + .setName('Wait time') + .setDesc( + 'Time in milliseconds which it will wait for before fetching inline completions from the server.', + ) + .addSlider((slider) => + slider + .setDisabled(!settings.completions.enabled) + .setValue(settings.completions.waitTime) + .setLimits(0, 1000, 100) + .setDynamicTooltip() + .onChange(async (value) => { + settings.completions.waitTime = value; + await plugin.saveSettings(); + // NOTE: + // Editor extension needs to be updated when settings are changed + // because some fields e.g. `acceptKey` become stale and there is no way + // to make the extension query it on the fly. + plugin.updateEditorExtension(); + }), + ); + + new Setting(containerEl) + .setName('Window size') + .setDesc( + 'Set the window size for inline completions. 
The window size is the number of characters around the cursor used to obtain inline completions',
+      )
+      .addText((text) =>
+        text
+          .setValue(settings.completions.windowSize.toString())
+          .onChange(async (value) => {
+            const amount = parseInt(value);
+            if (isNaN(amount) || amount < 0) {
+              return;
+            }
+            settings.completions.windowSize = amount;
+            await plugin.saveSettings();
+            plugin.updateEditorExtension();
+          }),
+      );
+
+    new Setting(containerEl)
+      .setName('Accept key')
+      .setDesc(
+        'Set the key to accept inline completions. The list of available keys can be found at: https://developer.mozilla.org/en-US/docs/Web/API/UI_Events/Keyboard_event_key_values',
+      )
+      .addText((text) =>
+        text
+          .setDisabled(!settings.completions.enabled)
+          .setValue(settings.completions.acceptKey)
+          .onChange(async (value) => {
+            settings.completions.acceptKey = value;
+            await plugin.saveSettings();
+            plugin.updateEditorExtension();
+          }),
+      );
+
+    new Setting(containerEl)
+      .setName('Reject key')
+      .setDesc(
+        'Set the key to reject inline completions. The list of available keys can be found at: https://developer.mozilla.org/en-US/docs/Web/API/UI_Events/Keyboard_event_key_values',
+      )
+      .addText((text) =>
+        text
+          .setDisabled(!settings.completions.enabled)
+          .setValue(settings.completions.rejectKey)
+          .onChange(async (value) => {
+            settings.completions.rejectKey = value;
+            await plugin.saveSettings();
+            plugin.updateEditorExtension();
+          }),
+      );
+
+    new Setting(containerEl)
+
+      .setName('Ignored files')
+      .setDesc(
+        'Set the list of files to ignore inline completions. 
The completions will not be triggered in these files.', + ) + .addTextArea((text) => + text + .setDisabled(!settings.completions.enabled) + .setValue(settings.completions.ignoredFiles.join('\n')) + .setPlaceholder('myFile.md\nmyDirectory/**/*.md') + .onChange(async (value) => { + settings.completions.ignoredFiles = value.split('\n'); + await plugin.saveSettings(); + }), + ); + + new Setting(containerEl) + .setName('Ignored tags') + .setDesc( + 'Set the list of tags to ignore inline completions. The completions will not be triggered in these tags.', + ) + .addTextArea((text) => + text + .setDisabled(!settings.completions.enabled) + .setValue(settings.completions.ignoredTags.join('\n')) + .setPlaceholder('#myTag\n#myTag2') + .onChange(async (value) => { + settings.completions.ignoredTags = value.split('\n'); + await plugin.saveSettings(); + }), + ); + + /************************************************************/ + /* Chat View */ + /************************************************************/ + + new Setting(containerEl).setName('Chat view').setHeading(); + + new Setting(containerEl) + .setName('Enable chat view') + .setDesc('Turn this on to enable chat view.') + .addToggle((toggle) => + toggle.setValue(settings.chat.enabled).onChange(async (value) => { + settings.chat.enabled = value; + await plugin.saveSettings(); + if (value) { + plugin.activateView(); + } else { + plugin.deactivateView(); + } + this.display(); // Re-render settings tab + }), + ); + + new Setting(containerEl) + .setName('Provider') + .setDesc('Select the provider for chat view.') + .addDropdown((dropdown) => { + for (const option of PROVIDERS) { + dropdown.addOption(option, PROVIDERS_NAMES[option]); + } + dropdown + .setDisabled(!settings.chat.enabled) + .setValue(settings.chat.provider) + .onChange(async (value) => { + settings.chat.provider = value as Provider; + settings.chat.model = DEFAULT_MODELS[value as Provider]; + await plugin.saveSettings(); + plugin.updateAPIClient(); + 
this.display(); // Re-render settings tab + }); + }); + + new Setting(containerEl) + .setName('Model') + .setDesc('Select the model for GPT.') + .addDropdown((dropdown) => { + for (const option of MODELS[settings.chat.provider]) { + dropdown.addOption(option, option); + } + dropdown + .setDisabled(!settings.chat.enabled) + .setValue(settings.chat.model) + .onChange(async (value) => { + settings.chat.model = value as Model; + await plugin.saveSettings(); + }); + }); + + new Setting(containerEl) + .setName('Max tokens') + .setDesc('Set the max tokens for chat view.') + .addText((text) => + text + .setValue(settings.chat.maxTokens.toString()) + .onChange(async (value) => { + const amount = parseFloat(value); + if (isNaN(amount) || amount < 0) { + return; + } + settings.chat.maxTokens = amount; + await plugin.saveSettings(); + }), + ); + + new Setting(containerEl) + .setName('Temperature') + .setDesc('Set the temperature for chat view.') + .addSlider((slider) => + slider + .setDisabled(!settings.chat.enabled) + .setValue(settings.chat.temperature) + .setLimits(0, 1, 0.01) + .setDynamicTooltip() + .onChange(async (value) => { + settings.chat.temperature = value; + await plugin.saveSettings(); + }), + ); + + /************************************************************/ + /* Cache */ + /************************************************************/ + + new Setting(containerEl).setName('Cache').setHeading(); + + new Setting(containerEl) + .setName('Enable caching') + .setDesc( + 'Turn this on to enable memory caching. 
The cached data will be invalidated on startup.',
+      )
+      .addToggle((toggle) =>
+        toggle
+          .setDisabled(!settings.completions.enabled)
+          .setValue(settings.cache.enabled)
+          .onChange(async (value) => {
+            settings.cache.enabled = value;
+            await plugin.saveSettings();
+            this.display(); // Re-render settings tab
+          }),
+      );
+
+    /************************************************************/
+    /*                           Usage                          */
+    /************************************************************/
+
+    new Setting(containerEl).setName('Usage').setHeading();
+
+    new Setting(containerEl)
+      .setName('Monthly limit')
+      .setDesc(
+        'Set the monthly limit for the usage costs (USD). When this limit is reached, the plugin will disable both inline completions and chat view',
+      )
+      .addText((text) =>
+        text
+          .setValue(settings.usage.monthlyLimit.toString())
+          .onChange(async (value) => {
+            const amount = parseFloat(value);
+            if (isNaN(amount) || amount < 0) {
+              return;
+            }
+            settings.usage.monthlyLimit = amount;
+            await plugin.saveSettings();
+          }),
+      );
+
+    new Setting(containerEl)
+      .setName('Monthly costs')
+      .setDesc(
+        'Below you can find the estimated usage of OpenAI API for inline completions and chat view this month',
+      );
+
+    this.showMonthlyCosts();
+  }
+
+  showMonthlyCosts() {
+    const { plugin } = this;
+    const { settings } = plugin;
+
+    const { containerEl } = this;
+
+    const dates = getDaysInCurrentMonth();
+    const data = dates.map((date) => ({ date, cost: 0 }));
+    for (const [day, cost] of Object.entries(settings.usage.dailyCosts)) {
+      const target = new Date(day + 'T00:00:00').toDateString();
+      const index = dates.findIndex((date) => date.toDateString() === target);
+      if (index !== -1) {
+        data[index].cost = cost;
+      }
+    }
+    // Get the accent color from the theme
+    // using CSS variables provided by Obsidian:
+    // https://docs.obsidian.md/Reference/CSS+variables/Foundations/Colors#Accent+color
+    const style = getComputedStyle(containerEl);
+    const hue = style.getPropertyValue('--accent-h');
+    const 
saturation = style.getPropertyValue('--accent-s'); + const lightness = style.getPropertyValue('--accent-l'); + const backgroundColor = `hsl(${hue}, ${saturation}, ${lightness})`; + new Chart(containerEl.createEl('canvas'), { + type: 'bar', + options: { + plugins: { + tooltip: { + callbacks: { label: (item) => `$${item.parsed.y}` }, + }, + }, + }, + data: { + labels: data.map((row) => row.date.toDateString()), + datasets: [ + { + label: 'OpenAI API', + data: data.map((row) => new Number(row.cost.toFixed(5))), + backgroundColor, + }, + ], + }, + }); + } +} diff --git a/src/settings/migrators/1.1.0-1.2.0.ts b/src/settings/migrators/1.1.0-1.2.0.ts index aa9f826..5195fe6 100644 --- a/src/settings/migrators/1.1.0-1.2.0.ts +++ b/src/settings/migrators/1.1.0-1.2.0.ts @@ -1,4 +1,4 @@ -import { SettingsMigrator } from '..'; +import { SettingsMigrator } from '.'; import { MarkpilotSettings1_1_0 } from '../versions/1.1.0'; import { MarkpilotSettings1_2_0 } from '../versions/1.2.0'; diff --git a/src/settings/migrators/index.ts b/src/settings/migrators/index.ts new file mode 100644 index 0000000..3b09f88 --- /dev/null +++ b/src/settings/migrators/index.ts @@ -0,0 +1 @@ +export type SettingsMigrator = (settings: From) => To; diff --git a/src/settings/runner.ts b/src/settings/runner.ts index 4d69fe8..6af9d6d 100644 --- a/src/settings/runner.ts +++ b/src/settings/runner.ts @@ -1,5 +1,5 @@ -import { SettingsMigrator } from '.'; import Markpilot from '../main'; +import { SettingsMigrator } from './migrators'; import { migrateVersion1_1_0_toVersion1_2_0 } from './migrators/1.1.0-1.2.0'; export class SettingsMigrationsRunner { diff --git a/src/types.ts b/src/types.ts deleted file mode 100644 index b7f4280..0000000 --- a/src/types.ts +++ /dev/null @@ -1,11 +0,0 @@ -export type ChatRole = 'system' | 'assistant' | 'user'; - -export interface ChatMessage { - role: ChatRole; - content: string; -} - -export interface ChatHistory { - messages: ChatMessage[]; - response: string; -} From 
9c541f2d51855e5d46b679b5600f44d2772dfacd Mon Sep 17 00:00:00 2001 From: Taichi Maeda Date: Sat, 20 Apr 2024 18:23:53 +0900 Subject: [PATCH 20/26] Add scraping CLI to fetch model information --- package.json | 7 +- src/api/clients/ollama.ts | 3 +- src/api/clients/openai-compatible.ts | 2 +- src/api/clients/openai.ts | 2 +- src/api/clients/openrouter.ts | 2 +- src/api/costs.ts | 112 ------ src/api/models.ts | 60 ---- src/api/provider.ts | 19 - src/api/providers/costs.ts | 44 +++ src/api/providers/index.ts | 10 + src/api/providers/models.ts | 27 ++ src/api/providers/ollama.json | 318 +++++++++++++++++ src/api/providers/openai.json | 22 ++ src/api/providers/openrouter.json | 502 +++++++++++++++++++++++++++ src/main.ts | 4 +- src/scripts/scrape.ts | 93 +++++ src/utils.ts | 8 + tsconfig.json | 23 +- yarn.lock | 228 +++++++++++- 19 files changed, 1275 insertions(+), 211 deletions(-) delete mode 100644 src/api/costs.ts delete mode 100644 src/api/models.ts delete mode 100644 src/api/provider.ts create mode 100644 src/api/providers/costs.ts create mode 100644 src/api/providers/index.ts create mode 100644 src/api/providers/models.ts create mode 100644 src/api/providers/ollama.json create mode 100644 src/api/providers/openai.json create mode 100644 src/api/providers/openrouter.json create mode 100644 src/scripts/scrape.ts diff --git a/package.json b/package.json index fecc3a4..c350acf 100644 --- a/package.json +++ b/package.json @@ -12,7 +12,8 @@ "format:fix": "prettier src --write", "lint": "eslint src --max-warnings 0", "lint:fix": "eslint src --max-warnings 0 --fix", - "test": "echo \"Error: no test specified\"" + "test": "echo \"Error: no test specified\"", + "scrape": "ts-node src/scripts/scrape.ts" }, "keywords": [], "author": "", @@ -23,11 +24,15 @@ "@types/react-dom": "^18.2.22", "@typescript-eslint/eslint-plugin": "5.29.0", "@typescript-eslint/parser": "5.29.0", + "axios": "^1.6.8", "builtin-modules": "3.3.0", + "cheerio": "^1.0.0-rc.12", + "commander": "^12.0.0", 
"esbuild": "0.17.3", "eslint": "^8.57.0", "obsidian": "latest", "prettier": "^3.2.5", + "ts-node": "^10.9.2", "tslib": "2.4.0", "typescript": "4.7.4" }, diff --git a/src/api/clients/ollama.ts b/src/api/clients/ollama.ts index f3afd0d..da305e9 100644 --- a/src/api/clients/ollama.ts +++ b/src/api/clients/ollama.ts @@ -3,8 +3,8 @@ import OpenAI from 'openai'; import Markpilot from 'src/main'; import { validateURL } from 'src/utils'; import { APIClient } from '..'; -import { CostsTracker } from '../costs'; import { PromptGenerator } from '../prompts/generator'; +import { CostsTracker } from '../providers/costs'; import { OpenAICompatibleAPIClient } from './openai-compatible'; export class OllamaAPIClient @@ -34,6 +34,7 @@ export class OllamaAPIClient return new OpenAI({ baseURL: apiUrl, + apiKey: 'ollama', // Required but ignored. dangerouslyAllowBrowser: true, }); } diff --git a/src/api/clients/openai-compatible.ts b/src/api/clients/openai-compatible.ts index 7da46b4..edf978f 100644 --- a/src/api/clients/openai-compatible.ts +++ b/src/api/clients/openai-compatible.ts @@ -3,8 +3,8 @@ import { Notice } from 'obsidian'; import OpenAI from 'openai'; import Markpilot from 'src/main'; import { APIClient, ChatMessage } from '..'; -import { CostsTracker } from '../costs'; import { PromptGenerator } from '../prompts/generator'; +import { CostsTracker } from '../providers/costs'; export abstract class OpenAICompatibleAPIClient implements APIClient { constructor( diff --git a/src/api/clients/openai.ts b/src/api/clients/openai.ts index 2d3cbdf..5d3cb65 100644 --- a/src/api/clients/openai.ts +++ b/src/api/clients/openai.ts @@ -2,8 +2,8 @@ import { Notice } from 'obsidian'; import OpenAI from 'openai'; import Markpilot from 'src/main'; import { APIClient } from '..'; -import { CostsTracker } from '../costs'; import { PromptGenerator } from '../prompts/generator'; +import { CostsTracker } from '../providers/costs'; import { OpenAICompatibleAPIClient } from './openai-compatible'; 
export class OpenAIAPIClient diff --git a/src/api/clients/openrouter.ts b/src/api/clients/openrouter.ts index 1002b88..e00c97d 100644 --- a/src/api/clients/openrouter.ts +++ b/src/api/clients/openrouter.ts @@ -2,8 +2,8 @@ import { Notice } from 'obsidian'; import OpenAI from 'openai'; import Markpilot from 'src/main'; import { APIClient } from '..'; -import { CostsTracker } from '../costs'; import { PromptGenerator } from '../prompts/generator'; +import { CostsTracker } from '../providers/costs'; import { OpenAICompatibleAPIClient } from './openai-compatible'; export class OpenRouterAPIClient diff --git a/src/api/costs.ts b/src/api/costs.ts deleted file mode 100644 index 0f5b19a..0000000 --- a/src/api/costs.ts +++ /dev/null @@ -1,112 +0,0 @@ -import Markpilot from 'src/main'; -import { getThisMonthAsString, getTodayAsString } from 'src/utils'; -import { Model, OpenAIModel, OpenRouterModel } from './models'; -import { OFFLINE_PROVIDERS, OnlineProvider, Provider } from './provider'; - -const OPENAI_MODEL_INPUT_COSTS: Record = { - 'gpt-3.5-turbo-instruct': 1.5, - 'davinci-002': 12.0, - 'babbage-002': 1.6, - 'gpt-4-0125-preview': 10.0, - 'gpt-4-turbo-preview': 10.0, - 'gpt-4-1106-preview': 10.0, - 'gpt-4-vision-preview': 10.0, - 'gpt-4': 30.0, - 'gpt-4-0314': 30.0, - 'gpt-4-0613': 30.0, - 'gpt-4-32k': 60.0, - 'gpt-4-32k-0314': 60.0, - 'gpt-4-32k-0613': 60.0, - 'gpt-3.5-turbo': 0.5, - 'gpt-3.5-turbo-16k': 0.5, - 'gpt-3.5-turbo-0301': 0.5, - 'gpt-3.5-turbo-0613': 0.5, - 'gpt-3.5-turbo-1106': 0.5, - 'gpt-3.5-turbo-0125': 0.5, - 'gpt-3.5-turbo-16k-0613': 0.5, -} as const; - -const OPENAI_MODEL_OUTPUT_COSTS: Record = { - 'gpt-3.5-turbo-instruct': 2.0, - 'davinci-002': 12.0, - 'babbage-002': 1.6, - 'gpt-4-0125-preview': 30, - 'gpt-4-turbo-preview': 30, - 'gpt-4-1106-preview': 30, - 'gpt-4-vision-preview': 30, - 'gpt-4': 60, - 'gpt-4-0314': 60, - 'gpt-4-0613': 60, - 'gpt-4-32k': 120, - 'gpt-4-32k-0314': 120, - 'gpt-4-32k-0613': 120, - 'gpt-3.5-turbo': 1.5, - 
'gpt-3.5-turbo-16k': 1.5, - 'gpt-3.5-turbo-0301': 1.5, - 'gpt-3.5-turbo-0613': 1.5, - 'gpt-3.5-turbo-1106': 1.5, - 'gpt-3.5-turbo-0125': 1.5, - 'gpt-3.5-turbo-16k-0613': 1.5, -}; - -// TODO: -// This is a placeholder. -const OPENROUTER_INPUT_COSTS: Record = { - 'openai/gpt-3.5-turbo': 0, - 'openai/gpt-4-turbo': 0, -}; - -// TODO: -// This is a placeholder. -const OPENROUTER_OUTPUT_COSTS: Record = { - 'openai/gpt-3.5-turbo': 0, - 'openai/gpt-4-turbo': 0, -}; - -// TODO: -// Replace `Record` to an appropriate type. -const INPUT_COSTS: Record> = { - openai: OPENAI_MODEL_INPUT_COSTS, - openrouter: OPENROUTER_INPUT_COSTS, -}; - -// TODO: -// Replace `Record` to an appropriate type. -const OUTPUT_COSTS: Record> = { - openai: OPENAI_MODEL_OUTPUT_COSTS, - openrouter: OPENROUTER_OUTPUT_COSTS, -}; - -export class CostsTracker { - constructor(private plugin: Markpilot) {} - - async add( - provider: Provider, - model: Model | Model, - inputTokens: number, - outputTokens: number, - ) { - const { settings } = this.plugin; - - // No costs associated with offline providers. 
- if (provider in OFFLINE_PROVIDERS) { - return; - } - - const today = getTodayAsString(); - const thisMonth = getThisMonthAsString(); - if (settings.usage.dailyCosts[today] === undefined) { - settings.usage.dailyCosts[today] = 0; - } - - const cost = - (inputTokens * INPUT_COSTS[provider as OnlineProvider][model] + - outputTokens * OUTPUT_COSTS[provider as OnlineProvider][model]) / - 1_000_000; - - settings.usage.dailyCosts[today] += cost; - settings.usage.monthlyCosts[thisMonth] += cost; - - await this.plugin.saveSettings(); - } -} diff --git a/src/api/models.ts b/src/api/models.ts deleted file mode 100644 index b205f06..0000000 --- a/src/api/models.ts +++ /dev/null @@ -1,60 +0,0 @@ -import { Provider } from './provider'; - -export type OpenAIModel = (typeof OPENAI_MODELS)[number]; - -export type OpenRouterModel = (typeof OPENROUTER_MODELS)[number]; - -export type OllamaModel = (typeof OLLAMA_MODELS)[number]; - -export type Model = OpenAIModel | OpenRouterModel | OllamaModel; - -export const OPENAI_MODELS = [ - 'gpt-3.5-turbo-instruct', - 'davinci-002', - 'babbage-002', - 'gpt-4-0125-preview', - 'gpt-4-turbo-preview', - 'gpt-4-1106-preview', - 'gpt-4-vision-preview', - 'gpt-4', - 'gpt-4-0314', - 'gpt-4-0613', - 'gpt-4-32k', - 'gpt-4-32k-0314', - 'gpt-4-32k-0613', - 'gpt-3.5-turbo', - 'gpt-3.5-turbo-16k', - 'gpt-3.5-turbo-0301', - 'gpt-3.5-turbo-0613', - 'gpt-3.5-turbo-1106', - 'gpt-3.5-turbo-0125', - 'gpt-3.5-turbo-16k-0613', -] as const; - -// TODO: -// This is a placeholder. -export const OPENROUTER_MODELS = [ - 'openai/gpt-3.5-turbo', - 'openai/gpt-4-turbo', -] as const; - -// TODO: -// This is a placeholder. 
-export const OLLAMA_MODELS = [ - 'llama2', - 'llama3', - 'codellama', - 'phind-codellama', -] as const; - -export const MODELS = { - openai: OPENAI_MODELS, - openrouter: OPENROUTER_MODELS, - ollama: OLLAMA_MODELS, -}; - -export const DEFAULT_MODELS: Record = { - openai: 'gpt-3.5-turbo', - openrouter: 'openai/gpt-3.5-turbo', - ollama: 'llama2', -}; diff --git a/src/api/provider.ts b/src/api/provider.ts deleted file mode 100644 index 40d044f..0000000 --- a/src/api/provider.ts +++ /dev/null @@ -1,19 +0,0 @@ -export type OnlineProvider = (typeof ONLINE_PROVIDERS)[number]; - -export type OfflineProvider = (typeof OFFLINE_PROVIDERS)[number]; - -export type Provider = OnlineProvider | OfflineProvider; - -export const ONLINE_PROVIDERS = ['openai', 'openrouter'] as const; - -export const OFFLINE_PROVIDERS = ['ollama'] as const; - -export const PROVIDERS = [...ONLINE_PROVIDERS, ...OFFLINE_PROVIDERS] as const; - -export const PROVIDERS_NAMES: Record = { - openai: 'OpenAI', - openrouter: 'OpenRouter', - ollama: 'Ollama', -}; - -export const DEFAULT_PROVIDER = 'openai' as Provider; diff --git a/src/api/providers/costs.ts b/src/api/providers/costs.ts new file mode 100644 index 0000000..4e6dadb --- /dev/null +++ b/src/api/providers/costs.ts @@ -0,0 +1,44 @@ +import Markpilot from 'src/main'; +import { getThisMonthAsString, getTodayAsString } from 'src/utils'; +import { Provider } from '.'; +import { Model } from './models'; +import OllamaModelsJSON from './ollama.json'; +import OpenAIModelsJSON from './openai.json'; +import OpenRouterModelsJSON from './openrouter.json'; + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +const ModelsJSON: Record = { + ollama: OllamaModelsJSON, + openrouter: OpenRouterModelsJSON, + openai: OpenAIModelsJSON, +}; + +export class CostsTracker { + constructor(private plugin: Markpilot) {} + + async add( + provider: Provider, + model: Model | Model, + inputTokens: number, + outputTokens: number, + ) { + const { settings } = this.plugin; 
+ + const today = getTodayAsString(); + const thisMonth = getThisMonthAsString(); + if (settings.usage.dailyCosts[today] === undefined) { + settings.usage.dailyCosts[today] = 0; + } + + const cost = + (inputTokens * ModelsJSON[provider][model].inputCost + + outputTokens * ModelsJSON[provider][model].outputCost) / + // Costs are stored in per 1M token. + 1_000_000; + + settings.usage.dailyCosts[today] += cost; + settings.usage.monthlyCosts[thisMonth] += cost; + + await this.plugin.saveSettings(); + } +} diff --git a/src/api/providers/index.ts b/src/api/providers/index.ts new file mode 100644 index 0000000..22700dc --- /dev/null +++ b/src/api/providers/index.ts @@ -0,0 +1,10 @@ +export type Provider = (typeof PROVIDERS)[number]; + +export const PROVIDERS = ['openai', 'openrouter', 'ollama'] as const; +export const PROVIDERS_NAMES: Record = { + openai: 'OpenAI', + openrouter: 'OpenRouter', + ollama: 'Ollama', +}; + +export const DEFAULT_PROVIDER = 'openai' as Provider; diff --git a/src/api/providers/models.ts b/src/api/providers/models.ts new file mode 100644 index 0000000..d5eae33 --- /dev/null +++ b/src/api/providers/models.ts @@ -0,0 +1,27 @@ +import { getObjectKeys } from 'src/utils'; +import { Provider } from '.'; +import OllamaModelsJSON from './ollama.json'; +import OpenAIModelsJSON from './openai.json'; +import OpenRouterModelsJSON from './openrouter.json'; + +export type OpenAIModel = keyof typeof OpenAIModelsJSON; +export type OpenRouterModel = keyof typeof OpenRouterModelsJSON; +export type OllamaModel = keyof typeof OllamaModelsJSON; + +export type Model = OpenAIModel | OpenRouterModel | OllamaModel; + +export const OPENAI_MODELS = getObjectKeys(OpenAIModelsJSON); +export const OPENROUTER_MODELS = getObjectKeys(OpenRouterModelsJSON); +export const OLLAMA_MODELS = getObjectKeys(OllamaModelsJSON); + +export const MODELS = { + openai: OPENAI_MODELS, + openrouter: OPENROUTER_MODELS, + ollama: OLLAMA_MODELS, +}; + +export const DEFAULT_MODELS: Record = { + 
openai: 'gpt-3.5-turbo', + openrouter: 'openai/gpt-3.5-turbo', + ollama: 'llama2', +}; diff --git a/src/api/providers/ollama.json b/src/api/providers/ollama.json new file mode 100644 index 0000000..f56f4c8 --- /dev/null +++ b/src/api/providers/ollama.json @@ -0,0 +1,318 @@ +{ + "llama3": { + "inputCost": 0, + "outputCost": 0 + }, + "wizardlm2": { + "inputCost": 0, + "outputCost": 0 + }, + "mistral": { + "inputCost": 0, + "outputCost": 0 + }, + "gemma": { + "inputCost": 0, + "outputCost": 0 + }, + "mixtral": { + "inputCost": 0, + "outputCost": 0 + }, + "llama2": { + "inputCost": 0, + "outputCost": 0 + }, + "codegemma": { + "inputCost": 0, + "outputCost": 0 + }, + "command-r": { + "inputCost": 0, + "outputCost": 0 + }, + "command-r-plus": { + "inputCost": 0, + "outputCost": 0 + }, + "llava": { + "inputCost": 0, + "outputCost": 0 + }, + "dbrx": { + "inputCost": 0, + "outputCost": 0 + }, + "codellama": { + "inputCost": 0, + "outputCost": 0 + }, + "dolphin-mixtral": { + "inputCost": 0, + "outputCost": 0 + }, + "qwen": { + "inputCost": 0, + "outputCost": 0 + }, + "llama2-uncensored": { + "inputCost": 0, + "outputCost": 0 + }, + "mistral-openorca": { + "inputCost": 0, + "outputCost": 0 + }, + "deepseek-coder": { + "inputCost": 0, + "outputCost": 0 + }, + "phi": { + "inputCost": 0, + "outputCost": 0 + }, + "nous-hermes2": { + "inputCost": 0, + "outputCost": 0 + }, + "dolphin-mistral": { + "inputCost": 0, + "outputCost": 0 + }, + "orca-mini": { + "inputCost": 0, + "outputCost": 0 + }, + "nomic-embed-text": { + "inputCost": 0, + "outputCost": 0 + }, + "zephyr": { + "inputCost": 0, + "outputCost": 0 + }, + "wizard-vicuna-uncensored": { + "inputCost": 0, + "outputCost": 0 + }, + "llama2-chinese": { + "inputCost": 0, + "outputCost": 0 + }, + "openhermes": { + "inputCost": 0, + "outputCost": 0 + }, + "vicuna": { + "inputCost": 0, + "outputCost": 0 + }, + "tinydolphin": { + "inputCost": 0, + "outputCost": 0 + }, + "tinyllama": { + "inputCost": 0, + "outputCost": 0 + }, + 
"openchat": { + "inputCost": 0, + "outputCost": 0 + }, + "starcoder2": { + "inputCost": 0, + "outputCost": 0 + }, + "wizardcoder": { + "inputCost": 0, + "outputCost": 0 + }, + "stable-code": { + "inputCost": 0, + "outputCost": 0 + }, + "starcoder": { + "inputCost": 0, + "outputCost": 0 + }, + "neural-chat": { + "inputCost": 0, + "outputCost": 0 + }, + "phind-codellama": { + "inputCost": 0, + "outputCost": 0 + }, + "yi": { + "inputCost": 0, + "outputCost": 0 + }, + "dolphin-phi": { + "inputCost": 0, + "outputCost": 0 + }, + "falcon": { + "inputCost": 0, + "outputCost": 0 + }, + "orca2": { + "inputCost": 0, + "outputCost": 0 + }, + "starling-lm": { + "inputCost": 0, + "outputCost": 0 + }, + "wizard-math": { + "inputCost": 0, + "outputCost": 0 + }, + "dolphincoder": { + "inputCost": 0, + "outputCost": 0 + }, + "nous-hermes": { + "inputCost": 0, + "outputCost": 0 + }, + "bakllava": { + "inputCost": 0, + "outputCost": 0 + }, + "medllama2": { + "inputCost": 0, + "outputCost": 0 + }, + "solar": { + "inputCost": 0, + "outputCost": 0 + }, + "nous-hermes2-mixtral": { + "inputCost": 0, + "outputCost": 0 + }, + "sqlcoder": { + "inputCost": 0, + "outputCost": 0 + }, + "wizardlm-uncensored": { + "inputCost": 0, + "outputCost": 0 + }, + "codeup": { + "inputCost": 0, + "outputCost": 0 + }, + "everythinglm": { + "inputCost": 0, + "outputCost": 0 + }, + "stablelm2": { + "inputCost": 0, + "outputCost": 0 + }, + "all-minilm": { + "inputCost": 0, + "outputCost": 0 + }, + "yarn-mistral": { + "inputCost": 0, + "outputCost": 0 + }, + "samantha-mistral": { + "inputCost": 0, + "outputCost": 0 + }, + "meditron": { + "inputCost": 0, + "outputCost": 0 + }, + "stable-beluga": { + "inputCost": 0, + "outputCost": 0 + }, + "mxbai-embed-large": { + "inputCost": 0, + "outputCost": 0 + }, + "magicoder": { + "inputCost": 0, + "outputCost": 0 + }, + "stablelm-zephyr": { + "inputCost": 0, + "outputCost": 0 + }, + "llama-pro": { + "inputCost": 0, + "outputCost": 0 + }, + "yarn-llama2": { + "inputCost": 
0, + "outputCost": 0 + }, + "deepseek-llm": { + "inputCost": 0, + "outputCost": 0 + }, + "codebooga": { + "inputCost": 0, + "outputCost": 0 + }, + "mistrallite": { + "inputCost": 0, + "outputCost": 0 + }, + "wizard-vicuna": { + "inputCost": 0, + "outputCost": 0 + }, + "nexusraven": { + "inputCost": 0, + "outputCost": 0 + }, + "goliath": { + "inputCost": 0, + "outputCost": 0 + }, + "open-orca-platypus2": { + "inputCost": 0, + "outputCost": 0 + }, + "notux": { + "inputCost": 0, + "outputCost": 0 + }, + "megadolphin": { + "inputCost": 0, + "outputCost": 0 + }, + "alfred": { + "inputCost": 0, + "outputCost": 0 + }, + "duckdb-nsql": { + "inputCost": 0, + "outputCost": 0 + }, + "xwinlm": { + "inputCost": 0, + "outputCost": 0 + }, + "wizardlm": { + "inputCost": 0, + "outputCost": 0 + }, + "notus": { + "inputCost": 0, + "outputCost": 0 + }, + "codeqwen": { + "inputCost": 0, + "outputCost": 0 + }, + "snowflake-arctic-embed": { + "inputCost": 0, + "outputCost": 0 + } +} \ No newline at end of file diff --git a/src/api/providers/openai.json b/src/api/providers/openai.json new file mode 100644 index 0000000..a09e184 --- /dev/null +++ b/src/api/providers/openai.json @@ -0,0 +1,22 @@ +{ + "gpt-4-turbo": { + "inputCost": 10, + "outputCost": 30 + }, + "gpt-4": { + "inputCost": 30, + "outputCost": 60 + }, + "gpt-4-32k": { + "inputCost": 60, + "outputCost": 120 + }, + "gpt-3.5-turbo": { + "inputCost": 0.5, + "outputCost": 1.5 + }, + "gpt-3.5-turbo-instruct": { + "inputCost": 1.5, + "outputCost": 2 + } +} diff --git a/src/api/providers/openrouter.json b/src/api/providers/openrouter.json new file mode 100644 index 0000000..b1c2af9 --- /dev/null +++ b/src/api/providers/openrouter.json @@ -0,0 +1,502 @@ +{ + "nousresearch/nous-capybara-7b:free": { + "inputCost": 0, + "outputCost": 0 + }, + "mistralai/mistral-7b-instruct:free": { + "inputCost": 0, + "outputCost": 0 + }, + "openchat/openchat-7b:free": { + "inputCost": 0, + "outputCost": 0 + }, + "gryphe/mythomist-7b:free": { + 
"inputCost": 0, + "outputCost": 0 + }, + "undi95/toppy-m-7b:free": { + "inputCost": 0, + "outputCost": 0 + }, + "openrouter/cinematika-7b:free": { + "inputCost": 0, + "outputCost": 0 + }, + "google/gemma-7b-it:free": { + "inputCost": 0, + "outputCost": 0 + }, + "jebcarter/psyfighter-13b": { + "inputCost": 0.001, + "outputCost": 0.001 + }, + "koboldai/psyfighter-13b-2": { + "inputCost": 0.001, + "outputCost": 0.001 + }, + "intel/neural-chat-7b": { + "inputCost": 0.005, + "outputCost": 0.005 + }, + "haotian-liu/llava-13b": { + "inputCost": 0.005, + "outputCost": 0.005 + }, + "nousresearch/nous-hermes-2-vision-7b": { + "inputCost": 0.005, + "outputCost": 0.005 + }, + "meta-llama/llama-2-13b-chat": { + "inputCost": 0.0001206, + "outputCost": 0.0001206 + }, + "pygmalionai/mythalion-13b": { + "inputCost": 0.001125, + "outputCost": 0.001125 + }, + "xwin-lm/xwin-lm-70b": { + "inputCost": 0.00375, + "outputCost": 0.00375 + }, + "alpindale/goliath-120b": { + "inputCost": 0.009375, + "outputCost": 0.009375 + }, + "neversleep/noromaid-20b": { + "inputCost": 0.00225, + "outputCost": 0.00225 + }, + "gryphe/mythomist-7b": { + "inputCost": 0.000375, + "outputCost": 0.000375 + }, + "sophosympatheia/midnight-rose-70b": { + "inputCost": 0.009, + "outputCost": 0.009 + }, + "undi95/remm-slerp-l2-13b:extended": { + "inputCost": 0.001125, + "outputCost": 0.001125 + }, + "mancer/weaver": { + "inputCost": 0.003375, + "outputCost": 0.003375 + }, + "mistralai/mixtral-8x7b-instruct": { + "inputCost": 0.00025, + "outputCost": 0.0004167 + }, + "nousresearch/nous-hermes-llama2-13b": { + "inputCost": 0.00027, + "outputCost": 0.00027 + }, + "nousresearch/nous-capybara-7b": { + "inputCost": 0.00018, + "outputCost": 0.00018 + }, + "meta-llama/codellama-34b-instruct": { + "inputCost": 0.00072, + "outputCost": 0.00072 + }, + "codellama/codellama-70b-instruct": { + "inputCost": 0.00081, + "outputCost": 0.00081 + }, + "phind/phind-codellama-34b": { + "inputCost": 0.00072, + "outputCost": 0.00072 + }, + 
"teknium/openhermes-2-mistral-7b": { + "inputCost": 0.00018, + "outputCost": 0.00018 + }, + "teknium/openhermes-2.5-mistral-7b": { + "inputCost": 0.00018, + "outputCost": 0.00018 + }, + "undi95/remm-slerp-l2-13b": { + "inputCost": 0.00027, + "outputCost": 0.00027 + }, + "openrouter/cinematika-7b": { + "inputCost": 0.00018, + "outputCost": 0.00018 + }, + "01-ai/yi-34b-chat": { + "inputCost": 0.00072, + "outputCost": 0.00072 + }, + "01-ai/yi-34b": { + "inputCost": 0.00072, + "outputCost": 0.00072 + }, + "01-ai/yi-6b": { + "inputCost": 0.000126, + "outputCost": 0.000126 + }, + "togethercomputer/stripedhyena-nous-7b": { + "inputCost": 0.00018, + "outputCost": 0.00018 + }, + "togethercomputer/stripedhyena-hessian-7b": { + "inputCost": 0.00018, + "outputCost": 0.00018 + }, + "mistralai/mixtral-8x7b": { + "inputCost": 0.00054, + "outputCost": 0.00054 + }, + "nousresearch/nous-hermes-yi-34b": { + "inputCost": 0.00072, + "outputCost": 0.00072 + }, + "nousresearch/nous-hermes-2-mixtral-8x7b-sft": { + "inputCost": 0.00054, + "outputCost": 0.00054 + }, + "nousresearch/nous-hermes-2-mistral-7b-dpo": { + "inputCost": 0.00018, + "outputCost": 0.00018 + }, + "open-orca/mistral-7b-openorca": { + "inputCost": 0.0001425, + "outputCost": 0.0001425 + }, + "huggingfaceh4/zephyr-7b-beta": { + "inputCost": 0.0001425, + "outputCost": 0.0001425 + }, + "openai/gpt-3.5-turbo": { + "inputCost": 0.0005, + "outputCost": 0.0015 + }, + "openai/gpt-3.5-turbo-0125": { + "inputCost": 0.0005, + "outputCost": 0.0015 + }, + "openai/gpt-3.5-turbo-16k": { + "inputCost": 0.003, + "outputCost": 0.004 + }, + "openai/gpt-4-turbo": { + "inputCost": 0.01, + "outputCost": 0.03 + }, + "openai/gpt-4-turbo-preview": { + "inputCost": 0.01, + "outputCost": 0.03 + }, + "openai/gpt-4": { + "inputCost": 0.03, + "outputCost": 0.06 + }, + "openai/gpt-4-32k": { + "inputCost": 0.06, + "outputCost": 0.12 + }, + "openai/gpt-4-vision-preview": { + "inputCost": 0.01, + "outputCost": 0.03 + }, + "openai/gpt-3.5-turbo-instruct": 
{ + "inputCost": 0.0015, + "outputCost": 0.002 + }, + "google/palm-2-chat-bison": { + "inputCost": 0.00025, + "outputCost": 0.0005 + }, + "google/palm-2-codechat-bison": { + "inputCost": 0.00025, + "outputCost": 0.0005 + }, + "google/palm-2-chat-bison-32k": { + "inputCost": 0.00025, + "outputCost": 0.0005 + }, + "google/palm-2-codechat-bison-32k": { + "inputCost": 0.00025, + "outputCost": 0.0005 + }, + "google/gemini-pro": { + "inputCost": 0.000125, + "outputCost": 0.000375 + }, + "google/gemini-pro-vision": { + "inputCost": 0.000125, + "outputCost": 0.000375 + }, + "google/gemini-pro-1.5": { + "inputCost": 0.0025, + "outputCost": 0.0075 + }, + "perplexity/pplx-70b-online": { + "inputCost": 0.001, + "outputCost": 0.001 + }, + "perplexity/pplx-7b-online": { + "inputCost": 0.0002, + "outputCost": 0.0002 + }, + "perplexity/pplx-7b-chat": { + "inputCost": 0.0002, + "outputCost": 0.0002 + }, + "perplexity/pplx-70b-chat": { + "inputCost": 0.001, + "outputCost": 0.001 + }, + "perplexity/sonar-small-chat": { + "inputCost": 0.0002, + "outputCost": 0.0002 + }, + "perplexity/sonar-medium-chat": { + "inputCost": 0.0006, + "outputCost": 0.0006 + }, + "perplexity/sonar-small-online": { + "inputCost": 0.0002, + "outputCost": 0.0002 + }, + "perplexity/sonar-medium-online": { + "inputCost": 0.0006, + "outputCost": 0.0006 + }, + "anthropic/claude-3-opus": { + "inputCost": 0.015, + "outputCost": 0.075 + }, + "anthropic/claude-3-sonnet": { + "inputCost": 0.003, + "outputCost": 0.015 + }, + "anthropic/claude-3-haiku": { + "inputCost": 0.00025, + "outputCost": 0.00125 + }, + "anthropic/claude-3-opus:beta": { + "inputCost": 0.015, + "outputCost": 0.075 + }, + "anthropic/claude-3-sonnet:beta": { + "inputCost": 0.003, + "outputCost": 0.015 + }, + "anthropic/claude-3-haiku:beta": { + "inputCost": 0.00025, + "outputCost": 0.00125 + }, + "meta-llama/llama-2-70b-chat": { + "inputCost": 0.0006, + "outputCost": 0.0019 + }, + "nousresearch/nous-capybara-34b": { + "inputCost": 0.0009, + 
"outputCost": 0.0009 + }, + "jondurbin/airoboros-l2-70b": { + "inputCost": 0.0007, + "outputCost": 0.0009 + }, + "jondurbin/bagel-34b": { + "inputCost": 0.00575, + "outputCost": 0.00575 + }, + "austism/chronos-hermes-13b": { + "inputCost": 0.00018, + "outputCost": 0.00018 + }, + "mistralai/mistral-7b-instruct": { + "inputCost": 0.0001, + "outputCost": 0.00025 + }, + "gryphe/mythomax-l2-13b": { + "inputCost": 0.00018, + "outputCost": 0.00018 + }, + "openchat/openchat-7b": { + "inputCost": 0.00013, + "outputCost": 0.00013 + }, + "undi95/toppy-m-7b": { + "inputCost": 0.00015, + "outputCost": 0.00015 + }, + "lizpreciatior/lzlv-70b-fp16-hf": { + "inputCost": 0.0007, + "outputCost": 0.0009 + }, + "cognitivecomputations/dolphin-mixtral-8x7b": { + "inputCost": 0.0005, + "outputCost": 0.0005 + }, + "neversleep/noromaid-mixtral-8x7b-instruct": { + "inputCost": 0.008, + "outputCost": 0.008 + }, + "nousresearch/nous-hermes-2-mixtral-8x7b-dpo": { + "inputCost": 0.0003, + "outputCost": 0.0005 + }, + "rwkv/rwkv-5-world-3b": { + "inputCost": 0, + "outputCost": 0 + }, + "recursal/rwkv-5-3b-ai-town": { + "inputCost": 0, + "outputCost": 0 + }, + "recursal/eagle-7b": { + "inputCost": 0, + "outputCost": 0 + }, + "google/gemma-7b-it": { + "inputCost": 0.0001, + "outputCost": 0.0001 + }, + "databricks/dbrx-instruct": { + "inputCost": 0.0006, + "outputCost": 0.0006 + }, + "huggingfaceh4/zephyr-orpo-141b-a35b": { + "inputCost": 0.00065, + "outputCost": 0.00065 + }, + "meta-llama/llama-3-8b-instruct": { + "inputCost": 0.0001, + "outputCost": 0.0001 + }, + "meta-llama/llama-3-70b-instruct": { + "inputCost": 0.0008, + "outputCost": 0.0008 + }, + "microsoft/wizardlm-2-8x22b": { + "inputCost": 0.00065, + "outputCost": 0.00065 + }, + "microsoft/wizardlm-2-7b": { + "inputCost": 0.0001, + "outputCost": 0.0001 + }, + "mistralai/mixtral-8x22b": { + "inputCost": 0.0009, + "outputCost": 0.0009 + }, + "mistralai/mixtral-8x22b-instruct": { + "inputCost": 0.00065, + "outputCost": 0.00065 + }, + 
"anthropic/claude-2": { + "inputCost": 0.008, + "outputCost": 0.024 + }, + "anthropic/claude-2.1": { + "inputCost": 0.008, + "outputCost": 0.024 + }, + "anthropic/claude-2.0": { + "inputCost": 0.008, + "outputCost": 0.024 + }, + "anthropic/claude-instant-1": { + "inputCost": 0.0008, + "outputCost": 0.0024 + }, + "anthropic/claude-instant-1.2": { + "inputCost": 0.0008, + "outputCost": 0.0024 + }, + "anthropic/claude-2:beta": { + "inputCost": 0.008, + "outputCost": 0.024 + }, + "anthropic/claude-2.1:beta": { + "inputCost": 0.008, + "outputCost": 0.024 + }, + "anthropic/claude-2.0:beta": { + "inputCost": 0.008, + "outputCost": 0.024 + }, + "anthropic/claude-instant-1:beta": { + "inputCost": 0.0008, + "outputCost": 0.0024 + }, + "huggingfaceh4/zephyr-7b-beta:free": { + "inputCost": 0, + "outputCost": 0 + }, + "mistralai/mixtral-8x7b-instruct:nitro": { + "inputCost": 0.0005, + "outputCost": 0.0005 + }, + "meta-llama/llama-2-70b-chat:nitro": { + "inputCost": 0.0009, + "outputCost": 0.0009 + }, + "gryphe/mythomax-l2-13b:nitro": { + "inputCost": 0.0002, + "outputCost": 0.0002 + }, + "mistralai/mistral-7b-instruct:nitro": { + "inputCost": 0.0002, + "outputCost": 0.0002 + }, + "google/gemma-7b-it:nitro": { + "inputCost": 0.0002, + "outputCost": 0.0002 + }, + "databricks/dbrx-instruct:nitro": { + "inputCost": 0.0009, + "outputCost": 0.0009 + }, + "undi95/toppy-m-7b:nitro": { + "inputCost": 0.00015, + "outputCost": 0.00015 + }, + "microsoft/wizardlm-2-8x22b:nitro": { + "inputCost": 0.001, + "outputCost": 0.001 + }, + "meta-llama/llama-3-8b-instruct:nitro": { + "inputCost": 0.0002, + "outputCost": 0.0002 + }, + "meta-llama/llama-3-70b-instruct:nitro": { + "inputCost": 0.0009, + "outputCost": 0.0009 + }, + "gryphe/mythomax-l2-13b:extended": { + "inputCost": 0.0003, + "outputCost": 0.0003 + }, + "mistralai/mistral-tiny": { + "inputCost": 0.00025, + "outputCost": 0.00025 + }, + "mistralai/mistral-small": { + "inputCost": 0.002, + "outputCost": 0.006 + }, + 
"mistralai/mistral-medium": { + "inputCost": 0.0027, + "outputCost": 0.0081 + }, + "mistralai/mistral-large": { + "inputCost": 0.008, + "outputCost": 0.024 + }, + "cohere/command": { + "inputCost": 0.001, + "outputCost": 0.002 + }, + "cohere/command-r": { + "inputCost": 0.0005, + "outputCost": 0.0015 + }, + "cohere/command-r-plus": { + "inputCost": 0.003, + "outputCost": 0.015 + } +} \ No newline at end of file diff --git a/src/main.ts b/src/main.ts index a76c2d7..ce839fe 100644 --- a/src/main.ts +++ b/src/main.ts @@ -12,9 +12,9 @@ import { APIClient } from './api'; import { OllamaAPIClient } from './api/clients/ollama'; import { OpenAIAPIClient } from './api/clients/openai'; import { OpenRouterAPIClient } from './api/clients/openrouter'; -import { CostsTracker } from './api/costs'; import { PromptGenerator } from './api/prompts/generator'; -import { Provider } from './api/provider'; +import { Provider } from './api/providers'; +import { CostsTracker } from './api/providers/costs'; import { MemoryCacheProxy } from './api/proxies/memory-cache'; import { UsageMonitorProxy } from './api/proxies/usage-monitor'; import { CHAT_VIEW_TYPE, ChatView } from './chat/view'; diff --git a/src/scripts/scrape.ts b/src/scripts/scrape.ts new file mode 100644 index 0000000..84e8e5d --- /dev/null +++ b/src/scripts/scrape.ts @@ -0,0 +1,93 @@ +import axios from 'axios'; +import * as cheerio from 'cheerio'; +import { program } from 'commander'; +import * as fs from 'fs'; + +const MODELS_PATH = 'src/api/models'; +const OPENAI_MODELS_PATH = `${MODELS_PATH}/openai.json`; +const OPEN_ROUTER_MODELS_PATH = `${MODELS_PATH}/openrouter.json`; +const OLLAMA_MODELS_PATH = `${MODELS_PATH}/ollama.json`; + +async function scrapeOpenRouterModels() { + const response = await axios.get('https://openrouter.ai/docs'); + const selector = cheerio.load(response.data); + const rows = selector('#models > div > table > tbody > tr'); + const data: Record = {}; + for (const row of rows) { + const model = 
selector(row) + .find('td:nth-child(1) > code') + .first() + .text() + .trim(); + const inputCost = selector(row) + .find('td:nth-child(2) > div') + .contents() + .first() + .text() + .trim() + .replace('$', ''); + const outputCost = selector(row) + .find('td:nth-child(3) > div') + .contents() + .first() + .text() + .trim() + .replace('$', ''); + data[model] = { + // Normalise to per 1k token to 1M token. + inputCost: parseFloat(inputCost) * 1_000, + outputCost: parseFloat(outputCost) * 1_000, + }; + } + const json = JSON.stringify(data, null, 2); + fs.writeFileSync(OPEN_ROUTER_MODELS_PATH, json); +} + +async function scrapeOllamaModels() { + const response = await axios.get('https://ollama.com/library'); + const selector = cheerio.load(response.data); + const rows = selector('#repo > ul > li'); + const data: Record = {}; + for (const row of rows) { + const model = selector(row).find('a > h2').first().text().trim(); + const inputCost = 0; + const outputCost = 0; + data[model] = { + inputCost, + outputCost, + }; + } + const json = JSON.stringify(data, null, 2); + fs.writeFileSync(OLLAMA_MODELS_PATH, json); +} + +async function main(options: Options) { + switch (options.provider) { + case 'openai': + // OpenAI's pricing information is not available for some models, + // and even if they are available, they are not in a structured format and come with + // unpredictable naming patterns e.g. `gpt-4-turbo` pointing to `gpt-4-turbo-2024-04-09`. + // So for now we resort to manually update the models JSON file. 
+ console.error( + `OpenAI provider is not supported for scraping.\nPlease update the JSON file at ${OPENAI_MODELS_PATH} manually by checking https://openai.com/pricing and https://platform.openai.com/docs/models.`, + ); + break; + case 'openrouter': + await scrapeOpenRouterModels(); + break; + case 'ollama': + await scrapeOllamaModels(); + break; + default: + console.error('Invalid provider'); + } +} + +interface Options { + provider: string; +} +program.option('-p, --provider ', 'Provider to scrape'); +program.parse(); +const options = program.opts() as Options; + +main(options); diff --git a/src/utils.ts b/src/utils.ts index e7140ab..9339544 100644 --- a/src/utils.ts +++ b/src/utils.ts @@ -76,3 +76,11 @@ export type Equal = (() => T extends X ? 1 : 2) extends () => T extends Y ? 1 : 2 ? true : false; + +// Utility function to get the keys of an object +// that provides more informative typing on the return value than `Object.keys`. +export function getObjectKeys>( + obj: T, +): (keyof T)[] { + return Object.keys(obj); +} diff --git a/tsconfig.json b/tsconfig.json index 01097e2..ed1043d 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -1,4 +1,10 @@ { + "ts-node": { + // For scraping CLI scripts + "compilerOptions": { + "module": "CommonJS" + } + }, "compilerOptions": { "baseUrl": ".", "inlineSourceMap": true, @@ -10,17 +16,12 @@ "moduleResolution": "node", "importHelpers": true, "isolatedModules": true, - "strictNullChecks": true, - "lib": [ - "DOM", - "ES5", - "ES6", - "ES7" - ], + "strictNullChecks": true, + "lib": ["DOM", "ES5", "ES6", "ES7"], "jsx": "react-jsx", + // For loading model JSON files. 
+ "resolveJsonModule": true, + "esModuleInterop": true }, - "include": [ - "**/*.ts", - "**/*.tsx" - ], + "include": ["**/*.ts", "**/*.tsx"] } diff --git a/yarn.lock b/yarn.lock index 4a12b2b..12b5166 100644 --- a/yarn.lock +++ b/yarn.lock @@ -21,6 +21,13 @@ style-mod "^4.1.0" w3c-keyname "^2.2.4" +"@cspotcode/source-map-support@^0.8.0": + version "0.8.1" + resolved "https://registry.yarnpkg.com/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz#00629c35a688e05a88b1cda684fb9d5e73f000a1" + integrity sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw== + dependencies: + "@jridgewell/trace-mapping" "0.3.9" + "@esbuild/android-arm64@0.17.3": version "0.17.3" resolved "https://registry.yarnpkg.com/@esbuild/android-arm64/-/android-arm64-0.17.3.tgz#35d045f69c9b4cf3f8efcd1ced24a560213d3346" @@ -182,6 +189,24 @@ resolved "https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-2.0.2.tgz#d9fae00a2d5cb40f92cfe64b47ad749fbc38f917" integrity sha512-6EwiSjwWYP7pTckG6I5eyFANjPhmPjUX9JRLUSfNPC7FX7zK9gyZAfUEaECL6ALTpGX5AjnBq3C9XmVWPitNpw== +"@jridgewell/resolve-uri@^3.0.3": + version "3.1.2" + resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz#7a0ee601f60f99a20c7c7c5ff0c80388c1189bd6" + integrity sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw== + +"@jridgewell/sourcemap-codec@^1.4.10": + version "1.4.15" + resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz#d7c6e6755c78567a951e04ab52ef0fd26de59f32" + integrity sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg== + +"@jridgewell/trace-mapping@0.3.9": + version "0.3.9" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz#6534fd5933a53ba7cbf3a17615e273a0d1273ff9" + integrity 
sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ== + dependencies: + "@jridgewell/resolve-uri" "^3.0.3" + "@jridgewell/sourcemap-codec" "^1.4.10" + "@kurkle/color@^0.3.0": version "0.3.2" resolved "https://registry.yarnpkg.com/@kurkle/color/-/color-0.3.2.tgz#5acd38242e8bde4f9986e7913c8fdf49d3aa199f" @@ -208,6 +233,26 @@ "@nodelib/fs.scandir" "2.1.5" fastq "^1.6.0" +"@tsconfig/node10@^1.0.7": + version "1.0.11" + resolved "https://registry.yarnpkg.com/@tsconfig/node10/-/node10-1.0.11.tgz#6ee46400685f130e278128c7b38b7e031ff5b2f2" + integrity sha512-DcRjDCujK/kCk/cUe8Xz8ZSpm8mS3mNNpta+jGCA6USEDfktlNvm1+IuZ9eTcDbNk41BHwpHHeW+N1lKCz4zOw== + +"@tsconfig/node12@^1.0.7": + version "1.0.11" + resolved "https://registry.yarnpkg.com/@tsconfig/node12/-/node12-1.0.11.tgz#ee3def1f27d9ed66dac6e46a295cffb0152e058d" + integrity sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag== + +"@tsconfig/node14@^1.0.0": + version "1.0.3" + resolved "https://registry.yarnpkg.com/@tsconfig/node14/-/node14-1.0.3.tgz#e4386316284f00b98435bf40f72f75a09dabf6c1" + integrity sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow== + +"@tsconfig/node16@^1.0.2": + version "1.0.4" + resolved "https://registry.yarnpkg.com/@tsconfig/node16/-/node16-1.0.4.tgz#0b92dcc0cc1c81f6f306a381f28e31b1a56536e9" + integrity sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA== + "@types/codemirror@5.60.8": version "5.60.8" resolved "https://registry.yarnpkg.com/@types/codemirror/-/codemirror-5.60.8.tgz#b647d04b470e8e1836dd84b2879988fc55c9de68" @@ -424,7 +469,12 @@ acorn-jsx@^5.3.2: resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937" integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ== -acorn@^8.9.0: +acorn-walk@^8.1.1: + version "8.3.2" + 
resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.3.2.tgz#7703af9415f1b6db9315d6895503862e231d34aa" + integrity sha512-cjkyv4OtNCIeqhHrfS81QWXoCBPExR/J62oyEqepVw8WaQeSqpW2uhuLPh1m9eWhDuOo/jUXVTlifvesOWp/4A== + +acorn@^8.4.1, acorn@^8.9.0: version "8.11.3" resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.11.3.tgz#71e0b14e13a4ec160724b38fb7b0f233b1b81d7a" integrity sha512-Y9rRfJG5jcKOE0CLisYbojUjIrIEE7AGMzA/Sm4BslANhbS+cDMpgBdcPT91oJ7OuJ9hYJBx59RjbhxVnrF8Xg== @@ -458,6 +508,11 @@ ansi-styles@^4.1.0: dependencies: color-convert "^2.0.1" +arg@^4.1.0: + version "4.1.3" + resolved "https://registry.yarnpkg.com/arg/-/arg-4.1.3.tgz#269fc7ad5b8e42cb63c896d5666017261c144089" + integrity sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA== + argparse@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38" @@ -473,6 +528,15 @@ asynckit@^0.4.0: resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" integrity sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q== +axios@^1.6.8: + version "1.6.8" + resolved "https://registry.yarnpkg.com/axios/-/axios-1.6.8.tgz#66d294951f5d988a00e87a0ffb955316a619ea66" + integrity sha512-v/ZHtJDU39mDpyBoFVkETcd/uNdxrWRrg3bKpOKzXFA6Bvqopts6ALSMU3y6ijYxbw2B+wPrIv46egTzJXCLGQ== + dependencies: + follow-redirects "^1.15.6" + form-data "^4.0.0" + proxy-from-env "^1.1.0" + bail@^2.0.0: version "2.0.2" resolved "https://registry.yarnpkg.com/bail/-/bail-2.0.2.tgz#d26f5cd8fe5d6f832a31517b9f7c356040ba6d5d" @@ -493,6 +557,11 @@ base64-js@^1.5.1: resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a" integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA== +boolbase@^1.0.0: + version "1.0.0" + resolved 
"https://registry.yarnpkg.com/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e" + integrity sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww== + brace-expansion@^1.1.7: version "1.1.11" resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" @@ -570,6 +639,31 @@ chart.js@^4.4.2: dependencies: "@kurkle/color" "^0.3.0" +cheerio-select@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/cheerio-select/-/cheerio-select-2.1.0.tgz#4d8673286b8126ca2a8e42740d5e3c4884ae21b4" + integrity sha512-9v9kG0LvzrlcungtnJtpGNxY+fzECQKhK4EGJX2vByejiMX84MFNQw4UxPJl3bFbTMw+Dfs37XaIkCwTZfLh4g== + dependencies: + boolbase "^1.0.0" + css-select "^5.1.0" + css-what "^6.1.0" + domelementtype "^2.3.0" + domhandler "^5.0.3" + domutils "^3.0.1" + +cheerio@^1.0.0-rc.12: + version "1.0.0-rc.12" + resolved "https://registry.yarnpkg.com/cheerio/-/cheerio-1.0.0-rc.12.tgz#788bf7466506b1c6bf5fae51d24a2c4d62e47683" + integrity sha512-VqR8m68vM46BNnuZ5NtnGBKIE/DfN0cRIzg9n40EIq9NOv90ayxLBXA8fXC5gquFRGJSTRqBq25Jt2ECLR431Q== + dependencies: + cheerio-select "^2.1.0" + dom-serializer "^2.0.0" + domhandler "^5.0.3" + domutils "^3.0.1" + htmlparser2 "^8.0.1" + parse5 "^7.0.0" + parse5-htmlparser2-tree-adapter "^7.0.0" + color-convert@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" @@ -594,6 +688,11 @@ comma-separated-tokens@^2.0.0: resolved "https://registry.yarnpkg.com/comma-separated-tokens/-/comma-separated-tokens-2.0.3.tgz#4e89c9458acb61bc8fef19f4529973b2392839ee" integrity sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg== +commander@^12.0.0: + version "12.0.0" + resolved "https://registry.yarnpkg.com/commander/-/commander-12.0.0.tgz#b929db6df8546080adfd004ab215ed48cf6f2592" + integrity 
sha512-MwVNWlYjDTtOjX5PiD7o5pK0UrFU/OYgcJfjjK4RaHZETNtjJqrZa9Y9ds88+A+f+d5lv+561eZ+yCKoS3gbAA== + commander@^8.3.0: version "8.3.0" resolved "https://registry.yarnpkg.com/commander/-/commander-8.3.0.tgz#4837ea1b2da67b9c616a67afbb0fafee567bca66" @@ -604,6 +703,11 @@ concat-map@0.0.1: resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg== +create-require@^1.1.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/create-require/-/create-require-1.1.1.tgz#c1d7e8f1e5f6cfc9ff65f9cd352d37348756c333" + integrity sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ== + cross-spawn@^7.0.2: version "7.0.3" resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" @@ -618,6 +722,22 @@ crypt@0.0.2: resolved "https://registry.yarnpkg.com/crypt/-/crypt-0.0.2.tgz#88d7ff7ec0dfb86f713dc87bbb42d044d3e6c41b" integrity sha512-mCxBlsHFYh9C+HVpiEacem8FEBnMXgU9gy4zmNC+SXAZNB/1idgp/aulFJ4FgCi7GPEVbfyng092GqL2k2rmow== +css-select@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/css-select/-/css-select-5.1.0.tgz#b8ebd6554c3637ccc76688804ad3f6a6fdaea8a6" + integrity sha512-nwoRF1rvRRnnCqqY7updORDsuqKzqYJ28+oSMaJMMgOauh3fvwHqMS7EZpIPqK8GL+g9mKxF1vP/ZjSeNjEVHg== + dependencies: + boolbase "^1.0.0" + css-what "^6.1.0" + domhandler "^5.0.2" + domutils "^3.0.1" + nth-check "^2.0.1" + +css-what@^6.1.0: + version "6.1.0" + resolved "https://registry.yarnpkg.com/css-what/-/css-what-6.1.0.tgz#fb5effcf76f1ddea2c81bdfaa4de44e79bac70f4" + integrity sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw== + csstype@^3.0.2: version "3.1.3" resolved "https://registry.yarnpkg.com/csstype/-/csstype-3.1.3.tgz#d80ff294d114fb0e6ac500fbf85b60137d7eff81" @@ -659,6 +779,11 @@ devlop@^1.0.0, 
devlop@^1.1.0: dependencies: dequal "^2.0.0" +diff@^4.0.1: + version "4.0.2" + resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d" + integrity sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A== + digest-fetch@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/digest-fetch/-/digest-fetch-1.3.0.tgz#898e69264d00012a23cf26e8a3e40320143fc661" @@ -681,7 +806,37 @@ doctrine@^3.0.0: dependencies: esutils "^2.0.2" -entities@^4.4.0: +dom-serializer@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-2.0.0.tgz#e41b802e1eedf9f6cae183ce5e622d789d7d8e53" + integrity sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg== + dependencies: + domelementtype "^2.3.0" + domhandler "^5.0.2" + entities "^4.2.0" + +domelementtype@^2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-2.3.0.tgz#5c45e8e869952626331d7aab326d01daf65d589d" + integrity sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw== + +domhandler@^5.0.2, domhandler@^5.0.3: + version "5.0.3" + resolved "https://registry.yarnpkg.com/domhandler/-/domhandler-5.0.3.tgz#cc385f7f751f1d1fc650c21374804254538c7d31" + integrity sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w== + dependencies: + domelementtype "^2.3.0" + +domutils@^3.0.1: + version "3.1.0" + resolved "https://registry.yarnpkg.com/domutils/-/domutils-3.1.0.tgz#c47f551278d3dc4b0b1ab8cbb42d751a6f0d824e" + integrity sha512-H78uMmQtI2AhgDJjWeQmHwJJ2bLPD3GMmO7Zja/ZZh84wkm+4ut+IUnUdRa8uCGX88DiVx1j6FRe1XfxEgjEZA== + dependencies: + dom-serializer "^2.0.0" + domelementtype "^2.3.0" + domhandler "^5.0.3" + +entities@^4.2.0, entities@^4.4.0: version "4.5.0" resolved 
"https://registry.yarnpkg.com/entities/-/entities-4.5.0.tgz#5d268ea5e7113ec74c4d033b79ea5a35a488fb48" integrity sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw== @@ -918,6 +1073,11 @@ flatted@^3.2.9: resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.3.1.tgz#21db470729a6734d4997002f439cb308987f567a" integrity sha512-X8cqMLLie7KsNUDSdzeN8FYK9rEt4Dt67OsG/DNGnYTSDBG4uFAJFBnUeiV+zCVAvwFy56IjM9sH51jVaEhNxw== +follow-redirects@^1.15.6: + version "1.15.6" + resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.6.tgz#7f815c0cda4249c74ff09e95ef97c23b5fd0399b" + integrity sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA== + form-data-encoder@1.7.2: version "1.7.2" resolved "https://registry.yarnpkg.com/form-data-encoder/-/form-data-encoder-1.7.2.tgz#1f1ae3dccf58ed4690b86d87e4f57c654fbab040" @@ -1118,6 +1278,16 @@ html-url-attributes@^3.0.0: resolved "https://registry.yarnpkg.com/html-url-attributes/-/html-url-attributes-3.0.0.tgz#fc4abf0c3fb437e2329c678b80abb3c62cff6f08" integrity sha512-/sXbVCWayk6GDVg3ctOX6nxaVj7So40FcFAnWlWGNAB1LpYKcV5Cd10APjPjW80O7zYW2MsjBV4zZ7IZO5fVow== +htmlparser2@^8.0.1: + version "8.0.2" + resolved "https://registry.yarnpkg.com/htmlparser2/-/htmlparser2-8.0.2.tgz#f002151705b383e62433b5cf466f5b716edaec21" + integrity sha512-GYdjWKDkbRLkZ5geuHs5NY1puJ+PXwP7+fHPRz06Eirsb9ugf6d8kkXav6ADhcODhFFPMIXyxkxSuMf3D6NCFA== + dependencies: + domelementtype "^2.3.0" + domhandler "^5.0.3" + domutils "^3.0.1" + entities "^4.4.0" + humanize-ms@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/humanize-ms/-/humanize-ms-1.2.1.tgz#c46e3159a293f6b896da29316d8b6fe8bb79bbed" @@ -1313,6 +1483,11 @@ lucide-react@^0.363.0: resolved "https://registry.yarnpkg.com/lucide-react/-/lucide-react-0.363.0.tgz#2bb1f9d09b830dda86f5118fcd097f87247fe0e3" integrity 
sha512-AlsfPCsXQyQx7wwsIgzcKOL9LwC498LIMAo+c0Es5PkHJa33xwmYAkkSoKoJWWWSYQEStqu58/jT4tL2gi32uQ== +make-error@^1.1.1: + version "1.3.6" + resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2" + integrity sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw== + md5@^2.3.0: version "2.3.0" resolved "https://registry.yarnpkg.com/md5/-/md5-2.3.0.tgz#c3da9a6aae3a30b46b7b0c349b87b110dc3bda4f" @@ -1718,6 +1893,13 @@ node-fetch@^2.6.7: dependencies: whatwg-url "^5.0.0" +nth-check@^2.0.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-2.1.1.tgz#c9eab428effce36cd6b92c924bdb000ef1f1ed1d" + integrity sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w== + dependencies: + boolbase "^1.0.0" + obsidian@latest: version "1.5.7-1" resolved "https://registry.yarnpkg.com/obsidian/-/obsidian-1.5.7-1.tgz#6e367f015f6a1b6b13204135434bbe3c04d4dfc3" @@ -1795,6 +1977,14 @@ parse-entities@^4.0.0: is-decimal "^2.0.0" is-hexadecimal "^2.0.0" +parse5-htmlparser2-tree-adapter@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/parse5-htmlparser2-tree-adapter/-/parse5-htmlparser2-tree-adapter-7.0.0.tgz#23c2cc233bcf09bb7beba8b8a69d46b08c62c2f1" + integrity sha512-B77tOZrqqfUfnVcOrUvfdLbz4pu4RopLD/4vmu3HUPswwTA8OH0EMW9BlWR2B0RCoiZRAHEUu7IxeP1Pd1UU+g== + dependencies: + domhandler "^5.0.2" + parse5 "^7.0.0" + parse5@^7.0.0: version "7.1.2" resolved "https://registry.yarnpkg.com/parse5/-/parse5-7.1.2.tgz#0736bebbfd77793823240a23b7fc5e010b7f8e32" @@ -1842,6 +2032,11 @@ property-information@^6.0.0: resolved "https://registry.yarnpkg.com/property-information/-/property-information-6.4.1.tgz#de8b79a7415fd2107dfbe65758bb2cc9dfcf60ac" integrity sha512-OHYtXfu5aI2sS2LWFSN5rgJjrQ4pCy8i1jubJLe2QvMF8JJ++HXTUIVWFLfXJoaOfvYYjk2SN8J2wFUWIGXT4w== +proxy-from-env@^1.1.0: + version "1.1.0" + resolved 
"https://registry.yarnpkg.com/proxy-from-env/-/proxy-from-env-1.1.0.tgz#e102f16ca355424865755d2c9e8ea4f24d58c3e2" + integrity sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg== + punycode@^2.1.0: version "2.3.1" resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.3.1.tgz#027422e2faec0b25e1549c3e1bd8309b9133b6e5" @@ -2058,6 +2253,25 @@ trough@^2.0.0: resolved "https://registry.yarnpkg.com/trough/-/trough-2.2.0.tgz#94a60bd6bd375c152c1df911a4b11d5b0256f50f" integrity sha512-tmMpK00BjZiUyVyvrBK7knerNgmgvcV/KLVyuma/SC+TQN167GrMRciANTz09+k3zW8L8t60jWO1GpfkZdjTaw== +ts-node@^10.9.2: + version "10.9.2" + resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-10.9.2.tgz#70f021c9e185bccdca820e26dc413805c101c71f" + integrity sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ== + dependencies: + "@cspotcode/source-map-support" "^0.8.0" + "@tsconfig/node10" "^1.0.7" + "@tsconfig/node12" "^1.0.7" + "@tsconfig/node14" "^1.0.0" + "@tsconfig/node16" "^1.0.2" + acorn "^8.4.1" + acorn-walk "^8.1.1" + arg "^4.1.0" + create-require "^1.1.0" + diff "^4.0.1" + make-error "^1.1.1" + v8-compile-cache-lib "^3.0.1" + yn "3.1.1" + tslib@2.4.0: version "2.4.0" resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.4.0.tgz#7cecaa7f073ce680a05847aa77be941098f36dc3" @@ -2171,6 +2385,11 @@ uri-js@^4.2.2: dependencies: punycode "^2.1.0" +v8-compile-cache-lib@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz#6336e8d71965cb3d35a1bbb7868445a7c05264bf" + integrity sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg== + vfile-location@^5.0.0: version "5.0.2" resolved "https://registry.yarnpkg.com/vfile-location/-/vfile-location-5.0.2.tgz#220d9ca1ab6f8b2504a4db398f7ebc149f9cb464" @@ -2246,6 +2465,11 @@ yallist@^4.0.0: resolved 
"https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== +yn@3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50" + integrity sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q== + yocto-queue@^0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b" From c6c81c988ba378ec9a1d0ec3968c03f04883bcfd Mon Sep 17 00:00:00 2001 From: Taichi Maeda Date: Sat, 20 Apr 2024 18:24:09 +0900 Subject: [PATCH 21/26] Update settings migrators --- src/settings/index.ts | 6 +- src/settings/migrators/1.1.0-1.2.0.ts | 14 +- .../versions/1.1.0/{ => api}/openai.ts | 0 src/settings/versions/1.1.0/index.ts | 2 +- .../versions/1.2.0/{types.ts => api/index.ts} | 0 .../versions/1.2.0/api/providers/index.ts | 10 + .../versions/1.2.0/api/providers/models.ts | 27 + .../versions/1.2.0/api/providers/ollama.json | 318 +++++++++++ .../versions/1.2.0/api/providers/openai.json | 22 + .../1.2.0/api/providers/openrouter.json | 502 ++++++++++++++++++ src/settings/versions/1.2.0/index.ts | 6 +- src/settings/versions/1.2.0/models.ts | 60 --- src/settings/versions/1.2.0/provider.ts | 13 - 13 files changed, 899 insertions(+), 81 deletions(-) rename src/settings/versions/1.1.0/{ => api}/openai.ts (100%) rename src/settings/versions/1.2.0/{types.ts => api/index.ts} (100%) create mode 100644 src/settings/versions/1.2.0/api/providers/index.ts create mode 100644 src/settings/versions/1.2.0/api/providers/models.ts create mode 100644 src/settings/versions/1.2.0/api/providers/ollama.json create mode 100644 src/settings/versions/1.2.0/api/providers/openai.json create mode 100644 src/settings/versions/1.2.0/api/providers/openrouter.json delete mode 100644 
src/settings/versions/1.2.0/models.ts delete mode 100644 src/settings/versions/1.2.0/provider.ts diff --git a/src/settings/index.ts b/src/settings/index.ts index 69ea6c9..91458b6 100644 --- a/src/settings/index.ts +++ b/src/settings/index.ts @@ -1,13 +1,13 @@ import Chart from 'chart.js/auto'; import { App, Notice, PluginSettingTab, Setting } from 'obsidian'; import { ChatHistory } from 'src/api'; -import { DEFAULT_MODELS, Model, MODELS } from '../api/models'; import { DEFAULT_PROVIDER, Provider, PROVIDERS, PROVIDERS_NAMES, -} from '../api/provider'; +} from 'src/api/providers'; +import { DEFAULT_MODELS, Model, MODELS } from 'src/api/providers/models'; import Markpilot from '../main'; import { getDaysInCurrentMonth, validateURL } from '../utils'; @@ -66,7 +66,7 @@ export const DEFAULT_SETTINGS: MarkpilotSettings = { apiKey: undefined, }, ollama: { - apiUrl: undefined, + apiUrl: 'http://localhost:11434/v1/', }, }, completions: { diff --git a/src/settings/migrators/1.1.0-1.2.0.ts b/src/settings/migrators/1.1.0-1.2.0.ts index 5195fe6..cc79ca2 100644 --- a/src/settings/migrators/1.1.0-1.2.0.ts +++ b/src/settings/migrators/1.1.0-1.2.0.ts @@ -1,6 +1,7 @@ import { SettingsMigrator } from '.'; import { MarkpilotSettings1_1_0 } from '../versions/1.1.0'; import { MarkpilotSettings1_2_0 } from '../versions/1.2.0'; +import { OPENAI_MODELS } from '../versions/1.2.0/api/providers/models'; export const migrateVersion1_1_0_toVersion1_2_0: SettingsMigrator< MarkpilotSettings1_1_0, @@ -16,24 +17,35 @@ export const migrateVersion1_1_0_toVersion1_2_0: SettingsMigrator< apiKey: undefined, }, ollama: { - apiUrl: undefined, + apiUrl: 'http://localhost:11434/v1/', }, }, completions: { ...settings.completions, provider: 'openai', + model: 'gpt-3.5-turbo', ignoredFiles: [], ignoredTags: [], }, chat: { ...settings.chat, provider: 'openai', + model: 'gpt-3.5-turbo', }, cache: { enabled: true, // Enable cache by default. 
}, usage: settings.usage, }; + // Update if OpenAI models selected by the user are no longer available. + // Version 1.1.0 only supported OpenAI but included models + // that are aliased, deprecated or only preview models. + if (!(settings.completions.model in OPENAI_MODELS)) { + newSettings.completions.model = 'gpt-3.5-turbo'; + } + if (!(settings.chat.model in OPENAI_MODELS)) { + newSettings.chat.model = 'gpt-3.5-turbo'; + } // Update if default accept key is still selected. if (settings.completions.acceptKey === 'Enter') { newSettings.completions.acceptKey = 'Tab'; diff --git a/src/settings/versions/1.1.0/openai.ts b/src/settings/versions/1.1.0/api/openai.ts similarity index 100% rename from src/settings/versions/1.1.0/openai.ts rename to src/settings/versions/1.1.0/api/openai.ts diff --git a/src/settings/versions/1.1.0/index.ts b/src/settings/versions/1.1.0/index.ts index 7698133..00216d0 100644 --- a/src/settings/versions/1.1.0/index.ts +++ b/src/settings/versions/1.1.0/index.ts @@ -1,4 +1,4 @@ -import { ChatCompletionsModel, ChatHistory, CompletionsModel } from './openai'; +import { ChatCompletionsModel, ChatHistory, CompletionsModel } from './api/openai'; export interface MarkpilotSettings1_1_0 { apiKey: string | undefined; diff --git a/src/settings/versions/1.2.0/types.ts b/src/settings/versions/1.2.0/api/index.ts similarity index 100% rename from src/settings/versions/1.2.0/types.ts rename to src/settings/versions/1.2.0/api/index.ts diff --git a/src/settings/versions/1.2.0/api/providers/index.ts b/src/settings/versions/1.2.0/api/providers/index.ts new file mode 100644 index 0000000..22700dc --- /dev/null +++ b/src/settings/versions/1.2.0/api/providers/index.ts @@ -0,0 +1,10 @@ +export type Provider = (typeof PROVIDERS)[number]; + +export const PROVIDERS = ['openai', 'openrouter', 'ollama'] as const; +export const PROVIDERS_NAMES: Record = { + openai: 'OpenAI', + openrouter: 'OpenRouter', + ollama: 'Ollama', +}; + +export const DEFAULT_PROVIDER = 'openai' 
as Provider; diff --git a/src/settings/versions/1.2.0/api/providers/models.ts b/src/settings/versions/1.2.0/api/providers/models.ts new file mode 100644 index 0000000..d5eae33 --- /dev/null +++ b/src/settings/versions/1.2.0/api/providers/models.ts @@ -0,0 +1,27 @@ +import { getObjectKeys } from 'src/utils'; +import { Provider } from '.'; +import OllamaModelsJSON from './ollama.json'; +import OpenAIModelsJSON from './openai.json'; +import OpenRouterModelsJSON from './openrouter.json'; + +export type OpenAIModel = keyof typeof OpenAIModelsJSON; +export type OpenRouterModel = keyof typeof OpenRouterModelsJSON; +export type OllamaModel = keyof typeof OllamaModelsJSON; + +export type Model = OpenAIModel | OpenRouterModel | OllamaModel; + +export const OPENAI_MODELS = getObjectKeys(OpenAIModelsJSON); +export const OPENROUTER_MODELS = getObjectKeys(OpenRouterModelsJSON); +export const OLLAMA_MODELS = getObjectKeys(OllamaModelsJSON); + +export const MODELS = { + openai: OPENAI_MODELS, + openrouter: OPENROUTER_MODELS, + ollama: OLLAMA_MODELS, +}; + +export const DEFAULT_MODELS: Record = { + openai: 'gpt-3.5-turbo', + openrouter: 'openai/gpt-3.5-turbo', + ollama: 'llama2', +}; diff --git a/src/settings/versions/1.2.0/api/providers/ollama.json b/src/settings/versions/1.2.0/api/providers/ollama.json new file mode 100644 index 0000000..f56f4c8 --- /dev/null +++ b/src/settings/versions/1.2.0/api/providers/ollama.json @@ -0,0 +1,318 @@ +{ + "llama3": { + "inputCost": 0, + "outputCost": 0 + }, + "wizardlm2": { + "inputCost": 0, + "outputCost": 0 + }, + "mistral": { + "inputCost": 0, + "outputCost": 0 + }, + "gemma": { + "inputCost": 0, + "outputCost": 0 + }, + "mixtral": { + "inputCost": 0, + "outputCost": 0 + }, + "llama2": { + "inputCost": 0, + "outputCost": 0 + }, + "codegemma": { + "inputCost": 0, + "outputCost": 0 + }, + "command-r": { + "inputCost": 0, + "outputCost": 0 + }, + "command-r-plus": { + "inputCost": 0, + "outputCost": 0 + }, + "llava": { + "inputCost": 0, + 
"outputCost": 0 + }, + "dbrx": { + "inputCost": 0, + "outputCost": 0 + }, + "codellama": { + "inputCost": 0, + "outputCost": 0 + }, + "dolphin-mixtral": { + "inputCost": 0, + "outputCost": 0 + }, + "qwen": { + "inputCost": 0, + "outputCost": 0 + }, + "llama2-uncensored": { + "inputCost": 0, + "outputCost": 0 + }, + "mistral-openorca": { + "inputCost": 0, + "outputCost": 0 + }, + "deepseek-coder": { + "inputCost": 0, + "outputCost": 0 + }, + "phi": { + "inputCost": 0, + "outputCost": 0 + }, + "nous-hermes2": { + "inputCost": 0, + "outputCost": 0 + }, + "dolphin-mistral": { + "inputCost": 0, + "outputCost": 0 + }, + "orca-mini": { + "inputCost": 0, + "outputCost": 0 + }, + "nomic-embed-text": { + "inputCost": 0, + "outputCost": 0 + }, + "zephyr": { + "inputCost": 0, + "outputCost": 0 + }, + "wizard-vicuna-uncensored": { + "inputCost": 0, + "outputCost": 0 + }, + "llama2-chinese": { + "inputCost": 0, + "outputCost": 0 + }, + "openhermes": { + "inputCost": 0, + "outputCost": 0 + }, + "vicuna": { + "inputCost": 0, + "outputCost": 0 + }, + "tinydolphin": { + "inputCost": 0, + "outputCost": 0 + }, + "tinyllama": { + "inputCost": 0, + "outputCost": 0 + }, + "openchat": { + "inputCost": 0, + "outputCost": 0 + }, + "starcoder2": { + "inputCost": 0, + "outputCost": 0 + }, + "wizardcoder": { + "inputCost": 0, + "outputCost": 0 + }, + "stable-code": { + "inputCost": 0, + "outputCost": 0 + }, + "starcoder": { + "inputCost": 0, + "outputCost": 0 + }, + "neural-chat": { + "inputCost": 0, + "outputCost": 0 + }, + "phind-codellama": { + "inputCost": 0, + "outputCost": 0 + }, + "yi": { + "inputCost": 0, + "outputCost": 0 + }, + "dolphin-phi": { + "inputCost": 0, + "outputCost": 0 + }, + "falcon": { + "inputCost": 0, + "outputCost": 0 + }, + "orca2": { + "inputCost": 0, + "outputCost": 0 + }, + "starling-lm": { + "inputCost": 0, + "outputCost": 0 + }, + "wizard-math": { + "inputCost": 0, + "outputCost": 0 + }, + "dolphincoder": { + "inputCost": 0, + "outputCost": 0 + }, + 
"nous-hermes": { + "inputCost": 0, + "outputCost": 0 + }, + "bakllava": { + "inputCost": 0, + "outputCost": 0 + }, + "medllama2": { + "inputCost": 0, + "outputCost": 0 + }, + "solar": { + "inputCost": 0, + "outputCost": 0 + }, + "nous-hermes2-mixtral": { + "inputCost": 0, + "outputCost": 0 + }, + "sqlcoder": { + "inputCost": 0, + "outputCost": 0 + }, + "wizardlm-uncensored": { + "inputCost": 0, + "outputCost": 0 + }, + "codeup": { + "inputCost": 0, + "outputCost": 0 + }, + "everythinglm": { + "inputCost": 0, + "outputCost": 0 + }, + "stablelm2": { + "inputCost": 0, + "outputCost": 0 + }, + "all-minilm": { + "inputCost": 0, + "outputCost": 0 + }, + "yarn-mistral": { + "inputCost": 0, + "outputCost": 0 + }, + "samantha-mistral": { + "inputCost": 0, + "outputCost": 0 + }, + "meditron": { + "inputCost": 0, + "outputCost": 0 + }, + "stable-beluga": { + "inputCost": 0, + "outputCost": 0 + }, + "mxbai-embed-large": { + "inputCost": 0, + "outputCost": 0 + }, + "magicoder": { + "inputCost": 0, + "outputCost": 0 + }, + "stablelm-zephyr": { + "inputCost": 0, + "outputCost": 0 + }, + "llama-pro": { + "inputCost": 0, + "outputCost": 0 + }, + "yarn-llama2": { + "inputCost": 0, + "outputCost": 0 + }, + "deepseek-llm": { + "inputCost": 0, + "outputCost": 0 + }, + "codebooga": { + "inputCost": 0, + "outputCost": 0 + }, + "mistrallite": { + "inputCost": 0, + "outputCost": 0 + }, + "wizard-vicuna": { + "inputCost": 0, + "outputCost": 0 + }, + "nexusraven": { + "inputCost": 0, + "outputCost": 0 + }, + "goliath": { + "inputCost": 0, + "outputCost": 0 + }, + "open-orca-platypus2": { + "inputCost": 0, + "outputCost": 0 + }, + "notux": { + "inputCost": 0, + "outputCost": 0 + }, + "megadolphin": { + "inputCost": 0, + "outputCost": 0 + }, + "alfred": { + "inputCost": 0, + "outputCost": 0 + }, + "duckdb-nsql": { + "inputCost": 0, + "outputCost": 0 + }, + "xwinlm": { + "inputCost": 0, + "outputCost": 0 + }, + "wizardlm": { + "inputCost": 0, + "outputCost": 0 + }, + "notus": { + "inputCost": 
0, + "outputCost": 0 + }, + "codeqwen": { + "inputCost": 0, + "outputCost": 0 + }, + "snowflake-arctic-embed": { + "inputCost": 0, + "outputCost": 0 + } +} \ No newline at end of file diff --git a/src/settings/versions/1.2.0/api/providers/openai.json b/src/settings/versions/1.2.0/api/providers/openai.json new file mode 100644 index 0000000..a09e184 --- /dev/null +++ b/src/settings/versions/1.2.0/api/providers/openai.json @@ -0,0 +1,22 @@ +{ + "gpt-4-turbo": { + "inputCost": 10, + "outputCost": 30 + }, + "gpt-4": { + "inputCost": 30, + "outputCost": 60 + }, + "gpt-4-32k": { + "inputCost": 60, + "outputCost": 120 + }, + "gpt-3.5-turbo": { + "inputCost": 0.5, + "outputCost": 1.5 + }, + "gpt-3.5-turbo-instruct": { + "inputCost": 1.5, + "outputCost": 2 + } +} diff --git a/src/settings/versions/1.2.0/api/providers/openrouter.json b/src/settings/versions/1.2.0/api/providers/openrouter.json new file mode 100644 index 0000000..b1c2af9 --- /dev/null +++ b/src/settings/versions/1.2.0/api/providers/openrouter.json @@ -0,0 +1,502 @@ +{ + "nousresearch/nous-capybara-7b:free": { + "inputCost": 0, + "outputCost": 0 + }, + "mistralai/mistral-7b-instruct:free": { + "inputCost": 0, + "outputCost": 0 + }, + "openchat/openchat-7b:free": { + "inputCost": 0, + "outputCost": 0 + }, + "gryphe/mythomist-7b:free": { + "inputCost": 0, + "outputCost": 0 + }, + "undi95/toppy-m-7b:free": { + "inputCost": 0, + "outputCost": 0 + }, + "openrouter/cinematika-7b:free": { + "inputCost": 0, + "outputCost": 0 + }, + "google/gemma-7b-it:free": { + "inputCost": 0, + "outputCost": 0 + }, + "jebcarter/psyfighter-13b": { + "inputCost": 0.001, + "outputCost": 0.001 + }, + "koboldai/psyfighter-13b-2": { + "inputCost": 0.001, + "outputCost": 0.001 + }, + "intel/neural-chat-7b": { + "inputCost": 0.005, + "outputCost": 0.005 + }, + "haotian-liu/llava-13b": { + "inputCost": 0.005, + "outputCost": 0.005 + }, + "nousresearch/nous-hermes-2-vision-7b": { + "inputCost": 0.005, + "outputCost": 0.005 + }, + 
"meta-llama/llama-2-13b-chat": { + "inputCost": 0.0001206, + "outputCost": 0.0001206 + }, + "pygmalionai/mythalion-13b": { + "inputCost": 0.001125, + "outputCost": 0.001125 + }, + "xwin-lm/xwin-lm-70b": { + "inputCost": 0.00375, + "outputCost": 0.00375 + }, + "alpindale/goliath-120b": { + "inputCost": 0.009375, + "outputCost": 0.009375 + }, + "neversleep/noromaid-20b": { + "inputCost": 0.00225, + "outputCost": 0.00225 + }, + "gryphe/mythomist-7b": { + "inputCost": 0.000375, + "outputCost": 0.000375 + }, + "sophosympatheia/midnight-rose-70b": { + "inputCost": 0.009, + "outputCost": 0.009 + }, + "undi95/remm-slerp-l2-13b:extended": { + "inputCost": 0.001125, + "outputCost": 0.001125 + }, + "mancer/weaver": { + "inputCost": 0.003375, + "outputCost": 0.003375 + }, + "mistralai/mixtral-8x7b-instruct": { + "inputCost": 0.00025, + "outputCost": 0.0004167 + }, + "nousresearch/nous-hermes-llama2-13b": { + "inputCost": 0.00027, + "outputCost": 0.00027 + }, + "nousresearch/nous-capybara-7b": { + "inputCost": 0.00018, + "outputCost": 0.00018 + }, + "meta-llama/codellama-34b-instruct": { + "inputCost": 0.00072, + "outputCost": 0.00072 + }, + "codellama/codellama-70b-instruct": { + "inputCost": 0.00081, + "outputCost": 0.00081 + }, + "phind/phind-codellama-34b": { + "inputCost": 0.00072, + "outputCost": 0.00072 + }, + "teknium/openhermes-2-mistral-7b": { + "inputCost": 0.00018, + "outputCost": 0.00018 + }, + "teknium/openhermes-2.5-mistral-7b": { + "inputCost": 0.00018, + "outputCost": 0.00018 + }, + "undi95/remm-slerp-l2-13b": { + "inputCost": 0.00027, + "outputCost": 0.00027 + }, + "openrouter/cinematika-7b": { + "inputCost": 0.00018, + "outputCost": 0.00018 + }, + "01-ai/yi-34b-chat": { + "inputCost": 0.00072, + "outputCost": 0.00072 + }, + "01-ai/yi-34b": { + "inputCost": 0.00072, + "outputCost": 0.00072 + }, + "01-ai/yi-6b": { + "inputCost": 0.000126, + "outputCost": 0.000126 + }, + "togethercomputer/stripedhyena-nous-7b": { + "inputCost": 0.00018, + "outputCost": 0.00018 + 
}, + "togethercomputer/stripedhyena-hessian-7b": { + "inputCost": 0.00018, + "outputCost": 0.00018 + }, + "mistralai/mixtral-8x7b": { + "inputCost": 0.00054, + "outputCost": 0.00054 + }, + "nousresearch/nous-hermes-yi-34b": { + "inputCost": 0.00072, + "outputCost": 0.00072 + }, + "nousresearch/nous-hermes-2-mixtral-8x7b-sft": { + "inputCost": 0.00054, + "outputCost": 0.00054 + }, + "nousresearch/nous-hermes-2-mistral-7b-dpo": { + "inputCost": 0.00018, + "outputCost": 0.00018 + }, + "open-orca/mistral-7b-openorca": { + "inputCost": 0.0001425, + "outputCost": 0.0001425 + }, + "huggingfaceh4/zephyr-7b-beta": { + "inputCost": 0.0001425, + "outputCost": 0.0001425 + }, + "openai/gpt-3.5-turbo": { + "inputCost": 0.0005, + "outputCost": 0.0015 + }, + "openai/gpt-3.5-turbo-0125": { + "inputCost": 0.0005, + "outputCost": 0.0015 + }, + "openai/gpt-3.5-turbo-16k": { + "inputCost": 0.003, + "outputCost": 0.004 + }, + "openai/gpt-4-turbo": { + "inputCost": 0.01, + "outputCost": 0.03 + }, + "openai/gpt-4-turbo-preview": { + "inputCost": 0.01, + "outputCost": 0.03 + }, + "openai/gpt-4": { + "inputCost": 0.03, + "outputCost": 0.06 + }, + "openai/gpt-4-32k": { + "inputCost": 0.06, + "outputCost": 0.12 + }, + "openai/gpt-4-vision-preview": { + "inputCost": 0.01, + "outputCost": 0.03 + }, + "openai/gpt-3.5-turbo-instruct": { + "inputCost": 0.0015, + "outputCost": 0.002 + }, + "google/palm-2-chat-bison": { + "inputCost": 0.00025, + "outputCost": 0.0005 + }, + "google/palm-2-codechat-bison": { + "inputCost": 0.00025, + "outputCost": 0.0005 + }, + "google/palm-2-chat-bison-32k": { + "inputCost": 0.00025, + "outputCost": 0.0005 + }, + "google/palm-2-codechat-bison-32k": { + "inputCost": 0.00025, + "outputCost": 0.0005 + }, + "google/gemini-pro": { + "inputCost": 0.000125, + "outputCost": 0.000375 + }, + "google/gemini-pro-vision": { + "inputCost": 0.000125, + "outputCost": 0.000375 + }, + "google/gemini-pro-1.5": { + "inputCost": 0.0025, + "outputCost": 0.0075 + }, + 
"perplexity/pplx-70b-online": { + "inputCost": 0.001, + "outputCost": 0.001 + }, + "perplexity/pplx-7b-online": { + "inputCost": 0.0002, + "outputCost": 0.0002 + }, + "perplexity/pplx-7b-chat": { + "inputCost": 0.0002, + "outputCost": 0.0002 + }, + "perplexity/pplx-70b-chat": { + "inputCost": 0.001, + "outputCost": 0.001 + }, + "perplexity/sonar-small-chat": { + "inputCost": 0.0002, + "outputCost": 0.0002 + }, + "perplexity/sonar-medium-chat": { + "inputCost": 0.0006, + "outputCost": 0.0006 + }, + "perplexity/sonar-small-online": { + "inputCost": 0.0002, + "outputCost": 0.0002 + }, + "perplexity/sonar-medium-online": { + "inputCost": 0.0006, + "outputCost": 0.0006 + }, + "anthropic/claude-3-opus": { + "inputCost": 0.015, + "outputCost": 0.075 + }, + "anthropic/claude-3-sonnet": { + "inputCost": 0.003, + "outputCost": 0.015 + }, + "anthropic/claude-3-haiku": { + "inputCost": 0.00025, + "outputCost": 0.00125 + }, + "anthropic/claude-3-opus:beta": { + "inputCost": 0.015, + "outputCost": 0.075 + }, + "anthropic/claude-3-sonnet:beta": { + "inputCost": 0.003, + "outputCost": 0.015 + }, + "anthropic/claude-3-haiku:beta": { + "inputCost": 0.00025, + "outputCost": 0.00125 + }, + "meta-llama/llama-2-70b-chat": { + "inputCost": 0.0006, + "outputCost": 0.0019 + }, + "nousresearch/nous-capybara-34b": { + "inputCost": 0.0009, + "outputCost": 0.0009 + }, + "jondurbin/airoboros-l2-70b": { + "inputCost": 0.0007, + "outputCost": 0.0009 + }, + "jondurbin/bagel-34b": { + "inputCost": 0.00575, + "outputCost": 0.00575 + }, + "austism/chronos-hermes-13b": { + "inputCost": 0.00018, + "outputCost": 0.00018 + }, + "mistralai/mistral-7b-instruct": { + "inputCost": 0.0001, + "outputCost": 0.00025 + }, + "gryphe/mythomax-l2-13b": { + "inputCost": 0.00018, + "outputCost": 0.00018 + }, + "openchat/openchat-7b": { + "inputCost": 0.00013, + "outputCost": 0.00013 + }, + "undi95/toppy-m-7b": { + "inputCost": 0.00015, + "outputCost": 0.00015 + }, + "lizpreciatior/lzlv-70b-fp16-hf": { + "inputCost": 
0.0007, + "outputCost": 0.0009 + }, + "cognitivecomputations/dolphin-mixtral-8x7b": { + "inputCost": 0.0005, + "outputCost": 0.0005 + }, + "neversleep/noromaid-mixtral-8x7b-instruct": { + "inputCost": 0.008, + "outputCost": 0.008 + }, + "nousresearch/nous-hermes-2-mixtral-8x7b-dpo": { + "inputCost": 0.0003, + "outputCost": 0.0005 + }, + "rwkv/rwkv-5-world-3b": { + "inputCost": 0, + "outputCost": 0 + }, + "recursal/rwkv-5-3b-ai-town": { + "inputCost": 0, + "outputCost": 0 + }, + "recursal/eagle-7b": { + "inputCost": 0, + "outputCost": 0 + }, + "google/gemma-7b-it": { + "inputCost": 0.0001, + "outputCost": 0.0001 + }, + "databricks/dbrx-instruct": { + "inputCost": 0.0006, + "outputCost": 0.0006 + }, + "huggingfaceh4/zephyr-orpo-141b-a35b": { + "inputCost": 0.00065, + "outputCost": 0.00065 + }, + "meta-llama/llama-3-8b-instruct": { + "inputCost": 0.0001, + "outputCost": 0.0001 + }, + "meta-llama/llama-3-70b-instruct": { + "inputCost": 0.0008, + "outputCost": 0.0008 + }, + "microsoft/wizardlm-2-8x22b": { + "inputCost": 0.00065, + "outputCost": 0.00065 + }, + "microsoft/wizardlm-2-7b": { + "inputCost": 0.0001, + "outputCost": 0.0001 + }, + "mistralai/mixtral-8x22b": { + "inputCost": 0.0009, + "outputCost": 0.0009 + }, + "mistralai/mixtral-8x22b-instruct": { + "inputCost": 0.00065, + "outputCost": 0.00065 + }, + "anthropic/claude-2": { + "inputCost": 0.008, + "outputCost": 0.024 + }, + "anthropic/claude-2.1": { + "inputCost": 0.008, + "outputCost": 0.024 + }, + "anthropic/claude-2.0": { + "inputCost": 0.008, + "outputCost": 0.024 + }, + "anthropic/claude-instant-1": { + "inputCost": 0.0008, + "outputCost": 0.0024 + }, + "anthropic/claude-instant-1.2": { + "inputCost": 0.0008, + "outputCost": 0.0024 + }, + "anthropic/claude-2:beta": { + "inputCost": 0.008, + "outputCost": 0.024 + }, + "anthropic/claude-2.1:beta": { + "inputCost": 0.008, + "outputCost": 0.024 + }, + "anthropic/claude-2.0:beta": { + "inputCost": 0.008, + "outputCost": 0.024 + }, + 
"anthropic/claude-instant-1:beta": { + "inputCost": 0.0008, + "outputCost": 0.0024 + }, + "huggingfaceh4/zephyr-7b-beta:free": { + "inputCost": 0, + "outputCost": 0 + }, + "mistralai/mixtral-8x7b-instruct:nitro": { + "inputCost": 0.0005, + "outputCost": 0.0005 + }, + "meta-llama/llama-2-70b-chat:nitro": { + "inputCost": 0.0009, + "outputCost": 0.0009 + }, + "gryphe/mythomax-l2-13b:nitro": { + "inputCost": 0.0002, + "outputCost": 0.0002 + }, + "mistralai/mistral-7b-instruct:nitro": { + "inputCost": 0.0002, + "outputCost": 0.0002 + }, + "google/gemma-7b-it:nitro": { + "inputCost": 0.0002, + "outputCost": 0.0002 + }, + "databricks/dbrx-instruct:nitro": { + "inputCost": 0.0009, + "outputCost": 0.0009 + }, + "undi95/toppy-m-7b:nitro": { + "inputCost": 0.00015, + "outputCost": 0.00015 + }, + "microsoft/wizardlm-2-8x22b:nitro": { + "inputCost": 0.001, + "outputCost": 0.001 + }, + "meta-llama/llama-3-8b-instruct:nitro": { + "inputCost": 0.0002, + "outputCost": 0.0002 + }, + "meta-llama/llama-3-70b-instruct:nitro": { + "inputCost": 0.0009, + "outputCost": 0.0009 + }, + "gryphe/mythomax-l2-13b:extended": { + "inputCost": 0.0003, + "outputCost": 0.0003 + }, + "mistralai/mistral-tiny": { + "inputCost": 0.00025, + "outputCost": 0.00025 + }, + "mistralai/mistral-small": { + "inputCost": 0.002, + "outputCost": 0.006 + }, + "mistralai/mistral-medium": { + "inputCost": 0.0027, + "outputCost": 0.0081 + }, + "mistralai/mistral-large": { + "inputCost": 0.008, + "outputCost": 0.024 + }, + "cohere/command": { + "inputCost": 0.001, + "outputCost": 0.002 + }, + "cohere/command-r": { + "inputCost": 0.0005, + "outputCost": 0.0015 + }, + "cohere/command-r-plus": { + "inputCost": 0.003, + "outputCost": 0.015 + } +} \ No newline at end of file diff --git a/src/settings/versions/1.2.0/index.ts b/src/settings/versions/1.2.0/index.ts index 5cd7133..f01a68d 100644 --- a/src/settings/versions/1.2.0/index.ts +++ b/src/settings/versions/1.2.0/index.ts @@ -1,8 +1,8 @@ import { MarkpilotSettings } from 
'src/settings'; import { Equal, Expect } from 'src/utils'; -import { Model } from './models'; -import { Provider } from './provider'; -import { ChatHistory } from './types'; +import { ChatHistory } from './api'; +import { Provider } from './api/providers'; +import { Model } from './api/providers/models'; export interface MarkpilotSettings1_2_0 { version: string; diff --git a/src/settings/versions/1.2.0/models.ts b/src/settings/versions/1.2.0/models.ts deleted file mode 100644 index b205f06..0000000 --- a/src/settings/versions/1.2.0/models.ts +++ /dev/null @@ -1,60 +0,0 @@ -import { Provider } from './provider'; - -export type OpenAIModel = (typeof OPENAI_MODELS)[number]; - -export type OpenRouterModel = (typeof OPENROUTER_MODELS)[number]; - -export type OllamaModel = (typeof OLLAMA_MODELS)[number]; - -export type Model = OpenAIModel | OpenRouterModel | OllamaModel; - -export const OPENAI_MODELS = [ - 'gpt-3.5-turbo-instruct', - 'davinci-002', - 'babbage-002', - 'gpt-4-0125-preview', - 'gpt-4-turbo-preview', - 'gpt-4-1106-preview', - 'gpt-4-vision-preview', - 'gpt-4', - 'gpt-4-0314', - 'gpt-4-0613', - 'gpt-4-32k', - 'gpt-4-32k-0314', - 'gpt-4-32k-0613', - 'gpt-3.5-turbo', - 'gpt-3.5-turbo-16k', - 'gpt-3.5-turbo-0301', - 'gpt-3.5-turbo-0613', - 'gpt-3.5-turbo-1106', - 'gpt-3.5-turbo-0125', - 'gpt-3.5-turbo-16k-0613', -] as const; - -// TODO: -// This is a placeholder. -export const OPENROUTER_MODELS = [ - 'openai/gpt-3.5-turbo', - 'openai/gpt-4-turbo', -] as const; - -// TODO: -// This is a placeholder. 
-export const OLLAMA_MODELS = [ - 'llama2', - 'llama3', - 'codellama', - 'phind-codellama', -] as const; - -export const MODELS = { - openai: OPENAI_MODELS, - openrouter: OPENROUTER_MODELS, - ollama: OLLAMA_MODELS, -}; - -export const DEFAULT_MODELS: Record = { - openai: 'gpt-3.5-turbo', - openrouter: 'openai/gpt-3.5-turbo', - ollama: 'llama2', -}; diff --git a/src/settings/versions/1.2.0/provider.ts b/src/settings/versions/1.2.0/provider.ts deleted file mode 100644 index 9b8fcb9..0000000 --- a/src/settings/versions/1.2.0/provider.ts +++ /dev/null @@ -1,13 +0,0 @@ -export type OnlineProvider = (typeof ONLINE_PROVIDERS)[number]; - -export type OfflineProvider = (typeof OFFLINE_PROVIDERS)[number]; - -export type Provider = OnlineProvider | OfflineProvider; - -export const ONLINE_PROVIDERS = ['openai', 'openrouter'] as const; - -export const OFFLINE_PROVIDERS = ['ollama'] as const; - -export const PROVIDERS = [...ONLINE_PROVIDERS, ...OFFLINE_PROVIDERS] as const; - -export const DEFAULT_PROVIDER = 'openai' as Provider; From 50927bf5af18cf9ac2a1478fc64cd4e8daa81ce5 Mon Sep 17 00:00:00 2001 From: Taichi Maeda Date: Sat, 20 Apr 2024 18:28:41 +0900 Subject: [PATCH 22/26] Refactor utils --- src/settings/utils.ts | 7 +++++ src/settings/versions/1.2.0/index.ts | 2 +- src/utils.ts | 42 +++++++++++----------------- 3 files changed, 24 insertions(+), 27 deletions(-) create mode 100644 src/settings/utils.ts diff --git a/src/settings/utils.ts b/src/settings/utils.ts new file mode 100644 index 0000000..5a042f4 --- /dev/null +++ b/src/settings/utils.ts @@ -0,0 +1,7 @@ +// Utility types used for settings migration. + +export type Expect = T; +export type Equal = + (() => T extends X ? 1 : 2) extends () => T extends Y ? 1 : 2 + ? 
true + : false; diff --git a/src/settings/versions/1.2.0/index.ts b/src/settings/versions/1.2.0/index.ts index f01a68d..4ebf191 100644 --- a/src/settings/versions/1.2.0/index.ts +++ b/src/settings/versions/1.2.0/index.ts @@ -1,5 +1,5 @@ import { MarkpilotSettings } from 'src/settings'; -import { Equal, Expect } from 'src/utils'; +import { Equal, Expect } from 'src/settings/utils'; import { ChatHistory } from './api'; import { Provider } from './api/providers'; import { Model } from './api/providers/models'; diff --git a/src/utils.ts b/src/utils.ts index 9339544..0d4d726 100644 --- a/src/utils.ts +++ b/src/utils.ts @@ -1,14 +1,9 @@ -export function uuid(): string { - return crypto.randomUUID(); -} - -export function validateURL(url: string): boolean { - try { - new URL(url); - return true; - } catch { - return false; - } +// Utility function to get the keys of an object +// that provides more informative typing on the return value than `Object.keys`. +export function getObjectKeys>( + obj: T, +): (keyof T)[] { + return Object.keys(obj); } // Debounce an async function by waiting for `wait` milliseconds before resolving. @@ -51,6 +46,16 @@ export function debounceAsyncFunc( }; } +// Utility function to validate the given string is a valid URL or not. +export function validateURL(url: string): boolean { + try { + new URL(url); + return true; + } catch { + return false; + } +} + export function getTodayAsString(): string { return new Date().toISOString().split('T')[0]; } @@ -69,18 +74,3 @@ export function getDaysInCurrentMonth(): Date[] { } return dates; } - -// Utility types used for settings migration. -export type Expect = T; -export type Equal = - (() => T extends X ? 1 : 2) extends () => T extends Y ? 1 : 2 - ? true - : false; - -// Utility function to get the keys of an object -// that provides more informative typing on the return value than `Object.keys`. 
-export function getObjectKeys>( - obj: T, -): (keyof T)[] { - return Object.keys(obj); -} From d49a4b88716436da4d7e27d6a9fbe6ce5b96b67f Mon Sep 17 00:00:00 2001 From: Taichi Maeda Date: Sat, 20 Apr 2024 19:22:19 +0900 Subject: [PATCH 23/26] Add mechanism to test API connections --- src/api/clients/gemini.ts | 3 ++ src/api/clients/ollama.ts | 5 +++ src/api/clients/openai-compatible.ts | 38 +++++++++++++++++-- src/api/clients/openai.ts | 5 +++ src/api/clients/openrouter.ts | 5 +++ src/api/index.ts | 1 + src/api/proxies/memory-cache.ts | 4 ++ src/api/proxies/usage-monitor.ts | 4 ++ src/settings/index.ts | 53 ++++++++++++++++++--------- src/settings/migrators/1.1.0-1.2.0.ts | 2 +- 10 files changed, 98 insertions(+), 22 deletions(-) diff --git a/src/api/clients/gemini.ts b/src/api/clients/gemini.ts index 66ab527..0f3203b 100644 --- a/src/api/clients/gemini.ts +++ b/src/api/clients/gemini.ts @@ -13,4 +13,7 @@ export class GeminiAPIClient implements APIClient { ): Promise { throw new Error('Method not implemented.'); } + testConnection(): Promise { + throw new Error('Method not implemented.'); + } } diff --git a/src/api/clients/ollama.ts b/src/api/clients/ollama.ts index da305e9..8824f21 100644 --- a/src/api/clients/ollama.ts +++ b/src/api/clients/ollama.ts @@ -4,6 +4,7 @@ import Markpilot from 'src/main'; import { validateURL } from 'src/utils'; import { APIClient } from '..'; import { PromptGenerator } from '../prompts/generator'; +import { Provider } from '../providers'; import { CostsTracker } from '../providers/costs'; import { OpenAICompatibleAPIClient } from './openai-compatible'; @@ -19,6 +20,10 @@ export class OllamaAPIClient super(generator, tracker, plugin); } + get provider(): Provider { + return 'ollama'; + } + get openai(): OpenAI | undefined { const { settings } = this.plugin; diff --git a/src/api/clients/openai-compatible.ts b/src/api/clients/openai-compatible.ts index edf978f..08643d3 100644 --- a/src/api/clients/openai-compatible.ts +++ 
b/src/api/clients/openai-compatible.ts @@ -4,7 +4,9 @@ import OpenAI from 'openai'; import Markpilot from 'src/main'; import { APIClient, ChatMessage } from '..'; import { PromptGenerator } from '../prompts/generator'; +import { Provider } from '../providers'; import { CostsTracker } from '../providers/costs'; +import { DEFAULT_MODELS } from '../providers/models'; export abstract class OpenAICompatibleAPIClient implements APIClient { constructor( @@ -13,15 +15,16 @@ export abstract class OpenAICompatibleAPIClient implements APIClient { protected plugin: Markpilot, ) {} + abstract get provider(): Provider; + abstract get openai(): OpenAI | undefined; async *fetchChat(messages: ChatMessage[]) { - const { settings } = this.plugin; - if (this.openai === undefined) { return; } + const { settings } = this.plugin; try { const stream = await this.openai.chat.completions.create({ messages, @@ -63,12 +66,11 @@ export abstract class OpenAICompatibleAPIClient implements APIClient { } async fetchCompletions(prefix: string, suffix: string) { - const { settings } = this.plugin; - if (this.openai === undefined) { return; } + const { settings } = this.plugin; try { const messages = this.generator.generate(prefix, suffix); const completions = await this.openai.chat.completions.create({ @@ -98,9 +100,37 @@ export abstract class OpenAICompatibleAPIClient implements APIClient { return this.generator.parse(content); } catch (error) { console.error(error); + console.log(JSON.stringify(error)); new Notice( 'Failed to fetch completions. 
Make sure your API key or API URL is correct.', ); } } + + async testConnection() { + if (this.openai === undefined) { + return false; + } + + try { + const response = await this.openai.chat.completions.create({ + messages: [ + { + role: 'user', + content: 'Say this is a test', + }, + ], + model: DEFAULT_MODELS[this.provider], + max_tokens: 1, + temperature: 0, + top_p: 1, + n: 1, + }); + + return response.choices[0].message.content !== ''; + } catch (error) { + console.error(error); + return false; + } + } } diff --git a/src/api/clients/openai.ts b/src/api/clients/openai.ts index 5d3cb65..bda8fea 100644 --- a/src/api/clients/openai.ts +++ b/src/api/clients/openai.ts @@ -3,6 +3,7 @@ import OpenAI from 'openai'; import Markpilot from 'src/main'; import { APIClient } from '..'; import { PromptGenerator } from '../prompts/generator'; +import { Provider } from '../providers'; import { CostsTracker } from '../providers/costs'; import { OpenAICompatibleAPIClient } from './openai-compatible'; @@ -18,6 +19,10 @@ export class OpenAIAPIClient super(generator, tracker, plugin); } + get provider(): Provider { + return 'openai'; + } + get openai(): OpenAI | undefined { const { settings } = this.plugin; diff --git a/src/api/clients/openrouter.ts b/src/api/clients/openrouter.ts index e00c97d..fdae621 100644 --- a/src/api/clients/openrouter.ts +++ b/src/api/clients/openrouter.ts @@ -3,6 +3,7 @@ import OpenAI from 'openai'; import Markpilot from 'src/main'; import { APIClient } from '..'; import { PromptGenerator } from '../prompts/generator'; +import { Provider } from '../providers'; import { CostsTracker } from '../providers/costs'; import { OpenAICompatibleAPIClient } from './openai-compatible'; @@ -18,6 +19,10 @@ export class OpenRouterAPIClient super(generator, tracker, plugin); } + get provider(): Provider { + return 'openrouter'; + } + get openai(): OpenAI | undefined { const { settings } = this.plugin; diff --git a/src/api/index.ts b/src/api/index.ts index 0fcf0b8..31ecda2 
100644 --- a/src/api/index.ts +++ b/src/api/index.ts @@ -1,6 +1,7 @@ export interface APIClient { fetchChat(messages: ChatMessage[]): AsyncGenerator; fetchCompletions(prefix: string, suffix: string): Promise; + testConnection(): Promise; } export type ChatRole = 'system' | 'assistant' | 'user'; diff --git a/src/api/proxies/memory-cache.ts b/src/api/proxies/memory-cache.ts index b79e869..3d5691e 100644 --- a/src/api/proxies/memory-cache.ts +++ b/src/api/proxies/memory-cache.ts @@ -53,4 +53,8 @@ export class MemoryCacheProxy implements APIClient { await this.store.set(hash, completions); return completions; } + + testConnection() { + return this.client.testConnection(); + } } diff --git a/src/api/proxies/usage-monitor.ts b/src/api/proxies/usage-monitor.ts index dfa603b..5d7fa09 100644 --- a/src/api/proxies/usage-monitor.ts +++ b/src/api/proxies/usage-monitor.ts @@ -39,4 +39,8 @@ export class UsageMonitorProxy implements APIClient { return await this.client.fetchCompletions(prefix, suffix); } + + testConnection() { + return this.client.testConnection(); + } } diff --git a/src/settings/index.ts b/src/settings/index.ts index 91458b6..fd61afc 100644 --- a/src/settings/index.ts +++ b/src/settings/index.ts @@ -10,7 +10,7 @@ import { import { DEFAULT_MODELS, Model, MODELS } from 'src/api/providers/models'; import Markpilot from '../main'; -import { getDaysInCurrentMonth, validateURL } from '../utils'; +import { getDaysInCurrentMonth } from '../utils'; export interface MarkpilotSettings { version: string; @@ -66,7 +66,7 @@ export const DEFAULT_SETTINGS: MarkpilotSettings = { apiKey: undefined, }, ollama: { - apiUrl: 'http://localhost:11434/v1/', + apiUrl: 'http://127.0.0.1:11434/v1/', }, }, completions: { @@ -159,7 +159,9 @@ export class MarkpilotSettingTab extends PluginSettingTab { new Setting(containerEl) .setName('Ollama API URL') - .setDesc('Enter your Ollama API URL.') + .setDesc( + 'Enter your Ollama API URL. 
Prefer using 127.0.0.1 instead of localhost to avoid issues related to IPv4/IPv6', + ) .addText((text) => text .setValue(settings.providers.ollama.apiUrl ?? '') @@ -171,25 +173,42 @@ export class MarkpilotSettingTab extends PluginSettingTab { ); new Setting(containerEl) - .setName('Test Ollama API connection') - .setDesc('Test the connection to the local Ollama API.') + .setName('Test OpenAI API connection') + .setDesc('Test the connection to the OpenAI API.') .addButton((button) => button.setButtonText('Test Connection').onClick(async () => { - const apiUrl = settings.providers.ollama.apiUrl; - if (apiUrl === undefined) { - new Notice('Ollama API URL is not set.'); - return; + const client = this.plugin.createAPIClient('openai'); + if (await client.testConnection()) { + new Notice('Successfully connected to OpenAI API.'); + } else { + new Notice('Failed to connect to OpenAI API.'); } - if (!validateURL(apiUrl)) { - new Notice('Invalid Ollama API URL.'); - return; + }), + ); + + new Setting(containerEl) + .setName('Test OpenRouter API connection') + .setDesc('Test the connection to the OpenRouter API.') + .addButton((button) => + button.setButtonText('Test Connection').onClick(async () => { + const client = this.plugin.createAPIClient('openrouter'); + if (await client.testConnection()) { + new Notice('Successfully connected to OpenRouter API.'); + } else { + new Notice('Failed to connect to OpenRouter API.'); } - // TODO: - // Properly implement logic for checking Ollama API status. 
- try { - await fetch(apiUrl); + }), + ); + + new Setting(containerEl) + .setName('Test Ollama API connection') + .setDesc('Test the connection to the local Ollama API.') + .addButton((button) => + button.setButtonText('Test Connection').onClick(async () => { + const client = this.plugin.createAPIClient('ollama'); + if (await client.testConnection()) { new Notice('Successfully connected to Ollama API.'); - } catch { + } else { new Notice('Failed to connect to Ollama API.'); } }), diff --git a/src/settings/migrators/1.1.0-1.2.0.ts b/src/settings/migrators/1.1.0-1.2.0.ts index cc79ca2..8899f4b 100644 --- a/src/settings/migrators/1.1.0-1.2.0.ts +++ b/src/settings/migrators/1.1.0-1.2.0.ts @@ -17,7 +17,7 @@ export const migrateVersion1_1_0_toVersion1_2_0: SettingsMigrator< apiKey: undefined, }, ollama: { - apiUrl: 'http://localhost:11434/v1/', + apiUrl: 'http://127.0.0.1:11434/v1/', }, }, completions: { From 07d48b286f9baac16e03e8f9184dba02c9d976a5 Mon Sep 17 00:00:00 2001 From: Taichi Maeda Date: Sat, 20 Apr 2024 20:34:17 +0900 Subject: [PATCH 24/26] Disable completion when cursor is at the start of document --- src/editor/listener.ts | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/src/editor/listener.ts b/src/editor/listener.ts index 5ab7443..50e3967 100644 --- a/src/editor/listener.ts +++ b/src/editor/listener.ts @@ -28,25 +28,28 @@ function showCompletions(fetcher: CompletionsFetcher) { if (state.selection.ranges.length > 1 || !state.selection.main.empty) { return; } - // If the suffix does not end with a punctuation or space, ignore. const head = state.selection.main.head; const char = state.sliceDoc(head, head + 1); if (char.length == 1 && !char.match(/^[\p{P}\s]/u)) { return; } + // If the prefix is empty, ignore. + // This helps prevent showing completions when opening a new document. 
+ const prefix = state.sliceDoc(0, head); + const suffix = state.sliceDoc(head, length); + if (prefix === '') { + return; + } const currentCompletionsId = ++latestCompletionsId; - // Get the completions context with code blocks taken into account. - const prefix = state.sliceDoc(0, head); - const suffix = state.sliceDoc(head, length); // Fetch completions from the server. const completions = await fetcher(prefix, suffix).catch((error) => { new Notice('Failed to fetch completions: ', error); return undefined; }); - // if fetch has failed, ignore and return. + // If fetch has failed, ignore and return. if (completions === undefined) { return; } From fe494dc968f95bafc0bd8cacbe96d806d05d9c5e Mon Sep 17 00:00:00 2001 From: Taichi Maeda Date: Sat, 20 Apr 2024 20:41:52 +0900 Subject: [PATCH 25/26] Update Node.js version in CI --- .github/workflows/ci.yaml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 6d365f3..ebf2af9 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -22,7 +22,7 @@ jobs: id: setup-node uses: actions/setup-node@v4 with: - node-version: 16 + node-version: 18 - name: Install dependencies id: install @@ -45,7 +45,7 @@ jobs: id: setup-node uses: actions/setup-node@v4 with: - node-version: 16 + node-version: 18 - name: Install dependencies id: install @@ -68,7 +68,7 @@ jobs: id: setup-node uses: actions/setup-node@v4 with: - node-version: 16 + node-version: 18 - name: Install dependencies id: install @@ -91,7 +91,7 @@ jobs: id: setup-node uses: actions/setup-node@v4 with: - node-version: 16 + node-version: 18 - name: Install dependencies id: install From b553a89dd4d475d0909310037d050777251b6d88 Mon Sep 17 00:00:00 2001 From: Taichi Maeda Date: Sat, 20 Apr 2024 20:43:11 +0900 Subject: [PATCH 26/26] Format code --- src/api/providers/ollama.json | 2 +- src/api/providers/openrouter.json | 2 +- src/settings/versions/1.1.0/index.ts | 6 +++++- 
src/settings/versions/1.2.0/api/providers/ollama.json | 2 +- src/settings/versions/1.2.0/api/providers/openrouter.json | 2 +- 5 files changed, 9 insertions(+), 5 deletions(-) diff --git a/src/api/providers/ollama.json b/src/api/providers/ollama.json index f56f4c8..eeef26c 100644 --- a/src/api/providers/ollama.json +++ b/src/api/providers/ollama.json @@ -315,4 +315,4 @@ "inputCost": 0, "outputCost": 0 } -} \ No newline at end of file +} diff --git a/src/api/providers/openrouter.json b/src/api/providers/openrouter.json index b1c2af9..2557e08 100644 --- a/src/api/providers/openrouter.json +++ b/src/api/providers/openrouter.json @@ -499,4 +499,4 @@ "inputCost": 0.003, "outputCost": 0.015 } -} \ No newline at end of file +} diff --git a/src/settings/versions/1.1.0/index.ts b/src/settings/versions/1.1.0/index.ts index 00216d0..d019759 100644 --- a/src/settings/versions/1.1.0/index.ts +++ b/src/settings/versions/1.1.0/index.ts @@ -1,4 +1,8 @@ -import { ChatCompletionsModel, ChatHistory, CompletionsModel } from './api/openai'; +import { + ChatCompletionsModel, + ChatHistory, + CompletionsModel, +} from './api/openai'; export interface MarkpilotSettings1_1_0 { apiKey: string | undefined; diff --git a/src/settings/versions/1.2.0/api/providers/ollama.json b/src/settings/versions/1.2.0/api/providers/ollama.json index f56f4c8..eeef26c 100644 --- a/src/settings/versions/1.2.0/api/providers/ollama.json +++ b/src/settings/versions/1.2.0/api/providers/ollama.json @@ -315,4 +315,4 @@ "inputCost": 0, "outputCost": 0 } -} \ No newline at end of file +} diff --git a/src/settings/versions/1.2.0/api/providers/openrouter.json b/src/settings/versions/1.2.0/api/providers/openrouter.json index b1c2af9..2557e08 100644 --- a/src/settings/versions/1.2.0/api/providers/openrouter.json +++ b/src/settings/versions/1.2.0/api/providers/openrouter.json @@ -499,4 +499,4 @@ "inputCost": 0.003, "outputCost": 0.015 } -} \ No newline at end of file +}