diff --git a/src-tauri/tauri.conf.json b/src-tauri/tauri.conf.json
index 69e7572b..67394089 100644
--- a/src-tauri/tauri.conf.json
+++ b/src-tauri/tauri.conf.json
@@ -8,7 +8,7 @@
   },
   "package": {
     "productName": "RisuAI",
-    "version": "1.72.0"
+    "version": "1.73.0"
   },
   "tauri": {
     "allowlist": {
diff --git a/src/etc/updateLog.ts b/src/etc/updateLog.ts
index cb5d022a..33b45ca1 100644
--- a/src/etc/updateLog.ts
+++ b/src/etc/updateLog.ts
@@ -1,12 +1,14 @@
 export const patchNote = {
-    version: "1.72",
+    version: "1.73",
     content: `
-# Update 1.72
-- Added custom chain of thoughts
-- Added thought tag depth
-- Added Openrouter fallback option
-- Added Openrouter middle-out option
+# Update 1.73
+- Added WebLLM Local
+    - WebLLM Local is an option for users who want to run an LLM directly on their own computer, without sending any data to a server.
+    - WebLLM Local is experimental and may not work on all devices.
+    - Currently, WebLLM Local supports only three models, but more will be added in the future.
+    - In future updates, you may also be able to use WebLLM Local with any transformer model.
+    - Currently, WebLLM Local runs on the CPU only; GPU support via WebGPU will be added in the future.
 `
 }
 
 
diff --git a/src/ts/storage/database.ts b/src/ts/storage/database.ts
index 12f81fdd..9fdb7f75 100644
--- a/src/ts/storage/database.ts
+++ b/src/ts/storage/database.ts
@@ -15,7 +15,7 @@ import type { OobaChatCompletionRequestParams } from '../model/ooba';
 export const DataBase = writable({} as any as Database)
 export const loadedStore = writable(false)
 
-export let appVer = "1.72.0"
+export let appVer = "1.73.0"
 export let webAppSubVer = ''
 
 export function setDatabase(data:Database){
diff --git a/version.json b/version.json
index e67bbcd3..f57b9779 100644
--- a/version.json
+++ b/version.json
@@ -1 +1 @@
-{"version":"1.72.0"}
\ No newline at end of file
+{"version":"1.73.0"}
\ No newline at end of file
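
For context on what the "WebLLM Local" entry in the patch note means in practice: the diff itself only bumps version strings and the changelog, so the sketch below is a hypothetical illustration of the kind of fully local, CPU-only, in-browser generation it describes, not RisuAI's actual implementation. It assumes a transformers.js-style pipeline; the `@xenova/transformers` package, the `Xenova/gpt2` model id, and the `generateLocally` helper are all assumptions introduced here for illustration.

```ts
// Hypothetical sketch of fully local, CPU-only text generation in the
// browser, assuming a transformers.js-style pipeline. The package, model
// id, and function name are illustrative, not RisuAI's actual code.
import { pipeline } from '@xenova/transformers';

async function generateLocally(prompt: string): Promise<string> {
  // The first call downloads and caches the model weights in the browser;
  // every call after that runs entirely on-device (WASM/CPU), so no prompt
  // or chat data ever leaves the machine.
  const generator = await pipeline('text-generation', 'Xenova/gpt2');

  const output = await generator(prompt, {
    max_new_tokens: 64,
    do_sample: true,
    temperature: 0.7,
  });
  return (output as any)[0].generated_text;
}
```

On a CPU/WASM backend only small models are practical, which is consistent with the note's three-model limit; offloading inference to the GPU via WebGPU is the usual way such a pipeline speeds up, matching the planned WebGPU support.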