diff --git a/.github/workflows/release-app.yml b/.github/workflows/release-app.yml
index bf5c19fec..fdcdde7c9 100644
--- a/.github/workflows/release-app.yml
+++ b/.github/workflows/release-app.yml
@@ -124,12 +124,6 @@ jobs:
          ls .
          bun install

-      # Run pre build
-      - name: Run pre_build.js on ${{ matrix.platform }}
-        shell: bash
-        run: bun ./scripts/pre_build.js ${{ matrix.pre-build-args }}
-        working-directory: ./examples/apps/screenpipe-app-tauri
-
      - name: Build CLI
        shell: bash
        run: |
@@ -139,6 +133,12 @@
          fi
          cargo build --release ${{ matrix.args }}

+      # Run pre build
+      - name: Run pre_build.js on ${{ matrix.platform }}
+        shell: bash
+        run: bun ./scripts/pre_build.js ${{ matrix.pre-build-args }}
+        working-directory: ./examples/apps/screenpipe-app-tauri
+
      - name: Build
        uses: tauri-apps/tauri-action@v0
        env:
diff --git a/examples/apps/screenpipe-app-tauri/scripts/pre_build.js b/examples/apps/screenpipe-app-tauri/scripts/pre_build.js
index fb41838ca..1da0ab510 100644
--- a/examples/apps/screenpipe-app-tauri/scripts/pre_build.js
+++ b/examples/apps/screenpipe-app-tauri/scripts/pre_build.js
@@ -107,6 +107,17 @@ if (platform == 'linux') {
 if (platform == 'windows') {
   const wgetPath = await findWget();

+  console.log('Copying screenpipe binary...');
+  const screenpipeSrc = path.join(__dirname, '..', '..', '..', '..', 'target', 'release', 'screenpipe.exe');
+  const screenpipeDest = path.join(cwd, 'screenpipe.exe');
+  try {
+    await fs.copyFile(screenpipeSrc, screenpipeDest);
+    console.log('Screenpipe binary copied successfully.');
+  } catch (error) {
+    console.error('Failed to copy screenpipe binary:', error);
+    process.exit(1);
+  }
+
   // Setup FFMPEG
   if (!(await fs.exists(config.ffmpegRealname))) {
     await $`${wgetPath} -nc --show-progress ${config.windows.ffmpegUrl} -O ${config.windows.ffmpegName}.7z`
diff --git a/examples/apps/screenpipe-app-tauri/src-tauri/Cargo.toml b/examples/apps/screenpipe-app-tauri/src-tauri/Cargo.toml
index ff33825d2..1db3fbd39 100644
--- a/examples/apps/screenpipe-app-tauri/src-tauri/Cargo.toml
+++ b/examples/apps/screenpipe-app-tauri/src-tauri/Cargo.toml
@@ -58,6 +58,8 @@ tauri-plugin-store = "2.0.0-beta"
 anyhow = "1.0.71"
 which = "6.0.1"

+# System information
+sysinfo = "0.29.0"
 # hakc
 fix-path-env = { git = "https://github.com/tauri-apps/fix-path-env-rs" }

diff --git a/examples/apps/screenpipe-app-tauri/src-tauri/src/analytics.rs b/examples/apps/screenpipe-app-tauri/src-tauri/src/analytics.rs
index 3094f2bef..82b42f10a 100755
--- a/examples/apps/screenpipe-app-tauri/src-tauri/src/analytics.rs
+++ b/examples/apps/screenpipe-app-tauri/src-tauri/src/analytics.rs
@@ -4,9 +4,11 @@ use serde_json::json;
 use std::fs;
 use std::sync::Arc;
 use std::time::Duration;
+use sysinfo::{System, SystemExt};
 use tokio::sync::Mutex;
 use tokio::time::interval;
 use uuid::Uuid;
+
 pub struct AnalyticsManager {
     client: Client,
     posthog_api_key: String,
@@ -38,6 +40,7 @@ impl AnalyticsManager {
         }

         let posthog_url = format!("{}/capture/", self.api_host);
+        let system = System::new_all();

         let mut payload = json!({
             "api_key": self.posthog_api_key,
@@ -45,7 +48,12 @@
             "properties": {
                 "distinct_id": self.distinct_id,
                 "$lib": "rust-reqwest",
-                "timestamp": chrono::Utc::now().to_rfc3339(),
+                "os_name": system.name().unwrap_or_default(),
+                "os_version": system.os_version().unwrap_or_default(),
+                "kernel_version": system.kernel_version().unwrap_or_default(),
+                "host_name": system.host_name().unwrap_or_default(),
+                "cpu_count": system.cpus().len(),
+                "total_memory": system.total_memory(),
             },
         });

diff --git a/examples/typescript/daily-log/main.ts b/examples/typescript/daily-log/main.ts
index e3515046c..a322870fc 100644
--- a/examples/typescript/daily-log/main.ts
+++ b/examples/typescript/daily-log/main.ts
@@ -4,11 +4,11 @@ import fetch from "node-fetch";
 import fs from "fs/promises";
 import path from "path";

-const OPENAI_API_URL = "https://api.openai.com/v1/chat/completions";
-const OPENAI_MODEL = "gpt-4o";
+// const OPENAI_API_URL = "https://api.openai.com/v1/chat/completions";
+// const OPENAI_MODEL = "gpt-4o";
 // Uncomment the following lines to use Ollama instead
-// const OPENAI_API_URL = "http://localhost:11434/api/chat";
-// const OPENAI_MODEL = "llama3.1";
+const OPENAI_API_URL = "http://localhost:11434/api/chat";
+const OPENAI_MODEL = "llama3.1";

 const SCREENPIPE_API_URL = "http://localhost:3030/search";
 const LOG_INTERVAL = 1 * 60 * 1000; // 5 minutes in milliseconds