Skip to content

Commit

Permalink
small fixes + cleanup
Browse files Browse the repository at this point in the history
  • Loading branch information
louis030195 committed Jul 11, 2024
1 parent 30e3813 commit 787cae1
Show file tree
Hide file tree
Showing 7 changed files with 125 additions and 23 deletions.
2 changes: 2 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,8 @@ Keep in mind that it's still experimental.
screenpipe
# by default it uses your default input and output audio devices
# (e.g. speakers/headphones + laptop mic) & your whole screen
# we recommend that you customise screenpipe to your laptop's
# computing capacity to avoid issues
```

<details>
Expand Down
10 changes: 8 additions & 2 deletions screenpipe-audio/src/bin/screenpipe-audio.rs
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,6 @@ fn print_devices(devices: &[AudioDevice]) {
fn main() -> Result<()> {
use env_logger::Builder;
use log::LevelFilter;
use std::sync::mpsc;

Builder::new()
.filter(None, LevelFilter::Info)
Expand Down Expand Up @@ -81,15 +80,22 @@ fn main() -> Result<()> {
})
})
.collect();
let mut consecutive_timeouts = 0;
let max_consecutive_timeouts = 3; // Adjust this value as needed

// Main loop to receive and print transcriptions
loop {
match whisper_receiver.recv_timeout(Duration::from_secs(5)) {
Ok(result) => {
info!("Transcription: {:?}", result);
consecutive_timeouts = 0; // Reset the counter on successful receive
}
Err(crossbeam::channel::RecvTimeoutError::Timeout) => {
// No transcription received in 5 seconds, continue waiting
consecutive_timeouts += 1;
if consecutive_timeouts >= max_consecutive_timeouts {
info!("No transcriptions received for a while, stopping...");
break;
}
continue;
}
Err(crossbeam::channel::RecvTimeoutError::Disconnected) => {
Expand Down
2 changes: 1 addition & 1 deletion screenpipe-audio/src/core.rs
Original file line number Diff line number Diff line change
Expand Up @@ -271,7 +271,7 @@ pub fn list_audio_devices() -> Result<Vec<AudioDevice>> {
if let Ok(name) = device.name() {
devices.push(AudioDevice {
name,
device_type: "input".to_string(),
device_type: "output".to_string(),
});
}
}
Expand Down
2 changes: 1 addition & 1 deletion screenpipe-audio/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,4 +6,4 @@ pub use core::{
default_input_device, default_output_device, list_audio_devices, parse_device_spec,
record_and_transcribe, AudioCaptureResult, AudioDevice, DeviceSpec,
};
pub use stt::{create_whisper_channel, AudioInput, TranscriptionResult};
pub use stt::{create_whisper_channel, stt, AudioInput, TranscriptionResult, WhisperModel};
30 changes: 28 additions & 2 deletions screenpipe-audio/tests/core_tests.rs
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
#[cfg(test)]
mod tests {
use screenpipe_audio::{list_audio_devices, parse_device_spec};
use screenpipe_audio::{list_audio_devices, parse_device_spec, stt, AudioInput, WhisperModel};

use super::*;

// ! what happen in github action?
// ! what happen in github action?
#[test]
#[ignore]
fn test_list_audio_devices() {
Expand All @@ -18,5 +18,31 @@ mod tests {
assert_eq!(spec.to_string(), "Test Device (input)");
}

// TODO move to tests folder
#[test]
#[ignore]
fn test_speech_to_text() {
    println!("Starting speech to text test");

    println!("Loading audio file");
    let timer = std::time::Instant::now();
    let whisper_model = WhisperModel::new().unwrap();

    // Single-file batch; the device label is arbitrary metadata for this test.
    let inputs = [AudioInput {
        path: "./test_data/poetic_kapil_gupta.wav".to_string(),
        device: "the sun".to_string(),
    }];
    let text = stt(&inputs, &whisper_model).unwrap();
    let duration = timer.elapsed();

    println!("Speech to text completed in {:?}", duration);
    println!("Transcribed text: {:?}", text);

    // Expect a known phrase from the fixture recording in the first result.
    assert!(text[0].contains("The fire"));
}

// Add more tests for other functions
}
3 changes: 1 addition & 2 deletions screenpipe-server/src/core.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,7 @@ use chrono::Utc;
use crossbeam::channel::{Receiver, Sender};
use log::{debug, error, info};
use screenpipe_audio::{
create_whisper_channel, record_and_transcribe, AudioCaptureResult, AudioInput, DeviceSpec,
TranscriptionResult,
create_whisper_channel, record_and_transcribe, AudioInput, DeviceSpec, TranscriptionResult,
};
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
Expand Down
99 changes: 84 additions & 15 deletions screenpipe-server/src/resource_monitor.rs
Original file line number Diff line number Diff line change
@@ -1,14 +1,11 @@
use log::{error, info, warn};
use std::process::Command;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;
use std::thread;
use std::time::{Duration, Instant};
use sysinfo::{PidExt, ProcessExt, System, SystemExt};

pub struct ResourceMonitor {
open_files: AtomicUsize,
active_threads: AtomicUsize,
start_time: Instant,
memory_threshold: f64,
runtime_threshold: Duration,
Expand All @@ -17,8 +14,6 @@ pub struct ResourceMonitor {
impl ResourceMonitor {
pub fn new(memory_threshold: f64, runtime_threshold_minutes: u64) -> Arc<Self> {
Arc::new(Self {
open_files: AtomicUsize::new(0),
active_threads: AtomicUsize::new(0),
start_time: Instant::now(),
memory_threshold,
runtime_threshold: Duration::from_secs(runtime_threshold_minutes * 60),
Expand All @@ -34,16 +29,26 @@ impl ResourceMonitor {
let cpu_usage = process.cpu_usage();
let runtime = self.start_time.elapsed();

info!(
"Runtime: {:?}, Memory: {:.2}% ({:.2} KB / {:.2} KB), CPU: {:.2}%",
runtime,
memory_usage_percent,
memory_usage,
total_memory,
cpu_usage,
// self.open_files.load(Ordering::SeqCst),
// self.active_threads.load(Ordering::SeqCst)
);
let log_message = if cfg!(target_os = "macos") {
if let Some(npu_usage) = self.get_npu_usage() {
format!(
"Runtime: {:?}, Memory: {:.2}% ({:.2} KB / {:.2} KB), CPU: {:.2}%, NPU: {:.2}%",
runtime, memory_usage_percent, memory_usage, total_memory, cpu_usage, npu_usage
)
} else {
format!(
"Runtime: {:?}, Memory: {:.2}% ({:.2} KB / {:.2} KB), CPU: {:.2}%, NPU: N/A",
runtime, memory_usage_percent, memory_usage, total_memory, cpu_usage
)
}
} else {
format!(
"Runtime: {:?}, Memory: {:.2}% ({:.2} KB / {:.2} KB), CPU: {:.2}%",
runtime, memory_usage_percent, memory_usage, total_memory, cpu_usage
)
};

info!("{}", log_message);

// Check for restart conditions
if memory_usage_percent > self.memory_threshold || runtime > self.runtime_threshold {
Expand Down Expand Up @@ -97,4 +102,68 @@ impl ResourceMonitor {
}
});
}

// TODO- only way would be to use metal crate (overkill for now :))
#[cfg(target_os = "macos")]
fn get_npu_usage(&self) -> Option<f32> {
    // Query the Apple Neural Engine device node; on failure (or off-ANE
    // hardware) we simply report no reading.
    let output = Command::new("ioreg")
        .args(&["-r", "-c", "AppleARMIODevice", "-n", "ane0"])
        .output()
        .ok()?;

    let stdout = String::from_utf8_lossy(&output.stdout);

    // Scan for the "ane_power" property; take the first line whose
    // right-hand side parses as a float, then scale to a percentage.
    stdout
        .lines()
        .filter(|line| line.contains("\"ane_power\""))
        .filter_map(|line| line.split('=').nth(1))
        .find_map(|raw| raw.trim().parse::<f32>().ok())
        .map(|power| {
            // Assuming max ANE power is 8.0W (adjust if needed)
            let max_ane_power = 8.0;
            (power / max_ane_power) * 100.0
        })
}

// #[cfg(target_os = "macos")]
// fn get_npu_usage(&self) -> Option<f32> {
// // ! HACK unfortunately requires sudo so not usable ...
// let output = Command::new("powermetrics")
// .args(&[
// "-s",
// "cpu_power",
// "-i",
// "100",
// "-n",
// "1",
// "--format",
// "json",
// ])
// .output()
// .ok()?;

// println!("Output: {:?}", output);

// let json: Value = serde_json::from_slice(&output.stdout).ok()?;
// let ane_power = json["processor"]["ane_energy"].as_f64()?;

// // Convert energy to power (W) based on the interval (100ms)
// let ane_power_watts = ane_power / 0.1 / 1000.0;

// // Assuming max ANE power is 8.0W (adjust if needed)
// let max_ane_power = 8.0;
// let npu_usage_percent = (ane_power_watts / max_ane_power) * 100.0;

// Some(npu_usage_percent as f32)
// }

// Fallback for non-macOS targets: no ANE/NPU telemetry is available,
// so callers always see "no reading" rather than a platform error.
#[cfg(not(target_os = "macos"))]
fn get_npu_usage(&self) -> Option<f32> {
None
}
}

0 comments on commit 787cae1

Please sign in to comment.