Merge pull request #58 from SilasMarvin/silas-custom-log-file
Add custom log file option and improve logging
SilasMarvin authored Aug 11, 2024
2 parents 8335d5c + 600fb6f commit 3a7c4ba
Showing 12 changed files with 182 additions and 98 deletions.
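In short: main.rs gains a clap-derived flag (exposed by clap as --use-seperate-log-file, spelling taken verbatim from the field name) that, when set and a platform cache directory can be resolved, sends logs to <cache dir>/lsp-ai/lsp-ai.log instead of stderr, falling back to stderr if the file cannot be created; verbosity is still controlled by the LSP_AI_LOG environment variable. The crate version moves from 0.4.1 to 0.5.0, lsp-ai-chat.md is ignored, several tree-parsing failures in the file store and tree-sitter splitter are downgraded from error! to warn!, and the Anthropic, Gemini, llama.cpp, and Mistral FIM backends gain info!-level logging of the requests and prompts they send.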
1 change: 1 addition & 0 deletions .gitignore
@@ -5,3 +5,4 @@ out
dist
lsp-ai.log
.vsix
lsp-ai-chat.md
21 changes: 11 additions & 10 deletions Cargo.lock

Some generated files are not rendered by default.

3 changes: 2 additions & 1 deletion crates/lsp-ai/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "lsp-ai"
version = "0.4.1"
version = "0.5.0"

description.workspace = true
repository.workspace = true
@@ -42,6 +42,7 @@ md5 = "0.7.0"
fxhash = "0.2.1"
ordered-float = "4.2.1"
futures = "0.3"
clap = { version = "4.5.14", features = ["derive"] }

[build-dependencies]
cc="1"
65 changes: 54 additions & 11 deletions crates/lsp-ai/src/main.rs
@@ -1,17 +1,21 @@
use anyhow::Result;

use clap::Parser;
use directories::BaseDirs;
use lsp_server::{Connection, ExtractError, Message, Notification, Request, RequestId};
use lsp_types::{
request::{CodeActionRequest, CodeActionResolveRequest, Completion},
CodeActionOptions, CompletionOptions, DidChangeTextDocumentParams, DidOpenTextDocumentParams,
RenameFilesParams, ServerCapabilities, TextDocumentSyncKind,
};
use std::sync::Mutex;
use std::{
collections::HashMap,
fs,
path::Path,
sync::{mpsc, Arc},
thread,
};
use tracing::error;
use tracing::{error, info};
use tracing_subscriber::{EnvFilter, FmtSubscriber};

mod config;
@@ -54,19 +58,58 @@ where
req.extract(R::METHOD)
}

// LSP-AI parameters
#[derive(Parser)]
#[command(version)]
struct Args {
// Whether to use a custom log file
#[arg(long, default_value_t = false)]
use_seperate_log_file: bool,
}

fn create_log_file(base_path: &Path) -> anyhow::Result<fs::File> {
let dir_path = base_path.join("lsp-ai");
fs::create_dir_all(&dir_path)?;
let file_path = dir_path.join("lsp-ai.log");
Ok(fs::File::create(file_path)?)
}

// Builds a tracing subscriber from the `LSP_AI_LOG` environment variable
// If the variables value is malformed or missing, sets the default log level to ERROR
fn init_logger() {
FmtSubscriber::builder()
.with_writer(std::io::stderr)
.with_ansi(false)
.without_time()
.with_env_filter(EnvFilter::from_env("LSP_AI_LOG"))
.init();
fn init_logger(args: &Args) {
let builder = FmtSubscriber::builder().with_env_filter(EnvFilter::from_env("LSP_AI_LOG"));
let base_dirs = BaseDirs::new();

if args.use_seperate_log_file && base_dirs.is_some() {
let base_dirs = base_dirs.unwrap();
let cache_dir = base_dirs.cache_dir();
// Linux: /home/alice/.cache
// Windows: C:\Users\Alice\AppData\Local
// macOS: /Users/Alice/Library/Caches
match create_log_file(&cache_dir) {
Ok(log_file) => builder.with_writer(Mutex::new(log_file)).init(),
Err(e) => {
eprintln!("creating log file: {e:?} - falling back to stderr");
builder
.with_writer(std::io::stderr)
.without_time()
.with_ansi(false)
.init()
}
}
} else {
builder
.with_writer(std::io::stderr)
.without_time()
.with_ansi(false)
.init()
}
}

fn main() -> Result<()> {
init_logger();
let args = Args::parse();
init_logger(&args);
info!("lsp-ai logger initialized starting server");

let (connection, io_threads) = Connection::stdio();
let server_capabilities = serde_json::to_value(ServerCapabilities {
@@ -181,7 +224,7 @@ fn main_loop(connection: Connection, args: serde_json::Value) -> Result<()> {
Err(err) => error!("{err:?}"),
}
} else {
error!("lsp-ai currently only supports textDocument/completion, textDocument/generation and textDocument/generationStream")
error!("Unsupported command - see the wiki for a list of supported commands")
}
}
Message::Notification(not) => {
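For readability, here is the new argument and logger wiring assembled from the hunks above rather than copied from the repository — a sketch, assuming clap 4 with the derive feature, tracing-subscriber with the env-filter feature, directories, and anyhow, all of which appear in the workspace:

use std::{fs, path::Path, sync::Mutex};

use clap::Parser;
use directories::BaseDirs;
use tracing_subscriber::{EnvFilter, FmtSubscriber};

#[derive(Parser)]
#[command(version)]
struct Args {
    // Opt in to logging to a file instead of stderr (flag: --use-seperate-log-file)
    #[arg(long, default_value_t = false)]
    use_seperate_log_file: bool,
}

// Create (or truncate) <cache_dir>/lsp-ai/lsp-ai.log.
fn create_log_file(base_path: &Path) -> anyhow::Result<fs::File> {
    let dir_path = base_path.join("lsp-ai");
    fs::create_dir_all(&dir_path)?;
    Ok(fs::File::create(dir_path.join("lsp-ai.log"))?)
}

// The filter always comes from LSP_AI_LOG; only the writer changes.
fn init_logger(args: &Args) {
    let builder = FmtSubscriber::builder().with_env_filter(EnvFilter::from_env("LSP_AI_LOG"));
    match BaseDirs::new() {
        // Log to the cache-directory file when requested and resolvable.
        Some(base_dirs) if args.use_seperate_log_file => {
            match create_log_file(base_dirs.cache_dir()) {
                Ok(log_file) => builder.with_writer(Mutex::new(log_file)).init(),
                Err(e) => {
                    eprintln!("creating log file: {e:?} - falling back to stderr");
                    builder
                        .with_writer(std::io::stderr)
                        .without_time()
                        .with_ansi(false)
                        .init()
                }
            }
        }
        // Otherwise keep the previous behavior: plain, timestamp-free stderr output.
        _ => builder
            .with_writer(std::io::stderr)
            .without_time()
            .with_ansi(false)
            .init(),
    }
}

fn main() {
    let args = Args::parse();
    init_logger(&args);
    tracing::info!("lsp-ai logger initialized starting server");
}

Note that .without_time() and .with_ansi(false) are applied only on the stderr paths, so the file log keeps timestamps, while the LSP_AI_LOG filter governs verbosity in both cases.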
4 changes: 1 addition & 3 deletions crates/lsp-ai/src/memory_backends/file_store.rs
@@ -101,9 +101,7 @@ impl FileStore {
match parse_tree(uri, &contents, None) {
Ok(tree) => Some(tree),
Err(e) => {
error!(
"Failed to parse tree for {uri} with error {e}, falling back to no tree"
);
warn!("Failed to parse tree for {uri} with error {e}, falling back to no tree");
None
}
}
8 changes: 4 additions & 4 deletions crates/lsp-ai/src/splitters/tree_sitter.rs
@@ -1,5 +1,5 @@
use splitter_tree_sitter::TreeSitterCodeSplitter;
use tracing::error;
use tracing::warn;
use tree_sitter::Tree;

use crate::{config, memory_backends::file_store::File, utils::parse_tree};
@@ -43,7 +43,7 @@ impl Splitter for TreeSitter {
match self.split_tree(tree, file.rope().to_string().as_bytes()) {
Ok(chunks) => chunks,
Err(e) => {
error!(
warn!(
"Failed to parse tree for file with error: {e:?}. Falling back to default splitter.",
);
self.text_splitter.split(file)
@@ -59,14 +59,14 @@
Ok(tree) => match self.split_tree(&tree, contents.as_bytes()) {
Ok(chunks) => chunks,
Err(e) => {
error!(
warn!(
"Failed to parse tree for file: {uri} with error: {e:?}. Falling back to default splitter.",
);
self.text_splitter.split_file_contents(uri, contents)
}
},
Err(e) => {
error!(
warn!(
"Failed to parse tree for file {uri} with error: {e:?}. Falling back to default splitter.",
);
self.text_splitter.split_file_contents(uri, contents)
23 changes: 14 additions & 9 deletions crates/lsp-ai/src/transformer_backends/anthropic.rs
@@ -3,7 +3,7 @@ use std::collections::HashMap;
use anyhow::Context;
use serde::Deserialize;
use serde_json::{json, Value};
use tracing::instrument;
use tracing::{info, instrument};

use crate::{
config::{self, ChatMessage},
@@ -80,6 +80,18 @@ impl Anthropic {
"Please set `auth_token_env_var_name` or `auth_token` to use an Anthropic"
);
};
let params = json!({
"model": self.config.model,
"system": system_prompt,
"max_tokens": params.max_tokens,
"top_p": params.top_p,
"temperature": params.temperature,
"messages": messages
});
info!(
"Calling Anthropic compatible API with parameters:\n{}",
serde_json::to_string_pretty(&params).unwrap()
);
let res: AnthropicChatResponse = client
.post(
self.config
@@ -91,14 +91,7 @@
.header("anthropic-version", "2023-06-01")
.header("Content-Type", "application/json")
.header("Accept", "application/json")
.json(&json!({
"model": self.config.model,
"system": system_prompt,
"max_tokens": params.max_tokens,
"top_p": params.top_p,
"temperature": params.temperature,
"messages": messages
}))
.json(&params)
.send()
.await?
.json()
17 changes: 11 additions & 6 deletions crates/lsp-ai/src/transformer_backends/gemini.rs
@@ -1,7 +1,7 @@
use anyhow::Context;
use serde::{Deserialize, Serialize};
use serde_json::{json, Value};
use tracing::instrument;
use tracing::{info, instrument};

use super::TransformerBackend;
use crate::{
@@ -110,6 +110,15 @@ impl Gemini {
) -> anyhow::Result<String> {
let client = reqwest::Client::new();
let token = self.get_token()?;
let params = json!({
"contents": messages,
"systemInstruction": params.system_instruction,
"generationConfig": params.generation_config,
});
info!(
"Calling Gemini compatible chat API with parameters:\n{}",
serde_json::to_string_pretty(&params).unwrap()
);
let res: serde_json::Value = client
.post(
self.configuration
@@ -122,11 +131,7 @@
+ token.as_ref(),
)
.header("Content-Type", "application/json")
.json(&json!({
"contents": messages,
"systemInstruction": params.system_instruction,
"generationConfig": params.generation_config,
}))
.json(&params)
.send()
.await?
.json()
9 changes: 7 additions & 2 deletions crates/lsp-ai/src/transformer_backends/llama_cpp/model.rs
@@ -9,7 +9,7 @@ use llama_cpp_2::{
};
use once_cell::sync::Lazy;
use std::{num::NonZeroU32, path::PathBuf, time::Duration};
use tracing::{debug, info, instrument};
use tracing::{info, instrument};

use crate::config::{self, ChatMessage};

@@ -29,7 +29,10 @@ impl Model {
let model_params = LlamaModelParams::default().with_n_gpu_layers(config.n_gpu_layers);

// Load the model
debug!("Loading model at path: {:?}", model_path);
info!(
"Loading llama.cpp compatible model at path: {:?}",
model_path
);
let model = LlamaModel::load_from_file(&BACKEND, model_path, &model_params)?;

Ok(Model {
@@ -40,6 +43,8 @@

#[instrument(skip(self))]
pub fn complete(&self, prompt: &str, params: LLaMACPPRunParams) -> anyhow::Result<String> {
info!("Completing with llama.cpp with prompt:\n{prompt}");

// initialize the context
let ctx_params = LlamaContextParams::default().with_n_ctx(Some(self.n_ctx));

29 changes: 17 additions & 12 deletions crates/lsp-ai/src/transformer_backends/mistral_fim.rs
@@ -1,7 +1,7 @@
use anyhow::Context;
use serde::Deserialize;
use serde_json::{json, Value};
use tracing::instrument;
use tracing::{info, instrument};

use super::{open_ai::OpenAIChatResponse, TransformerBackend};
use crate::{
@@ -67,6 +67,21 @@ impl MistralFIM {
) -> anyhow::Result<String> {
let client = reqwest::Client::new();
let token = self.get_token()?;
let params = json!({
"prompt": prompt.prompt,
"suffix": prompt.suffix,
"model": self.config.model,
"max_tokens": params.max_tokens,
"top_p": params.top_p,
"temperature": params.temperature,
"min_tokens": params.min_tokens,
"random_seed": params.random_seed,
"stop": params.stop
});
info!(
"Calling Mistral compatible FIM API with parameters:\n{}",
serde_json::to_string_pretty(&params).unwrap()
);
let res: OpenAIChatResponse = client
.post(
self.config
@@ -77,17 +92,7 @@
.bearer_auth(token)
.header("Content-Type", "application/json")
.header("Accept", "application/json")
.json(&json!({
"prompt": prompt.prompt,
"suffix": prompt.suffix,
"model": self.config.model,
"max_tokens": params.max_tokens,
"top_p": params.top_p,
"temperature": params.temperature,
"min_tokens": params.min_tokens,
"random_seed": params.random_seed,
"stop": params.stop
}))
.json(&params)
.send()
.await?
.json()
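A usage note, hedged: the new request and prompt logging added across the backends is emitted at info level, and the subscriber's filter is read from LSP_AI_LOG, so a setting along the lines of LSP_AI_LOG=info (or a narrower directive such as lsp_ai=info) would presumably be needed for those messages to appear, whether they are written to stderr or to the new cache-directory log file.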