diff --git a/kclvm/tools/src/LSP/src/capabilities.rs b/kclvm/tools/src/LSP/src/capabilities.rs index 801bcd91a..ed5c707b9 100644 --- a/kclvm/tools/src/LSP/src/capabilities.rs +++ b/kclvm/tools/src/LSP/src/capabilities.rs @@ -38,6 +38,7 @@ pub fn server_capabilities(client_caps: &ClientCapabilities) -> ServerCapabiliti ), document_formatting_provider: Some(OneOf::Left(true)), document_range_formatting_provider: Some(OneOf::Left(true)), + references_provider: Some(OneOf::Left(true)), ..Default::default() } } diff --git a/kclvm/tools/src/LSP/src/completion.rs b/kclvm/tools/src/LSP/src/completion.rs index 681b04c1c..7f6067316 100644 --- a/kclvm/tools/src/LSP/src/completion.rs +++ b/kclvm/tools/src/LSP/src/completion.rs @@ -83,7 +83,7 @@ fn completion_attr( if let Some((node, schema_expr)) = is_in_schema(program, pos) { let schema_def = find_def(node, &schema_expr.name.get_end_pos(), prog_scope); if let Some(schema) = schema_def { - if let Definition::Object(obj) = schema { + if let Definition::Object(obj, _) = schema { let schema_type = obj.ty.into_schema_type(); completions.extend(schema_type.attrs.keys().map(|attr| KCLCompletionItem { label: attr.clone(), @@ -178,7 +178,7 @@ pub(crate) fn get_completion( let def = find_def(stmt, pos, prog_scope); if let Some(def) = def { match def { - crate::goto_def::Definition::Object(obj) => { + crate::goto_def::Definition::Object(obj, _) => { match &obj.ty.kind { // builtin (str) functions kclvm_sema::ty::TypeKind::Str => { @@ -226,7 +226,7 @@ pub(crate) fn get_completion( _ => {} } } - crate::goto_def::Definition::Scope(s) => { + crate::goto_def::Definition::Scope(s, _) => { for (name, obj) in &s.elems { if let ScopeObjectKind::Module(_) = obj.borrow().kind { continue; @@ -259,7 +259,7 @@ pub(crate) fn get_completion( find_def(stmt, &schema_expr.name.get_end_pos(), prog_scope); if let Some(schema) = schema_def { match schema { - Definition::Object(obj) => { + Definition::Object(obj, _) => { let schema_type = obj.ty.into_schema_type(); items.extend( schema_type @@ -271,7 +271,7 @@ pub(crate) fn get_completion( .collect::>(), ); } - Definition::Scope(_) => {} + Definition::Scope(_, _) => {} } } } diff --git a/kclvm/tools/src/LSP/src/find_ref/find_refs.rs b/kclvm/tools/src/LSP/src/find_ref/find_refs.rs deleted file mode 100644 index f3926093c..000000000 --- a/kclvm/tools/src/LSP/src/find_ref/find_refs.rs +++ /dev/null @@ -1,28 +0,0 @@ -use crate::find_ref; -use crate::find_ref::go_to_def::go_to_def; -use kclvm_error::Position; - -/// Find all references of the item at the cursor location. -pub fn find_refs(path: String, pos: Position) -> Vec { - let declaration = go_to_def(pos.clone()); - let search = { - move |decl: Position| { - let name = find_ref::word_at_pos(pos); - if name.is_none() { - return vec![]; - } - // Get identifiers with same name - let candidates = find_ref::match_word(path, name.unwrap()); - // Check if the definition of candidate and declartion are the same - let refs: Vec = candidates - .into_iter() - .filter(|x| go_to_def(x.clone()).as_ref() == Some(&decl)) - .collect(); - refs - } - }; - match declaration { - Some(decl) => search(decl), - None => Vec::new(), - } -} diff --git a/kclvm/tools/src/LSP/src/find_ref/go_to_def.rs b/kclvm/tools/src/LSP/src/find_ref/go_to_def.rs deleted file mode 100644 index 95b64b7eb..000000000 --- a/kclvm/tools/src/LSP/src/find_ref/go_to_def.rs +++ /dev/null @@ -1,6 +0,0 @@ -use kclvm_error::Position; - -/// Get the definition of an identifier. 
-pub fn go_to_def(pos: Position) -> Option { - Some(pos) -} diff --git a/kclvm/tools/src/LSP/src/find_ref/mod.rs b/kclvm/tools/src/LSP/src/find_ref/mod.rs deleted file mode 100644 index 86fe83f9a..000000000 --- a/kclvm/tools/src/LSP/src/find_ref/mod.rs +++ /dev/null @@ -1,134 +0,0 @@ -#![allow(dead_code)] - -use anyhow::Result; -use kclvm_driver::get_kcl_files; -use kclvm_error::Position; - -mod find_refs; -mod go_to_def; -mod word_map; - -#[cfg(test)] -mod tests; - -// LineWord describes an arbitrary word in a certain line including -// start position, end position and the word itself. -#[derive(Debug, PartialEq, Eq, Clone)] -pub struct LineWord { - startpos: u64, - endpos: u64, - word: String, -} - -// Get the word of the position. -pub fn word_at_pos(pos: Position) -> Option { - let text = read_file(&pos.filename); - if text.is_err() { - return None; - } - let text = text.unwrap(); - let lines: Vec<&str> = text.lines().collect(); - if pos.line >= lines.len() as u64 { - return None; - } - pos.column?; - let col = pos.column.unwrap(); - let line_words = line_to_words(lines[pos.line as usize].to_string()); - if line_words.is_empty() - || col < line_words.first().unwrap().startpos - || col >= line_words.last().unwrap().endpos - { - return None; - } - for line_word in line_words.into_iter() { - if col >= line_word.startpos && col < line_word.endpos { - return Some(line_word.word); - } - } - None -} - -pub fn read_file(path: &String) -> Result { - let text = std::fs::read_to_string(path)?; - Ok(text) -} - -// Split one line into identifier words. -pub fn line_to_words(text: String) -> Vec { - let mut chars: Vec = text.chars().collect(); - chars.push('\n'); - let mut start_pos = usize::MAX; - let mut continue_pos = usize::MAX - 1; // avoid overflow - let mut prev_word = false; - let mut words: Vec = vec![]; - for (i, ch) in chars.iter().enumerate() { - let is_id_start = rustc_lexer::is_id_start(*ch); - let is_id_continue = rustc_lexer::is_id_continue(*ch); - // If the character is valid identfier start and the previous character is not valid identifier continue, mark the start position. - if is_id_start && !prev_word { - start_pos = i; - } - match is_id_continue { - true => { - // Continue searching for the end position. - if start_pos != usize::MAX { - continue_pos = i; - } - } - false => { - // Find out the end position. - if continue_pos + 1 == i { - words.push(LineWord { - startpos: start_pos as u64, - endpos: i as u64, - word: chars[start_pos..i].iter().collect::().clone(), - }); - } - // Reset the start position. - start_pos = usize::MAX; - } - } - prev_word = is_id_continue; - } - words -} - -// Get all occurrences of the word in the entire path. -pub fn match_word(path: String, name: String) -> Vec { - let mut res = vec![]; - if let Ok(files) = get_kcl_files(path, true) { - // Searching in all files. - for file in files.into_iter() { - let text = read_file(&file); - if text.is_err() { - continue; - } - let text = text.unwrap(); - let lines: Vec<&str> = text.lines().collect(); - for (li, line) in lines.into_iter().enumerate() { - // Get the matching results for each line. - let matched: Vec = line_to_words(line.to_string()) - .into_iter() - .filter(|x| x.word == name) - .map(|x| Position { - filename: file.clone(), - line: li as u64, - column: Some(x.startpos), - }) - .collect(); - res.extend(matched); - } - } - } - res -} - -// Convert pos format -// The position in lsp protocol is different with position in ast node whose line number is 1 based. 
-pub fn kcl_pos_to_lsp_pos(pos: Position) -> Position { - Position { - filename: pos.filename, - line: pos.line - 1, - column: pos.column, - } -} diff --git a/kclvm/tools/src/LSP/src/find_ref/test_data/inherit.k b/kclvm/tools/src/LSP/src/find_ref/test_data/inherit.k deleted file mode 100644 index a1bef7126..000000000 --- a/kclvm/tools/src/LSP/src/find_ref/test_data/inherit.k +++ /dev/null @@ -1,10 +0,0 @@ -schema Parent: - name: str - -schema Son(Parent): - age: int - son_name: str = name - -schema GrandSon(Son): - grand_son_name: str = name - a: str = name_not_exist diff --git a/kclvm/tools/src/LSP/src/find_ref/test_data/inherit_pkg.k b/kclvm/tools/src/LSP/src/find_ref/test_data/inherit_pkg.k deleted file mode 100644 index 39cdf9b1d..000000000 --- a/kclvm/tools/src/LSP/src/find_ref/test_data/inherit_pkg.k +++ /dev/null @@ -1,4 +0,0 @@ -import pkg - -schema Son(pkg.Parent): - son_name: str = name diff --git a/kclvm/tools/src/LSP/src/find_ref/test_data/test_word_workspace/inherit.k b/kclvm/tools/src/LSP/src/find_ref/test_data/test_word_workspace/inherit.k deleted file mode 100644 index a1bef7126..000000000 --- a/kclvm/tools/src/LSP/src/find_ref/test_data/test_word_workspace/inherit.k +++ /dev/null @@ -1,10 +0,0 @@ -schema Parent: - name: str - -schema Son(Parent): - age: int - son_name: str = name - -schema GrandSon(Son): - grand_son_name: str = name - a: str = name_not_exist diff --git a/kclvm/tools/src/LSP/src/find_ref/test_data/test_word_workspace/inherit_pkg.k b/kclvm/tools/src/LSP/src/find_ref/test_data/test_word_workspace/inherit_pkg.k deleted file mode 100644 index 39cdf9b1d..000000000 --- a/kclvm/tools/src/LSP/src/find_ref/test_data/test_word_workspace/inherit_pkg.k +++ /dev/null @@ -1,4 +0,0 @@ -import pkg - -schema Son(pkg.Parent): - son_name: str = name diff --git a/kclvm/tools/src/LSP/src/find_ref/test_data/test_word_workspace_map/inherit.k b/kclvm/tools/src/LSP/src/find_ref/test_data/test_word_workspace_map/inherit.k deleted file mode 100644 index a1bef7126..000000000 --- a/kclvm/tools/src/LSP/src/find_ref/test_data/test_word_workspace_map/inherit.k +++ /dev/null @@ -1,10 +0,0 @@ -schema Parent: - name: str - -schema Son(Parent): - age: int - son_name: str = name - -schema GrandSon(Son): - grand_son_name: str = name - a: str = name_not_exist diff --git a/kclvm/tools/src/LSP/src/find_ref/test_data/test_word_workspace_map/inherit_pkg.k b/kclvm/tools/src/LSP/src/find_ref/test_data/test_word_workspace_map/inherit_pkg.k deleted file mode 100644 index 39cdf9b1d..000000000 --- a/kclvm/tools/src/LSP/src/find_ref/test_data/test_word_workspace_map/inherit_pkg.k +++ /dev/null @@ -1,4 +0,0 @@ -import pkg - -schema Son(pkg.Parent): - son_name: str = name diff --git a/kclvm/tools/src/LSP/src/find_ref/tests.rs b/kclvm/tools/src/LSP/src/find_ref/tests.rs deleted file mode 100644 index d27969350..000000000 --- a/kclvm/tools/src/LSP/src/find_ref/tests.rs +++ /dev/null @@ -1,252 +0,0 @@ -use crate::find_ref; -use crate::find_ref::LineWord; -use kclvm_error::Position; - -#[cfg(test)] -mod tests { - use super::*; - use std::fs; - use std::path::Path; - use std::{collections::HashMap, hash::Hash}; - - fn check_line_to_words(code: &str, expect: Vec) { - assert_eq!(find_ref::line_to_words(code.to_string()), expect); - } - - fn test_eq_list(a: &[T], b: &[T]) -> bool - where - T: Eq + Hash, - { - fn count(items: &[T]) -> HashMap<&T, usize> - where - T: Eq + Hash, - { - let mut cnt = HashMap::new(); - for i in items { - *cnt.entry(i).or_insert(0) += 1 - } - cnt - } - count(a) == count(b) - } - - #[test] 
- fn test_line_to_words() { - let datas = [ - "alice_first_name = \"alice\"", - "0lice_first_name = \"alic0\"", - "alice = p.Parent { name: \"alice\" }", - ]; - let expect = vec![ - vec![ - LineWord { - startpos: 0, - endpos: 16, - word: "alice_first_name".to_string(), - }, - LineWord { - startpos: 20, - endpos: 25, - word: "alice".to_string(), - }, - ], - vec![LineWord { - startpos: 20, - endpos: 25, - word: "alic0".to_string(), - }], - vec![ - LineWord { - startpos: 0, - endpos: 5, - word: "alice".to_string(), - }, - LineWord { - startpos: 8, - endpos: 9, - word: "p".to_string(), - }, - LineWord { - startpos: 10, - endpos: 16, - word: "Parent".to_string(), - }, - LineWord { - startpos: 19, - endpos: 23, - word: "name".to_string(), - }, - LineWord { - startpos: 26, - endpos: 31, - word: "alice".to_string(), - }, - ], - ]; - for i in 0..datas.len() { - check_line_to_words(datas[i], expect[i].clone()); - } - } - - #[test] - fn test_word_at_pos() { - // use std::env; - // let parent_path = env::current_dir().unwrap(); - // println!("The current directory is {}", parent_path.display()); - let path_prefix = "./src/find_ref/".to_string(); - let datas = vec![ - Position { - filename: (path_prefix.clone() + "test_data/inherit.k"), - line: 0, - column: Some(0), - }, - Position { - filename: (path_prefix.clone() + "test_data/inherit.k"), - line: 1, - column: Some(5), - }, - Position { - filename: (path_prefix.clone() + "test_data/inherit.k"), - line: 3, - column: Some(7), - }, - Position { - filename: (path_prefix.clone() + "test_data/inherit.k"), - line: 3, - column: Some(10), - }, - Position { - filename: (path_prefix.clone() + "test_data/inherit.k"), - line: 4, - column: Some(8), - }, - Position { - filename: (path_prefix + "test_data/inherit.k"), - line: 4, - column: Some(100), - }, - ]; - let expect = vec![ - Some("schema".to_string()), - Some("name".to_string()), - Some("Son".to_string()), - None, - None, - None, - ]; - for i in 0..datas.len() { - assert_eq!(find_ref::word_at_pos(datas[i].clone()), expect[i]); - } - } - - fn test_word_workspace() -> String { - Path::new(".") - .join("src") - .join("find_ref") - .join("test_data") - .join("test_word_workspace") - .display() - .to_string() - } - - #[test] - fn test_match_word() { - let path = test_word_workspace(); - let datas = vec![String::from("Son")]; - let except = vec![vec![ - Position { - filename: Path::new(&test_word_workspace()) - .join("inherit_pkg.k") - .display() - .to_string(), - line: 2, - column: Some(7), - }, - Position { - filename: Path::new(&test_word_workspace()) - .join("inherit.k") - .display() - .to_string(), - line: 3, - column: Some(7), - }, - Position { - filename: Path::new(&test_word_workspace()) - .join("inherit.k") - .display() - .to_string(), - line: 7, - column: Some(16), - }, - ]]; - for i in 0..datas.len() { - assert!(test_eq_list( - &find_ref::match_word(path.clone(), datas[i].clone()), - &except[i] - )); - } - } - - fn test_word_workspace_map() -> String { - Path::new(".") - .join("src") - .join("find_ref") - .join("test_data") - .join("test_word_workspace_map") - .display() - .to_string() - } - - #[test] - fn test_word_map() { - let path = test_word_workspace_map(); - let mut mp = find_ref::word_map::WorkSpaceWordMap::new(path); - mp.build(); - let _res = fs::rename( - Path::new(&test_word_workspace_map()) - .join("inherit_pkg.k") - .display() - .to_string(), - Path::new(&test_word_workspace_map()) - .join("inherit_bak.k") - .display() - .to_string(), - ); - mp.rename_file( - 
Path::new(&test_word_workspace_map()) - .join("inherit_pkg.k") - .display() - .to_string(), - Path::new(&test_word_workspace_map()) - .join("inherit_bak.k") - .display() - .to_string(), - ); - mp.delete_file( - Path::new(&test_word_workspace_map()) - .join("inherit.k") - .display() - .to_string(), - ); - let _res = fs::rename( - Path::new(&test_word_workspace_map()) - .join("inherit_bak.k") - .display() - .to_string(), - Path::new(&test_word_workspace_map()) - .join("inherit_pkg.k") - .display() - .to_string(), - ); - - let except = vec![Position { - filename: Path::new(&test_word_workspace_map()) - .join("inherit_bak.k") - .display() - .to_string(), - line: 2, - column: Some(7), - }]; - assert_eq!(mp.get(&String::from("Son")), Some(except)); - } -} diff --git a/kclvm/tools/src/LSP/src/find_ref/word_map.rs b/kclvm/tools/src/LSP/src/find_ref/word_map.rs deleted file mode 100644 index 143bf1708..000000000 --- a/kclvm/tools/src/LSP/src/find_ref/word_map.rs +++ /dev/null @@ -1,123 +0,0 @@ -use crate::find_ref; - -use kclvm_driver::get_kcl_files; -use kclvm_error::Position; -use std::collections::HashMap; - -// Record all occurrences of the name in a file -#[derive(Default)] -pub struct FileWordMap { - file_name: String, - word_map: HashMap>, -} - -impl FileWordMap { - pub fn new(file_name: String) -> Self { - Self { - file_name, - word_map: HashMap::new(), - } - } - - // Clear records - pub fn clear(&mut self) { - self.word_map.clear(); - } - - // insert an occurrence of a name - pub fn insert(&mut self, name: String, pos: Position) { - self.word_map.entry(name).or_default().push(pos); - } - - // build the record map - // if text is missing, it will be read from the file system based on the filename - pub fn build(&mut self, text: Option) { - self.clear(); - let text = text.unwrap_or(find_ref::read_file(&self.file_name).unwrap()); - let lines: Vec<&str> = text.lines().collect(); - for (li, line) in lines.into_iter().enumerate() { - let words = find_ref::line_to_words(line.to_string()); - words.iter().for_each(|x| { - self.word_map - .entry(x.word.clone()) - .or_default() - .push(Position { - filename: self.file_name.clone(), - line: li as u64, - column: Some(x.startpos), - }) - }); - } - } - - // return all occurrence of a name - pub fn get(&self, name: &String) -> Option<&Vec> { - self.word_map.get(name) - } -} - -// Record all occurrences of the name in workspace -pub struct WorkSpaceWordMap { - path: String, - file_map: HashMap, -} - -impl WorkSpaceWordMap { - pub fn new(path: String) -> Self { - Self { - path, - file_map: HashMap::new(), - } - } - - // when user edit a file, the filemap of this file need to rebuild - pub fn change_file(&mut self, file_name: String, text: String) { - self.file_map - .entry(file_name.clone()) - .or_insert(FileWordMap::new(file_name)) - .build(Some(text)); - } - - // when user add a file, the workspacemap will add a new filemap for it - pub fn create_file(&mut self, file_name: String) { - self.file_map - .entry(file_name.clone()) - .or_insert(FileWordMap::new(file_name)) - .clear(); - } - - // when user delete a file, the workspacemap will remove the old filemap of it - pub fn delete_file(&mut self, file_name: String) { - self.file_map.remove(&file_name); - } - - // when user rename a file, the workspacemap will remove the old filemap of it and build a new filemap for it - pub fn rename_file(&mut self, old_name: String, new_name: String) { - self.delete_file(old_name); - self.create_file(new_name.clone()); - 
self.file_map.get_mut(&new_name).unwrap().build(None); - } - - // build & maintain the record map for each file under the path - pub fn build(&mut self) { - //TODO may use some cache from other component? - if let Ok(files) = get_kcl_files(&self.path, true) { - for file in files.into_iter() { - self.file_map - .insert(file.clone(), FileWordMap::new(file.clone())); - self.file_map.get_mut(&file).unwrap().build(None); - } - } - } - - // return all occurrence of a name in the workspace - pub fn get(self, name: &String) -> Option<Vec<Position>> { - let mut words = Vec::new(); - for (_, mp) in self.file_map.iter() { - if let Some(file_words) = mp.get(name) { - words.extend_from_slice(file_words); - } - } - Some(words) - } -} diff --git a/kclvm/tools/src/LSP/src/find_refs.rs b/kclvm/tools/src/LSP/src/find_refs.rs new file mode 100644 index 000000000..38e2db1ad --- /dev/null +++ b/kclvm/tools/src/LSP/src/find_refs.rs @@ -0,0 +1,212 @@ +use crate::from_lsp::kcl_pos; +use crate::goto_def::goto_definition; +use crate::util::{parse_param_and_compile, Param}; +use anyhow; +use lsp_types::{Location, Url}; +use parking_lot::RwLock; +use ra_ap_vfs::Vfs; +use std::collections::HashMap; +use std::sync::Arc; + +pub(crate) fn find_refs<F: Fn(String) -> Result<(), anyhow::Error>>( + vfs: Option<Arc<RwLock<Vfs>>>, + word_index_map: HashMap<Url, HashMap<String, Vec<Location>>>, + def_loc: Location, + name: String, + cursor_path: String, + logger: F, +) -> anyhow::Result<Option<Vec<Location>>> { + // todo: decide the scope by the workspace root and the kcl.mod both, use the narrower scope + // todo: should use the current file path + + let mut ref_locations = vec![]; + + for (_, word_index) in word_index_map { + if let Some(locs) = word_index.get(name.as_str()).cloned() { + let matched_locs: Vec<Location> = locs + .into_iter() + .filter(|ref_loc| { + // from location to real def + // return if the real def location matches the def_loc + let file_path = ref_loc.uri.path().to_string(); + match parse_param_and_compile( + Param { + file: file_path.clone(), + }, + vfs.clone(), + ) { + Ok((prog, scope, _)) => { + let ref_pos = kcl_pos(&file_path, ref_loc.range.start); + // find def from the ref_pos + if let Some(real_def) = goto_definition(&prog, &ref_pos, &scope) { + match real_def { + lsp_types::GotoDefinitionResponse::Scalar(real_def_loc) => { + real_def_loc == def_loc + } + _ => false, + } + } else { + false + } + } + Err(_) => { + let _ = logger(format!("{cursor_path} compilation failed")); + return false; + } + } + }) + .collect(); + ref_locations.extend(matched_locs); + } + } + anyhow::Ok(Some(ref_locations)) +} + +#[cfg(test)] +mod tests { + use super::find_refs; + use crate::util::build_word_index; + use lsp_types::{Location, Position, Range, Url}; + use std::collections::HashMap; + use std::path::PathBuf; + + fn logger(msg: String) -> Result<(), anyhow::Error> { + println!("{}", msg); + anyhow::Ok(()) + } + + fn check_locations_match(expect: Vec<Location>, actual: anyhow::Result<Option<Vec<Location>>>) { + match actual { + Ok(act) => { + if let Some(locations) = act { + assert_eq!(expect, locations) + } else { + assert!(false, "got empty result.
expect: {:?}", expect) + } + } + Err(_) => assert!(false), + } + } + + fn setup_word_index_map(root: &str) -> HashMap>> { + HashMap::from([( + Url::from_file_path(root).unwrap(), + build_word_index(root.to_string()).unwrap(), + )]) + } + + #[test] + fn find_refs_from_variable_test() { + let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")); + let mut path = root.clone(); + path.push("src/test_data/find_refs_test/main.k"); + let path = path.to_str().unwrap(); + match lsp_types::Url::from_file_path(path) { + Ok(url) => { + let def_loc = Location { + uri: url.clone(), + range: Range { + start: Position::new(0, 0), + end: Position::new(0, 1), + }, + }; + let expect = vec![ + Location { + uri: url.clone(), + range: Range { + start: Position::new(0, 0), + end: Position::new(0, 1), + }, + }, + Location { + uri: url.clone(), + range: Range { + start: Position::new(1, 4), + end: Position::new(1, 5), + }, + }, + Location { + uri: url.clone(), + range: Range { + start: Position::new(2, 4), + end: Position::new(2, 5), + }, + }, + Location { + uri: url.clone(), + range: Range { + start: Position::new(12, 14), + end: Position::new(12, 15), + }, + }, + ]; + check_locations_match( + expect, + find_refs( + None, + setup_word_index_map(path), + def_loc, + "a".to_string(), + path.to_string(), + logger, + ), + ); + } + Err(_) => assert!(false, "file not found"), + } + } + + #[test] + fn find_refs_from_schema_name_test() { + let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")); + let mut path = root.clone(); + path.push("src/test_data/find_refs_test/main.k"); + let path = path.to_str().unwrap(); + match lsp_types::Url::from_file_path(path) { + Ok(url) => { + let def_loc = Location { + uri: url.clone(), + range: Range { + start: Position::new(4, 0), + end: Position::new(7, 0), + }, + }; + let expect = vec![ + Location { + uri: url.clone(), + range: Range { + start: Position::new(4, 7), + end: Position::new(4, 11), + }, + }, + Location { + uri: url.clone(), + range: Range { + start: Position::new(8, 7), + end: Position::new(8, 11), + }, + }, + Location { + uri: url.clone(), + range: Range { + start: Position::new(11, 7), + end: Position::new(11, 11), + }, + }, + ]; + check_locations_match( + expect, + find_refs( + None, + setup_word_index_map(path), + def_loc, + "Name".to_string(), + path.to_string(), + logger, + ), + ); + } + Err(_) => assert!(false, "file not found"), + } + } +} diff --git a/kclvm/tools/src/LSP/src/from_lsp.rs b/kclvm/tools/src/LSP/src/from_lsp.rs index 09f103810..eab37b973 100644 --- a/kclvm/tools/src/LSP/src/from_lsp.rs +++ b/kclvm/tools/src/LSP/src/from_lsp.rs @@ -19,11 +19,7 @@ pub(crate) fn kcl_pos(file: &str, pos: Position) -> KCLPos { KCLPos { filename: file.to_string(), line: (pos.line + 1) as u64, - column: if pos.character == 0 { - None - } else { - Some(pos.character as u64) - }, + column: Some(pos.character as u64), } } diff --git a/kclvm/tools/src/LSP/src/goto_def.rs b/kclvm/tools/src/LSP/src/goto_def.rs index ebbdb9f0a..c8dd2693e 100644 --- a/kclvm/tools/src/LSP/src/goto_def.rs +++ b/kclvm/tools/src/LSP/src/goto_def.rs @@ -70,18 +70,18 @@ pub(crate) fn goto_definition( #[derive(Debug)] pub enum Definition { - Object(ScopeObject), - Scope(Scope), + Object(ScopeObject, String), + Scope(Scope, String), } impl Definition { pub(crate) fn get_positions(&self) -> IndexSet<(KCLPos, KCLPos)> { let mut positions = IndexSet::new(); match self { - Definition::Object(obj) => { + Definition::Object(obj, _) => { positions.insert((obj.start.clone(), obj.end.clone())); } - Definition::Scope(scope) 
=> match &scope.kind { + Definition::Scope(scope, _) => match &scope.kind { kclvm_sema::resolver::scope::ScopeKind::Package(filenames) => { for file in filenames { let dummy_pos = KCLPos { @@ -99,6 +99,12 @@ impl Definition { } positions } + pub(crate) fn get_name(&self) -> String { + match self { + Definition::Object(_, name) => name.clone(), + Definition::Scope(_, name) => name.clone(), + } + } } pub(crate) fn find_def( @@ -148,7 +154,7 @@ pub(crate) fn find_def( find_def(node, &schema_expr.name.get_end_pos(), prog_scope); if let Some(schema) = schema_def { match schema { - Definition::Object(obj) => match &obj.ty.kind { + Definition::Object(obj, _) => match &obj.ty.kind { kclvm_sema::ty::TypeKind::Schema(schema_type) => { return find_attr_in_schema( &schema_type, @@ -158,7 +164,7 @@ pub(crate) fn find_def( } _ => {} }, - Definition::Scope(_) => {} + Definition::Scope(_, _) => {} } } } @@ -180,7 +186,7 @@ pub(crate) fn find_def( let id = select_expr.attr; match value_def { Some(def) => match def { - Definition::Object(obj) => match &obj.ty.kind { + Definition::Object(obj, _) => match &obj.ty.kind { kclvm_sema::ty::TypeKind::Schema(schema_type) => { return find_attr_in_schema( &schema_type, @@ -190,7 +196,7 @@ pub(crate) fn find_def( } _ => {} }, - Definition::Scope(_) => {} + Definition::Scope(_, _) => {} }, None => { if let Some(inner_most_scope) = prog_scope.inner_most_scope(kcl_pos) { @@ -239,16 +245,16 @@ pub(crate) fn resolve_var( kclvm_sema::ty::TypeKind::Module(module_ty) => match module_ty.kind { kclvm_sema::ty::ModuleKind::User => scope_map .get(&pkgpath_without_prefix!(module_ty.pkgpath)) - .map(|scope| Definition::Scope(scope.borrow().clone())), + .map(|scope| Definition::Scope(scope.borrow().clone(), name)), kclvm_sema::ty::ModuleKind::System => { - Some(Definition::Object(obj.borrow().clone())) + Some(Definition::Object(obj.borrow().clone(), name)) } kclvm_sema::ty::ModuleKind::Plugin => None, }, _ => None, } } - _ => Some(Definition::Object(obj.borrow().clone())), + _ => Some(Definition::Object(obj.borrow().clone(), name)), }, None => match builtin_scope().lookup(&name) { Some(obj) => { @@ -263,7 +269,7 @@ pub(crate) fn resolve_var( obj.doc = doc; obj.start = node_names[0].get_pos(); obj.end = node_names[0].get_end_pos(); - Some(Definition::Object(obj)) + Some(Definition::Object(obj, name)) } None => None, }, @@ -296,14 +302,17 @@ pub(crate) fn resolve_var( let ty = get_system_member_function_ty(&name, &func_name); match &ty.kind { kclvm_sema::ty::TypeKind::Function(func_ty) => { - return Some(Definition::Object(ScopeObject { - name: func_name, - start: func_name_node.get_pos(), - end: func_name_node.get_end_pos(), - ty: ty.clone(), - kind: ScopeObjectKind::FunctionCall, - doc: Some(func_ty.doc.clone()), - })) + return Some(Definition::Object( + ScopeObject { + name: func_name.clone(), + start: func_name_node.get_pos(), + end: func_name_node.get_end_pos(), + ty: ty.clone(), + kind: ScopeObjectKind::FunctionCall, + doc: Some(func_ty.doc.clone()), + }, + func_name, + )) } _ => return None, } @@ -341,14 +350,17 @@ pub(crate) fn resolve_var( if let Some(ty) = STRING_MEMBER_FUNCTIONS.get(&func_name) { match &ty.kind { kclvm_sema::ty::TypeKind::Function(func_ty) => { - return Some(Definition::Object(ScopeObject { - name: func_name, - start: func_name_node.get_pos(), - end: func_name_node.get_end_pos(), - ty: Rc::new(ty.clone()), - kind: ScopeObjectKind::FunctionCall, - doc: Some(func_ty.doc.clone()), - })) + return Some(Definition::Object( + ScopeObject { + name: 
func_name.clone(), + start: func_name_node.get_pos(), + end: func_name_node.get_end_pos(), + ty: Rc::new(ty.clone()), + kind: ScopeObjectKind::FunctionCall, + doc: Some(func_ty.doc.clone()), + }, + func_name, + )) } // unreachable _ => {} diff --git a/kclvm/tools/src/LSP/src/hover.rs b/kclvm/tools/src/LSP/src/hover.rs index 5bd8cea5a..e200b3dc1 100644 --- a/kclvm/tools/src/LSP/src/hover.rs +++ b/kclvm/tools/src/LSP/src/hover.rs @@ -19,7 +19,7 @@ pub(crate) fn hover( Some(node) => { let mut docs: Vec = vec![]; if let Some(def) = find_def(node, kcl_pos, prog_scope) { - if let crate::goto_def::Definition::Object(obj) = def { + if let crate::goto_def::Definition::Object(obj, _) = def { match obj.kind { ScopeObjectKind::Definition => { docs.extend(build_schema_hover_content(&obj.ty.into_schema_type())) diff --git a/kclvm/tools/src/LSP/src/lib.rs b/kclvm/tools/src/LSP/src/lib.rs index ce4779dff..f773be282 100644 --- a/kclvm/tools/src/LSP/src/lib.rs +++ b/kclvm/tools/src/LSP/src/lib.rs @@ -5,7 +5,7 @@ mod config; mod db; mod dispatcher; mod document_symbol; -mod find_ref; +mod find_refs; mod formatting; mod from_lsp; mod goto_def; diff --git a/kclvm/tools/src/LSP/src/main.rs b/kclvm/tools/src/LSP/src/main.rs index 168a2307e..62e576694 100644 --- a/kclvm/tools/src/LSP/src/main.rs +++ b/kclvm/tools/src/LSP/src/main.rs @@ -1,6 +1,8 @@ use crate::main_loop::main_loop; use config::Config; use main_loop::app; +use std::collections::HashMap; + mod analysis; mod capabilities; mod completion; @@ -8,6 +10,7 @@ mod config; mod db; mod dispatcher; mod document_symbol; +mod find_refs; mod from_lsp; mod goto_def; mod hover; @@ -77,9 +80,8 @@ fn run_server() -> anyhow::Result<()> { .map_err(|_| anyhow::anyhow!("Initialize result error"))?; connection.initialize_finish(initialize_id, initialize_result)?; - let config = Config::default(); - main_loop(connection, config)?; + main_loop(connection, config, initialize_params)?; io_threads.join()?; Ok(()) } diff --git a/kclvm/tools/src/LSP/src/main_loop.rs b/kclvm/tools/src/LSP/src/main_loop.rs index 4e2efddcb..b0f8b16c1 100644 --- a/kclvm/tools/src/LSP/src/main_loop.rs +++ b/kclvm/tools/src/LSP/src/main_loop.rs @@ -2,11 +2,16 @@ use crate::config::Config; use crate::state::LanguageServerState; use clap::Command; use lsp_server::Connection; +use lsp_types::InitializeParams; #[allow(dead_code)] /// Runs the main loop of the language server. This will receive requests and handle them. 
-pub(crate) fn main_loop(connection: Connection, config: Config) -> anyhow::Result<()> { - LanguageServerState::new(connection.sender, config).run(connection.receiver) +pub(crate) fn main_loop( + connection: Connection, + config: Config, + initialize_params: InitializeParams, +) -> anyhow::Result<()> { + LanguageServerState::new(connection.sender, config, initialize_params).run(connection.receiver) } #[allow(dead_code)] diff --git a/kclvm/tools/src/LSP/src/notification.rs b/kclvm/tools/src/LSP/src/notification.rs index 1098b1b09..a4a279ec3 100644 --- a/kclvm/tools/src/LSP/src/notification.rs +++ b/kclvm/tools/src/LSP/src/notification.rs @@ -1,11 +1,18 @@ use lsp_types::notification::{ - DidChangeTextDocument, DidChangeWatchedFiles, DidCloseTextDocument, DidOpenTextDocument, - DidSaveTextDocument, + DidChangeTextDocument, + DidChangeWatchedFiles, + DidCloseTextDocument, + DidOpenTextDocument, + DidSaveTextDocument, //DidDeleteFiles, DidRenameFiles, DidCreateFiles, //todo more }; +use std::path::Path; use crate::{ - dispatcher::NotificationDispatcher, from_lsp, state::LanguageServerState, + dispatcher::NotificationDispatcher, + from_lsp, + state::LanguageServerState, util::apply_document_changes, + util::{build_word_index_for_file_content, word_index_add, word_index_subtract}, }; impl LanguageServerState { @@ -71,14 +78,46 @@ impl LanguageServerState { let path = from_lsp::abs_path(&text_document.uri)?; self.log_message(format!("on did_change file: {:?}", path)); + // update vfs let vfs = &mut *self.vfs.write(); let file_id = vfs .file_id(&path.clone().into()) .ok_or(anyhow::anyhow!("Already checked that the file_id exists!"))?; let mut text = String::from_utf8(vfs.file_contents(file_id).to_vec())?; + let old_text = text.clone(); apply_document_changes(&mut text, content_changes); - vfs.set_file_contents(path.into(), Some(text.into_bytes())); + vfs.set_file_contents(path.into(), Some(text.clone().into_bytes())); + + // update word index + let old_word_index = build_word_index_for_file_content(old_text, &text_document.uri); + let new_word_index = build_word_index_for_file_content(text.clone(), &text_document.uri); + let binding = text_document.uri.path(); + let file_path = Path::new(binding); //todo rename + for (key, value) in &mut self.word_index_map { + let workspace_folder_path = Path::new(key.path()); + if file_path.starts_with(workspace_folder_path) { + word_index_subtract(value, old_word_index.clone()); + word_index_add(value, new_word_index.clone()); + } + } + // let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")); + // let url = lsp_types::Url::from_file_path(root.clone()).unwrap(); + // let mm = self.word_index_map.get(&url).unwrap(); + // println!("word_index_map: {:?}", mm); + + // let file = from_lsp::file_path_from_url(&text_document.uri)?; + // let old_word_index = build_word_index_for_file_content(old_text, &text_document.uri); + // let new_word_index = build_word_index_for_file_content(text.clone(), &text_document.uri); + + // let file_path = Path::new(&text_document.uri.path()); + // for (key, mut value) in &self.word_index_map { + // let workspace_folder_path = Path::new(key.path()); + // if file_path.starts_with(workspace_folder_path) { + // value = &word_index_subtract(value, old_word_index.clone()); + // value = &word_index_add(value, new_word_index.clone()); + // } + // } Ok(()) } diff --git a/kclvm/tools/src/LSP/src/request.rs b/kclvm/tools/src/LSP/src/request.rs index f1a12d03a..4c24e3dfa 100644 --- a/kclvm/tools/src/LSP/src/request.rs +++ 
b/kclvm/tools/src/LSP/src/request.rs @@ -1,6 +1,7 @@ use anyhow::Ok; use crossbeam_channel::Sender; -use lsp_types::TextEdit; +use kclvm_ast::ast::Stmt; +use lsp_types::{Location, TextEdit}; use ra_ap_vfs::VfsPath; use std::time::Instant; @@ -9,9 +10,10 @@ use crate::{ db::AnalysisDatabase, dispatcher::RequestDispatcher, document_symbol::document_symbol, + find_refs::find_refs, formatting::format, from_lsp::{self, file_path_from_url, kcl_pos}, - goto_def::goto_definition, + goto_def::{find_def, goto_definition}, hover, quick_fix, state::{log_message, LanguageServerSnapshot, LanguageServerState, Task}, }; @@ -43,6 +45,7 @@ impl LanguageServerState { Ok(()) })? .on::<lsp_types::request::GotoDefinition>(handle_goto_definition)? + .on::<lsp_types::request::References>(handle_reference)? .on::<lsp_types::request::Completion>(handle_completion)? .on::<lsp_types::request::HoverRequest>(handle_hover)? .on::<lsp_types::request::DocumentSymbolRequest>(handle_document_symbol)? @@ -132,6 +135,57 @@ pub(crate) fn handle_goto_definition( Ok(res) } +/// Called when a `FindReferences` request was received +pub(crate) fn handle_reference( + snapshot: LanguageServerSnapshot, + params: lsp_types::ReferenceParams, + sender: Sender<Task>, +) -> anyhow::Result<Option<Vec<Location>>> { + // 1. find definition of current token + let file = file_path_from_url(&params.text_document_position.text_document.uri)?; + let path = from_lsp::abs_path(&params.text_document_position.text_document.uri)?; + let db = snapshot.get_db(&path.clone().into())?; + let pos = kcl_pos(&file, params.text_document_position.position); + let word_index_map = snapshot.word_index_map.clone(); + + let log = |msg: String| log_message(msg, &sender); + + if let Some(def_resp) = goto_definition(&db.prog, &pos, &db.scope) { + match def_resp { + lsp_types::GotoDefinitionResponse::Scalar(def_loc) => { + // get the def location + if let Some(def_name) = match db.prog.pos_to_stmt(&pos) { + Some(node) => match node.node { + Stmt::Import(_) => None, + _ => match find_def(node.clone(), &pos, &db.scope) { + Some(def) => Some(def.get_name()), + None => None, + }, + }, + None => None, + } { + return find_refs( + Some(snapshot.vfs), + word_index_map, + def_loc, + def_name, + file, + log, + ); + } + } + _ => return Ok(None), + } + } else { + log_message( + "Definition item not found, result in no reference".to_string(), + &sender, + )?; + } + + return Ok(None); +} + /// Called when a `Completion` request was received.
pub(crate) fn handle_completion( snapshot: LanguageServerSnapshot, diff --git a/kclvm/tools/src/LSP/src/state.rs b/kclvm/tools/src/LSP/src/state.rs index a898a7d63..5dd8e0b62 100644 --- a/kclvm/tools/src/LSP/src/state.rs +++ b/kclvm/tools/src/LSP/src/state.rs @@ -2,13 +2,14 @@ use crate::analysis::Analysis; use crate::config::Config; use crate::db::AnalysisDatabase; use crate::to_lsp::{kcl_diag_to_lsp_diags, url}; -use crate::util::{get_file_name, parse_param_and_compile, to_json, Param}; +use crate::util::{build_word_index, get_file_name, parse_param_and_compile, to_json, Param}; use crossbeam_channel::{select, unbounded, Receiver, Sender}; use indexmap::IndexSet; use lsp_server::{ReqQueue, Response}; +use lsp_types::Url; use lsp_types::{ notification::{Notification, PublishDiagnostics}, - Diagnostic, PublishDiagnosticsParams, + Diagnostic, InitializeParams, Location, PublishDiagnosticsParams, }; use parking_lot::RwLock; use ra_ap_vfs::{FileId, Vfs}; @@ -67,6 +68,9 @@ pub(crate) struct LanguageServerState { /// The VFS loader pub vfs_handle: Box<dyn ra_ap_vfs::loader::Handle>, + + /// The word index map + pub word_index_map: HashMap<Url, HashMap<String, Vec<Location>>>, } /// A snapshot of the state of the language server @@ -78,11 +82,17 @@ pub(crate) struct LanguageServerSnapshot { pub db: HashMap<FileId, AnalysisDatabase>, /// Documents that are currently kept in memory from the client pub opened_files: IndexSet<FileId>, + /// The word index map + pub word_index_map: HashMap<Url, HashMap<String, Vec<Location>>>, } #[allow(unused)] impl LanguageServerState { - pub fn new(sender: Sender<lsp_server::Message>, config: Config) -> Self { + pub fn new( + sender: Sender<lsp_server::Message>, + config: Config, + initialize_params: InitializeParams, + ) -> Self { let (task_sender, task_receiver) = unbounded::<Task>(); let (vfs_sender, receiver) = unbounded::<ra_ap_vfs::loader::Message>(); @@ -90,6 +100,22 @@ impl LanguageServerState { ra_ap_vfs::loader::Handle::spawn(Box::new(move |msg| vfs_sender.send(msg).unwrap())); let handle = Box::new(handle) as Box<dyn ra_ap_vfs::loader::Handle>; + // build word index for all the workspace folders + // todo: async + let mut word_index_map = HashMap::new(); + if let Some(workspace_folders) = initialize_params.workspace_folders { + for folder in workspace_folders { + let path = folder.uri.path(); + if let Ok(word_index) = build_word_index(path.to_string()) { + word_index_map.insert(folder.uri, word_index); + } + } + } else if let Some(root_uri) = initialize_params.root_uri { + let path = root_uri.path(); + if let Ok(word_index) = build_word_index(path.to_string()) { + word_index_map.insert(root_uri, word_index); + } + } LanguageServerState { sender, request_queue: ReqQueue::default(), @@ -102,6 +128,7 @@ impl LanguageServerState { analysis: Analysis::default(), opened_files: IndexSet::new(), vfs_handle: handle, + word_index_map, } } @@ -245,6 +272,7 @@ impl LanguageServerState { vfs: self.vfs.clone(), db: self.analysis.db.clone(), opened_files: self.opened_files.clone(), + word_index_map: self.word_index_map.clone(), } } diff --git a/kclvm/tools/src/LSP/src/test_data/find_refs_test/kcl.mod b/kclvm/tools/src/LSP/src/test_data/find_refs_test/kcl.mod new file mode 100644 index 000000000..e69de29bb diff --git a/kclvm/tools/src/LSP/src/test_data/find_refs_test/main.k b/kclvm/tools/src/LSP/src/test_data/find_refs_test/main.k new file mode 100644 index 000000000..496868958 --- /dev/null +++ b/kclvm/tools/src/LSP/src/test_data/find_refs_test/main.k @@ -0,0 +1,15 @@ +a = "demo" +b = a +c = a + +schema Name: + name: str + +schema Person: + n: Name + +p2 = Person { + n: Name{ + name: a + } +} \ No newline at end of file diff --git a/kclvm/tools/src/LSP/src/tests.rs b/kclvm/tools/src/LSP/src/tests.rs index
dca11158d..c2979ac11 100644 --- a/kclvm/tools/src/LSP/src/tests.rs +++ b/kclvm/tools/src/LSP/src/tests.rs @@ -15,12 +15,18 @@ use lsp_types::GotoDefinitionResponse; use lsp_types::Hover; use lsp_types::HoverContents; use lsp_types::HoverParams; +use lsp_types::InitializeParams; use lsp_types::MarkedString; use lsp_types::PublishDiagnosticsParams; +use lsp_types::ReferenceContext; +use lsp_types::ReferenceParams; use lsp_types::TextDocumentIdentifier; use lsp_types::TextDocumentItem; use lsp_types::TextDocumentPositionParams; use lsp_types::TextEdit; +use lsp_types::Url; +use lsp_types::WorkspaceFolder; + use serde::Serialize; use std::cell::Cell; use std::cell::RefCell; @@ -41,7 +47,6 @@ use lsp_types::DiagnosticRelatedInformation; use lsp_types::DiagnosticSeverity; use lsp_types::Location; use lsp_types::NumberOrString; -use lsp_types::Url; use lsp_types::{Position, Range, TextDocumentContentChangeEvent}; use parking_lot::RwLock; use proc_macro_crate::bench_test; @@ -413,9 +418,9 @@ pub struct Project {} impl Project { /// Instantiates a language server for this project. - pub fn server(self) -> Server { + pub fn server(self, initialize_params: InitializeParams) -> Server { let config = Config::default(); - Server::new(config) + Server::new(config, initialize_params) } } @@ -430,11 +435,11 @@ pub struct Server { impl Server { /// Constructs and initializes a new `Server` - pub fn new(config: Config) -> Self { + pub fn new(config: Config, initialize_params: InitializeParams) -> Self { let (connection, client) = Connection::memory(); let worker = std::thread::spawn(move || { - main_loop(connection, config).unwrap(); + main_loop(connection, config, initialize_params).unwrap(); }); Self { @@ -537,7 +542,7 @@ fn notification_test() { let path = path.to_str().unwrap(); let src = std::fs::read_to_string(path.clone()).unwrap(); - let server = Project {}.server(); + let server = Project {}.server(InitializeParams::default()); // Mock open file server.notification::( @@ -586,7 +591,7 @@ fn goto_def_test() { let path = path.to_str().unwrap(); let src = std::fs::read_to_string(path.clone()).unwrap(); - let server = Project {}.server(); + let server = Project {}.server(InitializeParams::default()); // Mock open file server.notification::( @@ -643,7 +648,7 @@ fn complete_test() { let path = path.to_str().unwrap(); let src = std::fs::read_to_string(path.clone()).unwrap(); - let server = Project {}.server(); + let server = Project {}.server(InitializeParams::default()); // Mock open file server.notification::( @@ -707,7 +712,7 @@ fn hover_test() { let path = path.to_str().unwrap(); let src = std::fs::read_to_string(path.clone()).unwrap(); - let server = Project {}.server(); + let server = Project {}.server(InitializeParams::default()); // Mock open file server.notification::( @@ -766,7 +771,7 @@ fn hover_assign_in_lambda_test() { let path = path.to_str().unwrap(); let src = std::fs::read_to_string(path.clone()).unwrap(); - let server = Project {}.server(); + let server = Project {}.server(InitializeParams::default()); // Mock open file server.notification::( @@ -819,7 +824,7 @@ fn formatting_test() { let path = path.to_str().unwrap(); let src = std::fs::read_to_string(path.clone()).unwrap(); - let server = Project {}.server(); + let server = Project {}.server(InitializeParams::default()); // Mock open file server.notification::( @@ -1303,3 +1308,186 @@ fn lsp_invalid_subcommand_test() { }, } } + +#[test] +fn find_refs_test() { + let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")); + let mut path = 
root.clone(); + path.push("src/test_data/find_refs_test/main.k"); + + let path = path.to_str().unwrap(); + let src = std::fs::read_to_string(path.clone()).unwrap(); + let mut initialize_params = InitializeParams::default(); + initialize_params.workspace_folders = Some(vec![WorkspaceFolder { + uri: Url::from_file_path(root.clone()).unwrap(), + name: "test".to_string(), + }]); + let server = Project {}.server(initialize_params); + let url = Url::from_file_path(path).unwrap(); + + // Mock open file + server.notification::( + lsp_types::DidOpenTextDocumentParams { + text_document: TextDocumentItem { + uri: url.clone(), + language_id: "KCL".to_string(), + version: 0, + text: src, + }, + }, + ); + + let id = server.next_request_id.get(); + server.next_request_id.set(id.wrapping_add(1)); + + let r: Request = Request::new( + id.into(), + "textDocument/references".to_string(), + ReferenceParams { + text_document_position: TextDocumentPositionParams { + text_document: TextDocumentIdentifier { uri: url.clone() }, + position: Position::new(0, 1), + }, + work_done_progress_params: Default::default(), + partial_result_params: Default::default(), + context: ReferenceContext { + include_declaration: true, + }, + }, + ); + + // Send request and wait for it's response + let res = server.send_and_receive(r); + + assert_eq!( + res.result.unwrap(), + to_json(vec![ + Location { + uri: url.clone(), + range: Range { + start: Position::new(0, 0), + end: Position::new(0, 1), + }, + }, + Location { + uri: url.clone(), + range: Range { + start: Position::new(1, 4), + end: Position::new(1, 5), + }, + }, + Location { + uri: url.clone(), + range: Range { + start: Position::new(2, 4), + end: Position::new(2, 5), + }, + }, + Location { + uri: url.clone(), + range: Range { + start: Position::new(12, 14), + end: Position::new(12, 15), + }, + }, + ]) + .unwrap() + ); +} + +#[test] +fn find_refs_with_file_change_test() { + let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")); + let mut path = root.clone(); + path.push("src/test_data/find_refs_test/main.k"); + + let path = path.to_str().unwrap(); + let src = std::fs::read_to_string(path.clone()).unwrap(); + let mut initialize_params = InitializeParams::default(); + initialize_params.workspace_folders = Some(vec![WorkspaceFolder { + uri: Url::from_file_path(root.clone()).unwrap(), + name: "test".to_string(), + }]); + let server = Project {}.server(initialize_params); + let url = Url::from_file_path(path).unwrap(); + + // Mock open file + server.notification::( + lsp_types::DidOpenTextDocumentParams { + text_document: TextDocumentItem { + uri: url.clone(), + language_id: "KCL".to_string(), + version: 0, + text: src, + }, + }, + ); + // Mock change file content + server.notification::( + lsp_types::DidChangeTextDocumentParams { + text_document: lsp_types::VersionedTextDocumentIdentifier { + uri: url.clone(), + version: 1, + }, + content_changes: vec![lsp_types::TextDocumentContentChangeEvent { + range: None, + range_length: None, + text: r#"a = "demo" + +schema Name: + name: str + +schema Person: + n: Name + +p2 = Person { + n: Name{ + name: a + } +}"# + .to_string(), + }], + }, + ); + let id = server.next_request_id.get(); + server.next_request_id.set(id.wrapping_add(1)); + // Mock trigger find references + let r: Request = Request::new( + id.into(), + "textDocument/references".to_string(), + ReferenceParams { + text_document_position: TextDocumentPositionParams { + text_document: TextDocumentIdentifier { uri: url.clone() }, + position: Position::new(0, 1), + }, + 
work_done_progress_params: Default::default(), + partial_result_params: Default::default(), + context: ReferenceContext { + include_declaration: true, + }, + }, + ); + + // Send request and wait for its response + let res = server.send_and_receive(r); + assert_eq!( + res.result.unwrap(), + to_json(vec![ + Location { + uri: url.clone(), + range: Range { + start: Position::new(0, 0), + end: Position::new(0, 1), + }, + }, + Location { + uri: url.clone(), + range: Range { + start: Position::new(10, 14), + end: Position::new(10, 15), + }, + }, + ]) + .unwrap() + ); +} diff --git a/kclvm/tools/src/LSP/src/util.rs b/kclvm/tools/src/LSP/src/util.rs index e179d53a6..3a8535450 100644 --- a/kclvm/tools/src/LSP/src/util.rs +++ b/kclvm/tools/src/LSP/src/util.rs @@ -16,11 +16,12 @@ use kclvm_sema::resolver::resolve_program_with_opts; use kclvm_sema::resolver::scope::ProgramScope; use kclvm_sema::resolver::scope::Scope; use kclvm_utils::pkgpath::rm_external_pkg_name; -use lsp_types::Url; +use lsp_types::{Location, Position, Range, Url}; use parking_lot::{RwLock, RwLockReadGuard}; use ra_ap_vfs::{FileId, Vfs}; use serde::{de::DeserializeOwned, Serialize}; use std::cell::RefCell; +use std::collections::HashMap; use std::path::{Path, PathBuf}; use std::rc::Rc; use std::{fs, sync::Arc}; @@ -744,3 +745,491 @@ pub(crate) fn get_pkg_scope( .borrow() .clone() } + +/// scan and build a word -> Locations index map +pub fn build_word_index(path: String) -> anyhow::Result<HashMap<String, Vec<Location>>> { + let mut index: HashMap<String, Vec<Location>> = HashMap::new(); + if let Ok(files) = get_kcl_files(path.clone(), true) { + for file_path in &files { + // str path to url + if let Ok(url) = Url::from_file_path(file_path) { + // read file content and save the word to word index + let text = read_file(file_path)?; + for (key, values) in build_word_index_for_file_content(text, &url) { + index.entry(key).or_insert_with(Vec::new).extend(values); + } + } + } + } + return Ok(index); +} + +pub fn build_word_index_for_file_content( + content: String, + url: &Url, +) -> HashMap<String, Vec<Location>> { + let mut index: HashMap<String, Vec<Location>> = HashMap::new(); + let lines: Vec<&str> = content.lines().collect(); + for (li, line) in lines.into_iter().enumerate() { + let words = line_to_words(line.to_string()); + for (key, values) in words { + index + .entry(key) + .or_insert_with(Vec::new) + .extend(values.iter().map(|w| Location { + uri: url.clone(), + range: Range { + start: Position::new(li as u32, w.start_col), + end: Position::new(li as u32, w.end_col), + }, + })); + } + } + index +} + +pub fn word_index_add( + from: &mut HashMap<String, Vec<Location>>, + add: HashMap<String, Vec<Location>>, +) { + for (key, value) in add { + from.entry(key).or_insert_with(Vec::new).extend(value); + } +} + +pub fn word_index_subtract( + from: &mut HashMap<String, Vec<Location>>, + remove: HashMap<String, Vec<Location>>, +) { + for (key, value) in remove { + for v in value { + from.entry(key.clone()).and_modify(|locations| { + locations.retain(|loc| loc != &v); + }); + } + } +} + +// Word describes an arbitrary word in a certain line including +// start position, end position and the word itself. +#[derive(Debug, PartialEq, Eq, Clone)] +pub struct Word { + start_col: u32, + end_col: u32, + word: String, +} + +impl Word { + fn new(start_col: u32, end_col: u32, word: String) -> Self { + Self { + start_col, + end_col, + word, + } + } +} + +fn read_file(path: &String) -> anyhow::Result<String> { + let text = std::fs::read_to_string(path)?; + Ok(text) +} + +// Split one line into identifier words.
+fn line_to_words(text: String) -> HashMap<String, Vec<Word>> { + let mut result = HashMap::new(); + let mut chars: Vec<char> = text.chars().collect(); + chars.push('\n'); + let mut start_pos = usize::MAX; + let mut continue_pos = usize::MAX - 1; // avoid overflow + let mut prev_word = false; + let mut words: Vec<Word> = vec![]; + for (i, ch) in chars.iter().enumerate() { + let is_id_start = rustc_lexer::is_id_start(*ch); + let is_id_continue = rustc_lexer::is_id_continue(*ch); + // If the character is a valid identifier start and the previous character is not a valid identifier continue, mark the start position. + if is_id_start && !prev_word { + start_pos = i; + } + if is_id_continue { + // Continue searching for the end position. + if start_pos != usize::MAX { + continue_pos = i; + } + } else { + // Find out the end position. + if continue_pos + 1 == i { + words.push(Word::new( + start_pos as u32, + i as u32, + chars[start_pos..i].iter().collect::<String>().clone(), + )); + } + // Reset the start position. + start_pos = usize::MAX; + } + prev_word = is_id_continue; + } + + for w in words { + result.entry(w.word.clone()).or_insert(Vec::new()).push(w); + } + result +} + +#[cfg(test)] +mod tests { + use super::{build_word_index, line_to_words, word_index_add, word_index_subtract, Word}; + use lsp_types::{Location, Position, Range, Url}; + use std::collections::HashMap; + use std::path::PathBuf; + #[test] + fn test_build_word_index() { + let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")); + let mut path = root.clone(); + path.push("src/test_data/find_refs_test/main.k"); + + let url = lsp_types::Url::from_file_path(path.clone()).unwrap(); + let path = path.to_str().unwrap(); + let expect: HashMap<String, Vec<Location>> = vec![ + ( + "a".to_string(), + vec![ + Location { + uri: url.clone(), + range: Range { + start: Position::new(0, 0), + end: Position::new(0, 1), + }, + }, + Location { + uri: url.clone(), + range: Range { + start: Position::new(1, 4), + end: Position::new(1, 5), + }, + }, + Location { + uri: url.clone(), + range: Range { + start: Position::new(2, 4), + end: Position::new(2, 5), + }, + }, + Location { + uri: url.clone(), + range: Range { + start: Position::new(12, 14), + end: Position::new(12, 15), + }, + }, + ], + ), + ( + "c".to_string(), + vec![Location { + uri: url.clone(), + range: Range { + start: Position::new(2, 0), + end: Position::new(2, 1), + }, + }], + ), + ( + "b".to_string(), + vec![Location { + uri: url.clone(), + range: Range { + start: Position::new(1, 0), + end: Position::new(1, 1), + }, + }], + ), + ( + "n".to_string(), + vec![ + Location { + uri: url.clone(), + range: Range { + start: Position::new(8, 4), + end: Position::new(8, 5), + }, + }, + Location { + uri: url.clone(), + range: Range { + start: Position::new(11, 4), + end: Position::new(11, 5), + }, + }, + ], + ), + ( + "schema".to_string(), + vec![ + Location { + uri: url.clone(), + range: Range { + start: Position::new(4, 0), + end: Position::new(4, 6), + }, + }, + Location { + uri: url.clone(), + range: Range { + start: Position::new(7, 0), + end: Position::new(7, 6), + }, + }, + ], + ), + ( + "b".to_string(), + vec![Location { + uri: url.clone(), + range: Range { + start: Position::new(1, 0), + end: Position::new(1, 1), + }, + }], + ), + ( + "Name".to_string(), + vec![ + Location { + uri: url.clone(), + range: Range { + start: Position::new(4, 7), + end: Position::new(4, 11), + }, + }, + Location { + uri: url.clone(), + range: Range { + start: Position::new(8, 7), + end: Position::new(8, 11), + }, + }, + Location { + uri: url.clone(), + range: Range { +
start: Position::new(11, 7), + end: Position::new(11, 11), + }, + }, + ], + ), + ( + "name".to_string(), + vec![ + Location { + uri: url.clone(), + range: Range { + start: Position::new(5, 4), + end: Position::new(5, 8), + }, + }, + Location { + uri: url.clone(), + range: Range { + start: Position::new(12, 8), + end: Position::new(12, 12), + }, + }, + ], + ), + ( + "demo".to_string(), + vec![Location { + uri: url.clone(), + range: Range { + start: Position::new(0, 5), + end: Position::new(0, 9), + }, + }], + ), + ( + "str".to_string(), + vec![Location { + uri: url.clone(), + range: Range { + start: Position::new(5, 10), + end: Position::new(5, 13), + }, + }], + ), + ( + "Person".to_string(), + vec![ + Location { + uri: url.clone(), + range: Range { + start: Position::new(7, 7), + end: Position::new(7, 13), + }, + }, + Location { + uri: url.clone(), + range: Range { + start: Position::new(10, 5), + end: Position::new(10, 11), + }, + }, + ], + ), + ( + "p2".to_string(), + vec![Location { + uri: url.clone(), + range: Range { + start: Position::new(10, 0), + end: Position::new(10, 2), + }, + }], + ), + ] + .into_iter() + .collect(); + match build_word_index(path.to_string()) { + Ok(actual) => { + assert_eq!(expect, actual) + } + Err(_) => assert!(false, "build word index failed. expect: {:?}", expect), + } + } + + #[test] + fn test_word_index_add() { + let loc1 = Location { + uri: Url::parse("file:///path/to/file.k").unwrap(), + range: Range { + start: Position::new(0, 0), + end: Position::new(0, 4), + }, + }; + let loc2 = Location { + uri: Url::parse("file:///path/to/file.k").unwrap(), + range: Range { + start: Position::new(1, 0), + end: Position::new(1, 4), + }, + }; + let mut from = HashMap::from([("name".to_string(), vec![loc1.clone()])]); + let add = HashMap::from([("name".to_string(), vec![loc2.clone()])]); + word_index_add(&mut from, add); + assert_eq!( + from, + HashMap::from([("name".to_string(), vec![loc1.clone(), loc2.clone()],)]) + ); + } + + #[test] + fn test_word_index_subtract() { + let loc1 = Location { + uri: Url::parse("file:///path/to/file.k").unwrap(), + range: Range { + start: Position::new(0, 0), + end: Position::new(0, 4), + }, + }; + let loc2 = Location { + uri: Url::parse("file:///path/to/file.k").unwrap(), + range: Range { + start: Position::new(1, 0), + end: Position::new(1, 4), + }, + }; + let mut from = HashMap::from([("name".to_string(), vec![loc1.clone(), loc2.clone()])]); + let remove = HashMap::from([("name".to_string(), vec![loc2.clone()])]); + word_index_subtract(&mut from, remove); + assert_eq!( + from, + HashMap::from([("name".to_string(), vec![loc1.clone()],)]) + ); + } + + #[test] + fn test_line_to_words() { + let lines = ["schema Person:", "name. 
name again", "some_word word !word"]; + + let expects: Vec>> = vec![ + vec![ + ( + "schema".to_string(), + vec![Word { + start_col: 0, + end_col: 6, + word: "schema".to_string(), + }], + ), + ( + "Person".to_string(), + vec![Word { + start_col: 7, + end_col: 13, + word: "Person".to_string(), + }], + ), + ] + .into_iter() + .collect(), + vec![ + ( + "name".to_string(), + vec![ + Word { + start_col: 0, + end_col: 4, + word: "name".to_string(), + }, + Word { + start_col: 6, + end_col: 10, + word: "name".to_string(), + }, + ], + ), + ( + "again".to_string(), + vec![Word { + start_col: 11, + end_col: 16, + word: "again".to_string(), + }], + ), + ] + .into_iter() + .collect(), + vec![ + ( + "some_word".to_string(), + vec![Word { + start_col: 0, + end_col: 9, + word: "some_word".to_string(), + }], + ), + ( + "word".to_string(), + vec![ + Word { + start_col: 10, + end_col: 14, + word: "word".to_string(), + }, + Word { + start_col: 16, + end_col: 20, + word: "word".to_string(), + }, + ], + ), + ] + .into_iter() + .collect(), + ]; + for i in 0..lines.len() { + let got = line_to_words(lines[i].to_string()); + assert_eq!(expects[i], got) + } + } +}