diff --git a/kclvm/Cargo.lock b/kclvm/Cargo.lock
index 396095451..035c684be 100644
--- a/kclvm/Cargo.lock
+++ b/kclvm/Cargo.lock
@@ -1961,9 +1961,9 @@ checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771"
 
 [[package]]
 name = "memchr"
-version = "2.5.0"
+version = "2.6.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
+checksum = "8f232d6ef707e1956a43342693d2a31e72989554d58299d7a88738cc95b0d35c"
 
 [[package]]
 name = "memmap2"
diff --git a/kclvm/parser/src/lib.rs b/kclvm/parser/src/lib.rs
index 0cd42ecb8..2c5fa8bce 100644
--- a/kclvm/parser/src/lib.rs
+++ b/kclvm/parser/src/lib.rs
@@ -313,7 +313,7 @@ impl Loader {
     fn _load_main(&mut self) -> Result {
         let compile_entries = get_compile_entries_from_paths(&self.paths, &self.opts)?;
         let mut pkgs = HashMap::new();
-        let workdir = compile_entries.get_root_path().to_string();
+        let workdir = compile_entries.get_root_path().to_string(); // get package root
 
         debug_assert_eq!(compile_entries.len(), self.paths.len());
 
diff --git a/kclvm/tools/src/LSP/src/completion.rs b/kclvm/tools/src/LSP/src/completion.rs
index c727062c2..5bde3dffa 100644
--- a/kclvm/tools/src/LSP/src/completion.rs
+++ b/kclvm/tools/src/LSP/src/completion.rs
@@ -83,7 +83,7 @@ fn completion_attr(
     if let Some((node, schema_expr)) = is_in_schema(program, pos) {
         let schema_def = find_def(node, &schema_expr.name.get_end_pos(), prog_scope);
         if let Some(schema) = schema_def {
-            if let Definition::Object(obj) = schema {
+            if let Definition::Object(obj, _) = schema {
                 let schema_type = obj.ty.into_schema_type();
                 completions.extend(schema_type.attrs.keys().map(|attr| KCLCompletionItem {
                     label: attr.clone(),
@@ -171,7 +171,7 @@ pub(crate) fn get_completion(
             let def = find_def(stmt, pos, prog_scope);
             if let Some(def) = def {
                 match def {
-                    crate::goto_def::Definition::Object(obj) => {
+                    crate::goto_def::Definition::Object(obj, _) => {
                         match &obj.ty.kind {
                             // builtin (str) functions
                             kclvm_sema::ty::TypeKind::Str => {
@@ -219,7 +219,7 @@ pub(crate) fn get_completion(
                             _ => {}
                         }
                     }
-                    crate::goto_def::Definition::Scope(s) => {
+                    crate::goto_def::Definition::Scope(s, _) => {
                         for (name, obj) in &s.elems {
                             if let ScopeObjectKind::Module(_) = obj.borrow().kind {
                                 continue;
@@ -252,7 +252,7 @@
                         find_def(stmt, &schema_expr.name.get_end_pos(), prog_scope);
                     if let Some(schema) = schema_def {
                         match schema {
-                            Definition::Object(obj) => {
+                            Definition::Object(obj, _) => {
                                 let schema_type = obj.ty.into_schema_type();
                                 items.extend(
                                     schema_type
@@ -264,7 +264,7 @@
                                         .collect::>(),
                                 );
                             }
-                            Definition::Scope(_) => {}
+                            Definition::Scope(_, _) => {}
                         }
                     }
                 }
diff --git a/kclvm/tools/src/LSP/src/find_ref/find_refs.rs b/kclvm/tools/src/LSP/src/find_ref/find_refs.rs
deleted file mode 100644
index f3926093c..000000000
--- a/kclvm/tools/src/LSP/src/find_ref/find_refs.rs
+++ /dev/null
@@ -1,28 +0,0 @@
-use crate::find_ref;
-use crate::find_ref::go_to_def::go_to_def;
-use kclvm_error::Position;
-
-/// Find all references of the item at the cursor location.
-pub fn find_refs(path: String, pos: Position) -> Vec { - let declaration = go_to_def(pos.clone()); - let search = { - move |decl: Position| { - let name = find_ref::word_at_pos(pos); - if name.is_none() { - return vec![]; - } - // Get identifiers with same name - let candidates = find_ref::match_word(path, name.unwrap()); - // Check if the definition of candidate and declartion are the same - let refs: Vec = candidates - .into_iter() - .filter(|x| go_to_def(x.clone()).as_ref() == Some(&decl)) - .collect(); - refs - } - }; - match declaration { - Some(decl) => search(decl), - None => Vec::new(), - } -} diff --git a/kclvm/tools/src/LSP/src/find_ref/go_to_def.rs b/kclvm/tools/src/LSP/src/find_ref/go_to_def.rs deleted file mode 100644 index 95b64b7eb..000000000 --- a/kclvm/tools/src/LSP/src/find_ref/go_to_def.rs +++ /dev/null @@ -1,6 +0,0 @@ -use kclvm_error::Position; - -/// Get the definition of an identifier. -pub fn go_to_def(pos: Position) -> Option { - Some(pos) -} diff --git a/kclvm/tools/src/LSP/src/find_ref/mod.rs b/kclvm/tools/src/LSP/src/find_ref/mod.rs deleted file mode 100644 index 86fe83f9a..000000000 --- a/kclvm/tools/src/LSP/src/find_ref/mod.rs +++ /dev/null @@ -1,134 +0,0 @@ -#![allow(dead_code)] - -use anyhow::Result; -use kclvm_driver::get_kcl_files; -use kclvm_error::Position; - -mod find_refs; -mod go_to_def; -mod word_map; - -#[cfg(test)] -mod tests; - -// LineWord describes an arbitrary word in a certain line including -// start position, end position and the word itself. -#[derive(Debug, PartialEq, Eq, Clone)] -pub struct LineWord { - startpos: u64, - endpos: u64, - word: String, -} - -// Get the word of the position. -pub fn word_at_pos(pos: Position) -> Option { - let text = read_file(&pos.filename); - if text.is_err() { - return None; - } - let text = text.unwrap(); - let lines: Vec<&str> = text.lines().collect(); - if pos.line >= lines.len() as u64 { - return None; - } - pos.column?; - let col = pos.column.unwrap(); - let line_words = line_to_words(lines[pos.line as usize].to_string()); - if line_words.is_empty() - || col < line_words.first().unwrap().startpos - || col >= line_words.last().unwrap().endpos - { - return None; - } - for line_word in line_words.into_iter() { - if col >= line_word.startpos && col < line_word.endpos { - return Some(line_word.word); - } - } - None -} - -pub fn read_file(path: &String) -> Result { - let text = std::fs::read_to_string(path)?; - Ok(text) -} - -// Split one line into identifier words. -pub fn line_to_words(text: String) -> Vec { - let mut chars: Vec = text.chars().collect(); - chars.push('\n'); - let mut start_pos = usize::MAX; - let mut continue_pos = usize::MAX - 1; // avoid overflow - let mut prev_word = false; - let mut words: Vec = vec![]; - for (i, ch) in chars.iter().enumerate() { - let is_id_start = rustc_lexer::is_id_start(*ch); - let is_id_continue = rustc_lexer::is_id_continue(*ch); - // If the character is valid identfier start and the previous character is not valid identifier continue, mark the start position. - if is_id_start && !prev_word { - start_pos = i; - } - match is_id_continue { - true => { - // Continue searching for the end position. - if start_pos != usize::MAX { - continue_pos = i; - } - } - false => { - // Find out the end position. - if continue_pos + 1 == i { - words.push(LineWord { - startpos: start_pos as u64, - endpos: i as u64, - word: chars[start_pos..i].iter().collect::().clone(), - }); - } - // Reset the start position. 
- start_pos = usize::MAX; - } - } - prev_word = is_id_continue; - } - words -} - -// Get all occurrences of the word in the entire path. -pub fn match_word(path: String, name: String) -> Vec { - let mut res = vec![]; - if let Ok(files) = get_kcl_files(path, true) { - // Searching in all files. - for file in files.into_iter() { - let text = read_file(&file); - if text.is_err() { - continue; - } - let text = text.unwrap(); - let lines: Vec<&str> = text.lines().collect(); - for (li, line) in lines.into_iter().enumerate() { - // Get the matching results for each line. - let matched: Vec = line_to_words(line.to_string()) - .into_iter() - .filter(|x| x.word == name) - .map(|x| Position { - filename: file.clone(), - line: li as u64, - column: Some(x.startpos), - }) - .collect(); - res.extend(matched); - } - } - } - res -} - -// Convert pos format -// The position in lsp protocol is different with position in ast node whose line number is 1 based. -pub fn kcl_pos_to_lsp_pos(pos: Position) -> Position { - Position { - filename: pos.filename, - line: pos.line - 1, - column: pos.column, - } -} diff --git a/kclvm/tools/src/LSP/src/find_ref/test_data/inherit.k b/kclvm/tools/src/LSP/src/find_ref/test_data/inherit.k deleted file mode 100644 index a1bef7126..000000000 --- a/kclvm/tools/src/LSP/src/find_ref/test_data/inherit.k +++ /dev/null @@ -1,10 +0,0 @@ -schema Parent: - name: str - -schema Son(Parent): - age: int - son_name: str = name - -schema GrandSon(Son): - grand_son_name: str = name - a: str = name_not_exist diff --git a/kclvm/tools/src/LSP/src/find_ref/test_data/inherit_pkg.k b/kclvm/tools/src/LSP/src/find_ref/test_data/inherit_pkg.k deleted file mode 100644 index 39cdf9b1d..000000000 --- a/kclvm/tools/src/LSP/src/find_ref/test_data/inherit_pkg.k +++ /dev/null @@ -1,4 +0,0 @@ -import pkg - -schema Son(pkg.Parent): - son_name: str = name diff --git a/kclvm/tools/src/LSP/src/find_ref/test_data/test_word_workspace/inherit.k b/kclvm/tools/src/LSP/src/find_ref/test_data/test_word_workspace/inherit.k deleted file mode 100644 index a1bef7126..000000000 --- a/kclvm/tools/src/LSP/src/find_ref/test_data/test_word_workspace/inherit.k +++ /dev/null @@ -1,10 +0,0 @@ -schema Parent: - name: str - -schema Son(Parent): - age: int - son_name: str = name - -schema GrandSon(Son): - grand_son_name: str = name - a: str = name_not_exist diff --git a/kclvm/tools/src/LSP/src/find_ref/test_data/test_word_workspace/inherit_pkg.k b/kclvm/tools/src/LSP/src/find_ref/test_data/test_word_workspace/inherit_pkg.k deleted file mode 100644 index 39cdf9b1d..000000000 --- a/kclvm/tools/src/LSP/src/find_ref/test_data/test_word_workspace/inherit_pkg.k +++ /dev/null @@ -1,4 +0,0 @@ -import pkg - -schema Son(pkg.Parent): - son_name: str = name diff --git a/kclvm/tools/src/LSP/src/find_ref/test_data/test_word_workspace_map/inherit.k b/kclvm/tools/src/LSP/src/find_ref/test_data/test_word_workspace_map/inherit.k deleted file mode 100644 index a1bef7126..000000000 --- a/kclvm/tools/src/LSP/src/find_ref/test_data/test_word_workspace_map/inherit.k +++ /dev/null @@ -1,10 +0,0 @@ -schema Parent: - name: str - -schema Son(Parent): - age: int - son_name: str = name - -schema GrandSon(Son): - grand_son_name: str = name - a: str = name_not_exist diff --git a/kclvm/tools/src/LSP/src/find_ref/test_data/test_word_workspace_map/inherit_pkg.k b/kclvm/tools/src/LSP/src/find_ref/test_data/test_word_workspace_map/inherit_pkg.k deleted file mode 100644 index 39cdf9b1d..000000000 --- 
a/kclvm/tools/src/LSP/src/find_ref/test_data/test_word_workspace_map/inherit_pkg.k +++ /dev/null @@ -1,4 +0,0 @@ -import pkg - -schema Son(pkg.Parent): - son_name: str = name diff --git a/kclvm/tools/src/LSP/src/find_ref/tests.rs b/kclvm/tools/src/LSP/src/find_ref/tests.rs deleted file mode 100644 index d27969350..000000000 --- a/kclvm/tools/src/LSP/src/find_ref/tests.rs +++ /dev/null @@ -1,252 +0,0 @@ -use crate::find_ref; -use crate::find_ref::LineWord; -use kclvm_error::Position; - -#[cfg(test)] -mod tests { - use super::*; - use std::fs; - use std::path::Path; - use std::{collections::HashMap, hash::Hash}; - - fn check_line_to_words(code: &str, expect: Vec) { - assert_eq!(find_ref::line_to_words(code.to_string()), expect); - } - - fn test_eq_list(a: &[T], b: &[T]) -> bool - where - T: Eq + Hash, - { - fn count(items: &[T]) -> HashMap<&T, usize> - where - T: Eq + Hash, - { - let mut cnt = HashMap::new(); - for i in items { - *cnt.entry(i).or_insert(0) += 1 - } - cnt - } - count(a) == count(b) - } - - #[test] - fn test_line_to_words() { - let datas = [ - "alice_first_name = \"alice\"", - "0lice_first_name = \"alic0\"", - "alice = p.Parent { name: \"alice\" }", - ]; - let expect = vec![ - vec![ - LineWord { - startpos: 0, - endpos: 16, - word: "alice_first_name".to_string(), - }, - LineWord { - startpos: 20, - endpos: 25, - word: "alice".to_string(), - }, - ], - vec![LineWord { - startpos: 20, - endpos: 25, - word: "alic0".to_string(), - }], - vec![ - LineWord { - startpos: 0, - endpos: 5, - word: "alice".to_string(), - }, - LineWord { - startpos: 8, - endpos: 9, - word: "p".to_string(), - }, - LineWord { - startpos: 10, - endpos: 16, - word: "Parent".to_string(), - }, - LineWord { - startpos: 19, - endpos: 23, - word: "name".to_string(), - }, - LineWord { - startpos: 26, - endpos: 31, - word: "alice".to_string(), - }, - ], - ]; - for i in 0..datas.len() { - check_line_to_words(datas[i], expect[i].clone()); - } - } - - #[test] - fn test_word_at_pos() { - // use std::env; - // let parent_path = env::current_dir().unwrap(); - // println!("The current directory is {}", parent_path.display()); - let path_prefix = "./src/find_ref/".to_string(); - let datas = vec![ - Position { - filename: (path_prefix.clone() + "test_data/inherit.k"), - line: 0, - column: Some(0), - }, - Position { - filename: (path_prefix.clone() + "test_data/inherit.k"), - line: 1, - column: Some(5), - }, - Position { - filename: (path_prefix.clone() + "test_data/inherit.k"), - line: 3, - column: Some(7), - }, - Position { - filename: (path_prefix.clone() + "test_data/inherit.k"), - line: 3, - column: Some(10), - }, - Position { - filename: (path_prefix.clone() + "test_data/inherit.k"), - line: 4, - column: Some(8), - }, - Position { - filename: (path_prefix + "test_data/inherit.k"), - line: 4, - column: Some(100), - }, - ]; - let expect = vec![ - Some("schema".to_string()), - Some("name".to_string()), - Some("Son".to_string()), - None, - None, - None, - ]; - for i in 0..datas.len() { - assert_eq!(find_ref::word_at_pos(datas[i].clone()), expect[i]); - } - } - - fn test_word_workspace() -> String { - Path::new(".") - .join("src") - .join("find_ref") - .join("test_data") - .join("test_word_workspace") - .display() - .to_string() - } - - #[test] - fn test_match_word() { - let path = test_word_workspace(); - let datas = vec![String::from("Son")]; - let except = vec![vec![ - Position { - filename: Path::new(&test_word_workspace()) - .join("inherit_pkg.k") - .display() - .to_string(), - line: 2, - column: Some(7), - }, - Position 
{ - filename: Path::new(&test_word_workspace()) - .join("inherit.k") - .display() - .to_string(), - line: 3, - column: Some(7), - }, - Position { - filename: Path::new(&test_word_workspace()) - .join("inherit.k") - .display() - .to_string(), - line: 7, - column: Some(16), - }, - ]]; - for i in 0..datas.len() { - assert!(test_eq_list( - &find_ref::match_word(path.clone(), datas[i].clone()), - &except[i] - )); - } - } - - fn test_word_workspace_map() -> String { - Path::new(".") - .join("src") - .join("find_ref") - .join("test_data") - .join("test_word_workspace_map") - .display() - .to_string() - } - - #[test] - fn test_word_map() { - let path = test_word_workspace_map(); - let mut mp = find_ref::word_map::WorkSpaceWordMap::new(path); - mp.build(); - let _res = fs::rename( - Path::new(&test_word_workspace_map()) - .join("inherit_pkg.k") - .display() - .to_string(), - Path::new(&test_word_workspace_map()) - .join("inherit_bak.k") - .display() - .to_string(), - ); - mp.rename_file( - Path::new(&test_word_workspace_map()) - .join("inherit_pkg.k") - .display() - .to_string(), - Path::new(&test_word_workspace_map()) - .join("inherit_bak.k") - .display() - .to_string(), - ); - mp.delete_file( - Path::new(&test_word_workspace_map()) - .join("inherit.k") - .display() - .to_string(), - ); - let _res = fs::rename( - Path::new(&test_word_workspace_map()) - .join("inherit_bak.k") - .display() - .to_string(), - Path::new(&test_word_workspace_map()) - .join("inherit_pkg.k") - .display() - .to_string(), - ); - - let except = vec![Position { - filename: Path::new(&test_word_workspace_map()) - .join("inherit_bak.k") - .display() - .to_string(), - line: 2, - column: Some(7), - }]; - assert_eq!(mp.get(&String::from("Son")), Some(except)); - } -} diff --git a/kclvm/tools/src/LSP/src/find_ref/word_map.rs b/kclvm/tools/src/LSP/src/find_ref/word_map.rs deleted file mode 100644 index 143bf1708..000000000 --- a/kclvm/tools/src/LSP/src/find_ref/word_map.rs +++ /dev/null @@ -1,123 +0,0 @@ -use crate::find_ref; - -use kclvm_driver::get_kcl_files; -use kclvm_error::Position; -use std::collections::HashMap; - -// Record all occurrences of the name in a file -#[derive(Default)] -pub struct FileWordMap { - file_name: String, - word_map: HashMap>, -} - -impl FileWordMap { - pub fn new(file_name: String) -> Self { - Self { - file_name, - word_map: HashMap::new(), - } - } - - // Clear records - pub fn clear(&mut self) { - self.word_map.clear(); - } - - // insert an occurrence of a name - pub fn insert(&mut self, name: String, pos: Position) { - self.word_map.entry(name).or_default().push(pos); - } - - // build the record map - // if text is missing, it will be read from the file system based on the filename - pub fn build(&mut self, text: Option) { - self.clear(); - let text = text.unwrap_or(find_ref::read_file(&self.file_name).unwrap()); - let lines: Vec<&str> = text.lines().collect(); - for (li, line) in lines.into_iter().enumerate() { - let words = find_ref::line_to_words(line.to_string()); - words.iter().for_each(|x| { - self.word_map - .entry(x.word.clone()) - .or_default() - .push(Position { - filename: self.file_name.clone(), - line: li as u64, - column: Some(x.startpos), - }) - }); - } - } - - // return all occurrence of a name - pub fn get(&self, name: &String) -> Option<&Vec> { - self.word_map.get(name) - } -} - -// Record all occurrences of the name in workspace -pub struct WorkSpaceWordMap { - path: String, - file_map: HashMap, -} - -impl WorkSpaceWordMap { - pub fn new(path: String) -> Self { - Self { - path, - 
-            file_map: HashMap::new(),
-        }
-    }
-
-    // when user edit a file, the filemap of this file need to rebuild
-    pub fn change_file(&mut self, file_name: String, text: String) {
-        self.file_map
-            .entry(file_name.clone())
-            .or_insert(FileWordMap::new(file_name))
-            .build(Some(text));
-    }
-
-    // when user add a file, the workspacemap will add a new filemap for it
-    pub fn create_file(&mut self, file_name: String) {
-        self.file_map
-            .entry(file_name.clone())
-            .or_insert(FileWordMap::new(file_name))
-            .clear();
-    }
-
-    // when user delete a file, the workspacemap will remove the old filemap of it
-    pub fn delete_file(&mut self, file_name: String) {
-        self.file_map.remove(&file_name);
-    }
-
-    // when user rename a file, the workspacemap will remove the old filemap of it and build a new filemap for it
-    pub fn rename_file(&mut self, old_name: String, new_name: String) {
-        self.delete_file(old_name);
-        self.create_file(new_name.clone());
-        self.file_map.get_mut(&new_name).unwrap().build(None);
-    }
-
-    // build & maintain the record map for each file under the path
-    pub fn build(&mut self) {
-        //TODO may use some cache from other component?
-        if let Ok(files) = get_kcl_files(&self.path, true) {
-            for file in files.into_iter() {
-                self.file_map
-                    .insert(file.clone(), FileWordMap::new(file.clone()));
-                self.file_map.get_mut(&file).unwrap().build(None);
-            }
-        }
-    }
-
-    // return all occurrence of a name in the workspace
-    pub fn get(self, name: &String) -> Option<Vec<Position>> {
-        let mut words = Vec::new();
-        for (_, mp) in self.file_map.iter() {
-            if let Some(file_words) = mp.get(name) {
-                words.extend_from_slice(file_words);
-            }
-        }
-        Some(words)
-    }
-}
diff --git a/kclvm/tools/src/LSP/src/find_refs.rs b/kclvm/tools/src/LSP/src/find_refs.rs
new file mode 100644
index 000000000..e8c8ee300
--- /dev/null
+++ b/kclvm/tools/src/LSP/src/find_refs.rs
@@ -0,0 +1,116 @@
+use anyhow;
+use std::collections::HashMap;
+use crate::{
+    util::{build_word_index, parse_param_and_compile, Param},
+    state::{LanguageServerSnapshot, Task, log_message},
+    from_lsp::{self, file_path_from_url, kcl_pos},
+    goto_def::{goto_definition, find_def},
+};
+use lsp_types;
+use crossbeam_channel::Sender;
+use kclvm_config::modfile::get_pkg_root;
+use kclvm_ast::ast::Stmt;
+
+pub(crate) fn find_references(
+    snapshot: LanguageServerSnapshot,
+    params: lsp_types::ReferenceParams,
+    sender: Sender<Task>,
+) -> anyhow::Result<Option<Vec<lsp_types::Location>>> {
+    // 1. find the definition of the current token
+    let file = file_path_from_url(&params.text_document_position.text_document.uri)?;
+    let path = from_lsp::abs_path(&params.text_document_position.text_document.uri)?;
+    let db = snapshot.get_db(&path.clone().into())?;
+    let pos = kcl_pos(&file, params.text_document_position.position);
+
+    if let Some(def_resp) = goto_definition(&db.prog, &pos, &db.scope) {
+        match def_resp {
+            lsp_types::GotoDefinitionResponse::Scalar(def_loc) => {
+                // get the def location
+                if let Some(def_name) = match db.prog.pos_to_stmt(&pos) {
+                    Some(node) => match node.node {
+                        Stmt::Import(_) => None,
+                        _ => match find_def(node.clone(), &pos, &db.scope) {
+                            Some(def) => Some(def.get_name()),
+                            None => None,
+                        },
+                    },
+                    None => None,
+                } {
+                    // 2. find all occurrences of the current token
+                    // todo: decide the scope by the workspace root and the kcl.mod both, use the narrower scope
+                    if let Some(root) = get_pkg_root(path.display().to_string().as_str()) {
+                        match build_word_index(root) {
+                            Ok(word_index) => {
+                                return find_refs(def_loc, def_name, word_index);
+                            }
+                            Err(_) => {
+                                let _ = log_message("build word index failed".to_string(), &sender);
+                                return anyhow::Ok(None);
+                            }
+                        }
+                    } else {
+                        return Ok(None);
+                    }
+                }
+            }
+            _ => return Ok(None),
+        }
+    } else {
+        log_message(
+            "Definition item not found, result in no reference".to_string(),
+            &sender,
+        )?;
+    }
+
+    Ok(None)
+}
+
+pub(crate) fn find_refs(
+    def_loc: lsp_types::Location,
+    name: String,
+    word_index: HashMap<String, Vec<lsp_types::Location>>,
+) -> anyhow::Result<Option<Vec<lsp_types::Location>>> {
+    if let Some(locs) = word_index.get(name.as_str()).cloned() {
+        return anyhow::Ok(Some(
+            locs.into_iter()
+                .filter(|ref_loc| {
+                    // from location to real def
+                    // return if the real def location matches the def_loc
+                    let file_path = ref_loc.uri.path().to_string();
+                    match parse_param_and_compile(
+                        Param {
+                            file: file_path.clone(),
+                        },
+                        None,
+                    ) {
+                        Ok((prog, scope, _)) => {
+                            let ref_pos = kcl_pos(&file_path, ref_loc.range.start);
+                            // find def from the ref_pos
+                            if let Some(real_def) = goto_definition(&prog, &ref_pos, &scope) {
+                                match real_def {
+                                    lsp_types::GotoDefinitionResponse::Scalar(real_def_loc) => {
+                                        real_def_loc == def_loc
+                                    }
+                                    _ => false,
+                                }
+                            } else {
+                                false
+                            }
+                        }
+                        Err(_) => {
+                            // todo log compilation error
+                            false
+                        }
+                    }
+                })
+                .collect(),
+        ));
+    } else {
+        Ok(None)
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    // todo assert
+    #[test]
+    fn test_find_refs() {
+    }
+}
\ No newline at end of file
diff --git a/kclvm/tools/src/LSP/src/goto_def.rs b/kclvm/tools/src/LSP/src/goto_def.rs
index 49a9fb806..21dcb03ab 100644
--- a/kclvm/tools/src/LSP/src/goto_def.rs
+++ b/kclvm/tools/src/LSP/src/goto_def.rs
@@ -68,18 +68,18 @@ pub(crate) fn goto_definition(
 
 #[derive(Debug)]
 pub enum Definition {
-    Object(ScopeObject),
-    Scope(Scope),
+    Object(ScopeObject, String),
+    Scope(Scope, String),
 }
 
 impl Definition {
     pub(crate) fn get_positions(&self) -> IndexSet<(KCLPos, KCLPos)> {
         let mut positions = IndexSet::new();
         match self {
-            Definition::Object(obj) => {
+            Definition::Object(obj, _) => {
                 positions.insert((obj.start.clone(), obj.end.clone()));
             }
-            Definition::Scope(scope) => match &scope.kind {
+            Definition::Scope(scope, _) => match &scope.kind {
                 kclvm_sema::resolver::scope::ScopeKind::Package(filenames) => {
                     for file in filenames {
                         let dummy_pos = KCLPos {
@@ -97,8 +97,15 @@ impl Definition {
            }
        }
        positions
    }
+    pub(crate) fn get_name(&self) -> String {
+        match self {
+            Definition::Object(_, name) => name.clone(),
+            Definition::Scope(_, name) => name.clone(),
+        }
+    }
 }
+
 
 pub(crate) fn find_def(
     node: Node<Stmt>,
     kcl_pos: &KCLPos,
@@ -145,7 +152,7 @@
                 find_def(node, &schema_expr.name.get_end_pos(), prog_scope);
             if let Some(schema) = schema_def {
                 match schema {
-                    Definition::Object(obj) => {
+                    Definition::Object(obj, _) => {
                         let schema_type = obj.ty.into_schema_type();
                         return find_attr_in_schema(
                             &schema_type,
@@ -153,7 +160,7 @@
                             &prog_scope.scope_map,
                         );
                    }
-                    Definition::Scope(_) => {
+                    Definition::Scope(_, _) => {
                        //todo
                    }
                }
@@ -193,16 +200,16 @@ pub(crate) fn resolve_var(
             kclvm_sema::ty::TypeKind::Module(module_ty) => match module_ty.kind {
                 kclvm_sema::ty::ModuleKind::User => scope_map
                     .get(&pkgpath_without_prefix!(module_ty.pkgpath))
-                    .map(|scope| Definition::Scope(scope.borrow().clone())),
+                    .map(|scope| Definition::Scope(scope.borrow().clone(), name)),
                 kclvm_sema::ty::ModuleKind::System => {
-                    Some(Definition::Object(obj.borrow().clone()))
+                    Some(Definition::Object(obj.borrow().clone(), name))
                 }
                 kclvm_sema::ty::ModuleKind::Plugin => None,
            },
            _ => None,
        }
    }
-    _ => Some(Definition::Object(obj.borrow().clone())),
+    _ => Some(Definition::Object(obj.borrow().clone(), name)),
    },
    None => None,
 }
@@ -235,14 +242,14 @@ pub(crate) fn resolve_var(
         match &ty.kind {
             kclvm_sema::ty::TypeKind::Function(func_ty) => {
                 return Some(Definition::Object(ScopeObject {
-                    name: func_name,
+                    name: func_name.clone(),
                     start: func_name_node.get_pos(),
                     end: func_name_node.get_end_pos(),
                     ty: ty.clone(),
                     kind: ScopeObjectKind::FunctionCall,
                     used: false,
                     doc: Some(func_ty.doc.clone()),
-                }))
+                }, func_name))
             }
             _ => return None,
diff --git a/kclvm/tools/src/LSP/src/hover.rs b/kclvm/tools/src/LSP/src/hover.rs
index ac8554d5b..8f88c8cfb 100644
--- a/kclvm/tools/src/LSP/src/hover.rs
+++ b/kclvm/tools/src/LSP/src/hover.rs
@@ -19,7 +19,7 @@ pub(crate) fn hover(
         Some(node) => {
             let mut docs: Vec<String> = vec![];
             if let Some(def) = find_def(node, kcl_pos, prog_scope) {
-                if let crate::goto_def::Definition::Object(obj) = def {
+                if let crate::goto_def::Definition::Object(obj, _) = def {
                     match obj.kind {
                         ScopeObjectKind::Definition => {
                             docs.extend(build_schema_hover_content(&obj.ty.into_schema_type()))
diff --git a/kclvm/tools/src/LSP/src/lib.rs b/kclvm/tools/src/LSP/src/lib.rs
index 442724c6e..cc66d149f 100644
--- a/kclvm/tools/src/LSP/src/lib.rs
+++ b/kclvm/tools/src/LSP/src/lib.rs
@@ -4,7 +4,7 @@ mod config;
 mod db;
 mod dispatcher;
 mod document_symbol;
-mod find_ref;
+mod find_refs;
 mod formatting;
 mod from_lsp;
 mod goto_def;
diff --git a/kclvm/tools/src/LSP/src/main.rs b/kclvm/tools/src/LSP/src/main.rs
index 70a499820..c605e967b 100644
--- a/kclvm/tools/src/LSP/src/main.rs
+++ b/kclvm/tools/src/LSP/src/main.rs
@@ -9,6 +9,7 @@ mod dispatcher;
 mod document_symbol;
 mod from_lsp;
 mod goto_def;
+mod find_refs;
 mod hover;
 mod main_loop;
 mod notification;
diff --git a/kclvm/tools/src/LSP/src/notification.rs b/kclvm/tools/src/LSP/src/notification.rs
index 1098b1b09..b86c46e55 100644
--- a/kclvm/tools/src/LSP/src/notification.rs
+++ b/kclvm/tools/src/LSP/src/notification.rs
@@ -1,6 +1,6 @@
 use lsp_types::notification::{
     DidChangeTextDocument, DidChangeWatchedFiles, DidCloseTextDocument, DidOpenTextDocument,
-    DidSaveTextDocument,
+    DidSaveTextDocument, //Initialized,
 };
 
 use crate::{
diff --git a/kclvm/tools/src/LSP/src/request.rs b/kclvm/tools/src/LSP/src/request.rs
index f1a12d03a..4e41b9964 100644
--- a/kclvm/tools/src/LSP/src/request.rs
+++ b/kclvm/tools/src/LSP/src/request.rs
@@ -1,6 +1,6 @@
 use anyhow::Ok;
 use crossbeam_channel::Sender;
-use lsp_types::TextEdit;
+use lsp_types::{TextEdit, Location};
 use ra_ap_vfs::VfsPath;
 use std::time::Instant;
 
@@ -12,6 +12,7 @@ use crate::{
     formatting::format,
     from_lsp::{self, file_path_from_url, kcl_pos},
     goto_def::goto_definition,
+    find_refs::find_references,
     hover, quick_fix,
     state::{log_message, LanguageServerSnapshot, LanguageServerState, Task},
 };
@@ -42,7 +43,9 @@ impl LanguageServerState {
                 state.shutdown_requested = true;
                 Ok(())
             })?
+            // .on::(handle_initialize)?
             .on::(handle_goto_definition)?
+            .on::(handle_reference)?
             .on::(handle_completion)?
             .on::(handle_hover)?
             .on::(handle_document_symbol)?
@@ -69,6 +72,16 @@ impl LanguageServerSnapshot {
     }
 }
 
+// pub(crate) fn handle_initialize(
+//     _snapshot: LanguageServerSnapshot,
+//     params: lsp_types::InitializeParams,
+//     _sender: Sender
+// ) -> anyhow::Result{
+//     if let Some(uri) = params.root_uri {
+//         self.word_index = build_word_index(uri.path().to_string())
+//     }
+// }
+
 pub(crate) fn handle_formatting(
     _snapshot: LanguageServerSnapshot,
     params: lsp_types::DocumentFormattingParams,
@@ -132,6 +145,15 @@ pub(crate) fn handle_goto_definition(
     Ok(res)
 }
 
+/// Called when a `FindReferences` request was received.
+pub(crate) fn handle_reference(
+    snapshot: LanguageServerSnapshot,
+    params: lsp_types::ReferenceParams,
+    sender: Sender<Task>,
+) -> anyhow::Result<Option<Vec<Location>>> {
+    find_references(snapshot, params, sender)
+}
+
 /// Called when a `Completion` request was received.
 pub(crate) fn handle_completion(
     snapshot: LanguageServerSnapshot,
diff --git a/kclvm/tools/src/LSP/src/state.rs b/kclvm/tools/src/LSP/src/state.rs
index a898a7d63..20ec3dec0 100644
--- a/kclvm/tools/src/LSP/src/state.rs
+++ b/kclvm/tools/src/LSP/src/state.rs
@@ -2,13 +2,14 @@ use crate::analysis::Analysis;
 use crate::config::Config;
 use crate::db::AnalysisDatabase;
 use crate::to_lsp::{kcl_diag_to_lsp_diags, url};
-use crate::util::{get_file_name, parse_param_and_compile, to_json, Param};
+use crate::util::{get_file_name, parse_param_and_compile, to_json, Param, self};
 use crossbeam_channel::{select, unbounded, Receiver, Sender};
 use indexmap::IndexSet;
 use lsp_server::{ReqQueue, Response};
 use lsp_types::{
     notification::{Notification, PublishDiagnostics},
     Diagnostic, PublishDiagnosticsParams,
+    Location,
 };
 use parking_lot::RwLock;
 use ra_ap_vfs::{FileId, Vfs};
@@ -67,6 +68,9 @@ pub(crate) struct LanguageServerState {
     /// The VFS loader
     pub vfs_handle: Box,
+
+    /// The word index map
+    pub word_index: HashMap<String, Vec<Location>>,
 }
 
 /// A snapshot of the state of the language server
@@ -78,6 +82,8 @@ pub(crate) struct LanguageServerSnapshot {
     pub db: HashMap,
     /// Documents that are currently kept in memory from the client
     pub opened_files: IndexSet,
+    /// The word index map
+    pub word_index: HashMap<String, Vec<Location>>,
 }
 
 #[allow(unused)]
@@ -102,6 +108,7 @@ impl LanguageServerState {
             analysis: Analysis::default(),
             opened_files: IndexSet::new(),
             vfs_handle: handle,
+            word_index: HashMap::new(),
         }
     }
 
@@ -245,6 +252,7 @@ impl LanguageServerState {
             vfs: self.vfs.clone(),
             db: self.analysis.db.clone(),
             opened_files: self.opened_files.clone(),
+            word_index: self.word_index.clone(),
         }
     }
 
diff --git a/kclvm/tools/src/LSP/src/test_data/find_refs_test/kcl.mod b/kclvm/tools/src/LSP/src/test_data/find_refs_test/kcl.mod
new file mode 100644
index 000000000..e69de29bb
diff --git a/kclvm/tools/src/LSP/src/test_data/find_refs_test/main.k b/kclvm/tools/src/LSP/src/test_data/find_refs_test/main.k
new file mode 100644
index 000000000..bf5812c28
--- /dev/null
+++ b/kclvm/tools/src/LSP/src/test_data/find_refs_test/main.k
@@ -0,0 +1,3 @@
+a = 1
+b = a
+c = a
\ No newline at end of file
diff --git a/kclvm/tools/src/LSP/src/util.rs b/kclvm/tools/src/LSP/src/util.rs
index c18b900f3..fdbd8a0a6 100644
--- a/kclvm/tools/src/LSP/src/util.rs
+++ b/kclvm/tools/src/LSP/src/util.rs
@@ -15,7 +15,7 @@ use kclvm_parser::{load_program, ParseSession};
 use kclvm_sema::resolver::scope::Scope;
 use kclvm_sema::resolver::{resolve_program, scope::ProgramScope};
 use kclvm_utils::pkgpath::rm_external_pkg_name;
-use lsp_types::Url;
+use lsp_types::{Location, Range, Position, Url};
 use parking_lot::{RwLock, RwLockReadGuard};
 use ra_ap_vfs::{FileId, Vfs};
 use serde::{de::DeserializeOwned, Serialize};
@@ -23,6 +23,7 @@ use std::cell::RefCell;
 use std::path::{Path, PathBuf};
 use std::rc::Rc;
 use std::{fs, sync::Arc};
+use std::collections::HashMap;
 
 use crate::from_lsp;
 
@@ -737,3 +738,165 @@ pub(crate) fn get_pkg_scope(
         .borrow()
         .clone()
 }
+
+/// scan and build a word -> Locations index map
+pub fn build_word_index(path: String) -> anyhow::Result<HashMap<String, Vec<Location>>> {
+    let mut index: HashMap<String, Vec<Location>> = HashMap::new();
+    if let Ok(files) = get_kcl_files(path.clone(), true) {
+        for file_path in &files {
+            // str path to url
+            if let Ok(url) = Url::from_file_path(file_path) {
+                // read file content and save the word to word index
+                let text = read_file(file_path).unwrap();
+                let lines: Vec<&str> = text.lines().collect();
+                for (li, line) in lines.into_iter().enumerate() {
+                    let words = line_to_words(line.to_string());
+                    for (key, values) in words {
+                        index
+                            .entry(key)
+                            .or_insert_with(Vec::new)
+                            .extend(values.iter().map(|w| Location {
+                                uri: url.clone(),
+                                range: Range {
+                                    start: Position::new(li as u32, w.start_col),
+                                    end: Position::new(li as u32, w.end_col),
+                                },
+                            }));
+                    }
+                }
+            }
+        }
+    }
+    Ok(index)
+}
+
+// Word describes an arbitrary word in a certain line including
+// start position, end position and the word itself.
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub struct Word {
+    start_col: u32,
+    end_col: u32,
+    word: String,
+}
+
+impl Word {
+    fn new(start_col: u32, end_col: u32, word: String) -> Self {
+        Self {
+            start_col,
+            end_col,
+            word,
+        }
+    }
+}
+
+fn read_file(path: &String) -> anyhow::Result<String> {
+    let text = std::fs::read_to_string(path)?;
+    Ok(text)
+}
+
+// Split one line into identifier words.
+fn line_to_words(text: String) -> HashMap<String, Vec<Word>> {
+    let mut result = HashMap::new();
+    let mut chars: Vec<char> = text.chars().collect();
+    chars.push('\n');
+    let mut start_pos = usize::MAX;
+    let mut continue_pos = usize::MAX - 1; // avoid overflow
+    let mut prev_word = false;
+    let mut words: Vec<Word> = vec![];
+    for (i, ch) in chars.iter().enumerate() {
+        let is_id_start = rustc_lexer::is_id_start(*ch);
+        let is_id_continue = rustc_lexer::is_id_continue(*ch);
+        // If the character is a valid identifier start and the previous character is not a valid identifier continue, mark the start position.
+        if is_id_start && !prev_word {
+            start_pos = i;
+        }
+        match is_id_continue {
+            true => {
+                // Continue searching for the end position.
+                if start_pos != usize::MAX {
+                    continue_pos = i;
+                }
+            }
+            false => {
+                // Find out the end position.
+                if continue_pos + 1 == i {
+                    words.push(Word::new(
+                        start_pos as u32,
+                        i as u32,
+                        chars[start_pos..i].iter().collect::<String>(),
+                    ));
+                }
+                // Reset the start position.
+                start_pos = usize::MAX;
+            }
+        }
+        prev_word = is_id_continue;
+    }
+
+    for w in words {
+        result.entry(w.word.clone()).or_insert_with(Vec::new).push(w);
+    }
+    result
+}
+
+#[cfg(test)]
+mod tests {
+    use super::{line_to_words, build_word_index, Word};
+
+    // todo assert
+    #[test]
+    fn test_build_word_index() {
+        let result = build_word_index("/Users/amy/work/open/catalog".to_string());
+        println!("{:?}", result)
+    }
+
+    // todo assert
+    #[test]
+    fn test_line_to_words() {
+        let datas = [
+            "alice_first_name = \"alice\"",
+            "0lice_first_name = \"alic0\"",
+            "alice = p.Parent { name: \"alice\" }",
+        ];
+        let expect = vec![
+            vec![
+                Word {
+                    start_col: 0,
+                    end_col: 16,
+                    word: "alice_first_name".to_string(),
+                },
+                Word {
+                    start_col: 20,
+                    end_col: 25,
+                    word: "alice".to_string(),
+                },
+            ],
+            vec![Word {
+                start_col: 20,
+                end_col: 25,
+                word: "alic0".to_string(),
+            }],
+            vec![
+                Word {
+                    start_col: 0,
+                    end_col: 5,
+                    word: "alice".to_string(),
+                },
+                Word {
+                    start_col: 8,
+                    end_col: 9,
+                    word: "p".to_string(),
+                },
+                Word {
+                    start_col: 10,
+                    end_col: 16,
+                    word: "Parent".to_string(),
+                },
+                Word {
+                    start_col: 19,
+                    end_col: 23,
+                    word: "name".to_string(),
+                },
+                Word {
+                    start_col: 26,
+                    end_col: 31,
+                    word: "alice".to_string(),
+                },
+            ],
+        ];
+        for i in 0..datas.len() {
+            // assert_eq!(line_to_words(datas[i].to_string()), expect[i].clone());
+            let _ = line_to_words(datas[i].to_string());
+        }
+    }
+}
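The `test_find_refs` stub added in find_refs.rs is still empty. As a rough illustration of how the new pieces compose, here is a minimal Rust sketch of a test against the `find_refs_test` fixture added in this diff. It assumes `build_word_index` and `find_refs` keep the signatures shown above, that the fixture compiles on its own (it has an empty kcl.mod), and that `goto_definition` reports the one-character identifier range for `a`; the module paths and assertions are illustrative, not part of the patch.

// Sketch only: hypothetical test layout, not included in the diff above.
#[cfg(test)]
mod find_refs_sketch {
    use std::fs;
    use std::path::Path;

    use lsp_types::{Location, Position, Range, Url};

    use crate::find_refs::find_refs;
    use crate::util::build_word_index;

    #[test]
    fn test_find_refs_in_main_k() {
        // The fixture added by this change set: a = 1, b = a, c = a.
        let root = fs::canonicalize(
            Path::new(".")
                .join("src")
                .join("test_data")
                .join("find_refs_test"),
        )
        .unwrap();
        let main_k = root.join("main.k");
        let url = Url::from_file_path(&main_k).unwrap();

        // Assumed definition location of `a`: line 0, columns 0..1.
        let def_loc = Location {
            uri: url,
            range: Range {
                start: Position::new(0, 0),
                end: Position::new(0, 1),
            },
        };

        // Index every identifier under the fixture, then keep only the
        // occurrences of `a` that resolve back to the definition above.
        let word_index = build_word_index(root.display().to_string()).unwrap();
        let refs = find_refs(def_loc, "a".to_string(), word_index)
            .unwrap()
            .unwrap_or_default();

        // `b = a` (line 1) and `c = a` (line 2) should both be reported.
        assert!(refs.iter().any(|loc| loc.range.start.line == 1));
        assert!(refs.iter().any(|loc| loc.range.start.line == 2));
    }
}

The design point the sketch leans on is visible in `find_refs` itself: candidate locations come from the plain-text word index, and each one is confirmed by running `goto_definition` at that position and comparing the resolved location with the original definition, so false matches on same-named identifiers are filtered out.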