Skip to content

Commit

Permalink
Implement whole-workspace reference tracking and update existing features to make use of it (chapel-lang#24445)
Browse files Browse the repository at this point in the history

This PR modifies the 'use tracking' aspect of the language server to
lift uses to the "dyno context" level. Since Dyno contexts are created
such that one context is shared among multiple files compiled together,
this leads to a "workspace-wide rename" functionality.

The PR works by replacing the `Dict[NodeId, List[Reference]]` with a
`Dict[NodeId, ReferenceList]`, where `ReferenceList` is a new class. The
`ReferenceList` class is _also_ inserted into the "dyno context"
container, also associated with a `NodeId`. The context-level container
is `Dict[NodeId, List[ReferenceList]]`, containing reference lists from
multiple files. Because the `ReferenceList` object is shared between
the `FileInfo` and the context container, when the `FileInfo` is
updated, so is the corresponding entry in the `ContextContainer`. From
there, LSP features that need global symbols (find references, rename)
can simply use the context-level list instead of `uses_here`.

Reviewed by @jabraham17 -- thanks!
  • Loading branch information
DanilaFe authored Feb 21, 2024
2 parents c20ef67 + 0597af2 commit b95e338
Showing 1 changed file with 57 additions and 14 deletions.
71 changes: 57 additions & 14 deletions tools/chpl-language-server/src/chpl-language-server.py
Original file line number Diff line number Diff line change
Expand Up @@ -200,7 +200,7 @@ def completion_item_for_decl(


def location_to_location(loc) -> Location:
    """
    Convert a Chapel AST location into an LSP Location.

    The source path is absolutized before being turned into a file://
    URI; a relative path would otherwise yield a URI that LSP clients
    cannot resolve consistently against their own working directory.
    """
    return Location("file://" + os.path.abspath(loc.path()), location_to_range(loc))


def get_symbol_information(
Expand Down Expand Up @@ -339,7 +339,7 @@ def get_location(self):
return Location(self.get_uri(), self.rng)

def get_uri(self):
    """
    Return a file:// URI for this node's source file.

    The path is absolutized so the URI is resolvable regardless of the
    client's working directory (relative paths make malformed file URIs).
    """
    path = os.path.abspath(self.node.location().path())
    return f"file://{path}"


Expand All @@ -349,12 +349,29 @@ class ResolvedPair:
resolved_to: NodeAndRange


@dataclass
class References:
    """
    The collection of use sites of a single symbol within one file.

    An instance is stored both in the owning file's `uses_here` table and
    in the context-level `global_uses` table, so mutations made while
    re-indexing a file are immediately visible to workspace-wide queries.
    """

    # The file whose index owns this list of uses.
    in_file: "FileInfo"
    # The individual use sites recorded so far.
    uses: "List[NodeAndRange]"

    def append(self, x: "NodeAndRange"):
        """Record one more use site."""
        self.uses.append(x)

    def clear(self):
        """Forget all recorded use sites (e.g., before a re-index pass)."""
        del self.uses[:]

    def __iter__(self):
        """Iterate over the recorded use sites."""
        yield from self.uses


class ContextContainer:
def __init__(self, file: str, config: Optional["WorkspaceConfig"]):
self.config: Optional["WorkspaceConfig"] = config
self.file_paths: List[str] = []
self.module_paths: List[str] = [file]
self.context: chapel.Context = chapel.Context()
self.file_infos: List["FileInfo"] = []
self.global_uses: Dict[str, List[References]] = defaultdict(list)

if config:
file_config = config.for_file(file)
Expand Down Expand Up @@ -407,7 +424,7 @@ class FileInfo:
instantiation_segments: PositionList[
Tuple[NodeAndRange, chapel.TypedSignature]
] = field(init=False)
uses_here: Dict[str, List[NodeAndRange]] = field(init=False)
uses_here: Dict[str, References] = field(init=False)
instantiations: Dict[str, Set[chapel.TypedSignature]] = field(init=False)
siblings: chapel.SiblingMap = field(init=False)
used_modules: List[chapel.Module] = field(init=False)
Expand All @@ -417,6 +434,7 @@ def __post_init__(self):
self.use_segments = PositionList(lambda x: x.ident.rng)
self.def_segments = PositionList(lambda x: x.rng)
self.instantiation_segments = PositionList(lambda x: x[0].rng)
self.uses_here = {}
self.rebuild_index()

def parse_file(self) -> List[chapel.AstNode]:
Expand All @@ -436,6 +454,15 @@ def get_asts(self) -> List[chapel.AstNode]:
with self.context.context.track_errors() as _:
return self.parse_file()

def _get_use_container(self, uid: str) -> References:
    """
    Look up (or lazily create) the References container for the symbol
    with unique id `uid`.

    A newly created container is registered both in this file's
    `uses_here` table and in the context-wide `global_uses` table, so
    the two views share one object and stay in sync.
    """
    existing = self.uses_here.get(uid)
    if existing is not None:
        return existing

    fresh = References(self, [])
    self.uses_here[uid] = fresh
    self.context.global_uses[uid].append(fresh)
    return fresh

def _note_reference(self, node: Union[chapel.Dot, chapel.Identifier]):
"""
Given a node that can refer to another node, note what it refers
Expand All @@ -445,7 +472,7 @@ def _note_reference(self, node: Union[chapel.Dot, chapel.Identifier]):
if not to:
return

self.uses_here[to.unique_id()].append(NodeAndRange(node))
self._get_use_container(to.unique_id()).append(NodeAndRange(node))
self.use_segments.append(
ResolvedPair(NodeAndRange(node), NodeAndRange(to))
)
Expand Down Expand Up @@ -482,7 +509,9 @@ def _collect_possibly_visible_decls(self):
self.possibly_visible_decls.append(child)

def _search_instantiations(
self, root: Union[chapel.AstNode, List[chapel.AstNode]], via: Optional[chapel.TypedSignature] = None
self,
root: Union[chapel.AstNode, List[chapel.AstNode]],
via: Optional[chapel.TypedSignature] = None,
):
for node in chapel.preorder(root):
if not isinstance(node, chapel.FnCall):
Expand Down Expand Up @@ -517,8 +546,9 @@ def rebuild_index(self):

# Use this class as an AST visitor to rebuild the use and definition segment
# table, as well as the list of references.
self.uses_here = defaultdict(list)
self.instantiations = defaultdict(set)
for _, refs in self.uses_here.items():
refs.clear()
self.use_segments.clear()
self.def_segments.clear()
self.visit(asts)
Expand Down Expand Up @@ -609,6 +639,9 @@ def __init__(self, ls: "ChapelLanguageServer", json: Dict[str, Any]):

self.files[key] = compile_commands[0]

def file_paths(self) -> Iterable[str]:
    """Return the paths of all files listed in this workspace configuration."""
    return self.files.keys()

def for_file(self, path: str) -> Optional[Dict[str, Any]]:
if path in self.files:
return self.files[path]
Expand Down Expand Up @@ -704,6 +737,12 @@ def get_context(self, uri: str) -> ContextContainer:

return context

def eagerly_process_all_files(self, context: ContextContainer):
    """
    Ensure every file in the context's workspace configuration has been
    loaded and indexed, so that context-level tables (e.g., global_uses)
    cover the whole workspace rather than only the files opened so far.

    Does nothing when the context has no workspace configuration.
    """
    cfg = context.config
    if cfg:
        # Use the WorkspaceConfig accessor rather than reaching into
        # its `files` dict directly; iteration order/content is the same.
        for file in cfg.file_paths():
            self.get_file_info("file://" + file, do_update=False)

def get_file_info(
self, uri: str, do_update: bool = False
) -> Tuple[FileInfo, List[Any]]:
Expand Down Expand Up @@ -1018,9 +1057,12 @@ async def get_refs(ls: ChapelLanguageServer, params: ReferenceParams):
if not node_and_loc:
return None

ls.eagerly_process_all_files(fi.context)

locations = [node_and_loc.get_location()]
for use in fi.uses_here[node_and_loc.node.unique_id()]:
locations.append(use.get_location())
for uselist in fi.context.global_uses[node_and_loc.node.unique_id()]:
for use in uselist:
locations.append(use.get_location())

return locations

Expand Down Expand Up @@ -1165,9 +1207,12 @@ def add_to_edits(nr: NodeAndRange):
edits[nr.get_uri()] = []
edits[nr.get_uri()].append(TextEdit(nr.rng, params.new_name))

ls.eagerly_process_all_files(fi.context)

add_to_edits(node_and_loc)
for use in fi.uses_here[node_and_loc.node.unique_id()]:
add_to_edits(use)
for uselist in fi.context.global_uses[node_and_loc.node.unique_id()]:
for use in uselist:
add_to_edits(use)

return WorkspaceEdit(changes=edits)

Expand Down Expand Up @@ -1225,7 +1270,7 @@ async def document_highlight(
highlights = [
DocumentHighlight(node_and_loc.rng, DocumentHighlightKind.Text)
]
for use in fi.uses_here[node_and_loc.node.unique_id()]:
for use in fi.uses_here.get(node_and_loc.node.unique_id(), []):
highlights.append(
DocumentHighlight(use.rng, DocumentHighlightKind.Text)
)
Expand Down Expand Up @@ -1305,9 +1350,7 @@ async def semantic_tokens_range(
start_pos = location_to_range(ast.location()).start
instantiation = fi.get_inst_segment_at_position(start_pos)
tokens.extend(
ls.get_dead_code_tokens(
ast, fi.file_lines(), instantiation
)
ls.get_dead_code_tokens(ast, fi.file_lines(), instantiation)
)

return SemanticTokens(data=encode_deltas(tokens, 0, 0))
Expand Down

0 comments on commit b95e338

Please sign in to comment.