Commit becba9c

Merge pull request #20 from ypaliy-vdoo/feature/support_pickling_nodes

Feature/support pickling nodes

boolangery authored May 24, 2022 (2 parents: 0b3afea + bcffaac)
Showing 3 changed files with 44 additions and 24 deletions.
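For context: before this change, parsed nodes were not reliably picklable, because each node held ANTLR tokens that referenced the underlying token stream. The diff below stores stream-less clones of those tokens instead. A minimal round-trip sketch, assuming this repository's public helpers luaparser.ast.parse and ast.to_pretty_str:

    import pickle

    from luaparser import ast

    # Parse a chunk; with this change each node keeps stream-less
    # copies of its first/last tokens instead of stream-backed ones.
    tree = ast.parse('local greeting = "hello"')

    # The round-trip this PR enables: the serialized tree no longer
    # drags an unpicklable input stream along with its tokens.
    restored = pickle.loads(pickle.dumps(tree))
    print(ast.to_pretty_str(restored))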
.gitignore: 3 changes (2 additions, 1 deletion)

@@ -5,4 +5,5 @@
 build
 dist
 luaparser.egg-info
-venv
+venv
+.venv
luaparser/astnodes.py: 47 changes (36 additions, 11 deletions)

@@ -7,7 +7,7 @@
 from enum import Enum
 from typing import List, Optional

-from antlr4 import Token
+from antlr4.Token import CommonToken

 Comments = Optional[List["Comment"]]

@@ -30,8 +30,8 @@ def __init__(
         self,
         name: str,
         comments: Comments = None,
-        first_token: Optional[Token] = None,
-        last_token: Optional[Token] = None,
+        first_token: Optional[CommonToken] = None,
+        last_token: Optional[CommonToken] = None,
     ):
         """
@@ -45,8 +45,19 @@ def __init__(
             comments = []
         self._name: str = name
         self.comments: Comments = comments
-        self._first_token: Optional[Token] = first_token
-        self._last_token: Optional[Token] = last_token
+        self._first_token: Optional[CommonToken] = first_token
+        self._last_token: Optional[CommonToken] = last_token
+
+        # We want nodes to be serializable with pickle.
+        # To allow that, they must not hold mutable fields such as streams,
+        # and tokens reference streams. Store a stream-less copy of each token.
+        if self._first_token is not None:
+            self._first_token = self._first_token.clone()
+            self._first_token.source = CommonToken.EMPTY_SOURCE
+
+        if self._last_token is not None:
+            self._last_token = self._last_token.clone()
+            self._last_token.source = CommonToken.EMPTY_SOURCE

     @property
     def display_name(self) -> str:
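The constructor logic above is the core of the PR: a token is never stored as-is, only as a clone whose source pair is emptied. A standalone sketch of that detachment, assuming the ANTLR Python runtime (antlr4-python3-runtime), where CommonToken.clone() copies the token's fields, including its materialized text, and EMPTY_SOURCE is the placeholder (None, None) source pair:

    from antlr4.Token import CommonToken

    def detach(token: CommonToken) -> CommonToken:
        # clone() copies type, channel, start/stop, line, column and text;
        # clearing source drops the (lexer, input stream) pair, which is
        # the only part of the token that cannot be pickled.
        copy = token.clone()
        copy.source = CommonToken.EMPTY_SOURCE
        return copy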
@@ -60,20 +71,34 @@ def __eq__(self, other) -> bool:
         return False

     @property
-    def first_token(self) -> Optional[Token]:
+    def first_token(self) -> Optional[CommonToken]:
         """
         First token of a node.
+        Note: Token is disconnected from underlying source streams.
         """
         return self._first_token

     @first_token.setter
-    def first_token(self, val):
-        self._first_token = val
+    def first_token(self, val: Optional[CommonToken]):
+        if val is not None:
+            self._first_token = val.clone()
+            self._first_token.source = CommonToken.EMPTY_SOURCE

     @property
-    def last_token(self) -> Optional[Token]:
+    def last_token(self) -> Optional[CommonToken]:
         """
         Last token of a node.
+        Note: Token is disconnected from underlying source streams.
         """
         return self._last_token

     @last_token.setter
-    def last_token(self, val):
-        self._last_token = val
+    def last_token(self, val: Optional[CommonToken]):
+        if val is not None:
+            self._last_token = val.clone()
+            self._last_token.source = CommonToken.EMPTY_SOURCE

     @property
     def start_char(self) -> Optional[int]:
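One consequence of the setter design above: assigning a stream-backed token after construction keeps the node picklable, since the value is cloned and detached on the way in, but assigning None is a no-op rather than a way to clear the field.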
luaparser/builder.py: 18 changes (6 additions, 12 deletions)

@@ -220,7 +220,7 @@ def __init__(self, source):
         self._hidden_handled_stack: List[bool] = []

     @property
-    def _LT(self) -> Token:
+    def _LT(self) -> CommonToken:
         """Last token that was consumed in next_i*_* method."""
         return self._stream.LT(-1)
@@ -665,9 +665,7 @@ def parse_tail(self) -> Node or bool:
                 last_token=self._LT,
             )
         if self.next_is_rc(Tokens.STRING, False):
-            string = self.parse_lua_str(self.text)
-            string.first_token = self._LT.start
-            string.last_token = self._LT
+            string = self.parse_lua_str(self.text, self._LT)
             self.success()
             # noinspection PyTypeChecker
             return Invoke(None, name, [string])
@@ -705,9 +703,7 @@ def parse_tail(self) -> Node or bool:

         self.failure_save()
         if self.next_is_rc(Tokens.STRING, False):
-            string = self.parse_lua_str(self.text)
-            string.first_token = self._LT
-            string.last_token = self._LT
+            string = self.parse_lua_str(self.text, self._LT)
             self.success()
             return string

@@ -1410,9 +1406,7 @@ def parse_atom(self) -> Expression or bool:
             )

         if self.next_is(Tokens.STRING) and self.next_is_rc(Tokens.STRING):
-            string = self.parse_lua_str(self.text)
-            string.first_token = self._LT
-            string.last_token = self._LT
+            string = self.parse_lua_str(self.text, self._LT)
             return string

         if self.next_is(Tokens.NIL) and self.next_is_rc(Tokens.NIL):
@@ -1426,7 +1420,7 @@
         return None

     @staticmethod
-    def parse_lua_str(lua_str) -> String:
+    def parse_lua_str(lua_str, token: Optional[CommonToken] = None) -> String:
         delimiter: StringDelimiter = StringDelimiter.SINGLE_QUOTE
         p = re.compile(r"^\[=+\[(.*)]=+]")  # nested quote pattern
         # try remove double quote:
@@ -1444,7 +1438,7 @@ def parse_lua_str(lua_str) -> String:
         # nested quote
         elif p.match(lua_str):
             lua_str = p.search(lua_str).group(1)
-        return String(lua_str, delimiter)
+        return String(lua_str, delimiter, first_token=token, last_token=token)

     def parse_function_literal(self) -> AnonymousFunction or bool:
         self.save()
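With the token threaded into parse_lua_str, each call site shrinks from three lines to one, and a String node receives its first and last token at construction time, where Node.__init__ detaches them as shown in astnodes.py. A quick sketch of the end result, assuming ast.walk and the start_char property visible in the diff above:

    import pickle

    from luaparser import ast

    tree = ast.parse('s = "hello"')
    restored = pickle.loads(pickle.dumps(tree))

    # start_char is derived from the cloned first_token, so source
    # positions should survive the round-trip without the stream.
    for node in ast.walk(restored):
        print(type(node).__name__, node.start_char)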
