From 06f1ab4464f2f864532a9033f14ccec94a4e7d01 Mon Sep 17 00:00:00 2001 From: Yuri Paliy Date: Tue, 9 Nov 2021 12:06:20 +0200 Subject: [PATCH 1/4] Updated gitignore with local venv. --- .gitignore | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index aee02fc..6a56064 100644 --- a/.gitignore +++ b/.gitignore @@ -5,4 +5,5 @@ build dist luaparser.egg-info -venv \ No newline at end of file +venv +.venv From 00f8009443f6194c1e7c78fa69e15fe5a298e483 Mon Sep 17 00:00:00 2001 From: Yuri Paliy Date: Tue, 9 Nov 2021 12:15:01 +0200 Subject: [PATCH 2/4] Removed streams from tokens in nodes. --- luaparser/astnodes.py | 45 ++++++++++++++++++++++++++++++++----------- 1 file changed, 34 insertions(+), 11 deletions(-) diff --git a/luaparser/astnodes.py b/luaparser/astnodes.py index ea2fa69..e726e79 100644 --- a/luaparser/astnodes.py +++ b/luaparser/astnodes.py @@ -7,7 +7,7 @@ from enum import Enum from typing import List, Optional -from antlr4 import Token +from antlr4.Token import CommonToken Comments = Optional[List["Comment"]] @@ -30,8 +30,8 @@ def __init__( self, name: str, comments: Comments = None, - first_token: Optional[Token] = None, - last_token: Optional[Token] = None, + first_token: Optional[CommonToken] = None, + last_token: Optional[CommonToken] = None, ): """ @@ -45,8 +45,19 @@ def __init__( comments = [] self._name: str = name self.comments: Comments = comments - self._first_token: Optional[Token] = first_token - self._last_token: Optional[Token] = last_token + self._first_token: Optional[CommonToken] = first_token + self._last_token: Optional[CommonToken] = last_token + + # We want to have nodes be serializable with pickle. + # To allow that we must not have mutable fields such as streams. + # Tokens have streams, create a stream-less copy of tokens. 
+ if self._first_token is not None: + self._first_token = self._first_token.clone() + self._first_token.source = CommonToken.EMPTY_SOURCE + + if self._last_token is not None: + self._last_token = self._last_token.clone() + self._last_token.source = CommonToken.EMPTY_SOURCE @property def display_name(self) -> str: @@ -60,20 +71,32 @@ def __eq__(self, other) -> bool: return False @property - def first_token(self) -> Optional[Token]: + def first_token(self) -> Optional[CommonToken]: + """ + First token of a node. + + Note: Token is disconnected from underline source streams. + """ return self._first_token @first_token.setter - def first_token(self, val): - self._first_token = val + def first_token(self, val: CommonToken): + self._first_token = val.clone() + self._first_token.source = CommonToken.EMPTY_SOURCE @property - def last_token(self) -> Optional[Token]: + def last_token(self) -> Optional[CommonToken]: + """ + Last token of a node. + + Note: Token is disconnected from underline source streams. + """ return self._last_token @last_token.setter - def last_token(self, val): - self._last_token = val + def last_token(self, val: CommonToken): + self._last_token = val.clone() + self._last_token.source = CommonToken.EMPTY_SOURCE @property def start_char(self) -> Optional[int]: From 1877fa084c2fc5ea201a992b1e5d14d3d206bfbd Mon Sep 17 00:00:00 2001 From: Yuri Paliy Date: Tue, 9 Nov 2021 12:18:02 +0200 Subject: [PATCH 3/4] fixed string construction to support pickle. 
--- luaparser/builder.py | 18 ++++++------------ 1 file changed, 6 insertions(+), 12 deletions(-) diff --git a/luaparser/builder.py b/luaparser/builder.py index ed53505..490b369 100644 --- a/luaparser/builder.py +++ b/luaparser/builder.py @@ -220,7 +220,7 @@ def __init__(self, source): self._hidden_handled_stack: List[bool] = [] @property - def _LT(self) -> Token: + def _LT(self) -> CommonToken: """Last token that was consumed in next_i*_* method.""" return self._stream.LT(-1) @@ -665,9 +665,7 @@ def parse_tail(self) -> Node or bool: last_token=self._LT, ) if self.next_is_rc(Tokens.STRING, False): - string = self.parse_lua_str(self.text) - string.first_token = self._LT.start - string.last_token = self._LT + string = self.parse_lua_str(self.text, self._LT) self.success() # noinspection PyTypeChecker return Invoke(None, name, [string]) @@ -705,9 +703,7 @@ def parse_tail(self) -> Node or bool: self.failure_save() if self.next_is_rc(Tokens.STRING, False): - string = self.parse_lua_str(self.text) - string.first_token = self._LT - string.last_token = self._LT + string = self.parse_lua_str(self.text, self._LT) self.success() return string @@ -1410,9 +1406,7 @@ def parse_atom(self) -> Expression or bool: ) if self.next_is(Tokens.STRING) and self.next_is_rc(Tokens.STRING): - string = self.parse_lua_str(self.text) - string.first_token = self._LT - string.last_token = self._LT + string = self.parse_lua_str(self.text, self._LT) return string if self.next_is(Tokens.NIL) and self.next_is_rc(Tokens.NIL): @@ -1426,7 +1420,7 @@ def parse_atom(self) -> Expression or bool: return None @staticmethod - def parse_lua_str(lua_str) -> String: + def parse_lua_str(lua_str, token: Optional[CommonToken] = None) -> String: delimiter: StringDelimiter = StringDelimiter.SINGLE_QUOTE p = re.compile(r"^\[=+\[(.*)]=+]") # nested quote pattern # try remove double quote: @@ -1444,7 +1438,7 @@ def parse_lua_str(lua_str) -> String: # nested quote elif p.match(lua_str): lua_str = 
p.search(lua_str).group(1) - return String(lua_str, delimiter) + return String(lua_str, delimiter, first_token=token, last_token=token) def parse_function_literal(self) -> AnonymousFunction or bool: self.save() From bcffaacf826774cdbd4113b8f2c8c4e27cbb813c Mon Sep 17 00:00:00 2001 From: Yuri Paliy Date: Sun, 28 Nov 2021 16:45:21 +0200 Subject: [PATCH 4/4] bugfix: Guard for setting None tokens. --- luaparser/astnodes.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/luaparser/astnodes.py b/luaparser/astnodes.py index e726e79..6f1f3f6 100644 --- a/luaparser/astnodes.py +++ b/luaparser/astnodes.py @@ -80,9 +80,10 @@ def first_token(self) -> Optional[CommonToken]: return self._first_token @first_token.setter - def first_token(self, val: CommonToken): - self._first_token = val.clone() - self._first_token.source = CommonToken.EMPTY_SOURCE + def first_token(self, val: Optional[CommonToken]): + if val is not None: + self._first_token = val.clone() + self._first_token.source = CommonToken.EMPTY_SOURCE @property def last_token(self) -> Optional[CommonToken]: @@ -94,9 +95,10 @@ def last_token(self) -> Optional[CommonToken]: return self._last_token @last_token.setter - def last_token(self, val: CommonToken): - self._last_token = val.clone() - self._last_token.source = CommonToken.EMPTY_SOURCE + def last_token(self, val: Optional[CommonToken]): + if val is not None: + self._last_token = val.clone() + self._last_token.source = CommonToken.EMPTY_SOURCE + @property def start_char(self) -> Optional[int]: