
Commit b20cf8b

Move parser.grammar check deeper into TreeMatcher
1 parent 81fbfe4 commit b20cf8b

2 files changed: 9 additions, 5 deletions

lark/reconstruct.py

Lines changed: 0 additions & 4 deletions
@@ -3,7 +3,6 @@
 
 from typing import Dict, Callable, Iterable, Optional
 
-from .exceptions import ConfigurationError
 from .lark import Lark
 from .tree import Tree, ParseTree
 from .visitors import Transformer_InPlace
@@ -79,9 +78,6 @@ class Reconstructor(TreeMatcher):
     write_tokens: WriteTokensTransformer
 
     def __init__(self, parser: Lark, term_subs: Optional[Dict[str, Callable[[Symbol], str]]]=None) -> None:
-        if not hasattr(parser, 'grammar') and parser.options.cache:
-            raise ConfigurationError('Unanalyzed grammar not available from cached parser, use cache_grammar=True')
-
         TreeMatcher.__init__(self, parser)
 
         self.write_tokens = WriteTokensTransformer({t.name:t for t in self.tokens}, term_subs or {})
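Note: the public Reconstructor call site is unchanged by this commit; construction still passes straight through to TreeMatcher.__init__. A minimal usage sketch (the grammar and input below are illustrative, not taken from the repository):

```python
# Illustrative sketch only; the grammar and input are made up for this note.
from lark import Lark
from lark.reconstruct import Reconstructor

grammar = r"""
    start: "(" WORD ")"
    WORD: /\w+/
"""

# TreeMatcher asserts that maybe_placeholders is off, so disable it explicitly.
parser = Lark(grammar, maybe_placeholders=False)
tree = parser.parse("(hello)")
print(Reconstructor(parser).reconstruct(tree))  # prints the reconstructed text, "(hello)"
```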

lark/tree_matcher.py

Lines changed: 9 additions & 1 deletion
@@ -5,6 +5,7 @@
 
 from . import Tree, Token
 from .common import ParserConf
+from .exceptions import ConfigurationError
 from .parsers import earley
 from .grammar import Rule, Terminal, NonTerminal
 
@@ -89,8 +90,15 @@ class TreeMatcher:
     def __init__(self, parser):
         # XXX TODO calling compile twice returns different results!
         assert not parser.options.maybe_placeholders
+
         # XXX TODO: we just ignore the potential existence of a postlexer
-        self.tokens, rules, _extra = parser.grammar.compile(parser.options.start, set())
+        if parser.options.postlex is None:
+            self.tokens = parser.tokens.copy()
+            rules = parser.rules.copy()
+        else:
+            if not hasattr(parser, 'grammar') and parser.options.cache:
+                raise ConfigurationError('Unanalyzed grammar not available from cached parser, use cache_grammar=True')
+            self.tokens, rules, _extra = parser.grammar.compile(parser.options.start, set())
 
         self.rules_for_root = defaultdict(list)
 
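Note on the new branch above: when no postlexer is configured, TreeMatcher now reuses the already-compiled parser.tokens and parser.rules, so a parser loaded from cache can be matched even though parser.grammar is unavailable; only the postlexer path still recompiles the grammar and therefore needs it. A rough sketch of the two cases, assuming (based only on the error message) that this branch offers a cache_grammar option on Lark:

```python
# Sketch only: `cache_grammar` is inferred from the ConfigurationError message
# and has not been verified against this branch's Lark options.
from lark import Lark
from lark.reconstruct import Reconstructor

grammar = r"""
    start: NAME "=" NAME
    NAME: /\w+/
    %ignore " "
"""

# No postlexer: TreeMatcher copies parser.tokens / parser.rules, so this keeps
# working when the parser comes from cache and parser.grammar is missing.
parser = Lark(grammar, parser='lalr', cache=True, maybe_placeholders=False)
tree = parser.parse("a = b")
print(Reconstructor(parser).reconstruct(tree))  # ignored whitespace is not restored

# With a postlexer (parser.options.postlex is not None), TreeMatcher still calls
# parser.grammar.compile(...); a cached parser that did not keep its grammar
# raises ConfigurationError and suggests cache_grammar=True.
```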