From 3c64c56bcdcc76d81be065e7ef421bf94285fb98 Mon Sep 17 00:00:00 2001
From: Erez Shinan
Date: Tue, 9 Apr 2019 11:59:26 +0300
Subject: [PATCH] All tests passing

---
 lark/parser_frontends.py      |  2 +-
 lark/parsers/lalr_analysis.py |  9 ++++-----
 lark/parsers/lalr_parser.py   |  4 ++--
 lark/tools/standalone.py      | 13 ++++++-------
 lark/utils.py                 | 19 ++++++++++++-------
 5 files changed, 25 insertions(+), 22 deletions(-)

diff --git a/lark/parser_frontends.py b/lark/parser_frontends.py
index 6750480..e7f64a7 100644
--- a/lark/parser_frontends.py
+++ b/lark/parser_frontends.py
@@ -27,7 +27,7 @@ class WithLexer(Serialize):
         return inst
 
     def _serialize(self, data, memo):
-        data['parser'] = data['parser'].serialize()
+        data['parser'] = data['parser'].serialize(memo)
 
     def init_traditional_lexer(self, lexer_conf):
         self.lexer_conf = lexer_conf
diff --git a/lark/parsers/lalr_analysis.py b/lark/parsers/lalr_analysis.py
index e34b8c3..cceaa45 100644
--- a/lark/parsers/lalr_analysis.py
+++ b/lark/parsers/lalr_analysis.py
@@ -13,6 +13,7 @@ from ..utils import classify, classify_bool, bfs, fzset, Serialize, Enumerator
 from ..exceptions import GrammarError
 
 from .grammar_analysis import GrammarAnalyzer, Terminal
+from ..grammar import Rule
 
 ###{standalone
 
@@ -33,19 +34,18 @@ class ParseTable:
         self.start_state = start_state
         self.end_state = end_state
 
-    def serialize(self):
+    def serialize(self, memo):
         tokens = Enumerator()
         rules = Enumerator()
 
         states = {
-            state: {tokens.get(token): ((1, rules.get(arg)) if action is Reduce else (0, arg))
+            state: {tokens.get(token): ((1, arg.serialize(memo)) if action is Reduce else (0, arg))
                     for token, (action, arg) in actions.items()}
             for state, actions in self.states.items()
         }
 
         return {
             'tokens': tokens.reversed(),
-            'rules': {idx: r.serialize() for idx, r in rules.reversed().items()},
             'states': states,
             'start_state': self.start_state,
             'end_state': self.end_state,
@@ -54,9 +54,8 @@ class ParseTable:
     @classmethod
     def deserialize(cls, data, memo):
         tokens = data['tokens']
-        rules = data['rules']
         states = {
-            state: {tokens[token]: ((Reduce, rules[arg]) if action==1 else (Shift, arg))
+            state: {tokens[token]: ((Reduce, Rule.deserialize(arg, memo)) if action==1 else (Shift, arg))
                     for token, (action, arg) in actions.items()}
             for state, actions in data['states'].items()
         }
diff --git a/lark/parsers/lalr_parser.py b/lark/parsers/lalr_parser.py
index 241a47e..5510e3d 100644
--- a/lark/parsers/lalr_parser.py
+++ b/lark/parsers/lalr_parser.py
@@ -28,8 +28,8 @@ class LALR_Parser(object):
         inst.parser = _Parser(IntParseTable.deserialize(data, memo), callbacks)
         return inst
 
-    def serialize(self):
-        return self._parse_table.serialize()
+    def serialize(self, memo):
+        return self._parse_table.serialize(memo)
 
     def parse(self, *args):
         return self.parser.parse(*args)
diff --git a/lark/tools/standalone.py b/lark/tools/standalone.py
index 99e1929..3452a83 100644
--- a/lark/tools/standalone.py
+++ b/lark/tools/standalone.py
@@ -49,7 +49,7 @@
 from lark import Lark
 from lark.parsers.lalr_analysis import Reduce
 
-from lark.grammar import RuleOptions
+from lark.grammar import RuleOptions, Rule
 from lark.lexer import TerminalDef
 
 _dir = path.dirname(__file__)
@@ -63,13 +63,13 @@ EXTRACT_STANDALONE_FILES = [
     'tree.py',
     'visitors.py',
     'indenter.py',
+    'grammar.py',
     'lexer.py',
     'parse_tree_builder.py',
     'parsers/lalr_parser.py',
     'parsers/lalr_analysis.py',
     'parser_frontends.py',
     'lark.py',
-    'grammar.py',
 ]
 
 def extract_sections(lines):
@@ -101,7 +101,7 @@ def main(fobj, start):
         with open(os.path.join(_larkdir, pyfile)) as f:
             print (extract_sections(f)['standalone'])
 
-    data, m = lark_inst.memo_serialize([TerminalDef])
+    data, m = lark_inst.memo_serialize([TerminalDef, Rule])
     print( 'DATA = (' )
     # pprint(data, width=160)
     print(data)
@@ -113,10 +113,9 @@ def main(fobj, start):
 
 
 
     print('Shift = 0')
     print('Reduce = 1')
 
-    print("def load_parser():")
-    print("    return Lark.deserialize(DATA)")
-
-
+    print("def Lark_StandAlone():")
+    print("    memo = SerializeMemoizer.deserialize(MEMO, {'Rule': Rule, 'TerminalDef': TerminalDef}, {})")
+    print("    return Lark.deserialize(DATA, memo)")
diff --git a/lark/utils.py b/lark/utils.py
index 0849745..374c293 100644
--- a/lark/utils.py
+++ b/lark/utils.py
@@ -42,14 +42,11 @@ def bfs(initial, expand):
 
 
 
 
-###{standalone
-import sys, re
-
-Py36 = (sys.version_info[:2] >= (3, 6))
-
-
 def _serialize(value, memo):
+    # if memo and memo.in_types(value):
+    #     return {'__memo__': memo.memoized.get(value)}
+
     if isinstance(value, Serialize):
         return value.serialize(memo)
     elif isinstance(value, list):
@@ -60,11 +57,14 @@
         return {key:_serialize(elem, memo) for key, elem in value.items()}
     return value
 
+###{standalone
 def _deserialize(data, namespace, memo):
     if isinstance(data, dict):
         if '__type__' in data:  # Object
             class_ = namespace[data['__type__']]
             return class_.deserialize(data, memo)
+        elif '__memo__' in data:
+            return memo[data['__memo__']]
         return {key:_deserialize(value, namespace, memo) for key, value in data.items()}
     elif isinstance(data, list):
         return [_deserialize(value, namespace, memo) for value in data]
@@ -159,6 +159,11 @@ def smart_decorator(f, create_decorator):
     else:
         return create_decorator(f.__func__.__call__, True)
 
+import sys, re
+Py36 = (sys.version_info[:2] >= (3, 6))
+###}
+
+
 def dedup_list(l):
     """Given a list (l) will removing duplicates from the list,
        preserving the original order of the list. Assumes that
     dedup = set()
     return [ x for x in l if not (x in dedup or dedup.add(x))]
 
-###}
 
 try:
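
The round trip that the new Lark_StandAlone() glue performs can be sketched in ordinary Python against a lark checkout at this commit. This is a rough illustration, not code from the patch: the toy grammar, the lark.utils import path for SerializeMemoizer, and the behaviour of parse() on the restored instance are assumptions.

    from lark import Lark
    from lark.grammar import Rule
    from lark.lexer import TerminalDef
    from lark.utils import SerializeMemoizer   # assumed import path

    # Build a parser normally, then serialize it. Rule and TerminalDef objects
    # go into a shared memo table instead of being inlined per parse-table entry.
    parser = Lark('start: "a"+', parser='lalr')          # assumed toy grammar
    data, memo_data = parser.memo_serialize([TerminalDef, Rule])

    # Rebuild the memo first, then the Lark instance from it; this mirrors the
    # Lark_StandAlone() function that tools/standalone.py now prints.
    memo = SerializeMemoizer.deserialize(
        memo_data, {'Rule': Rule, 'TerminalDef': TerminalDef}, {})
    restored = Lark.deserialize(data, memo)
    print(restored.parse("aaa").pretty())                # assumed to match the original parser

The memo is what keeps each Rule from being serialized again for every Reduce entry in the parse table; the '__memo__' handling added to _deserialize in lark/utils.py resolves those references back to the shared objects on load.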