| @@ -5,7 +5,7 @@ import time | |||||
| from collections import defaultdict | from collections import defaultdict | ||||
| from io import open | from io import open | ||||
| from .utils import STRING_TYPE, Serialize | |||||
| from .utils import STRING_TYPE, Serialize, SerializeMemoizer | |||||
| from .load_grammar import load_grammar | from .load_grammar import load_grammar | ||||
| from .tree import Tree | from .tree import Tree | ||||
| from .common import LexerConf, ParserConf | from .common import LexerConf, ParserConf | ||||
| @@ -241,7 +241,9 @@ class Lark(Serialize): | |||||
| return self.parser_class(self.lexer_conf, parser_conf, options=self.options) | return self.parser_class(self.lexer_conf, parser_conf, options=self.options) | ||||
| @classmethod | @classmethod | ||||
| def deserialize(cls, data, memo): | |||||
| def deserialize(cls, data, namespace, memo): | |||||
| if memo: | |||||
| memo = SerializeMemoizer.deserialize(memo, namespace, {}) | |||||
| inst = cls.__new__(cls) | inst = cls.__new__(cls) | ||||
| inst.options = LarkOptions.deserialize(data['options'], memo) | inst.options = LarkOptions.deserialize(data['options'], memo) | ||||
| inst.rules = [Rule.deserialize(r, memo) for r in data['rules']] | inst.rules = [Rule.deserialize(r, memo) for r in data['rules']] | ||||
| @@ -51,7 +51,7 @@ class WithLexer(Serialize): | |||||
| lexer_conf = None | lexer_conf = None | ||||
| __serialize_fields__ = 'parser', 'lexer' | __serialize_fields__ = 'parser', 'lexer' | ||||
| __serialize_namespace__ = Rule, ContextualLexer | |||||
| __serialize_namespace__ = Rule, ContextualLexer, TraditionalLexer | |||||
| @classmethod | @classmethod | ||||
| def deserialize(cls, data, memo, callbacks): | def deserialize(cls, data, memo, callbacks): | ||||
| @@ -114,8 +114,8 @@ def main(fobj, start): | |||||
| print('Shift = 0') | print('Shift = 0') | ||||
| print('Reduce = 1') | print('Reduce = 1') | ||||
| print("def Lark_StandAlone():") | print("def Lark_StandAlone():") | ||||
| print(" memo = SerializeMemoizer.deserialize(MEMO, {'Rule': Rule, 'TerminalDef': TerminalDef}, {})") | |||||
| print(" return Lark.deserialize(DATA, memo)") | |||||
| print(" namespace = {'Rule': Rule, 'TerminalDef': TerminalDef}") | |||||
| print(" return Lark.deserialize(DATA, namespace, MEMO)") | |||||
| @@ -21,6 +21,8 @@ from lark.lark import Lark | |||||
| from lark.exceptions import GrammarError, ParseError, UnexpectedToken, UnexpectedInput, UnexpectedCharacters | from lark.exceptions import GrammarError, ParseError, UnexpectedToken, UnexpectedInput, UnexpectedCharacters | ||||
| from lark.tree import Tree | from lark.tree import Tree | ||||
| from lark.visitors import Transformer | from lark.visitors import Transformer | ||||
| from lark.grammar import Rule | |||||
| from lark.lexer import TerminalDef | |||||
| __path__ = os.path.dirname(__file__) | __path__ = os.path.dirname(__file__) | ||||
| def _read(n, *args): | def _read(n, *args): | ||||
| @@ -1429,6 +1431,23 @@ def _make_parser_test(LEXER, PARSER): | |||||
| parser.parse(r'"That" "And a \"b"') | parser.parse(r'"That" "And a \"b"') | ||||
@unittest.skipIf(PARSER!='lalr', "Serialize currently only works for LALR parsers (though it should be easy to extend)")
def test_serialize(self):
    """Round-trip a parser through serialize/deserialize.

    Exercises both forms: plain serialization (empty namespace and memo)
    and memoized serialization, where shared Rule/TerminalDef objects are
    stored once in a memo and resolved against a namespace on load.
    """
    grammar = """
        start: "A" b "C"
        b: "B"
    """
    parser = _Lark(grammar)
    expected = Tree('start', [Tree('b', [])])

    # Plain round-trip: no shared-object namespace, no memo.
    restored = Lark.deserialize(parser.serialize(), {}, {})
    self.assertEqual(restored.parse('ABC'), expected)

    # Memoized round-trip: serialize with a memo of shared classes,
    # then deserialize by resolving class names through the namespace.
    namespace = {'Rule': Rule, 'TerminalDef': TerminalDef}
    data, memo = parser.memo_serialize(namespace.values())
    restored_memo = Lark.deserialize(data, namespace, memo)
    self.assertEqual(restored_memo.parse('ABC'), expected)
| _NAME = "Test" + PARSER.capitalize() + LEXER.capitalize() | _NAME = "Test" + PARSER.capitalize() + LEXER.capitalize() | ||||
| _TestParser.__name__ = _NAME | _TestParser.__name__ = _NAME | ||||