From 0f9dfdd6237ea093ce038fa18de3e1764b89a6b1 Mon Sep 17 00:00:00 2001
From: Erez Shinan
Date: Sat, 11 May 2019 09:42:16 +0300
Subject: [PATCH] Re-implemented CustomLexer after regression (Issue #377)

---
 lark/parser_frontends.py | 11 ++++++-----
 1 file changed, 6 insertions(+), 5 deletions(-)

diff --git a/lark/parser_frontends.py b/lark/parser_frontends.py
index f81001c..ab69d01 100644
--- a/lark/parser_frontends.py
+++ b/lark/parser_frontends.py
@@ -65,7 +65,7 @@ class WithLexer(Serialize):
         inst.parser = LALR_Parser.deserialize(inst.parser, memo, callbacks)
         inst.init_lexer()
         return inst
-    
+
     def _serialize(self, data, memo):
         data['parser'] = data['parser'].serialize(memo)
 
@@ -107,11 +107,12 @@ class LALR_ContextualLexer(LALR_WithLexer):
 ###}
 
 class LALR_CustomLexer(LALR_WithLexer):
-    def __init__(self, lexer_cls, lexer_conf, parser_conf, options=None):
-        pass # TODO
-
-    def init_lexer(self):
+    def __init__(self, lexer_cls, lexer_conf, parser_conf, options=None):
         self.lexer = lexer_cls(self.lexer_conf)
+        debug = options.debug if options else False
+        self.parser = LALR_Parser(parser_conf, debug=debug)
+        WithLexer.__init__(self, lexer_conf, parser_conf, options)
+
 
 def tokenize_text(text):
     line = 1
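
For reviewers: below is a minimal usage sketch (not part of the patch) of the code path this restores. It assumes the documented custom-lexer API of this era — a lark.lexer.Lexer subclass passed as lexer= together with parser='lalr', which routes construction through LALR_CustomLexer — plus a made-up TypeLexer and grammar; all names in the sketch are illustrative, not taken from this commit.

    # Illustrative only. Assumes lark 0.7-era Lexer/Token API; TypeLexer and the
    # grammar below are hypothetical examples, not part of this change.
    from lark import Lark
    from lark.lexer import Lexer, Token

    class TypeLexer(Lexer):
        def __init__(self, lexer_conf):
            # LALR_CustomLexer passes the lexer configuration through; this sketch ignores it.
            pass

        def lex(self, data):
            # Tokenize a list of Python objects instead of a string.
            for obj in data:
                if isinstance(obj, int):
                    yield Token('INT', obj)
                elif isinstance(obj, str):
                    yield Token('STR', obj)
                else:
                    raise TypeError(obj)

    # A Lexer subclass plus parser='lalr' selects the LALR_CustomLexer frontend.
    parser = Lark("""
            start: pair+
            pair: STR INT*

            %declare STR INT
            """, parser='lalr', lexer=TypeLexer)

    tree = parser.parse(['alice', 1, 27, 'bob', 4])
    print(tree.pretty())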