diff --git a/lark/common.py b/lark/common.py
index efbab01..e217063 100644
--- a/lark/common.py
+++ b/lark/common.py
@@ -5,7 +5,7 @@ from .lexer import TerminalDef
 
 class LexerConf(Serialize):
-    __serialize_fields__ = 'tokens', 'ignore', 'g_regex_flags', 'use_bytes', 'name'
+    __serialize_fields__ = 'tokens', 'ignore', 'g_regex_flags', 'use_bytes', 'lexer_type'
     __serialize_namespace__ = TerminalDef,
 
     def __init__(self, tokens, re_module, ignore=(), postlex=None, callbacks=None, g_regex_flags=0, skip_validation=False, use_bytes=False):
@@ -18,11 +18,11 @@ class LexerConf(Serialize):
         self.skip_validation = skip_validation
         self.use_bytes = use_bytes
 
-        self.name = None
+        self.lexer_type = None
 
 
 class ParserConf(Serialize):
-    __serialize_fields__ = 'rules', 'start', 'name'
+    __serialize_fields__ = 'rules', 'start', 'parser_type'
 
     def __init__(self, rules, callbacks, start):
         assert isinstance(start, list)
@@ -30,6 +30,6 @@ class ParserConf(Serialize):
         self.callbacks = callbacks
         self.start = start
 
-        self.name = None
+        self.parser_type = None
 
 ###}
diff --git a/lark/load_grammar.py b/lark/load_grammar.py
index 36bf849..76834f4 100644
--- a/lark/load_grammar.py
+++ b/lark/load_grammar.py
@@ -884,8 +884,8 @@ class GrammarLoader:
         import re
         lexer_conf = LexerConf(terminals, re, ['WS', 'COMMENT'])
         parser_conf = ParserConf(rules, callback, ['start'])
-        lexer_conf.name = 'standard'
-        parser_conf.name = 'lalr'
+        lexer_conf.lexer_type = 'standard'
+        parser_conf.parser_type = 'lalr'
         self.parser = ParsingFrontend(lexer_conf, parser_conf, {})
 
         self.canonize_tree = CanonizeTree()
diff --git a/lark/parser_frontends.py b/lark/parser_frontends.py
index e329dfa..0dd21a0 100644
--- a/lark/parser_frontends.py
+++ b/lark/parser_frontends.py
@@ -28,15 +28,15 @@ def _wrap_lexer(lexer_class):
 
 
 class MakeParsingFrontend:
-    def __init__(self, parser, lexer):
-        self.parser = parser
-        self.lexer = lexer
+    def __init__(self, parser_type, lexer_type):
+        self.parser_type = parser_type
+        self.lexer_type = lexer_type
 
     def __call__(self, lexer_conf, parser_conf, options):
         assert isinstance(lexer_conf, LexerConf)
         assert isinstance(parser_conf, ParserConf)
-        parser_conf.name = self.parser
-        lexer_conf.name = self.lexer
+        parser_conf.parser_type = self.parser_type
+        lexer_conf.lexer_type = self.lexer_type
         return ParsingFrontend(lexer_conf, parser_conf, options)
 
     @classmethod
@@ -76,12 +76,13 @@ class ParsingFrontend(Serialize):
             'lalr': create_lalr_parser,
             'earley': create_earley_parser,
             'cyk': CYK_FrontEnd,
-        }[parser_conf.name]
+        }[parser_conf.parser_type]
         self.parser = create_parser(lexer_conf, parser_conf, options)
 
         # Set-up lexer
+        lexer_type = lexer_conf.lexer_type
         self.skip_lexer = False
-        if lexer_conf.name in ('dynamic', 'dynamic_complete'):
+        if lexer_type in ('dynamic', 'dynamic_complete'):
             self.skip_lexer = True
             return
 
@@ -89,10 +90,10 @@
             create_lexer = {
                 'standard': create_traditional_lexer,
                 'contextual': create_contextual_lexer,
-            }[lexer_conf.name]
+            }[lexer_type]
         except KeyError:
-            assert issubclass(lexer_conf.name, Lexer), lexer_conf.name
-            self.lexer = _wrap_lexer(lexer_conf.name)(lexer_conf)
+            assert issubclass(lexer_type, Lexer), lexer_type
+            self.lexer = _wrap_lexer(lexer_type)(lexer_conf)
         else:
             self.lexer = create_lexer(lexer_conf, self.parser, lexer_conf.postlex)
 
@@ -100,20 +101,18 @@
         if lexer_conf.postlex:
             self.lexer = PostLexConnector(self.lexer, lexer_conf.postlex)
 
-    def _parse(self, start, input, *args):
+    def parse(self, text, start=None):
         if start is None:
             start = self.parser_conf.start
             if len(start) > 1:
                 raise ConfigurationError("Lark initialized with more than 1 possible start rule. Must specify which start rule to parse", start)
             start ,= start
-        return self.parser.parse(input, start, *args)
 
-    def parse(self, text, start=None):
         if self.skip_lexer:
-            return self._parse(start, text)
+            return self.parser.parse(text, start)
 
-        lexer = LexerThread(self.lexer, text)
-        return self._parse(start, lexer)
+        lexer_thread = LexerThread(self.lexer, text)
+        return self.parser.parse(lexer_thread, start)
 
 
 def get_frontend(parser, lexer):
@@ -207,9 +206,9 @@ def create_earley_parser(lexer_conf, parser_conf, options):
     tree_class = options.tree_class or Tree if options.ambiguity != 'forest' else None
 
     extra = {}
-    if lexer_conf.name == 'dynamic':
+    if lexer_conf.lexer_type == 'dynamic':
         f = create_earley_parser__dynamic
-    elif lexer_conf.name == 'dynamic_complete':
+    elif lexer_conf.lexer_type == 'dynamic_complete':
         extra['complete_lex'] =True
         f = create_earley_parser__dynamic
     else:
@@ -226,8 +225,8 @@ class CYK_FrontEnd:
 
         self.callbacks = parser_conf.callbacks
 
-    def parse(self, lexer, start):
-        tokens = list(lexer.lex(None))
+    def parse(self, lexer_thread, start):
+        tokens = list(lexer_thread.lex(None))
         tree = self.parser.parse(tokens, start)
         return self._transform(tree)
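
A usage sketch for reviewers (not part of the patch): the renamed fields are filled in from the `parser` and `lexer` arguments of the public `Lark` constructor via `MakeParsingFrontend`, so both dispatch paths touched above can be exercised end-to-end. The toy grammar below is illustrative; the constructor arguments are the existing public API.

    # Sketch: exercising the two code paths this patch renames.
    from lark import Lark

    # parser='lalr' + lexer='contextual' become parser_type/lexer_type and
    # select create_lalr_parser / create_contextual_lexer in ParsingFrontend.
    lalr = Lark('start: "a"+', parser='lalr', lexer='contextual')
    print(lalr.parse('aaa'))

    # lexer='dynamic' sets skip_lexer=True, so ParsingFrontend.parse hands
    # the raw text (no LexerThread) straight to the Earley parser.
    earley = Lark('start: "a"+', parser='earley', lexer='dynamic')
    print(earley.parse('aaa'))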