This repo contains code to mirror other repos. It also contains the code that is getting mirrored.

from .exceptions import ConfigurationError, GrammarError, assert_config
from .utils import get_regexp_width, Serialize
from .parsers.grammar_analysis import GrammarAnalyzer
from .lexer import LexerThread, TraditionalLexer, ContextualLexer, Lexer, Token, TerminalDef
from .parsers import earley, xearley, cyk
from .parsers.lalr_parser import LALR_Parser
from .tree import Tree
from .common import LexerConf, ParserConf

try:
    import regex
except ImportError:
    regex = None
import re

# Code between the ###{standalone and ###} markers is collected by lark's
# standalone-parser generator and copied into the generated module.
###{standalone

def _wrap_lexer(lexer_class):
    future_interface = getattr(lexer_class, '__future_interface__', False)
    if future_interface:
        return lexer_class
    else:
        class CustomLexerWrapper(Lexer):
            def __init__(self, lexer_conf):
                self.lexer = lexer_class(lexer_conf)
            def lex(self, lexer_state, parser_state):
                return self.lexer.lex(lexer_state.text)
        return CustomLexerWrapper


class MakeParsingFrontend:
    def __init__(self, parser_type, lexer_type):
        self.parser_type = parser_type
        self.lexer_type = lexer_type

    def __call__(self, lexer_conf, parser_conf, options):
        assert isinstance(lexer_conf, LexerConf)
        assert isinstance(parser_conf, ParserConf)
        parser_conf.parser_type = self.parser_type
        lexer_conf.lexer_type = self.lexer_type
        return ParsingFrontend(lexer_conf, parser_conf, options)

    @classmethod
    def deserialize(cls, data, memo, lexer_conf, callbacks, options):
        parser_conf = ParserConf.deserialize(data['parser_conf'], memo)
        parser = LALR_Parser.deserialize(data['parser'], memo, callbacks, options.debug)
        parser_conf.callbacks = callbacks
        return ParsingFrontend(lexer_conf, parser_conf, options, parser=parser)


class ParsingFrontend(Serialize):
    __serialize_fields__ = 'lexer_conf', 'parser_conf', 'parser', 'options'

    def __init__(self, lexer_conf, parser_conf, options, parser=None):
        self.parser_conf = parser_conf
        self.lexer_conf = lexer_conf
        self.options = options

        # Set up the parser
        if parser:  # From cache
            self.parser = parser
        else:
            create_parser = {
                'lalr': create_lalr_parser,
                'earley': create_earley_parser,
                'cyk': CYK_FrontEnd,
            }[parser_conf.parser_type]
            self.parser = create_parser(lexer_conf, parser_conf, options)

        # Set up the lexer
        lexer_type = lexer_conf.lexer_type
        self.skip_lexer = False
        if lexer_type in ('dynamic', 'dynamic_complete'):
            assert lexer_conf.postlex is None
            self.skip_lexer = True
            return

        try:
            create_lexer = {
                'standard': create_traditional_lexer,
                'contextual': create_contextual_lexer,
            }[lexer_type]
        except KeyError:
            assert issubclass(lexer_type, Lexer), lexer_type
            self.lexer = _wrap_lexer(lexer_type)(lexer_conf)
        else:
            self.lexer = create_lexer(lexer_conf, self.parser, lexer_conf.postlex)

        if lexer_conf.postlex:
            self.lexer = PostLexConnector(self.lexer, lexer_conf.postlex)

    def parse(self, text, start=None, on_error=None):
        if start is None:
            start = self.parser_conf.start
            if len(start) > 1:
                raise ConfigurationError("Lark initialized with more than 1 possible start rule. Must specify which start rule to parse", start)
            start, = start

        stream = text if self.skip_lexer else LexerThread(self.lexer, text)
        kw = {} if on_error is None else {'on_error': on_error}
        return self.parser.parse(stream, start, **kw)


def get_frontend(parser, lexer):
    assert_config(parser, ('lalr', 'earley', 'cyk'))
    if not isinstance(lexer, type):     # not a custom lexer?
        expected = {
            'lalr': ('standard', 'contextual'),
            'earley': ('standard', 'dynamic', 'dynamic_complete'),
            'cyk': ('standard', ),
        }[parser]
        assert_config(lexer, expected, 'Parser %r does not support lexer %%r, expected one of %%s' % parser)

    return MakeParsingFrontend(parser, lexer)
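# Illustrative sketch, not part of the original module: get_frontend() only
# validates the parser/lexer combination and returns a MakeParsingFrontend
# factory; lark itself normally calls it internally. Assuming lexer_conf,
# parser_conf, and options have already been built by Lark from a grammar,
# direct use would look roughly like:
#
#     make_frontend = get_frontend('lalr', 'contextual')
#     frontend = make_frontend(lexer_conf, parser_conf, options)
#     tree = frontend.parse("some input text")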
def _get_lexer_callbacks(transformer, terminals):
    result = {}
    for terminal in terminals:
        callback = getattr(transformer, terminal.name, None)
        if callback is not None:
            result[terminal.name] = callback
    return result


class PostLexConnector:
    def __init__(self, lexer, postlexer):
        self.lexer = lexer
        self.postlexer = postlexer

    def make_lexer_state(self, text):
        return self.lexer.make_lexer_state(text)

    def lex(self, lexer_state, parser_state):
        i = self.lexer.lex(lexer_state, parser_state)
        return self.postlexer.process(i)


def create_traditional_lexer(lexer_conf, parser, postlex):
    return TraditionalLexer(lexer_conf)


def create_contextual_lexer(lexer_conf, parser, postlex):
    states = {idx: list(t.keys()) for idx, t in parser._parse_table.states.items()}
    always_accept = postlex.always_accept if postlex else ()
    return ContextualLexer(lexer_conf, states, always_accept=always_accept)


def create_lalr_parser(lexer_conf, parser_conf, options=None):
    debug = options.debug if options else False
    return LALR_Parser(parser_conf, debug=debug)


# Placeholders inside the standalone section; the real definitions follow below.
create_earley_parser = NotImplemented
CYK_FrontEnd = NotImplemented
###}


class EarleyRegexpMatcher:
    def __init__(self, lexer_conf):
        self.regexps = {}
        for t in lexer_conf.terminals:
            if t.priority != 1:
                raise GrammarError("Dynamic Earley doesn't support weights on terminals", t, t.priority)

            regexp = t.pattern.to_regexp()
            try:
                width = get_regexp_width(regexp)[0]
            except ValueError:
                raise GrammarError("Bad regexp in token %s: %s" % (t.name, regexp))
            else:
                if width == 0:
                    raise GrammarError("Dynamic Earley doesn't allow zero-width regexps", t)

            if lexer_conf.use_bytes:
                regexp = regexp.encode('utf-8')

            self.regexps[t.name] = lexer_conf.re_module.compile(regexp, lexer_conf.g_regex_flags)

    def match(self, term, text, index=0):
        return self.regexps[term.name].match(text, index)


def create_earley_parser__dynamic(lexer_conf, parser_conf, options=None, **kw):
    earley_matcher = EarleyRegexpMatcher(lexer_conf)
    return xearley.Parser(parser_conf, earley_matcher.match, ignore=lexer_conf.ignore, **kw)


def _match_earley_basic(term, token):
    return term.name == token.type


def create_earley_parser__basic(lexer_conf, parser_conf, options, **kw):
    return earley.Parser(parser_conf, _match_earley_basic, **kw)


def create_earley_parser(lexer_conf, parser_conf, options):
    resolve_ambiguity = options.ambiguity == 'resolve'
    debug = options.debug if options else False
    tree_class = options.tree_class or Tree if options.ambiguity != 'forest' else None

    extra = {}
    if lexer_conf.lexer_type == 'dynamic':
        f = create_earley_parser__dynamic
    elif lexer_conf.lexer_type == 'dynamic_complete':
        extra['complete_lex'] = True
        f = create_earley_parser__dynamic
    else:
        f = create_earley_parser__basic

    return f(lexer_conf, parser_conf, options, resolve_ambiguity=resolve_ambiguity, debug=debug, tree_class=tree_class, **extra)


class CYK_FrontEnd:
    def __init__(self, lexer_conf, parser_conf, options=None):
        self._analysis = GrammarAnalyzer(parser_conf)
        self.parser = cyk.Parser(parser_conf.rules)
        self.callbacks = parser_conf.callbacks

    def parse(self, lexer_thread, start):
        tokens = list(lexer_thread.lex(None))
        tree = self.parser.parse(tokens, start)
        return self._transform(tree)

    def _transform(self, tree):
        subtrees = list(tree.iter_subtrees())
        for subtree in subtrees:
            subtree.children = [self._apply_callback(c) if isinstance(c, Tree) else c for c in subtree.children]
        return self._apply_callback(tree)

    def _apply_callback(self, tree):
        return self.callbacks[tree.rule](tree.children)
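# ---------------------------------------------------------------------------
# Usage sketch (illustrative, not part of this module): assuming this file is
# the frontend module of the lark parsing library, it is normally driven by
# lark.Lark, which builds the LexerConf/ParserConf objects from a grammar and
# then selects a frontend via get_frontend(). Through the public API that
# looks roughly like:
#
#     from lark import Lark
#
#     parser = Lark('''start: WORD "," WORD "!"
#                      %import common.WORD
#                      %ignore " "
#                   ''', parser='lalr', lexer='contextual')
#     tree = parser.parse("Hello, World!")
# ---------------------------------------------------------------------------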