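# Parser frontends for Lark: each class below pairs a parsing algorithm
# (LALR(1), Earley, or CYK) with a lexing strategy, and get_frontend()
# maps the user's (parser, lexer) option strings to the matching class.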
from .utils import get_regexp_width, Serialize
from .parsers.grammar_analysis import GrammarAnalyzer
from .lexer import LexerThread, TraditionalLexer, ContextualLexer, Lexer, Token, TerminalDef
from .parsers import earley, xearley, cyk
from .parsers.lalr_parser import LALR_Parser
from .grammar import Rule
from .tree import Tree
from .common import LexerConf
from .exceptions import UnexpectedInput

try:
    import regex
except ImportError:
    regex = None
import re

###{standalone
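
# Resolve the (parser, lexer) option pair to a frontend class. 'lalr' works
# with the standard or contextual lexer, or with a user-provided Lexer
# subclass; 'earley' with 'standard', 'dynamic' or 'dynamic_complete';
# 'cyk' only with 'standard'.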
def get_frontend(parser, lexer):
    if parser == 'lalr':
        if lexer is None:
            raise ValueError('The LALR parser requires use of a lexer')
        elif lexer == 'standard':
            return LALR_TraditionalLexer
        elif lexer == 'contextual':
            return LALR_ContextualLexer
        elif issubclass(lexer, Lexer):
            class CustomLexerWrapper(Lexer):
                def __init__(self, lexer_conf):
                    self.lexer = lexer(lexer_conf)

                def lex(self, lexer_state, parser_state):
                    return self.lexer.lex(lexer_state.text)

            class LALR_CustomLexerWrapper(LALR_CustomLexer):
                def __init__(self, lexer_conf, parser_conf, options=None):
                    super(LALR_CustomLexerWrapper, self).__init__(
                        lexer, lexer_conf, parser_conf, options=options)

                def init_lexer(self):
                    future_interface = getattr(lexer, '__future_interface__', False)
                    if future_interface:
                        self.lexer = lexer(self.lexer_conf)
                    else:
                        self.lexer = CustomLexerWrapper(self.lexer_conf)

            return LALR_CustomLexerWrapper
        else:
            raise ValueError('Unknown lexer: %s' % lexer)
    elif parser == 'earley':
        if lexer == 'standard':
            return Earley
        elif lexer == 'dynamic':
            return XEarley
        elif lexer == 'dynamic_complete':
            return XEarley_CompleteLex
        elif lexer == 'contextual':
            raise ValueError('The Earley parser does not support the contextual lexer')
        else:
            raise ValueError('Unknown lexer: %s' % lexer)
    elif parser == 'cyk':
        if lexer == 'standard':
            return CYK
        else:
            raise ValueError('The CYK parser requires using the standard lexer.')
    else:
        raise ValueError('Unknown parser: %s' % parser)
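
# Common base: fills in the default start symbol (raising if several were
# declared and none was chosen) before delegating to self.parser.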
class _ParserFrontend(Serialize):
    def _parse(self, start, input, *args):
        if start is None:
            start = self.start
            if len(start) > 1:
                raise ValueError("Lark initialized with more than 1 possible start rule. Must specify which start rule to parse", start)
            start, = start
        return self.parser.parse(input, start, *args)
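
# Collect transformer methods named after terminals; deserialize() below
# installs them as lexer callbacks so that matching tokens are transformed
# as the lexer produces them.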
def _get_lexer_callbacks(transformer, terminals):
    result = {}
    for terminal in terminals:
        callback = getattr(transformer, terminal.name, None)
        if callback is not None:
            result[terminal.name] = callback
    return result
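
# Chains a post-lexing processor (e.g. an indentation handler) onto a
# lexer's token stream via postlexer.process().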
class PostLexConnector:
    def __init__(self, lexer, postlexer):
        self.lexer = lexer
        self.postlexer = postlexer

    def make_lexer_state(self, text):
        return self.lexer.make_lexer_state(text)

    def lex(self, lexer_state, parser_state):
        i = self.lexer.lex(lexer_state, parser_state)
        return self.postlexer.process(i)
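
# Base for frontends that own a lexer: stores lexer/parser configuration,
# supports (de)serialization, and splices in a postlexer when configured.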
class WithLexer(_ParserFrontend):
    lexer = None
    parser = None
    lexer_conf = None
    start = None

    __serialize_fields__ = 'parser', 'lexer_conf', 'start'
    __serialize_namespace__ = LexerConf,

    def __init__(self, lexer_conf, parser_conf, options=None):
        self.lexer_conf = lexer_conf
        self.start = parser_conf.start
        self.postlex = lexer_conf.postlex

    @classmethod
    def deserialize(cls, data, memo, callbacks, options):
        inst = super(WithLexer, cls).deserialize(data, memo)

        inst.postlex = options.postlex
        inst.parser = LALR_Parser.deserialize(inst.parser, memo, callbacks, options.debug)

        terminals = [item for item in memo.values() if isinstance(item, TerminalDef)]
        inst.lexer_conf.callbacks = _get_lexer_callbacks(options.transformer, terminals)
        inst.lexer_conf.re_module = regex if options.regex else re
        inst.lexer_conf.use_bytes = options.use_bytes
        inst.lexer_conf.g_regex_flags = options.g_regex_flags
        inst.lexer_conf.skip_validation = True
        inst.init_lexer()

        return inst

    def _serialize(self, data, memo):
        data['parser'] = data['parser'].serialize(memo)

    def make_lexer(self, text):
        lexer = self.lexer
        if self.postlex:
            lexer = PostLexConnector(self.lexer, self.postlex)
        return LexerThread(lexer, text)

    def parse(self, text, start=None):
        try:
            return self._parse(start, self.make_lexer(text))
        except UnexpectedInput as e:
            if e._all_terminals is None:
                e._all_terminals = self.lexer_conf.terminals
            raise e

    def init_traditional_lexer(self):
        self.lexer = TraditionalLexer(self.lexer_conf)
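
# LALR(1) base: constructs the LALR parser, then defers lexer choice to the
# subclass's init_lexer().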
class LALR_WithLexer(WithLexer):
    def __init__(self, lexer_conf, parser_conf, options=None):
        debug = options.debug if options else False
        self.parser = LALR_Parser(parser_conf, debug=debug)
        WithLexer.__init__(self, lexer_conf, parser_conf, options)

        self.init_lexer()

    def init_lexer(self, **kw):
        raise NotImplementedError()
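
# LALR(1) driven by the standard, context-free lexer ('standard').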
class LALR_TraditionalLexer(LALR_WithLexer):
    def init_lexer(self):
        self.init_traditional_lexer()
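
# LALR(1) driven by the contextual lexer ('contextual'): the parse table is
# inspected so that in each parser state the lexer only attempts the
# terminals that state can actually accept.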
class LALR_ContextualLexer(LALR_WithLexer):
    def init_lexer(self):
        states = {idx: list(t.keys()) for idx, t in self.parser._parse_table.states.items()}
        always_accept = self.postlex.always_accept if self.postlex else ()
        self.lexer = ContextualLexer(self.lexer_conf, states, always_accept=always_accept)
###}
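
# Base for user-supplied lexers; get_frontend() derives a wrapper subclass
# from this on the fly when given a Lexer class.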
class LALR_CustomLexer(LALR_WithLexer):
    def __init__(self, lexer_cls, lexer_conf, parser_conf, options=None):
        self.lexer = lexer_cls(lexer_conf)
        debug = options.debug if options else False
        self.parser = LALR_Parser(parser_conf, debug=debug)
        WithLexer.__init__(self, lexer_conf, parser_conf, options)
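
# Earley over a standard token stream; terminals match by token type since
# the input is already tokenized.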
class Earley(WithLexer):
    def __init__(self, lexer_conf, parser_conf, options=None):
        WithLexer.__init__(self, lexer_conf, parser_conf, options)
        self.init_traditional_lexer()

        resolve_ambiguity = options.ambiguity == 'resolve'
        debug = options.debug if options else False
        tree_class = (options.tree_class or Tree) if options.ambiguity != 'forest' else None
        self.parser = earley.Parser(parser_conf, self.match, resolve_ambiguity=resolve_ambiguity, debug=debug, tree_class=tree_class)

    def make_lexer(self, text):
        return WithLexer.make_lexer(self, text).lex(None)

    def match(self, term, token):
        return term.name == token.type
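
# Earley with a dynamic lexer: skips tokenization and matches terminals
# directly against the input text with compiled regexps, hence the checks in
# _prepare_match() rejecting weighted and zero-width terminals.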
class XEarley(_ParserFrontend):
    def __init__(self, lexer_conf, parser_conf, options=None, **kw):
        self.terminals_by_name = {t.name: t for t in lexer_conf.terminals}
        self.start = parser_conf.start

        self._prepare_match(lexer_conf)
        resolve_ambiguity = options.ambiguity == 'resolve'
        debug = options.debug if options else False
        tree_class = (options.tree_class or Tree) if options.ambiguity != 'forest' else None
        self.parser = xearley.Parser(parser_conf,
                                     self.match,
                                     ignore=lexer_conf.ignore,
                                     resolve_ambiguity=resolve_ambiguity,
                                     debug=debug,
                                     tree_class=tree_class,
                                     **kw
                                     )

    def match(self, term, text, index=0):
        return self.regexps[term.name].match(text, index)

    def _prepare_match(self, lexer_conf):
        self.regexps = {}
        for t in lexer_conf.terminals:
            if t.priority != 1:
                raise ValueError("Dynamic Earley doesn't support weights on terminals", t, t.priority)
            regexp = t.pattern.to_regexp()
            try:
                width = get_regexp_width(regexp)[0]
            except ValueError:
                raise ValueError("Bad regexp in token %s: %s" % (t.name, regexp))
            else:
                if width == 0:
                    raise ValueError("Dynamic Earley doesn't allow zero-width regexps", t)
            if lexer_conf.use_bytes:
                regexp = regexp.encode('utf-8')

            self.regexps[t.name] = lexer_conf.re_module.compile(regexp, lexer_conf.g_regex_flags)

    def parse(self, text, start):
        try:
            return self._parse(start, text)
        except UnexpectedInput as e:
            if e._all_terminals is None:
                e._all_terminals = self.terminals_by_name
            raise e
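
# 'dynamic_complete': same as XEarley, but complete_lex=True asks the
# dynamic lexer to consider matches of every length rather than only the
# longest one.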
class XEarley_CompleteLex(XEarley):
    def __init__(self, *args, **kw):
        XEarley.__init__(self, *args, complete_lex=True, **kw)
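
# CYK over a standard token stream. cyk.Parser returns an untransformed
# tree, so the rule callbacks are applied afterwards in _transform().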
class CYK(WithLexer):
    def __init__(self, lexer_conf, parser_conf, options=None):
        WithLexer.__init__(self, lexer_conf, parser_conf, options)
        self.init_traditional_lexer()

        self._analysis = GrammarAnalyzer(parser_conf)
        self.parser = cyk.Parser(parser_conf.rules)
        self.callbacks = parser_conf.callbacks

    def parse(self, text, start):
        tokens = list(self.make_lexer(text).lex(None))
        parse = self._parse(start, tokens)
        parse = self._transform(parse)
        return parse

    def _transform(self, tree):
        subtrees = list(tree.iter_subtrees())
        for subtree in subtrees:
            subtree.children = [self._apply_callback(c) if isinstance(c, Tree) else c for c in subtree.children]
        return self._apply_callback(tree)

    def _apply_callback(self, tree):
        return self.callbacks[tree.rule](tree.children)
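
# Illustrative usage (a sketch, not part of this module's API; assumes the
# Lark constructor builds lexer_conf/parser_conf from a grammar, as elsewhere
# in the package):
#
#     frontend_cls = get_frontend('lalr', 'contextual')  # -> LALR_ContextualLexer
#     frontend = frontend_cls(lexer_conf, parser_conf, options)
#     tree = frontend.parse('some input text')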