from .exceptions import ConfigurationError, GrammarError
from .utils import get_regexp_width, Serialize
from .parsers.grammar_analysis import GrammarAnalyzer
from .lexer import LexerThread, TraditionalLexer, ContextualLexer, Lexer, Token, TerminalDef
from .parsers import earley, xearley, cyk
from .parsers.lalr_parser import LALR_Parser
from .grammar import Rule
from .tree import Tree
from .common import LexerConf

try:
    import regex
except ImportError:
    regex = None
import re

###{standalone
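
# Adapts a user-supplied lexer class to the internal interface: classes that
# declare `__future_interface__` already take (lexer_state, parser_state) in
# lex(); older ones take the raw text and get wrapped.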
def _wrap_lexer(lexer_class):
    future_interface = getattr(lexer_class, '__future_interface__', False)
    if future_interface:
        return lexer_class
    else:
        class CustomLexerWrapper(Lexer):
            def __init__(self, lexer_conf):
                self.lexer = lexer_class(lexer_conf)
            def lex(self, lexer_state, parser_state):
                return self.lexer.lex(lexer_state.text)
        return CustomLexerWrapper
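
# Maps the (parser, lexer) options to a frontend class. `parser` is 'lalr',
# 'earley' or 'cyk'; `lexer` is a strategy name or a custom Lexer subclass.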
def get_frontend(parser, lexer):
    if parser == 'lalr':
        if lexer is None:
            raise ConfigurationError('The LALR parser requires use of a lexer')
        elif lexer == 'standard':
            return LALR_TraditionalLexer
        elif lexer == 'contextual':
            return LALR_ContextualLexer
        elif issubclass(lexer, Lexer):
            wrapped = _wrap_lexer(lexer)
            class LALR_CustomLexerWrapper(LALR_WithLexer):
                def init_lexer(self):
                    self.lexer = wrapped(self.lexer_conf)
            return LALR_CustomLexerWrapper
        else:
            raise ConfigurationError('Unknown lexer: %s' % lexer)
    elif parser == 'earley':
        if lexer == 'standard':
            return Earley_Traditional
        elif lexer == 'dynamic':
            return XEarley
        elif lexer == 'dynamic_complete':
            return XEarley_CompleteLex
        elif lexer == 'contextual':
            raise ConfigurationError('The Earley parser does not support the contextual lexer')
        elif issubclass(lexer, Lexer):
            wrapped = _wrap_lexer(lexer)
            class Earley_CustomLexerWrapper(Earley_WithLexer):
                def init_lexer(self, **kw):
                    self.lexer = wrapped(self.lexer_conf)
            return Earley_CustomLexerWrapper
        else:
            raise ConfigurationError('Unknown lexer: %s' % lexer)
    elif parser == 'cyk':
        if lexer == 'standard':
            return CYK
        else:
            raise ConfigurationError('The CYK parser requires the standard lexer.')
    else:
        raise ConfigurationError('Unknown parser: %s' % parser)
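
# Base class for all frontends. _parse() resolves the start rule: an explicit
# `start` argument wins; otherwise exactly one configured start rule must exist.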
class _ParserFrontend(Serialize):
    def _parse(self, start, input, *args):
        if start is None:
            start = self.start
            if len(start) > 1:
                raise ConfigurationError("Lark initialized with more than 1 possible start rule. Must specify which start rule to parse", start)
            start, = start
        return self.parser.parse(input, start, *args)
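
# Collects lexer callbacks from a transformer: any method named after a
# terminal (e.g. `def NUMBER(self, token)`) is applied to matching tokens.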
def _get_lexer_callbacks(transformer, terminals):
    result = {}
    for terminal in terminals:
        callback = getattr(transformer, terminal.name, None)
        if callback is not None:
            result[terminal.name] = callback
    return result
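
# Chains a post-lexer (e.g. lark's Indenter) after the main lexer: the token
# stream is run through `postlexer.process` before it reaches the parser.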
class PostLexConnector:
    def __init__(self, lexer, postlexer):
        self.lexer = lexer
        self.postlexer = postlexer

    def make_lexer_state(self, text):
        return self.lexer.make_lexer_state(text)

    def lex(self, lexer_state, parser_state):
        i = self.lexer.lex(lexer_state, parser_state)
        return self.postlexer.process(i)
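
# Shared base for frontends that drive a conventional lexer. Also implements
# (de)serialization; note that deserialize() assumes a LALR parser, which is
# what the standalone generator emits.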
class WithLexer(_ParserFrontend):
    lexer = None
    parser = None
    lexer_conf = None
    start = None

    __serialize_fields__ = 'parser', 'lexer_conf', 'start'
    __serialize_namespace__ = LexerConf,

    def __init__(self, lexer_conf, parser_conf, options=None):
        self.lexer_conf = lexer_conf
        self.start = parser_conf.start
        self.postlex = lexer_conf.postlex

    @classmethod
    def deserialize(cls, data, memo, callbacks, options):
        inst = super(WithLexer, cls).deserialize(data, memo)

        inst.postlex = options.postlex
        inst.parser = LALR_Parser.deserialize(inst.parser, memo, callbacks, options.debug)

        terminals = [item for item in memo.values() if isinstance(item, TerminalDef)]
        inst.lexer_conf.callbacks = _get_lexer_callbacks(options.transformer, terminals)
        inst.lexer_conf.re_module = regex if options.regex else re
        inst.lexer_conf.use_bytes = options.use_bytes
        inst.lexer_conf.g_regex_flags = options.g_regex_flags
        inst.lexer_conf.skip_validation = True
        inst.init_lexer()

        return inst

    def _serialize(self, data, memo):
        data['parser'] = data['parser'].serialize(memo)

    def make_lexer(self, text):
        lexer = self.lexer
        if self.postlex:
            lexer = PostLexConnector(self.lexer, self.postlex)
        return LexerThread(lexer, text)

    def parse(self, text, start=None):
        return self._parse(start, self.make_lexer(text))

    def init_traditional_lexer(self):
        self.lexer = TraditionalLexer(self.lexer_conf)
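
# LALR frontends build the parser first, then call init_lexer(), which each
# subclass supplies: traditional (one scanner for the whole input) or
# contextual (accepted terminals narrowed by the current parser state).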
class LALR_WithLexer(WithLexer):
    def __init__(self, lexer_conf, parser_conf, options=None):
        debug = options.debug if options else False
        self.parser = LALR_Parser(parser_conf, debug=debug)
        WithLexer.__init__(self, lexer_conf, parser_conf, options)

        self.init_lexer()

    def init_lexer(self, **kw):
        raise NotImplementedError()

class LALR_TraditionalLexer(LALR_WithLexer):
    def init_lexer(self):
        self.init_traditional_lexer()

class LALR_ContextualLexer(LALR_WithLexer):
    def init_lexer(self):
        states = {idx: list(t.keys()) for idx, t in self.parser._parse_table.states.items()}
        always_accept = self.postlex.always_accept if self.postlex else ()
        self.lexer = ContextualLexer(self.lexer_conf, states, always_accept=always_accept)
###}
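
# Earley frontends with a conventional lexer: terminals are matched by
# comparing the terminal's name to the token type assigned by the lexer.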
class Earley_WithLexer(WithLexer):
    def __init__(self, lexer_conf, parser_conf, options=None):
        WithLexer.__init__(self, lexer_conf, parser_conf, options)
        self.init_lexer()

        resolve_ambiguity = options.ambiguity == 'resolve'
        debug = options.debug if options else False
        tree_class = (options.tree_class or Tree) if options.ambiguity != 'forest' else None
        self.parser = earley.Parser(parser_conf, self.match, resolve_ambiguity=resolve_ambiguity, debug=debug, tree_class=tree_class)

    def match(self, term, token):
        return term.name == token.type

    def init_lexer(self, **kw):
        raise NotImplementedError()

class Earley_Traditional(Earley_WithLexer):
    def init_lexer(self, **kw):
        self.init_traditional_lexer()
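
# 'dynamic' lexing: XEarley has no separate lexer; the Earley parser calls
# match() to try each terminal's compiled regexp directly against the text.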
class XEarley(_ParserFrontend):
    def __init__(self, lexer_conf, parser_conf, options=None, **kw):
        self.token_by_name = {t.name: t for t in lexer_conf.tokens}
        self.start = parser_conf.start

        self._prepare_match(lexer_conf)
        resolve_ambiguity = options.ambiguity == 'resolve'
        debug = options.debug if options else False
        tree_class = (options.tree_class or Tree) if options.ambiguity != 'forest' else None
        self.parser = xearley.Parser(parser_conf,
                                     self.match,
                                     ignore=lexer_conf.ignore,
                                     resolve_ambiguity=resolve_ambiguity,
                                     debug=debug,
                                     tree_class=tree_class,
                                     **kw
                                     )

    def match(self, term, text, index=0):
        return self.regexps[term.name].match(text, index)

    def _prepare_match(self, lexer_conf):
        self.regexps = {}
        for t in lexer_conf.tokens:
            if t.priority != 1:
                raise GrammarError("Dynamic Earley doesn't support weights on terminals", t, t.priority)
            regexp = t.pattern.to_regexp()
            try:
                width = get_regexp_width(regexp)[0]
            except ValueError:
                raise GrammarError("Bad regexp in token %s: %s" % (t.name, regexp))
            else:
                if width == 0:
                    raise GrammarError("Dynamic Earley doesn't allow zero-width regexps", t)
            if lexer_conf.use_bytes:
                regexp = regexp.encode('utf-8')

            self.regexps[t.name] = lexer_conf.re_module.compile(regexp, lexer_conf.g_regex_flags)

    def parse(self, text, start):
        return self._parse(start, text)
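
# 'dynamic_complete' lexing: like XEarley, but the scanner considers every
# possible terminal match at each position rather than committing to one.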
class XEarley_CompleteLex(XEarley):
    def __init__(self, *args, **kw):
        XEarley.__init__(self, *args, complete_lex=True, **kw)
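
# CYK frontend: lexes the full input up front, parses with the CYK algorithm,
# then applies the rule callbacks over the resulting tree.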
class CYK(WithLexer):
    def __init__(self, lexer_conf, parser_conf, options=None):
        WithLexer.__init__(self, lexer_conf, parser_conf, options)
        self.init_traditional_lexer()

        self._analysis = GrammarAnalyzer(parser_conf)
        self.parser = cyk.Parser(parser_conf.rules)
        self.callbacks = parser_conf.callbacks

    def parse(self, text, start):
        tokens = list(self.make_lexer(text).lex(None))
        parse = self._parse(start, tokens)
        parse = self._transform(parse)
        return parse

    def _transform(self, tree):
        subtrees = list(tree.iter_subtrees())
        for subtree in subtrees:
            subtree.children = [self._apply_callback(c) if isinstance(c, Tree) else c for c in subtree.children]

        return self._apply_callback(tree)

    def _apply_callback(self, tree):
        return self.callbacks[tree.rule](tree.children)
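
# Minimal usage sketch (illustrative, not part of this module): these frontend
# classes are normally selected through the public `lark.Lark` constructor,
# which calls get_frontend() internally. The grammar below is a made-up example.
#
#     from lark import Lark
#
#     grammar = '''
#         start: WORD+
#         %import common.WORD
#         %ignore " "
#     '''
#     parser = Lark(grammar, parser='lalr', lexer='contextual')
#     tree = parser.parse('hello world')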