This repo contains code to mirror other repos. It also contains the code that is getting mirrored.
Ви не можете вибрати більше 25 тем Теми мають розпочинатися з літери або цифри, можуть містити дефіси (-) і не повинні перевищувати 35 символів.

283 рядки
10 KiB

  1. from .utils import logger, NO_VALUE
  2. from typing import Dict, Iterable, Callable, Union, TypeVar, Tuple, Any, List, Set, Optional, Collection, TYPE_CHECKING
  3. if TYPE_CHECKING:
  4. from .lexer import Token
  5. from .parsers.lalr_interactive_parser import InteractiveParser
  6. from .tree import Tree
  7. ###{standalone
  8. class LarkError(Exception):
  9. pass
  10. class ConfigurationError(LarkError, ValueError):
  11. pass
  12. def assert_config(value, options: Collection, msg='Got %r, expected one of %s'):
  13. if value not in options:
  14. raise ConfigurationError(msg % (value, options))
  15. class GrammarError(LarkError):
  16. pass
  17. class ParseError(LarkError):
  18. pass
  19. class LexError(LarkError):
  20. pass
T = TypeVar('T')  # generic label type; used by UnexpectedInput.match_examples
  22. class UnexpectedInput(LarkError):
  23. """UnexpectedInput Error.
  24. Used as a base class for the following exceptions:
  25. - ``UnexpectedCharacters``: The lexer encountered an unexpected string
  26. - ``UnexpectedToken``: The parser received an unexpected token
  27. - ``UnexpectedEOF``: The parser expected a token, but the input ended
  28. After catching one of these exceptions, you may call the following helper methods to create a nicer error message.
  29. """
  30. line: int
  31. column: int
  32. pos_in_stream = None
  33. state: Any
  34. _terminals_by_name = None
  35. def get_context(self, text: str, span: int=40) -> str:
  36. """Returns a pretty string pinpointing the error in the text,
  37. with span amount of context characters around it.
  38. Note:
  39. The parser doesn't hold a copy of the text it has to parse,
  40. so you have to provide it again
  41. """
  42. assert self.pos_in_stream is not None, self
  43. pos = self.pos_in_stream
  44. start = max(pos - span, 0)
  45. end = pos + span
  46. if not isinstance(text, bytes):
  47. before = text[start:pos].rsplit('\n', 1)[-1]
  48. after = text[pos:end].split('\n', 1)[0]
  49. return before + after + '\n' + ' ' * len(before.expandtabs()) + '^\n'
  50. else:
  51. before = text[start:pos].rsplit(b'\n', 1)[-1]
  52. after = text[pos:end].split(b'\n', 1)[0]
  53. return (before + after + b'\n' + b' ' * len(before.expandtabs()) + b'^\n').decode("ascii", "backslashreplace")
  54. def match_examples(self, parse_fn: 'Callable[[str], Tree]',
  55. examples: Union[Dict[T, Iterable[str]], Iterable[Tuple[T, Iterable[str]]]],
  56. token_type_match_fallback: bool=False,
  57. use_accepts: bool=True
  58. ) -> Optional[T]:
  59. """Allows you to detect what's wrong in the input text by matching
  60. against example errors.
  61. Given a parser instance and a dictionary mapping some label with
  62. some malformed syntax examples, it'll return the label for the
  63. example that bests matches the current error. The function will
  64. iterate the dictionary until it finds a matching error, and
  65. return the corresponding value.
  66. For an example usage, see `examples/error_reporting_lalr.py`
  67. Parameters:
  68. parse_fn: parse function (usually ``lark_instance.parse``)
  69. examples: dictionary of ``{'example_string': value}``.
  70. use_accepts: Recommended to keep this as ``use_accepts=True``.
  71. """
  72. assert self.state is not None, "Not supported for this exception"
  73. if isinstance(examples, dict):
  74. examples = examples.items()
  75. candidate = (None, False)
  76. for i, (label, example) in enumerate(examples):
  77. assert not isinstance(example, str), "Expecting a list"
  78. for j, malformed in enumerate(example):
  79. try:
  80. parse_fn(malformed)
  81. except UnexpectedInput as ut:
  82. if ut.state == self.state:
  83. if use_accepts and hasattr(self, 'accepts') and hasattr(ut, 'accepts') and ut.accepts != self.accepts:
  84. logger.debug("Different accepts with same state[%d]: %s != %s at example [%s][%s]" %
  85. (self.state, self.accepts, ut.accepts, i, j))
  86. continue
  87. try:
  88. if ut.token == self.token: # Try exact match first
  89. logger.debug("Exact Match at example [%s][%s]" % (i, j))
  90. return label
  91. if token_type_match_fallback:
  92. # Fallback to token types match
  93. if (ut.token.type == self.token.type) and not candidate[-1]:
  94. logger.debug("Token Type Fallback at example [%s][%s]" % (i, j))
  95. candidate = label, True
  96. except AttributeError:
  97. pass
  98. if candidate[0] is None:
  99. logger.debug("Same State match at example [%s][%s]" % (i, j))
  100. candidate = label, False
  101. return candidate[0]
  102. def _format_expected(self, expected):
  103. if self._terminals_by_name:
  104. d = self._terminals_by_name
  105. expected = [d[t_name].user_repr() if t_name in d else t_name for t_name in expected]
  106. return "Expected one of: \n\t* %s\n" % '\n\t* '.join(expected)
  107. class UnexpectedEOF(ParseError, UnexpectedInput):
  108. """An exception that is raised by the parser, when the input ends while it still expects a token.
  109. """
  110. expected: 'List[Token]'
  111. def __init__(self, expected, state=None, terminals_by_name=None):
  112. super(UnexpectedEOF, self).__init__()
  113. self.expected = expected
  114. self.state = state
  115. from .lexer import Token
  116. self.token = Token("<EOF>", "") # , line=-1, column=-1, pos_in_stream=-1)
  117. self.pos_in_stream = -1
  118. self.line = -1
  119. self.column = -1
  120. self._terminals_by_name = terminals_by_name
  121. def __str__(self):
  122. message = "Unexpected end-of-input. "
  123. message += self._format_expected(self.expected)
  124. return message
  125. class UnexpectedCharacters(LexError, UnexpectedInput):
  126. """An exception that is raised by the lexer, when it cannot match the next
  127. string of characters to any of its terminals.
  128. """
  129. allowed: Set[str]
  130. considered_tokens: Set[Any]
  131. def __init__(self, seq, lex_pos, line, column, allowed=None, considered_tokens=None, state=None, token_history=None,
  132. terminals_by_name=None, considered_rules=None):
  133. super(UnexpectedCharacters, self).__init__()
  134. # TODO considered_tokens and allowed can be figured out using state
  135. self.line = line
  136. self.column = column
  137. self.pos_in_stream = lex_pos
  138. self.state = state
  139. self._terminals_by_name = terminals_by_name
  140. self.allowed = allowed
  141. self.considered_tokens = considered_tokens
  142. self.considered_rules = considered_rules
  143. self.token_history = token_history
  144. if isinstance(seq, bytes):
  145. self.char = seq[lex_pos:lex_pos + 1].decode("ascii", "backslashreplace")
  146. else:
  147. self.char = seq[lex_pos]
  148. self._context = self.get_context(seq)
  149. def __str__(self):
  150. message = "No terminal matches '%s' in the current parser context, at line %d col %d" % (self.char, self.line, self.column)
  151. message += '\n\n' + self._context
  152. if self.allowed:
  153. message += self._format_expected(self.allowed)
  154. if self.token_history:
  155. message += '\nPrevious tokens: %s\n' % ', '.join(repr(t) for t in self.token_history)
  156. return message
  157. class UnexpectedToken(ParseError, UnexpectedInput):
  158. """An exception that is raised by the parser, when the token it received
  159. doesn't match any valid step forward.
  160. Parameters:
  161. token: The mismatched token
  162. expected: The set of expected tokens
  163. considered_rules: Which rules were considered, to deduce the expected tokens
  164. state: A value representing the parser state. Do not rely on its value or type.
  165. interactive_parser: An instance of ``InteractiveParser``, that is initialized to the point of failture,
  166. and can be used for debugging and error handling.
  167. Note: These parameters are available as attributes of the instance.
  168. """
  169. expected: Set[str]
  170. considered_rules: Set[str]
  171. interactive_parser: 'InteractiveParser'
  172. def __init__(self, token, expected, considered_rules=None, state=None, interactive_parser=None, terminals_by_name=None, token_history=None):
  173. super(UnexpectedToken, self).__init__()
  174. # TODO considered_rules and expected can be figured out using state
  175. self.line = getattr(token, 'line', '?')
  176. self.column = getattr(token, 'column', '?')
  177. self.pos_in_stream = getattr(token, 'start_pos', None)
  178. self.state = state
  179. self.token = token
  180. self.expected = expected # XXX deprecate? `accepts` is better
  181. self._accepts = NO_VALUE
  182. self.considered_rules = considered_rules
  183. self.interactive_parser = interactive_parser
  184. self._terminals_by_name = terminals_by_name
  185. self.token_history = token_history
  186. @property
  187. def accepts(self) -> Set[str]:
  188. if self._accepts is NO_VALUE:
  189. self._accepts = self.interactive_parser and self.interactive_parser.accepts()
  190. return self._accepts
  191. def __str__(self):
  192. message = ("Unexpected token %r at line %s, column %s.\n%s"
  193. % (self.token, self.line, self.column, self._format_expected(self.accepts or self.expected)))
  194. if self.token_history:
  195. message += "Previous tokens: %r\n" % self.token_history
  196. return message
  197. class VisitError(LarkError):
  198. """VisitError is raised when visitors are interrupted by an exception
  199. It provides the following attributes for inspection:
  200. Parameters:
  201. rule: the name of the visit rule that failed
  202. obj: the tree-node or token that was being processed
  203. orig_exc: the exception that cause it to fail
  204. Note: These parameters are available as attributes
  205. """
  206. obj: 'Union[Tree, Token]'
  207. orig_exc: Exception
  208. def __init__(self, rule, obj, orig_exc):
  209. message = 'Error trying to process rule "%s":\n\n%s' % (rule, orig_exc)
  210. super(VisitError, self).__init__(message)
  211. self.rule = rule
  212. self.obj = obj
  213. self.orig_exc = orig_exc
  214. ###}