This repo contains code to mirror other repos. It also contains the code that is getting mirrored.
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

263 lines
9.5 KiB

  1. from .utils import logger, NO_VALUE
  2. ###{standalone
  3. from typing import Dict, Iterable, Callable, Union, TypeVar, Tuple, Any, List, Set, TYPE_CHECKING
  4. if TYPE_CHECKING:
  5. from .lexer import Token
  6. from .parsers.lalr_interactive_parser import InteractiveParser
  7. from .tree import Tree
  8. class LarkError(Exception):
  9. pass
  10. class ConfigurationError(LarkError, ValueError):
  11. pass
  12. def assert_config(value, options, msg='Got %r, expected one of %s'):
  13. if value not in options:
  14. raise ConfigurationError(msg % (value, options))
  15. class GrammarError(LarkError):
  16. pass
  17. class ParseError(LarkError):
  18. pass
  19. class LexError(LarkError):
  20. pass
# Generic label type for the user-supplied examples consumed by
# ``UnexpectedInput.match_examples``.
T = TypeVar('T')
class UnexpectedInput(LarkError):
    """UnexpectedInput Error.

    Used as a base class for the following exceptions:

    - ``UnexpectedToken``: The parser received an unexpected token
    - ``UnexpectedCharacters``: The lexer encountered an unexpected string

    After catching one of these exceptions, you may call the following helper methods to create a nicer error message.
    """
    # 1-based error position; set by the concrete subclass's __init__.
    line: int
    column: int
    # 0-based offset into the input; None until a subclass sets it.
    pos_in_stream = None
    # Opaque parser state at the point of failure; compared by match_examples.
    state: Any
    # Optional mapping of terminal name -> terminal definition, used by
    # _format_expected for nicer messages.
    _terminals_by_name = None

    def get_context(self, text: str, span: int=40) -> str:
        """Returns a pretty string pinpointing the error in the text,
        with span amount of context characters around it.

        Note:
            The parser doesn't hold a copy of the text it has to parse,
            so you have to provide it again
        """
        assert self.pos_in_stream is not None, self
        pos = self.pos_in_stream
        start = max(pos - span, 0)
        end = pos + span
        if not isinstance(text, bytes):
            # Trim to the error's own line: keep only text after the previous
            # newline and before the next one.
            before = text[start:pos].rsplit('\n', 1)[-1]
            after = text[pos:end].split('\n', 1)[0]
            # expandtabs keeps the caret column aligned when tabs are present.
            return before + after + '\n' + ' ' * len(before.expandtabs()) + '^\n'
        else:
            # Same logic on bytes input; decode at the end so the caller
            # always receives str.
            before = text[start:pos].rsplit(b'\n', 1)[-1]
            after = text[pos:end].split(b'\n', 1)[0]
            return (before + after + b'\n' + b' ' * len(before.expandtabs()) + b'^\n').decode("ascii", "backslashreplace")

    def match_examples(self, parse_fn: 'Callable[[str], Tree]',
                       examples: Union[Dict[T, Iterable[str]], Iterable[Tuple[T, Iterable[str]]]],
                       token_type_match_fallback: bool=False,
                       use_accepts: bool=False) -> T:
        """Allows you to detect what's wrong in the input text by matching
        against example errors.

        Given a parser instance and a dictionary mapping some label with
        some malformed syntax examples, it'll return the label for the
        example that bests matches the current error. The function will
        iterate the dictionary until it finds a matching error, and
        return the corresponding value.

        For an example usage, see `examples/error_reporting_lalr.py`

        Parameters:
            parse_fn: parse function (usually ``lark_instance.parse``)
            examples: dictionary of ``{'example_string': value}``.
            use_accepts: Recommended to call this with ``use_accepts=True``.
                The default is ``False`` for backwards compatibility.
        """
        assert self.state is not None, "Not supported for this exception"

        if isinstance(examples, dict):
            examples = examples.items()

        # candidate is (label, is_token_type_fallback_match); an exact token
        # match returns immediately, otherwise the best candidate wins.
        candidate = (None, False)
        for i, (label, example) in enumerate(examples):
            assert not isinstance(example, str), "Expecting a list"

            for j, malformed in enumerate(example):
                try:
                    parse_fn(malformed)
                except UnexpectedInput as ut:
                    if ut.state == self.state:
                        if use_accepts and hasattr(self, 'accepts') and ut.accepts != self.accepts:
                            # Same state but different acceptable terminals:
                            # not a real match, keep looking.
                            logger.debug("Different accepts with same state[%d]: %s != %s at example [%s][%s]" %
                                         (self.state, self.accepts, ut.accepts, i, j))
                            continue
                        try:
                            if ut.token == self.token:  # Try exact match first
                                logger.debug("Exact Match at example [%s][%s]" % (i, j))
                                return label

                            if token_type_match_fallback:
                                # Fallback to token types match
                                if (ut.token.type == self.token.type) and not candidate[-1]:
                                    logger.debug("Token Type Fallback at example [%s][%s]" % (i, j))
                                    candidate = label, True

                        except AttributeError:
                            # Not every UnexpectedInput subclass carries a
                            # .token attribute (e.g. lexer errors).
                            pass
                        if candidate[0] is None:
                            # First same-state match; kept unless a stronger
                            # match is found later.
                            logger.debug("Same State match at example [%s][%s]" % (i, j))
                            candidate = label, False

        return candidate[0]

    def _format_expected(self, expected):
        # Prefer the user-facing representation of each terminal when the
        # terminal table is available; fall back to the raw name.
        if self._terminals_by_name:
            d = self._terminals_by_name
            expected = [d[t_name].user_repr() if t_name in d else t_name for t_name in expected]
        return "Expected one of: \n\t* %s\n" % '\n\t* '.join(expected)
  103. class UnexpectedEOF(ParseError, UnexpectedInput):
  104. expected: 'List[Token]'
  105. def __init__(self, expected, state=None, terminals_by_name=None):
  106. self.expected = expected
  107. self.state = state
  108. from .lexer import Token
  109. self.token = Token("<EOF>", "") # , line=-1, column=-1, pos_in_stream=-1)
  110. self.pos_in_stream = -1
  111. self.line = -1
  112. self.column = -1
  113. self._terminals_by_name = terminals_by_name
  114. super(UnexpectedEOF, self).__init__()
  115. def __str__(self):
  116. message = "Unexpected end-of-input. "
  117. message += self._format_expected(self.expected)
  118. return message
  119. class UnexpectedCharacters(LexError, UnexpectedInput):
  120. allowed: Set[str]
  121. considered_tokens: Set[Any]
  122. def __init__(self, seq, lex_pos, line, column, allowed=None, considered_tokens=None, state=None, token_history=None,
  123. terminals_by_name=None, considered_rules=None):
  124. # TODO considered_tokens and allowed can be figured out using state
  125. self.line = line
  126. self.column = column
  127. self.pos_in_stream = lex_pos
  128. self.state = state
  129. self._terminals_by_name = terminals_by_name
  130. self.allowed = allowed
  131. self.considered_tokens = considered_tokens
  132. self.considered_rules = considered_rules
  133. self.token_history = token_history
  134. if isinstance(seq, bytes):
  135. self.char = seq[lex_pos:lex_pos + 1].decode("ascii", "backslashreplace")
  136. else:
  137. self.char = seq[lex_pos]
  138. self._context = self.get_context(seq)
  139. super(UnexpectedCharacters, self).__init__()
  140. def __str__(self):
  141. message = "No terminal matches '%s' in the current parser context, at line %d col %d" % (self.char, self.line, self.column)
  142. message += '\n\n' + self._context
  143. if self.allowed:
  144. message += self._format_expected(self.allowed)
  145. if self.token_history:
  146. message += '\nPrevious tokens: %s\n' % ', '.join(repr(t) for t in self.token_history)
  147. return message
class UnexpectedToken(ParseError, UnexpectedInput):
    """An exception that is raised by the parser, when the token it received
    doesn't match any valid step forward.

    The parser provides an interactive instance through `interactive_parser`,
    which is initialized to the point of failure, and can be used for debugging and error handling.

    see: ``InteractiveParser``.
    """
    expected: Set[str]
    considered_rules: Set[str]
    interactive_parser: 'InteractiveParser'

    def __init__(self, token, expected, considered_rules=None, state=None, interactive_parser=None, terminals_by_name=None, token_history=None):
        # TODO considered_rules and expected can be figured out using state
        # Tokens produced by the lexer carry position attributes; fall back to
        # placeholders for tokens that do not.
        self.line = getattr(token, 'line', '?')
        self.column = getattr(token, 'column', '?')
        self.pos_in_stream = getattr(token, 'start_pos', None)
        self.state = state

        self.token = token
        self.expected = expected  # XXX deprecate? `accepts` is better
        # Sentinel marking "not yet computed"; see the `accepts` property.
        self._accepts = NO_VALUE
        self.considered_rules = considered_rules
        self.interactive_parser = interactive_parser
        self._terminals_by_name = terminals_by_name
        self.token_history = token_history

        super(UnexpectedToken, self).__init__()

    @property
    def accepts(self) -> Set[str]:
        # Computed lazily on first access. If no interactive_parser was
        # provided this stays None (the `and` short-circuits).
        if self._accepts is NO_VALUE:
            self._accepts = self.interactive_parser and self.interactive_parser.accepts()
        return self._accepts

    def __str__(self):
        # Prefer the (lazily computed) accepts set over the raw expected set.
        message = ("Unexpected token %r at line %s, column %s.\n%s"
                   % (self.token, self.line, self.column, self._format_expected(self.accepts or self.expected)))
        if self.token_history:
            message += "Previous tokens: %r\n" % self.token_history

        return message
  183. class VisitError(LarkError):
  184. """VisitError is raised when visitors are interrupted by an exception
  185. It provides the following attributes for inspection:
  186. - obj: the tree node or token it was processing when the exception was raised
  187. - orig_exc: the exception that cause it to fail
  188. """
  189. obj: 'Union[Tree, Token]'
  190. orig_exc: Exception
  191. def __init__(self, rule, obj, orig_exc):
  192. self.obj = obj
  193. self.orig_exc = orig_exc
  194. message = 'Error trying to process rule "%s":\n\n%s' % (rule, orig_exc)
  195. super(VisitError, self).__init__(message)
  196. ###}