This repo contains code to mirror other repos. It also contains the code that is getting mirrored.
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

239 lines
8.6 KiB

  1. from .utils import STRING_TYPE, logger
  2. ###{standalone
  3. class LarkError(Exception):
  4. pass
  5. class ConfigurationError(LarkError, ValueError):
  6. pass
  7. def assert_config(value, options, msg='Got %r, expected one of %s'):
  8. if value not in options:
  9. raise ConfigurationError(msg % (value, options))
  10. class GrammarError(LarkError):
  11. pass
  12. class ParseError(LarkError):
  13. pass
  14. class LexError(LarkError):
  15. pass
class UnexpectedInput(LarkError):
    """UnexpectedInput Error.

    Used as a base class for the following exceptions:
    - ``UnexpectedToken``: The parser received an unexpected token
    - ``UnexpectedCharacters``: The lexer encountered an unexpected string

    After catching one of these exceptions, you may call the following helper
    methods to create a nicer error message.
    """
    # Overridden by subclasses in __init__; defined at class level so the
    # helpers below can run (or assert) even if a subclass does not set them.
    pos_in_stream = None
    _terminals_by_name = None

    def get_context(self, text, span=40):
        """Returns a pretty string pinpointing the error in the text,
        with span amount of context characters around it.

        Note:
            The parser doesn't hold a copy of the text it has to parse,
            so you have to provide it again
        """
        assert self.pos_in_stream is not None, self
        pos = self.pos_in_stream
        start = max(pos - span, 0)
        end = pos + span
        if not isinstance(text, bytes):
            # Trim `before` back to the start of the current line and `after`
            # forward to its end, so the caret aligns under the error position.
            before = text[start:pos].rsplit('\n', 1)[-1]
            after = text[pos:end].split('\n', 1)[0]
            return before + after + '\n' + ' ' * len(before.expandtabs()) + '^\n'
        else:
            # Same logic for bytes input; decode at the end so the returned
            # context is always printable text.
            before = text[start:pos].rsplit(b'\n', 1)[-1]
            after = text[pos:end].split(b'\n', 1)[0]
            return (before + after + b'\n' + b' ' * len(before.expandtabs()) + b'^\n').decode("ascii", "backslashreplace")

    def match_examples(self, parse_fn, examples, token_type_match_fallback=False, use_accepts=False):
        """Allows you to detect what's wrong in the input text by matching
        against example errors.

        Given a parser instance and a dictionary mapping some label with
        some malformed syntax examples, it'll return the label for the
        example that bests matches the current error. The function will
        iterate the dictionary until it finds a matching error, and
        return the corresponding value.

        For an example usage, see `examples/error_reporting_lalr.py`

        Parameters:
            parse_fn: parse function (usually ``lark_instance.parse``)
            examples: dictionary of ``{'example_string': value}``.
            use_accepts: Recommended to call this with ``use_accepts=True``.
                The default is ``False`` for backwards compatibility.
        """
        assert self.state is not None, "Not supported for this exception"
        if isinstance(examples, dict):
            examples = examples.items()
        # candidate is (label, is_token_type_fallback): an exact token match
        # returns immediately; otherwise the best weaker match is remembered.
        candidate = (None, False)
        for i, (label, example) in enumerate(examples):
            # Each value must be a collection of example strings, not one string.
            assert not isinstance(example, STRING_TYPE)
            for j, malformed in enumerate(example):
                try:
                    parse_fn(malformed)
                except UnexpectedInput as ut:
                    if ut.state == self.state:
                        if use_accepts and hasattr(self, 'accepts') and ut.accepts != self.accepts:
                            logger.debug("Different accepts with same state[%d]: %s != %s at example [%s][%s]" %
                                         (self.state, self.accepts, ut.accepts, i, j))
                            continue
                        try:
                            if ut.token == self.token:  # Try exact match first
                                logger.debug("Exact Match at example [%s][%s]" % (i, j))
                                return label
                            if token_type_match_fallback:
                                # Fallback to token types match
                                if (ut.token.type == self.token.type) and not candidate[-1]:
                                    logger.debug("Token Type Fallback at example [%s][%s]" % (i, j))
                                    candidate = label, True
                        except AttributeError:
                            # Not every UnexpectedInput subclass carries a .token
                            pass
                        if candidate[0] is None:
                            logger.debug("Same State match at example [%s][%s]" % (i, j))
                            candidate = label, False
        return candidate[0]

    def _format_expected(self, expected):
        # Prefer user-friendly terminal descriptions when a name table is available.
        if self._terminals_by_name:
            ts = []
            for ter in expected:
                ts.append(self._terminals_by_name[ter].user_repr())
        else:
            ts = expected
        return "Expected one of: \n\t* %s\n" % '\n\t* '.join(ts)
  97. class UnexpectedEOF(ParseError, UnexpectedInput):
  98. def __init__(self, expected, state=None, terminals_by_name=None):
  99. self.expected = expected
  100. self.state = state
  101. from .lexer import Token
  102. self.token = Token("<EOF>", "") # , line=-1, column=-1, pos_in_stream=-1)
  103. self.pos_in_stream = -1
  104. self.line = -1
  105. self.column = -1
  106. self._terminals_by_name = terminals_by_name
  107. super(UnexpectedEOF, self).__init__()
  108. def __str__(self):
  109. message = "Unexpected end-of-input. "
  110. message += self._format_expected(self.expected)
  111. return message
  112. class UnexpectedCharacters(LexError, UnexpectedInput):
  113. def __init__(self, seq, lex_pos, line, column, allowed=None, considered_tokens=None, state=None, token_history=None,
  114. terminals_by_name=None):
  115. # TODO considered_tokens and allowed can be figured out using state
  116. self.line = line
  117. self.column = column
  118. self.pos_in_stream = lex_pos
  119. self.state = state
  120. self._terminals_by_name = terminals_by_name
  121. self.allowed = allowed
  122. self.considered_tokens = considered_tokens
  123. self.token_history = token_history
  124. if isinstance(seq, bytes):
  125. self.char = seq[lex_pos:lex_pos + 1].decode("ascii", "backslashreplace")
  126. else:
  127. self.char = seq[lex_pos]
  128. self._context = self.get_context(seq)
  129. super(UnexpectedCharacters, self).__init__()
  130. def __str__(self):
  131. message = "No terminal defined for '%s' at line %d col %d" % (self.char, self.line, self.column)
  132. message += '\n\n' + self._context
  133. if self.allowed:
  134. message += self._format_expected(self.allowed)
  135. if self.token_history:
  136. message += '\nPrevious tokens: %s\n' % ', '.join(repr(t) for t in self.token_history)
  137. return message
# Sentinel: lets UnexpectedToken.accepts distinguish "not computed yet"
# from a legitimately computed None/False value.
_not_set_marker = object()
  139. class UnexpectedToken(ParseError, UnexpectedInput):
  140. """When the parser throws UnexpectedToken, it instantiates a puppet
  141. with its internal state. Users can then interactively set the puppet to
  142. the desired puppet state, and resume regular parsing.
  143. see: :ref:`ParserPuppet`.
  144. """
  145. def __init__(self, token, expected, considered_rules=None, state=None, puppet=None, terminals_by_name=None, token_history=None):
  146. # TODO considered_rules and expected can be figured out using state
  147. self.line = getattr(token, 'line', '?')
  148. self.column = getattr(token, 'column', '?')
  149. self.pos_in_stream = getattr(token, 'pos_in_stream', None)
  150. self.state = state
  151. self.token = token
  152. self.expected = expected # XXX deprecate? `accepts` is better
  153. self._accepts = _not_set_marker
  154. self.considered_rules = considered_rules
  155. self.puppet = puppet
  156. self._terminals_by_name = terminals_by_name
  157. self.token_history = token_history
  158. super(UnexpectedToken, self).__init__()
  159. @property
  160. def accepts(self):
  161. if self._accepts is _not_set_marker:
  162. self._accepts = self.puppet and self.puppet.accepts()
  163. return self._accepts
  164. def __str__(self):
  165. message = ("Unexpected token %r at line %s, column %s.\n%s"
  166. % (self.token, self.line, self.column, self._format_expected(self.accepts or self.expected)))
  167. if self.token_history:
  168. message += "Previous tokens: %r\n" % self.token_history
  169. return message
  170. class VisitError(LarkError):
  171. """VisitError is raised when visitors are interrupted by an exception
  172. It provides the following attributes for inspection:
  173. - obj: the tree node or token it was processing when the exception was raised
  174. - orig_exc: the exception that cause it to fail
  175. """
  176. def __init__(self, rule, obj, orig_exc):
  177. self.obj = obj
  178. self.orig_exc = orig_exc
  179. message = 'Error trying to process rule "%s":\n\n%s' % (rule, orig_exc)
  180. super(VisitError, self).__init__(message)
  181. ###}