

import re
from collections import defaultdict

from .tree import Tree
from .common import is_terminal, ParserConf, PatternStr
from .lexer import Token
from .parsers import earley
from .lark import Lark


def is_discarded_terminal(t):
    # Terminals whose names start with '_' are filtered out of the parse tree.
    return is_terminal(t) and t.startswith('_')

def is_iter_empty(i):
    try:
        _ = next(i)
        return False
    except StopIteration:
        return True


class Reconstructor:
    def __init__(self, parser):
        # Recover the terminal definitions and their regexps from the parser's lexer config.
        tokens = {t.name: t for t in parser.lexer_conf.tokens}
        token_res = {t.name: re.compile(t.pattern.to_regexp()) for t in parser.lexer_conf.tokens}

        class MatchData:
            def __init__(self, data):
                self.data = data

        class MatchTerminal(MatchData):
            # Match a token's text against the terminal's regexp.
            def __call__(self, other):
                return token_res[self.data].match(other) is not None

        class MatchTree(MatchData):
            # Match a subtree by its rule name.
            def __call__(self, other):
                try:
                    return self.data == other.data
                except AttributeError:
                    return False

        class WriteTokens:
            # Callback that turns a matched expansion back into a list of strings/tokens,
            # re-inserting the literal text of discarded terminals.
            def __init__(self, name, expansion):
                self.name = name
                self.expansion = expansion

            def f(self, args):
                args2 = iter(args)
                to_write = []
                for sym in self.expansion:
                    if is_discarded_terminal(sym):
                        t = tokens[sym]
                        assert isinstance(t.pattern, PatternStr)
                        to_write.append(t.pattern.value)
                    else:
                        x = next(args2)
                        if isinstance(x, list):
                            to_write += x
                        else:
                            if isinstance(x, Token):
                                assert x.type == sym, x
                            else:
                                assert x.data == sym, x
                            to_write.append(x)

                assert is_iter_empty(args2)
                return to_write

        # Invert the grammar: collect every expansion per rule name, following aliases.
        d = defaultdict(list)
        for name, expansions in parser.rules.items():
            for expansion, alias in expansions:
                if alias:
                    d[alias].append(expansion)
                    d[name].append([alias])
                else:
                    d[name].append(expansion)

        rules = []
        expand1s = {name.lstrip('!').lstrip('?') for name in d
                    if name.startswith(('?', '!?'))}    # XXX Ugly code

        for name, expansions in d.items():
            for expansion in expansions:
                # Keep inlined rule names as plain symbols; wrap everything else in a matcher.
                reduced = [sym if sym.startswith('_') or sym in expand1s else
                           MatchTerminal(sym) if is_terminal(sym) else MatchTree(sym)
                           for sym in expansion if not is_discarded_terminal(sym)]

                name = name.lstrip('!').lstrip('?')
                rules.append((name, reduced, WriteTokens(name, expansion).f))
        self.rules = rules

    def _reconstruct(self, tree):
        # Parse this node's children with the inverted grammar, then flatten the result.
        parser = earley.Parser(ParserConf(self.rules, {}, tree.data))

        res ,= parser.parse(tree.children)    # XXX ambiguity?

        for item in res:
            if isinstance(item, Tree):
                for x in self._reconstruct(item):
                    yield x
            else:
                yield item

    def reconstruct(self, tree):
        return ''.join(self._reconstruct(tree))
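
Taken together, Reconstructor inverts the grammar: each rule's expansion becomes a pattern of MatchTerminal/MatchTree callables, and the WriteTokens callback re-emits the matched children along with the literal text of discarded (underscore-prefixed) terminals. Below is a minimal usage sketch, assuming this file is lark/reconstruct.py in a Lark checkout of the same vintage; the grammar, input string, and expected output are illustrative assumptions, not taken from this repo.

from lark import Lark
from lark.reconstruct import Reconstructor

# Hypothetical grammar: _EQ is underscore-prefixed, so it is filtered out of
# the parse tree and must be re-inserted by the reconstructor.
grammar = r'''
    start: pair
    pair: NAME _EQ NAME
    NAME: /[a-z]+/
    _EQ: "="
'''

parser = Lark(grammar)
tree = parser.parse("foo=bar")

# Re-derive the tree with the inverted grammar and join the emitted pieces.
text = Reconstructor(parser).reconstruct(tree)
print(text)   # expected: foo=bar

Because _reconstruct runs a fresh Earley parse for every tree node, an ambiguous grammar can produce more than one candidate derivation; the "res ,= parser.parse(...)" unpacking (and its "XXX ambiguity?" comment) asserts there is exactly one.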