This repo contains code to mirror other repos. It also contains the code that is getting mirrored.
You can select up to 25 topics. Each topic must start with a letter or number, may contain letters, numbers and dashes ('-'), and must be at most 35 characters long.

389 lines
14 KiB

  1. from .exceptions import GrammarError, ConfigurationError
  2. from .lexer import Token
  3. from .tree import Tree
  4. from .visitors import InlineTransformer # XXX Deprecated
  5. from .visitors import Transformer_InPlace
  6. from .visitors import _vargs_meta, _vargs_meta_inline
  7. ###{standalone
  8. from functools import partial, wraps
  9. from itertools import repeat, product
  10. class ExpandSingleChild:
  11. def __init__(self, node_builder):
  12. self.node_builder = node_builder
  13. def __call__(self, children):
  14. if len(children) == 1:
  15. return children[0]
  16. else:
  17. return self.node_builder(children)
class PropagatePositions:
    """Node-builder wrapper that copies line/column/position metadata from a
    node's children onto the node itself, as the tree is being built.

    An optional ``node_filter`` predicate restricts which children may
    contribute position information.
    """

    def __init__(self, node_builder, node_filter=None):
        self.node_builder = node_builder
        self.node_filter = node_filter

    def __call__(self, children):
        res = self.node_builder(children)

        if isinstance(res, Tree):
            # Calculate positions while the tree is streaming, according to the rule:
            # - nodes start at the start of their first child's container,
            #   and end at the end of their last child's container.
            # Containers are nodes that take up space in text, but have been inlined in the tree.
            res_meta = res.meta

            first_meta = self._pp_get_meta(children)
            if first_meta is not None:
                if not hasattr(res_meta, 'line'):
                    # Only assign when meta wasn't already set; it may have been set
                    # already because the rule has been inlined (e.g. `?rule`).
                    res_meta.line = getattr(first_meta, 'container_line', first_meta.line)
                    res_meta.column = getattr(first_meta, 'container_column', first_meta.column)
                    res_meta.start_pos = getattr(first_meta, 'container_start_pos', first_meta.start_pos)
                    res_meta.empty = False

                # Container bounds are always refreshed, even when meta was preset.
                res_meta.container_line = getattr(first_meta, 'container_line', first_meta.line)
                res_meta.container_column = getattr(first_meta, 'container_column', first_meta.column)

            last_meta = self._pp_get_meta(reversed(children))
            if last_meta is not None:
                if not hasattr(res_meta, 'end_line'):
                    res_meta.end_line = getattr(last_meta, 'container_end_line', last_meta.end_line)
                    res_meta.end_column = getattr(last_meta, 'container_end_column', last_meta.end_column)
                    res_meta.end_pos = getattr(last_meta, 'container_end_pos', last_meta.end_pos)
                    res_meta.empty = False

                res_meta.container_end_line = getattr(last_meta, 'container_end_line', last_meta.end_line)
                res_meta.container_end_column = getattr(last_meta, 'container_end_column', last_meta.end_column)

        return res

    def _pp_get_meta(self, children):
        # Return the first child (in iteration order) that can provide position
        # info: a non-empty Tree's meta, or a Token itself. Returns None if no
        # child qualifies.
        for c in children:
            if self.node_filter is not None and not self.node_filter(c):
                continue
            if isinstance(c, Tree):
                if not c.meta.empty:
                    return c.meta
            elif isinstance(c, Token):
                return c
  59. def make_propagate_positions(option):
  60. if callable(option):
  61. return partial(PropagatePositions, node_filter=option)
  62. elif option is True:
  63. return PropagatePositions
  64. elif option is False:
  65. return None
  66. raise ConfigurationError('Invalid option for propagate_positions: %r' % option)
  67. class ChildFilter:
  68. def __init__(self, to_include, append_none, node_builder):
  69. self.node_builder = node_builder
  70. self.to_include = to_include
  71. self.append_none = append_none
  72. def __call__(self, children):
  73. filtered = []
  74. for i, to_expand, add_none in self.to_include:
  75. if add_none:
  76. filtered += [None] * add_none
  77. if to_expand:
  78. filtered += children[i].children
  79. else:
  80. filtered.append(children[i])
  81. if self.append_none:
  82. filtered += [None] * self.append_none
  83. return self.node_builder(filtered)
  84. class ChildFilterLALR(ChildFilter):
  85. """Optimized childfilter for LALR (assumes no duplication in parse tree, so it's safe to change it)"""
  86. def __call__(self, children):
  87. filtered = []
  88. for i, to_expand, add_none in self.to_include:
  89. if add_none:
  90. filtered += [None] * add_none
  91. if to_expand:
  92. if filtered:
  93. filtered += children[i].children
  94. else: # Optimize for left-recursion
  95. filtered = children[i].children
  96. else:
  97. filtered.append(children[i])
  98. if self.append_none:
  99. filtered += [None] * self.append_none
  100. return self.node_builder(filtered)
  101. class ChildFilterLALR_NoPlaceholders(ChildFilter):
  102. "Optimized childfilter for LALR (assumes no duplication in parse tree, so it's safe to change it)"
  103. def __init__(self, to_include, node_builder):
  104. self.node_builder = node_builder
  105. self.to_include = to_include
  106. def __call__(self, children):
  107. filtered = []
  108. for i, to_expand in self.to_include:
  109. if to_expand:
  110. if filtered:
  111. filtered += children[i].children
  112. else: # Optimize for left-recursion
  113. filtered = children[i].children
  114. else:
  115. filtered.append(children[i])
  116. return self.node_builder(filtered)
  117. def _should_expand(sym):
  118. return not sym.is_term and sym.name.startswith('_')
def maybe_create_child_filter(expansion, keep_all_tokens, ambiguous, _empty_indices):
    """Return a ChildFilter factory for this rule expansion, or None when no
    filtering/expansion/placeholder work is needed.

    _empty_indices, when given, is a list of booleans marking where optional
    placeholders (`[x]` with maybe_placeholders) were matched empty.
    """
    # Prepare empty_indices as: How many Nones to insert at each index?
    if _empty_indices:
        # Exactly one False per real symbol in the expansion.
        assert _empty_indices.count(False) == len(expansion)
        # Encode the booleans as a '0'/'1' string; each run of '1's between
        # '0's is the number of Nones to insert before that position.
        s = ''.join(str(int(b)) for b in _empty_indices)
        empty_indices = [len(ones) for ones in s.split('0')]
        assert len(empty_indices) == len(expansion)+1, (empty_indices, len(expansion))
    else:
        empty_indices = [0] * (len(expansion)+1)

    to_include = []
    nones_to_add = 0
    for i, sym in enumerate(expansion):
        nones_to_add += empty_indices[i]
        # Filtered-out terminals are dropped unless keep_all_tokens is set.
        if keep_all_tokens or not (sym.is_term and sym.filter_out):
            to_include.append((i, _should_expand(sym), nones_to_add))
            nones_to_add = 0

    nones_to_add += empty_indices[len(expansion)]

    # A filter is only needed if there are placeholders, dropped children,
    # or children to expand; otherwise return None (no wrapper).
    if _empty_indices or len(to_include) < len(expansion) or any(to_expand for i, to_expand,_ in to_include):
        if _empty_indices or ambiguous:
            return partial(ChildFilter if ambiguous else ChildFilterLALR, to_include, nones_to_add)
        else:
            # LALR without placeholders
            return partial(ChildFilterLALR_NoPlaceholders, [(i, x) for i,x,_ in to_include])
class AmbiguousExpander:
    """Deal with the case where we're expanding children ('_rule') into a parent but the children
    are ambiguous, i.e. (parent->_ambig->_expand_this_rule). In this case, make the parent itself
    ambiguous with as many copies as there are ambiguous children, and then copy the ambiguous children
    into the right parents in the right places, essentially shifting the ambiguity up the tree."""

    def __init__(self, to_expand, tree_class, node_builder):
        self.node_builder = node_builder
        self.tree_class = tree_class
        self.to_expand = to_expand

    def __call__(self, children):
        def _is_ambig_tree(t):
            return hasattr(t, 'data') and t.data == '_ambig'

        # -- When we're repeatedly expanding ambiguities we can end up with nested ambiguities.
        # All children of an _ambig node should be a derivation of that ambig node, hence
        # it is safe to assume that if we see an _ambig node nested within an ambig node
        # it is safe to simply expand it into the parent _ambig node as an alternative derivation.
        ambiguous = []
        for i, child in enumerate(children):
            if _is_ambig_tree(child):
                if i in self.to_expand:
                    ambiguous.append(i)

                # Flatten nested _ambig grandchildren into this _ambig node.
                to_expand = [j for j, grandchild in enumerate(child.children) if _is_ambig_tree(grandchild)]
                child.expand_kids_by_index(*to_expand)

        if not ambiguous:
            return self.node_builder(children)

        # For each combination of ambiguous alternatives, build one parent copy.
        # Non-ambiguous children are repeated unchanged in every combination.
        expand = [iter(child.children) if i in ambiguous else repeat(child) for i, child in enumerate(children)]
        return self.tree_class('_ambig', [self.node_builder(list(f[0])) for f in product(zip(*expand))])
  169. def maybe_create_ambiguous_expander(tree_class, expansion, keep_all_tokens):
  170. to_expand = [i for i, sym in enumerate(expansion)
  171. if keep_all_tokens or ((not (sym.is_term and sym.filter_out)) and _should_expand(sym))]
  172. if to_expand:
  173. return partial(AmbiguousExpander, to_expand, tree_class)
class AmbiguousIntermediateExpander:
    """
    Propagate ambiguous intermediate nodes and their derivations up to the
    current rule.

    In general, converts

    rule
      _iambig
        _inter
          someChildren1
          ...
        _inter
          someChildren2
          ...
      someChildren3
      ...

    to

    _ambig
      rule
        someChildren1
        ...
        someChildren3
        ...
      rule
        someChildren2
        ...
        someChildren3
        ...
      rule
        childrenFromNestedIambigs
        ...
        someChildren3
        ...
      ...

    propagating up any nested '_iambig' nodes along the way.
    """

    def __init__(self, tree_class, node_builder):
        self.node_builder = node_builder
        self.tree_class = tree_class

    def __call__(self, children):
        def _is_iambig_tree(child):
            return hasattr(child, 'data') and child.data == '_iambig'

        def _collapse_iambig(children):
            """
            Recursively flatten the derivations of the parent of an '_iambig'
            node. Returns a list of '_inter' nodes guaranteed not
            to contain any nested '_iambig' nodes, or None if children does
            not contain an '_iambig' node.
            """
            # Due to the structure of the SPPF,
            # an '_iambig' node can only appear as the first child
            if children and _is_iambig_tree(children[0]):
                iambig_node = children[0]
                result = []
                for grandchild in iambig_node.children:
                    # Each grandchild is an '_inter' derivation; collapse it
                    # recursively in case it nests further '_iambig' nodes.
                    collapsed = _collapse_iambig(grandchild.children)
                    if collapsed:
                        # Append the trailing siblings of the '_iambig' node to
                        # every collapsed derivation.
                        for child in collapsed:
                            child.children += children[1:]
                        result += collapsed
                    else:
                        new_tree = self.tree_class('_inter', grandchild.children + children[1:])
                        result.append(new_tree)
                return result

        collapsed = _collapse_iambig(children)
        if collapsed:
            # Build one rule node per derivation and wrap them in '_ambig'.
            processed_nodes = [self.node_builder(c.children) for c in collapsed]
            return self.tree_class('_ambig', processed_nodes)

        return self.node_builder(children)
  242. def ptb_inline_args(func):
  243. @wraps(func)
  244. def f(children):
  245. return func(*children)
  246. return f
  247. def inplace_transformer(func):
  248. @wraps(func)
  249. def f(children):
  250. # function name in a Transformer is a rule name.
  251. tree = Tree(func.__name__, children)
  252. return func(tree)
  253. return f
  254. def apply_visit_wrapper(func, name, wrapper):
  255. if wrapper is _vargs_meta or wrapper is _vargs_meta_inline:
  256. raise NotImplementedError("Meta args not supported for internal transformer")
  257. @wraps(func)
  258. def f(children):
  259. return wrapper(func, name, children, None)
  260. return f
class ParseTreeBuilder:
    """Builds, for every rule, the chain of node-builder wrappers (child
    filtering, single-child expansion, position propagation, ambiguity
    expansion) and combines them with an optional user transformer into
    per-rule callbacks."""

    def __init__(self, rules, tree_class, propagate_positions=False, ambiguous=False, maybe_placeholders=False):
        self.tree_class = tree_class
        self.propagate_positions = propagate_positions
        self.ambiguous = ambiguous
        self.maybe_placeholders = maybe_placeholders

        self.rule_builders = list(self._init_builders(rules))

    def _init_builders(self, rules):
        # Yields (rule, wrapper_chain) pairs; the chain is applied to the base
        # callback, innermost first, in create_callback().
        propagate_positions = make_propagate_positions(self.propagate_positions)

        for rule in rules:
            options = rule.options
            keep_all_tokens = options.keep_all_tokens
            expand_single_child = options.expand1

            # Entries that evaluate falsy (False/None) are dropped by filter();
            # each survivor is a callable that wraps a node builder.
            wrapper_chain = list(filter(None, [
                (expand_single_child and not rule.alias) and ExpandSingleChild,
                maybe_create_child_filter(rule.expansion, keep_all_tokens, self.ambiguous, options.empty_indices if self.maybe_placeholders else None),
                propagate_positions,
                self.ambiguous and maybe_create_ambiguous_expander(self.tree_class, rule.expansion, keep_all_tokens),
                self.ambiguous and partial(AmbiguousIntermediateExpander, self.tree_class)
            ]))

            yield rule, wrapper_chain

    def create_callback(self, transformer=None):
        """Return a dict mapping each rule to its fully-wrapped callback.

        If `transformer` has an attribute named after the rule (its alias,
        template source, or origin name), that attribute becomes the base
        callback; otherwise a plain tree node is built.

        Raises:
            GrammarError: if the same rule appears more than once.
        """
        callbacks = {}

        for rule, wrapper_chain in self.rule_builders:

            user_callback_name = rule.alias or rule.options.template_source or rule.origin.name
            try:
                f = getattr(transformer, user_callback_name)
                # XXX InlineTransformer is deprecated!
                wrapper = getattr(f, 'visit_wrapper', None)
                if wrapper is not None:
                    f = apply_visit_wrapper(f, user_callback_name, wrapper)
                else:
                    if isinstance(transformer, InlineTransformer):
                        f = ptb_inline_args(f)
                    elif isinstance(transformer, Transformer_InPlace):
                        f = inplace_transformer(f)
            except AttributeError:
                # No user callback: build a plain tree node named after the rule.
                f = partial(self.tree_class, user_callback_name)

            # Wrap the base callback with the rule's wrapper chain.
            for w in wrapper_chain:
                f = w(f)

            if rule in callbacks:
                raise GrammarError("Rule '%s' already exists" % (rule,))

            callbacks[rule] = f

        return callbacks
  305. ###}