  1. """Parses and creates Grammar objects"""
  2. import os.path
  3. import sys
  4. from copy import copy, deepcopy
  5. from io import open
  6. import pkgutil
  7. from ast import literal_eval
  8. from .utils import bfs, Py36, logger, classify_bool
  9. from .lexer import Token, TerminalDef, PatternStr, PatternRE
  10. from .parse_tree_builder import ParseTreeBuilder
  11. from .parser_frontends import ParsingFrontend
  12. from .common import LexerConf, ParserConf
  13. from .grammar import RuleOptions, Rule, Terminal, NonTerminal, Symbol
  14. from .utils import classify, suppress, dedup_list, Str
  15. from .exceptions import GrammarError, UnexpectedCharacters, UnexpectedToken
  16. from .tree import Tree, SlottedTree as ST
  17. from .visitors import Transformer, Visitor, v_args, Transformer_InPlace, Transformer_NonRecursive
  18. inline_args = v_args(inline=True)
  19. __path__ = os.path.dirname(__file__)
  20. IMPORT_PATHS = ['grammars']
  21. EXT = '.lark'
  22. _RE_FLAGS = 'imslux'
  23. _EMPTY = Symbol('__empty__')
  24. _TERMINAL_NAMES = {
  25. '.' : 'DOT',
  26. ',' : 'COMMA',
  27. ':' : 'COLON',
  28. ';' : 'SEMICOLON',
  29. '+' : 'PLUS',
  30. '-' : 'MINUS',
  31. '*' : 'STAR',
  32. '/' : 'SLASH',
  33. '\\' : 'BACKSLASH',
  34. '|' : 'VBAR',
  35. '?' : 'QMARK',
  36. '!' : 'BANG',
  37. '@' : 'AT',
  38. '#' : 'HASH',
  39. '$' : 'DOLLAR',
  40. '%' : 'PERCENT',
  41. '^' : 'CIRCUMFLEX',
  42. '&' : 'AMPERSAND',
  43. '_' : 'UNDERSCORE',
  44. '<' : 'LESSTHAN',
  45. '>' : 'MORETHAN',
  46. '=' : 'EQUAL',
  47. '"' : 'DBLQUOTE',
  48. '\'' : 'QUOTE',
  49. '`' : 'BACKQUOTE',
  50. '~' : 'TILDE',
  51. '(' : 'LPAR',
  52. ')' : 'RPAR',
  53. '{' : 'LBRACE',
  54. '}' : 'RBRACE',
  55. '[' : 'LSQB',
  56. ']' : 'RSQB',
  57. '\n' : 'NEWLINE',
  58. '\r\n' : 'CRLF',
  59. '\t' : 'TAB',
  60. ' ' : 'SPACE',
  61. }
  62. # Grammar Parser
  63. TERMINALS = {
  64. '_LPAR': r'\(',
  65. '_RPAR': r'\)',
  66. '_LBRA': r'\[',
  67. '_RBRA': r'\]',
  68. '_LBRACE': r'\{',
  69. '_RBRACE': r'\}',
  70. 'OP': '[+*]|[?](?![a-z])',
  71. '_COLON': ':',
  72. '_COMMA': ',',
  73. '_OR': r'\|',
  74. '_DOT': r'\.(?!\.)',
  75. '_DOTDOT': r'\.\.',
  76. 'TILDE': '~',
  77. 'RULE': '!?[_?]?[a-z][_a-z0-9]*',
  78. 'TERMINAL': '_?[A-Z][_A-Z0-9]*',
  79. 'STRING': r'"(\\"|\\\\|[^"\n])*?"i?',
  80. 'REGEXP': r'/(?!/)(\\/|\\\\|[^/])*?/[%s]*' % _RE_FLAGS,
  81. '_NL': r'(\r?\n)+\s*',
  82. 'WS': r'[ \t]+',
  83. 'COMMENT': r'\s*//[^\n]*',
  84. '_TO': '->',
  85. '_IGNORE': r'%ignore',
  86. '_DECLARE': r'%declare',
  87. '_IMPORT': r'%import',
  88. 'NUMBER': r'[+-]?\d+',
  89. }
  90. RULES = {
  91. 'start': ['_list'],
  92. '_list': ['_item', '_list _item'],
  93. '_item': ['rule', 'term', 'statement', '_NL'],
  94. 'rule': ['RULE template_params _COLON expansions _NL',
  95. 'RULE template_params _DOT NUMBER _COLON expansions _NL'],
  96. 'template_params': ['_LBRACE _template_params _RBRACE',
  97. ''],
  98. '_template_params': ['RULE',
  99. '_template_params _COMMA RULE'],
  100. 'expansions': ['alias',
  101. 'expansions _OR alias',
  102. 'expansions _NL _OR alias'],
  103. '?alias': ['expansion _TO RULE', 'expansion'],
  104. 'expansion': ['_expansion'],
  105. '_expansion': ['', '_expansion expr'],
  106. '?expr': ['atom',
  107. 'atom OP',
  108. 'atom TILDE NUMBER',
  109. 'atom TILDE NUMBER _DOTDOT NUMBER',
  110. ],
  111. '?atom': ['_LPAR expansions _RPAR',
  112. 'maybe',
  113. 'value'],
  114. 'value': ['terminal',
  115. 'nonterminal',
  116. 'literal',
  117. 'range',
  118. 'template_usage'],
  119. 'terminal': ['TERMINAL'],
  120. 'nonterminal': ['RULE'],
  121. '?name': ['RULE', 'TERMINAL'],
  122. 'maybe': ['_LBRA expansions _RBRA'],
  123. 'range': ['STRING _DOTDOT STRING'],
  124. 'template_usage': ['RULE _LBRACE _template_args _RBRACE'],
  125. '_template_args': ['value',
  126. '_template_args _COMMA value'],
  127. 'term': ['TERMINAL _COLON expansions _NL',
  128. 'TERMINAL _DOT NUMBER _COLON expansions _NL'],
  129. 'statement': ['ignore', 'import', 'declare'],
  130. 'ignore': ['_IGNORE expansions _NL'],
  131. 'declare': ['_DECLARE _declare_args _NL'],
  132. 'import': ['_IMPORT _import_path _NL',
  133. '_IMPORT _import_path _LPAR name_list _RPAR _NL',
  134. '_IMPORT _import_path _TO name _NL'],
  135. '_import_path': ['import_lib', 'import_rel'],
  136. 'import_lib': ['_import_args'],
  137. 'import_rel': ['_DOT _import_args'],
  138. '_import_args': ['name', '_import_args _DOT name'],
  139. 'name_list': ['_name_list'],
  140. '_name_list': ['name', '_name_list _COMMA name'],
  141. '_declare_args': ['name', '_declare_args name'],
  142. 'literal': ['REGEXP', 'STRING'],
  143. }
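
# For illustration: a grammar line such as
#     ?sum.2: product ("+" product)*
# matches the second 'rule' production above (RULE template_params _DOT NUMBER
# _COLON expansions _NL). The '!'/'?' prefixes are part of the RULE token
# itself (see 'RULE' in TERMINALS); options_from_rule() below splits them off
# into RuleOptions.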


@inline_args
class EBNF_to_BNF(Transformer_InPlace):
    def __init__(self):
        self.new_rules = []
        self.rules_by_expr = {}
        self.prefix = 'anon'
        self.i = 0
        self.rule_options = None

    def _add_recurse_rule(self, type_, expr):
        if expr in self.rules_by_expr:
            return self.rules_by_expr[expr]

        new_name = '__%s_%s_%d' % (self.prefix, type_, self.i)
        self.i += 1
        t = NonTerminal(new_name)
        tree = ST('expansions', [ST('expansion', [expr]), ST('expansion', [t, expr])])
        self.new_rules.append((new_name, tree, self.rule_options))
        self.rules_by_expr[expr] = t
        return t

    def expr(self, rule, op, *args):
        if op.value == '?':
            empty = ST('expansion', [])
            return ST('expansions', [rule, empty])
        elif op.value == '+':
            # a : b c+ d
            #   -->
            # a : b _c d
            # _c : _c c | c;
            return self._add_recurse_rule('plus', rule)
        elif op.value == '*':
            # a : b c* d
            #   -->
            # a : b _c? d
            # _c : _c c | c;
            new_name = self._add_recurse_rule('star', rule)
            return ST('expansions', [new_name, ST('expansion', [])])
        elif op.value == '~':
            if len(args) == 1:
                mn = mx = int(args[0])
            else:
                mn, mx = map(int, args)
                if mx < mn or mn < 0:
                    raise GrammarError("Bad Range for %s (%d..%d isn't allowed)" % (rule, mn, mx))
            return ST('expansions', [ST('expansion', [rule] * n) for n in range(mn, mx + 1)])
        assert False, op

    def maybe(self, rule):
        keep_all_tokens = self.rule_options and self.rule_options.keep_all_tokens

        def will_not_get_removed(sym):
            if isinstance(sym, NonTerminal):
                return not sym.name.startswith('_')
            if isinstance(sym, Terminal):
                return keep_all_tokens or not sym.filter_out
            assert False

        if any(rule.scan_values(will_not_get_removed)):
            empty = _EMPTY
        else:
            empty = ST('expansion', [])

        return ST('expansions', [rule, empty])
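
# For illustration, with prefix set to 'list' by Grammar.compile below, the
# rewrite above turns `list: item+` into roughly:
#     list: __list_plus_0
#     __list_plus_0: item | __list_plus_0 item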


class SimplifyRule_Visitor(Visitor):
    @staticmethod
    def _flatten(tree):
        while True:
            to_expand = [i for i, child in enumerate(tree.children)
                         if isinstance(child, Tree) and child.data == tree.data]
            if not to_expand:
                break
            tree.expand_kids_by_index(*to_expand)

    def expansion(self, tree):
        # rules_list unpacking
        # a : b (c|d) e
        #  -->
        # a : b c e | b d e
        #
        # In AST terms:
        # expansion(b, expansions(c, d), e)
        #   -->
        # expansions( expansion(b, c, e), expansion(b, d, e) )

        self._flatten(tree)

        for i, child in enumerate(tree.children):
            if isinstance(child, Tree) and child.data == 'expansions':
                tree.data = 'expansions'
                tree.children = [self.visit(ST('expansion', [option if i == j else other
                                                             for j, other in enumerate(tree.children)]))
                                 for option in dedup_list(child.children)]
                self._flatten(tree)
                break

    def alias(self, tree):
        rule, alias_name = tree.children
        if rule.data == 'expansions':
            aliases = []
            for child in tree.children[0].children:
                aliases.append(ST('alias', [child, alias_name]))
            tree.data = 'expansions'
            tree.children = aliases

    def expansions(self, tree):
        self._flatten(tree)
        # Ensure all children are unique
        if len(set(tree.children)) != len(tree.children):
            tree.children = dedup_list(tree.children)   # dedup is expensive, so try to minimize its use


class RuleTreeToText(Transformer):
    def expansions(self, x):
        return x

    def expansion(self, symbols):
        return symbols, None

    def alias(self, x):
        (expansion, _alias), alias = x
        assert _alias is None, (alias, expansion, '-', _alias)   # Double alias not allowed
        return expansion, alias.value


@inline_args
class CanonizeTree(Transformer_InPlace):
    def tokenmods(self, *args):
        if len(args) == 1:
            return list(args)
        tokenmods, value = args
        return tokenmods + [value]


class PrepareAnonTerminals(Transformer_InPlace):
    """Create a unique list of anonymous terminals. Attempt to give meaningful names to them when we add them"""

    def __init__(self, terminals):
        self.terminals = terminals
        self.term_set = {td.name for td in self.terminals}
        self.term_reverse = {td.pattern: td for td in terminals}
        self.i = 0
        self.rule_options = None

    @inline_args
    def pattern(self, p):
        value = p.value

        if p in self.term_reverse and p.flags != self.term_reverse[p].pattern.flags:
            raise GrammarError(u'Conflicting flags for the same terminal: %s' % p)

        term_name = None
        user_repr = p.raw   # This will always be ok, independent of what term_name we end up using

        if isinstance(p, PatternStr):
            try:
                # If already defined, use the user-defined terminal name
                term_name = self.term_reverse[p].name
            except KeyError:
                # Try to assign an indicative anon-terminal name
                try:
                    term_name = _TERMINAL_NAMES[value]
                except KeyError:
                    if value.isalnum() and value[0].isalpha() and value.upper() not in self.term_set:
                        with suppress(UnicodeEncodeError):
                            value.upper().encode('ascii')   # Make sure we don't have unicode in our terminal names
                            term_name = value.upper()

                if term_name in self.term_set:
                    term_name = None

        elif isinstance(p, PatternRE):
            if p in self.term_reverse:   # Kind of a weird placement
                term_name = self.term_reverse[p].name
        else:
            assert False, p

        if term_name is None:
            term_name = '__ANON_%d' % self.i
            self.i += 1

        if term_name not in self.term_set:
            assert p not in self.term_reverse
            self.term_set.add(term_name)
            termdef = TerminalDef(term_name, p, user_repr=user_repr)
            self.term_reverse[p] = termdef
            self.terminals.append(termdef)

        filter_out = False if self.rule_options and self.rule_options.keep_all_tokens else isinstance(p, PatternStr)

        return Terminal(term_name, filter_out=filter_out)
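
# Naming examples for the logic above (assuming no user-defined terminal
# already claims the name): the anonymous string "+" becomes PLUS (via
# _TERMINAL_NAMES), "while" becomes WHILE (the isalnum branch), and an
# anonymous regexp such as /\d+/ falls through to __ANON_0.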


class _ReplaceSymbols(Transformer_InPlace):
    """Helper for ApplyTemplates"""

    def __init__(self):
        self.names = {}

    def value(self, c):
        if len(c) == 1 and isinstance(c[0], Token) and c[0].value in self.names:
            return self.names[c[0].value]
        return self.__default__('value', c, None)

    def template_usage(self, c):
        if c[0] in self.names:
            return self.__default__('template_usage', [self.names[c[0]].name] + c[1:], None)
        return self.__default__('template_usage', c, None)


class ApplyTemplates(Transformer_InPlace):
    """Apply the templates, creating new rules that represent the used templates"""

    def __init__(self, rule_defs):
        self.rule_defs = rule_defs
        self.replacer = _ReplaceSymbols()
        self.created_templates = set()

    def template_usage(self, c):
        name = c[0]
        args = c[1:]
        result_name = "%s{%s}" % (name, ",".join(a.name for a in args))
        if result_name not in self.created_templates:
            self.created_templates.add(result_name)
            (_n, params, tree, options) ,= (t for t in self.rule_defs if t[0] == name)
            assert len(params) == len(args), args
            result_tree = deepcopy(tree)
            self.replacer.names = dict(zip(params, args))
            self.replacer.transform(result_tree)
            self.rule_defs.append((result_name, [], result_tree, deepcopy(options)))
        return NonTerminal(result_name)
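
# For illustration (hypothetical template): given `_sep{x, delim}: x (delim x)*`,
# a usage `_sep{atom, COMMA}` is rewritten into the nonterminal
# `_sep{atom,COMMA}`, and a concrete rule of that name (parameters substituted
# by _ReplaceSymbols) is appended to rule_defs.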


def _rfind(s, choices):
    return max(s.rfind(c) for c in choices)


def eval_escaping(s):
    w = ''
    i = iter(s)
    for n in i:
        w += n
        if n == '\\':
            try:
                n2 = next(i)
            except StopIteration:
                raise GrammarError("Literal ended unexpectedly (bad escaping): `%r`" % s)
            if n2 == '\\':
                w += '\\\\'
            elif n2 not in 'uxnftr':
                w += '\\'
            w += n2
    w = w.replace('\\"', '"').replace("'", "\\'")

    to_eval = "u'''%s'''" % w
    try:
        s = literal_eval(to_eval)
    except SyntaxError as e:
        raise GrammarError(s, e)

    return s
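
# For illustration: eval_escaping(r'a\nb') returns 'a\nb' with a real newline,
# while eval_escaping(r'a\.b') keeps the backslash; escapes outside 'uxnftr'
# are preserved, so regexp escapes like \. survive into the pattern.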


def _literal_to_pattern(literal):
    v = literal.value
    flag_start = _rfind(v, '/"') + 1
    assert flag_start > 0
    flags = v[flag_start:]
    assert all(f in _RE_FLAGS for f in flags), flags

    if literal.type == 'STRING' and '\n' in v:
        raise GrammarError('You cannot put newlines in string literals')

    if literal.type == 'REGEXP' and '\n' in v and 'x' not in flags:
        raise GrammarError('You can only use newlines in regular expressions '
                           'with the `x` (verbose) flag')

    v = v[:flag_start]
    assert v[0] == v[-1] and v[0] in '"/'
    x = v[1:-1]

    s = eval_escaping(x)

    if literal.type == 'STRING':
        s = s.replace('\\\\', '\\')
        return PatternStr(s, flags, raw=literal.value)
    elif literal.type == 'REGEXP':
        return PatternRE(s, flags, raw=literal.value)
    else:
        assert False, 'Invariant failed: literal.type not in ["STRING", "REGEXP"]'


@inline_args
class PrepareLiterals(Transformer_InPlace):
    def literal(self, literal):
        return ST('pattern', [_literal_to_pattern(literal)])

    def range(self, start, end):
        assert start.type == end.type == 'STRING'
        start = start.value[1:-1]
        end = end.value[1:-1]
        assert len(eval_escaping(start)) == len(eval_escaping(end)) == 1
        regexp = '[%s-%s]' % (start, end)
        return ST('pattern', [PatternRE(regexp)])
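
# For illustration: the literal "abc"i becomes PatternStr('abc', 'i'), and the
# range "a".."z" becomes PatternRE('[a-z]').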


def _make_joined_pattern(regexp, flags_set):
    # In Python 3.6, a new syntax for flags was introduced that allows us to restrict the scope
    # of flags to a specific regexp group. We are already using it in `lexer.Pattern._get_flags`.
    # However, for prior Python versions we still need to use global flags, so we have to make sure
    # that there are no flag collisions when we merge several terminals.
    flags = ()
    if not Py36:
        if len(flags_set) > 1:
            raise GrammarError("Lark doesn't support joining terminals with conflicting flags in python <3.6!")
        elif len(flags_set) == 1:
            flags ,= flags_set

    return PatternRE(regexp, flags)


class TerminalTreeToPattern(Transformer):
    def pattern(self, ps):
        p ,= ps
        return p

    def expansion(self, items):
        assert items
        if len(items) == 1:
            return items[0]

        pattern = ''.join(i.to_regexp() for i in items)
        return _make_joined_pattern(pattern, {i.flags for i in items})

    def expansions(self, exps):
        if len(exps) == 1:
            return exps[0]

        pattern = '(?:%s)' % ('|'.join(i.to_regexp() for i in exps))
        return _make_joined_pattern(pattern, {i.flags for i in exps})

    def expr(self, args):
        inner, op = args[:2]
        if op == '~':
            if len(args) == 3:
                op = "{%d}" % int(args[2])
            else:
                mn, mx = map(int, args[2:])
                if mx < mn:
                    raise GrammarError("Bad Range for %s (%d..%d isn't allowed)" % (inner, mn, mx))
                op = "{%d,%d}" % (mn, mx)
        else:
            assert len(args) == 2
        return PatternRE('(?:%s)%s' % (inner.to_regexp(), op), inner.flags)

    def maybe(self, expr):
        return self.expr(expr + ['?'])

    def alias(self, t):
        raise GrammarError("Aliasing not allowed in terminals (You used -> in the wrong place)")

    def value(self, v):
        return v[0]
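
# For illustration: a terminal defined as `AB: "a" "b"+` is collapsed by
# TerminalTreeToPattern into the single regexp a(?:b)+.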


class PrepareSymbols(Transformer_InPlace):
    def value(self, v):
        v ,= v
        if isinstance(v, Tree):
            return v
        elif v.type == 'RULE':
            return NonTerminal(Str(v.value))
        elif v.type == 'TERMINAL':
            return Terminal(Str(v.value), filter_out=v.startswith('_'))
        assert False


def _choice_of_rules(rules):
    return ST('expansions', [ST('expansion', [Token('RULE', name)]) for name in rules])


def nr_deepcopy_tree(t):
    """Deepcopy tree `t` without recursion"""
    return Transformer_NonRecursive(False).transform(t)


class Grammar:
    def __init__(self, rule_defs, term_defs, ignore):
        self.term_defs = term_defs
        self.rule_defs = rule_defs
        self.ignore = ignore

    def compile(self, start, terminals_to_keep):
        # We change the trees in-place (to support huge grammars)
        # So deepcopy allows calling compile more than once.
        term_defs = deepcopy(list(self.term_defs))
        rule_defs = [(n, p, nr_deepcopy_tree(t), o) for n, p, t, o in self.rule_defs]

        # ===================
        #  Compile Terminals
        # ===================

        # Convert terminal-trees to strings/regexps
        for name, (term_tree, priority) in term_defs:
            if term_tree is None:   # Terminal added through %declare
                continue
            expansions = list(term_tree.find_data('expansion'))
            if len(expansions) == 1 and not expansions[0].children:
                raise GrammarError("Terminals cannot be empty (%s)" % name)

        transformer = PrepareLiterals() * TerminalTreeToPattern()
        terminals = [TerminalDef(name, transformer.transform(term_tree), priority)
                     for name, (term_tree, priority) in term_defs if term_tree]

        # =================
        #  Compile Rules
        # =================

        # 1. Pre-process terminals
        anon_tokens_transf = PrepareAnonTerminals(terminals)
        transformer = PrepareLiterals() * PrepareSymbols() * anon_tokens_transf   # Adds to terminals

        # 2. Inline Templates
        transformer *= ApplyTemplates(rule_defs)

        # 3. Convert EBNF to BNF (and apply step 1 & 2)
        ebnf_to_bnf = EBNF_to_BNF()
        rules = []
        i = 0
        while i < len(rule_defs):   # We have to do it like this because rule_defs might grow due to templates
            name, params, rule_tree, options = rule_defs[i]
            i += 1
            if len(params) != 0:   # Don't transform templates
                continue
            rule_options = RuleOptions(keep_all_tokens=True) if options and options.keep_all_tokens else None
            ebnf_to_bnf.rule_options = rule_options
            ebnf_to_bnf.prefix = name
            anon_tokens_transf.rule_options = rule_options
            tree = transformer.transform(rule_tree)
            res = ebnf_to_bnf.transform(tree)
            rules.append((name, res, options))
        rules += ebnf_to_bnf.new_rules

        assert len(rules) == len({name for name, _t, _o in rules}), "Whoops, name collision"

        # 4. Compile tree to Rule objects
        rule_tree_to_text = RuleTreeToText()

        simplify_rule = SimplifyRule_Visitor()
        compiled_rules = []
        for rule_content in rules:
            name, tree, options = rule_content
            simplify_rule.visit(tree)
            expansions = rule_tree_to_text.transform(tree)

            for i, (expansion, alias) in enumerate(expansions):
                if alias and name.startswith('_'):
                    raise GrammarError("Rule %s is marked for expansion (it starts with an underscore) and isn't allowed to have aliases (alias=%s)" % (name, alias))

                empty_indices = [x == _EMPTY for x in expansion]
                if any(empty_indices):
                    exp_options = copy(options) or RuleOptions()
                    exp_options.empty_indices = empty_indices
                    expansion = [x for x in expansion if x != _EMPTY]
                else:
                    exp_options = options

                assert all(isinstance(x, Symbol) for x in expansion), expansion
                rule = Rule(NonTerminal(name), expansion, i, alias, exp_options)
                compiled_rules.append(rule)

        # Remove duplicates of empty rules, throw error for non-empty duplicates
        if len(set(compiled_rules)) != len(compiled_rules):
            duplicates = classify(compiled_rules, lambda x: x)
            for dups in duplicates.values():
                if len(dups) > 1:
                    if dups[0].expansion:
                        raise GrammarError("Rules defined twice: %s\n\n(Might happen due to colliding expansion of optionals: [] or ?)"
                                           % ''.join('\n  * %s' % i for i in dups))

                    # Empty rule; assert all other attributes are equal
                    assert len({(r.alias, r.order, r.options) for r in dups}) == len(dups)

            # Remove duplicates
            compiled_rules = list(set(compiled_rules))

        # Filter out unused rules
        while True:
            c = len(compiled_rules)
            used_rules = {s for r in compiled_rules
                          for s in r.expansion
                          if isinstance(s, NonTerminal)
                          and s != r.origin}
            used_rules |= {NonTerminal(s) for s in start}
            compiled_rules, unused = classify_bool(compiled_rules, lambda r: r.origin in used_rules)
            for r in unused:
                logger.debug("Unused rule: %s", r)
            if len(compiled_rules) == c:
                break

        # Filter out unused terminals
        used_terms = {t.name for r in compiled_rules
                      for t in r.expansion
                      if isinstance(t, Terminal)}
        terminals, unused = classify_bool(terminals, lambda t: t.name in used_terms or t.name in self.ignore or t.name in terminals_to_keep)
        if unused:
            logger.debug("Unused terminals: %s", [t.name for t in unused])

        return terminals, compiled_rules, self.ignore
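
# The compile() pipeline in brief, for a grammar like:
#     start: "a" b+
#     b: "b"
# the anonymous "a"/"b" strings become terminals A and B (PrepareAnonTerminals),
# b+ is expanded into a recursive helper rule (EBNF_to_BNF), every alternative
# is flattened into a Rule, and rules/terminals unreachable from 'start' are
# pruned.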


class PackageResource(object):
    """
    Represents a path inside a Package. Used by `FromPackageLoader`
    """

    def __init__(self, pkg_name, path):
        self.pkg_name = pkg_name
        self.path = path

    def __str__(self):
        return "<%s: %s>" % (self.pkg_name, self.path)

    def __repr__(self):
        return "%s(%r, %r)" % (type(self).__name__, self.pkg_name, self.path)


class FromPackageLoader(object):
    """
    Provides a simple way of creating custom import loaders that load from packages via ``pkgutil.get_data`` instead of using `open`.
    This allows them to be compatible even from within zip files.

    Relative imports are handled, so you can just freely use them.

    pkg_name: The name of the package. You can probably provide `__name__` most of the time.
    search_paths: All the paths that will be searched on absolute imports.
    """

    def __init__(self, pkg_name, search_paths=("", )):
        self.pkg_name = pkg_name
        self.search_paths = search_paths

    def __repr__(self):
        return "%s(%r, %r)" % (type(self).__name__, self.pkg_name, self.search_paths)

    def __call__(self, base_path, grammar_path):
        if base_path is None:
            to_try = self.search_paths
        else:
            # Check whether or not the importing grammar was loaded by this module.
            if not isinstance(base_path, PackageResource) or base_path.pkg_name != self.pkg_name:
                # Technically false, but FileNotFoundError doesn't exist in python2.7, and this message should never reach the end user anyway
                raise IOError()
            to_try = [base_path.path]
        for path in to_try:
            full_path = os.path.join(path, grammar_path)
            try:
                text = pkgutil.get_data(self.pkg_name, full_path)
            except IOError:
                continue
            else:
                return PackageResource(self.pkg_name, full_path), text.decode()
        raise IOError()
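
# Usage sketch (hypothetical package name): a package that ships grammars in a
# `grammars/` subpackage can expose them to %import with
#     loader = FromPackageLoader('my_package', ('grammars',))
# passing `loader` in import_paths; loaders are tried as callables by
# GrammarLoader.import_grammar below.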

stdlib_loader = FromPackageLoader('lark', IMPORT_PATHS)

_imported_grammars = {}


def import_from_grammar_into_namespace(grammar, namespace, aliases):
    """Returns all rules and terminals of grammar, prepended
    with a 'namespace' prefix, except for those which are aliased.
    """
    imported_terms = dict(grammar.term_defs)
    imported_rules = {n: (n, p, deepcopy(t), o) for n, p, t, o in grammar.rule_defs}

    term_defs = []
    rule_defs = []

    def rule_dependencies(symbol):
        if symbol.type != 'RULE':
            return []
        try:
            _, params, tree, _ = imported_rules[symbol]
        except KeyError:
            raise GrammarError("Missing symbol '%s' in grammar %s" % (symbol, namespace))
        return _find_used_symbols(tree) - set(params)

    def get_namespace_name(name, params):
        if params is not None:
            try:
                return params[name]
            except KeyError:
                pass
        try:
            return aliases[name].value
        except KeyError:
            if name[0] == '_':
                return '_%s__%s' % (namespace, name[1:])
            return '%s__%s' % (namespace, name)

    to_import = list(bfs(aliases, rule_dependencies))
    for symbol in to_import:
        if symbol.type == 'TERMINAL':
            term_defs.append([get_namespace_name(symbol, None), imported_terms[symbol]])
        else:
            assert symbol.type == 'RULE'
            _, params, tree, options = imported_rules[symbol]
            params_map = {p: ('%s__%s' if p[0] != '_' else '_%s__%s') % (namespace, p) for p in params}
            for t in tree.iter_subtrees():
                for i, c in enumerate(t.children):
                    if isinstance(c, Token) and c.type in ('RULE', 'TERMINAL'):
                        t.children[i] = Token(c.type, get_namespace_name(c, params_map))
            params = [params_map[p] for p in params]   # We can not rely on ordered dictionaries
            rule_defs.append((get_namespace_name(symbol, params_map), params, tree, options))

    return term_defs, rule_defs
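
# Namespacing example (hypothetical grammar `calc`): importing rule `expr`
# keeps its name (it appears in `aliases`), while helpers it depends on are
# prefixed: `atom` becomes `calc__atom` and `_sum` becomes `_calc__sum`.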


def resolve_term_references(term_defs):
    # TODO Solve with transitive closure (maybe)

    term_dict = {k: t for k, (t, _p) in term_defs}
    assert len(term_dict) == len(term_defs), "Same name defined twice?"

    while True:
        changed = False
        for name, (token_tree, _p) in term_defs:
            if token_tree is None:   # Terminal added through %declare
                continue

            for exp in token_tree.find_data('value'):
                item ,= exp.children
                if isinstance(item, Token):
                    if item.type == 'RULE':
                        raise GrammarError("Rules aren't allowed inside terminals (%s in %s)" % (item, name))
                    if item.type == 'TERMINAL':
                        term_value = term_dict[item]
                        assert term_value is not None
                        exp.children[0] = term_value
                        changed = True
        if not changed:
            break

    for name, term in term_dict.items():
        if term:   # Not just declared
            for child in term.children:
                ids = [id(x) for x in child.iter_subtrees()]
                if id(term) in ids:
                    raise GrammarError("Recursion in terminal '%s' (recursion is only allowed in rules, not terminals)" % name)
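
# For illustration: with `DIGIT: "0".."9"` and `INT: DIGIT+`, the DIGIT
# reference inside INT's tree is replaced by DIGIT's own definition, so INT no
# longer depends on another terminal. Self-reference (direct or indirect)
# raises the "Recursion in terminal" error above.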


def options_from_rule(name, params, *x):
    if len(x) > 1:
        priority, expansions = x
        priority = int(priority)
    else:
        expansions ,= x
        priority = None
    params = [t.value for t in params.children] if params is not None else []   # For the grammar parser

    keep_all_tokens = name.startswith('!')
    name = name.lstrip('!')
    expand1 = name.startswith('?')
    name = name.lstrip('?')

    return name, params, expansions, RuleOptions(keep_all_tokens, expand1, priority=priority,
                                                 template_source=(name if params else None))
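
# For illustration: a definition line `!?sum.2: ...` reaches here as
# name='!?sum' with x=('2', expansions), and yields name='sum' with
# RuleOptions(keep_all_tokens=True, expand1=True, priority=2).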


def symbols_from_strcase(expansion):
    return [Terminal(x, filter_out=x.startswith('_')) if x.isupper() else NonTerminal(x) for x in expansion]


@inline_args
class PrepareGrammar(Transformer_InPlace):
    def terminal(self, name):
        return name

    def nonterminal(self, name):
        return name


def _find_used_symbols(tree):
    assert tree.data == 'expansions'
    return {t for x in tree.find_data('expansion')
            for t in x.scan_values(lambda t: t.type in ('RULE', 'TERMINAL'))}


class GrammarLoader:
    ERRORS = [
        ('Unclosed parenthesis', ['a: (\n']),
        ('Unmatched closing parenthesis', ['a: )\n', 'a: [)\n', 'a: (]\n']),
        ('Expecting rule or terminal definition (missing colon)', ['a\n', 'A\n', 'a->\n', 'A->\n', 'a A\n']),
        ('Illegal name for rules or terminals', ['Aa:\n']),
        ('Alias expects lowercase name', ['a: -> "a"\n']),
        ('Unexpected colon', ['a::\n', 'a: b:\n', 'a: B:\n', 'a: "a":\n']),
        ('Misplaced operator', ['a: b??', 'a: b(?)', 'a:+\n', 'a:?\n', 'a:*\n', 'a:|*\n']),
        ('Expecting option ("|") or a new rule or terminal definition', ['a:a\n()\n']),
        ('Terminal names cannot contain dots', ['A.B\n']),
        ('%import expects a name', ['%import "a"\n']),
        ('%ignore expects a value', ['%ignore %import\n']),
    ]

    def __init__(self, global_keep_all_tokens):
        terminals = [TerminalDef(name, PatternRE(value)) for name, value in TERMINALS.items()]

        rules = [options_from_rule(name, None, x) for name, x in RULES.items()]
        rules = [Rule(NonTerminal(r), symbols_from_strcase(x.split()), i, None, o)
                 for r, _p, xs, o in rules for i, x in enumerate(xs)]
        callback = ParseTreeBuilder(rules, ST).create_callback()
        import re
        lexer_conf = LexerConf(terminals, re, ['WS', 'COMMENT'])
        parser_conf = ParserConf(rules, callback, ['start'])
        lexer_conf.lexer_type = 'standard'
        parser_conf.parser_type = 'lalr'
        self.parser = ParsingFrontend(lexer_conf, parser_conf, {})

        self.canonize_tree = CanonizeTree()
        self.global_keep_all_tokens = global_keep_all_tokens

    def import_grammar(self, grammar_path, base_path=None, import_paths=[]):
        if grammar_path not in _imported_grammars:
            # import_paths take priority over base_path since they should handle relative imports and ignore everything else.
            to_try = import_paths + ([base_path] if base_path is not None else []) + [stdlib_loader]
            for source in to_try:
                try:
                    if callable(source):
                        joined_path, text = source(base_path, grammar_path)
                    else:
                        joined_path = os.path.join(source, grammar_path)
                        with open(joined_path, encoding='utf8') as f:
                            text = f.read()
                except IOError:
                    continue
                else:
                    grammar = self.load_grammar(text, joined_path, import_paths)
                    _imported_grammars[grammar_path] = grammar
                    break
            else:
                # Search failed. Make Python throw a nice error.
                open(grammar_path, encoding='utf8')
                assert False
        return _imported_grammars[grammar_path]

    def load_grammar(self, grammar_text, grammar_name='<?>', import_paths=[]):
        """Parse grammar_text, verify, and create Grammar object. Display nice messages on error."""
        try:
            tree = self.canonize_tree.transform(self.parser.parse(grammar_text + '\n'))
        except UnexpectedCharacters as e:
            context = e.get_context(grammar_text)
            raise GrammarError("Unexpected input at line %d column %d in %s: \n\n%s" %
                               (e.line, e.column, grammar_name, context))
        except UnexpectedToken as e:
            context = e.get_context(grammar_text)
            error = e.match_examples(self.parser.parse, self.ERRORS, use_accepts=True)
            if error:
                raise GrammarError("%s, at line %s column %s\n\n%s" % (error, e.line, e.column, context))
            elif 'STRING' in e.expected:
                raise GrammarError("Expecting a value at line %s column %s\n\n%s" % (e.line, e.column, context))
            raise

        tree = PrepareGrammar().transform(tree)

        # Extract grammar items
        defs = classify(tree.children, lambda c: c.data, lambda c: c.children)
        term_defs = defs.pop('term', [])
        rule_defs = defs.pop('rule', [])
        statements = defs.pop('statement', [])
        assert not defs

        term_defs = [td if len(td) == 3 else (td[0], 1, td[1]) for td in term_defs]
        term_defs = [(name.value, (t, int(p))) for name, p, t in term_defs]
        rule_defs = [options_from_rule(*x) for x in rule_defs]

        # Execute statements
        ignore, imports = [], {}
        for (stmt,) in statements:
            if stmt.data == 'ignore':
                t ,= stmt.children
                ignore.append(t)
            elif stmt.data == 'import':
                if len(stmt.children) > 1:
                    path_node, arg1 = stmt.children
                else:
                    path_node ,= stmt.children
                    arg1 = None

                if isinstance(arg1, Tree):   # Multi import
                    dotted_path = tuple(path_node.children)
                    names = arg1.children
                    aliases = dict(zip(names, names))   # Can't have aliased multi import, so all aliases will be the same as names
                else:   # Single import
                    dotted_path = tuple(path_node.children[:-1])
                    name = path_node.children[-1]   # Get name from dotted path
                    aliases = {name: arg1 or name}   # Aliases if exist

                if path_node.data == 'import_lib':   # Import from library
                    base_path = None
                else:   # Relative import
                    if grammar_name == '<string>':   # Import relative to script file path if grammar is coded in script
                        try:
                            base_file = os.path.abspath(sys.modules['__main__'].__file__)
                        except AttributeError:
                            base_file = None
                    else:
                        base_file = grammar_name   # Import relative to grammar file path if external grammar file
                    if base_file:
                        if isinstance(base_file, PackageResource):
                            base_path = PackageResource(base_file.pkg_name, os.path.split(base_file.path)[0])
                        else:
                            base_path = os.path.split(base_file)[0]
                    else:
                        base_path = os.path.abspath(os.path.curdir)

                try:
                    import_base_path, import_aliases = imports[dotted_path]
                    assert base_path == import_base_path, 'Inconsistent base_path for %s.' % '.'.join(dotted_path)
                    import_aliases.update(aliases)
                except KeyError:
                    imports[dotted_path] = base_path, aliases

            elif stmt.data == 'declare':
                for t in stmt.children:
                    term_defs.append([t.value, (None, None)])
            else:
                assert False, stmt

        # import grammars
        for dotted_path, (base_path, aliases) in imports.items():
            grammar_path = os.path.join(*dotted_path) + EXT
            g = self.import_grammar(grammar_path, base_path=base_path, import_paths=import_paths)
            new_td, new_rd = import_from_grammar_into_namespace(g, '__'.join(dotted_path), aliases)

            term_defs += new_td
            rule_defs += new_rd

        # Verify correctness 1
        for name, _ in term_defs:
            if name.startswith('__'):
                raise GrammarError('Names starting with double-underscore are reserved (Error at %s)' % name)

        # Handle ignore tokens
        # XXX A slightly hacky solution. Recognition of %ignore TERMINAL as separate comes from the lexer's
        #     inability to handle duplicate terminals (two names, one value)
        ignore_names = []
        for t in ignore:
            if t.data == 'expansions' and len(t.children) == 1:
                t2 ,= t.children
                if t2.data == 'expansion' and len(t2.children) == 1:
                    item ,= t2.children
                    if item.data == 'value':
                        item ,= item.children
                        if isinstance(item, Token) and item.type == 'TERMINAL':
                            ignore_names.append(item.value)
                            continue

            name = '__IGNORE_%d' % len(ignore_names)
            ignore_names.append(name)
            term_defs.append((name, (t, 1)))

        # Verify correctness 2
        terminal_names = set()
        for name, _ in term_defs:
            if name in terminal_names:
                raise GrammarError("Terminal '%s' defined more than once" % name)
            terminal_names.add(name)

        if set(ignore_names) - terminal_names:
            raise GrammarError("Terminals %s were marked to ignore but were not defined!" % (set(ignore_names) - terminal_names))

        resolve_term_references(term_defs)

        rules = rule_defs

        rule_names = {}
        for name, params, _x, option in rules:
            # We can't simply keep the tokens later; we need option.keep_all_tokens to correctly generate maybe_placeholders
            if self.global_keep_all_tokens:
                option.keep_all_tokens = True

            if name.startswith('__'):
                raise GrammarError('Names starting with double-underscore are reserved (Error at %s)' % name)
            if name in rule_names:
                raise GrammarError("Rule '%s' defined more than once" % name)
            rule_names[name] = len(params)

        for name, params, expansions, _o in rules:
            for i, p in enumerate(params):
                if p in rule_names:
                    raise GrammarError("Template Parameter conflicts with rule %s (in template %s)" % (p, name))
                if p in params[:i]:
                    raise GrammarError("Duplicate Template Parameter %s (in template %s)" % (p, name))
            for temp in expansions.find_data('template_usage'):
                sym = temp.children[0]
                args = temp.children[1:]
                if sym not in params:
                    if sym not in rule_names:
                        raise GrammarError("Template '%s' used but not defined (in rule %s)" % (sym, name))
                    if len(args) != rule_names[sym]:
                        raise GrammarError("Wrong number of template arguments used for %s "
                                           "(expected %s, got %s) (in rule %s)" % (sym, rule_names[sym], len(args), name))
            for sym in _find_used_symbols(expansions):
                if sym.type == 'TERMINAL':
                    if sym not in terminal_names:
                        raise GrammarError("Token '%s' used but not defined (in rule %s)" % (sym, name))
                else:
                    if sym not in rule_names and sym not in params:
                        raise GrammarError("Rule '%s' used but not defined (in rule %s)" % (sym, name))

        return Grammar(rules, term_defs, ignore_names)


def load_grammar(grammar, source, import_paths, global_keep_all_tokens):
    return GrammarLoader(global_keep_all_tokens).load_grammar(grammar, source, import_paths)
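
# Minimal usage sketch (grammar text and arguments for illustration only):
#     from lark.load_grammar import load_grammar
#     g = load_grammar('start: "a"+', '<string>', [], False)
#     terminals, rules, ignore_names = g.compile(['start'], set())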