  1. "Parses and creates Grammar objects"
  2. import os.path
  3. import sys
  4. from ast import literal_eval
  5. from copy import copy, deepcopy
  6. from .utils import bfs
  7. from .lexer import Token, TerminalDef, PatternStr, PatternRE
  8. from .parse_tree_builder import ParseTreeBuilder
  9. from .parser_frontends import LALR_TraditionalLexer
  10. from .common import LexerConf, ParserConf
  11. from .grammar import RuleOptions, Rule, Terminal, NonTerminal, Symbol
  12. from .utils import classify, suppress, dedup_list
  13. from .exceptions import GrammarError, UnexpectedCharacters, UnexpectedToken
  14. from .tree import Tree, SlottedTree as ST
  15. from .visitors import Transformer, Visitor, v_args, Transformer_InPlace
  16. inline_args = v_args(inline=True)
  17. __path__ = os.path.dirname(__file__)
  18. IMPORT_PATHS = [os.path.join(__path__, 'grammars')]
  19. EXT = '.lark'
  20. _RE_FLAGS = 'imslux'
  21. _EMPTY = Symbol('__empty__')
  22. _TERMINAL_NAMES = {
  23. '.' : 'DOT',
  24. ',' : 'COMMA',
  25. ':' : 'COLON',
  26. ';' : 'SEMICOLON',
  27. '+' : 'PLUS',
  28. '-' : 'MINUS',
  29. '*' : 'STAR',
  30. '/' : 'SLASH',
  31. '\\' : 'BACKSLASH',
  32. '|' : 'VBAR',
  33. '?' : 'QMARK',
  34. '!' : 'BANG',
  35. '@' : 'AT',
  36. '#' : 'HASH',
  37. '$' : 'DOLLAR',
  38. '%' : 'PERCENT',
  39. '^' : 'CIRCUMFLEX',
  40. '&' : 'AMPERSAND',
  41. '_' : 'UNDERSCORE',
  42. '<' : 'LESSTHAN',
  43. '>' : 'MORETHAN',
  44. '=' : 'EQUAL',
  45. '"' : 'DBLQUOTE',
  46. '\'' : 'QUOTE',
  47. '`' : 'BACKQUOTE',
  48. '~' : 'TILDE',
  49. '(' : 'LPAR',
  50. ')' : 'RPAR',
  51. '{' : 'LBRACE',
  52. '}' : 'RBRACE',
  53. '[' : 'LSQB',
  54. ']' : 'RSQB',
  55. '\n' : 'NEWLINE',
  56. '\r\n' : 'CRLF',
  57. '\t' : 'TAB',
  58. ' ' : 'SPACE',
  59. }
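# For example: when PrepareAnonTerminals (below) meets the anonymous literal
# "," inside a rule, this table gives the generated terminal the indicative
# name COMMA instead of a generic __ANON_n name.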

# Grammar Parser
TERMINALS = {
    '_LPAR': r'\(',
    '_RPAR': r'\)',
    '_LBRA': r'\[',
    '_RBRA': r'\]',
    'OP': '[+*][?]?|[?](?![a-z])',
    '_COLON': ':',
    '_COMMA': ',',
    '_OR': r'\|',
    '_DOT': r'\.',
    'TILDE': '~',
    'RULE': '!?[_?]?[a-z][_a-z0-9]*',
    'TERMINAL': '_?[A-Z][_A-Z0-9]*',
    'STRING': r'"(\\"|\\\\|[^"\n])*?"i?',
    'REGEXP': r'/(?!/)(\\/|\\\\|[^/\n])*?/[%s]*' % _RE_FLAGS,
    '_NL': r'(\r?\n)+\s*',
    'WS': r'[ \t]+',
    'COMMENT': r'//[^\n]*',
    '_TO': '->',
    '_IGNORE': r'%ignore',
    '_DECLARE': r'%declare',
    '_IMPORT': r'%import',
    'NUMBER': r'\d+',
}

RULES = {
    'start': ['_list'],
    '_list': ['_item', '_list _item'],
    '_item': ['rule', 'term', 'statement', '_NL'],

    'rule': ['RULE _COLON expansions _NL',
             'RULE _DOT NUMBER _COLON expansions _NL'],
    'expansions': ['alias',
                   'expansions _OR alias',
                   'expansions _NL _OR alias'],

    '?alias': ['expansion _TO RULE', 'expansion'],
    'expansion': ['_expansion'],

    '_expansion': ['', '_expansion expr'],

    '?expr': ['atom',
              'atom OP',
              'atom TILDE NUMBER',
              'atom TILDE NUMBER _DOT _DOT NUMBER',
              ],

    '?atom': ['_LPAR expansions _RPAR',
              'maybe',
              'value'],

    'value': ['terminal',
              'nonterminal',
              'literal',
              'range'],

    'terminal': ['TERMINAL'],
    'nonterminal': ['RULE'],

    '?name': ['RULE', 'TERMINAL'],

    'maybe': ['_LBRA expansions _RBRA'],
    'range': ['STRING _DOT _DOT STRING'],

    'term': ['TERMINAL _COLON expansions _NL',
             'TERMINAL _DOT NUMBER _COLON expansions _NL'],
    'statement': ['ignore', 'import', 'declare'],
    'ignore': ['_IGNORE expansions _NL'],
    'declare': ['_DECLARE _declare_args _NL'],
    'import': ['_IMPORT _import_path _NL',
               '_IMPORT _import_path _LPAR name_list _RPAR _NL',
               '_IMPORT _import_path _TO name _NL'],

    '_import_path': ['import_lib', 'import_rel'],
    'import_lib': ['_import_args'],
    'import_rel': ['_DOT _import_args'],
    '_import_args': ['name', '_import_args _DOT name'],

    'name_list': ['_name_list'],
    '_name_list': ['name', '_name_list _COMMA name'],

    '_declare_args': ['name', '_declare_args name'],
    'literal': ['REGEXP', 'STRING'],
}
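# Illustrative sketch (a hypothetical grammar, not from this repo) of the input
# that TERMINALS and RULES above describe -- a .lark file such as:
#
#     start: pair+
#     pair: KEY ":" VALUE
#     KEY: /[a-z]+/
#     VALUE: /[0-9]+/
#     %ignore " "
#
# The definitions below compile the parse tree of such a file into Rule and
# TerminalDef objects.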

@inline_args
class EBNF_to_BNF(Transformer_InPlace):
    def __init__(self):
        self.new_rules = []
        self.rules_by_expr = {}
        self.prefix = 'anon'
        self.i = 0
        self.rule_options = None

    def _add_recurse_rule(self, type_, expr):
        if expr in self.rules_by_expr:
            return self.rules_by_expr[expr]

        new_name = '__%s_%s_%d' % (self.prefix, type_, self.i)
        self.i += 1
        t = NonTerminal(new_name)
        tree = ST('expansions', [ST('expansion', [expr]), ST('expansion', [t, expr])])
        self.new_rules.append((new_name, tree, self.rule_options))
        self.rules_by_expr[expr] = t
        return t

    def expr(self, rule, op, *args):
        if op.value == '?':
            empty = ST('expansion', [])
            return ST('expansions', [rule, empty])
        elif op.value == '+':
            # a : b c+ d
            #   -->
            # a : b _c d
            # _c : _c c | c;
            return self._add_recurse_rule('plus', rule)
        elif op.value == '*':
            # a : b c* d
            #   -->
            # a : b _c? d
            # _c : _c c | c;
            new_name = self._add_recurse_rule('star', rule)
            return ST('expansions', [new_name, ST('expansion', [])])
        elif op.value == '~':
            if len(args) == 1:
                mn = mx = int(args[0])
            else:
                mn, mx = map(int, args)
                if mx < mn:
                    raise GrammarError("Bad Range for %s (%d..%d isn't allowed)" % (rule, mn, mx))
            return ST('expansions', [ST('expansion', [rule] * n) for n in range(mn, mx+1)])
        assert False, op

    def maybe(self, rule):
        keep_all_tokens = self.rule_options and self.rule_options.keep_all_tokens

        def will_not_get_removed(sym):
            if isinstance(sym, NonTerminal):
                return not sym.name.startswith('_')
            if isinstance(sym, Terminal):
                return keep_all_tokens or not sym.filter_out
            assert False

        if any(rule.scan_values(will_not_get_removed)):
            empty = _EMPTY
        else:
            empty = ST('expansion', [])

        return ST('expansions', [rule, empty])
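# Illustrative sketch of the '~' branch above (hypothetical rule names):
#   a : b ~ 2..3
#     -->
#   a : b b | b b b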

class SimplifyRule_Visitor(Visitor):

    @staticmethod
    def _flatten(tree):
        while True:
            to_expand = [i for i, child in enumerate(tree.children)
                         if isinstance(child, Tree) and child.data == tree.data]
            if not to_expand:
                break
            tree.expand_kids_by_index(*to_expand)

    def expansion(self, tree):
        # rules_list unpacking
        # a : b (c|d) e
        #   -->
        # a : b c e | b d e
        #
        # In AST terms:
        # expansion(b, expansions(c, d), e)
        #   -->
        # expansions( expansion(b, c, e), expansion(b, d, e) )

        self._flatten(tree)

        for i, child in enumerate(tree.children):
            if isinstance(child, Tree) and child.data == 'expansions':
                tree.data = 'expansions'
                tree.children = [self.visit(ST('expansion', [option if i == j else other
                                                             for j, other in enumerate(tree.children)]))
                                 for option in dedup_list(child.children)]
                self._flatten(tree)
                break

    def alias(self, tree):
        rule, alias_name = tree.children
        if rule.data == 'expansions':
            aliases = []
            for child in tree.children[0].children:
                aliases.append(ST('alias', [child, alias_name]))
            tree.data = 'expansions'
            tree.children = aliases

    def expansions(self, tree):
        self._flatten(tree)
        tree.children = dedup_list(tree.children)

class RuleTreeToText(Transformer):
    def expansions(self, x):
        return x

    def expansion(self, symbols):
        return symbols, None

    def alias(self, x):
        (expansion, _alias), alias = x
        assert _alias is None, (alias, expansion, '-', _alias)  # Double alias not allowed
        return expansion, alias.value


@inline_args
class CanonizeTree(Transformer_InPlace):
    def tokenmods(self, *args):
        if len(args) == 1:
            return list(args)
        tokenmods, value = args
        return tokenmods + [value]

class PrepareAnonTerminals(Transformer_InPlace):
    "Create a unique list of anonymous terminals. Attempt to give meaningful names to them when we add them"

    def __init__(self, terminals):
        self.terminals = terminals
        self.term_set = {td.name for td in self.terminals}
        self.term_reverse = {td.pattern: td for td in terminals}
        self.i = 0

    @inline_args
    def pattern(self, p):
        value = p.value

        if p in self.term_reverse and p.flags != self.term_reverse[p].pattern.flags:
            raise GrammarError(u'Conflicting flags for the same terminal: %s' % p)

        term_name = None

        if isinstance(p, PatternStr):
            try:
                # If already defined, use the user-defined terminal name
                term_name = self.term_reverse[p].name
            except KeyError:
                # Try to assign an indicative anon-terminal name
                try:
                    term_name = _TERMINAL_NAMES[value]
                except KeyError:
                    if value.isalnum() and value[0].isalpha() and value.upper() not in self.term_set:
                        with suppress(UnicodeEncodeError):
                            value.upper().encode('ascii')  # Make sure we don't have unicode in our terminal names
                            term_name = value.upper()

                if term_name in self.term_set:
                    term_name = None

        elif isinstance(p, PatternRE):
            if p in self.term_reverse:  # Kind of a weird placement
                term_name = self.term_reverse[p].name
        else:
            assert False, p

        if term_name is None:
            term_name = '__ANON_%d' % self.i
            self.i += 1

        if term_name not in self.term_set:
            assert p not in self.term_reverse
            self.term_set.add(term_name)
            termdef = TerminalDef(term_name, p)
            self.term_reverse[p] = termdef
            self.terminals.append(termdef)

        return Terminal(term_name, filter_out=isinstance(p, PatternStr))

def _rfind(s, choices):
    return max(s.rfind(c) for c in choices)


def _fix_escaping(s):
    w = ''
    i = iter(s)
    for n in i:
        w += n
        if n == '\\':
            n2 = next(i)
            if n2 == '\\':
                w += '\\\\'
            elif n2 not in 'uxnftr':
                w += '\\'
            w += n2
    w = w.replace('\\"', '"').replace("'", "\\'")

    to_eval = "u'''%s'''" % w
    try:
        s = literal_eval(to_eval)
    except SyntaxError as e:
        raise ValueError(s, e)

    return s
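# Illustrative sketch of _fix_escaping on the interior of a quoted literal:
#   _fix_escaping(r'a\"b') == 'a"b'     # escaped quote -> literal quote
#   _fix_escaping(r'a\nb') == 'a\nb'    # recognized escape -> real newline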

def _literal_to_pattern(literal):
    v = literal.value
    flag_start = _rfind(v, '/"') + 1
    assert flag_start > 0
    flags = v[flag_start:]
    assert all(f in _RE_FLAGS for f in flags), flags

    v = v[:flag_start]
    assert v[0] == v[-1] and v[0] in '"/'
    x = v[1:-1]

    s = _fix_escaping(x)

    if literal.type == 'STRING':
        s = s.replace('\\\\', '\\')

    return {'STRING': PatternStr,
            'REGEXP': PatternRE}[literal.type](s, flags)
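# Illustrative sketch: Token('STRING', '"abc"i') becomes PatternStr('abc', 'i'),
# and Token('REGEXP', '/a+/im') becomes PatternRE('a+', 'im').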

@inline_args
class PrepareLiterals(Transformer_InPlace):
    def literal(self, literal):
        return ST('pattern', [_literal_to_pattern(literal)])

    def range(self, start, end):
        assert start.type == end.type == 'STRING'
        start = start.value[1:-1]
        end = end.value[1:-1]
        assert len(_fix_escaping(start)) == len(_fix_escaping(end)) == 1, (start, end, len(_fix_escaping(start)), len(_fix_escaping(end)))
        regexp = '[%s-%s]' % (start, end)
        return ST('pattern', [PatternRE(regexp)])

class TerminalTreeToPattern(Transformer):
    def pattern(self, ps):
        p ,= ps
        return p

    def expansion(self, items):
        assert items
        if len(items) == 1:
            return items[0]
        if len({i.flags for i in items}) > 1:
            raise GrammarError("Lark doesn't support joining terminals with conflicting flags!")
        return PatternRE(''.join(i.to_regexp() for i in items), items[0].flags if items else ())

    def expansions(self, exps):
        if len(exps) == 1:
            return exps[0]
        if len({i.flags for i in exps}) > 1:
            raise GrammarError("Lark doesn't support joining terminals with conflicting flags!")
        return PatternRE('(?:%s)' % ('|'.join(i.to_regexp() for i in exps)), exps[0].flags)

    def expr(self, args):
        inner, op = args[:2]
        if op == '~':
            if len(args) == 3:
                op = "{%d}" % int(args[2])
            else:
                mn, mx = map(int, args[2:])
                if mx < mn:
                    raise GrammarError("Bad Range for %s (%d..%d isn't allowed)" % (inner, mn, mx))
                op = "{%d,%d}" % (mn, mx)
        else:
            assert len(args) == 2
        return PatternRE('(?:%s)%s' % (inner.to_regexp(), op), inner.flags)

    def maybe(self, expr):
        return self.expr(expr + ['?'])

    def alias(self, t):
        raise GrammarError("Aliasing not allowed in terminals (You used -> in the wrong place)")

    def value(self, v):
        return v[0]
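# Illustrative sketch: a terminal defined as
#     AB: "a" "b"+
# is collapsed by this transformer into the single pattern PatternRE('a(?:b)+').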

class PrepareSymbols(Transformer_InPlace):
    def value(self, v):
        v ,= v
        if isinstance(v, Tree):
            return v
        elif v.type == 'RULE':
            return NonTerminal(v.value)
        elif v.type == 'TERMINAL':
            return Terminal(v.value, filter_out=v.startswith('_'))
        assert False


def _choice_of_rules(rules):
    return ST('expansions', [ST('expansion', [Token('RULE', name)]) for name in rules])

class Grammar:
    def __init__(self, rule_defs, term_defs, ignore):
        self.term_defs = term_defs
        self.rule_defs = rule_defs
        self.ignore = ignore

    def compile(self):
        # We change the trees in-place (to support huge grammars)
        # So deepcopy allows calling compile more than once.
        term_defs = deepcopy(list(self.term_defs))
        rule_defs = deepcopy(self.rule_defs)

        # ===================
        #  Compile Terminals
        # ===================

        # Convert terminal-trees to strings/regexps
        transformer = PrepareLiterals() * TerminalTreeToPattern()
        for name, (term_tree, priority) in term_defs:
            if term_tree is None:  # Terminal added through %declare
                continue
            expansions = list(term_tree.find_data('expansion'))
            if len(expansions) == 1 and not expansions[0].children:
                raise GrammarError("Terminals cannot be empty (%s)" % name)

        terminals = [TerminalDef(name, transformer.transform(term_tree), priority)
                     for name, (term_tree, priority) in term_defs if term_tree]

        # =================
        #  Compile Rules
        # =================

        # 1. Pre-process terminals
        transformer = PrepareLiterals() * PrepareSymbols() * PrepareAnonTerminals(terminals)  # Adds to terminals

        # 2. Convert EBNF to BNF (and apply step 1)
        ebnf_to_bnf = EBNF_to_BNF()
        rules = []
        for name, rule_tree, options in rule_defs:
            ebnf_to_bnf.rule_options = RuleOptions(keep_all_tokens=True) if options and options.keep_all_tokens else None
            tree = transformer.transform(rule_tree)
            res = ebnf_to_bnf.transform(tree)
            rules.append((name, res, options))
        rules += ebnf_to_bnf.new_rules

        assert len(rules) == len({name for name, _t, _o in rules}), "Whoops, name collision"

        # 3. Compile tree to Rule objects
        rule_tree_to_text = RuleTreeToText()

        simplify_rule = SimplifyRule_Visitor()
        compiled_rules = []
        for i, rule_content in enumerate(rules):
            name, tree, options = rule_content
            simplify_rule.visit(tree)
            expansions = rule_tree_to_text.transform(tree)

            for expansion, alias in expansions:
                if alias and name.startswith('_'):
                    raise GrammarError("Rule %s is marked for expansion (it starts with an underscore) and isn't allowed to have aliases (alias=%s)" % (name, alias))

                empty_indices = [x == _EMPTY for x in expansion]
                if any(empty_indices):
                    exp_options = copy(options) if options else RuleOptions()
                    exp_options.empty_indices = empty_indices
                    expansion = [x for x in expansion if x != _EMPTY]
                else:
                    exp_options = options

                assert all(isinstance(x, Symbol) for x in expansion), expansion
                rule = Rule(NonTerminal(name), expansion, i, alias, exp_options)
                compiled_rules.append(rule)

        # Remove duplicates of empty rules, throw error for non-empty duplicates
        if len(set(compiled_rules)) != len(compiled_rules):
            duplicates = classify(compiled_rules, lambda x: x)
            for dups in duplicates.values():
                if len(dups) > 1:
                    if dups[0].expansion:
                        raise GrammarError("Rules defined twice: %s" % ', '.join(str(i) for i in dups))

                    # Empty rule; assert all other attributes are equal
                    assert len({(r.alias, r.order, r.options) for r in dups}) == len(dups)

            # Remove duplicates
            compiled_rules = list(set(compiled_rules))

        # Filter out unused terminals
        used_terms = {t.name for r in compiled_rules
                      for t in r.expansion
                      if isinstance(t, Terminal)}
        terminals = [t for t in terminals if t.name in used_terms or t.name in self.ignore]

        return terminals, compiled_rules, self.ignore
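# Illustrative sketch of compile(): for a grammar like
#     start: "x"+
# PrepareAnonTerminals names the anonymous literal X, and EBNF_to_BNF adds a
# recursive helper rule, so the compiled rules are roughly:
#     start : __anon_plus_0
#     __anon_plus_0 : X | __anon_plus_0 X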

_imported_grammars = {}


def import_grammar(grammar_path, base_paths=[]):
    if grammar_path not in _imported_grammars:
        import_paths = base_paths + IMPORT_PATHS
        for import_path in import_paths:
            with suppress(IOError):
                with open(os.path.join(import_path, grammar_path)) as f:
                    text = f.read()
                grammar = load_grammar(text, grammar_path)
                _imported_grammars[grammar_path] = grammar
                break
        else:
            # Not found in any import path; re-open to raise IOError with the original path
            open(grammar_path)
            assert False

    return _imported_grammars[grammar_path]
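# Illustrative usage: import_grammar('common.lark') searches the given
# base_paths and then lark's bundled grammars/ directory, loads the file once,
# and serves later calls from the _imported_grammars cache.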

def import_from_grammar_into_namespace(grammar, namespace, aliases):
    """Returns all rules and terminals of grammar, prepended
    with a 'namespace' prefix, except for those which are aliased.
    """
    imported_terms = dict(grammar.term_defs)
    imported_rules = {n: (n, deepcopy(t), o) for n, t, o in grammar.rule_defs}

    term_defs = []
    rule_defs = []

    def rule_dependencies(symbol):
        if symbol.type != 'RULE':
            return []
        try:
            _, tree, _ = imported_rules[symbol]
        except KeyError:
            raise GrammarError("Missing symbol '%s' in grammar %s" % (symbol, namespace))
        return tree.scan_values(lambda x: x.type in ('RULE', 'TERMINAL'))

    def get_namespace_name(name):
        try:
            return aliases[name].value
        except KeyError:
            return '%s__%s' % (namespace, name)

    to_import = list(bfs(aliases, rule_dependencies))
    for symbol in to_import:
        if symbol.type == 'TERMINAL':
            term_defs.append([get_namespace_name(symbol), imported_terms[symbol]])
        else:
            assert symbol.type == 'RULE'
            rule = imported_rules[symbol]
            for t in rule[1].iter_subtrees():
                for i, c in enumerate(t.children):
                    if isinstance(c, Token) and c.type in ('RULE', 'TERMINAL'):
                        t.children[i] = Token(c.type, get_namespace_name(c))
            rule_defs.append((get_namespace_name(symbol), rule[1], rule[2]))

    return term_defs, rule_defs
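# Illustrative sketch (hypothetical grammar names): for
#     %import .mygrammar.expr -> e
# the rule expr is imported under its alias e, while every rule or terminal it
# depends on is imported under the namespace prefix, e.g. mygrammar__atom.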

def resolve_term_references(term_defs):
    # TODO Cycles detection
    # TODO Solve with transitive closure (maybe)

    token_dict = {k: t for k, (t, _p) in term_defs}
    assert len(token_dict) == len(term_defs), "Same name defined twice?"

    while True:
        changed = False
        for name, (token_tree, _p) in term_defs:
            if token_tree is None:  # Terminal added through %declare
                continue
            for exp in token_tree.find_data('value'):
                item ,= exp.children
                if isinstance(item, Token):
                    if item.type == 'RULE':
                        raise GrammarError("Rules aren't allowed inside terminals (%s in %s)" % (item, name))
                    if item.type == 'TERMINAL':
                        exp.children[0] = token_dict[item]
                        changed = True
        if not changed:
            break
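# Illustrative sketch: given
#     DIGIT: /[0-9]/
#     INT: DIGIT+
# the TERMINAL reference inside INT's tree is replaced by DIGIT's definition
# tree, looping until no terminal references remain.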

def options_from_rule(name, *x):
    if len(x) > 1:
        priority, expansions = x
        priority = int(priority)
    else:
        expansions ,= x
        priority = None

    keep_all_tokens = name.startswith('!')
    name = name.lstrip('!')
    expand1 = name.startswith('?')
    name = name.lstrip('?')

    return name, expansions, RuleOptions(keep_all_tokens, expand1, priority=priority)
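# Illustrative sketch: a rule named '!?foo' yields
#     ('foo', expansions, RuleOptions(keep_all_tokens=True, expand1=True, priority=None))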

def symbols_from_strcase(expansion):
    return [Terminal(x, filter_out=x.startswith('_')) if x.isupper() else NonTerminal(x) for x in expansion]


@inline_args
class PrepareGrammar(Transformer_InPlace):
    def terminal(self, name):
        return name

    def nonterminal(self, name):
        return name

class GrammarLoader:
    def __init__(self):
        terminals = [TerminalDef(name, PatternRE(value)) for name, value in TERMINALS.items()]

        rules = [options_from_rule(name, x) for name, x in RULES.items()]
        rules = [Rule(NonTerminal(r), symbols_from_strcase(x.split()), i, None, o)
                 for r, xs, o in rules for i, x in enumerate(xs)]
        callback = ParseTreeBuilder(rules, ST).create_callback()
        lexer_conf = LexerConf(terminals, ['WS', 'COMMENT'])

        parser_conf = ParserConf(rules, callback, 'start')
        self.parser = LALR_TraditionalLexer(lexer_conf, parser_conf)

        self.canonize_tree = CanonizeTree()
    def load_grammar(self, grammar_text, grammar_name='<?>'):
        "Parse grammar_text, verify, and create Grammar object. Display nice messages on error."

        try:
            tree = self.canonize_tree.transform(self.parser.parse(grammar_text+'\n'))
        except UnexpectedCharacters as e:
            context = e.get_context(grammar_text)
            raise GrammarError("Unexpected input at line %d column %d in %s: \n\n%s" %
                               (e.line, e.column, grammar_name, context))
        except UnexpectedToken as e:
            context = e.get_context(grammar_text)
            error = e.match_examples(self.parser.parse, {
                'Unclosed parenthesis': ['a: (\n'],
                'Unmatched closing parenthesis': ['a: )\n', 'a: [)\n', 'a: (]\n'],
                'Expecting rule or terminal definition (missing colon)': ['a\n', 'a->\n', 'A->\n', 'a A\n'],
                'Alias expects lowercase name': ['a: -> "a"\n'],
                'Unexpected colon': ['a::\n', 'a: b:\n', 'a: B:\n', 'a: "a":\n'],
                'Misplaced operator': ['a: b??', 'a: b(?)', 'a:+\n', 'a:?\n', 'a:*\n', 'a:|*\n'],
                'Expecting option ("|") or a new rule or terminal definition': ['a:a\n()\n'],
                '%import expects a name': ['%import "a"\n'],
                '%ignore expects a value': ['%ignore %import\n'],
            })
            if error:
                raise GrammarError("%s at line %s column %s\n\n%s" % (error, e.line, e.column, context))
            elif 'STRING' in e.expected:
                raise GrammarError("Expecting a value at line %s column %s\n\n%s" % (e.line, e.column, context))
            raise
        tree = PrepareGrammar().transform(tree)

        # Extract grammar items
        defs = classify(tree.children, lambda c: c.data, lambda c: c.children)
        term_defs = defs.pop('term', [])
        rule_defs = defs.pop('rule', [])
        statements = defs.pop('statement', [])
        assert not defs

        term_defs = [td if len(td) == 3 else (td[0], 1, td[1]) for td in term_defs]
        term_defs = [(name.value, (t, int(p))) for name, p, t in term_defs]
        rule_defs = [options_from_rule(*x) for x in rule_defs]

        # Execute statements
        ignore = []
        for (stmt,) in statements:
            if stmt.data == 'ignore':
                t ,= stmt.children
                ignore.append(t)
            elif stmt.data == 'import':
                if len(stmt.children) > 1:
                    path_node, arg1 = stmt.children
                else:
                    path_node ,= stmt.children
                    arg1 = None

                if isinstance(arg1, Tree):  # Multi import
                    dotted_path = path_node.children
                    names = arg1.children
                    aliases = names  # Can't have aliased multi import, so all aliases will be the same as names
                else:  # Single import
                    dotted_path = path_node.children[:-1]
                    names = [path_node.children[-1]]  # Get name from dotted path
                    aliases = [arg1] if arg1 else names  # Aliases if exist

                grammar_path = os.path.join(*dotted_path) + EXT

                if path_node.data == 'import_lib':  # Import from library
                    g = import_grammar(grammar_path)
                else:  # Relative import
                    if grammar_name == '<string>':  # Import relative to script file path if grammar is coded in script
                        try:
                            base_file = os.path.abspath(sys.modules['__main__'].__file__)
                        except AttributeError:
                            base_file = None
                    else:
                        base_file = grammar_name  # Import relative to grammar file path if external grammar file
                    if base_file:
                        base_path = os.path.split(base_file)[0]
                    else:
                        base_path = os.path.abspath(os.path.curdir)
                    g = import_grammar(grammar_path, base_paths=[base_path])

                aliases_dict = dict(zip(names, aliases))
                new_td, new_rd = import_from_grammar_into_namespace(g, '__'.join(dotted_path), aliases_dict)

                term_defs += new_td
                rule_defs += new_rd

            elif stmt.data == 'declare':
                for t in stmt.children:
                    term_defs.append([t.value, (None, None)])
            else:
                assert False, stmt
        # Verify correctness 1
        for name, _ in term_defs:
            if name.startswith('__'):
                raise GrammarError('Names starting with double-underscore are reserved (Error at %s)' % name)

        # Handle ignore tokens
        # XXX A slightly hacky solution. Recognition of %ignore TERMINAL as separate comes from the lexer's
        #     inability to handle duplicate terminals (two names, one value)
        ignore_names = []
        for t in ignore:
            if t.data == 'expansions' and len(t.children) == 1:
                t2 ,= t.children
                if t2.data == 'expansion' and len(t2.children) == 1:
                    item ,= t2.children
                    if item.data == 'value':
                        item ,= item.children
                        if isinstance(item, Token) and item.type == 'TERMINAL':
                            ignore_names.append(item.value)
                            continue

            name = '__IGNORE_%d' % len(ignore_names)
            ignore_names.append(name)
            term_defs.append((name, (t, 1)))

        # Verify correctness 2
        terminal_names = set()
        for name, _ in term_defs:
            if name in terminal_names:
                raise GrammarError("Terminal '%s' defined more than once" % name)
            terminal_names.add(name)

        if set(ignore_names) > terminal_names:
            raise GrammarError("Terminals %s were marked to ignore but were not defined!" % (set(ignore_names) - terminal_names))

        resolve_term_references(term_defs)

        rules = rule_defs

        rule_names = set()
        for name, _x, _o in rules:
            if name.startswith('__'):
                raise GrammarError('Names starting with double-underscore are reserved (Error at %s)' % name)
            if name in rule_names:
                raise GrammarError("Rule '%s' defined more than once" % name)
            rule_names.add(name)

        for name, expansions, _o in rules:
            used_symbols = {t for x in expansions.find_data('expansion')
                            for t in x.scan_values(lambda t: t.type in ('RULE', 'TERMINAL'))}
            for sym in used_symbols:
                if sym.type == 'TERMINAL':
                    if sym not in terminal_names:
                        raise GrammarError("Token '%s' used but not defined (in rule %s)" % (sym, name))
                else:
                    if sym not in rule_names:
                        raise GrammarError("Rule '%s' used but not defined (in rule %s)" % (sym, name))

        return Grammar(rules, term_defs, ignore_names)


load_grammar = GrammarLoader().load_grammar
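
# A minimal, illustrative self-test (a sketch added for exposition, not part of
# the library's API): load a tiny grammar and compile it down to terminals,
# rules, and ignored-terminal names.
if __name__ == '__main__':
    demo = load_grammar('start: "a" NUMBER\n'
                        'NUMBER: /[0-9]+/\n'
                        '%ignore " "\n', '<demo>')
    terminals, rules, ignore = demo.compile()
    print([t.name for t in terminals])      # e.g. ['NUMBER', '__IGNORE_0', 'A']
    print([r.origin.name for r in rules])   # e.g. ['start']
    print(ignore)                           # e.g. ['__IGNORE_0']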