@@ -889,13 +889,15 @@ class GrammarBuilder:
     def _define(self, name, exp, params=(), options=None, override=False):
-        if (name in self._definitions) ^ override:
-            if override:
-                self._grammar_error("Cannot override a nonexisting {type} {name}", name)
-            else:
+        if name in self._definitions:
+            if not override:
                 self._grammar_error("{Type} '{name}' defined more than once", name)
+        elif override:
+            self._grammar_error("Cannot override a nonexisting {type} {name}", name)
         if name.startswith('__'):
             self._grammar_error('Names starting with double-underscore are reserved (Error at {name})', name)
         self._definitions[name] = (params, exp, self._check_options(name, options))

     def _extend(self, name, exp, params=(), options=None):
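(Illustrative sketch, not part of the diff: the user-visible effect of the reworked _define check, assuming the .grammars.ab test grammar used by the tests below is available next to the script. %override on an imported name is accepted, while %override on a name that was never defined or imported raises GrammarError via the "Cannot override a nonexisting ..." branch.)

    from lark import Lark, GrammarError

    # Overriding an imported terminal is fine: A exists after the %import.
    p = Lark("""
        %import .grammars.ab (startab, A, B)
        %override A: "c"
    """, start='startab', source_path=__file__)

    # Overriding a name that was never defined or imported is rejected.
    try:
        Lark("""
            %override sep{item}: item ("," item)*
        """)
    except GrammarError as e:
        print(e)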
@@ -1002,7 +1004,6 @@ class GrammarBuilder:
         tree = _parse_grammar(grammar_text, grammar_name)

         imports = {}  # imports are collected over the whole file to prevent duplication
         for stmt in tree.children:
             if stmt.data == 'import':
                 dotted_path, base_path, aliases = self._unpack_import(stmt, grammar_name)
@@ -1077,7 +1078,7 @@ class GrammarBuilder:
             return s
         return mangle

-    def check(self):
+    def validate(self):
         for name, (params, exp, options) in self._definitions.items():
             for i, p in enumerate(params):
                 if p in self._definitions:
@@ -1085,7 +1086,7 @@ class GrammarBuilder:
                 if p in params[:i]:
                     raise GrammarError("Duplicate Template Parameter %s (in template %s)" % (p, name))

-            if exp is None: # Remaining checks don't work for abstract rules/terminals
+            if exp is None: # Remaining checks don't apply to abstract rules/terminals
                 continue

             for temp in exp.find_data('template_usage'):
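(Illustrative sketch, not part of the diff: one of the template checks validate() performs. Assuming Lark's %import common.NUMBER and // grammar comments, a template that declares the same parameter twice is rejected when the grammar is built.)

    from lark import Lark, GrammarError

    try:
        Lark("""
            start: pair{NUMBER, NUMBER}
            pair{a, a}: a " " a    // parameter "a" declared twice
            %import common.NUMBER
        """)
    except GrammarError as e:
        print(e)  # e.g. Duplicate Template Parameter a (in template pair)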
@@ -1107,7 +1108,7 @@ class GrammarBuilder:
             raise GrammarError("Terminals %s were marked to ignore but were not defined!" % (set(self._ignore_names) - set(self._definitions)))

     def build(self):
-        self.check()
+        self.validate()
         rule_defs = []
         term_defs = []
         for name, (params, exp, options) in self._definitions.items():
@@ -35,11 +35,21 @@ class TestGrammar(TestCase):
         b = p.parse('[1, 2, 3, ]')
         assert a == b

+        self.assertRaises(GrammarError, Lark, """
+            %import .test_templates_import (start, sep)
+            %override sep{item}: item (delim item)* delim?
+            """)
+
+        self.assertRaises(GrammarError, Lark, """
+            %override sep{item}: item (delim item)* delim?
+            """)
+
     def test_override_terminal(self):
         p = Lark("""
             %import .grammars.ab (startab, A, B)
             %override A: "c"
             %override B: "d"
             """, start='startab', source_path=__file__)
@@ -56,15 +66,63 @@ class TestGrammar(TestCase):
         a = p.parse('abab')
         self.assertEqual(a.children[0].children, ['a', Tree('expr', ['b', 'a']), 'b'])

+        self.assertRaises(GrammarError, Lark, """
+            %extend expr: B A
+            """)
+
     def test_extend_term(self):
         p = Lark("""
             %import .grammars.ab (startab, A, B, expr)
             %extend A: "c"
             """, start='startab', source_path=__file__)
         a = p.parse('acbb')
         self.assertEqual(a.children[0].children, ['a', Tree('expr', ['c', 'b']), 'b'])

+    def test_extend_twice(self):
+        p = Lark("""
+            start: x+
+            x: "a"
+            %extend x: "b"
+            %extend x: "c"
+            """)
+
+        assert p.parse("abccbba") == p.parse("cbabbbb")
+
+    def test_undefined_ignore(self):
+        g = """!start: "A"
+            %ignore B
+            """
+        self.assertRaises( GrammarError, Lark, g)
+
+        g = """!start: "A"
+            %ignore start
+            """
+        self.assertRaises( GrammarError, Lark, g)
+
+    def test_alias_in_terminal(self):
+        g = """start: TERM
+            TERM: "a" -> alias
+            """
+        self.assertRaises( GrammarError, Lark, g)
+
+    def test_undefined_rule(self):
+        self.assertRaises(GrammarError, Lark, """start: a""")
+
+    def test_undefined_term(self):
+        self.assertRaises(GrammarError, Lark, """start: A""")
+
+    def test_token_multiline_only_works_with_x_flag(self):
+        g = r"""start: ABC
+            ABC: / a b c
+                        d
+                        e f
+                /i
+            """
+        self.assertRaises( GrammarError, Lark, g)
+

 if __name__ == '__main__':
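(Illustrative sketch, not part of the diff: the behaviour test_extend_twice exercises. Each %extend adds alternatives to an existing definition, so the result is equivalent to writing the alternatives inline.)

    from lark import Lark

    extended = Lark("""
        start: x+
        x: "a"
        %extend x: "b"
        %extend x: "c"
    """)

    inline = Lark("""
        start: x+
        x: "a" | "b" | "c"
    """)

    # Both grammars accept the same input and build the same tree shape.
    assert extended.parse("abc") == inline.parse("abc")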
@@ -1380,12 +1380,6 @@ def _make_parser_test(LEXER, PARSER):
        #     A: "a" """)
        #     self.assertRaises(LexError, g.parse, 'aab')

-        def test_undefined_rule(self):
-            self.assertRaises(GrammarError, _Lark, """start: a""")
-
-        def test_undefined_token(self):
-            self.assertRaises(GrammarError, _Lark, """start: A""")
-
        def test_rule_collision(self):
            g = _Lark("""start: "a"+ "b"
                        | "a"+ """)
@@ -1619,15 +1613,6 @@ def _make_parser_test(LEXER, PARSER):
            x = g.parse('abcdef')
            self.assertEqual(x.children, ['abcdef'])

-        def test_token_multiline_only_works_with_x_flag(self):
-            g = r"""start: ABC
-                ABC: / a b c
-                            d
-                            e f
-                    /i
-                """
-            self.assertRaises( GrammarError, _Lark, g)
-
        @unittest.skipIf(PARSER == 'cyk', "No empty rules")
        def test_twice_empty(self):
            g = """!start: ("A"?)?
@@ -1639,18 +1624,6 @@ def _make_parser_test(LEXER, PARSER):
            tree = l.parse('')
            self.assertEqual(tree.children, [])

-        def test_undefined_ignore(self):
-            g = """!start: "A"
-                %ignore B
-                """
-            self.assertRaises( GrammarError, _Lark, g)
-
-        def test_alias_in_terminal(self):
-            g = """start: TERM
-                TERM: "a" -> alias
-                """
-            self.assertRaises( GrammarError, _Lark, g)
-
        def test_line_and_column(self):
            g = r"""!start: "A" bc "D"