from __future__ import absolute_import

import sys
from unittest import TestCase, main

from lark import Lark, Token, Tree
from lark.load_grammar import GrammarError, GRAMMAR_ERRORS, find_grammar_errors
from lark.load_grammar import FromPackageLoader


class TestGrammar(TestCase):
    def setUp(self):
        pass

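    # Each entry in GRAMMAR_ERRORS pairs an error message with grammar snippets
    # that should trigger it; every example must raise a matching GrammarError.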
    def test_errors(self):
        for msg, examples in GRAMMAR_ERRORS:
            for example in examples:
                try:
                    p = Lark(example)
                except GrammarError as e:
                    assert msg in str(e)
                else:
                    assert False, "example did not raise an error"

    def test_empty_literal(self):
        # Issue #888
        self.assertRaises(GrammarError, Lark, "start: \"\"")

    def test_override_rule(self):
        # Overrides the 'sep' template in the imported grammar to add an optional
        # terminating delimiter, thus extending it beyond its original capacity.
        p = Lark("""
            %import .test_templates_import (start, sep)

            %override sep{item, delim}: item (delim item)* delim?
            %ignore " "
        """, source_path=__file__)

        a = p.parse('[1, 2, 3]')
        b = p.parse('[1, 2, 3, ]')
        assert a == b

        self.assertRaises(GrammarError, Lark, """
            %import .test_templates_import (start, sep)

            %override sep{item}: item (delim item)* delim?
        """, source_path=__file__)

        self.assertRaises(GrammarError, Lark, """
            %override sep{item}: item (delim item)* delim?
        """, source_path=__file__)

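    # %override on imported terminals replaces their definitions entirely.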
    def test_override_terminal(self):
        p = Lark("""
            %import .grammars.ab (startab, A, B)

            %override A: "c"
            %override B: "d"
        """, start='startab', source_path=__file__)

        a = p.parse('cd')
        self.assertEqual(a.children[0].children, [Token('A', 'c'), Token('B', 'd')])

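    # %extend adds a new alternative to an imported rule; extending a rule that
    # was never defined or imported must raise a GrammarError.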
    def test_extend_rule(self):
        p = Lark("""
            %import .grammars.ab (startab, A, B, expr)

            %extend expr: B A
        """, start='startab', source_path=__file__)
        a = p.parse('abab')
        self.assertEqual(a.children[0].children, ['a', Tree('expr', ['b', 'a']), 'b'])

        self.assertRaises(GrammarError, Lark, """
            %extend expr: B A
        """)

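    # %extend also works on imported terminals, adding "c" as an alternative of A.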
    def test_extend_term(self):
        p = Lark("""
            %import .grammars.ab (startab, A, B, expr)

            %extend A: "c"
        """, start='startab', source_path=__file__)
        a = p.parse('acbb')
        self.assertEqual(a.children[0].children, ['a', Tree('expr', ['c', 'b']), 'b'])

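    # A rule may be extended more than once within the same grammar.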
    def test_extend_twice(self):
        p = Lark("""
            start: x+

            x: "a"
            %extend x: "b"
            %extend x: "c"
        """)

        assert p.parse("abccbba") == p.parse("cbabbbb")

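    # %ignore must reference a defined terminal; an unknown name or a rule is an error.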
    def test_undefined_ignore(self):
        g = """!start: "A"

            %ignore B
            """
        self.assertRaises(GrammarError, Lark, g)

        g = """!start: "A"

            %ignore start
            """
        self.assertRaises(GrammarError, Lark, g)

    def test_alias_in_terminal(self):
        g = """start: TERM
            TERM: "a" -> alias
            """
        self.assertRaises(GrammarError, Lark, g)

    def test_undefined_rule(self):
        self.assertRaises(GrammarError, Lark, """start: a""")

    def test_undefined_term(self):
        self.assertRaises(GrammarError, Lark, """start: A""")

    def test_token_multiline_only_works_with_x_flag(self):
        g = r"""start: ABC
                ABC: / a b c
                        d
                        e f
                      /i
                """
        self.assertRaises(GrammarError, Lark, g)

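    # FromPackageLoader lets %import resolve grammar files from inside a Python package.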
    def test_import_custom_sources(self):
        custom_loader = FromPackageLoader('tests', ('grammars', ))

        grammar = """
        start: startab

        %import ab.startab
        """

        p = Lark(grammar, import_paths=[custom_loader])
        self.assertEqual(p.parse('ab'),
                         Tree('start', [Tree('startab', [Tree('ab__expr', [Token('ab__A', 'a'), Token('ab__B', 'b')])])]))

    def test_import_custom_sources2(self):
        custom_loader = FromPackageLoader('tests', ('grammars', ))

        grammar = """
        start: rule_to_import

        %import test_relative_import_of_nested_grammar__grammar_to_import.rule_to_import
        """
        p = Lark(grammar, import_paths=[custom_loader])
        x = p.parse('N')
        self.assertEqual(next(x.find_data('rule_to_import')).children, ['N'])

    def test_import_custom_sources3(self):
        custom_loader2 = FromPackageLoader('tests')

        grammar = """
        %import .test_relative_import (start, WS)
        %ignore WS
        """

        p = Lark(grammar, import_paths=[custom_loader2], source_path=__file__)  # import relative to current file
        x = p.parse('12 capybaras')
        self.assertEqual(x.children, ['12', 'capybaras'])

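    # find_grammar_errors collects every syntax error in a grammar instead of
    # stopping at the first one; each error reports the line it occurred on.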
    def test_find_grammar_errors(self):
        text = """
        a: rule
        b rule
        c: rule
        B.: "hello" f
        D: "okay"
        """

        assert [e.line for e, _s in find_grammar_errors(text)] == [3, 5]

        text = """
        a: rule
        b rule
        | ok
        c: rule
        B.: "hello" f
        D: "okay"
        """

        assert [e.line for e, _s in find_grammar_errors(text)] == [3, 4, 6]

        text = """
        a: rule @#$#@$@&&
        b: rule
        | ok
        c: rule
        B: "hello" f @
        D: "okay"
        """

        x = find_grammar_errors(text)
        assert [e.line for e, _s in find_grammar_errors(text)] == [2, 6]


if __name__ == '__main__':
    main()