from __future__ import absolute_import

import sys
from unittest import TestCase, main

from lark import Lark, Token, Tree
from lark.load_grammar import GrammarError, GRAMMAR_ERRORS, find_grammar_errors
from lark.load_grammar import FromPackageLoader


class TestGrammar(TestCase):
    def setUp(self):
        pass

    def test_errors(self):
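        # Each entry in GRAMMAR_ERRORS pairs an error message with example
        # grammars that should trigger it; every example must raise a
        # GrammarError that contains that message.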
        for msg, examples in GRAMMAR_ERRORS:
            for example in examples:
                try:
                    p = Lark(example)
                except GrammarError as e:
                    assert msg in str(e)
                else:
                    assert False, "example did not raise an error"

    def test_override_rule(self):
        # Override the 'sep' template from the imported grammar to accept an
        # optional terminating delimiter, extending it beyond its original capacity.
        p = Lark("""
            %import .test_templates_import (start, sep)

            %override sep{item, delim}: item (delim item)* delim?
            %ignore " "
        """, source_path=__file__)

        a = p.parse('[1, 2, 3]')
        b = p.parse('[1, 2, 3, ]')
        assert a == b

        self.assertRaises(GrammarError, Lark, """
            %import .test_templates_import (start, sep)

            %override sep{item}: item (delim item)* delim?
        """)

        self.assertRaises(GrammarError, Lark, """
            %override sep{item}: item (delim item)* delim?
        """)

    def test_override_terminal(self):
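        # %override replaces the imported terminal definitions, so 'startab'
        # now matches "cd" instead of "ab".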
        p = Lark("""
            %import .grammars.ab (startab, A, B)

            %override A: "c"
            %override B: "d"
        """, start='startab', source_path=__file__)

        a = p.parse('cd')
        self.assertEqual(a.children[0].children, [Token('A', 'c'), Token('B', 'd')])

    def test_extend_rule(self):
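        # %extend adds an alternative to the imported 'expr' rule; extending a
        # rule that was never defined or imported must raise GrammarError.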
        p = Lark("""
            %import .grammars.ab (startab, A, B, expr)

            %extend expr: B A
        """, start='startab', source_path=__file__)
        a = p.parse('abab')
        self.assertEqual(a.children[0].children, ['a', Tree('expr', ['b', 'a']), 'b'])

        self.assertRaises(GrammarError, Lark, """
            %extend expr: B A
        """)

    def test_extend_term(self):
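        # %extend also works on terminals: A now matches "a" or "c".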
        p = Lark("""
            %import .grammars.ab (startab, A, B, expr)

            %extend A: "c"
        """, start='startab', source_path=__file__)
        a = p.parse('acbb')
        self.assertEqual(a.children[0].children, ['a', Tree('expr', ['c', 'b']), 'b'])

    def test_extend_twice(self):
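        # A locally defined rule can be extended more than once in the same grammar.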
        p = Lark("""
            start: x+

            x: "a"
            %extend x: "b"
            %extend x: "c"
        """)

        assert p.parse("abccbba") == p.parse("cbabbbb")

    def test_undefined_ignore(self):
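        # %ignore must reference a defined terminal; an undefined name or a
        # rule name is rejected.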
        g = """!start: "A"

            %ignore B
            """
        self.assertRaises(GrammarError, Lark, g)

        g = """!start: "A"

            %ignore start
            """
        self.assertRaises(GrammarError, Lark, g)

    def test_alias_in_terminal(self):
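        # Aliases (->) are only valid in rules, not in terminal definitions.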
        g = """start: TERM
            TERM: "a" -> alias
            """
        self.assertRaises(GrammarError, Lark, g)

    def test_undefined_rule(self):
        self.assertRaises(GrammarError, Lark, """start: a""")

    def test_undefined_term(self):
        self.assertRaises(GrammarError, Lark, """start: A""")

    def test_token_multiline_only_works_with_x_flag(self):
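        # A regex terminal may only span multiple lines when the 'x' flag is
        # given; with only 'i' it must be rejected.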
        g = r"""start: ABC
                ABC: / a b c
                            d
                            e f
                        /i
                """
        self.assertRaises(GrammarError, Lark, g)

    def test_import_custom_sources(self):
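        # FromPackageLoader resolves %import statements from package data;
        # here it is passed to Lark as a custom entry in import_paths.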
        custom_loader = FromPackageLoader('tests', ('grammars', ))

        grammar = """
        start: startab

        %import ab.startab
        """

        p = Lark(grammar, import_paths=[custom_loader])
        self.assertEqual(p.parse('ab'),
                         Tree('start', [Tree('startab', [Tree('ab__expr', [Token('ab__A', 'a'), Token('ab__B', 'b')])])]))

    def test_import_custom_sources2(self):
        custom_loader = FromPackageLoader('tests', ('grammars', ))

        grammar = """
        start: rule_to_import

        %import test_relative_import_of_nested_grammar__grammar_to_import.rule_to_import
        """
        p = Lark(grammar, import_paths=[custom_loader])
        x = p.parse('N')
        self.assertEqual(next(x.find_data('rule_to_import')).children, ['N'])

    def test_import_custom_sources3(self):
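        # With source_path set, the relative %import can be resolved against
        # this test file.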
        custom_loader2 = FromPackageLoader('tests')

        grammar = """
        %import .test_relative_import (start, WS)
        %ignore WS
        """
        p = Lark(grammar, import_paths=[custom_loader2], source_path=__file__)  # import relative to current file
        x = p.parse('12 capybaras')
        self.assertEqual(x.children, ['12', 'capybaras'])

    def test_find_grammar_errors(self):
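        # find_grammar_errors() collects (error, section) pairs instead of
        # raising; only the reported line numbers are checked here.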
        text = """
        a: rule
        b rule
        c: rule
        B.: "hello" f
        D: "okay"
        """

        assert [e.line for e, _s in find_grammar_errors(text)] == [3, 5]

        text = """
        a: rule
        b rule
           | ok
        c: rule
        B.: "hello" f
        D: "okay"
        """

        assert [e.line for e, _s in find_grammar_errors(text)] == [3, 4, 6]

        text = """
        a: rule @#$#@$@&&
        b: rule
           | ok
        c: rule
        B: "hello" f @
        D: "okay"
        """

        x = find_grammar_errors(text)
        assert [e.line for e, _s in find_grammar_errors(text)] == [2, 6]


if __name__ == '__main__':
    main()