diff --git a/examples/reconstruct_json.py b/examples/reconstruct_json.py
index de4e086..29c3373 100644
--- a/examples/reconstruct_json.py
+++ b/examples/reconstruct_json.py
@@ -46,7 +46,7 @@ def test_lalr():
 
     new_json = Reconstructor(json_parser).reconstruct(tree)
     print (new_json)
-    print (json.loads(new_json) == json.loads(test_json))
+    print (json.loads(new_json) == json.loads(test_json))
 
 test_scanless()
 test_lalr()
diff --git a/lark/load_grammar.py b/lark/load_grammar.py
index f91b0b9..8027857 100644
--- a/lark/load_grammar.py
+++ b/lark/load_grammar.py
@@ -290,7 +290,8 @@ class ExtractAnonTokens(InlineTransformer):
 def _literal_to_pattern(literal):
     v = literal.value
     assert v[0] == v[-1] and v[0] in '"/'
-    s = literal_eval("u'''%s'''" % v[1:-1])
+    x = v[1:-1].replace("'", r"\'")
+    s = literal_eval("u'''%s'''" % x)
     return { 'STRING': PatternStr,
              'REGEXP': PatternRE }[literal.type](s)
 
diff --git a/tests/test_parser.py b/tests/test_parser.py
index 0b1f342..6eca4ab 100644
--- a/tests/test_parser.py
+++ b/tests/test_parser.py
@@ -407,6 +407,15 @@ def _make_parser_test(LEXER, PARSER):
                    """)
         x = g.parse(r'\a')
 
+    def test_special_chars(self):
+        g = _Lark(r"""start: "\n"
+                   """)
+        x = g.parse('\n')
+
+        g = _Lark(r"""start: /\n/
+                   """)
+        x = g.parse('\n')
+
     def test_backslash2(self):
         g = _Lark(r"""start: "\"" "-"
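
A note on the load_grammar.py hunk: _literal_to_pattern decodes escape sequences in grammar literals by wrapping the literal's body in a u'''...''' string and handing it to literal_eval. If the body itself contains a single quote, the raw interpolation can terminate that wrapper early and raise a SyntaxError, which is what the added .replace("'", r"\'") guards against. Below is a minimal standalone sketch of the before/after behaviour; the helper names are made up for illustration and are not part of lark's API.

    from ast import literal_eval

    def to_pattern_before(v):
        # Pre-fix: interpolate the literal body straight into a u'''...''' wrapper.
        # A single quote in the body can close the wrapper early.
        return literal_eval("u'''%s'''" % v[1:-1])

    def to_pattern_after(v):
        # Post-fix: escape single quotes so they cannot terminate the wrapper.
        x = v[1:-1].replace("'", r"\'")
        return literal_eval("u'''%s'''" % x)

    quote_literal = '''"'"'''      # a grammar string literal whose body is a single quote

    print(repr(to_pattern_after(quote_literal)))    # "'" -- decoded correctly

    try:
        to_pattern_before(quote_literal)            # builds u''''''' -- malformed
    except SyntaxError as err:
        print('unescaped quote breaks literal_eval:', err)

The new test_special_chars test exercises the same decoding path, checking that \n in both a string literal and a regexp literal comes out of the grammar as a real newline.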