
load_grammar now collects all the imports to make before loading them into the namespace

PJCampi (5 years ago), commit ccbaebdc6f
4 changed files with 43 additions and 18 deletions
  1. lark/load_grammar.py (+24, -18)
  2. tests/grammars/three_rules_using_same_token.lark (+7, -0)
  3. tests/test_parser.py (+7, -0)
  4. tests/test_relative_import_rules_dependencies_imported_only_once.lark (+5, -0)

lark/load_grammar.py (+24, -18)

@@ -725,7 +725,7 @@ class GrammarLoader:
         rule_defs = [options_from_rule(*x) for x in rule_defs]
 
         # Execute statements
-        ignore = []
+        ignore, imports = [], {}
         for (stmt,) in statements:
             if stmt.data == 'ignore':
                 t ,= stmt.children
@@ -734,22 +734,20 @@
                 if len(stmt.children) > 1:
                     path_node, arg1 = stmt.children
                 else:
-                    path_node ,= stmt.children
+                    path_node, = stmt.children
                     arg1 = None
 
                 if isinstance(arg1, Tree): # Multi import
-                    dotted_path = path_node.children
+                    dotted_path = tuple(path_node.children)
                     names = arg1.children
-                    aliases = names # Can't have aliased multi import, so all aliases will be the same as names
+                    aliases = dict(zip(names, names)) # Can't have aliased multi import, so all aliases will be the same as names
                 else: # Single import
-                    dotted_path = path_node.children[:-1]
-                    names = [path_node.children[-1]] # Get name from dotted path
-                    aliases = [arg1] if arg1 else names # Aliases if exist
-
-                grammar_path = os.path.join(*dotted_path) + EXT
+                    dotted_path = tuple(path_node.children[:-1])
+                    name = path_node.children[-1] # Get name from dotted path
+                    aliases = {name: arg1 or name} # Aliases if exist
 
                 if path_node.data == 'import_lib': # Import from library
-                    g = import_grammar(grammar_path)
+                    base_paths = []
                 else: # Relative import
                     if grammar_name == '<string>': # Import relative to script file path if grammar is coded in script
                         try:
@@ -759,16 +757,16 @@
                     else:
                         base_file = grammar_name # Import relative to grammar file path if external grammar file
                     if base_file:
-                        base_path = os.path.split(base_file)[0]
+                        base_paths = [os.path.split(base_file)[0]]
                     else:
-                        base_path = os.path.abspath(os.path.curdir)
-                    g = import_grammar(grammar_path, base_paths=[base_path])
-
-                aliases_dict = dict(zip(names, aliases))
-                new_td, new_rd = import_from_grammar_into_namespace(g, '__'.join(dotted_path), aliases_dict)
-
-                term_defs += new_td
-                rule_defs += new_rd
+                        base_paths = [os.path.abspath(os.path.curdir)]
+
+                try:
+                    import_base_paths, import_aliases = imports[dotted_path]
+                    assert base_paths == import_base_paths, 'Inconsistent base_paths for %s.' % '.'.join(dotted_path)
+                    import_aliases.update(aliases)
+                except KeyError:
+                    imports[dotted_path] = base_paths, aliases
 
             elif stmt.data == 'declare':
                 for t in stmt.children:
@@ -776,6 +774,14 @@
             else:
                 assert False, stmt
 
+        # import grammars
+        for dotted_path, (base_paths, aliases) in imports.items():
+            grammar_path = os.path.join(*dotted_path) + EXT
+            g = import_grammar(grammar_path, base_paths=base_paths)
+            new_td, new_rd = import_from_grammar_into_namespace(g, '__'.join(dotted_path), aliases)
+
+            term_defs += new_td
+            rule_defs += new_rd
 
         # Verify correctness 1
         for name, _ in term_defs:
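
The change splits import handling into two passes: the statement loop only records each %import in the imports dict, keyed by the dotted path and merging aliases when the same grammar is referenced again, and a second loop then reads every grammar file exactly once. Below is a minimal standalone sketch of that merge logic; the collect_import helper name and the sample paths and aliases are hypothetical, since in the actual code this logic lives inline in GrammarLoader.load_grammar.

def collect_import(imports, dotted_path, base_paths, aliases):
    # Record one %import statement; a grammar imported several times ends up
    # as a single entry whose aliases are the union of all requests.
    try:
        import_base_paths, import_aliases = imports[dotted_path]
        assert base_paths == import_base_paths, 'Inconsistent base_paths for %s.' % '.'.join(dotted_path)
        import_aliases.update(aliases)
    except KeyError:
        imports[dotted_path] = base_paths, aliases

imports = {}
# Three %import statements targeting the same grammar file:
collect_import(imports, ('grammars', 'three_rules_using_same_token'), ['tests'], {'a': 'a'})
collect_import(imports, ('grammars', 'three_rules_using_same_token'), ['tests'], {'b': 'b'})
collect_import(imports, ('grammars', 'three_rules_using_same_token'), ['tests'], {'c': 'd'})

# A single entry remains, so the grammar file is read only once by the
# "# import grammars" loop added above.
assert imports == {('grammars', 'three_rules_using_same_token'):
                   (['tests'], {'a': 'a', 'b': 'b', 'c': 'd'})}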


tests/grammars/three_rules_using_same_token.lark (+7, -0)

@@ -0,0 +1,7 @@
+%import common.INT
+
+a: A
+b: A
+c: A
+
+A: "A"

tests/test_parser.py (+7, -0)

@@ -1109,6 +1109,13 @@ def _make_parser_test(LEXER, PARSER):
             x = l.parse('N')
             self.assertEqual(next(x.find_data('rule_to_import')).children, ['N'])
 
+        def test_relative_import_rules_dependencies_imported_only_once(self):
+            l = _Lark_open("test_relative_import_rules_dependencies_imported_only_once.lark", rel_to=__file__)
+            x = l.parse('AAA')
+            self.assertEqual(next(x.find_data('a')).children, ['A'])
+            self.assertEqual(next(x.find_data('b')).children, ['A'])
+            self.assertEqual(next(x.find_data('d')).children, ['A'])
+
         def test_import_errors(self):
             grammar = """
             start: NUMBER WORD


tests/test_relative_import_rules_dependencies_imported_only_once.lark (+5, -0)

@@ -0,0 +1,5 @@
+%import .grammars.three_rules_using_same_token.a
+%import .grammars.three_rules_using_same_token.b
+%import .grammars.three_rules_using_same_token.c -> d
+
+start: a b d
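
For a quick manual check outside the test suite, this grammar can be loaded through the public API. A sketch, assuming the script sits next to the .lark file in tests/ so the relative %import statements resolve; Lark.open with rel_to mirrors the _Lark_open helper used in the test above.

from lark import Lark

# rel_to=__file__ opens the grammar relative to this script; the grammar's
# own relative %import statements then resolve against the grammar file.
parser = Lark.open('test_relative_import_rules_dependencies_imported_only_once.lark', rel_to=__file__)
tree = parser.parse('AAA')
print(tree.pretty())  # start node containing a, b and d, each wrapping an 'A' token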
