###{standalone
#
#
# Lark Stand-alone Generator Tool
# ----------------------------------
# Generates a stand-alone LALR(1) parser with a standard lexer
#
# Git: https://github.com/erezsh/lark
# Author: Erez Shinan (erezshin@gmail.com)
#
#
# >>> LICENSE
#
# This tool and its generated code use a separate license from Lark.
#
# It is licensed under GPLv2 or above.
#
# If you wish to purchase a commercial license for this tool and its
# generated code, contact me via email.
#
# If GPL is incompatible with your free or open-source project,
# contact me and we'll work it out (for free).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# See <http://www.gnu.org/licenses/>.
#
#
###}
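
# Example usage (a sketch; the grammar and output file names below are hypothetical):
#
#     python -m lark.tools.standalone my_grammar.lark > my_parser.py
#
# The generated my_parser.py can then be used without a Lark installation:
#
#     from my_parser import Lark_StandAlone
#     parser = Lark_StandAlone()
#     tree = parser.parse("some input text")
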
import codecs
import sys
import os
from pprint import pprint
from os import path
from collections import defaultdict

import lark
from lark import Lark
from lark.parsers.lalr_analysis import Reduce
from lark.grammar import RuleOptions, Rule
from lark.lexer import TerminalDef

_dir = path.dirname(__file__)
_larkdir = path.join(_dir, path.pardir)

EXTRACT_STANDALONE_FILES = [
    'tools/standalone.py',
    'exceptions.py',
    'utils.py',
    'tree.py',
    'visitors.py',
    'indenter.py',
    'grammar.py',
    'lexer.py',
    'common.py',
    'parse_tree_builder.py',
    'parsers/lalr_parser.py',
    'parsers/lalr_analysis.py',
    'parser_frontends.py',
    'lark.py',
]


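# Each file listed above contains a '###{standalone ... ###}' section; main() below
# uses extract_sections() to pull those sections out and concatenate them into the
# generated module.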
def extract_sections(lines):
    section = None
    text = []
    sections = defaultdict(list)
    for l in lines:
        if l.startswith('###'):
            if l[3] == '{':
                section = l[4:].strip()
            elif l[3] == '}':
                sections[section] += text
                section = None
                text = []
            else:
                raise ValueError(l)
        elif section:
            text.append(l)

    return {name: ''.join(text) for name, text in sections.items()}
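
# For example (a hypothetical sketch, not a real source file): given a file containing
#
#     ###{standalone
#     X = 1
#     ###}
#
# extract_sections() would return {'standalone': 'X = 1\n'}.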


def main(fobj, start):
    lark_inst = Lark(fobj, parser="lalr", lexer="contextual", start=start)

    print('# The file was automatically generated by Lark v%s' % lark.__version__)

    # Emit the '###{standalone ... ###}' section of each runtime module.
    for pyfile in EXTRACT_STANDALONE_FILES:
        with open(os.path.join(_larkdir, pyfile)) as f:
            print(extract_sections(f)['standalone'])

    # Serialize the compiled grammar and parser tables into the generated module.
    data, m = lark_inst.memo_serialize([TerminalDef, Rule])
    print('DATA = (')
    # pprint(data, width=160)
    print(data)
    print(')')
    print('MEMO = (')
    print(m)
    print(')')

    print('Shift = 0')
    print('Reduce = 1')
    print("def Lark_StandAlone(transformer=None, postlex=None):")
    print("  namespace = {'Rule': Rule, 'TerminalDef': TerminalDef}")
    print("  return Lark.deserialize(DATA, namespace, MEMO, transformer=transformer, postlex=postlex)")


if __name__ == '__main__':
    if len(sys.argv) < 2:
        print("Lark Stand-alone Generator Tool")
        print("Usage: python -m lark.tools.standalone <grammar-file> [<start-symbol>]")
        sys.exit(1)

    if len(sys.argv) == 3:
        fn, start = sys.argv[1:]
    elif len(sys.argv) == 2:
        fn, start = sys.argv[1], 'start'
    else:
        assert False, sys.argv

    with codecs.open(fn, encoding='utf8') as f:
        main(f, start)