diff --git a/examples/calc.py b/examples/calc.py
index 83835cf..cb7ef5d 100644
--- a/examples/calc.py
+++ b/examples/calc.py
@@ -5,7 +5,7 @@
 from lark import Lark, InlineTransformer
 
 try:
-    input = raw_input
+    input = raw_input   # For Python2 compatibility
 except NameError:
     pass
 
diff --git a/examples/conf_nolex.py b/examples/conf_nolex.py
index 6c16baf..8634a46 100644
--- a/examples/conf_nolex.py
+++ b/examples/conf_nolex.py
@@ -1,14 +1,15 @@
 #
-# This example demonstrates scanless parsing using the earley_nolex frontend
+# This example demonstrates scanless parsing using the dynamic-lexer earley frontend
 #
 # Using a lexer for configuration files is tricky, because values don't
-# have to be surrounded by delimiters.
-# In this example with skip lexing and let the Earley parser resolve the ambiguity.
+# have to be surrounded by delimiters. Using a standard lexer for this just won't work.
+#
+# In this example we use a dynamic lexer and let the Earley parser resolve the ambiguity.
 #
 # Future versions of lark will make it easier to write these kinds of grammars.
 #
-# Another approach is to use the contextual lexer. It is less powerful than the scanless approach,
-# but it can handle some ambiguity in lexing and it's much faster since it uses LALR(1).
+# Another approach is to use the contextual lexer with LALR. It is less powerful than Earley,
+# but it can handle some ambiguity when lexing and it's much faster.
 # See examples/conf.py for an example of that approach.
 #
 
@@ -25,7 +26,7 @@ parser = Lark(r"""
 
         %import common.WS_INLINE
         %ignore WS_INLINE
-    """, lexer=None)
+    """, lexer='dynamic')
 
 def test():
     sample_conf = """
diff --git a/examples/reconstruct_json.py b/examples/reconstruct_json.py
index 29c3373..4695698 100644
--- a/examples/reconstruct_json.py
+++ b/examples/reconstruct_json.py
@@ -25,7 +25,7 @@ test_json = '''
 
 
 def test_scanless():
-    json_parser = Lark(json_grammar)
+    json_parser = Lark(json_grammar, lexer=None)
     tree = json_parser.parse(test_json)
 
     # print ('@@', tree.pretty())
diff --git a/examples/turtle_dsl.py b/examples/turtle_dsl.py
index 9f3cc9d..775a98e 100644
--- a/examples/turtle_dsl.py
+++ b/examples/turtle_dsl.py
@@ -1,5 +1,10 @@
 # This example implements a LOGO-like toy language for Python's turtle, with interpreter.
 
+try:
+    input = raw_input   # For Python2 compatibility
+except NameError:
+    pass
+
 import turtle
 
 from lark import Lark
@@ -76,5 +81,5 @@ def test():
     run_turtle(text)
 
 if __name__ == '__main__':
-    #test
+    # test()
     main()
diff --git a/lark/__init__.py b/lark/__init__.py
index 5337ecb..59629d3 100644
--- a/lark/__init__.py
+++ b/lark/__init__.py
@@ -3,4 +3,4 @@ from .common import ParseError, GrammarError
 from .lark import Lark
 from .utils import inline_args
 
-__version__ = "0.2.10"
+__version__ = "0.3.0"
diff --git a/lark/lark.py b/lark/lark.py
index 041bdc1..1bf05fe 100644
--- a/lark/lark.py
+++ b/lark/lark.py
@@ -129,7 +129,7 @@ class Lark:
         if self.options.parser == 'lalr':
             self.options.lexer = 'standard'
         elif self.options.parser == 'earley':
-            self.options.lexer = None
+            self.options.lexer = 'dynamic'
         else:
             assert False, self.options.parser
         lexer = self.options.lexer
diff --git a/lark/parsers/xearley.py b/lark/parsers/xearley.py
index 7bffbb7..729c326 100644
--- a/lark/parsers/xearley.py
+++ b/lark/parsers/xearley.py
@@ -24,7 +24,7 @@ from ..common import ParseError, UnexpectedToken, Terminal
 from ..tree import Tree
 from .grammar_analysis import GrammarAnalyzer
 
-from earley import ResolveAmbig, ApplyCallbacks, Item, NewsList, Derivation, END_TOKEN, Column
+from .earley import ResolveAmbig, ApplyCallbacks, Item, NewsList, Derivation, END_TOKEN, Column
 
 class Parser:
     def __init__(self, rules, start_symbol, callback, resolve_ambiguity=True, ignore=()):
diff --git a/lark/reconstruct.py b/lark/reconstruct.py
index 2c9bcd1..590a8e7 100644
--- a/lark/reconstruct.py
+++ b/lark/reconstruct.py
@@ -86,7 +86,7 @@ class Reconstructor:
                 MatchTerminal(sym) if is_terminal(sym) else MatchTree(sym)
                 for sym in expansion if not is_discarded_terminal(sym)]
 
-            rules.append((name, reduced, WriteTokens(name, expansion).f))
+            rules.append((name, reduced, WriteTokens(name, expansion).f, None))
 
         self.rules = rules