@@ -166,7 +166,8 @@ string: STRING | LONG_STRING
 NAME: /[a-zA-Z_]\w*/
 COMMENT: /\#[^\n]*/
-_NEWLINE: /(\r?\n[\t ]*|${COMMENT})+/
+_NEWLINE: ( /\r?\n[\t ]*/ | COMMENT )+
 %ignore /[\t \f]+/ // WS
 %ignore /\\\\[\t \f]*\r?\n/ // LINE_CONT
@@ -3,4 +3,4 @@ from .common import ParseError, GrammarError
 from .lark import Lark
 from .utils import inline_args
-__version__ = "0.3.3"
+__version__ = "0.3.4"
@@ -36,7 +36,7 @@ class LarkOptions(object):
         debug - Affects verbosity (default: False)
         keep_all_tokens - Don't automagically remove "punctuation" tokens (default: False)
         cache_grammar - Cache the Lark grammar (Default: False)
-        postlex - Lexer post-processing (Default: None)
+        postlex - Lexer post-processing (Requires standard lexer. Default: None)
         start - The start symbol (Default: start)
         profile - Measure run-time usage in Lark. Read results from the profiler proprety (Default: False)
         propagate_positions - Experimental. Don't use yet.
@@ -131,7 +131,7 @@ class Lexer(object):
         self.newline_types = [t.name for t in tokens if _regexp_has_newline(t.pattern.to_regexp())]
         self.ignore_types = [t for t in ignore]
-        tokens.sort(key=lambda x:(-x.priority, -x.pattern.max_width, x.name))
+        tokens.sort(key=lambda x:(-x.priority, -x.pattern.max_width, -len(x.pattern.value), x.name))
        tokens, self.callback = _create_unless(tokens)
        assert all(self.callback.values())