@@ -1,6 +1,6 @@
 from warnings import warn
-from .utils import STRING_TYPE, logger, NO_VALUE
+from .utils import logger, NO_VALUE
 ###{standalone
@@ -90,7 +90,7 @@ class UnexpectedInput(LarkError):
         candidate = (None, False)
         for i, (label, example) in enumerate(examples):
-            assert not isinstance(example, STRING_TYPE)
+            assert not isinstance(example, str), "Expecting a list"
             for j, malformed in enumerate(example):
                 try:
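For context, `match_examples` walks each (label, examples) pair and test-parses every malformed string, so the tightened assertion above guards against passing a bare string where a list is expected. A rough usage sketch with a made-up toy grammar and made-up labels (not part of this diff):

from lark import Lark, UnexpectedInput

parser = Lark('start: "a" "b"', parser='lalr')   # toy grammar, for illustration only

try:
    parser.parse('a')
except UnexpectedInput as u:
    # Each label is paired with a *list* of malformed inputs; a bare string
    # would now trip the assert shown in the hunk above.
    label = u.match_examples(parser.parse, [
        ('missing second item', ['a']),
        ('wrong first token', ['b', 'ba']),
    ])
    print(label)   # expected: 'missing second item'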
@@ -1,14 +1,13 @@
 from __future__ import absolute_import
+from lark.exceptions import ConfigurationError, assert_config
+from abc import ABC, abstractmethod
 import sys, os, pickle, hashlib
 from io import open
 import tempfile
 from warnings import warn
-from .utils import STRING_TYPE, Serialize, SerializeMemoizer, FS, isascii, logger, ABC, abstractmethod
-from .exceptions import ConfigurationError, assert_config
+from .utils import Serialize, SerializeMemoizer, FS, isascii, logger
 from .load_grammar import load_grammar, FromPackageLoader, Grammar, verify_used_files
 from .tree import Tree
 from .common import LexerConf, ParserConf
@@ -153,7 +152,7 @@ class LarkOptions(Serialize):
             options[name] = value
-        if isinstance(options['start'], STRING_TYPE):
+        if isinstance(options['start'], str):
             options['start'] = [options['start']]
         self.__dict__['options'] = options
@@ -247,14 +246,11 @@ class Lark(Serialize):
         cache_fn = None
         cache_md5 = None
-        if isinstance(grammar, STRING_TYPE):
+        if isinstance(grammar, str):
             self.source_grammar = grammar
             if self.options.use_bytes:
                 if not isascii(grammar):
                     raise ConfigurationError("Grammar must be ascii only, when use_bytes=True")
-                if sys.version_info[0] == 2 and self.options.use_bytes != 'force':
-                    raise ConfigurationError("`use_bytes=True` may have issues on python2."
-                                             "Use `use_bytes='force'` to use it at your own risk.")
             if self.options.cache:
                 if self.options.parser != 'lalr':
@@ -266,7 +262,7 @@ class Lark(Serialize):
                 s = grammar + options_str + __version__ + str(sys.version_info[:2])
                 cache_md5 = hashlib.md5(s.encode('utf8')).hexdigest()
-                if isinstance(self.options.cache, STRING_TYPE):
+                if isinstance(self.options.cache, str):
                     cache_fn = self.options.cache
                 else:
                     if self.options.cache is not True:
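The relaxed `isinstance` checks above cover the two spellings of the `cache` option: `True` (a cache file named after the md5 computed in this hunk) or an explicit path string. A hedged sketch of both, with a made-up grammar and path:

from lark import Lark

grammar = 'start: "x"'

# cache=True: Lark derives a cache file name itself (LALR only).
p1 = Lark(grammar, parser='lalr', cache=True)

# cache=<str>: an explicit cache file path (hypothetical location).
p2 = Lark(grammar, parser='lalr', cache='/tmp/demo.lark_cache')

print(p1.parse('x'))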
@@ -1,8 +1,9 @@
 # Lexer Implementation
 import re
+from contextlib import suppress
-from .utils import Str, classify, get_regexp_width, Py36, Serialize, suppress
+from .utils import classify, get_regexp_width, Py36, Serialize
 from .exceptions import UnexpectedCharacters, LexError, UnexpectedToken
 ###{standalone
@@ -110,7 +111,7 @@ class TerminalDef(Serialize):
         return self.name
-class Token(Str):
+class Token(str):
     """A string with meta-information, that is produced by the lexer.
     When parsing text, the resulting chunks of the input that haven't been discarded,
@@ -177,9 +178,9 @@ class Token(Str):
         if isinstance(other, Token) and self.type != other.type:
             return False
-        return Str.__eq__(self, other)
+        return str.__eq__(self, other)
-    __hash__ = Str.__hash__
+    __hash__ = str.__hash__
 class LineCounter:
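The `Str` alias was simply `type(u'')`, so `Token` can subclass the builtin directly. A self-contained sketch of the same pattern, using a hypothetical `MetaStr` rather than Lark's real `Token`:

class MetaStr(str):
    """Illustrative stand-in: a str subclass carrying extra metadata."""

    def __new__(cls, type_, value):
        inst = super().__new__(cls, value)
        inst.type = type_
        return inst

    def __eq__(self, other):
        # Same shape as the hunk above: differently-typed tokens are unequal,
        # otherwise fall back to plain string comparison.
        if isinstance(other, MetaStr) and self.type != other.type:
            return False
        return str.__eq__(self, other)

    __hash__ = str.__hash__


t = MetaStr('NAME', 'foo')
print(t == 'foo', t.type)   # True NAME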
@@ -8,6 +8,7 @@ from io import open
 import pkgutil
 from ast import literal_eval
 from numbers import Integral
+from contextlib import suppress
 from .utils import bfs, Py36, logger, classify_bool, is_id_continue, is_id_start, bfs_all_unique
 from .lexer import Token, TerminalDef, PatternStr, PatternRE
@@ -16,7 +17,7 @@ from .parse_tree_builder import ParseTreeBuilder
 from .parser_frontends import ParsingFrontend
 from .common import LexerConf, ParserConf
 from .grammar import RuleOptions, Rule, Terminal, NonTerminal, Symbol
-from .utils import classify, suppress, dedup_list, Str
+from .utils import classify, dedup_list
 from .exceptions import GrammarError, UnexpectedCharacters, UnexpectedToken, ParseError
 from .tree import Tree, SlottedTree as ST
@@ -539,9 +540,9 @@ class PrepareSymbols(Transformer_InPlace):
         if isinstance(v, Tree):
             return v
         elif v.type == 'RULE':
-            return NonTerminal(Str(v.value))
+            return NonTerminal(str(v.value))
         elif v.type == 'TERMINAL':
-            return Terminal(Str(v.value), filter_out=v.startswith('_'))
+            return Terminal(str(v.value), filter_out=v.startswith('_'))
         assert False
@@ -27,6 +27,7 @@ from __future__ import print_function
 #
 from io import open
+from abc import ABC, abstractmethod
 ###}
 import sys
@@ -1,4 +1,3 @@
-import hashlib
 import unicodedata
 import os
 from functools import reduce
@@ -14,14 +13,6 @@ logger.addHandler(logging.StreamHandler()) | |||||
# By default, we should not output any log messages | # By default, we should not output any log messages | ||||
logger.setLevel(logging.CRITICAL) | logger.setLevel(logging.CRITICAL) | ||||
if sys.version_info[0]>2: | |||||
from abc import ABC, abstractmethod | |||||
else: | |||||
from abc import ABCMeta, abstractmethod | |||||
class ABC(object): # Provide Python27 compatibility | |||||
__slots__ = () | |||||
__metclass__ = ABCMeta | |||||
Py36 = (sys.version_info[:2] >= (3, 6)) | Py36 = (sys.version_info[:2] >= (3, 6)) | ||||
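With the Python 2 shim deleted, `ABC` and `abstractmethod` come straight from the standard library wherever they are needed (as the `from abc import ...` additions earlier in this diff show). A minimal stdlib-only sketch, with hypothetical class names:

from abc import ABC, abstractmethod

class Loader(ABC):                 # hypothetical example, not a Lark class
    @abstractmethod
    def load(self, path: str) -> str:
        ...

class FileLoader(Loader):
    def load(self, path: str) -> str:
        with open(path) as f:
            return f.read()

# Loader() would raise TypeError because load() is abstract;
# FileLoader is concrete and instantiates normally.
loader = FileLoader()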
@@ -120,28 +111,16 @@ class SerializeMemoizer(Serialize):
         return _deserialize(data, namespace, memo)
-try:
-    STRING_TYPE = basestring
-except NameError:   # Python 3
-    STRING_TYPE = str
 import types
 from functools import wraps, partial
-from contextlib import contextmanager
-Str = type(u'')
-try:
-    classtype = types.ClassType  # Python2
-except AttributeError:
-    classtype = type  # Python3
 def smart_decorator(f, create_decorator):
     if isinstance(f, types.FunctionType):
         return wraps(f)(create_decorator(f, True))
-    elif isinstance(f, (classtype, type, types.BuiltinFunctionType)):
+    elif isinstance(f, (type, types.BuiltinFunctionType)):
         return wraps(f)(create_decorator(f, False))
     elif isinstance(f, types.MethodType):
@@ -222,34 +201,12 @@ def dedup_list(l):
     return [x for x in l if not (x in dedup or dedup.add(x))]
-try:
-    from contextlib import suppress     # Python 3
-except ImportError:
-    @contextmanager
-    def suppress(*excs):
-        '''Catch and dismiss the provided exception
-        >>> x = 'hello'
-        >>> with suppress(IndexError):
-        ...     x = x[10]
-        >>> x
-        'hello'
-        '''
-        try:
-            yield
-        except excs:
-            pass
-try:
-    compare = cmp
-except NameError:
-    def compare(a, b):
-        if a == b:
-            return 0
-        elif a > b:
-            return 1
-        return -1
+def compare(a, b):
+    if a == b:
+        return 0
+    elif a > b:
+        return 1
+    return -1
 class Enumerator(Serialize):
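The deleted `suppress` fallback was only ever needed on Python 2; its own doctest describes the behaviour that `contextlib.suppress` (now imported directly in `lexer.py` and `load_grammar.py`) provides:

from contextlib import suppress

x = 'hello'
with suppress(IndexError):
    x = x[10]        # IndexError is swallowed, x keeps its old value
print(x)             # hello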