
Declare instance variable types at class level

gm/2021-09-23T00Z/github.com--lark-parser-lark/1.0b
Chanic Panic, 3 years ago
parent
commit
bca7c79b1f
5 changed files with 56 additions and 27 deletions
  1. +15 -8   lark/grammar.py
  2. +6  -2   lark/indenter.py
  3. +20 -10  lark/load_grammar.py
  4. +8  -3   lark/reconstruct.py
  5. +7  -4   lark/visitors.py

+15 -8  lark/grammar.py

@@ -8,10 +8,11 @@ from typing import Optional, Tuple
 class Symbol(Serialize):
     __slots__ = ('name',)

+    name: str
     is_term: bool = NotImplemented

-    def __init__(self, name):
-        self.name: str = name
+    def __init__(self, name: str) -> None:
+        self.name = name

     def __eq__(self, other):
         assert isinstance(other, Symbol), other
@@ -52,12 +53,18 @@ class NonTerminal(Symbol):
 class RuleOptions(Serialize):
     __serialize_fields__ = 'keep_all_tokens', 'expand1', 'priority', 'template_source', 'empty_indices'

-    def __init__(self, keep_all_tokens=False, expand1=False, priority=None, template_source=None, empty_indices=()):
-        self.keep_all_tokens: bool = keep_all_tokens
-        self.expand1: bool = expand1
-        self.priority: int = priority
-        self.template_source: Optional[str] = template_source
-        self.empty_indices: Tuple[bool, ...] = empty_indices
+    keep_all_tokens: bool
+    expand1: bool
+    priority: Optional[int]
+    template_source: Optional[str]
+    empty_indices: Tuple[bool, ...]
+
+    def __init__(self, keep_all_tokens: bool=False, expand1: bool=False, priority: Optional[int]=None, template_source: Optional[str]=None, empty_indices: Tuple[bool, ...]=()) -> None:
+        self.keep_all_tokens = keep_all_tokens
+        self.expand1 = expand1
+        self.priority = priority
+        self.template_source = template_source
+        self.empty_indices = empty_indices

     def __repr__(self):
         return 'RuleOptions(%r, %r, %r, %r)' % (
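
The grammar.py hunks above show the pattern this commit applies throughout: attribute types move out of the __init__ assignments and become bare annotations on the class body. A minimal standalone sketch of the same idea (not lark code; the class and attribute names are made up for illustration):

    from typing import Optional, Tuple

    class Options:
        # Bare annotations: visible to type checkers and IDEs without
        # reading __init__, and recorded in Options.__annotations__.
        keep_all_tokens: bool
        priority: Optional[int]
        empty_indices: Tuple[bool, ...]

        def __init__(self, keep_all_tokens: bool = False,
                     priority: Optional[int] = None,
                     empty_indices: Tuple[bool, ...] = ()) -> None:
            self.keep_all_tokens = keep_all_tokens
            self.priority = priority
            self.empty_indices = empty_indices

    # An annotation without a value creates no class attribute, so the
    # pattern also coexists with __slots__ (as in Symbol above).
    assert 'priority' in Options.__annotations__
    assert not hasattr(Options, 'priority')

Note that the RuleOptions hunk also corrects the annotation of priority from int to Optional[int], since its default is None.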


+6 -2  lark/indenter.py

@@ -13,9 +13,13 @@ class DedentError(LarkError):
     pass

 class Indenter(PostLex, ABC):
+
+    paren_level: Optional[int]
+    indent_level: Optional[List[int]]
+
     def __init__(self) -> None:
-        self.paren_level: Optional[int] = None
-        self.indent_level: Optional[List[int]] = None
+        self.paren_level = None
+        self.indent_level = None
         assert self.tab_len > 0

     def handle_NL(self, token: Token) -> Iterator[Token]:


+20 -10  lark/load_grammar.py

@@ -552,10 +552,15 @@ def nr_deepcopy_tree(t):


 class Grammar:
-    def __init__(self, rule_defs, term_defs, ignore):
-        self.term_defs: List[Tuple[str, Tuple[Tree, int]]] = term_defs
-        self.rule_defs: List[Tuple[str, Tuple[str, ...], Tree, RuleOptions]] = rule_defs
-        self.ignore: List[str] = ignore
+
+    term_defs: List[Tuple[str, Tuple[Tree, int]]]
+    rule_defs: List[Tuple[str, Tuple[str, ...], Tree, RuleOptions]]
+    ignore: List[str]
+
+    def __init__(self, rule_defs: List[Tuple[str, Tuple[str, ...], Tree, RuleOptions]], term_defs: List[Tuple[str, Tuple[Tree, int]]], ignore: List[str]) -> None:
+        self.term_defs = term_defs
+        self.rule_defs = rule_defs
+        self.ignore = ignore

     def compile(self, start, terminals_to_keep):
         # We change the trees in-place (to support huge grammars)
@@ -928,10 +933,15 @@ def _mangle_exp(exp, mangle):


 class GrammarBuilder:
-    def __init__(self, global_keep_all_tokens: bool=False, import_paths: List[Union[str, Callable]]=None, used_files: Dict[str, str]=None) -> None:
-        self.global_keep_all_tokens: bool = global_keep_all_tokens
-        self.import_paths: List[Union[str, Callable]] = import_paths or []
-        self.used_files: Dict[str, str] = used_files or {}
+
+    global_keep_all_tokens: bool
+    import_paths: List[Union[str, Callable]]
+    used_files: Dict[str, str]
+
+    def __init__(self, global_keep_all_tokens: bool=False, import_paths: Optional[List[Union[str, Callable]]]=None, used_files: Optional[Dict[str, str]]=None) -> None:
+        self.global_keep_all_tokens = global_keep_all_tokens
+        self.import_paths = import_paths or []
+        self.used_files = used_files or {}

         self._definitions = {}
         self._ignore_names = []
@@ -1072,7 +1082,7 @@ class GrammarBuilder:
         return name, exp, params, opts


-    def load_grammar(self, grammar_text: str, grammar_name: str="<?>", mangle: Callable[[str], str]=None) -> None:
+    def load_grammar(self, grammar_text: str, grammar_name: str="<?>", mangle: Optional[Callable[[str], str]]=None) -> None:
         tree = _parse_grammar(grammar_text, grammar_name)

         imports = {}
@@ -1135,7 +1145,7 @@ class GrammarBuilder:
         self._definitions = {k: v for k, v in self._definitions.items() if k in _used}


-    def do_import(self, dotted_path: Tuple[str, ...], base_path: Optional[str], aliases: Dict[str, str], base_mangle: Callable[[str], str]=None) -> None:
+    def do_import(self, dotted_path: Tuple[str, ...], base_path: Optional[str], aliases: Dict[str, str], base_mangle: Optional[Callable[[str], str]]=None) -> None:
         assert dotted_path
         mangle = _get_mangle('__'.join(dotted_path), aliases, base_mangle)
         grammar_path = os.path.join(*dotted_path) + EXT
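
Besides relocating the annotations, the load_grammar.py hunks wrap every parameter whose default is None in Optional[...]. A brief sketch of why (illustrative names, not lark's API): recent mypy releases no longer treat a bare annotation with a None default as implicitly optional.

    from typing import Callable, List, Optional

    # Before: "paths: List[Callable] = None" only passes if the checker
    # silently treats it as Optional, which current mypy defaults reject.
    def build(paths: Optional[List[Callable]] = None) -> List[Callable]:
        # "or []" normalises None to an empty list, mirroring the diff.
        return paths or []

    print(build())          # []
    print(build([print]))   # [<built-in function print>]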


+8 -3  lark/reconstruct.py

@@ -1,6 +1,6 @@
"""Reconstruct text from a tree, based on Lark grammar"""

from typing import List, Dict, Union, Callable, Iterable
from typing import List, Dict, Union, Callable, Iterable, Optional
import unicodedata

from .lark import Lark
@@ -23,6 +23,9 @@ def is_iter_empty(i):
 class WriteTokensTransformer(Transformer_InPlace):
     "Inserts discarded tokens into their correct place, according to the rules of grammar"

+    tokens: Dict[str, TerminalDef]
+    term_subs: Dict[str, Callable[[Symbol], str]]
+
     def __init__(self, tokens: Dict[str, TerminalDef], term_subs: Dict[str, Callable[[Symbol], str]]) -> None:
         self.tokens = tokens
         self.term_subs = term_subs
@@ -72,7 +75,9 @@ class Reconstructor(TreeMatcher):
         term_subs: a dictionary of [Terminal name as str] to [output text as str]
     """

-    def __init__(self, parser: Lark, term_subs: Dict[str, Callable[[Symbol], str]]=None) -> None:
+    write_tokens: WriteTokensTransformer
+
+    def __init__(self, parser: Lark, term_subs: Optional[Dict[str, Callable[[Symbol], str]]]=None) -> None:
         TreeMatcher.__init__(self, parser)

         self.write_tokens = WriteTokensTransformer({t.name:t for t in self.tokens}, term_subs or {})
@@ -89,7 +94,7 @@ class Reconstructor(TreeMatcher):
             else:
                 yield item

-    def reconstruct(self, tree: Tree, postproc: Callable[[Iterable[str]], Iterable[str]]=None, insert_spaces: bool=True) -> str:
+    def reconstruct(self, tree: Tree, postproc: Optional[Callable[[Iterable[str]], Iterable[str]]]=None, insert_spaces: bool=True) -> str:
         x = self._reconstruct(tree)
         if postproc:
             x = postproc(x)


+7 -4  lark/visitors.py

@@ -8,7 +8,7 @@ from .lexer import Token

 ###{standalone
 from inspect import getmembers, getmro
-from typing import TypeVar, Tuple, List, Callable, Generic, Type, Union
+from typing import TypeVar, Tuple, List, Callable, Generic, Type, Union, Optional

 _T = TypeVar('_T')
 _R = TypeVar('_R')
@@ -156,8 +156,11 @@ class Transformer(_Decoratable, ABC, Generic[_T]):


 class TransformerChain(Generic[_T]):
-    def __init__(self, *transformers):
-        self.transformers: Tuple[Transformer[_T], ...] = transformers
+
+    transformers: Tuple[Transformer[_T], ...]
+
+    def __init__(self, *transformers: Transformer[_T]) -> None:
+        self.transformers = transformers

     def transform(self, tree: Tree) -> _T:
         for t in self.transformers:
@@ -387,7 +390,7 @@ def _vargs_tree(f, data, children, meta):
     return f(Tree(data, children, meta))


-def v_args(inline: bool=False, meta: bool=False, tree: bool=False, wrapper: Callable[[_DECORATED], _DECORATED]=None) -> Callable[[_DECORATED], _DECORATED]:
+def v_args(inline: bool=False, meta: bool=False, tree: bool=False, wrapper: Optional[Callable]=None) -> Callable[[_DECORATED], _DECORATED]:
     """A convenience decorator factory for modifying the behavior of user-supplied visitor methods.

     By default, callback methods of transformers/visitors accept one argument - a list of the node's children.
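
The TransformerChain hunk in this file combines both ideas with a variadic __init__: each positional argument is typed individually via *transformers, while the class-level declaration records that the stored attribute is the collected tuple. A minimal sketch with made-up names, not lark code:

    from typing import Tuple

    class Chain:
        parts: Tuple[int, ...]       # what self.parts holds after __init__

        def __init__(self, *parts: int) -> None:
            # *parts arrives as a tuple; each argument is checked as an int.
            self.parts = parts

    print(Chain(1, 2, 3).parts)      # (1, 2, 3)
    # Chain(1, "2") would be flagged by a type checker: "2" is not an int.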

