From 1d61de4f94b4af441b2f32c790db4290348d7a93 Mon Sep 17 00:00:00 2001 From: Erez Sh Date: Sun, 24 Jan 2021 23:42:14 -0500 Subject: [PATCH] Fix .pyi files --- lark-stubs/exceptions.pyi | 2 +- lark-stubs/indenter.pyi | 2 +- lark-stubs/lexer.pyi | 18 +++++++----------- lark-stubs/load_grammar.pyi | 10 ++++------ lark-stubs/parsers/lalr_puppet.pyi | 2 +- lark-stubs/reconstruct.pyi | 2 +- lark-stubs/tree.pyi | 6 +++--- lark-stubs/visitors.pyi | 7 ++++--- lark/lexer.py | 8 ++++++++ lark/load_grammar.py | 4 ++-- 10 files changed, 32 insertions(+), 29 deletions(-) diff --git a/lark-stubs/exceptions.pyi b/lark-stubs/exceptions.pyi index 08f7b3e..c2f3a6e 100644 --- a/lark-stubs/exceptions.pyi +++ b/lark-stubs/exceptions.pyi @@ -36,7 +36,7 @@ class UnexpectedInput(LarkError): pos_in_stream: int state: Any - def get_context(self, text: str, span: int = ...): + def get_context(self, text: str, span: int = ...) -> str: ... def match_examples( diff --git a/lark-stubs/indenter.pyi b/lark-stubs/indenter.pyi index 11a9058..f0cf8a2 100644 --- a/lark-stubs/indenter.pyi +++ b/lark-stubs/indenter.pyi @@ -9,7 +9,7 @@ class Indenter(ABC): paren_level: Optional[int] indent_level: Optional[List[int]] - def __init__(self): + def __init__(self) -> None: ... def handle_NL(self, token: Token) -> Iterator[Token]: diff --git a/lark-stubs/lexer.pyi b/lark-stubs/lexer.pyi index b13d771..fa42322 100644 --- a/lark-stubs/lexer.pyi +++ b/lark-stubs/lexer.pyi @@ -13,13 +13,9 @@ class Pattern(ABC): value: str flags: Collection[str] raw: str + type: str - def __init__(self, value: str, flags: Collection[str] = ...): - ... - - @property - @abstractmethod - def type(self) -> str: + def __init__(self, value: str, flags: Collection[str] = (), raw: str = None) -> None: ... @abstractmethod @@ -72,9 +68,9 @@ class TerminalDef: pattern: Pattern priority: int - def __init__(self, name: str, pattern: Pattern, priority: int = ...): + def __init__(self, name: str, pattern: Pattern, priority: int = ...) -> None: ... 
- + def user_repr(self) -> str: ... @@ -88,7 +84,7 @@ class Token(str): end_column: int end_pos: int - def __init__(self, type_: str, value: Any, pos_in_stream: int = None, line: int = None, column: int = None, end_line: int = None, end_column: int = None, end_pos: int = None): + def __init__(self, type_: str, value: Any, pos_in_stream: int = None, line: int = None, column: int = None, end_line: int = None, end_column: int = None, end_pos: int = None) -> None: ... def update(self, type_: Optional[str] = None, value: Optional[Any] = None) -> Token: @@ -130,7 +126,7 @@ class TraditionalLexer(Lexer): def __init__( self, conf: LexerConf - ): + ) -> None: ... def build(self) -> None: @@ -158,7 +154,7 @@ class ContextualLexer(Lexer): always_accept: Collection[str] = ..., user_callbacks: Dict[str, _Callback] = ..., g_regex_flags: int = ... - ): + ) -> None: ... def lex(self, stream: str, get_parser_state: Callable[[], str]) -> Iterator[Token]: diff --git a/lark-stubs/load_grammar.pyi b/lark-stubs/load_grammar.pyi index cadd657..bbd5751 100644 --- a/lark-stubs/load_grammar.pyi +++ b/lark-stubs/load_grammar.pyi @@ -14,15 +14,13 @@ class GrammarBuilder: global_keep_all_tokens: bool import_paths: List[Union[str, Callable]] - def __init__(self, global_keep_all_tokens=..., import_paths=...): ... + def __init__(self, global_keep_all_tokens: bool = False, import_paths: List[Union[str, Callable]] = None) -> None: ... - def load_grammar(self, grammar_text: str, grammar_name: str = ..., mangle: Callable[[str], str] = None): ... + def load_grammar(self, grammar_text: str, grammar_name: str = ..., mangle: Callable[[str], str] = None) -> None: ... def do_import(self, dotted_path: Tuple[str, ...], base_path: Optional[str], aliases: Dict[str, str], - base_mangle: Callable[[str], str] = None): ... + base_mangle: Callable[[str], str] = None) -> None: ... - def get_mangle(self, prefix: str, aliases: Dict[str, str], base_mangle: Callable[[str], str] = None): ... - - def check(self): ... 
+ def validate(self) -> None: ... def build(self) -> Grammar: ... diff --git a/lark-stubs/parsers/lalr_puppet.pyi b/lark-stubs/parsers/lalr_puppet.pyi index f35112a..7820dbd 100644 --- a/lark-stubs/parsers/lalr_puppet.pyi +++ b/lark-stubs/parsers/lalr_puppet.pyi @@ -9,7 +9,7 @@ class ParserPuppet(object): Accessible via `UnexpectedToken.puppet` (raised by the parser on token error) """ - def feed_token(self, token: Token): ... + def feed_token(self, token: Token) -> Any: ... def copy(self) -> ParserPuppet: ... diff --git a/lark-stubs/reconstruct.pyi b/lark-stubs/reconstruct.pyi index a30ef0d..2824fa7 100644 --- a/lark-stubs/reconstruct.pyi +++ b/lark-stubs/reconstruct.pyi @@ -11,7 +11,7 @@ from .lexer import TerminalDef class WriteTokensTransformer(Transformer_InPlace): - def __init__(self, tokens: Dict[str, TerminalDef], term_subs): + def __init__(self, tokens: Dict[str, TerminalDef], term_subs: Dict[str, Callable[[Symbol], str]] = ...): ... diff --git a/lark-stubs/tree.pyi b/lark-stubs/tree.pyi index a24ab35..98aadff 100644 --- a/lark-stubs/tree.pyi +++ b/lark-stubs/tree.pyi @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -from typing import List, Callable, Iterator, Union, Optional, Literal +from typing import List, Callable, Iterator, Union, Optional, Literal, Any from .lexer import TerminalDef class Meta: @@ -25,7 +25,7 @@ class Tree: data: str, children: List[Union[str, Tree]], meta: Optional[Meta] = None - ): + ) -> None: ... def pretty(self, indent_str: str = ...) -> str: ... @@ -40,7 +40,7 @@ class Tree: def expand_kids_by_index(self, *indices: int) -> None: ... - def scan_values(self, pred: Callable[[Union[str, Tree]], bool]): + def scan_values(self, pred: Callable[[Union[str, Tree]], bool]) -> Iterator[str]: ... 
def iter_subtrees(self) -> Iterator[Tree]: diff --git a/lark-stubs/visitors.pyi b/lark-stubs/visitors.pyi index ac414e6..86f15d8 100644 --- a/lark-stubs/visitors.pyi +++ b/lark-stubs/visitors.pyi @@ -11,7 +11,7 @@ _DECORATED = Union[_FUNC, type] class Transformer(ABC, Generic[_T]): - def __init__(self, visit_tokens: bool = True): + def __init__(self, visit_tokens: bool = True) -> None: ... def transform(self, tree: Tree) -> _T: @@ -24,7 +24,7 @@ class Transformer(ABC, Generic[_T]): class TransformerChain(Generic[_T]): transformers: Tuple[Transformer[_T], ...] - def __init__(self, *transformers: Transformer[_T]): + def __init__(self, *transformers: Transformer[_T]) -> None: ... def transform(self, tree: Tree) -> _T: @@ -75,7 +75,8 @@ _InterMethod = Callable[[Type[Interpreter], _T], _R] def v_args( inline: bool = False, meta: bool = False, - tree: bool = False + tree: bool = False, + wrapper: Callable = None ) -> Callable[[_DECORATED], _DECORATED]: ... diff --git a/lark/lexer.py b/lark/lexer.py index 72b299c..5a8d21a 100644 --- a/lark/lexer.py +++ b/lark/lexer.py @@ -11,6 +11,7 @@ from copy import copy class Pattern(Serialize): raw = None + type = None def __init__(self, value, flags=(), raw=None): self.value = value @@ -30,6 +31,12 @@ class Pattern(Serialize): def to_regexp(self): raise NotImplementedError() + def min_width(self): + raise NotImplementedError() + + def max_width(self): + raise NotImplementedError() + if Py36: # Python 3.6 changed syntax for flags in regular expression def _get_flags(self, value): @@ -44,6 +51,7 @@ class Pattern(Serialize): return value + class PatternStr(Pattern): __serialize_fields__ = 'value', 'flags' diff --git a/lark/load_grammar.py b/lark/load_grammar.py index 2b099ef..e14afb6 100644 --- a/lark/load_grammar.py +++ b/lark/load_grammar.py @@ -1016,7 +1016,7 @@ class GrammarBuilder: return name, exp, params, opts - def load_grammar(self, grammar_text, grammar_name="", mangle=None, dotted_path=None): + def load_grammar(self, 
grammar_text, grammar_name="", mangle=None): tree = _parse_grammar(grammar_text, grammar_name) imports = {} @@ -1093,7 +1093,7 @@ class GrammarBuilder: continue else: gb = GrammarBuilder(self.global_keep_all_tokens, self.import_paths) - gb.load_grammar(text, joined_path, mangle, dotted_path) + gb.load_grammar(text, joined_path, mangle) gb._remove_unused(map(mangle, aliases)) for name in gb._definitions: if name in self._definitions: