Browse Source

Fix .pyi files

tags/gm/2021-09-23T00Z/github.com--lark-parser-lark/0.11.2
Erez Sh 3 years ago
parent
commit
1d61de4f94
10 changed files with 32 additions and 29 deletions
  1. +1
    -1
      lark-stubs/exceptions.pyi
  2. +1
    -1
      lark-stubs/indenter.pyi
  3. +7
    -11
      lark-stubs/lexer.pyi
  4. +4
    -6
      lark-stubs/load_grammar.pyi
  5. +1
    -1
      lark-stubs/parsers/lalr_puppet.pyi
  6. +1
    -1
      lark-stubs/reconstruct.pyi
  7. +3
    -3
      lark-stubs/tree.pyi
  8. +4
    -3
      lark-stubs/visitors.pyi
  9. +8
    -0
      lark/lexer.py
  10. +2
    -2
      lark/load_grammar.py

+ 1
- 1
lark-stubs/exceptions.pyi View File

@@ -36,7 +36,7 @@ class UnexpectedInput(LarkError):
     pos_in_stream: int
     state: Any
 
-    def get_context(self, text: str, span: int = ...):
+    def get_context(self, text: str, span: int = ...) -> str:
         ...
 
     def match_examples(


+ 1
- 1
lark-stubs/indenter.pyi View File

@@ -9,7 +9,7 @@ class Indenter(ABC):
     paren_level: Optional[int]
     indent_level: Optional[List[int]]
 
-    def __init__(self):
+    def __init__(self) -> None:
         ...
 
     def handle_NL(self, token: Token) -> Iterator[Token]:


+ 7
- 11
lark-stubs/lexer.pyi View File

@@ -13,13 +13,9 @@ class Pattern(ABC):
     value: str
     flags: Collection[str]
     raw: str
+    type: str
 
-    def __init__(self, value: str, flags: Collection[str] = ...):
-        ...
-
-    @property
-    @abstractmethod
-    def type(self) -> str:
+    def __init__(self, value: str, flags: Collection[str] = (), raw: str = None) -> None:
         ...
 
     @abstractmethod
@@ -72,9 +68,9 @@ class TerminalDef:
     pattern: Pattern
     priority: int
 
-    def __init__(self, name: str, pattern: Pattern, priority: int = ...):
+    def __init__(self, name: str, pattern: Pattern, priority: int = ...) -> None:
         ...
     def user_repr(self) -> str: ...




@@ -88,7 +84,7 @@ class Token(str):
     end_column: int
     end_pos: int
 
-    def __init__(self, type_: str, value: Any, pos_in_stream: int = None, line: int = None, column: int = None, end_line: int = None, end_column: int = None, end_pos: int = None):
+    def __init__(self, type_: str, value: Any, pos_in_stream: int = None, line: int = None, column: int = None, end_line: int = None, end_column: int = None, end_pos: int = None) -> None:
         ...
 
     def update(self, type_: Optional[str] = None, value: Optional[Any] = None) -> Token:
@@ -130,7 +126,7 @@ class TraditionalLexer(Lexer):
     def __init__(
         self,
         conf: LexerConf
-    ):
+    ) -> None:
         ...
 
     def build(self) -> None:
@@ -158,7 +154,7 @@ class ContextualLexer(Lexer):
         always_accept: Collection[str] = ...,
         user_callbacks: Dict[str, _Callback] = ...,
         g_regex_flags: int = ...
-    ):
+    ) -> None:
         ...
 
     def lex(self, stream: str, get_parser_state: Callable[[], str]) -> Iterator[Token]:


+ 4
- 6
lark-stubs/load_grammar.pyi View File

@@ -14,15 +14,13 @@ class GrammarBuilder:
     global_keep_all_tokens: bool
     import_paths: List[Union[str, Callable]]
 
-    def __init__(self, global_keep_all_tokens=..., import_paths=...): ...
+    def __init__(self, global_keep_all_tokens: bool = False, import_paths: List[Union[str, Callable]] = None) -> None: ...
 
-    def load_grammar(self, grammar_text: str, grammar_name: str = ..., mangle: Callable[[str], str] = None): ...
+    def load_grammar(self, grammar_text: str, grammar_name: str = ..., mangle: Callable[[str], str] = None) -> None: ...
 
     def do_import(self, dotted_path: Tuple[str, ...], base_path: Optional[str], aliases: Dict[str, str],
-                  base_mangle: Callable[[str], str] = None): ...
+                  base_mangle: Callable[[str], str] = None) -> None: ...
 
-    def get_mangle(self, prefix: str, aliases: Dict[str, str], base_mangle: Callable[[str], str] = None): ...
-
-    def check(self): ...
+    def validate(self) -> None: ...
 
     def build(self) -> Grammar: ...

+ 1
- 1
lark-stubs/parsers/lalr_puppet.pyi View File

@@ -9,7 +9,7 @@ class ParserPuppet(object):
 
     Accessible via `UnexpectedToken.puppet` (raised by the parser on token error)
     """
-    def feed_token(self, token: Token): ...
+    def feed_token(self, token: Token) -> Any: ...
 
     def copy(self) -> ParserPuppet: ...




+ 1
- 1
lark-stubs/reconstruct.pyi View File

@@ -11,7 +11,7 @@ from .lexer import TerminalDef
 
 class WriteTokensTransformer(Transformer_InPlace):
 
-    def __init__(self, tokens: Dict[str, TerminalDef], term_subs):
+    def __init__(self, tokens: Dict[str, TerminalDef], term_subs: Dict[str, Callable[[Symbol], str]] = ...):
         ...






+ 3
- 3
lark-stubs/tree.pyi View File

@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
 
-from typing import List, Callable, Iterator, Union, Optional, Literal
+from typing import List, Callable, Iterator, Union, Optional, Literal, Any
 from .lexer import TerminalDef
 
 class Meta:
@@ -25,7 +25,7 @@ class Tree:
         data: str,
         children: List[Union[str, Tree]],
         meta: Optional[Meta] = None
-    ):
+    ) -> None:
         ...
 
     def pretty(self, indent_str: str = ...) -> str:
@@ -40,7 +40,7 @@ class Tree:
     def expand_kids_by_index(self, *indices: int) -> None:
         ...
 
-    def scan_values(self, pred: Callable[[Union[str, Tree]], bool]):
+    def scan_values(self, pred: Callable[[Union[str, Tree]], bool]) -> List[str]:
         ...
 
     def iter_subtrees(self) -> Iterator[Tree]:


+ 4
- 3
lark-stubs/visitors.pyi View File

@@ -11,7 +11,7 @@ _DECORATED = Union[_FUNC, type]
 
 class Transformer(ABC, Generic[_T]):
 
-    def __init__(self, visit_tokens: bool = True):
+    def __init__(self, visit_tokens: bool = True) -> None:
         ...
 
     def transform(self, tree: Tree) -> _T:
@@ -24,7 +24,7 @@ class Transformer(ABC, Generic[_T]):
 class TransformerChain(Generic[_T]):
     transformers: Tuple[Transformer[_T], ...]
 
-    def __init__(self, *transformers: Transformer[_T]):
+    def __init__(self, *transformers: Transformer[_T]) -> None:
         ...
 
     def transform(self, tree: Tree) -> _T:
@@ -75,7 +75,8 @@ _InterMethod = Callable[[Type[Interpreter], _T], _R]
 def v_args(
     inline: bool = False,
     meta: bool = False,
-    tree: bool = False
+    tree: bool = False,
+    wrapper: Callable = None
 ) -> Callable[[_DECORATED], _DECORATED]:
     ...




+ 8
- 0
lark/lexer.py View File

@@ -11,6 +11,7 @@ from copy import copy
 
 class Pattern(Serialize):
     raw = None
+    type = None
 
     def __init__(self, value, flags=(), raw=None):
         self.value = value
@@ -30,6 +31,12 @@ class Pattern(Serialize):
     def to_regexp(self):
         raise NotImplementedError()
 
+    def min_width(self):
+        raise NotImplementedError()
+
+    def max_width(self):
+        raise NotImplementedError()
+
     if Py36:
         # Python 3.6 changed syntax for flags in regular expression
         def _get_flags(self, value):
@@ -44,6 +51,7 @@ class Pattern(Serialize):
         return value
 
 
+
 class PatternStr(Pattern):
     __serialize_fields__ = 'value', 'flags'




+ 2
- 2
lark/load_grammar.py View File

@@ -1016,7 +1016,7 @@ class GrammarBuilder:
         return name, exp, params, opts
 
 
-    def load_grammar(self, grammar_text, grammar_name="<?>", mangle=None, dotted_path=None):
+    def load_grammar(self, grammar_text, grammar_name="<?>", mangle=None):
         tree = _parse_grammar(grammar_text, grammar_name)
 
         imports = {}
@@ -1093,7 +1093,7 @@ class GrammarBuilder:
                 continue
             else:
                 gb = GrammarBuilder(self.global_keep_all_tokens, self.import_paths)
-                gb.load_grammar(text, joined_path, mangle)
+                gb.load_grammar(text, joined_path, mangle)
                 gb._remove_unused(map(mangle, aliases))
                 for name in gb._definitions:
                     if name in self._definitions:


Loading…
Cancel
Save