
Apply changes and extend more names.

tags/gm/2021-09-23T00Z/github.com--lark-parser-lark/0.8.2
KmolYuan 5 years ago
commit 803b1fe79e
9 changed files with 335 additions and 23 deletions
  1. lark-stubs/__init__.pyi (+1, -1)
  2. lark-stubs/exceptions.pyi (+6, -1)
  3. lark-stubs/indenter.pyi (+53, -0)
  4. lark-stubs/lark.pyi (+49, -7)
  5. lark-stubs/lexer.pyi (+145, -7)
  6. lark-stubs/reconstruct.pyi (+24, -0)
  7. lark-stubs/tree.pyi (+39, -6)
  8. lark-stubs/visitors.pyi (+17, -1)
  9. lark/visitors.py (+1, -0)

lark-stubs/__init__.pyi (+1, -1)

@@ -6,4 +6,4 @@ from .exceptions import *
 from .lexer import *
 from .lark import *
 
-__version__: str
+__version__: str = ...

lark-stubs/exceptions.pyi (+6, -1)

@@ -23,7 +23,7 @@ class LexError(LarkError):
 class UnexpectedInput(LarkError):
     pos_in_stream: int
 
-    def get_context(self, text: str, span: int = 40):
+    def get_context(self, text: str, span: int = ...):
         ...
 
     def match_examples(
@@ -39,4 +39,9 @@ class UnexpectedToken(ParseError, UnexpectedInput):
 
 
 class UnexpectedCharacters(LexError, UnexpectedInput):
+    line: int
+    column: int
+
+
+class VisitError(LarkError):
     pass
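For reference, a minimal sketch of the error API these stubs describe, assuming lark 0.8.x (the one-line grammar is invented for illustration):

from lark import Lark
from lark.exceptions import UnexpectedCharacters

parser = Lark('start: "a"+', parser="lalr")
try:
    parser.parse("aab")  # 'b' matches no terminal, so lexing fails
except UnexpectedCharacters as err:
    print(err.line, err.column)    # plain ints, per the new annotations
    print(err.get_context("aab"))  # span keeps its runtime default, hence `span: int = ...`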

lark-stubs/indenter.pyi (+53, -0)

@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+
+from typing import Tuple, List, Iterator, Optional
+from abc import ABC, abstractmethod
+from .lexer import Token
+
+
+class Indenter(ABC):
+    paren_level: Optional[int]
+    indent_level: Optional[List[int]]
+
+    def __init__(self):
+        ...
+
+    def handle_NL(self, token: Token) -> Iterator[Token]:
+        ...
+
+    def process(self, stream: Iterator[Token]) -> Iterator[Token]:
+        ...
+
+    @property
+    def always_accept(self) -> Tuple[str]:
+        ...
+
+    @property
+    @abstractmethod
+    def NL_type(self) -> str:
+        ...
+
+    @property
+    @abstractmethod
+    def OPEN_PAREN_types(self) -> List[str]:
+        ...
+
+    @property
+    @abstractmethod
+    def CLOSE_PAREN_types(self) -> List[str]:
+        ...
+
+    @property
+    @abstractmethod
+    def INDENT_type(self) -> str:
+        ...
+
+    @property
+    @abstractmethod
+    def DEDENT_type(self) -> str:
+        ...
+
+    @property
+    @abstractmethod
+    def tab_len(self) -> int:
+        ...
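At runtime a concrete subclass fills in the abstract names with plain class attributes (which checks out against the read-only properties above). A sketch, assuming a hypothetical grammar that declares _NEWLINE, _INDENT and _DEDENT terminals:

from lark.indenter import Indenter

class BlockIndenter(Indenter):
    NL_type = '_NEWLINE'          # the newline terminal handle_NL() reacts to
    OPEN_PAREN_types = ['LPAR']   # tokens that suspend indentation tracking
    CLOSE_PAREN_types = ['RPAR']
    INDENT_type = '_INDENT'       # synthetic tokens emitted by process()
    DEDENT_type = '_DEDENT'
    tab_len = 8

# typically wired into the parser as Lark(grammar, postlex=BlockIndenter())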

lark-stubs/lark.pyi (+49, -7)

@@ -1,25 +1,57 @@
 # -*- coding: utf-8 -*-
 
-from typing import List, Dict, IO, Callable, Union, Optional, Literal
+from typing import (
+    TypeVar, Type, List, Dict, IO, Iterator, Callable, Union, Optional,
+    Literal, Protocol,
+)
 from .visitors import Transformer
-from .lexer import Lexer, Token
+from .lexer import Token, Lexer, TerminalDef
 from .tree import Tree
 
+_T = TypeVar('_T')
 _Start = Union[None, str, List[str]]
+_Parser = Literal["earley", "lalr", "cyk"]
+_Lexer = Union[Literal["auto", "standard", "contextual", "dynamic", "dynamic_complete"], Lexer]
+_Ambiguity = Literal["explicit", "resolve"]
+
+
+class PostLex(Protocol):
+
+    def process(self, stream: Iterator[Token]) -> Iterator[Token]:
+        ...
+
+
+class LarkOptions:
+    start: _Start
+    parser: _Parser
+    lexer: _Lexer
+    transformer: Optional[Transformer]
+    postlex: Optional[PostLex]
+    ambiguity: _Ambiguity
+    debug: bool
+    keep_all_tokens: bool
+    propagate_positions: bool
+    maybe_placeholders: bool
+    lexer_callbacks: Dict[str, Callable[[Token], Token]]
+    cache_grammar: bool
 
 
 class Lark:
+    source: str
+    options: LarkOptions
+    lexer: Lexer
+    terminals: List[TerminalDef]
 
     def __init__(
         self,
         grammar: Union[str, IO[str]],
         *,
         start: _Start = ...,
-        parser: Literal["earley", "lalr", "cyk"] = ...,
-        lexer: Optional[Lexer] = ...,
-        transformer: Optional[Transformer] = ...,
-        postlex: Optional[Literal["standard", "contextual"]] = ...,
-        ambiguity: Literal["explicit", "resolve"] = ...,
+        parser: _Parser = ...,
+        lexer: _Lexer = ...,
+        transformer: Optional[Transformer] = None,
+        postlex: Optional[PostLex] = None,
+        ambiguity: _Ambiguity = ...,
         debug: bool = False,
         keep_all_tokens: bool = False,
         propagate_positions: bool = False,
@@ -30,3 +62,13 @@ class Lark:
 
     def parse(self, text: str, start: _Start = None) -> Tree:
         ...
+
+    @classmethod
+    def open(cls: Type[_T], grammar_filename: str, rel_to: Optional[str] = None, **options) -> _T:
+        ...
+
+    def lex(self, text: str) -> Iterator[Token]:
+        ...
+
+    def get_terminal(self, name: str) -> TerminalDef:
+        ...
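A usage sketch of the constructor and the newly-stubbed members, assuming lark 0.8.x (the grammar is invented):

from lark import Lark

grammar = '''
start: WORD+
%import common.WORD
%ignore " "
'''

# parser/lexer arguments are constrained by the _Parser/_Lexer aliases
parser = Lark(grammar, parser="lalr", lexer="standard")
print(parser.options.keep_all_tokens)    # options is a LarkOptions
print(parser.get_terminal("WORD").name)  # a TerminalDef
for tok in parser.lex("hello world"):    # lex() yields Token values
    print(tok.type, repr(tok))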

lark-stubs/lexer.pyi (+145, -7)

@@ -1,64 +1,202 @@
 # -*- coding: utf-8 -*-
 
-from typing import Tuple, Iterator, Sized
+from typing import (
+    TypeVar, Type, Tuple, List, Dict, Iterator, Collection, Callable, Optional,
+    Pattern as REPattern,
+)
 from abc import abstractmethod, ABC
 
+_T = TypeVar('_T')
+_MRes = List[Tuple[REPattern, Dict[int, str]]]
+
 
 class Pattern(ABC):
+    value: str
+    flags: Collection[str]
+
+    def __init__(self, value: str, flags: Collection[str] = ...):
+        ...
+
+    @property
+    @abstractmethod
+    def type(self) -> str:
+        ...
 
     @abstractmethod
     def to_regexp(self) -> str:
         ...
 
+    @property
+    @abstractmethod
+    def min_width(self) -> int:
+        ...
+
+    @property
+    @abstractmethod
+    def max_width(self) -> int:
+        ...
+
 
 class PatternStr(Pattern):
+    type: str = ...
 
     def to_regexp(self) -> str:
         ...
 
+    @property
+    def min_width(self) -> int:
+        ...
+
+    @property
+    def max_width(self) -> int:
+        ...
+
 
 class PatternRE(Pattern):
+    type: str = ...
 
     def to_regexp(self) -> str:
         ...
 
+    @property
+    def min_width(self) -> int:
+        ...
+
+    @property
+    def max_width(self) -> int:
+        ...
+
 
 class TerminalDef:
     name: str
     pattern: Pattern
     priority: int
 
+    def __init__(self, name: str, pattern: Pattern, priority: int = ...):
+        ...
+
 
 class Token(str):
     type: str
     pos_in_stream: int
+    value: str
     line: int
     column: int
     end_line: int
     end_column: int
     end_pos: int
 
+    def update(self, type_: Optional[str] = None, value: Optional[str] = None) -> Token:
+        ...
+
+    @classmethod
+    def new_borrow_pos(cls: Type[_T], type_: str, value: str, borrow_t: Token) -> _T:
+        ...
+
+
+_Callback = Callable[[Token], Token]
+
+
+def build_mres(terminals: Collection[TerminalDef], match_whole: bool = False) -> _MRes:
+    ...
+
+
+class UnlessCallback:
+    mres: _MRes
+
+    def __init__(self, mres: _MRes):
+        ...
+
+    def __call__(self, t: Token) -> Token:
+        ...
+
+
+class CallChain:
+    callback1: _Callback
+    callback2: _Callback
+    cond: Callable[[Token], bool]
+
+    def __init__(
+        self,
+        callback1: _Callback,
+        callback2: _Callback,
+        cond: Callable[[Token], bool]
+    ):
+        ...
+
+
+class LineCounter:
+    newline_char: str
+    char_pos: int
+    line: int
+    column: int
+    line_start_pos: int
+
+    def __init__(self):
+        ...
+
+    def feed(self, token: str, test_newline: bool = True):
+        ...
+
+
+class _Lex:
+    lexer: TraditionalLexer
+
+    def __init__(self, lexer: TraditionalLexer, state: Optional[str] = None):
+        ...
+
+    def lex(
+        self,
+        stream: str,
+        newline_types: Collection[str],
+        ignore_types: Collection[str]
+    ) -> Iterator[Token]:
+        ...
+
 
 class Lexer(ABC):
-
-    @abstractmethod
-    def lex(self, stream: Sized) -> Iterator[Token]:
-        ...
+    lex: Callable[..., Iterator[Token]]
 
 
 class TraditionalLexer(Lexer):
+    terminals: Collection[TerminalDef]
+    ignore_types: List[str]
+    newline_types: List[str]
+    user_callbacks: Dict[str, _Callback]
+    callback: Dict[str, _Callback]
+    mres: _MRes
+
+    def __init__(
+        self,
+        terminals: Collection[TerminalDef],
+        ignore: Collection[str] = ...,
+        user_callbacks: Dict[str, _Callback] = ...
+    ):
+        ...
 
     def build(self) -> None:
         ...
 
-    def match(self, stream: str, pos: int) -> Tuple[str, str]:
+    def match(self, stream: str, pos: int) -> Optional[Tuple[str, str]]:
         ...
 
-    def lex(self, stream: Sized) -> Iterator[Token]:
+    def lex(self, stream: str) -> Iterator[Token]:
         ...
 
 
 class ContextualLexer(Lexer):
+    lexers: Dict[str, TraditionalLexer]
+    root_lexer: TraditionalLexer
+
+    def __init__(
+        self,
+        terminals: Collection[TerminalDef],
+        states: Dict[str, Collection[str]],
+        ignore: Collection[str] = ...,
+        always_accept: Collection[str] = ...,
+        user_callbacks: Dict[str, _Callback] = ...
+    ):
+        ...
 
-    def lex(self, stream: Sized) -> Iterator[Token]:
+    def lex(self, stream: str, get_parser_state: Callable[[], str]) -> Iterator[Token]:
         ...
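To make the Token additions concrete, a small sketch (lark 0.8.x assumed). Token subclasses str, so instances compare as strings while carrying the position fields typed above:

from lark.lexer import Token

tok = Token('WORD', 'hello')
print(tok == 'hello')                 # True: Token is a str subclass
print(tok.type)                       # 'WORD'
tok2 = tok.update(value='bye')        # returns a new Token, per the stub
tok3 = Token.new_borrow_pos('WORD', 'bye', tok)  # copies tok's position info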

lark-stubs/reconstruct.pyi (+24, -0)

@@ -1,7 +1,31 @@
 # -*- coding: utf-8 -*-
 
+from typing import List, Dict, Union
 from .lark import Lark
 from .tree import Tree
+from .visitors import Transformer_InPlace
+from .lexer import TerminalDef
+
+
+class WriteTokensTransformer(Transformer_InPlace):
+
+    def __init__(self, tokens: Dict[str, TerminalDef], term_subs):
+        ...
+
+
+class MatchTree(Tree):
+    pass
+
+
+class MakeMatchTree:
+    name: str
+    expansion: List[TerminalDef]
+
+    def __init__(self, name: str, expansion: List[TerminalDef]):
+        ...
+
+    def __call__(self, args: List[Union[str, Tree]]):
+        ...
 
 
 class Reconstructor:
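Of these, only Reconstructor is commonly used directly; WriteTokensTransformer, MatchTree and MakeMatchTree are its internals (note term_subs stays untyped above). A usage sketch, assuming lark 0.8.x behavior; reconstruction does not restore %ignore'd whitespace:

from lark import Lark
from lark.reconstruct import Reconstructor

grammar = '''
start: WORD "," WORD
%import common.WORD
%ignore " "
'''
parser = Lark(grammar)
tree = parser.parse("hello , world")
print(Reconstructor(parser).reconstruct(tree))  # -> "hello,world"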
lark-stubs/tree.pyi (+39, -6)

@@ -1,16 +1,31 @@
 # -*- coding: utf-8 -*-
 
-from typing import List, Callable, Iterator, Union, Optional
-from .lexer import Token
+from typing import List, Callable, Iterator, Union, Optional, Literal
+from .lexer import TerminalDef
+
+
+class Meta:
+    empty: bool
+    line: int
+    column: int
+    start_pos: int
+    end_line: int
+    end_column: int
+    end_pos: int
+    orig_expansion: List[TerminalDef]
+    match_tree: bool
 
 
 class Tree:
     data: str
     children: List[Union[str, Tree]]
-    meta: Token
+    meta: Meta
 
-    def __init__(self, data: str, children: List[Tree], meta: Optional[Token] = None):
+    def __init__(
+        self,
+        data: str,
+        children: List[Union[str, Tree]],
+        meta: Optional[Meta] = None
+    ):
         ...
 
     def pretty(self, indent_str: str = ...) -> str:
@@ -22,13 +37,22 @@ class Tree:
     def find_data(self, data: str) -> Iterator[Tree]:
         ...
 
+    def expand_kids_by_index(self, *indices: int) -> None:
+        ...
+
+    def scan_values(self, pred: Callable[[Union[str, Tree]], bool]):
+        ...
+
     def iter_subtrees(self) -> Iterator[Tree]:
         ...
 
     def iter_subtrees_topdown(self) -> Iterator[Tree]:
         ...
 
-    def __eq__(self, other: object) -> bool:
+    def copy(self) -> Tree:
+        ...
+
+    def set(self, data: str, children: List[Union[str, Tree]]) -> None:
         ...
 
     def __hash__(self) -> int:
@@ -37,3 +61,12 @@ class Tree:
 
 class SlottedTree(Tree):
     pass
+
+
+def pydot__tree_to_png(
+    tree: Tree,
+    filename: str,
+    rankdir: Literal["TB", "LR", "BT", "RL"] = ...,
+    **kwargs
+) -> None:
+    ...
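A quick sketch exercising the newly-typed Tree members on a hand-built tree (lark 0.8.x assumed):

from lark import Tree

t = Tree('start', [Tree('pair', ['a', 'b']), Tree('pair', ['c', 'd'])])
for sub in t.iter_subtrees_topdown():   # 'start' first, then each 'pair'
    print(sub.data)
print(list(t.scan_values(lambda v: isinstance(v, str))))  # ['a', 'b', 'c', 'd']
t.set('root', t.children)               # in-place update, returns None
t2 = t.copy()                           # shallow copy of the node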

lark-stubs/visitors.pyi (+17, -1)

@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
 
-from typing import TypeVar, List, Callable, Generic, Type
+from typing import TypeVar, Tuple, List, Callable, Generic, Type
 from abc import ABC
 from .tree import Tree
 
@@ -17,6 +17,22 @@ class Transformer(ABC, Generic[_T]):
     def transform(self, tree: Tree) -> _T:
         ...
 
+    def __mul__(self, other: Transformer[_T]) -> TransformerChain[_T]:
+        ...
+
+
+class TransformerChain(Generic[_T]):
+    transformers: Tuple[Transformer[_T], ...]
+
+    def __init__(self, *transformers: Transformer[_T]):
+        ...
+
+    def transform(self, tree: Tree) -> _T:
+        ...
+
+    def __mul__(self, other: Transformer[_T]) -> TransformerChain[_T]:
+        ...
+
 
 class Transformer_InPlace(Transformer):
     pass
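The `*` operator is what TransformerChain models: `a * b` returns a chain that applies a, then b. A sketch (invented grammar, lark 0.8.x assumed):

from lark import Lark, Transformer

grammar = '''
start: num+
num: INT
%import common.INT
%ignore " "
'''

class Doubler(Transformer):
    def num(self, children):
        return 2 * int(children[0])

class Summer(Transformer):
    def start(self, children):
        return sum(children)

tree = Lark(grammar).parse("1 2 3")
chain = Doubler() * Summer()   # __mul__ builds a TransformerChain
print(chain.transform(tree))   # 12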


lark/visitors.py (+1, -0)

@@ -48,6 +48,7 @@ class Transformer(_Decoratable):
     Can be used to implement map or reduce.
     """
+    __visit_tokens__ = True  # For backwards compatibility
 
     def __init__(self, visit_tokens=True):
         self.__visit_tokens__ = visit_tokens


