Annotations for public API
@@ -0,0 +1,19 @@
name: Python type check
on: [push, pull_request]
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
    - uses: actions/checkout@v1
    - name: Download submodules
      run: git submodule update --init --recursive
    - name: Set up Python
      uses: actions/setup-python@v1
      with:
        python-version: 3.8
    - name: Install dependencies
      run: |
        python -m pip install --upgrade pip
        pip install mypy
    - name: Lint with mypy
      run: mypy -m lark-stubs || true
@@ -7,5 +7,6 @@ tags
 .idea
 .ropeproject
 .cache
+.mypy_cache
 /dist
 /build
@@ -0,0 +1,9 @@
# -*- coding: utf-8 -*-

from .tree import *
from .visitors import *
from .exceptions import *
from .lexer import *
from .lark import *

__version__: str = ...
@@ -0,0 +1,47 @@
# -*- coding: utf-8 -*-

from typing import Dict, Iterable, Callable
from .tree import Tree


class LarkError(Exception):
    pass


class GrammarError(LarkError):
    pass


class ParseError(LarkError):
    pass


class LexError(LarkError):
    pass


class UnexpectedInput(LarkError):
    pos_in_stream: int

    def get_context(self, text: str, span: int = ...):
        ...

    def match_examples(
        self,
        parse_fn: Callable[[str], Tree],
        examples: Dict[str, Iterable[str]]
    ):
        ...


class UnexpectedToken(ParseError, UnexpectedInput):
    pass


class UnexpectedCharacters(LexError, UnexpectedInput):
    line: int
    column: int


class VisitError(LarkError):
    pass
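For context only, a rough usage sketch of the error API these stubs type; the toy grammar, the input, and the "missing b" label below are invented for the example and are not part of the change.

from lark import Lark, UnexpectedInput

parser = Lark('start: "a" "b"', parser="lalr")   # hypothetical toy grammar

try:
    parser.parse("ax")
except UnexpectedInput as u:
    # Show the region of the input text around the failure.
    print(u.get_context("ax", span=20))
    # Classify the failure against known bad examples; returns the matching label.
    label = u.match_examples(parser.parse, {
        "missing b": ["a", "ax"],
    })
    print(label)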
@@ -0,0 +1,53 @@
# -*- coding: utf-8 -*-

from typing import Tuple, List, Iterator, Optional
from abc import ABC, abstractmethod
from .lexer import Token


class Indenter(ABC):
    paren_level: Optional[int]
    indent_level: Optional[List[int]]

    def __init__(self):
        ...

    def handle_NL(self, token: Token) -> Iterator[Token]:
        ...

    def process(self, stream: Iterator[Token]) -> Iterator[Token]:
        ...

    @property
    def always_accept(self) -> Tuple[str]:
        ...

    @property
    @abstractmethod
    def NL_type(self) -> str:
        ...

    @property
    @abstractmethod
    def OPEN_PAREN_types(self) -> List[str]:
        ...

    @property
    @abstractmethod
    def CLOSE_PAREN_types(self) -> List[str]:
        ...

    @property
    @abstractmethod
    def INDENT_type(self) -> str:
        ...

    @property
    @abstractmethod
    def DEDENT_type(self) -> str:
        ...

    @property
    @abstractmethod
    def tab_len(self) -> int:
        ...
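As a usage sketch (not part of the diff): a concrete postlexer subclasses Indenter and fills in the abstract members with terminal names from its own grammar. The terminal names and the grammar variable below are hypothetical.

from lark import Lark
from lark.indenter import Indenter

class MyIndenter(Indenter):
    # These terminal names are assumptions; they must exist in your grammar.
    NL_type = "_NEWLINE"
    OPEN_PAREN_types = ["LPAR"]
    CLOSE_PAREN_types = ["RPAR"]
    INDENT_type = "_INDENT"
    DEDENT_type = "_DEDENT"
    tab_len = 8

# parser = Lark(my_grammar, parser="lalr", postlex=MyIndenter())  # my_grammar is hypothetical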
@@ -0,0 +1,74 @@
# -*- coding: utf-8 -*-

from typing import (
    TypeVar, Type, List, Dict, IO, Iterator, Callable, Union, Optional,
    Literal, Protocol,
)
from .visitors import Transformer
from .lexer import Token, Lexer, TerminalDef
from .tree import Tree

_T = TypeVar('_T')

_Start = Union[None, str, List[str]]
_Parser = Literal["earley", "lalr", "cyk"]
_Lexer = Union[Literal["auto", "standard", "contextual", "dynamic", "dynamic_complete"], Lexer]
_Ambiguity = Literal["explicit", "resolve"]


class PostLex(Protocol):

    def process(self, stream: Iterator[Token]) -> Iterator[Token]:
        ...


class LarkOptions:
    start: _Start
    parser: _Parser
    lexer: _Lexer
    transformer: Optional[Transformer]
    postlex: Optional[PostLex]
    ambiguity: _Ambiguity
    debug: bool
    keep_all_tokens: bool
    propagate_positions: bool
    maybe_placeholders: bool
    lexer_callbacks: Dict[str, Callable[[Token], Token]]
    cache_grammar: bool


class Lark:
    source: str
    options: LarkOptions
    lexer: Lexer
    terminals: List[TerminalDef]

    def __init__(
        self,
        grammar: Union[str, IO[str]],
        *,
        start: _Start = ...,
        parser: _Parser = ...,
        lexer: _Lexer = ...,
        transformer: Optional[Transformer] = None,
        postlex: Optional[PostLex] = None,
        ambiguity: _Ambiguity = ...,
        debug: bool = False,
        keep_all_tokens: bool = False,
        propagate_positions: bool = False,
        maybe_placeholders: bool = False,
        lexer_callbacks: Dict[str, Callable[[Token], Token]]
    ):
        ...

    def parse(self, text: str, start: _Start = None) -> Tree:
        ...

    @classmethod
    def open(cls: Type[_T], grammar_filename: str, rel_to: Optional[str] = None, **options) -> _T:
        ...

    def lex(self, text: str) -> Iterator[Token]:
        ...

    def get_terminal(self, name: str) -> TerminalDef:
        ...
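A minimal, illustrative sketch of the constructor and parse() signatures stubbed above; the grammar is invented for the example.

from lark import Lark

parser = Lark(r"""
start: value+
value: NUMBER | STRING
NUMBER: /\d+/
STRING: /"[^"]*"/
%ignore /\s+/
""", parser="lalr", keep_all_tokens=False)

# parse() returns a Tree, matching the stubbed return type.
tree = parser.parse('1 "two" 3')
print(tree.pretty())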
@@ -0,0 +1,144 @@
# -*- coding: utf-8 -*-

from typing import (
    TypeVar, Type, Tuple, List, Dict, Iterator, Collection, Callable, Optional,
    Pattern as REPattern,
)
from abc import abstractmethod, ABC

_T = TypeVar('_T')


class Pattern(ABC):
    value: str
    flags: Collection[str]

    def __init__(self, value: str, flags: Collection[str] = ...):
        ...

    @property
    @abstractmethod
    def type(self) -> str:
        ...

    @abstractmethod
    def to_regexp(self) -> str:
        ...

    @property
    @abstractmethod
    def min_width(self) -> int:
        ...

    @property
    @abstractmethod
    def max_width(self) -> int:
        ...


class PatternStr(Pattern):
    type: str = ...

    def to_regexp(self) -> str:
        ...

    @property
    def min_width(self) -> int:
        ...

    @property
    def max_width(self) -> int:
        ...


class PatternRE(Pattern):
    type: str = ...

    def to_regexp(self) -> str:
        ...

    @property
    def min_width(self) -> int:
        ...

    @property
    def max_width(self) -> int:
        ...


class TerminalDef:
    name: str
    pattern: Pattern
    priority: int

    def __init__(self, name: str, pattern: Pattern, priority: int = ...):
        ...


class Token(str):
    type: str
    pos_in_stream: int
    value: str
    line: int
    column: int
    end_line: int
    end_column: int
    end_pos: int

    def update(self, type_: Optional[str] = None, value: Optional[str] = None) -> Token:
        ...

    @classmethod
    def new_borrow_pos(cls: Type[_T], type_: str, value: str, borrow_t: Token) -> _T:
        ...


_Callback = Callable[[Token], Token]


class Lexer(ABC):
    lex: Callable[..., Iterator[Token]]


class TraditionalLexer(Lexer):
    terminals: Collection[TerminalDef]
    ignore_types: List[str]
    newline_types: List[str]
    user_callbacks: Dict[str, _Callback]
    callback: Dict[str, _Callback]
    mres: List[Tuple[REPattern, Dict[int, str]]]

    def __init__(
        self,
        terminals: Collection[TerminalDef],
        ignore: Collection[str] = ...,
        user_callbacks: Dict[str, _Callback] = ...
    ):
        ...

    def build(self) -> None:
        ...

    def match(self, stream: str, pos: int) -> Optional[Tuple[str, str]]:
        ...

    def lex(self, stream: str) -> Iterator[Token]:
        ...


class ContextualLexer(Lexer):
    lexers: Dict[str, TraditionalLexer]
    root_lexer: TraditionalLexer

    def __init__(
        self,
        terminals: Collection[TerminalDef],
        states: Dict[str, Collection[str]],
        ignore: Collection[str] = ...,
        always_accept: Collection[str] = ...,
        user_callbacks: Dict[str, _Callback] = ...
    ):
        ...

    def lex(self, stream: str, get_parser_state: Callable[[], str]) -> Iterator[Token]:
        ...
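For illustration, a small sketch of the Token and TerminalDef surface typed above, using an invented grammar; the WORD terminal is an assumption of this example.

from lark import Lark, Token

parser = Lark(r"""
start: WORD+
WORD: /[a-z]+/
%ignore " "
""", parser="lalr")

for tok in parser.parse("hello world").children:
    assert isinstance(tok, Token)                   # Token subclasses str
    print(tok.type, repr(tok), tok.line, tok.column)

word_def = parser.get_terminal("WORD")              # a TerminalDef, as stubbed above
print(word_def.name, word_def.pattern.to_regexp())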
@@ -0,0 +1,37 @@
# -*- coding: utf-8 -*-

from typing import List, Dict, Union
from .lark import Lark
from .tree import Tree
from .visitors import Transformer_InPlace
from .lexer import TerminalDef


class WriteTokensTransformer(Transformer_InPlace):

    def __init__(self, tokens: Dict[str, TerminalDef], term_subs):
        ...


class MatchTree(Tree):
    pass


class MakeMatchTree:
    name: str
    expansion: List[TerminalDef]

    def __init__(self, name: str, expansion: List[TerminalDef]):
        ...

    def __call__(self, args: List[Union[str, Tree]]):
        ...


class Reconstructor:

    def __init__(self, parser: Lark):
        ...

    def reconstruct(self, tree: Tree) -> str:
        ...
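A rough round-trip sketch of Reconstructor. The grammar is invented, and how faithfully ignored whitespace is restored depends on the grammar, so treat this as illustrative only rather than guaranteed output.

from lark import Lark
from lark.reconstruct import Reconstructor

grammar = r"""
start: item+
item: "(" NAME ")"
NAME: /[a-z]+/
%ignore " "
"""

parser = Lark(grammar, parser="lalr")
tree = parser.parse("(a) (b)")
# Turn the parse tree back into text using the same grammar.
print(Reconstructor(parser).reconstruct(tree))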
@@ -0,0 +1,72 @@
# -*- coding: utf-8 -*-

from typing import List, Callable, Iterator, Union, Optional, Literal
from .lexer import TerminalDef


class Meta:
    empty: bool
    line: int
    column: int
    start_pos: int
    end_line: int
    end_column: int
    end_pos: int
    orig_expansion: List[TerminalDef]
    match_tree: bool


class Tree:
    data: str
    children: List[Union[str, Tree]]
    meta: Meta

    def __init__(
        self,
        data: str,
        children: List[Union[str, Tree]],
        meta: Optional[Meta] = None
    ):
        ...

    def pretty(self, indent_str: str = ...) -> str:
        ...

    def find_pred(self, pred: Callable[[Tree], bool]) -> Iterator[Tree]:
        ...

    def find_data(self, data: str) -> Iterator[Tree]:
        ...

    def expand_kids_by_index(self, *indices: int) -> None:
        ...

    def scan_values(self, pred: Callable[[Union[str, Tree]], bool]):
        ...

    def iter_subtrees(self) -> Iterator[Tree]:
        ...

    def iter_subtrees_topdown(self) -> Iterator[Tree]:
        ...

    def copy(self) -> Tree:
        ...

    def set(self, data: str, children: List[Union[str, Tree]]) -> None:
        ...

    def __hash__(self) -> int:
        ...


class SlottedTree(Tree):
    pass


def pydot__tree_to_png(
    tree: Tree,
    filename: str,
    rankdir: Literal["TB", "LR", "BT", "RL"] = ...,
    **kwargs
) -> None:
    ...
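For illustration, a small sketch of the Tree traversal helpers typed above; the grammar is made up for the example.

from lark import Lark

parser = Lark(r"""
start: pair+
pair: KEY "=" VALUE
KEY: /[a-z]+/
VALUE: /\d+/
%ignore " "
""", parser="lalr")

tree = parser.parse("a=1 b=2")
for pair in tree.find_data("pair"):          # every subtree whose .data == "pair"
    key, value = pair.children
    print(key, value)
print(len(list(tree.iter_subtrees())))       # all subtrees, bottom-up
print(tree.pretty())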
@@ -0,0 +1,98 @@
# -*- coding: utf-8 -*-

from typing import TypeVar, Tuple, List, Callable, Generic, Type
from abc import ABC
from .tree import Tree

_T = TypeVar('_T')
_R = TypeVar('_R')
_FUNC = Callable[..., _T]


class Transformer(ABC, Generic[_T]):

    def __init__(self, visit_tokens: bool = True):
        ...

    def transform(self, tree: Tree) -> _T:
        ...

    def __mul__(self, other: Transformer[_T]) -> TransformerChain[_T]:
        ...


class TransformerChain(Generic[_T]):
    transformers: Tuple[Transformer[_T], ...]

    def __init__(self, *transformers: Transformer[_T]):
        ...

    def transform(self, tree: Tree) -> _T:
        ...

    def __mul__(self, other: Transformer[_T]) -> TransformerChain[_T]:
        ...


class Transformer_InPlace(Transformer):
    pass


class VisitorBase:
    pass


class Visitor(VisitorBase, ABC, Generic[_T]):

    def visit(self, tree: Tree) -> Tree:
        ...

    def visit_topdown(self, tree: Tree) -> Tree:
        ...


class Visitor_Recursive(VisitorBase):

    def visit(self, tree: Tree) -> Tree:
        ...

    def visit_topdown(self, tree: Tree) -> Tree:
        ...


class Interpreter(ABC, Generic[_T]):

    def visit(self, tree: Tree) -> _T:
        ...

    def visit_children(self, tree: Tree) -> List[_T]:
        ...


_InterMethod = Callable[[Type[Interpreter], _T], _R]


def v_args(
    inline: bool = False,
    meta: bool = False,
    tree: bool = False
) -> Callable[[_FUNC], _FUNC]:
    ...


def visit_children_decor(func: _InterMethod) -> _InterMethod:
    ...


class Discard(Exception):
    pass


# Deprecated
class InlineTransformer:
    pass


# Deprecated
def inline_args(obj: _FUNC) -> _FUNC:
    ...
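A brief sketch of Transformer and v_args as typed above; the grammar, rule names, and Calc class are illustrative only.

from lark import Lark, Transformer, v_args

parser = Lark(r"""
start: add
add: NUMBER ("+" NUMBER)*
NUMBER: /\d+/
%ignore " "
""", parser="lalr")

@v_args(inline=True)                 # children are passed as positional arguments
class Calc(Transformer):
    def add(self, *numbers):
        return sum(int(n) for n in numbers)

    def start(self, value):
        return value

print(Calc().transform(parser.parse("1 + 2 + 3")))   # -> 6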
@@ -1,7 +1,8 @@ | |||||
from .tree import Tree | from .tree import Tree | ||||
from .visitors import Transformer, Visitor, v_args, Discard | from .visitors import Transformer, Visitor, v_args, Discard | ||||
from .visitors import InlineTransformer, inline_args # XXX Deprecated | from .visitors import InlineTransformer, inline_args # XXX Deprecated | ||||
from .exceptions import ParseError, LexError, GrammarError, UnexpectedToken, UnexpectedInput, UnexpectedCharacters | |||||
from .exceptions import (ParseError, LexError, GrammarError, UnexpectedToken, | |||||
UnexpectedInput, UnexpectedCharacters, LarkError) | |||||
from .lexer import Token | from .lexer import Token | ||||
from .lark import Lark | from .lark import Lark | ||||
@@ -35,6 +35,8 @@ class _Decoratable:
             setattr(cls, name, decorator(value, static=static, **kwargs))
         return cls

+    def __class_getitem__(cls, _):
+        return cls

 class Transformer(_Decoratable):
@@ -45,8 +47,8 @@ class Transformer(_Decoratable):
     Can be used to implement map or reduce.
     """
     __visit_tokens__ = True   # For backwards compatibility
     def __init__(self, visit_tokens=True):
         self.__visit_tokens__ = visit_tokens
@@ -170,6 +172,9 @@ class VisitorBase:
         "Default operation on tree (for override)"
         return tree

+    def __class_getitem__(cls, _):
+        return cls
+
 class Visitor(VisitorBase):
     """Bottom-up visitor, non-recursive
@@ -178,7 +183,6 @@ class Visitor(VisitorBase):
     Calls its methods (provided by user via inheritance) according to tree.data
     """
     def visit(self, tree):
         for subtree in tree.iter_subtrees():
             self._call_userfunc(subtree)
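The two __class_getitem__ additions above let runtime code use the subscripted generic forms that the stubs declare (Transformer is Generic[_T] in lark-stubs) without raising a TypeError. A small sketch; the CountChildren class is invented for the example.

from lark import Transformer

# Works at runtime because __class_getitem__ simply returns the class,
# while mypy reads the Generic[_T] declaration from lark-stubs.
class CountChildren(Transformer[int]):
    def start(self, children):
        return len(children)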
@@ -1,17 +1,17 @@
 import re
-from setuptools import setup
+from setuptools import find_packages, setup

 __version__ ,= re.findall('__version__ = "(.*)"', open('lark/__init__.py').read())

 setup(
     name = "lark-parser",
     version = __version__,
-    packages = ['lark', 'lark.parsers', 'lark.tools', 'lark.grammars'],
+    packages = ['lark', 'lark.parsers', 'lark.tools', 'lark.grammars', 'lark-stubs'],
     requires = [],
     install_requires = [],
-    package_data = { '': ['*.md', '*.lark'] },
+    package_data = {'': ['*.md', '*.lark'], 'lark-stubs': ['*.pyi']},
     test_suite = 'tests.__main__',