This repo contains the code used to mirror other repos, as well as the code that is being mirrored.


# -*- coding: utf-8 -*-

from typing import (
    TypeVar, Type, List, Dict, IO, Iterator, Callable, Union, Optional,
    Literal, Protocol, Tuple, Iterable,
)

from .visitors import Transformer
from .lexer import Token, Lexer, TerminalDef
from .tree import Tree
from .exceptions import UnexpectedInput
from .load_grammar import Grammar

_T = TypeVar('_T')


class PostLex(Protocol):

    def process(self, stream: Iterator[Token]) -> Iterator[Token]:
        ...

    always_accept: Iterable[str]


class LarkOptions:
    start: List[str]
    parser: str
    lexer: str
    transformer: Optional[Transformer]
    postlex: Optional[PostLex]
    ambiguity: str
    regex: bool
    debug: bool
    keep_all_tokens: bool
    propagate_positions: bool
    maybe_placeholders: bool
    lexer_callbacks: Dict[str, Callable[[Token], Token]]
    cache: Union[bool, str]
    g_regex_flags: int
    use_bytes: bool
    import_paths: List[Union[str, Callable[[Union[None, str, PackageResource], str], Tuple[str, str]]]]
    source_path: Optional[str]


class PackageResource(object):
    pkg_name: str
    path: str

    def __init__(self, pkg_name: str, path: str): ...


class FromPackageLoader:
    def __init__(self, pkg_name: str, search_paths: Tuple[str, ...] = ...): ...

    def __call__(self, base_path: Union[None, str, PackageResource], grammar_path: str) -> Tuple[PackageResource, str]: ...
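
# Illustrative note: a FromPackageLoader instance matches the callable shape expected by
# LarkOptions.import_paths above, so it can be passed as a custom grammar loader, e.g.
# Lark(grammar, import_paths=[FromPackageLoader("my_pkg", ("grammars",))]);
# the package name and search path in that call are hypothetical.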


class Lark:
    source_path: str
    source_grammar: str
    grammar: Grammar
    options: LarkOptions
    lexer: Lexer
    terminals: List[TerminalDef]

    def __init__(
        self,
        grammar: Union[Grammar, str, IO[str]],
        *,
        start: Union[None, str, List[str]] = "start",
        parser: Literal["earley", "lalr", "cyk", "auto"] = "auto",
        lexer: Union[Literal["auto", "standard", "contextual", "dynamic", "dynamic_complete"], Type[Lexer]] = "auto",
        transformer: Optional[Transformer] = None,
        postlex: Optional[PostLex] = None,
        ambiguity: Literal["explicit", "resolve"] = "resolve",
        regex: bool = False,
        debug: bool = False,
        keep_all_tokens: bool = False,
        propagate_positions: bool = False,
        maybe_placeholders: bool = False,
        lexer_callbacks: Optional[Dict[str, Callable[[Token], Token]]] = None,
        cache: Union[bool, str] = False,
        g_regex_flags: int = ...,
        use_bytes: bool = False,
        import_paths: List[Union[str, Callable[[Union[None, str, PackageResource], str], Tuple[str, str]]]] = ...,
        source_path: Optional[str] = None,
    ):
        ...

    def parse(self, text: str, start: Optional[str] = None, on_error: Callable[[UnexpectedInput], bool] = None) -> Tree:
        ...

    @classmethod
    def open(cls: Type[_T], grammar_filename: str, rel_to: Optional[str] = None, **options) -> _T:
        ...

    @classmethod
    def open_from_package(cls: Type[_T], package: str, grammar_path: str, search_paths: Tuple[str, ...] = ..., **options) -> _T:
        ...

    def lex(self, text: str, dont_ignore: bool = False) -> Iterator[Token]:
        ...

    def get_terminal(self, name: str) -> TerminalDef:
        ...
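
The stub above only declares the interface. As a minimal usage sketch against the real lark package (the grammar and input below are invented for illustration, not taken from this repo), constructing a parser and parsing a string looks roughly like this:

from lark import Lark

# Toy grammar, made up for this example: two numbers joined by "+".
parser = Lark(r"""
    start: NUMBER "+" NUMBER
    NUMBER: /[0-9]+/
    %import common.WS
    %ignore WS
""", parser="lalr")            # "lalr" is one of the parser values declared in the stub

tree = parser.parse("1 + 2")   # returns a Tree, matching the parse() signature above
print(tree.pretty())           # prints the start rule with its two NUMBER tokens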