This repo contains code to mirror other repos. It also contains the code that is being mirrored.
Nie możesz wybrać więcej niż 25 tematów. Tematy muszą się zaczynać od litery lub cyfry, mogą zawierać myślniki ('-') i mogą mieć do 35 znaków.

101 wierszy
3.0 KiB

  1. # -*- coding: utf-8 -*-
  2. from typing import (
  3. TypeVar, Type, List, Dict, IO, Iterator, Callable, Union, Optional,
  4. Literal, Protocol, Tuple, Iterable,
  5. )
  6. from .visitors import Transformer
  7. from .lexer import Token, Lexer, TerminalDef
  8. from .tree import Tree
  9. from .exceptions import UnexpectedInput
# Generic type variable used so that the classmethod constructors
# (Lark.open, Lark.open_from_package) return the subclass they are
# invoked on rather than plain Lark.
_T = TypeVar('_T')
  11. class PostLex(Protocol):
  12. def process(self, stream: Iterator[Token]) -> Iterator[Token]:
  13. ...
  14. always_accept: Iterable[str]
class LarkOptions:
    """Typed record of the keyword options accepted by ``Lark.__init__``.

    Each attribute mirrors the constructor parameter of the same name;
    see ``Lark`` for the accepted values and defaults.
    """

    start: List[str]          # rule name(s) parsing may start from
    parser: str               # parsing algorithm selector
    lexer: str                # lexer strategy selector
    transformer: Optional[Transformer]  # applied during parsing, if set
    postlex: Optional[PostLex]          # post-processes the token stream
    ambiguity: str            # ambiguity-handling mode
    regex: bool
    debug: bool
    keep_all_tokens: bool     # keep tokens that would otherwise be filtered
    propagate_positions: bool # copy line/column info onto tree nodes
    maybe_placeholders: bool
    lexer_callbacks: Dict[str, Callable[[Token], Token]]  # per-terminal hooks
    cache: Union[bool, str]   # enable caching, or a path to the cache file
    g_regex_flags: int        # regex flags applied to all terminals
    use_bytes: bool           # accept bytes input instead of str
    # Entries are either directory paths or loader callables
    # (e.g. FromPackageLoader) resolving (base_path, grammar_path).
    import_paths: List[Union[str, Callable[[Union[None, str, PackageResource], str], Tuple[str, str]]]]
    source_path: Optional[str]  # where the grammar was loaded from, if known
  33. class PackageResource(object):
  34. pkg_name: str
  35. path: str
  36. def __init__(self, pkg_name: str, path: str): ...
  37. class FromPackageLoader:
  38. def __init__(self, pkg_name: str, search_paths: Tuple[str, ...] = ...): ...
  39. def __call__(self, base_path: Union[None, str, PackageResource], grammar_path: str) -> Tuple[PackageResource, str]: ...
  40. class Lark:
  41. source_path: str
  42. source_grammar: str
  43. options: LarkOptions
  44. lexer: Lexer
  45. terminals: List[TerminalDef]
  46. def __init__(
  47. self,
  48. grammar: Union[str, IO[str]],
  49. *,
  50. start: Union[None, str, List[str]] = "start",
  51. parser: Literal["earley", "lalr", "cyk"] = "auto",
  52. lexer: Union[Literal["auto", "standard", "contextual", "dynamic", "dynamic_complete"], Type[Lexer]] = "auto",
  53. transformer: Optional[Transformer] = None,
  54. postlex: Optional[PostLex] = None,
  55. ambiguity: Literal["explicit", "resolve"] = "resolve",
  56. regex: bool = False,
  57. debug: bool = False,
  58. keep_all_tokens: bool = False,
  59. propagate_positions: bool = False,
  60. maybe_placeholders: bool = False,
  61. lexer_callbacks: Optional[Dict[str, Callable[[Token], Token]]] = None,
  62. cache: Union[bool, str] = False,
  63. g_regex_flags: int = ...,
  64. use_bytes: bool = False,
  65. import_paths: List[Union[str, Callable[[Union[None, str, PackageResource], str], Tuple[str, str]]]] = ...,
  66. source_path: Optional[str]=None,
  67. ):
  68. ...
  69. def parse(self, text: str, start: Optional[str] = None, on_error: Callable[[UnexpectedInput], bool] = None) -> Tree:
  70. ...
  71. @classmethod
  72. def open(cls: Type[_T], grammar_filename: str, rel_to: Optional[str] = None, **options) -> _T:
  73. ...
  74. @classmethod
  75. def open_from_package(cls: Type[_T], package: str, grammar_path: str, search_paths: Tuple[str, ...] = ..., **options) -> _T:
  76. ...
  77. def lex(self, text: str) -> Iterator[Token]:
  78. ...
  79. def get_terminal(self, name: str) -> TerminalDef:
  80. ...