This repo contains code to mirror other repos. It also contains the code that is getting mirrored.
Nie możesz wybrać więcej niż 25 tematów. Tematy muszą się zaczynać od litery lub cyfry, mogą zawierać myślniki ('-') i mogą mieć do 35 znaków.

110 wierszy
3.3 KiB

  1. # -*- coding: utf-8 -*-
  2. from typing import (
  3. TypeVar, Type, List, Dict, IO, Iterator, Callable, Union, Optional,
  4. Literal, Protocol, Tuple, Iterable,
  5. )
  6. from .parsers.lalr_interactive_parser import InteractiveParser
  7. from .visitors import Transformer
  8. from .lexer import Token, Lexer, TerminalDef
  9. from .tree import Tree
  10. from .exceptions import UnexpectedInput
  11. from .load_grammar import Grammar
# Generic type variable used so classmethod constructors (Lark.open, etc.)
# return the subclass they are invoked on rather than Lark itself.
_T = TypeVar('_T')
class PostLex(Protocol):
    """Structural interface for a post-lexing stage.

    An object satisfying this protocol can be passed as the ``postlex``
    option: it receives the lexer's token stream and yields a (possibly
    transformed) token stream.
    """

    def process(self, stream: Iterator[Token]) -> Iterator[Token]:
        """Transform the incoming token stream; yields the tokens to parse."""
        ...

    # Terminal names this post-lexer must always receive, even if the
    # grammar would otherwise filter them out.
    always_accept: Iterable[str]
class LarkOptions:
    """Typed view of the options accepted by ``Lark.__init__``.

    Field names mirror the keyword arguments of ``Lark`` one-to-one
    (see ``Lark.__init__`` below for the accepted literal values and
    defaults).
    """

    # Start symbol(s) of the grammar.
    start: List[str]
    # Parsing algorithm name (e.g. "earley"/"lalr"/"cyk"/"auto" — see Lark.__init__).
    parser: str
    # Lexer selection; a string name per Lark.__init__'s Literal choices.
    lexer: str
    # Optional transformer applied to the parse result.
    transformer: Optional[Transformer]
    # Optional post-lexing stage (see PostLex protocol).
    postlex: Optional[PostLex]
    # Ambiguity handling mode ("explicit" or "resolve" per Lark.__init__).
    ambiguity: str
    # Use the `regex` module instead of `re` — TODO confirm against implementation.
    regex: bool
    debug: bool
    keep_all_tokens: bool
    propagate_positions: bool
    maybe_placeholders: bool
    # Per-terminal callbacks applied to each matched Token.
    lexer_callbacks: Dict[str, Callable[[Token], Token]]
    # False = no cache; True or a path string enables caching — semantics per Lark docs.
    cache: Union[bool, str]
    # Flags forwarded to the regex engine (e.g. re.IGNORECASE).
    g_regex_flags: int
    use_bytes: bool
    # Each entry is either a search-path string or a custom loader callable
    # (base_path, grammar_path) -> (full_path, grammar_text); FromPackageLoader
    # below matches this callable shape.
    import_paths: List[Union[str, Callable[[Union[None, str, PackageResource], str], Tuple[str, str]]]]
    source_path: Optional[str]
class PackageResource(object):
    """A grammar location inside an installed package: package name + relative path."""

    pkg_name: str
    path: str

    def __init__(self, pkg_name: str, path: str): ...
class FromPackageLoader:
    """Grammar loader that resolves imports from a package's resources.

    Instances are callables matching the loader shape accepted by
    ``LarkOptions.import_paths``: given a base path and a grammar path,
    they return the resolved resource and the grammar text.
    """

    def __init__(self, pkg_name: str, search_paths: Tuple[str, ...] = ...): ...

    def __call__(self, base_path: Union[None, str, PackageResource], grammar_path: str) -> Tuple[PackageResource, str]: ...
class Lark:
    """Main interface: compiles a grammar and exposes parsing entry points.

    NOTE(review): ``on_error`` in ``parse`` and ``text`` in
    ``parse_interactive`` default to ``None`` but were not annotated
    Optional; the annotations are corrected below.
    """

    # Where the grammar came from (file path or "<string>"-style marker — TODO confirm).
    source_path: str
    # The raw grammar text.
    source_grammar: str
    # The compiled grammar object.
    grammar: Grammar
    # Effective options after defaults are applied.
    options: LarkOptions
    lexer: Lexer
    # Terminal definitions produced from the grammar.
    terminals: List[TerminalDef]

    def __init__(
        self,
        grammar: Union[Grammar, str, IO[str]],
        *,
        start: Union[None, str, List[str]] = "start",
        parser: Literal["earley", "lalr", "cyk", "auto"] = "auto",
        lexer: Union[Literal["auto", "standard", "contextual", "dynamic", "dynamic_complete"], Type[Lexer]] = "auto",
        transformer: Optional[Transformer] = None,
        postlex: Optional[PostLex] = None,
        ambiguity: Literal["explicit", "resolve"] = "resolve",
        regex: bool = False,
        debug: bool = False,
        keep_all_tokens: bool = False,
        propagate_positions: bool = False,
        maybe_placeholders: bool = False,
        lexer_callbacks: Optional[Dict[str, Callable[[Token], Token]]] = None,
        cache: Union[bool, str] = False,
        g_regex_flags: int = ...,
        use_bytes: bool = False,
        import_paths: List[Union[str, Callable[[Union[None, str, PackageResource], str], Tuple[str, str]]]] = ...,
        source_path: Optional[str] = None,
    ):
        ...

    def parse(self, text: str, start: Optional[str] = None, on_error: Optional[Callable[[UnexpectedInput], bool]] = None) -> Tree:
        """Parse *text* and return the resulting Tree.

        ``on_error`` (if given) is called with each UnexpectedInput; its
        bool return presumably controls whether parsing continues — TODO
        confirm against the implementation.
        """
        ...

    def parse_interactive(self, text: Optional[str] = None, start: Optional[str] = None) -> InteractiveParser:
        """Start an interactive (step-by-step) parse; LALR-specific per the import."""
        ...

    @classmethod
    def open(cls: Type[_T], grammar_filename: str, rel_to: Optional[str] = None, **options) -> _T:
        """Alternate constructor: build a parser from a grammar file on disk."""
        ...

    @classmethod
    def open_from_package(cls: Type[_T], package: str, grammar_path: str, search_paths: Tuple[str, ...] = ..., **options) -> _T:
        """Alternate constructor: load the grammar from an installed package's resources."""
        ...

    def lex(self, text: str, dont_ignore: bool = False) -> Iterator[Token]:
        """Run only the lexer over *text*, yielding Tokens."""
        ...

    def get_terminal(self, name: str) -> TerminalDef:
        """Look up a terminal definition by name."""
        ...