This repo contains code to mirror other repos. It also contains the code that is being mirrored.
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

100 lines
2.9 KiB

  1. # -*- coding: utf-8 -*-
  2. from typing import (
  3. TypeVar, Type, List, Dict, IO, Iterator, Callable, Union, Optional,
  4. Literal, Protocol, Tuple, Iterable,
  5. )
  6. from .visitors import Transformer
  7. from .lexer import Token, Lexer, TerminalDef
  8. from .tree import Tree
# Generic placeholder so the classmethod constructors below (Lark.open,
# Lark.open_from_package) are typed to return the actual subclass.
_T = TypeVar('_T')
class PostLex(Protocol):
    """Structural (duck-typed) interface for a post-lexing processor, as
    accepted by ``Lark(postlex=...)`` / ``LarkOptions.postlex``."""

    def process(self, stream: Iterator[Token]) -> Iterator[Token]:
        """Transform the lexer's token stream before it reaches the parser."""
        ...

    # Terminal names this post-lexer deals with.
    # NOTE(review): the exact semantics (presumably terminals the parser
    # should always accept) are not visible in this stub — confirm against
    # the implementation.
    always_accept: Iterable[str]
class LarkOptions:
    """Typed mirror of Lark's configuration.

    Each attribute corresponds to the ``Lark.__init__`` keyword parameter
    of the same name (and same type); see that signature for the defaults.
    """
    start: List[str]
    parser: str
    lexer: str
    transformer: Optional[Transformer]
    postlex: Optional[PostLex]
    ambiguity: str
    regex: bool
    debug: bool
    keep_all_tokens: bool
    propagate_positions: bool
    maybe_placeholders: bool
    # Per-terminal-name callbacks mapping a Token to a (possibly replaced) Token.
    lexer_callbacks: Dict[str, Callable[[Token], Token]]
    # Either an on/off flag or an explicit cache file name.
    cache: Union[bool, str]
    g_regex_flags: int
    use_bytes: bool
    # Entries are plain path strings, or resolver callables mapping
    # (base_path, grammar_path) to a (str, str) tuple.
    import_paths: List[Union[str, Callable[[Union[None, str, PackageResource], str], Tuple[str, str]]]]
    source_path: Optional[str]
  32. class PackageResource(object):
  33. pkg_name: str
  34. path: str
  35. def __init__(self, pkg_name: str, path: str):
class FromPackageLoader:
    """Resolver that loads grammar files from inside an installed package.

    Instances are callable with the ``(base_path, grammar_path)`` shape
    used by ``LarkOptions.import_paths`` resolvers; note the first element
    of the returned tuple is a ``PackageResource`` rather than a plain str.
    """
    def __init__(self, pkg_name: str, search_paths: Tuple[str, ...] = ...): ...

    def __call__(self, base_path: Union[None, str, PackageResource], grammar_path: str) -> Tuple[PackageResource, str]: ...
  39. class Lark:
  40. source_path: str
  41. source_grammar: str
  42. options: LarkOptions
  43. lexer: Lexer
  44. terminals: List[TerminalDef]
  45. def __init__(
  46. self,
  47. grammar: Union[str, IO[str]],
  48. *,
  49. start: Union[None, str, List[str]] = "start",
  50. parser: Literal["earley", "lalr", "cyk"] = "auto",
  51. lexer: Union[Literal["auto", "standard", "contextual", "dynamic", "dynamic_complete"], Lexer] = "auto",
  52. transformer: Optional[Transformer] = None,
  53. postlex: Optional[PostLex] = None,
  54. ambiguity: Literal["explicit", "resolve"] = "resolve",
  55. regex: bool = False,
  56. debug: bool = False,
  57. keep_all_tokens: bool = False,
  58. propagate_positions: bool = False,
  59. maybe_placeholders: bool = False,
  60. lexer_callbacks: Optional[Dict[str, Callable[[Token], Token]]] = None,
  61. cache: Union[bool, str] = False,
  62. g_regex_flags: int = ...,
  63. use_bytes: bool = False,
  64. import_paths: List[Union[str, Callable[[Union[None, str, PackageResource], str], Tuple[str, str]]]] = ...,
  65. source_path: Optional[str]=None,
  66. ):
  67. ...
  68. def parse(self, text: str, start: Optional[str] = None) -> Tree:
  69. ...
  70. @classmethod
  71. def open(cls: Type[_T], grammar_filename: str, rel_to: Optional[str] = None, **options) -> _T:
  72. ...
  73. @classmethod
  74. def open_from_package(cls: Type[_T], package: str, grammar_path: str, search_paths: Tuple[str, ...] = ..., **options) -> _T:
  75. ...
  76. def lex(self, text: str) -> Iterator[Token]:
  77. ...
  78. def get_terminal(self, name: str) -> TerminalDef:
  79. ...