This repo contains code to mirror other repos. It also contains the code that is getting mirrored.
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

98 lines
2.8 KiB

  1. # -*- coding: utf-8 -*-
  2. from typing import (
  3. TypeVar, Type, List, Dict, IO, Iterator, Callable, Union, Optional,
  4. Literal, Protocol, Tuple,
  5. )
  6. from .visitors import Transformer
  7. from .lexer import Token, Lexer, TerminalDef
  8. from .tree import Tree
  9. _T = TypeVar('_T')
  10. class PostLex(Protocol):
  11. def process(self, stream: Iterator[Token]) -> Iterator[Token]:
  12. ...
  13. class LarkOptions:
  14. start: List[str]
  15. parser: str
  16. lexer: str
  17. transformer: Optional[Transformer]
  18. postlex: Optional[PostLex]
  19. ambiguity: str
  20. regex: bool
  21. debug: bool
  22. keep_all_tokens: bool
  23. propagate_positions: bool
  24. maybe_placeholders: bool
  25. lexer_callbacks: Dict[str, Callable[[Token], Token]]
  26. cache: Union[bool, str]
  27. g_regex_flags: int
  28. use_bytes: bool
  29. import_paths: List[Union[str, Callable[[Union[None, str, PackageResource], str], Tuple[str, str]]]]
  30. source_path: Optional[str]
  31. class PackageResource(object):
  32. pkg_name: str
  33. path: str
  34. def __init__(self, pkg_name: str, path: str):
  35. class FromPackageLoader:
  36. def __init__(self, pkg_name: str, search_paths: Tuple[str, ...] = ...): ...
  37. def __call__(self, base_paths: Union[None, str, PackageResource], grammar_path: str) -> Tuple[PackageResource, str]: ...
  38. class Lark:
  39. source_path: str
  40. source_code: str
  41. options: LarkOptions
  42. lexer: Lexer
  43. terminals: List[TerminalDef]
  44. def __init__(
  45. self,
  46. grammar: Union[str, IO[str]],
  47. *,
  48. start: Union[None, str, List[str]] = "start",
  49. parser: Literal["earley", "lalr", "cyk"] = "auto",
  50. lexer: Union[Literal["auto", "standard", "contextual", "dynamic", "dynamic_complete"], Lexer] = "auto",
  51. transformer: Optional[Transformer] = None,
  52. postlex: Optional[PostLex] = None,
  53. ambiguity: Literal["explicit", "resolve"] = "resolve",
  54. regex: bool = False,
  55. debug: bool = False,
  56. keep_all_tokens: bool = False,
  57. propagate_positions: bool = False,
  58. maybe_placeholders: bool = False,
  59. lexer_callbacks: Optional[Dict[str, Callable[[Token], Token]]] = None,
  60. cache: Union[bool, str] = False,
  61. g_regex_flags: int = ...,
  62. use_bytes: bool = False,
  63. import_paths: List[Union[str, Callable[[Union[None, str, PackageResource], str], Tuple[str, str]]]] = ...,
  64. source_path: Optional[str],
  65. ):
  66. ...
  67. def parse(self, text: str, start: Optional[str] = None) -> Tree:
  68. ...
  69. @classmethod
  70. def open(cls: Type[_T], grammar_filename: str, rel_to: Optional[str] = None, **options) -> _T:
  71. ...
  72. @classmethod
  73. def open_from_package(cls: Type[_T], package: str, grammar_path: str, search_paths: Tuple[str, ...] = ..., **options) -> _T:
  74. ...
  75. def lex(self, text: str) -> Iterator[Token]:
  76. ...
  77. def get_terminal(self, name: str) -> TerminalDef:
  78. ...