This repo contains code to mirror other repos. It also contains the code that is getting mirrored.
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

105 lines
3.2 KiB

  1. # -*- coding: utf-8 -*-
  2. from typing import (
  3. TypeVar, Type, List, Dict, IO, Iterator, Callable, Union, Optional,
  4. Literal, Protocol, Tuple, Iterable,
  5. )
  6. from .visitors import Transformer
  7. from .lexer import Token, Lexer, TerminalDef
  8. from .tree import Tree
  9. from .exceptions import UnexpectedInput
  10. from .load_grammar import Grammar
# Generic type variable used so classmethod constructors (Lark.open, …)
# return the subclass they are invoked on rather than Lark itself.
_T = TypeVar('_T')
class PostLex(Protocol):
    # Structural (duck-typed) interface for post-lexing processors:
    # any object providing `process` and `always_accept` satisfies it.

    def process(self, stream: Iterator[Token]) -> Iterator[Token]:
        """Transform the lexer's token stream before it reaches the parser."""
        ...

    # Terminal names this post-lexer always lets through
    # (presumably even when the parser does not expect them — confirm against lark docs)
    always_accept: Iterable[str]
class LarkOptions:
    """Attribute view of the keyword options accepted by ``Lark.__init__``.

    The valid values for the ``str`` fields mirror the ``Literal`` types
    declared on the constructor below.
    """
    start: List[str]                    # start rule name(s)
    parser: str                         # "earley", "lalr", "cyk" or "auto"
    lexer: str                          # "auto", "standard", "contextual", "dynamic" or "dynamic_complete"
    transformer: Optional[Transformer]  # applied while parsing, when given
    postlex: Optional[PostLex]          # post-lexing processor, when given
    ambiguity: str                      # "explicit" or "resolve"
    regex: bool
    debug: bool
    keep_all_tokens: bool
    propagate_positions: bool
    maybe_placeholders: bool
    lexer_callbacks: Dict[str, Callable[[Token], Token]]  # per-terminal-name token hooks
    cache: Union[bool, str]             # False/True, or an explicit cache file path
    g_regex_flags: int                  # flags applied to grammar regexps
    use_bytes: bool
    # Entries are either plain path strings or loader callables with the
    # same signature as FromPackageLoader.__call__ (forward reference —
    # PackageResource is declared later in this stub).
    import_paths: List[Union[str, Callable[[Union[None, str, PackageResource], str], Tuple[str, str]]]]
    source_path: Optional[str]
    safe_cache: Literal[False, True, "atomic"]  # NOTE(review): "atomic" presumably means atomic cache writes — confirm
  35. class PackageResource(object):
  36. pkg_name: str
  37. path: str
  38. def __init__(self, pkg_name: str, path: str): ...
class FromPackageLoader:
    # Import-path resolver callable: an instance can be used as an entry in
    # the ``import_paths`` option to load grammars from a Python package.

    def __init__(self, pkg_name: str, search_paths: Tuple[str, ...] = ...): ...

    def __call__(self, base_path: Union[None, str, PackageResource], grammar_path: str) -> Tuple[PackageResource, str]: ...
  42. class Lark:
  43. source_path: str
  44. source_grammar: str
  45. grammar: Grammar
  46. options: LarkOptions
  47. lexer: Lexer
  48. terminals: List[TerminalDef]
  49. def __init__(
  50. self,
  51. grammar: Union[Grammar, str, IO[str]],
  52. *,
  53. start: Union[None, str, List[str]] = "start",
  54. parser: Literal["earley", "lalr", "cyk", "auto"] = "auto",
  55. lexer: Union[Literal["auto", "standard", "contextual", "dynamic", "dynamic_complete"], Type[Lexer]] = "auto",
  56. transformer: Optional[Transformer] = None,
  57. postlex: Optional[PostLex] = None,
  58. ambiguity: Literal["explicit", "resolve"] = "resolve",
  59. regex: bool = False,
  60. debug: bool = False,
  61. keep_all_tokens: bool = False,
  62. propagate_positions: bool = False,
  63. maybe_placeholders: bool = False,
  64. lexer_callbacks: Optional[Dict[str, Callable[[Token], Token]]] = None,
  65. cache: Union[bool, str] = False,
  66. g_regex_flags: int = ...,
  67. use_bytes: bool = False,
  68. import_paths: List[Union[str, Callable[[Union[None, str, PackageResource], str], Tuple[str, str]]]] = ...,
  69. source_path: Optional[str]=None,
  70. safe_cache: Literal[False, True, "atomic"]=True,
  71. ):
  72. ...
  73. def parse(self, text: str, start: Optional[str] = None, on_error: Callable[[UnexpectedInput], bool] = None) -> Tree:
  74. ...
  75. @classmethod
  76. def open(cls: Type[_T], grammar_filename: str, rel_to: Optional[str] = None, **options) -> _T:
  77. ...
  78. @classmethod
  79. def open_from_package(cls: Type[_T], package: str, grammar_path: str, search_paths: Tuple[str, ...] = ..., **options) -> _T:
  80. ...
  81. def lex(self, text: str, dont_ignore: bool = False) -> Iterator[Token]:
  82. ...
  83. def get_terminal(self, name: str) -> TerminalDef:
  84. ...