from .exceptions import ConfigurationError, GrammarError, assert_config
from .utils import get_regexp_width, Serialize
from .parsers.grammar_analysis import GrammarAnalyzer
from .lexer import LexerThread, TraditionalLexer, ContextualLexer, Lexer, Token, TerminalDef
from .parsers import earley, xearley, cyk
from .parsers.lalr_parser import LALR_Parser
from .tree import Tree
from .common import LexerConf, ParserConf

try:
    import regex
except ImportError:
    regex = None
import re

###{standalone

def _wrap_lexer(lexer_class):
    future_interface = getattr(lexer_class, '__future_interface__', False)
    if future_interface:
        return lexer_class
    else:
        class CustomLexerWrapper(Lexer):
            def __init__(self, lexer_conf):
                self.lexer = lexer_class(lexer_conf)
            def lex(self, lexer_state, parser_state):
                return self.lexer.lex(lexer_state.text)
        return CustomLexerWrapper
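
# A minimal sketch (not from the original module) of the legacy interface that
# _wrap_lexer exists for; `OldStyleLexer` is a hypothetical name:
#
#     class OldStyleLexer:
#         def __init__(self, lexer_conf):
#             self.conf = lexer_conf
#         def lex(self, text):                # old interface: takes raw text
#             yield Token('WORD', text)
#
#     # Without __future_interface__, it gets wrapped so lex() can be called
#     # with (lexer_state, parser_state) like the built-in lexers:
#     LexerClass = _wrap_lexer(OldStyleLexer)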

class MakeParsingFrontend:
    def __init__(self, parser_type, lexer_type):
        self.parser_type = parser_type
        self.lexer_type = lexer_type

    def __call__(self, lexer_conf, parser_conf, options):
        assert isinstance(lexer_conf, LexerConf)
        assert isinstance(parser_conf, ParserConf)
        parser_conf.parser_type = self.parser_type
        lexer_conf.lexer_type = self.lexer_type
        return ParsingFrontend(lexer_conf, parser_conf, options)

    @classmethod
    def deserialize_lexer_conf(cls, data, memo, options):
        # We need the lexer_conf earlier, to produce the callback list for parser_conf,
        # so deserialization is split into two methods.
        terminals = [item for item in memo.values() if isinstance(item, TerminalDef)]
        lexer_conf = LexerConf.deserialize(data['lexer_conf'], memo)
        lexer_conf.callbacks = _get_lexer_callbacks(options.transformer, terminals)
        lexer_conf.re_module = regex if options.regex else re
        lexer_conf.use_bytes = options.use_bytes
        lexer_conf.g_regex_flags = options.g_regex_flags
        lexer_conf.skip_validation = True
        lexer_conf.postlex = options.postlex
        return lexer_conf

    @classmethod
    def deserialize(cls, data, memo, lexer_conf, callbacks, options):
        parser_conf = ParserConf.deserialize(data['parser_conf'], memo)
        parser = LALR_Parser.deserialize(data['parser'], memo, callbacks, options.debug)
        parser_conf.callbacks = callbacks
        return ParsingFrontend(lexer_conf, parser_conf, options, parser=parser)
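
# Hedged usage note: Lark constructs this factory internally, roughly:
#
#     make = get_frontend('lalr', 'contextual')    # -> MakeParsingFrontend
#     frontend = make(lexer_conf, parser_conf, options)
#
# i.e. the parser/lexer type names are bound first, and the configs are only
# stamped with them when the frontend is actually built.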

class ParsingFrontend(Serialize):
    __serialize_fields__ = 'lexer_conf', 'parser_conf', 'parser', 'options'

    def __init__(self, lexer_conf, parser_conf, options, parser=None):
        self.parser_conf = parser_conf
        self.lexer_conf = lexer_conf
        self.options = options

        # Set up the parser
        if parser:  # From cache
            self.parser = parser
        else:
            create_parser = {
                'lalr': create_lalr_parser,
                'earley': create_earley_parser,
                'cyk': CYK_FrontEnd,
            }[parser_conf.parser_type]
            self.parser = create_parser(lexer_conf, parser_conf, options)

        # Set up the lexer
        lexer_type = lexer_conf.lexer_type
        self.skip_lexer = False
        if lexer_type in ('dynamic', 'dynamic_complete'):
            self.skip_lexer = True
            return

        try:
            create_lexer = {
                'standard': create_traditional_lexer,
                'contextual': create_contextual_lexer,
            }[lexer_type]
        except KeyError:
            assert issubclass(lexer_type, Lexer), lexer_type
            self.lexer = _wrap_lexer(lexer_type)(lexer_conf)
        else:
            self.lexer = create_lexer(lexer_conf, self.parser, lexer_conf.postlex)

        if lexer_conf.postlex:
            self.lexer = PostLexConnector(self.lexer, lexer_conf.postlex)

    def parse(self, text, start=None):
        if start is None:
            start = self.parser_conf.start
            if len(start) > 1:
                raise ConfigurationError("Lark initialized with more than 1 possible start rule. Must specify which start rule to parse", start)
            start, = start

        if self.skip_lexer:
            return self.parser.parse(text, start)

        lexer_thread = LexerThread(self.lexer, text)
        return self.parser.parse(lexer_thread, start)
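
# A sketch of the two paths through parse(), with `fe` a built ParsingFrontend:
#
#     fe.parse("1+2")                  # skip_lexer (dynamic lexing): the raw
#                                      # text goes straight to the parser
#     fe.parse("1+2", start='expr')    # standard path: a LexerThread feeds
#                                      # tokens; an explicit start rule is
#                                      # required if the grammar declares several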

def get_frontend(parser, lexer):
    assert_config(parser, ('lalr', 'earley', 'cyk'))
    if not isinstance(lexer, type):  # not custom lexer?
        expected = {
            'lalr': ('standard', 'contextual'),
            'earley': ('standard', 'dynamic', 'dynamic_complete'),
            'cyk': ('standard', ),
        }[parser]
        assert_config(lexer, expected, 'Parser %r does not support lexer %%r, expected one of %%s' % parser)
    return MakeParsingFrontend(parser, lexer)
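
# The parser/lexer combinations enforced above, for reference:
#
#     get_frontend('lalr', 'contextual')    # ok
#     get_frontend('earley', 'dynamic')     # ok
#     get_frontend('cyk', 'standard')       # ok
#     get_frontend('lalr', 'dynamic')       # rejected by assert_config
#
# A custom lexer class (isinstance(lexer, type)) bypasses the table check.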

def _get_lexer_callbacks(transformer, terminals):
    result = {}
    for terminal in terminals:
        callback = getattr(transformer, terminal.name, None)
        if callback is not None:
            result[terminal.name] = callback
    return result
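
# Illustration (hypothetical transformer): a method named after a terminal
# becomes that terminal's lexer callback, so it can act on tokens at lex time.
# Only terminals with a matching method end up in the returned dict:
#
#     class MyTransformer(Transformer):
#         def NUMBER(self, tok):    # name matches the NUMBER terminal
#             ...                   # may inspect or replace the token
#
#     _get_lexer_callbacks(MyTransformer(), terminals)  # -> {'NUMBER': ...}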

class PostLexConnector:
    def __init__(self, lexer, postlexer):
        self.lexer = lexer
        self.postlexer = postlexer

    def make_lexer_state(self, text):
        return self.lexer.make_lexer_state(text)

    def lex(self, lexer_state, parser_state):
        i = self.lexer.lex(lexer_state, parser_state)
        return self.postlexer.process(i)
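
# PostLexConnector chains a post-lexer (e.g. lark.indenter.Indenter) after the
# real lexer. Roughly, per parse:
#
#     tokens = lexer.lex(lexer_state, parser_state)
#     tokens = postlexer.process(tokens)    # may inject tokens, e.g. indent/dedent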

def create_traditional_lexer(lexer_conf, parser, postlex):
    return TraditionalLexer(lexer_conf)

def create_contextual_lexer(lexer_conf, parser, postlex):
    states = {idx: list(t.keys()) for idx, t in parser._parse_table.states.items()}
    always_accept = postlex.always_accept if postlex else ()
    return ContextualLexer(lexer_conf, states, always_accept=always_accept)

def create_lalr_parser(lexer_conf, parser_conf, options=None):
    debug = options.debug if options else False
    return LALR_Parser(parser_conf, debug=debug)

create_earley_parser = NotImplemented
CYK_FrontEnd = NotImplemented
###}

class EarleyRegexpMatcher:
    def __init__(self, lexer_conf):
        self.regexps = {}
        for t in lexer_conf.terminals:
            if t.priority != 1:
                raise GrammarError("Dynamic Earley doesn't support weights on terminals", t, t.priority)
            regexp = t.pattern.to_regexp()
            try:
                width = get_regexp_width(regexp)[0]
            except ValueError:
                raise GrammarError("Bad regexp in token %s: %s" % (t.name, regexp))
            else:
                if width == 0:
                    raise GrammarError("Dynamic Earley doesn't allow zero-width regexps", t)
            if lexer_conf.use_bytes:
                regexp = regexp.encode('utf-8')
            self.regexps[t.name] = lexer_conf.re_module.compile(regexp, lexer_conf.g_regex_flags)

    def match(self, term, text, index=0):
        return self.regexps[term.name].match(text, index)
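
# Why zero-width patterns are rejected above: a terminal that can match the
# empty string would let the dynamic Earley scanner "match" forever at the
# same position. A sketch with hypothetical patterns:
#
#     get_regexp_width('a*')[0]    # minimum width 0 -> GrammarError
#     get_regexp_width('a+')[0]    # minimum width 1 -> accepted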

def create_earley_parser__dynamic(lexer_conf, parser_conf, options=None, **kw):
    earley_matcher = EarleyRegexpMatcher(lexer_conf)
    return xearley.Parser(parser_conf, earley_matcher.match, ignore=lexer_conf.ignore, **kw)

def _match_earley_basic(term, token):
    return term.name == token.type

def create_earley_parser__basic(lexer_conf, parser_conf, options, **kw):
    return earley.Parser(parser_conf, _match_earley_basic, **kw)

def create_earley_parser(lexer_conf, parser_conf, options):
    resolve_ambiguity = options.ambiguity == 'resolve'
    debug = options.debug if options else False
    tree_class = (options.tree_class or Tree) if options.ambiguity != 'forest' else None

    extra = {}
    if lexer_conf.lexer_type == 'dynamic':
        f = create_earley_parser__dynamic
    elif lexer_conf.lexer_type == 'dynamic_complete':
        extra['complete_lex'] = True
        f = create_earley_parser__dynamic
    else:
        f = create_earley_parser__basic

    return f(lexer_conf, parser_conf, options, resolve_ambiguity=resolve_ambiguity, debug=debug, tree_class=tree_class, **extra)
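
# How these options surface in Lark's public API (a hedged sketch):
#
#     Lark(g, parser='earley', ambiguity='resolve')    # default: pick one tree
#     Lark(g, parser='earley', ambiguity='explicit')   # keep _ambig nodes
#     Lark(g, parser='earley', ambiguity='forest')     # raw forest, tree_class=None
#     Lark(g, parser='earley', lexer='dynamic_complete')   # sets complete_lex=True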

class CYK_FrontEnd:
    def __init__(self, lexer_conf, parser_conf, options=None):
        self._analysis = GrammarAnalyzer(parser_conf)
        self.parser = cyk.Parser(parser_conf.rules)
        self.callbacks = parser_conf.callbacks

    def parse(self, lexer_thread, start):
        tokens = list(lexer_thread.lex(None))
        tree = self.parser.parse(tokens, start)
        return self._transform(tree)

    def _transform(self, tree):
        subtrees = list(tree.iter_subtrees())
        for subtree in subtrees:
            subtree.children = [self._apply_callback(c) if isinstance(c, Tree) else c for c in subtree.children]
        return self._apply_callback(tree)

    def _apply_callback(self, tree):
        return self.callbacks[tree.rule](tree.children)