@@ -15,9 +15,9 @@ class WithLexer:
     def init_contextual_lexer(self, lexer_conf, parser_conf):
         self.lexer_conf = lexer_conf
-        d = {idx:t.keys() for idx, t in self.parser.analysis.parse_table.states.items()}
+        states = {idx:list(t.keys()) for idx, t in self.parser._parse_table.states.items()}
         always_accept = lexer_conf.postlex.always_accept if lexer_conf.postlex else ()
-        self.lexer = ContextualLexer(lexer_conf.tokens, d, ignore=lexer_conf.ignore, always_accept=always_accept, user_callbacks=lexer_conf.callbacks)
+        self.lexer = ContextualLexer(lexer_conf.tokens, states, ignore=lexer_conf.ignore, always_accept=always_accept, user_callbacks=lexer_conf.callbacks)

     def lex(self, text):
         stream = self.lexer.lex(text)
@@ -145,16 +145,16 @@ class Column:
 class Parser:
     def __init__(self, parser_conf, term_matcher, resolve_ambiguity=None):
-        self.analysis = GrammarAnalyzer(parser_conf)
+        analysis = GrammarAnalyzer(parser_conf)
         self.parser_conf = parser_conf
         self.resolve_ambiguity = resolve_ambiguity

-        self.FIRST = self.analysis.FIRST
+        self.FIRST = analysis.FIRST
         self.postprocess = {}
         self.predictions = {}
         for rule in parser_conf.rules:
             self.postprocess[rule] = rule.alias if callable(rule.alias) else getattr(parser_conf.callback, rule.alias)
-            self.predictions[rule.origin] = [x.rule for x in self.analysis.expand_rule(rule.origin)]
+            self.predictions[rule.origin] = [x.rule for x in analysis.expand_rule(rule.origin)]

         self.term_matcher = term_matcher
@@ -11,11 +11,12 @@ class Parser:
     def __init__(self, parser_conf):
         assert all(r.options is None or r.options.priority is None
                    for r in parser_conf.rules), "LALR doesn't yet support prioritization"
-        self.analysis = analysis = LALR_Analyzer(parser_conf)
+        analysis = LALR_Analyzer(parser_conf)
         analysis.compute_lookahead()
         callbacks = {rule: getattr(parser_conf.callback, rule.alias or rule.origin, None)
                      for rule in parser_conf.rules}
+        self._parse_table = analysis.parse_table
         self.parser_conf = parser_conf
         self.parser = _Parser(analysis.parse_table, callbacks)
         self.parse = self.parser.parse
@@ -126,7 +126,7 @@ def _get_token_type(token_type):
 class ParserAtoms:
     def __init__(self, parser):
-        self.parse_table = parser.analysis.parse_table
+        self.parse_table = parser._parse_table

     def print_python(self):
         print('class ParseTable: pass')