diff --git a/lark/parsers/lalr_interactive_parser.py b/lark/parsers/lalr_interactive_parser.py
index 2a30d8ee..d5a2152f 100644
--- a/lark/parsers/lalr_interactive_parser.py
+++ b/lark/parsers/lalr_interactive_parser.py
@@ -7,6 +7,7 @@
 from lark.exceptions import UnexpectedToken
 from lark.lexer import Token, LexerThread
 
+###{standalone
 class InteractiveParser:
     """InteractiveParser gives you advanced control over parsing and error handling when parsing with LALR.
 
@@ -152,3 +153,5 @@ def as_mutable(self):
         """Convert to an ``InteractiveParser``."""
         p = copy(self)
         return InteractiveParser(p.parser, p.parser_state, p.lexer_thread)
+
+###}
diff --git a/lark/tools/standalone.py b/lark/tools/standalone.py
index 1901f71b..9940ccbf 100644
--- a/lark/tools/standalone.py
+++ b/lark/tools/standalone.py
@@ -24,6 +24,7 @@
 #
 #
 
+from copy import deepcopy
 from abc import ABC, abstractmethod
 from types import ModuleType
 from typing import (
@@ -65,6 +66,7 @@
     'parsers/lalr_analysis.py',
     'parsers/lalr_parser_state.py',
     'parsers/lalr_parser.py',
+    'parsers/lalr_interactive_parser.py',
     'parser_frontends.py',
     'lark.py',
     'indenter.py',
diff --git a/tests/test_parser.py b/tests/test_parser.py
index 839e16ae..74985015 100644
--- a/tests/test_parser.py
+++ b/tests/test_parser.py
@@ -2542,7 +2542,7 @@ def test_parser_interactive_parser(self):
         res = ip.feed_eof(ip.lexer_thread.state.last_token)
         self.assertEqual(res, Tree('start', ['a', 'b']))
 
-        self.assertRaises(UnexpectedToken ,ip.feed_eof)
+        self.assertRaises(UnexpectedToken, ip.feed_eof)
 
         self.assertRaises(UnexpectedToken, ip_copy.feed_token, Token('A', 'a'))
         ip_copy.feed_token(Token('B', 'b'))
diff --git a/tests/test_tools.py b/tests/test_tools.py
index deef9299..47715454 100644
--- a/tests/test_tools.py
+++ b/tests/test_tools.py
@@ -51,6 +51,46 @@ def test_simple(self):
         l = _Lark()
         x = l.parse('12 elephants')
 
+    def test_interactive(self):
+        grammar = """
+        start: A+ B*
+        A: "a"
+        B: "b"
+        """
+        context = self._create_standalone(grammar)
+        parser: Lark = context['Lark_StandAlone']()
+
+        ip = parser.parse_interactive()
+
+        UnexpectedToken = context['UnexpectedToken']
+        Token = context['Token']
+
+        self.assertRaises(UnexpectedToken, ip.feed_eof)
+        self.assertRaises(TypeError, ip.exhaust_lexer)
+        ip.feed_token(Token('A', 'a'))
+        res = ip.feed_eof()
+        self.assertEqual(res, Tree('start', ['a']))
+
+        ip = parser.parse_interactive("ab")
+
+        ip.exhaust_lexer()
+
+        ip_copy = ip.copy()
+        self.assertEqual(ip_copy.parser_state, ip.parser_state)
+        self.assertEqual(ip_copy.lexer_thread.state, ip.lexer_thread.state)
+        self.assertIsNot(ip_copy.parser_state, ip.parser_state)
+        self.assertIsNot(ip_copy.lexer_thread.state, ip.lexer_thread.state)
+        self.assertIsNot(ip_copy.lexer_thread.state.line_ctr, ip.lexer_thread.state.line_ctr)
+
+        res = ip.feed_eof(ip.lexer_thread.state.last_token)
+        self.assertEqual(res, Tree('start', ['a', 'b']))
+        self.assertRaises(UnexpectedToken, ip.feed_eof)
+
+        self.assertRaises(UnexpectedToken, ip_copy.feed_token, Token('A', 'a'))
+        ip_copy.feed_token(Token('B', 'b'))
+        res = ip_copy.feed_eof()
+        self.assertEqual(res, Tree('start', ['a', 'b', 'b']))
+
     def test_contextual(self):
         grammar = """
         start: a b