10.12., 9:00 - 11:00: Due to updates GitLab may be unavailable for some minutes between 09:00 and 11:00.

Commit 980fd4a2 authored by Eckhart Arnold's avatar Eckhart Arnold

refactoring

parent 9ef2e918
......@@ -28,8 +28,8 @@ except ImportError:
import re
from .ebnf import EBNFGrammar, EBNFTransform, EBNFCompiler, grammar_changed
from .toolkit import load_if_file, is_python_code, compile_python_object
from .parsers import GrammarBase, CompilerBase, full_compilation, nil_scanner
from .toolkit import logging, load_if_file, is_python_code, compile_python_object
from .parsers import GrammarBase, CompilerBase, compile_source, nil_scanner
from .syntaxtree import Node
......@@ -86,16 +86,17 @@ class CompilationError(Exception):
DHPARSER_IMPORTS = '''
from functools import partial
import os
import sys
try:
import regex as re
except ImportError:
import re
from DHParser.toolkit import load_if_file
from DHParser.toolkit import logging, is_filename, load_if_file
from DHParser.parsers import GrammarBase, CompilerBase, nil_scanner, \\
Lookbehind, Lookahead, Alternative, Pop, Required, Token, \\
Optional, NegativeLookbehind, OneOrMore, RegExp, Retrieve, Sequence, RE, Capture, \\
ZeroOrMore, Forward, NegativeLookahead, mixin_comment, full_compilation
ZeroOrMore, Forward, NegativeLookahead, mixin_comment, compile_source
from DHParser.syntaxtree import Node, traverse, remove_enclosing_delimiters, \\
remove_children_if, reduce_single_child, replace_by_single_child, remove_whitespace, \\
no_operation, remove_expendables, remove_tokens, flatten, is_whitespace, is_expendable, \\
......@@ -107,8 +108,17 @@ DHPARSER_COMPILER = '''
def compile_{NAME}(source):
"""Compiles ``source`` and returns (result, errors, ast).
"""
return full_compilation(source, {NAME}Scanner,
{NAME}Grammar(), {NAME}Transform, {NAME}Compiler())
with logging("LOGS"):
grammar = {NAME}Grammar()
compiler = {NAME}Compiler()
cname = compiler.__class__.__name__
log_file_name = os.path.basename(os.path.splitext(source)[0]) \\
if is_filename(source) < 0 else cname[:cname.find('.')] + '_out'
result = compile_source(source, {NAME}Scanner, grammar.parse,
{NAME}Transform, compiler.compile_ast)
grammar.log_parsing_history(log_file_name)
return result
if __name__ == "__main__":
if len(sys.argv) > 1:
......@@ -136,8 +146,9 @@ def grammar_instance(grammar_representation):
if is_python_code(grammar_representation):
parser_py, errors, AST = grammar_src, '', None
else:
parser_py, errors, AST = full_compilation(grammar_src, None,
EBNFGrammar(), EBNFTransform, EBNFCompiler())
with logging(False):
parser_py, errors, AST = compile_source(grammar_src, None,
EBNFGrammar(), EBNFTransform, EBNFCompiler())
if errors:
raise GrammarError('\n\n'.join(errors), grammar_src)
parser_root = compile_python_object(DHPARSER_IMPORTS + parser_py, '\w*Grammar$')()
......@@ -147,7 +158,8 @@ def grammar_instance(grammar_representation):
if isinstance(grammar_representation, GrammarBase):
parser_root = grammar_representation
else:
# assume `grammar` is a grammar class and get the root object
# assume ``grammar_representation`` is a grammar class and get the root object
# TODO: further case: grammar_representation is a method
parser_root = grammar_representation()
return parser_root, grammar_src
......@@ -162,11 +174,13 @@ def compileDSL(text_or_file, scanner, dsl_grammar, ast_transformation, compiler)
"""
assert isinstance(text_or_file, str)
assert isinstance(compiler, CompilerBase)
parser_root, grammar_src = grammar_instance(dsl_grammar)
src = load_if_file(text_or_file)
result, errors, AST = full_compilation(src, scanner, parser_root,
ast_transformation, compiler)
if errors: raise CompilationError(errors, src, grammar_src, AST)
result, errors, AST = compile_source(text_or_file, scanner, parser_root,
ast_transformation, compiler)
if errors:
src = load_if_file(text_or_file)
raise CompilationError(errors, src, grammar_src, AST)
return result
......@@ -217,8 +231,8 @@ def load_compiler_suite(compiler_suite):
compiler = compile_python_object(imports + compiler_py, '\w*Compiler$')
else:
# assume source is an ebnf grammar
parser_py, errors, AST = full_compilation(
source, None, EBNFGrammar(), EBNFTransform, EBNFCompiler())
parser_py, errors, AST = compile_source(source, None, EBNFGrammar(),
EBNFTransform, EBNFCompiler())
if errors:
raise GrammarError('\n\n'.join(errors), source)
scanner = nil_scanner
......@@ -322,7 +336,7 @@ def compile_on_disk(source_file, compiler_suite="", extension=".xml"):
parser = EBNFGrammar()
trans = EBNFTransform
compiler1 = EBNFCompiler(compiler_name, source_file)
result, errors, ast = full_compilation(source_file, scanner, parser, trans, compiler1)
result, errors, ast = compile_source(source_file, scanner, parser, trans, compiler1)
if errors:
return errors
......
......@@ -56,7 +56,7 @@ try:
except ImportError:
import re
from .toolkit import IS_LOGGING, LOGS_DIR, escape_re, sane_parser_name
from .toolkit import is_logging, log_dir, logfile_basename, escape_re, sane_parser_name
from .syntaxtree import WHITESPACE_KEYWORD, TOKEN_KEYWORD, ZOMBIE_PARSER, Node, \
mock_syntax_tree
from DHParser.toolkit import load_if_file, error_messages
......@@ -92,7 +92,7 @@ __all__ = ['HistoryRecord',
'Pop',
'Forward',
'CompilerBase',
'full_compilation']
'compile_source']
LEFT_RECURSION_DEPTH = 10 # because of pythons recursion depth limit, this
......@@ -288,7 +288,7 @@ class GrammarBase:
def __init__(self):
self.all_parsers = set()
self.dirty_flag = False
self.history_tracking = IS_LOGGING()
self.history_tracking = is_logging()
self._reset()
self._assign_parser_names()
self.root__ = copy.deepcopy(self.__class__.root__)
......@@ -326,7 +326,7 @@ class GrammarBase:
self.all_parsers.add(parser)
parser.grammar = self
def parse(self, document, start_parser="root__"):
def __call__(self, document, start_parser="root__"):
"""Parses a document with with parser-combinators.
Args:
......@@ -384,14 +384,14 @@ class GrammarBase:
return record.stack, record.status, excerpt
def write_log(history, log_name):
path = os.path.join(LOGS_DIR(), log_name + "_parser.log")
path = os.path.join(log_dir(), log_name + "_parser.log")
if history:
with open(path, "w", encoding="utf-8") as f:
f.write("\n".join(history))
elif os.path.exists(path):
os.remove(path)
if IS_LOGGING():
if is_logging():
if not log_file_name:
name = self.__class__.__name__
log_file_name = name[:-7] if name.lower().endswith('grammar') else name
......@@ -965,10 +965,10 @@ class CompilerBase:
def _reset(self):
pass
def compile_all(self, node):
def __call__(self, node):
"""Compiles the abstract syntax tree with the root ``node``.
It's called `compile_all`` to avoid confusion with the
It is called ``compile_ast`` to avoid confusion with the
``_compile`` that is called from within the local node
compiler methods.
"""
......@@ -1012,7 +1012,7 @@ class CompilerBase:
return result
def full_compilation(source, scanner, parser, transform, compiler):
def compile_source(source, scan, parse, transform, compile_ast):
"""Compiles a source in four stages:
1. Scanning (if needed)
2. Parsing
......@@ -1024,15 +1024,14 @@ def full_compilation(source, scanner, parser, transform, compiler):
Args:
source (str): The input text for compilation or a the name of a
file containing the input text.
scanner (function): text -> text. A scanner function or None,
scan (function): text -> text. A scanner function or None,
if no scanner is needed.
parser (GrammarBase): The GrammarBase object
parse (function): A parsing function or grammar class
transform (function): A transformation function that takes
the root-node of the concrete syntax tree as an argument and
transforms it (in place) into an abstract syntax tree.
compiler (object): An instance of a class derived from
``CompilerBase`` with a suitable method for every parser
name or class.
compile_ast (function): A compiler function or compiler class
instance
Returns (tuple):
The result of the compilation as a 3-tuple
......@@ -1042,16 +1041,21 @@ def full_compilation(source, scanner, parser, transform, compiler):
2. A list of error messages
3. The root-node of the abstract syntax tree
"""
assert isinstance(compiler, CompilerBase)
source_text = load_if_file(source)
log_file_name = os.path.basename(os.path.splitext(source)[0]) if source != source_text \
else compiler.__class__.__name__ + '_out'
if scanner is not None:
source_text = scanner(source_text)
syntax_tree = parser.parse(source_text)
syntax_tree.log(log_file_name, ext='.cst')
parser.log_parsing_history(log_file_name)
log_file_name = logfile_basename(source, compile_ast)
if scan is not None:
source_text = scan(source_text)
syntax_tree = parse(source_text)
if is_logging():
syntax_tree.log(log_file_name, ext='.cst')
try:
parse.log_parsing_history(log_file_name)
except AttributeError:
# this is a hack in case a parse function or method was
# passed instead of a grammar class instance
for nd in syntax_tree.find(lambda nd: bool(nd.parser)):
nd.parser.grammar.log_parsing_history(log_file_name)
break
assert syntax_tree.error_flag or str(syntax_tree) == source_text, str(syntax_tree)
# only compile if there were no syntax errors, for otherwise it is
......@@ -1061,10 +1065,10 @@ def full_compilation(source, scanner, parser, transform, compiler):
errors = syntax_tree.collect_errors()
else:
transform(syntax_tree)
syntax_tree.log(log_file_name, ext='.ast')
if is_logging(): syntax_tree.log(log_file_name, ext='.ast')
errors = syntax_tree.collect_errors()
if not errors:
result = compiler.compile_all(syntax_tree)
result = compile_ast(syntax_tree)
errors = syntax_tree.collect_errors()
messages = error_messages(source_text, errors)
return result, messages, syntax_tree
......
......@@ -27,7 +27,7 @@ except ImportError:
import re
from typing import NamedTuple
from .toolkit import IS_LOGGING, LOGS_DIR, expand_table, line_col, smart_list
from .toolkit import is_logging, log_dir, expand_table, line_col, smart_list
__all__ = ['WHITESPACE_KEYWORD',
......@@ -339,9 +339,9 @@ class Node:
return errors
def log(self, log_file_name, ext):
if IS_LOGGING():
if is_logging():
st_file_name = log_file_name + ext
with open(os.path.join(LOGS_DIR(), st_file_name), "w", encoding="utf-8") as f:
with open(os.path.join(log_dir(), st_file_name), "w", encoding="utf-8") as f:
f.write(self.as_sexpr())
def find(self, match_function):
......
......@@ -31,6 +31,7 @@ already exists.
"""
import collections
import contextlib
import hashlib
import os
try:
......@@ -39,13 +40,13 @@ except ImportError:
import re
__all__ = ['logging_on',
'logging_off',
'IS_LOGGING',
'LOGS_DIR',
__all__ = ['logging',
'is_logging',
'log_dir',
'line_col',
'error_messages',
'escape_re',
'is_filename',
'load_if_file',
'is_python_code',
'md5',
......@@ -54,51 +55,35 @@ __all__ = ['logging_on',
'sane_parser_name']
LOGGING: str = "" # "LOGS" # LOGGING = "" turns logging off!
def log_dir() -> str:
"""Creates a directory for log files (if it does not exist) and
returns its path.
WARNING: Any files in the log dir will eventually be overwritten.
Don't use a directory name that could be the name of a directory
for other purposes than logging.
def logging_on(log_subdir="LOGS"):
"Turns logging of syntax trees and parser history on."
global LOGGING
LOGGING = log_subdir
def logging_off():
"Turns logging of syntax trees and parser history off."
global LOGGING
LOGGING = ""
def IS_LOGGING():
"""-> True, if logging is turned on."""
return bool(LOGGING)
def LOGS_DIR() -> str:
"""Returns a path of a directory where log files will be stored.
The default name of the logging directory is taken from the LOGGING
variabe (default value 'LOGS'). The directory will be created if it
does not exist. If the directory name does not contain a leading
slash '/' it will be created as a subdirectory of the current
directory. Any files in the logging directory can be overwritten!
Raises:
AssertionError if logging has been turned off
Returns:
name of the logging directory
"""
# the try-except clauses in the following are precautions for multiprocessing
global LOGGING
if not LOGGING:
raise AssertionError("Cannot use LOGS_DIR() if logging is turned off!")
dirname = LOGGING
if os.path.exists(LOGGING):
if not os.path.isdir(LOGGING):
raise IOError('"' + LOGGING + '" cannot be used as log directory, '
'because it is not a directory!')
try:
dirname = LOGGING # raises a name error if LOGGING is not defined
if not dirname:
raise NameError # raise a name error if LOGGING evaluates to False
except NameError:
raise NameError("No access to log directory before logging has been turned "
"on within the same thread/process.")
if os.path.exists(dirname) and not os.path.isdir(dirname):
raise IOError('"' + dirname + '" cannot be used as log directory, '
'because it is not a directory!')
else:
os.mkdir(LOGGING)
info_file_name = os.path.join(LOGGING, 'info.txt')
try:
os.mkdir(dirname)
except FileExistsError:
pass
info_file_name = os.path.join(dirname, 'info.txt')
if not os.path.exists(info_file_name):
with open(info_file_name, 'w') as f:
f.write("This directory has been created by DHParser to store log files from\n"
......@@ -108,6 +93,34 @@ def LOGS_DIR() -> str:
return dirname
@contextlib.contextmanager
def logging(dirname="LOGS"):
    """Context manager. Log files within this context will be stored in
    directory ``dirname``. Logging is turned off if name is empty.

    Args:
        dirname: the name for the log directory or the empty string to
            turn logging off
    """
    global LOGGING
    try:
        save = LOGGING          # remember the enclosing logging state
    except NameError:           # LOGGING has never been set in this process
        save = ""
    LOGGING = dirname
    try:
        yield
    finally:
        # restore the previous state even if the with-block raised an
        # exception; otherwise logging would stay permanently altered
        LOGGING = save
def is_logging():
    """Return True if logging is currently switched on, i.e. if the
    module-level ``LOGGING`` variable exists and holds a non-empty value."""
    return bool(globals().get('LOGGING', ''))
def line_col(text, pos):
"""Returns the position within a text as (line, column)-tuple.
"""
......@@ -155,12 +168,33 @@ def escape_re(s):
return s
def is_filename(s):
    """Heuristically guess whether string ``s`` is a file name.

    A string is taken for a file name if it is a single line without
    leading/trailing blanks and without the glob characters '*' or '?'.
    """
    if '\n' in s:
        return False
    if s[:1] == " " or s[-1:] == " ":
        return False
    return '*' not in s and '?' not in s
def logfile_basename(filename_or_text, function_or_class_or_instance):
    """Generates a reasonable logfile-name (without extension) based on
    the given information.

    Args:
        filename_or_text: either the name of the source file or the
            source text itself (distinguished heuristically).
        function_or_class_or_instance: the object processing the source;
            its qualified name (or its class name) is used as a fallback
            when no file name is available.

    Returns:
        str: the base name (no extension) to use for log files.
    """
    if is_filename(filename_or_text):
        return os.path.basename(os.path.splitext(filename_or_text)[0])
    else:
        try:
            # BUGFIX: was ``__qualname.__`` which always raised
            # AttributeError and silently fell back to the class name
            # (e.g. 'function' for plain functions).
            s = function_or_class_or_instance.__qualname__
        except AttributeError:
            s = function_or_class_or_instance.__class__.__name__
        i = s.find('.')
        return s[:i] + '_out' if i >= 0 else s
def load_if_file(text_or_file):
"""Reads and returns content of a file if parameter `text_or_file` is a
file name (i.e. a single line string), otherwise (i.e. if `text_or_file` is
a multiline string) `text_or_file` is returned.
"""
if text_or_file.find('\n') < 0:
if is_filename(text_or_file):
try:
with open(text_or_file, encoding="utf-8") as f:
content = f.read()
......@@ -179,7 +213,7 @@ def is_python_code(text_or_file):
"""Checks whether 'text_or_file' is python code or the name of a file that
contains python code.
"""
if text_or_file.find('\n') < 0:
if is_filename(text_or_file):
return text_or_file[-3:].lower() == '.py'
try:
compile(text_or_file, '<string>', 'exec')
......
......@@ -1404,7 +1404,7 @@ def full_compilation(source, grammar_base, AST_transformations, compiler):
"""
assert isinstance(compiler, CompilerBase)
syntax_tree = grammar_base.parse(source)
syntax_tree = grammar_base(source)
syntax_tree.log(grammar_base.log_file_name, ext='.cst')
grammar_base.log_parsing_history()
......
......@@ -150,7 +150,7 @@ class TestEBNFCompiler:
EBNFTransTable, EBNFCompiler('RegExTest'))
assert messages == "", messages
ebnf_line = r"""regexbad = ~/\/(?:[^\/]|(?<=\\)*\//~""" + '\n' # missing ")" should be detected
result = EBNFGrammar().parse(ebnf_line)
result = EBNFGrammar()(ebnf_line)
result, messages, syntax_tree = full_compilation(ebnf_line, EBNFGrammar(),
EBNFTransTable, EBNFCompiler('RegExTest'))
assert messages != ""
......
......@@ -26,7 +26,7 @@ from functools import partial
from DHParser.dsl import compileDSL, compile_on_disk
from DHParser.ebnf import EBNFGrammar, EBNFTransform, EBNFCompiler
from DHParser.parsers import full_compilation, nil_scanner
from DHParser.parsers import compile_source, nil_scanner
def selftest(file_name):
......@@ -36,8 +36,8 @@ def selftest(file_name):
compiler_name = os.path.basename(os.path.splitext(file_name)[0])
compiler = EBNFCompiler(compiler_name, grammar)
parser = EBNFGrammar()
result, errors, syntax_tree = full_compilation(grammar, None, parser,
EBNFTransform, compiler)
result, errors, syntax_tree = compile_source(grammar, None, parser,
EBNFTransform, compiler)
print(result)
if errors:
print('\n\n'.join(errors))
......
......@@ -203,7 +203,7 @@ MDTransTable = {
markdown_text = markdown_scanner(markdown_text)
print(markdown_text)
syntax_tree = parser.parse(markdown_text)
syntax_tree = parser(markdown_text)
ASTTransform(syntax_tree, MDTransTable)
print(syntax_tree.as_sexpr())
......
......@@ -17,7 +17,7 @@ from DHParser.toolkit import load_if_file
from DHParser.parsers import GrammarBase, CompilerBase, nil_scanner, \
Lookbehind, Lookahead, Alternative, Pop, Required, Token, \
Optional, NegativeLookbehind, OneOrMore, RegExp, Retrieve, Sequence, RE, Capture, \
ZeroOrMore, Forward, NegativeLookahead, mixin_comment, full_compilation
ZeroOrMore, Forward, NegativeLookahead, mixin_comment, compile_source
from DHParser.syntaxtree import Node, traverse, remove_enclosing_delimiters, \
remove_children_if, reduce_single_child, replace_by_single_child, remove_whitespace, \
no_operation, remove_expendables, remove_tokens, flatten, is_whitespace, is_expendable, \
......@@ -444,8 +444,8 @@ class MLWCompiler(CompilerBase):
def compile_MLW(source):
"""Compiles ``source`` and returns (result, errors, ast).
"""
return full_compilation(source, MLWScanner,
MLWGrammar(), MLWTransform, MLWCompiler())
return compile_source(source, MLWScanner,
MLWGrammar(), MLWTransform, MLWCompiler())
if __name__ == "__main__":
if len(sys.argv) > 1:
......
......@@ -41,9 +41,8 @@ if (not os.path.exists(MLW_compiler) or
print('\n'.join(errors))
sys.exit(1)
toolkit.logging_on()
errors = compile_on_disk("fascitergula.mlw", MLW_compiler, ".xml")
with toolkit.logging():
errors = compile_on_disk("fascitergula.mlw", MLW_compiler, ".xml")
if errors:
print('\n'.join(errors))
sys.exit(1)
......@@ -24,7 +24,7 @@ from functools import partial
import os
import sys
sys.path.append(os.path.abspath('../../'))
from DHParser.parsers import full_compilation, Retrieve, WHITESPACE_KEYWORD, nil_scanner
from DHParser.parsers import compile_source, Retrieve, WHITESPACE_KEYWORD, nil_scanner
from DHParser.ebnf import EBNFGrammar, EBNFTransform, EBNFCompiler
from DHParser.dsl import compileEBNF, compileDSL
......@@ -46,42 +46,42 @@ class TestDirectives:
MinilangParser = compileEBNF(lang)
parser = MinilangParser()
assert parser
syntax_tree = parser.parse("3 + 4 * 12")
syntax_tree = parser("3 + 4 * 12")
# parser.log_parsing_history("WSP")
assert not syntax_tree.collect_errors()
syntax_tree = parser.parse("3 + 4 \n * 12")
syntax_tree = parser("3 + 4 \n * 12")
# parser.log_parsing_history("WSPLF")
assert not syntax_tree.collect_errors()
syntax_tree = parser.parse("3 + 4 \n \n * 12")
syntax_tree = parser("3 + 4 \n \n * 12")
assert syntax_tree.collect_errors()
syntax_tree = parser.parse("3 + 4 \n\n * 12")
syntax_tree = parser("3 + 4 \n\n * 12")
assert syntax_tree.collect_errors()
def test_whitespace_vertical(self):
lang = "@ whitespace = vertical\n" + self.mini_language
parser = compileEBNF(lang)()
assert parser
syntax_tree = parser.parse("3 + 4 * 12")
syntax_tree = parser("3 + 4 * 12")
assert not syntax_tree.collect_errors()
syntax_tree = parser.parse("3 + 4 \n * 12")
syntax_tree = parser("3 + 4 \n * 12")
assert not syntax_tree.collect_errors()
syntax_tree = parser.parse("3 + 4 \n \n * 12")
syntax_tree = parser("3 + 4 \n \n * 12")
assert not syntax_tree.collect_errors()
syntax_tree = parser.parse("3 + 4 \n\n * 12")
syntax_tree = parser("3 + 4 \n\n * 12")
assert not syntax_tree.collect_errors()
def test_whitespace_horizontal(self):
lang = "@ whitespace = horizontal\n" + self.mini_language
parser = compileEBNF(lang)()
assert parser
syntax_tree = parser.parse("3 + 4 * 12")
syntax_tree = parser("3 + 4 * 12")
assert not syntax_tree.collect_errors()
syntax_tree = parser.parse("3 + 4 \n * 12")
syntax_tree = parser("3 + 4 \n * 12")
assert syntax_tree.collect_errors()
class TestEBNFParser:
test_json = {
cases = {
"list_": {
"match": {
1: "hund",
......@@ -102,12 +102,12 @@ class TestEBNFParser:
def test_literal(self):
snippet = '"literal" '
result = self.EBNF.parse(snippet, 'literal')
result = self.EBNF(snippet, 'literal')
assert not result.error_flag
assert str(result) == snippet
assert result.find(lambda node: str(node) == WHITESPACE_KEYWORD)
result = self.EBNF.parse(' "literal"', 'literal')
result = self.EBNF(' "literal"', 'literal')
assert result.error_flag # literals catch following, but not leading whitespace
......@@ -149,7 +149,7 @@ class TestPopRetrieve:
def test_single_line(self):
teststr = "Anfang ```code block `` <- keine Ende-Zeichen ! ``` Ende"
syntax_tree = self.minilang_parser.parse(teststr)
syntax_tree = self.minilang_parser(teststr)
assert not syntax_tree.collect_errors()
delim = str(next(syntax_tree.find(partial(self.opening_delimiter, name="delimiter"))))
pop = str(next(syntax_tree.find(self.closing_delimiter)))
......@@ -166,7 +166,7 @@ class TestPopRetrieve:
Mehrzeliger ```code block
"""
syntax_tree = self.minilang_parser.parse(teststr)
syntax_tree = self.minilang_parser(teststr)
assert not syntax_tree.collect_errors()
delim = str(next(syntax_tree.find(partial(self.opening_delimiter, name="delimiter"))))
pop = str(next(syntax_tree.find(self.closing_delimiter)))
......@@ -176,7 +176,7 @@ class TestPopRetrieve:
def test_single_line_complement(self):
teststr = "Anfang {{{code block }} <- keine Ende-Zeichen ! }}} Ende"
syntax_tree = self.minilang_parser2.parse(teststr)
syntax_tree = self.minilang_parser2(teststr)
assert not syntax_tree.collect_errors()
delim = str(next(syntax_tree.find(partial(self.opening_delimiter, name="braces"))))
pop = str(next(syntax_tree.find(self.closing_delimiter)))
......@@ -193,7 +193,7 @@ class TestPopRetrieve:
Mehrzeliger }}}code block
"""
syntax_tree = self.minilang_parser2.parse(teststr)
syntax_tree = self.minilang_parser2(teststr)
assert not syntax_tree.collect_errors()
delim = str(next(syntax_tree.find(partial(self.opening_delimiter, name="braces"))))
pop = str(next(syntax_tree.find(self.closing_delimiter)))
......@@ -205,7 +205,7 @@ class TestPopRetrieve:
class TestSemanticValidation:
def check(self, minilang, bool_filter=lambda x: x):
grammar = EBNFGrammar()
st = grammar.parse(minilang)
st = grammar(minilang)
assert not st.collect_errors()
EBNFTransform(st)
assert bool_filter(st.collect_errors())
......@@ -226,8 +226,8 @@ class TestSemanticValidation:
class TestCompilerErrors:
def test_error_propagation(self):