
Commit 23a525de authored by Eckhart Arnold

Refactoring of Error system mostly finished

parent 203495aa
@@ -35,7 +35,7 @@ from DHParser.ebnf import EBNFCompiler, grammar_changed, \
     PreprocessorFactoryFunc, ParserFactoryFunc, TransformerFactoryFunc, CompilerFactoryFunc
 from DHParser.toolkit import logging, load_if_file, is_python_code, compile_python_object
 from DHParser.parser import Grammar, Compiler, compile_source, nil_preprocessor, PreprocessorFunc
-from DHParser.syntaxtree import Node, TransformationFunc
+from DHParser.syntaxtree import Error, is_error, has_errors, Node, TransformationFunc

 __all__ = ('GrammarError',
            'CompilationError',
@@ -147,6 +147,14 @@ class CompilationError(Exception):
         return '\n'.join(self.error_messages)

+
+def error_str(messages: Iterable[Error]) -> str:
+    """
+    Returns all true errors (i.e. not just warnings) from the
+    `messages` as a concatenated multiline string.
+    """
+    return '\n\n'.join(str(m) for m in messages if is_error(m.level))
+
 def grammar_instance(grammar_representation) -> Tuple[Grammar, str]:
     """
     Returns a grammar object and the source code of the grammar, from
@@ -158,13 +166,13 @@ def grammar_instance(grammar_representation) -> Tuple[Grammar, str]:
         # read grammar
         grammar_src = load_if_file(grammar_representation)
         if is_python_code(grammar_src):
-            parser_py, errors, AST = grammar_src, '', None
+            parser_py, messages, AST = grammar_src, [], None
         else:
             with logging(False):
-                parser_py, errors, AST = compile_source(grammar_src, None,
+                parser_py, messages, AST = compile_source(grammar_src, None,
                     get_ebnf_grammar(), get_ebnf_transformer(), get_ebnf_compiler())
-        if errors:
-            raise GrammarError('\n\n'.join(errors), grammar_src)
+        if has_errors(messages):
+            raise GrammarError(error_str(messages), grammar_src)
         parser_root = compile_python_object(DHPARSER_IMPORTS + parser_py, '\w+Grammar$')()
     else:
         # assume that dsl_grammar is a ParserHQ-object or Grammar class
@@ -194,11 +202,11 @@ def compileDSL(text_or_file: str,
     assert isinstance(compiler, Compiler)
     parser, grammar_src = grammar_instance(dsl_grammar)
-    result, errors, AST = compile_source(text_or_file, preprocessor, parser,
+    result, messages, AST = compile_source(text_or_file, preprocessor, parser,
                                          ast_transformation, compiler)
-    if errors:
+    if has_errors(messages):
         src = load_if_file(text_or_file)
-        raise CompilationError(errors, src, grammar_src, AST, result)
+        raise CompilationError(error_str(messages), src, grammar_src, AST, result)
     return result
@@ -298,10 +306,10 @@ def load_compiler_suite(compiler_suite: str) -> \
     else:
         # assume source is an ebnf grammar. Is there really any reasonable application case for this?
         with logging(False):
-            compile_py, errors, AST = compile_source(source, None,
+            compile_py, messages, AST = compile_source(source, None,
                 get_ebnf_grammar(), get_ebnf_transformer(), get_ebnf_compiler())
-        if errors:
-            raise GrammarError('\n\n'.join(errors), source)
+        if has_errors(messages):
+            raise GrammarError(error_str(messages), source)
         preprocessor = get_ebnf_preprocessor
         parser = get_ebnf_grammar
         ast = get_ebnf_transformer
@@ -388,8 +396,7 @@ def compile_on_disk(source_file: str, compiler_suite="", extension=".xml"):
            extension.
     Returns:
-        A list of error messages or an empty list if there were no
-        errors.
+        A (potentially empty) list of error or warning messages.
     """
     filepath = os.path.normpath(source_file)
     # with open(source_file, encoding="utf-8") as f:
@@ -405,9 +412,9 @@ def compile_on_disk(source_file: str, compiler_suite="", extension=".xml"):
         cfactory = get_ebnf_compiler
     compiler1 = cfactory()
     compiler1.set_grammar_name(compiler_name, source_file)
-    result, errors, ast = compile_source(source_file, sfactory(), pfactory(), tfactory(), compiler1)
-    if errors:
-        return errors
+    result, messages, ast = compile_source(source_file, sfactory(), pfactory(), tfactory(), compiler1)
+    if has_errors(messages):
+        return messages
     elif cfactory == get_ebnf_compiler:  # trans == get_ebnf_transformer or trans == EBNFTransformer: # either an EBNF- or no compiler suite given
         ebnf_compiler = cast(EBNFCompiler, compiler1)
@@ -484,7 +491,7 @@ def compile_on_disk(source_file: str, compiler_suite="", extension=".xml"):
     finally:
         if f: f.close()
-    return []
+    return messages

 def recompile_grammar(ebnf_filename, force=False) -> bool:
@@ -511,19 +518,20 @@ def recompile_grammar(ebnf_filename, force=False) -> bool:
     base, ext = os.path.splitext(ebnf_filename)
     compiler_name = base + 'Compiler.py'
     error_file_name = base + '_ebnf_ERRORS.txt'
-    errors = []  # type: Iterable[str]
+    messages = []  # type: Iterable[str]
     if (not os.path.exists(compiler_name) or force or
             grammar_changed(compiler_name, ebnf_filename)):
         # print("recompiling parser for: " + ebnf_filename)
-        errors = compile_on_disk(ebnf_filename)
-        if errors:
+        messages = compile_on_disk(ebnf_filename)
+        if messages:
             # print("Errors while compiling: " + ebnf_filename + '!')
             with open(error_file_name, 'w') as f:
-                for e in errors:
+                for e in messages:
                     f.write(e)
                     f.write('\n')
-            return False
+            if has_errors(messages):
+                return False
-    if not errors and os.path.exists(error_file_name):
+    if not messages and os.path.exists(error_file_name):
         os.remove(error_file_name)
     return True
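
With the changes above, compile_on_disk() and recompile_grammar() hand back message objects instead of ready-made strings. A minimal consumption sketch follows; it assumes error_str is importable from the same module as compile_on_disk, and the grammar file name is only a placeholder:

    from DHParser.dsl import compile_on_disk, error_str
    from DHParser.syntaxtree import has_errors, is_warning

    messages = compile_on_disk('Arithmetic.ebnf')    # placeholder file name
    if has_errors(messages):
        # at least one true error: abort; error_str() filters out mere warnings
        raise RuntimeError(error_str(messages))
    for m in messages:                               # anything left over is a warning
        assert is_warning(m.level)
        print(m)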
@@ -76,7 +76,7 @@ except ImportError:
 from DHParser.toolkit import is_logging, log_dir, logfile_basename, escape_re, sane_parser_name
 from DHParser.syntaxtree import WHITESPACE_PTYPE, TOKEN_PTYPE, ZOMBIE_PARSER, ParserBase, \
-    Error, is_error, Node, TransformationFunc
+    Error, is_error, has_errors, Node, TransformationFunc
 from DHParser.toolkit import StringView, EMPTY_STRING_VIEW, sv_match, sv_index, sv_search, \
     load_if_file, error_messages, line_col
@@ -161,15 +161,17 @@ class HistoryRecord:
         self.call_stack = [p for p in call_stack if p.ptype != ":Forward"] # type: List['Parser']
         self.node = node # type: Node
         self.remaining = remaining # type: int
-        document = call_stack[-1].grammar.document__.text if call_stack else ''
-        self.line_col = line_col(document, len(document) - remaining) # type: Tuple[int, int]
+        self.line_col = (1, 1) # type: Tuple[int, int]
+        if call_stack:
+            document = call_stack[-1].grammar.document__.text
+            self.line_col = line_col(document, len(document) - remaining)

     def __str__(self):
         return 'line %i, column %i: %s "%s"' % \
             (self.line_col[0], self.line_col[1], self.stack, str(self.node))

     def err_msg(self) -> str:
-        return self.ERROR + ": " + "; ".join(self.node._errors).replace('\n', '\\')
+        return self.ERROR + ": " + "; ".join(str(e) for e in self.node._errors).replace('\n', '\\')

     @property
     def stack(self) -> str:
@@ -179,7 +181,7 @@ class HistoryRecord:
     @property
     def status(self) -> str:
         return self.FAIL if self.node is None else \
-            self.err_msg() if self.node._errors else self.MATCH
+            self.err_msg() if has_errors(self.node._errors) else self.MATCH

     @property
     def extent(self) -> slice:
@@ -1933,7 +1935,7 @@ def compile_source(source: str,
         (result, errors, abstract syntax tree). In detail:
         1. The result as returned by the compiler or ``None`` in case
            of failure,
-        2. A list of error messages
+        2. A list of error or warning messages
         3. The root-node of the abstract syntax treelow
     """
     source_text = load_if_file(source)
@@ -1950,12 +1952,11 @@ def compile_source(source: str,
     # likely that error list gets littered with compile error messages
     result = None
     if is_error(syntax_tree.error_flag):
-        errors = syntax_tree.collect_errors()
+        messages = syntax_tree.collect_errors()
     else:
         transformer(syntax_tree)
         if is_logging(): syntax_tree.log(log_file_name + '.ast')
         if not is_error(syntax_tree.error_flag):
             result = compiler(syntax_tree)
-        errors = syntax_tree.collect_errors()
-    messages = error_messages(source_text, errors)
+        messages = syntax_tree.collect_errors()
     return result, messages, syntax_tree
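
After this hunk, compile_source() returns the collected Error objects themselves rather than pre-formatted message strings. A rough sketch of the call pattern, mirroring the calls visible elsewhere in this diff; the import location of the get_ebnf_* factories is an assumption and the file name is a placeholder:

    from DHParser.parser import compile_source
    from DHParser.ebnf import get_ebnf_grammar, get_ebnf_transformer, get_ebnf_compiler
    from DHParser.syntaxtree import has_errors

    result, messages, ast = compile_source('Arithmetic.ebnf', None, get_ebnf_grammar(),
                                           get_ebnf_transformer(), get_ebnf_compiler())
    if has_errors(messages):
        print('\n\n'.join(str(m) for m in messages))   # each Error carries its own level
    else:
        print(result)                                   # the generated parser module code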
@@ -27,10 +27,10 @@ except ImportError:
 import re
 try:
     from typing import AbstractSet, Any, ByteString, Callable, cast, Container, Dict, \
-        Iterator, List, NamedTuple, Sequence, Union, Text, Tuple
+        Iterator, Iterable, List, NamedTuple, Sequence, Union, Text, Tuple
 except ImportError:
     from .typing34 import AbstractSet, Any, ByteString, Callable, cast, Container, Dict, \
-        Iterator, List, NamedTuple, Sequence, Union, Text, Tuple
+        Iterator, Iterable, List, NamedTuple, Sequence, Union, Text, Tuple
 from DHParser.toolkit import is_logging, log_dir, StringView, linebreaks, line_col, identity
@@ -42,6 +42,7 @@ __all__ = ('WHITESPACE_PTYPE',
            'Error',
            'is_warning',
            'is_error',
+           'has_errors',
            'Node',
            'mock_syntax_tree',
            'TransformationFunc')
@@ -148,7 +149,7 @@ class Error:
     @staticmethod
     def from_template(template: str, level: int=ERROR, content: Union[tuple, dict]=()):
         if isinstance(content, tuple):
-            return Error(template % content, level, template)
+            return Error((template % content) if content else template, level, template)
         else:
             return Error(template.format(**content), level, template)
@@ -157,14 +158,26 @@ class Error:
         return "warning" if is_warning(self.level) else "error"

-def is_warning(level):
+def is_warning(level: int) -> bool:
     return level < Error.ERROR

-def is_error(level):
+def is_error(level: int) -> bool:
     return level >= Error.ERROR

+
+def has_errors(messages: Iterable[Error], level: int=Error.ERROR) -> bool:
+    """
+    Returns True, if at least one entry in `messages` has at
+    least the given error `level`.
+    """
+    for err_obj in messages:
+        if err_obj.level >= level:
+            return True
+    return False
+
 ChildrenType = Tuple['Node', ...]
 StrictResultType = Union[ChildrenType, StringView, str]
 ResultType = Union[ChildrenType, 'Node', StringView, str, None]
...
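The new helpers draw the line between warnings and true errors at Error.ERROR. A small illustration of the intended semantics; the levels are derived from Error.ERROR only, no concrete numeric values are assumed:

    from DHParser.syntaxtree import Error, is_warning, is_error, has_errors

    warning = Error.from_template('deprecated notation', level=Error.ERROR - 1)
    failure = Error.from_template('unexpected symbol', level=Error.ERROR)

    assert is_warning(warning.level) and not is_error(warning.level)
    assert is_error(failure.level)

    assert not has_errors([warning])                   # warnings alone do not count
    assert has_errors([warning, failure])              # one message reaches Error.ERROR
    assert has_errors([warning], level=warning.level)  # unless a lower threshold is passed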
@@ -28,7 +28,7 @@ except ImportError:
 import re
 from DHParser.toolkit import is_logging, clear_logs, error_messages
-from DHParser.syntaxtree import mock_syntax_tree, flatten_sxpr
+from DHParser.syntaxtree import is_error, mock_syntax_tree, flatten_sxpr

 __all__ = ('unit_from_configfile',
            'unit_from_json',
@@ -159,7 +159,7 @@ def grammar_unit(test_unit, parser_factory, transformer_factory, report=True, ve
                 transform(ast)
                 tests.setdefault('__ast__', {})[test_name] = ast
                 ast.log("match_%s_%s.ast" % (parser_name, test_name))
-            if cst.error_flag:
+            if is_error(cst.error_flag):
                 errata.append('Match test "%s" for parser "%s" failed:\n\tExpr.: %s\n\n\t%s\n\n' %
                               (test_name, parser_name, '\n\t'.join(test_code.split('\n')),
                                '\n\t'.join(m.replace('\n', '\n\t\t') for m in
@@ -189,7 +189,7 @@ def grammar_unit(test_unit, parser_factory, transformer_factory, report=True, ve
             infostr = ' fail-test "' + test_name + '" ... '
             errflag = len(errata)
             cst = parser(test_code, parser_name)
-            if not cst.error_flag:
+            if not is_error(cst.error_flag):
                 errata.append('Fail test "%s" for parser "%s" yields match instead of '
                               'expected failure!' % (test_name, parser_name))
                 tests.setdefault('__err__', {})[test_name] = errata[-1]
...
@@ -186,7 +186,7 @@ class LaTeXGrammar(Grammar):
     #######################################################################
     #
-    # Primitives
+    # primitives
     #
     #######################################################################
@@ -223,7 +223,7 @@ class LaTeXGrammar(Grammar):
     paragraph = Forward()
     tabular_config = Forward()
     text_element = Forward()
-    source_hash__ = "57dd004091e87ff603b51f0a47857cf4"
+    source_hash__ = "939c094e994677d2ab894169c013cf58"
     parser_initialization__ = "upon instantiation"
     COMMENT__ = r'%.*'
     WHITESPACE__ = r'[ \t]*(?:\n(?![ \t]*\n)[ \t]*)?'
...
@@ -25,6 +25,7 @@ import sys
 sys.path.extend(['../', './'])

 from DHParser.parser import Grammar, Compiler
+from DHParser.syntaxtree import is_error
 from DHParser.dsl import compile_on_disk, run_compiler, compileEBNF, grammar_provider, \
     load_compiler_suite
@@ -51,7 +52,7 @@ class TestCompileFunctions:
         result = parser("5 + 3 * 4")
         assert not result.error_flag
         result = parser("5A + 4B ** 4C")
-        assert result.error_flag
+        assert is_error(result.error_flag)

 class TestCompilerGeneration:
...