Currently job artifacts in CI/CD pipelines on LRZ GitLab never expire. Starting from Wed 26.1.2022 the default expiration time will be 30 days (GitLab default). Currently existing artifacts in already completed jobs will not be affected by the change. The latest artifacts for all jobs in the latest successful pipelines will be kept. More information: https://gitlab.lrz.de/help/user/admin_area/settings/continuous_integration.html#default-artifacts-expiration

Commit 23a525de authored by Eckhart Arnold
Browse files

Refactoring of the Error system is mostly finished

parent 203495aa
......@@ -35,7 +35,7 @@ from DHParser.ebnf import EBNFCompiler, grammar_changed, \
PreprocessorFactoryFunc, ParserFactoryFunc, TransformerFactoryFunc, CompilerFactoryFunc
from DHParser.toolkit import logging, load_if_file, is_python_code, compile_python_object
from DHParser.parser import Grammar, Compiler, compile_source, nil_preprocessor, PreprocessorFunc
from DHParser.syntaxtree import Node, TransformationFunc
from DHParser.syntaxtree import Error, is_error, has_errors, Node, TransformationFunc
__all__ = ('GrammarError',
'CompilationError',
......@@ -147,6 +147,14 @@ class CompilationError(Exception):
return '\n'.join(self.error_messages)
def error_str(messages: Iterable[Error]) -> str:
    """
    Concatenate all true errors (i.e. not mere warnings) from
    `messages` into a single multiline string.
    """
    # Warnings are filtered out; only entries at error level are kept.
    true_errors = (str(msg) for msg in messages if is_error(msg.level))
    return '\n\n'.join(true_errors)
def grammar_instance(grammar_representation) -> Tuple[Grammar, str]:
"""
Returns a grammar object and the source code of the grammar, from
......@@ -158,13 +166,13 @@ def grammar_instance(grammar_representation) -> Tuple[Grammar, str]:
# read grammar
grammar_src = load_if_file(grammar_representation)
if is_python_code(grammar_src):
parser_py, errors, AST = grammar_src, '', None
parser_py, messages, AST = grammar_src, [], None
else:
with logging(False):
parser_py, errors, AST = compile_source(grammar_src, None,
parser_py, messages, AST = compile_source(grammar_src, None,
get_ebnf_grammar(), get_ebnf_transformer(), get_ebnf_compiler())
if errors:
raise GrammarError('\n\n'.join(errors), grammar_src)
if has_errors(messages):
raise GrammarError(error_str(messages), grammar_src)
parser_root = compile_python_object(DHPARSER_IMPORTS + parser_py, '\w+Grammar$')()
else:
# assume that dsl_grammar is a ParserHQ-object or Grammar class
......@@ -194,11 +202,11 @@ def compileDSL(text_or_file: str,
assert isinstance(compiler, Compiler)
parser, grammar_src = grammar_instance(dsl_grammar)
result, errors, AST = compile_source(text_or_file, preprocessor, parser,
result, messages, AST = compile_source(text_or_file, preprocessor, parser,
ast_transformation, compiler)
if errors:
if has_errors(messages):
src = load_if_file(text_or_file)
raise CompilationError(errors, src, grammar_src, AST, result)
raise CompilationError(error_str(messages), src, grammar_src, AST, result)
return result
......@@ -298,10 +306,10 @@ def load_compiler_suite(compiler_suite: str) -> \
else:
# assume source is an ebnf grammar. Is there really any reasonable application case for this?
with logging(False):
compile_py, errors, AST = compile_source(source, None,
compile_py, messages, AST = compile_source(source, None,
get_ebnf_grammar(), get_ebnf_transformer(), get_ebnf_compiler())
if errors:
raise GrammarError('\n\n'.join(errors), source)
if has_errors(messages):
raise GrammarError(error_str(messages), source)
preprocessor = get_ebnf_preprocessor
parser = get_ebnf_grammar
ast = get_ebnf_transformer
......@@ -388,8 +396,7 @@ def compile_on_disk(source_file: str, compiler_suite="", extension=".xml"):
extension.
Returns:
A list of error messages or an empty list if there were no
errors.
A (potentially empty) list of error or warning messages.
"""
filepath = os.path.normpath(source_file)
# with open(source_file, encoding="utf-8") as f:
......@@ -405,9 +412,9 @@ def compile_on_disk(source_file: str, compiler_suite="", extension=".xml"):
cfactory = get_ebnf_compiler
compiler1 = cfactory()
compiler1.set_grammar_name(compiler_name, source_file)
result, errors, ast = compile_source(source_file, sfactory(), pfactory(), tfactory(), compiler1)
if errors:
return errors
result, messages, ast = compile_source(source_file, sfactory(), pfactory(), tfactory(), compiler1)
if has_errors(messages):
return messages
elif cfactory == get_ebnf_compiler: # trans == get_ebnf_transformer or trans == EBNFTransformer: # either an EBNF- or no compiler suite given
ebnf_compiler = cast(EBNFCompiler, compiler1)
......@@ -484,7 +491,7 @@ def compile_on_disk(source_file: str, compiler_suite="", extension=".xml"):
finally:
if f: f.close()
return []
return messages
def recompile_grammar(ebnf_filename, force=False) -> bool:
......@@ -511,19 +518,20 @@ def recompile_grammar(ebnf_filename, force=False) -> bool:
base, ext = os.path.splitext(ebnf_filename)
compiler_name = base + 'Compiler.py'
error_file_name = base + '_ebnf_ERRORS.txt'
errors = [] # type: Iterable[str]
messages = [] # type: Iterable[str]
if (not os.path.exists(compiler_name) or force or
grammar_changed(compiler_name, ebnf_filename)):
# print("recompiling parser for: " + ebnf_filename)
errors = compile_on_disk(ebnf_filename)
if errors:
messages = compile_on_disk(ebnf_filename)
if messages:
# print("Errors while compiling: " + ebnf_filename + '!')
with open(error_file_name, 'w') as f:
for e in errors:
for e in messages:
f.write(e)
f.write('\n')
if has_errors(messages):
return False
if not errors and os.path.exists(error_file_name):
if not messages and os.path.exists(error_file_name):
os.remove(error_file_name)
return True
......@@ -76,7 +76,7 @@ except ImportError:
from DHParser.toolkit import is_logging, log_dir, logfile_basename, escape_re, sane_parser_name
from DHParser.syntaxtree import WHITESPACE_PTYPE, TOKEN_PTYPE, ZOMBIE_PARSER, ParserBase, \
Error, is_error, Node, TransformationFunc
Error, is_error, has_errors, Node, TransformationFunc
from DHParser.toolkit import StringView, EMPTY_STRING_VIEW, sv_match, sv_index, sv_search, \
load_if_file, error_messages, line_col
......@@ -161,15 +161,17 @@ class HistoryRecord:
self.call_stack = [p for p in call_stack if p.ptype != ":Forward"] # type: List['Parser']
self.node = node # type: Node
self.remaining = remaining # type: int
document = call_stack[-1].grammar.document__.text if call_stack else ''
self.line_col = line_col(document, len(document) - remaining) # type: Tuple[int, int]
self.line_col = (1, 1) # type: Tuple[int, int]
if call_stack:
document = call_stack[-1].grammar.document__.text
self.line_col = line_col(document, len(document) - remaining)
def __str__(self):
return 'line %i, column %i: %s "%s"' % \
(self.line_col[0], self.line_col[1], self.stack, str(self.node))
def err_msg(self) -> str:
return self.ERROR + ": " + "; ".join(self.node._errors).replace('\n', '\\')
return self.ERROR + ": " + "; ".join(str(e) for e in self.node._errors).replace('\n', '\\')
@property
def stack(self) -> str:
......@@ -179,7 +181,7 @@ class HistoryRecord:
@property
def status(self) -> str:
return self.FAIL if self.node is None else \
self.err_msg() if self.node._errors else self.MATCH
self.err_msg() if has_errors(self.node._errors) else self.MATCH
@property
def extent(self) -> slice:
......@@ -1933,7 +1935,7 @@ def compile_source(source: str,
(result, errors, abstract syntax tree). In detail:
1. The result as returned by the compiler or ``None`` in case
of failure,
2. A list of error messages
2. A list of error or warning messages
3. The root-node of the abstract syntax tree
"""
source_text = load_if_file(source)
......@@ -1950,12 +1952,11 @@ def compile_source(source: str,
# likely that error list gets littered with compile error messages
result = None
if is_error(syntax_tree.error_flag):
errors = syntax_tree.collect_errors()
messages = syntax_tree.collect_errors()
else:
transformer(syntax_tree)
if is_logging(): syntax_tree.log(log_file_name + '.ast')
if not is_error(syntax_tree.error_flag):
result = compiler(syntax_tree)
errors = syntax_tree.collect_errors()
messages = error_messages(source_text, errors)
messages = syntax_tree.collect_errors()
return result, messages, syntax_tree
......@@ -27,10 +27,10 @@ except ImportError:
import re
try:
from typing import AbstractSet, Any, ByteString, Callable, cast, Container, Dict, \
Iterator, List, NamedTuple, Sequence, Union, Text, Tuple
Iterator, Iterable, List, NamedTuple, Sequence, Union, Text, Tuple
except ImportError:
from .typing34 import AbstractSet, Any, ByteString, Callable, cast, Container, Dict, \
Iterator, List, NamedTuple, Sequence, Union, Text, Tuple
Iterator, Iterable, List, NamedTuple, Sequence, Union, Text, Tuple
from DHParser.toolkit import is_logging, log_dir, StringView, linebreaks, line_col, identity
......@@ -42,6 +42,7 @@ __all__ = ('WHITESPACE_PTYPE',
'Error',
'is_warning',
'is_error',
'has_errors',
'Node',
'mock_syntax_tree',
'TransformationFunc')
......@@ -148,7 +149,7 @@ class Error:
@staticmethod
def from_template(template: str, level: int=ERROR, content: Union[tuple, dict]=()):
if isinstance(content, tuple):
return Error(template % content, level, template)
return Error((template % content) if content else template, level, template)
else:
return Error(template.format(**content), level, template)
......@@ -157,14 +158,26 @@ class Error:
return "warning" if is_warning(self.level) else "error"
def is_warning(level):
def is_warning(level: int) -> bool:
    """Return True if `level` lies below the error threshold, i.e. is a mere warning."""
    return Error.ERROR > level
def is_error(level):
def is_error(level: int) -> bool:
    """Return True if `level` reaches or exceeds the error threshold."""
    return Error.ERROR <= level
def has_errors(messages: Iterable[Error], level: int=Error.ERROR) -> bool:
    """
    Check whether at least one entry in `messages` reaches the
    given error `level`. Returns True on the first such entry.
    """
    # any() short-circuits, matching the original early-return loop.
    return any(msg.level >= level for msg in messages)
ChildrenType = Tuple['Node', ...]
StrictResultType = Union[ChildrenType, StringView, str]
ResultType = Union[ChildrenType, 'Node', StringView, str, None]
......
......@@ -28,7 +28,7 @@ except ImportError:
import re
from DHParser.toolkit import is_logging, clear_logs, error_messages
from DHParser.syntaxtree import mock_syntax_tree, flatten_sxpr
from DHParser.syntaxtree import is_error, mock_syntax_tree, flatten_sxpr
__all__ = ('unit_from_configfile',
'unit_from_json',
......@@ -159,7 +159,7 @@ def grammar_unit(test_unit, parser_factory, transformer_factory, report=True, ve
transform(ast)
tests.setdefault('__ast__', {})[test_name] = ast
ast.log("match_%s_%s.ast" % (parser_name, test_name))
if cst.error_flag:
if is_error(cst.error_flag):
errata.append('Match test "%s" for parser "%s" failed:\n\tExpr.: %s\n\n\t%s\n\n' %
(test_name, parser_name, '\n\t'.join(test_code.split('\n')),
'\n\t'.join(m.replace('\n', '\n\t\t') for m in
......@@ -189,7 +189,7 @@ def grammar_unit(test_unit, parser_factory, transformer_factory, report=True, ve
infostr = ' fail-test "' + test_name + '" ... '
errflag = len(errata)
cst = parser(test_code, parser_name)
if not cst.error_flag:
if not is_error(cst.error_flag):
errata.append('Fail test "%s" for parser "%s" yields match instead of '
'expected failure!' % (test_name, parser_name))
tests.setdefault('__err__', {})[test_name] = errata[-1]
......
......@@ -186,7 +186,7 @@ class LaTeXGrammar(Grammar):
#######################################################################
#
# Primitives
# primitives
#
#######################################################################
......@@ -223,7 +223,7 @@ class LaTeXGrammar(Grammar):
paragraph = Forward()
tabular_config = Forward()
text_element = Forward()
source_hash__ = "57dd004091e87ff603b51f0a47857cf4"
source_hash__ = "939c094e994677d2ab894169c013cf58"
parser_initialization__ = "upon instantiation"
COMMENT__ = r'%.*'
WHITESPACE__ = r'[ \t]*(?:\n(?![ \t]*\n)[ \t]*)?'
......
......@@ -25,6 +25,7 @@ import sys
sys.path.extend(['../', './'])
from DHParser.parser import Grammar, Compiler
from DHParser.syntaxtree import is_error
from DHParser.dsl import compile_on_disk, run_compiler, compileEBNF, grammar_provider, \
load_compiler_suite
......@@ -51,7 +52,7 @@ class TestCompileFunctions:
result = parser("5 + 3 * 4")
assert not result.error_flag
result = parser("5A + 4B ** 4C")
assert result.error_flag
assert is_error(result.error_flag)
class TestCompilerGeneration:
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment