Commit 89b64a8c authored by Eckhart Arnold

- ebnf.py: EBNFCompiler refactored to use Series' mandatory marker instead of the Required operator

parent bae74d44
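
The gist of the refactoring in terms of the generated parser code: where the mandatory tail of a series was previously wrapped in the Required operator, the Series parser now receives the index of its first mandatory item directly. A minimal before/after sketch, assuming the Series and RegExp classes from DHParser.parser; the rule names symbol and expression are made up for illustration and do not come from this commit:

from DHParser.parser import Series, RegExp

symbol = RegExp(r'\w+')        # stand-in parsers, purely illustrative
expression = RegExp(r'[^\n]+')

# old style (before this commit), roughly:
#     definition = Series(symbol, Required(Series(RegExp(r'='), expression)))

# new style: the Series itself knows from which item on matching is mandatory.
# Here everything from index 1 (the '=') onward must follow once symbol has matched;
# a missing item is then reported as a MANDATORY_CONTINUATION error instead of a
# silent non-match of the whole series.
definition = Series(symbol, RegExp(r'='), expression, mandatory=1)
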
@@ -213,7 +213,7 @@ def compileDSL(text_or_file: str,
parser, grammar_src = grammar_instance(dsl_grammar)
result, messages, AST = compile_source(text_or_file, preprocessor, parser,
ast_transformation, compiler)
ast_transformation, compiler)
if has_errors(messages):
src = load_if_file(text_or_file)
raise CompilationError(only_errors(messages), src, grammar_src, AST, result)
@@ -316,8 +316,8 @@ def load_compiler_suite(compiler_suite: str) -> \
else:
# assume source is an ebnf grammar. Is there really any reasonable application case for this?
with logging(False):
compile_py, messages, AST = compile_source(source, None,
get_ebnf_grammar(), get_ebnf_transformer(), get_ebnf_compiler())
compile_py, messages, AST = compile_source(source, None, get_ebnf_grammar(),
get_ebnf_transformer(), get_ebnf_compiler())
if has_errors(messages):
raise GrammarError(only_errors(messages), source)
preprocessor = get_ebnf_preprocessor
......
@@ -35,7 +35,7 @@ from DHParser.parser import Grammar, mixin_comment, nil_preprocessor, Forward, R
PreprocessorFunc
from DHParser.syntaxtree import Node, TransformationFunc, WHITESPACE_PTYPE, TOKEN_PTYPE
from DHParser.error import Error
from DHParser.transform import TransformationDict, traverse, remove_brackets, \
from DHParser.transform import traverse, remove_brackets, \
reduce_single_child, replace_by_single_child, remove_expendables, \
remove_tokens, flatten, forbid, assert_content, remove_infix_operator
from DHParser.versionnumber import __version__
@@ -619,7 +619,10 @@ class EBNFCompiler(Compiler):
# assume it's a synonym, like 'page = REGEX_PAGE_NR'
defn = 'Synonym(%s)' % defn
except TypeError as error:
errmsg = EBNFCompiler.AST_ERROR + " (" + str(error) + ")\n" + node.as_sxpr()
from traceback import extract_tb
trace = str(extract_tb(error.__traceback__)[-1])
errmsg = "%s (TypeError: %s; %s)\n%s" \
% (EBNFCompiler.AST_ERROR, str(error), trace, node.as_sxpr())
node.add_error(errmsg)
rule, defn = rule + ':error', '"' + errmsg + '"'
return rule, defn
@@ -702,8 +705,7 @@ class EBNFCompiler(Compiler):
else:
node.add_error('Unknown directive %s ! (Known ones are %s .)' %
(key,
', '.join(list(self.directives.keys()))))
(key, ', '.join(list(self.directives.keys()))))
return ""
@@ -721,7 +723,27 @@ class EBNFCompiler(Compiler):
def on_term(self, node) -> str:
return self.non_terminal(node, 'Series')
mandatory_marker = []
filtered_children = []
i = 0
for nd in node.children:
if nd.parser.ptype == TOKEN_PTYPE and str(nd) == "§":
mandatory_marker.append(i)
if i == 0:
nd.add_error('First item of a series should not be mandatory.',
code=Error.WARNING)
elif len(mandatory_marker) > 1:
nd.add_error('One mandatory marker (§) sufficient to declare the '
'rest of the series as mandatory.', code=Error.WARNING)
else:
filtered_children.append(nd)
i += 1
saved_result = node.result
node.result = tuple(filtered_children)
mandatory_marker.append(Series.NOPE)
compiled = self.non_terminal(node, 'Series', ['mandatory=%i' % mandatory_marker[0]])
node.result = saved_result
return compiled
def on_factor(self, node: Node) -> str:
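
Worked through for a concrete term, the index bookkeeping in on_term comes down to the following stand-alone sketch; plain strings are used in place of Node objects, and the term prefix §"=" suffix is a made-up example, not taken from this diff:

children = ['prefix', '§', '"="', 'suffix']   # hypothetical term: prefix §"=" suffix

mandatory_marker, filtered_children = [], []
i = 0
for child in children:
    if child == '§':
        mandatory_marker.append(i)      # position of the marker within the filtered series
    else:
        filtered_children.append(child)
        i += 1                          # only real factors advance the index

mandatory_marker.append(-1)             # -1 stands in for Series.NOPE (no marker present)
print(filtered_children, mandatory_marker[0])   # ['prefix', '"="', 'suffix'] 1

The §-nodes themselves are filtered out before non_terminal() is called, so the generated grammar only ever sees Series(..., mandatory=1).
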
@@ -841,8 +863,10 @@ class EBNFCompiler(Compiler):
try:
arg = repr(self._check_rx(node, rx[1:-1].replace(r'\/', '/')))
except AttributeError as error:
errmsg = EBNFCompiler.AST_ERROR + " (" + str(error) + ")\n" + \
node.as_sxpr()
from traceback import extract_tb
trace = str(extract_tb(error.__traceback__)[-1])
errmsg = "%s (AttributeError: %s; %s)\n%s" \
% (EBNFCompiler.AST_ERROR, str(error), trace, node.as_sxpr())
node.add_error(errmsg)
return '"' + errmsg + '"'
return parser + ', '.join([arg] + name) + ')'
......
@@ -1975,12 +1975,15 @@ def compile_source(source: str,
# only compile if there were no syntax errors, for otherwise it is
# likely that error list gets littered with compile error messages
result = None
if is_error(syntax_tree.error_flag):
messages = syntax_tree.collect_errors()
else:
ef = syntax_tree.error_flag
messages = syntax_tree.collect_errors(clear_errors=True)
if not is_error(ef):
transformer(syntax_tree)
ef = max(ef, syntax_tree.error_flag)
messages.extend(syntax_tree.collect_errors(clear_errors=True))
if is_logging(): syntax_tree.log(log_file_name + '.ast')
if not is_error(syntax_tree.error_flag):
result = compiler(syntax_tree)
messages = syntax_tree.collect_errors()
messages.extend(syntax_tree.collect_errors())
syntax_tree.error_flag = max(syntax_tree.error_flag, ef)
return result, messages, syntax_tree
@@ -234,7 +234,7 @@ def traverse(root_node: Node,
for child in node.result:
context.append(child)
traverse_recursive(context) # depth first
node.error_flag = node.error_flag or child.error_flag # propagate error flag
node.error_flag = max(node.error_flag, child.error_flag) # propagate error flag
context.pop()
key = key_func(node)
......
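
The switch from or to max in traverse() matters because error_flag carries a numeric severity level rather than a plain boolean, as the is_error()/Error.WARNING handling elsewhere in this commit suggests. A tiny sketch with illustrative values (not the actual constants from DHParser.error):

WARNING, ERROR = 10, 1000                       # illustrative values only
node_flag, child_flag = WARNING, ERROR
assert (node_flag or child_flag) == WARNING     # 'or' stops at the first truthy value
assert max(node_flag, child_flag) == ERROR      # max() propagates the highest severity
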
@@ -9,7 +9,7 @@ definition = symbol §"=" expression
directive = "@" §symbol "=" ( regexp | literal | list_ )
expression = term { "|" term }
term = { factor }+ ["§" { factor }+ ] # "§" reguired
term = { ["§"] factor }+ # "§" means all following factors mandatory
factor = [flowmarker] [retrieveop] symbol !"=" # negative lookahead to be sure it's not a definition
| [flowmarker] literal
| [flowmarker] regexp
......
@@ -281,6 +281,7 @@ class TestSeries:
st = parser("AB_D");
assert st.error_flag
assert st.collect_errors()[0].code == Error.MANDATORY_CONTINUATION
# transitivity of mandatory-operator
st = parser("ABC_");
assert st.error_flag
assert st.collect_errors()[0].code == Error.MANDATORY_CONTINUATION
......
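
The grammar used by TestSeries lies outside this excerpt, but the behaviour asserted above can be pictured with a series like the following sketch; the concrete parsers are assumptions, only Series, the mandatory argument and Error.MANDATORY_CONTINUATION appear in the commit itself:

from DHParser.parser import Series, RegExp

# assumed shape of the series under test, not copied from the test suite:
series_sketch = Series(RegExp('A'), RegExp('B'), RegExp('C'), RegExp('D'), mandatory=2)

# 'A' and 'B' may fail silently (the series simply does not match). Once both have
# matched, a missing 'C' ("AB_D") or a missing 'D' ("ABC_") is reported with
# Error.MANDATORY_CONTINUATION; that everything after the marker stays mandatory is
# the "transitivity" the new test case checks.
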