Currently job artifacts in CI/CD pipelines on LRZ GitLab never expire. Starting from Wed 26.1.2022 the default expiration time will be 30 days (GitLab default). Currently existing artifacts in already completed jobs will not be affected by the change. The latest artifacts for all jobs in the latest successful pipelines will be kept. More information:

Commit 525ad4bc authored by Eckhart Arnold's avatar Eckhart Arnold
Browse files

ameliorations for the name mangling of compiler methods

parent ae99c15b
......@@ -244,11 +244,12 @@ class EBNFCompiler(CompilerBase):
'Compiler, self).__init__()',
" assert re.match('\w+\Z', grammar_name)", '']
for name in self.definition_names:
method_name = CompilerBase.derive_method_name(name)
if name == self.root:
compiler += [' def ' + name + '__(self, node):',
compiler += [' def ' + method_name + '(self, node):',
' return node', '']
compiler += [' def ' + name + '__(self, node):',
compiler += [' def ' + method_name + '(self, node):',
' pass', '']
return '\n'.join(compiler)
......@@ -314,7 +315,7 @@ class EBNFCompiler(CompilerBase):
return '\n '.join(declarations)
def syntax__(self, node):
def on_syntax(self, node):
definitions = []
......@@ -333,7 +334,7 @@ class EBNFCompiler(CompilerBase):
return self.gen_parser(definitions)
def definition__(self, node):
def on_definition(self, node):
rule = node.result[0].result
if rule in self.rules:
node.add_error('A rule with name "%s" has already been defined.' % rule)
......@@ -374,7 +375,7 @@ class EBNFCompiler(CompilerBase):
(repr(rx), str(re_error)))
return rx
def directive__(self, node):
def on_directive(self, node):
key = node.result[0].result.lower()
assert key not in self.directives['tokens']
if key in {'comment', 'whitespace'}:
......@@ -431,13 +432,13 @@ class EBNFCompiler(CompilerBase):
arguments = [self._compile(r) for r in node.result] + custom_args
return parser_class + '(' + ', '.join(arguments) + ')'
def expression__(self, node):
def on_expression(self, node):
return self.non_terminal(node, 'Alternative')
def term__(self, node):
def on_term(self, node):
return self.non_terminal(node, 'Sequence')
def factor__(self, node):
def on_factor(self, node):
assert isinstance(node.parser, Sequence), node.as_sexpr() # these assert statements can be removed
assert node.children
assert len(node.result) >= 2, node.as_sexpr()
......@@ -471,23 +472,23 @@ class EBNFCompiler(CompilerBase):
except KeyError:
node.add_error('Unknown prefix "%s".' % prefix)
def option__(self, node):
def on_option(self, node):
return self.non_terminal(node, 'Optional')
def repetition__(self, node):
def on_repetition(self, node):
return self.non_terminal(node, 'ZeroOrMore')
def oneormore__(self, node):
def on_oneormore(self, node):
return self.non_terminal(node, 'OneOrMore')
def regexchain__(self, node):
def on_regexchain(self, node):
raise EBNFCompilerError("Not yet implemented!")
def group__(self, node):
def on_group(self, node):
raise EBNFCompilerError("Group nodes should have been eliminated by "
"AST transformation!")
def symbol__(self, node):
def on_symbol(self, node):
if node.result in self.directives['tokens']:
return 'ScannerToken("' + node.result + '")'
......@@ -496,10 +497,10 @@ class EBNFCompiler(CompilerBase):
return node.result
def literal__(self, node):
def on_literal(self, node):
return 'Token(' + node.result.replace('\\', r'\\') + ')' # return 'Token(' + ', '.join([node.result]) + ')' ?
def regexp__(self, node):
def on_regexp(self, node):
rx = node.result
name = []
if rx[:2] == '~/':
......@@ -523,7 +524,7 @@ class EBNFCompiler(CompilerBase):
return '"' + errmsg + '"'
return 'RE(' + ', '.join([arg] + name) + ')'
def list___(self, node):
def on_list_(self, node):
assert node.children
return set(item.result.strip() for item in node.result)
......@@ -460,6 +460,16 @@ def nil_scanner(text):
class ScannerToken(Parser):
Parses tokens that have been inserted by a Scanner.
Scanners can generate Tokens with the ``make_token``-function.
These tokens start and end with magic characters that can only be
matched by the ScannerToken Parser. Scanner tokens can be used to
insert BEGIN - END delimiters at the beginning or ending of an
indented block. Otherwise indented blocks are difficult to handle
with parsing expression grammars.
def __init__(self, scanner_token):
assert isinstance(scanner_token, str) and scanner_token and \
......@@ -493,7 +503,8 @@ class ScannerToken(Parser):
class RegExp(Parser):
"""Regular expression parser.
Regular expression parser.
The RegExp-parser parses text that matches a regular expression.
RegExp can also be considered as the "atomic parser", because all
......@@ -954,8 +965,12 @@ class CompilerBase:
def _reset(self):
def compile_AST(self, node):
def compile_all(self, node):
"""Compiles the abstract syntax tree with the root ``node``.
It's called `compile_all`` to avoid confusion with the
``_compile`` that is called from within the local node
compiler methods.
if self.dirty_flag:
......@@ -963,13 +978,21 @@ class CompilerBase:
self.dirty_flag = True
return self._compile(node)
def derive_method_name(node_name):
"""Returns the method name for ``node_name``, e.g.
>>> CompilerBase.derive_method_name('expression')
return 'on_' + node_name
def _compile(self, node):
"""Calls the compilation method for the given node and returns
the result of the compilation.
The method's name is derived from either the node's parser
name or, if the parser is anonymous, the node's parser's class
name by appending two underscores '__'.
name by adding the prefix 'on_'.
Note that ``_compile`` does not call any compilation functions
for the parsers of the sub nodes by itself. Rather, this should
......@@ -982,7 +1005,7 @@ class CompilerBase:
"'_' or '__' or ending with '__' is reserved.)")
return None
compiler = self.__getattribute__(elem + '__')
compiler = self.__getattribute__(self.derive_method_name(elem))
result = compiler(node)
for child in node.children:
node.error_flag |= child.error_flag
......@@ -1041,7 +1064,7 @@ def full_compilation(source, scanner, parser, transform, compiler):
syntax_tree.log(log_file_name, ext='.ast')
errors = syntax_tree.collect_errors()
if not errors:
result = compiler.compile_AST(syntax_tree)
result = compiler.compile_all(syntax_tree)
errors = syntax_tree.collect_errors()
messages = error_messages(source_text, errors)
return result, messages, syntax_tree
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment