Commit 8e8792ad authored by Eckhart Arnold

- syntaxtree.py: reversed parameter order in transformer-functions: node is now always the first parameter again.

parent 4d29105b
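In practice the change means that all transformer functions now share a uniform node-first signature, so processing tables must bind every remaining argument by keyword, since functools.partial pre-fills positional slots from the left. A minimal sketch of the convention (the Node class below is a simplified stand-in, not DHParser's actual class):

    from functools import partial

    # Simplified stand-in for a syntaxtree Node, for illustration only:
    class Node:
        def __init__(self, *children):
            self.children = children

    def remove_tokens(node, tokens):
        # node-first signature: the traversal machinery always passes
        # the node as the single positional argument
        node.children = tuple(c for c in node.children if c not in tokens)

    # Because partial() fills positional slots from the left, the extra
    # arguments must be bound by keyword, keeping the first positional
    # slot free for the node:
    strip_bar = partial(remove_tokens, tokens={'|'})
    strip_bar(Node('a', '|', 'b'))  # same as remove_tokens(node, tokens={'|'})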
@@ -197,10 +197,10 @@ EBNF_transformation_table = {
     "syntax":
         remove_expendables,
     "directive, definition":
-        partial(remove_tokens, {'@', '='}),
+        partial(remove_tokens, tokens={'@', '='}),
     "expression":
         [replace_by_single_child, flatten,
-         partial(remove_tokens, {'|'})],
+         partial(remove_tokens, tokens={'|'})],
     "term":
         [replace_by_single_child, flatten],  # supports both idioms: "{ factor }+" and "factor { factor }"
     "factor, flowmarker, retrieveop":
@@ -214,7 +214,7 @@ EBNF_transformation_table = {
     (TOKEN_PTYPE, WHITESPACE_PTYPE):
         [remove_expendables, reduce_single_child],
     "list_":
-        [flatten, partial(remove_tokens, {','})],
+        [flatten, partial(remove_tokens, tokens={','})],
     "*":
         [remove_expendables, replace_by_single_child]
     }
@@ -223,8 +223,8 @@ EBNF_transformation_table = {
 EBNF_validation_table = {
     # Semantic validation on the AST
     "repetition, option, oneormore":
-        [partial(forbid, ['repetition', 'option', 'oneormore']),
-         partial(assert_content, r'(?!§)')],
+        [partial(forbid, child_tags=['repetition', 'option', 'oneormore']),
+         partial(assert_content, regex=r'(?!§)')],
     }
@@ -460,14 +460,14 @@ def key_tag_name(node) -> str:
     return node.tag_name
 
-def traverse(root_node, processing_table, key_func=key_tag_name):
+def traverse(root_node, processing_table, key_func=key_tag_name) -> None:
     """Traverses the syntax tree starting with the given ``node`` depth
     first and applies the sequences of callback-functions registered
     in the ``calltable``-dictionary.
 
     The most important use case is the transformation of a concrete
     syntax tree into an abstract tree (AST). But it is also imaginable
-    to emloy tree-traversal for the semantic analysis of the AST.
+    to employ tree-traversal for the semantic analysis of the AST.
 
     In order to assign sequences of callback-functions to nodes, a
     dictionary ("processing table") is used. The keys usually represent
@@ -492,6 +492,10 @@ def traverse(root_node, processing_table, key_func=key_tag_name):
         "factor, flowmarker, retrieveop": replace_by_single_child }
     traverse(node, table)
     """
+    # commented, because this approach is too error-prone!
+    # def funclist(call):
+    #     return [as_partial(func) for func in smart_list(call)]
+
     # normalize processing_table entries by turning single values into lists
     # with a single value
     table = {name: smart_list(call) for name, call in list(processing_table.items())}
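To see how the keyword-bound partials fit together with traverse(), a processing table in the style of the docstring above could be set up as follows; the callbacks are the ones defined in syntaxtree.py, while syntax_tree is only an assumed Node instance:

    from functools import partial
    # assuming traverse, remove_tokens, replace_by_single_child, flatten
    # and remove_expendables from syntaxtree.py are in scope

    # Keys map tag names (or comma-separated groups of them) to a single
    # callback or a list of callbacks; single values are normalized into
    # one-element lists via smart_list() inside traverse().
    table = {
        "directive, definition": partial(remove_tokens, tokens={'@', '='}),
        "expression": [replace_by_single_child, flatten,
                       partial(remove_tokens, tokens={'|'})],
        "*": [remove_expendables, replace_by_single_child],
    }
    traverse(syntax_tree, table)  # transforms syntax_tree in place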
@@ -559,7 +563,7 @@ def reduce_single_child(node):
         node.result = node.result[0].result
 
-def replace_parser(name, node):
+def replace_parser(node, name):
     """Replaces the parser of a Node with a mock parser with the given
     name.
@@ -628,28 +632,28 @@ def is_expendable(node):
     return is_empty(node) or is_whitespace(node)
 
-def is_token(token_set, node):
-    return node.parser.ptype == TOKEN_PTYPE and (not token_set or node.result in token_set)
+def is_token(node, tokens):
+    return node.parser.ptype == TOKEN_PTYPE and (not tokens or node.result in tokens)
 
-def remove_children_if(condition, node):
+def remove_children_if(node, condition):
     """Removes all nodes from the result field if the function
     ``condition(child_node)`` evaluates to ``True``."""
     if node.children:
         node.result = tuple(c for c in node.children if not condition(c))
 
-remove_whitespace = partial(remove_children_if, is_whitespace)
+remove_whitespace = partial(remove_children_if, condition=is_whitespace)
 # remove_scanner_tokens = partial(remove_children_if, condition=is_scanner_token)
-remove_expendables = partial(remove_children_if, is_expendable)
+remove_expendables = partial(remove_children_if, condition=is_expendable)
 
-def remove_tokens(tokens, node):
+def remove_tokens(node, tokens):
     """Removes any among a particular set of tokens from the immediate
     descendants of a node. If ``tokens`` is the empty set, all tokens
     are removed.
     """
-    remove_children_if(partial(is_token, tokens), node)
+    remove_children_if(node, partial(is_token, tokens=tokens))
 
 def remove_enclosing_delimiters(node):
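Read together, remove_children_if() is the generic primitive here, and remove_whitespace(), remove_expendables() and remove_tokens() are keyword-bound specializations of it. After this commit the two calls below should be equivalent (a sketch; node stands for any syntaxtree Node):

    from functools import partial
    # assuming is_token, remove_tokens and remove_children_if from
    # syntaxtree.py are in scope:
    remove_tokens(node, {','})
    remove_children_if(node, partial(is_token, tokens={','}))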
@@ -661,7 +665,7 @@ def remove_enclosing_delimiters(node):
         node.result = node.result[1:-1]
 
-def map_content(func, node):
+def map_content(node, func):
     """Replaces the content of the node. ``func`` takes the node
     as an argument and returns the mapped result.
     """
@@ -676,21 +680,21 @@ def map_content(func, node):
 ########################################################################
 
-def require(child_tag, node):
+def require(node, child_tags):
     for child in node.children:
-        if child.tag_name not in child_tag:
+        if child.tag_name not in child_tags:
             node.add_error('Element "%s" is not allowed inside "%s".' %
                            (child.parser.name, node.parser.name))
 
-def forbid(child_tags, node):
+def forbid(node, child_tags):
     for child in node.children:
         if child.tag_name in child_tags:
             node.add_error('Element "%s" cannot be nested inside "%s".' %
                            (child.parser.name, node.parser.name))
 
-def assert_content(regex, node):
+def assert_content(node, regex):
     content = str(node)
     if not re.match(regex, content):
         node.add_error('Element "%s" violates %s on %s' %
@@ -32,6 +32,7 @@ already exists.
 import collections
 import contextlib
 import functools
+import hashlib
 import os
 try:
@@ -237,8 +238,8 @@ def md5(*txt):
     return md5_hash.hexdigest()
 
-def smart_list(arg):
-    """Returns the argument an iterable, depending on its type and content.
+def smart_list(arg) -> list:
+    """Returns the argument as a list, depending on its type and content.
 
     If the argument is a string, it will be interpreted as a list of
     comma separated values, trying ';', ',', ' ' as possible delimiters
@@ -268,12 +269,12 @@ def smart_list(arg):
         for delimiter in (';', ','):
             lst = arg.split(delimiter)
             if len(lst) > 1:
-                return (s.strip() for s in lst)
-        return (s.strip() for s in arg.strip().split(' '))
+                return [s.strip() for s in lst]
+        return [s.strip() for s in arg.strip().split(' ')]
     # elif isinstance(arg, collections.abc.Sequence):  # python 3.6: collections.abc.Collection
     #     return arg
     elif isinstance(arg, collections.abc.Iterable):
-        return arg
+        return list(arg)
     else:
         return [arg]
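The switch from generator expressions to list comprehensions matters because a generator can be consumed only once, whereas traverse() normalizes each table entry with smart_list() and then applies the resulting callback sequence to many nodes; real lists avoid silently exhausted results. Expected behaviour, inferred from the code above:

    # assuming smart_list from toolkit.py is in scope:
    assert smart_list("a; b; c") == ['a', 'b', 'c']  # ';' takes precedence over ','
    assert smart_list("a, b") == ['a', 'b']
    assert smart_list("word") == ['word']
    assert smart_list((1, 2, 3)) == [1, 2, 3]        # any other iterable is copied into a list
    assert smart_list(42) == [42]                    # scalars are wrapped in a one-element list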
@@ -296,6 +297,24 @@ def expand_table(compact_table):
             expanded_table[k] = value
     return expanded_table
 
+# # commented, because this approach is too error-prone in connection with smart_list
+# def as_partial(partial_ellipsis) -> functools.partial:
+#     """Transforms ``partial_ellipsis`` into a partial function
+#     application, i.e. the string "remove_tokens({'(', ')'})" will be
+#     transformed into the partial "partial(remove_tokens, {'(', ')'})".
+#     Partial ellipses can be considered as a shorthand notation for
+#     partials, which look like function calls but aren't. Plain
+#     function names are returned as is. Also, if ``partial_ellipsis``
+#     already is a callable, it will be returned as is.
+#     """
+#     if callable(partial_ellipsis):
+#         return partial_ellipsis
+#     m = re.match(r'\s*(\w+)(?:\(([^)]*)\))?\s*$', partial_ellipsis)
+#     if m:
+#         fname, fargs = m.groups()
+#         return eval("functools.partial(%s, %s)" % (fname, fargs)) if fargs else eval(fname)
+#     raise SyntaxError(partial_ellipsis + " does not resemble a partial function ellipsis!")
+
 def sane_parser_name(name) -> bool:
     """Checks whether given name is an acceptable parser name. Parser names
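One plausible reading of the "error-prone in connection with smart_list" remark (my inference, not stated in the commit): traverse() pipes every table entry through smart_list(), which splits strings at semicolons and commas, so a partial ellipsis with a comma inside its argument set would be torn apart before as_partial() ever saw it:

    # assuming smart_list from toolkit.py is in scope:
    smart_list("remove_tokens({'(', ')'})")
    # -> ["remove_tokens({'('", "')'})"]  # no longer a parsable ellipsis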
@@ -42,4 +42,5 @@ WSPC = /[ \t]*\n?(?!\s*\n)[ \t]*/  # whitespace, including at most one linefeed
 LF      = /[ \t]*\n(?!\s*\n)/      # a linefeed, but not an empty line (i.e. par)
 PARSEP  = /\s*\n\s*\n/             # at least one empty line, i.e.
                                    # [whitespace] linefeed [whitespace] linefeed
+EOF     = !/./