Currently job artifacts in CI/CD pipelines on LRZ GitLab never expire. Starting from Wed 26.1.2022 the default expiration time will be 30 days (GitLab default). Currently existing artifacts in already completed jobs will not be affected by the change. The latest artifacts for all jobs in the latest successful pipelines will be kept. More information: https://gitlab.lrz.de/help/user/admin_area/settings/continuous_integration.html#default-artifacts-expiration

transform.py 21.4 KB
Newer Older
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
"""transformation.py - transformation functions for converting the
                       concrete into the abstract syntax tree

Copyright 2016  by Eckhart Arnold (arnold@badw.de)
                Bavarian Academy of Sciences and Humanities (badw.de)

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied.  See the License for the specific language governing
permissions and limitations under the License.
"""

import inspect
from functools import partial, reduce, singledispatch

23
24
from DHParser.syntaxtree import Node
from DHParser.base import WHITESPACE_PTYPE, TOKEN_PTYPE, MockParser
25
26
27
28
29
30
31
32
33
34
35
36
37
38

try:
    import regex as re
except ImportError:
    import re
try:
    from typing import AbstractSet, Any, ByteString, Callable, cast, Container, Dict, \
        Iterator, List, NamedTuple, Sequence, Union, Text, Tuple
except ImportError:
    from .typing34 import AbstractSet, Any, ByteString, Callable, cast, Container, Dict, \
        Iterator, List, NamedTuple, Sequence, Union, Text, Tuple

from DHParser.toolkit import expand_table, smart_list

39
40
41
42
43
# Public interface of this module, in definition order: type aliases,
# the factory and traversal machinery, then the individual
# transformation, predicate and validation functions.
__all__ = ('TransformationDict',
           'TransformationProc',
           'ConditionFunc',
           'KeyFunc',
           'transformation_factory',
           'key_parser_name',
           'key_tag_name',
           'traverse',
           'is_named',
           'replace_by_single_child',
           'reduce_single_child',
           'replace_or_reduce',
           'replace_parser',
           'collapse',
           'merge_children',
           'replace_content',
           'apply_if',
           'is_anonymous',
           'is_whitespace',
           'is_empty',
           'is_expendable',
           'is_token',
           'is_one_of',
           'has_content',
           'remove_children_if',
           'remove_parser',
           'remove_content',
           'remove_first',
           'remove_last',
           'remove_whitespace',
           'remove_empty',
           'remove_expendables',
           'remove_brackets',
           'remove_infix_operator',
           'remove_single_child',
           'remove_tokens',
           'keep_children',
           'flatten',
           'forbid',
           'require',
           'assert_content',
           'assert_condition',
           'assert_has_children',
           'TRUE_CONDITION')
83
84


85
# A transformation function receives the list of nodes from the root
# down to the current node (the "context") and works in place.
TransformationProc = Callable[[List[Node]], None]

# Maps a node key to the sequence of callables applied to matching nodes.
TransformationDict = Dict[str, Sequence[Callable]]

# A processing table may map keys either to plain sequences of callables
# or to nested transformation dictionaries (e.g. the '__cache__' entry).
ProcessingTableType = Dict[str, Union[Sequence[Callable], TransformationDict]]

# Condition on a context; kept loose on purpose.
ConditionFunc = Callable  # Callable[[List[Node]], bool]

# Maps a node to the key under which it is looked up in a processing table.
KeyFunc = Callable[[Node], str]


92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
def transformation_factory(t=None):
    """Creates factory functions from transformation-functions that
    dispatch on the first parameter after the node parameter.

    Decorating a transformation-function that has more than merely the
    ``node``-parameter with ``transformation_factory`` creates a
    function with the same name, which returns a partial-function that
    takes just the node-parameter.

    Additionally, there is some syntactic sugar for
    transformation-functions that receive a collection as their second
    parameter and do not have any further parameters. In this case a
    list of parameters passed to the factory function will be converted
    into a collection.

    Main benefit is readability of processing tables.

    Usage:
        @transformation_factory(AbstractSet[str])
        def remove_tokens(context, tokens):
            ...
      or, alternatively:
        @transformation_factory
        def remove_tokens(context, tokens: AbstractSet[str]):
            ...

    Example:
        trans_table = { 'expression': remove_tokens('+', '-') }
      instead of:
        trans_table = { 'expression': partial(remove_tokens, tokens={'+', '-'}) }

    Parameters:
        t:  type of the second argument of the transformation function,
            only necessary if the transformation functions' parameter list
            does not have type annotations.
    """

    def decorator(f):
        sig = inspect.signature(f)
        params = list(sig.parameters.values())[1:]
        if len(params) == 0:
            return f  # '@transformer' not needed w/o free parameters
        assert t or params[0].annotation != params[0].empty, \
            "No type information on second parameter found! Please, use type " \
            "annotation or provide the type information via transformation_factory-decorator."
        p1type = t or params[0].annotation
        f = singledispatch(f)
        # Syntactic sugar: a single collection-typed parameter may be
        # given as individual arguments, which are gathered into the
        # collection type (set for AbstractSet, list for Sequence).
        if len(params) == 1 and issubclass(p1type, Container) and not issubclass(p1type, Text) \
                and not issubclass(p1type, ByteString):
            def gen_special(*args):
                c = set(args) if issubclass(p1type, AbstractSet) else \
                    list(args) if issubclass(p1type, Sequence) else args
                d = {params[0].name: c}
                return partial(f, **d)

            f.register(p1type.__args__[0], gen_special)

        def gen_partial(*args, **kwargs):
            # bind the factory arguments by name, leaving only the
            # node/context parameter open
            d = {p.name: arg for p, arg in zip(params, args)}
            d.update(kwargs)
            return partial(f, **d)

        f.register(p1type, gen_partial)
        return f

    if isinstance(t, type(lambda: 1)):
        # Provide for the case that transformation_factory has been
        # written as plain decorator and not as a function call that
        # returns the decorator proper.
        func = t
        t = None
        return decorator(func)
    else:
        return decorator


168
def key_parser_name(node: Node) -> str:
    """Returns the name of the node's parser, for use as the lookup
    key in a processing table."""
    return node.parser.name


172
def key_tag_name(node: Node) -> str:
    """Returns the node's tag name, for use as the lookup key in a
    processing table (this is the default key function of traverse)."""
    return node.tag_name


176
def traverse(root_node: Node,
             processing_table: ProcessingTableType,
             key_func: KeyFunc=key_tag_name) -> None:
    """
    Traverses the syntax tree starting with the given ``node`` depth
    first and applies the sequences of callback-functions registered
    in the ``calltable``-dictionary.

    The most important use case is the transformation of a concrete
    syntax tree into an abstract tree (AST). But it is also imaginable
    to employ tree-traversal for the semantic analysis of the AST.

    In order to assign sequences of callback-functions to nodes, a
    dictionary ("processing table") is used. The keys usually represent
    tag names, but any other key function is possible. There exist
    three special keys:
        '+': always called (before any other processing function)
        '*': called for those nodes for which no (other) processing
             function appears in the table
        '~': always called (after any other processing function)

    Args:
        root_node (Node): The root-node of the syntax tree to be traversed
        processing_table (dict): node key -> sequence of functions that
            will be applied to matching nodes in order. This dictionary
            is interpreted as a `compact_table`. See
            `toolkit.expand_table` or ``EBNFCompiler.EBNFTransTable`
        key_func (function): A mapping key_func(node) -> keystr. The default
            key_func yields node.tag_name.

    Example:
        table = { "term": [replace_by_single_child, flatten],
            "factor, flowmarker, retrieveop": replace_by_single_child }
        traverse(node, table)
    """
    # Is this optimization really needed?
    if '__cache__' in processing_table:
        # assume that processing table has already been expanded
        table = processing_table
        cache = processing_table['__cache__']
    else:
        # normalize processing_table entries by turning single values
        # into lists with a single value
        table = {name: smart_list(call) for name, call in list(processing_table.items())}
        table = expand_table(table)
        cache = table.setdefault('__cache__', cast(TransformationDict, dict()))
        # change processing table in place, so it is already expanded and
        # the cache is filled the next time traverse() is called with it
        processing_table.clear()
        processing_table.update(table)

    def traverse_recursive(context):
        node = context[-1]
        if node.children:
            for child in node.result:
                context.append(child)
                traverse_recursive(context)  # depth first
                node.error_flag = node.error_flag or child.error_flag  # propagate error flag
                context.pop()

        key = key_func(node)
        try:
            sequence = cache[key]
        except KeyError:
            # assemble and cache the sequence of callbacks for this key:
            # '+' always called (before any other processing function)
            # '*' called for those nodes for which no (other) processing
            #     function appears in the table
            # '~' always called (after any other processing function)
            sequence = table.get('+', []) + \
                       table.get(key, table.get('*', [])) + \
                       table.get('~', [])
            cache[key] = sequence

        for call in sequence:
            call(context)

    traverse_recursive([root_node])

260
261
262
263
264
265
266
267
268
269
270
271
272


# ------------------------------------------------
#
# rearranging transformations:
#     - tree may be rearranged (e.g.flattened)
#     - nodes that are not leaves may be dropped
#     - order is preserved
#     - leave content is preserved (though not necessarily the leaves themselves)
#
# ------------------------------------------------


273
def TRUE_CONDITION(context: List[Node]) -> bool:
    """Condition that is fulfilled by any context whatsoever."""
    return True


277
def replace_child(node: Node):
    """Replaces `node` by its only child: the child's parser, errors and
    result are moved up into `node`.  An anonymous child first inherits
    the name of `node`'s parser, so that the name is not lost."""
    assert len(node.children) == 1
    child = node.children[0]
    if not child.parser.name:
        child.parser.name = node.parser.name
    node.parser = child.parser
    node._errors.extend(child._errors)
    node.result = child.result
284
285


286
def reduce_child(node: Node):
    """Transfers the result (and errors) of `node`'s only child to
    `node` itself, keeping `node`'s parser entry untouched."""
    assert len(node.children) == 1
    child = node.children[0]
    node._errors.extend(child._errors)
    node.result = child.result
290
291


292
@transformation_factory(Callable)
def replace_by_single_child(context: List[Node], condition: Callable=TRUE_CONDITION):
    """
    Remove single branch node, replacing it by its immediate descendant
    if and only if the condition on the descendant is true.

    (In case the descendant's name is empty (i.e. anonymous) the
    name of this node's parser is kept.)
    """
    node = context[-1]
    if len(node.children) == 1:
        # evaluate the condition on the descendant's context
        context.append(node.children[0])
        if condition(context):
            replace_child(node)
        context.pop()
306
307


308
@transformation_factory(Callable)
def reduce_single_child(context: List[Node], condition: Callable=TRUE_CONDITION):
    """
    Reduce a single branch node by transferring the result of its
    immediate descendant to this node, but keeping this node's parser
    entry.  If the condition evaluates to false on the descendant, the
    node is left untouched.
    """
    node = context[-1]
    if len(node.children) != 1:
        return
    context.append(node.children[0])
    if condition(context):
        reduce_child(node)
    context.pop()
322
323


324
def is_named(context: List[Node]) -> bool:
    """Returns True if the current node's parser carries a name."""
    return bool(context[-1].parser.name)
326
327
328
329


def is_anonymous(context: List[Node]) -> bool:
    """Returns True if the current node's parser has no name."""
    return not context[-1].parser.name
330
331
332


@transformation_factory(Callable)
def replace_or_reduce(context: List[Node], condition: Callable=is_named):
    """
    Replaces the node by its single child if the condition is met on
    the child, otherwise (i.e. if the child is anonymous) reduces the
    child.
    """
    node = context[-1]
    if len(node.children) != 1:
        return
    context.append(node.children[0])
    if condition(context):
        replace_child(node)
    else:
        reduce_child(node)
    context.pop()
346
347
348


@transformation_factory
def replace_parser(context: List[Node], name: str):
    """
    Replaces the parser of a Node with a mock parser with the given
    name.

    Parameters:
        name(str): "NAME:PTYPE" of the surrogate. The ptype is optional
        node(Node): The node where the parser shall be replaced
    """
    node = context[-1]
    # split an optional ":ptype" suffix off the name; missing ptype -> ''
    name, ptype = (name.split(':') + [''])[:2]
    node.parser = MockParser(name, ptype)


@transformation_factory(Callable)
def flatten(context: List[Node], condition: Callable=is_anonymous, recursive: bool=True):
    """
    Flattens all children that fulfil the given `condition` (default:
    all unnamed children).  Flattening means that wherever a node has
    child nodes, the child nodes are inserted in place of the node.

    If the parameter `recursive` is `True` the same will recursively be
    done with the child-nodes first, so that all leaves of this node
    and its child nodes are collected in-order as direct children of
    this node.

    Applying flatten recursively will result in these kinds of
    structural transformation:
        (1 (+ 2) (+ 3)     ->   (1 + 2 + 3)
        (1 (+ (2 + (3))))  ->   (1 + 2 + 3)
    """
    node = context[-1]
    if not node.children:
        return
    flattened = []     # type: List[Node]
    for child in node.children:
        context.append(child)
        if child.children and condition(context):
            if recursive:
                flatten(context, condition, recursive)
            flattened.extend(child.children)
        else:
            flattened.append(child)
        context.pop()
    node.result = tuple(flattened)


394
395
396
def collapse(context: List[Node]):
    """Collapses all sub-nodes of a node by replacing them with the
    string representation of the node."""
    context[-1].result = str(context[-1])


@transformation_factory
def merge_children(context: List[Node], tag_names: List[str]):
    """
    Joins all runs of adjacent children whose tag names are in
    `tag_names` into a single child node carrying a mock-parser named
    after the first entry of `tag_names`.
    """
    node = context[-1]
    merged = []
    # a leading ':' in the first tag name denotes a ptype, not a name
    if tag_names[0][:1] == ':':
        name, ptype = '', tag_names[0]
    else:
        name, ptype = tag_names[0], ''
    children = node.children
    if children:
        total = len(children)
        i = 0
        while i < total:
            # copy non-matching children verbatim
            while i < total and children[i].tag_name not in tag_names:
                merged.append(children[i])
                i += 1
            # extend the run [i, k) of matching children; only children
            # agreeing with children[i] on leaf/branch status are joined
            k = i + 1
            while (k < total and children[k].tag_name in tag_names
                   and bool(children[i].children) == bool(children[k].children)):
                k += 1
            if i < total:
                merged.append(Node(MockParser(name, ptype),
                                   reduce(lambda a, b: a + b,
                                          (child.children for child in children[i:k]))))
            i = k
        node.result = tuple(merged)


# ------------------------------------------------
#
# destructive transformations:
#     - tree may be rearranged (flattened),
#     - order is preserved
#     - but (irrelevant) leaves may be dropped
#     - errors of dropped leaves will be lost
#
# ------------------------------------------------


@transformation_factory
def replace_content(context: List[Node], func: Callable):  # Callable[[Node], ResultType]
    """Replaces the content of the node. ``func`` takes the node's
    result as an argument and returns the mapped result.
    """
    node = context[-1]
    node.result = func(node.result)


452
def is_whitespace(context: List[Node]) -> bool:
    """Returns True if the current node represents whitespace, i.e. if
    its parser's ptype is the whitespace-ptype.  (This includes
    comments defined with the ``@comment``-directive, which the parser
    subsumes under whitespace.)"""
    return context[-1].parser.ptype == WHITESPACE_PTYPE
456
457


458
459
def is_empty(context: List[Node]) -> bool:
    """Returns True if the current node's result is empty."""
    return not context[-1].result
460
461


462
463
def is_expendable(context: List[Node]) -> bool:
    """Returns True if the current node is either empty or whitespace."""
    return is_empty(context) or is_whitespace(context)
464
465


466
467
def is_token(context: List[Node], tokens: AbstractSet[str] = frozenset()) -> bool:
    """Returns True if the current node is a token and, unless `tokens`
    is empty, its content is one of the given `tokens`."""
    node = context[-1]
    if node.parser.ptype != TOKEN_PTYPE:
        return False
    return not tokens or node.result in tokens


471
def is_one_of(context: List[Node], tag_name_set: AbstractSet[str]) -> bool:
    """Returns true, if the node's tag_name is one of the
    given tag names."""
    return context[-1].tag_name in tag_name_set
475
476


477
def has_content(context: List[Node], regexp: str) -> bool:
    """Checks whether the current node's string content matches the
    regular expression `regexp` (anchored at the beginning)."""
    return re.match(regexp, str(context[-1])) is not None
480
481


482
@transformation_factory(Callable)
def apply_if(context: List[Node], transformation: Callable, condition: Callable):
    """Applies `transformation` to the context only if `condition` is
    met.

    NOTE(review): `condition` is called with the node itself, not with
    the context — unlike most conditions in this module.  The
    module-level helpers (e.g. `remove_first`) rely on this; verify
    before changing."""
    if condition(context[-1]):
        transformation(context)
488
489


490
@transformation_factory(slice)
def keep_children(context: List[Node], section: slice = slice(None)):
    """Keeps only the child-nodes falling into the slice `section` of
    the result field; leaf-nodes are left untouched."""
    node = context[-1]
    if not node.children:
        return
    node.result = node.children[section]
496
497
498


@transformation_factory(Callable)
def remove_children_if(context: List[Node], condition: Callable, section: slice = slice(None)):
    """Removes all children for which `condition()` returns `True`.

    NOTE(review): the `section` parameter is accepted but ignored here;
    see `remove_children` for the slice-aware variant."""
    node = context[-1]
    if not node.children:
        return
    node.result = tuple(child for child in node.children
                        if not condition(context + [child]))


@transformation_factory(Callable)
def remove_children(context: List[Node], condition: Callable, section: slice = slice(None)):
    """Removes all nodes from the slice `section` of the result field
    for which `condition(child_context)` evaluates to `True`."""
    node = context[-1]
    if not node.children:
        return
    children = node.children
    total = len(children)
    # indices covered by the slice; children outside it are always kept
    selected = range(*section.indices(total))
    node.result = tuple(children[i] for i in range(total)
                        if i not in selected
                        or not condition(context + [children[i]]))
525
526
527
528
529


# Frequently used specializations of the removal transformations above.
remove_whitespace = remove_children_if(is_whitespace)
remove_empty = remove_children_if(is_empty)
remove_expendables = remove_children_if(is_expendable)
# The lambdas below take the node itself, matching apply_if, which
# calls its condition with the node rather than with the context.
remove_first = apply_if(keep_children(slice(1, None)), lambda nd: len(nd.children) > 1)
remove_last = apply_if(keep_children(slice(None, -1)), lambda nd: len(nd.children) > 1)
remove_brackets = apply_if(keep_children(slice(1, -1)), lambda nd: len(nd.children) >= 2)
remove_infix_operator = keep_children(slice(0, None, 2))
remove_single_child = apply_if(keep_children(slice(0)), lambda nd: len(nd.children) == 1)
535
536
537


@transformation_factory
def remove_tokens(context: List[Node], tokens: AbstractSet[str] = frozenset()):
    """Removes any among a particular set of tokens from the immediate
    descendants of a node. If ``tokens`` is the empty set, all tokens
    are removed."""
    remove_children_if(context, partial(is_token, tokens=tokens))
543
544
545


@transformation_factory
def remove_parser(context: List[Node], tag_names: AbstractSet[str]):
    """Removes those children whose tag name is in `tag_names`."""
    remove_children_if(context, partial(is_one_of, tag_name_set=tag_names))
549
550
551


@transformation_factory
def remove_content(context: List[Node], regexp: str):
    """Removes children whose string content matches the regular
    expression `regexp`."""
    remove_children_if(context, partial(has_content, regexp=regexp))
555
556
557
558


########################################################################
#
559
# AST semantic validation functions (EXPERIMENTAL!!!)
560
561
562
#
########################################################################

563
@transformation_factory(Callable)
def assert_condition(context: List[Node], condition: Callable, error_msg: str = ''):
    """Checks for `condition`; adds an error message to the node if the
    condition is not met.  If `error_msg` contains "%s", the node's tag
    name is interpolated into it; without an `error_msg`, a generic
    message naming the failed condition is used."""
    node = context[-1]
    if not condition(context):
        if error_msg:
            # '>= 0', not '> 0': a "%s" at the very start of the
            # message must be interpolated as well
            node.add_error(error_msg % node.tag_name if error_msg.find("%s") >= 0 else error_msg)
        else:
            cond_name = condition.__name__ if hasattr(condition, '__name__') \
                        else condition.__class__.__name__ if hasattr(condition, '__class__') \
                        else '<unknown>'
            node.add_error("transform.assert_condition: Failed to meet condition " + cond_name)


# The condition receives the whole context; dereference the current
# node before accessing `.children` (a bare `nd.children` on the
# context list would raise AttributeError).
assert_has_children = assert_condition(lambda ctx: ctx[-1].children, 'Element "%s" has no children')


@transformation_factory
def assert_content(context: List[Node], regexp: str):
    """Adds an error to the node if its content does not match the
    regular expression `regexp`."""
    node = context[-1]
    if has_content(context, regexp):
        return
    node.add_error('Element "%s" violates %s on %s' %
                   (node.parser.name, str(regexp), str(node)))

587
588

@transformation_factory
def require(context: List[Node], child_tags: AbstractSet[str]):
    """Adds an error for every child whose tag name is NOT among
    `child_tags`."""
    node = context[-1]
    for child in node.children:
        if child.tag_name not in child_tags:
            node.add_error('Element "%s" is not allowed inside "%s".' %
                           (child.parser.name, node.parser.name))


@transformation_factory
def forbid(context: List[Node], child_tags: AbstractSet[str]):
    """Adds an error for every child whose tag name IS among
    `child_tags`."""
    node = context[-1]
    for child in node.children:
        if child.tag_name in child_tags:
            node.add_error('Element "%s" cannot be nested inside "%s".' %
                           (child.parser.name, node.parser.name))