"""transformation.py - transformation functions for converting the
                       concrete into the abstract syntax tree

Copyright 2016  by Eckhart Arnold (arnold@badw.de)
                Bavarian Academy of Sciences an Humanities (badw.de)

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied.  See the License for the specific language governing
permissions and limitations under the License.
"""

import inspect
from functools import partial, reduce, singledispatch

from DHParser.syntaxtree import WHITESPACE_PTYPE, TOKEN_PTYPE, MockParser, Node

try:
    import regex as re
except ImportError:
    import re
try:
    from typing import AbstractSet, Any, ByteString, Callable, cast, Container, Dict, \
        Iterator, List, NamedTuple, Sequence, Union, Text, Tuple
except ImportError:
    from .typing34 import AbstractSet, Any, ByteString, Callable, cast, Container, Dict, \
        Iterator, List, NamedTuple, Sequence, Union, Text, Tuple

from DHParser.toolkit import expand_table, smart_list

__all__ = ('TransformationDict',
           'TransformationProc',
           'ConditionFunc',
           'KeyFunc',
           'transformation_factory',
           'key_parser_name',
           'key_tag_name',
           'traverse',
           'is_named',
           'replace_by_single_child',
           'reduce_single_child',
           'replace_or_reduce',
           'replace_parser',
           'collapse',
           'merge_children',
           'replace_content',
           'apply_if',
           'is_anonymous',
           'is_whitespace',
           'is_empty',
           'is_expendable',
           'is_token',
           'is_one_of',
           'has_content',
           'remove_children_if',
           'remove_parser',
           'remove_content',
           'remove_first',
           'remove_last',
           'remove_whitespace',
           'remove_empty',
           'remove_expendables',
           'remove_brackets',
           'remove_infix_operator',
           'remove_single_child',
           'remove_tokens',
           'keep_children',
           'flatten',
           'forbid',
           'require',
           'assert_content',
           'assert_condition',
           'assert_has_children',
           'TRUE_CONDITION')


TransformationProc = Callable[[List[Node]], None]
TransformationDict = Dict[str, Sequence[Callable]]
ProcessingTableType = Dict[str, Union[Sequence[Callable], TransformationDict]]
ConditionFunc = Callable  # Callable[[List[Node]], bool]
KeyFunc = Callable[[Node], str]


def transformation_factory(t=None):
    """Creates factory functions from transformation-functions that
    dispatch on the first parameter after the node parameter.

    Decorating a transformation-function that has more than merely the
    ``node``-parameter with ``transformation_factory`` creates a
    function with the same name, which returns a partial-function that
    takes just the node-parameter.

    Additionally, there is some syntactic sugar for
    transformation-functions that receive a collection as their second
    parameter and do not have any further parameters. In this case a
    list of parameters passed to the factory function will be converted
    into a collection.

    Main benefit is readability of processing tables.

    Usage:
        @transformation_factory(AbstractSet[str])
        def remove_tokens(context, tokens):
            ...
      or, alternatively:
        @transformation_factory
        def remove_tokens(context, tokens: AbstractSet[str]):
            ...

    Example:
        trans_table = { 'expression': remove_tokens('+', '-') }
      instead of:
        trans_table = { 'expression': partial(remove_tokens, tokens={'+', '-'}) }

    Parameters:
        t:  type of the second argument of the transformation function,
            only necessary if the transformation function's parameter list
            does not have type annotations.
    """

    def decorator(f):
        sig = inspect.signature(f)
        params = list(sig.parameters.values())[1:]
        if len(params) == 0:
            return f  # '@transformer' not needed w/o free parameters
        assert t or params[0].annotation != params[0].empty, \
            "No type information on second parameter found! Please, use type " \
            "annotation or provide the type information via transfomer-decorator."
        p1type = t or params[0].annotation
        f = singledispatch(f)
        if len(params) == 1 and issubclass(p1type, Container) and not issubclass(p1type, Text) \
                and not issubclass(p1type, ByteString):
            def gen_special(*args):
                c = set(args) if issubclass(p1type, AbstractSet) else \
                    list(args) if issubclass(p1type, Sequence) else args
                d = {params[0].name: c}
                return partial(f, **d)

            f.register(p1type.__args__[0], gen_special)

        def gen_partial(*args, **kwargs):
            d = {p.name: arg for p, arg in zip(params, args)}
            d.update(kwargs)
            return partial(f, **d)

        f.register(p1type, gen_partial)
        return f

    if isinstance(t, type(lambda: 1)):
        # Provide for the case that transformation_factory has been
        # written as plain decorator and not as a function call that
        # returns the decorator proper.
        func = t
        t = None
        return decorator(func)
    else:
        return decorator


def key_parser_name(node: Node) -> str:
    return node.parser.name


def key_tag_name(node: Node) -> str:
    return node.tag_name


def traverse(root_node: Node,
             processing_table: ProcessingTableType,
             key_func: KeyFunc=key_tag_name) -> None:
    """
    Traverses the syntax tree starting with the given ``root_node`` depth
    first and applies the sequences of callback-functions registered
    in the ``processing_table``-dictionary.

    The most important use case is the transformation of a concrete
    syntax tree into an abstract syntax tree (AST). But it is also imaginable
    to employ tree-traversal for the semantic analysis of the AST.

    In order to assign sequences of callback-functions to nodes, a
    dictionary ("processing table") is used. The keys usually represent
    tag names, but any other key function is possible. There exist
    three special keys:
        '+': always called (before any other processing function)
        '*': called for those nodes for which no (other) processing
             function appears in the table
        '~': always called (after any other processing function)

    Args:
        root_node (Node): The root-node of the syntax tree to be traversed
        processing_table (dict): node key -> sequence of functions that
            will be applied to matching nodes in order. This dictionary
            is interpreted as a `compact_table`. See
            `toolkit.expand_table` or `EBNFCompiler.EBNFTransTable`
        key_func (function): A mapping key_func(node) -> keystr. The default
            key_func yields node.tag_name.

    Example:
        table = { "term": [replace_by_single_child, flatten],
            "factor, flowmarker, retrieveop": replace_by_single_child }
        traverse(node, table)
    """
    # Is this optimization really needed?
    if '__cache__' in processing_table:
        # assume that processing table has already been expanded
        table = processing_table
        cache = processing_table['__cache__']
    else:
        # normalize processing_table entries by turning single values
        # into lists with a single value
        table = {name: smart_list(call) for name, call in list(processing_table.items())}
        table = expand_table(table)
        cache = table.setdefault('__cache__', cast(TransformationDict, dict()))
        # change processing table in place, so it's already expanded and the cache is filled next time
        processing_table.clear()
        processing_table.update(table)

    # assert '__cache__' in processing_table
    # # Code without optimization
    # table = {name: smart_list(call) for name, call in list(processing_table.items())}
    # table = expand_table(table)
    # cache = {}  # type: Dict[str, List[Callable]]

    def traverse_recursive(context):
        node = context[-1]
        if node.children:
            for child in node.result:
                context.append(child)
                traverse_recursive(context)  # depth first
                node.error_flag = node.error_flag or child.error_flag  # propagate error flag
                context.pop()

        key = key_func(node)
        try:
            sequence = cache[key]
        except KeyError:
            sequence = table.get('+', []) + \
                       table.get(key, table.get('*', [])) + \
                       table.get('~', [])
            # '+' always called (before any other processing function)
            # '*' called for those nodes for which no (other) processing function
            #     appears in the table
            # '~' always called (after any other processing function)
            cache[key] = sequence

        for call in sequence:
            call(context)

    traverse_recursive([root_node])
    # assert processing_table['__cache__']

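
# Illustrative sketch of a processing table for ``traverse`` (not part of the
# module's API; the tag names 'expression', 'term' and 'factor' are hypothetical
# and would come from the grammar at hand):
#
#     ast_table = {
#         '+': remove_empty,                                # applied first to every node
#         'expression': [flatten, remove_tokens('+', '-')],
#         'term, factor': replace_by_single_child,          # comma-keys are expanded by expand_table
#         '*': replace_or_reduce,                           # fallback for tags without an entry
#         '~': remove_expendables,                          # applied last to every node
#     }
#     traverse(concrete_syntax_tree, ast_table)             # transforms the tree in place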


# ------------------------------------------------
#
# rearranging transformations:
#     - tree may be rearranged (e.g. flattened)
#     - nodes that are not leaves may be dropped
#     - order is preserved
#     - leaf content is preserved (though not necessarily the leaves themselves)
#
# ------------------------------------------------


def TRUE_CONDITION(context: List[Node]) -> bool:
    return True


def replace_child(node: Node):
    """Replaces ``node``'s parser by that of its single child (a nameless
    child first inherits this node's parser name), and takes over the
    child's errors and result."""
    assert len(node.children) == 1
    if not node.children[0].parser.name:
        node.children[0].parser.name = node.parser.name
    node.parser = node.children[0].parser
    node._errors.extend(node.children[0]._errors)
    node.result = node.children[0].result


def reduce_child(node: Node):
    """Takes over the result (and errors) of ``node``'s single child while
    keeping this node's parser."""
    assert len(node.children) == 1
    node._errors.extend(node.children[0]._errors)
    node.result = node.children[0].result


@transformation_factory(Callable)
def replace_by_single_child(context: List[Node], condition: Callable=TRUE_CONDITION):
    """
    Remove single branch node, replacing it by its immediate descendant
    if and only if the condition on the descendant is true.
    (In case the descendant's name is empty (i.e. anonymous) the
    name of this node's parser is kept.)
    """
    node = context[-1]
    if len(node.children) == 1:
        context.append(node.children[0])
        if condition(context):
            replace_child(node)
        context.pop()


@transformation_factory(Callable)
def reduce_single_child(context: List[Node], condition: Callable=TRUE_CONDITION):
    """
    Reduce a single branch node by transferring the result of its
    immediate descendant to this node, but keeping this node's parser entry.
    If the condition evaluates to false on the descendant, it will not
    be reduced.
    """
    node = context[-1]
    if len(node.children) == 1:
        context.append(node.children[0])
        if condition(context):
            reduce_child(node)
        context.pop()


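# Sketch of the difference between the two reductions above (illustrative only),
# for a node 'term' with a single named child 'factor' containing "x":
#
#     replace_by_single_child:  (term (factor "x"))  ->  (factor "x")
#     reduce_single_child:      (term (factor "x"))  ->  (term "x")
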
def is_named(context: List[Node]) -> bool:
    return bool(context[-1].parser.name)


def is_anonymous(context: List[Node]) -> bool:
    return not context[-1].parser.name


@transformation_factory(Callable)
def replace_or_reduce(context: List[Node], condition: Callable=is_named):
    """
    Replaces node by a single child, if condition is met on child,
    otherwise (i.e. if the child is anonymous) reduces the child.
    """
    node = context[-1]
    if len(node.children) == 1:
        context.append(node.children[0])
        if condition(context):
            replace_child(node)
        else:
            reduce_child(node)
        context.pop()


@transformation_factory
def replace_parser(context: List[Node], name: str):
    """
    Replaces the parser of a Node with a mock parser with the given
    name.

    Parameters:
        name(str): "NAME:PTYPE" of the surrogate. The ptype is optional
        node(Node): The node where the parser shall be replaced
    """
    node = context[-1]
    name, ptype = (name.split(':') + [''])[:2]
    node.parser = MockParser(name, ptype)


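# Illustrative usage (the tag name 'WS' is hypothetical): give all nodes tagged
# 'WS' the surrogate parser name 'whitespace'; a ptype may optionally be
# appended after a colon, as described in the docstring above.
#
#     table = {'WS': replace_parser('whitespace')}
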
@transformation_factory(Callable)
def flatten(context: List[Node], condition: Callable=is_anonymous, recursive: bool=True):
    """
    Flattens all children that fulfil the given `condition`
    (default: all unnamed children). Flattening means that wherever a
    node has child nodes, the child nodes are inserted in place of the
    node.
    If the parameter `recursive` is `True` the same will recursively be
    done with the child-nodes first. In other words, all leaves of
    this node and its child nodes are collected in-order as direct
    children of this node.
    Applying flatten recursively will result in these kinds of
    structural transformation:
        (1 (+ 2) (+ 3))    ->   (1 + 2 + 3)
        (1 (+ (2 + (3))))  ->   (1 + 2 + 3)
    """
    node = context[-1]
    if node.children:
        new_result = []     # type: List[Node]
        for child in node.children:
            context.append(child)
            if child.children and condition(context):
                if recursive:
                    flatten(context, condition, recursive)
                new_result.extend(child.children)
            else:
                new_result.append(child)
            context.pop()
        node.result = tuple(new_result)


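# Usage sketch (hypothetical tag names): with the default condition, flatten
# splices anonymous helper nodes into their parent, e.g.
# (expression (:Series "1" (:Series "+" "2")))  ->  (expression "1" "+" "2"):
#
#     table = {'expression, term': flatten}
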
def collapse(context: List[Node]):
    """
    Collapses all sub-nodes of a node by replacing them with the
    string representation of the node.
    """
    node = context[-1]
    node.result = str(node)


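# Usage sketch (hypothetical tag name): reduce a node whose inner structure is
# irrelevant for the AST to its plain string content.
#
#     table = {'identifier': collapse}
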
@transformation_factory
def merge_children(context: List[Node], tag_names: List[str]):
    """
    Joins adjacent children that have one of the given tag names
    into a single child node with a mock-parser named after the first
    tag-name in the list.
    """
    node = context[-1]
    result = []
    name, ptype = ('', tag_names[0]) if tag_names[0][:1] == ':' else (tag_names[0], '')
    if node.children:
        i = 0
        L = len(node.children)
        while i < L:
            while i < L and not node.children[i].tag_name in tag_names:
                result.append(node.children[i])
                i += 1
            k = i + 1
            while (k < L and node.children[k].tag_name in tag_names
                   and bool(node.children[i].children) == bool(node.children[k].children)):
                k += 1
            if i < L:
                result.append(Node(MockParser(name, ptype),
                                   reduce(lambda a, b: a + b,
                                          (node.children for node in node.children[i:k]))))
            i = k
        node.result = tuple(result)


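# Usage sketch (hypothetical tag names): fuse runs of adjacent 'word' and
# 'space' children into single 'word'-nodes. Following the calling convention
# shown in the ``transformation_factory`` docstring, the tag names are passed
# as individual string arguments:
#
#     table = {'paragraph': merge_children('word', 'space')}
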
# ------------------------------------------------
#
# destructive transformations:
#     - tree may be rearranged (flattened),
#     - order is preserved
#     - but (irrelevant) leaves may be dropped
#     - errors of dropped leaves will be lost
#
# ------------------------------------------------


@transformation_factory
def replace_content(context: List[Node], func: Callable):  # Callable[[Node], ResultType]
    """Replaces the content of the node. ``func`` takes the node
    as an argument and returns the mapped result.
    """
    node = context[-1]
    node.result = func(node.result)


def is_whitespace(context: List[Node]) -> bool:
    """Returns ``True`` for whitespace-nodes, i.e. nodes that contain nothing
    but whitespace or comments defined with the ``@comment``-directive."""
    return context[-1].parser.ptype == WHITESPACE_PTYPE


def is_empty(context: List[Node]) -> bool:
    return not context[-1].result


def is_expendable(context: List[Node]) -> bool:
    return is_empty(context) or is_whitespace(context)


def is_token(context: List[Node], tokens: AbstractSet[str] = frozenset()) -> bool:
    node = context[-1]
    return node.parser.ptype == TOKEN_PTYPE and (not tokens or node.result in tokens)


def is_one_of(context: List[Node], tag_name_set: AbstractSet[str]) -> bool:
    """Returns ``True`` if the node's tag_name is one of the
    given tag names."""
    return context[-1].tag_name in tag_name_set


def has_content(context: List[Node], regexp: str) -> bool:
    """Checks a node's content against a regular expression."""
    return bool(re.match(regexp, str(context[-1])))


@transformation_factory(Callable)
def apply_if(context: List[Node], transformation: Callable, condition: Callable):
    """Applies a transformation only if a certain condition is met."""
    node = context[-1]
    if condition(node):
        transformation(context)


@transformation_factory(slice)
def keep_children(context: List[Node], section: slice = slice(None)):
    """Keeps only child-nodes which fall into a slice of the result field."""
    node = context[-1]
    if node.children:
        node.result = node.children[section]


@transformation_factory(Callable)
def remove_children_if(context: List[Node], condition: Callable, section: slice = slice(None)):
    """Removes all children for which `condition()` returns `True`."""
    node = context[-1]
    if node.children:
        node.result = tuple(c for c in node.children if not condition(context + [c]))


@transformation_factory(Callable)
def remove_children(context: List[Node], condition: Callable, section: slice = slice(None)):
    """Removes all nodes from a slice of the result field if the function
    `condition(child_node)` evaluates to `True`."""
    node = context[-1]
    if node.children:
        c = node.children
        N = len(c)
        rng = range(*section.indices(N))
        node.result = tuple(c[i] for i in range(N)
                            if not i in rng or not condition(context + [c[i]]))
        # selection = []
        # for i in range(N):
        #     context.append(c[i])
        #     if not i in rng or not condition(context):
        #         selection.append(c[i])
        #     context.pop()
        # if len(selection) != c:
        #     node.result = tuple(selection)


remove_whitespace = remove_children_if(is_whitespace)  # partial(remove_children_if, condition=is_whitespace)
remove_empty = remove_children_if(is_empty)
remove_expendables = remove_children_if(is_expendable)  # partial(remove_children_if, condition=is_expendable)
remove_first = apply_if(keep_children(slice(1, None)), lambda nd: len(nd.children) > 1)
remove_last = apply_if(keep_children(slice(None, -1)), lambda nd: len(nd.children) > 1)
remove_brackets = apply_if(keep_children(slice(1, -1)), lambda nd: len(nd.children) >= 2)
remove_infix_operator = keep_children(slice(0, None, 2))
remove_single_child = apply_if(keep_children(slice(0)), lambda nd: len(nd.children) == 1)


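# Usage sketch (hypothetical tag names) combining the prepackaged removals above:
#
#     table = {
#         '+': remove_expendables,                  # drop empty and whitespace nodes everywhere
#         'group': [remove_brackets, reduce_single_child],
#         'sequence': remove_infix_operator,        # keep children 0, 2, 4, ...
#     }
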
@transformation_factory
def remove_tokens(context: List[Node], tokens: AbstractSet[str] = frozenset()):
    """Removes any among a particular set of tokens from the immediate
    descendants of a node. If ``tokens`` is the empty set, all tokens
    are removed."""
    remove_children_if(context, partial(is_token, tokens=tokens))


@transformation_factory
def remove_parser(context: List[Node], tag_names: AbstractSet[str]):
    """Removes children by tag name."""
    remove_children_if(context, partial(is_one_of, tag_name_set=tag_names))


@transformation_factory
def remove_content(context: List[Node], regexp: str):
    """Removes children whose content matches the regular expression ``regexp``."""
    remove_children_if(context, partial(has_content, regexp=regexp))


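# Usage sketch (hypothetical tag and token names) for the factory-style
# removals above:
#
#     table = {
#         'expression': remove_tokens('(', ')'),    # drop literal bracket tokens
#         'block': remove_parser('comment'),        # drop children tagged 'comment'
#         'text': remove_content('--.*'),           # drop children whose content starts with '--'
#     }
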
########################################################################
#
# AST semantic validation functions (EXPERIMENTAL!!!)
#
########################################################################

@transformation_factory(Callable)
def assert_condition(context: List[Node], condition: Callable, error_msg: str = ''):
    """Checks for `condition`; adds an error message if condition is not met."""
    node = context[-1]
    if not condition(context):
        if error_msg:
            node.add_error(error_msg % node.tag_name if error_msg.find("%s") > 0 else error_msg)
        else:
            cond_name = condition.__name__ if hasattr(condition, '__name__') \
                        else condition.__class__.__name__ if hasattr(condition, '__class__') \
                        else '<unknown>'
            node.add_error("transform.assert_condition: Failed to meet condition " + cond_name)


assert_has_children = assert_condition(lambda nd: nd.children, 'Element "%s" has no children')


@transformation_factory
def assert_content(context: List[Node], regexp: str):
    node = context[-1]
    if not has_content(context, regexp):
        node.add_error('Element "%s" violates %s on %s' %
                       (node.parser.name, str(regexp), str(node)))


@transformation_factory
def require(context: List[Node], child_tags: AbstractSet[str]):
    node = context[-1]
    for child in node.children:
        if child.tag_name not in child_tags:
            node.add_error('Element "%s" is not allowed inside "%s".' %
                           (child.parser.name, node.parser.name))


@transformation_factory
def forbid(context: List[Node], child_tags: AbstractSet[str]):
    node = context[-1]
    for child in node.children:
        if child.tag_name in child_tags:
            node.add_error('Element "%s" cannot be nested inside "%s".' %
                           (child.parser.name, node.parser.name))
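

# Usage sketch (hypothetical tag names) for the experimental validators above,
# following the same calling conventions as the other factory functions:
#
#     validation_table = {
#         'list': [require('item', 'separator'),    # only these child tags are allowed
#                  forbid('list'),                   # no directly nested lists
#                  assert_content(r'\S')],           # content must start with a non-whitespace character
#         'item': assert_has_children,
#     }
#     traverse(ast, validation_table)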