"""transformation.py - transformation functions for converting the
                       concrete into the abstract syntax tree

Copyright 2016  by Eckhart Arnold (arnold@badw.de)
                Bavarian Academy of Sciences and Humanities (badw.de)

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied.  See the License for the specific language governing
permissions and limitations under the License.
"""

import inspect
from functools import partial, reduce, singledispatch

from DHParser.syntaxtree import Node, WHITESPACE_PTYPE, TOKEN_PTYPE, MockParser
from DHParser.toolkit import expand_table, smart_list, re, typing

from typing import AbstractSet, Any, ByteString, Callable, cast, Container, Dict, \
    Iterator, List, NamedTuple, Sequence, Union, Text, Tuple


__all__ = ('TransformationDict',
           'TransformationProc',
           'ConditionFunc',
           'KeyFunc',
           'transformation_factory',
           'key_parser_name',
           'key_tag_name',
           'traverse',
           'is_named',
           'replace_by_child',
           'content_from_child',
           'replace_or_reduce',
           'replace_parser',
           'collapse',
           'merge_children',
           'replace_content',
           'apply_if',
           'is_anonymous',
           'is_whitespace',
           'is_empty',
           'is_expendable',
           'is_token',
           'is_one_of',
           'has_content',
           'remove_children_if',
           'remove_nodes',
           'remove_content',
           'remove_first',
           'remove_last',
           'remove_whitespace',
           'remove_empty',
           'remove_expendables',
           'remove_brackets',
           'remove_infix_operator',
           'remove_single_child',
           'remove_tokens',
           'keep_children',
           'flatten',
           'forbid',
           'require',
           'assert_content',
           'assert_condition',
           'assert_has_children')


TransformationProc = Callable[[List[Node]], None]
TransformationDict = Dict[str, Sequence[Callable]]
ProcessingTableType = Dict[str, Union[Sequence[Callable], TransformationDict]]
ConditionFunc = Callable  # Callable[[List[Node]], bool]
KeyFunc = Callable[[Node], str]
CriteriaType = Union[int, str, Callable]


# TODO: Add more optional type dispatch parameters, e.g. t2=None, t3=None, t4=None
def transformation_factory(t=None):
    """Creates factory functions from transformation-functions that
    dispatch on the first parameter after the context parameter.

    Decorating a transformation-function that has more than merely the
    ``context``-parameter with ``transformation_factory`` creates a
    function with the same name, which returns a partial-function that
    takes just the context-parameter.

    Additionally, there is some syntactic sugar for
    transformation-functions that receive a collection as their second
    parameter and do not have any further parameters. In this case a
    list of parameters passed to the factory function will be converted
    into a collection.

    The main benefit is the readability of processing tables.

    Usage:
        @transformation_factory(AbstractSet[str])
        def remove_tokens(context, tokens):
            ...
      or, alternatively:
        @transformation_factory
        def remove_tokens(context, tokens: AbstractSet[str]):
            ...

    Example:
        trans_table = { 'expression': remove_tokens('+', '-') }
      instead of:
        trans_table = { 'expression': partial(remove_tokens, tokens={'+', '-'}) }

    Parameters:
        t:  type of the second argument of the transformation function,
            only necessary if the transformation functions' parameter list
            does not have type annotations.
    """

    def decorator(f):
        sig = inspect.signature(f)
        params = list(sig.parameters.values())[1:]
        if len(params) == 0:
            return f  # '@transformer' not needed w/o free parameters
        assert t or params[0].annotation != params[0].empty, \
            "No type information on second parameter found! Please, use type " \
            "annotation or provide the type information via transfomer-decorator."
        p1type = t or params[0].annotation
        f = singledispatch(f)
        try:
            if len(params) == 1 and issubclass(p1type, Container) \
                    and not issubclass(p1type, Text) and not issubclass(p1type, ByteString):
                def gen_special(*args):
                    c = set(args) if issubclass(p1type, AbstractSet) else \
                        list(args) if issubclass(p1type, Sequence) else args
                    d = {params[0].name: c}
                    return partial(f, **d)

                f.register(p1type.__args__[0], gen_special)
        except AttributeError:
            pass  # Union Type does not allow subclassing, but is not needed here

        def gen_partial(*args, **kwargs):
            d = {p.name: arg for p, arg in zip(params, args)}
            d.update(kwargs)
            return partial(f, **d)

        f.register(p1type, gen_partial)
        return f

    if isinstance(t, type(lambda: 1)):
        # Provide for the case that transformation_factory has been
        # written as plain decorator and not as a function call that
        # returns the decorator proper.
        func = t
        t = None
        return decorator(func)
    else:
        return decorator
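

# A usage sketch for ``transformation_factory`` (the transformation
# ``wrap_content`` and the tag name 'word' are hypothetical and serve only
# as an illustration):
#
#     @transformation_factory
#     def wrap_content(context: List[Node], prefix: str):
#         node = context[-1]
#         node.result = prefix + str(node)
#
#     table = {'word': wrap_content('-')}
#     # equivalent to: {'word': partial(wrap_content, prefix='-')}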


def key_parser_name(node: Node) -> str:
    """Returns the parser name of ``node`` for use as a dictionary key."""
    return node.parser.name


def key_tag_name(node: Node) -> str:
    """Returns the tag name of ``node`` for use as a dictionary key."""
    return node.tag_name


def traverse(root_node: Node,
             processing_table: ProcessingTableType,
             key_func: KeyFunc=key_tag_name) -> None:
    """
    Traverses the syntax tree starting with the given ``root_node`` depth
    first and applies the sequences of callback-functions registered
    in the ``processing_table``-dictionary.

    The most important use case is the transformation of a concrete
    syntax tree into an abstract syntax tree (AST). But it is also
    possible to employ tree-traversal for the semantic analysis of the AST.

    In order to assign sequences of callback-functions to nodes, a
    dictionary ("processing table") is used. The keys usually represent
    tag names, but any other key function is possible. There exist
    three special keys:
        '+': always called (before any other processing function)
        '*': called for those nodes for which no (other) processing
             function appears in the table
        '~': always called (after any other processing function)

    Args:
        root_node (Node): The root-node of the syntax tree to be traversed
        processing_table (dict): node key -> sequence of functions that
            will be applied to matching nodes in order. This dictionary
            is interpreted as a `compact_table`. See
            `toolkit.expand_table` or `EBNFCompiler.EBNFTransTable`
        key_func (function): A mapping key_func(node) -> keystr. The default
            key_func yields node.tag_name.

    Example:
        table = { "term": [replace_by_child, flatten],
            "factor, flowmarker, retrieveop": replace_by_child }
        traverse(node, table)
    """
    # Is this optimization really needed?
    if '__cache__' in processing_table:
        # assume that processing table has already been expanded
        table = processing_table
        cache = processing_table['__cache__']
    else:
        # normalize processing_table entries by turning single values
        # into lists with a single value
        table = {name: cast(Sequence[Callable], smart_list(call))
                 for name, call in list(processing_table.items())}
        table = expand_table(table)
        cache = table.setdefault('__cache__', cast(TransformationDict, dict()))
        # change processing table in place, so it's already expanded and the cache is filled next time
        processing_table.clear()
        processing_table.update(table)

    # assert '__cache__' in processing_table
    # # Code without optimization
    # table = {name: smart_list(call) for name, call in list(processing_table.items())}
    # table = expand_table(table)
    # cache = {}  # type: Dict[str, List[Callable]]

    def traverse_recursive(context):
        node = context[-1]
        if node.children:
            for child in node.result:
                context.append(child)
                traverse_recursive(context)  # depth first
                node.error_flag = max(node.error_flag, child.error_flag)  # propagate error flag
                context.pop()

        key = key_func(node)
        try:
            sequence = cache[key]
        except KeyError:
            sequence = table.get('+', []) + \
                       table.get(key, table.get('*', [])) + \
                       table.get('~', [])
            # '+' always called (before any other processing function)
            # '*' called for those nodes for which no (other) processing function
            #     appears in the table
            # '~' always called (after any other processing function)
            cache[key] = sequence

        for call in sequence:
            call(context)

    traverse_recursive([root_node])
    # assert processing_table['__cache__']
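

# A usage sketch for ``traverse`` (the node names 'expression' and 'term'
# belong to a hypothetical grammar; the table merely illustrates the special
# keys '+', '*' and '~' described in the docstring above):
#
#     ast_table = {
#         '+': remove_whitespace,              # applied first to every node
#         'expression': [flatten, remove_tokens('+', '-')],
#         'term': replace_by_child,
#         '*': replace_or_reduce,              # fallback for unlisted nodes
#         '~': remove_empty                    # applied last to every node
#     }
#     traverse(syntax_tree, ast_table, key_func=key_tag_name)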


# ------------------------------------------------
#
# rearranging transformations:
#     - tree may be rearranged (e.g. flattened)
#     - nodes that are not leaves may be dropped
#     - order is preserved
#     - leaf content is preserved (though not necessarily the leaves themselves)
#
# ------------------------------------------------


def pick_child(context: List[Node], criteria: CriteriaType):
    """Returns the first child that meets the criteria."""
    if isinstance(criteria, int):
        try:
            return context[-1].children[criteria]
        except IndexError:
            return None
    elif isinstance(criteria, str):
        for child in context[-1].children:
            if child.tag_name == criteria:
                return child
        return None
    else:  # assume criteria has type ConditionFunc
        for child in context[-1].children:
            context.append(child)
            evaluation = criteria(context)
            context.pop()
            if evaluation:
                return child
        return None
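

# Illustration of the three kinds of criteria accepted by ``pick_child``
# (the tag name 'factor' is hypothetical):
#
#     pick_child(context, 0)          # first child, by index
#     pick_child(context, 'factor')   # first child with tag name 'factor'
#     pick_child(context, is_named)   # first child for which the condition holds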


def replace_by(node: Node, child: Node):
    """Replaces ``node``'s parser and result by those of ``child`` and adopts
    the child's errors. If the child is anonymous, it first inherits the
    node's name."""
    if not child.parser.name:
        child.parser = MockParser(node.parser.name, child.parser.ptype)
        # parser names must not be overwritten, else: child.parser.name = node.parser.name
    node.parser = child.parser
    node._errors.extend(child._errors)
    node.result = child.result


def reduce_child(node: Node, child: Node):
    """Transfers the result and errors of ``child`` to ``node``, keeping the
    node's own parser (and thus its name)."""
    node._errors.extend(child._errors)
    node.result = child.result


# TODO: default value = lambda context: len(context[-1].children) == 1
# @transformation_factory(int, str, Callable)
# def replace_by_child(context: List[Node], criteria: CriteriaType=0):
#     """
#     Replace a node by the first of its immediate descendants
#     that meets the `criteria`. The criteria can either be the
#     index of the child (counting from zero), or the tag name or
#     a boolean-valued function on the context of the child.
#     If no child matching the criteria is found, the node will
#     not be replaced.
#     """
#     child = pick_child(context, criteria)
#     if child:
#         print(child)
#         replace_by(context[-1], child)


# @transformation_factory(int, str, Callable)
# def content_from_child(context: List[None], criteria: CriteriaType=0):
#     """
#     Reduce a node, by transferring the result of the first of its
#     immediate descendants that meets the `criteria` to this node,
#     but keeping this node's parser entry. The criteria can either
#     be the index of the child (counting from zero), or the tag
#     name or a boolean-valued function on the context of the child.
#     If no child matching the criteria is found, the node will
#     not be replaced.
#     """
#     child = pick_child(context, criteria)
#     if child:
#         reduce_child(context[-1], child)



def replace_by_child(context: List[Node]):
    """
    Removes a single-branch node, replacing it by its immediate descendant.
    The replacement only takes place if the node has exactly one child.
    """
    node = context[-1]
    if len(node.children) == 1:
        replace_by(node, node.children[0])


def content_from_child(context: List[Node]):
    """
    Reduces a single-branch node by transferring the result of its immediate
    descendant to this node, but keeping this node's parser entry. The
    reduction only takes place if the node has exactly one child.
    """
    node = context[-1]
    if len(node.children) == 1:
        reduce_child(node, node.children[0])


def is_named(context: List[Node]) -> bool:
    """Returns ``True`` if the current node has a parser name."""
    return bool(context[-1].parser.name)


def is_anonymous(context: List[Node]) -> bool:
    """Returns ``True`` if the current node does not have a parser name."""
    return not context[-1].parser.name


@transformation_factory(Callable)
def replace_or_reduce(context: List[Node], condition: Callable=is_named):
    """
    Replaces node by a single child, if condition is met on child,
    otherwise (i.e. if the child is anonymous) reduces the child.
    """
    node = context[-1]
    if len(node.children) == 1:
        child = node.children[0]
        if condition(context):
            replace_by(node, child)
        else:
            reduce_child(node, child)


@transformation_factory
def replace_parser(context: List[Node], name: str):
    """
    Replaces the parser of a Node with a mock parser with the given
    name.

    Parameters:
        context: the context where the parser shall be replaced
        name: "NAME:PTYPE" of the surrogate. The ptype is optional.
    """
    node = context[-1]
    name, ptype = (name.split(':') + [''])[:2]
    node.parser = MockParser(name, ptype)
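
# E.g. (sketch, hypothetical tag name): ``replace_parser('operator')`` renames
# the matched node to "operator"; an optional ptype can be appended after a
# colon as described in the docstring above.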


@transformation_factory(Callable)
def flatten(context: List[Node], condition: Callable=is_anonymous, recursive: bool=True):
    """
    Flattens all children that fulfil the given `condition`
    (default: all unnamed children). Flattening means that wherever a
    node has child nodes, the child nodes are inserted in place of the
    node.

    If the parameter `recursive` is `True`, the same is recursively done
    with the child-nodes first. In other words, all leaves of this node
    and its child nodes are collected in-order as direct children of
    this node.

    Applying flatten recursively results in these kinds of structural
    transformations:
        (1 (+ 2) (+ 3))    ->   (1 + 2 + 3)
        (1 (+ (2 + (3))))  ->   (1 + 2 + 3)
    """
    node = context[-1]
    if node.children:
        new_result = []     # type: List[Node]
        for child in node.children:
            context.append(child)
            if child.children and condition(context):
                if recursive:
                    flatten(context, condition, recursive)
                new_result.extend(child.children)
            else:
                new_result.append(child)
            context.pop()
        node.result = tuple(new_result)
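

# E.g. (sketch, hypothetical tag names): flatten only children named 'group'
# instead of all anonymous children, and only one level deep:
#
#     table = {'sequence': flatten(partial(is_one_of, tag_name_set={'group'}),
#                                  recursive=False)}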


def collapse(context: List[Node]):
    """
    Collapses all sub-nodes of a node by replacing them with the
    string representation of the node.
    """
    node = context[-1]
    node.result = str(node)
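

# E.g. (sketch, hypothetical tag name): useful for nodes whose inner structure
# is irrelevant for the AST, such as identifiers:
#
#     table = {'identifier': collapse}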


@transformation_factory
def merge_children(context: List[Node], tag_names: List[str]):
    """
    Joins adjacent children that have one of the given tag-names into a
    single child node with a mock-parser named after the first tag-name
    in the list.
    """
    node = context[-1]
    result = []
    name, ptype = ('', tag_names[0]) if tag_names[0][:1] == ':' else (tag_names[0], '')
    if node.children:
        i = 0
        L = len(node.children)
        while i < L:
            while i < L and not node.children[i].tag_name in tag_names:
                result.append(node.children[i])
                i += 1
            k = i + 1
            while (k < L and node.children[k].tag_name in tag_names
                   and bool(node.children[i].children) == bool(node.children[k].children)):
                k += 1
            if i < L:
                result.append(Node(MockParser(name, ptype),
                                   reduce(lambda a, b: a + b,
                                          (node.children for node in node.children[i:k]))))
            i = k
        node.result = tuple(result)


# ------------------------------------------------
#
# destructive transformations:
#     - tree may be rearranged (flattened),
#     - order is preserved
#     - but (irrelevant) leaves may be dropped
#     - errors of dropped leaves will be lost
#
# ------------------------------------------------


@transformation_factory
def replace_content(context: List[Node], func: Callable):  # Callable[[ResultType], ResultType]
    """Replaces the content of the node. ``func`` takes the node's result
    as an argument and returns the mapped result.
    """
    node = context[-1]
    node.result = func(node.result)


def is_whitespace(context: List[Node]) -> bool:
    """Returns ``True`` if the node is a whitespace node, i.e. if its
    parser's ptype is ``WHITESPACE_PTYPE``."""
    return context[-1].parser.ptype == WHITESPACE_PTYPE


def is_empty(context: List[Node]) -> bool:
    """Returns ``True`` if the node's result is empty."""
    return not context[-1].result


def is_expendable(context: List[Node]) -> bool:
    """Returns ``True`` if the node is either empty or whitespace."""
    return is_empty(context) or is_whitespace(context)


def is_token(context: List[Node], tokens: AbstractSet[str] = frozenset()) -> bool:
    """Returns ``True`` if the node is a token and, unless ``tokens`` is
    empty, its content is one of the given tokens."""
    node = context[-1]
    return node.parser.ptype == TOKEN_PTYPE and (not tokens or node.result in tokens)


def is_one_of(context: List[Node], tag_name_set: AbstractSet[str]) -> bool:
    """Returns ``True`` if the node's tag_name is one of the
    given tag names."""
    return context[-1].tag_name in tag_name_set


def has_content(context: List[Node], regexp: str) -> bool:
    """Checks a node's content against a regular expression."""
    return bool(re.match(regexp, str(context[-1])))


@transformation_factory(Callable)
def apply_if(context: List[Node], transformation: Callable, condition: Callable):
    """Applies a transformation only if a certain condition is met."""
    node = context[-1]
    if condition(node):
        transformation(context)


@transformation_factory(slice)
def keep_children(context: List[Node], section: slice = slice(None)):
    """Keeps only child-nodes which fall into a slice of the result field."""
    node = context[-1]
    if node.children:
        node.result = node.children[section]


@transformation_factory(Callable)
def remove_children_if(context: List[Node], condition: Callable):  # , section: slice = slice(None)):
    """Removes all children for which `condition()` returns `True`."""
    node = context[-1]
    if node.children:
        node.result = tuple(c for c in node.children if not condition(context + [c]))
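

# The predicates above (``is_whitespace``, ``is_token``, ...) are typically
# bound with ``partial`` or via the factory and handed to ``remove_children_if``,
# e.g. (sketch):
#
#     remove_plus_tokens = remove_children_if(partial(is_token, tokens={'+'}))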


# @transformation_factory(Callable)
# def remove_children(context: List[Node],
#                     condition: Callable = TRUE_CONDITION,
#                     section: slice = slice(None)):
#     """Removes all nodes from a slice of the result field if the function
#     `condition(child_node)` evaluates to `True`."""
#     node = context[-1]
#     if node.children:
#         c = node.children
#         N = len(c)
#         rng = range(*section.indices(N))
#         node.result = tuple(c[i] for i in range(N)
#                             if i not in rng or not condition(context + [c[i]]))
#         # selection = []
#         # for i in range(N):
#         #     context.append(c[i])
#         #     if not i in rng or not condition(context):
#         #         selection.append(c[i])
#         #     context.pop()
#         # if len(selection) != c:
#         #     node.result = tuple(selection)


remove_whitespace = remove_children_if(is_whitespace)  # partial(remove_children_if, condition=is_whitespace)
remove_empty = remove_children_if(is_empty)
remove_expendables = remove_children_if(is_expendable)  # partial(remove_children_if, condition=is_expendable)
remove_first = apply_if(keep_children(slice(1, None)), lambda nd: len(nd.children) > 1)
remove_last = apply_if(keep_children(slice(None, -1)), lambda nd: len(nd.children) > 1)
remove_brackets = apply_if(keep_children(slice(1, -1)), lambda nd: len(nd.children) >= 2)
remove_infix_operator = keep_children(slice(0, None, 2))
remove_single_child = apply_if(keep_children(slice(0)), lambda nd: len(nd.children) == 1)
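

# These pre-parameterized transformations can be entered directly into a
# processing table, e.g. (hypothetical node names):
#
#     table = {'group': [remove_brackets, remove_whitespace],
#              'term': remove_infix_operator}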


@transformation_factory
def remove_tokens(context: List[Node], tokens: AbstractSet[str] = frozenset()):
    """Removes any among a particular set of tokens from the immediate
    descendants of a node. If ``tokens`` is the empty set, all tokens
    are removed."""
    remove_children_if(context, partial(is_token, tokens=tokens))


@transformation_factory
def remove_nodes(context: List[Node], tag_names: AbstractSet[str]):
    """Removes children by tag name."""
    remove_children_if(context, partial(is_one_of, tag_name_set=tag_names))


@transformation_factory
def remove_content(context: List[Node], regexp: str):
    """Removes children whose string content matches the regular expression ``regexp``."""
    remove_children_if(context, partial(has_content, regexp=regexp))


########################################################################
#
# AST semantic validation functions (EXPERIMENTAL!!!)
#
########################################################################

@transformation_factory(Callable)
def assert_condition(context: List[Node], condition: Callable, error_msg: str = ''):
    """Checks for `condition`; adds an error message if condition is not met."""
    node = context[-1]
    if not condition(context):
        if error_msg:
            node.add_error(error_msg % node.tag_name if error_msg.find("%s") >= 0 else error_msg)
        else:
            cond_name = condition.__name__ if hasattr(condition, '__name__') \
                        else condition.__class__.__name__ if hasattr(condition, '__class__') \
                        else '<unknown>'
            node.add_error("transform.assert_condition: Failed to meet condition " + cond_name)


assert_has_children = assert_condition(lambda ctx: ctx[-1].children, 'Element "%s" has no children')
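

# A usage sketch for ``assert_condition`` (hypothetical node name and
# condition): report any 'assignment' node that has fewer than three children:
#
#     table = {'assignment': assert_condition(
#         lambda ctx: len(ctx[-1].children) >= 3,
#         'Element "%s" is incomplete')}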


@transformation_factory
def assert_content(context: List[Node], regexp: str):
    """Adds an error if the node's content does not match the regular
    expression ``regexp``."""
    node = context[-1]
    if not has_content(context, regexp):
        node.add_error('Element "%s" violates %s on %s' %
                       (node.parser.name, str(regexp), str(node)))


@transformation_factory
def require(context: List[Node], child_tags: AbstractSet[str]):
    """Adds an error for each child whose tag name is not in ``child_tags``."""
    node = context[-1]
    for child in node.children:
        if child.tag_name not in child_tags:
            node.add_error('Element "%s" is not allowed inside "%s".' %
                           (child.parser.name, node.parser.name))


@transformation_factory
def forbid(context: List[Node], child_tags: AbstractSet[str]):
    """Adds an error for each child whose tag name is in ``child_tags``."""
    node = context[-1]
    for child in node.children:
        if child.tag_name in child_tags:
            node.add_error('Element "%s" cannot be nested inside "%s".' %
                           (child.parser.name, node.parser.name))
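

# Usage sketch (hypothetical tag names): restrict which children may appear
# inside a 'definition' node and rule out nested definitions:
#
#     table = {'definition': [require('symbol', 'expression'),
#                             forbid('definition')]}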