"""transformation.py - transformation functions for converting the
                       concrete into the abstract syntax tree

Copyright 2016  by Eckhart Arnold (arnold@badw.de)
                Bavarian Academy of Sciences and Humanities (badw.de)

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied.  See the License for the specific language governing
permissions and limitations under the License.
"""

import inspect
from functools import partial, reduce, singledispatch

from DHParser.syntaxtree import Node, WHITESPACE_PTYPE, TOKEN_PTYPE, MockParser
from DHParser.toolkit import expand_table, smart_list, re, typing

from typing import AbstractSet, Any, ByteString, Callable, cast, Container, Dict, \
    Iterator, List, NamedTuple, Sequence, Union, Text, Tuple

__all__ = ('TransformationDict',
           'TransformationProc',
           'ConditionFunc',
           'KeyFunc',
           'transformation_factory',
           'key_parser_name',
           'key_tag_name',
           'traverse',
           'is_named',
           'replace_by_single_child',
           'content_from_single_child',
           'replace_or_reduce',
           'replace_parser',
           'collapse',
           'merge_children',
           'replace_content',
           'apply_if',
           'is_anonymous',
           'is_whitespace',
           'is_empty',
           'is_expendable',
           'is_token',
           'is_one_of',
           'has_content',
           'remove_children_if',
           'remove_nodes',
           'remove_content',
           'remove_first',
           'remove_last',
           'remove_whitespace',
           'remove_empty',
           'remove_expendables',
           'remove_brackets',
           'remove_infix_operator',
           'remove_single_child',
           'remove_tokens',
           'keep_children',
           'flatten',
           'forbid',
           'require',
           'assert_content',
           'assert_condition',
           'assert_has_children')


TransformationProc = Callable[[List[Node]], None]
TransformationDict = Dict[str, Sequence[Callable]]
ProcessingTableType = Dict[str, Union[Sequence[Callable], TransformationDict]]
ConditionFunc = Callable  # Callable[[List[Node]], bool]
KeyFunc = Callable[[Node], str]
CriteriaType = Union[int, str, Callable]


def transformation_factory(t1=None, t2=None, t3=None, t4=None, t5=None):
    """Creates factory functions from transformation-functions that
    dispatch on the first parameter after the context parameter.

    Decorating a transformation-function that has more than merely the
    ``context``-parameter with ``transformation_factory`` creates a
    function with the same name, which returns a partial-function that
    takes just the context-parameter.

    Additionally, there is some syntactic sugar for
    transformation-functions that receive a collection as their second
    parameter and do not have any further parameters. In this case a
    list of parameters passed to the factory function will be converted
    into a collection.

    The main benefit is the readability of processing tables.

    Usage:
        @transformation_factory(AbstractSet[str])
        def remove_tokens(context, tokens):
            ...
      or, alternatively:
        @transformation_factory
        def remove_tokens(context, tokens: AbstractSet[str]):
            ...

    Example:
        trans_table = { 'expression': remove_tokens('+', '-') }
      instead of:
        trans_table = { 'expression': partial(remove_tokens, tokens={'+', '-'}) }

    Parameters:
        t1:  type of the second argument of the transformation function,
            only necessary if the transformation functions' parameter list
            does not have type annotations.
    """

    def decorator(f):
        sig = inspect.signature(f)
        params = list(sig.parameters.values())[1:]
        if len(params) == 0:
            return f  # '@transformation_factory' not needed w/o free parameters
        assert t1 or params[0].annotation != params[0].empty, \
            "No type information on second parameter found! Please use type " \
            "annotation or provide the type information via the transformation_factory-decorator."
        p1type = t1 or params[0].annotation
        f = singledispatch(f)
        try:
            if len(params) == 1 and issubclass(p1type, Container) \
                    and not issubclass(p1type, Text) and not issubclass(p1type, ByteString):
                # syntactic sugar: allow the items of the collection to be passed
                # as individual arguments to the factory function
                def gen_special(*args):
                    c = set(args) if issubclass(p1type, AbstractSet) else \
                        list(args) if issubclass(p1type, Sequence) else args
                    d = {params[0].name: c}
                    return partial(f, **d)

                f.register(p1type.__args__[0], gen_special)
        except AttributeError:
            pass  # Union Type does not allow subclassing, but is not needed here

        def gen_partial(*args, **kwargs):
            d = {p.name: arg for p, arg in zip(params, args)}
            d.update(kwargs)
            return partial(f, **d)

        for t in (p1type, t2, t3, t4, t5):
            if t:
                f.register(t, gen_partial)
            else:
                break
        return f

    if isinstance(t1, type(lambda: 1)):
        # Provide for the case that transformation_factory has been
        # written as plain decorator and not as a function call that
        # returns the decorator proper.
        func = t1
        t1 = None
        return decorator(func)
    else:
        return decorator

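# A sketch of how a grammar author might define a new transformation with the
# factory. The function 'wrap_content' and the tag name 'word' are hypothetical
# and only serve as an illustration:
#
#     @transformation_factory
#     def wrap_content(context: List[Node], prefix: str):
#         node = context[-1]
#         node.result = prefix + node.content
#
#     table = {'word': wrap_content('>> ')}   # a partial that only takes the context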

def key_parser_name(node: Node) -> str:
    """Key function: yields the name of the node's parser."""
    return node.parser.name


def key_tag_name(node: Node) -> str:
    """Key function: yields the node's tag name."""
    return node.tag_name


def traverse(root_node: Node,
             processing_table: ProcessingTableType,
             key_func: KeyFunc=key_tag_name) -> None:
    """
    Traverses the syntax tree starting with the given ``root_node`` depth
    first and applies the sequences of callback-functions registered
    in the ``processing_table``-dictionary.

    The most important use case is the transformation of a concrete
    syntax tree into an abstract syntax tree (AST). But it is also
    imaginable to employ tree-traversal for the semantic analysis of
    the AST.

    In order to assign sequences of callback-functions to nodes, a
    dictionary ("processing table") is used. The keys usually represent
    tag names, but any other key function is possible. There exist
    three special keys:
        '+': always called (before any other processing function)
        '*': called for those nodes for which no (other) processing
             function appears in the table
        '~': always called (after any other processing function)

    Args:
        root_node (Node): The root-node of the syntax tree to be traversed
        processing_table (dict): node key -> sequence of functions that
            will be applied to matching nodes in order. This dictionary
            is interpreted as a `compact_table`. See
            `toolkit.expand_table` or `EBNFCompiler.EBNFTransTable`.
        key_func (function): A mapping key_func(node) -> keystr. The default
            key_func yields node.tag_name.

    Example:
        table = { "term": [replace_by_single_child, flatten],
                  "factor, flowmarker, retrieveop": replace_by_single_child }
        traverse(node, table)
    """
    # Is this optimization really needed?
    if '__cache__' in processing_table:
        # assume that processing table has already been expanded
        table = processing_table
        cache = processing_table['__cache__']
    else:
        # normalize processing_table entries by turning single values
        # into lists with a single value
        table = {name: cast(Sequence[Callable], smart_list(call))
                 for name, call in list(processing_table.items())}
        table = expand_table(table)
        cache = table.setdefault('__cache__', cast(TransformationDict, dict()))
        # change the processing table in place, so it is already expanded
        # and the cache is filled the next time
        processing_table.clear()
        processing_table.update(table)

    # assert '__cache__' in processing_table
    # # Code without optimization
    # table = {name: smart_list(call) for name, call in list(processing_table.items())}
    # table = expand_table(table)
    # cache = {}  # type: Dict[str, List[Callable]]

    def traverse_recursive(context):
        node = context[-1]
        if node.children:
            for child in node.result:
                context.append(child)
                traverse_recursive(context)  # depth first
                node.error_flag = max(node.error_flag, child.error_flag)  # propagate error flag
                context.pop()

        key = key_func(node)
        try:
            sequence = cache[key]
        except KeyError:
            sequence = table.get('+', []) + \
                       table.get(key, table.get('*', [])) + \
                       table.get('~', [])
            # '+' always called (before any other processing function)
            # '*' called for those nodes for which no (other) processing function
            #     appears in the table
            # '~' always called (after any other processing function)
            cache[key] = sequence

        for call in sequence:
            call(context)

    traverse_recursive([root_node])
    # assert processing_table['__cache__']

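# Usage sketch for `traverse` (the tag names 'expression' and 'term' are
# assumptions about a particular grammar, not part of this module):
#
#     ast_table = {
#         '+': remove_expendables,                    # applied first to every node
#         'expression': [remove_tokens('+', '-'), flatten],
#         'term': replace_by_single_child,
#         '*': replace_by_single_child,               # fallback for unlisted keys
#         '~': remove_empty                           # applied last to every node
#     }
#     traverse(syntax_tree_root, ast_table)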

# ------------------------------------------------
#
# rearranging transformations:
#     - tree may be rearranged (e.g. flattened)
#     - nodes that are not leaves may be dropped
#     - order is preserved
#     - leaf content is preserved (though not necessarily the leaves themselves)
#
# ------------------------------------------------


def replace_by(node: Node, child: Node):
    """Replaces `node` by its descendant `child`: the node adopts the child's
    parser (and thus its tag name), errors and result. If the child is
    anonymous, it first receives the node's name via a mock parser."""
    if not child.parser.name:
        child.parser = MockParser(node.parser.name, child.parser.ptype)
        # parser names must not be overwritten, else: child.parser.name = node.parser.name
    node.parser = child.parser
    node._errors.extend(child._errors)
    node.result = child.result


def reduce_child(node: Node, child: Node):
    """Transfers the result and errors of `child` to `node`, keeping the
    node's own parser (and thus its tag name)."""
    node._errors.extend(child._errors)
    node.result = child.result


def pick_child(context: List[Node], criteria: CriteriaType):
    """Returns the first child that meets the criteria."""
    if isinstance(criteria, int):
        try:
            return context[-1].children[criteria]
        except IndexError:
            return None
    elif isinstance(criteria, str):
        for child in context[-1].children:
            if child.tag_name == criteria:
                return child
        return None
    else:  # assume criteria has type ConditionFunc
        for child in context[-1].children:
            context.append(child)
            evaluation = criteria(context)
            context.pop()
            if evaluation:
                return child
        return None

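# The three kinds of criteria accepted by `pick_child` (and by `replace_by_child`
# and `content_from_child` below), sketched; the tag name 'identifier' is an
# assumption:
#
#     pick_child(context, 0)                # first child, by index
#     pick_child(context, 'identifier')     # first child with this tag name
#     pick_child(context, lambda ctx: bool(ctx[-1].children))   # first non-leaf child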

def single_child(context: List[Node]) -> bool:
    """Condition: the node that is to be replaced (i.e. the next to last
    node in the context) has exactly one child."""
    return len(context[-2].children) == 1


@transformation_factory(int, str, Callable)
def replace_by_child(context: List[Node], criteria: CriteriaType=single_child):
    """
    Replace a node by the first of its immediate descendants
    that meets the `criteria`. The criteria can either be the
    index of the child (counting from zero), or the tag name, or
    a boolean-valued function on the context of the child.
    If no child matching the criteria is found, the node will
    not be replaced.

    With the default value for `criteria` this has the same
    semantics as `replace_by_single_child`.
    """
    child = pick_child(context, criteria)
    if child:
        replace_by(context[-1], child)


@transformation_factory(int, str, Callable)
def content_from_child(context: List[Node], criteria: CriteriaType=single_child):
    """
    Reduce a node by transferring the result of the first of its
    immediate descendants that meets the `criteria` to this node,
    but keeping this node's parser entry. The criteria can either
    be the index of the child (counting from zero), or the tag
    name, or a boolean-valued function on the context of the child.
    If no child matching the criteria is found, the node will
    not be replaced.

    With the default value for `criteria` this has the same semantics
    as `content_from_single_child`.
    """
    child = pick_child(context, criteria)
    if child:
        reduce_child(context[-1], child)



def replace_by_single_child(context: List[Node]):
    """
    Removes a single-branch node, replacing it by its immediate descendant.
    If there is more than one child, no replacement takes place.
    """
    node = context[-1]
    if len(node.children) == 1:
        replace_by(node, node.children[0])


def content_from_single_child(context: List[Node]):
    """
    Reduce a single-branch node by transferring the result of its
    immediate descendant to this node, but keeping this node's parser entry.
    This will only be done if the last node in the context has exactly
    one child.
    """
    node = context[-1]
    if len(node.children) == 1:
        reduce_child(node, node.children[0])

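# Illustrative difference between the two reductions, in the S-expression
# notation used in the docstrings of this module (tag names are assumptions):
#
#     (term (factor "x"))   --replace_by_single_child-->     (factor "x")
#     (term (factor "x"))   --content_from_single_child-->   (term "x")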

def is_named(context: List[Node]) -> bool:
    """Returns True, if the node in the context has a parser name."""
    return bool(context[-1].parser.name)


def is_anonymous(context: List[Node]) -> bool:
    """Returns True, if the node in the context does not have a parser name."""
    return not context[-1].parser.name


@transformation_factory(Callable)
def replace_or_reduce(context: List[Node], condition: Callable=is_named):
    """
    Replaces the node by its single child, if the condition is met for the
    child, otherwise (i.e. if the child is anonymous) the child is reduced
    into the node.
    """
    node = context[-1]
    if len(node.children) == 1:
        child = node.children[0]
        if condition(context):
            replace_by(node, child)
        else:
            reduce_child(node, child)

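# Usage sketch (the tag name 'group' is an assumption): keep named single
# children as such, but merge anonymous ones into their parent:
#
#     table = {'group': replace_or_reduce}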

@transformation_factory
def replace_parser(context: List[Node], name: str):
    """
    Replaces the parser of a Node with a mock parser with the given
    name.

    Parameters:
        context: the context where the parser shall be replaced
        name: "NAME:PTYPE" of the surrogate. The ptype is optional.
    """
    node = context[-1]
    name, ptype = (name.split(':') + [''])[:2]
    node.parser = MockParser(name, ptype)

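# Usage sketch (the tag and parser names used are assumptions about a
# particular grammar):
#
#     table = {':Token': replace_parser('operator')}    # surrogate parser named 'operator'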

@transformation_factory(Callable)
def flatten(context: List[Node], condition: Callable=is_anonymous, recursive: bool=True):
    """
    Flattens all children that fulfil the given `condition`
    (default: all unnamed children). Flattening means that wherever a
    node has child nodes, the child nodes are inserted in place of the
    node.
    If the parameter `recursive` is `True` the same will recursively be
    done with the child-nodes first. In other words, all leaves of
    this node and its child nodes are collected in-order as direct
    children of this node.
    Applying flatten recursively will result in these kinds of
    structural transformations:
        (1 (+ 2) (+ 3))    ->   (1 + 2 + 3)
        (1 (+ (2 + (3))))  ->   (1 + 2 + 3)
    """
    node = context[-1]
    if node.children:
        new_result = []     # type: List[Node]
        for child in node.children:
            context.append(child)
            if child.children and condition(context):
                if recursive:
                    flatten(context, condition, recursive)
                new_result.extend(child.children)
            else:
                new_result.append(child)
            context.pop()
        node.result = tuple(new_result)

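# Usage sketch (the tag names 'expression' and 'group' are assumptions):
#
#     table = {'expression': flatten}    # flatten anonymous children, recursively
#     # flatten only children tagged 'group', without recursion:
#     table = {'expression': flatten(lambda ctx: ctx[-1].tag_name == 'group', False)}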

def collapse(context: List[Node]):
    """
    Collapses all sub-nodes of a node by replacing them with the
    string representation of the node.
    """
    node = context[-1]
    node.result = node.content

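# Usage sketch: turn a subtree like (name (first "John") (last "Doe")) into a
# single leaf carrying the concatenated string content, roughly (name "JohnDoe").
# The tag names are assumptions:
#
#     table = {'name': collapse}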

@transformation_factory
def merge_children(context: List[Node], tag_names: List[str]):
    """
    Joins all children next to each other that have one of the given
    tag names into a single child node with a mock parser carrying the
    name of the first tag name in the list.
    """
    node = context[-1]
    result = []
    name, ptype = ('', tag_names[0]) if tag_names[0][:1] == ':' else (tag_names[0], '')
    if node.children:
        i = 0
        L = len(node.children)
        while i < L:
            while i < L and not node.children[i].tag_name in tag_names:
                result.append(node.children[i])
                i += 1
            k = i + 1
            while (k < L and node.children[k].tag_name in tag_names
                   and bool(node.children[i].children) == bool(node.children[k].children)):
                k += 1
            if i < L:
                result.append(Node(MockParser(name, ptype),
                                   reduce(lambda a, b: a + b,
                                          (nd.children for nd in node.children[i:k]))))
            i = k
        node.result = tuple(result)

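# Usage sketch (tag names are assumptions about a particular grammar): merge
# runs of adjacent 'sentence' nodes into one node, which then carries the
# children of all merged nodes:
#
#     table = {'paragraph': merge_children('sentence')}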

# ------------------------------------------------
#
# destructive transformations:
#     - tree may be rearranged (flattened),
#     - order is preserved
#     - but (irrelevant) leaves may be dropped
#     - errors of dropped leaves will be lost
#
# ------------------------------------------------


@transformation_factory
def replace_content(context: List[Node], func: Callable):  # Callable[[ResultType], ResultType]
    """Replaces the content of the node. ``func`` takes the node's result
    as an argument and returns the mapped result.
    """
    node = context[-1]
    node.result = func(node.result)

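# Usage sketch (the tag name 'NAME' is an assumption; assumes leaf nodes whose
# result is a string):
#
#     table = {'NAME': replace_content(lambda result: result.strip())}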

def is_whitespace(context: List[Node]) -> bool:
    """Returns True, if the node is whitespace (which, in DHParser, also
    covers comments defined with the ``@comment``-directive)."""
    return context[-1].parser.ptype == WHITESPACE_PTYPE


def is_empty(context: List[Node]) -> bool:
    """Returns True, if the node's result is empty."""
    return not context[-1].result


def is_expendable(context: List[Node]) -> bool:
    """Returns True, if the node is either empty or whitespace."""
    return is_empty(context) or is_whitespace(context)


def is_token(context: List[Node], tokens: AbstractSet[str] = frozenset()) -> bool:
    """Returns True, if the node is a token and, unless ``tokens`` is empty,
    its content is one of the given ``tokens``."""
    node = context[-1]
    return node.parser.ptype == TOKEN_PTYPE and (not tokens or node.result in tokens)


def is_one_of(context: List[Node], tag_name_set: AbstractSet[str]) -> bool:
    """Returns True, if the node's tag_name is one of the
    given tag names."""
    return context[-1].tag_name in tag_name_set


def has_content(context: List[Node], regexp: str) -> bool:
    """Checks a node's content against a regular expression."""
    return bool(re.match(regexp, context[-1].content))


@transformation_factory(Callable)
def apply_if(context: List[Node], transformation: Callable, condition: Callable):
    """Applies a transformation only if a certain condition is met."""
    node = context[-1]
    if condition(node):
        transformation(context)


@transformation_factory(slice)
def keep_children(context: List[Node], section: slice = slice(None)):
    """Keeps only child-nodes which fall into a slice of the result field."""
    node = context[-1]
    if node.children:
        node.result = node.children[section]


@transformation_factory(Callable)
def remove_children_if(context: List[Node], condition: Callable):  # , section: slice = slice(None)):
    """Removes all children for which `condition()` returns `True`."""
    node = context[-1]
    if node.children:
        node.result = tuple(c for c in node.children if not condition(context + [c]))

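# Usage sketch with a custom condition (the tag name 'sequence' is an
# assumption). The predefined partials further below (remove_whitespace,
# remove_empty, ...) are built in exactly this way:
#
#     table = {'sequence': remove_children_if(
#         lambda ctx: is_whitespace(ctx) or is_token(ctx, {';'}))}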

# @transformation_factory(Callable)
# def remove_children(context: List[Node],
#                     condition: Callable = TRUE_CONDITION,
#                     section: slice = slice(None)):
#     """Removes all nodes from a slice of the result field if the function
#     `condition(child_node)` evaluates to `True`."""
#     node = context[-1]
#     if node.children:
#         c = node.children
#         N = len(c)
#         rng = range(*section.indices(N))
#         node.result = tuple(c[i] for i in range(N)
#                             if i not in rng or not condition(context + [c[i]]))
#         # selection = []
#         # for i in range(N):
#         #     context.append(c[i])
#         #     if not i in rng or not condition(context):
#         #         selection.append(c[i])
#         #     context.pop()
#         # if len(selection) != c:
#         #     node.result = tuple(selection)


remove_whitespace = remove_children_if(is_whitespace)  # partial(remove_children_if, condition=is_whitespace)
remove_empty = remove_children_if(is_empty)
remove_expendables = remove_children_if(is_expendable)  # partial(remove_children_if, condition=is_expendable)
remove_first = apply_if(keep_children(slice(1, None)), lambda nd: len(nd.children) > 1)
remove_last = apply_if(keep_children(slice(None, -1)), lambda nd: len(nd.children) > 1)
remove_brackets = apply_if(keep_children(slice(1, -1)), lambda nd: len(nd.children) >= 2)
remove_infix_operator = keep_children(slice(0, None, 2))
remove_single_child = apply_if(keep_children(slice(0)), lambda nd: len(nd.children) == 1)

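# These ready-made transformations can be entered into processing tables
# directly, e.g. (the tag name 'term' is an assumption):
#
#     table = {'term': [remove_whitespace, remove_brackets, replace_by_single_child]}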

@transformation_factory
def remove_tokens(context: List[Node], tokens: AbstractSet[str] = frozenset()):
    """Removes any among a particular set of tokens from the immediate
    descendants of a node. If ``tokens`` is the empty set, all tokens
    are removed."""
    remove_children_if(context, partial(is_token, tokens=tokens))


@transformation_factory
def remove_nodes(context: List[Node], tag_names: AbstractSet[str]):
    """Removes children by tag name."""
    remove_children_if(context, partial(is_one_of, tag_name_set=tag_names))


@transformation_factory
def remove_content(context: List[Node], regexp: str):
    """Removes children whose content matches the regular expression ``regexp``."""
    remove_children_if(context, partial(has_content, regexp=regexp))


########################################################################
#
# AST semantic validation functions (EXPERIMENTAL!!!)
#
########################################################################

@transformation_factory(Callable)
def assert_condition(context: List[Node], condition: Callable, error_msg: str = ''):
    """Checks for `condition`; adds an error message if condition is not met."""
    node = context[-1]
    if not condition(context):
        if error_msg:
            node.add_error(error_msg % node.tag_name if error_msg.find("%s") >= 0 else error_msg)
        else:
            cond_name = condition.__name__ if hasattr(condition, '__name__') \
                        else condition.__class__.__name__ if hasattr(condition, '__class__') \
                        else '<unknown>'
            node.add_error("transform.assert_condition: Failed to meet condition " + cond_name)


assert_has_children = assert_condition(lambda nd: nd.children, 'Element "%s" has no children')

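# Usage sketch (the tag names and the length limit are assumptions); a '%s' in
# the error message is filled in with the node's tag name:
#
#     table = {'block': assert_has_children,
#              'number': assert_condition(lambda ctx: len(ctx[-1].content) < 20,
#                                         'Element "%s" is suspiciously long')}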

@transformation_factory
def assert_content(context: List[Node], regexp: str):
    """Checks the node's content against the regular expression `regexp`
    and adds an error message if it does not match."""
    node = context[-1]
    if not has_content(context, regexp):
        node.add_error('Element "%s" violates %s on %s' %
                       (node.parser.name, str(regexp), node.content))


@transformation_factory
def require(context: List[Node], child_tags: AbstractSet[str]):
    """Checks that every child of the node has one of the given tag names
    and adds an error message for any child that does not."""
    node = context[-1]
    for child in node.children:
        if child.tag_name not in child_tags:
            node.add_error('Element "%s" is not allowed inside "%s".' %
                           (child.parser.name, node.parser.name))


@transformation_factory
def forbid(context: List[Node], child_tags: AbstractSet[str]):
    """Checks that no child of the node has one of the given tag names
    and adds an error message for any child that does."""
    node = context[-1]
    for child in node.children:
        if child.tag_name in child_tags:
            node.add_error('Element "%s" cannot be nested inside "%s".' %
                           (child.parser.name, node.parser.name))
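# Usage sketch for the structural checks (the tag names are assumptions about
# a particular grammar):
#
#     table = {'structure': [require('definition', 'directive'),
#                            forbid('structure')]}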