# transform.py - transformation functions for converting the
#                concrete into the abstract syntax tree
#
# Copyright 2016  by Eckhart Arnold (arnold@badw.de)
#                 Bavarian Academy of Sciences and Humanities (badw.de)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.  See the License for the specific language governing
# permissions and limitations under the License.


"""
Module ``transform`` contains the functions for transforming the
concrete syntax tree (CST) into an abstract syntax tree (AST).

As these functions are very generic, they can in principle be
used for any kind of tree transformations, not necessarily only
for CST -> AST transformations.
"""


import inspect
from functools import partial, reduce, singledispatch

from DHParser.syntaxtree import Node, WHITESPACE_PTYPE, TOKEN_PTYPE, MockParser
from DHParser.toolkit import expand_table, smart_list, re, typing

from typing import AbstractSet, Any, ByteString, Callable, cast, Container, Dict, \
    Iterator, List, NamedTuple, Sequence, Union, Text, Tuple

__all__ = ('TransformationDict',
           'TransformationProc',
           'ConditionFunc',
           'KeyFunc',
           'transformation_factory',
           'key_parser_name',
           'key_tag_name',
           'traverse',
           'is_named',
           'replace_by_single_child',
           'reduce_single_child',
           'replace_or_reduce',
           'replace_parser',
           'collapse',
           'merge_children',
           'replace_content',
           'apply_if',
           'traverse_locally',
           'is_anonymous',
           'is_whitespace',
           'is_empty',
           'is_expendable',
           'is_token',
           'is_one_of',
           'has_content',
           'has_parent',
           'lstrip',
           'rstrip',
           'strip',
           'keep_children',
           'keep_children_if',
           'keep_tokens',
           'keep_nodes',
           'keep_content',
           'remove_children_if',
           'remove_nodes',
           'remove_content',
           'remove_first',
           'remove_last',
           'remove_whitespace',
           'remove_empty',
           'remove_anonymous_empty',
           'remove_expendables',
           'remove_brackets',
           'remove_infix_operator',
           'remove_single_child',
           'remove_tokens',
           'flatten',
           'forbid',
           'require',
           'assert_content',
           'assert_condition',
           'assert_has_children')


TransformationProc = Callable[[List[Node]], None]
TransformationDict = Dict[str, Sequence[Callable]]
ProcessingTableType = Dict[str, Union[Sequence[Callable], TransformationDict]]
ConditionFunc = Callable  # Callable[[List[Node]], bool]
KeyFunc = Callable[[Node], str]
CriteriaType = Union[int, str, Callable]


def transformation_factory(t1=None, t2=None, t3=None, t4=None, t5=None):
    """Creates factory functions from transformation-functions that
    dispatch on the first parameter after the context parameter.

    Decorating a transformation-function that has more than merely the
    ``context``-parameter with ``transformation_factory`` creates a
    function with the same name, which returns a partial-function that
    takes just the context-parameter.

    Additionally, there is some syntactic sugar for
    transformation-functions that receive a collection as their second
    parameter and do not have any further parameters. In this case a
    list of parameters passed to the factory function will be converted
    into a collection.

    The main benefit is the readability of processing tables.

    Usage::

        @transformation_factory(AbstractSet[str])
        def remove_tokens(context, tokens):
            ...

    or, alternatively::

        @transformation_factory
        def remove_tokens(context, tokens: AbstractSet[str]):
            ...

    Example::

        trans_table = { 'expression': remove_tokens('+', '-') }

    instead of::

        trans_table = { 'expression': partial(remove_tokens, tokens={'+', '-'}) }

    Parameters:
        t1:  type of the second argument of the transformation function,
            only necessary if the transformation function's parameter list
            does not have type annotations.
    """

    def decorator(f):
        sig = inspect.signature(f)
        params = list(sig.parameters.values())[1:]
        if len(params) == 0:
            return f  # '@transformer' not needed w/o free parameters
        assert t1 or params[0].annotation != params[0].empty, \
            "No type information on second parameter found! Please use a type " \
            "annotation or provide the type information via the transformation_factory decorator."
        p1type = t1 or params[0].annotation
        f = singledispatch(f)
        try:
            if len(params) == 1 and issubclass(p1type, Container) \
                    and not issubclass(p1type, Text) and not issubclass(p1type, ByteString):
                def gen_special(*args):
                    c = set(args) if issubclass(p1type, AbstractSet) else \
                        list(args) if issubclass(p1type, Sequence) else args
                    d = {params[0].name: c}
                    return partial(f, **d)

                f.register(p1type.__args__[0], gen_special)
        except AttributeError:
            pass  # Union Type does not allow subclassing, but is not needed here

        def gen_partial(*args, **kwargs):
            d = {p.name: arg for p, arg in zip(params, args)}
            d.update(kwargs)
            return partial(f, **d)

        for t in (p1type, t2, t3, t4, t5):
            if t:
                f.register(t, gen_partial)
            else:
                break
        return f

    if isinstance(t1, type(lambda: 1)):
        # Provide for the case that transformation_factory has been
        # written as plain decorator and not as a function call that
        # returns the decorator proper.
        func = t1
        t1 = None
        return decorator(func)
    else:
        return decorator


def key_parser_name(node: Node) -> str:
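    """Returns ``node.parser.name`` as the key for the processing table."""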
    return node.parser.name


def key_tag_name(node: Node) -> str:
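    """Returns ``node.tag_name`` as the key for the processing table."""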
    return node.tag_name


def traverse(root_node: Node,
             processing_table: ProcessingTableType,
             key_func: KeyFunc=key_tag_name) -> None:
    """
    Traverses the syntax tree starting with the given ``root_node`` depth
    first and applies the sequences of callback-functions registered
    in the ``processing_table``-dictionary.

    The most important use case is the transformation of a concrete
    syntax tree into an abstract syntax tree (AST). But it is also imaginable
    to employ tree-traversal for the semantic analysis of the AST.

    In order to assign sequences of callback-functions to nodes, a
    dictionary ("processing table") is used. The keys usually represent
    tag names, but any other key function is possible. There exist
    three special keys:

    - '+': always called (before any other processing function)
    - '*': called for those nodes for which no (other) processing
      function appears in the table
    - '~': always called (after any other processing function)

    Args:
        root_node (Node): The root-node of the syntax tree to be traversed
        processing_table (dict): node key -> sequence of functions that
            will be applied to matching nodes in order. This dictionary
            is interpreted as a ``compact_table``. See
            :func:`expand_table` or :func:`EBNFCompiler.EBNFTransTable`
        key_func (function): A mapping key_func(node) -> keystr. The default
            key_func yields node.tag_name.

    Example::

        table = { "term": [replace_by_single_child, flatten],
                  "factor, flowmarker, retrieveop": replace_by_single_child }
        traverse(node, table)

    """
    # Is this optimization really needed?
    if '__cache__' in processing_table:
        # assume that processing table has already been expanded
        table = processing_table
        cache = processing_table['__cache__']
    else:
        # normalize processing_table entries by turning single values
        # into lists with a single value
        table = {name: cast(Sequence[Callable], smart_list(call))
                 for name, call in list(processing_table.items())}
        table = expand_table(table)
        cache = table.setdefault('__cache__', cast(TransformationDict, dict()))
        # change processing table in place, so it is already expanded and the cache filled next time
        processing_table.clear()
        processing_table.update(table)

    # assert '__cache__' in processing_table
    # # Code without optimization
    # table = {name: smart_list(call) for name, call in list(processing_table.items())}
    # table = expand_table(table)
    # cache = {}  # type: Dict[str, List[Callable]]

    def traverse_recursive(context):
        node = context[-1]
        if node.children:
            for child in node.result:
                context.append(child)
                traverse_recursive(context)  # depth first
                node.error_flag = max(node.error_flag, child.error_flag)  # propagate error flag
                context.pop()

        key = key_func(node)
        try:
            sequence = cache[key]
        except KeyError:
            sequence = table.get('+', []) \
                + table.get(key, table.get('*', [])) \
                + table.get('~', [])
            # '+' always called (before any other processing function)
            # '*' called for those nodes for which no (other) processing function
            #     appears in the table
            # '~' always called (after any other processing function)
            cache[key] = sequence

        for call in sequence:
            call(context)

    traverse_recursive([root_node])
    # assert processing_table['__cache__']


#######################################################################
#
# meta transformations, i.e. transformations that call other
# transformations
#
#######################################################################


@transformation_factory(Dict)
def traverse_locally(context: List[Node],
                     processing_table: Dict,            # actually: ProcessingTableType
                     key_func: Callable=key_tag_name):  # actually: KeyFunc
    """Transforms the syntax tree starting from the last node in the context
    according to the given processing table. The purpose of this function is
    to apply certain transformations locally, i.e. only for those nodes that
    have the last node in the context as their parent node.
    """
    traverse(context[-1], processing_table, key_func)
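
# Usage sketch (hypothetical tag names): since ``traverse_locally`` is itself a
# transformation, it can be registered in a processing table in order to apply a
# sub-table only to the subtree below a particular node, e.g.:
#
#     ast_table = {
#         "definition": [flatten,
#                        traverse_locally({"symbol": reduce_single_child})],
#     }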


# @transformation_factory(List[Callable])
# def apply_to_child(context: List[Node], transformations: List[Callable], condition: Callable):
#     """Applies a list of transformations to those children that meet a specifc condition."""
#     node = context[-1]
#     for child in node.children:
#         context.append(child)
#         if condition(context):
#             for transform in transformations:
#                 transform(context)
#         context.pop()


@transformation_factory(Callable)
def apply_if(context: List[Node], transformation: Callable, condition: Callable):
    """Applies a transformation only if a certain condition is met."""
    if condition(context):
        transformation(context)
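
# Usage sketch: ``apply_if`` guards another transformation with a condition. The
# module itself uses this pattern below to define ``remove_first`` and
# ``remove_last``; in a processing table one might, for instance, collapse a node
# only if its content is purely numeric (hypothetical tag name):
#
#     ast_table = {"value": apply_if(collapse, has_content(r'\d+'))}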


#######################################################################
#
# conditionals that determine whether the context (or the last node in
# the context for that matter) fulfill a specific condition.
# ---------------------------------------------------------------------
#
# The context of a node is understood as a list of all parent nodes
# leading up to and including the node itself. If represented as list,
# the last element of the list is the node itself.
#
#######################################################################


def is_single_child(context: List[Node]) -> bool:
    return len(context[-2].children) == 1


def is_named(context: List[Node]) -> bool:
    return bool(context[-1].parser.name)


def is_anonymous(context: List[Node]) -> bool:
    return not context[-1].parser.name


def is_whitespace(context: List[Node]) -> bool:
    """Returns ``True`` for whitespace and comments defined with the
    ``@comment``-directive."""
    return context[-1].parser.ptype == WHITESPACE_PTYPE


def is_empty(context: List[Node]) -> bool:
    return not context[-1].result


def is_expendable(context: List[Node]) -> bool:
    return is_empty(context) or is_whitespace(context)


@transformation_factory(AbstractSet[str])
def is_token(context: List[Node], tokens: AbstractSet[str] = frozenset()) -> bool:
    """Checks whether the last node in the context has `ptype == TOKEN_PTYPE`
    and its content matches one of the given tokens. Leading and trailing
    whitespace-tokens will be ignored. If an empty set of tokens is passed,
    any token is a match. If only ":" is given, all anonymous tokens (but no
    other tokens) are a match.
    """
    def stripped(nd: Node) -> str:
        # assert node.parser.ptype == TOKEN_PTYPE
        if nd.children:
            i, k = 0, len(nd.children)
            while i < len(nd.children) and nd.children[i].parser.ptype == WHITESPACE_PTYPE:
                i += 1
            while k > 0 and nd.children[k - 1].parser.ptype == WHITESPACE_PTYPE:
                k -= 1
            return "".join(child.content for child in nd.children[i:k])
        return nd.content
    node = context[-1]
    return (node.parser.ptype == TOKEN_PTYPE and (not tokens or stripped(node) in tokens))



@transformation_factory(AbstractSet[str])
def is_one_of(context: List[Node], tag_name_set: AbstractSet[str]) -> bool:
    """Returns true, if the node's tag_name is one of the given tag names."""
    return context[-1].tag_name in tag_name_set


@transformation_factory(str)
def has_content(context: List[Node], regexp: str) -> bool:
    """Checks a node's content against a regular expression."""
    return bool(re.match(regexp, context[-1].content))


@transformation_factory(AbstractSet[str])
def has_parent(context: List[Node], tag_name_set: AbstractSet[str]) -> bool:
    """Checks whether a node with one of the given tag names appears somewhere
     in the context before the last node in the context."""
    for i in range(2, len(context)):
        if context[-i].tag_name in tag_name_set:
            return True
    return False
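
# Usage sketch (hypothetical tag name): conditions like the ones above are meant
# to be plugged into the child-filtering transformations defined further below,
# e.g. dropping whitespace-children only inside 'inline' elements:
#
#     drop_inline_whitespace = remove_children_if(
#         lambda ctx: is_whitespace(ctx) and has_parent(ctx, {'inline'}))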


#######################################################################
#
# utility functions (private)
#
#######################################################################


def _replace_by(node: Node, child: Node):
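    """Replaces ``node`` by ``child``: the node takes over the child's parser,
    errors and result. An anonymous child first inherits the node's parser name."""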
    if not child.parser.name:
        child.parser = MockParser(node.parser.name, child.parser.ptype)
        # parser names must not be overwritten, else: child.parser.name = node.parser.name
    node.parser = child.parser
    node._errors.extend(child._errors)
    node.result = child.result


def _reduce_child(node: Node, child: Node):
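    """Transfers the result and errors of ``child`` to ``node``, while keeping
    the node's own parser entry."""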
    node._errors.extend(child._errors)
    node.result = child.result


def _pick_child(context: List[Node], criteria: CriteriaType):
    """Returns the first child that meets the criteria."""
    if isinstance(criteria, int):
        try:
            return context[-1].children[criteria]
        except IndexError:
            return None
    elif isinstance(criteria, str):
        for child in context[-1].children:
            if child.tag_name == criteria:
                return child
        return None
    else:  # assume criteria has type ConditionFunc
        for child in context[-1].children:
            context.append(child)
            evaluation = criteria(context)
            context.pop()
            if evaluation:
                return child
        return None


#######################################################################
#
# rearranging transformations
#
# - tree may be rearranged (e.g. flattened)
# - nodes that are not leaves may be dropped
# - order is preserved
# - leaf content is preserved (though not necessarily the leaves
#   themselves)
#
#######################################################################


# @transformation_factory(int, str, Callable)
# def replace_by_child(context: List[Node], criteria: CriteriaType=is_single_child):
#     """
#     Replaces a node by the first of its immediate descendants
#     that meets the `criteria`. The criteria can either be the
#     index of the child (counting from zero), or the tag name or
#     a boolean-valued function on the context of the child.
#     If no child matching the criteria is found, the node will
#     not be replaced.
#     With the default value for `criteria` the semantics is the same
#     as that of `replace_by_single_child`.
#     """
#     child = _pick_child(context, criteria)
#     if child:
#         _replace_by(context[-1], child)
#
#
# @transformation_factory(int, str, Callable)
# def content_from_child(context: List[Node], criteria: CriteriaType = is_single_child):
#     """
#     Reduces a node, by transferring the result of the first of its
#     immediate descendants that meets the `criteria` to this node,
#     but keeping this node's parser entry. The criteria can either
#     be the index of the child (counting from zero), or the tag
#     name or a boolean-valued function on the context of the child.
#     If no child matching the criteria is found, the node will
#     not be replaced.
#     With the default value for `criteria` this has the same semantics
#     as `content_from_single_child`.
#     """
#     child = _pick_child(context, criteria)
#     if child:
#         _reduce_child(context[-1], child)


def replace_by_single_child(context: List[Node]):
    """
    Removes a single branch node, replacing it by its immediate descendant.
    Replacement only takes place if the last node in the context has
    exactly one child.
    """
    node = context[-1]
    if len(node.children) == 1:
        _replace_by(node, node.children[0])


def reduce_single_child(context: List[Node]):
    """
    Reduces a single branch node by transferring the result of its
    immediate descendant to this node, but keeping this node's parser entry.
    Reduction only takes place if the last node in the context has
    exactly one child.
    """
    node = context[-1]
    if len(node.children) == 1:
        _reduce_child(node, node.children[0])
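
# Schematic contrast (not a doctest): for a node (expr (term "42")) with exactly
# one child,
#     replace_by_single_child  yields  (term "42")   (the child's name survives),
#     reduce_single_child      yields  (expr "42")   (the parent's name survives).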


@transformation_factory(Callable)
def replace_or_reduce(context: List[Node], condition: Callable=is_named):
    """
    Replaces a node by its single child, if the condition is met on the child,
    otherwise (i.e. if the child is anonymous) reduces the child.
    """
    node = context[-1]
    if len(node.children) == 1:
        child = node.children[0]
        if condition(context):
            _replace_by(node, child)
        else:
            _reduce_child(node, child)


@transformation_factory
def replace_parser(context: List[Node], name: str):
    """
    Replaces the parser of a Node with a mock parser with the given
    name.

    Parameters:
        context: the context where the parser shall be replaced
        name: "NAME:PTYPE" of the surrogate. The ptype is optional.
    """
    node = context[-1]
    name, ptype = (name.split(':') + [''])[:2]
    node.parser = MockParser(name, ptype)
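
# Usage sketch (hypothetical node names): rename nodes during the AST
# transformation, optionally with the ":PTYPE" suffix described above, e.g.:
#
#     ast_table = {"WORD": replace_parser("word")}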


@transformation_factory(Callable)
def flatten(context: List[Node], condition: Callable=is_anonymous, recursive: bool=True):
    """
    Flattens all children that fulfil the given ``condition``
    (default: all unnamed children). Flattening means that wherever a
    node has child nodes, the child nodes are inserted in place of the
    node.

    If the parameter ``recursive`` is ``True``, the same will recursively be
    done with the child-nodes first. In other words, all leaves of
    this node and its child nodes are collected in-order as direct
    children of this node.

    Applying flatten recursively will result in these kinds of
    structural transformation::

        (1 (+ 2) (+ 3))    ->   (1 + 2 + 3)
        (1 (+ (2 + (3))))  ->   (1 + 2 + 3)
    """
    node = context[-1]
    if node.children:
        new_result = []     # type: List[Node]
        for child in node.children:
            context.append(child)
            if child.children and condition(context):
                if recursive:
                    flatten(context, condition, recursive)
                new_result.extend(child.children)
            else:
                new_result.append(child)
            context.pop()
        node.result = tuple(new_result)


def collapse(context: List[Node]):
    """
    Collapses all sub-nodes of a node by replacing them with the
    string content of the node.
    """
    node = context[-1]
    node.result = node.content
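
# Schematic example (not a doctest): applying ``collapse`` to a node
#     (word (letter "f") (letter "o") (letter "o"))
# yields the leaf node (word "foo"); the inner structure is discarded and only
# the string content is kept.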


@transformation_factory
def merge_children(context: List[Node], tag_names: List[str]):
    """
    Joins all adjacent children that have one of the given tag-names into a
    single child node with a mock-parser named after the first tag-name in
    the list.
    """
    node = context[-1]
    result = []
    name, ptype = ('', tag_names[0]) if tag_names[0][:1] == ':' else (tag_names[0], '')
    if node.children:
        i = 0
        L = len(node.children)
        while i < L:
            while i < L and not node.children[i].tag_name in tag_names:
                result.append(node.children[i])
                i += 1
            k = i + 1
            while (k < L and node.children[k].tag_name in tag_names
                   and bool(node.children[i].children) == bool(node.children[k].children)):
                k += 1
            if i < L:
                result.append(Node(MockParser(name, ptype),
                                   reduce(lambda a, b: a + b,
                                          (node.children for node in node.children[i:k]))))
            i = k
        node.result = tuple(result)


@transformation_factory
def replace_content(context: List[Node], func: Callable):  # Callable[[Node], ResultType]
    """Replaces the content of the node. ``func`` takes the node's result
    as an argument and returns the mapped result.
    """
    node = context[-1]
    node.result = func(node.result)
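
# Usage sketch (hypothetical tag name; assumes the node is a leaf, so that its
# result is a string):
#
#     ast_table = {"name": replace_content(lambda result: result.strip().lower())}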


#######################################################################
#
# destructive transformations:
#
# - leaves may be dropped (e.g. if deemed irrelevant)
# - errors of dropped leaves will be lost
# - no promise that order will be preserved
#
#######################################################################


@transformation_factory(Callable)
def lstrip(context: List[Node], condition: Callable = is_expendable):
    """Recursively removes all leading child-nodes that fulfill a given condition."""
    node = context[-1]
    i = 1
    while i > 0 and node.children:
        lstrip(context + [node.children[0]], condition)
        i, L = 0, len(node.children)
        while i < L and condition(context + [node.children[i]]):
            i += 1
        if i > 0:
            node.result = node.children[i:]


@transformation_factory(Callable)
def rstrip(context: List[Node], condition: Callable = is_expendable):
    """Recursively removes all trailing child-nodes that fulfill a given condition."""
    node = context[-1]
    i, L = 0, len(node.children)
    while i < L and node.children:
        rstrip(context + [node.children[-1]], condition)
        L = len(node.children)
        i = L
        while i > 0 and condition(context + [node.children[i-1]]):
            i -= 1
        if i < L:
            node.result = node.children[:i]


@transformation_factory(Callable)
def strip(context: List[Node], condition: Callable = is_expendable):
    """Removes leading and trailing child-nodes that fulfill a given condition."""
    lstrip(context, condition)
    rstrip(context, condition)
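
# Usage sketch (hypothetical tag name): ``strip``, ``lstrip`` and ``rstrip`` can be
# registered directly in a processing table (the condition then defaults to
# ``is_expendable``) or called with an explicit condition, e.g.:
#
#     ast_table = {"line": [strip(is_whitespace), flatten]}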


@transformation_factory(slice)
def keep_children(context: List[Node], section: slice = slice(None)):
    """Keeps only child-nodes which fall into a slice of the result field."""
    node = context[-1]
    if node.children:
        node.result = node.children[section]


@transformation_factory(Callable)
def keep_children_if(context: List[Node], condition: Callable):
    """Keeps only those children for which `condition()` returns `True`."""
    node = context[-1]
    if node.children:
        node.result = tuple(c for c in node.children if condition(context + [c]))


@transformation_factory
def keep_tokens(context: List[Node], tokens: AbstractSet[str] = frozenset()):
    """Keeps only those children that are tokens from a particular set of
    tokens; all other children are removed. If ``tokens`` is the empty set,
    any token is kept."""
    keep_children_if(context, partial(is_token, tokens=tokens))


@transformation_factory
def keep_nodes(context: List[Node], tag_names: AbstractSet[str]):
    """Keeps only those children whose tag-name is one of ``tag_names``."""
    keep_children_if(context, partial(is_one_of, tag_name_set=tag_names))


@transformation_factory
def keep_content(context: List[Node], regexp: str):
    """Keeps only those children whose content matches the regular expression ``regexp``."""
    keep_children_if(context, partial(has_content, regexp=regexp))


@transformation_factory(Callable)
def remove_children_if(context: List[Node], condition: Callable):
    """Removes all children for which `condition()` returns `True`."""
    node = context[-1]
    if node.children:
        node.result = tuple(c for c in node.children if not condition(context + [c]))


# @transformation_factory(Callable)
# def remove_children(context: List[Node],
#                     condition: Callable = TRUE_CONDITION,
#                     section: slice = slice(None)):
#     """Removes all nodes from a slice of the result field if the function
#     `condition(child_node)` evaluates to `True`."""
#     node = context[-1]
#     if node.children:
#         c = node.children
#         N = len(c)
#         rng = range(*section.indices(N))
#         node.result = tuple(c[i] for i in range(N)
#                             if i not in rng or not condition(context + [c[i]]))
#         # selection = []
#         # for i in range(N):
#         #     context.append(c[i])
#         #     if not i in rng or not condition(context):
#         #         selection.append(c[i])
#         #     context.pop()
#         # if len(selection) != c:
#         #     node.result = tuple(selection)


remove_whitespace = remove_children_if(is_whitespace)
# partial(remove_children_if, condition=is_whitespace)
remove_empty = remove_children_if(is_empty)
remove_anonymous_empty = remove_children_if(lambda ctx: is_empty(ctx) and is_anonymous(ctx))
remove_expendables = remove_children_if(is_expendable)
# partial(remove_children_if, condition=is_expendable)
remove_anonymous_expendables = remove_children_if(lambda ctx: is_anonymous(ctx)
                                                  and is_expendable(ctx))
remove_first = apply_if(keep_children(slice(1, None)), lambda ctx: len(ctx[-1].children) > 1)
remove_last = apply_if(keep_children(slice(None, -1)), lambda ctx: len(ctx[-1].children) > 1)
remove_brackets = apply_if(keep_children(slice(1, -1)), lambda ctx: len(ctx[-1].children) >= 2)
remove_infix_operator = keep_children(slice(0, None, 2))
remove_single_child = apply_if(keep_children(slice(0)), lambda ctx: len(ctx[-1].children) == 1)
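
# Usage sketch (hypothetical tag names): the prepared transformations above are
# typically combined per tag-name in an AST-transformation table, e.g.:
#
#     ast_table = {
#         "+": remove_whitespace,
#         "expression": [remove_infix_operator, flatten],
#         "group": [remove_brackets, replace_by_single_child],
#     }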


@transformation_factory
def remove_tokens(context: List[Node], tokens: AbstractSet[str] = frozenset()):
    """Removes any among a particular set of tokens from the immediate
    descendants of a node. If ``tokens`` is the empty set, all tokens
    are removed."""
    remove_children_if(context, partial(is_token, tokens=tokens))


@transformation_factory
def remove_nodes(context: List[Node], tag_names: AbstractSet[str]):
    """Removes children by tag name."""
    remove_children_if(context, partial(is_one_of, tag_name_set=tag_names))


@transformation_factory
def remove_content(context: List[Node], regexp: str):
    """Removes children whose content matches the regular expression ``regexp``."""
    remove_children_if(context, partial(has_content, regexp=regexp))


########################################################################
#
# AST semantic validation functions (EXPERIMENTAL!!!)
#
########################################################################

@transformation_factory(Callable)
def assert_condition(context: List[Node], condition: Callable, error_msg: str = ''):
    """Checks for `condition`; adds an error message if condition is not met."""
    node = context[-1]
    if not condition(context):
        if error_msg:
            node.add_error(error_msg % node.tag_name if error_msg.find("%s") >= 0 else error_msg)
        else:
            cond_name = condition.__name__ if hasattr(condition, '__name__') \
                        else condition.__class__.__name__ if hasattr(condition, '__class__') \
                        else '<unknown>'
            node.add_error("transform.assert_condition: Failed to meet condition " + cond_name)


assert_has_children = assert_condition(lambda ctx: ctx[-1].children, 'Element "%s" has no children')


@transformation_factory
def assert_content(context: List[Node], regexp: str):
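    """Adds an error to the node if its content does not match ``regexp``."""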
    node = context[-1]
    if not has_content(context, regexp):
        node.add_error('Element "%s" violates %s on %s' %
                       (node.parser.name, str(regexp), node.content))


@transformation_factory
def require(context: List[Node], child_tags: AbstractSet[str]):
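    """Adds an error for every child whose tag-name is not one of ``child_tags``."""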
    node = context[-1]
    for child in node.children:
        if child.tag_name not in child_tags:
            node.add_error('Element "%s" is not allowed inside "%s".' %
                           (child.parser.name, node.parser.name))


@transformation_factory
def forbid(context: List[Node], child_tags: AbstractSet[str]):
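    """Adds an error for every child whose tag-name is one of ``child_tags``."""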
    node = context[-1]
    for child in node.children:
        if child.tag_name in child_tags:
            node.add_error('Element "%s" cannot be nested inside "%s".' %
                           (child.parser.name, node.parser.name))
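

# Usage sketch (hypothetical tag names): the experimental validation functions
# above can be appended to processing-table entries in order to report structural
# errors after the AST-transformation, e.g.:
#
#     validation_table = {
#         "citation": [require('author', 'year'), forbid('citation')],
#         "year": assert_content(r'\d{4}'),
#     }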