
# transform.py - transformation functions for converting the
#                concrete into the abstract syntax tree
#
# Copyright 2016  by Eckhart Arnold (arnold@badw.de)
#                 Bavarian Academy of Sciences and Humanities (badw.de)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.  See the License for the specific language governing
# permissions and limitations under the License.


"""
Module ``transform`` contains the functions for transforming the
concrete syntax tree (CST) into an abstract syntax tree (AST).

As these functions are very generic, they can in principle be
used for any kind of tree transformations, not necessarily only
for CST -> AST transformations.
"""

import inspect
from functools import partial, reduce, singledispatch

from DHParser.syntaxtree import Node, WHITESPACE_PTYPE, TOKEN_PTYPE, MockParser
from DHParser.toolkit import expand_table, smart_list, re, typing
from typing import AbstractSet, Any, ByteString, Callable, cast, Container, Dict, \
    Iterator, List, NamedTuple, Sequence, Union, Text, Tuple
__all__ = ('TransformationDict',
           'TransformationProc',
           'TransformationFunc',
           'ConditionFunc',
           'KeyFunc',
           'transformation_factory',
           'key_parser_name',
           'key_tag_name',
           'traverse',
           'is_named',
           'replace_by_single_child',
           'reduce_single_child',
           'replace_or_reduce',
           'replace_parser',
           'collapse',
           'merge_children',
           'replace_content',
           'apply_if',
           'apply_unless',
           'traverse_locally',
           'is_anonymous',
           'is_whitespace',
           'is_empty',
           'is_expendable',
           'is_token',
           'is_one_of',
           'has_content',
           'has_parent',
           'lstrip',
           'rstrip',
           'strip',
           'keep_children',
           'keep_children_if',
           'keep_tokens',
           'keep_nodes',
           'keep_content',
           'remove_children_if',
           'remove_nodes',
           'remove_content',
           'remove_first',
           'remove_last',
           'remove_whitespace',
           'remove_empty',
           'remove_anonymous_empty',
           'remove_anonymous_expendables',
           'remove_anonymous_tokens',
           'remove_expendables',
           'remove_brackets',
           'remove_infix_operator',
           'remove_single_child',
           'remove_tokens',
           'flatten',
           'forbid',
           'require',
           'assert_content',
           'assert_condition',
           'assert_has_children')


TransformationProc = Callable[[List[Node]], None]
TransformationDict = Dict[str, Sequence[Callable]]
TransformationFunc = Union[Callable[[Node], Any], partial]
ProcessingTableType = Dict[str, Union[Sequence[Callable], TransformationDict]]
ConditionFunc = Callable  # Callable[[List[Node]], bool]
KeyFunc = Callable[[Node], str]
CriteriaType = Union[int, str, Callable]


def transformation_factory(t1=None, t2=None, t3=None, t4=None, t5=None):
    """Creates factory functions from transformation-functions that
    dispatch on the first parameter after the context parameter.

    Decorating a transformation-function that has more than merely the
    ``node``-parameter with ``transformation_factory`` creates a
    function with the same name, which returns a partial-function that
    takes just the context-parameter.

    Additionally, there is some syntactic sugar for
    transformation-functions that receive a collection as their second
    parameter and do not have any further parameters. In this case a
    list of parameters passed to the factory function will be converted
    into a collection.

    Main benefit is readability of processing tables.

    Usage::

        @transformation_factory(AbstractSet[str])
        def remove_tokens(context, tokens):
            ...

    or, alternatively::

        @transformation_factory
        def remove_tokens(context, tokens: AbstractSet[str]):
            ...

    Example::

        trans_table = { 'expression': remove_tokens('+', '-') }

    instead of::

        trans_table = { 'expression': partial(remove_tokens, tokens={'+', '-'}) }

    Parameters:
        t1:  type of the second argument of the transformation function,
            only necessary if the transformation function's parameter list
            does not have type annotations.
    """

    def decorator(f):
        sig = inspect.signature(f)
        params = list(sig.parameters.values())[1:]
        if len(params) == 0:
            return f  # '@transformer' not needed w/o free parameters
        assert t1 or params[0].annotation != params[0].empty, \
            "No type information on second parameter found! Please, use type " \
            "annotation or provide the type information via transformer-decorator."
        p1type = t1 or params[0].annotation
        f = singledispatch(f)
        try:
            if len(params) == 1 and issubclass(p1type, Container) \
                    and not issubclass(p1type, Text) and not issubclass(p1type, ByteString):
                def gen_special(*args):
                    c = set(args) if issubclass(p1type, AbstractSet) else \
                        list(args) if issubclass(p1type, Sequence) else args
                    d = {params[0].name: c}
                    return partial(f, **d)

                f.register(p1type.__args__[0], gen_special)
        except AttributeError:
            pass  # Union Type does not allow subclassing, but is not needed here

        def gen_partial(*args, **kwargs):
            d = {p.name: arg for p, arg in zip(params, args)}
            d.update(kwargs)
            return partial(f, **d)

        for t in (p1type, t2, t3, t4, t5):
            if t:
                f.register(t, gen_partial)
            else:
                break
        return f

    if isinstance(t1, type(lambda: 1)):
        # Provide for the case that transformation_factory has been
        # written as plain decorator and not as a function call that
        # returns the decorator proper.
        func = t1
        t1 = None
        return decorator(func)
    else:
        return decorator


def key_parser_name(node: Node) -> str:
    return node.parser.name


def key_tag_name(node: Node) -> str:
    return node.tag_name


def traverse(root_node: Node,
             processing_table: ProcessingTableType,
             key_func: KeyFunc=key_tag_name) -> None:
    """
    Traverses the syntax tree starting with the given ``root_node`` depth
    first and applies the sequences of callback-functions registered
    in the ``processing_table``-dictionary.

    The most important use case is the transformation of a concrete
    syntax tree into an abstract syntax tree (AST). But it is also
    imaginable to employ tree-traversal for the semantic analysis of
    the AST.

    In order to assign sequences of callback-functions to nodes, a
    dictionary ("processing table") is used. The keys usually represent
    tag names, but any other key function is possible. There exist
    three special keys:

    - '+': always called (before any other processing function)
    - '*': called for those nodes for which no (other) processing
      function appears in the table
    - '~': always called (after any other processing function)

    Args:
        root_node (Node): The root-node of the syntax tree to be traversed
        processing_table (dict): node key -> sequence of functions that
            will be applied to matching nodes in order. This dictionary
            is interpreted as a ``compact_table``. See
            :func:`expand_table` or :func:`EBNFCompiler.EBNFTransTable`
        key_func (function): A mapping key_func(node) -> keystr. The default
            key_func yields node.tag_name.

    Example::

        table = { "term": [replace_by_single_child, flatten],
                  "factor, flowmarker, retrieveop": replace_by_single_child }
        traverse(node, table)

    """
    # Is this optimization really needed?
    if '__cache__' in processing_table:
        # assume that processing table has already been expanded
        table = processing_table
        cache = processing_table['__cache__']
    else:
        # normalize processing_table entries by turning single values
        # into lists with a single value
        table = {name: cast(Sequence[Callable], smart_list(call))
                 for name, call in list(processing_table.items())}
        table = expand_table(table)
        cache = table.setdefault('__cache__', cast(TransformationDict, dict()))
        # change processing table in place, so it's already expanded and cache filled next time
        processing_table.clear()
        processing_table.update(table)

    # assert '__cache__' in processing_table
    # # Code without optimization
    # table = {name: smart_list(call) for name, call in list(processing_table.items())}
    # table = expand_table(table)
    # cache = {}  # type: Dict[str, List[Callable]]

    def traverse_recursive(context):
        node = context[-1]
        if node.children:
            for child in node.result:
                context.append(child)
                traverse_recursive(context)  # depth first
                node.error_flag = max(node.error_flag, child.error_flag)  # propagate error flag
                context.pop()

        key = key_func(node)
        try:
            sequence = cache[key]
        except KeyError:
            sequence = table.get('+', []) \
                + table.get(key, table.get('*', [])) \
                + table.get('~', [])
            # '+' always called (before any other processing function)
            # '*' called for those nodes for which no (other) processing function
            #     appears in the table
            # '~' always called (after any other processing function)
            cache[key] = sequence

        for call in sequence:
            call(context)

    traverse_recursive([root_node])
    # assert processing_table['__cache__']
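
# For illustration only (the tag names and the variable `syntax_tree` are
# made up, not taken from any actual grammar), a processing table that uses
# the special keys described above might look like this:
#
#     ast_table = {
#         '+': remove_whitespace,            # run first on every node
#         'expression': [flatten, remove_tokens('+', '-')],
#         '*': replace_by_single_child,      # fallback for unlisted tags
#         '~': remove_anonymous_empty,       # run last on every node
#     }
#     traverse(syntax_tree, ast_table)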


#######################################################################
#
# meta transformations, i.e. transformations that call other
# transformations
#
#######################################################################


@transformation_factory(dict)
def traverse_locally(context: List[Node],
                     processing_table: Dict,            # actually: ProcessingTableType
                     key_func: Callable=key_tag_name):  # actually: KeyFunc
    """Transforms the syntax tree starting from the last node in the context
    according to the given processing table. The purpose of this function is
    to apply certain transformations locally, i.e. only for those nodes that
    have the last node in the context as their parent node.
    """
    traverse(context[-1], processing_table, key_func)
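
# Illustrative sketch (hypothetical tag names): a sub-table can be confined
# to the subtree below a 'definition' node by registering it via
# traverse_locally in the enclosing processing table:
#
#     { 'definition': traverse_locally({'symbol': reduce_single_child}) }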


# @transformation_factory(List[Callable])
# def apply_to_child(context: List[Node], transformations: List[Callable], condition: Callable):
#     """Applies a list of transformations to those children that meet a specific condition."""
#     node = context[-1]
#     for child in node.children:
#         context.append(child)
#         if condition(context):
#             for transform in transformations:
#                 transform(context)
#         context.pop()


@transformation_factory(Callable)
def apply_if(context: List[Node], transformation: Callable, condition: Callable):
    """Applies a transformation only if a certain condition is met."""
    if condition(context):
        transformation(context)


@transformation_factory(Callable)
def apply_unless(context: List[Node], transformation: Callable, condition: Callable):
    """Applies a transformation if a certain condition is *not* met."""
    if not condition(context):
        transformation(context)
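
# Both factories are meant to be used inside processing tables. For example,
# the following (hypothetical) entry strips surrounding brackets only from
# nodes that have more than two children:
#
#     { 'group': apply_if(remove_brackets, lambda ctx: len(ctx[-1].children) > 2) }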


#######################################################################
#
# conditionals that determine whether the context (or the last node in
# the context for that matter) fulfill a specific condition.
# ---------------------------------------------------------------------
#
# The context of a node is understood as a list of all parent nodes
# leading up to and including the node itself. If represented as list,
# the last element of the list is the node itself.
#
#######################################################################


def is_single_child(context: List[Node]) -> bool:
    """Returns ``True`` if the current node does not have any siblings."""
    return len(context[-2].children) == 1


def is_named(context: List[Node]) -> bool:
    """Returns ``True`` if the current node's parser is a named parser."""
    return bool(context[-1].parser.name)


def is_anonymous(context: List[Node]) -> bool:
    """Returns ``True`` if the current node's parser is an anonymous parser."""
    return not context[-1].parser.name


def is_whitespace(context: List[Node]) -> bool:
    """Returns ``True`` if the current node is whitespace or a comment
    defined with the ``@comment``-directive."""
    return context[-1].parser.ptype == WHITESPACE_PTYPE


def is_empty(context: List[Node]) -> bool:
    """Returns ``True`` if the current node's content is empty."""
    return not context[-1].result


def is_expendable(context: List[Node]) -> bool:
    """Returns ``True`` if the current node is either a whitespace node
    or an empty node."""
    return is_empty(context) or is_whitespace(context)


@transformation_factory  # (AbstractSet[str])
def is_token(context: List[Node], tokens: AbstractSet[str] = frozenset()) -> bool:
    """Checks whether the last node in the context has `ptype == TOKEN_PTYPE`
    and its content matches one of the given tokens. Leading and trailing
    whitespace-tokens will be ignored. In case an empty set of tokens is
    passed, any token is a match. If only ":" is given, all anonymous
    tokens but no other tokens are a match.
    """
    def stripped(nd: Node) -> str:
        """Removes leading and trailing whitespace-nodes from content."""
        # assert node.parser.ptype == TOKEN_PTYPE
        if nd.children:
            i, k = 0, len(nd.children)
            while i < len(nd.children) and nd.children[i].parser.ptype == WHITESPACE_PTYPE:
                i += 1
            while k > 0 and nd.children[k - 1].parser.ptype == WHITESPACE_PTYPE:
                k -= 1
            return "".join(child.content for child in nd.children[i:k])
        return nd.content
    node = context[-1]
    return node.parser.ptype == TOKEN_PTYPE and (not tokens or stripped(node) in tokens)


@transformation_factory
def is_one_of(context: List[Node], tag_name_set: AbstractSet[str]) -> bool:
    """Returns true, if the node's tag_name is one of the given tag names."""
    return context[-1].tag_name in tag_name_set


@transformation_factory
def has_content(context: List[Node], regexp: str) -> bool:
    """Checks a node's content against a regular expression."""
    return bool(re.match(regexp, context[-1].content))


@transformation_factory
def has_parent(context: List[Node], tag_name_set: AbstractSet[str]) -> bool:
    """Checks whether a node with one of the given tag names appears somewhere
     in the context before the last node in the context."""
    for i in range(2, len(context)):
        if context[-i].tag_name in tag_name_set:
            return True
    return False


#######################################################################
#
# utility functions (private)
#
#######################################################################


def _replace_by(node: Node, child: Node):
    if not child.parser.name:
        child.parser = MockParser(node.parser.name, child.parser.ptype)
        # parser names must not be overwritten, else: child.parser.name = node.parser.name
    node.parser = child.parser
    node._errors.extend(child._errors)
    node.result = child.result


def _reduce_child(node: Node, child: Node):
    node._errors.extend(child._errors)
    node.result = child.result


def _pick_child(context: List[Node], criteria: CriteriaType):
    """Returns the first child that meets the criteria."""
    if isinstance(criteria, int):
        try:
            return context[-1].children[criteria]
        except IndexError:
            return None
    elif isinstance(criteria, str):
        for child in context[-1].children:
            if child.tag_name == criteria:
                return child
        return None
    else:  # assume criteria has type ConditionFunc
        for child in context[-1].children:
            context.append(child)
            evaluation = criteria(context)
            context.pop()
            if evaluation:
                return child
        return None


#######################################################################
#
# rearranging transformations
#
# - tree may be rearranged (e.g. flattened)
# - nodes that are not leaves may be dropped
# - order is preserved
# - leaf content is preserved (though not necessarily the leaves
#   themselves)
#
#######################################################################


# @transformation_factory(int, str, Callable)
# def replace_by_child(context: List[Node], criteria: CriteriaType=is_single_child):
#     """
#     Replaces a node by the first of its immediate descendants
#     that meets the `criteria`. The criteria can either be the
#     index of the child (counting from zero), or the tag name or
#     a boolean-valued function on the context of the child.
#     If no child matching the criteria is found, the node will
#     not be replaced.
#     With the default value for `criteria` the semantics is the same
#     as that of `replace_by_single_child`.
#     """
#     child = _pick_child(context, criteria)
#     if child:
#         _replace_by(context[-1], child)
#
#
# @transformation_factory(int, str, Callable)
# def content_from_child(context: List[Node], criteria: CriteriaType = is_single_child):
#     """
#     Reduces a node, by transferring the result of the first of its
#     immediate descendants that meets the `criteria` to this node,
#     but keeping this node's parser entry. The criteria can either
#     be the index of the child (counting from zero), or the tag
#     name or a boolean-valued function on the context of the child.
#     If no child matching the criteria is found, the node will
#     not be replaced.
#     With the default value for `criteria` this has the same semantics
#     as `content_from_single_child`.
#     """
#     child = _pick_child(context, criteria)
#     if child:
#         _reduce_child(context[-1], child)


def replace_by_single_child(context: List[Node]):
    """
    Removes single branch node, replacing it by its immediate descendant.
    Replacement only takes place, if the last node in the context has
    exactly one child.
    """
    node = context[-1]
    if len(node.children) == 1:
        _replace_by(node, node.children[0])


def reduce_single_child(context: List[Node]):
    """
    Reduces a single branch node by transferring the result of its
    immediate descendant to this node, but keeping this node's parser entry.
    Reduction only takes place if the last node in the context has
    exactly one child.
    """
    node = context[-1]
    if len(node.children) == 1:
        _reduce_child(node, node.children[0])


@transformation_factory(Callable)
def replace_or_reduce(context: List[Node], condition: Callable=is_named):
    """
    Replaces the node by its single child if the condition is met,
    otherwise (i.e. if the child is anonymous) reduces the child.
    """
    node = context[-1]
    if len(node.children) == 1:
        child = node.children[0]
        if condition(context):
            _replace_by(node, child)
        else:
            _reduce_child(node, child)


@transformation_factory
def replace_parser(context: List[Node], name: str):
    """
    Replaces the parser of a Node with a mock parser with the given
    name.

    Parameters:
        context: the context where the parser shall be replaced
        name: "NAME:PTYPE" of the surrogate. The ptype is optional
    """
    node = context[-1]
    name, ptype = (name.split(':') + [''])[:2]
    node.parser = MockParser(name, ptype)


@transformation_factory(Callable)
def flatten(context: List[Node], condition: Callable=is_anonymous, recursive: bool=True):
    """
    Flattens all children, that fulfil the given ``condition``
    (default: all unnamed children). Flattening means that wherever a
    node has child nodes, the child nodes are inserted in place of the
    node.

    If the parameter ``recursive`` is ``True`` the same will recursively be
    done with the child-nodes, first. In other words, all leaves of
    this node and its child nodes are collected in-order as direct
    children of this node.

    Applying flatten recursively will result in these kinds of
    structural transformation::

        (1 (+ 2) (+ 3))    ->   (1 + 2 + 3)
        (1 (+ (2 + (3))))  ->   (1 + 2 + 3)
    """
    node = context[-1]
    if node.children:
        new_result = []     # type: List[Node]
        for child in node.children:
            context.append(child)
            if child.children and condition(context):
                if recursive:
                    flatten(context, condition, recursive)
                new_result.extend(child.children)
            else:
                new_result.append(child)
            context.pop()
        node.result = tuple(new_result)
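
# In practice, flatten is typically registered for recursively nested,
# list-like constructs, e.g. (hypothetical tag name):
#
#     { 'term': [flatten, remove_infix_operator] }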


def collapse(context: List[Node]):
    """
    Collapses all sub-nodes of a node by replacing them with the
    string representation of the node.
    """
    node = context[-1]
    node.result = node.content


@transformation_factory
def merge_children(context: List[Node], tag_names: List[str]):
    """
    Joins all children that are next to each other and have one of the
    given tag-names into a single child node with a mock-parser named
    after the first tag-name in the list.
    """
    node = context[-1]
    result = []
    name, ptype = ('', tag_names[0]) if tag_names[0][:1] == ':' else (tag_names[0], '')
    if node.children:
        i = 0
        L = len(node.children)
        while i < L:
            while i < L and not node.children[i].tag_name in tag_names:
                result.append(node.children[i])
                i += 1
            k = i + 1
            while (k < L and node.children[k].tag_name in tag_names
                   and bool(node.children[i].children) == bool(node.children[k].children)):
                k += 1
            if i < L:
                result.append(Node(MockParser(name, ptype),
                                   reduce(lambda a, b: a + b,
                                          (node.children for node in node.children[i:k]))))
            i = k
        node.result = tuple(result)
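
# Illustrative sketch (hypothetical tag names): joining runs of adjacent
# 'text' children and anonymous regular-expression matches; thanks to
# transformation_factory the tag names could also be passed directly:
#
#     { 'paragraph': partial(merge_children, tag_names=['text', ':RegExp']) }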


@transformation_factory(Callable)
def replace_content(context: List[Node], func: Callable):  # Callable[[Node], ResultType]
    """Replaces the content of the node. ``func`` takes the node's result
    as an argument and returns the mapped result.
    """
    node = context[-1]
    node.result = func(node.result)


#######################################################################
#
# destructive transformations:
#
# - leaves may be dropped (e.g. if deemed irrelevant)
# - errors of dropped leaves will be lost
# - no promise that order will be preserved
#
#######################################################################


@transformation_factory(Callable)
def lstrip(context: List[Node], condition: Callable = is_expendable):
    """Recursively removes all leading child-nodes that fulfill a given condition."""
    node = context[-1]
    i = 1
    while i > 0 and node.children:
        lstrip(context + [node.children[0]], condition)
        i, L = 0, len(node.children)
        while i < L and condition(context + [node.children[i]]):
            i += 1
        if i > 0:
            node.result = node.children[i:]


@transformation_factory(Callable)
def rstrip(context: List[Node], condition: Callable = is_expendable):
    """Recursively removes all trailing nodes that fulfill a given condition."""
    node = context[-1]
    i, L = 0, len(node.children)
    while i < L and node.children:
        rstrip(context + [node.children[-1]], condition)
        L = len(node.children)
        i = L
        while i > 0 and condition(context + [node.children[i-1]]):
            i -= 1
        if i < L:
            node.result = node.children[:i]


@transformation_factory(Callable)
def strip(context: List[Node], condition: Callable = is_expendable):
    """Removes leading and trailing child-nodes that fulfill a given condition."""
    lstrip(context, condition)
    rstrip(context, condition)
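
# Typical use inside a processing table (hypothetical tag name): trim
# expendable nodes, i.e. whitespace and empty nodes, from both ends of a
# node's children, either with the default or an explicit condition:
#
#     { 'block': [strip] }
#     { 'block': [strip(is_whitespace)] }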


@transformation_factory(slice)
def keep_children(context: List[Node], section: slice = slice(None)):
    """Keeps only child-nodes which fall into a slice of the result field."""
    node = context[-1]
    if node.children:
        node.result = node.children[section]


@transformation_factory(Callable)
def keep_children_if(context: List[Node], condition: Callable):
    """Keeps only those children for which `condition()` returns `True`."""
    node = context[-1]
    if node.children:
        node.result = tuple(c for c in node.children if condition(context + [c]))


@transformation_factory
def keep_tokens(context: List[Node], tokens: AbstractSet[str]=frozenset()):
    """Keeps only those immediate descendants of a node which are tokens
    from a particular set of tokens. If ``tokens`` is the empty set,
    all tokens are kept."""
    keep_children_if(context, partial(is_token, tokens=tokens))


@transformation_factory
def keep_nodes(context: List[Node], tag_names: AbstractSet[str]):
    """Keeps only children with one of the given tag names."""
    keep_children_if(context, partial(is_one_of, tag_name_set=tag_names))


@transformation_factory
def keep_content(context: List[Node], regexp: str):
    """Keeps only children whose content matches the regular expression."""
    keep_children_if(context, partial(has_content, regexp=regexp))


@transformation_factory(Callable)
def remove_children_if(context: List[Node], condition: Callable):
    """Removes all children for which `condition()` returns `True`."""
    node = context[-1]
    if node.children:
        node.result = tuple(c for c in node.children if not condition(context + [c]))

# @transformation_factory(Callable)
# def remove_children(context: List[Node],
#                     condition: Callable = TRUE_CONDITION,
#                     section: slice = slice(None)):
#     """Removes all nodes from a slice of the result field if the function
#     `condition(child_node)` evaluates to `True`."""
#     node = context[-1]
#     if node.children:
#         c = node.children
#         N = len(c)
#         rng = range(*section.indices(N))
#         node.result = tuple(c[i] for i in range(N)
#                             if i not in rng or not condition(context + [c[i]]))
#         # selection = []
#         # for i in range(N):
#         #     context.append(c[i])
#         #     if not i in rng or not condition(context):
#         #         selection.append(c[i])
#         #     context.pop()
#         # if len(selection) != c:
#         #     node.result = tuple(selection)


remove_whitespace = remove_children_if(is_whitespace)
# partial(remove_children_if, condition=is_whitespace)
remove_empty = remove_children_if(is_empty)
remove_anonymous_empty = remove_children_if(lambda ctx: is_empty(ctx) and is_anonymous(ctx))
remove_expendables = remove_children_if(is_expendable)
# partial(remove_children_if, condition=is_expendable)
remove_anonymous_expendables = remove_children_if(lambda ctx: is_anonymous(ctx)
                                                  and is_expendable(ctx))
remove_anonymous_tokens = remove_children_if(lambda ctx: is_token(ctx) and is_anonymous(ctx))
remove_first = apply_if(keep_children(slice(1, None)), lambda ctx: len(ctx[-1].children) > 1)
remove_last = apply_if(keep_children(slice(None, -1)), lambda ctx: len(ctx[-1].children) > 1)
remove_brackets = apply_if(keep_children(slice(1, -1)), lambda ctx: len(ctx[-1].children) >= 2)
remove_infix_operator = keep_children(slice(0, None, 2))
remove_single_child = apply_if(keep_children(slice(0)), lambda ctx: len(ctx[-1].children) == 1)
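
# These prefabricated transformations can be entered into processing tables
# directly, e.g. (hypothetical tag names):
#
#     { 'WS': remove_whitespace, 'group': remove_brackets }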


@transformation_factory(AbstractSet[str])
def remove_tokens(context: List[Node], tokens: AbstractSet[str]=frozenset()):
    """Removes any among a particular set of tokens from the immediate
    descendants of a node. If ``tokens`` is the empty set, all tokens
    are removed."""
    remove_children_if(context, partial(is_token, tokens=tokens))


@transformation_factory
def remove_nodes(context: List[Node], tag_names: AbstractSet[str]):
    """Removes children by tag name."""
    remove_children_if(context, partial(is_one_of, tag_name_set=tag_names))


@transformation_factory
def remove_content(context: List[Node], regexp: str):
    """Removes children whose content matches the regular expression."""
    remove_children_if(context, partial(has_content, regexp=regexp))


########################################################################
#
# AST semantic validation functions (EXPERIMENTAL!!!)
#
########################################################################

@transformation_factory(Callable)
def assert_condition(context: List[Node], condition: Callable, error_msg: str = ''):
    """Checks for `condition`; adds an error message if condition is not met."""
    node = context[-1]
    if not condition(context):
        if error_msg:
            node.add_error(error_msg % node.tag_name if error_msg.find("%s") > 0 else error_msg)
        else:
            cond_name = condition.__name__ if hasattr(condition, '__name__') \
                        else condition.__class__.__name__ if hasattr(condition, '__class__') \
                        else '<unknown>'
            node.add_error("transform.assert_condition: Failed to meet condition " + cond_name)
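
# Illustrative sketch (hypothetical tag name and limit): flag nodes that have
# an unexpectedly large number of children:
#
#     { 'row': assert_condition(lambda ctx: len(ctx[-1].children) <= 8,
#                               'Element "%s" has too many children') }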


assert_has_children = assert_condition(lambda nd: nd.children, 'Element "%s" has no children')


@transformation_factory
def assert_content(context: List[Node], regexp: str):
    node = context[-1]
    if not has_content(context, regexp):
        node.add_error('Element "%s" violates %s on %s' %
                       (node.parser.name, str(regexp), node.content))


@transformation_factory
def require(context: List[Node], child_tags: AbstractSet[str]):
    node = context[-1]
    for child in node.children:
        if child.tag_name not in child_tags:
            node.add_error('Element "%s" is not allowed inside "%s".' %
                           (child.parser.name, node.parser.name))


@transformation_factory
def forbid(context: List[Node], child_tags: AbstractSet[str]):
    node = context[-1]
    for child in node.children:
        if child.tag_name in child_tags:
            node.add_error('Element "%s" cannot be nested inside "%s".' %
                           (child.parser.name, node.parser.name))