# transform.py - transformation functions for converting the
#                concrete into the abstract syntax tree
#
# Copyright 2016  by Eckhart Arnold (arnold@badw.de)
#                 Bavarian Academy of Sciences and Humanities (badw.de)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.  See the License for the specific language governing
# permissions and limitations under the License.


"""
Module ``transform`` contains the functions for transforming the
concrete syntax tree (CST) into an abstract syntax tree (AST).

As these functions are very generic, they can in principle be
used for any kind of tree transformations, not necessarily only
for CST -> AST transformations.
"""


import collections.abc
import inspect
import fnmatch
from functools import partial, reduce, singledispatch

from DHParser.error import Error
from DHParser.syntaxtree import Node, WHITESPACE_PTYPE, TOKEN_PTYPE, MockParser, ZOMBIE_NODE
from DHParser.toolkit import issubtype, isgenerictype, expand_table, smart_list, re, typing
from typing import AbstractSet, Any, ByteString, Callable, cast, Container, Dict, \
    Tuple, List, Sequence, Union, Text, Generic

__all__ = ('TransformationDict',
           'TransformationProc',
           'TransformationFunc',
           'ConditionFunc',
           'KeyFunc',
           'transformation_factory',
           'key_parser_name',
           'key_tag_name',
           'traverse',
           'is_named',
           'replace_by_single_child',
           'reduce_single_child',
           'replace_or_reduce',
           'replace_parser',
           'collapse',
           'merge_children',
           'replace_content',
           'replace_content_by',
           'apply_if',
           'apply_unless',
           'traverse_locally',
           'is_anonymous',
           'is_whitespace',
           'is_empty',
           'is_expendable',
           'is_token',
           'is_one_of',
           'matches_re',
           'has_content',
           'has_parent',
           'lstrip',
           'rstrip',
           'strip',
           'keep_children',
           'keep_children_if',
           'keep_tokens',
           'keep_nodes',
           'keep_content',
           'remove_children_if',
           'remove_nodes',
           'remove_content',
           'remove_first',
           'remove_last',
           'remove_whitespace',
           'remove_empty',
           'remove_anonymous_empty',
           'remove_anonymous_expendables',
           'remove_anonymous_tokens',
           'remove_expendables',
           'remove_brackets',
           'remove_infix_operator',
           'remove_single_child',
           'remove_tokens',
           'flatten',
           'forbid',
           'require',
           'assert_content',
           'error_on',
           'warn_on',
           'assert_has_children')


TransformationProc = Callable[[List[Node]], None]
TransformationDict = Dict[str, Sequence[Callable]]
TransformationFunc = Union[Callable[[Node], Any], partial]
ProcessingTableType = Dict[str, Union[Sequence[Callable], TransformationDict]]
ConditionFunc = Callable  # Callable[[List[Node]], bool]
KeyFunc = Callable[[Node], str]
CriteriaType = Union[int, str, Callable]


def transformation_factory(t1=None, t2=None, t3=None, t4=None, t5=None):
    """Creates factory functions from transformation-functions that
    dispatch on the first parameter after the context parameter.

    Decorating a transformation-function that has more than merely the
    ``context``-parameter with ``transformation_factory`` creates a
    function with the same name, which returns a partial-function that
    takes just the context-parameter.

    Additionally, there is some syntactic sugar for
    transformation-functions that receive a collection as their second
    parameter and do not have any further parameters. In this case a
    list of parameters passed to the factory function will be converted
    into a collection.

    The main benefit is the readability of processing tables.

    Usage::

        @transformation_factory(AbstractSet[str])
        def remove_tokens(context, tokens):
            ...

    or, alternatively::

        @transformation_factory
        def remove_tokens(context, tokens: AbstractSet[str]):
            ...

    Example::

        trans_table = { 'expression': remove_tokens('+', '-') }

    instead of::

        trans_table = { 'expression': partial(remove_tokens, tokens={'+', '-'}) }

    Parameters:
        t1:  type of the second argument of the transformation function,
            only necessary if the transformation function's parameter list
            does not have type annotations.
    """

    def type_guard(t):
        """Raises an error if type `t` is a generic type or could be mistaken
        for the type of the canonical first parameter "List[Node]" of
        transformation functions. Returns `t`."""
        # if isinstance(t, GenericMeta):
        #     raise TypeError("Generic Type %s not permitted\n in transformation_factory "
        #                     "decorator. Use the equivalent non-generic type instead!"
        #                     % str(t))
        if isgenerictype(t):
            raise TypeError("Generic Type %s not permitted\n in transformation_factory "
                            "decorator. Use the equivalent non-generic type instead!"
                            % str(t))
        if issubtype(List[Node], t):
            raise TypeError("Sequence type %s not permitted\nin transformation_factory "
                            "decorator, because it could be mistaken for a base class "
                            "of List[Node]\nwhich is the type of the canonical first "
                            "argument of transformation functions. Try 'tuple' instead!"
                            % str(t))
        return t

    def decorator(f):
        nonlocal t1
        sig = inspect.signature(f)
        params = list(sig.parameters.values())[1:]
        if len(params) == 0:
            return f  # '@transformer' not needed w/o free parameters
        assert t1 or params[0].annotation != params[0].empty, \
            "No type information on second parameter found! Please, use type " \
            "annotation or provide the type information via transformer-decorator."
        f = singledispatch(f)
        p1type = params[0].annotation
        if t1 is None:
            t1 = type_guard(p1type)
        elif issubtype(p1type, type_guard(t1)):
            try:
                if len(params) == 1 and issubtype(p1type, Container) \
                        and not (issubtype(p1type, Text) or issubtype(p1type, ByteString)):
                    def gen_special(*args):
                        c = set(args) if issubtype(p1type, AbstractSet) else \
                            tuple(args) if issubtype(p1type, Sequence) else args
                        d = {params[0].name: c}
                        return partial(f, **d)
                    f.register(type_guard(p1type.__args__[0]), gen_special)
            except AttributeError:
                pass  # Union Type does not allow subclassing, but is not needed here
        else:
            raise TypeError("Annotated type %s is not a subclass of decorated type %s !"
                            % (str(p1type), str(t1)))

        def gen_partial(*args, **kwargs):
            d = {p.name: arg for p, arg in zip(params, args)}
            d.update(kwargs)
            return partial(f, **d)

        for t in (t1, t2, t3, t4, t5):
            if t:
                f.register(type_guard(t), gen_partial)
            else:
                break
        return f

    if isinstance(t1, type(lambda: 1)):
        # Provide for the case that transformation_factory has been
        # written as plain decorator and not as a function call that
        # returns the decorator proper.
        func = t1
        t1 = None
        return decorator(func)
    else:
        return decorator
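
# Usage sketch (illustrative only, not part of the module's API): defining a
# custom parameterized transformation with the factory. The node name 'word'
# and the function `prefix_content` are hypothetical examples.
#
#     @transformation_factory(str)
#     def prefix_content(context: List[Node], prefix: str):
#         node = context[-1]
#         node.result = prefix + node.content
#
#     trans_table = {'word': prefix_content('>>> ')}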


def key_parser_name(node: Node) -> str:
    """Returns the parser name of ``node``, for use as key function of ``traverse``."""
    return node.parser.name


def key_tag_name(node: Node) -> str:
    """Returns the tag name of ``node``, for use as key function of ``traverse``."""
    return node.tag_name


def traverse(root_node: Node,
             processing_table: ProcessingTableType,
             key_func: KeyFunc=key_tag_name) -> None:
    """
    Traverses the syntax tree starting with the given ``root_node`` depth
    first and applies the sequences of callback-functions registered
    in the ``processing_table``-dictionary.

    The most important use case is the transformation of a concrete
    syntax tree into an abstract syntax tree (AST). But it is also imaginable
    to employ tree-traversal for the semantic analysis of the AST.

    In order to assign sequences of callback-functions to nodes, a
    dictionary ("processing table") is used. The keys usually represent
    tag names, but any other key function is possible. There exist
    three special keys:

    - '+': always called (before any other processing function)
    - '*': called for those nodes for which no (other) processing
      function appears in the table
    - '~': always called (after any other processing function)

    Args:
        root_node (Node): The root-node of the syntax tree to be traversed
        processing_table (dict): node key -> sequence of functions that
            will be applied to matching nodes in order. This dictionary
            is interpreted as a ``compact_table``. See
            :func:`expand_table` or :func:`EBNFCompiler.EBNFTransTable`
        key_func (function): A mapping key_func(node) -> keystr. The default
            key_func yields node.tag_name.

    Example::

        table = { "term": [replace_by_single_child, flatten],
                  "factor, flowmarker, retrieveop": replace_by_single_child }
        traverse(node, table)

    """
    # Is this optimization really needed?
    if '__cache__' in processing_table:
        # assume that processing table has already been expanded
        table = processing_table               # type: ProcessingTableType
        cache = cast(TransformationDict, processing_table['__cache__'])  # type: TransformationDict
    else:
        # normalize processing_table entries by turning single values
        # into lists with a single value
        table = {name: cast(Sequence[Callable], smart_list(call))
                 for name, call in list(processing_table.items())}
        table = expand_table(table)
        cache = cast(TransformationDict,
                     table.setdefault('__cache__', cast(TransformationDict, dict())))
        # change processing table in place, so it's already expanded and the cache is filled next time
        processing_table.clear()
        processing_table.update(table)

    # assert '__cache__' in processing_table
    # # Code without optimization
    # table = {name: smart_list(call) for name, call in list(processing_table.items())}
    # table = expand_table(table)
    # cache = {}  # type: Dict[str, List[Callable]]

    def traverse_recursive(context):
        nonlocal cache
        node = context[-1]
        if node.children:
            context.append(ZOMBIE_NODE)
            for child in node.children:
                context[-1] = child
                traverse_recursive(context)  # depth first
            context.pop()

        key = key_func(node)
        try:
            sequence = cache[key]
        except KeyError:
            sequence = table.get('+', []) \
                + table.get(key, table.get('*', [])) \
                + table.get('~', [])
            # '+' always called (before any other processing function)
            # '*' called for those nodes for which no (other) processing function
            #     appears in the table
            # '~' always called (after any other processing function)
            cache[key] = sequence

        for call in sequence:
            call(context)

    traverse_recursive([root_node])
    # assert processing_table['__cache__']
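
# Usage sketch (illustrative): a processing table for an assumed arithmetic
# grammar with 'expression', 'term' and 'factor' nodes, using the special
# keys '+', '*' and '~' described above.
#
#     ast_table = {
#         '+': remove_empty,                       # applied to every node first
#         'expression, term': [flatten, remove_infix_operator],
#         'factor': [replace_by_single_child],
#         '*': [replace_by_single_child],          # fallback for unlisted nodes
#         '~': remove_anonymous_tokens,            # applied to every node last
#     }
#     traverse(concrete_syntax_tree, ast_table, key_tag_name)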


#######################################################################
#
# meta transformations, i.e. transformations that call other
# transformations
#
#######################################################################


@transformation_factory(dict)
def traverse_locally(context: List[Node],
                     processing_table: Dict,            # actually: ProcessingTableType
                     key_func: Callable=key_tag_name):  # actually: KeyFunc
    """Transforms the syntax tree starting from the last node in the context
    according to the given processing table. The purpose of this function is
    to apply certain transformations locally, i.e. only for those nodes that
    have the last node in the context as their parent node.
    """
    traverse(context[-1], processing_table, key_func)
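
# Usage sketch (illustrative, tag names are hypothetical): apply a sub-table
# only to the subtree below 'definition' nodes instead of to the whole tree:
#
#     local_table = {'symbol, literal': [reduce_single_child]}
#     ast_table = {'definition': [flatten, traverse_locally(local_table)]}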


@transformation_factory(collections.abc.Callable)
def apply_if(context: List[Node], transformation: Callable, condition: Callable):
    """Applies a transformation only if a certain condition is met."""
    if condition(context):
        transformation(context)


@transformation_factory(collections.abc.Callable)
def apply_unless(context: List[Node], transformation: Callable, condition: Callable):
    """Applies a transformation if a certain condition is *not* met."""
    if not condition(context):
        transformation(context)
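
# Usage sketch (illustrative, tag names are hypothetical): conditional
# application of transformations inside a processing table:
#
#     ast_table = {
#         'group': apply_if(replace_by_single_child, is_single_child),
#         'text': apply_unless(remove_whitespace, has_parent({'preformatted'})),
#     }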


#######################################################################
#
# conditionals that determine whether the context (or the last node in
# the context for that matter) fulfills a specific condition.
# ---------------------------------------------------------------------
#
# The context of a node is understood as a list of all parent nodes
# leading up to and including the node itself. If represented as list,
# the last element of the list is the node itself.
#
#######################################################################


def is_single_child(context: List[Node]) -> bool:
    """Returns ``True`` if the current node does not have any siblings."""
    return len(context[-2].children) == 1


def is_named(context: List[Node]) -> bool:
    """Returns ``True`` if the current node's parser is a named parser."""
    return bool(context[-1].parser.name)


def is_anonymous(context: List[Node]) -> bool:
    """Returns ``True`` if the current node's parser is an anonymous parser."""
    return not context[-1].parser.name


def is_whitespace(context: List[Node]) -> bool:
    """Returns ``True`` if the current node is a whitespace node."""
    return context[-1].parser.ptype == WHITESPACE_PTYPE


def is_empty(context: List[Node]) -> bool:
    """Returns ``True`` if the current node's content is empty."""
    return not context[-1].result


def is_expendable(context: List[Node]) -> bool:
    """Returns ``True`` if the current node is either a whitespace node
    or an empty node."""
    return is_empty(context) or is_whitespace(context)


@transformation_factory(collections.abc.Set)
def is_token(context: List[Node], tokens: AbstractSet[str] = frozenset()) -> bool:
    """Checks whether the last node in the context has `ptype == TOKEN_PTYPE`
    and its content matches one of the given tokens. Leading and trailing
    whitespace-tokens will be ignored. In case an empty set of tokens is passed,
    any token is a match.
    """
    def stripped(nd: Node) -> str:
        """Removes leading and trailing whitespace-nodes from content."""
        # assert node.parser.ptype == TOKEN_PTYPE
        if nd.children:
            i, k = 0, len(nd.children)
            while i < len(nd.children) and nd.children[i].parser.ptype == WHITESPACE_PTYPE:
                i += 1
            while k > 0 and nd.children[k - 1].parser.ptype == WHITESPACE_PTYPE:
                k -= 1
            return "".join(child.content for child in nd.children[i:k])
        return nd.content
    node = context[-1]
    return node.parser.ptype == TOKEN_PTYPE and (not tokens or stripped(node) in tokens)


@transformation_factory(collections.abc.Set)
def is_one_of(context: List[Node], tag_name_set: AbstractSet[str]) -> bool:
    """Returns true if the node's tag_name is one of the given tag names."""
    return context[-1].tag_name in tag_name_set


# @transformation_factory(collections.abc.Set)
# def matches_wildcard(context: List[Node], wildcards: AbstractSet[str]) -> bool:
#     """Returns true if the node's tag_name matches one of the glob patterns
#     in `wildcards`. For example, ':*' matches all anonymous nodes. """
#     tn = context[-1].tag_name
#     for pattern in wildcards:
#         if fnmatch.fnmatch(tn, pattern):
#             return True
#     return False


@transformation_factory(collections.abc.Set)
def matches_re(context: List[Node], patterns: AbstractSet[str]) -> bool:
    """Returns true if the node's tag_name matches one of the regular
    expressions in `patterns`. For example, ':.*' matches all anonymous nodes.
    """
    tn = context[-1].tag_name
    for pattern in patterns:
        if re.match(pattern, tn):
            return True
    return False


@transformation_factory
def has_content(context: List[Node], regexp: str) -> bool:
    """
    Checks a node's content against a regular expression.

    In contrast to ``re.match`` the regular expression must match the complete
    string and not just the beginning of the string to succeed!
    """
    if not regexp.endswith('$'):
        regexp += "$"
    return bool(re.match(regexp, context[-1].content))


@transformation_factory(collections.abc.Set)
def has_parent(context: List[Node], tag_name_set: AbstractSet[str]) -> bool:
    """Checks whether a node with one of the given tag names appears somewhere
     in the context before the last node in the context."""
    for i in range(2, len(context)):
        if context[-i].tag_name in tag_name_set:
            return True
    return False
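
# Usage sketch (illustrative): conditions like the above are meant to be
# combined with the meta- and child-removing transformations of this module,
# e.g. (the tag name 'phrase' is hypothetical):
#
#     remove_empty_phrases = remove_children_if(
#         lambda ctx: is_empty(ctx) and is_one_of(ctx, {'phrase'}))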


#######################################################################
#
# utility functions (private)
#
#######################################################################


def _replace_by(node: Node, child: Node):
    if not child.parser.name:
        child.parser = MockParser(node.parser.name, child.parser.ptype)
        # parser names must not be overwritten, else: child.parser.name = node.parser.name
    node.parser = child.parser
    # node.errors.extend(child.errors)
    node.result = child.result
    if hasattr(child, '_xml_attr'):
        node.attributes.update(child.attributes)


def _reduce_child(node: Node, child: Node):
    # node.errors.extend(child.errors)
    node.result = child.result
    if hasattr(child, '_xml_attr'):
        node.attributes.update(child.attributes)


# def _pick_child(context: List[Node], criteria: CriteriaType):
#     """Returns the first child that meets the criteria."""
#     if isinstance(criteria, int):
#         try:
#             return context[-1].children[criteria]
#         except IndexError:
#             return None
#     elif isinstance(criteria, str):
#         for child in context[-1].children:
#             if child.tag_name == criteria:
#                 return child
#         return None
#     else:  # assume criteria has type ConditionFunc
#         for child in context[-1].children:
#             context.append(child)
#             evaluation = criteria(context)
#             context.pop()
#             if evaluation:
#                 return child
#         return None


#######################################################################
#
# rearranging transformations
#
# - tree may be rearranged (e.g. flattened)
# - nodes that are not leaves may be dropped
# - order is preserved
# - leaf content is preserved (though not necessarily the leaves
#   themselves)
#
#######################################################################


# @transformation_factory(int, str, Callable)
# def replace_by_child(context: List[Node], criteria: CriteriaType=is_single_child):
#     """
#     Replaces a node by the first of its immediate descendants
#     that meets the `criteria`. The criteria can either be the
#     index of the child (counting from zero), or the tag name or
#     a boolean-valued function on the context of the child.
#     If no child matching the criteria is found, the node will
#     not be replaced.
#     With the default value for `criteria` the semantics is the same
#     as that of `replace_by_single_child`.
#     """
#     child = _pick_child(context, criteria)
#     if child:
#         _replace_by(context[-1], child)
#
#
# @transformation_factory(int, str, Callable)
# def content_from_child(context: List[Node], criteria: CriteriaType = is_single_child):
#     """
#     Reduces a node, by transferring the result of the first of its
#     immediate descendants that meets the `criteria` to this node,
#     but keeping this node's parser entry. The criteria can either
#     be the index of the child (counting from zero), or the tag
#     name or a boolean-valued function on the context of the child.
#     If no child matching the criteria is found, the node will
#     not be replaced.
#     With the default value for `criteria` this has the same semantics
#     as `content_from_single_child`.
#     """
#     child = _pick_child(context, criteria)
#     if child:
#         _reduce_child(context[-1], child)


def replace_by_single_child(context: List[Node]):
    """
    Removes a single branch node, replacing it by its immediate descendant.
    Replacement only takes place if the last node in the context has
    exactly one child.
    """
    node = context[-1]
    if len(node.children) == 1:
        _replace_by(node, node.children[0])


def reduce_single_child(context: List[Node]):
    """
    Reduces a single branch node by transferring the result of its
    immediate descendant to this node, but keeping this node's parser entry.
    Reduction only takes place if the last node in the context has
    exactly one child.
    """
    node = context[-1]
    if len(node.children) == 1:
        _reduce_child(node, node.children[0])


@transformation_factory(collections.abc.Callable)
def replace_or_reduce(context: List[Node], condition: Callable=is_named):
    """
    Replaces node by a single child, if condition is met on child,
    otherwise (i.e. if the child is anonymous) reduces the child.
    """
    node = context[-1]
    if len(node.children) == 1:
        child = node.children[0]
        if condition(context):
            _replace_by(node, child)
        else:
            _reduce_child(node, child)


@transformation_factory
def replace_parser(context: List[Node], name: str):
    """
    Replaces the parser of a Node with a mock parser with the given
    name.

    Parameters:
        context: the context where the parser shall be replaced
        name: "NAME:PTYPE" of the surrogate. The ptype is optional
    """
    node = context[-1]
    name, ptype = (name.split(':') + [''])[:2]
    node.parser = MockParser(name, ':' + ptype)


@transformation_factory(collections.abc.Callable)
def flatten(context: List[Node], condition: Callable=is_anonymous, recursive: bool=True):
    """
    Flattens all children that fulfill the given ``condition``
    (default: all unnamed children). Flattening means that wherever a
    node has child nodes, the child nodes are inserted in place of the
    node.

    If the parameter ``recursive`` is ``True`` the same will recursively be
    done with the child-nodes, first. In other words, all leaves of
    this node and its child nodes are collected in-order as direct
    children of this node.

    Applying flatten recursively will result in these kinds of
    structural transformation::

        (1 (+ 2) (+ 3))    ->   (1 + 2 + 3)
        (1 (+ (2 + (3))))  ->   (1 + 2 + 3)
    """
    node = context[-1]
    if node.children:
        new_result = []     # type: List[Node]
        context.append(ZOMBIE_NODE)
        for child in node.children:
            context[-1] = child
            if child.children and condition(context):
                if recursive:
                    flatten(context, condition, recursive)
                new_result.extend(child.children)
            else:
                new_result.append(child)
        context.pop()
        node.result = tuple(new_result)
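
# Usage sketch (illustrative, tag names are hypothetical): flatten all
# anonymous children of 'expression' nodes, or flatten only 'group' children
# and do so non-recursively:
#
#     ast_table = {'expression': flatten}
#     ast_table = {'expression': flatten(is_one_of({'group'}), False)}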


def collapse(context: List[Node]):
    """
    Collapses all sub-nodes of a node by replacing them with the
    string representation of the node.
    """
    node = context[-1]
    node.result = node.content


# @transformation_factory
# def collect_leaves(context: List[Node], whitespace: str=''):
#     """
#     Collects all leaf nodes, dropping any intermediary nodes.
#     Optionally adds whitespace between the nodes.
#     """
#     assert context[-1].children
#     node = context[-1]
#     leaves_iterator = node.select(lambda nd: not nd.children, include_root=False)
#     if whitespace:
#         mock_ws_parser = MockParser('', WHITESPACE_PTYPE)
#         result = []
#         for leave in leaves_iterator:
#             result.append(leave)
#             result.append(Node(mock_ws_parser, whitespace, leafhint=True))
#         result.pop()
#         node.result = tuple(result)
#     else:
#         node.result = (nd for nd in leaves_iterator)


@transformation_factory(tuple)
def merge_children(context: List[Node], tag_names: Tuple[str]):
    """
    Joins all adjacent children that have one of the given tag-names
    into a single child node with a mock-parser bearing the name of the
    first tag-name in the list.
    """
    node = context[-1]
    result = []
    name, ptype = ('', tag_names[0]) if tag_names[0][:1] == ':' else (tag_names[0], '')
    if node.children:
        i = 0
        L = len(node.children)
        while i < L:
            while i < L and not node.children[i].tag_name in tag_names:
                result.append(node.children[i])
                i += 1
            k = i + 1
            while (k < L and node.children[k].tag_name in tag_names
                   and bool(node.children[i].children) == bool(node.children[k].children)):
                k += 1
            if i < L:
                result.append(Node(MockParser(name, ptype),
                                   reduce(lambda a, b: a + b,
                                          (node.children for node in node.children[i:k]))))
            i = k
        node.result = tuple(result)
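
# Usage sketch (illustrative, tag names are hypothetical): merge runs of
# adjacent anonymous token and regexp nodes into a single node:
#
#     ast_table = {'text': merge_children(':Token', ':RegExp')}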


@transformation_factory(collections.abc.Callable)
def replace_content(context: List[Node], func: Callable):  # Callable[[Node], ResultType]
    """Replaces the content of the node. ``func`` takes the node's result
    as an argument and returns the mapped result.
    """
    node = context[-1]
    node.result = func(node.result)


@transformation_factory  # (str)
def replace_content_by(context: List[Node], content: str):
    """Replaces the content of the node with the given text content.
    """
    node = context[-1]
    node.result = content


#######################################################################
#
# destructive transformations:
#
# - leaves may be dropped (e.g. if deemed irrelevant)
# - errors of dropped leaves will be lost
# - no promise that order will be preserved
#
#######################################################################


@transformation_factory(collections.abc.Callable)
def lstrip(context: List[Node], condition: Callable = is_expendable):
    """Recursively removes all leading child-nodes that fulfill a given condition."""
    node = context[-1]
    i = 1
    while i > 0 and node.children:
        lstrip(context + [node.children[0]], condition)
        i, L = 0, len(node.children)
        while i < L and condition(context + [node.children[i]]):
            i += 1
        if i > 0:
            node.result = node.children[i:]


@transformation_factory(collections.abc.Callable)
def rstrip(context: List[Node], condition: Callable = is_expendable):
    """Recursively removes all trailing child-nodes that fulfill a given condition."""
    node = context[-1]
    i, L = 0, len(node.children)
    while i < L and node.children:
        rstrip(context + [node.children[-1]], condition)
        L = len(node.children)
        i = L
        while i > 0 and condition(context + [node.children[i-1]]):
            i -= 1
        if i < L:
            node.result = node.children[:i]


@transformation_factory(collections.abc.Callable)
def strip(context: List[Node], condition: Callable = is_expendable):
    """Removes leading and trailing child-nodes that fulfill a given condition."""
    lstrip(context, condition)
    rstrip(context, condition)
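
# Usage sketch (illustrative, tag names are hypothetical): strip expendable
# children (whitespace, empty nodes) from 'line' nodes, or strip only
# anonymous token children:
#
#     ast_table = {'line': strip}
#     ast_table = {'line': strip(lambda ctx: is_token(ctx) and is_anonymous(ctx))}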


@transformation_factory  # (slice)
def keep_children(context: List[Node], section: slice = slice(None)):
    """Keeps only child-nodes which fall into a slice of the result field."""
    node = context[-1]
    if node.children:
        node.result = node.children[section]


@transformation_factory(collections.abc.Callable)
def keep_children_if(context: List[Node], condition: Callable):
    """Keeps only those children for which `condition()` returns `True`."""
    node = context[-1]
    if node.children:
        node.result = tuple(c for c in node.children if condition(context + [c]))


@transformation_factory(collections.abc.Set)
def keep_tokens(context: List[Node], tokens: AbstractSet[str]=frozenset()):
    """Keeps only those tokens among the immediate descendants of a node
    that are contained in ``tokens``. If ``tokens`` is the empty set, all
    tokens are kept."""
    keep_children_if(context, partial(is_token, tokens=tokens))


@transformation_factory(collections.abc.Set)
def keep_nodes(context: List[Node], tag_names: AbstractSet[str]):
    """Keeps only those children whose tag name is in ``tag_names``."""
    keep_children_if(context, partial(is_one_of, tag_name_set=tag_names))


@transformation_factory
def keep_content(context: List[Node], regexp: str):
    """Keeps only those children whose content matches ``regexp``."""
    keep_children_if(context, partial(has_content, regexp=regexp))


@transformation_factory(collections.abc.Callable)
def remove_children_if(context: List[Node], condition: Callable):
    """Removes all children for which `condition()` returns `True`."""
    node = context[-1]
    if node.children:
        node.result = tuple(c for c in node.children if not condition(context + [c]))

# @transformation_factory(Callable)
# def remove_children(context: List[Node],
#                     condition: Callable = TRUE_CONDITION,
#                     section: slice = slice(None)):
#     """Removes all nodes from a slice of the result field if the function
#     `condition(child_node)` evaluates to `True`."""
#     node = context[-1]
#     if node.children:
#         c = node.children
#         N = len(c)
#         rng = range(*section.indices(N))
#         node.result = tuple(c[i] for i in range(N)
#                             if i not in rng or not condition(context + [c[i]]))
#         # selection = []
#         # for i in range(N):
#         #     context.append(c[i])
#         #     if not i in rng or not condition(context):
#         #         selection.append(c[i])
#         #     context.pop()
#         # if len(selection) != c:
#         #     node.result = tuple(selection)


remove_whitespace = remove_children_if(is_whitespace)
# partial(remove_children_if, condition=is_whitespace)
remove_empty = remove_children_if(is_empty)
remove_anonymous_empty = remove_children_if(lambda ctx: is_empty(ctx) and is_anonymous(ctx))
remove_expendables = remove_children_if(is_expendable)
# partial(remove_children_if, condition=is_expendable)
remove_anonymous_expendables = remove_children_if(lambda ctx: is_anonymous(ctx)
                                                  and is_expendable(ctx))
remove_anonymous_tokens = remove_children_if(lambda ctx: is_token(ctx) and is_anonymous(ctx))
# remove_first = apply_if(keep_children(slice(1, None)), lambda ctx: len(ctx[-1].children) > 1)
# remove_last = apply_if(keep_children(slice(None, -1)), lambda ctx: len(ctx[-1].children) > 1)
# remove_brackets = apply_if(keep_children(slice(1, -1)), lambda ctx: len(ctx[-1].children) >= 2)
remove_infix_operator = keep_children(slice(0, None, 2))
remove_single_child = apply_if(keep_children(slice(0)), lambda ctx: len(ctx[-1].children) == 1)
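
# Usage sketch (illustrative, tag names are hypothetical): typical
# combinations of the ready-made transformations above in a processing table:
#
#     ast_table = {
#         'term': [remove_whitespace, remove_brackets, replace_by_single_child],
#         'expression': [remove_expendables, flatten, remove_infix_operator],
#     }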


def remove_first(context: List[Node]):
    """Removes the first non-whitespace child."""
    node = context[-1]
    if node.children:
        for i, child in enumerate(node.children):
            if child.parser.ptype != WHITESPACE_PTYPE:
                break
        else:
            return
        node.result = node.children[:i] + node.children[i+1:]


def remove_last(context: List[Node]):
    """Removes the last non-whitespace child."""
    node = context[-1]
    if node.children:
        for i, child in enumerate(reversed(node.children)):
            if child.parser.ptype != WHITESPACE_PTYPE:
                break
        else:
            return
        i = len(node.children) - i - 1
        node.result = node.children[:i] + node.children[i+1:]


def remove_brackets(context: List[Node]):
    """Removes the first and the last non-whitespace child."""
    remove_first(context)
    remove_last(context)


@transformation_factory(collections.abc.Set)
def remove_tokens(context: List[Node], tokens: AbstractSet[str]=frozenset()):
    """Removes any among a particular set of tokens from the immediate
    descendants of a node. If ``tokens`` is the empty set, all tokens
    are removed."""
    remove_children_if(context, partial(is_token, tokens=tokens))


@transformation_factory(collections.abc.Set)
def remove_nodes(context: List[Node], tag_names: AbstractSet[str]):
    """Removes children by tag name."""
    remove_children_if(context, partial(is_one_of, tag_name_set=tag_names))


@transformation_factory
def remove_content(context: List[Node], regexp: str):
    """Removes children depending on their string value."""
    remove_children_if(context, partial(has_content, regexp=regexp))


########################################################################
#
# AST semantic validation functions (EXPERIMENTAL!!!)
#
########################################################################

@transformation_factory(collections.abc.Callable)
def error_on(context: List[Node], condition: Callable, error_msg: str = ''):
    """
    Checks for `condition`; adds an error message if condition is not met.
    """
    node = context[-1]
    if not condition(context):
        if error_msg:
            node.add_error(error_msg % node.tag_name if error_msg.find("%s") > 0 else error_msg)
        else:
            cond_name = condition.__name__ if hasattr(condition, '__name__') \
                        else condition.__class__.__name__ if hasattr(condition, '__class__') \
                        else '<unknown>'
            node.add_error("transform.error_on: Failed to meet condition " + cond_name)


@transformation_factory(collections.abc.Callable)
def warn_on(context: List[Node], condition: Callable, warning: str = ''):
    """
    Checks for `condition`; adds a warning message if condition is not met.
    """
    node = context[-1]
    if not condition(context):
        if warning:
            node.add_error(warning % node.tag_name if warning.find("%s") > 0 else warning,
                           Error.WARNING)
        else:
            cond_name = condition.__name__ if hasattr(condition, '__name__') \
                        else condition.__class__.__name__ if hasattr(condition, '__class__') \
                        else '<unknown>'
            node.add_error("transform.warn_on: Failed to meet condition " + cond_name,
                           Error.WARNING)
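
# Usage sketch (illustrative, tag names and messages are hypothetical):
# attaching validation checks to the AST transformation:
#
#     ast_table = {
#         'number': error_on(has_content(r'\d+'), 'Element "%s" is not numeric'),
#         'list': warn_on(lambda ctx: bool(ctx[-1].children), 'Element "%s" is empty'),
#     }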


assert_has_children = error_on(lambda ctx: bool(ctx[-1].children), 'Element "%s" has no children')


@transformation_factory
def assert_content(context: List[Node], regexp: str):
    """Adds an error if the node's content does not match ``regexp``."""
    node = context[-1]
    if not has_content(context, regexp):
        context[0].new_error(node, 'Element "%s" violates %s on %s' %
                             (node.parser.name, str(regexp), node.content))


@transformation_factory(collections.abc.Set)
def require(context: List[Node], child_tags: AbstractSet[str]):
    """Adds an error if the node has a child whose tag name is not in ``child_tags``."""
    node = context[-1]
    for child in node.children:
        if child.tag_name not in child_tags:
            context[0].new_error(node, 'Element "%s" is not allowed inside "%s".' %
                                 (child.parser.name, node.parser.name))


@transformation_factory(collections.abc.Set)
def forbid(context: List[Node], child_tags: AbstractSet[str]):
    """Adds an error if the node has a child whose tag name is in ``child_tags``."""
    node = context[-1]
    for child in node.children:
        if child.tag_name in child_tags:
            context[0].new_error(node, 'Element "%s" cannot be nested inside "%s".' %
                                 (child.parser.name, node.parser.name))