# transform.py - transformation functions for converting the
#                concrete into the abstract syntax tree
#
# Copyright 2016  by Eckhart Arnold (arnold@badw.de)
#                 Bavarian Academy of Sciences and Humanities (badw.de)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.  See the License for the specific language governing
# permissions and limitations under the License.


"""
Module ``transform`` contains the functions for transforming the
concrete syntax tree (CST) into an abstract syntax tree (AST).

As these functions are very generic, they can in principle be
used for any kind of tree transformations, not necessarily only
for CST -> AST transformations.
"""


import collections.abc
import inspect
import fnmatch

from functools import partial, reduce, singledispatch

from DHParser.error import Error
from DHParser.syntaxtree import Node, WHITESPACE_PTYPE, TOKEN_PTYPE, ParserBase, MockParser, \
    ZOMBIE_NODE
from DHParser.toolkit import issubtype, isgenerictype, expand_table, smart_list, re, typing
from typing import AbstractSet, Any, ByteString, Callable, cast, Container, Dict, \
    Tuple, List, Sequence, Union, Text, Generic


__all__ = ('TransformationDict',
           'TransformationProc',
           'TransformationFunc',
           'ConditionFunc',
           'KeyFunc',
           'transformation_factory',
           'key_parser_name',
           'key_tag_name',
           'traverse',
           'is_named',
           'replace_by_single_child',
           'reduce_single_child',
           'replace_or_reduce',
           'replace_parser',
           'collapse',
           'collapse_if',
           'merge_children',
           'replace_content',
           'replace_content_by',
           'apply_if',
           'apply_unless',
           'traverse_locally',
           'is_anonymous',
           'is_whitespace',
           'is_empty',
           'is_expendable',
           'is_token',
           'is_one_of',
           'not_one_of',
           'matches_re',
           'has_content',
           'has_parent',
           'lstrip',
           'rstrip',
           'strip',
           'keep_children',
           'keep_children_if',
           'keep_tokens',
           'keep_nodes',
           'keep_content',
           'remove_children_if',
           'remove_nodes',
           'remove_content',
           'remove_first',
           'remove_last',
           'remove_whitespace',
           'remove_empty',
           'remove_anonymous_empty',
           'remove_anonymous_expendables',
           'remove_anonymous_tokens',
           'remove_expendables',
           'remove_brackets',
           'remove_infix_operator',
           'remove_single_child',
           'remove_tokens',
           'flatten',
           'forbid',
           'require',
           'assert_content',
           'error_on',
           'warn_on',
           'assert_has_children',
           'peek')


TransformationProc = Callable[[List[Node]], None]
TransformationDict = Dict[str, Sequence[Callable]]
TransformationFunc = Union[Callable[[Node], Any], partial]
ProcessingTableType = Dict[str, Union[Sequence[Callable], TransformationDict]]
ConditionFunc = Callable  # Callable[[List[Node]], bool]
KeyFunc = Callable[[Node], str]
CriteriaType = Union[int, str, Callable]


def transformation_factory(t1=None, t2=None, t3=None, t4=None, t5=None):
    """Creates factory functions from transformation-functions that
    dispatch on the first parameter after the context parameter.

    Decorating a transformation-function that has more than merely the
    ``context``-parameter with ``transformation_factory`` creates a
    function with the same name, which returns a partial-function that
    takes just the context-parameter.

    Additionally, there is some syntactic sugar for
    transformation-functions that receive a collection as their second
    parameter and do not have any further parameters. In this case a
    list of parameters passed to the factory function will be converted
    into a collection.

    The main benefit is the readability of the processing tables.

    Usage::

        @transformation_factory(AbstractSet[str])
        def remove_tokens(context, tokens):
            ...

    or, alternatively::

        @transformation_factory
        def remove_tokens(context, tokens: AbstractSet[str]):
            ...

    Example::

        trans_table = { 'expression': remove_tokens('+', '-') }

    instead of::

        trans_table = { 'expression': partial(remove_tokens, tokens={'+', '-'}) }

    Parameters:
        t1:  type of the second argument of the transformation function,
            only necessary if the transformation functions' parameter list
            does not have type annotations.
    """

    def type_guard(t):
        """Raises an error if type `t` is a generic type or could be mistaken
        for the type of the canonical first parameter ``List[Node]`` of
        transformation functions. Returns `t`."""
        # if isinstance(t, GenericMeta):
        #     raise TypeError("Generic Type %s not permitted\n in transformation_factory "
        #                     "decorator. Use the equivalent non-generic type instead!"
        #                     % str(t))
        if isgenerictype(t):
            raise TypeError("Generic Type %s not permitted\n in transformation_factory "
                            "decorator. Use the equivalent non-generic type instead!"
                            % str(t))
        if issubtype(List[Node], t):
            raise TypeError("Sequence type %s not permitted\nin transformation_factory "
                            "decorator, because it could be mistaken for a base class "
                            "of List[Node]\nwhich is the type of the canonical first "
                            "argument of transformation functions. Try 'tuple' instead!"
                            % str(t))
        return t

    def decorator(f):
        nonlocal t1
        sig = inspect.signature(f)
        params = list(sig.parameters.values())[1:]
        if len(params) == 0:
            return f  # '@transformer' not needed w/o free parameters
        assert t1 or params[0].annotation != params[0].empty, \
            "No type information on second parameter found! Please, use type " \
            "annotation or provide the type information via transformer-decorator."
        f = singledispatch(f)
        p1type = params[0].annotation
        if t1 is None:
            t1 = type_guard(p1type)
        elif issubtype(p1type, type_guard(t1)):
            try:
                if len(params) == 1 and issubtype(p1type, Container) \
                        and not (issubtype(p1type, Text) or issubtype(p1type, ByteString)):
                    def gen_special(*args):
                        c = set(args) if issubtype(p1type, AbstractSet) else \
                            tuple(args) if issubtype(p1type, Sequence) else args
                        d = {params[0].name: c}
                        return partial(f, **d)
                    f.register(type_guard(p1type.__args__[0]), gen_special)
            except AttributeError:
                pass  # Union types do not allow subclassing, but are not needed here
        else:
            raise TypeError("Annotated type %s is not a subclass of decorated type %s !"
                            % (str(p1type), str(t1)))

        def gen_partial(*args, **kwargs):
            d = {p.name: arg for p, arg in zip(params, args)}
            d.update(kwargs)
            return partial(f, **d)

        for t in (t1, t2, t3, t4, t5):
            if t:
                f.register(type_guard(t), gen_partial)
            else:
                break
        return f

    if isinstance(t1, type(lambda: 1)):
        # Provide for the case that transformation_factory has been
        # written as a plain decorator and not as a function call that
        # returns the decorator proper.
        func = t1
        t1 = None
        return decorator(func)
    else:
        return decorator


def key_parser_name(node: Node) -> str:
    return node.parser.name


def key_tag_name(node: Node) -> str:
    return node.tag_name


def traverse(root_node: Node,
             processing_table: ProcessingTableType,
             key_func: KeyFunc=key_tag_name) -> None:
    """
    Traverses the syntax tree starting with the given ``node`` depth
    first and applies the sequences of callback-functions registered
    in the ``processing_table``-dictionary.

    The most important use case is the transformation of a concrete
    syntax tree into an abstract syntax tree (AST). But it is also
    imaginable to employ tree-traversal for the semantic analysis of
    the AST.

    In order to assign sequences of callback-functions to nodes, a
    dictionary ("processing table") is used. The keys usually represent
    tag names, but any other key function is possible. There exist
    three special keys:

    - '+': always called (before any other processing function)
    - '*': called for those nodes for which no (other) processing
      function appears in the table
    - '~': always called (after any other processing function)

    Args:
        root_node (Node): The root-node of the syntax tree to be traversed
        processing_table (dict): node key -> sequence of functions that
            will be applied to matching nodes in order. This dictionary
            is interpreted as a ``compact_table``. See
            :func:`expand_table` or :func:`EBNFCompiler.EBNFTransTable`
        key_func (function): A mapping key_func(node) -> keystr. The default
            key_func yields node.tag_name.

    Example::

        table = { "term": [replace_by_single_child, flatten],
                  "factor, flowmarker, retrieveop": replace_by_single_child }
        traverse(node, table)

    """
    # Is this optimization really needed?
    if '__cache__' in processing_table:
        # assume that processing table has already been expanded
        table = processing_table               # type: ProcessingTableType
        cache = cast(TransformationDict, processing_table['__cache__'])  # type: TransformationDict
    else:
        # normalize processing_table entries by turning single values
        # into lists with a single value
        table = {name: cast(Sequence[Callable], smart_list(call))
                 for name, call in list(processing_table.items())}
        table = expand_table(table)
        cache = cast(TransformationDict,
                     table.setdefault('__cache__', cast(TransformationDict, dict())))
        # change the processing table in place, so it is already expanded and the cache is filled next time
        processing_table.clear()
        processing_table.update(table)

    # assert '__cache__' in processing_table
    # # Code without optimization
    # table = {name: smart_list(call) for name, call in list(processing_table.items())}
    # table = expand_table(table)
    # cache = {}  # type: Dict[str, List[Callable]]

    def traverse_recursive(context):
        nonlocal cache
        node = context[-1]
        if node.children:
            context.append(ZOMBIE_NODE)
            for child in node.children:
                context[-1] = child
                traverse_recursive(context)  # depth first
            context.pop()

        key = key_func(node)
        try:
            sequence = cache[key]
        except KeyError:
            sequence = table.get('+', []) \
                + table.get(key, table.get('*', [])) \
                + table.get('~', [])
            # '+' always called (before any other processing function)
            # '*' called for those nodes for which no (other) processing function
            #     appears in the table
            # '~' always called (after any other processing function)
            cache[key] = sequence

        for call in sequence:
            call(context)

    traverse_recursive([root_node])
    # assert processing_table['__cache__']


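# Illustrative example (not part of the original module): a typical call of
# ``traverse`` with a compact processing table. The grammar-specific tag names
# ("term", "factor") are made up for the sake of the example::
#
#     ast_table = {
#         "+": remove_empty,                        # applied to every node first
#         "term, factor": [replace_by_single_child, flatten],
#         "*": replace_by_single_child,             # fallback for nodes without an entry
#         "~": remove_anonymous_tokens,             # applied to every node last
#     }
#     traverse(concrete_syntax_tree, ast_table)

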
#######################################################################
#
# meta transformations, i.e. transformations that call other
# transformations
#
#######################################################################


@transformation_factory(dict)
def traverse_locally(context: List[Node],
                     processing_table: Dict,            # actually: ProcessingTableType
                     key_func: Callable=key_tag_name):  # actually: KeyFunc
    """Transforms the syntax tree starting from the last node in the context
    according to the given processing table. The purpose of this function is
    to apply certain transformations locally, i.e. only for those nodes that
    have the last node in the context as their parent node.
    """
    traverse(context[-1], processing_table, key_func)
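

# Illustrative sketch (not part of the original module): ``traverse_locally`` drops
# a whole sub-table into a processing table, so that the sub-table is only applied
# underneath a particular node. All tag names below are made up::
#
#     ast_transformations = {
#         "table": traverse_locally({"cell": [reduce_single_child],
#                                    ":Token": remove_whitespace}),
#     }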


@transformation_factory(collections.abc.Callable)
def apply_if(context: List[Node], transformation: Callable, condition: Callable):
    """Applies a transformation only if a certain condition is met."""
    if condition(context):
        transformation(context)


@transformation_factory(collections.abc.Callable)
def apply_unless(context: List[Node], transformation: Callable, condition: Callable):
    """Applies a transformation if a certain condition is *not* met."""
    if not condition(context):
        transformation(context)
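

# Illustrative sketch (not part of the original module): ``apply_if`` and
# ``apply_unless`` wrap another transformation in a condition, e.g.::
#
#     collapse_unless_named = apply_unless(collapse, is_named)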


#######################################################################
#
# conditionals that determine whether the context (or the last node in
# the context for that matter) fulfill a specific condition.
# ---------------------------------------------------------------------
#
# The context of a node is understood as a list of all parent nodes
# leading up to and including the node itself. If represented as list,
# the last element of the list is the node itself.
#
#######################################################################


def is_single_child(context: List[Node]) -> bool:
    """Returns ``True`` if the current node does not have any siblings."""
    return len(context[-2].children) == 1


def is_named(context: List[Node]) -> bool:
    """Returns ``True`` if the current node's parser is a named parser."""
    return bool(context[-1].parser.name)


def is_anonymous(context: List[Node]) -> bool:
    """Returns ``True`` if the current node's parser is an anonymous parser."""
    return not context[-1].parser.name


def is_whitespace(context: List[Node]) -> bool:
    """Removes whitespace and comments defined with the
    ``@comment``-directive."""
    return context[-1].parser.ptype == WHITESPACE_PTYPE


def is_empty(context: List[Node]) -> bool:
    """Returns ``True`` if the current node's content is empty."""
    return not context[-1].result


def is_expendable(context: List[Node]) -> bool:
    """Returns ``True`` if the current node either is a node containing
    whitespace or an empty node."""
    return is_empty(context) or is_whitespace(context)


@transformation_factory(collections.abc.Set)
def is_token(context: List[Node], tokens: AbstractSet[str] = frozenset()) -> bool:
    """Checks whether the last node in the context has `ptype == TOKEN_PTYPE`
    and its content matches one of the given tokens. Leading and trailing
    whitespace-tokens will be ignored. In case an empty set of tokens is passed,
    any token is a match.
    """
    # def stripped(nd: Node) -> str:
    #     """Removes leading and trailing whitespace-nodes from content."""
    #     # assert node.parser.ptype == TOKEN_PTYPE
    #     if nd.children:
    #         i, k = 0, len(nd.children)
    #         while i < len(nd.children) and nd.children[i].parser.ptype == WHITESPACE_PTYPE:
    #             i += 1
    #         while k > 0 and nd.children[k - 1].parser.ptype == WHITESPACE_PTYPE:
    #             k -= 1
    #         return "".join(child.content for child in node.children[i:k])
    #     return nd.content
    # node = context[-1]
    # return node.parser.ptype == TOKEN_PTYPE and (not tokens or stripped(node) in tokens)
    node = context[-1]
    return node.parser.ptype == TOKEN_PTYPE and (not tokens or node.content in tokens)


@transformation_factory(collections.abc.Set)
def is_one_of(context: List[Node], tag_name_set: AbstractSet[str]) -> bool:
    """Returns true, if the node's tag_name is one of the given tag names."""
    return context[-1].tag_name in tag_name_set


@transformation_factory(collections.abc.Set)
def not_one_of(context: List[Node], tag_name_set: AbstractSet[str]) -> bool:
    """Returns true, if the node's tag_name is not one of the given tag names."""
    return context[-1].tag_name not in tag_name_set


# @transformation_factory(collections.abc.Set)
# def matches_wildcard(context: List[Node], wildcards: AbstractSet[str]) -> bool:
#     """Retruns true, if the node's tag_name matches one of the glob patterns
#     in `wildcards`. For example, ':*' matches all anonymous nodes. """
#     tn = context[-1].tag_name
#     for pattern in wildcards:
#         if fnmatch.fnmatch(tn, pattern):
#             return True
#     return False


@transformation_factory(collections.abc.Set)
def matches_re(context: List[Node], patterns: AbstractSet[str]) -> bool:
    """Retruns true, if the node's tag_name matches one of the regular
    expressions in `patterns`. For example, ':.*' matches all anonymous nodes.
    """
    tn = context[-1].tag_name
    for pattern in patterns:
        if re.match(pattern, tn):
            return True
    return False
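

# Illustrative sketch (not part of the original module): condition functions such as
# ``is_one_of`` or ``matches_re`` are usually not called directly but passed to
# transformations like ``remove_children_if`` (defined further below), e.g.::
#
#     remove_anonymous_children = remove_children_if(partial(matches_re, patterns={':.*'}))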


@transformation_factory
def has_content(context: List[Node], regexp: str) -> bool:
    """
    Checks a node's content against a regular expression.

    In contrast to ``re.match`` the regular expression must match the complete
    string and not just the beginning of the string to succeed!
    """
    if not regexp.endswith('$'):
        regexp += "$"
    return bool(re.match(regexp, context[-1].content))
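

# Illustrative sketch (not part of the original module): since ``has_content``
# anchors the regular expression at both ends, the pattern must cover the node's
# complete content, e.g.::
#
#     is_blank = partial(has_content, regexp=r'\s*')   # matches only all-whitespace content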


@transformation_factory(collections.abc.Set)
def has_parent(context: List[Node], tag_name_set: AbstractSet[str]) -> bool:
    """Checks whether a node with one of the given tag names appears somewhere
     in the context before the last node in the context."""
    for i in range(2, len(context)):
        if context[-i].tag_name in tag_name_set:
            return True
    return False


#######################################################################
#
# utility functions (private)
#
#######################################################################


def _replace_by(node: Node, child: Node):
    if not child.parser.name:
        child.parser = MockParser(node.parser.name, child.parser.ptype)
        # parser names must not be overwritten, else: child.parser.name = node.parser.name
    node.parser = child.parser
    # node.errors.extend(child.errors)
    node.result = child.result
    if hasattr(child, '_xml_attr'):
        node.attr.update(child.attr)


def _reduce_child(node: Node, child: Node):
    # node.errors.extend(child.errors)
    node.result = child.result
    if hasattr(child, '_xml_attr'):
        node.attr.update(child.attr)


# def _pick_child(context: List[Node], criteria: CriteriaType):
#     """Returns the first child that meets the criteria."""
#     if isinstance(criteria, int):
#         try:
#             return context[-1].children[criteria]
#         except IndexError:
#             return None
#     elif isinstance(criteria, str):
#         for child in context[-1].children:
#             if child.tag_name == criteria:
#                 return child
#         return None
#     else:  # assume criteria has type ConditionFunc
#         for child in context[-1].children:
#             context.append(child)
#             evaluation = criteria(context)
#             context.pop()
#             if evaluation:
#                 return child
#         return None


#######################################################################
#
# rearranging transformations
#
# - tree may be rearranged (e.g.flattened)
# - nodes that are not leaves may be dropped
# - order is preserved
# - leave content is preserved (though not necessarily the leaves
#   themselves)
#
#######################################################################


# @transformation_factory(int, str, Callable)
# def replace_by_child(context: List[Node], criteria: CriteriaType=is_single_child):
#     """
#     Replaces a node by the first of its immediate descendants
#     that meets the `criteria`. The criteria can either be the
#     index of the child (counting from zero), or the tag name or
#     a boolean-valued function on the context of the child.
#     If no child matching the criteria is found, the node will
#     not be replaced.
#     With the default value for `criteria` the same semantics is
#     the same that of `replace_by_single_child`.
#     """
#     child = _pick_child(context, criteria)
#     if child:
#         _replace_by(context[-1], child)
#
#
# @transformation_factory(int, str, Callable)
# def content_from_child(context: List[Node], criteria: CriteriaType = is_single_child):
#     """
#     Reduces a node, by transferring the result of the first of its
#     immediate descendants that meets the `criteria` to this node,
#     but keeping this node's parser entry. The criteria can either
#     be the index of the child (counting from zero), or the tag
#     name or a boolean-valued function on the context of the child.
#     If no child matching the criteria is found, the node will
#     not be replaced.
#     With the default value for `criteria` this has the same semantics
#     as `content_from_single_child`.
#     """
#     child = _pick_child(context, criteria)
#     if child:
#         _reduce_child(context[-1], child)


def replace_by_single_child(context: List[Node]):
    """
    Removes a single branch node, replacing it by its immediate descendant.
    The replacement only takes place if the last node in the context has
    exactly one child.
    """
    node = context[-1]
    if len(node.children) == 1:
        _replace_by(node, node.children[0])


def reduce_single_child(context: List[Node]):
    """
    Reduces a single branch node by transferring the result of its
    immediate descendant to this node, but keeping this node's parser entry.
    Reduction only takes place if the last node in the context has
    exactly one child.
    """
    node = context[-1]
    if len(node.children) == 1:
        _reduce_child(node, node.children[0])
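

# Illustrative sketch (not part of the original module): ``replace_by_single_child``
# promotes the only child into its parent's place (a named child keeps its own name),
# whereas ``reduce_single_child`` pulls the child's result up while keeping the
# parent's name. With made-up tag names::
#
#     ast_transformations = {
#         "factor": replace_by_single_child,
#         "NUMBER": reduce_single_child,
#     }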


@transformation_factory(collections.abc.Callable)
def replace_or_reduce(context: List[Node], condition: Callable=is_named):
    """
    Replaces the node by its single child, if the condition is met,
    otherwise (i.e. if the child is anonymous) reduces the child.
    """
    node = context[-1]
    if len(node.children) == 1:
        child = node.children[0]
        if condition(context):
            _replace_by(node, child)
        else:
            _reduce_child(node, child)


@transformation_factory
def replace_parser(context: List[Node], name: str):
    """
    Replaces the parser of a Node with a mock parser with the given
    name.

    Parameters:
        context: the context where the parser shall be replaced
        name: "NAME:PTYPE" of the surrogate. The ptype is optional
    """
    node = context[-1]
    name, ptype = (name.split(':') + [''])[:2]
    node.parser = MockParser(name, ':' + ptype)


@transformation_factory(collections.abc.Callable)
def flatten(context: List[Node], condition: Callable=is_anonymous, recursive: bool=True):
    """
    Flattens all children that fulfil the given ``condition``
    (default: all unnamed children). Flattening means that wherever a
    node has child nodes, the child nodes are inserted in place of the
    node.

    If the parameter ``recursive`` is ``True``, the same will recursively be
    done with the child-nodes first. In other words, all leaves of
    this node and its child nodes are collected in-order as direct
    children of this node.

    Applying flatten recursively will result in these kinds of
    structural transformation::

        (1 (+ 2) (+ 3))    ->   (1 + 2 + 3)
        (1 (+ (2 + (3))))  ->   (1 + 2 + 3)
    """
    node = context[-1]
    if node.children:
        new_result = []     # type: List[Node]
        context.append(ZOMBIE_NODE)
        for child in node.children:
            context[-1] = child
            if child.children and condition(context):
                if recursive:
                    flatten(context, condition, recursive)
                new_result.extend(child.children)
            else:
                new_result.append(child)
        context.pop()
        node.result = tuple(new_result)


def collapse(context: List[Node]):
    """
    Collapses all sub-nodes of a node by replacing them with the
    string representation of the node.
    """
    node = context[-1]
    node.result = node.content


@transformation_factory(collections.abc.Callable)
def collapse_if(context: List[Node], condition: Callable, target_tag: ParserBase):
    """
    (Recursively) merges each maximal run of contiguous child-nodes that
    fulfil the given ``condition`` into a single leaf node of type
    ``target_tag``; children that do not fulfil the condition are left
    untouched.
    """
    node = context[-1]
    package = []
    result = []

    def close_package():
        nonlocal package
        if package:
            s = "".join(str(nd.result) for nd in package)
            result.append(Node(target_tag, s))
            package = []

    for child in node.children:
        if condition([child]):
            if child.children:
                collapse_if([child], condition, target_tag)
                for c in child.children:
                    if condition([c]):
                        package.append(c)
                    else:
                        close_package()
                        result.append(c)
                close_package()
            else:
                package.append(child)
        else:
            close_package()
            result.append(child)
    close_package()
    node.result = tuple(result)
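

# Illustrative sketch (not part of the original module): ``collapse_if`` can fuse runs
# of purely textual child-nodes into single token-like nodes; the condition and tag
# names below are made up::
#
#     collapse_plain_text = collapse_if(lambda ctx: ctx[-1].tag_name in {'WORD', 'S'},
#                                       MockParser('text', TOKEN_PTYPE))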


# @transformation_factory
# def collect_leaves(context: List[Node], whitespace: str=''):
#     """
#     Collects all leaf nodes, dropping any intermediary nodes.
#     Optionally adds whitespace between the nodes.
#     """
#     assert context[-1].children
#     node = context[-1]
#     leaves_iterator = node.select(lambda nd: not nd.children, include_root=False)
#     if whitespace:
#         mock_ws_parser = MockParser('', WHITESPACE_PTYPE)
#         result = []
#         for leave in leaves_iterator:
#             result.append(leave)
#             result.append(Node(mock_ws_parser, whitespace, leafhint=True))
#         result.pop()
#         node.result = tuple(result)
#     else:
#         node.result = (nd for nd in leaves_iterator)


@transformation_factory(tuple)
def merge_children(context: List[Node], tag_names: Tuple[str]):
    """
    Joins all children that are next to each other and have one of the given
    tag-names into a single child node with a mock-parser named after the
    first tag-name in the list.
    """
    node = context[-1]
    result = []
    name, ptype = ('', tag_names[0]) if tag_names[0][:1] == ':' else (tag_names[0], '')
    if node.children:
        i = 0
        L = len(node.children)
        while i < L:
            while i < L and not node.children[i].tag_name in tag_names:
                result.append(node.children[i])
                i += 1
            k = i + 1
            while (k < L and node.children[k].tag_name in tag_names
                   and bool(node.children[i].children) == bool(node.children[k].children)):
                k += 1
            if i < L:
                result.append(Node(MockParser(name, ptype),
                                   reduce(lambda a, b: a + b,
                                          (node.children for node in node.children[i:k]))))
            i = k
        node.result = tuple(result)
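

# Illustrative sketch (not part of the original module): ``merge_children`` fuses
# adjacent children with the given tag-names, e.g. successive anonymous tokens; the
# tag names below are assumptions for the sake of the example::
#
#     ast_transformations = {
#         "paragraph": merge_children(':Token', ':Whitespace'),
#     }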


@transformation_factory(collections.abc.Callable)
def replace_content(context: List[Node], func: Callable):  # Callable[[Node], ResultType]
    """Replaces the content of the node. ``func`` takes the node's result
    as an argument and returns the mapped result.
    """
    node = context[-1]
    node.result = func(node.result)


@transformation_factory  # (str)
def replace_content_by(context: List[Node], content: str):  # Callable[[Node], ResultType]
    """Replaces the content of the node with the given text content.
    """
    node = context[-1]
    node.result = content


#######################################################################
#
# destructive transformations:
#
# - leaves may be dropped (e.g. if deemed irrelevant)
# - errors of dropped leaves will be lost
# - no promise that order will be preserved
#
#######################################################################


@transformation_factory(collections.abc.Callable)
def lstrip(context: List[Node], condition: Callable = is_expendable):
    """Recursively removes all leading child-nodes that fulfill a given condition."""
    node = context[-1]
    i = 1
    while i > 0 and node.children:
        lstrip(context + [node.children[0]], condition)
        i, L = 0, len(node.children)
        while i < L and condition(context + [node.children[i]]):
            i += 1
        if i > 0:
            node.result = node.children[i:]


@transformation_factory(collections.abc.Callable)
def rstrip(context: List[Node], condition: Callable = is_expendable):
    """Recursively removes all leading nodes that fulfill a given condition."""
    node = context[-1]
    i, L = 0, len(node.children)
    while i < L and node.children:
        rstrip(context + [node.children[-1]], condition)
        L = len(node.children)
        i = L
        while i > 0 and condition(context + [node.children[i-1]]):
            i -= 1
        if i < L:
            node.result = node.children[:i]


@transformation_factory(collections.abc.Callable)
def strip(context: List[Node], condition: Callable = is_expendable):
    """Removes leading and trailing child-nodes that fulfill a given condition."""
    lstrip(context, condition)
    rstrip(context, condition)
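

# Illustrative sketch (not part of the original module): ``strip`` is typically used
# to prune leading and trailing whitespace or empty nodes before further processing;
# the tag name is made up::
#
#     ast_transformations = {
#         "block": [strip(is_expendable), flatten],
#     }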


@transformation_factory  # (slice)
def keep_children(context: List[Node], section: slice = slice(None)):
    """Keeps only child-nodes which fall into a slice of the result field."""
    node = context[-1]
    if node.children:
        node.result = node.children[section]


@transformation_factory(collections.abc.Callable)
def keep_children_if(context: List[Node], condition: Callable):
    """Removes all children for which `condition()` returns `True`."""
    node = context[-1]
    if node.children:
        node.result = tuple(c for c in node.children if condition(context + [c]))


@transformation_factory(collections.abc.Set)
def keep_tokens(context: List[Node], tokens: AbstractSet[str]=frozenset()):
    """Removes any among a particular set of tokens from the immediate
    descendants of a node. If ``tokens`` is the empty set, all tokens
    are removed."""
    keep_children_if(context, partial(is_token, tokens=tokens))


@transformation_factory(collections.abc.Set)
def keep_nodes(context: List[Node], tag_names: AbstractSet[str]):
    """Removes children by tag name."""
    keep_children_if(context, partial(is_one_of, tag_name_set=tag_names))


@transformation_factory
def keep_content(context: List[Node], regexp: str):
    """Removes children depending on their string value."""
    keep_children_if(context, partial(has_content, regexp=regexp))


@transformation_factory(collections.abc.Callable)
def remove_children_if(context: List[Node], condition: Callable):
    """Removes all children for which `condition()` returns `True`."""
    node = context[-1]
    if node.children:
        node.result = tuple(c for c in node.children if not condition(context + [c]))

# @transformation_factory(Callable)
# def remove_children(context: List[Node],
#                     condition: Callable = TRUE_CONDITION,
#                     section: slice = slice(None)):
#     """Removes all nodes from a slice of the result field if the function
#     `condition(child_node)` evaluates to `True`."""
#     node = context[-1]
#     if node.children:
#         c = node.children
#         N = len(c)
#         rng = range(*section.indices(N))
#         node.result = tuple(c[i] for i in range(N)
#                             if i not in rng or not condition(context + [c[i]]))
#         # selection = []
#         # for i in range(N):
#         #     context.append(c[i])
#         #     if not i in rng or not condition(context):
#         #         selection.append(c[i])
#         #     context.pop()
#         # if len(selection) != c:
#         #     node.result = tuple(selection)


remove_whitespace = remove_children_if(is_whitespace)
# partial(remove_children_if, condition=is_whitespace)
remove_empty = remove_children_if(is_empty)
remove_anonymous_empty = remove_children_if(lambda ctx: is_empty(ctx) and is_anonymous(ctx))
remove_expendables = remove_children_if(is_expendable)
# partial(remove_children_if, condition=is_expendable)
remove_anonymous_expendables = remove_children_if(lambda ctx: is_anonymous(ctx)
                                                  and is_expendable(ctx))
remove_anonymous_tokens = remove_children_if(lambda ctx: is_token(ctx) and is_anonymous(ctx))
# remove_first = apply_if(keep_children(slice(1, None)), lambda ctx: len(ctx[-1].children) > 1)
# remove_last = apply_if(keep_children(slice(None, -1)), lambda ctx: len(ctx[-1].children) > 1)
# remove_brackets = apply_if(keep_children(slice(1, -1)), lambda ctx: len(ctx[-1].children) >= 2)
remove_infix_operator = keep_children(slice(0, None, 2))
remove_single_child = apply_if(keep_children(slice(0)), lambda ctx: len(ctx[-1].children) == 1)
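

# Illustrative sketch (not part of the original module): the shortcuts defined above
# are meant to be dropped directly into a processing table; tag names are made up::
#
#     ast_transformations = {
#         "+": remove_anonymous_expendables,
#         "expression": [remove_infix_operator, replace_by_single_child],
#     }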


def remove_first(context: List[Node]):
    """Removes the first non-whitespace child."""
    node = context[-1]
    if node.children:
        for i, child in enumerate(node.children):
            if child.parser.ptype != WHITESPACE_PTYPE:
                break
        else:
            return
        node.result = node.children[:i] + node.children[i+1:]


def remove_last(context: List[Node]):
    """Removes the last non-whitespace child."""
    node = context[-1]
    if node.children:
        for i, child in enumerate(reversed(node.children)):
            if child.parser.ptype != WHITESPACE_PTYPE:
                break
        else:
            return
        i = len(node.children) - i - 1
        node.result = node.children[:i] + node.children[i+1:]


def remove_brackets(context: List[Node]):
    """Removes the first and the last non-whitespace child."""
    remove_first(context)
    remove_last(context)


@transformation_factory(collections.abc.Set)
def remove_tokens(context: List[Node], tokens: AbstractSet[str]=frozenset()):
    """Removes any among a particular set of tokens from the immediate
    descendants of a node. If ``tokens`` is the empty set, all tokens
    are removed."""
    remove_children_if(context, partial(is_token, tokens=tokens))


@transformation_factory(collections.abc.Set)
def remove_nodes(context: List[Node], tag_names: AbstractSet[str]):
    """Removes children by tag name."""
    remove_children_if(context, partial(is_one_of, tag_name_set=tag_names))


@transformation_factory
def remove_content(context: List[Node], regexp: str):
    """Removes those children whose content matches ``regexp``."""
    remove_children_if(context, partial(has_content, regexp=regexp))


########################################################################
#
# AST semantic validation functions (EXPERIMENTAL!!!)
#
########################################################################


@transformation_factory(collections.abc.Callable)
def error_on(context: List[Node], condition: Callable, error_msg: str = ''):
    """
    Checks for `condition`; adds an error message if the condition is not met.
    """
    node = context[-1]
    if not condition(context):
        if error_msg:
            node.add_error(error_msg % node.tag_name if error_msg.find("%s") > 0 else error_msg)
        else:
            cond_name = condition.__name__ if hasattr(condition, '__name__') \
                        else condition.__class__.__name__ if hasattr(condition, '__class__') \
                        else '<unknown>'
            node.add_error("transform.error_on: Failed to meet condition " + cond_name)


@transformation_factory(collections.abc.Callable)
def warn_on(context: List[Node], condition: Callable, warning: str = ''):
    """
    Checks for `condition`; adds a warning message if the condition is not met.
    """
    node = context[-1]
    if not condition(context):
        if warning:
            node.add_error(warning % node.tag_name if warning.find("%s") > 0 else warning,
                           Error.WARNING)
        else:
            cond_name = condition.__name__ if hasattr(condition, '__name__') \
                        else condition.__class__.__name__ if hasattr(condition, '__class__') \
                        else '<unknown>'
            node.add_error("transform.warn_on: Failed to meet condition " + cond_name,
                           Error.WARNING)
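

# Illustrative sketch (not part of the original module): the validation helpers can
# flag suspicious trees during the AST transformation; the tag name and message are
# made up::
#
#     ast_transformations = {
#         "definition": error_on(lambda ctx: len(ctx[-1].children) >= 2,
#                                'Element "%s" requires at least two children'),
#     }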


assert_has_children = error_on(lambda ctx: ctx[-1].children, 'Element "%s" has no children')


@transformation_factory
def assert_content(context: List[Node], regexp: str):
    """Adds an error if the node's content does not match ``regexp``."""
    node = context[-1]
    if not has_content(context, regexp):
        context[0].new_error(node, 'Element "%s" violates %s on %s' %
                             (node.parser.name, str(regexp), node.content))


@transformation_factory(collections.abc.Set)
def require(context: List[Node], child_tags: AbstractSet[str]):
    """Adds an error for every child whose tag name is not in ``child_tags``."""
    node = context[-1]
    for child in node.children:
        if child.tag_name not in child_tags:
            context[0].new_error(node, 'Element "%s" is not allowed inside "%s".' %
                                 (child.parser.name, node.parser.name))


@transformation_factory(collections.abc.Set)
def forbid(context: List[Node], child_tags: AbstractSet[str]):
    """Adds an error for every child whose tag name is in ``child_tags``."""
    node = context[-1]
    for child in node.children:
        if child.tag_name in child_tags:
            context[0].new_error(node, 'Element "%s" cannot be nested inside "%s".' %
                                 (child.parser.name, node.parser.name))


def peek(context: List[Node]):
    """For debugging: Prints the last node in the context as S-expression."""
    print(context[-1].as_sxpr())