# transform.py - transformation functions for converting the
#                concrete into the abstract syntax tree
#
# Copyright 2016  by Eckhart Arnold (arnold@badw.de)
#                 Bavarian Academy of Sciences and Humanities (badw.de)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.  See the License for the specific language governing
# permissions and limitations under the License.


"""
Module ``transform`` contains the functions for transforming the
concrete syntax tree (CST) into an abstract syntax tree (AST).

As these functions are very generic, they can in principle be
used for any kind of tree transformations, not necessarily only
for CST -> AST transformations.
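
A typical transformation-call might look like this (sketch; the
grammar-specific node and variable names are merely illustrative)::

    ast_transformation_table = {
        '+': remove_empty,
        'expression': [flatten, remove_tokens('+', '-')],
        'term': [replace_by_single_child],
        '*': replace_by_single_child,
    }
    traverse(concrete_syntax_tree, ast_transformation_table)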
"""


import collections.abc
import inspect
from functools import partial, reduce, singledispatch

from DHParser.error import Error
from DHParser.syntaxtree import Node, WHITESPACE_PTYPE, TOKEN_PTYPE, MockParser, ZOMBIE_NODE
from DHParser.toolkit import expand_table, smart_list, re, typing
from typing import AbstractSet, Any, ByteString, Callable, cast, Container, Dict, \
    Tuple, List, Sequence, Union, Text, Generic

__all__ = ('TransformationDict',
           'TransformationProc',
           'TransformationFunc',
           'ConditionFunc',
           'KeyFunc',
           'transformation_factory',
           'key_parser_name',
           'key_tag_name',
           'traverse',
           'is_named',
           'replace_by_single_child',
           'reduce_single_child',
           'replace_or_reduce',
           'replace_parser',
           'collapse',
           'merge_children',
           'replace_content',
           'replace_content_by',
           'apply_if',
           'apply_unless',
           'traverse_locally',
           'is_anonymous',
           'is_whitespace',
           'is_empty',
           'is_expendable',
           'is_token',
           'is_one_of',
           'has_content',
           'has_parent',
           'lstrip',
           'rstrip',
           'strip',
           'keep_children',
           'keep_children_if',
           'keep_tokens',
           'keep_nodes',
           'keep_content',
           'remove_children_if',
           'remove_nodes',
           'remove_content',
           'remove_first',
           'remove_last',
           'remove_whitespace',
           'remove_empty',
           'remove_anonymous_empty',
           'remove_anonymous_expendables',
           'remove_anonymous_tokens',
           'remove_expendables',
           'remove_brackets',
           'remove_infix_operator',
           'remove_single_child',
           'remove_tokens',
           'flatten',
           'forbid',
           'require',
           'assert_content',
           'error_on',
           'warn_on',
           'assert_has_children')


TransformationProc = Callable[[List[Node]], None]
TransformationDict = Dict[str, Sequence[Callable]]
TransformationFunc = Union[Callable[[Node], Any], partial]
ProcessingTableType = Dict[str, Union[Sequence[Callable], TransformationDict]]
ConditionFunc = Callable  # Callable[[List[Node]], bool]
KeyFunc = Callable[[Node], str]
CriteriaType = Union[int, str, Callable]


def transformation_factory(t1=None, t2=None, t3=None, t4=None, t5=None):
    """Creates factory functions from transformation-functions that
    dispatch on the first parameter after the context parameter.

    Decorating a transformation-function that has more than merely the
    ``context``-parameter with ``transformation_factory`` creates a
    function with the same name, which returns a partial-function that
    takes just the context-parameter.

    Additionally, there is some syntactic sugar for
    transformation-functions that receive a collection as their second
    parameter and do not have any further parameters. In this case a
    list of parameters passed to the factory function will be converted
    into a collection.

    The main benefit is the readability of processing tables.

    Usage::

        @transformation_factory(AbstractSet[str])
        def remove_tokens(context, tokens):
            ...

    or, alternatively::

        @transformation_factory
        def remove_tokens(context, tokens: AbstractSet[str]):
            ...

    Example::

        trans_table = { 'expression': remove_tokens('+', '-') }

    instead of::

        trans_table = { 'expression': partial(remove_tokens, tokens={'+', '-'}) }

    Parameters:
        t1:  type of the second argument of the transformation function,
            only necessary if the transformation function's parameter list
            does not have type annotations.
    """

    def issubtype(sub_type, base_type):
        return base_type in inspect.getmro(sub_type)

    def isgenerictype(t):
        return str(t).endswith(']')

    def type_guard(t):
        """Raises an error if type `t` is a generic type or could be mistaken
        for the type of the canonical first parameter "List[Node]" of
        transformation functions. Returns `t`."""
        # if isinstance(t, GenericMeta):
        #     raise TypeError("Generic Type %s not permitted\n in transformation_factory "
        #                     "decorator. Use the equivalent non-generic type instead!"
        #                     % str(t))
        if isgenerictype(t):
            raise TypeError("Generic Type %s not permitted\n in transformation_factory "
                            "decorator. Use the equivalent non-generic type instead!"
                            % str(t))
        if issubtype(List[Node], t):
            raise TypeError("Sequence type %s not permitted\nin transformation_factory "
                            "decorator, because it could be mistaken for a base class "
                            "of List[Node]\nwhich is the type of the canonical first "
                            "argument of transformation functions. Try 'tuple' instead!"
                            % str(t))
        return t

    def decorator(f):
        nonlocal t1
        sig = inspect.signature(f)
        params = list(sig.parameters.values())[1:]
        if len(params) == 0:
            return f  # '@transformer' not needed w/o free parameters
        assert t1 or params[0].annotation != params[0].empty, \
            "No type information on second parameter found! Please, use type " \
            "annotation or provide the type information via transformer-decorator."
        f = singledispatch(f)
        p1type = params[0].annotation
        if t1 is None:
            t1 = type_guard(p1type)
        elif issubtype(p1type, type_guard(t1)):
            try:
                if len(params) == 1 and issubclass(p1type, Container) \
                        and not (issubtype(p1type, Text) or issubtype(p1type, ByteString)):
                    def gen_special(*args):
                        c = set(args) if issubtype(p1type, AbstractSet) else \
                            tuple(args) if issubtype(p1type, Sequence) else args
                        d = {params[0].name: c}
                        return partial(f, **d)

                    f.register(type_guard(p1type.__args__[0]), gen_special)
            except AttributeError:
                pass  # Union Type does not allow subclassing, but is not needed here
        else:
            raise TypeError("Annotated type %s is not a subclass of decorated type %s !"
                            % (str(p1type), str(t1)))

        def gen_partial(*args, **kwargs):
            d = {p.name: arg for p, arg in zip(params, args)}
            d.update(kwargs)
            return partial(f, **d)

        for t in (t1, t2, t3, t4, t5):
            if t:
                f.register(type_guard(t), gen_partial)
            else:
                break
        return f

    if isinstance(t1, type(lambda: 1)):
        # Provide for the case that transformation_factory has been
        # written as plain decorator and not as a function call that
        # returns the decorator proper.
        func = t1
        t1 = None
        return decorator(func)
    else:
        return decorator


def key_parser_name(node: Node) -> str:
    return node.parser.name


def key_tag_name(node: Node) -> str:
    return node.tag_name


def traverse(root_node: Node,
             processing_table: ProcessingTableType,
             key_func: KeyFunc=key_tag_name) -> None:
    """
    Traverses the syntax tree starting with the given ``root_node`` depth
    first and applies the sequences of callback-functions registered
    in the ``processing_table``-dictionary.

    The most important use case is the transformation of a concrete
    syntax tree into an abstract syntax tree (AST). But it is also
    imaginable to employ tree-traversal for the semantic analysis of the AST.

    In order to assign sequences of callback-functions to nodes, a
    dictionary ("processing table") is used. The keys usually represent
    tag names, but any other key function is possible. There exist
    three special keys:

    - '+': always called (before any other processing function)
    - '*': called for those nodes for which no (other) processing
      function appears in the table
    - '~': always called (after any other processing function)

    Args:
        root_node (Node): The root-node of the syntax tree to be traversed
        processing_table (dict): node key -> sequence of functions that
            will be applied to matching nodes in order. This dictionary
            is interpreted as a ``compact_table``. See
            :func:`expand_table` or :func:`EBNFCompiler.EBNFTransTable`
        key_func (function): A mapping key_func(node) -> keystr. The default
            key_func yields node.tag_name.

    Example::

        table = { "term": [replace_by_single_child, flatten],
                  "factor, flowmarker, retrieveop": replace_by_single_child }
        traverse(node, table)
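
    The special keys can be combined with ordinary keys, e.g. (sketch)::

        table = { "+": remove_empty,
                  "term": [replace_by_single_child, flatten],
                  "*": replace_by_single_child,
                  "~": remove_anonymous_tokens }
        traverse(node, table)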

    """
    # Is this optimization really needed?
    if '__cache__' in processing_table:
        # assume that processing table has already been expanded
        table = processing_table               # type: ProcessingTableType
        cache = cast(TransformationDict, processing_table['__cache__'])  # type: TransformationDict
    else:
        # normalize processing_table entries by turning single values
        # into lists with a single value
        table = {name: cast(Sequence[Callable], smart_list(call))
                 for name, call in list(processing_table.items())}
        table = expand_table(table)
        cache = cast(TransformationDict,
                     table.setdefault('__cache__', cast(TransformationDict, dict())))
        # change processing table in place, so it's already expanded and cache filled next time
        processing_table.clear()
        processing_table.update(table)

    # assert '__cache__' in processing_table
    # # Code without optimization
    # table = {name: smart_list(call) for name, call in list(processing_table.items())}
    # table = expand_table(table)
    # cache = {}  # type: Dict[str, List[Callable]]

    def traverse_recursive(context):
        nonlocal cache
        node = context[-1]
        if node.children:
            context.append(ZOMBIE_NODE)
            for child in node.children:
                context[-1] = child
                traverse_recursive(context)  # depth first
            context.pop()

        key = key_func(node)
        try:
            sequence = cache[key]
        except KeyError:
            sequence = table.get('+', []) \
                + table.get(key, table.get('*', [])) \
                + table.get('~', [])
            # '+' always called (before any other processing function)
            # '*' called for those nodes for which no (other) processing function
            #     appears in the table
            # '~' always called (after any other processing function)
            cache[key] = sequence

        for call in sequence:
            call(context)

    traverse_recursive([root_node])
    # assert processing_table['__cache__']


#######################################################################
#
# meta transformations, i.e. transformations that call other
# transformations
#
#######################################################################


@transformation_factory(dict)
def traverse_locally(context: List[Node],
                     processing_table: Dict,            # actually: ProcessingTableType
                     key_func: Callable=key_tag_name):  # actually: KeyFunc
    """Transforms the syntax tree starting from the last node in the context
    according to the given processing table. The purpose of this function is
    to apply certain transformations locally, i.e. only for those nodes that
    have the last node in the context as their parent node.
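
    Example (sketch; the node and table names are illustrative)::

        inner_table = {'WS': remove_whitespace}
        ast_table = {'paragraph': traverse_locally(inner_table)}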
    """
    traverse(context[-1], processing_table, key_func)


@transformation_factory(collections.abc.Callable)
def apply_if(context: List[Node], transformation: Callable, condition: Callable):
    """Applies a transformation only if a certain condition is met."""
    if condition(context):
        transformation(context)


@transformation_factory(collections.abc.Callable)
def apply_unless(context: List[Node], transformation: Callable, condition: Callable):
    """Applies a transformation if a certain condition is *not* met."""
    if not condition(context):
        transformation(context)
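
# Usage sketch (illustrative table entries): ``apply_if`` and ``apply_unless``
# wrap another transformation inside a processing table, e.g.
#
#     'term': apply_if(replace_by_single_child, is_single_child),
#     'factor': apply_unless(flatten, is_named)
#
# Both factory calls return partials that take just the context parameter.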


#######################################################################
#
# conditionals that determine whether the context (or the last node in
# the context for that matter) fulfills a specific condition.
# ---------------------------------------------------------------------
#
# The context of a node is understood as a list of all parent nodes
# leading up to and including the node itself. If represented as a list,
# the last element of the list is the node itself.
#
#######################################################################


def is_single_child(context: List[Node]) -> bool:
    """Returns ``True`` if the current node does not have any siblings."""
    return len(context[-2].children) == 1


def is_named(context: List[Node]) -> bool:
    """Returns ``True`` if the current node's parser is a named parser."""
    return bool(context[-1].parser.name)


def is_anonymous(context: List[Node]) -> bool:
    """Returns ``True`` if the current node's parser is an anonymous parser."""
    return not context[-1].parser.name


def is_whitespace(context: List[Node]) -> bool:
    """Returns ``True`` if the current node is insignificant whitespace or a
    comment as defined with the ``@comment``-directive."""
    return context[-1].parser.ptype == WHITESPACE_PTYPE


def is_empty(context: List[Node]) -> bool:
    """Returns ``True`` if the current node's content is empty."""
    return not context[-1].result


def is_expendable(context: List[Node]) -> bool:
    """Returns ``True`` if the current node is either a whitespace node or
    an empty node."""
    return is_empty(context) or is_whitespace(context)


@transformation_factory(collections.abc.Set)
def is_token(context: List[Node], tokens: AbstractSet[str] = frozenset()) -> bool:
    """Checks whether the last node in the context has `ptype == TOKEN_PTYPE`
    and its content matches one of the given tokens. Leading and trailing
    whitespace-tokens will be ignored. In case an empty set of tokens is passed,
    any token is a match.
    """
    def stripped(nd: Node) -> str:
        """Removes leading and trailing whitespace-nodes from content."""
        # assert nd.parser.ptype == TOKEN_PTYPE
        if nd.children:
            i, k = 0, len(nd.children)
            while i < len(nd.children) and nd.children[i].parser.ptype == WHITESPACE_PTYPE:
                i += 1
            while k > 0 and nd.children[k - 1].parser.ptype == WHITESPACE_PTYPE:
                k -= 1
            return "".join(child.content for child in nd.children[i:k])
        return nd.content
    node = context[-1]
    return node.parser.ptype == TOKEN_PTYPE and (not tokens or stripped(node) in tokens)


@transformation_factory(collections.abc.Set)
def is_one_of(context: List[Node], tag_name_set: AbstractSet[str]) -> bool:
    """Returns ``True`` if the node's tag_name is one of the given tag names."""
    return context[-1].tag_name in tag_name_set


@transformation_factory
def has_content(context: List[Node], regexp: str) -> bool:
    """
    Checks a node's content against a regular expression.

    In contrast to ``re.match`` the regular expression must match the complete
    string and not just the beginning of the string to succeed!
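
    Example (sketch)::

        has_content(context, r'[0-9]+')   # True only if the entire content is digits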
    """
    if not regexp.endswith('$'):
        regexp += "$"
    return bool(re.match(regexp, context[-1].content))


@transformation_factory(collections.abc.Set)
def has_parent(context: List[Node], tag_name_set: AbstractSet[str]) -> bool:
    """Checks whether a node with one of the given tag names appears somewhere
     in the context before the last node in the context."""
    for i in range(2, len(context)):
        if context[-i].tag_name in tag_name_set:
            return True
    return False


#######################################################################
#
# utility functions (private)
#
#######################################################################


def _replace_by(node: Node, child: Node):
    if not child.parser.name:
        child.parser = MockParser(node.parser.name, child.parser.ptype)
        # parser names must not be overwritten, else: child.parser.name = node.parser.name
    node.parser = child.parser
    # node.errors.extend(child.errors)
    node.result = child.result
    if hasattr(child, '_xml_attr'):
        node.attributes.update(child.attributes)


def _reduce_child(node: Node, child: Node):
    # node.errors.extend(child.errors)
    node.result = child.result
    if hasattr(child, '_xml_attr'):
        node.attributes.update(child.attributes)


# def _pick_child(context: List[Node], criteria: CriteriaType):
#     """Returns the first child that meets the criteria."""
#     if isinstance(criteria, int):
#         try:
#             return context[-1].children[criteria]
#         except IndexError:
#             return None
#     elif isinstance(criteria, str):
#         for child in context[-1].children:
#             if child.tag_name == criteria:
#                 return child
#         return None
#     else:  # assume criteria has type ConditionFunc
#         for child in context[-1].children:
#             context.append(child)
#             evaluation = criteria(context)
#             context.pop()
#             if evaluation:
#                 return child
#         return None


#######################################################################
#
# rearranging transformations
#
# - tree may be rearranged (e.g. flattened)
# - nodes that are not leaves may be dropped
# - order is preserved
# - leaf content is preserved (though not necessarily the leaves
#   themselves)
#
#######################################################################


# @transformation_factory(int, str, Callable)
# def replace_by_child(context: List[Node], criteria: CriteriaType=is_single_child):
#     """
#     Replaces a node by the first of its immediate descendants
#     that meets the `criteria`. The criteria can either be the
#     index of the child (counting from zero), or the tag name or
#     a boolean-valued function on the context of the child.
#     If no child matching the criteria is found, the node will
#     not be replaced.
#     With the default value for `criteria` the semantics is the same
#     as that of `replace_by_single_child`.
#     """
#     child = _pick_child(context, criteria)
#     if child:
#         _replace_by(context[-1], child)
#
#
# @transformation_factory(int, str, Callable)
# def content_from_child(context: List[Node], criteria: CriteriaType = is_single_child):
#     """
#     Reduces a node, by transferring the result of the first of its
#     immediate descendants that meets the `criteria` to this node,
#     but keeping this node's parser entry. The criteria can either
#     be the index of the child (counting from zero), or the tag
#     name or a boolean-valued function on the context of the child.
#     If no child matching the criteria is found, the node will
#     not be replaced.
#     With the default value for `criteria` this has the same semantics
#     as `content_from_single_child`.
#     """
#     child = _pick_child(context, criteria)
#     if child:
#         _reduce_child(context[-1], child)


def replace_by_single_child(context: List[Node]):
    """
    Removes a single branch node, replacing it by its immediate descendant.
    Replacement only takes place if the last node in the context has
    exactly one child.
    """
    node = context[-1]
    if len(node.children) == 1:
        _replace_by(node, node.children[0])


def reduce_single_child(context: List[Node]):
    """
    Reduces a single branch node by transferring the result of its
    immediate descendant to this node, but keeping this node's parser entry.
    Reduction only takes place if the last node in the context has
    exactly one child.
    """
    node = context[-1]
    if len(node.children) == 1:
        _reduce_child(node, node.children[0])


@transformation_factory(collections.abc.Callable)
def replace_or_reduce(context: List[Node], condition: Callable=is_named):
    """
    Replaces the node by its single child if the given condition is met;
    otherwise (i.e. if the child is anonymous) the child is reduced into
    the node.
    """
    node = context[-1]
    if len(node.children) == 1:
        child = node.children[0]
        if condition(context):
            _replace_by(node, child)
        else:
            _reduce_child(node, child)


@transformation_factory
def replace_parser(context: List[Node], name: str):
    """
    Replaces the parser of a Node with a mock parser with the given
    name.

    Parameters:
        context: the context where the parser shall be replaced
        name: "NAME:PTYPE" of the surrogate. The ptype is optional
    """
    node = context[-1]
    name, ptype = (name.split(':') + [''])[:2]
    node.parser = MockParser(name, ':' + ptype)
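
# Usage sketch (illustrative names): ``replace_parser('word')`` renames matching
# nodes to "word"; ``replace_parser('word:Token')`` additionally sets the ptype
# of the surrogate MockParser to ':Token'.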


@transformation_factory(collections.abc.Callable)
def flatten(context: List[Node], condition: Callable=is_anonymous, recursive: bool=True):
    """
    Flattens all children that fulfill the given ``condition``
    (default: all unnamed children). Flattening means that wherever a
    node has child nodes, the child nodes are inserted in place of the
    node.

    If the parameter ``recursive`` is ``True``, the same will recursively be
    done with the child-nodes first. In other words, all leaves of
    this node and its child nodes are collected in-order as direct
    children of this node.

    Applying flatten recursively will result in these kinds of
    structural transformations::

        (1 (+ 2) (+ 3))    ->   (1 + 2 + 3)
        (1 (+ (2 + (3))))  ->   (1 + 2 + 3)
    """
    node = context[-1]
    if node.children:
        new_result = []     # type: List[Node]
        context.append(ZOMBIE_NODE)
        for child in node.children:
            context[-1] = child
            if child.children and condition(context):
                if recursive:
                    flatten(context, condition, recursive)
                new_result.extend(child.children)
            else:
                new_result.append(child)
        context.pop()
        node.result = tuple(new_result)


def collapse(context: List[Node]):
    """
    Collapses all sub-nodes of a node by replacing them with the
    string representation of the node.
    """
    node = context[-1]
    node.result = node.content


# @transformation_factory
# def collect_leaves(context: List[Node], whitespace: str=''):
#     """
#     Collects all leave nodes dropping any intermediary nodes.
#     Optionally adds whitespace between the nodes.
#     """
#     assert context[-1].children
#     node = context[-1]
#     leaves_iterator = node.select(lambda nd: not nd.children, include_root=False)
#     if whitespace:
#         mock_ws_parser = MockParser('', WHITESPACE_PTYPE)
#         result = []
#         for leave in leaves_iterator:
#             result.append(leave)
#             result.append(Node(mock_ws_parser, whitespace, leafhint=True))
#         result.pop()
#         node.result = tuple(result)
#     else:
#         node.result = (nd for nd in leaves_iterator)


@transformation_factory(tuple)
def merge_children(context: List[Node], tag_names: Tuple[str]):
    """
    Joins adjacent children that have one of the given tag-names into a
    single child node with a mock-parser named after the first tag-name
    in the list.
    """
    node = context[-1]
    result = []
    name, ptype = ('', tag_names[0]) if tag_names[0][:1] == ':' else (tag_names[0], '')
    if node.children:
        i = 0
        L = len(node.children)
        while i < L:
            while i < L and not node.children[i].tag_name in tag_names:
                result.append(node.children[i])
                i += 1
            k = i + 1
            while (k < L and node.children[k].tag_name in tag_names
                   and bool(node.children[i].children) == bool(node.children[k].children)):
                k += 1
            if i < L:
                result.append(Node(MockParser(name, ptype),
                                   reduce(lambda a, b: a + b,
                                          (nd.children for nd in node.children[i:k]))))
            i = k
        node.result = tuple(result)
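
# Usage sketch (illustrative tag names): a table entry such as
#     'declaration': merge_children('part', 'sep')
# fuses each run of adjacent 'part' and 'sep' children into a single 'part' node
# that contains their concatenated child nodes.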


@transformation_factory(collections.abc.Callable)
def replace_content(context: List[Node], func: Callable):  # Callable[[Node], ResultType]
    """Replaces the content of the node. ``func`` takes the node's result
    as an argument and returns the mapped result.
    """
    node = context[-1]
    node.result = func(node.result)


@transformation_factory  # (str)
def replace_content_by(context: List[Node], content: str):  # Callable[[Node], ResultType]
    """Replaces the content of the node with the given text content.
    """
    node = context[-1]
    node.result = content


#######################################################################
#
# destructive transformations:
#
# - leaves may be dropped (e.g. if deemed irrelevant)
# - errors of dropped leaves will be lost
# - no promise that order will be preserved
#
#######################################################################


@transformation_factory(collections.abc.Callable)
def lstrip(context: List[Node], condition: Callable = is_expendable):
    """Recursively removes all leading child-nodes that fulfill a given condition."""
    node = context[-1]
    i = 1
    while i > 0 and node.children:
        lstrip(context + [node.children[0]], condition)
        i, L = 0, len(node.children)
        while i < L and condition(context + [node.children[i]]):
            i += 1
        if i > 0:
            node.result = node.children[i:]


@transformation_factory(collections.abc.Callable)
def rstrip(context: List[Node], condition: Callable = is_expendable):
    """Recursively removes all leading nodes that fulfill a given condition."""
    node = context[-1]
    i, L = 0, len(node.children)
    while i < L and node.children:
        rstrip(context + [node.children[-1]], condition)
        L = len(node.children)
        i = L
        while i > 0 and condition(context + [node.children[i-1]]):
            i -= 1
        if i < L:
            node.result = node.children[:i]


@transformation_factory(collections.abc.Callable)
def strip(context: List[Node], condition: Callable = is_expendable):
    """Removes leading and trailing child-nodes that fulfill a given condition."""
    lstrip(context, condition)
    rstrip(context, condition)


@transformation_factory  # (slice)
def keep_children(context: List[Node], section: slice = slice(None)):
    """Keeps only child-nodes which fall into a slice of the result field."""
    node = context[-1]
    if node.children:
        node.result = node.children[section]


@transformation_factory(collections.abc.Callable)
def keep_children_if(context: List[Node], condition: Callable):
    """Keeps only those children for which `condition()` returns `True`."""
    node = context[-1]
    if node.children:
        node.result = tuple(c for c in node.children if condition(context + [c]))


@transformation_factory(collections.abc.Set)
def keep_tokens(context: List[Node], tokens: AbstractSet[str]=frozenset()):
    """Keeps only those children which are tokens from a particular set,
    removing all other children. If ``tokens`` is the empty set, any
    token is kept."""
    keep_children_if(context, partial(is_token, tokens=tokens))


@transformation_factory(collections.abc.Set)
def keep_nodes(context: List[Node], tag_names: AbstractSet[str]):
    """Keeps only children with one of the given tag names."""
    keep_children_if(context, partial(is_one_of, tag_name_set=tag_names))


@transformation_factory
def keep_content(context: List[Node], regexp: str):
    """Keeps only children whose content matches the regular expression."""
    keep_children_if(context, partial(has_content, regexp=regexp))


@transformation_factory(collections.abc.Callable)
def remove_children_if(context: List[Node], condition: Callable):
    """Removes all children for which `condition()` returns `True`."""
    node = context[-1]
    if node.children:
        node.result = tuple(c for c in node.children if not condition(context + [c]))

# @transformation_factory(Callable)
# def remove_children(context: List[Node],
#                     condition: Callable = TRUE_CONDITION,
#                     section: slice = slice(None)):
#     """Removes all nodes from a slice of the result field if the function
#     `condition(child_node)` evaluates to `True`."""
#     node = context[-1]
#     if node.children:
#         c = node.children
#         N = len(c)
#         rng = range(*section.indices(N))
#         node.result = tuple(c[i] for i in range(N)
#                             if i not in rng or not condition(context + [c[i]]))
#         # selection = []
#         # for i in range(N):
#         #     context.append(c[i])
#         #     if not i in rng or not condition(context):
#         #         selection.append(c[i])
#         #     context.pop()
#         # if len(selection) != c:
#         #     node.result = tuple(selection)


remove_whitespace = remove_children_if(is_whitespace)
# partial(remove_children_if, condition=is_whitespace)
remove_empty = remove_children_if(is_empty)
remove_anonymous_empty = remove_children_if(lambda ctx: is_empty(ctx) and is_anonymous(ctx))
remove_expendables = remove_children_if(is_expendable)
# partial(remove_children_if, condition=is_expendable)
remove_anonymous_expendables = remove_children_if(lambda ctx: is_anonymous(ctx)
                                                  and is_expendable(ctx))
remove_anonymous_tokens = remove_children_if(lambda ctx: is_token(ctx) and is_anonymous(ctx))
# remove_first = apply_if(keep_children(slice(1, None)), lambda ctx: len(ctx[-1].children) > 1)
# remove_last = apply_if(keep_children(slice(None, -1)), lambda ctx: len(ctx[-1].children) > 1)
# remove_brackets = apply_if(keep_children(slice(1, -1)), lambda ctx: len(ctx[-1].children) >= 2)
remove_infix_operator = keep_children(slice(0, None, 2))
remove_single_child = apply_if(keep_children(slice(0)), lambda ctx: len(ctx[-1].children) == 1)


def remove_first(context: List[Node]):
    """Removes the first non-whitespace child."""
    node = context[-1]
    if node.children:
        for i, child in enumerate(node.children):
            if child.parser.ptype != WHITESPACE_PTYPE:
                break
        else:
            return
        node.result = node.children[:i] + node.children[i+1:]


def remove_last(context: List[Node]):
    """Removes the last non-whitespace child."""
    node = context[-1]
    if node.children:
        for i, child in enumerate(reversed(node.children)):
            if child.parser.ptype != WHITESPACE_PTYPE:
                break
        else:
            return
        i = len(node.children) - i - 1
        node.result = node.children[:i] + node.children[i+1:]


def remove_brackets(context: List[Node]):
    """Removes the first and the last non-whitespace child."""
    remove_first(context)
    remove_last(context)


@transformation_factory(collections.abc.Set)
def remove_tokens(context: List[Node], tokens: AbstractSet[str]=frozenset()):
    """Removes any among a particular set of tokens from the immediate
    descendants of a node. If ``tokens`` is the empty set, all tokens
    are removed."""
    remove_children_if(context, partial(is_token, tokens=tokens))


@transformation_factory(collections.abc.Set)
def remove_nodes(context: List[Node], tag_names: AbstractSet[str]):
    """Removes children with one of the given tag names."""
    remove_children_if(context, partial(is_one_of, tag_name_set=tag_names))


@transformation_factory
def remove_content(context: List[Node], regexp: str):
    """Removes children whose content matches the regular expression."""
    remove_children_if(context, partial(has_content, regexp=regexp))


########################################################################
#
# AST semantic validation functions (EXPERIMENTAL!!!)
#
########################################################################


@transformation_factory(collections.abc.Callable)
def error_on(context: List[Node], condition: Callable, error_msg: str = ''):
    """
    Checks for `condition`; adds an error message if the condition is not met.
    """
    node = context[-1]
    if not condition(context):
        if error_msg:
            node.add_error(error_msg % node.tag_name if error_msg.find("%s") >= 0 else error_msg)
        else:
            cond_name = condition.__name__ if hasattr(condition, '__name__') \
                        else condition.__class__.__name__ if hasattr(condition, '__class__') \
                        else '<unknown>'
            node.add_error("transform.error_on: Failed to meet condition " + cond_name)


@transformation_factory(collections.abc.Callable)
def warn_on(context: List[Node], condition: Callable, warning: str = ''):
    """
    Checks for `condition`; adds a warning message if the condition is not met.
    """
    node = context[-1]
    if not condition(context):
        if warning:
            node.add_error(warning % node.tag_name if warning.find("%s") >= 0 else warning,
                           Error.WARNING)
        else:
            cond_name = condition.__name__ if hasattr(condition, '__name__') \
                        else condition.__class__.__name__ if hasattr(condition, '__class__') \
                        else '<unknown>'
            node.add_error("transform.warn_on: Failed to meet condition " + cond_name,
                           Error.WARNING)
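
# Usage sketch (illustrative): validation checks are registered like any other
# transformation, e.g.
#
#     'literal': error_on(has_content(r'[0-9]+'), 'Element "%s" must be numeric'),
#     'unit': warn_on(is_named, 'Element "%s" is anonymous')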


assert_has_children = error_on(lambda ctx: ctx[-1].children, 'Element "%s" has no children')


@transformation_factory
def assert_content(context: List[Node], regexp: str):
    node = context[-1]
    if not has_content(context, regexp):
        context[0].new_error(node, 'Element "%s" violates %s on %s' %
                             (node.parser.name, str(regexp), node.content))


@transformation_factory(collections.abc.Set)
def require(context: List[Node], child_tags: AbstractSet[str]):
    node = context[-1]
    for child in node.children:
        if child.tag_name not in child_tags:
            context[0].new_error(node, 'Element "%s" is not allowed inside "%s".' %
                                 (child.parser.name, node.parser.name))


@transformation_factory(collections.abc.Set)
def forbid(context: List[Node], child_tags: AbstractSet[str]):
    node = context[-1]
    for child in node.children:
        if child.tag_name in child_tags:
            context[0].new_error(node, 'Element "%s" cannot be nested inside "%s".' %
                                 (child.parser.name, node.parser.name))