transform.py 36.3 KB
Newer Older
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
# transform.py - transformation functions for converting the
#                concrete into the abstract syntax tree
#
# Copyright 2016  by Eckhart Arnold (arnold@badw.de)
#                 Bavarian Academy of Sciences and Humanities (badw.de)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.  See the License for the specific language governing
# permissions and limitations under the License.
18
19


20
21
22
"""
Module ``transform`` contains the functions for transforming the
concrete syntax tree (CST) into an abstract syntax tree (AST).
23

24
25
26
As these functions are very generic, they can in principle be
used for any kind of tree transformations, not necessarily only
for CST -> AST transformations.
27
28
"""

29

30
import collections.abc
31
import inspect
Eckhart Arnold's avatar
Eckhart Arnold committed
32
import fnmatch
33
34
from functools import partial, reduce, singledispatch

35
from DHParser.error import Error
36
from DHParser.syntaxtree import Node, WHITESPACE_PTYPE, TOKEN_PTYPE, ParserBase, MockParser, \
37
    ZOMBIE_NODE, parse_sxpr, flatten_sxpr
38
from DHParser.toolkit import issubtype, isgenerictype, expand_table, smart_list, re, typing
39
from typing import AbstractSet, Any, ByteString, Callable, cast, Container, Dict, \
40
    Tuple, List, Sequence, Union, Text, Generic
41

42
43
# Explicit public API of this module (PEP 8): everything importable via
# ``from DHParser.transform import *``.
__all__ = ('TransformationDict',
           'TransformationProc',
           'TransformationFunc',
           'ConditionFunc',
           'KeyFunc',
           'transformation_factory',
           'key_parser_name',
           'key_tag_name',
           'traverse',
           'is_named',
           'replace_by_single_child',
           'reduce_single_child',
           'replace_or_reduce',
           'replace_parser',
           'collapse',
           'collapse_if',
           # 'merge_children',
           'replace_content',
           'replace_content_by',
           'normalize_whitespace',
           'move_whitespace',
           'apply_if',
           'apply_unless',
           'traverse_locally',
           'is_anonymous',
           'is_whitespace',
           'is_empty',
           'is_expendable',
           'is_token',
           'is_one_of',
           'not_one_of',
           'matches_re',
           'has_content',
           'has_parent',
           'lstrip',
           'rstrip',
           'strip',
           'keep_children',
           'keep_children_if',
           'keep_tokens',
           'keep_nodes',
           'keep_content',
           'remove_children_if',
           'remove_nodes',
           'remove_content',
           'remove_first',
           'remove_last',
           'remove_whitespace',
           'remove_empty',
           'remove_anonymous_empty',
           'remove_anonymous_expendables',
           'remove_anonymous_tokens',
           'remove_expendables',
           'remove_brackets',
           'remove_infix_operator',
           'remove_single_child',
           'remove_tokens',
           'flatten',
           'forbid',
           'require',
           'assert_content',
           'error_on',
           'warn_on',
           'assert_has_children',
           'peek')
107
108


109
# Type aliases for the transformation machinery.
# A "context" is the list of nodes from the root down to (and including)
# the node currently being processed; transformation procedures mutate
# the last node of that list in place.
TransformationProc = Callable[[List[Node]], None]
TransformationDict = Dict[str, Sequence[Callable]]
TransformationFunc = Union[Callable[[Node], Any], partial]
ProcessingTableType = Dict[str, Union[Sequence[Callable], TransformationDict]]
ConditionFunc = Callable  # Callable[[List[Node]], bool]
KeyFunc = Callable[[Node], str]
# Criteria for picking a child node: an index, a tag name, or a
# boolean-valued function on the child's context.
CriteriaType = Union[int, str, Callable]
116
117


118
def transformation_factory(t1=None, t2=None, t3=None, t4=None, t5=None):
    """
    Creates factory functions from transformation-functions that
    dispatch on the first parameter after the context parameter.

    Decorating a transformation-function that has more than merely the
    ``context``-parameter with ``transformation_factory`` creates a
    function with the same name, which returns a partial-function that
    takes just the context-parameter.

    Additionally, there is some syntactic sugar for
    transformation-functions that receive a collection as their second
    parameter and do not have any further parameters. In this case a
    list of parameters passed to the factory function will be converted
    into a collection.

    Main benefit is readability of processing tables.

    Usage::

        @transformation_factory(AbstractSet[str])
        def remove_tokens(context, tokens):
            ...

    or, alternatively::

        @transformation_factory
        def remove_tokens(context, tokens: AbstractSet[str]):
            ...

    Example::

        trans_table = { 'expression': remove_tokens('+', '-') }

    instead of::

        trans_table = { 'expression': partial(remove_tokens, tokens={'+', '-'}) }

    Parameters:
        t1:  type of the second argument of the transformation function,
            only necessary if the transformation functions' parameter list
            does not have type annotations.
        t2, t3, t4, t5: further types the factory shall dispatch on
            (optional; evaluation stops at the first falsy value).
    """

    def type_guard(t):
        """Raises an error if type `t` is a generic type or could be mistaken
        for the type of the canonical first parameter "List[Node] of
        transformation functions. Returns `t`."""
        # if isinstance(t, GenericMeta):
        #     raise TypeError("Generic Type %s not permitted\n in transformation_factory "
        #                     "decorator. Use the equivalent non-generic type instead!"
        #                     % str(t))
        if isgenerictype(t):
            raise TypeError("Generic Type %s not permitted\n in transformation_factory "
                            "decorator. Use the equivalent non-generic type instead!"
                            % str(t))
        if issubtype(List[Node], t):
            raise TypeError("Sequence type %s not permitted\nin transformation_factory "
                            "decorator, because it could be mistaken for a base class "
                            "of List[Node]\nwhich is the type of the canonical first "
                            "argument of transformation functions. Try 'tuple' instead!"
                            % str(t))
        return t

    def decorator(f):
        # Wraps `f` in a singledispatch-based factory.  Calling the
        # resulting function with a value of a registered type returns a
        # partial that only expects the context parameter.
        nonlocal t1
        sig = inspect.signature(f)
        params = list(sig.parameters.values())[1:]
        if len(params) == 0:
            return f  # '@transformer' not needed w/o free parameters
        assert t1 or params[0].annotation != params[0].empty, \
            "No type information on second parameter found! Please, use type " \
            "annotation or provide the type information via transformer-decorator."
        f = singledispatch(f)
        p1type = params[0].annotation
        if t1 is None:
            t1 = type_guard(p1type)
        elif issubtype(p1type, type_guard(t1)):
            try:
                # Syntactic sugar: if the single free parameter is a
                # (non-string) container, allow its elements to be passed
                # as individual arguments, e.g. remove_tokens('+', '-').
                if len(params) == 1 and issubtype(p1type, Container) \
                        and not (issubtype(p1type, Text) or issubtype(p1type, ByteString)):
                    def gen_special(*args):
                        c = set(args) if issubtype(p1type, AbstractSet) else \
                            tuple(args) if issubtype(p1type, Sequence) else args
                        d = {params[0].name: c}
                        return partial(f, **d)
                    f.register(type_guard(p1type.__args__[0]), gen_special)
            except AttributeError:
                pass  # Union Type does not allow subclassing, but is not needed here
        else:
            raise TypeError("Annotated type %s is not a subclass of decorated type %s !"
                            % (str(p1type), str(t1)))

        def gen_partial(*args, **kwargs):
            # Bind positional args to the free parameters by name, so the
            # returned partial only expects the context argument.
            d = {p.name: arg for p, arg in zip(params, args)}
            d.update(kwargs)
            return partial(f, **d)

        for t in (t1, t2, t3, t4, t5):
            if t:
                f.register(type_guard(t), gen_partial)
            else:
                break
        return f

    if isinstance(t1, type(lambda: 1)):
        # Provide for the case that transformation_factory has been
        # written as plain decorator and not as a function call that
        # returns the decorator proper.
        func = t1
        t1 = None
        return decorator(func)
    else:
        return decorator


234
def key_parser_name(node: Node) -> str:
    """Key function for :func:`traverse`: keys a node by the name of the
    parser that generated it."""
    return node.parser.name


238
def key_tag_name(node: Node) -> str:
    """Key function for :func:`traverse` (the default): keys a node by its
    tag name."""
    return node.tag_name


242
def traverse(root_node: Node,
             processing_table: ProcessingTableType,
             key_func: KeyFunc=key_tag_name) -> None:
    """
    Traverses the syntax tree starting with the given ``node`` depth
    first and applies the sequences of callback-functions registered
    in the ``processing_table``-dictionary.

    The most important use case is the transformation of a concrete
    syntax tree into an abstract tree (AST). But it is also imaginable
    to employ tree-traversal for the semantic analysis of the AST.

    In order to assign sequences of callback-functions to nodes, a
    dictionary ("processing table") is used. The keys usually represent
    tag names, but any other key function is possible. There exist
    three special keys:

    - '+': always called (before any other processing function)
    - '*': called for those nodes for which no (other) processing
      function appears in the table
    - '~': always called (after any other processing function)

    Args:
        root_node (Node): The root-node of the syntax tree to be traversed
        processing_table (dict): node key -> sequence of functions that
            will be applied to matching nodes in order. This dictionary
            is interpreted as a ``compact_table``. See
            :func:`expand_table` or :func:`EBNFCompiler.EBNFTransTable`
        key_func (function): A mapping key_func(node) -> keystr. The default
            key_func yields node.parser.name.

    Example::

        table = { "term": [replace_by_single_child, flatten],
                  "factor, flowmarker, retrieveop": replace_by_single_child }
        traverse(node, table)

    """

    # Is this optimization really needed?
    if '__cache__' in processing_table:
        # assume that processing table has already been expanded
        table = processing_table               # type: ProcessingTableType
        cache = cast(TransformationDict, processing_table['__cache__'])  # type: TransformationDict
    else:
        # normalize processing_table entries by turning single values
        # into lists with a single value
        table = {name: cast(Sequence[Callable], smart_list(call))
                 for name, call in list(processing_table.items())}
        table = expand_table(table)
        cache = cast(TransformationDict,
                     table.setdefault('__cache__', cast(TransformationDict, dict())))
        # change processing table in place, so its already expanded and cache filled next time
        processing_table.clear()
        processing_table.update(table)

    def traverse_recursive(context):
        # Post-order traversal: children are processed before the node
        # itself.  The context list is mutated in place (append/pop) to
        # avoid allocating a new list per node.
        nonlocal cache
        node = context[-1]
        if node.children:
            context.append(ZOMBIE_NODE)
            for child in node.children:
                context[-1] = child
                traverse_recursive(context)  # depth first
            context.pop()

        key = key_func(node)
        try:
            sequence = cache[key]
        except KeyError:
            sequence = table.get('+', []) \
                + table.get(key, table.get('*', [])) \
                + table.get('~', [])
            # '+' always called (before any other processing function)
            # '*' called for those nodes for which no (other) processing function
            #     appears in the table
            # '~' always called (after any other processing function)
            cache[key] = sequence

        for call in sequence:
            call(context)

    traverse_recursive([root_node])
    # assert processing_table['__cache__']

327

328
#######################################################################
329
#
330
331
# meta transformations, i.e. transformations that call other
# transformations
332
#
333
#######################################################################
334
335


eckhart's avatar
eckhart committed
336
@transformation_factory(dict)
def traverse_locally(context: List[Node],
                     processing_table: Dict,            # actually: ProcessingTableType
                     key_func: Callable=key_tag_name):  # actually: KeyFunc
    """
    Transforms the syntax tree starting from the last node in the context
    according to the given processing table. The purpose of this function is
    to apply certain transformations locally, i.e. only for those nodes that
    have the last node in the context as their parent node.
    """
    # Delegates to :func:`traverse` with the current node as sub-tree root.
    traverse(context[-1], processing_table, key_func)


349
@transformation_factory(collections.abc.Callable)
def apply_if(context: List[Node], transformation: Callable, condition: Callable):
    """
    Applies the given `transformation` to the context, but only if
    `condition(context)` evaluates to a truthy value.
    """
    if not condition(context):
        return
    transformation(context)


358
@transformation_factory(collections.abc.Callable)
def apply_unless(context: List[Node], transformation: Callable, condition: Callable):
    """
    Applies the given `transformation` to the context, but only if
    `condition(context)` evaluates to a falsy value.
    """
    if condition(context):
        return
    transformation(context)


367
368
369
370
371
372
373
374
375
376
377
378
379
380
#######################################################################
#
# conditionals that determine whether the context (or the last node in
# the context for that matter) fulfill a specific condition.
# ---------------------------------------------------------------------
#
# The context of a node is understood as a list of all parent nodes
# leading up to and including the node itself. If represented as list,
# the last element of the list is the node itself.
#
#######################################################################


def is_single_child(context: List[Node]) -> bool:
    """Returns ``True`` if the current node does not have any siblings.

    Requires a context of length >= 2 (i.e. the node must have a parent).
    """
    return len(context[-2].children) == 1


def is_named(context: List[Node]) -> bool:
    """Returns ``True`` if the current node's parser is a named parser,
    i.e. its parser name is non-empty."""
    return bool(context[-1].parser.name)


def is_anonymous(context: List[Node]) -> bool:
    """Returns ``True`` if the current node's parser is an anonymous parser,
    i.e. its parser name is empty.  Logical complement of :func:`is_named`."""
    return not context[-1].parser.name


def is_whitespace(context: List[Node]) -> bool:
    """Returns ``True`` if the current node is a whitespace node, i.e.
    a node generated by a parser of type ``WHITESPACE_PTYPE``.
    (This is a pure predicate; it does not remove anything.)"""
    return context[-1].parser.ptype == WHITESPACE_PTYPE


def is_empty(context: List[Node]) -> bool:
    """Returns ``True`` if the current node's result (content or children)
    is empty."""
    return not context[-1].result


def is_expendable(context: List[Node]) -> bool:
    """Returns ``True`` if the current node either is a node containing
    whitespace or an empty node."""
    return is_empty(context) or is_whitespace(context)


412
@transformation_factory(collections.abc.Set)
def is_token(context: List[Node], tokens: AbstractSet[str] = frozenset()) -> bool:
    """
    Checks whether the last node in the context has `ptype == TOKEN_PTYPE`
    and it's content matches one of the given tokens. Leading and trailing
    whitespace-tokens will be ignored. In case an empty set of tokens is passed,
    any token is a match.
    """
    node = context[-1]
    if node.parser.ptype != TOKEN_PTYPE:
        return False
    # an empty token set matches any token
    return not tokens or node.content in tokens
422
423


424
@transformation_factory(collections.abc.Set)
def is_one_of(context: List[Node], tag_name_set: AbstractSet[str]) -> bool:
    """Returns true, if the node's tag_name is one of the given tag names."""
    return context[-1].tag_name in tag_name_set


430
431
432
433
434
435
@transformation_factory(collections.abc.Set)
def not_one_of(context: List[Node], tag_name_set: AbstractSet[str]) -> bool:
    """Returns true, if the node's tag_name is not one of the given tag names.
    Logical complement of :func:`is_one_of`."""
    return context[-1].tag_name not in tag_name_set


Eckhart Arnold's avatar
Eckhart Arnold committed
436
437
@transformation_factory(collections.abc.Set)
def matches_re(context: List[Node], patterns: AbstractSet[str]) -> bool:
    """
    Returns true, if the node's tag_name matches one of the regular
    expressions in `patterns`. For example, ':.*' matches all anonymous nodes.
    """
    tag = context[-1].tag_name
    # short-circuits on the first matching pattern, like the explicit loop
    return any(re.match(pattern, tag) for pattern in patterns)


eckhart's avatar
eckhart committed
449
@transformation_factory
def has_content(context: List[Node], regexp: str) -> bool:
    """
    Checks a node's content against a regular expression.

    In contrast to ``re.match`` the regular expression must match the complete
    string and not just the beginning of the string to succeed!
    """
    # anchor the pattern at the end so the whole content must match
    anchored = regexp if regexp.endswith('$') else regexp + '$'
    return re.match(anchored, context[-1].content) is not None


462
@transformation_factory(collections.abc.Set)
def has_parent(context: List[Node], tag_name_set: AbstractSet[str]) -> bool:
    """
    Checks whether a node with one of the given tag names appears somewhere
    in the context before the last node in the context.
    """
    # Examine all ancestors context[-2] .. context[0].  The upper bound must
    # be len(context) + 1 so that the root node context[0] (i.e. index
    # -len(context)) is inspected as well; with range(2, len(context)) the
    # root node would be silently skipped (off-by-one).
    for i in range(2, len(context) + 1):
        if context[-i].tag_name in tag_name_set:
            return True
    return False


#######################################################################
#
# utility functions (private)
#
#######################################################################


def _replace_by(node: Node, child: Node):
    """Replaces `node`'s parser and result with `child`'s, effectively
    substituting the child for the node.  An anonymous child inherits the
    node's parser name (via a MockParser), so named parsers are never
    overwritten by anonymous ones."""
    if not child.parser.name:
        child.parser = MockParser(node.parser.name, child.parser.ptype)
        # parser names must not be overwritten, else: child.parser.name = node.parser.name
    node.parser = child.parser
    node.result = child.result
    if hasattr(child, '_xml_attr'):
        # only copy XML attributes if the child actually carries any
        node.attr.update(child.attr)
489
490


491
def _reduce_child(node: Node, child: Node):
    """Transfers `child`'s result (and XML attributes, if any) to `node`,
    while keeping `node`'s parser entry."""
    node.result = child.result
    if hasattr(child, '_xml_attr'):
        # only copy XML attributes if the child actually carries any
        node.attr.update(child.attr)
495
496


497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
#######################################################################
#
# rearranging transformations
#
# - tree may be rearranged (e.g.flattened)
# - nodes that are not leaves may be dropped
# - order is preserved
# - leave content is preserved (though not necessarily the leaves
#   themselves)
#
#######################################################################


# @transformation_factory(int, str, Callable)
# def replace_by_child(context: List[Node], criteria: CriteriaType=is_single_child):
#     """
#     Replaces a node by the first of its immediate descendants
#     that meets the `criteria`. The criteria can either be the
#     index of the child (counting from zero), or the tag name or
#     a boolean-valued function on the context of the child.
#     If no child matching the criteria is found, the node will
#     not be replaced.
#     With the default value for `criteria` the same semantics is
#     the same that of `replace_by_single_child`.
#     """
#     child = _pick_child(context, criteria)
#     if child:
#         _replace_by(context[-1], child)
#
#
# @transformation_factory(int, str, Callable)
# def content_from_child(context: List[Node], criteria: CriteriaType = is_single_child):
#     """
#     Reduces a node, by transferring the result of the first of its
#     immediate descendants that meets the `criteria` to this node,
#     but keeping this node's parser entry. The criteria can either
#     be the index of the child (counting from zero), or the tag
#     name or a boolean-valued function on the context of the child.
#     If no child matching the criteria is found, the node will
#     not be replaced.
#     With the default value for `criteria` this has the same semantics
#     as `content_from_single_child`.
#     """
#     child = _pick_child(context, criteria)
#     if child:
#         _reduce_child(context[-1], child)
543
544


545
546
def replace_by_single_child(context: List[Node]):
    """
    Removes a single branch node by replacing it with its only immediate
    descendant.  Nothing happens unless the last node in the context has
    exactly one child.
    """
    node = context[-1]
    children = node.children
    if len(children) == 1:
        _replace_by(node, children[0])
554
555


Eckhart Arnold's avatar
Eckhart Arnold committed
556
def reduce_single_child(context: List[Node]):
    """
    Reduces a single branch node by transferring the result of its only
    immediate descendant to this node, while keeping this node's parser
    entry.  Nothing happens unless the last node in the context has
    exactly one child.
    """
    node = context[-1]
    children = node.children
    if len(children) == 1:
        _reduce_child(node, children[0])
566
567


568
@transformation_factory(collections.abc.Callable)
def replace_or_reduce(context: List[Node], condition: Callable=is_named):
    """
    Replaces node by a single child, if condition is met on child,
    otherwise (i.e. if the child is anonymous) reduces the child.
    """
    node = context[-1]
    if len(node.children) != 1:
        return
    only_child = node.children[0]
    if condition(context):
        _replace_by(node, only_child)
    else:
        _reduce_child(node, only_child)
581
582
583


@transformation_factory
def replace_parser(context: List[Node], name: str):
    """
    Replaces the parser of a Node with a mock parser with the given
    name.

    Parameters:
        context: the context where the parser shall be replaced
        name: "NAME:PTYPE" of the surrogate. The ptype is optional
    """
    node = context[-1]
    # split "NAME:PTYPE"; a missing ptype defaults to the empty string,
    # and anything after a second colon is discarded (as before)
    parts = name.split(':')
    parser_name = parts[0]
    ptype = parts[1] if len(parts) > 1 else ''
    node.parser = MockParser(parser_name, ':' + ptype)
596
597


598
@transformation_factory(collections.abc.Callable)
def flatten(context: List[Node], condition: Callable=is_anonymous, recursive: bool=True):
    """
    Flattens all children, that fulfil the given ``condition``
    (default: all unnamed children). Flattening means that wherever a
    node has child nodes, the child nodes are inserted in place of the
    node.

    If the parameter ``recursive`` is ``True`` the same will recursively be
    done with the child-nodes, first. In other words, all leaves of
    this node and its child nodes are collected in-order as direct
    children of this node.

    Applying flatten recursively will result in these kinds of
    structural transformation::

        (1 (+ 2) (+ 3))    ->   (1 + 2 + 3)
        (1 (+ (2 + (3))))  ->   (1 + 2 + 3)
    """

    node = context[-1]
    if node.children:
        new_result = []     # type: List[Node]
        # Extend the context in place while visiting children; the slot is
        # overwritten per child and popped afterwards (same protocol as in
        # traverse()).
        context.append(ZOMBIE_NODE)
        for child in node.children:
            context[-1] = child
            if child.children and condition(context):
                if recursive:
                    # flatten grandchildren first, so extending below
                    # splices in an already-flattened sequence
                    flatten(context, condition, recursive)
                new_result.extend(child.children)
            else:
                new_result.append(child)
        context.pop()
        node.result = tuple(new_result)


634
def collapse(context: List[Node]):
    """
    Collapses all sub-nodes of a node by replacing them with the
    string representation of the node. USE WITH CARE!
    """
    node = context[-1]
    # node.content concatenates the string content of the whole sub-tree
    node.result = node.content
641
642


643
644
@transformation_factory(collections.abc.Callable)
def collapse_if(context: List[Node], condition: Callable, target_tag: ParserBase):
    """
    (Recursively) merges the content of all adjacent child nodes that
    fulfil the given `condition` into a single leaf node with parser
    `target_tag`. Nodes that do not fulfil the condition will be preserved.

    >>> sxpr = '(place (abbreviation "p.") (page "26") (superscript "b") (mark ",") (page "18"))'
    >>> tree = parse_sxpr(sxpr)
    >>> text = MockParser('text')
    >>> collapse_if([tree], not_one_of({'superscript', 'subscript'}), text)
    >>> print(flatten_sxpr(tree.as_sxpr()))
    (place (text "p.26") (superscript "b") (text ",18"))

    See `test_transform.TestComplexTransformations` for examples.
    """
    node = context[-1]
    package = []   # run of adjacent condition-fulfilling nodes collected so far
    result = []    # new child sequence being built

    def close_package():
        """Flushes the current `package` into `result` as a single leaf node
        with parser `target_tag`, concatenating the packaged content."""
        nonlocal package
        if package:
            s = "".join(nd.content for nd in package)
            result.append(Node(target_tag, s))
            package = []

    for child in node.children:
        if condition([child]):
            if child.children:
                # collapse the branch node's own children first, then merge
                # its condition-fulfilling children into the current run
                collapse_if([child], condition, target_tag)
                for c in child.children:
                    if condition([c]):
                        package.append(c)
                    else:
                        close_package()
                        result.append(c)
                close_package()
            else:
                package.append(child)
        else:
            # a non-matching child interrupts the run of adjacent nodes
            close_package()
            result.append(child)
    close_package()
    node.result = tuple(result)


691
@transformation_factory(collections.abc.Callable)
def replace_content(context: List[Node], func: Callable):  # Callable[[Node], ResultType]
    """
    Replaces the content of the node. ``func`` takes the node's result
    as an argument and returns the mapped result.
    """
    node = context[-1]
    node.result = func(node.result)


701
@transformation_factory  # (str)
def replace_content_by(context: List[Node], content: str):  # Callable[[Node], ResultType]
    """
    Replaces the content of the node with the given text content.
    """
    node = context[-1]
    node.result = content


710
def normalize_whitespace(context):
    """
    Normalizes Whitespace inside a leaf node, i.e. any sequence of
    whitespaces, tabs and linefeeds will be replaced by a single
    whitespace. Empty (i.e. zero-length) Whitespace remains empty,
    however.

    Must only be applied to leaf nodes (asserted below).
    """
    node = context[-1]
    assert not node.children
    if is_whitespace(context):
        # pure whitespace nodes collapse to a single blank (unless empty)
        if node.result:
            node.result = ' '
    else:
        # raw string: '\s+' would be an invalid escape sequence
        # (DeprecationWarning, SyntaxError in future Python versions)
        node.result = re.sub(r'\s+', ' ', node.result)


def move_whitespace(context):
    """
    Moves adjacent whitespace nodes to the parent node.

    If the first and/or last child of the current node is a whitespace
    node, it is detached from the node and re-inserted in the parent's
    child sequence immediately before/after the node.  Whitespace that
    would become adjacent to an existing whitespace sibling in the parent
    is merged into that sibling instead.
    """
    node = context[-1]
    if len(context) <= 1 or not node.children:
        # nothing to do for the root node or for leaf nodes
        return
    parent = context[-2]
    children = node.children
    # detach leading whitespace, if any
    if children[0].parser.ptype == WHITESPACE_PTYPE:
        before = (children[0],)
        children = children[1:]
    else:
        before = ()
    # detach trailing whitespace, if any
    if children and children[-1].parser.ptype == WHITESPACE_PTYPE:
        after = (children[-1],)
        children = children[:-1]
    else:
        after = tuple()

    if before or after:
        node.result = children
        # locate `node` within the parent's children
        # NOTE(review): relies on `==` identifying the node among its
        # siblings; presumably Node equality behaves like identity here or
        # no equal sibling precedes it — confirm against Node.__eq__.
        for i, child in enumerate(parent.children):
            if child == node:
                break

        # merge adjacent whitespace
        prevN = parent.children[i-1] if i > 0 else None
        nextN = parent.children[i+1] if i < len(parent.children)-1 else None
        if before and prevN and prevN.parser.ptype == WHITESPACE_PTYPE:
            prevN.result = prevN.result + before[0].result
            before = ()
        if after and nextN and nextN.parser.ptype == WHITESPACE_PTYPE:
            nextN.result = after[0].result + nextN.result
            after = ()

        # splice the (remaining) whitespace around the node in the parent
        parent.result = parent.children[:i] + before + (node,) + after + parent.children[i+1:]


765
766
767
768
769
#######################################################################
#
# destructive transformations:
#
# - leaves may be dropped (e.g. if deemed irrelevant)
770
# - errors of dropped leaves may be be lost
771
772
773
# - no promise that order will be preserved
#
#######################################################################
774
775


776
@transformation_factory(collections.abc.Callable)
def lstrip(context: List[Node], condition: Callable = is_expendable):
    """Recursively removes all leading child-nodes that fulfill a given condition."""
    node = context[-1]
    removed_some = True
    while removed_some and node.children:
        # strip the first child recursively first; this may empty it so
        # that it, too, fulfills `condition`
        lstrip(context + [node.children[0]], condition)
        count, total = 0, len(node.children)
        while count < total and condition(context + [node.children[count]]):
            count += 1
        removed_some = count > 0
        if removed_some:
            node.result = node.children[count:]


790
@transformation_factory(collections.abc.Callable)
def rstrip(context: List[Node], condition: Callable = is_expendable):
    """Recursively removes all trailing child-nodes that fulfill a given
    condition.  (Counterpart of ``lstrip``.)"""
    node = context[-1]
    i, L = 0, len(node.children)
    # repeat until a pass removes no trailing children (i == L)
    while i < L and node.children:
        # strip the last child recursively first; this may empty it so
        # that it, too, fulfills `condition`
        rstrip(context + [node.children[-1]], condition)
        L = len(node.children)
        i = L
        # scan backwards over trailing children fulfilling `condition`
        while i > 0 and condition(context + [node.children[i-1]]):
            i -= 1
        if i < L:
            node.result = node.children[:i]


805
@transformation_factory(collections.abc.Callable)
def strip(context: List[Node], condition: Callable = is_expendable):
    """Removes leading and trailing child-nodes that fulfill a given condition."""
    for trim in (lstrip, rstrip):
        trim(context, condition)


812
@transformation_factory  # (slice)
def keep_children(context: List[Node], section: slice = slice(None)):
    """Keeps only child-nodes which fall into a slice of the result field."""
    nd = context[-1]
    if nd.children:
        nd.result = nd.children[section]
818
819


820
@transformation_factory(collections.abc.Callable)
def keep_children_if(context: List[Node], condition: Callable):
    """Keeps only those children for which `condition()` returns `True`.
    (The previous docstring wrongly claimed such children were removed.)"""
    node = context[-1]
    if node.children:
        node.result = tuple(c for c in node.children if condition(context + [c]))


828
@transformation_factory(collections.abc.Set)
def keep_tokens(context: List[Node], tokens: AbstractSet[str]=frozenset()):
    """Keeps only those among the immediate descendants of a node that
    are tokens from the given set. If ``tokens`` is the empty set, all
    tokens are kept (non-token children are removed either way)."""
    keep_children_if(context, partial(is_token, tokens=tokens))


836
@transformation_factory(collections.abc.Set)
def keep_nodes(context: List[Node], tag_names: AbstractSet[str]):
    """Keeps only children whose tag name is in ``tag_names``.
    (The previous docstring wrongly claimed such children were removed.)"""
    keep_children_if(context, partial(is_one_of, tag_name_set=tag_names))


@transformation_factory
def keep_content(context: List[Node], regexp: str):
    """Keeps only children whose string content matches ``regexp``.
    (The previous docstring wrongly claimed such children were removed.)"""
    keep_children_if(context, partial(has_content, regexp=regexp))


848
@transformation_factory(collections.abc.Callable)
def remove_children_if(context: List[Node], condition: Callable):
    """Removes all children for which `condition()` returns `True`."""
    node = context[-1]
    if node.children:
        node.result = tuple(c for c in node.children if not condition(context + [c]))
    # (removed a stray dead `pass` statement that followed the if-block)

eckhart's avatar
eckhart committed
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
# @transformation_factory(Callable)
# def remove_children(context: List[Node],
#                     condition: Callable = TRUE_CONDITION,
#                     section: slice = slice(None)):
#     """Removes all nodes from a slice of the result field if the function
#     `condition(child_node)` evaluates to `True`."""
#     node = context[-1]
#     if node.children:
#         c = node.children
#         N = len(c)
#         rng = range(*section.indices(N))
#         node.result = tuple(c[i] for i in range(N)
#                             if i not in rng or not condition(context + [c[i]]))
#         # selection = []
#         # for i in range(N):
#         #     context.append(c[i])
#         #     if not i in rng or not condition(context):
#         #         selection.append(c[i])
#         #     context.pop()
#         # if len(selection) != c:
#         #     node.result = tuple(selection)
877
878


879
880
# Ready-made transformations built from the generic helpers above.

# Drops all whitespace children.
remove_whitespace = remove_children_if(is_whitespace)
# partial(remove_children_if, condition=is_whitespace)
# Drops all empty children.
remove_empty = remove_children_if(is_empty)
# Drops empty children that stem from anonymous parsers only.
remove_anonymous_empty = remove_children_if(lambda ctx: is_empty(ctx) and is_anonymous(ctx))
# Drops all expendable children.
remove_expendables = remove_children_if(is_expendable)
# partial(remove_children_if, condition=is_expendable)
# Drops expendable children that stem from anonymous parsers only.
remove_anonymous_expendables = remove_children_if(lambda ctx: is_anonymous(ctx)
                                                  and is_expendable(ctx))
# Drops token children that stem from anonymous parsers.
remove_anonymous_tokens = remove_children_if(lambda ctx: is_token(ctx) and is_anonymous(ctx))
# remove_first = apply_if(keep_children(slice(1, None)), lambda ctx: len(ctx[-1].children) > 1)
# remove_last = apply_if(keep_children(slice(None, -1)), lambda ctx: len(ctx[-1].children) > 1)
# remove_brackets = apply_if(keep_children(slice(1, -1)), lambda ctx: len(ctx[-1].children) >= 2)
# Keeps only every second child, i.e. drops the operators between operands.
remove_infix_operator = keep_children(slice(0, None, 2))
# NOTE(review): slice(0) is the *empty* slice, so the sole child is dropped
# entirely, leaving the node childless — confirm this is the intended effect.
remove_single_child = apply_if(keep_children(slice(0)), lambda ctx: len(ctx[-1].children) == 1)
893
894


895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
def remove_first(context: List[Node]):
    """Removes the first non-whitespace child."""
    node = context[-1]
    if not node.children:
        return
    # index of the first child that is not whitespace, or -1 if none exists
    pos = next((k for k, ch in enumerate(node.children)
                if ch.parser.ptype != WHITESPACE_PTYPE), -1)
    if pos >= 0:
        node.result = node.children[:pos] + node.children[pos + 1:]


def remove_last(context: List[Node]):
    """Removes the last non-whitespace child."""
    node = context[-1]
    if not node.children:
        return
    # scan backwards for the last child that is not whitespace
    for pos in range(len(node.children) - 1, -1, -1):
        if node.children[pos].parser.ptype != WHITESPACE_PTYPE:
            node.result = node.children[:pos] + node.children[pos + 1:]
            return


def remove_brackets(context: List[Node]):
    """Removes the first and the last non-whitespace child."""
    for trim in (remove_first, remove_last):
        trim(context)


926
@transformation_factory(collections.abc.Set)
def remove_tokens(context: List[Node], tokens: AbstractSet[str]=frozenset()):
    """Removes any among a particular set of tokens from the immediate
    descendants of a node. If ``tokens`` is the empty set, all tokens
    are removed."""
    remove_children_if(context, lambda ctx: is_token(ctx, tokens=tokens))
932
933


934
@transformation_factory(collections.abc.Set)
def remove_nodes(context: List[Node], tag_names: AbstractSet[str]):
    """Removes children by tag name."""
    remove_children_if(context, lambda ctx: is_one_of(ctx, tag_name_set=tag_names))
938
939
940


@transformation_factory
def remove_content(context: List[Node], regexp: str):
    """Removes children depending on their string value."""
    remove_children_if(context, lambda ctx: has_content(ctx, regexp=regexp))
944
945
946
947


########################################################################
#
# AST semantic validation functions (EXPERIMENTAL!!!)
#
########################################################################

952
@transformation_factory(collections.abc.Callable)
def error_on(context: List[Node], condition: Callable, error_msg: str = ''):
    """
    Checks for `condition`; adds an error message if condition is not met.

    If ``error_msg`` contains a ``%s`` placeholder, the node's tag name
    is substituted into it; otherwise the message is used verbatim.
    Without a message, a generic error naming the condition is added.
    """
    node = context[-1]
    if not condition(context):
        if error_msg:
            # BUGFIX: use `in` instead of `.find("%s") > 0`, which missed
            # a "%s" at position 0 (str.find returns 0 there).
            node.add_error(error_msg % node.tag_name if "%s" in error_msg else error_msg)
        else:
            cond_name = condition.__name__ if hasattr(condition, '__name__') \
                        else condition.__class__.__name__ if hasattr(condition, '__class__') \
                        else '<unknown>'
            node.add_error("transform.error_on: Failed to meet condition " + cond_name)


968
@transformation_factory(collections.abc.Callable)
def warn_on(context: List[Node], condition: Callable, warning: str = ''):
    """
    Checks for `condition`; adds an warning message if condition is not met.

    If ``warning`` contains a ``%s`` placeholder, the node's tag name is
    substituted into it; otherwise the message is used verbatim.
    Without a message, a generic warning naming the condition is added.
    """
    node = context[-1]
    if not condition(context):
        if warning:
            # BUGFIX: use `in` instead of `.find("%s") > 0`, which missed
            # a "%s" at position 0 (str.find returns 0 there).
            node.add_error(warning % node.tag_name if "%s" in warning else warning,
                           Error.WARNING)
        else:
            cond_name = condition.__name__ if hasattr(condition, '__name__') \
                        else condition.__class__.__name__ if hasattr(condition, '__class__') \
                        else '<unknown>'
            node.add_error("transform.warn_on: Failed to meet condition " + cond_name,
                           Error.WARNING)
984
985


986
assert_has_children = error_on(lambda nd: nd.children, 'Element "%s" has no children')
987
988
989


@transformation_factory
def assert_content(context: List[Node], regexp: str):
    """Adds an error to the tree if the node's content does not match ``regexp``."""
    node = context[-1]
    matches = has_content(context, regexp)
    if not matches:
        context[0].new_error(node, 'Element "%s" violates %s on %s' %
                             (node.parser.name, str(regexp), node.content))
995

996

997
@transformation_factory(collections.abc.Set)
def require(context: List[Node], child_tags: AbstractSet[str]):
    """Adds an error for every child whose tag name is not in ``child_tags``."""
    node = context[-1]
    for ch in node.children:
        if ch.tag_name not in child_tags:
            context[0].new_error(node, 'Element "%s" is not allowed inside "%s".' %
                                 (ch.parser.name, node.parser.name))
1004
1005


1006
@transformation_factory(collections.abc.Set)
def forbid(context: List[Node], child_tags: AbstractSet[str]):
    """Adds an error for every child whose tag name is in ``child_tags``."""
    node = context[-1]
    for ch in node.children:
        if ch.tag_name in child_tags:
            context[0].new_error(node, 'Element "%s" cannot be nested inside "%s".' %
                                 (ch.parser.name, node.parser.name))
di68kap's avatar
di68kap committed
1013
1014
1015
1016
1017


def peek(context: List[Node]):
    """For debugging: Prints the last node in the context as S-expression."""
    last = context[-1]
    print(last.as_sxpr())