# transform.py - transformation functions for converting the
#                concrete into the abstract syntax tree
#
# Copyright 2016  by Eckhart Arnold (arnold@badw.de)
#                 Bavarian Academy of Sciences and Humanities (badw.de)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.  See the License for the specific language governing
# permissions and limitations under the License.


"""
Module ``transform`` contains the functions for transforming the
concrete syntax tree (CST) into an abstract syntax tree (AST).

As these functions are very generic, they can in principle be
used for any kind of tree transformations, not necessarily only
for CST -> AST transformations.
"""


import collections.abc
import inspect
from functools import partial, singledispatch

from DHParser.error import Error, ErrorCode
from DHParser.syntaxtree import Node, WHITESPACE_PTYPE, TOKEN_PTYPE, ParserBase, MockParser, \
    ZOMBIE_NODE, RootNode, parse_sxpr, flatten_sxpr
from DHParser.toolkit import issubtype, isgenerictype, expand_table, smart_list, re, typing
from typing import AbstractSet, Any, ByteString, Callable, cast, Container, Dict, \
    Tuple, List, Sequence, Union, Text, Generic

__all__ = ('TransformationDict',
           'TransformationProc',
           'TransformationFunc',
           'ConditionFunc',
           'KeyFunc',
           'transformation_factory',
           'key_parser_name',
           'key_tag_name',
           'traverse',
           'is_named',
           'replace_by_single_child',
           'reduce_single_child',
           'replace_or_reduce',
           'replace_parser',
           'collapse',
           'collapse_if',
           # 'merge_children',
           'replace_content',
           'replace_content_by',
           'normalize_whitespace',
           'move_whitespace',
           'apply_if',
           'apply_unless',
           'traverse_locally',
           'is_anonymous',
           'is_whitespace',
           'is_empty',
           'is_expendable',
           'is_token',
           'is_one_of',
           'not_one_of',
           'matches_re',
           'has_content',
           'has_parent',
           'lstrip',
           'rstrip',
           'strip',
           'keep_children',
           'keep_children_if',
           'keep_tokens',
           'keep_nodes',
           'keep_content',
           'remove_children_if',
           'remove_nodes',
           'remove_content',
           'remove_first',
           'remove_last',
           'remove_whitespace',
           'remove_empty',
           'remove_anonymous_empty',
           'remove_anonymous_expendables',
           'remove_anonymous_tokens',
           'remove_expendables',
           'remove_brackets',
           'remove_infix_operator',
           'remove_single_child',
           'remove_tokens',
           'flatten',
           'forbid',
           'require',
           'assert_content',
           'error_on',
           'assert_has_children',
           'peek')


TransformationProc = Callable[[List[Node]], None]
TransformationDict = Dict[str, Sequence[Callable]]
TransformationFunc = Union[Callable[[Node], Any], partial]
ProcessingTableType = Dict[str, Union[Sequence[Callable], TransformationDict]]
ConditionFunc = Callable  # Callable[[List[Node]], bool]
KeyFunc = Callable[[Node], str]
CriteriaType = Union[int, str, Callable]


def transformation_factory(t1=None, t2=None, t3=None, t4=None, t5=None):
    """
    Creates factory functions from transformation-functions that
    dispatch on the first parameter after the context parameter.

    Decorating a transformation-function that has more than merely the
    ``context``-parameter with ``transformation_factory`` creates a
    function with the same name, which returns a partial-function that
    takes just the context-parameter.

    Additionally, there is some syntactic sugar for
    transformation-functions that receive a collection as their second
    parameter and do not have any further parameters. In this case a
    list of parameters passed to the factory function will be converted
    into a collection.

    The main benefit is the readability of the processing tables.

    Usage::

        @transformation_factory(AbstractSet[str])
        def remove_tokens(context, tokens):
            ...

    or, alternatively::

        @transformation_factory
        def remove_tokens(context, tokens: AbstractSet[str]):
            ...

    Example::

        trans_table = { 'expression': remove_tokens('+', '-') }

    instead of::

        trans_table = { 'expression': partial(remove_tokens, tokens={'+', '-'}) }

    Parameters:
        t1:  type of the second argument of the transformation function,
            only necessary if the transformation functions' parameter list
            does not have type annotations.
    """

    def type_guard(t):
        """Raises an error if type `t` is a generic type or could be mistaken
        for the type of the canonical first parameter "List[Node]" of
        transformation functions. Returns `t`."""
        # if isinstance(t, GenericMeta):
        #     raise TypeError("Generic Type %s not permitted\n in transformation_factory "
        #                     "decorator. Use the equivalent non-generic type instead!"
        #                     % str(t))
        if isgenerictype(t):
            raise TypeError("Generic Type %s not permitted\n in transformation_factory "
                            "decorator. Use the equivalent non-generic type instead!"
                            % str(t))
        if issubtype(List[Node], t):
            raise TypeError("Sequence type %s not permitted\nin transformation_factory "
                            "decorator, because it could be mistaken for a base class "
                            "of List[Node]\nwhich is the type of the canonical first "
                            "argument of transformation functions. Try 'tuple' instead!"
                            % str(t))
        return t

    def decorator(f):
        nonlocal t1
        sig = inspect.signature(f)
        params = list(sig.parameters.values())[1:]
        if len(params) == 0:
            return f  # '@transformer' not needed w/o free parameters
        assert t1 or params[0].annotation != params[0].empty, \
            "No type information on second parameter found! Please, use type " \
            "annotation or provide the type information via transformer-decorator."
        f = singledispatch(f)
        p1type = params[0].annotation
        if t1 is None:
            t1 = type_guard(p1type)
        elif issubtype(p1type, type_guard(t1)):
            try:
                if len(params) == 1 and issubtype(p1type, Container) \
                        and not (issubtype(p1type, Text) or issubtype(p1type, ByteString)):
                    def gen_special(*args):
                        c = set(args) if issubtype(p1type, AbstractSet) else \
                            tuple(args) if issubtype(p1type, Sequence) else args
                        d = {params[0].name: c}
                        return partial(f, **d)
                    f.register(type_guard(p1type.__args__[0]), gen_special)
            except AttributeError:
                pass  # Union Type does not allow subclassing, but is not needed here
        else:
            raise TypeError("Annotated type %s is not a subclass of decorated type %s !"
                            % (str(p1type), str(t1)))

        def gen_partial(*args, **kwargs):
            d = {p.name: arg for p, arg in zip(params, args)}
            d.update(kwargs)
            return partial(f, **d)

        for t in (t1, t2, t3, t4, t5):
            if t:
                f.register(type_guard(t), gen_partial)
            else:
                break
        return f

    if isinstance(t1, type(lambda: 1)):
        # Provide for the case that transformation_factory has been
        # written as plain decorator and not as a function call that
        # returns the decorator proper.
        func = t1
        t1 = None
        return decorator(func)
    else:
        return decorator


def key_parser_name(node: Node) -> str:
    """Returns the parser name of ``node`` as the key for a processing table."""
    return node.parser.name


def key_tag_name(node: Node) -> str:
    """Returns the tag name of ``node`` as the key for a processing table."""
    return node.tag_name


def traverse(root_node: Node,
             processing_table: ProcessingTableType,
             key_func: KeyFunc = key_tag_name) -> None:
    """
    Traverses the syntax tree starting with the given ``root_node`` depth
    first and applies the sequences of callback-functions registered
    in the ``processing_table``-dictionary.

    The most important use case is the transformation of a concrete
    syntax tree into an abstract syntax tree (AST). But it is also
    imaginable to employ tree-traversal for the semantic analysis of
    the AST.

    In order to assign sequences of callback-functions to nodes, a
    dictionary ("processing table") is used. The keys usually represent
    tag names, but any other key function is possible. There exist
    three special keys:

    - '<': always called (before any other processing function)
    - '*': called for those nodes for which no (other) processing
      function appears in the table
    - '>': always called (after any other processing function)

    Args:
        root_node (Node): The root-node of the syntax tree to be traversed
        processing_table (dict): node key -> sequence of functions that
            will be applied to matching nodes in order. This dictionary
            is interpreted as a ``compact_table``. See
            :func:`expand_table` or :func:`EBNFCompiler.EBNFTransTable`
        key_func (function): A mapping key_func(node) -> keystr. The default
            key_func yields node.tag_name.

    Example::

        table = { "term": [replace_by_single_child, flatten],
                  "factor, flowmarker, retrieveop": replace_by_single_child }
        traverse(node, table)

    """

    # Is this optimization really needed?
    if '__cache__' in processing_table:
        # assume that processing table has already been expanded
        table = processing_table               # type: ProcessingTableType
        cache = cast(TransformationDict, processing_table['__cache__'])  # type: TransformationDict
    else:
        # normalize processing_table entries by turning single values
        # into lists with a single value
        table = {name: cast(Sequence[Callable], smart_list(call))
                 for name, call in list(processing_table.items())}
        table = expand_table(table)
        # substitute key for insignificant whitespace
        assert '+' not in table, 'Symbol "+" in processing table is obsolete, use "<" instead'
        if '~' in table:
            if ':Whitespace' in table:
                raise AssertionError(
                    '"~" is a synonym for ":Whitespace" in the processing table. '
                    'To avoid confusion, choose either of the two, but do not use '
                    'both at the same time!')
            whitespace_transformation = table['~']
            del table['~']
            table[':Whitespace'] = whitespace_transformation
        # cache expanded table
        cache = cast(TransformationDict,
                     table.setdefault('__cache__', cast(TransformationDict, dict())))
        # change the processing table in place, so it is already expanded and
        # the cache is filled the next time
        processing_table.clear()
        processing_table.update(table)

    def traverse_recursive(context):
        nonlocal cache
        node = context[-1]
        if node.children:
            context.append(ZOMBIE_NODE)
            for child in node.children:
                context[-1] = child
                traverse_recursive(context)  # depth first
            context.pop()

        key = key_func(node)
        try:
            sequence = cache[key]
        except KeyError:
            sequence = table.get('<', []) \
                + table.get(key, table.get('*', [])) \
                + table.get('>', [])
            cache[key] = sequence

        for call in sequence:
            call(context)

    traverse_recursive([root_node])
    # assert processing_table['__cache__']
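
# Illustrative sketch (not part of the library): a typical processing table for
# ``traverse``. The symbol names ("expression", "term", "factor") are hypothetical;
# only the special keys '<' and '*' have a fixed meaning (see the docstring above).
#
#     ast_table = {
#         '<': remove_empty,                        # applied to every node first
#         'expression': [flatten, remove_tokens('+', '-')],
#         'term, factor': replace_by_single_child,
#         '*': replace_by_single_child,             # fallback for all other nodes
#     }
#     traverse(concrete_syntax_tree, ast_table)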


#######################################################################
#
# meta transformations, i.e. transformations that call other
# transformations
#
#######################################################################


@transformation_factory(dict)
def traverse_locally(context: List[Node],
                     processing_table: Dict,              # actually: ProcessingTableType
                     key_func: Callable = key_tag_name):  # actually: KeyFunc
    """
    Transforms the syntax tree starting from the last node in the context
    according to the given processing table. The purpose of this function is
    to apply certain transformations locally, i.e. only for those nodes that
    have the last node in the context as their parent node.
    """
    traverse(context[-1], processing_table, key_func)
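
# Illustrative sketch (not part of the library): restricting a sub-table to the
# children of one symbol with ``traverse_locally``. The tag names ("definition",
# "symbol") are hypothetical.
#
#     ast_table = {
#         'definition': traverse_locally({
#             'symbol': remove_whitespace,
#             '*': replace_by_single_child,
#         }),
#     }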


@transformation_factory(collections.abc.Callable)
def apply_if(context: List[Node], transformation: Callable, condition: Callable):
    """
    Applies a transformation only if a certain condition is met.
    """
    if condition(context):
        transformation(context)


@transformation_factory(collections.abc.Callable)
def apply_unless(context: List[Node], transformation: Callable, condition: Callable):
    """
    Applies a transformation if a certain condition is *not* met.
    """
    if not condition(context):
        transformation(context)
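
# Illustrative sketch (not part of the library): conditional application of a
# transformation inside a processing table. The tag name "number" is hypothetical.
#
#     ast_table = {
#         'number': apply_if(collapse, has_content(r'[0-9 .]+')),
#     }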


#######################################################################
#
# conditionals that determine whether the context (or the last node in
# the context for that matter) fulfill a specific condition.
# ---------------------------------------------------------------------
#
# The context of a node is understood as a list of all parent nodes
# leading up to and including the node itself. If represented as list,
# the last element of the list is the node itself.
#
#######################################################################


def is_single_child(context: List[Node]) -> bool:
    """Returns ``True`` if the current node does not have any siblings."""
    return len(context[-2].children) == 1


def is_named(context: List[Node]) -> bool:
    """Returns ``True`` if the current node's parser is a named parser."""
    return bool(context[-1].parser.name)


def is_anonymous(context: List[Node]) -> bool:
    """Returns ``True`` if the current node's parser is an anonymous parser."""
    return not context[-1].parser.name


def is_whitespace(context: List[Node]) -> bool:
    """Returns ``True`` for whitespace and comments defined with the
    ``@comment``-directive."""
    return context[-1].parser.ptype == WHITESPACE_PTYPE


def is_empty(context: List[Node]) -> bool:
    """Returns ``True`` if the current node's content is empty."""
    return not context[-1].result


def is_expendable(context: List[Node]) -> bool:
    """Returns ``True`` if the current node is either a whitespace node or
    an empty node."""
    return is_empty(context) or is_whitespace(context)


@transformation_factory(collections.abc.Set)
def is_token(context: List[Node], tokens: AbstractSet[str] = frozenset()) -> bool:
    """
    Checks whether the last node in the context has `ptype == TOKEN_PTYPE`
    and its content matches one of the given tokens. Leading and trailing
    whitespace-tokens will be ignored. In case an empty set of tokens is passed,
    any token is a match.
    """
    node = context[-1]
    return node.parser.ptype == TOKEN_PTYPE and (not tokens or node.content in tokens)
428
429


@transformation_factory(collections.abc.Set)
def is_one_of(context: List[Node], tag_name_set: AbstractSet[str]) -> bool:
    """Returns true, if the node's tag_name is one of the given tag names."""
    return context[-1].tag_name in tag_name_set


@transformation_factory(collections.abc.Set)
def not_one_of(context: List[Node], tag_name_set: AbstractSet[str]) -> bool:
    """Returns true, if the node's tag_name is not one of the given tag names."""
    return context[-1].tag_name not in tag_name_set


@transformation_factory(collections.abc.Set)
def matches_re(context: List[Node], patterns: AbstractSet[str]) -> bool:
    """
    Returns true, if the node's tag_name matches one of the regular
    expressions in `patterns`. For example, ':.*' matches all anonymous nodes.
    """
    tn = context[-1].tag_name
    for pattern in patterns:
        if re.match(pattern, tn):
            return True
    return False


@transformation_factory
def has_content(context: List[Node], regexp: str) -> bool:
    """
    Checks a node's content against a regular expression.

    In contrast to ``re.match`` the regular expression must match the complete
    string and not just the beginning of the string to succeed!
    """
    if not regexp.endswith('$'):
        regexp += "$"
    return bool(re.match(regexp, context[-1].content))


@transformation_factory(collections.abc.Set)
def has_parent(context: List[Node], tag_name_set: AbstractSet[str]) -> bool:
    """
    Checks whether a node with one of the given tag names appears somewhere
    in the context before the last node in the context.
    """
    for i in range(2, len(context) + 1):
        if context[-i].tag_name in tag_name_set:
            return True
    return False
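
# Illustrative sketch (not part of the library): condition functions such as
# `matches_re` or `has_parent` are usually curried and handed to a removal or
# `apply_if` transformation. The tag names below are hypothetical.
#
#     ast_table = {
#         'citation': remove_children_if(matches_re({':.*'})),   # drop anonymous children
#         'paragraph': apply_if(collapse, has_parent({'footnote'})),
#     }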


#######################################################################
#
# utility functions (private)
#
#######################################################################


def _replace_by(node: Node, child: Node):
    if not child.parser.name:
        child.parser = MockParser(node.parser.name, child.parser.ptype)
        # parser names must not be overwritten, else: child.parser.name = node.parser.name
    node.parser = child.parser
    node.result = child.result
    if hasattr(child, '_xml_attr'):
        node.attr.update(child.attr)


def _reduce_child(node: Node, child: Node):
    node.result = child.result
    if hasattr(child, '_xml_attr'):
        node.attr.update(child.attr)


#######################################################################
#
# rearranging transformations
#
# - tree may be rearranged (e.g. flattened)
# - nodes that are not leaves may be dropped
# - order is preserved
# - leaf content is preserved (though not necessarily the leaves
#   themselves)
#
#######################################################################


# @transformation_factory(int, str, Callable)
# def replace_by_child(context: List[Node], criteria: CriteriaType=is_single_child):
#     """
#     Replaces a node by the first of its immediate descendants
#     that meets the `criteria`. The criteria can either be the
#     index of the child (counting from zero), or the tag name or
#     a boolean-valued function on the context of the child.
#     If no child matching the criteria is found, the node will
#     not be replaced.
#     With the default value for `criteria`, the semantics are the same
#     as that of `replace_by_single_child`.
#     """
#     child = _pick_child(context, criteria)
#     if child:
#         _replace_by(context[-1], child)
#
#
# @transformation_factory(int, str, Callable)
# def content_from_child(context: List[Node], criteria: CriteriaType = is_single_child):
#     """
#     Reduces a node, by transferring the result of the first of its
#     immediate descendants that meets the `criteria` to this node,
#     but keeping this node's parser entry. The criteria can either
#     be the index of the child (counting from zero), or the tag
#     name or a boolean-valued function on the context of the child.
#     If no child matching the criteria is found, the node will
#     not be replaced.
#     With the default value for `criteria` this has the same semantics
#     as `content_from_single_child`.
#     """
#     child = _pick_child(context, criteria)
#     if child:
#         _reduce_child(context[-1], child)


def replace_by_single_child(context: List[Node]):
    """
    Removes a single-branch node by replacing it with its immediate descendant.
    Replacement only takes place if the last node in the context has
    exactly one child.
    """
    node = context[-1]
    if len(node.children) == 1:
        _replace_by(node, node.children[0])


def reduce_single_child(context: List[Node]):
    """
    Reduces a single branch node by transferring the result of its
    immediate descendant to this node, but keeping this node's parser entry.
    Reduction only takes place if the last node in the context has
    exactly one child.
    """
    node = context[-1]
    if len(node.children) == 1:
        _reduce_child(node, node.children[0])
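
# Illustrative sketch (not part of the library): the difference between the two
# single-child transformations on a hypothetical tree (term (factor "x")):
#
#     replace_by_single_child  ->  (factor "x")   # the child's parser wins
#     reduce_single_child      ->  (term "x")     # the parent's parser is kept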


@transformation_factory(collections.abc.Callable)
def replace_or_reduce(context: List[Node], condition: Callable = is_named):
    """
    Replaces the node by its single child, if the condition is met,
    otherwise (i.e. if the child is anonymous) reduces the child.
    """
    node = context[-1]
    if len(node.children) == 1:
        child = node.children[0]
        if condition(context):
            _replace_by(node, child)
        else:
            _reduce_child(node, child)


@transformation_factory
def replace_parser(context: List[Node], name: str):
    """
    Replaces the parser of a Node with a mock parser with the given
    name.

    Parameters:
        context: the context where the parser shall be replaced
        name: "NAME:PTYPE" of the surrogate. The ptype is optional
    """
    node = context[-1]
    name, ptype = (name.split(':') + [''])[:2]
    node.parser = MockParser(name, ':' + ptype)


@transformation_factory(collections.abc.Callable)
def flatten(context: List[Node], condition: Callable = is_anonymous, recursive: bool = True):
    """
    Flattens all children that fulfill the given ``condition``
    (default: all unnamed children). Flattening means that wherever a
    node has child nodes, the child nodes are inserted in place of the
    node.

    If the parameter ``recursive`` is ``True`` the same will recursively be
    done with the child-nodes, first. In other words, all leaves of
    this node and its child nodes are collected in-order as direct
    children of this node.

    Applying flatten recursively will result in these kinds of
    structural transformation::

        (1 (+ 2) (+ 3))    ->   (1 + 2 + 3)
        (1 (+ (2 + (3))))  ->   (1 + 2 + 3)
    """

    node = context[-1]
    if node.children:
        new_result = []     # type: List[Node]
        context.append(ZOMBIE_NODE)
        for child in node.children:
            context[-1] = child
            if child.children and condition(context):
                if recursive:
                    flatten(context, condition, recursive)
                new_result.extend(child.children)
            else:
                new_result.append(child)
        context.pop()
        node.result = tuple(new_result)
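
# Illustrative sketch (not part of the library): `flatten` is typically applied
# before token removal, so that operators buried in anonymous groups become
# direct children. The tag name "expression" is hypothetical.
#
#     ast_table = {
#         'expression': [flatten, remove_infix_operator],
#     }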


def collapse(context: List[Node]):
    """
    Collapses all sub-nodes of a node by replacing them with the
    string representation of the node. USE WITH CARE!
    """
    node = context[-1]
    node.result = node.content


@transformation_factory(collections.abc.Callable)
def collapse_if(context: List[Node], condition: Callable, target_tag: ParserBase):
    """
    (Recursively) merges the content of all adjacent child nodes that
    fulfill the given `condition` into a single leaf node with parser
    `target_tag`. Nodes that do not fulfill the condition will be preserved.

    >>> sxpr = '(place (abbreviation "p.") (page "26") (superscript "b") (mark ",") (page "18"))'
    >>> tree = parse_sxpr(sxpr)
    >>> text = MockParser('text')
    >>> collapse_if([tree], not_one_of({'superscript', 'subscript'}), text)
    >>> print(flatten_sxpr(tree.as_sxpr()))
    (place (text "p.26") (superscript "b") (text ",18"))

    See `test_transform.TestComplexTransformations` for examples.
    """

    node = context[-1]
    package = []  # type: List[Node]
    result = []  # type: List[Node]

    def close_package():
        nonlocal package
        if package:
            s = "".join(nd.content for nd in package)
            result.append(Node(target_tag, s))
            package = []

    for child in node.children:
        if condition([child]):
            if child.children:
                collapse_if([child], condition, target_tag)
                for c in child.children:
                    if condition([c]):
                        package.append(c)
                    else:
                        close_package()
                        result.append(c)
                close_package()
            else:
                package.append(child)
        else:
            close_package()
            result.append(child)
    close_package()
    node.result = tuple(result)


@transformation_factory(collections.abc.Callable)
def replace_content(context: List[Node], func: Callable):  # Callable[[Node], ResultType]
    """
    Replaces the content of the node. ``func`` takes the node's result
    as an argument and returns the mapped result.
    """
    node = context[-1]
    node.result = func(node.result)


@transformation_factory  # (str)
def replace_content_by(context: List[Node], content: str):  # Callable[[Node], ResultType]
    """
    Replaces the content of the node with the given text content.
    """
    node = context[-1]
    node.result = content


def normalize_whitespace(context):
    """
    Normalizes whitespace inside a leaf node, i.e. any sequence of
    whitespaces, tabs and linefeeds will be replaced by a single
    whitespace. Empty (i.e. zero-length) whitespace remains empty,
    however.
    """
    node = context[-1]
    assert not node.children
    if is_whitespace(context):
        if node.result:
            node.result = ' '
    else:
        node.result = re.sub(r'\s+', ' ', node.result)


def merge_whitespace(context):
    """
    Merges adjacent whitespace. UNTESTED!
    """
    node = context[-1]
    children = node.children
    new_result = []
    i = 0
    L = len(children)
    while i < L:
        if children[i].parser.ptype == WHITESPACE_PTYPE:
            # merge the run of adjacent whitespace nodes into children[k]
            k = i
            while i < L and children[i].parser.ptype == WHITESPACE_PTYPE:
                i += 1
            if i > k + 1:
                children[k].result = ''.join(children[n].result for n in range(k, i))
            new_result.append(children[k])
        else:
            new_result.append(children[i])
            i += 1
    node.result = tuple(new_result)


def move_whitespace(context):
    """
    Moves leading and trailing whitespace nodes to the parent node.
    """
    node = context[-1]
    if len(context) <= 1 or not node.children:
        return
    parent = context[-2]
    children = node.children
    if children[0].parser.ptype == WHITESPACE_PTYPE:
        before = (children[0],)
        children = children[1:]
    else:
        before = ()
    if children and children[-1].parser.ptype == WHITESPACE_PTYPE:
        after = (children[-1],)
        children = children[:-1]
    else:
        after = tuple()

    if before or after:
        node.result = children
        for i, child in enumerate(parent.children):
            if child == node:
                break

        # merge adjacent whitespace
        prevN = parent.children[i - 1] if i > 0 else None
        nextN = parent.children[i + 1] if i < len(parent.children) - 1 else None
        if before and prevN and prevN.parser.ptype == WHITESPACE_PTYPE:
            prevN.result = prevN.result + before[0].result
            before = ()
        if after and nextN and nextN.parser.ptype == WHITESPACE_PTYPE:
            nextN.result = after[0].result + nextN.result
            after = ()

        parent.result = parent.children[:i] + before + (node,) + after + parent.children[i + 1:]
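
# Illustrative sketch (not part of the library): effect of `move_whitespace` on a
# hypothetical node "word" whose last child is insignificant whitespace.
#
#     (sentence (word (TEXT "Hi") (:Whitespace " ")) (word (TEXT "there")))
#     -> (sentence (word (TEXT "Hi")) (:Whitespace " ") (word (TEXT "there")))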


#######################################################################
#
# destructive transformations:
#
# - leaves may be dropped (e.g. if deemed irrelevant)
# - errors of dropped leaves may be lost
# - no promise that order will be preserved
#
#######################################################################


@transformation_factory(collections.abc.Callable)
def lstrip(context: List[Node], condition: Callable = is_expendable):
    """Recursively removes all leading child-nodes that fulfill a given condition."""
    node = context[-1]
    i = 1
    while i > 0 and node.children:
        lstrip(context + [node.children[0]], condition)
        i, L = 0, len(node.children)
        while i < L and condition(context + [node.children[i]]):
            i += 1
        if i > 0:
            node.result = node.children[i:]


@transformation_factory(collections.abc.Callable)
def rstrip(context: List[Node], condition: Callable = is_expendable):
    """Recursively removes all trailing child-nodes that fulfill a given condition."""
    node = context[-1]
    i, L = 0, len(node.children)
    while i < L and node.children:
        rstrip(context + [node.children[-1]], condition)
        L = len(node.children)
        i = L
        while i > 0 and condition(context + [node.children[i - 1]]):
            i -= 1
        if i < L:
            node.result = node.children[:i]


@transformation_factory(collections.abc.Callable)
def strip(context: List[Node], condition: Callable = is_expendable):
    """Removes leading and trailing child-nodes that fulfill a given condition."""
    lstrip(context, condition)
    rstrip(context, condition)
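
# Illustrative sketch (not part of the library): stripping expendable nodes
# (whitespace and empty nodes) from the edges of every node that has no table
# entry of its own.
#
#     ast_table = {
#         '*': [strip, replace_by_single_child],
#     }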


@transformation_factory  # (slice)
def keep_children(context: List[Node], section: slice = slice(None)):
    """Keeps only child-nodes which fall into a slice of the result field."""
    node = context[-1]
    if node.children:
        node.result = node.children[section]


@transformation_factory(collections.abc.Callable)
def keep_children_if(context: List[Node], condition: Callable):
    """Keeps only those children for which `condition()` returns `True`."""
    node = context[-1]
    if node.children:
        node.result = tuple(c for c in node.children if condition(context + [c]))


@transformation_factory(collections.abc.Set)
def keep_tokens(context: List[Node], tokens: AbstractSet[str] = frozenset()):
    """Keeps only those tokens among the immediate descendants of a node
    that are in the given set of ``tokens``. If ``tokens`` is the empty set,
    all tokens are kept."""
    keep_children_if(context, partial(is_token, tokens=tokens))


@transformation_factory(collections.abc.Set)
def keep_nodes(context: List[Node], tag_names: AbstractSet[str]):
    """Keeps only those children whose tag name is in ``tag_names``."""
    keep_children_if(context, partial(is_one_of, tag_name_set=tag_names))


@transformation_factory
def keep_content(context: List[Node], regexp: str):
    """Removes children depending on their string value."""
    keep_children_if(context, partial(has_content, regexp=regexp))


@transformation_factory(collections.abc.Callable)
def remove_children_if(context: List[Node], condition: Callable):
    """Removes all children for which `condition()` returns `True`."""
    """Removes all children for which `condition()` returns `True`."""
    node = context[-1]
    if node.children:
        node.result = tuple(c for c in node.children if not condition(context + [c]))

# @transformation_factory(Callable)
# def remove_children(context: List[Node],
#                     condition: Callable = TRUE_CONDITION,
#                     section: slice = slice(None)):
#     """Removes all nodes from a slice of the result field if the function
#     `condition(child_node)` evaluates to `True`."""
#     node = context[-1]
#     if node.children:
#         c = node.children
#         N = len(c)
#         rng = range(*section.indices(N))
#         node.result = tuple(c[i] for i in range(N)
#                             if i not in rng or not condition(context + [c[i]]))
#         # selection = []
#         # for i in range(N):
#         #     context.append(c[i])
#         #     if not i in rng or not condition(context):
#         #         selection.append(c[i])
#         #     context.pop()
#         # if len(selection) != c:
#         #     node.result = tuple(selection)


remove_whitespace = remove_children_if(is_whitespace)
# partial(remove_children_if, condition=is_whitespace)
remove_empty = remove_children_if(is_empty)
remove_anonymous_empty = remove_children_if(lambda ctx: is_empty(ctx) and is_anonymous(ctx))
remove_expendables = remove_children_if(is_expendable)
# partial(remove_children_if, condition=is_expendable)
remove_anonymous_expendables = remove_children_if(lambda ctx: is_anonymous(ctx)
                                                  and is_expendable(ctx))
remove_anonymous_tokens = remove_children_if(lambda ctx: is_token(ctx) and is_anonymous(ctx))
# remove_first = apply_if(keep_children(slice(1, None)), lambda ctx: len(ctx[-1].children) > 1)
# remove_last = apply_if(keep_children(slice(None, -1)), lambda ctx: len(ctx[-1].children) > 1)
# remove_brackets = apply_if(keep_children(slice(1, -1)), lambda ctx: len(ctx[-1].children) >= 2)
remove_infix_operator = keep_children(slice(0, None, 2))
remove_single_child = apply_if(keep_children(slice(0)), lambda ctx: len(ctx[-1].children) == 1)


def remove_first(context: List[Node]):
    """Removes the first non-whitespace child."""
    node = context[-1]
    if node.children:
        for i, child in enumerate(node.children):
            if child.parser.ptype != WHITESPACE_PTYPE:
                break
        else:
            return
        node.result = node.children[:i] + node.children[i + 1:]


def remove_last(context: List[Node]):
    """Removes the last non-whitespace child."""
    node = context[-1]
    if node.children:
        for i, child in enumerate(reversed(node.children)):
            if child.parser.ptype != WHITESPACE_PTYPE:
                break
        else:
            return
        i = len(node.children) - i - 1
        node.result = node.children[:i] + node.children[i + 1:]


def remove_brackets(context: List[Node]):
    """Removes the first and the last non-whitespace child."""
    remove_first(context)
    remove_last(context)


@transformation_factory(collections.abc.Set)
def remove_tokens(context: List[Node], tokens: AbstractSet[str] = frozenset()):
    """Removes any among a particular set of tokens from the immediate
    descendants of a node. If ``tokens`` is the empty set, all tokens
    are removed."""
    remove_children_if(context, partial(is_token, tokens=tokens))
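
# Illustrative sketch (not part of the library): thanks to the factory's
# syntactic sugar, the token strings can be passed as plain arguments.
# The tag name "group" is hypothetical.
#
#     ast_table = {
#         'group': [remove_tokens('(', ')'), replace_by_single_child],
#     }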


@transformation_factory(collections.abc.Set)
def remove_nodes(context: List[Node], tag_names: AbstractSet[str]):
    """Removes all children whose tag name is in ``tag_names``."""
    remove_children_if(context, partial(is_one_of, tag_name_set=tag_names))


@transformation_factory
def remove_content(context: List[Node], regexp: str):
    """Removes all children whose content matches the regular expression ``regexp``."""
    remove_children_if(context, partial(has_content, regexp=regexp))


########################################################################
#
# AST semantic validation functions (EXPERIMENTAL!!!)
#
########################################################################

@transformation_factory(collections.abc.Callable)
def error_on(context: List[Node],
             condition: Callable,
             error_msg: str = '',
             error_code: ErrorCode = Error.ERROR):
    """
    Checks for `condition`; adds an error or warning message if the condition is met.
    """
    node = context[-1]
    if condition(context):
        if error_msg:
            cast(RootNode, context[0]).new_error(node, error_msg % node.tag_name
                                                 if error_msg.find("%s") >= 0
                                                 else error_msg, error_code)
        else:
            cond_name = condition.__name__ if hasattr(condition, '__name__') \
                else condition.__class__.__name__ if hasattr(condition, '__class__') \
                else '<unknown>'
            cast(RootNode, context[0]).new_error(node, "transform.error_on: error condition "
                                                 + cond_name + " has been met", error_code)

#