# transform.py - transformation functions for converting the
#                concrete into the abstract syntax tree
#
# Copyright 2016  by Eckhart Arnold (arnold@badw.de)
#                 Bavarian Academy of Sciences and Humanities (badw.de)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.  See the License for the specific language governing
# permissions and limitations under the License.


"""
Module ``transform`` contains the functions for transforming the
concrete syntax tree (CST) into an abstract syntax tree (AST).

As these functions are very generic, they can in principle be
used for any kind of tree transformations, not necessarily only
for CST -> AST transformations.
"""


import collections.abc
import inspect
from functools import partial, singledispatch

from DHParser.error import Error, ErrorCode
from DHParser.syntaxtree import Node, WHITESPACE_PTYPE, TOKEN_PTYPE, ParserBase, MockParser, \
    ZOMBIE_NODE, RootNode, parse_sxpr, flatten_sxpr
from DHParser.toolkit import issubtype, isgenerictype, expand_table, smart_list, re, typing
from typing import AbstractSet, Any, ByteString, Callable, cast, Container, Dict, \
    Tuple, List, Sequence, Union, Text, Generic

__all__ = ('TransformationDict',
           'TransformationProc',
           'TransformationFunc',
           'ConditionFunc',
           'KeyFunc',
           'transformation_factory',
           'key_tag_name',
           'traverse',
           'is_named',
           'replace_by_single_child',
           'reduce_single_child',
           'replace_or_reduce',
           'replace_parser',
           'collapse',
           'collapse_if',
           # 'merge_children',
           'replace_content',
           'replace_content_by',
           'normalize_whitespace',
           'move_whitespace',
           'apply_if',
           'apply_unless',
           'traverse_locally',
           'is_anonymous',
           'is_whitespace',
           'is_empty',
           'is_expendable',
           'is_token',
           'is_one_of',
           'not_one_of',
           'matches_re',
           'has_content',
           'has_parent',
           'lstrip',
           'rstrip',
           'strip',
           'keep_children',
           'keep_children_if',
           'keep_tokens',
           'keep_nodes',
           'keep_content',
           'remove_children_if',
           'remove_nodes',
           'remove_content',
           'remove_first',
           'remove_last',
           'remove_whitespace',
           'remove_empty',
           'remove_anonymous_empty',
           'remove_anonymous_expendables',
           'remove_anonymous_tokens',
           'remove_expendables',
           'remove_brackets',
           'remove_infix_operator',
           'remove_single_child',
           'remove_tokens',
           'flatten',
           'forbid',
           'require',
           'assert_content',
           'error_on',
           'assert_has_children',
           'peek')


TransformationProc = Callable[[List[Node]], None]
TransformationDict = Dict[str, Sequence[Callable]]
TransformationFunc = Union[Callable[[Node], Any], partial]
ProcessingTableType = Dict[str, Union[Sequence[Callable], TransformationDict]]
ConditionFunc = Callable  # Callable[[List[Node]], bool]
KeyFunc = Callable[[Node], str]
CriteriaType = Union[int, str, Callable]


def transformation_factory(t1=None, t2=None, t3=None, t4=None, t5=None):
    """
    Creates factory functions from transformation-functions that
    dispatch on the first parameter after the context parameter.

    Decorating a transformation-function that has more than merely the
    ``context``-parameter with ``transformation_factory`` creates a
    function with the same name, which returns a partial-function that
    takes just the context-parameter.

    Additionally, there is some syntactic sugar for
    transformation-functions that receive a collection as their second
    parameter and do not have any further parameters. In this case a
    list of parameters passed to the factory function will be converted
    into a collection.

    The main benefit is the readability of the processing tables.

    Usage::

        @transformation_factory(AbstractSet[str])
        def remove_tokens(context, tokens):
            ...

    or, alternatively::

        @transformation_factory
        def remove_tokens(context, tokens: AbstractSet[str]):
            ...

    Example::

        trans_table = { 'expression': remove_tokens('+', '-') }

    instead of::

        trans_table = { 'expression': partial(remove_tokens, tokens={'+', '-'}) }

    Parameters:
        t1:  type of the second argument of the transformation function,
            only necessary if the transformation functions' parameter list
            does not have type annotations.
    """

    def type_guard(t):
        """Raises an error if type `t` is a generic type or could be mistaken
        for the type of the canonical first parameter "List[Node]" of
        transformation functions. Returns `t`."""
        # if isinstance(t, GenericMeta):
        #     raise TypeError("Generic Type %s not permitted\n in transformation_factory "
        #                     "decorator. Use the equivalent non-generic type instead!"
        #                     % str(t))
        if isinstance(t, str):          # ensure compatibility with python versions
            t = eval(t.replace('unicode', 'str'))  # with alternative type handling.
        if isgenerictype(t):
            raise TypeError("Generic Type %s not permitted\n in transformation_factory "
                            "decorator. Use the equivalent non-generic type instead!"
                            % str(t))
        if issubtype(List[Node], t):
            raise TypeError("Sequence type %s not permitted\nin transformation_factory "
                            "decorator, because it could be mistaken for a base class "
                            "of List[Node]\nwhich is the type of the canonical first "
                            "argument of transformation functions. Try 'tuple' instead!"
                            % str(t))
        return t

    def decorator(f):
        nonlocal t1
        sig = inspect.signature(f)
        params = list(sig.parameters.values())[1:]
        if len(params) == 0:
            return f  # '@transformer' not needed w/o free parameters
        assert t1 or params[0].annotation != params[0].empty, \
            "No type information on second parameter found! Please, use type " \
            "annotation or provide the type information via transformer-decorator."
        f = singledispatch(f)
        p1type = params[0].annotation
        if t1 is None:
            t1 = type_guard(p1type)
        elif issubtype(p1type, type_guard(t1)):
            try:
                if len(params) == 1 and issubtype(p1type, Container) \
                        and not (issubtype(p1type, Text) or issubtype(p1type, ByteString)):
                    def gen_special(*args):
                        c = set(args) if issubtype(p1type, AbstractSet) else \
                            tuple(args) if issubtype(p1type, Sequence) else args
                        d = {params[0].name: c}
                        return partial(f, **d)
                    f.register(type_guard(p1type.__args__[0]), gen_special)
            except AttributeError:
                pass  # Union Type does not allow subclassing, but is not needed here
        else:
            raise TypeError("Annotated type %s is not a subclass of decorated type %s !"
                            % (str(p1type), str(t1)))

        def gen_partial(*args, **kwargs):
            d = {p.name: arg for p, arg in zip(params, args)}
            d.update(kwargs)
            return partial(f, **d)

        for t in (t1, t2, t3, t4, t5):
            if t:
                f.register(type_guard(t), gen_partial)
            else:
                break
        return f

    if isinstance(t1, type(lambda: 1)):
        # Provide for the case that transformation_factory has been
        # written as plain decorator and not as a function call that
        # returns the decorator proper.
        func = t1
        t1 = None
        return decorator(func)
    else:
        return decorator


# def key_parser_name(node: Node) -> str:
#     return node.parser.name


def key_tag_name(node: Node) -> str:
    return node.tag_name


def traverse(root_node: Node,
             processing_table: ProcessingTableType,
             key_func: KeyFunc = key_tag_name) -> None:
    """
    Traverses the syntax tree starting with the given ``node`` depth
    first and applies the sequences of callback-functions registered
    in the ``processing_table``-dictionary.

    The most important use case is the transformation of a concrete
    syntax tree into an abstract syntax tree (AST). But it is also imaginable
    to employ tree-traversal for the semantic analysis of the AST.

    In order to assign sequences of callback-functions to nodes, a
    dictionary ("processing table") is used. The keys usually represent
    tag names, but any other key function is possible. There exist
    three special keys:

    - '<': always called (before any other processing function)
    - '*': called for those nodes for which no (other) processing
      function appears in the table
    - '>': always called (after any other processing function)

    Args:
        root_node (Node): The root-node of the syntax tree to be traversed
        processing_table (dict): node key -> sequence of functions that
            will be applied to matching nodes in order. This dictionary
            is interpreted as a ``compact_table``. See
            :func:`expand_table` or :func:`EBNFCompiler.EBNFTransTable`
        key_func (function): A mapping key_func(node) -> keystr. The default
            key_func yields node.tag_name.

    Example::

        table = { "term": [replace_by_single_child, flatten],
                  "factor, flowmarker, retrieveop": replace_by_single_child }
        traverse(node, table)

    """

    # Is this optimization really needed?
    if '__cache__' in processing_table:
        # assume that processing table has already been expanded
        table = processing_table               # type: ProcessingTableType
        cache = cast(TransformationDict, processing_table['__cache__'])  # type: TransformationDict
    else:
        # normalize processing_table entries by turning single values
        # into lists with a single value
        table = {name: cast(Sequence[Callable], smart_list(call))
                 for name, call in list(processing_table.items())}
        table = expand_table(table)
        # substitute key for insignificant whitespace
        assert '+' not in table, 'Symbol "+" in processing table is obsolete, use "<" instead'
        if '~' in table:
            if ':Whitespace' in table:
                raise AssertionError(
                    '"~" is a synonym for ":Whitespace" in the processing table. '
                    'To avoid confusion, choose either of the two, but do not use '
                    'both at the same time!')
            whitespace_transformation = table['~']
            del table['~']
            table[':Whitespace'] = whitespace_transformation
        # cache expanded table
        cache = cast(TransformationDict,
                     table.setdefault('__cache__', cast(TransformationDict, dict())))
        # change the processing table in place, so it is already expanded and the
        # cache is filled the next time
        processing_table.clear()
        processing_table.update(table)

    def traverse_recursive(context):
        nonlocal cache
        node = context[-1]
        if node.children:
            context.append(ZOMBIE_NODE)
            for child in node.children:
                context[-1] = child
                traverse_recursive(context)  # depth first
            context.pop()

        key = key_func(node)
        try:
            sequence = cache[key]
        except KeyError:
            sequence = table.get('<', []) \
                + table.get(key, table.get('*', [])) \
                + table.get('>', [])
            cache[key] = sequence

        for call in sequence:
            call(context)

    traverse_recursive([root_node])
    # assert processing_table['__cache__']

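# Usage sketch (not part of the original module; the tag names "term" and
# "factor" and the variable `syntax_tree` are hypothetical): the special key
# '<' runs before, '*' serves as fallback for unmatched tag names.
#
#     ast_table = {
#         '<': remove_empty,                          # always runs first
#         'term': [replace_by_single_child, flatten],
#         '*': replace_by_single_child,               # fallback for other tags
#     }
#     traverse(syntax_tree, ast_table)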

#######################################################################
#
# meta transformations, i.e. transformations that call other
# transformations
#
#######################################################################


@transformation_factory(dict)
def traverse_locally(context: List[Node],
                     processing_table: Dict,              # actually: ProcessingTableType
                     key_func: Callable = key_tag_name):  # actually: KeyFunc
    """
    Transforms the syntax tree starting from the last node in the context
    according to the given processing table. The purpose of this function is
    to apply certain transformations locally, i.e. only for those nodes that
    have the last node in the context as their parent node.
    """
    traverse(context[-1], processing_table, key_func)


@transformation_factory(collections.abc.Callable)
def apply_if(context: List[Node], transformation: Callable, condition: Callable):
    """
    Applies a transformation only if a certain condition is met.
    """
    if condition(context):
        transformation(context)


@transformation_factory(collections.abc.Callable)
def apply_unless(context: List[Node], transformation: Callable, condition: Callable):
    """
    Applies a transformation if a certain condition is *not* met.
    """
    if not condition(context):
        transformation(context)
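
# Usage sketch (hypothetical tag names): `apply_if` and `apply_unless` wrap another
# transformation so that it fires only when the condition holds (respectively does
# not hold) for the current context, e.g. in a processing table:
#
#     table = {
#         'expression': apply_if(reduce_single_child, is_single_child),
#         'literal': apply_unless(remove_whitespace, has_parent({'preformatted'})),
#     }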


#######################################################################
#
# conditionals that determine whether the context (or the last node in
# the context for that matter) fulfills a specific condition.
# ---------------------------------------------------------------------
#
# The context of a node is understood as a list of all parent nodes
# leading up to and including the node itself. If represented as list,
# the last element of the list is the node itself.
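# Example (hypothetical tree): in the tree (a (b (c "x"))), the context of
# the node `c` is the list [a, b, c].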
#
#######################################################################


def is_single_child(context: List[Node]) -> bool:
    """Returns ``True`` if the current node does not have any siblings."""
    return len(context[-2].children) == 1


def is_named(context: List[Node]) -> bool:
    """Returns ``True`` if the current node's parser is a named parser."""
    return not context[-1].is_anonymous()


def is_anonymous(context: List[Node]) -> bool:
    """Returns ``True`` if the current node's parser is an anonymous parser."""
    return context[-1].is_anonymous()


def is_whitespace(context: List[Node]) -> bool:
    """Returns ``True`` for whitespace and comments defined with the
    ``@comment``-directive."""
    return context[-1].tag_name == WHITESPACE_PTYPE


def is_empty(context: List[Node]) -> bool:
    """Returns ``True`` if the current node's content is empty."""
    return not context[-1].result


def is_expendable(context: List[Node]) -> bool:
    """Returns ``True`` if the current node either is a node containing
    whitespace or an empty node."""
    return is_empty(context) or is_whitespace(context)


@transformation_factory(collections.abc.Set)
def is_token(context: List[Node], tokens: AbstractSet[str] = frozenset()) -> bool:
    """
    Checks whether the last node in the context has `ptype == TOKEN_PTYPE`
    and its content matches one of the given tokens. Leading and trailing
    whitespace-tokens will be ignored. In case an empty set of tokens is passed,
    any token is a match.
    """
    node = context[-1]
    return node.tag_name == TOKEN_PTYPE and (not tokens or node.content in tokens)


@transformation_factory(collections.abc.Set)
def is_one_of(context: List[Node], tag_name_set: AbstractSet[str]) -> bool:
    """Returns true, if the node's tag_name is one of the given tag names."""
    return context[-1].tag_name in tag_name_set


@transformation_factory(collections.abc.Set)
def not_one_of(context: List[Node], tag_name_set: AbstractSet[str]) -> bool:
    """Returns true, if the node's tag_name is not one of the given tag names."""
    return context[-1].tag_name not in tag_name_set


@transformation_factory(collections.abc.Set)
def matches_re(context: List[Node], patterns: AbstractSet[str]) -> bool:
    """
    Returns true, if the node's tag_name matches one of the regular
    expressions in `patterns`. For example, ':.*' matches all anonymous nodes.
    """
    tn = context[-1].tag_name
    for pattern in patterns:
        if re.match(pattern, tn):
            return True
    return False


@transformation_factory
def has_content(context: List[Node], regexp: str) -> bool:
    """
    Checks a node's content against a regular expression.

    In contrast to ``re.match`` the regular expression must match the complete
    string and not just the beginning of the string to succeed!
    """
    if not regexp.endswith('$'):
        regexp += "$"
    return bool(re.match(regexp, context[-1].content))
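
# Illustrative note (not from the original sources): because a missing '$' is
# appended above, has_content(context, r'\d+') is True only if the node's entire
# content consists of digits, not merely its beginning.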


@transformation_factory(collections.abc.Set)
def has_parent(context: List[Node], tag_name_set: AbstractSet[str]) -> bool:
    """
    Checks whether a node with one of the given tag names appears somewhere
    in the context before the last node in the context.
    """
    for i in range(2, len(context) + 1):
        if context[-i].tag_name in tag_name_set:
            return True
    return False


#######################################################################
#
# utility functions (private)
#
#######################################################################


def _replace_by(node: Node, child: Node):
    if node.is_anonymous() or not child.is_anonymous():
        node.tag_name = child.tag_name
        # name, ptype = (node.tag_name.split(':') + [''])[:2]
        # child.parser = MockParser(name, ptype)
        # parser names must not be overwritten, else: child.parser.name = node.parser.name
    node.result = child.result
    if hasattr(child, '_xml_attr'):
        node.attr.update(child.attr)


def _reduce_child(node: Node, child: Node):
    node.result = child.result
    if hasattr(child, '_xml_attr'):
        node.attr.update(child.attr)


#######################################################################
#
# rearranging transformations
#
# - tree may be rearranged (e.g. flattened)
# - nodes that are not leaves may be dropped
# - order is preserved
# - leaf content is preserved (though not necessarily the leaves
#   themselves)
#
#######################################################################


# @transformation_factory(int, str, Callable)
# def replace_by_child(context: List[Node], criteria: CriteriaType=is_single_child):
#     """
#     Replaces a node by the first of its immediate descendants
#     that meets the `criteria`. The criteria can either be the
#     index of the child (counting from zero), or the tag name or
#     a boolean-valued function on the context of the child.
#     If no child matching the criteria is found, the node will
#     not be replaced.
#     With the default value for `criteria` the semantics is the same
#     as that of `replace_by_single_child`.
#     """
#     child = _pick_child(context, criteria)
#     if child:
#         _replace_by(context[-1], child)
#
#
# @transformation_factory(int, str, Callable)
# def content_from_child(context: List[Node], criteria: CriteriaType = is_single_child):
#     """
#     Reduces a node, by transferring the result of the first of its
#     immediate descendants that meets the `criteria` to this node,
#     but keeping this node's parser entry. The criteria can either
#     be the index of the child (counting from zero), or the tag
#     name or a boolean-valued function on the context of the child.
#     If no child matching the criteria is found, the node will
#     not be replaced.
#     With the default value for `criteria` this has the same semantics
#     as `content_from_single_child`.
#     """
#     child = _pick_child(context, criteria)
#     if child:
#         _reduce_child(context[-1], child)


def replace_by_single_child(context: List[Node]):
    """
    Removes a single branch node, replacing it by its immediate descendant.
    Replacement only takes place if the last node in the context has
    exactly one child.
    """
    node = context[-1]
    if len(node.children) == 1:
        _replace_by(node, node.children[0])


def reduce_single_child(context: List[Node]):
    """
    Reduces a single branch node by transferring the result of its
    immediate descendant to this node, but keeping this node's parser entry.
    Reduction only takes place if the last node in the context has
    exactly one child.
    """
    node = context[-1]
    if len(node.children) == 1:
        _reduce_child(node, node.children[0])
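
# Illustrative contrast (hypothetical node names): for a tree (term (factor "2")),
# replace_by_single_child yields (factor "2") -- the child's tag name wins --
# while reduce_single_child yields (term "2") -- the parent's tag name is kept.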


@transformation_factory(collections.abc.Callable)
def replace_or_reduce(context: List[Node], condition: Callable = is_named):
    """
    Replaces node by a single child, if condition is met on child,
    otherwise (i.e. if the child is anonymous) reduces the child.
    """
    node = context[-1]
    if len(node.children) == 1:
        child = node.children[0]
        if condition(context):
            _replace_by(node, child)
        else:
            _reduce_child(node, child)


@transformation_factory
def replace_parser(context: List[Node], name: str):
    """
    Replaces the parser of a Node with a mock parser with the given
    name.

    Parameters:
        context: the context where the parser shall be replaced
        name: "NAME:PTYPE" of the surrogate. The ptype is optional
    """
    node = context[-1]
    node.tag_name = name


@transformation_factory(collections.abc.Callable)
def flatten(context: List[Node], condition: Callable = is_anonymous, recursive: bool = True):
    """
    Flattens all children that fulfill the given ``condition``
    (default: all unnamed children). Flattening means that wherever a
    node has child nodes, the child nodes are inserted in place of the
    node.

    If the parameter ``recursive`` is ``True`` the same will recursively be
    done with the child-nodes, first. In other words, all leaves of
    this node and its child nodes are collected in-order as direct
    children of this node.

    Applying flatten recursively will result in these kinds of
    structural transformation::

        (1 (+ 2) (+ 3))    ->   (1 + 2 + 3)
        (1 (+ (2 + (3))))  ->   (1 + 2 + 3)
    """

    node = context[-1]
    if node.children:
        new_result = []     # type: List[Node]
        context.append(ZOMBIE_NODE)
        for child in node.children:
            context[-1] = child
            if child.children and condition(context):
                if recursive:
                    flatten(context, condition, recursive)
                new_result.extend(child.children)
            else:
                new_result.append(child)
        context.pop()
        node.result = tuple(new_result)


def collapse(context: List[Node]):
    """
    Collapses all sub-nodes of a node by replacing them with the
    string representation of the node. USE WITH CARE!
    """
    node = context[-1]
    node.result = node.content
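
# Illustrative sketch (hypothetical node names): applied to
# (sentence (word "Hello") (blank " ") (word "World")), collapse turns the node
# into the leaf (sentence "Hello World"), discarding the inner structure.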


@transformation_factory(collections.abc.Callable)
def collapse_if(context: List[Node], condition: Callable, target_tag: ParserBase):
    """
    (Recursively) merges the content of all adjacent child nodes that
    fulfil the given `condition` into a single leaf node with parser
    `target_tag`. Nodes that do not fulfil the condition will be preserved.

    >>> sxpr = '(place (abbreviation "p.") (page "26") (superscript "b") (mark ",") (page "18"))'
    >>> tree = parse_sxpr(sxpr)
    >>> text = MockParser('text')
    >>> collapse_if([tree], not_one_of({'superscript', 'subscript'}), text)
    >>> print(flatten_sxpr(tree.as_sxpr()))
    (place (text "p.26") (superscript "b") (text ",18"))

    See `test_transform.TestComplexTransformations` for examples.
    """

    node = context[-1]
    package = []  # type: List[Node]
    result = []  # type: List[Node]

    def close_package():
        nonlocal package
        if package:
            s = "".join(nd.content for nd in package)
            result.append(Node(target_tag, s))
            package = []

    for child in node.children:
        if condition([child]):
            if child.children:
                collapse_if([child], condition, target_tag)
                for c in child.children:
                    if condition([c]):
                        package.append(c)
                    else:
                        close_package()
                        result.append(c)
                close_package()
            else:
                package.append(child)
        else:
            close_package()
            result.append(child)
    close_package()
    node.result = tuple(result)


@transformation_factory(collections.abc.Callable)
def replace_content(context: List[Node], func: Callable):  # Callable[[Node], ResultType]
    """
    Replaces the content of the node. ``func`` takes the node's result
    as an argument and returns the mapped result.
    """
    node = context[-1]
    node.result = func(node.result)


@transformation_factory  # (str)
def replace_content_by(context: List[Node], content: str):  # Callable[[Node], ResultType]
    """
    Replaces the content of the node with the given text content.
    """
    node = context[-1]
    node.result = content


def normalize_whitespace(context):
    """
    Normalizes Whitespace inside a leaf node, i.e. any sequence of
    whitespaces, tabs and linefeeds will be replaced by a single
    whitespace. Empty (i.e. zero-length) Whitespace remains empty,
    however.
    """
    node = context[-1]
    assert not node.children
    if is_whitespace(context):
        if node.result:
            node.result = ' '
    else:
        node.result = re.sub(r'\s+', ' ', node.result)


def merge_whitespace(context):
    """
    Merges adjacent whitespace nodes into a single node. UNTESTED!
    """
    node = context[-1]
    children = node.children
    new_result = []
    i = 0
    L = len(children)
    while i < L:
        if children[i].tag_name == WHITESPACE_PTYPE:
            k = i
            # advance i to the end of the run of adjacent whitespace nodes
            while i < L and children[i].tag_name == WHITESPACE_PTYPE:
                i += 1
            if i > k + 1:
                # merge the whole run into the first whitespace node of the run
                children[k].result = ''.join(children[n].result for n in range(k, i))
            new_result.append(children[k])
        else:
            new_result.append(children[i])
            i += 1
    node.result = tuple(new_result)


def move_whitespace(context):
    """
    Moves adjacent whitespace nodes to the parent node.
    """
    node = context[-1]
    if len(context) <= 1 or not node.children:
        return
    parent = context[-2]
    children = node.children
    if children[0].tag_name == WHITESPACE_PTYPE:
        before = (children[0],)
        children = children[1:]
    else:
        before = ()
    if children and children[-1].tag_name == WHITESPACE_PTYPE:
        after = (children[-1],)
        children = children[:-1]
    else:
        after = tuple()

    if before or after:
        node.result = children
        for i, child in enumerate(parent.children):
            if child == node:
                break

        # merge adjacent whitespace
        prevN = parent.children[i - 1] if i > 0 else None
        nextN = parent.children[i + 1] if i < len(parent.children) - 1 else None
        if before and prevN and prevN.tag_name == WHITESPACE_PTYPE:
            prevN.result = prevN.result + before[0].result
            before = ()
        if after and nextN and nextN.tag_name == WHITESPACE_PTYPE:
            nextN.result = after[0].result + nextN.result
            after = ()

        parent.result = parent.children[:i] + before + (node,) + after + parent.children[i + 1:]
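
# Illustrative sketch (hypothetical node names): for a parent
# (p (word (:Whitespace " ") (letters "abc") (:Whitespace " "))), move_whitespace
# lifts the leading and trailing whitespace out of `word`, so that the parent
# becomes (p (:Whitespace " ") (word (letters "abc")) (:Whitespace " ")).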


#######################################################################
#
# destructive transformations:
#
# - leaves may be dropped (e.g. if deemed irrelevant)
# - errors of dropped leaves may be lost
# - no promise that order will be preserved
#
#######################################################################


@transformation_factory(collections.abc.Callable)
def lstrip(context: List[Node], condition: Callable = is_expendable):
    """Recursively removes all leading child-nodes that fulfill a given condition."""
    node = context[-1]
    i = 1
    while i > 0 and node.children:
        lstrip(context + [node.children[0]], condition)
        i, L = 0, len(node.children)
        while i < L and condition(context + [node.children[i]]):
            i += 1
        if i > 0:
            node.result = node.children[i:]


@transformation_factory(collections.abc.Callable)
def rstrip(context: List[Node], condition: Callable = is_expendable):
    """Recursively removes all trailing child-nodes that fulfill a given condition."""
    node = context[-1]
    i, L = 0, len(node.children)
    while i < L and node.children:
        rstrip(context + [node.children[-1]], condition)
        L = len(node.children)
        i = L
        while i > 0 and condition(context + [node.children[i - 1]]):
            i -= 1
        if i < L:
            node.result = node.children[:i]


@transformation_factory(collections.abc.Callable)
def strip(context: List[Node], condition: Callable = is_expendable):
    """Removes leading and trailing child-nodes that fulfill a given condition."""
    lstrip(context, condition)
    rstrip(context, condition)


@transformation_factory  # (slice)
def keep_children(context: List[Node], section: slice = slice(None)):
    """Keeps only child-nodes which fall into a slice of the result field."""
    node = context[-1]
    if node.children:
        node.result = node.children[section]


@transformation_factory(collections.abc.Callable)
def keep_children_if(context: List[Node], condition: Callable):
    """Keeps only those children for which `condition()` returns `True`."""
    node = context[-1]
    if node.children:
        node.result = tuple(c for c in node.children if condition(context + [c]))


@transformation_factory(collections.abc.Set)
def keep_tokens(context: List[Node], tokens: AbstractSet[str] = frozenset()):
    """Keeps only those tokens among the immediate descendants of a node
    that are in the given set of ``tokens``. If ``tokens`` is the empty
    set, all tokens are kept."""
    keep_children_if(context, partial(is_token, tokens=tokens))


@transformation_factory(collections.abc.Set)
def keep_nodes(context: List[Node], tag_names: AbstractSet[str]):
    """Keeps only those children whose tag name is in ``tag_names``."""
    keep_children_if(context, partial(is_one_of, tag_name_set=tag_names))


@transformation_factory
def keep_content(context: List[Node], regexp: str):
    """Keeps only those children whose content matches ``regexp``."""
    keep_children_if(context, partial(has_content, regexp=regexp))


@transformation_factory(collections.abc.Callable)
def remove_children_if(context: List[Node], condition: Callable):
    """Removes all children for which `condition()` returns `True`."""
    node = context[-1]
    if node.children:
        node.result = tuple(c for c in node.children if not condition(context + [c]))

# @transformation_factory(Callable)
# def remove_children(context: List[Node],
#                     condition: Callable = TRUE_CONDITION,
#                     section: slice = slice(None)):
#     """Removes all nodes from a slice of the result field if the function
#     `condition(child_node)` evaluates to `True`."""
#     node = context[-1]
#     if node.children:
#         c = node.children
#         N = len(c)
#         rng = range(*section.indices(N))
#         node.result = tuple(c[i] for i in range(N)
#                             if i not in rng or not condition(context + [c[i]]))
#         # selection = []
#         # for i in range(N):
#         #     context.append(c[i])
#         #     if not i in rng or not condition(context):
#         #         selection.append(c[i])
#         #     context.pop()
#         # if len(selection) != c:
#         #     node.result = tuple(selection)


remove_whitespace = remove_children_if(is_whitespace)
# partial(remove_children_if, condition=is_whitespace)
remove_empty = remove_children_if(is_empty)
remove_anonymous_empty = remove_children_if(lambda ctx: is_empty(ctx) and is_anonymous(ctx))
remove_expendables = remove_children_if(is_expendable)
# partial(remove_children_if, condition=is_expendable)
remove_anonymous_expendables = remove_children_if(lambda ctx: is_anonymous(ctx)
                                                  and is_expendable(ctx))
remove_anonymous_tokens = remove_children_if(lambda ctx: is_token(ctx) and is_anonymous(ctx))
# remove_first = apply_if(keep_children(slice(1, None)), lambda ctx: len(ctx[-1].children) > 1)
# remove_last = apply_if(keep_children(slice(None, -1)), lambda ctx: len(ctx[-1].children) > 1)
# remove_brackets = apply_if(keep_children(slice(1, -1)), lambda ctx: len(ctx[-1].children) >= 2)
remove_infix_operator = keep_children(slice(0, None, 2))
remove_single_child = apply_if(keep_children(slice(0)), lambda ctx: len(ctx[-1].children) == 1)
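
# Illustrative sketch (hypothetical node names): remove_infix_operator keeps only
# the children at even positions, e.g. (sum (num "1") (op "+") (num "2")) becomes
# (sum (num "1") (num "2")); remove_single_child empties a node that has exactly
# one child, e.g. (group (num "1")) becomes (group) with empty content.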


def remove_first(context: List[Node]):
    """Removes the first non-whitespace child."""
    node = context[-1]
    if node.children:
        for i, child in enumerate(node.children):
            if child.tag_name != WHITESPACE_PTYPE:
                break
        else:
            return
        node.result = node.children[:i] + node.children[i + 1:]


def remove_last(context: List[Node]):
    """Removes the last non-whitespace child."""
    node = context[-1]
    if node.children:
        for i, child in enumerate(reversed(node.children)):
            if child.tag_name != WHITESPACE_PTYPE:
                break
        else:
            return
        i = len(node.children) - i - 1
        node.result = node.children[:i] + node.children[i + 1:]


def remove_brackets(context: List[Node]):
    """Removes the first and the last non-whitespace child."""
    remove_first(context)
    remove_last(context)
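
# Illustrative sketch (hypothetical node names): applied to
# (group (token "(") (expr "a+b") (token ")")), remove_brackets drops the first
# and the last non-whitespace child, leaving (group (expr "a+b")).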


@transformation_factory(collections.abc.Set)
def remove_tokens(context: List[Node], tokens: AbstractSet[str] = frozenset()):
    """Removes any among a particular set of tokens from the immediate
    descendants of a node. If ``tokens`` is the empty set, all tokens
    are removed."""
    remove_children_if(context, partial(is_token, tokens=tokens))


@transformation_factory(collections.abc.Set)
def remove_nodes(context: List[Node], tag_names: AbstractSet[str]):
    """Removes children by tag name."""
    remove_children_if(context, partial(is_one_of, tag_name_set=tag_names))


@transformation_factory
def remove_content(context: List[Node], regexp: str):
    """Removes children depending on their string value."""
    remove_children_if(context, partial(has_content, regexp=regexp))


########################################################################
#
# AST semantic validation functions (EXPERIMENTAL!!!)
#
########################################################################

@transformation_factory(collections.abc.Callable)