transform.py 37.7 KB
Newer Older
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
# transform.py - transformation functions for converting the
#                concrete into the abstract syntax tree
#
# Copyright 2016  by Eckhart Arnold (arnold@badw.de)
#                 Bavarian Academy of Sciences an Humanities (badw.de)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.  See the License for the specific language governing
# permissions and limitations under the License.
18
19


20
21
22
"""
Module ``transform`` contains the functions for transforming the
concrete syntax tree (CST) into an abstract syntax tree (AST).
23

24
25
26
As these functions are very generic, they can in principle be
used for any kind of tree transformations, not necessarily only
for CST -> AST transformations.
27
28
"""

29

30
import collections.abc
31
import inspect
Eckhart Arnold's avatar
Eckhart Arnold committed
32
import fnmatch
33
34
from functools import partial, reduce, singledispatch

35
from DHParser.error import Error
36
from DHParser.syntaxtree import Node, WHITESPACE_PTYPE, TOKEN_PTYPE, ParserBase, MockParser, \
37
    ZOMBIE_NODE, parse_sxpr, flatten_sxpr
38
from DHParser.toolkit import issubtype, isgenerictype, expand_table, smart_list, re, typing
39
from typing import AbstractSet, Any, ByteString, Callable, cast, Container, Dict, \
40
    Tuple, List, Sequence, Union, Text, Generic
41

42
43
# Public interface of this module: type aliases, the factory decorator,
# the traversal entry point, condition functions and the collection of
# transformation functions usable in processing tables.
__all__ = ('TransformationDict',
           'TransformationProc',
           'TransformationFunc',
           'ConditionFunc',
           'KeyFunc',
           'transformation_factory',
           'key_parser_name',
           'key_tag_name',
           'traverse',
           'is_named',
           'replace_by_single_child',
           'reduce_single_child',
           'replace_or_reduce',
           'replace_parser',
           'collapse',
           'collapse_if',
           # 'merge_children',
           'replace_content',
           'replace_content_by',
           'normalize_whitespace',
           'move_whitespace',
           'apply_if',
           'apply_unless',
           'traverse_locally',
           'is_anonymous',
           'is_whitespace',
           'is_empty',
           'is_expendable',
           'is_token',
           'is_one_of',
           'not_one_of',
           'matches_re',
           'has_content',
           'has_parent',
           'lstrip',
           'rstrip',
           'strip',
           'keep_children',
           'keep_children_if',
           'keep_tokens',
           'keep_nodes',
           'keep_content',
           'remove_children_if',
           'remove_nodes',
           'remove_content',
           'remove_first',
           'remove_last',
           'remove_whitespace',
           'remove_empty',
           'remove_anonymous_empty',
           'remove_anonymous_expendables',
           'remove_anonymous_tokens',
           'remove_expendables',
           'remove_brackets',
           'remove_infix_operator',
           'remove_single_child',
           'remove_tokens',
           'flatten',
           'forbid',
           'require',
           'assert_content',
           'error_on',
           'warn_on',
           'assert_has_children',
           'peek')
107
108


109
# A single transformation step: receives the context (list of nodes from
# the root down to the current node) and works by side effect.
TransformationProc = Callable[[List[Node]], None]
# Expanded processing table: maps a node key to a sequence of callbacks.
TransformationDict = Dict[str, Sequence[Callable]]
# A complete tree transformation, applied to the root node.
TransformationFunc = Union[Callable[[Node], Any], partial]
# Processing table as written by the user; values may still be single
# callables that get normalized to sequences by `traverse`.
ProcessingTableType = Dict[str, Union[Sequence[Callable], TransformationDict]]
ConditionFunc = Callable  # Callable[[List[Node]], bool]
# Key function for processing tables, e.g. `key_tag_name`.
KeyFunc = Callable[[Node], str]
# Criterion for picking a child node: index, tag name or condition function.
CriteriaType = Union[int, str, Callable]
116
117


118
def transformation_factory(t1=None, t2=None, t3=None, t4=None, t5=None):
    """Creates factory functions from transformation-functions that
    dispatch on the first parameter after the context parameter.

    Decorating a transformation-function that has more than merely the
    ``context``-parameter with ``transformation_factory`` creates a
    function with the same name, which returns a partial-function that
    takes just the context-parameter.

    Additionally, there is some syntactic sugar for
    transformation-functions that receive a collection as their second
    parameter and do not have any further parameters. In this case a
    list of parameters passed to the factory function will be converted
    into a collection.

    Main benefit is readability of processing tables.

    Usage::

        @transformation_factory(AbstractSet[str])
        def remove_tokens(context, tokens):
            ...

    or, alternatively::

        @transformation_factory
        def remove_tokens(context, tokens: AbstractSet[str]):
            ...

    Example::

        trans_table = { 'expression': remove_tokens('+', '-') }

    instead of::

        trans_table = { 'expression': partial(remove_tokens, tokens={'+', '-'}) }

    Parameters:
        t1:  type of the second argument of the transformation function,
            only necessary if the transformation functions' parameter list
            does not have type annotations.
        t2, t3, t4, t5:  additional types on which the factory shall
            dispatch (beyond the annotated/primary type t1).
    """

    def type_guard(t):
        """Raises an error if type `t` is a generic type or could be mistaken
        for the type of the canonical first parameter "List[Node]" of
        transformation functions. Returns `t`."""
        # if isinstance(t, GenericMeta):
        #     raise TypeError("Generic Type %s not permitted\n in transformation_factory "
        #                     "decorator. Use the equivalent non-generic type instead!"
        #                     % str(t))
        if isgenerictype(t):
            raise TypeError("Generic Type %s not permitted\n in transformation_factory "
                            "decorator. Use the equivalent non-generic type instead!"
                            % str(t))
        if issubtype(List[Node], t):
            raise TypeError("Sequence type %s not permitted\nin transformation_factory "
                            "decorator, because it could be mistaken for a base class "
                            "of List[Node]\nwhich is the type of the canonical first "
                            "argument of transformation functions. Try 'tuple' instead!"
                            % str(t))
        return t

    def decorator(f):
        # t1 may be filled in from the annotation of f's second parameter
        nonlocal t1
        sig = inspect.signature(f)
        # all parameters after the canonical context-parameter
        params = list(sig.parameters.values())[1:]
        if len(params) == 0:
            return f  # '@transformer' not needed w/o free parameters
        assert t1 or params[0].annotation != params[0].empty, \
            "No type information on second parameter found! Please, use type " \
            "annotation or provide the type information via transformer-decorator."
        # dispatch on the type of the first argument passed to the factory
        f = singledispatch(f)
        p1type = params[0].annotation
        if t1 is None:
            t1 = type_guard(p1type)
        elif issubtype(p1type, type_guard(t1)):
            try:
                # syntactic sugar: a single collection parameter allows
                # passing its elements as separate arguments, e.g.
                # remove_tokens('+', '-') instead of remove_tokens({'+', '-'})
                if len(params) == 1 and issubtype(p1type, Container) \
                        and not (issubtype(p1type, Text) or issubtype(p1type, ByteString)):
                    def gen_special(*args):
                        # wrap the loose arguments in the annotated
                        # collection type (set or tuple)
                        c = set(args) if issubtype(p1type, AbstractSet) else \
                            tuple(args) if issubtype(p1type, Sequence) else args
                        d = {params[0].name: c}
                        return partial(f, **d)
                    f.register(type_guard(p1type.__args__[0]), gen_special)
            except AttributeError:
                pass  # Union Type does not allow subclassing, but is not needed here
        else:
            raise TypeError("Annotated type %s is not a subclass of decorated type %s !"
                            % (str(p1type), str(t1)))

        def gen_partial(*args, **kwargs):
            # bind all factory arguments by name and return the
            # context-only transformation function
            d = {p.name: arg for p, arg in zip(params, args)}
            d.update(kwargs)
            return partial(f, **d)

        # register gen_partial for every explicitly given dispatch type
        for t in (t1, t2, t3, t4, t5):
            if t:
                f.register(type_guard(t), gen_partial)
            else:
                break
        return f

    if isinstance(t1, type(lambda: 1)):
        # Provide for the case that transformation_factory has been
        # written as plain decorator and not as a function call that
        # returns the decorator proper.
        func = t1
        t1 = None
        return decorator(func)
    else:
        return decorator


233
def key_parser_name(node: Node) -> str:
    """Key function for processing tables: maps a node to the name of
    its parser."""
    parser = node.parser
    return parser.name


237
def key_tag_name(node: Node) -> str:
    """Key function for processing tables: maps a node to its tag name."""
    tag = node.tag_name
    return tag


241
def traverse(root_node: Node,
             processing_table: ProcessingTableType,
             key_func: KeyFunc=key_tag_name) -> None:
    """
    Traverses the syntax tree starting with the given ``node`` depth
    first and applies the sequences of callback-functions registered
    in the ``processing_table``-dictionary.

    The most important use case is the transformation of a concrete
    syntax tree into an abstract tree (AST). But it is also imaginable
    to employ tree-traversal for the semantic analysis of the AST.

    In order to assign sequences of callback-functions to nodes, a
    dictionary ("processing table") is used. The keys usually represent
    tag names, but any other key function is possible. There exist
    three special keys:

    - '+': always called (before any other processing function)
    - '*': called for those nodes for which no (other) processing
      function appears in the table
    - '~': always called (after any other processing function)

    Args:
        root_node (Node): The root-node of the syntax tree to be traversed
        processing_table (dict): node key -> sequence of functions that
            will be applied to matching nodes in order. This dictionary
            is interpreted as a ``compact_table``. See
            :func:`expand_table` or :func:`EBNFCompiler.EBNFTransTable`
        key_func (function): A mapping key_func(node) -> keystr. The default
            key_func yields node.tag_name.

    Example::

        table = { "term": [replace_by_single_child, flatten],
                  "factor, flowmarker, retrieveop": replace_by_single_child }
        traverse(node, table)

    """
    # Is this optimization really needed?
    if '__cache__' in processing_table:
        # assume that processing table has already been expanded
        table = processing_table               # type: ProcessingTableType
        cache = cast(TransformationDict, processing_table['__cache__'])  # type: TransformationDict
    else:
        # normalize processing_table entries by turning single values
        # into lists with a single value
        table = {name: cast(Sequence[Callable], smart_list(call))
                 for name, call in list(processing_table.items())}
        table = expand_table(table)
        # the cache maps node keys to their fully resolved callback sequence
        cache = cast(TransformationDict,
                     table.setdefault('__cache__', cast(TransformationDict, dict())))
        # change processing table in place, so it's already expanded and cache filled next time
        processing_table.clear()
        processing_table.update(table)

    # assert '__cache__' in processing_table
    # # Code without optimization
    # table = {name: smart_list(call) for name, call in list(processing_table.items())}
    # table = expand_table(table)
    # cache = {}  # type: Dict[str, List[Callable]]

    def traverse_recursive(context):
        """Walks the tree depth first, reusing one context list (the
        last slot is overwritten per child to avoid re-allocations)."""
        nonlocal cache
        node = context[-1]
        if node.children:
            context.append(ZOMBIE_NODE)  # placeholder, overwritten below
            for child in node.children:
                context[-1] = child
                traverse_recursive(context)  # depth first
            context.pop()

        key = key_func(node)
        try:
            sequence = cache[key]
        except KeyError:
            sequence = table.get('+', []) \
                + table.get(key, table.get('*', [])) \
                + table.get('~', [])
            # '+' always called (before any other processing function)
            # '*' called for those nodes for which no (other) processing function
            #     appears in the table
            # '~' always called (after any other processing function)
            cache[key] = sequence

        for call in sequence:
            call(context)

    traverse_recursive([root_node])
    # assert processing_table['__cache__']

331

332
#######################################################################
333
#
334
335
# meta transformations, i.e. transformations that call other
# transformations
336
#
337
#######################################################################
338
339


eckhart's avatar
eckhart committed
340
@transformation_factory(dict)
def traverse_locally(context: List[Node],
                     processing_table: Dict,            # actually: ProcessingTableType
                     key_func: Callable=key_tag_name):  # actually: KeyFunc
    """Transforms the syntax tree starting from the last node in the context
    according to the given processing table. The purpose of this function is
    to apply certain transformations locally, i.e. only for those nodes that
    have the last node in the context as their parent node.
    """
    local_root = context[-1]
    traverse(local_root, processing_table, key_func)


352
@transformation_factory(collections.abc.Callable)
def apply_if(context: List[Node], transformation: Callable, condition: Callable):
    """Applies a transformation only if a certain condition is met."""
    if not condition(context):
        return
    transformation(context)


359
@transformation_factory(collections.abc.Callable)
def apply_unless(context: List[Node], transformation: Callable, condition: Callable):
    """Applies a transformation if a certain condition is *not* met."""
    if condition(context):
        return
    transformation(context)


366
367
368
369
370
371
372
373
374
375
376
377
378
379
#######################################################################
#
# conditionals that determine whether the context (or the last node in
# the context for that matter) fulfill a specific condition.
# ---------------------------------------------------------------------
#
# The context of a node is understood as a list of all parent nodes
# leading up to and including the node itself. If represented as list,
# the last element of the list is the node itself.
#
#######################################################################


def is_single_child(context: List[Node]) -> bool:
    """Returns ``True`` if the current node does not have any siblings,
    i.e. if its parent has exactly one child."""
    parent = context[-2]
    return len(parent.children) == 1


def is_named(context: List[Node]) -> bool:
    """Returns ``True`` if the current node's parser is a named parser,
    i.e. its parser name is non-empty."""
    current = context[-1]
    return bool(current.parser.name)


def is_anonymous(context: List[Node]) -> bool:
    """Returns ``True`` if the current node's parser is an anonymous
    parser, i.e. its parser name is empty."""
    current = context[-1]
    return not current.parser.name


def is_whitespace(context: List[Node]) -> bool:
    """Returns ``True`` if the current node is a whitespace node, i.e.
    its parser's ptype is ``WHITESPACE_PTYPE`` (which includes comments
    defined with the ``@comment``-directive).

    NOTE: This is a pure predicate; it does not remove anything. The
    previous docstring ("Removes whitespace and comments...") was wrong.
    """
    return context[-1].parser.ptype == WHITESPACE_PTYPE


def is_empty(context: List[Node]) -> bool:
    """Returns ``True`` if the current node's content is empty, i.e. its
    result is falsy (no children and no text)."""
    current = context[-1]
    return not current.result


def is_expendable(context: List[Node]) -> bool:
    """Returns ``True`` if the current node either is a node containing
    whitespace or an empty node."""
    if is_empty(context):
        return True
    return is_whitespace(context)


411
@transformation_factory(collections.abc.Set)
def is_token(context: List[Node], tokens: AbstractSet[str] = frozenset()) -> bool:
    """Checks whether the last node in the context has `ptype == TOKEN_PTYPE`
    and its content matches one of the given tokens. In case an empty set
    of tokens is passed, any token is a match.

    NOTE: The raw content is compared; leading and trailing whitespace
    is NOT stripped. (An earlier docstring claimed whitespace-tokens
    would be ignored, but the corresponding stripping code had been
    disabled; the docstring now matches the actual behavior.)
    """
    node = context[-1]
    if node.parser.ptype != TOKEN_PTYPE:
        return False
    return not tokens or node.content in tokens
433
434


435
@transformation_factory(collections.abc.Set)
def is_one_of(context: List[Node], tag_name_set: AbstractSet[str]) -> bool:
    """Returns true, if the node's tag_name is one of the given tag names."""
    tag = context[-1].tag_name
    return tag in tag_name_set


441
442
443
444
445
446
@transformation_factory(collections.abc.Set)
def not_one_of(context: List[Node], tag_name_set: AbstractSet[str]) -> bool:
    """Returns true, if the node's tag_name is not one of the given tag names."""
    tag = context[-1].tag_name
    return tag not in tag_name_set


Eckhart Arnold's avatar
Eckhart Arnold committed
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
# @transformation_factory(collections.abc.Set)
# def matches_wildcard(context: List[Node], wildcards: AbstractSet[str]) -> bool:
#     """Retruns true, if the node's tag_name matches one of the glob patterns
#     in `wildcards`. For example, ':*' matches all anonymous nodes. """
#     tn = context[-1].tag_name
#     for pattern in wildcards:
#         if fnmatch.fnmatch(tn, pattern):
#             return True
#     return False


@transformation_factory(collections.abc.Set)
def matches_re(context: List[Node], patterns: AbstractSet[str]) -> bool:
    """Returns true, if the node's tag_name matches one of the regular
    expressions in `patterns`. For example, ':.*' matches all anonymous nodes.
    """
    tn = context[-1].tag_name
    # any() short-circuits exactly like the former explicit loop did
    return any(re.match(pattern, tn) for pattern in patterns)


eckhart's avatar
eckhart committed
470
@transformation_factory
def has_content(context: List[Node], regexp: str) -> bool:
    """
    Checks a node's content against a regular expression.

    In contrast to ``re.match`` the regular expression must match the complete
    string and not just the beginning of the string to succeed!
    """
    anchored = regexp if regexp.endswith('$') else regexp + '$'
    return re.match(anchored, context[-1].content) is not None


483
@transformation_factory(collections.abc.Set)
def has_parent(context: List[Node], tag_name_set: AbstractSet[str]) -> bool:
    """Checks whether a node with one of the given tag names appears somewhere
     in the context before the last node in the context.

    Fix: the former loop ``for i in range(2, len(context))`` stopped one
    index short of the root node (``context[0]``), so a matching root was
    never detected, contradicting this docstring. All ancestors, including
    the root, are now checked.
    """
    for ancestor in context[:-1]:
        if ancestor.tag_name in tag_name_set:
            return True
    return False


#######################################################################
#
# utility functions (private)
#
#######################################################################


def _replace_by(node: Node, child: Node):
    """Replaces `node`'s parser, result and attributes by those of `child`.
    If the child is anonymous (unnamed parser), it first receives a mock
    parser carrying `node`'s parser name, so that the name is preserved."""
    if not child.parser.name:
        child.parser = MockParser(node.parser.name, child.parser.ptype)
        # parser names must not be overwritten, else: child.parser.name = node.parser.name
    node.parser = child.parser
    # node.errors.extend(child.errors)
    node.result = child.result
    if hasattr(child, '_xml_attr'):
        # carry over the child's XML-attributes, if it has any
        node.attr.update(child.attr)
509
510


511
def _reduce_child(node: Node, child: Node):
    """Transfers `child`'s result (and XML-attributes, if present) to
    `node`, while keeping `node`'s own parser entry (i.e. its name)."""
    # node.errors.extend(child.errors)
    node.result = child.result
    if hasattr(child, '_xml_attr'):
        # carry over the child's XML-attributes, if it has any
        node.attr.update(child.attr)


518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
#######################################################################
#
# rearranging transformations
#
# - tree may be rearranged (e.g.flattened)
# - nodes that are not leaves may be dropped
# - order is preserved
# - leave content is preserved (though not necessarily the leaves
#   themselves)
#
#######################################################################


# @transformation_factory(int, str, Callable)
# def replace_by_child(context: List[Node], criteria: CriteriaType=is_single_child):
#     """
#     Replaces a node by the first of its immediate descendants
#     that meets the `criteria`. The criteria can either be the
#     index of the child (counting from zero), or the tag name or
#     a boolean-valued function on the context of the child.
#     If no child matching the criteria is found, the node will
#     not be replaced.
#     With the default value for `criteria` the same semantics is
#     the same that of `replace_by_single_child`.
#     """
#     child = _pick_child(context, criteria)
#     if child:
#         _replace_by(context[-1], child)
#
#
# @transformation_factory(int, str, Callable)
# def content_from_child(context: List[Node], criteria: CriteriaType = is_single_child):
#     """
#     Reduces a node, by transferring the result of the first of its
#     immediate descendants that meets the `criteria` to this node,
#     but keeping this node's parser entry. The criteria can either
#     be the index of the child (counting from zero), or the tag
#     name or a boolean-valued function on the context of the child.
#     If no child matching the criteria is found, the node will
#     not be replaced.
#     With the default value for `criteria` this has the same semantics
#     as `content_from_single_child`.
#     """
#     child = _pick_child(context, criteria)
#     if child:
#         _reduce_child(context[-1], child)
564
565


566
567
def replace_by_single_child(context: List[Node]):
    """
    Removes single branch node, replacing it by its immediate descendant.
    Replacement only takes place, if the last node in the context has
    exactly one child.
    """
    node = context[-1]
    children = node.children
    if len(children) != 1:
        return
    _replace_by(node, children[0])
575
576


Eckhart Arnold's avatar
Eckhart Arnold committed
577
def reduce_single_child(context: List[Node]):
    """
    Reduces a single branch node by transferring the result of its
    immediate descendant to this node, but keeping this node's parser entry.
    Reduction only takes place if the last node in the context has
    exactly one child.
    """
    node = context[-1]
    children = node.children
    if len(children) != 1:
        return
    _reduce_child(node, children[0])
587
588


589
@transformation_factory(collections.abc.Callable)
def replace_or_reduce(context: List[Node], condition: Callable=is_named):
    """
    Replaces node by a single child, if condition is met on child,
    otherwise (i.e. if the child is anonymous) reduces the child.
    """
    node = context[-1]
    if len(node.children) != 1:
        return
    only_child = node.children[0]
    if condition(context):
        _replace_by(node, only_child)
    else:
        _reduce_child(node, only_child)
602
603
604


@transformation_factory
def replace_parser(context: List[Node], name: str):
    """
    Replaces the parser of a Node with a mock parser with the given
    name.

    Parameters:
        context: the context where the parser shall be replaced
        name: "NAME:PTYPE" of the surrogate. The ptype is optional
    """
    node = context[-1]
    parts = name.split(':')
    new_name = parts[0]
    ptype = parts[1] if len(parts) > 1 else ''
    node.parser = MockParser(new_name, ':' + ptype)
617
618


619
@transformation_factory(collections.abc.Callable)
def flatten(context: List[Node], condition: Callable=is_anonymous, recursive: bool=True):
    """
    Flattens all children, that fulfil the given ``condition``
    (default: all unnamed children). Flattening means that wherever a
    node has child nodes, the child nodes are inserted in place of the
    node.

    If the parameter ``recursive`` is ``True`` the same will recursively be
    done with the child-nodes, first. In other words, all leaves of
    this node and its child nodes are collected in-order as direct
    children of this node.

    Applying flatten recursively will result in these kinds of
    structural transformation::

        (1 (+ 2) (+ 3))    ->   (1 + 2 + 3)
        (1 (+ (2 + (3))))  ->   (1 + 2 + 3)
    """
    node = context[-1]
    if node.children:
        new_result = []     # type: List[Node]
        # extend the context by a placeholder slot that is overwritten
        # with each child in turn, so `condition` sees the full context
        context.append(ZOMBIE_NODE)
        for child in node.children:
            context[-1] = child
            if child.children and condition(context):
                if recursive:
                    # flatten the child's subtree first, so its
                    # (transitive) children can be hoisted in one step
                    flatten(context, condition, recursive)
                new_result.extend(child.children)
            else:
                # named (or leaf) children are kept as they are
                new_result.append(child)
        context.pop()
        node.result = tuple(new_result)


654
def collapse(context: List[Node]):
    """Collapses all sub-nodes of a node by replacing them with the
    string representation of the node. USE WITH CARE!"""
    nd = context[-1]
    nd.result = nd.content
659
660


661
662
@transformation_factory(collections.abc.Callable)
def collapse_if(context: List[Node], condition: Callable, target_tag: ParserBase):
    """(Recursively) merges the content of all adjacent child nodes that
    fulfil the given `condition` into a single leaf node with parser
    `target_tag`. Nodes that do not fulfil the condition will be preserved.

    >>> sxpr = '(place (abbreviation "p.") (page "26") (superscript "b") (mark ",") (page "18"))'
    >>> tree = parse_sxpr(sxpr)
    >>> text = MockParser('text')
    >>> collapse_if([tree], not_one_of({'superscript', 'subscript'}), text)
    >>> print(flatten_sxpr(tree.as_sxpr()))
    (place (text "p.26") (superscript "b") (text ",18"))

    See `test_transform.TestComplexTransformations` for examples.
    """
    node = context[-1]
    package = []   # run of adjacent condition-fulfilling nodes to merge
    result = []    # new children of `node`

    def close_package():
        """Flushes the current `package`: concatenates the content of the
        collected nodes into one `target_tag` leaf node."""
        nonlocal package
        if package:
            s = "".join(nd.content for nd in package)
            result.append(Node(target_tag, s))
            package = []

    for child in node.children:
        if condition([child]):
            if child.children:
                # collapse the child's subtree first, then distribute its
                # children into the current package / result
                collapse_if([child], condition, target_tag)
                for c in child.children:
                    if condition([c]):
                        package.append(c)
                    else:
                        # a non-matching node interrupts the run
                        close_package()
                        result.append(c)
                close_package()
            else:
                package.append(child)
        else:
            close_package()
            result.append(child)
    close_package()
    node.result = tuple(result)


707
@transformation_factory(collections.abc.Callable)
def replace_content(context: List[Node], func: Callable):  # Callable[[Node], ResultType]
    """Replaces the content of the node. ``func`` takes the node's result
    as an argument and returns the mapped result.
    """
    nd = context[-1]
    nd.result = func(nd.result)


716
@transformation_factory  # (str)
def replace_content_by(context: List[Node], content: str):  # Callable[[Node], ResultType]
    """Replaces the content of the node with the given text content."""
    nd = context[-1]
    nd.result = content


724
def normalize_whitespace(context):
    """
    Normalizes Whitespace inside a leaf node, i.e. any sequence of
    whitespaces, tabs and linefeeds will be replaced by a single
    whitespace. Empty (i.e. zero-length) Whitespace remains empty,
    however.
    """
    node = context[-1]
    assert not node.children  # only applicable to leaf nodes
    if is_whitespace(context):
        # a pure whitespace node collapses to a single blank, unless empty
        if node.result:
            node.result = ' '
    else:
        # fix: raw string for the regex — '\s' in a plain string literal
        # is an invalid escape sequence (DeprecationWarning since 3.6)
        node.result = re.sub(r'\s+', ' ', node.result)


def move_whitespace(context):
    """
    Moves adjacent whitespace nodes to the parent node.

    If the first and/or last child of the node is a whitespace node, it is
    detached from the node and re-inserted into the parent's result directly
    before/after the node.  Whitespace that then becomes adjacent to an
    already existing whitespace sibling in the parent is merged into that
    sibling instead of being inserted as a separate node.
    """
    node = context[-1]
    # nothing to do for the root node or for leaf nodes
    if len(context) <= 1 or not node.children:
        return
    parent = context[-2]
    children = node.children
    # detach a leading whitespace child, if any
    if children[0].parser.ptype == WHITESPACE_PTYPE:
        before = (children[0],)
        children = children[1:]
    else:
        before = ()
    # detach a trailing whitespace child, if any
    if children and children[-1].parser.ptype == WHITESPACE_PTYPE:
        after = (children[-1],)
        children = children[:-1]
    else:
        after = tuple()

    if before or after:
        node.result = children
        # locate this node's position within the parent's children
        # NOTE(review): relies on `==` identifying the node among its
        # siblings — confirm Node equality semantics make this unambiguous
        for i, child in enumerate(parent.children):
            if child == node:
                break

        # merge adjacent whitespace
        prevN = parent.children[i-1] if i > 0 else None
        nextN = parent.children[i+1] if i < len(parent.children)-1 else None
        if before and prevN and prevN.parser.ptype == WHITESPACE_PTYPE:
            # fold the detached leading whitespace into the preceding sibling
            prevN.result = prevN.result + before[0].result
            before = ()
        if after and nextN and nextN.parser.ptype == WHITESPACE_PTYPE:
            # fold the detached trailing whitespace into the following sibling
            nextN.result = after[0].result + nextN.result
            after = ()

        # rebuild the parent's result with the whitespace re-inserted
        # around the node
        parent.result = parent.children[:i] + before + (node,) + after + parent.children[i+1:]


779
780
781
782
783
#######################################################################
#
# destructive transformations:
#
# - leaves may be dropped (e.g. if deemed irrelevant)
784
# - errors of dropped leaves may be lost
785
786
787
# - no promise that order will be preserved
#
#######################################################################
788
789


790
@transformation_factory(collections.abc.Callable)
def lstrip(context: List[Node], condition: Callable = is_expendable):
    """Recursively removes all leading child-nodes that fulfill a given condition."""
    node = context[-1]
    i = 1
    # repeat as long as the previous pass removed at least one child
    while i > 0 and node.children:
        # descend into the (current) first child first, so that material
        # which becomes "leading" after deeper stripping is handled as well
        lstrip(context + [node.children[0]], condition)
        i, L = 0, len(node.children)
        # count leading children that satisfy the condition ...
        while i < L and condition(context + [node.children[i]]):
            i += 1
        if i > 0:
            # ... and drop them
            node.result = node.children[i:]


804
@transformation_factory(collections.abc.Callable)
def rstrip(context: List[Node], condition: Callable = is_expendable):
    """Recursively removes all trailing nodes that fulfill a given condition."""
    node = context[-1]
    i, L = 0, len(node.children)
    # repeat as long as the previous pass removed at least one child
    while i < L and node.children:
        # descend into the (current) last child first, so that material
        # which becomes "trailing" after deeper stripping is handled as well
        rstrip(context + [node.children[-1]], condition)
        L = len(node.children)
        i = L
        # count trailing children that satisfy the condition ...
        while i > 0 and condition(context + [node.children[i-1]]):
            i -= 1
        if i < L:
            # ... and drop them
            node.result = node.children[:i]


819
@transformation_factory(collections.abc.Callable)
def strip(context: List[Node], condition: Callable = is_expendable):
    """Removes leading and trailing child-nodes that fulfill a given condition."""
    for trim in (lstrip, rstrip):
        trim(context, condition)


826
@transformation_factory  # (slice)
def keep_children(context: List[Node], section: slice = slice(None)):
    """Keeps only those child-nodes that fall into the given slice of the
    result field; all other children are dropped. Leaf nodes are left alone."""
    nd = context[-1]
    if not nd.children:
        return
    nd.result = nd.children[section]
832
833


834
@transformation_factory(collections.abc.Callable)
def keep_children_if(context: List[Node], condition: Callable):
    """Keeps only those children for which `condition()` returns `True`;
    all other children are removed."""
    node = context[-1]
    if node.children:
        node.result = tuple(c for c in node.children if condition(context + [c]))


842
@transformation_factory(collections.abc.Set)
def keep_tokens(context: List[Node], tokens: AbstractSet[str]=frozenset()):
    """Keeps only those immediate descendants of a node that are tokens
    among the given set ``tokens``; all other children are removed.
    If ``tokens`` is the empty set, all tokens are kept."""
    # NOTE(review): assumes is_token treats an empty token set as matching
    # any token — confirm against is_token's definition
    keep_children_if(context, partial(is_token, tokens=tokens))


850
@transformation_factory(collections.abc.Set)
def keep_nodes(context: List[Node], tag_names: AbstractSet[str]):
    """Keeps only those children whose tag name is in ``tag_names``;
    all other children are removed."""
    keep_children_if(context, partial(is_one_of, tag_name_set=tag_names))


@transformation_factory
def keep_content(context: List[Node], regexp: str):
    """Keeps only those children whose content matches ``regexp``;
    all other children are removed."""
    keep_children_if(context, partial(has_content, regexp=regexp))


862
@transformation_factory(collections.abc.Callable)
def remove_children_if(context: List[Node], condition: Callable):
    """Removes all children for which `condition()` returns `True`.

    The node's result is rebuilt from the children that do *not* satisfy
    ``condition``; leaf nodes are left untouched.
    """
    # cleanup: a stray dead `pass` statement at the end of the function
    # body has been removed
    node = context[-1]
    if node.children:
        node.result = tuple(c for c in node.children if not condition(context + [c]))
869

eckhart's avatar
eckhart committed
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
# @transformation_factory(Callable)
# def remove_children(context: List[Node],
#                     condition: Callable = TRUE_CONDITION,
#                     section: slice = slice(None)):
#     """Removes all nodes from a slice of the result field if the function
#     `condition(child_node)` evaluates to `True`."""
#     node = context[-1]
#     if node.children:
#         c = node.children
#         N = len(c)
#         rng = range(*section.indices(N))
#         node.result = tuple(c[i] for i in range(N)
#                             if i not in rng or not condition(context + [c[i]]))
#         # selection = []
#         # for i in range(N):
#         #     context.append(c[i])
#         #     if not i in rng or not condition(context):
#         #         selection.append(c[i])
#         #     context.pop()
#         # if len(selection) != c:
#         #     node.result = tuple(selection)
891
892


893
894
# Ready-made transformations built from the generic removal helpers above.

# drops all whitespace children
remove_whitespace = remove_children_if(is_whitespace)
# partial(remove_children_if, condition=is_whitespace)
# drops all empty children
remove_empty = remove_children_if(is_empty)
# drops children that are both empty and anonymous
remove_anonymous_empty = remove_children_if(lambda ctx: is_empty(ctx) and is_anonymous(ctx))
# drops "expendable" children (as defined by is_expendable)
remove_expendables = remove_children_if(is_expendable)
# partial(remove_children_if, condition=is_expendable)
# drops children that are both anonymous and expendable
remove_anonymous_expendables = remove_children_if(lambda ctx: is_anonymous(ctx)
                                                  and is_expendable(ctx))
# drops anonymous token children
remove_anonymous_tokens = remove_children_if(lambda ctx: is_token(ctx) and is_anonymous(ctx))
# remove_first = apply_if(keep_children(slice(1, None)), lambda ctx: len(ctx[-1].children) > 1)
# remove_last = apply_if(keep_children(slice(None, -1)), lambda ctx: len(ctx[-1].children) > 1)
# remove_brackets = apply_if(keep_children(slice(1, -1)), lambda ctx: len(ctx[-1].children) >= 2)
# keeps every second child (indices 0, 2, 4, ...), dropping the operators in between
remove_infix_operator = keep_children(slice(0, None, 2))
# NOTE(review): keep_children(slice(0)) keeps children[:0], i.e. an *empty*
# tuple, so a sole child is dropped entirely — confirm this is intended
remove_single_child = apply_if(keep_children(slice(0)), lambda ctx: len(ctx[-1].children) == 1)
907
908


909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
def remove_first(context: List[Node]):
    """Removes the first non-whitespace child."""
    node = context[-1]
    if not node.children:
        return
    for idx, nd in enumerate(node.children):
        if nd.parser.ptype != WHITESPACE_PTYPE:
            # splice the child out, keeping everything before and after it
            node.result = node.children[:idx] + node.children[idx + 1:]
            return
    # all children are whitespace: nothing to remove


def remove_last(context: List[Node]):
    """Removes the last non-whitespace child."""
    node = context[-1]
    if not node.children:
        return
    for ridx, nd in enumerate(reversed(node.children)):
        if nd.parser.ptype != WHITESPACE_PTYPE:
            # translate the reversed index back to a forward position
            pos = len(node.children) - ridx - 1
            node.result = node.children[:pos] + node.children[pos + 1:]
            return
    # all children are whitespace: nothing to remove


def remove_brackets(context: List[Node]):
    """Removes the first and the last non-whitespace child."""
    for trim in (remove_first, remove_last):
        trim(context)


940
@transformation_factory(collections.abc.Set)
def remove_tokens(context: List[Node], tokens: AbstractSet[str]=frozenset()):
    """Removes any among a particular set of tokens from the immediate
    descendants of a node. If ``tokens`` is the empty set, all tokens
    are removed."""
    remove_children_if(context, lambda ctx: is_token(ctx, tokens=tokens))
946
947


948
@transformation_factory(collections.abc.Set)
def remove_nodes(context: List[Node], tag_names: AbstractSet[str]):
    """Removes all children whose tag name is in ``tag_names``."""
    remove_children_if(context, lambda ctx: is_one_of(ctx, tag_name_set=tag_names))
952
953
954


@transformation_factory
def remove_content(context: List[Node], regexp: str):
    """Removes all children whose string content matches ``regexp``."""
    remove_children_if(context, lambda ctx: has_content(ctx, regexp=regexp))
958
959
960
961


########################################################################
#
962
# AST semantic validation functions (EXPERIMENTAL!!!)
963
964
965
#
########################################################################

966
@transformation_factory(collections.abc.Callable)
def error_on(context: List[Node], condition: Callable, error_msg: str = ''):
    """
    Checks for `condition`; adds an error message if condition is not met.

    :param context: the list of ancestors plus the node itself
    :param condition: a function taking the context; a falsy result attaches
        an error to the node
    :param error_msg: optional message; a "%s" placeholder is filled with the
        node's tag name. If empty, a generic message naming the condition is used.
    """
    node = context[-1]
    if not condition(context):
        if error_msg:
            # bug fix: test membership with `"%s" in error_msg` instead of
            # `error_msg.find("%s") > 0`, which wrongly skipped formatting
            # when the placeholder appeared at position 0
            node.add_error(error_msg % node.tag_name if "%s" in error_msg else error_msg)
        else:
            # derive a readable name for the failed condition
            cond_name = condition.__name__ if hasattr(condition, '__name__') \
                        else condition.__class__.__name__ if hasattr(condition, '__class__') \
                        else '<unknown>'
            node.add_error("transform.error_on: Failed to meet condition " + cond_name)


982
@transformation_factory(collections.abc.Callable)
def warn_on(context: List[Node], condition: Callable, warning: str = ''):
    """
    Checks for `condition`; adds a warning message if condition is not met.

    :param context: the list of ancestors plus the node itself
    :param condition: a function taking the context; a falsy result attaches
        a warning to the node
    :param warning: optional message; a "%s" placeholder is filled with the
        node's tag name. If empty, a generic message naming the condition is used.
    """
    node = context[-1]
    if not condition(context):
        if warning:
            # bug fix: test membership with `"%s" in warning` instead of
            # `warning.find("%s") > 0`, which wrongly skipped formatting
            # when the placeholder appeared at position 0
            node.add_error(warning % node.tag_name if "%s" in warning else warning,
                           Error.WARNING)
        else:
            # derive a readable name for the failed condition
            cond_name = condition.__name__ if hasattr(condition, '__name__') \
                        else condition.__class__.__name__ if hasattr(condition, '__class__') \
                        else '<unknown>'
            node.add_error("transform.warn_on: Failed to meet condition " + cond_name,
                           Error.WARNING)
998
999


1000
# bug fix: error_on calls the condition with the whole context (a list of
# nodes), not a single node, so the lambda must inspect the last element;
# the previous `lambda nd: nd.children` raised AttributeError on the list
assert_has_children = error_on(lambda ctx: ctx[-1].children, 'Element "%s" has no children')
1001
1002
1003


@transformation_factory
def assert_content(context: List[Node], regexp: str):
    """Attaches an error to the tree's root if the node's content does not
    match ``regexp``."""
    nd = context[-1]
    if has_content(context, regexp):
        return
    msg = 'Element "%s" violates %s on %s' % (nd.parser.name, str(regexp), nd.content)
    context[0].new_error(nd, msg)
1009

1010

1011
@transformation_factory(collections.abc.Set)
def require(context: List[Node], child_tags: AbstractSet[str]):
    """Attaches an error to the tree's root for every immediate child whose
    tag name is *not* in ``child_tags``."""
    node = context[-1]
    offenders = (nd for nd in node.children if nd.tag_name not in child_tags)
    for nd in offenders:
        context[0].new_error(node, 'Element "%s" is not allowed inside "%s".' %
                             (nd.parser.name, node.parser.name))
1018
1019


1020
@transformation_factory(collections.abc.Set)
def forbid(context: List[Node], child_tags: AbstractSet[str]):
    """Attaches an error to the tree's root for every immediate child whose
    tag name *is* in ``child_tags``."""
    node = context[-1]
    offenders = (nd for nd in node.children if nd.tag_name in child_tags)
    for nd in offenders:
        context[0].new_error(node, 'Element "%s" cannot be nested inside "%s".' %
                             (nd.parser.name, node.parser.name))
di68kap's avatar
di68kap committed
1027
1028
1029
1030
1031


def peek(context: List[Node]):
    """For debugging: Prints the last node in the context as S-expression."""
    tail = context[-1]
    print(tail.as_sxpr())