"""syntaxtree.py - syntax tree classes and transformation functions for 
converting the concrete into the abstract syntax tree for DHParser

Copyright 2016  by Eckhart Arnold (arnold@badw.de)
                Bavarian Academy of Sciences an Humanities (badw.de)

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied.  See the License for the specific language governing
permissions and limitations under the License.
"""

import copy
import inspect
import itertools
import os
from functools import partial, singledispatch

try:
    import regex as re
except ImportError:
    import re

from typing import AbstractSet, Any, ByteString, Callable, cast, Container, Iterator, List, \
    NamedTuple, Sequence, Union, Text, Tuple

from DHParser.toolkit import log_dir, expand_table, line_col, smart_list


__all__ = ['WHITESPACE_PTYPE',
           'TOKEN_PTYPE',
           'ZOMBIE_PARSER',
           'Error',
           'Node',
           'TransformationFunc',
           'key_parser_name',
           'key_tag_name',
           'traverse',
           'no_transformation',
           'replace_by_single_child',
           'reduce_single_child',
           'replace_parser',
           'is_whitespace',
           'is_empty',
           'is_expendable',
           'is_token',
           'remove_children_if',
           'remove_whitespace',
           'remove_expendables',
           'remove_tokens',
           'flatten',
           'remove_enclosing_delimiters',
           'forbid',
           'require',
           'assert_content']


class MockParser:
    """
    MockParser objects can be used to reconstruct syntax trees from a
    serialized form like S-expressions or XML. Mock objects are needed,
    because Node objects require a parser object for instantiation.
    Mock objects have just enough properties to serve that purpose.

    Mock objects should not be used for anything other than
    syntax tree (re-)construction. In all other cases where a parser
    object substitute is needed, choose the singleton ZOMBIE_PARSER.
    """
    def __init__(self, name='', ptype='', pbases=frozenset()):
        assert not ptype or ptype[0] == ':'
        self.name = name
        self.ptype = ptype or ':' + self.__class__.__name__
        # self.pbases = pbases or {cls.__name__ for cls in inspect.getmro(self.__class__)}

    def __str__(self):
        return self.name or self.ptype


class ZombieParser(MockParser):
    """
    Serves as a substitute for a Parser instance.

    ``ZombieParser`` is the class of the singleton object
    ``ZOMBIE_PARSER``. The ``ZOMBIE_PARSER`` has a name and can be
    called, but it never matches. It serves as a substitute where only
    these (or one of these properties) is needed, but no real Parser-
    object is instantiated.
    """
    alive = False

    def __init__(self):
        super(ZombieParser, self).__init__("__ZOMBIE__")
        assert not self.__class__.alive, "There can be only one!"
        assert self.__class__ == ZombieParser, "No derivatives, please!"
        self.__class__.alive = True

    def __copy__(self):
        return self

    def __deepcopy__(self, memo):
        return self

    def __call__(self, text):
        """Better call Saul ;-)"""
        return None, text


ZOMBIE_PARSER = ZombieParser()


# # Python 3.6:
# class Error(NamedTuple):
#     pos: int
#     msg: str
Error = NamedTuple('Error', [('pos', int), ('msg', str)])

ResultType = Union[Tuple['Node', ...], str]
SloppyResultType = Union[Tuple['Node', ...], 'Node', str, None]


class Node:
    """
    Represents a node in the concrete or abstract syntax tree.

    Attributes:
        tag_name (str):  The name of the node, which is either its
            parser's name or, if that is empty, the parser's class name
        result (str or tuple):  The result of the parser which
            generated this node, which can be either a string or a
            tuple of child nodes.
        children (tuple):  The tuple of child nodes or an empty tuple
            if there are no child nodes. READ ONLY!
        parser (Parser):  The parser which generated this node.
            WARNING: In case you use mock syntax trees for testing or
            parser replacement during the AST-transformation: DO NOT
            rely on this being a real parser object in any phase after
            parsing (i.e. AST-transformation and compiling), for
            example by calling ``isinstance(node.parser, ...)``.
        errors (list):  A list of parser- or compiler-errors:
            tuple(position, string) attached to this node
        len (int):  The full length of the node's string result if the
            node is a leaf node or, otherwise, the concatenated string
            results of its descendants. The figure always represents
            the length before AST-transformation and will never change
            through AST-transformation. READ ONLY!
        pos (int):  the position of the node within the parsed text.

            The value of ``pos`` is -1 (invalid) by default.
            Setting this value will set the positions of all child
            nodes relative to this value.

            To set the pos values of all nodes in a syntax tree, the
            pos value of the root node should be set to 0 right
            after parsing.

            Other than that, this value should be considered READ ONLY.
            At any rate, it should only be reassigned during the
            parsing stage and never during or after the
            AST-transformation.
    """

    def __init__(self, parser, result: SloppyResultType) -> None:
        """Initializes the ``Node``-object with the ``Parser``-Instance
        that generated the node and the parser's result.
        """
        self._result = ''  # type: ResultType
        self._errors = []  # type: List[str]
        self._children = ()  # type: Tuple['Node', ...]
        # self.pos: int  = 0  # continuous updating of pos values
        self._pos = -1  # type: int
        self.result = result
        self.parser = parser or ZOMBIE_PARSER
        # compute the length only after the result has been assigned,
        # otherwise it would always be zero
        self._len = len(self.result) if not self.children else \
            sum(child._len for child in self.children)  # type: int
        self.error_flag = any(r.error_flag for r in self.children) \
            if self.children else False  # type: bool

    def __str__(self):
        if self.children:
            return "".join(str(child) for child in self.children)
        return str(self.result)

    def __eq__(self, other):
        # return str(self.parser) == str(other.parser) and self.result == other.result
        return self.tag_name == other.tag_name and self.result == other.result

    def __hash__(self):
        return hash(self.tag_name)

    def __deepcopy__(self, memodict={}):
        result = copy.deepcopy(self.result)
        other = Node(self.parser, result)
        other._pos = self._pos
        return other

    @property
    def tag_name(self) -> str:
        return self.parser.name or self.parser.ptype
        # ONLY FOR DEBUGGING: return self.parser.name + ':' + self.parser.ptype

    @property
    def result(self) -> ResultType:
        return self._result

    @result.setter
    def result(self, result: SloppyResultType):
        # # made obsolete by static type checking with mypy is done
        # assert ((isinstance(result, tuple) and all(isinstance(child, Node) for child in result))
        #         or isinstance(result, Node)
        #         or isinstance(result, str)), str(result)
        self._result = (result,) if isinstance(result, Node) else result or ''
        self._children = cast(Tuple['Node', ...], self._result) \
            if isinstance(self._result, tuple) else cast(Tuple['Node', ...], ())

    @property
    def children(self) -> Tuple['Node', ...]:
        return self._children

    @property
    def len(self) -> int:
        # DEBUGGING:  print(self.tag_name, str(self.pos), str(self._len), str(self)[:10].replace('\n','.'))
        return self._len

    @property
    def pos(self) -> int:
        assert self._pos >= 0, "position value not initialized!"
        return self._pos

    @pos.setter
    def pos(self, pos: int):
        # assert isinstance(pos, int)
        self._pos = pos
        offset = 0
        for child in self.children:
            child.pos = pos + offset
            offset += child.len

    @property
    def errors(self) -> List[Error]:
        return [Error(self.pos, err) for err in self._errors]

    def _tree_repr(self, tab, openF, closeF, dataF=lambda s: s) -> str:
        """
        Generates a tree representation of this node and its children
        in string form.

        The kind of tree-representation is determined by several
        function parameters. This could be an XML-representation or a
        lisp-like S-expression.

        Args:
            tab (str):  The indentation string, e.g. '\t' or '    '
            openF:  (Node->str) A function that returns an opening
                string (e.g. an XML-tag_name) for a given node
            closeF:  (Node->str) A function that returns a closeF
                string (e.g. an XML-tag_name) for a given node.
            dataF:  (str->str) A function that filters the data string
                before printing, e.g. to add quotation marks

        Returns (str):
            A string that contains a (serialized) tree representation
            of the node and its children.
        """
        head = openF(self)
        tail = closeF(self)

        if not self.result:
            return head + tail

        head = head + '\n'  # place the head, tail and content
        tail = '\n' + tail  # of the node on different lines

        if self.children:
            content = []
            for child in self.children:
                subtree = child._tree_repr(tab, openF, closeF, dataF).split('\n')
                content.append('\n'.join((tab + s) for s in subtree))
            return head + '\n'.join(content) + tail

        res = cast(str, self.result)  # safe, because if there are no children, result is a string
        if head[0] == "<" and res.find('\n') < 0:
            # for XML: place tags for leaf-nodes on one line if possible
            return head[:-1] + self.result + tail[1:]
        else:
            return head + '\n'.join([tab + dataF(s) for s in res.split('\n')]) + tail

    def as_sexpr(self, src=None) -> str:
        """
        Returns content as S-expression, i.e. in lisp-like form.

        Args:
            src:  The source text or `None`. In case the source text is
                given the position of the element in the text will be
                reported as line and column.
            prettyprint(bool):  True (default), if pretty printing
                of leaf nodes shall be applied for better readability.
        """

        def opening(node) -> str:
            s = '(' + node.tag_name
            # s += " '(pos %i)" % node.pos
            if src:
                s += " '(pos %i  %i %i)" % (node.pos, *line_col(src, node.pos))
            if node.errors:
                s += " '(err '(%s))" % ' '.join(str(err).replace('"', r'\"')
                                                for err in node.errors)
            return s

        def pretty(s):
            return '"%s"' % s if s.find('"') < 0 \
                else "'%s'" % s if s.find("'") < 0 \
                else '"%s"' % s.replace('"', r'\"')

        return self._tree_repr('    ', opening, lambda node: ')', pretty)  # pretty if prettyprint else lambda s: s)

    def as_xml(self, src=None) -> str:
        """
        Returns content as XML-tree.

        Args:
            src:  The source text or `None`. In case the source text is
                given the position will also be reported as line and
                column.
        """

        def opening(node) -> str:
            s = '<' + node.tag_name
            # s += ' pos="%i"' % node.pos
            if src:
                s += ' line="%i" col="%i"' % line_col(src, node.pos)
            if node.errors:
                s += ' err="%s"' % ''.join(str(err).replace('"', r'\"') for err in node.errors)
            s += ">"
            return s

        def closing(node):
            s = '</' + node.tag_name + '>'
            return s

        return self._tree_repr('    ', opening, closing)

    def add_error(self, error_str) -> 'Node':
        self._errors.append(error_str)
        self.error_flag = True
        return self

    def propagate_error_flags(self):
        """ Recursively propagates error flags set on child nodes to its
        parents. This can be used if errors are added to descendant 
        nodes after syntaxtree construction, i.e. in the compile phase.
        """
        for child in self.children:
            child.propagate_error_flags()
            self.error_flag |= child.error_flag

    def collect_errors(self, clear_errors=False) -> List[Error]:
        """
        Returns all errors of this node or any child node in the form
        of a list of tuples (position, error_message), where position
        is always relative to this node.
        """
        errors = self.errors
        if clear_errors:
            self._errors = []
            self.error_flag = False
        if self.children:
            for child in self.children:
                errors.extend(child.collect_errors(clear_errors))
        return errors

    def log(self, log_file_name):
        st_file_name = log_file_name
        with open(os.path.join(log_dir(), st_file_name), "w", encoding="utf-8") as f:
            f.write(self.as_sexpr())

    def find(self, match_function) -> Iterator['Node']:
        """Finds nodes in the tree that match a specific criterion.

        ``find`` is a generator that yields all nodes for which the
        given ``match_function`` evaluates to True. The tree is
        traversed pre-order.

        Args:
            match_function (function): A function that takes a Node
                object as argument and returns True or False
        Yields:
            Node: all nodes of the tree for which
            ``match_function(node)`` returns True
        """
        if match_function(self):
            yield self
        else:
            for child in self.children:
                for nd in child.find(match_function):
                    yield nd

    # def range(self, match_first, match_last):
    #     """Iterates over the range of nodes, starting from the first
    #     node for which ``match_first`` becomes True until the first node
    #     after this one for which ``match_last`` becomes true or until
    #     the end if it never does.
    #
    #     Args:
    #         match_first (function): A function  that takes as Node
    #             object as argument and returns True or False
    #         match_last (function): A function  that takes as Node
    #             object as argument and returns True or False
    #     Yields:
    #         Node: all nodes of the tree for which
    #         ``match_function(node)`` returns True
    #     """


    def navigate(self, path):
        """Yields the results of all descendant elements matched by
        ``path``, e.g.
        'd/s' yields 'l' from (d (s l) (e (r x1) (r x2)))
        'e/r' yields 'x1', then 'x2'
        'e'   yields (r x1) (r x2)

        Args:
            path (str):  The path of the object, e.g. 'a/b/c'. The
                components of ``path`` can be regular expressions

        Returns:
            The object at the path, either a string or a Node or
            ``None``, if the path did not match.
        """
        def nav(node, pl):
            if pl:
                # flatten the iterators returned by the recursive calls
                return itertools.chain.from_iterable(
                    nav(child, pl[1:]) for child in node.children
                    if re.match(pl[0], child.tag_name))
            else:
                return node.result,
        return nav(self, path.split('/'))

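# Illustrative sketch (not part of the original module): assembling a small
# syntax tree by hand from MockParser objects and serializing it. The node
# names 'sentence' and 'word' are invented for this example.
#
#     greeting = Node(MockParser('word'), 'Hello')
#     addressee = Node(MockParser('word'), 'World')
#     sentence = Node(MockParser('sentence'), (greeting, addressee))
#     sentence.pos = 0              # initialize positions, as done after parsing
#     print(sentence.as_sexpr())    # sentence.as_xml() yields the XML form
#     # expected output:
#     # (sentence
#     #     (word
#     #         "Hello"
#     #     )
#     #     (word
#     #         "World"
#     #     )
#     # )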

########################################################################
#
# syntax tree transformation functions
#
########################################################################


TransformationFunc = Union[Callable[[Node], Any], partial]


def transformation_factory(t=None):
    """Creates factory functions transformer-functions with more than
    one parameter like ``remove_tokens(node, tokens)``. Decorating this
    function with ``transformation_factory`` creates a function factory with
    the same name, but without the ``node`` paramter, e.g.
    ``remove_tokens(tokens)`` which returns a transformerfunction with
    only one parameter (i.e. ``node``), which can be used in processing
    dictionaries, thus avoiding explicit lamba- or partial-functions
    in the table.

    Additionally it converts a list of parameters into a
    collection, if the decorated function has exaclty two arguments and
    the second argument is of type Collection.

    Main benefit is reability of processing tables.
    Example:
        trans_table = { 'expression': remove_tokens('+', '-') }
    rather than:
        trans_table = { 'expression': partial(remove_tokens, tokens={'+', '-'}) }

    Usage:
        @transformation_factory(AbtractSet[str])
        def remove_tokens(node, tokens):
            ...
    or, alternatively:
        @transformation_factory
        def remove_tokens(node, tokens: AbstractSet[str]):
            ...
    """

    def decorator(f):
        sig = inspect.signature(f)
        params = list(sig.parameters.values())[1:]
        if len(params) == 0:
            return f  # '@transformer' not needed w/o free parameters
        assert t or params[0].annotation != params[0].empty, \
            "No type information on second parameter found! Please, use type " \
            "annotation or provide the type information via the transformer-decorator."
        p1type = t or params[0].annotation
        f = singledispatch(f)
        if len(params) == 1 and issubclass(p1type, Container) and not issubclass(p1type, Text) \
                and not issubclass(p1type, ByteString):
            def gen_special(*args):
                c = set(args) if issubclass(p1type, AbstractSet) else \
                    list(args) if issubclass(p1type, Sequence) else args
                d = {params[0].name: c}
                return partial(f, **d)

            f.register(p1type.__args__[0], gen_special)

        def gen_partial(*args, **kwargs):
            d = {p.name: arg for p, arg in zip(params, args)}
            d.update(kwargs)
            return partial(f, **d)

        f.register(p1type, gen_partial)
        return f

    if isinstance(t, type(lambda: 1)):
        # assume transformation_factory has been used as decorator w/o parameters
        func = t
        t = None
        return decorator(func)
    else:
        return decorator


WHITESPACE_PTYPE = ':Whitespace'
TOKEN_PTYPE = ':Token'


def key_parser_name(node) -> str:
    return node.parser.name


def key_tag_name(node) -> str:
    return node.tag_name


def traverse(root_node, processing_table, key_func=key_tag_name) -> None:
    """Traverses the syntax tree starting with the given ``root_node``
    depth first and applies the sequences of callback-functions
    registered in the ``processing_table``-dictionary.

    The most important use case is the transformation of a concrete
    syntax tree into an abstract tree (AST). But it is also imaginable
    to employ tree-traversal for the semantic analysis of the AST.

    In order to assign sequences of callback-functions to nodes, a
    dictionary ("processing table") is used. The keys usually represent
    tag names, but any other key function is possible. There exist
    three special keys:
        '+': always called (before any other processing function)
        '*': called for those nodes for which no (other) processing
             function appears in the table
        '~': always called (after any other processing function)

    Args:
        root_node (Node): The root-node of the syntax tree to be traversed 
        processing_table (dict): node key -> sequence of functions that
            will be applied to matching nodes in order. This dictionary
            is interpreted as a ``compact_table``. See 
            ``toolkit.expand_table`` or ``EBNFCompiler.EBNFTransTable``
        key_func (function): A mapping key_func(node) -> keystr. The default
            key_func yields node.tag_name.
            
    Example:
        table = { "term": [replace_by_single_child, flatten], 
            "factor, flowmarker, retrieveop": replace_by_single_child }
        traverse(node, table)
    """
    # commented, because this approach is too error prone!
    # def funclist(call):
    #     return [as_partial(func) for func in smart_list(call)]

    # normalize processing_table entries by turning single values
    # into lists with a single value
    table = {name: smart_list(call) for name, call in list(processing_table.items())}
    table = expand_table(table)

    def traverse_recursive(node):
        if node.children:
            for child in node.result:
                traverse_recursive(child)
                node.error_flag |= child.error_flag  # propagate error flag
        sequence = table.get('+', []) + \
                   table.get(key_func(node), table.get('*', [])) + \
                   table.get('~', [])
        # '+' always called (before any other processing function)
        # '*' called for those nodes for which no (other) processing function appears in the table
        # '~' always called (after any other processing function)
        for call in sequence:
            call(node)

    traverse_recursive(root_node)

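# Illustrative sketch (not part of the original module): a minimal processing
# table for ``traverse``. The node keys 'term' and 'expression' as well as
# the token set are invented; a real table depends on the grammar at hand.
#
#     ast_table = {
#         '+': remove_expendables,                  # applied first to every node
#         'term': [replace_by_single_child, flatten],
#         'expression': remove_tokens('+', '-'),
#         '*': replace_by_single_child,             # fallback for unlisted keys
#     }
#     traverse(concrete_syntax_tree, ast_table)     # transforms the tree in place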

def no_transformation(node):
    pass


# ------------------------------------------------
#
# rearranging transformations:
#     - tree may be rearranged (e.g. flattened)
#     - nodes that are not leaves may be dropped
#     - order is preserved
#     - all leaves are kept
#
# ------------------------------------------------


def replace_by_single_child(node):
    """Remove single branch node, replacing it by its immediate descendant.
    (In case the descendant's name is empty (i.e. anonymous) the
    name of this node's parser is kept.)
    """
    if node.children and len(node.result) == 1:
        if not node.result[0].parser.name:
            node.result[0].parser.name = node.parser.name
        node.parser = node.result[0].parser
        node._errors.extend(node.result[0].errors)
        node.result = node.result[0].result


def reduce_single_child(node):
    """Reduce a single branch node, by transferring the result of its
    immediate descendant to this node, but keeping this node's parser entry.
    """
    if node.children and len(node.result) == 1:
        node._errors.extend(node.result[0].errors)
        node.result = node.result[0].result

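# Illustrative sketch (not part of the original module): the difference between
# the two reductions applied to a branch node with a single child. The tag
# names 'factor' and 'number' are invented.
#
#     tree = Node(MockParser('factor'), (Node(MockParser('number'), '42'),))
#     replace_by_single_child(tree)   # tree now carries the child's parser:
#                                     # (number "42")
#
#     tree = Node(MockParser('factor'), (Node(MockParser('number'), '42'),))
#     reduce_single_child(tree)       # tree keeps its own parser:
#                                     # (factor "42")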

@transformation_factory
def replace_parser(node, name: str):
    """Replaces the parser of a Node with a mock parser with the given
    name.

    Parameters:
        name(str): "NAME:PTYPE" of the surrogate. The ptype is optional
        node(Node): The node where the parser shall be replaced
    """
    name, ptype = (name.split(':') + [''])[:2]
    node.parser = MockParser(name, ptype)


def flatten(node):
    """Recursively flattens all unnamed sub-nodes, in case there is more
    than one sub-node present. Flattening means that
    wherever a node has child nodes, the child nodes are inserted in place
    of the node. In other words, all leaves of this node and its child nodes
    are collected in-order as direct children of this node.
    This is meant to achieve these kinds of structural transformation:
        (1 (+ 2) (+ 3))    ->   (1 + 2 + 3)
        (1 (+ (2 + (3))))  ->   (1 + 2 + 3)

    Warning: Use with care. Due to its recursive nature, flattening can
    have unexpected side-effects.
    """
    if node.children:
        new_result = []
        for child in node.children:
            if not child.parser.name and child.children:
                assert child.children, node.as_sexpr()
                flatten(child)
                new_result.extend(child.result)
            else:
                new_result.append(child)
        node.result = tuple(new_result)


def collapse(node):
    """Collapses all sub-nodes by replacing the node's result with it's
    string representation.
    """
    node.result = str(node)

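# Illustrative sketch (not part of the original module): flattening an anonymous
# nested structure. The ptype ':Series' stands in for an unnamed parser here.
#
#     one = Node(MockParser('number'), '1')
#     plus_two = Node(MockParser('', ':Series'),
#                     (Node(MockParser('operator'), '+'),
#                      Node(MockParser('number'), '2')))
#     expr = Node(MockParser('expression'), (one, plus_two))
#     flatten(expr)
#     # expr's children are now: (number "1") (operator "+") (number "2")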

# ------------------------------------------------
#
# destructive transformations:
#     - tree may be rearranged (flattened),
#     - order is preserved
#     - but (irrelevant) leaves may be dropped
#     - errors of dropped leaves will be lost
#
# ------------------------------------------------


def is_whitespace(node):
    """Returns ``True``, if the node is whitespace (which may include
    comments defined with the ``@comment``-directive)."""
    return node.parser.ptype == WHITESPACE_PTYPE


def is_empty(node):
    return not node.result


def is_expendable(node):
    return is_empty(node) or is_whitespace(node)


def is_token(node, tokens: AbstractSet[str] = frozenset()) -> bool:
    return node.parser.ptype == TOKEN_PTYPE and (not tokens or node.result in tokens)


@transformation_factory(Callable)  # @singledispatch
def remove_children_if(node, condition):
    """Removes all nodes from the result field if the function 
    ``condition(child_node)`` evaluates to ``True``."""
    if node.children:
        node.result = tuple(c for c in node.children if not condition(c))


remove_whitespace = partial(remove_children_if, condition=is_whitespace)
remove_expendables = partial(remove_children_if, condition=is_expendable)


# remove_scanner_tokens = partial(remove_children_if, condition=is_scanner_token)


@transformation_factory
def remove_tokens(node, tokens: AbstractSet[str] = frozenset()):
    """Removes any among a particular set of tokens from the immediate
    descendants of a node. If ``tokens`` is the empty set, all tokens
    are removed.
    """
    remove_children_if(node, partial(is_token, tokens=tokens))

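# Illustrative sketch (not part of the original module): thanks to
# ``transformation_factory``, the same transformer can be created in two ways;
# ``some_node`` stands for any Node instance.
#
#     strip_ops = remove_tokens('+', '-')                     # factory call
#     strip_ops = partial(remove_tokens, tokens={'+', '-'})   # equivalent partial
#     strip_ops(some_node)   # removes '+' and '-' tokens among the node's children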

def remove_enclosing_delimiters(node):
    """Removes any enclosing delimiters from a structure (e.g. quotation marks
    from a literal or braces from a group).
    """
    if len(node.children) >= 3:
        assert not node.children[0].children and not node.children[-1].children, node.as_sexpr()
        node.result = node.result[1:-1]

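# Illustrative sketch (not part of the original module): stripping the quotation
# marks from a parsed string literal. The tag names are invented.
#
#     literal = Node(MockParser('literal'),
#                    (Node(MockParser('', ':Token'), '"'),
#                     Node(MockParser('text'), 'hello'),
#                     Node(MockParser('', ':Token'), '"')))
#     remove_enclosing_delimiters(literal)
#     # literal's only child is now: (text "hello")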

def map_content(node, func):
    """Replaces the content of the node. ``func`` takes the node
    as an argument and returns the mapped result.
    """
    node.result = func(node.result)


########################################################################
#
# AST semantic validation functions
# EXPERIMENTAL!
#
########################################################################


@transformation_factory
def require(node, child_tags: AbstractSet[str]):
    for child in node.children:
        if child.tag_name not in child_tags:
            node.add_error('Element "%s" is not allowed inside "%s".' %
                           (child.parser.name, node.parser.name))


@transformation_factory
def forbid(node, child_tags: AbstractSet[str]):
    for child in node.children:
        if child.tag_name in child_tags:
            node.add_error('Element "%s" cannot be nested inside "%s".' %
                           (child.parser.name, node.parser.name))


@transformation_factory
def assert_content(node, regex: str):
    content = str(node)
    if not re.match(regex, content):
        node.add_error('Element "%s" violates %s on %s' %
                       (node.parser.name, str(regex), content))
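# Illustrative sketch (not part of the original module): the experimental
# validation functions used in a processing table. The node keys and the
# permitted tag names are invented; real values depend on the grammar.
#
#     validation_table = {
#         'definition': require('symbol', 'expression'),
#         'directive': forbid('definition'),
#         'symbol': assert_content(r'\w+'),
#     }
#     traverse(ast, validation_table)   # adds error messages to offending nodes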