"""syntaxtree.py - syntax tree classes and transformation functions for 
converting the concrete into the abstract syntax tree for DHParser

Copyright 2016  by Eckhart Arnold (arnold@badw.de)
                Bavarian Academy of Sciences and Humanities (badw.de)

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied.  See the License for the specific language governing
permissions and limitations under the License.
"""

import abc
import copy
import inspect
import itertools
import os
from functools import partial, singledispatch
try:
    import regex as re
except ImportError:
    import re
try:
    from typing import AbstractSet, Any, ByteString, Callable, cast, Container, Dict, \
        Iterator, List, NamedTuple, Sequence, Union, Text, Tuple
except ImportError:
    from .typing34 import AbstractSet, Any, ByteString, Callable, cast, Container, Dict, \
        Iterator, List, NamedTuple, Sequence, Union, Text, Tuple

from DHParser.toolkit import log_dir, expand_table, line_col, smart_list


__all__ = ['WHITESPACE_PTYPE',
           'TOKEN_PTYPE',
           'ZOMBIE_PARSER',
           'ParserBase',
           'Error',
           'Node',
           'TransformationFunc',
           'transformation_factory',
           'key_parser_name',
           'key_tag_name',
           'traverse',
           'no_transformation',
           'replace_by_single_child',
           'reduce_single_child',
           'replace_parser',
           'is_whitespace',
           'is_empty',
           'is_expendable',
           'is_token',
           'remove_children_if',
           'remove_whitespace',
           'remove_empty',
           'remove_expendables',
           'remove_tokens',
           'flatten',
           'remove_enclosing_delimiters',
           'forbid',
           'require',
           'assert_content']


class ParserBase:
    """
    ParserBase is the base class for all real and mock parser classes.
    It is defined here, because Node objects require a parser object
    for instantiation.
    """
    def __init__(self, name=''):  # , pbases=frozenset()):
        self.name = name  # type: str
        self._ptype = ':' + self.__class__.__name__  # type: str

    def __str__(self):
        return self.name or self.ptype

    @property
    def ptype(self) -> str:
        return self._ptype


class MockParser(ParserBase):
    """
    MockParser objects can be used to reconstruct syntax trees from a
    serialized form like S-expressions or XML. Mock objects can mimic
    different parser types by assigning them a ptype on initialization.
    
    Mock objects should not be used for anything other than 
    syntax tree (re-)construction. In all other cases where a parser
    object substitute is needed, choose the singleton ZOMBIE_PARSER.
    """
    def __init__(self, name='', ptype=''):  # , pbases=frozenset()):
        assert not ptype or ptype[0] == ':'
        super(MockParser, self).__init__(name)
        self.name = name
        self._ptype = ptype or ':' + self.__class__.__name__

    def __str__(self):
        return self.name or self.ptype


class ZombieParser(MockParser):
    """
    Serves as a substitute for a Parser instance.

    ``ZombieParser`` is the class of the singleton object
    ``ZOMBIE_PARSER``. The ``ZOMBIE_PARSER`` has a name and can be
    called, but it never matches. It serves as a substitute wherever
    only these properties (or one of them) are needed, but no real
    Parser-object is instantiated.
    """
    alive = False

    def __init__(self):
        super(ZombieParser, self).__init__("__ZOMBIE__")
        assert not self.__class__.alive, "There can be only one!"
        assert self.__class__ == ZombieParser, "No derivatives, please!"
        self.__class__.alive = True

    def __copy__(self):
        return self

    def __deepcopy__(self, memo):
        return self

    def __call__(self, text):
        """Better call Saul ;-)"""
        return None, text


ZOMBIE_PARSER = ZombieParser()


# # Python 3.6:
# class Error(NamedTuple):
#     pos: int
#     msg: str
Error = NamedTuple('Error', [('pos', int), ('msg', str)])

StrictResultType = Union[Tuple['Node', ...], str]
ResultType = Union[Tuple['Node', ...], 'Node', str, None]


class Node:
    """
    Represents a node in the concrete or abstract syntax tree.

    Attributes:
        tag_name (str):  The name of the node, which is either its
            parser's name or, if that is empty, the parser's class name
        result (str or tuple):  The result of the parser which
            generated this node, which can be either a string or a
            tuple of child nodes.
        children (tuple):  The tuple of child nodes or an empty tuple
            if there are no child nodes. READ ONLY!
        parser (Parser):  The parser which generated this node. 
            WARNING: In case you use mock syntax trees for testing or
            parser replacement during the AST-transformation: DO NOT
            rely on this being a real parser object in any phase after 
            parsing (i.e. AST-transformation and compiling), for 
            example by calling ``isinstance(node.parser, ...)``.
        errors (list):  A list of parser- or compiler-errors:
            tuple(position, string) attached to this node
        len (int):  The full length of the node's string result if the
            node is a leaf node or, otherwise, the concatenated string
            result's of its descendants. The figure always represents
            the length before AST-transformation and will never change
            through AST-transformation. READ ONLY!
        pos (int):  the position of the node within the parsed text.

            The value of ``pos`` is -1 meaning invalid by default. 
            Setting this value will set the positions of all child
            nodes relative to this value.  

            To set the pos values of all nodes in a syntax tree, the
            pos value of the root node should be set to 0 right 
            after parsing.

            Other than that, this value should be considered READ ONLY. 
            At any rate, it should only be reassigned during the
            parsing stage and never during or after the
            AST-transformation.
    """

    def __init__(self, parser, result: ResultType) -> None:
        """Initializes the ``Node``-object with the ``Parser``-Instance
        that generated the node and the parser's result.
        """
        self._result = ''  # type: StrictResultType
        self._errors = []  # type: List[str]
        self._children = ()  # type: Tuple['Node', ...]
        self.result = result
        self._len = len(self.result) if not self.children else \
            sum(child._len for child in self.children)  # type: int
        # self.pos: int  = 0  # continuous updating of pos values
        self._pos = -1  # type: int
        self.parser = parser or ZOMBIE_PARSER
        self.error_flag = any(r.error_flag for r in self.children) \
            if self.children else False  # type: bool

    def __str__(self):
        if self.children:
            return "".join(str(child) for child in self.children)
        return str(self.result)

    def __repr__(self):
        mpargs = {'name': self.parser.name, 'ptype': self.parser.ptype}
        parg = "MockParser({name}, {ptype})".format(**mpargs)
        rarg = str(self) if not self.children else \
               "(" + ", ".join(repr(child) for child in self.children) + ")"
        return "Node(%s, %s)" % (parg, rarg)

    def __eq__(self, other):
        # return str(self.parser) == str(other.parser) and self.result == other.result
        return self.tag_name == other.tag_name and self.result == other.result

    def __hash__(self):
        return hash(self.tag_name)

    def __deepcopy__(self, memodict={}):
        result = copy.deepcopy(self.result)
        other = Node(self.parser, result)
        other._pos = self._pos
        return other

    @property   # this needs to be a (dynamic) property, in case self.parser gets updated
    def tag_name(self) -> str:
        return self.parser.name or self.parser.ptype
        # ONLY FOR DEBUGGING: return self.parser.name + ':' + self.parser.ptype

    @property
    def result(self) -> StrictResultType:
        return self._result

    @result.setter
    def result(self, result: ResultType):
        # # made obsolete by static type checking with mypy
        # assert ((isinstance(result, tuple) and all(isinstance(child, Node) for child in result))
        #         or isinstance(result, Node)
        #         or isinstance(result, str)), str(result)
        self._result = (result,) if isinstance(result, Node) else result or ''
        self._children = cast(Tuple['Node', ...], self._result) \
            if isinstance(self._result, tuple) else cast(Tuple['Node', ...], ())

    @property
    def children(self) -> Tuple['Node', ...]:
        return self._children

    @property
    def len(self) -> int:
        # DEBUGGING:  print(self.tag_name, str(self.pos), str(self._len), str(self)[:10].replace('\n','.'))
        return self._len

    @property
    def pos(self) -> int:
        assert self._pos >= 0, "position value not initialized!"
        return self._pos

    @pos.setter
    def pos(self, pos: int):
        # assert isinstance(pos, int)
        self._pos = pos
        offset = 0
        for child in self.children:
            child.pos = pos + offset
            offset += child.len

    @property
    def errors(self) -> List[Error]:
        return [Error(self.pos, err) for err in self._errors]

    def show(self) -> str:
        """Returns content as string, inserting error messages where
        errors occurred.
        """
        s = "".join(child.show() for child in self.children) if self.children \
            else str(self.result)
        return (' <<< Error on "%s" | %s >>> ' % (s, '; '.join(self._errors))) if self._errors else s

    def _tree_repr(self, tab, openF, closeF, dataF=lambda s: s) -> str:
        """
        Generates a tree representation of this node and its children
        in string form.

        The kind of tree-representation is determined by several
        function parameters. This could be an XML-representation or a
        lisp-like S-expression.

        Args:
            tab (str):  The indentation string, e.g. '\t' or '    '
            openF:  (Node->str) A function that returns an opening
                string (e.g. an XML-tag_name) for a given node
            closeF:  (Node->str) A function that returns a closing
                string (e.g. an XML-tag_name) for a given node.
            dataF:  (str->str) A function that filters the data string
                before printing, e.g. to add quotation marks

        Returns (str):
            A string that contains a (serialized) tree representation
            of the node and its children.
        """
        head = openF(self)
        tail = closeF(self)

        if not self.result:
            return head + tail

        head = head + '\n'  # place the head, tail and content
        tail = '\n' + tail  # of the node on different lines

        if self.children:
            content = []
            for child in self.children:
                subtree = child._tree_repr(tab, openF, closeF, dataF).split('\n')
                content.append('\n'.join((tab + s) for s in subtree))
            return head + '\n'.join(content) + tail

        res = cast(str, self.result)  # safe, because if there are no children, result is a string
        if head[0] == "<" and res.find('\n') < 0:
            # for XML: place tags for leaf-nodes on one line if possible
            return head[:-1] + self.result + tail[1:]
        else:
            return head + '\n'.join([tab + dataF(s) for s in res.split('\n')]) + tail

    def as_sexpr(self, src: str=None) -> str:
        """
        Returns content as S-expression, i.e. in lisp-like form.

        Args:
            src:  The source text or `None`. In case the source text is
                given the position of the element in the text will be
                reported as line and column.
        """

        def opening(node) -> str:
            s = '(' + node.tag_name
            # s += " '(pos %i)" % node.pos
            if src:
                s += " '(pos %i " % node.pos + " %i %i)" % line_col(src, node.pos)
            if node.errors:
                s += " '(err '(%s))" % ' '.join(str(err).replace('"', r'\"')
                                                for err in node.errors)
            return s

        def pretty(s):
            return '"%s"' % s if s.find('"') < 0 \
                else "'%s'" % s if s.find("'") < 0 \
                else '"%s"' % s.replace('"', r'\"')

        return self._tree_repr('    ', opening, lambda node: ')', pretty)
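
    # Illustrative sketch (comment only, not executed): a small tree built
    # from MockParser objects and serialized with ``as_sexpr()``. The tag
    # names 'term' and 'factor' are made up for this example.
    #
    #     tree = Node(MockParser('term'),
    #                 (Node(MockParser('factor'), "2"),
    #                  Node(MockParser('', ':Token'), "*"),
    #                  Node(MockParser('factor'), "3")))
    #     print(tree.as_sexpr())
    #
    # yields output along these lines:
    #
    #     (term
    #         (factor
    #             "2"
    #         )
    #         (:Token
    #             "*"
    #         )
    #         (factor
    #             "3"
    #         )
    #     )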

    def as_xml(self, src: str=None) -> str:
        """
        Returns content as XML-tree.

        Args:
            src:  The source text or `None`. In case the source text is
                given the position will also be reported as line and
                column.
        """

        def opening(node) -> str:
            s = '<' + node.tag_name
            # s += ' pos="%i"' % node.pos
            if src:
                s += ' line="%i" col="%i"' % line_col(src, node.pos)
            if node.errors:
                s += ' err="%s"' % ''.join(str(err).replace('"', r'\"') for err in node.errors)
            s += ">"
            return s

        def closing(node):
            s = '</' + node.tag_name + '>'
            return s

        return self._tree_repr('    ', opening, closing)
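
    # Illustrative sketch (comment only, not executed): the same example tree
    # rendered with ``as_xml()`` comes out roughly as below. Note that
    # anonymous nodes are tagged with their ptype (e.g. ':Token'), which is
    # not well-formed XML.
    #
    #     <term>
    #         <factor>2</factor>
    #         <:Token>*</:Token>
    #         <factor>3</factor>
    #     </term>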

    def add_error(self, error_str) -> 'Node':
        self._errors.append(error_str)
        self.error_flag = True
        return self

    def propagate_error_flags(self) -> None:
        """Recursively propagates error flags set on child nodes to their
        parents. This can be used if errors are added to descendant 
        nodes after syntaxtree construction, i.e. in the compile phase.
        """
        for child in self.children:
            child.propagate_error_flags()
            self.error_flag = self.error_flag or child.error_flag

    def collect_errors(self, clear_errors=False) -> List[Error]:
        """
        Returns all errors of this node or any child node in the form
        of a list of tuples (position, error_message), where position
        is always relative to this node.
        """
        errors = self.errors
        if clear_errors:
            self._errors = []
            self.error_flag = False
        if self.children:
            for child in self.children:
                errors.extend(child.collect_errors(clear_errors))
        return errors
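
    # Illustrative sketch (comment only, not executed): attaching and
    # collecting errors. Setting ``pos`` on the root node is required,
    # because ``collect_errors()`` reports error positions via ``pos``.
    #
    #     tree.pos = 0
    #     tree.children[1].add_error("missing operator")
    #     for err in tree.collect_errors():
    #         print(err.pos, err.msg)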

    def log(self, log_file_name):
        st_file_name = log_file_name
        with open(os.path.join(log_dir(), st_file_name), "w", encoding="utf-8") as f:
            f.write(self.as_sexpr())

    def find(self, match_function) -> Iterator['Node']:
        """Finds nodes in the tree that match a specific criterion.
        
        ``find`` is a generator that yields all nodes for which the
        given ``match_function`` evaluates to True. The tree is 
        traversed pre-order.
        
        Args:
            match_function (function): A function that takes a Node
                object as argument and returns True or False
        Yields:
            Node: all nodes of the tree for which 
            ``match_function(node)`` returns True
        """
        if match_function(self):
            yield self
        else:
            for child in self.children:
                for nd in child.find(match_function):
                    yield nd
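
    # Illustrative sketch (comment only, not executed): ``find()`` with an
    # ad-hoc match function, collecting all nodes that were produced by a
    # parser named 'factor' (a made-up tag name):
    #
    #     factors = list(tree.find(lambda nd: nd.tag_name == 'factor'))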

    # def range(self, match_first, match_last):
    #     """Iterates over the range of nodes, starting from the first
    #     node for which ``match_first`` becomes True until the first node
    #     after this one for which ``match_last`` becomes true or until
    #     the end if it never does.
    #
    #     Args:
    #         match_first (function): A function  that takes as Node
    #             object as argument and returns True or False
    #         match_last (function): A function  that takes as Node
    #             object as argument and returns True or False
    #     Yields:
    #         Node: all nodes of the tree for which
    #         ``match_function(node)`` returns True
    #     """


    # def navigate(self, path):
    #     """Yields the results of all descendant elements matched by
    #     ``path``, e.g.
    #     'd/s' yields 'l' from (d (s l)(e (r x1) (r x2))
    #     'e/r' yields 'x1', then 'x2'
    #     'e'   yields (r x1)(r x2)
    #
    #     Args:
    #         path (str):  The path of the object, e.g. 'a/b/c'. The
    #             components of ``path`` can be regular expressions
    #
    #     Returns:
    #         The object at the path, either a string or a Node or
    #         ``None``, if the path did not match.
    #     """
    #     def nav(node, pl):
    #         if pl:
    #             return itertools.chain(nav(child, pl[1:]) for child in node.children
    #                                    if re.match(pl[0], child.tag_name))
    #         else:
    #             return self.result,
    #     return nav(path.split('/'))


########################################################################
#
# syntax tree transformation functions
#
########################################################################


TransformationFunc = Union[Callable[[Node], Any], partial]


def transformation_factory(t=None):
    """Creates factory functions from transformation-functions that
    dispatch on the first parameter after the node parameter.

    Decorating a transformation-function that has more than merely the
    ``node``-parameter with ``transformation_factory`` creates a
    function with the same name, which returns a partial-function that
    takes just the node-parameter.

    Additionally, there is some syntactic sugar for
    transformation-functions that receive a collection as their second
    parameter and do not have any further parameters. In this case a
    list of parameters passed to the factory function will be converted
    into a collection.

    Main benefit is readability of processing tables.

    Usage:
        @transformation_factory(AbstractSet[str])
        def remove_tokens(node, tokens):
            ...
      or, alternatively:
        @transformation_factory
        def remove_tokens(node, tokens: AbstractSet[str]):
            ...

    Example:
        trans_table = { 'expression': remove_tokens('+', '-') }
      instead of:
        trans_table = { 'expression': partial(remove_tokens, tokens={'+', '-'}) }
    """

    def decorator(f):
        sig = inspect.signature(f)
        params = list(sig.parameters.values())[1:]
        if len(params) == 0:
            return f  # '@transformer' not needed w/o free parameters
        assert t or params[0].annotation != params[0].empty, \
            "No type information on second parameter found! Please, use type " \
            "annotation or provide the type information via transformer-decorator."
        p1type = t or params[0].annotation
        f = singledispatch(f)
        if len(params) == 1 and issubclass(p1type, Container) and not issubclass(p1type, Text) \
                and not issubclass(p1type, ByteString):
            def gen_special(*args):
                c = set(args) if issubclass(p1type, AbstractSet) else \
                    list(args) if issubclass(p1type, Sequence) else args
                d = {params[0].name: c}
                return partial(f, **d)

            f.register(p1type.__args__[0], gen_special)

        def gen_partial(*args, **kwargs):
            d = {p.name: arg for p, arg in zip(params, args)}
            d.update(kwargs)
            return partial(f, **d)

        f.register(p1type, gen_partial)
        return f

    if isinstance(t, type(lambda: 1)):
        # Provide for the case that transformation_factory has been
        # written as plain decorator and not as a function call that
        # returns the decorator proper.
        func = t;  t = None
        return decorator(func)
    else:
        return decorator


WHITESPACE_PTYPE = ':Whitespace'
TOKEN_PTYPE = ':Token'


def key_parser_name(node) -> str:
    return node.parser.name


def key_tag_name(node) -> str:
    return node.tag_name


def traverse(root_node, processing_table, key_func=key_tag_name) -> None:
    """Traverses the syntax tree starting with the given ``root_node`` depth
    first and applies the sequences of callback-functions registered
    in the ``processing_table``-dictionary.
    
    The most important use case is the transformation of a concrete
    syntax tree into an abstract syntax tree (AST). But it is also imaginable
    to employ tree-traversal for the semantic analysis of the AST.

    In order to assign sequences of callback-functions to nodes, a
    dictionary ("processing table") is used. The keys usually represent
    tag names, but any other key function is possible. There exist
    three special keys:
        '+': always called (before any other processing function)
        '*': called for those nodes for which no (other) processing
             function appears in the table
        '~': always called (after any other processing function)

    Args:
        root_node (Node): The root-node of the syntax tree to be traversed 
        processing_table (dict): node key -> sequence of functions that
            will be applied to matching nodes in order. This dictionary
            is interpreted as a ``compact_table``. See 
            ``toolkit.expand_table`` or ``EBNFCompiler.EBNFTransTable``
        key_func (function): A mapping key_func(node) -> keystr. The default
            key_func yields node.tag_name.
            
    Example:
        table = { "term": [replace_by_single_child, flatten], 
            "factor, flowmarker, retrieveop": replace_by_single_child }
        traverse(node, table)
    """
    # commented, because this approach is too error prone!
    # def funclist(call):
    #     return [as_partial(func) for func in smart_list(call)]

    # normalize processing_table entries by turning single values into lists
    # with a single value
    table = {name: smart_list(call) for name, call in list(processing_table.items())}
    table = expand_table(table)
    cache = {}  # type: Dict[str, List[Callable]]

    def traverse_recursive(node):
        if node.children:
            for child in node.result:
                traverse_recursive(child)            # depth first
                node.error_flag |= child.error_flag  # propagate error flag

        key = key_func(node)
        sequence = cache.get(key, None)
        if sequence is None:
            sequence = table.get('+', []) + \
                       table.get(key, table.get('*', [])) + \
                       table.get('~', [])
            # '+' always called (before any other processing function)
            # '*' called for those nodes for which no (other) processing function
            #     appears in the table
            # '~' always called (after any other processing function)
            cache[key] = sequence

        for call in sequence:
            call(node)

    traverse_recursive(root_node)
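
# Illustrative sketch (comment only, not executed): a processing table for
# ``traverse()``. The tag names are made up; the special keys '+', '*' and
# '~' work as described in the docstring above.
#
#     table = {
#         '+': remove_empty,                    # applied first to every node
#         'expression': [remove_tokens('+', '-'), flatten],
#         'literal': reduce_single_child,
#         '*': replace_by_single_child,         # fallback for unlisted keys
#         '~': remove_whitespace,               # applied last to every node
#     }
#     traverse(syntax_tree, table)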


def no_transformation(node):
    pass


# ------------------------------------------------
#
# rearranging transformations:
#     - tree may be rearranged (e.g. flattened)
#     - nodes that are not leaves may be dropped
#     - order is preserved
#     - all leaves are kept
#
# ------------------------------------------------


def replace_by_single_child(node):
    """Remove single branch node, replacing it by its immediate descendant.
    (In case the descendant's name is empty (i.e. anonymous) the
    name of this node's parser is kept.)
    """
    if node.children and len(node.result) == 1:
        if not node.result[0].parser.name:
            node.result[0].parser.name = node.parser.name
        node.parser = node.result[0].parser
        node._errors.extend(node.result[0].errors)
        node.result = node.result[0].result


def reduce_single_child(node):
    """Reduce a single branch node, by transferring the result of its
    immediate descendant to this node, but keeping this node's parser entry.
    """
    if node.children and len(node.result) == 1:
        node._errors.extend(node.result[0].errors)
        node.result = node.result[0].result
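
# Illustrative sketch (comment only, not executed): the difference between the
# two transformations above, for a node serialized as (expression (term "2")):
#
#     replace_by_single_child  ->  (term "2")        # child's parser is taken over
#     reduce_single_child      ->  (expression "2")  # this node's parser is kept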


@transformation_factory
def replace_parser(node, name: str):
    """Replaces the parser of a Node with a mock parser with the given
    name.

    Parameters:
        name(str): "NAME:PTYPE" of the surrogate. The ptype is optional
        node(Node): The node where the parser shall be replaced
    """
    name, ptype = (name.split(':') + [''])[:2]
    node.parser = MockParser(name, ptype)


def flatten(node):
    """Recursively flattens all unnamed sub-nodes, in case there is more
    than one sub-node present. Flattening means that
    wherever a node has child nodes, the child nodes are inserted in place
    of the node. In other words, all leaves of this node and its child nodes
    are collected in-order as direct children of this node.
    This is meant to achieve these kinds of structural transformation:
        (1 (+ 2) (+ 3))    ->   (1 + 2 + 3)
        (1 (+ (2 + (3))))  ->   (1 + 2 + 3)

    Warning: Use with care. Due to its recursive nature, flattening can
    have unexpected side-effects.
    """
    if node.children:
        new_result = []
        for child in node.children:
            if not child.parser.name and child.children:
                assert child.children, node.as_sexpr()
                flatten(child)
                new_result.extend(child.result)
            else:
                new_result.append(child)
        node.result = tuple(new_result)


def collapse(node):
    """Collapses all sub-nodes by replacing the node's result with its
    string representation.
    """
    node.result = str(node)


# ------------------------------------------------
#
# destructive transformations:
#     - tree may be rearranged (flattened),
#     - order is preserved
#     - but (irrelevant) leaves may be dropped
#     - errors of dropped leaves will be lost
#
# ------------------------------------------------


def is_whitespace(node):
    """Returns ``True`` for whitespace-nodes and for comments defined
    with the ``@comment``-directive."""
    return node.parser.ptype == WHITESPACE_PTYPE


def is_empty(node):
    return not node.result


def is_expendable(node):
    return is_empty(node) or is_whitespace(node)


def is_token(node, tokens: AbstractSet[str] = frozenset()) -> bool:
    return node.parser.ptype == TOKEN_PTYPE and (not tokens or node.result in tokens)


@transformation_factory(Callable)  # @singledispatch
def remove_children_if(node, condition):
    """Removes all nodes from the result field if the function 
    ``condition(child_node)`` evaluates to ``True``."""
    if node.children:
        node.result = tuple(c for c in node.children if not condition(c))


remove_whitespace = remove_children_if(is_whitespace)  # partial(remove_children_if, condition=is_whitespace)
remove_empty = remove_children_if(is_empty)
remove_expendables = remove_children_if(is_expendable)  # partial(remove_children_if, condition=is_expendable)


@transformation_factory
def remove_tokens(node, tokens: AbstractSet[str] = frozenset()):
    """Removes any among a particular set of tokens from the immediate
    descendants of a node. If ``tokens`` is the empty set, all tokens
    are removed.
    """
    remove_children_if(node, partial(is_token, tokens=tokens))


def remove_enclosing_delimiters(node):
    """Removes any enclosing delimiters from a structure (e.g. quotation marks
    from a literal or braces from a group).
    """
    if len(node.children) >= 3:
        assert not node.children[0].children and not node.children[-1].children, node.as_sexpr()
        node.result = node.result[1:-1]


@transformation_factory
def map_content(node, func: Callable):      # Callable[[Node], ResultType]
    """Replaces the content of the node. ``func`` takes the node's result
    as an argument and returns the mapped result.
    """
    node.result = func(node.result)


########################################################################
#
# AST semantic validation functions
# EXPERIMENTAL!
#
########################################################################


@transformation_factory
def require(node, child_tags: AbstractSet[str]):
    for child in node.children:
        if child.tag_name not in child_tags:
            node.add_error('Element "%s" is not allowed inside "%s".' %
                           (child.parser.name, node.parser.name))


@transformation_factory
def forbid(node, child_tags: AbstractSet[str]):
    for child in node.children:
        if child.tag_name in child_tags:
            node.add_error('Element "%s" cannot be nested inside "%s".' %
                           (child.parser.name, node.parser.name))


@transformation_factory
def assert_content(node, regex: str):
    content = str(node)
    if not re.match(regex, content):
        node.add_error('Element "%s" violates %s on %s' %
                       (node.parser.name, str(regex), content))
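
# Illustrative sketch (comment only, not executed): the validators above can be
# mixed into an AST processing table; the tag names and the regular expression
# are made up for this example.
#
#     validation_table = {
#         'table_row': require('table_cell'),
#         'table_cell': forbid('table_row'),
#         'number': assert_content(r'\d+'),
#     }
#     traverse(syntax_tree, validation_table)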