Commit 813bebe5 authored by Eckhart Arnold's avatar Eckhart Arnold

- bugfixes

parent 821cb67c
......@@ -81,7 +81,7 @@ from DHParser import logging, is_filename, load_if_file, \\
ZeroOrMore, Forward, NegativeLookahead, mixin_comment, compile_source, \\
last_value, counterpart, accumulate, PreprocessorFunc, \\
Node, TransformationFunc, \\
traverse, remove_children_if, \\
traverse, remove_children_if, join, \\
reduce_single_child, replace_by_single_child, remove_whitespace, \\
remove_expendables, remove_empty, remove_tokens, flatten, is_whitespace, \\
is_empty, is_expendable, collapse, replace_content, WHITESPACE_PTYPE, TOKEN_PTYPE, \\
......
......@@ -132,7 +132,7 @@ PreprocessorFunc = Union[Callable[[str], str], partial]
LEFT_RECURSION_DEPTH = 8 # type: int
# because of python's recursion depth limit, this value ought not to be
# set too high. PyPy allows higher values than CPython
MAX_DROPOUTS = 5 # type: int
MAX_DROPOUTS = 3 # type: int
# stop trying to recover parsing after so many errors
......@@ -231,7 +231,8 @@ def add_parser_guard(parser_func):
# in case of left recursion, the first recursive step that
# matches will store its result in the cache
parser.visited[location] = (node, rest)
grammar.last_node__ = node # store last node for Lookbehind parser
# store last non-empty node for Lookbehind parser
if len(rest) < location: grammar.last_node__ = node
parser.recursion_counter[location] -= 1
......@@ -293,6 +294,15 @@ class Parser(ParserBase, metaclass=ParserMetaClass):
2. *Anonymous parsers* where the name-field just contains the empty
string. AST-transformation of Anonymous parsers can be hooked
only to their class name, and not to the individual parser.
Parser objects are callable and parsing is done by calling a parser
object with the text to parse. If the parser matches it returns
a tuple consisting of a node representing the root of the concrete
syntax tree resulting from the match as well as the substring
`text[i:]` where i is the length of matched text (which can be
zero in the case of parsers like `ZeroOrMore` or `Optional`).
If `i > 0` then the parser has "moved forward". If the parser does
not match it returns `(None, text)`.
"""
ApplyFunc = Callable[['Parser'], None]
......@@ -304,15 +314,27 @@ class Parser(ParserBase, metaclass=ParserMetaClass):
self.reset()
def __deepcopy__(self, memo):
"""Deepcopy method of the parser. Upon instantiation of a Grammar-
object, parsers will be deep-copied to the Grammar object. If a
derived parser-class changes the signature of the constructor,
`__deepcopy__`-method must be replaced (i.e. overridden without
calling the same method from the superclass) by the derived class.
"""
return self.__class__(self.name)
def reset(self):
"""Initializes or resets any parser variables. If overwritten,
the `reset()`-method of the parent class must be called from the
`reset()`-method of the derived class."""
self.visited = dict() # type: Dict[int, Tuple[Node, str]]
self.recursion_counter = dict() # type: Dict[int, int]
self.cycle_detection = set() # type: Set[Callable]
return self
def __call__(self, text: str) -> Tuple[Node, str]:
"""Applies the parser to the given `text` and returns a node with
the results or None as well as the text at the position right behind
the matching string."""
return None, text # default behaviour: don't match
def __add__(self, other: 'Parser') -> 'Series':
......@@ -332,10 +354,12 @@ class Parser(ParserBase, metaclass=ParserMetaClass):
@grammar.setter
def grammar(self, grammar: 'Grammar'):
assert self._grammar is None or self._grammar == grammar, \
"Parser has already been assigned to a Grammar object!"
self._grammar = grammar
self._grammar_assigned_notifier()
if self._grammar is None:
self._grammar = grammar
self._grammar_assigned_notifier()
else:
assert self._grammar == grammar, \
"Parser has already been assigned to a different Grammar object!"
def _grammar_assigned_notifier(self):
"""A function that notifies the parser object that it has been
......@@ -345,7 +369,7 @@ class Parser(ParserBase, metaclass=ParserMetaClass):
def apply(self, func: ApplyFunc):
"""
Applies function `func(parser)` recursively to this parser and all
descendants of the tree of parsers. The same function can never
descendant parsers if any exist. The same function can never
be applied twice between calls of the ``reset()``-method!
"""
if func in self.cycle_detection:
......@@ -387,7 +411,7 @@ class Grammar:
>>> number = RE('\d+') + RE('\.') + RE('\d+') | RE('\d+')
>>> number_parser = Grammar(number)
>>> number_parser("3.1416").show()
>>> number_parser("3.1416").content()
'3.1416'
Collecting the parsers that define a grammar in a descendant class of
......@@ -518,7 +542,7 @@ class Grammar:
# parsers not connected to the root object will be copied later
# on demand (see Grammar.__getitem__()). Usually, the need to
# do so only arises during testing.
self.root__ = root if root else copy.deepcopy(self.__class__.root__)
self.root__ = copy.deepcopy(root) if root else copy.deepcopy(self.__class__.root__)
if self.wspL__:
self.wsp_left_parser__ = Whitespace(self.wspL__) # type: ParserBase
......@@ -556,7 +580,7 @@ class Grammar:
self.rollback__ = [] # type: List[Tuple[int, Callable]]
self.last_rb__loc__ = -1 # type: int
# previously parsed node, needed by Lookbehind parser
self.last_node__ = None # type: Node
self.last_node__ = Node(ZOMBIE_PARSER, '') # type: Node
# support for call stack tracing
self.call_stack__ = [] # type: List[Parser]
# snapshots of call stacks
......@@ -807,13 +831,20 @@ class PreprocessorToken(Parser):
class RegExp(Parser):
"""
Regular expression parser.
"""Regular expression parser.
The RegExp-parser parses text that matches a regular expression.
RegExp can also be considered as the "atomic parser", because all
other parsers delegate part of the parsing job to other parsers,
but do not match text directly.
Example:
>>> word = RegExp(r'\w+')
>>> Grammar(word)("Haus").content()
'Haus'
EBNF-Notation: `/ ... /`
EBNF-Example: `word = /\w+/`
"""
def __init__(self, regexp, name: str = '') -> None:
......@@ -856,6 +887,21 @@ class RE(Parser):
string, e.g. use r'\s*' or r'[\t ]+', but not r'\s+'. If the
respective parameters in the constructor are set to ``None`` the
default whitespace expression from the Grammar object will be used.
Example (allowing whitespace on the right hand side, but not on
the left hand side of a regular expression):
>>> word = RE(r'\w+', wR=r'\s*')
>>> parser = Grammar(word)
>>> result = parser('Haus ')
>>> result.content()
'Haus '
>>> result.structure()
'(:RE (:RegExp "Haus") (:Whitespace " "))'
>>> parser(' Haus').content()
' <<< Error on " Haus" | Parser did not match! Invalid source file? >>> '
EBNF-Notation: `/ ... /~` or `~/ ... /` or `~/ ... /~`
EBNF-Example: `word = /\w+/~`
"""
def __init__(self, regexp, wL=None, wR=None, name=''):
"""Constructor for class RE.
......@@ -1004,6 +1050,30 @@ class NaryOperator(Parser):
class Optional(UnaryOperator):
"""
Parser `Optional` always matches, even if its child-parser
did not match.
If the child-parser did not match `Optional` returns a node
with no content and does not move forward in the text.
If the child-parser did match, `Optional` returns a node
with the node returned by the child-parser as its single
child and the text at the position where the child-parser
left it.
Examples:
>>> number = Optional(Token('-')) + RegExp(r'\d+') + Optional(RegExp(r'\.\d+'))
>>> Grammar(number)('3.14159').content()
'3.14159'
>>> Grammar(number)('3.14159').structure()
'(:Series (:Optional) (:RegExp "3") (:Optional (:RegExp ".14159")))'
>>> Grammar(number)('-1').content()
'-1'
EBNF-Notation: `[ ... ]`
EBNF-Example: `number = ["-"] /\d+/ [ /\.\d+/ ]`
"""
def __init__(self, parser: Parser, name: str = '') -> None:
super(Optional, self).__init__(parser, name)
# assert isinstance(parser, Parser)
......@@ -1024,6 +1094,7 @@ class Optional(UnaryOperator):
return '[' + (self.parser.repr[1:-1] if isinstance(self.parser, Alternative)
and not self.parser.name else self.parser.repr) + ']'
class ZeroOrMore(Optional):
def __call__(self, text: str) -> Tuple[Node, str]:
results = () # type: Tuple[Node, ...]
......@@ -1120,12 +1191,12 @@ class Alternative(NaryOperator):
# the order of the sub-expression matters!
>>> number = RE('\d+') | RE('\d+') + RE('\.') + RE('\d+')
>>> Grammar(number)("3.1416").show()
>>> Grammar(number)("3.1416").content()
'3 <<< Error on ".1416" | Parser stopped before end! trying to recover... >>> '
# the most selective expression should be put first:
>>> number = RE('\d+') + RE('\.') + RE('\d+') | RE('\d+')
>>> Grammar(number)("3.1416").show()
>>> Grammar(number)("3.1416").content()
'3.1416'
"""
......@@ -1246,7 +1317,6 @@ class Lookbehind(FlowOperator):
assert isinstance(p, RegExp), str(type(p))
self.regexp = p.main.regexp if isinstance(p, RE) else p.regexp
super(Lookbehind, self).__init__(parser, name)
print("WARNING: Lookbehind Operator is experimental!")
def __call__(self, text: str) -> Tuple[Node, str]:
if self.sign(self.condition()):
......@@ -1262,7 +1332,10 @@ class Lookbehind(FlowOperator):
def condition(self):
node = self.grammar.last_node__
return node and self.regexp.match(str(node))
assert node is not None # can be removed
s = str(node)
assert s or node.parser.name == '__ZOMBIE__', str(node.parser)
return self.regexp.match(s)
class NegativeLookbehind(Lookbehind):
......
......@@ -132,6 +132,17 @@ StrictResultType = Union[ChildrenType, str]
ResultType = Union[ChildrenType, 'Node', str, None]
def oneliner_sxpr(sxpr: str) -> str:
    """Return the S-expression `sxpr` collapsed onto a single line.

    All runs of whitespace are folded into a single space and any
    space that directly precedes a closing parenthesis is removed,
    yielding a compact one-line rendering of the expression.

    Example:
    >>> oneliner_sxpr('(a\\n (b\\n c\\n )\\n)\\n')
    '(a (b c))'
    """
    # First normalize every whitespace run to one plain space ...
    collapsed = re.sub(r'\s+', ' ', sxpr)
    # ... then drop the space left in front of each ')' and trim the ends.
    return re.sub(r' (?=\))', '', collapsed).strip()
class Node:
"""
Represents a node in the concrete or abstract syntax tree.
......@@ -259,13 +270,34 @@ class Node:
def errors(self) -> List[Error]:
return [Error(self.pos, err) for err in self._errors]
def show(self) -> str:
"""Returns content as string, inserting error messages where
errors occurred.
def add_error(self, error_str) -> 'Node':
self._errors.append(error_str)
self.error_flag = True
return self
def propagate_error_flags(self) -> None:
"""Recursively propagates error flags set on child nodes to its
parents. This can be used if errors are added to descendant
nodes after syntaxtree construction, i.e. in the compile phase.
"""
s = "".join(child.show() for child in self.children) if self.children \
else str(self.result)
return (' <<< Error on "%s" | %s >>> ' % (s, '; '.join(self._errors))) if self._errors else s
for child in self.children:
child.propagate_error_flags()
self.error_flag = self.error_flag or child.error_flag
def collect_errors(self, clear_errors=False) -> List[Error]:
"""
Returns all errors of this node or any child node in the form
of a set of tuples (position, error_message), where position
is always relative to this node.
"""
errors = self.errors
if clear_errors:
self._errors = []
self.error_flag = False
if self.children:
for child in self.children:
errors.extend(child.collect_errors(clear_errors))
return errors
def _tree_repr(self, tab, openF, closeF, dataF=identity, density=0) -> str:
"""
......@@ -363,39 +395,20 @@ class Node:
return self._tree_repr(' ', opening, closing, density=1)
def add_error(self, error_str) -> 'Node':
self._errors.append(error_str)
self.error_flag = True
return self
def propagate_error_flags(self) -> None:
"""Recursively propagates error flags set on child nodes to its
parents. This can be used if errors are added to descendant
nodes after syntaxtree construction, i.e. in the compile phase.
"""
for child in self.children:
child.propagate_error_flags()
self.error_flag = self.error_flag or child.error_flag
def structure(self) -> str:
"""Return structure (and content) as S-expression on a single line
without any line breaks."""
return oneliner_sxpr(self.as_sxpr())
def collect_errors(self, clear_errors=False) -> List[Error]:
def content(self) -> str:
"""
Returns all errors of this node or any child node in the form
of a set of tuples (position, error_message), where position
is always relative to this node.
Returns content as string, inserting error messages where
errors occurred.
"""
errors = self.errors
if clear_errors:
self._errors = []
self.error_flag = False
if self.children:
for child in self.children:
errors.extend(child.collect_errors(clear_errors))
return errors
def log(self, log_file_name):
st_file_name = log_file_name
with open(os.path.join(log_dir(), st_file_name), "w", encoding="utf-8") as f:
f.write(self.as_sxpr())
s = "".join(child.content() for child in self.children) if self.children \
else str(self.result)
return (
' <<< Error on "%s" | %s >>> ' % (s, '; '.join(self._errors))) if self._errors else s
def find(self, match_function: Callable) -> Iterator['Node']:
"""Finds nodes in the tree that match a specific criterion.
......@@ -458,6 +471,11 @@ class Node:
# return self.result,
# return nav(path.split('/'))
def log(self, log_file_name):
st_file_name = log_file_name
with open(os.path.join(log_dir(), st_file_name), "w", encoding="utf-8") as f:
f.write(self.as_sxpr())
def mock_syntax_tree(sxpr):
"""
......@@ -511,17 +529,6 @@ def mock_syntax_tree(sxpr):
return Node(MockParser(name, ':' + class_name), result)
def compact_sxpr(s) -> str:
"""Returns S-expression ``s`` as a one liner without unnecessary
whitespace.
Example:
>>> compact_sxpr('(a\\n (b\\n c\\n )\\n)\\n')
'(a (b c))'
"""
return re.sub('\s(?=\))', '', re.sub('\s+', ' ', s)).strip()
TransformationFunc = Union[Callable[[Node], Any], partial]
......
......@@ -28,7 +28,7 @@ except ImportError:
from DHParser import error_messages
from DHParser.toolkit import is_logging
from DHParser.syntaxtree import mock_syntax_tree, compact_sxpr
from DHParser.syntaxtree import mock_syntax_tree, oneliner_sxpr
__all__ = ('unit_from_configfile',
'unit_from_json',
......@@ -171,8 +171,8 @@ def grammar_unit(test_unit, parser_factory, transformer_factory, report=True, ve
errata.append('Abstract syntax tree test "%s" for parser "%s" failed:'
'\n\tExpr.: %s\n\tExpected: %s\n\tReceived: %s'
% (test_name, parser_name, '\n\t'.join(test_code.split('\n')),
compact_sxpr(compare.as_sxpr()),
compact_sxpr(ast.as_sxpr())))
oneliner_sxpr(compare.as_sxpr()),
oneliner_sxpr(ast.as_sxpr())))
tests.setdefault('__err__', {})[test_name] = errata[-1]
if verbose:
print(infostr + ("OK" if len(errata) == errflag else "FAIL"))
......
......@@ -57,8 +57,8 @@ block_environment = known_environment | generic_block
known_environment = itemize | enumerate | figure | table | quotation
| verbatim
generic_block = begin_generic_block sequence §end_generic_block
begin_generic_block = -&SUCC_LB begin_environment &PRED_LB
end_generic_block = -&SUCC_LB end_environment &PRED_LB
begin_generic_block = -&SUCC_LB begin_environment -&SUCC_LB
end_generic_block = -&SUCC_LB end_environment -&SUCC_LB
itemize = "\begin{itemize}" [PARSEP] { item } §"\end{itemize}"
enumerate = "\begin{enumerate}" [PARSEP] {item } §"\end{enumerate}"
......@@ -86,8 +86,8 @@ text_elements = command | text | block | inline_environment
inline_environment = known_inline_env | generic_inline_env
known_inline_env = inline_math
generic_inline_env = begin_inline_env { text_elements }+ §end_inline_env
begin_inline_env = (-!SUCC_LB begin_environment) | (begin_environment !PRED_LB)
end_inline_env = (-!SUCC_LB end_environment) | (end_environment !PRED_LB)
begin_inline_env = (-!SUCC_LB begin_environment) | (begin_environment -!SUCC_LB)
end_inline_env = (-!SUCC_LB end_environment) | (end_environment -!SUCC_LB)
begin_environment = "\begin{" §NAME §"}"
end_environment = "\end{" §::NAME §"}"
......@@ -144,7 +144,7 @@ WSPC = /[ \t]+/ # (horizontal) whitespace
LF = !PARSEP /[ \t]*\n[ \t]*/ # linefeed but not an empty line
PARSEP = /[ \t]*(?:\n[ \t]*)+\n[ \t]*/ # at least one empty line, i.e.
# [whitespace] linefeed [whitespace] linefeed
EOF = !/./
EOF = /(?!.)/
SUCC_LB = /(?:.*\n)+\s*$/ # linebreak succeeding an arbitrary chunk of text
PRED_LB = /\s*?\n/ # linebreak preceding any text
SUCC_LB = /(?!.)|(?:.*\n)+\s*$/ # linebreak succeeding an arbitrary chunk of text
# PRED_LB = /\s*(?!.)|\s*?\n/ # linebreak preceding any text
This diff is collapsed.
......@@ -62,22 +62,22 @@ newpath
36 62 lineto
stroke
400 28 moveto
(Generations) show
(Generations) content
190 46 moveto
(10) show
(10) content
351 46 moveto
(20) show
(20) content
512 46 moveto
(30) show
(30) content
673 46 moveto
(40) show
(40) content
17 274 moveto
90.000000 rotate
(Population) show
(Population) content
-90.000000 rotate
32 537 moveto
90.000000 rotate
(1.0) show
(1.0) content
-90.000000 rotate
0.000000 0.000000 0.500000 setrgbcolor
2.000000 setlinewidth
......@@ -362,7 +362,7 @@ newpath
stroke
0.000000 0.000000 0.000000 setrgbcolor
165 9 moveto
(Dove) show
(Dove) content
0.330000 1.000000 1.000000 setrgbcolor
newpath
257 15 moveto
......@@ -370,7 +370,7 @@ newpath
stroke
0.000000 0.000000 0.000000 setrgbcolor
277 9 moveto
(Hawk) show
(Hawk) content
1.000000 0.000000 1.000000 setrgbcolor
newpath
369 15 moveto
......@@ -378,7 +378,7 @@ newpath
stroke
0.000000 0.000000 0.000000 setrgbcolor
389 9 moveto
(Pavlov) show
(Pavlov) content
0.000000 1.000000 0.000000 setrgbcolor
newpath
481 15 moveto
......@@ -386,7 +386,7 @@ newpath
stroke
0.000000 0.000000 0.000000 setrgbcolor
501 9 moveto
(Random) show
(Random) content
1.000000 0.000000 0.000000 setrgbcolor
newpath
593 15 moveto
......@@ -394,9 +394,9 @@ newpath
stroke
0.000000 0.000000 0.000000 setrgbcolor
613 9 moveto
(TitForTat) show
(TitForTat) content
281 571 moveto
(Population dynamics of "Simple Example") show
(Population dynamics of "Simple Example") content
1.000000 1.000000 1.000000 setrgbcolor
newpath
0 0 moveto
......@@ -449,22 +449,22 @@ newpath
36 62 lineto
stroke
400 28 moveto
(Generations) show
(Generations) content
190 46 moveto
(10) show
(10) content
351 46 moveto
(20) show
(20) content
512 46 moveto
(30) show
(30) content
673 46 moveto
(40) show
(40) content
17 274 moveto
90.000000 rotate
(Population) show
(Population) content
-90.000000 rotate
32 537 moveto
90.000000 rotate
(1.0) show
(1.0) content
-90.000000 rotate
0.000000 0.000000 0.500000 setrgbcolor
2.000000 setlinewidth
......@@ -749,7 +749,7 @@ newpath
stroke
0.000000 0.000000 0.000000 setrgbcolor
165 9 moveto
(Dove) show
(Dove) content
0.330000 1.000000 1.000000 setrgbcolor
newpath
257 15 moveto
......@@ -757,7 +757,7 @@ newpath
stroke
0.000000 0.000000 0.000000 setrgbcolor
277 9 moveto
(Hawk) show
(Hawk) content
1.000000 0.000000 1.000000 setrgbcolor
newpath
369 15 moveto
......@@ -765,7 +765,7 @@ newpath
stroke
0.000000 0.000000 0.000000 setrgbcolor
389 9 moveto
(Pavlov) show
(Pavlov) content
0.000000 1.000000 0.000000 setrgbcolor
newpath
481 15 moveto
......@@ -773,7 +773,7 @@ newpath
stroke
0.000000 0.000000 0.000000 setrgbcolor
501 9 moveto
(Random) show
(Random) content
1.000000 0.000000 0.000000 setrgbcolor
newpath
593 15 moveto
......@@ -781,9 +781,9 @@ newpath
stroke
0.000000 0.000000 0.000000 setrgbcolor
613 9 moveto
(TitForTat) show
(TitForTat) content
281 571 moveto
(Population dynamics of "Simple Example") show
(Population dynamics of "Simple Example") content
=======
%!PS-Adobe-2.0 EPSF-1.2
......@@ -5164,7 +5164,7 @@ systemdict /ISOLatin1Encoding known not {
moveto
} forall
grestore
} {show} ifelse
} {content} ifelse
0 spacing neg translate
} forall
} bind def
......
......@@ -713,7 +713,7 @@
constitutes one of the central epistemological problems of computer
simulation methods. Especially in the case of simulations in the
social sciences the answers given by many authors are not satisfactory.
The following paper attempts to show how the characteristics of simulation,
The following paper attempts to content how the characteristics of simulation,
i.e. the imitation of a dynamic, constitute the problem of validation
even in the case of the natural sciences and what consequences arise.
Differences as well as common grounds between social and natural
......
......@@ -756,7 +756,7 @@ prevailing attitude of modelers towards empirical research.
The examples discussed previously indicate that simulation models can
be a valuable tool to study some of the possible causes of some social
phenomena. However, the examples also show that a) modeling approaches
phenomena. However, the examples also content that a) modeling approaches
in the social sciences can easily fail to deliver resilient results,
that b) social simulations are not yet generally embedded in a
research culture where the critical assessment of the (empirical)
......
......@@ -899,7 +899,7 @@ prevailing attitude of modelers towards empirical research.
The examples discussed previously indicate that simulation models can
be a valuable tool to study some of the possible causes of some social
phenomena. However, the examples also show that a) modeling approaches
phenomena. However, the examples also content that a) modeling approaches
in the social sciences can easily fail to deliver resilient results,
that b) social simulations are not yet generally embedded in a
research culture where the critical assessment of the (empirical)
......
#!/usr/bin/python3
"""tst_grammar.py - runs the unit tests for the LaTeX grammar
"""tst_LaTeX_grammar.py - runs the unit tests for the LaTeX grammar
Author: Eckhart Arnold <arnold@badw.de>
......
......@@ -24,7 +24,7 @@ from functools import partial
sys.path.extend(['../', './'])
from DHParser.syntaxtree import TOKEN_PTYPE, mock_syntax_tree, compact_sxpr
from DHParser.syntaxtree import TOKEN_PTYPE, mock_syntax_tree, oneliner_sxpr
from DHParser.transform import traverse, remove_expendables, \
replace_by_single_child, reduce_single_child, flatten
from DHParser.dsl import grammar_provider
......@@ -118,25 +118,25 @@ class TestSExpr:
Tests for S-expression handling.
"""
def test_compact_sexpr(self):
assert compact_sxpr("(a\n (b\n c\n )\n)\n") == "(a (b c))"
assert oneliner_sxpr("(a\n (b\n c\n )\n)\n") == "(a (b c))"
def test_mock_syntax_tree(self):
sexpr = '(a (b c) (d e) (f (g h)))'
tree = mock_syntax_tree(sexpr)
assert compact_sxpr(tree.as_sxpr().replace('"', '')) == sexpr
assert oneliner_sxpr(tree.as_sxpr().replace('"', '')) == sexpr
# test different quotation marks
sexpr = '''(a (b """c""" 'k' "l") (d e) (f (g h)))'''
sexpr_stripped = '(a (b c k l) (d e) (f (g h)))'
tree = mock_syntax_tree(sexpr)
assert compact_sxpr(tree.as_sxpr().replace('"', '')) == sexpr_stripped
assert oneliner_sxpr(tree.as_sxpr().replace('"', '')) == sexpr_stripped
sexpr_clean = '(a (b "c" "k" "l") (d "e") (f (g "h")))'
tree = mock_syntax_tree(sexpr_clean)
assert compact_sxpr(tree.as_sxpr()) == sexpr_clean
assert oneliner_sxpr(tree.as_sxpr()) == sexpr_clean
tree = mock_syntax_tree(sexpr_stripped)<