Commit a7a402d2 authored by di68kap

- transform: added replace_by_children; bug fixes

parent 020f74c6
......@@ -76,7 +76,7 @@ __all__ = ('log_dir',
#######################################################################
def log_dir() -> str:
def log_dir() -> Union[str, bool]:
"""Creates a directory for log files (if it does not exist) and
returns its path.
......@@ -84,8 +84,15 @@ def log_dir() -> str:
Don't use a directory name that could be the name of a directory
for other purposes than logging.
ATTENTION: The log-dir is stored thread-locally, which means that the log-dir
as well as the information whether logging is turned on or off will not
automatically be transferred to any subprocesses. This needs to be done
explicitly. (See `testing.grammar_suite()` for an example of how this can
be done.)
Returns:
name of the logging directory
name of the logging directory (str) or False (bool) if logging has
not yet been switched on with the logging-contextmanager (see below).
"""
# the try-except clauses in the following are precautions for multithreading
try:
......@@ -93,8 +100,9 @@ def log_dir() -> str:
if not dirname:
raise AttributeError # raise an attribute error if LOGGING evaluates to False
except AttributeError:
raise AttributeError("No access to log directory before logging has been "
"turned on within the same thread/process.")
return False
# raise AttributeError("No access to log directory before logging has been "
# "turned on within the same thread/process.")
if os.path.exists(dirname) and not os.path.isdir(dirname):
raise IOError('"' + dirname + '" cannot be used as log directory, '
'because it is not a directory!')
......
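The effect of this change, which the adjusted test further below also covers, is that `log_dir()` can now be called safely outside of a logging context. A minimal sketch of the intended usage (the directory name "LOGS" is just a placeholder, not anything mandated by DHParser):

    from DHParser.log import logging, log_dir, is_logging

    # Outside of any logging context, log_dir() now returns False
    # instead of raising an AttributeError.
    assert not log_dir()

    with logging("LOGS"):          # "LOGS" is an arbitrary example directory
        assert is_logging()
        dirname = log_dir()        # returns the directory name as a string
        # and creates the directory lazily on this first call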
......@@ -169,7 +169,7 @@ class Server:
await writer.drain()
writer.close()
# TODO: add these lines in case a terminate signal is received, i.e. exit server coroutine
# gracefully.
# gracefully. Is this needed?
# self.server.cancel()
async def serve(self, address: str = '127.0.0.1', port: int = 8888):
......
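The question in the TODO touches a common asyncio pattern: shutting the serving coroutine down cleanly when a terminate signal arrives. The following is only a generic sketch of that pattern, not DHParser's actual Server implementation; `handle_connection` and the signal wiring are made up for illustration (add_signal_handler is Unix-only):

    import asyncio
    import signal

    async def handle_connection(reader, writer):
        data = await reader.read(100)      # placeholder request handling
        writer.write(data)
        await writer.drain()
        writer.close()

    async def serve(address='127.0.0.1', port=8888):
        server = await asyncio.start_server(handle_connection, address, port)
        # On SIGTERM, close the server so that serve_forever() is cancelled
        # and the coroutine can exit gracefully.
        asyncio.get_running_loop().add_signal_handler(signal.SIGTERM, server.close)
        async with server:
            try:
                await server.serve_forever()
            except asyncio.CancelledError:
                pass                        # cancelled on close(): clean shutdown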
......@@ -38,10 +38,10 @@ import sys
from typing import Dict, List, Union, cast
from DHParser.error import Error, is_error, adjust_error_locations
from DHParser.log import is_logging, clear_logs, log_parsing_history
from DHParser.log import log_dir, logging, is_logging, clear_logs, log_parsing_history
from DHParser.parse import UnknownParserError, Parser, Lookahead
from DHParser.syntaxtree import Node, RootNode, parse_tree, flatten_sxpr, serialize, ZOMBIE_TAG
from DHParser.toolkit import get_config_value, set_config_value, load_if_file, re
from DHParser.toolkit import GLOBALS, get_config_value, load_if_file, re
__all__ = ('unit_from_config',
......@@ -506,6 +506,17 @@ def reset_unit(test_unit):
del tests[key]
def run_unit(logdir, *parameters):
"""
Run `grammar_unit()` with logs written to `logdir` or with no logs if `logdir`
evaluates to False. This helper function is needed for running unit tests
in a multiprocessing environment, because log.log_dir(), log.logging() and
log.is_logging() are thread-local.
"""
with logging(logdir):
return grammar_unit(*parameters)
def grammar_suite(directory, parser_factory, transformer_factory,
fn_patterns=['*test*'],
ignore_unknown_filetypes=False,
......@@ -530,7 +541,7 @@ def grammar_suite(directory, parser_factory, transformer_factory,
for filename in sorted(os.listdir('.')):
if any(fnmatch.fnmatch(filename, pattern) for pattern in fn_patterns):
parameters = filename, parser_factory, transformer_factory, report, verbose
results.append((filename, pool.submit(grammar_unit, *parameters)))
results.append((filename, pool.submit(run_unit, log_dir(), *parameters)))
for filename, err_future in results:
try:
errata = err_future.result()
......@@ -785,6 +796,15 @@ def run_file(fname):
exec('import ' + fname[:-3])
runner('', eval(fname[:-3]).__dict__)
def run_with_log(logdir, f):
"""
Run `run_file()` with logs written to `logdir` or with no logs if `logdir`
evaluates to False. This helper function is needed for running unit tests
in a multiprocessing environment, because log.log_dir(), log.logging() and
log.is_logging() are thread-local.
"""
with logging(logdir):
run_file(f)
def run_path(path):
"""Runs all unit tests in `path`"""
......@@ -796,7 +816,7 @@ def run_path(path):
if get_config_value('test_parallelization'):
with concurrent.futures.ProcessPoolExecutor(multiprocessing.cpu_count()) as pool:
for f in files:
result_futures.append(pool.submit(run_file, f))
result_futures.append(pool.submit(run_with_log, log_dir(), f))
# run_file(f) # for testing!
for r in result_futures:
try:
......
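Both helpers implement the same pattern, which is worth spelling out because it applies to any thread-local state that has to cross a process boundary: read the thread-local value in the parent process (here via log_dir()), hand it to the worker as an ordinary argument, and re-enter the context inside the worker process. A self-contained sketch of the idea, with `worker` as a made-up stand-in for run_unit/run_with_log:

    from concurrent.futures import ProcessPoolExecutor
    from DHParser.log import logging, log_dir

    def worker(logdir, data):
        # Re-establish the logging context in the worker process, because
        # thread-local state is not inherited by subprocesses.
        with logging(logdir):
            return data.upper()            # stand-in for the real work

    if __name__ == '__main__':
        with logging('LOGS'):              # 'LOGS' is just an example directory
            with ProcessPoolExecutor() as pool:
                future = pool.submit(worker, log_dir(), 'example')
        print(future.result())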
......@@ -49,6 +49,7 @@ __all__ = ('TransformationDict',
'is_named',
'update_attr',
'replace_by_single_child',
'replace_by_children',
'reduce_single_child',
'replace_or_reduce',
'change_tag_name',
......@@ -93,7 +94,6 @@ __all__ = ('TransformationDict',
'remove_anonymous_tokens',
'remove_brackets',
'remove_infix_operator',
'remove_single_child',
'remove_tokens',
'flatten',
'forbid',
......@@ -563,7 +563,9 @@ def _reduce_child(node: Node, child: Node):
Sets node's results to the child's result, keeping node's tag_name.
"""
node.result = child.result
update_attr(node, child)
update_attr(child, node)
if child.attr_active():
node._xml_attr = child._xml_attr
#######################################################################
......@@ -617,19 +619,36 @@ def replace_by_single_child(context: List[Node]):
"""
Removes single branch node, replacing it by its immediate descendant.
Replacement only takes place, if the last node in the context has
exactly one child.
exactly one child. Attributes will be merged. In case one and the same
attribute is defined for the child as well as the parent, the child's
attribute value takes precedence.
"""
node = context[-1]
if len(node.children) == 1:
_replace_by(node, node.children[0])
def replace_by_children(context: List[Node]):
"""
Eliminates the last node in the context by replacing it with its children.
The attributes of this node will be dropped.
"""
node = context[-1]
assert node.children
parent = context[-2]
result = parent.result
i = result.index(node)
parent.result = result[:i] + node.children + result[i + 1:]
def reduce_single_child(context: List[Node]):
"""
Reduces a single branch node by transferring the result of its
immediate descendant to this node, but keeping this node's parser entry.
Reduction only takes place if the last node in the context has
exactly one child.
exactly one child. Attributes will be merged. In case one and the same
attribute is defined for the child as well as the parent, the parent's
attribute value takes precedence.
"""
node = context[-1]
if len(node.children) == 1:
......@@ -1046,7 +1065,7 @@ remove_empty = remove_children_if(is_empty)
remove_anonymous_empty = remove_children_if(lambda ctx: is_empty(ctx) and is_anonymous(ctx))
remove_anonymous_tokens = remove_children_if(lambda ctx: is_token(ctx) and is_anonymous(ctx))
remove_infix_operator = keep_children(slice(0, None, 2))
remove_single_child = apply_if(keep_children(slice(0)), lambda ctx: len(ctx[-1].children) == 1)
# remove_single_child = apply_if(keep_children(slice(0)), lambda ctx: len(ctx[-1].children) == 1)
def remove_first(context: List[Node]):
......
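The difference between these transformations can be illustrated with a tiny stand-in tree (simplified classes, not DHParser's real Node API): replace_by_single_child substitutes a node that has exactly one child by that child (the child's attributes win on conflict), reduce_single_child keeps the node and pulls the child's result up (the parent's attributes win), while the new replace_by_children splices all children into the parent's result and drops the removed node's attributes.

    # Simplified stand-in; DHParser's real Node class and context handling differ.
    class N:
        def __init__(self, name, *children):
            self.name, self.children = name, list(children)
        def __repr__(self):
            return self.name if not self.children else \
                '(%s %s)' % (self.name, ' '.join(repr(c) for c in self.children))

    def replace_by_children(context):
        """Splice the children of the last node in the context into its parent."""
        node, parent = context[-1], context[-2]
        assert node.children
        i = parent.children.index(node)
        parent.children[i:i + 1] = node.children

    tree = N('expr', N('group', N('term'), N('op'), N('term')))
    replace_by_children([tree, tree.children[0]])
    print(tree)    # (expr term op term)  -- the "group" node has been dissolved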
......@@ -23,9 +23,12 @@ import sys
sys.path.extend(['../../', '../'])
from DHParser import configuration
import DHParser.dsl
from DHParser import testing
configuration.CONFIG_PRESET['test_parallelization'] = True
if __name__ == "__main__":
if not DHParser.dsl.recompile_grammar('BibTeX.ebnf', force=False): # recompiles Grammar only if it has changed
print('\nErrors while recompiling "BibTeX.ebnf":\n--------------------------------------\n\n')
......
......@@ -92,12 +92,14 @@ class TestLoggingAndLoading:
def test_logging(self):
try:
log_dir()
assert False, "AttributeError should be raised when log_dir() is called outside " \
"a logging context."
except AttributeError:
pass
# try:
# log_dir()
# assert False, "AttributeError should be raised when log_dir() is called outside " \
# "a logging context."
# except AttributeError:
# pass
res = log_dir()
assert isinstance(res, bool) and not res
with logging("TESTLOGS"):
assert not os.path.exists("TESTLOGS"), \
"Log dir should be created lazily!"
......