Commit 54fd2807 authored by di68kap's avatar di68kap
Browse files

- log.py refactored: logging-context-manager removed

parent fbc5d3db
......@@ -84,7 +84,8 @@ def access_presets() -> Dict[str, Any]:
f = open(syncfile_path, 'rb')
preset = pickle.load(f)
assert isinstance(preset, dict)
assert preset['syncfile_path'] == syncfile_path
assert preset['syncfile_path'] == syncfile_path, \
'Conflicting syncfile paths %s != %s' % (preset['syncfile_path'], syncfile_path)
CONFIG_PRESET = preset
except FileNotFoundError:
pass
......@@ -117,8 +118,8 @@ def finalize_presets():
and (not os.path.exists((get_syncfile_path(os.getppid()))))), \
"finalize_presets() can only be called from the main process!"
with open(syncfile_path, 'wb') as f:
if not existing_syncfile:
CONFIG_PRESET['syncfile_path'] = syncfile_path
CONFIG_PRESET['syncfile_path'] = syncfile_path
if existing_syncfile != syncfile_path:
atexit.register(remove_cfg_tempfile, syncfile_path)
pickle.dump(CONFIG_PRESET, f)
......
......@@ -28,11 +28,12 @@ import stat
import DHParser.ebnf
from DHParser.compile import Compiler, compile_source
from DHParser.configuration import get_config_value, set_config_value
from DHParser.ebnf import EBNFCompiler, grammar_changed, DHPARSER_IMPORTS, \
get_ebnf_preprocessor, get_ebnf_grammar, get_ebnf_transformer, get_ebnf_compiler, \
PreprocessorFactoryFunc, ParserFactoryFunc, TransformerFactoryFunc, CompilerFactoryFunc
from DHParser.error import Error, is_error, has_errors, only_errors
from DHParser.log import logging
from DHParser.log import suspend_logging, resume_logging
from DHParser.parse import Grammar
from DHParser.preprocess import nil_preprocessor, PreprocessorFunc
from DHParser.syntaxtree import Node
......@@ -145,11 +146,12 @@ def grammar_instance(grammar_representation) -> Tuple[Grammar, str]:
if is_python_code(grammar_src):
parser_py, messages = grammar_src, [] # type: str, List[Error]
else:
with logging(False):
result, messages, _ = compile_source(
grammar_src, None,
get_ebnf_grammar(), get_ebnf_transformer(), get_ebnf_compiler())
parser_py = cast(str, result)
log_dir = suspend_logging()
result, messages, _ = compile_source(
grammar_src, None,
get_ebnf_grammar(), get_ebnf_transformer(), get_ebnf_compiler())
parser_py = cast(str, result)
resume_logging(log_dir)
if has_errors(messages):
raise DefinitionError(only_errors(messages), grammar_src)
parser_root = compile_python_object(DHPARSER_IMPORTS + parser_py, r'\w+Grammar$')()
......@@ -291,10 +293,11 @@ def load_compiler_suite(compiler_suite: str) -> \
else:
# Assume source is an ebnf grammar.
# Is there really any reasonable application case for this?
with logging(False):
compiler_py, messages, n = compile_source(source, None, get_ebnf_grammar(),
get_ebnf_transformer(),
get_ebnf_compiler(compiler_suite, source))
log_dir = suspend_logging()
compiler_py, messages, n = compile_source(source, None, get_ebnf_grammar(),
get_ebnf_transformer(),
get_ebnf_compiler(compiler_suite, source))
resume_logging(log_dir)
if has_errors(messages):
raise DefinitionError(only_errors(messages), source)
preprocessor = get_ebnf_preprocessor
......
......@@ -81,7 +81,7 @@ try:
import regex as re
except ImportError:
import re
from DHParser import logging, is_filename, load_if_file, \\
from DHParser import start_logging, suspend_logging, resume_logging, is_filename, load_if_file, \\
Grammar, Compiler, nil_preprocessor, PreprocessorToken, Whitespace, DropWhitespace, \\
Lookbehind, Lookahead, Alternative, Pop, Token, DropToken, Synonym, AllOf, SomeOf, \\
Unordered, Option, NegativeLookbehind, OneOrMore, RegExp, Retrieve, Series, Capture, \\
......
......@@ -43,9 +43,9 @@ Example::
from DHParser import compile_source, logging
with logging("LOGS"):
result, errors, ast = compile_source(source, preprocessor, grammar,
transformer, compiler)
start_logging("LOGS")
result, errors, ast = compile_source(source, preprocessor, grammar,
transformer, compiler)
"""
import collections
......@@ -54,14 +54,17 @@ import html
import os
from typing import List, Tuple, Union, Optional
from DHParser.configuration import access_presets, finalize_presets, THREAD_LOCALS
from DHParser.configuration import access_presets, finalize_presets, get_config_value, \
set_config_value
from DHParser.error import Error
from DHParser.stringview import StringView
from DHParser.syntaxtree import Node, ZOMBIE_TAG
from DHParser.toolkit import is_filename, escape_control_characters
from DHParser.toolkit import escape_control_characters
__all__ = ('log_dir',
'logging',
__all__ = ('start_logging',
'suspend_logging',
'resume_logging',
'log_dir',
'is_logging',
'create_log',
'append_log',
......@@ -82,37 +85,54 @@ def start_logging(dirname="LOGS"):
The log-directory, if it does not already exist, will be created
lazily, i.e. only when logging actually starts."""
CFG = access_presets()
CFG['log_dir'] = os.path.abspath(dirname)
finalize_presets()
#TODO: Remove this context manager, not really useful...
@contextlib.contextmanager
def logging(dirname="LOGS"):
"""
DEPRECATED! Use `start_logging()` instead!
Context manager. Log files within this context will be stored in
directory ``dirname``. Logging is turned off if name is empty.
Args:
dirname: the name for the log directory or the empty string to
turn logging of
"""
print('The `logging`-context-manager is DEPRECATED! Use `start_logging()` instead!')
CFG = access_presets()
if dirname and not isinstance(dirname, str):
dirname = "LOGS" # be fail tolerant here...
try:
save = CFG['log_dir']
except AttributeError:
save = ''
CFG['log_dir'] = dirname
finalize_presets()
yield
CFG = access_presets()
CFG['log_dir'] = save
finalize_presets()
log_dir = os.path.abspath(dirname) if dirname else ''
if log_dir != CFG['log_dir']:
CFG['log_dir'] = log_dir
set_config_value('log_dir', log_dir)
finalize_presets()
def suspend_logging() -> str:
    """Switches logging off for the current thread and hands back the
    previously active log-directory, so that logging can be restored
    later with ``resume_logging()``."""
    previous_log_dir = get_config_value('log_dir')
    set_config_value('log_dir', '')
    return previous_log_dir
def resume_logging(log_dir: str = ''):
    """Resumes logging in the current thread with the given log-directory.

    :param log_dir: the log-directory returned by an earlier call to
        ``suspend_logging()``.  If empty, the log-directory is looked up
        in the (process-wide) configuration presets instead.
    """
    # BUG FIX: the original tested the string literal ``'log_dir'`` (always
    # truthy), so the preset-fallback branch could never execute; the
    # parameter itself must be tested.
    if not log_dir:
        CFG = access_presets()
        log_dir = CFG['log_dir']
    set_config_value('log_dir', log_dir)
# #TODO: Remove this context manager, not really useful...
# @contextlib.contextmanager
# def logging(dirname="LOGS"):
# """
# DEPRECATED! Use `start_logging()` instead!
#
# Context manager. Log files within this context will be stored in
# directory ``dirname``. Logging is turned off if name is empty.
#
# Args:
# dirname: the name for the log directory or the empty string to
# turn logging off
# """
# print('The `logging`-context-manager is DEPRECATED! Use `start_logging()` instead!')
# CFG = access_presets()
# if dirname and not isinstance(dirname, str):
# dirname = "LOGS" # be fail tolerant here...
# try:
# save = CFG['log_dir']
# except AttributeError:
# save = ''
# CFG['log_dir'] = dirname
# finalize_presets()
# yield
# CFG = access_presets()
# CFG['log_dir'] = save
# finalize_presets()
def log_dir() -> Union[str, bool]:
......@@ -134,8 +154,7 @@ def log_dir() -> Union[str, bool]:
not been switched on with ``start_logging()``, yet.
"""
# the try-except clauses in the following are precautions for multithreading
CFG = access_presets()
dirname = CFG['log_dir'] # raises a name error if LOGGING is not defined
dirname = get_config_value('log_dir') # raises a name error if LOGGING is not defined
if not dirname:
return False
if os.path.exists(dirname) and not os.path.isdir(dirname):
......@@ -156,10 +175,13 @@ def log_dir() -> Union[str, bool]:
return dirname
def is_logging(thread_local_query: bool = True) -> bool:
    """-> True, if logging is turned on.

    :param thread_local_query: if True (default), the thread-local
        configuration value is queried; otherwise the process-wide
        configuration presets are consulted.
    """
    # NOTE(review): this span contained the pre- and post-refactoring
    # versions of the function fused together (diff-extraction artifact);
    # only the refactored version is kept here.
    if thread_local_query:
        return bool(get_config_value('log_dir'))
    CFG = access_presets()
    return bool(CFG['log_dir'])
def create_log(log_name: str) -> str:
......@@ -209,16 +231,17 @@ def clear_logs(logfile_types=frozenset(['.cst', '.ast', '.log'])):
log-directory if it is empty.
"""
log_dirname = log_dir()
files = os.listdir(log_dirname)
only_log_files = True
for file in files:
path = os.path.join(log_dirname, file)
if os.path.splitext(file)[1] in logfile_types or file == 'info.txt':
os.remove(path)
else:
only_log_files = False
if only_log_files:
os.rmdir(log_dirname)
if log_dirname and os.path.exists(log_dirname) and os.path.isdir(log_dirname):
files = os.listdir(log_dirname)
only_log_files = True
for file in files:
path = os.path.join(log_dirname, file)
if os.path.splitext(file)[1] in logfile_types or file == 'info.txt':
os.remove(path)
else:
only_log_files = False
if only_log_files:
os.rmdir(log_dirname)
#######################################################################
......
......@@ -20,6 +20,7 @@ permissions and limitations under the License.
import os
import sys
from typing import cast
scriptdir = os.path.dirname(os.path.abspath(__file__))
i, k = scriptdir.find('DHParser-submodule'), len('DHParser-submodule')
......@@ -36,9 +37,9 @@ templatedir = os.path.join(os.path.dirname(scriptdir.rstrip('/')), 'templates')
from DHParser.compile import compile_source
from DHParser.dsl import compileDSL, compile_on_disk # , recompile_grammar
from DHParser.ebnf import get_ebnf_grammar, get_ebnf_transformer, get_ebnf_compiler
from DHParser.log import logging
from DHParser.toolkit import re, typing
from typing import cast
from DHParser.log import start_logging
from DHParser.toolkit import re
LOGGING = False
......@@ -219,11 +220,12 @@ def main():
print('File %s not found! Aborting.' % file_path)
sys.exit(1)
elif choice.strip() == '3':
with logging(LOGGING):
if not cpu_profile(selftest, 1):
print("Selftest FAILED :-(\n")
sys.exit(1)
print("Selftest SUCCEEDED :-)\n")
if LOGGING:
start_logging(LOGGING)
if not cpu_profile(selftest, 1):
print("Selftest FAILED :-(\n")
sys.exit(1)
print("Selftest SUCCEEDED :-)\n")
elif choice.strip().lower() not in {'q', 'quit', 'exit'}:
print('No valid choice. Goodbye!')
......
def compile_src(source):
    """Compiles ``source`` and returns (result, errors, ast).
    """
    # NOTE(review): the span fused the old (logging-context-manager based)
    # and the new version of this function (diff-extraction artifact);
    # only the refactored version is kept.  Logging is now controlled by
    # the caller via start_logging() instead of a ``log_dir`` parameter.
    result_tuple = compile_source(source, get_preprocessor(), get_grammar(), get_transformer(),
                                  get_compiler())
    return result_tuple
......@@ -28,7 +25,8 @@ if __name__ == "__main__":
file_name, log_dir = sys.argv[1], ''
if file_name in ['-d', '--debug'] and len(sys.argv) > 2:
file_name, log_dir = sys.argv[2], 'LOGS'
result, errors, _ = compile_src(file_name, log_dir)
start_logging(log_dir)
result, errors, _ = compile_src(file_name)
if errors:
cwd = os.getcwd()
rel_path = file_name[len(cwd):] if file_name.startswith(cwd) else file_name
......
......@@ -26,23 +26,23 @@ except ModuleNotFoundError:
def recompile_grammar(grammar_src, force):
    """Recompiles the EBNF-grammar ``grammar_src`` (only if it has changed)
    and creates test templates for it.  Exits the process with status 1 if
    recompilation produced errors."""
    # NOTE(review): the span fused the old (``with DHParser.log.logging``)
    # and new (``start_logging``) versions (diff-extraction artifact);
    # only the refactored version is kept.
    grammar_tests_dir = os.path.join(scriptpath, 'grammar_tests')
    testing.create_test_templates(grammar_src, grammar_tests_dir)
    DHParser.log.start_logging('LOGS')
    # recompiles Grammar only if it has changed
    if not dsl.recompile_grammar(grammar_src, force=force,
                                 notify=lambda: print('recompiling ' + grammar_src)):
        print('\nErrors while recompiling "%s":' % grammar_src +
              '\n--------------------------------------\n\n')
        with open('{name}_ebnf_ERRORS.txt', encoding='utf-8') as f:
            print(f.read())
        sys.exit(1)
def run_grammar_tests(glob_pattern, get_grammar, get_transformer):
    """Runs the grammar test suite for all test files matching
    ``glob_pattern`` and returns the error report."""
    # NOTE(review): the span fused the old (``with DHParser.log.logging``)
    # and new (``start_logging``) versions (diff-extraction artifact);
    # only the refactored version is kept.
    DHParser.log.start_logging(LOGGING)
    error_report = testing.grammar_suite(
        os.path.join(scriptpath, 'grammar_tests'),
        get_grammar, get_transformer,
        fn_patterns=[glob_pattern], report='REPORT', verbose=True)
    return error_report
......
......@@ -39,7 +39,7 @@ from typing import Dict, List, Union, cast
from DHParser.configuration import THREAD_LOCALS, get_config_value
from DHParser.error import Error, is_error, adjust_error_locations
from DHParser.log import log_dir, logging, is_logging, clear_logs, log_parsing_history
from DHParser.log import log_dir, is_logging, clear_logs, log_parsing_history
from DHParser.parse import UnknownParserError, Parser, Lookahead
from DHParser.syntaxtree import Node, RootNode, parse_tree, flatten_sxpr, ZOMBIE_TAG
from DHParser.toolkit import load_if_file, re
......@@ -521,17 +521,6 @@ def reset_unit(test_unit):
del tests[key]
def run_unit(logdir, *parameters):
"""
Run `grammar_unit()` with logs written to `log_dir` or no logs if `log_dir`
evaluates to False. This helper functions is needed for running unit tests
in a multiprocessing environment, because log.log_dir(), log.logging() and
log.is_logging() are thread-local.
"""
with logging(logdir):
return grammar_unit(*parameters)
def grammar_suite(directory, parser_factory, transformer_factory,
fn_patterns=['*test*'],
ignore_unknown_filetypes=False,
......@@ -559,7 +548,7 @@ def grammar_suite(directory, parser_factory, transformer_factory,
print(filename)
if any(fnmatch.fnmatch(filename, pattern) for pattern in fn_patterns):
parameters = filename, parser_factory, transformer_factory, report, verbose
results.append((filename, pool.submit(run_unit, log_dir(), *parameters)))
results.append((filename, pool.submit(grammar_unit, *parameters)))
for filename, err_future in results:
try:
errata = err_future.result()
......@@ -816,17 +805,6 @@ def run_file(fname):
runner('', eval(fname[:-3]).__dict__)
def run_with_log(logdir, f):
"""
Run `grammar_unit()` with logs written to `log_dir` or no logs if `log_dir`
evaluates to False. This helper functions is needed for running unit tests
in a multiprocessing environment, because log.log_dir(), log.logging() and
log.is_logging() are thread-local.
"""
with logging(logdir):
run_file(f)
def run_path(path):
"""Runs all unit tests in `path`"""
if os.path.isdir(path):
......@@ -837,7 +815,7 @@ def run_path(path):
if get_config_value('test_parallelization'):
with concurrent.futures.ProcessPoolExecutor(multiprocessing.cpu_count()) as pool:
for f in files:
result_futures.append(pool.submit(run_with_log, log_dir(), f))
result_futures.append(pool.submit(run_file, f))
# run_file(f) # for testing!
for r in result_futures:
try:
......
......@@ -37,7 +37,7 @@ except ImportError:
import re
import sys
import typing
from typing import Any, Iterable, Sequence, Set, Union, Dict, Hashable
from typing import Any, Iterable, Sequence, Set, Union, Dict
try:
import cython
......
......@@ -44,6 +44,4 @@ from DHParser import parse_sxpr, Compiler
if __name__ == "__main__":
from DHParser.testing import runner
from DHParser.log import logging
with logging(False):
runner("", globals())
runner("", globals())
......@@ -26,7 +26,7 @@ sys.path.extend(['../', './'])
from DHParser.configuration import get_config_value, set_config_value
from DHParser.toolkit import compile_python_object
from DHParser.log import logging, is_logging, log_ST, log_parsing_history
from DHParser.log import is_logging, log_ST, log_parsing_history
from DHParser.error import Error, is_error
from DHParser.parse import ParserError, Parser, Grammar, Forward, TKN, ZeroOrMore, RE, \
RegExp, Lookbehind, NegativeLookahead, OneOrMore, Series, Alternative, AllOf, SomeOf, \
......@@ -93,8 +93,7 @@ class TestInfiLoopsAndRecursion:
snippet = "9 + 8 + 7 + 6 + 5 + 3 * 4"
parser = grammar_provider(minilang)()
assert parser
with logging():
syntax_tree = parser(snippet)
syntax_tree = parser(snippet)
assert not is_error(syntax_tree.error_flag), syntax_tree.errors_sorted
assert snippet == syntax_tree.content
......@@ -323,9 +322,8 @@ class TestGrammar:
def test_pos_values_initialized(self):
# checks whether pos values in the parsing result and in the
# history record have been initialized
with logging("LOGS"):
grammar = compile_python_object(DHPARSER_IMPORTS + self.pyparser, r'\w+Grammar$')()
grammar("no_file_name*")
grammar = compile_python_object(DHPARSER_IMPORTS + self.pyparser, r'\w+Grammar$')()
grammar("no_file_name*")
for record in grammar.history__:
assert not record.node or record.node.pos >= 0
......
......@@ -29,7 +29,7 @@ sys.path.extend(['../', './'])
from DHParser.toolkit import has_fenced_code, load_if_file, re, \
lstrip_docstring, issubtype, typing, concurrent_ident
from DHParser.log import log_dir, logging, is_logging
from DHParser.log import log_dir, start_logging, is_logging, suspend_logging, resume_logging
class TestLoggingAndLoading:
......@@ -100,32 +100,36 @@ class TestLoggingAndLoading:
# pass
res = log_dir()
assert isinstance(res, bool) and not res
with logging("TESTLOGS"):
assert not os.path.exists("TESTLOGS"), \
"Log dir should be created lazily!"
dirname = log_dir()
assert dirname == "TESTLOGS"
assert is_logging(), "is_logging() should return True, if logging is on"
with logging(False):
assert not is_logging(), \
"is_logging() should return False, if innermost logging context " \
"has logging turned off."
assert is_logging(), "is_logging() should return True after logging off " \
"context has been left"
assert os.path.exists("TESTLOGS/info.txt"), "an 'info.txt' file should be " \
"created within a newly created log dir"
start_logging("TESTLOGS")
assert not os.path.exists("TESTLOGS"), \
"Log dir should be created lazily!"
dirname = log_dir()
print(type(dirname), dirname)
assert os.path.basename(dirname) == "TESTLOGS"
assert is_logging(), "is_logging() should return True, if logging is on"
save_log_dir = suspend_logging()
assert not is_logging(), \
"is_logging() should return False, if innermost logging context " \
"has logging turned off."
resume_logging(save_log_dir)
assert is_logging(), "is_logging() should return True after logging off " \
"context has been left"
assert os.path.exists("TESTLOGS/info.txt"), "an 'info.txt' file should be " \
"created within a newly created log dir"
# cleanup
os.remove("TESTLOGS/info.txt")
os.rmdir("TESTLOGS")
def logging_task(self):
    """Worker executed in a separate process: checks that logging can be
    suspended and resumed and reports whether the log-info file exists."""
    # NOTE(review): the span fused the old context-manager based body with
    # the new suspend/resume based body (diff-extraction artifact); only
    # the refactored version is kept.
    log_dir()
    assert is_logging(), "Logging should be on inside logging context"
    save_log_dir = suspend_logging()
    assert not is_logging(), "Logging should be off outside logging context"
    resume_logging(save_log_dir)
    return os.path.exists("TESTLOGS/info.txt")
def test_logging_multiprocessing(self):
start_logging("TESTLOGS")
with concurrent.futures.ProcessPoolExecutor() as ex:
f1 = ex.submit(self.logging_task)
f2 = ex.submit(self.logging_task)
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment