Commit 923da94b authored by di68kap's avatar di68kap

Merge branch 'development'

parents 1b5d7691 6bd65922
......@@ -15,6 +15,8 @@ testdata/*.pdf
*.old
DEBUG*
LOGS
**/LOGS/*
**/LOG*
**/REPORT/*
REPORT
external_resources/
......@@ -46,3 +48,5 @@ OLDSTUFF
*.c
*.pyd
docs/*
**/node_modules
*.py.cfg
{
"python.pythonPath": "/usr/bin/python3",
"python.linting.pylintEnabled": true,
"python.linting.enabled": true
}
\ No newline at end of file
......@@ -9,6 +9,8 @@ DHParser Version 0.9.0
DHParser/scripts-directory
- syntaxtree.py streamlined
(breaking change: Node.select_by_tag() removed, use Node.select() instead)
- added DHParser/templates subdirectory and moved larger code templates
from dhparser.py script to the templates-subdirectory
DHParser Version 0.8.9 (5.4.2019)
......@@ -84,7 +86,7 @@ DHParser Version 0.8.4 (6.1.2019)
DHParser Version 0.8.3 (20.8.2018)
..................................
- new transformation function collapse_if()
- new transformation function collapse_children_if()
- restored compatibility with Python 3.4
- StepByStep Guide typos fixed
- bug fixes
......
#!/usr/bin/python3
"""__init__.py - package definition module for DHParser
Copyright 2016 by Eckhart Arnold (arnold@badw.de)
......@@ -28,6 +26,7 @@ from .error import *
from .log import *
from .parse import *
from .preprocess import *
from .server import *
from .stringview import *
from .syntaxtree import *
from .testing import *
......@@ -43,6 +42,7 @@ __all__ = (compile.__all__ +
log.__all__ +
parse.__all__ +
preprocess.__all__ +
server.__all__ +
stringview.__all__ +
syntaxtree.__all__ +
testing.__all__ +
......
......@@ -38,13 +38,14 @@ import copy
import os
from typing import Any, Optional, Tuple, List, cast
from DHParser.configuration import get_config_value
from DHParser.preprocess import with_source_mapping, PreprocessorFunc, SourceMapFunc
from DHParser.syntaxtree import Node, RootNode, ZOMBIE_TAG, StrictResultType
from DHParser.transform import TransformationFunc
from DHParser.parse import Grammar
from DHParser.error import adjust_error_locations, is_error, is_fatal, Error
from DHParser.log import log_parsing_history, log_ST, is_logging
from DHParser.toolkit import load_if_file, is_filename, get_config_value
from DHParser.toolkit import load_if_file, is_filename
__all__ = ('CompilerError',
......@@ -154,7 +155,6 @@ class Compiler:
(This very much depends on the kind and purpose of the
implemented compiler.)
"""
assert root.tag_name != ZOMBIE_TAG
if self._dirty_flag:
self.reset()
self._dirty_flag = True
......@@ -372,7 +372,7 @@ def process_tree(tp: TreeProcessor, tree: RootNode) -> RootNode:
"""Process a tree with the tree-processor `tp` only if no fatal error
has occurred so far. Process, but catch any Python exceptions, in case
any normal errors have occurred earlier in the processing pipeline.
Don't catch Python-exceptions if not errors have occurred earlier.
Don't catch Python-exceptions if no errors have occurred earlier.
This behaviour is based on the assumption that given any non-fatal
errors have occurred earlier, the tree passed through the pipeline
......@@ -383,6 +383,12 @@ def process_tree(tp: TreeProcessor, tree: RootNode) -> RootNode:
be difficult to provide for all possible kinds of badly structured
trees resulting from errors, exceptions occurring on code processing
potentially faulty trees will be dealt with gracefully.
Although process_tree returns the root-node of the processed tree,
tree processing should generally be assumed to change the tree
in place, even if a different root-node is returned than was passed
to the tree. If the input tree shall be preserved, it is necessary to
make a deep copy of the input tree, before calling process_tree.
"""
assert isinstance(tp, TreeProcessor)
if not is_fatal(tree.error_flag):
......@@ -402,8 +408,8 @@ def process_tree(tp: TreeProcessor, tree: RootNode) -> RootNode:
# the exceptions through
tree = tp(tree)
assert isinstance(tree, RootNode)
return tree
return tree
# TODO: Verify compiler against grammar, i.e. make sure that for all on_X()-methods, `X` is the name of a parser
# TODO: AST validation against an ASDSL-Specification
\ No newline at end of file
# TODO: AST validation against an ASDSL-Specification
......@@ -17,9 +17,8 @@
"""
Module "configuration.py" defines the default configuration for DHParser.
The configuration values can be changed while running via the
DHParser.toolkit.get_config_value() and DHParser.toolkit.get_config_value()-
functions.
The configuration values can be read and changed while running via the
get_config_value() and set_config_value()-functions.
The presets can also be overwritten before(!) spawning any parsing processes by
overwriting the values in the CONFIG_PRESET dictionary.
......@@ -32,7 +31,12 @@ program and before any DHParser-function is invoked.
from typing import Dict, Hashable, Any
__all__ = ('CONFIG_PRESET',
__all__ = ('access_presets',
'finalize_presets',
'THREAD_LOCALS',
'access_thread_locals',
'get_config_value',
'set_config_value',
'XML_SERIALIZATION',
'SXPRESSION_SERIALIZATION',
'COMPACT_SERIALIZATION',
......@@ -40,7 +44,138 @@ __all__ = ('CONFIG_PRESET',
'JSON_SERIALIZATION',
'SERIALIZATIONS')
CONFIG_PRESET = dict() # type: Dict[Hashable, Any]
########################################################################
#
# multiprocessing-safe preset- and configuration-handling
#
########################################################################
CONFIG_PRESET = dict() # type: Dict[str, Any]
CONFIG_PRESET['syncfile_path'] = ''
THREAD_LOCALS = None
def get_syncfile_path(pid: int) -> str:
    """Returns the path of the temporary file that is used to pass
    configuration presets to processes spawned by the process with
    the process-id `pid`."""
    import os
    import tempfile
    filename = 'DHParser_%i.cfg' % pid
    return os.path.join(tempfile.gettempdir(), filename)
def access_presets() -> Dict[str, Any]:
    """
    Returns a dictionary of presets for configuration values.
    If any preset values are changed after calling `access_presets()`,
    `finalize_presets()` should be called to make sure that processes
    spawned after changing the preset values, will be able to read
    the changed values.
    See: https://docs.python.org/3/library/multiprocessing.html#the-spawn-and-forkserver-start-methods
    """
    import multiprocessing
    global CONFIG_PRESET
    # With the 'fork'-start-method child processes inherit CONFIG_PRESET
    # automatically; only 'spawn'/'forkserver' need the pickled syncfile.
    # An already-set 'syncfile_path' means the presets have been read before.
    if not CONFIG_PRESET['syncfile_path'] and multiprocessing.get_start_method() != 'fork':
        import os
        import pickle
        syncfile_path = get_syncfile_path(os.getppid())  # assume this is a spawned process
        if not os.path.exists(syncfile_path):
            syncfile_path = get_syncfile_path(os.getpid())  # assume this is the root process
        f = None
        try:
            f = open(syncfile_path, 'rb')
            preset = pickle.load(f)
            # sanity checks: the syncfile must contain a preset-dictionary
            # that refers back to the very file it was read from
            assert isinstance(preset, dict)
            assert preset['syncfile_path'] == syncfile_path, \
                'Conflicting syncfile paths %s != %s' % (preset['syncfile_path'], syncfile_path)
            CONFIG_PRESET = preset
        except FileNotFoundError:
            # no syncfile yet: keep the built-in presets (root process
            # before finalize_presets() has ever been called)
            pass
        finally:
            if f is not None:
                f.close()
    return CONFIG_PRESET
def remove_cfg_tempfile(filename: str):
    """Removes the configuration-syncfile `filename` if it still exists.

    This function is registered with `atexit` by `finalize_presets()`.
    It tolerates the file already having been deleted (e.g. by another
    exit-handler or an earlier cleanup), so that interpreter shutdown
    does not print a spurious traceback.

    :param filename: path of the temporary configuration file to delete
    """
    import os
    try:
        os.remove(filename)
    except FileNotFoundError:
        pass  # best-effort cleanup: the file is already gone
def finalize_presets():
    """
    Finalizes changes of the presets of the configuration values.
    This method should always be called after changing preset values to
    make sure the changes will be visible to processes spawned later.
    """
    import atexit
    import multiprocessing
    import os
    import pickle
    global CONFIG_PRESET
    # only needed for 'spawn'/'forkserver'; 'fork'-children inherit
    # CONFIG_PRESET directly from the parent process
    if multiprocessing.get_start_method() != 'fork':
        syncfile_path = get_syncfile_path(os.getpid())
        existing_syncfile = CONFIG_PRESET['syncfile_path']
        # a syncfile for the parent's pid would mean this process was itself
        # spawned, i.e. it is not the main process
        assert ((not existing_syncfile or existing_syncfile == syncfile_path)
                and (not os.path.exists((get_syncfile_path(os.getppid()))))), \
            "finalize_presets() can only be called from the main process!"
        with open(syncfile_path, 'wb') as f:
            CONFIG_PRESET['syncfile_path'] = syncfile_path
            # register cleanup only on first creation of the syncfile
            if existing_syncfile != syncfile_path:
                atexit.register(remove_cfg_tempfile, syncfile_path)
            pickle.dump(CONFIG_PRESET, f)
def access_thread_locals() -> Any:
    """Initializes (if not done yet) and returns the thread local variable
    store. (Call this function before using THREAD_LOCALS.
    Direct usage of THREAD_LOCALS is DEPRECATED!)
    """
    global THREAD_LOCALS
    if THREAD_LOCALS is not None:
        return THREAD_LOCALS
    import threading
    THREAD_LOCALS = threading.local()
    return THREAD_LOCALS
def get_config_value(key: Hashable) -> Any:
    """
    Retrieves a configuration value thread-safely.
    :param key: the key (an immutable, usually a string)
    :return: the value
    """
    store = access_thread_locals()
    try:
        cfg = store.config
    except AttributeError:
        # first config-access in this thread: create the thread-local cache
        cfg = store.config = dict()
    try:
        return cfg[key]
    except KeyError:
        # not cached yet: read from the presets and remember the value
        value = access_presets()[key]
        cfg[key] = value
        return value
def set_config_value(key: Hashable, value: Any):
    """
    Changes a configuration value thread-safely. The configuration
    value will be set only for the current thread. In order to
    set configuration values for any new thread, add the key and value
    to CONFIG_PRESET, before any thread accessing config values is started.
    :param key: the key (an immutable, usually a string)
    :param value: the value
    """
    # Consistency fix: reuse access_thread_locals() instead of duplicating
    # its lazy THREAD_LOCALS initialization here (get_config_value() already
    # goes through the accessor as well).
    store = access_thread_locals()
    try:
        _ = store.config
    except AttributeError:
        # first config-access in this thread: create the thread-local cache
        store.config = dict()
    store.config[key] = value
########################################################################
......@@ -117,6 +252,9 @@ CONFIG_PRESET['default_serialization'] = SMART_SERIALIZATION
# Default value: 120
CONFIG_PRESET['flatten_sxpr_threshold'] = 120
# Defines the maximum number of LINES before the "smart" serialization
# will switch from S-expression output to compact output
CONFIG_PRESET['compact_sxpr_threshold'] = 25
########################################################################
#
......@@ -157,6 +295,11 @@ CONFIG_PRESET['add_grammar_source_to_parser_docstring'] = False
# Default value: 4 MB
CONFIG_PRESET['max_rpc_size'] = 4 * 1024 * 1024
# Add a header to JSON-RPC requests of responses.
# see: https://microsoft.github.io/language-server-protocol/specification#header-part
# Default value: True
CONFIG_PRESET['jsonrpc_header'] = True
# Default host name or IP-address for the compiler server. Should usually
# be localhost (127.0.0.1)
# Default value: 127.0.0.1.
......@@ -185,10 +328,20 @@ CONFIG_PRESET['debug_compiler'] = False
#
########################################################################
# Log-directory. An empty string means that writing of log files is
# turned off, no matter what value the other log-configuration
# parameters have. The only exception is "echo logging" to the terminal!
# Default value: '' (all logging is turned off)
CONFIG_PRESET['log_dir'] = ''
# Log server traffic (requests and responses)
# Default value: False
CONFIG_PRESET['log_server'] = False
# Echo server log messages on the terminal.
# Default value: False
CONFIG_PRESET['echo_server_log'] = False
########################################################################
#
......
......@@ -28,11 +28,12 @@ import stat
import DHParser.ebnf
from DHParser.compile import Compiler, compile_source
from DHParser.configuration import get_config_value, set_config_value
from DHParser.ebnf import EBNFCompiler, grammar_changed, DHPARSER_IMPORTS, \
get_ebnf_preprocessor, get_ebnf_grammar, get_ebnf_transformer, get_ebnf_compiler, \
PreprocessorFactoryFunc, ParserFactoryFunc, TransformerFactoryFunc, CompilerFactoryFunc
from DHParser.error import Error, is_error, has_errors, only_errors
from DHParser.log import logging
from DHParser.log import suspend_logging, resume_logging
from DHParser.parse import Grammar
from DHParser.preprocess import nil_preprocessor, PreprocessorFunc
from DHParser.syntaxtree import Node
......@@ -58,7 +59,7 @@ def read_template(template_name: str) -> str:
Reads a script-template from a template file named `template_name`
in the template-directory and returns it as a string.
"""
with open(os.path.join(DHPARSER_DIR, 'templates', template_name), 'r') as f:
with open(os.path.join(DHPARSER_DIR, 'templates', template_name), 'r', encoding='utf-8') as f:
return f.read()
......@@ -145,11 +146,12 @@ def grammar_instance(grammar_representation) -> Tuple[Grammar, str]:
if is_python_code(grammar_src):
parser_py, messages = grammar_src, [] # type: str, List[Error]
else:
with logging(False):
result, messages, _ = compile_source(
grammar_src, None,
get_ebnf_grammar(), get_ebnf_transformer(), get_ebnf_compiler())
parser_py = cast(str, result)
log_dir = suspend_logging()
result, messages, _ = compile_source(
grammar_src, None,
get_ebnf_grammar(), get_ebnf_transformer(), get_ebnf_compiler())
parser_py = cast(str, result)
resume_logging(log_dir)
if has_errors(messages):
raise DefinitionError(only_errors(messages), grammar_src)
parser_root = compile_python_object(DHPARSER_IMPORTS + parser_py, r'\w+Grammar$')()
......@@ -291,10 +293,11 @@ def load_compiler_suite(compiler_suite: str) -> \
else:
# Assume source is an ebnf grammar.
# Is there really any reasonable application case for this?
with logging(False):
compiler_py, messages, n = compile_source(source, None, get_ebnf_grammar(),
get_ebnf_transformer(),
get_ebnf_compiler(compiler_suite, source))
log_dir = suspend_logging()
compiler_py, messages, n = compile_source(source, None, get_ebnf_grammar(),
get_ebnf_transformer(),
get_ebnf_compiler(compiler_suite, source))
resume_logging(log_dir)
if has_errors(messages):
raise DefinitionError(only_errors(messages), source)
preprocessor = get_ebnf_preprocessor
......
......@@ -31,6 +31,7 @@ import os
from typing import Callable, Dict, List, Set, Tuple, Sequence, Union, Optional, Any, cast
from DHParser.compile import CompilerError, Compiler, compile_source, visitor_name
from DHParser.configuration import THREAD_LOCALS, get_config_value
from DHParser.error import Error
from DHParser.parse import Grammar, mixin_comment, Forward, RegExp, DropWhitespace, \
NegativeLookahead, Alternative, Series, Option, OneOrMore, ZeroOrMore, Token, \
......@@ -38,7 +39,7 @@ from DHParser.parse import Grammar, mixin_comment, Forward, RegExp, DropWhitespa
from DHParser.preprocess import nil_preprocessor, PreprocessorFunc
from DHParser.syntaxtree import Node, WHITESPACE_PTYPE, TOKEN_PTYPE
from DHParser.toolkit import load_if_file, escape_re, md5, sane_parser_name, re, expand_table, \
GLOBALS, get_config_value, unrepr, compile_python_object, DHPARSER_PARENTDIR
unrepr, compile_python_object, DHPARSER_PARENTDIR
from DHParser.transform import TransformationFunc, traverse, remove_brackets, \
reduce_single_child, replace_by_single_child, remove_whitespace, remove_empty, \
remove_tokens, flatten, forbid, assert_content
......@@ -74,30 +75,31 @@ from functools import partial
import os
import sys
sys.path.append(r'{dhparser_parentdir}')
if r'{dhparser_parentdir}' not in sys.path:
sys.path.append(r'{dhparser_parentdir}')
try:
import regex as re
except ImportError:
import re
from DHParser import logging, is_filename, load_if_file, \\
from DHParser import start_logging, suspend_logging, resume_logging, is_filename, load_if_file, \\
Grammar, Compiler, nil_preprocessor, PreprocessorToken, Whitespace, DropWhitespace, \\
Lookbehind, Lookahead, Alternative, Pop, Token, DropToken, Synonym, AllOf, SomeOf, \\
Unordered, Option, NegativeLookbehind, OneOrMore, RegExp, Retrieve, Series, Capture, \\
ZeroOrMore, Forward, NegativeLookahead, Required, mixin_comment, compile_source, \\
grammar_changed, last_value, counterpart, PreprocessorFunc, is_empty, \\
grammar_changed, last_value, counterpart, PreprocessorFunc, is_empty, remove_if, \\
Node, TransformationFunc, TransformationDict, transformation_factory, traverse, \\
remove_children_if, move_adjacent, normalize_whitespace, is_anonymous, matches_re, \\
reduce_single_child, replace_by_single_child, replace_or_reduce, remove_whitespace, \\
replace_by_children, remove_empty, remove_tokens, flatten, is_insignificant_whitespace, \\
collapse, collapse_if, replace_content, WHITESPACE_PTYPE, TOKEN_PTYPE, \\
merge_adjacent, collapse, collapse_children_if, replace_content, WHITESPACE_PTYPE, TOKEN_PTYPE, \\
remove_nodes, remove_content, remove_brackets, change_tag_name, remove_anonymous_tokens, \\
keep_children, is_one_of, not_one_of, has_content, apply_if, remove_first, remove_last, \\
remove_anonymous_empty, keep_nodes, traverse_locally, strip, lstrip, rstrip, \\
replace_content, replace_content_by, forbid, assert_content, remove_infix_operator, \\
error_on, recompile_grammar, left_associative, lean_left, set_config_value, \\
get_config_value, XML_SERIALIZATION, SXPRESSION_SERIALIZATION, COMPACT_SERIALIZATION, \\
JSON_SERIALIZATION, CONFIG_PRESET, GLOBALS
JSON_SERIALIZATION, access_thread_locals, access_presets, finalize_presets, ErrorCode
'''.format(dhparser_parentdir=DHPARSER_PARENTDIR)
......@@ -251,11 +253,11 @@ def grammar_changed(grammar_class, grammar_source: str) -> bool:
def get_ebnf_grammar() -> EBNFGrammar:
try:
grammar = GLOBALS.ebnf_grammar_singleton
grammar = THREAD_LOCALS.ebnf_grammar_singleton
return grammar
except AttributeError:
GLOBALS.ebnf_grammar_singleton = EBNFGrammar()
return GLOBALS.ebnf_grammar_singleton
THREAD_LOCALS.ebnf_grammar_singleton = EBNFGrammar()
return THREAD_LOCALS.ebnf_grammar_singleton
########################################################################
......@@ -302,10 +304,10 @@ def EBNFTransform() -> TransformationFunc:
def get_ebnf_transformer() -> TransformationFunc:
try:
transformer = GLOBALS.EBNF_transformer_singleton
transformer = THREAD_LOCALS.EBNF_transformer_singleton
except AttributeError:
GLOBALS.EBNF_transformer_singleton = EBNFTransform()
transformer = GLOBALS.EBNF_transformer_singleton
THREAD_LOCALS.EBNF_transformer_singleton = EBNFTransform()
transformer = THREAD_LOCALS.EBNF_transformer_singleton
return transformer
......@@ -330,13 +332,14 @@ def get_preprocessor() -> PreprocessorFunc:
GRAMMAR_FACTORY = '''
def get_grammar() -> {NAME}Grammar:
"""Returns a thread/process-exclusive {NAME}Grammar-singleton."""
THREAD_LOCALS = access_thread_locals()
try:
grammar = GLOBALS.{NAME}_{ID:08d}_grammar_singleton
grammar = THREAD_LOCALS.{NAME}_{ID:08d}_grammar_singleton
except AttributeError:
GLOBALS.{NAME}_{ID:08d}_grammar_singleton = {NAME}Grammar()
THREAD_LOCALS.{NAME}_{ID:08d}_grammar_singleton = {NAME}Grammar()
if hasattr(get_grammar, 'python_src__'):
GLOBALS.{NAME}_{ID:08d}_grammar_singleton.python_src__ = get_grammar.python_src__
grammar = GLOBALS.{NAME}_{ID:08d}_grammar_singleton
THREAD_LOCALS.{NAME}_{ID:08d}_grammar_singleton.python_src__ = get_grammar.python_src__
grammar = THREAD_LOCALS.{NAME}_{ID:08d}_grammar_singleton
return grammar
'''
......@@ -349,11 +352,12 @@ def Create{NAME}Transformer() -> TransformationFunc:
def get_transformer() -> TransformationFunc:
"""Returns a thread/process-exclusive transformation function."""
THREAD_LOCALS = access_thread_locals()
try:
transformer = GLOBALS.{NAME}_{ID:08d}_transformer_singleton
transformer = THREAD_LOCALS.{NAME}_{ID:08d}_transformer_singleton
except AttributeError:
GLOBALS.{NAME}_{ID:08d}_transformer_singleton = Create{NAME}Transformer()
transformer = GLOBALS.{NAME}_{ID:08d}_transformer_singleton
THREAD_LOCALS.{NAME}_{ID:08d}_transformer_singleton = Create{NAME}Transformer()
transformer = THREAD_LOCALS.{NAME}_{ID:08d}_transformer_singleton
return transformer
'''
......@@ -361,11 +365,12 @@ def get_transformer() -> TransformationFunc:
COMPILER_FACTORY = '''
def get_compiler() -> {NAME}Compiler:
"""Returns a thread/process-exclusive {NAME}Compiler-singleton."""
THREAD_LOCALS = access_thread_locals()
try:
compiler = GLOBALS.{NAME}_{ID:08d}_compiler_singleton
compiler = THREAD_LOCALS.{NAME}_{ID:08d}_compiler_singleton
except AttributeError:
GLOBALS.{NAME}_{ID:08d}_compiler_singleton = {NAME}Compiler()
compiler = GLOBALS.{NAME}_{ID:08d}_compiler_singleton
THREAD_LOCALS.{NAME}_{ID:08d}_compiler_singleton = {NAME}Compiler()
compiler = THREAD_LOCALS.{NAME}_{ID:08d}_compiler_singleton
return compiler
'''
......@@ -444,7 +449,7 @@ class EBNFDirectives:
setattr(self, key, value)
def keys(self):
return self.__dict__.keys()
return self.__slots__
class EBNFCompilerError(CompilerError):
......@@ -698,9 +703,10 @@ class EBNFCompiler(Compiler):
def _check_rx(self, node: Node, rx: str) -> str:
"""
Checks whether the string `rx` represents a valid regular
expression. Makes sure that multiline regular expressions are
prepended by the multiline-flag. Returns the regular expression string.
expression. Makes sure that multi-line regular expressions are
prepended by the multi-line-flag. Returns the regular expression string.
"""
# TODO: Support atomic grouping: https://stackoverflow.com/questions/13577372/do-python-regular-expressions-have-an-equivalent-to-rubys-atomic-grouping
flags = self.re_flags | {'x'} if rx.find('\n') >= 0 else self.re_flags
if flags:
rx = "(?%s)%s" % ("".join(flags), rx)
......@@ -755,7 +761,7 @@ class EBNFCompiler(Compiler):
task()
# provide for capturing of symbols that are variables, i.e. the
# value of will be retrieved at some point during the parsing process
# value of which will be retrieved at some point during the parsing process
if self.variables:
for i in range(len(definitions)):
......@@ -1327,13 +1333,13 @@ class EBNFCompiler(Compiler):
def get_ebnf_compiler(grammar_name="", grammar_source="") -> EBNFCompiler:
try:
compiler = GLOBALS.ebnf_compiler_singleton
compiler = THREAD_LOCALS.ebnf_compiler_singleton
compiler.set_grammar_name(grammar_name, grammar_source)
return compiler
except AttributeError:
compiler = EBNFCompiler(grammar_name, grammar_source)
compiler.set_grammar_name(grammar_name, grammar_source)
GLOBALS.ebnf_compiler_singleton = compiler
THREAD_LOCALS.ebnf_compiler_singleton = compiler
return compiler
......
......@@ -105,6 +105,8 @@ class Error:
assert code >= 0
self.message = message # type: str
self._pos = pos # type: int
# TODO: Add some logic to avoid double assignment of the same error code?
# Problem: Same code might allowedly be used by two different parsers/compilers
self.code = code # type: ErrorCode
self.orig_pos = orig_pos # type: int
self.line = line # type: int
......
......@@ -43,9 +43,9 @@ Example::
from DHParser import compile_source, logging
with logging("LOGS"):
result, errors, ast = compile_source(source, preprocessor, grammar,
transformer, compiler)
start_logging("LOGS")
result, errors, ast = compile_source(source, preprocessor, grammar,
transformer, compiler)
"""
import collections
......@@ -54,13 +54,17 @@ import html
import os
from typing import List, Tuple, Union, Optional
from DHParser.configuration import access_presets, finalize_presets, get_config_value, \
set_config_value
from DHParser.error import Error
from DHParser.stringview import StringView
from DHParser.syntaxtree import Node, ZOMBIE_TAG
from DHParser.toolkit import is_filename, escape_control_characters, GLOBALS
from DHParser.toolkit import escape_control_characters
__all__ = ('log_dir',
'logging',
__all__ = ('start_logging',
'suspend_logging',
'resume_logging',
'log_dir',
'is_logging',
'create_log',
'append_log',
......@@ -76,8 +80,64 @@ __all__ = ('log_dir',
#
#######################################################################
def log_dir() -> Union[str, bool]:
def start_logging(dirname: str = "LOGS"):
    """Turns logging on and sets the log-directory to `dirname`.
    The log-directory, if it does not already exist, will be created
    lazily, i.e. only when logging actually starts."""
    presets = access_presets()
    abs_path = os.path.abspath(dirname) if dirname else ''
    if abs_path == presets['log_dir']:
        return  # nothing changed; no need to rewrite the presets
    presets['log_dir'] = abs_path
    set_config_value('log_dir', abs_path)
    finalize_presets()
def suspend_logging() -> str:
    """Suspends logging in the current thread. Returns the log-dir
    for resuming logging later."""
    previous_log_dir = get_config_value('log_dir')
    set_config_value('log_dir', '')
    return previous_log_dir
def resume_logging(log_dir: str = ''):
    """Resumes logging in the current thread with the given log-dir.

    :param log_dir: the log-directory returned by `suspend_logging()`,
        or the empty string to fall back to the preset value.
    """
    # Bug fix: the original tested `if not 'log_dir':` -- a non-empty string
    # literal, which is always truthy, so the fallback to the preset value
    # could never trigger. The parameter itself must be tested instead.
    if not log_dir:
        CFG = access_presets()
        log_dir = CFG['log_dir']
    set_config_value('log_dir', log_dir)
# #TODO: Remove this context manager, not really useful...
# @contextlib.contextmanager
# def logging(dirname="LOGS"):
# """
# DEPRECATED! Use `start_logging()` instead!
#
# Context manager. Log files within this context will be stored in
# directory ``dirname``. Logging is turned off if name is empty.
#
# Args:
# dirname: the name for the log directory or the empty string to
# turn logging of
# """
# print('The `logging`-context-manager is DEPRECATED! Use `start_logging()` instead!')
# CFG = access_presets()
# if dirname and not isinstance(dirname, str):
# dirname = "LOGS" # be fail tolerant here...
# try:
# save = CFG['log_dir']
# except AttributeError:
# save = ''
# CFG['log_dir'] = dirname
# finalize_presets()
# yield
# CFG = access_presets()
# CFG['log_dir'] = save
# finalize_presets()
def log_dir(path: str="") -> Union[str, bool]:
"""Creates a directory for log files (if it does not exist) and
returns its path.
......@@ -91,19 +151,18 @@ def log_dir() -> Union[str, bool]:
explicitly. (See `testing.grammar_suite()` for an example, how this can
be done.
Parameters:
path: The directory path. If empty, the configured value will be
used: `configuration.get_config_value('log_dir')`.
Returns:
name of the logging directory (str) or False (bool) if logging has
not been switched on with the logging-contextmanager (see below), yet.
"""
# the try-except clauses in the following are precautions for multithreading
try:
dirname = GLOBALS.LOGGING # raises a name error if LOGGING is not defined
if not dirname:
raise AttributeError # raise a name error if LOGGING evaluates to False
except AttributeError:
dirname = path if path else get_config_value('log_dir')
if not dirname:
return False
# raise AttributeError("No access to log directory before logging has been "
# "turned on within the same thread/process.")
if os.path.exists(dirname) and not os.path.isdir(dirname):
raise IOError('"' + dirname + '" cannot be used as log directory, '
'because it is not a directory!')
......@@ -122,60 +181,59 @@ def log_dir() -> Union[str, bool]:
return dirname
#TODO: Remove this context manager, not really useful...
@contextlib.contextmanager
def logging(dirname="LOGS"):