...
 
Commits (30)
......@@ -38,13 +38,14 @@ import copy
import os
from typing import Any, Optional, Tuple, List, cast
from DHParser.configuration import get_config_value
from DHParser.preprocess import with_source_mapping, PreprocessorFunc, SourceMapFunc
from DHParser.syntaxtree import Node, RootNode, ZOMBIE_TAG, StrictResultType
from DHParser.transform import TransformationFunc
from DHParser.parse import Grammar
from DHParser.error import adjust_error_locations, is_error, is_fatal, Error
from DHParser.log import log_parsing_history, log_ST, is_logging
from DHParser.toolkit import load_if_file, is_filename, get_config_value
from DHParser.toolkit import load_if_file, is_filename
__all__ = ('CompilerError',
......@@ -144,6 +145,7 @@ class Compiler:
A finalization method that is called after compilation has finished and
after all tasks from the finalizers stack have been executed
"""
pass
def __call__(self, root: RootNode) -> Any:
"""
......@@ -164,6 +166,7 @@ class Compiler:
while self.finalizers:
task, parameters = self.finalizers.pop()
task(*parameters)
self.finalize()
return result
# Obsolete, because never used...
......
......@@ -17,9 +17,8 @@
"""
Module "configuration.py" defines the default configuration for DHParser.
The configuration values can be changed while running via the
DHParser.toolkit.get_config_value() and DHParser.toolkit.get_config_value()-
functions.
The configuration values can be read and changed while running via the
get_config_value() and set_config_value()-functions.
The presets can also be overwritten before(!) spawning any parsing processes by
overwriting the values in the CONFIG_PRESET dictionary.
......@@ -32,7 +31,11 @@ program and before any DHParser-function is invoked.
from typing import Dict, Hashable, Any
__all__ = ('CONFIG_PRESET',
__all__ = ('access_presets',
'finalize_presets',
'THREAD_LOCALS',
'get_config_value',
'set_config_value',
'XML_SERIALIZATION',
'SXPRESSION_SERIALIZATION',
'COMPACT_SERIALIZATION',
......@@ -40,7 +43,129 @@ __all__ = ('CONFIG_PRESET',
'JSON_SERIALIZATION',
'SERIALIZATIONS')
CONFIG_PRESET = dict() # type: Dict[Hashable, Any]
########################################################################
#
# multiprocessing-safe preset- and configuration-handling
#
########################################################################
CONFIG_PRESET = dict() # type: Dict[str, Any]
CONFIG_PRESET['syncfile_path'] = ''
THREAD_LOCALS = None
def get_syncfile_path(pid: int) -> str:
    """Returns the path of the temporary file that is used to
    synchronize the configuration presets with the process `pid`."""
    import os
    import tempfile
    syncfile_name = 'DHParser_%i.cfg' % pid
    return os.path.join(tempfile.gettempdir(), syncfile_name)
def access_presets() -> Dict[str, Any]:
    """
    Returns a dictionary of presets for configuration values.
    If any preset values are changed after calling `access_presets()`,
    `finalize_presets()` should be called to make sure that processes
    spawned after changing the preset values, will be able to read
    the changed values.
    See: https://docs.python.org/3/library/multiprocessing.html#the-spawn-and-forkserver-start-methods
    """
    import multiprocessing
    global CONFIG_PRESET
    unsynchronized = (not CONFIG_PRESET['syncfile_path']
                      and multiprocessing.get_start_method() != 'fork')
    if unsynchronized:
        import os
        import pickle
        # first try the parent's syncfile (case: spawned child process),
        # then fall back to our own pid's syncfile (case: root process)
        path = get_syncfile_path(os.getppid())
        if not os.path.exists(path):
            path = get_syncfile_path(os.getpid())
        try:
            with open(path, 'rb') as syncfile:
                preset = pickle.load(syncfile)
                assert isinstance(preset, dict)
                assert preset['syncfile_path'] == path, \
                    'Conflicting syncfile paths %s != %s' % (preset['syncfile_path'], path)
                CONFIG_PRESET = preset
        except FileNotFoundError:
            # no syncfile written, yet: keep the built-in presets
            pass
    return CONFIG_PRESET
def remove_cfg_tempfile(filename: str):
    """Deletes the temporary configuration-sync file `filename`.
    Intended as an atexit-handler, see `finalize_presets()`."""
    import os
    os.remove(filename)
def finalize_presets():
    """
    Finalizes changes of the presets of the configuration values.
    This method should always be called after changing preset values to
    make sure the changes will be visible to processes spawned later.

    NOTE(review): may only be called from the main process; it pickles
    CONFIG_PRESET to a temporary sync-file from which spawned processes
    pick up their presets (see `access_presets()`).
    """
    import atexit
    import multiprocessing
    import os
    import pickle
    global CONFIG_PRESET
    # With the 'fork' start-method, child processes inherit the parent's
    # memory, so no sync-file needs to be written in that case.
    if multiprocessing.get_start_method() != 'fork':
        syncfile_path = get_syncfile_path(os.getpid())
        existing_syncfile = CONFIG_PRESET['syncfile_path']
        # A recorded syncfile-path differing from this pid's path, or an
        # existing syncfile of the parent process, indicates that this is
        # a spawned child process rather than the main process.
        assert ((not existing_syncfile or existing_syncfile == syncfile_path)
                and (not os.path.exists((get_syncfile_path(os.getppid()))))), \
            "finalize_presets() can only be called from the main process!"
        with open(syncfile_path, 'wb') as f:
            CONFIG_PRESET['syncfile_path'] = syncfile_path
            if existing_syncfile != syncfile_path:
                # first finalization: arrange for the sync-file to be
                # cleaned up when the main process exits
                atexit.register(remove_cfg_tempfile, syncfile_path)
            pickle.dump(CONFIG_PRESET, f)
def get_config_value(key: Hashable) -> Any:
    """
    Retrieves a configuration value thread-safely.

    Values are cached in a thread-local dictionary; on a cache-miss the
    value is fetched from the (process-wide) presets and cached for
    faster subsequent lookups within the same thread.

    :param key: the key (an immutable, usually a string)
    :return: the value
    :raises KeyError: if `key` is neither cached nor found in the presets
    """
    global THREAD_LOCALS
    if THREAD_LOCALS is None:
        # lazy initialization of the module-wide thread-local storage
        import threading
        THREAD_LOCALS = threading.local()
    try:
        cfg = THREAD_LOCALS.config
    except AttributeError:
        # first access from this thread: create its private config-dict
        THREAD_LOCALS.config = dict()
        cfg = THREAD_LOCALS.config
    try:
        return cfg[key]
    except KeyError:
        # not cached in this thread, yet: fall back to the presets and
        # cache the value thread-locally
        CONFIG_PRESET = access_presets()
        value = CONFIG_PRESET[key]
        THREAD_LOCALS.config[key] = value
        return value
def set_config_value(key: Hashable, value: Any):
    """
    Changes a configuration value thread-safely. The configuration
    value will be set only for the current thread. In order to
    set configuration values for any new thread, add the key and value
    to CONFIG_PRESET, before any thread accessing config values is started.

    :param key: the key (an immutable, usually a string)
    :param value: the value
    """
    global THREAD_LOCALS
    if THREAD_LOCALS is None:
        # lazily create the module-wide thread-local storage object
        import threading
        THREAD_LOCALS = threading.local()
    if not hasattr(THREAD_LOCALS, 'config'):
        # this thread has not stored any configuration values, yet
        THREAD_LOCALS.config = dict()
    THREAD_LOCALS.config[key] = value
########################################################################
......@@ -185,10 +310,19 @@ CONFIG_PRESET['debug_compiler'] = False
#
########################################################################
# Log-directory. An empty string means that logging is turned off,
# no matter what value the other log-configuration parameters have.
# Default value: '' (all logging is turned off)
CONFIG_PRESET['log_dir'] = ''
# Log server traffic (requests and responses)
# Default value: False
CONFIG_PRESET['log_server'] = False
# Echo server log messages on the terminal.
# Default value: False
CONFIG_PRESET['echo_server_log'] = False
########################################################################
#
......
......@@ -28,11 +28,12 @@ import stat
import DHParser.ebnf
from DHParser.compile import Compiler, compile_source
from DHParser.configuration import get_config_value, set_config_value
from DHParser.ebnf import EBNFCompiler, grammar_changed, DHPARSER_IMPORTS, \
get_ebnf_preprocessor, get_ebnf_grammar, get_ebnf_transformer, get_ebnf_compiler, \
PreprocessorFactoryFunc, ParserFactoryFunc, TransformerFactoryFunc, CompilerFactoryFunc
from DHParser.error import Error, is_error, has_errors, only_errors
from DHParser.log import logging
from DHParser.log import suspend_logging, resume_logging
from DHParser.parse import Grammar
from DHParser.preprocess import nil_preprocessor, PreprocessorFunc
from DHParser.syntaxtree import Node
......@@ -145,11 +146,12 @@ def grammar_instance(grammar_representation) -> Tuple[Grammar, str]:
if is_python_code(grammar_src):
parser_py, messages = grammar_src, [] # type: str, List[Error]
else:
with logging(False):
result, messages, _ = compile_source(
grammar_src, None,
get_ebnf_grammar(), get_ebnf_transformer(), get_ebnf_compiler())
parser_py = cast(str, result)
log_dir = suspend_logging()
result, messages, _ = compile_source(
grammar_src, None,
get_ebnf_grammar(), get_ebnf_transformer(), get_ebnf_compiler())
parser_py = cast(str, result)
resume_logging(log_dir)
if has_errors(messages):
raise DefinitionError(only_errors(messages), grammar_src)
parser_root = compile_python_object(DHPARSER_IMPORTS + parser_py, r'\w+Grammar$')()
......@@ -291,10 +293,11 @@ def load_compiler_suite(compiler_suite: str) -> \
else:
# Assume source is an ebnf grammar.
# Is there really any reasonable application case for this?
with logging(False):
compiler_py, messages, n = compile_source(source, None, get_ebnf_grammar(),
get_ebnf_transformer(),
get_ebnf_compiler(compiler_suite, source))
log_dir = suspend_logging()
compiler_py, messages, n = compile_source(source, None, get_ebnf_grammar(),
get_ebnf_transformer(),
get_ebnf_compiler(compiler_suite, source))
resume_logging(log_dir)
if has_errors(messages):
raise DefinitionError(only_errors(messages), source)
preprocessor = get_ebnf_preprocessor
......
......@@ -31,6 +31,7 @@ import os
from typing import Callable, Dict, List, Set, Tuple, Sequence, Union, Optional, Any, cast
from DHParser.compile import CompilerError, Compiler, compile_source, visitor_name
from DHParser.configuration import THREAD_LOCALS, get_config_value
from DHParser.error import Error
from DHParser.parse import Grammar, mixin_comment, Forward, RegExp, DropWhitespace, \
NegativeLookahead, Alternative, Series, Option, OneOrMore, ZeroOrMore, Token, \
......@@ -38,7 +39,7 @@ from DHParser.parse import Grammar, mixin_comment, Forward, RegExp, DropWhitespa
from DHParser.preprocess import nil_preprocessor, PreprocessorFunc
from DHParser.syntaxtree import Node, WHITESPACE_PTYPE, TOKEN_PTYPE
from DHParser.toolkit import load_if_file, escape_re, md5, sane_parser_name, re, expand_table, \
GLOBALS, get_config_value, unrepr, compile_python_object, DHPARSER_PARENTDIR
unrepr, compile_python_object, DHPARSER_PARENTDIR
from DHParser.transform import TransformationFunc, traverse, remove_brackets, \
reduce_single_child, replace_by_single_child, remove_whitespace, remove_empty, \
remove_tokens, flatten, forbid, assert_content
......@@ -80,7 +81,7 @@ try:
import regex as re
except ImportError:
import re
from DHParser import logging, is_filename, load_if_file, \\
from DHParser import start_logging, suspend_logging, resume_logging, is_filename, load_if_file, \\
Grammar, Compiler, nil_preprocessor, PreprocessorToken, Whitespace, DropWhitespace, \\
Lookbehind, Lookahead, Alternative, Pop, Token, DropToken, Synonym, AllOf, SomeOf, \\
Unordered, Option, NegativeLookbehind, OneOrMore, RegExp, Retrieve, Series, Capture, \\
......@@ -97,7 +98,7 @@ from DHParser import logging, is_filename, load_if_file, \\
replace_content, replace_content_by, forbid, assert_content, remove_infix_operator, \\
error_on, recompile_grammar, left_associative, lean_left, set_config_value, \\
get_config_value, XML_SERIALIZATION, SXPRESSION_SERIALIZATION, COMPACT_SERIALIZATION, \\
JSON_SERIALIZATION, CONFIG_PRESET, GLOBALS
JSON_SERIALIZATION, THREAD_LOCALS, access_presets, finalize_presets
'''.format(dhparser_parentdir=DHPARSER_PARENTDIR)
......@@ -251,11 +252,11 @@ def grammar_changed(grammar_class, grammar_source: str) -> bool:
def get_ebnf_grammar() -> EBNFGrammar:
try:
grammar = GLOBALS.ebnf_grammar_singleton
grammar = THREAD_LOCALS.ebnf_grammar_singleton
return grammar
except AttributeError:
GLOBALS.ebnf_grammar_singleton = EBNFGrammar()
return GLOBALS.ebnf_grammar_singleton
THREAD_LOCALS.ebnf_grammar_singleton = EBNFGrammar()
return THREAD_LOCALS.ebnf_grammar_singleton
########################################################################
......@@ -302,10 +303,10 @@ def EBNFTransform() -> TransformationFunc:
def get_ebnf_transformer() -> TransformationFunc:
try:
transformer = GLOBALS.EBNF_transformer_singleton
transformer = THREAD_LOCALS.EBNF_transformer_singleton
except AttributeError:
GLOBALS.EBNF_transformer_singleton = EBNFTransform()
transformer = GLOBALS.EBNF_transformer_singleton
THREAD_LOCALS.EBNF_transformer_singleton = EBNFTransform()
transformer = THREAD_LOCALS.EBNF_transformer_singleton
return transformer
......@@ -331,12 +332,12 @@ GRAMMAR_FACTORY = '''
def get_grammar() -> {NAME}Grammar:
"""Returns a thread/process-exclusive {NAME}Grammar-singleton."""
try:
grammar = GLOBALS.{NAME}_{ID:08d}_grammar_singleton
grammar = THREAD_LOCALS.{NAME}_{ID:08d}_grammar_singleton
except AttributeError:
GLOBALS.{NAME}_{ID:08d}_grammar_singleton = {NAME}Grammar()
THREAD_LOCALS.{NAME}_{ID:08d}_grammar_singleton = {NAME}Grammar()
if hasattr(get_grammar, 'python_src__'):
GLOBALS.{NAME}_{ID:08d}_grammar_singleton.python_src__ = get_grammar.python_src__
grammar = GLOBALS.{NAME}_{ID:08d}_grammar_singleton
THREAD_LOCALS.{NAME}_{ID:08d}_grammar_singleton.python_src__ = get_grammar.python_src__
grammar = THREAD_LOCALS.{NAME}_{ID:08d}_grammar_singleton
return grammar
'''
......@@ -350,10 +351,10 @@ def Create{NAME}Transformer() -> TransformationFunc:
def get_transformer() -> TransformationFunc:
"""Returns a thread/process-exclusive transformation function."""
try:
transformer = GLOBALS.{NAME}_{ID:08d}_transformer_singleton
transformer = THREAD_LOCALS.{NAME}_{ID:08d}_transformer_singleton
except AttributeError:
GLOBALS.{NAME}_{ID:08d}_transformer_singleton = Create{NAME}Transformer()
transformer = GLOBALS.{NAME}_{ID:08d}_transformer_singleton
THREAD_LOCALS.{NAME}_{ID:08d}_transformer_singleton = Create{NAME}Transformer()
transformer = THREAD_LOCALS.{NAME}_{ID:08d}_transformer_singleton
return transformer
'''
......@@ -362,10 +363,10 @@ COMPILER_FACTORY = '''
def get_compiler() -> {NAME}Compiler:
"""Returns a thread/process-exclusive {NAME}Compiler-singleton."""
try:
compiler = GLOBALS.{NAME}_{ID:08d}_compiler_singleton
compiler = THREAD_LOCALS.{NAME}_{ID:08d}_compiler_singleton
except AttributeError:
GLOBALS.{NAME}_{ID:08d}_compiler_singleton = {NAME}Compiler()
compiler = GLOBALS.{NAME}_{ID:08d}_compiler_singleton
THREAD_LOCALS.{NAME}_{ID:08d}_compiler_singleton = {NAME}Compiler()
compiler = THREAD_LOCALS.{NAME}_{ID:08d}_compiler_singleton
return compiler
'''
......@@ -1327,13 +1328,13 @@ class EBNFCompiler(Compiler):
def get_ebnf_compiler(grammar_name="", grammar_source="") -> EBNFCompiler:
try:
compiler = GLOBALS.ebnf_compiler_singleton
compiler = THREAD_LOCALS.ebnf_compiler_singleton
compiler.set_grammar_name(grammar_name, grammar_source)
return compiler
except AttributeError:
compiler = EBNFCompiler(grammar_name, grammar_source)
compiler.set_grammar_name(grammar_name, grammar_source)
GLOBALS.ebnf_compiler_singleton = compiler
THREAD_LOCALS.ebnf_compiler_singleton = compiler
return compiler
......
......@@ -65,7 +65,7 @@ class Error:
# error levels
NO_ERROR = ErrorCode(0)
MESSAGE = ErrorCode(1)
NOTICE = ErrorCode(1)
WARNING = ErrorCode(100)
ERROR = ErrorCode(1000)
FATAL = ErrorCode(10000)
......
......@@ -43,9 +43,9 @@ Example::
from DHParser import compile_source, logging
with logging("LOGS"):
result, errors, ast = compile_source(source, preprocessor, grammar,
transformer, compiler)
start_logging("LOGS")
result, errors, ast = compile_source(source, preprocessor, grammar,
transformer, compiler)
"""
import collections
......@@ -54,13 +54,17 @@ import html
import os
from typing import List, Tuple, Union, Optional
from DHParser.configuration import access_presets, finalize_presets, get_config_value, \
set_config_value
from DHParser.error import Error
from DHParser.stringview import StringView
from DHParser.syntaxtree import Node, ZOMBIE_TAG
from DHParser.toolkit import is_filename, escape_control_characters, GLOBALS
from DHParser.toolkit import escape_control_characters
__all__ = ('log_dir',
'logging',
__all__ = ('start_logging',
'suspend_logging',
'resume_logging',
'log_dir',
'is_logging',
'create_log',
'append_log',
......@@ -76,6 +80,62 @@ __all__ = ('log_dir',
#
#######################################################################
def start_logging(dirname: str = "LOGS"):
    """Turns logging on and sets the log-directory to `dirname`.
    The log-directory, if it does not already exist, will be created
    lazily, i.e. only when logging actually starts.

    :param dirname: the name of the log-directory; an empty string
        leaves logging turned off.
    """
    CFG = access_presets()
    # an empty dirname means: logging stays (or becomes) turned off
    log_dir = os.path.abspath(dirname) if dirname else ''
    if log_dir != CFG['log_dir']:
        # propagate the new log-dir both to the presets (visible to
        # processes spawned later) and to the current thread's config
        CFG['log_dir'] = log_dir
        set_config_value('log_dir', log_dir)
        finalize_presets()
def suspend_logging() -> str:
    """Suspends logging in the current thread. Returns the log-dir
    for resuming logging later."""
    previous_log_dir = get_config_value('log_dir')
    set_config_value('log_dir', '')
    return previous_log_dir
def resume_logging(log_dir: str = ''):
    """Resumes logging in the current thread with the given log-dir.

    :param log_dir: the log-directory, typically the value returned by a
        preceding call to `suspend_logging()`. If empty, the log-dir
        stored in the configuration presets is used instead.
    """
    # BUG FIX: the original tested `if not 'log_dir':` — a non-empty
    # string literal, which is always truthy — so the documented fallback
    # to the preset value could never trigger. Test the parameter itself.
    if not log_dir:
        CFG = access_presets()
        log_dir = CFG['log_dir']
    set_config_value('log_dir', log_dir)
# #TODO: Remove this context manager, not really useful...
# @contextlib.contextmanager
# def logging(dirname="LOGS"):
# """
# DEPRECATED! Use `start_logging()` instead!
#
# Context manager. Log files within this context will be stored in
# directory ``dirname``. Logging is turned off if name is empty.
#
# Args:
# dirname: the name for the log directory or the empty string to
# turn logging of
# """
# print('The `logging`-context-manager is DEPRECATED! Use `start_logging()` instead!')
# CFG = access_presets()
# if dirname and not isinstance(dirname, str):
# dirname = "LOGS" # be fail tolerant here...
# try:
# save = CFG['log_dir']
# except AttributeError:
# save = ''
# CFG['log_dir'] = dirname
# finalize_presets()
# yield
# CFG = access_presets()
# CFG['log_dir'] = save
# finalize_presets()
def log_dir() -> Union[str, bool]:
"""Creates a directory for log files (if it does not exist) and
......@@ -96,14 +156,9 @@ def log_dir() -> Union[str, bool]:
not been switched on with the logging-contextmanager (see below), yet.
"""
# the try-except clauses in the following are precautions for multithreading
try:
dirname = GLOBALS.LOGGING # raises a name error if LOGGING is not defined
if not dirname:
raise AttributeError # raise a name error if LOGGING evaluates to False
except AttributeError:
dirname = get_config_value('log_dir') # raises a name error if LOGGING is not defined
if not dirname:
return False
# raise AttributeError("No access to log directory before logging has been "
# "turned on within the same thread/process.")
if os.path.exists(dirname) and not os.path.isdir(dirname):
raise IOError('"' + dirname + '" cannot be used as log directory, '
'because it is not a directory!')
......@@ -122,35 +177,13 @@ def log_dir() -> Union[str, bool]:
return dirname
#TODO: Remove this context manager, not really useful...s
@contextlib.contextmanager
def logging(dirname="LOGS"):
"""Context manager. Log files within this context will be stored in
directory ``dirname``. Logging is turned off if name is empty.
Args:
dirname: the name for the log directory or the empty string to
turn logging of
"""
if dirname and not isinstance(dirname, str):
dirname = "LOGS" # be fail tolerant here...
try:
save = GLOBALS.LOGGING
except AttributeError:
save = ""
GLOBALS.LOGGING = dirname or ""
# if dirname and not os.path.exists(dirname):
# os.mkdir(dirname)
yield
GLOBALS.LOGGING = save
def is_logging() -> bool:
def is_logging(thread_local_query: bool=True) -> bool:
"""-> True, if logging is turned on."""
try:
return bool(GLOBALS.LOGGING)
except AttributeError:
return False
if thread_local_query:
return bool(get_config_value('log_dir'))
else:
CFG = access_presets()
return bool(CFG['log_dir'])
def create_log(log_name: str) -> str:
......@@ -158,24 +191,40 @@ def create_log(log_name: str) -> str:
Creates a new log file in the log directory. If a file with
the same name already exists, it will be overwritten.
:param log_name: The file name of the log file to be created
:return: the file name of the log file.
:return: the file name of the log file or an empty string if the log-file
has not been created (e.g. because logging is still turned off and
no log-directory set).
"""
ldir = log_dir()
if ldir:
with open(os.path.join(ldir, log_name), 'w') as f:
f.write('LOG-FILE: ' + log_name + '\n\n')
return log_name
return log_name
return ''
def append_log(log_name: str, *strings) -> None:
def append_log(log_name: str, *strings, echo: bool=False) -> None:
"""
Appends one or more strings to the log-file with the name 'log_name'.
Appends one or more strings to the log-file with the name 'log_name', if
logging is turned on and the log_name is not the empty string.
:param log_name: The name of the log file. The file must already exist.
(See: ``create_log()`` above).
:param *strings: One or more strings that will be written to the log-file.
No delimiters will be added, i.e. all delimiters like blanks or
linefeeds need to be added explicitely to the list of strings, before
calling 'append_log()'.
:param echo: If True, the log message will be echoed on the terminal. This
will also happen if logging is turned off.
"""
ldir = log_dir()
if ldir:
with open(os.path.join(ldir, log_name), 'a') as f:
if ldir and log_name:
log_path = os.path.join(ldir, log_name)
assert os.path.exists(log_path)
with open(log_path, 'a') as f:
for text in strings:
f.write(text)
if echo:
print(''.join(strings))
def clear_logs(logfile_types=frozenset(['.cst', '.ast', '.log'])):
......@@ -184,16 +233,17 @@ def clear_logs(logfile_types=frozenset(['.cst', '.ast', '.log'])):
log-directory if it is empty.
"""
log_dirname = log_dir()
files = os.listdir(log_dirname)
only_log_files = True
for file in files:
path = os.path.join(log_dirname, file)
if os.path.splitext(file)[1] in logfile_types or file == 'info.txt':
os.remove(path)
else:
only_log_files = False
if only_log_files:
os.rmdir(log_dirname)
if log_dirname and os.path.exists(log_dirname) and os.path.isdir(log_dirname):
files = os.listdir(log_dirname)
only_log_files = True
for file in files:
path = os.path.join(log_dirname, file)
if os.path.splitext(file)[1] in logfile_types or file == 'info.txt':
os.remove(path)
else:
only_log_files = False
if only_log_files:
os.rmdir(log_dirname)
#######################################################################
......
......@@ -34,14 +34,14 @@ from collections import defaultdict
import copy
from typing import Callable, cast, List, Tuple, Set, Dict, DefaultDict, Union, Optional, Any
from DHParser.configuration import get_config_value
from DHParser.error import Error, linebreaks, line_col
from DHParser.log import is_logging, HistoryRecord
from DHParser.preprocess import BEGIN_TOKEN, END_TOKEN, RX_TOKEN_NAME
from DHParser.stringview import StringView, EMPTY_STRING_VIEW
from DHParser.syntaxtree import Node, FrozenNode, RootNode, WHITESPACE_PTYPE, \
TOKEN_PTYPE, ZOMBIE_TAG, ResultType
from DHParser.toolkit import sane_parser_name, escape_control_characters, get_config_value, \
re, cython
from DHParser.toolkit import sane_parser_name, escape_control_characters, re, cython
__all__ = ('Parser',
......@@ -298,8 +298,8 @@ class Parser:
history_tracking__ = grammar.history_tracking__
if history_tracking__:
grammar.call_stack__.append(
(self.repr if self.tag_name in (':RegExp', ':Token', ':DropToken')
else self.tag_name, location))
((self.repr if self.tag_name in (':RegExp', ':Token', ':DropToken')
else self.tag_name), location))
grammar.moving_forward__ = True
error = None
......@@ -574,10 +574,11 @@ class Grammar:
constructor of the Parser object explicitly, but it suffices to
assign them to class variables, which results in better
readability of the Python code.
See classmethod `Grammar._assign_parser_names__()`
3. The parsers in the class do not necessarily need to be connected
to one single root parser, which is helpful for testing and
building up a parser successively of several components.
to one single root parser, which is helpful for testing and when
building up a parser gradually from several components.
As a consequence, though, it is highly recommended that a Grammar
class should not define any other variables or methods with names
......@@ -606,10 +607,9 @@ class Grammar:
'named' parser and its field `parser.pname` contains the variable
name after instantiation of the Grammar class. All other parsers,
i.e. parsers that are defined within a `named` parser, remain
"anonymous parsers" where `parser.pname` is the empty string, unless
a name has been passed explicitly upon instantiation.
"anonymous parsers" where `parser.pname` is the empty string.
If one and the same parser is assigned to several class variables
such as, for example the parser `expression` in the example above,
such as, for example, the parser `expression` in the example above,
the first name sticks.
Grammar objects are callable. Calling a grammar object with a UTF-8
......@@ -856,7 +856,7 @@ class Grammar:
self.rollback__ = [] # type: List[Tuple[int, Callable]]
self.last_rb__loc__ = -1 # type: int
# support for call stack tracing
self.call_stack__ = [] # type: List[str, int] # tag_name, location
self.call_stack__ = [] # type: List[Tuple[str, int]] # tag_name, location
# snapshots of call stacks
self.history__ = [] # type: List[HistoryRecord]
# also needed for call stack tracing
......@@ -927,7 +927,7 @@ class Grammar:
Checks if failure to match document was only due to a succeeding
lookahead parser, which is a common design pattern that can break test
cases. (Testing for this case allows to modify the error message, so
that the testing framework can know that the failure is only a
that the testing framework knows that the failure is only a
test-case-artifact and no real failure.
(See test/test_testing.TestLookahead !)
"""
......
......@@ -20,6 +20,7 @@ permissions and limitations under the License.
import os
import sys
from typing import cast
scriptdir = os.path.dirname(os.path.abspath(__file__))
i, k = scriptdir.find('DHParser-submodule'), len('DHParser-submodule')
......@@ -36,9 +37,9 @@ templatedir = os.path.join(os.path.dirname(scriptdir.rstrip('/')), 'templates')
from DHParser.compile import compile_source
from DHParser.dsl import compileDSL, compile_on_disk # , recompile_grammar
from DHParser.ebnf import get_ebnf_grammar, get_ebnf_transformer, get_ebnf_compiler
from DHParser.log import logging
from DHParser.toolkit import re, typing
from typing import cast
from DHParser.log import start_logging
from DHParser.toolkit import re
LOGGING = False
......@@ -219,11 +220,12 @@ def main():
print('File %s not found! Aborting.' % file_path)
sys.exit(1)
elif choice.strip() == '3':
with logging(LOGGING):
if not cpu_profile(selftest, 1):
print("Selftest FAILED :-(\n")
sys.exit(1)
print("Selftest SUCCEEDED :-)\n")
if LOGGING:
start_logging(LOGGING)
if not cpu_profile(selftest, 1):
print("Selftest FAILED :-(\n")
sys.exit(1)
print("Selftest SUCCEEDED :-)\n")
elif choice.strip().lower() not in {'q', 'quit', 'exit'}:
print('No valid choice. Goodbye!')
......
This diff is collapsed.
......@@ -16,14 +16,14 @@ cdef class Node:
cpdef is_anonymous(self)
cpdef _content(self)
cpdef with_pos(self, pos)
cpdef has_attr(self)
# cpdef has_attr(self, attr)
# cpdef compare_attr(self, other)
# cpdef _tree_repr(self, tab, open_fn, close_fn, data_fn, density, inline, inline_fn)
# cpdef as_sxpr(self, src, indentation, compact)
# cpdef as_xml(self, src, indentation, inline_tags, omit_tags, empty_tags)
# cpdef select_if(self, match_function, include_root, reverse)
# cpdef select(self, tag_names, include_root)
cpdef pick(self, tag_names)
# cpdef pick(self, criterion, reverse)
# cpdef tree_size(self)
cpdef to_json_obj(self)
......
......@@ -31,10 +31,10 @@ from typing import Callable, cast, Iterator, Sequence, List, AbstractSet, Set, U
Container, Optional, Dict
from DHParser.configuration import SERIALIZATIONS, XML_SERIALIZATION, SXPRESSION_SERIALIZATION, \
COMPACT_SERIALIZATION, JSON_SERIALIZATION, SMART_SERIALIZATION
COMPACT_SERIALIZATION, JSON_SERIALIZATION, SMART_SERIALIZATION, get_config_value
from DHParser.error import Error, ErrorCode, linebreaks, line_col
from DHParser.stringview import StringView
from DHParser.toolkit import get_config_value, re
from DHParser.toolkit import re
__all__ = ('WHITESPACE_PTYPE',
......@@ -405,18 +405,18 @@ class Node: # (collections.abc.Sized): Base class omitted for cython-compatibil
# (XML-)attributes ###
def has_attr(self) -> bool:
def has_attr(self, attr: str = '') -> bool:
"""
Returns `True`, if the node has any attributes, `False` otherwise.
Returns `True`, if the node has the attribute `attr` or,
in case `attr` is the empty string, any attributes at all;
`False` otherwise.
This function does not create an attribute dictionary, therefore
it should be preferred to querying node.attr when testing for the
existence of any attributes.
"""
try:
# if self._xml_attr is not None:
# return True
return bool(self._xml_attr)
return attr in self._xml_attr if attr else bool(self._xml_attr)
except AttributeError:
pass
return False
......@@ -614,8 +614,7 @@ class Node: # (collections.abc.Sized): Base class omitted for cython-compatibil
"""
return self.select_if(create_match_function(criterion), include_root, reverse)
def pick(self, criterion: CriteriaType,
reverse: bool = False) -> Optional['Node']:
def pick(self, criterion: CriteriaType, reverse: bool = False) -> Optional['Node']:
"""
Picks the first (or last if run in reverse mode) descendant that fulfills
the given criterion which can be either a match-function or a tag-name or
......@@ -788,7 +787,7 @@ class Node: # (collections.abc.Sized): Base class omitted for cython-compatibil
if src == '' and not (node.has_attr() and '_pos' in node.attr) and node.pos >= 0:
txt.append(' _pos="%i"' % node.pos)
if root and id(node) in root.error_nodes and not has_reserved_attrs:
txt.append(' err="%s"' % ''.join(str(err).replace('"', r'\"')
txt.append(' err="%s"' % ''.join(str(err).replace('"', "'")
for err in root.get_errors(node)))
if node.tag_name in empty_tags:
assert not node.result, ("Node %s with content %s is not an empty element!" %
......@@ -1037,7 +1036,6 @@ class RootNode(Node):
duplicate.tag_name = self.tag_name
return duplicate
def swallow(self, node: Node) -> 'RootNode':
"""
Put `self` in the place of `node` by copying all its data.
......@@ -1067,7 +1065,8 @@ class RootNode(Node):
"""
if not node:
node = Node(ZOMBIE_TAG, '').with_pos(error.pos)
assert node.pos == error.pos or isinstance(node, FrozenNode)
else:
assert node.pos == error.pos or isinstance(node, FrozenNode)
self.error_nodes.setdefault(id(node), []).append(error)
self.error_positions.setdefault(error.pos, set()).add(id(node))
self.errors.append(error)
......
def compile_src(source, log_dir=''):
def compile_src(source):
"""Compiles ``source`` and returns (result, errors, ast).
"""
with logging(log_dir):
compiler = get_compiler()
result_tuple = compile_source(source, get_preprocessor(),
get_grammar(),
get_transformer(), compiler)
result_tuple = compile_source(source, get_preprocessor(), get_grammar(), get_transformer(),
get_compiler())
return result_tuple
......@@ -28,7 +25,8 @@ if __name__ == "__main__":
file_name, log_dir = sys.argv[1], ''
if file_name in ['-d', '--debug'] and len(sys.argv) > 2:
file_name, log_dir = sys.argv[2], 'LOGS'
result, errors, _ = compile_src(file_name, log_dir)
start_logging(log_dir)
result, errors, _ = compile_src(file_name)
if errors:
cwd = os.getcwd()
rel_path = file_name[len(cwd):] if file_name.startswith(cwd) else file_name
......
......@@ -29,6 +29,7 @@ scriptpath = os.path.dirname(__file__)
STOP_SERVER_REQUEST = b"__STOP_SERVER__" # hardcoded in order to avoid import from DHParser.server
IDENTIFY_REQUEST = "identify()"
DEFAULT_PORT = 8888
config_filename_cache = ''
......@@ -62,7 +63,7 @@ def retrieve_host_and_port():
for host and port, in case the temporary config file does not exist.
"""
host = '127.0.0.1' # default host
port = 8888
port = DEFAULT_PORT
cfg_filename = get_config_filename()
try:
with open(cfg_filename) as f:
......@@ -101,7 +102,7 @@ def run_server(host, port):
from tst_DSL_grammar import recompile_grammar
recompile_grammar(os.path.join(scriptpath, 'DSL.ebnf'), force=False)
from DSLCompiler import compile_src
from DHParser.server import LanguageServer
from DHParser.server import LanguageServerProtocol, create_language_server
config_filename = get_config_filename()
try:
with open(config_filename, 'w') as f:
......@@ -110,7 +111,7 @@ def run_server(host, port):
print('PermissionError: Could not write temporary config file: ' + config_filename)
print('Starting server on %s:%i' % (host, port))
DSL_server = LanguageServer({'DSL_compiler': compile_src})
DSL_server = create_language_server(LanguageServerProtocol({'default': compile_src}))
DSL_server.run_server(host, port)
......
......@@ -26,27 +26,32 @@ except ModuleNotFoundError:
def recompile_grammar(grammar_src, force):
grammar_tests_dir = os.path.join(scriptpath, 'grammar_tests')
testing.create_test_templates(grammar_src, grammar_tests_dir)
with DHParser.log.logging(False):
# recompiles Grammar only if it has changed
if not dsl.recompile_grammar(grammar_src, force=force,
notify=lambda: print('recompiling ' + grammar_src)):
print('\nErrors while recompiling "%s":' % grammar_src +
'\n--------------------------------------\n\n')
with open('{name}_ebnf_ERRORS.txt', encoding='utf-8') as f:
print(f.read())
sys.exit(1)
DHParser.log.start_logging('LOGS')
# recompiles Grammar only if it has changed
if not dsl.recompile_grammar(grammar_src, force=force,
notify=lambda: print('recompiling ' + grammar_src)):
print('\nErrors while recompiling "%s":' % grammar_src +
'\n--------------------------------------\n\n')
with open('{name}_ebnf_ERRORS.txt', encoding='utf-8') as f:
print(f.read())
sys.exit(1)
def run_grammar_tests(glob_pattern, get_grammar, get_transformer):
with DHParser.log.logging(LOGGING):
error_report = testing.grammar_suite(
os.path.join(scriptpath, 'grammar_tests'),
get_grammar, get_transformer,
fn_patterns=[glob_pattern], report='REPORT', verbose=True)
DHParser.log.start_logging(LOGGING)
error_report = testing.grammar_suite(
os.path.join(scriptpath, 'grammar_tests'),
get_grammar, get_transformer,
fn_patterns=[glob_pattern], report='REPORT', verbose=True)
return error_report
if __name__ == '__main__':
# from DHParser.configuration import access_presets, finalize_presets
# CONFIG_PRESETS = access_presets()
# CONFIG_PRESET['test_parallelization'] = True
# finalize_presets()
argv = sys.argv[:]
if len(argv) > 1 and sys.argv[1] == "--debug":
LOGGING = True
......
......@@ -37,12 +37,12 @@ import os
import sys
from typing import Dict, List, Union, cast
from DHParser.configuration import THREAD_LOCALS, get_config_value
from DHParser.error import Error, is_error, adjust_error_locations
from DHParser.log import log_dir, logging, is_logging, clear_logs, log_parsing_history
from DHParser.log import log_dir, is_logging, clear_logs, log_parsing_history
from DHParser.parse import UnknownParserError, Parser, Lookahead
from DHParser.syntaxtree import Node, RootNode, parse_tree, flatten_sxpr, ZOMBIE_TAG
from DHParser.toolkit import GLOBALS, get_config_value, load_if_file, re
from DHParser.toolkit import load_if_file, re
__all__ = ('unit_from_config',
'unit_from_json',
......@@ -233,7 +233,7 @@ def unit_from_file(filename):
# tests.get('match*', dict()).items())
def get_report(test_unit):
def get_report(test_unit) -> str:
"""
Returns a text-report of the results of a grammar unit test. The report
lists the source of all tests as well as the error messages, if a test
......@@ -352,7 +352,7 @@ def grammar_unit(test_unit, parser_factory, transformer_factory, report='REPORT'
"""
if not get_config_value('test_supress_lookahead_failures'):
return False
raw_errors = syntax_tree.errors_sorted
raw_errors = cast(RootNode, syntax_tree).errors_sorted
is_artifact = ({e.code for e in raw_errors} <=
{Error.PARSER_LOOKAHEAD_FAILURE_ONLY,
# Error.PARSER_STOPPED_BEFORE_END,
......@@ -521,17 +521,6 @@ def reset_unit(test_unit):
del tests[key]
def run_unit(logdir, *parameters):
    """Execute ``grammar_unit()`` inside a ``logging(logdir)`` context.

    Logging is switched on when ``logdir`` is truthy and off otherwise.
    This wrapper is required when running unit tests under
    multiprocessing, because the logging state (log.log_dir(),
    log.logging(), log.is_logging()) is thread-local and must therefore
    be re-established inside each worker process.
    """
    log_context = logging(logdir)
    with log_context:
        result = grammar_unit(*parameters)
    return result
def grammar_suite(directory, parser_factory, transformer_factory,
fn_patterns=['*test*'],
ignore_unknown_filetypes=False,
......@@ -540,6 +529,8 @@ def grammar_suite(directory, parser_factory, transformer_factory,
Runs all grammar unit tests in a directory. A file is considered a test
unit, if it has the word "test" in its name.
"""
assert isinstance(report, str)
if not isinstance(fn_patterns, collections.abc.Iterable):
fn_patterns = [fn_patterns]
all_errors = collections.OrderedDict()
......@@ -557,7 +548,7 @@ def grammar_suite(directory, parser_factory, transformer_factory,
print(filename)
if any(fnmatch.fnmatch(filename, pattern) for pattern in fn_patterns):
parameters = filename, parser_factory, transformer_factory, report, verbose
results.append((filename, pool.submit(run_unit, log_dir(), *parameters)))
results.append((filename, pool.submit(grammar_unit, *parameters)))
for filename, err_future in results:
try:
errata = err_future.result()
......@@ -814,17 +805,6 @@ def run_file(fname):
runner('', eval(fname[:-3]).__dict__)
def run_with_log(logdir, f):
    """Execute ``run_file(f)`` inside a ``logging(logdir)`` context.

    Logging is switched on when ``logdir`` is truthy and off otherwise.
    This wrapper is required when running unit tests under
    multiprocessing, because the logging state (log.log_dir(),
    log.logging(), log.is_logging()) is thread-local and must therefore
    be re-established inside each worker process.
    """
    log_context = logging(logdir)
    with log_context:
        run_file(f)
def run_path(path):
"""Runs all unit tests in `path`"""
if os.path.isdir(path):
......@@ -835,7 +815,7 @@ def run_path(path):
if get_config_value('test_parallelization'):
with concurrent.futures.ProcessPoolExecutor(multiprocessing.cpu_count()) as pool:
for f in files:
result_futures.append(pool.submit(run_with_log, log_dir(), f))
result_futures.append(pool.submit(run_file, f))
# run_file(f) # for testing!
for r in result_futures:
try:
......
......@@ -37,7 +37,7 @@ except ImportError:
import re
import sys
import typing
from typing import Any, Iterable, Sequence, Set, Union, Dict, Hashable
from typing import Any, Iterable, Sequence, Set, Union, Dict
try:
import cython
......@@ -48,8 +48,6 @@ except ImportError:
cython_optimized = False
import DHParser.shadow_cython as cython
from DHParser.configuration import CONFIG_PRESET
__all__ = ('typing',
'cython',
......@@ -70,10 +68,7 @@ __all__ = ('typing',
'smart_list',
'sane_parser_name',
'DHPARSER_DIR',
'DHPARSER_PARENTDIR',
'GLOBALS',
'get_config_value',
'set_config_value')
'DHPARSER_PARENTDIR')
#######################################################################
......@@ -85,42 +80,6 @@ __all__ = ('typing',
DHPARSER_DIR = os.path.dirname(os.path.abspath(__file__))
DHPARSER_PARENTDIR = os.path.dirname(DHPARSER_DIR.rstrip('/'))
GLOBALS = threading.local()
def get_config_value(key: Hashable) -> Any:
    """Look up a configuration value in a thread-safe manner.

    The per-thread configuration dictionary ``GLOBALS.config`` is
    created on first access.  A key missing from the thread-local
    dictionary is copied over from ``CONFIG_PRESET`` on demand, so the
    preset acts as the fallback for every new thread.

    :param key: the key (an immutable, usually a string)
    :return: the configuration value registered under ``key``
    """
    if not hasattr(GLOBALS, 'config'):
        GLOBALS.config = dict()
    cfg = GLOBALS.config
    if key in cfg:
        return cfg[key]
    value = CONFIG_PRESET[key]
    cfg[key] = value
    return value
def set_config_value(key: Hashable, value: Any):
    """Set a configuration value for the current thread only.

    The per-thread configuration dictionary ``GLOBALS.config`` is
    created on first access.  To make a value visible to threads that
    have not been started yet, add it to ``CONFIG_PRESET`` instead,
    before any thread accessing configuration values is spawned.

    :param key: the key (an immutable, usually a string)
    :param value: the value to register under ``key``
    """
    if not hasattr(GLOBALS, 'config'):
        GLOBALS.config = dict()
    GLOBALS.config[key] = value
# global_id_counter = multiprocessing.Value('Q', 0)
......@@ -274,9 +233,9 @@ def load_if_file(text_or_file) -> str:
return content
except FileNotFoundError:
if RX_FILEPATH.fullmatch(text_or_file):
raise FileNotFoundError('Not a valid filepath or URL: "' + text_or_file + '". \n'
'(Add an empty line to distinguish source data from '
'a file name.)')
raise FileNotFoundError('File not found or not a valid filepath or URL: "'
+ text_or_file + '". \n(Add an empty line to '
'distinguish source data from a file name.)')
else:
return text_or_file
else:
......@@ -478,4 +437,3 @@ try:
except AttributeError:
# somebody has already taken care of this !?
pass
#!/usr/bin/python3
"""Downloads the MLW installation script from gitlab and runs it.
After it has been started, the installation script is deleted again,
i.e. to start it anew, this script - which always downloads the current
installation script - must itself be executed again first.
"""
import os
import ssl
import sys
import urllib.request
# Refuse to run from inside the MLW git repository itself.
cwd = os.path.abspath(os.getcwd())
if cwd.endswith('MLW-DSL'):
    print('Der Pfad "%s" scheint das MLW-Git-Repositorium zu enthalten.' % cwd)
    print('Dieses Skript sollte nicht innerhalb des MLW-Git-Repositoriums ausgeführt werden!')
    sys.exit(1)
PFAD = "https://gitlab.lrz.de/badw-it/MLW-DSL/raw/master/Installiere-MLW.py"
SKRIPT_NAME = os.path.basename(PFAD)
# Download the installation script.
# NOTE(review): this downloads and then executes remote code; the only
# safeguard is TLS certificate validation via the default SSL context.
with urllib.request.urlopen(PFAD, context=ssl.create_default_context()) as https:
    skript = https.read()
# Save the installation script in the current folder (desktop).
with open(SKRIPT_NAME, 'wb') as datei:
    datei.write(skript)
# Run the installation script.
# NOTE(review): 'python.exe' suggests this targets Windows only - confirm.
os.system('python.exe ' + SKRIPT_NAME)
# Delete the installation script after execution, so that an outdated
# installation script is not accidentally started later on.
os.remove(SKRIPT_NAME)
This diff is collapsed.
......@@ -18,7 +18,7 @@ try:
import regex as re
except ImportError:
import re
from DHParser import logging, is_filename, load_if_file, \
from DHParser import start_logging, is_filename, load_if_file, \
Grammar, Compiler, nil_preprocessor, PreprocessorToken, Whitespace, DropWhitespace, \
Lookbehind, Lookahead, Alternative, Pop, Token, DropToken, Synonym, AllOf, SomeOf, \
Unordered, Option, NegativeLookbehind, OneOrMore, RegExp, Retrieve, Series, Capture, \
......@@ -33,7 +33,7 @@ from DHParser import logging, is_filename, load_if_file, \
keep_children, is_one_of, not_one_of, has_content, apply_if, remove_first, remove_last, \
remove_anonymous_empty, keep_nodes, traverse_locally, strip, lstrip, rstrip, \
replace_content, replace_content_by, forbid, assert_content, remove_infix_operator, \
error_on, recompile_grammar, left_associative, GLOBALS
error_on, recompile_grammar, left_associative, THREAD_LOCALS
#######################################################################
......@@ -85,12 +85,12 @@ class ArithmeticGrammar(Grammar):
def get_grammar() -> ArithmeticGrammar:
"""Returns a thread/process-exclusive ArithmeticGrammar-singleton."""
try:
grammar = GLOBALS.Arithmetic_00000001_grammar_singleton
grammar = THREAD_LOCALS.Arithmetic_00000001_grammar_singleton
except AttributeError:
GLOBALS.Arithmetic_00000001_grammar_singleton = ArithmeticGrammar()
THREAD_LOCALS.Arithmetic_00000001_grammar_singleton = ArithmeticGrammar()
if hasattr(get_grammar, 'python_src__'):
GLOBALS.Arithmetic_00000001_grammar_singleton.python_src__ = get_grammar.python_src__
grammar = GLOBALS.Arithmetic_00000001_grammar_singleton
THREAD_LOCALS.Arithmetic_00000001_grammar_singleton.python_src__ = get_grammar.python_src__
grammar = THREAD_LOCALS.Arithmetic_00000001_grammar_singleton
return grammar
......@@ -117,10 +117,10 @@ def ArithmeticTransform() -> TransformationFunc:
def get_transformer() -> TransformationFunc:
try:
transformer = GLOBALS.Arithmetic_00000001_transformer_singleton
transformer = THREAD_LOCALS.Arithmetic_00000001_transformer_singleton
except AttributeError:
GLOBALS.Arithmetic_00000001_transformer_singleton = ArithmeticTransform()
transformer = GLOBALS.Arithmetic_00000001_transformer_singleton
THREAD_LOCALS.Arithmetic_00000001_transformer_singleton = ArithmeticTransform()
transformer = THREAD_LOCALS.Arithmetic_00000001_transformer_singleton
return transformer
......@@ -158,10 +158,10 @@ class ArithmeticCompiler(Compiler):
def get_compiler() -> ArithmeticCompiler:
try:
compiler = GLOBALS.Arithmetic_00000001_compiler_singleton
compiler = THREAD_LOCALS.Arithmetic_00000001_compiler_singleton
except AttributeError:
GLOBALS.Arithmetic_00000001_compiler_singleton = ArithmeticCompiler()
compiler = GLOBALS.Arithmetic_00000001_compiler_singleton
THREAD_LOCALS.Arithmetic_00000001_compiler_singleton = ArithmeticCompiler()
compiler = THREAD_LOCALS.Arithmetic_00000001_compiler_singleton
return compiler
......@@ -175,12 +175,12 @@ def get_compiler() -> ArithmeticCompiler:
def compile_src(source, log_dir=''):
"""Compiles ``source`` and returns (result, errors, ast).
"""
with logging(log_dir):
compiler = get_compiler()
cname = compiler.__class__.__name__
result_tuple = compile_source(source, get_preprocessor(),
get_grammar(),
get_transformer(), compiler)
start_logging(log_dir)
compiler = get_compiler()
cname = compiler.__class__.__name__
result_tuple = compile_source(source, get_preprocessor(),
get_grammar(),
get_transformer(), compiler)
return result_tuple
......
......@@ -16,16 +16,13 @@ scriptpath = os.path.dirname(__file__)
try:
from DHParser import dsl
import DHParser.log
from DHParser import testing, create_test_templates, CONFIG_PRESET
from DHParser import testing, create_test_templates, access_presets, finalize_presets
except ModuleNotFoundError:
print('Could not import DHParser. Please adjust sys.path in file '
'"%s" manually' % __file__)
sys.exit(1)
CONFIG_PRESET['ast_serialization'] = "S-expression"
CONFIG_PRESET['test_parallelization'] = False
def recompile_grammar(grammar_src, force):
grammar_tests_dir = os.path.join(scriptpath, 'grammar_tests')
if not os.path.exists(grammar_tests_dir) \
......@@ -33,27 +30,32 @@ def recompile_grammar(grammar_src, force):
for entry in os.listdir(grammar_tests_dir)):
print('No grammar-tests found, generating test templates.')
create_test_templates(grammar_src, grammar_tests_dir)
with DHParser.log.logging(LOGGING):
# recompiles Grammar only if it has changed
name = os.path.splitext(os.path.basename(grammar_src))[0]
if not dsl.recompile_grammar(grammar_src, force=force):
print('\nErrors while recompiling "{}":'.format(grammar_src) +
'\n--------------------------------------\n\n')
with open('{}_ebnf_ERRORS.txt'.format(name)) as f:
print(f.read())
sys.exit(1)
DHParser.log.start_logging(LOGGING)
# recompiles Grammar only if it has changed
name = os.path.splitext(os.path.basename(grammar_src))[0]
if not dsl.recompile_grammar(grammar_src, force=force):
print('\nErrors while recompiling "{}":'.format(grammar_src) +
'\n--------------------------------------\n\n')
with open('{}_ebnf_ERRORS.txt'.format(name)) as f:
print(f.read())
sys.exit(1)
def run_grammar_tests(glob_pattern):
with DHParser.log.logging(LOGGING):
error_report = testing.grammar_suite(
os.path.join(scriptpath, 'grammar_tests'),
get_grammar, get_transformer,
fn_patterns=[glob_pattern], report=True, verbose=True)
DHParser.log.start_logging(LOGGING)
error_report = testing.grammar_suite(
os.path.join(scriptpath, 'grammar_tests'),
get_grammar, get_transformer,
fn_patterns=[glob_pattern], report='REPORT', verbose=True)
return error_report
if __name__ == '__main__':
CONFIG_PRESET = access_presets()
CONFIG_PRESET['ast_serialization'] = "S-expression"
CONFIG_PRESET['test_parallelization'] = False
finalize_presets()
argv = sys.argv[:]
if len(argv) > 1 and sys.argv[1] == "--debug":
LOGGING = True
......
......@@ -18,7 +18,7 @@ try:
import regex as re
except ImportError:
import re
from DHParser import logging, is_filename, load_if_file, \
from DHParser import start_logging, is_filename, load_if_file, \
Grammar, Compiler, nil_preprocessor, PreprocessorToken, Whitespace, DropWhitespace, \
Lookbehind, Lookahead, Alternative, Pop, Token, DropToken, Synonym, AllOf, SomeOf, \
Unordered, Option, NegativeLookbehind, OneOrMore, RegExp, Retrieve, Series, Capture, \
......@@ -33,7 +33,7 @@ from DHParser import logging, is_filename, load_if_file, \
keep_children, is_one_of, not_one_of, has_content, apply_if, remove_first, remove_last, \
remove_anonymous_empty, keep_nodes, traverse_locally, strip, lstrip, rstrip, \
replace_content, replace_content_by, forbid, assert_content, remove_infix_operator, \
error_on, recompile_grammar, GLOBALS
error_on, recompile_grammar, THREAD_LOCALS
#######################################################################
......@@ -107,12 +107,12 @@ class ArithmeticRightRecursiveGrammar(Grammar):
def get_grammar() -> ArithmeticRightRecursiveGrammar:
"""Returns a thread/process-exclusive ArithmeticRightRecursiveGrammar-singleton."""
try:
grammar = GLOBALS.ArithmeticRightRecursive_00000001_grammar_singleton
grammar = THREAD_LOCALS.ArithmeticRightRecursive_00000001_grammar_singleton
except AttributeError:
GLOBALS.ArithmeticRightRecursive_00000001_grammar_singleton = ArithmeticRightRecursiveGrammar()
THREAD_LOCALS.ArithmeticRightRecursive_00000001_grammar_singleton = ArithmeticRightRecursiveGrammar()
if hasattr(get_grammar, 'python_src__'):
GLOBALS.ArithmeticRightRecursive_00000001_grammar_singleton.python_src__ = get_grammar.python_src__
grammar = GLOBALS.ArithmeticRightRecursive_00000001_grammar_singleton
THREAD_LOCALS.ArithmeticRightRecursive_00000001_grammar_singleton.python_src__ = get_grammar.python_src__
grammar = THREAD_LOCALS.ArithmeticRightRecursive_00000001_grammar_singleton
return grammar
......@@ -151,11 +151,11 @@ def ArithmeticRightRecursiveTransform() -> TransformationFunc:
def get_transformer() -> TransformationFunc:
try:
transformer = GLOBALS.ArithmeticRightRecursive_00000001_transformer_singleton
transformer = THREAD_LOCALS.ArithmeticRightRecursive_00000001_transformer_singleton
except AttributeError:
GLOBALS.ArithmeticRightRecursive_00000001_transformer_singleton = \
THREAD_LOCALS.ArithmeticRightRecursive_00000001_transformer_singleton = \
ArithmeticRightRecursiveTransform()
transformer = GLOBALS.ArithmeticRightRecursive_00000001_transformer_singleton
transformer = THREAD_LOCALS.ArithmeticRightRecursive_00000001_transformer_singleton
return transformer
......@@ -193,10 +193,10 @@ class ArithmeticRightRecursiveCompiler(Compiler):
def get_compiler() -> ArithmeticRightRecursiveCompiler:
try:
compiler = GLOBALS.ArithmeticRightRecursive_00000001_compiler_singleton
compiler = THREAD_LOCALS.ArithmeticRightRecursive_00000001_compiler_singleton
except AttributeError:
GLOBALS.ArithmeticRightRecursive_00000001_compiler_singleton = ArithmeticRightRecursiveCompiler()
compiler = GLOBALS.ArithmeticRightRecursive_00000001_compiler_singleton
THREAD_LOCALS.ArithmeticRightRecursive_00000001_compiler_singleton = ArithmeticRightRecursiveCompiler()
compiler = THREAD_LOCALS.ArithmeticRightRecursive_00000001_compiler_singleton
return compiler
......@@ -210,12 +210,12 @@ def get_compiler() -> ArithmeticRightRecursiveCompiler:
def compile_src(source, log_dir=''):
"""Compiles ``source`` and returns (result, errors, ast).
"""
with logging(log_dir):
compiler = get_compiler()
cname = compiler.__class__.__name__
result_tuple = compile_source(source, get_preprocessor(),
get_grammar(),
get_transformer(), compiler)
start_logging(log_dir)
compiler = get_compiler()
cname = compiler.__class__.__name__
result_tuple = compile_source(source, get_preprocessor(),
get_grammar(),
get_transformer(), compiler)
return result_tuple
......
......@@ -6,7 +6,7 @@ LOGGING = True
sys.path.extend(['../../', '../', './'])
from DHParser import grammar_provider, logging, CONFIG_PRESET
from DHParser import grammar_provider
CONFIG_PRESET['ast_serialization'] = "S-expression"
......@@ -31,5 +31,4 @@ arithmetic_syntax = """
if __name__ == "__main__":
arithmetic = grammar_provider(arithmetic_syntax)()
assert arithmetic
with logging():
syntax_tree = arithmetic("(a + b) * (a - b)")
syntax_tree = arithmetic("(a + b) * (a - b)")
......@@ -6,7 +6,7 @@
import os
import sys
LOGGING = True
LOGGING = 'LOGS'