Notice to GitKraken users: A vulnerability has been found in the SSH key generation of GitKraken versions 7.6.0 to 8.0.0 (https://www.gitkraken.com/blog/weak-ssh-key-fix). If you use GitKraken and have generated a SSH key using one of these versions, please remove it both from your local workstation and from your LRZ GitLab profile.

21.10.2021, 9:00 - 11:00: Due to updates GitLab may be unavailable for some minutes between 09:00 and 11:00.

Commit 70ee21b7 authored by Eckhart Arnold's avatar Eckhart Arnold
Browse files

- toolkit.py: added context manager for suppression of warnings

parent 5dc386ad
...@@ -16,15 +16,16 @@ implied. See the License for the specific language governing ...@@ -16,15 +16,16 @@ implied. See the License for the specific language governing
permissions and limitations under the License. permissions and limitations under the License.
""" """
from collections import OrderedDict
import keyword import keyword
from collections import OrderedDict
try: try:
import regex as re import regex as re
except ImportError: except ImportError:
import re import re
from typing import Callable, Dict, List, Set, Tuple from typing import Callable, Dict, List, Set, Tuple
from DHParser.toolkit import load_if_file, escape_re, md5, sane_parser_name from DHParser.toolkit import load_if_file, escape_re, md5, sane_parser_name, warnings
from DHParser.parsers import Grammar, mixin_comment, nil_scanner, Forward, RE, NegativeLookahead, \ from DHParser.parsers import Grammar, mixin_comment, nil_scanner, Forward, RE, NegativeLookahead, \
Alternative, Sequence, Optional, Required, OneOrMore, ZeroOrMore, Token, Compiler, \ Alternative, Sequence, Optional, Required, OneOrMore, ZeroOrMore, Token, Compiler, \
ScannerFunc ScannerFunc
...@@ -439,17 +440,19 @@ class EBNFCompiler(Compiler): ...@@ -439,17 +440,19 @@ class EBNFCompiler(Compiler):
# check for unconnected rules # check for unconnected rules
defined_symbols.difference_update(self.RESERVED_SYMBOLS) if warnings():
defined_symbols.difference_update(self.RESERVED_SYMBOLS)
def remove_connections(symbol):
if symbol in defined_symbols: def remove_connections(symbol):
defined_symbols.remove(symbol) if symbol in defined_symbols:
for related in self.rules[symbol][1:]: defined_symbols.remove(symbol)
remove_connections(str(related)) for related in self.rules[symbol][1:]:
remove_connections(self.root) remove_connections(str(related))
for leftover in defined_symbols:
self.rules[leftover][0].add_error(('Rule "%s" is not connected to parser ' remove_connections(self.root)
'root "%s"') % (leftover, self.root)) for leftover in defined_symbols:
self.rules[leftover][0].add_error(('Rule "%s" is not connected to parser '
'root "%s"') % (leftover, self.root))
# set root parser and assemble python grammar definition # set root parser and assemble python grammar definition
......
...@@ -487,7 +487,7 @@ def dsl_error_msg(parser: Parser, error_str: str) -> str: ...@@ -487,7 +487,7 @@ def dsl_error_msg(parser: Parser, error_str: str) -> str:
str: An error message including the call stack if history str: An error message including the call stack if history
tacking has been turned in the grammar object. tacking has been turned in the grammar object.
""" """
msg = ["DSL parser specification error:", error_str, "caught by parser", str(parser)] msg = ["DSL parser specification error:", error_str, 'Caught by parser "%s".' % str(parser)]
if parser.grammar.history: if parser.grammar.history:
msg.extend(["\nCall stack:", parser.grammar.history[-1].stack]) msg.extend(["\nCall stack:", parser.grammar.history[-1].stack])
else: else:
...@@ -768,7 +768,7 @@ class Optional(UnaryOperator): ...@@ -768,7 +768,7 @@ class Optional(UnaryOperator):
super(Optional, self).__init__(parser, name) super(Optional, self).__init__(parser, name)
# assert isinstance(parser, Parser) # assert isinstance(parser, Parser)
assert not isinstance(parser, Optional), \ assert not isinstance(parser, Optional), \
"Nesting options would be redundant: %s(%s)" % \ "Redundant nesting of options: %s(%s)" % \
(str(name), str(parser.name)) (str(name), str(parser.name))
assert not isinstance(parser, Required), \ assert not isinstance(parser, Required), \
"Nesting options with required elements is contradictory: " \ "Nesting options with required elements is contradictory: " \
......
...@@ -237,9 +237,10 @@ def grammar_unit(test_unit, parser_factory, transformer_factory, verbose=False): ...@@ -237,9 +237,10 @@ def grammar_unit(test_unit, parser_factory, transformer_factory, verbose=False):
transform(ast) transform(ast)
tests.setdefault('__ast__', {})[test_name] = ast tests.setdefault('__ast__', {})[test_name] = ast
if cst.error_flag: if cst.error_flag:
errata.append('Match test "%s" for parser "%s" failed:\n\tExpr.: %s\n\t%s' % errata.append('Match test "%s" for parser "%s" failed:\n\tExpr.: %s\n\n\t%s' %
(test_name, parser_name, '\n\t'.join(test_code.split('\n')), (test_name, parser_name, '\n\t'.join(test_code.split('\n')),
'\n\t'.join(error_messages(test_code, cst.collect_errors())))) '\n\t'.join(m.replace('\n', '\n\t\t') for m in
error_messages(test_code, cst.collect_errors()))))
tests.setdefault('__err__', {})[test_name] = errata[-1] tests.setdefault('__err__', {})[test_name] = errata[-1]
elif "cst" in tests and mock_syntax_tree(tests["cst"][test_name]) != cst: elif "cst" in tests and mock_syntax_tree(tests["cst"][test_name]) != cst:
errata.append('Concrete syntax tree test "%s" for parser "%s" failed:\n%s' % errata.append('Concrete syntax tree test "%s" for parser "%s" failed:\n%s' %
......
...@@ -45,6 +45,8 @@ __all__ = ['logging', ...@@ -45,6 +45,8 @@ __all__ = ['logging',
'is_logging', 'is_logging',
'log_dir', 'log_dir',
'logfile_basename', 'logfile_basename',
'supress_warnings',
'warnings',
'line_col', 'line_col',
'error_messages', 'error_messages',
'compact_sexpr', 'compact_sexpr',
...@@ -97,7 +99,7 @@ def log_dir() -> str: ...@@ -97,7 +99,7 @@ def log_dir() -> str:
@contextlib.contextmanager @contextlib.contextmanager
def logging(dirname="LOGS"): def logging(dirname: str = "LOGS"):
"""Context manager. Log files within this context will be stored in """Context manager. Log files within this context will be stored in
directory ``dirname``. Logging is turned off if name is empty. directory ``dirname``. Logging is turned off if name is empty.
...@@ -116,7 +118,7 @@ def logging(dirname="LOGS"): ...@@ -116,7 +118,7 @@ def logging(dirname="LOGS"):
LOGGING = save LOGGING = save
def is_logging(): def is_logging() -> bool:
"""-> True, if logging is turned on.""" """-> True, if logging is turned on."""
global LOGGING global LOGGING
try: try:
...@@ -125,6 +127,26 @@ def is_logging(): ...@@ -125,6 +127,26 @@ def is_logging():
return False return False
@contextlib.contextmanager
def supress_warnings(supress: bool = True):
    """Context manager that turns warning-suppression on (or off, if
    ``supress`` is False) for the duration of its ``with``-block.

    The previous value of the module-global ``SUPRESS_WARNINGS`` flag is
    saved on entry and restored on exit — including when the body of the
    ``with``-block raises an exception (hence the try/finally around
    ``yield``; without it the flag would leak past the context).

    Args:
        supress: value the ``SUPRESS_WARNINGS`` flag takes inside the
            context (default: True, i.e. suppress warnings).
    """
    global SUPRESS_WARNINGS
    try:
        save = SUPRESS_WARNINGS
    except NameError:
        save = False  # global default for warning suppression is False
    SUPRESS_WARNINGS = supress
    try:
        yield
    finally:
        # restore the saved state even if the with-body raised
        SUPRESS_WARNINGS = save
def warnings() -> bool:
    """Return True if warnings are currently enabled, i.e. the global
    ``SUPRESS_WARNINGS`` flag is unset or False."""
    # an unset flag means suppression was never requested -> warnings on
    return not globals().get('SUPRESS_WARNINGS', False)
def line_col(text: str, pos: int) -> Tuple[int, int]: def line_col(text: str, pos: int) -> Tuple[int, int]:
"""Returns the position within a text as (line, column)-tuple. """Returns the position within a text as (line, column)-tuple.
""" """
......
...@@ -6,7 +6,7 @@ ...@@ -6,7 +6,7 @@
latexdoc = preamble document latexdoc = preamble document
preamble = { command }+ preamble = { command }+
document = { [PARSEP] paragraph } [PARSEP] §EOF document = [PARSEP] sequence [PARSEP] §EOF
genericenv = beginenv sequence §endenv genericenv = beginenv sequence §endenv
beginenv = "\begin" §( "{" NAME "}" ) beginenv = "\begin" §( "{" NAME "}" )
...@@ -38,9 +38,9 @@ ESCAPED = /\\[%$&]/ ...@@ -38,9 +38,9 @@ ESCAPED = /\\[%$&]/
BRACKETS = /[\[\]]/ # left or right square bracket: [ ] BRACKETS = /[\[\]]/ # left or right square bracket: [ ]
TEXTCHUNK = /[^\\%$&\{\}\[\]\s\n]+/ # some piece of text excluding whitespace, TEXTCHUNK = /[^\\%$&\{\}\[\]\s\n]+/ # some piece of text excluding whitespace,
# linefeed and special characters # linefeed and special characters
WSPC = /[ \t]*\n?(?!\s*\n)[ \t]*/ # whitespace, including at most one linefeed WSPC = /[ \t]*\n?(?![ \t]*\n)[ \t]*/ # whitespace, including at most one linefeed
LF = /[ \t]*\n(?!\s*\n)/ # a linefeed, but not an empty line (i.e. par) LF = /[ \t]*\n(?!\s*\n)/ # a linefeed, but not an empty line (i.e. par)
PARSEP = /\s*\n\s*\n/ # at least one empty line, i.e. PARSEP = /\n[ \t]*(?=\n)/~ # at least one empty line, i.e.
# [whitespace] linefeed [whitespace] linefeed # [whitespace] linefeed [whitespace] linefeed
EOF = !/./ EOF = !/./
...@@ -25,4 +25,7 @@ import sys ...@@ -25,4 +25,7 @@ import sys
sys.path.extend(['../../', '../', './']) sys.path.extend(['../../', '../', './'])
from DHParser.testing import recompile_grammar from DHParser.testing import recompile_grammar
recompile_grammar('.', True) from DHParser.toolkit import supress_warnings
with supress_warnings(True):
recompile_grammar('.', True)
...@@ -27,7 +27,8 @@ from DHParser import toolkit ...@@ -27,7 +27,8 @@ from DHParser import toolkit
from LaTeXCompiler import get_grammar, get_transformer from LaTeXCompiler import get_grammar, get_transformer
with toolkit.logging(True): with toolkit.logging(True):
error_report = testing.grammar_suite('grammar_tests', get_grammar, get_transformer, verbose=True) error_report = testing.grammar_suite('grammar_tests', get_grammar,
get_transformer, verbose=True)
assert not error_report, error_report assert not error_report, error_report
...@@ -14,4 +14,4 @@ so werd' ich ganz und gar gesund. ...@@ -14,4 +14,4 @@ so werd' ich ganz und gar gesund.
Wenn ich mich lehn' an deine Brust, Wenn ich mich lehn' an deine Brust,
kommt's über mich wie Himmelslust, kommt's über mich wie Himmelslust,
doch wenn du sprichst: Ich liebe dich! doch wenn du sprichst: Ich liebe dich!
so muß ich weinen bitterlich. so muß ich weinen bitterlich.
\ No newline at end of file
...@@ -25,4 +25,5 @@ import sys ...@@ -25,4 +25,5 @@ import sys
sys.path.extend(['../../', '../', './']) sys.path.extend(['../../', '../', './'])
from DHParser.testing import recompile_grammar from DHParser.testing import recompile_grammar
recompile_grammar('.')
recompile_grammar('.', force=True)
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment