Commit 56c19380 authored by eckhart

- a few ameliorations

parent c6f0a058
@@ -788,7 +788,7 @@ class EBNFCompiler(Compiler):
         # What makes it (look) more complicated is the handling of the
         # mandatory §-operator
         mandatory_marker = []
-        filtered_children = []
+        filtered_children = []  # type: List[Node]
         for nd in node.children:
             if nd.parser.ptype == TOKEN_PTYPE and nd.content == "§":
                 mandatory_marker.append(len(filtered_children))
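
The annotations added throughout this commit are PEP 484 type comments rather than Python 3.6 variable annotations, which keeps the modules importable on older interpreters while still giving static checkers such as mypy the element types. A minimal sketch of the idiom, with illustrative names that are not DHParser's:

    from typing import List, Optional

    def demo() -> None:
        names = []  # type: List[str]        # same meaning as "names: List[str] = []"
        first = None  # type: Optional[str]  # starts as None, may hold a str later
        names.append("node")
        first = names[0]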
@@ -57,7 +57,7 @@ from DHParser.error import line_col
 from DHParser.stringview import StringView
 from DHParser.syntaxtree import Node, WHITESPACE_PTYPE
 from DHParser.toolkit import is_filename, escape_control_characters, typing
-from typing import List, Union
+from typing import List, Tuple, Union

 __all__ = ('log_dir',
            'logging',
@@ -237,7 +237,7 @@ class Parser(ParserBase):
     def __init__(self, name: str = '') -> None:
         # assert isinstance(name, str), str(name)
         super().__init__(name)
-        self._grammar = None  # type: 'Grammar'
+        self._grammar = None  # type: Optional['Grammar']
         self.reset()
         # add "aspect oriented" wrapper around parser calls
@@ -602,14 +602,14 @@ class Grammar:
         if self.WSP__:
             try:
-                probe = self.whitespace__
+                probe = self.whitespace__  # type: RegExp
                 assert self.whitespace__.regexp.pattern == self.WSP__
             except AttributeError:
-                self.whitespace__ = Whitespace(self.WSP__)
+                self.whitespace__ = Whitespace(self.WSP__)  # type: RegExp
                 self.whitespace__.grammar = self
                 self.all_parsers__.add(self.whitespace__)  # don't you forget about me...
         else:
-            self.whitespace__ = ZOMBIE_PARSER
+            self.whitespace__ = cast(RegExp, ZOMBIE_PARSER)
         assert not self.wspL__ or self.wspL__ == self.WSP__
         assert not self.wspR__ or self.wspR__ == self.WSP__
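
The cast in the else-branch exists because the slot is now effectively typed as RegExp, while ZOMBIE_PARSER is a sentinel of a different class. typing.cast converts nothing at runtime; it merely tells the checker to treat the value as the given type. A sketch with stand-in names (the real RegExp and ZOMBIE_PARSER live in DHParser):

    from typing import cast

    class RegExp:
        """Stand-in for DHParser's RegExp parser class (illustrative)."""

    ZOMBIE_PARSER = object()  # stand-in for the real zombie-parser singleton

    # cast() is a no-op at runtime; it only informs the static checker.
    whitespace = cast(RegExp, ZOMBIE_PARSER)
    assert whitespace is ZOMBIE_PARSER  # same object, no copy or conversion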
@@ -30,7 +30,7 @@ import copy
 from DHParser.error import Error, linebreaks, line_col
 from DHParser.stringview import StringView
 from DHParser.toolkit import re, typing
-from typing import Callable, cast, Iterator, List, Set, Union, Tuple, Optional
+from typing import Callable, cast, Iterator, List, AbstractSet, Set, Union, Tuple, Optional

 __all__ = ('ParserBase',
@@ -257,7 +257,7 @@ class Node(collections.abc.Sized):
         # The following if-clause is merely an optimization, i.e. a fast-path for leaf-Nodes
         if leafhint:
             self._result = result  # type: StrictResultType
-            self._content = None  # type: str
+            self._content = None  # type: Optional[str]
             self.children = NoChildren  # type: ChildrenType
             self._len = -1  # type: int  # lazy evaluation
         else:
@@ -413,7 +413,7 @@ class Node(collections.abc.Sized):
     @property
-    def content(self) -> Union[StringView, str]:
+    def content(self) -> str:
         """
         Returns content as string, omitting error messages.
         """
@@ -670,7 +670,7 @@ class Node(collections.abc.Sized):
             yield node

-    def select_by_tag(self, tag_names: Union[str, Set[str]],
+    def select_by_tag(self, tag_names: Union[str, AbstractSet[str]],
                       include_root: bool=True) -> Iterator['Node']:
         """
         Returns an iterator that runs through all descendants that have one
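
Widening the parameter from Set[str] to AbstractSet[str] matters because typing.AbstractSet corresponds to collections.abc.Set, the read-only set interface that both set and frozenset implement. Callers can now pass a frozenset (as the string branch in the next hunk produces) without a type error. A sketch of the signature's effect, with made-up tag names:

    from typing import AbstractSet, Union

    def select_by_tag(tag_names: Union[str, AbstractSet[str]]) -> None:
        """Signature sketch only; the real method iterates the tree."""

    select_by_tag({"mixed", "token"})             # mutable set: accepted
    select_by_tag(frozenset({"mixed", "token"}))  # frozenset: accepted after the change
    select_by_tag("symbol")                       # single tag name: accepted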
@@ -699,7 +699,7 @@ class Node(collections.abc.Sized):
             Node: All nodes which have a given tag name.
         """
         if isinstance(tag_names, str):
-            tag_names = frozenset(tag_names)
+            tag_names = frozenset({tag_names})
         return self.select(lambda node: node.tag_name in tag_names, include_root)
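
Unlike the surrounding annotation changes, this hunk is a genuine bug fix: frozenset() iterates its argument, so applying it directly to a string splits the string into single characters, and select_by_tag("name") would have matched the one-letter tags "n", "a", "m", "e" instead of "name". Wrapping the string in a set literal keeps it whole:

    # frozenset(iterable) iterates its argument, so a bare string
    # decomposes into characters -- the bug being fixed:
    assert frozenset("tag") == {"t", "a", "g"}
    # A set literal around the string keeps it as a single element:
    assert frozenset({"tag"}) == {"tag"}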
@@ -242,14 +242,15 @@ def traverse(root_node: Node,
     if '__cache__' in processing_table:
         # assume that processing table has already been expanded
         table = processing_table  # type: ProcessingTableType
-        cache = processing_table['__cache__']  # type: Dictionary[str, List[Callable]]
+        cache = cast(TransformationDict, processing_table['__cache__'])  # type: TransformationDict
     else:
         # normalize processing_table entries by turning single values
         # into lists with a single value
         table = {name: cast(Sequence[Callable], smart_list(call))
                  for name, call in list(processing_table.items())}
         table = expand_table(table)
-        cache = table.setdefault('__cache__', cast(TransformationDict, dict()))
+        cache = cast(TransformationDict,
+                     table.setdefault('__cache__', cast(TransformationDict, dict())))
         # change processing table in place, so its already expanded and cache filled next time
         processing_table.clear()
         processing_table.update(table)
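
Beyond the tightened type comments, the context lines show why traverse() rewrites processing_table in place instead of rebinding it: clear() followed by update() preserves the dict's identity, so the expanded table and its '__cache__' entry are still visible through whatever reference the caller holds on the next invocation. A small demonstration of the idiom:

    processing_table = {"node": ["transformation"]}
    alias = processing_table  # e.g. a module-level reference held by the caller
    expanded = {"node": ["transformation"], "__cache__": {}}

    # In-place mutation keeps 'alias' pointing at the updated contents.
    processing_table.clear()
    processing_table.update(expanded)
    assert alias is processing_table and "__cache__" in alias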
@@ -311,16 +311,16 @@ class TestSeries:
         st = parser("DEAB_");  assert st.error_flag
         assert st.collect_errors()[0].code == Error.MANDATORY_CONTINUATION

-    def test_boundary_cases(self):
-        lang = """
-        document = series | §!single | /.*/
-        series = "A" "B" §"C" "D"
-        single = "E"
-        """
-        parser_class = grammar_provider(lang)
-        parser = parser_class()
-        print(parser.python_src__)
-        print(parser_class.python_src__)
+    # def test_boundary_cases(self):
+    #     lang = """
+    #     document = series | §!single | /.*/
+    #     series = "A" "B" §"C" "D"
+    #     single = "E"
+    #     """
+    #     parser_class = grammar_provider(lang)
+    #     parser = parser_class()
+    #     print(parser.python_src__)
+    #     print(parser_class.python_src__)

 class TestAllOfSomeOf: