Commit 315a0736 authored by eckhart

- type errors fixed

parent e8b56892
@@ -158,7 +158,7 @@ class Compiler:
if self._dirty_flag:
self.reset()
self._dirty_flag = True
self.tree = root # type: RootNode
self.tree = root
# self.source = source # type: str
self.prepare()
result = self.compile(root)
@@ -239,7 +239,7 @@ def logfile_basename(filename_or_text, function_or_class_or_instance) -> str:
return name[:i] + '_out' if i >= 0 else name
GrammarCallable = Union[Grammar, Callable[[str], Node], functools.partial]
GrammarCallable = Union[Grammar, Callable[[str], RootNode], functools.partial]
CompilerCallable = Union[Compiler, Callable[[Node], Any], functools.partial]
@@ -339,7 +339,9 @@ def compile_source(source: str,
try:
result = compiler(syntax_tree)
except Exception as e:
node = compiler.context[-1] if compiler.context else syntax_tree
node = syntax_tree # type: Node
if isinstance(compiler, Compiler) and compiler.context:
node = compiler.context[-1]
syntax_tree.new_error(
node, "Compilation failed, most likely, due to errors earlier "
"in the processing pipeline. Crash Message: %s: %s"
@@ -139,7 +139,7 @@ def access_thread_locals() -> Any:
return THREAD_LOCALS
def get_config_value(key: Hashable) -> Any:
def get_config_value(key: str) -> Any:
"""
Retrieves a configuration value thread-safely.
:param key: the key (an immutable, usually a string)
@@ -160,7 +160,7 @@ def get_config_value(key: Hashable) -> Any:
return value
def set_config_value(key: Hashable, value: Any):
def set_config_value(key: str, value: Any):
"""
Changes a configuration value thread-safely. The configuration
value will be set only for the current thread. In order to
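For context, the `Hashable` to `str` narrowing matches how configuration keys are actually used. A minimal sketch of the thread-safe pattern these functions implement (simplified; the real versions also fall back to a global preset):

```python
import threading
from typing import Any, Dict

THREAD_LOCALS = threading.local()
CONFIG_PRESET = {'log_dir': ''}  # type: Dict[str, Any]

def _thread_cfg() -> Dict[str, Any]:
    # Each thread lazily gets its own copy of the preset.
    if not hasattr(THREAD_LOCALS, 'config'):
        THREAD_LOCALS.config = dict(CONFIG_PRESET)
    return THREAD_LOCALS.config

def get_config_value(key: str) -> Any:
    return _thread_cfg()[key]

def set_config_value(key: str, value: Any) -> None:
    # Affects only the calling thread.
    _thread_cfg()[key] = value

set_config_value('log_dir', 'LOGS')
assert get_config_value('log_dir') == 'LOGS'
```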
@@ -411,14 +411,14 @@ CONFIG_PRESET['test_parallelization'] = True
# in case a reported error may only be due to the fact that the test
# string a) either did include a substring for a lookahead check that
# was then left over when parsing stopped (which would usually result
# in a "parser stopped before end"-error) or b) did not include a
# in a "parser stopped before end"-error) or b) did not include a
# substring expected by a lookahead check as this is not part of the
# sequence that the tested parser should return in form of a concrete
# syntax-tree. (This would otherwise result in a 'parser did not match'-
# error.)
# Beware that a) these heuristics can fail, so that certain
# test-failures may fail to be reported and b) the abstract-syntax-trees
# resulting from parsers that contain lookahead checks may have a
# structure that would not occur outside the testing-environment.
# Default value: True
CONFIG_PRESET['test_supress_lookahead_failures'] = True
@@ -37,7 +37,7 @@ from DHParser.log import suspend_logging, resume_logging, is_logging, log_dir, a
from DHParser.parse import Grammar
from DHParser.preprocess import nil_preprocessor, PreprocessorFunc
from DHParser.syntaxtree import Node
from DHParser.transform import TransformationFunc
from DHParser.transform import TransformationFunc, TransformationDict
from DHParser.toolkit import DHPARSER_DIR, load_if_file, is_python_code, compile_python_object, re
from typing import Any, cast, List, Tuple, Union, Iterator, Iterable, Optional, \
Callable, Generator
@@ -429,7 +429,7 @@ def compile_on_disk(source_file: str, compiler_suite="", extension=".xml") -> It
sections = RX_SECTION_MARKER.split(source)
intro, imports, preprocessor, _, ast, compiler, outro = sections
ast_trans_python_src = DHPARSER_IMPORTS + ast
ast_trans_table = dict()
ast_trans_table = dict() # type: TransformationDict
try:
ast_trans_table = compile_python_object(ast_trans_python_src,
r'(?:\w+_)?AST_transformation_table$')
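The added type comment is what lets a checker accept the bare `dict()` initializer; without it, mypy has no element types to infer and flags the later reassignment. A generic illustration with a hypothetical alias:

```python
from typing import Callable, Dict, List

# Hypothetical stand-in for DHParser's TransformationDict.
TransformationDict = Dict[str, List[Callable]]

def compile_table() -> TransformationDict:
    return {'expression': []}

ast_trans_table = dict()  # type: TransformationDict
try:
    ast_trans_table = compile_table()
except Exception:
    pass  # keep the empty table on failure
```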
@@ -28,7 +28,7 @@ from collections import OrderedDict
from functools import partial
import keyword
import os
from typing import Callable, Dict, List, Set, Tuple, Sequence, Union, Optional, Any, cast
from typing import Callable, Dict, List, Set, Tuple, Sequence, Union, Optional, Any
from DHParser.compile import CompilerError, Compiler, compile_source, visitor_name
from DHParser.configuration import THREAD_LOCALS, get_config_value
@@ -41,7 +41,7 @@ from DHParser.syntaxtree import Node, WHITESPACE_PTYPE, TOKEN_PTYPE
from DHParser.toolkit import load_if_file, escape_re, md5, sane_parser_name, re, expand_table, \
unrepr, compile_python_object, DHPARSER_PARENTDIR, RX_NEVER_MATCH
from DHParser.transform import TransformationFunc, traverse, remove_brackets, \
reduce_single_child, replace_by_single_child, remove_whitespace, remove_empty, \
reduce_single_child, replace_by_single_child, remove_empty, \
remove_tokens, flatten, forbid, assert_content
from DHParser.versionnumber import __version__
@@ -184,7 +184,7 @@ class EBNFGrammar(Grammar):
"""
expression = Forward()
source_hash__ = "82a7c668f86b83f86515078e6c9093ed"
static_analysis_pending__ = []
static_analysis_pending__ = [] # type: List[bool]
parser_initialization__ = ["upon instantiation"]
COMMENT__ = r'#.*(?:\n|$)'
WHITESPACE__ = r'\s*'
@@ -333,7 +333,7 @@ def get_preprocessor() -> PreprocessorFunc:
GRAMMAR_FACTORY = '''
def get_grammar() -> {NAME}Grammar:
"""Returns a thread/process-exclusive {NAME}Grammar-singleton."""
THREAD_LOCALS = access_thread_locals()
try:
grammar = THREAD_LOCALS.{NAME}_{ID:08d}_grammar_singleton
except AttributeError:
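The whitespace-only change above sits in the template for the thread/process-exclusive singleton. A runnable sketch of that pattern (hypothetical grammar class standing in for a generated {NAME}Grammar):

```python
import threading

THREAD_LOCALS = threading.local()

class DemoGrammar:
    """Hypothetical stand-in for a generated grammar class."""

def get_grammar() -> DemoGrammar:
    """Returns a thread-exclusive DemoGrammar singleton."""
    try:
        grammar = THREAD_LOCALS.demo_grammar_singleton
    except AttributeError:
        THREAD_LOCALS.demo_grammar_singleton = DemoGrammar()
        grammar = THREAD_LOCALS.demo_grammar_singleton
    return grammar

assert get_grammar() is get_grammar()  # same instance within one thread
```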
@@ -450,15 +450,15 @@ class EBNFDirectives:
def __init__(self):
self.whitespace = WHITESPACE_TYPES['vertical'] # type: str
self.comment = '' # type: str
self.literalws = {'right'} # type: Collection[str]
self.tokens = set() # type: Collection[str]
self.filter = dict() # type: Dict[str, str]
self.error = dict() # type: Dict[str, List[Tuple[ReprType, ReprType]]]
self.skip = dict() # type: Dict[str, List[Union[unrepr, str]]]
self.resume = dict() # type: Dict[str, List[Union[unrepr, str]]]
self.drop = set() # type: Set[str]
self._super_ws = None # type: Optional[str]
self.comment = '' # type: str
self.literalws = {'right'} # type: Set[str]
self.tokens = set() # type: Set[str]
self.filter = dict() # type: Dict[str, str]
self.error = dict() # type: Dict[str, List[Tuple[ReprType, ReprType]]]
self.skip = dict() # type: Dict[str, List[Union[unrepr, str]]]
self.resume = dict() # type: Dict[str, List[Union[unrepr, str]]]
self.drop = set() # type: Set[str]
self._super_ws = None # type: Optional[str]
def __getitem__(self, key):
return getattr(self, key)
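Narrowing `Collection[str]` to `Set[str]` in the type comments is what allows mutation: `Collection` only guarantees `__contains__`, `__iter__` and `__len__`. A minimal illustration of the difference for a type checker:

```python
from typing import Collection, Set

def extend_collection(tokens: Collection[str]) -> None:
    tokens.add('RAW')  # mypy error: "Collection[str]" has no attribute "add"

def extend_set(tokens: Set[str]) -> None:
    tokens.add('RAW')  # fine: Set[str] supports add()

directives_tokens = set()  # type: Set[str]
extend_set(directives_tokens)
```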
@@ -665,7 +665,6 @@ class EBNFCompiler(Compiler):
self.grammar_source = load_if_file(grammar_source)
return self
# methods for generating skeleton code for preprocessor, transformer, and compiler
def gen_preprocessor_skeleton(self) -> str:
@@ -904,12 +903,13 @@ class EBNFCompiler(Compiler):
.format(symbol), Error.UNUSED_ERROR_HANDLING_WARNING)
except KeyError:
def match_function(nd: Node) -> bool:
return nd.children and nd.children[0].content.startswith(symbol + '_')
return bool(nd.children) and nd.children[0].content.startswith(symbol + '_')
dir_node = self.tree.pick(match_function)
directive = dir_node.children[0].content
self.tree.new_error(
dir_node, 'Directive "{}" relates to undefined symbol "{}"!'
.format(directive, directive.split('_')[0]))
if dir_node:
directive = dir_node.children[0].content
self.tree.new_error(
dir_node, 'Directive "{}" relates to undefined symbol "{}"!'
.format(directive, directive.split('_')[0]))
# prepare and add skip-rules
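Two fixes here: the match function now returns a real `bool` (`nd.children` is an empty tuple for leaves, so `nd.children and ...` could yield `()`), and `pick()` may find nothing, so the result is guarded before use. A reduced, self-contained sketch with a hypothetical `Node`:

```python
from typing import Callable, Optional, Tuple

class Node:
    """Hypothetical stand-in for DHParser.syntaxtree.Node."""
    def __init__(self, content: str, *children: 'Node') -> None:
        self.content = content
        self.children = children  # type: Tuple[Node, ...]

def pick(root: Node, match: Callable[[Node], bool]) -> Optional[Node]:
    # Depth-first search; returns None when nothing matches.
    if match(root):
        return root
    for child in root.children:
        found = pick(child, match)
        if found is not None:
            return found
    return None

def match_function(nd: Node) -> bool:
    # bool(...) ensures a bool even when nd.children is the empty tuple
    return bool(nd.children) and nd.children[0].content.startswith('symbol_')

dir_node = pick(Node('root', Node('symbol_x')), match_function)
if dir_node:  # guard: pick() may have returned None
    print(dir_node.children[0].content)  # -> symbol_x
```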
@@ -1035,7 +1035,8 @@ class EBNFCompiler(Compiler):
self.grammar_name)
_ = grammar_class()
grammar_python_src = grammar_python_src.replace(
'static_analysis_pending__ = [True]', 'static_analysis_pending__ = []', 1)
'static_analysis_pending__ = [True]',
'static_analysis_pending__ = [] # type: List[bool]', 1)
except NameError:
pass # undefined names in the grammar are already caught and reported
except GrammarError as error:
@@ -1485,6 +1486,7 @@ class EBNFCompiler(Compiler):
def on_whitespace(self, node: Node) -> str:
return self.WSPC_PARSER()
def get_ebnf_compiler(grammar_name="", grammar_source="") -> EBNFCompiler:
try:
compiler = THREAD_LOCALS.ebnf_compiler_singleton
@@ -111,7 +111,7 @@ def resume_logging(log_dir: str=''):
set_config_value('log_dir', log_dir)
def log_dir(path: str="") -> Union[str, bool]:
def log_dir(path: str = "") -> str:
"""Creates a directory for log files (if it does not exist) and
returns its path.
@@ -119,7 +119,7 @@ def log_dir(path: str="") -> Union[str, bool]:
Don't use a directory name that could be the name of a directory
for other purposes than logging.
ATTENTION: The log-dir is sotred thread locally, which means the log-dir
ATTENTION: The log-dir is stored thread locally, which means the log-dir
as well as the information whether logging is turned on or off will not
automatically be transferred to any subprocesses. This needs to be done
explicitly. (See `testing.grammar_suite()` for an example of how this can
@@ -130,13 +130,12 @@ def log_dir(path: str="") -> Union[str, bool]:
used: `configuration.get_config_value('log_dir')`.
Returns:
name of the logging directory (str) or False (bool) if logging has
not been switched on with the logging-contextmanager (see below), yet.
str - name of the logging directory or '' if logging is turned off.
"""
# the try-except clauses in the following are precautions for multithreading
dirname = path if path else get_config_value('log_dir')
if not dirname:
return False
return ''
# `try ... except` rather than `if os.path.exists(...)` to create the directory
# to ensure thread-safety.
try:
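Returning `''` instead of `False` keeps the annotation a plain `str` while staying falsy, so existing call sites that test the result truthily keep working. A sketch (hypothetical reduced version of the function):

```python
def log_dir_sketch(configured_dir: str = '') -> str:
    """Returns the logging directory, or '' if logging is turned off."""
    if not configured_dir:
        return ''  # falsy like the old False, but type-consistent
    return configured_dir

ldir = log_dir_sketch()
if ldir:  # unchanged call-site idiom
    print('logging to', ldir)
```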
@@ -187,7 +186,7 @@ def create_log(log_name: str) -> str:
return ''
def append_log(log_name: str, *strings, echo: bool=False) -> None:
def append_log(log_name: str, *strings, echo: bool = False) -> None:
"""
Appends one or more strings to the log-file with the name 'log_name', if
logging is turned on and log_name is not the empty string,
@@ -202,7 +201,7 @@ def append_log(log_name: str, *strings, echo: bool=False) -> None:
:param echo: If True, the log message will be echoed on the terminal. This
will also happen if logging is turned off.
"""
ldir, file_name = os.path.split(log_name)
ldir, _ = os.path.split(log_name)
if not ldir:
ldir = log_dir()
if ldir and log_name:
@@ -262,7 +261,7 @@ class HistoryRecord:
ERROR = "ERROR"
FAIL = "FAIL"
Snapshot_Fields = ('line', 'column', 'stack', 'status', 'text')
Snapshot = collections.namedtuple('Snapshot', Snapshot_Fields)
Snapshot = collections.namedtuple('Snapshot', ('line', 'column', 'stack', 'status', 'text'))
COLGROUP = '<colgroup>\n<col style="width:2%"/><col style="width:2%"/><col ' \
'style="width:65%"/><col style="width:6%"/><col style="width:25%"/>\n</colgroup>'
@@ -287,15 +286,15 @@ class HistoryRecord:
'</style>\n</head>\n<body>\n')
HTML_LEAD_OUT = '\n</body>\n</html>\n'
def __init__(self, call_stack: List[str],
node: Optional[Node],
def __init__(self, call_stack: List[Tuple[str, int]],
node: Node,
text: StringView,
line_col: Tuple[int, int],
errors: List[Error] = []) -> None:
# copy call stack, dropping uninformative Forward-Parsers
# self.call_stack = call_stack # type: Tuple[Tuple[str, int],...]
self.call_stack = tuple((tn, pos) for tn, pos in call_stack if tn != ":Forward") # type: Tuple[Tuple[str, int],...]
self.node = node # type: Optional[Node]
self.node = node # type: Node
self.text = text # type: StringView
self.line_col = line_col # type: Tuple[int, int]
assert all(isinstance(err, Error) for err in errors)
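Tightening `node` from `Optional[Node]` to `Node` works together with the `Grammar` hunk further down, which passes the `EMPTY_NODE` sentinel instead of `None`; downstream code can then touch `h.node.pos` without None checks. A generic sketch of the sentinel pattern (hypothetical minimal `Node`):

```python
class Node:
    """Hypothetical stand-in; real Nodes carry tag, result, position, ..."""
    def __init__(self, tag: str) -> None:
        self.tag = tag
        self.pos = 0

EMPTY_NODE = Node('EMPTY')  # module-level sentinel, never None

class HistoryRecord:
    def __init__(self, node: Node) -> None:
        self.node = node  # type: Node  # no Optional, no None checks later

record = HistoryRecord(EMPTY_NODE)
print(record.node.pos)  # always safe to dereference
```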
@@ -356,7 +355,7 @@ class HistoryRecord:
classes[idx['text']] = 'failtext'
else: # ERROR
stack += '<br/>\n"%s"' % self.err_msg()
tpl = self.Snapshot(str(self.line_col[0]), str(self.line_col[1]), stack, status, excerpt)
tpl = self.Snapshot(str(self.line_col[0]), str(self.line_col[1]), stack, status, excerpt) # type: Tuple[str, str, str, str, str]
return ''.join(['<tr>'] + [('<td class="%s">%s</td>' % (cls, item))
for cls, item in zip(classes, tpl)] + ['</tr>'])
@@ -514,7 +513,7 @@ def log_parsing_history(grammar, log_file_name: str = '', html: bool = True) ->
log.append('\n'.join(['</table>\n<table>', HistoryRecord.COLGROUP]))
if not is_logging():
return
return False
if not log_file_name:
name = grammar.__class__.__name__
@@ -32,7 +32,7 @@ for an example.
from collections import defaultdict
import copy
from typing import Callable, cast, List, Tuple, Set, Container, Dict, \
from typing import Callable, cast, List, Tuple, Sequence, Set, Dict, \
DefaultDict, Union, Optional, Any
from DHParser.configuration import get_config_value
@@ -40,7 +40,7 @@ from DHParser.error import Error, linebreaks, line_col
from DHParser.log import HistoryRecord
from DHParser.preprocess import BEGIN_TOKEN, END_TOKEN, RX_TOKEN_NAME
from DHParser.stringview import StringView, EMPTY_STRING_VIEW
from DHParser.syntaxtree import Node, FrozenNode, RootNode, WHITESPACE_PTYPE, \
from DHParser.syntaxtree import ChildrenType, Node, RootNode, WHITESPACE_PTYPE, \
TOKEN_PTYPE, ZOMBIE_TAG, EMPTY_NODE, ResultType
from DHParser.toolkit import sane_parser_name, escape_control_characters, re, cython, \
abbreviate_middle, RX_NEVER_MATCH, RxPatternType
@@ -110,10 +110,10 @@ class ParserError(Exception):
different kind of error like `UnknownParserError`) is when a `Series`-
or `AllOf`-parser detects a missing mandatory element.
"""
def __init__(self, node: Node, rest: StringView, error: Optional[Error], first_throw: bool):
self.node = node # type: Node
self.rest = rest # type: StringView
self.error = error # type: Optional[Error]
def __init__(self, node: Node, rest: StringView, error: Error, first_throw: bool):
self.node = node # type: Node
self.rest = rest # type: StringView
self.error = error # type: Error
self.first_throw = first_throw # type: bool
def __str__(self):
@@ -212,7 +212,7 @@ def reentry_point(rest: StringView,
ApplyFunc = Callable[['Parser'], None]
FlagFunc = Callable[[ApplyFunc, Set[ApplyFunc]], bool]
ParseFunc = Callable[['Parser', StringView], Tuple[Optional[Node], StringView]]
ParseFunc = Callable[[StringView], Tuple[Optional[Node], StringView]]
def copy_parser_attrs(src: 'Parser', duplicate: 'Parser'):
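The corrected `ParseFunc` drops the `'Parser'` parameter because `self._parse` is assigned as a *bound* method: `self` is already baked in, so the callable's visible signature takes only the text argument. Illustration with `str` standing in for `StringView`:

```python
from typing import Callable, Optional, Tuple

ParseFunc = Callable[[str], Tuple[Optional[str], str]]

class Parser:
    def __init__(self) -> None:
        # bound method: callers pass only the text, not the parser
        self.parse = self._parse  # type: ParseFunc

    def _parse(self, text: str) -> Tuple[Optional[str], str]:
        return None, text

print(Parser().parse('input'))  # -> (None, 'input')
```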
@@ -315,8 +315,8 @@ class Parser:
self.tag_name = self.ptype # type: str
self.cycle_detection = set() # type: Set[ApplyFunc]
# this indirection is required for Cython-compatibility
self.__parse = self._parse # type: ParseMethod
# self.proxied = None # type: Optional[ParseMethod]
self.__parse = self._parse # type: ParseFunc
# self.proxied = None # type: Optional[ParseFunc]
try:
self._grammar = GRAMMAR_PLACEHOLDER # type: Grammar
except NameError:
@@ -413,12 +413,12 @@ class Parser:
# if i < 0:
# i = 1
try:
zombie = pe.node[ZOMBIE_TAG]
zombie = pe.node[ZOMBIE_TAG] # type: Optional[Node]
except (KeyError, ValueError):
zombie = None
if zombie and not zombie.result:
zombie.result = rest[:i]
tail = tuple()
tail = tuple() # type: ChildrenType
else:
nd = Node(ZOMBIE_TAG, rest[:i]).with_pos(location)
# nd.attr['err'] = pe.error.message
@@ -463,7 +463,7 @@ class Parser:
"Refactor grammar to avoid slow parsing.",
node.pos if node else location,
Error.LEFT_RECURSION_WARNING))
error_id = id(node)
# error_id = id(node)
grammar.last_recursion_location__ = location
# don't overwrite any positive match (i.e. node not None) in the cache
# and don't add empty entries for parsers returning from left recursive calls!
@@ -525,7 +525,7 @@ class Parser:
# assume that proxy is a function
proxy = proxy.__get__(self, type(self))
else:
# if proxy is a method it must be a method od self
# if proxy is a method it must be a method of self
assert proxy.__self__ == self
self.__parse = proxy
@@ -553,11 +553,11 @@ class Parser:
except NameError: # Cython: No access to GRAMMAR_PLACEHOLDER, yet :-(
self._grammar = grammar
def sub_parsers(self) -> List['Parser']:
def sub_parsers(self) -> Tuple['Parser', ...]:
"""Returns the list of sub-parsers if there are any.
Overridden by Unary, Nary and Forward.
"""
return []
return tuple()
def _apply(self, func: ApplyFunc, flip: FlagFunc) -> bool:
"""
@@ -1222,7 +1222,7 @@ class Grammar:
and any('Lookahead' in tag for tag, _ in h.call_stack):
break
else:
h = HistoryRecord([], None, StringView(''), (0, 0))
h = HistoryRecord([], EMPTY_NODE, StringView(''), (0, 0))
if h.status == h.MATCH and (h.node.pos + len(h.node) == len(self.document__)):
# TODO: this case still needs unit-tests and support in testing.py
error_msg = "Parser stopped before end, but matched with lookahead."
@@ -1254,7 +1254,7 @@ class Grammar:
# # stop history tracking when parser returned too early
# self.history_tracking__ = False
else:
rest = '' # if complete_match is False, ignore the rest and leave while loop
rest = StringView('') # if complete_match is False, ignore the rest and leave while loop
if stitches:
if rest:
stitches.append(Node(ZOMBIE_TAG, rest))
@@ -1323,7 +1323,7 @@ class Grammar:
def static_analysis(self) -> List[GrammarErrorType]:
"""
EXPERIMENTAL
Checks the parser tree statically for possible errors. At the moment,
no checks are implemented
@@ -1403,11 +1403,11 @@ class PreprocessorToken(Parser):
if text[0:1] == BEGIN_TOKEN:
end = text.find(END_TOKEN, 1)
if end < 0:
node = Node(self.tag_name, '')
node = Node(self.tag_name, '') # type: Node
self.grammar.tree__.new_error(
node,
'END_TOKEN delimiter missing from preprocessor token. '
'(Most likely due to a preprocessor bug!)') # type: Node
'(Most likely due to a preprocessor bug!)')
return node, text[1:]
elif end == 0:
node = Node(self.tag_name, '')
@@ -1517,11 +1517,11 @@ class RegExp(Parser):
def DropToken(text: str) -> Token:
return Drop(Token(text))
return cast(Token, Drop(Token(text)))
def DropRegExp(regexp) -> RegExp:
return Drop(RegExp(regexp))
return cast(RegExp, Drop(RegExp(regexp)))
def withWS(parser_factory, wsL='', wsR=r'\s*'):
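`cast()` here is purely for the type checker and free at runtime; it is needed if `Drop(...)` is annotated to return a more general parser type than the `Token`/`RegExp` it actually passes through. A sketch under that assumption (simplified, hypothetical class hierarchy):

```python
from typing import cast

class Parser:
    drop_content = False

class Token(Parser):
    def __init__(self, text: str) -> None:
        self.text = text

def Drop(parser: Parser) -> Parser:
    # Assumed annotation: statically returns the base type, even though
    # the very same object goes in and out.
    parser.drop_content = True
    return parser

def DropToken(text: str) -> Token:
    return cast(Token, Drop(Token(text)))  # no-op at runtime

print(DropToken('begin').text)  # the underlying Token is unchanged
```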
@@ -1553,7 +1553,7 @@ def TKN(token, wsL='', wsR=r'\s*'):
def DTKN(token, wsL='', wsR=r'\s*'):
"""Syntactic Sugar for 'Series(Whitespace(wsL), DropToken(token), Whitespace(wsR))'"""
return withWS(lambda: DropToken(token), wsL, wsR)
return withWS(lambda: Drop(Token(token)), wsL, wsR)
class Whitespace(RegExp):
@@ -1667,8 +1667,8 @@ class UnaryParser(MetaParser):
copy_parser_attrs(self, duplicate)
return duplicate
def sub_parsers(self) -> List['Parser']:
return [self.parser]
def sub_parsers(self) -> Tuple['Parser', ...]:
return (self.parser,)
class NaryParser(MetaParser):
@@ -1701,12 +1701,12 @@ class NaryParser(MetaParser):
if not self._grammar.resume_notices__:
return
notice = Error('Skipping within parser {} to point {}'
.format(self.pname or self.pytpe, repr(_text[:10])),
.format(self.pname or self.ptype, repr(_text[:10])),
self._grammar.document_length__ - len(_text),
Error.RESUME_NOTICE)
self._grammar.tree__.add_error(err_node, notice)
def sub_parsers(self) -> List['Parser']:
def sub_parsers(self) -> Tuple['Parser', ...]:
return self.parsers
@@ -2717,5 +2717,5 @@ class Forward(Parser):
self.parser = parser
self.drop_content = parser.drop_content
def sub_parsers(self) -> List['Parser']:
return [self.parser]
def sub_parsers(self) -> Tuple['Parser', ...]:
return (self.parser,)
@@ -53,7 +53,7 @@ import subprocess
import sys
import time
from typing import Callable, Coroutine, Optional, Union, Dict, List, Tuple, Sequence, Set, \
Iterator, Any, cast
Iterator, Iterable, Any, cast
from DHParser.configuration import access_thread_locals, get_config_value
from DHParser.syntaxtree import DHParser_JSONEncoder
@@ -152,8 +152,8 @@ def substitute_default_host_and_port(host, port):
def as_json_rpc(func: Callable,
params: Union[List[JSON_Type], Dict[str, JSON_Type]]=(),
ID: Optional[int]=None) -> str:
params: Union[List[JSON_Type], Dict[str, JSON_Type]] = [],
ID: Optional[int] = None) -> str:
"""Generates a JSON-RPC-call for `func` with parameters `params`"""
return json.dumps({"jsonrpc": "2.0", "method": func.__name__, "params": params, "id": ID})
@@ -223,7 +223,7 @@ async def asyncio_connect(host: str = USE_DEFAULT_HOST, port: int = USE_DEFAULT_
delay *= 1.5
else:
delay = retry_timeout # exit while loop
if connected:
if connected and reader is not None and writer is not None:
return reader, writer
else:
raise save_error
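The extra `is not None` tests give mypy the narrowing it needs: `reader` and `writer` start as `None` inside the retry loop, while the function promises non-optional values on success. A reduced sketch (hypothetical names, `str` standing in for the stream types):

```python
from typing import Optional, Tuple

def connect_sketch(succeed: bool) -> Tuple[str, str]:
    reader = None  # type: Optional[str]
    writer = None  # type: Optional[str]
    saved_error = OSError('connection refused')
    if succeed:  # stands in for the retry loop actually connecting
        reader, writer = 'reader', 'writer'
    if succeed and reader is not None and writer is not None:
        return reader, writer  # both narrowed from Optional[str] to str
    raise saved_error

print(connect_sketch(True))
```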
@@ -233,7 +233,7 @@ def GMT_timestamp() -> str:
return time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime())
ALL_RPCs = frozenset('*') # Magic value denoting all remote procedures
ALL_RPCs = set('*') # Magic value denoting all remote procedures
def default_fallback(*args, **kwargs) -> str:
@@ -313,7 +313,7 @@ class Server:
"""
def __init__(self, rpc_functions: RPC_Type,
cpu_bound: Set[str] = ALL_RPCs,
blocking: Set[str] = frozenset(),
blocking: Set[str] = set(),
server_name: str = ''):
self.server_name = server_name or '%s_%s' % (self.__class__.__name__, hex(id(self))[2:])
if isinstance(rpc_functions, Dict):
@@ -377,7 +377,8 @@ class Server:
self.finished_tasks = dict() # type: Dict[int, Set[int]]
self.connections = set() # type: Set
self.kill_switch = False # type: bool
self.loop = None # just for python 3.5 compatibility...
# just for python 3.5 compatibility...
self.loop = None # type: Optional[asyncio.AbstractEventLoop]
def start_logging(self, filename: str = "") -> str:
if not filename:
@@ -434,15 +435,15 @@ class Server:
async def execute(self, executor: Optional[Executor],
method: Callable,
params: Union[Dict, Sequence])\
-> Tuple[JSON_Type, RPC_Error_Type]:
-> Tuple[Optional[JSON_Type], Optional[RPC_Error_Type]]:
"""Executes a method with the given parameters in a given executor
(ThreadPoolExecutor or ProcessPoolExecutor). `execute()` waits for the
completion and returns the JSON result and an RPC error tuple (see the
type definition above). The result may be None and the error may be
None, i.e. no error. If `executor` is `None` the method will be called
directly instead of deferring it to an executor."""
result = None # type: JSON_Type
rpc_error = None # type: RPC_Error_Type
result = None # type: Optional[JSON_Type]
rpc_error = None # type: Optional[RPC_Error_Type]
if params is None:
params = tuple()
has_kw_params = isinstance(params, Dict)
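Annotating `result` and `rpc_error` as `Optional[...]` is required because both are initialized to `None` and may legitimately stay that way; the old comments promised non-optional types, contradicting the initializer. Sketch with hypothetical simplified aliases:

```python
from typing import Any, Dict, List, Optional, Tuple, Union

JSON_Type = Union[Dict[str, Any], List[Any], str, int, float]  # hypothetical
RPC_Error_Type = Tuple[int, str]                               # hypothetical

def execute_sketch(fail: bool) -> Tuple[Optional[JSON_Type], Optional[RPC_Error_Type]]:
    result = None     # type: Optional[JSON_Type]
    rpc_error = None  # type: Optional[RPC_Error_Type]
    if fail:
        rpc_error = (-32000, 'server error')
    else:
        result = {'answer': 42}
    return result, rpc_error

print(execute_sketch(False))  # -> ({'answer': 42}, None)
```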
@@ -469,7 +470,7 @@ class Server:
return result, rpc_error
async def run(self, method_name: str, method: Callable, params: Union[Dict, Sequence]) \
-> Tuple[JSON_Type, RPC_Error_Type]:
-> Tuple[Optional[JSON_Type], Optional[RPC_Error_Type]]:
"""Picks the right execution method (process, thread or direct execution) and
runs it in the respective executor. In case of a broken ProcessPoolExecutor it
restarts the ProcessPoolExecutor and tries to execute the method again."""
@@ -478,7 +479,8 @@ class Server:
# c) in a process pool if it is cpu bound
# see: https://docs.python.org/3/library/asyncio-eventloop.html
# #executing-code-in-thread-or-process-pools
result, rpc_error = None, None
result = None # type: Optional[JSON_Type]
rpc_error = None # type: Optional[RPC_Error_Type]
executor = self.pp_executor if method_name in self.cpu_bound else \
self.tp_executor if method_name in self.blocking else None
result, rpc_error = await self.execute(executor, method, params)
@@ -563,7 +565,8 @@ class Server:
await self.respond(writer, http_response("Data too large! Only %i MB allowed"
% (self.max_data_size // (1024 ** 2))))
else:
result, rpc_error = None, None
result = None # type: Optional[JSON_Type]
rpc_error = None # type: Optional[RPC_Error_Type]
m = re.match(RE_GREP_URL, data)
# m = RX_GREP_URL(data.decode())
if m:
@@ -599,7 +602,8 @@ class Server:
writer: asyncio.StreamWriter,
json_obj: Dict):
# TODO: handle cancellation calls!
result, rpc_error = None, None
result = None # type: Optional[JSON_Type]
rpc_error = None # type: Optional[RPC_Error_Type]
if json_obj.get('jsonrpc', '0.0') < '2.0':
rpc_error = -32600, 'Invalid Request: jsonrpc version 2.0 needed, version "' \
' "%s" found.' % json_obj.get('jsonrpc', b'unknown')
@@ -652,8 +656,8 @@ class Server:
id_writer = id(writer) # type: int
self.connections.add(id_writer)
self.log('SERVER MESSAGE: New connection: ', str(id_writer), '\n')
self.active_tasks[id_writer] = dict() # type: Dict[id, asyncio.Task]
self.finished_tasks[id_writer] = set() # type: Set[int]
self.active_tasks[id_writer] = dict()
self.finished_tasks[id_writer] = set()
buffer = b'' # type: bytes
while not self.kill_switch and id_writer in self.connections:
# reset the data variable
@@ -771,10 +775,10 @@ class Server:
else:
# assume json
# TODO: add batch processing capability! (put calls to execute in asyncio tasks, use asyncio.gather)
json_id = 0
raw = None
json_obj = {}
rpc_error = None
json_id = 0 # type: int
raw = None # type: Optional[JSON_Type]
json_obj = {} # type: JSON_Type
rpc_error = None # type: Optional[RPC_Error_Type]
# see: https://microsoft.github.io/language-server-protocol/specification#header-part
# i = max(data.find(b'\n\n'), data.find(b'\r\n\r\n')) + 2
i = data.find(b'{')
@@ -819,7 +823,7 @@ class Server:
open_tasks = {task for id, task in self.active_tasks[id_writer].items()
if id not in self.finished_tasks[id_writer]}
if open_tasks:
done, pending = await asyncio.wait(open_tasks, timeout=3.0)
done, pending = await asyncio.wait(open_tasks, timeout=3.0) # type: Set[asyncio.Future], Set[asyncio.Future]
for task in pending:
task.cancel()
del self.active_tasks[id_writer]
@@ -840,8 +844,8 @@ class Server: