Commit d1b0a79e authored by Eckhart Arnold

more support for Language Server Protocol added

parent 050b0f1f
......@@ -43,7 +43,7 @@ from DHParser.parse import Parser, Grammar, mixin_comment, mixin_nonempty, Forwa
Text, Capture, Retrieve, Pop, optional_last_value, GrammarError, Whitespace, Always, Never, \
INFINITE, matching_bracket, ParseFunc, update_scanner
from DHParser.preprocess import nil_preprocessor, PreprocessorFunc
from DHParser.syntaxtree import Node, WHITESPACE_PTYPE, TOKEN_PTYPE, EMPTY_NODE
from DHParser.syntaxtree import Node, RootNode, WHITESPACE_PTYPE, TOKEN_PTYPE, EMPTY_NODE
from DHParser.toolkit import load_if_file, escape_re, escape_control_characters, md5, \
sane_parser_name, re, expand_table, unrepr, compile_python_object, DHPARSER_PARENTDIR, \
RX_NEVER_MATCH
......@@ -1886,12 +1886,9 @@ class EBNFCompiler(Compiler):
def on_expression(self, node) -> str:
# TODO: Add check for errors like "a" | "ab" (which will always yield a, even for ab)
# The following algorithm reorders literal alternatives, so that earlier alternatives
# do not pre-empt later alternatives, e.g. 'ID' | 'IDREF' will be reordered as
# 'IDREF' | 'ID'
# TODO: Add symbols defined as literals, here...
def move_items(l: List, a: int, b: int):
"""Moves all items in the interval [a:b[ one position forward and moves the
......@@ -2345,7 +2342,7 @@ def get_ebnf_compiler(grammar_name="", grammar_source="") -> EBNFCompiler:
return compiler
def compile_ebnf_ast(ast: Node) -> str:
def compile_ebnf_ast(ast: RootNode) -> str:
"""Compiles the abstract-syntax-tree of an EBNF-source-text into
python code of a class derived from `parse.Grammar` that can
parse text following the grammar described with the EBNF-code."""
......
......@@ -35,7 +35,7 @@ the string representations of the error objects. For example::
print("There have been warnings, but no errors.")
"""
from typing import Iterable, Iterator, Union, List
from typing import Iterable, Iterator, Union, List, Optional, Sequence, Tuple
from DHParser.preprocess import SourceMapFunc
from DHParser.stringview import StringView
......@@ -160,22 +160,31 @@ RECURSION_DEPTH_LIMIT_HIT = ErrorCode(10400)
class Error:
__slots__ = ['message', 'code', '_pos', 'orig_pos', 'line', 'column']
__slots__ = ['message', 'code', '_pos', 'orig_pos', 'line', 'column',
'length', 'end_line', 'end_column', 'related', 'relatedUri']
def __init__(self, message: str, pos: int, code: ErrorCode = ERROR,
orig_pos: int = -1, line: int = -1, column: int = -1) -> None:
orig_pos: int = -1, line: int = -1, column: int = -1,
length: int = 1, related: Sequence[Tuple['Error', str]] = []) -> None:
assert isinstance(code, ErrorCode)
assert not isinstance(pos, ErrorCode)
assert code >= 0
assert pos >= 0
assert length >= 1
self.message = message # type: str
self._pos = pos # type: int
# TODO: Add some logic to avoid double assignment of the same error code?
# Problem: Same code might allowedly be used by two different parsers/compilers
# Add some logic to avoid double assignment of the same error code?
# Problem: Same code might allowedly be used by two different parsers/compilers
self.code = code # type: ErrorCode
self.orig_pos = orig_pos # type: int
self.line = line # type: int
self.column = column # type: int
# support for Language Server Protocol Diagnostics
# see: https://microsoft.github.io/language-server-protocol/specifications/specification-current/#diagnostic
self.length = length # type: int
self.end_line = -1 # type: int
self.end_column = -1 # type: int
self.related = tuple(related) # type: Sequence[Tuple['Error', str]]
def __str__(self):
prefix = ''
......@@ -196,6 +205,7 @@ class Error:
self._pos = value
# reset line and column values, because they might now not be valid any more
self.line, self.column = -1, -1
self.end_line, self.end_column = -1, -1
@property
def severity(self):
......@@ -216,6 +226,42 @@ class Error:
stop = document.find('\n', self.pos)
return document[start:stop] + '\n' + ' ' * (self.pos - start) + '^\n'
def rangeObj(self) -> dict:
"""Returns the range (position plus length) of the error as an LSP-Range-Object.
https://microsoft.github.io/language-server-protocol/specifications/specification-current/#range
"""
assert self.line >= 0 and self.column >= 0 and self.end_line >= 0 and self.end_column >= 0
return {'start': {'line': self.line, 'character': self.column},
'end': {'line': self.end_line, 'character': self.end_column}}
def diagnosticObj(self) -> dict:
"""Returns the Error as as Language Server Protocol Diagnostic object.
https://microsoft.github.io/language-server-protocol/specifications/specification-current/#diagnostic
"""
def relatedObj(relatedError: Tuple['Error', str]) -> dict:
err, uri = relatedError
return {
'location': {'uri': uri, 'range': err.rangeObj()},
'message': err.message
}
if self.code < WARNING:
severity = 3
elif self.code < ERROR:
severity = 2
else:
severity = 1
return {
'range': self.rangeObj(),
'severity': severity,
'code': self.code,
'source': 'DHParser',
'message': self.message,
# 'tags': []
'relatedInformation': [relatedObj(rel) for rel in self.related]
}
def is_warning(code: int) -> bool:
"""Returns True, if error is merely a warning or a message."""
......@@ -290,3 +336,4 @@ def adjust_error_locations(errors: List[Error],
assert err.pos >= 0
err.orig_pos = source_mapping(err.pos)
err.line, err.column = line_col(line_breaks, err.orig_pos)
err.end_line, err.end_column = line_col(line_breaks, err.orig_pos + err.length)
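
Taken together, the additions above let an Error carry everything an LSP client needs: a start position, a length from which adjust_error_locations derives the end position, and optional related errors. The following self-contained sketch mirrors that mapping without importing DHParser; the SimpleError class and the WARNING/ERROR thresholds are illustrative assumptions, not the library's actual API or values:

from typing import Sequence, Tuple

WARNING, ERROR = 100, 1000   # assumed thresholds, standing in for DHParser's ErrorCode constants

class SimpleError:
    """Illustrative stand-in for DHParser's Error class."""
    def __init__(self, message: str, line: int, column: int,
                 end_line: int, end_column: int, code: int = ERROR,
                 related: Sequence[Tuple['SimpleError', str]] = ()) -> None:
        self.message, self.code = message, code
        self.line, self.column = line, column
        self.end_line, self.end_column = end_line, end_column
        self.related = tuple(related)

    def range_obj(self) -> dict:
        # LSP Range: line/character positions for start and end
        return {'start': {'line': self.line, 'character': self.column},
                'end': {'line': self.end_line, 'character': self.end_column}}

    def diagnostic_obj(self) -> dict:
        # 1 = Error, 2 = Warning, 3 = Information (LSP DiagnosticSeverity)
        severity = 3 if self.code < WARNING else 2 if self.code < ERROR else 1
        return {'range': self.range_obj(),
                'severity': severity,
                'code': self.code,
                'source': 'DHParser',
                'message': self.message,
                'relatedInformation': [
                    {'location': {'uri': uri, 'range': err.range_obj()},
                     'message': err.message}
                    for err, uri in self.related]}

err = SimpleError('undefined symbol "expression"',
                  line=3, column=14, end_line=3, end_column=24, code=1010)
print(err.diagnostic_obj())
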
......@@ -2469,7 +2469,7 @@ class Series(MandatoryNary):
self, other = other, self
other_parsers = cast('Series', other).parsers if isinstance(other, Series) \
else cast(Tuple[Parser, ...], (other,)) # type: Tuple[Parser, ...]
return Series(*(other_parsers + self.parsers),
return Series(*(other_parsers + cast(NaryParser, self).parsers),
mandatory=combined_mandatory(other, self))
other_parsers = cast('Series', other).parsers if isinstance(other, Series) \
else cast(Tuple[Parser, ...], (other,)) # type: Tuple[Parser, ...]
......@@ -2575,7 +2575,7 @@ class Alternative(NaryParser):
self, other = other, self
other_parsers = cast('Alternative', other).parsers if isinstance(other, Alternative) \
else cast(Tuple[Parser, ...], (other,)) # type: Tuple[Parser, ...]
return Alternative(*(other_parsers + self.parsers))
return Alternative(*(other_parsers + cast(NaryParser, self).parsers))
other_parsers = cast('Alternative', other).parsers if isinstance(other, Alternative) \
else cast(Tuple[Parser, ...], (other,)) # type: Tuple[Parser, ...]
return Alternative(*(self.parsers + other_parsers))
......@@ -2768,7 +2768,7 @@ class Interleave(MandatoryNary):
# for some reason cython called __mul__ instead of __rmul__,
# so we have to flip self and other...
self, other = other, self
parsers, mandatory, repetitions = self._prepare_combined(other)
parsers, mandatory, repetitions = cast(Interleave, self)._prepare_combined(other)
return Interleave(*parsers, mandatory=mandatory, repetitions=repetitions)
def __rmul__(self, other: Parser) -> 'Interleave':
......@@ -2828,9 +2828,6 @@ class Lookahead(FlowParser):
def _parse(self, text: StringView) -> Tuple[Optional[Node], StringView]:
node, _ = self.parser(text)
if self.sign(node is not None):
# TODO: Delete following comment
# static analysis requires lookahead to be disabled at document end
# or (self.grammar.static_analysis_pending__ and not text)):
return (EMPTY_NODE if self.anonymous else Node(self.tag_name, '')), text
else:
return None, text
......@@ -3420,8 +3417,7 @@ class Forward(UnaryParser):
self.drop_content = parser.drop_content
def sub_parsers(self) -> Tuple[Parser, ...]:
"""Note: Sub-Parsers are not passed through by Forward-Parser.
TODO: Should this be changed?"""
"""Note: Sub-Parsers are not passed through by Forward-Parser."""
if is_parser_placeholder(self.parser):
return tuple()
return self.parser,
......@@ -193,11 +193,11 @@ def substitute_default_host_and_port(host, port):
return host, port
def as_json_rpc(func: Callable,
params: Union[List[JSON_Type], Dict[str, JSON_Type]] = [],
ID: Optional[int] = None) -> str:
"""Generates a JSON-RPC-call for `func` with parameters `params`"""
return json.dumps({"jsonrpc": "2.0", "method": func.__name__, "params": params, "id": ID})
# def as_json_rpc(func: Callable,
# params: Union[List[JSON_Type], Dict[str, JSON_Type]] = [],
# ID: Optional[int] = None) -> str:
# """Generates a JSON-RPC-call for `func` with parameters `params`"""
# return json.dumps({"jsonrpc": "2.0", "method": func.__name__, "params": params, "id": ID})
def convert_argstr(s: str) -> Union[None, bool, int, str, List, Dict]:
......@@ -334,7 +334,7 @@ def strip_header_delimiter(data: str) -> Tuple[str]:
def gen_task_id() -> int:
"""Generate a unique task id. This is always a negative number to
distinguish the taks id's from the json-rpc ids.
distinguish the task id's from the json-rpc ids.
"""
THREAD_LOCALS = access_thread_locals()
try:
......@@ -698,12 +698,14 @@ class Connection:
assert self.response_queue is not None
self.response_queue.put_nowait(json_obj)
async def server_call(self, json_obj: JSON_Type):
async def server_call(self, method: str, params: JSON_Type, ID: int):
"""Issues a json-rpc call from the server to the client."""
json_obj = {"jsonrpc": "2.0", "method": method, "params": params, "id": ID}
json_str = json.dumps(json_obj)
if self.log_file:
self.log('CALL: ', json_str, '\n\n')
self.log('CALL: ', json_str, '\n\n')
request = json_str.encode()
# self.writer.write(JSONRPC_HEADER_BYTES % len(request))
# self.writer.write(request)
request = JSONRPC_HEADER_BYTES % len(request) + request
self.writer.write(request)
await self.writer.drain()
......
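
server_call frames the outgoing message the way the LSP base protocol requires: a Content-Length header, a blank line, then the encoded JSON body. A minimal sketch of that framing, assuming the header template b'Content-Length: %d\r\n\r\n' (DHParser's actual JSONRPC_HEADER_BYTES constant may differ in detail):

import json

JSONRPC_HEADER = b'Content-Length: %d\r\n\r\n'   # assumed header template

def frame_call(method: str, params: dict, ID: int) -> bytes:
    """Builds a framed JSON-RPC message as it would be written to the transport."""
    body = json.dumps({"jsonrpc": "2.0", "method": method,
                       "params": params, "id": ID}).encode()
    return JSONRPC_HEADER % len(body) + body

print(frame_call('textDocument/publishDiagnostics',
                 {'uri': 'file:///example.ebnf', 'diagnostics': []}, 1))
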
......@@ -86,9 +86,6 @@ def pack_index(index: int, length: int) -> int:
"""
# assert length >= 0
index = index if index >= 0 else index + length
# TODO: Test the following code for speedup
# if index < 0:
# index += length
return 0 if index < 0 else length if index > length else index
......@@ -385,7 +382,7 @@ class TextBuffer:
def __init__(self, text: Union[str, StringView], version: int = 0):
self._text = text # type: Union[str, StringView]
self._buffer = [] # type: List[Union[str, StringView]]
self.version = version # type: int
self.version = version if version >= 0 else 0 # type: int
def _lazy_init(self):
self._buffer = [line.strip('\r') for line in self._text.split('\n')]
......
......@@ -148,9 +148,9 @@ def asyncio_run(coroutine):
loop.close()
def json_rpc(func, params={}, ID=None) -> str:
def json_rpc(func_name, params={}, ID=None) -> dict:
"""Generates a JSON-RPC-call for `func` with parameters `params`"""
return str({"jsonrpc": "2.0", "method": func.__name__, "params": params, "id": ID})
return {"jsonrpc": "2.0", "method": func_name, "params": params, "id": ID}
class DSLCPUBoundTasks:
......
......@@ -896,7 +896,7 @@ def run_path(path):
def clean_report(report_dir='REPORT'):
"""Deletes any test-report-files in the REPORT sub-directory and removes
the REPORT sub-directory, if it is empty after deleting the files."""
# TODO: make this thread safe, if possible!!!!
# TODO: make this thread/process safe, if possible!!!!
if os.path.exists(report_dir):
files = os.listdir(report_dir)
flag = False
......
......@@ -876,7 +876,8 @@ def collapse(context: List[Node]):
(place "p.26b,18")
"""
node = context[-1]
# TODO: update attributes
for descendant in node.select_if(lambda nd: nd.has_attr()):
node.attr.update(descendant.attr)
node.result = node.content
......@@ -1365,7 +1366,6 @@ def remove_tokens(context: List[Node], tokens: AbstractSet[str] = frozenset()):
remove_children_if(context, partial(is_token, tokens=tokens))
# TODO: refactor to "remove_children"!?
@transformation_factory(collections.abc.Set)
def remove_children(context: List[Node], tag_names: AbstractSet[str]):
"""Removes children by tag name."""
......
......@@ -147,9 +147,9 @@ def asyncio_run(coroutine):
loop.close()
def json_rpc(func, params={}, ID=None) -> str:
def json_rpc(func_name, params={}, ID=None) -> dict:
"""Generates a JSON-RPC-call for `func` with parameters `params`"""
return str({"jsonrpc": "2.0", "method": func.__name__, "params": params, "id": ID})
return {"jsonrpc": "2.0", "method": func_name, "params": params, "id": ID}
class EBNFCPUBoundTasks:
......@@ -249,8 +249,10 @@ class EBNFLanguageServerProtocol:
assert self.lsp_fulltable.keys().isdisjoint(self.blocking.lsp_table.keys())
self.lsp_fulltable.update(self.blocking.lsp_table)
self.pending_changes = dict() # uri -> text
self.current_text = dict() # uri -> TextBuffer
self.current_text = dict() # uri -> TextBuffer
self.last_compiled_version = dict() # uri -> int
self.server_call_ID = 0 # unique id
# self.completionItems = [{k: v for k, v in chain(zip(self.completion_fields, item),
# [['kind', 2]])}
......@@ -276,24 +278,35 @@ class EBNFLanguageServerProtocol:
return {}
async def compile_text(self, uri: str) -> None:
text_buffer = self.pending_changes.get(uri, None)
if text_buffer:
text_buffer = self.current_text.get(uri, None)
version = text_buffer.version
if text_buffer and version > self.last_compiled_version.get(uri, -1):
exenv = self.connection.exec
del self.pending_changes[uri]
self.connection.log('Compiling: ', uri, '\n')
self.last_compiled_version[uri] = version
result, rpc_error = await exenv.execute(exenv.process_executor,
self.cpu_bound.compile_EBNF,
(text_buffer.snapshot(),))
diagnostics = [] # TODO: Generate Diagnostics
publishDiagnostics = {
'uri' : uri,
'version': text_buffer.version,
'diagnostics': diagnostics
}
# evaluate the result
# send a PublishDiagnostics notification via self.connect
# send a PublishDiagnostics notification via self.connection
self.server_call_ID += 1
await self.connection.server_call('textDocument/publishDiagnostics',
publishDiagnostics,
self.server_call_ID)
return None
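
For reference, once real diagnostics are generated the publishDiagnostics parameters assembled in compile_text end up looking roughly like the dictionary below (uri, version and the single diagnostic entry are invented for illustration):

# Illustrative payload only; all concrete values are made up.
publishDiagnostics = {
    'uri': 'file:///home/user/grammar.ebnf',
    'version': 4,
    'diagnostics': [
        {'range': {'start': {'line': 3, 'character': 14},
                   'end':   {'line': 3, 'character': 24}},
         'severity': 1,
         'code': 1010,
         'source': 'DHParser',
         'message': 'undefined symbol "expression"'}
    ]
}
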
async def lsp_textDocument_didOpen(self, textDocument):
from DHParser.stringview import TextBuffer
uri = textDocument['uri']
text = textDocument['text']
text_buffer = TextBuffer(text, int(textDocument.get('version', 0)))
text_buffer = TextBuffer(text, int(textDocument.get('version', -1)))
self.current_text[uri] = text_buffer
self.pending_changes[uri] = text_buffer
await self.compile_text(uri)
return None
......@@ -305,7 +318,7 @@ class EBNFLanguageServerProtocol:
async def lsp_textDocument_didChange(self, textDocument: dict, contentChanges: list):
uri = textDocument['uri']
version = int(textDocument['version'])
version = int(textDocument.get('version', -1))
if uri not in self.current_text:
return {}, (-32602, "Invalid uri: " + uri)
if contentChanges:
......@@ -313,12 +326,12 @@ class EBNFLanguageServerProtocol:
if 'range' in contentChanges[0]:
text_buffer = self.current_text[uri]
text_buffer.text_edits(contentChanges, version)
self.pending_changes[uri] = text_buffer
else:
text_buffer = TextBuffer(contentChanges[0]['text'], version)
self.current_text[uri] = text_buffer
await asyncio.sleep(RECOMPILE_DELAY)
await self.compile_text(uri)
if 'publishDiagnostics' in self.lsp_data['clientCapabilities']:
await asyncio.sleep(RECOMPILE_DELAY)
await self.compile_text(uri)
return None
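
The combination of the version check in compile_text and the RECOMPILE_DELAY sleep above acts as a simple debounce: every edit bumps the version, but once the delay has passed only the newest version is actually compiled. A self-contained sketch of this pattern, with made-up names (RECOMPILE_DELAY, latest_version, last_compiled, recompile) standing in for the server's attributes:

import asyncio

RECOMPILE_DELAY = 0.3      # seconds; assumed value
latest_version = 0         # version of the most recent edit
last_compiled = -1         # version that was last compiled

async def recompile(uri: str) -> None:
    global last_compiled
    if latest_version > last_compiled:       # skip if nothing new has arrived
        last_compiled = latest_version
        print(f'compiling {uri} at version {latest_version}')

async def did_change(uri: str, version: int) -> None:
    global latest_version
    latest_version = version
    await asyncio.sleep(RECOMPILE_DELAY)     # give further edits a chance to arrive
    await recompile(uri)                     # only the newest version gets compiled

async def main():
    # three rapid edits: only one compile run happens, for version 3
    await asyncio.gather(did_change('file:///g.ebnf', 1),
                         did_change('file:///g.ebnf', 2),
                         did_change('file:///g.ebnf', 3))

asyncio.run(main())
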
def lsp_textDocument_completion(self, textDocument: dict, position: dict, context: dict):
......@@ -583,8 +596,8 @@ if __name__ == "__main__":
sys.path.append(dhparserdir)
# import subprocess
# subprocess.run(['gedit', dhparserdir.replace('/', '_')])
from DHParser import configuration
configuration.CONFIG_PRESET['log_server'] = True
# from DHParser import configuration
# configuration.CONFIG_PRESET['log_server'] = True
from argparse import ArgumentParser
parser = ArgumentParser(description="Setup and Control of a Server for processing EBNF-files.")
......@@ -620,7 +633,8 @@ if __name__ == "__main__":
if port >= 0 or host:
echo('Specifying host and port when using streams as transport does not make sense')
sys.exit(1)
run_server('', -1)
log_path, _ = parse_logging_args(args)
run_server('', -1, log_path)
sys.exit(0)
if port < 0 or not host:
......
......@@ -127,21 +127,19 @@ function startLangServerTCP(addr: number) : Disposable {
return disposable;
}
export function activate(context: ExtensionContext) {
console.log('activating language server connector!');
let disposable = startLangServerStream("python", ["EBNFServer.py", "--stream"]);
let disposable = startLangServerStream("python", ["EBNFServer.py", "--stream", "--logging"]);
// let disposable = startLangServerTCP(defaultPort);
context.subscriptions.push(disposable);
}
// TODO: EBNFServer when finished!
// export function deactivate(): Thenable<void> | undefined {
// if (!client) {
// return undefined;
// }
// console.log('stop lsp client');
// return client.stop();
// }
export function deactivate(): Thenable<void> | undefined {
if (!client) {
return undefined;
}
console.log('stop lsp client');
return client.stop();
}