Currently job artifacts in CI/CD pipelines on LRZ GitLab never expire. Starting from Wed 26.1.2022 the default expiration time will be 30 days (GitLab default). Currently existing artifacts in already completed jobs will not be affected by the change. The latest artifacts for all jobs in the latest successful pipelines will be kept. More information: https://gitlab.lrz.de/help/user/admin_area/settings/continuous_integration.html#default-artifacts-expiration

Commit 056be947 authored by di68kap's avatar di68kap
Browse files

- server.Server : renamed and handle compilation extended (still not finished)

parent dcd152d7
......@@ -129,9 +129,11 @@ CONFIG_PRESET['add_grammar_source_to_parser_docstring'] = False
#
########################################################################
# Maximum allowed source code size in bytes
# Default value: 16 MB
CONFIG_PRESET['max_source_size'] = 16 * 1024 * 1024
# Maximum allowed source size for remote procedure calls (including
# parameters) in server.Server. The default value is rather large in
# order to allow transmitting complete source texts as a parameter.
# Default value: 4 MB
CONFIG_PRESET['max_rpc_size'] = 4 * 1024 * 1024
########################################################################
......
......@@ -31,21 +31,23 @@ compilation in serialized form, or just save the compilation results on the
file system and merely return a success or failure message. Module `server`
does not define any of these messages. It is completely up to the clients
of module `server`, i.e. the compilation-modules, to decide.
The communication, i.e. requests and responses, follows the json-rpc protocol
(https://www.jsonrpc.org/specification)
"""
import asyncio
import json
from multiprocessing import Process, Value, Queue
from typing import Callable, Optional, Any
from typing import Callable, Optional, Union, Dict, List, Sequence, cast
from DHParser.toolkit import get_config_value
# TODO: implement compilation-server!
RPC_Table = Dict[str, Callable]
RPC_Type = Union[RPC_Table, List[Callable], Callable]
SERVER_ERROR = "COMPILER-SERVER-ERROR"
CompileFunc = Callable[[str, str], Any] # compiler_src(source: str, log_dir: str) -> Any
SERVER_OFFLINE = 0
SERVER_STARTING = 1
......@@ -53,11 +55,20 @@ SERVER_ONLINE = 2
SERVER_TERMINATE = 3
class Server:
    """A compilation server that dispatches remote procedure calls (RPCs)
    to the functions it has been initialized with.
    """

    def __init__(self, rpc_functions: 'RPC_Type') -> None:
        """Build the name -> callable dispatch table ``self.rpc_table``.

        :param rpc_functions: either a ready-made mapping from names to
            callables, a list (or tuple) of callables — each registered
            under its ``__name__`` — or a single callable.
        :raises TypeError: if ``rpc_functions`` is none of the above.
        """
        if isinstance(rpc_functions, dict):
            # a mapping is used as the dispatch table as-is
            self.rpc_table = rpc_functions  # type: RPC_Table
        elif isinstance(rpc_functions, (list, tuple)):
            # register each function under its own name
            self.rpc_table = {func.__name__: func for func in rpc_functions}
        elif callable(rpc_functions):
            self.rpc_table = {rpc_functions.__name__: rpc_functions}
        else:
            # raise explicitly rather than using `assert`, which is
            # stripped when Python runs with the -O flag
            raise TypeError(
                'rpc_functions must be a dict, a list of callables or a '
                'single callable, not %s' % type(rpc_functions).__name__)
class CompilerServer:
def __init__(self, compiler: CompileFunc):
self.compiler = compiler
self.max_source_size = get_config_value('max_source_size')
self.max_source_size = get_config_value('max_rpc_size')
self.stage = Value('b', SERVER_OFFLINE)
self.server = None # type: Optional[asyncio.base_events.Server]
self.server_messages = Queue() # type: Queue
......@@ -68,11 +79,39 @@ class CompilerServer:
writer: asyncio.StreamWriter):
data = await reader.read(self.max_source_size + 1)
if len(data) > self.max_source_size:
writer.write(BEGIN_TOKEN + SERVER_ERROR + TOKEN_DELIMITER +
"Source code to large! Only %iMB allowed." %
(self.max_source_size // (1024**2)) + END_TOKEN)
writer.write('{"jsonrpc": "2.0", "error": {"code": -32600, "message": '
'"Invaild Request: Source code too large! Only %i MB allowed"}, '
'"id": null}' % (self.max_source_size // (1024**2)))
else:
writer.write(data) # for now, only echo
obj = json.loads(data)
rpc_error = None
json_id = obj.get('id', 'null') if isinstance(obj, Dict) else 'null'
if not isinstance(obj, Dict):
rpc_error = -32700, 'Parse error: Request does not appear to be an RPC-call!?'
elif obj.get('jsonrpc', 'unknown') != '2.0':
rpc_error = -32600, 'Invalid Request: jsonrpc version 2.0 needed, version "%s" ' \
'found.' % obj.get('jsonrpc', 'unknown')
elif not 'method' in obj:
rpc_error = -32600, 'Invalid Request: No method specified.'
elif obj['method'] not in self.rpc_table:
rpc_error = -32601, 'Method not found: ' + str(obj['method'])
else:
method = self.rpc_table[obj['method']]
params = obj['params'] if 'params' in obj else ()
try:
if isinstance(params, Sequence):
result = method(*params)
elif isinstance(params, Dict):
result = method(**params)
except Exception as e:
rpc_error = -32602, "Invalid Params: " + str(e)
if rpc_error is None:
json_result = {"jsonrpc": "2.0", "result": result, "id": json_id}
json.dump(writer, json_result)
else:
writer.write(b'{"jsonrpc": "2.0", "error": {"code": %i, "message": %s}, "id": %s '
% (rpc_error[0], rpc_error[1], json_id))
await writer.drain()
writer.close()
# TODO: add these lines in case a terminate signal is received, i.e. exit server coroutine
......
......@@ -25,6 +25,7 @@ parser classes are defined in the ``parse`` module.
from collections import OrderedDict
import copy
import json
from typing import Callable, cast, Iterator, List, AbstractSet, Set, Union, Tuple, Optional, Dict
from DHParser.error import Error, ErrorCode, linebreaks, line_col
......@@ -752,12 +753,15 @@ class Node: # (collections.abc.Sized): Base class omitted for cython-compatibil
def to_json_obj(self) -> Dict:
"""Seralize a node or tree as json-object"""
return { '__class__': 'DHParser.Node',
'data': [ self.tag_name,
[child.to_json_obj() for child in self.children] if self.children
else str(self._result),
self._pos,
dict(self._xml_attr) if self.attr_active() else None ] }
data = [ self.tag_name,
[child.to_json_obj() for child in self.children] if self.children
else str(self._result)]
has_attr = self.attr_active()
if self._pos >= 0 or has_attr:
data.append(self._pos)
if has_attr:
data.append(dict(self._xml_attr))
return { '__class__': 'DHParser.Node', 'data': data }
@staticmethod
......@@ -768,7 +772,7 @@ class Node: # (collections.abc.Sized): Base class omitted for cython-compatibil
if json_obj.get('__class__', '') != 'DHParser.Node':
raise ValueError('JSON object: ' + str(json_obj) +
' does not represent a Node object.')
tag_name, result, pos, attr = json_obj['data']
tag_name, result, pos, attr = (json_obj['data'] + [-1, None])[:4]
if isinstance(result, str):
leafhint = True
else:
......@@ -780,12 +784,16 @@ class Node: # (collections.abc.Sized): Base class omitted for cython-compatibil
node.attr.update(attr)
return node
def as_json(self, indent: Optional[int] = 2, ensure_ascii=False) -> str:
    """Serialize this node (and its subtree) as a JSON string.

    :param indent: indentation depth handed through to ``json.dumps``;
        ``None`` selects the most compact, single-line rendering.
    :param ensure_ascii: if True, non-ASCII characters are escaped.
    :return: the JSON representation of ``self.to_json_obj()``.
    """
    if indent is None:
        # compact form: no spaces after separators at all
        separators = (',', ':')
    else:
        separators = (', ', ': ')
    return json.dumps(self.to_json_obj(), indent=indent,
                      ensure_ascii=ensure_ascii, separators=separators)
def serialize(node: Node, how: str='default') -> str:
"""
Serializes the tree starting with `node` either as S-expression, XML
Serializes the tree starting with `node` either as S-expression, XML, JSON,
or in compact form. Possible values for `how` are 'S-expression',
'XML', 'compact' accordingly, or 'AST', 'CST', 'default' in which case
'XML', 'JSON', 'compact' accordingly, or 'AST', 'CST', 'default' in which case
the value of respective configuration variable determines the
serialization format. (See module `configuration.py`.)
"""
......@@ -802,6 +810,8 @@ def serialize(node: Node, how: str='default') -> str:
return node.as_sxpr(flatten_threshold=get_config_value('flatten_sxpr_threshold'))
elif switch == 'xml':
return node.as_xml()
elif switch == 'json':
return node.as_json()
elif switch == 'compact':
return node.as_sxpr(compact=True)
else:
......@@ -947,7 +957,6 @@ class RootNode(Node):
"""
self._result = node._result
self.children = node.children
self._len = node._len
self._pos = node._pos
self.tag_name = node.tag_name
if node.attr_active():
......
......@@ -20,6 +20,7 @@ limitations under the License.
"""
import copy
import json
import sys
sys.path.extend(['../', './'])
......@@ -72,14 +73,23 @@ class TestParseXML:
class TestParseJSON:
def test_roundtrip(self):
tree = parse_sxpr('(a (b c) (d (e f) (h i)))')
d = tree.pick('d')
def setup(self):
self.tree = parse_sxpr('(a (b ä) (d (e ö) (h ü)))')
d = self.tree.pick('d')
d.attr['name'] = "James Bond"
d.attr['id'] = '007'
json_obj_tree = tree.to_json_obj()
def test_json_obj_roundtrip(self):
json_obj_tree = self.tree.to_json_obj()
tree_copy = Node.from_json_obj(json_obj_tree)
assert tree_copy.equals(tree)
assert tree_copy.equals(self.tree)
def test_json_rountrip(self):
s = self.tree.as_json(indent=None, ensure_ascii=True)
tree_copy = Node.from_json_obj(json.loads(s))
assert tree_copy.equals(self.tree)
s = self.tree.as_json(indent=2, ensure_ascii=False)
tree_copy = Node.from_json_obj(json.loads(s))
class TestNode:
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment