Currently job artifacts in CI/CD pipelines on LRZ GitLab never expire. Starting from Wed 26.1.2022 the default expiration time will be 30 days (GitLab default). Currently existing artifacts in already completed jobs will not be affected by the change. The latest artifacts for all jobs in the latest successful pipelines will be kept. More information: https://gitlab.lrz.de/help/user/admin_area/settings/continuous_integration.html#default-artifacts-expiration

Commit 99c6fa7a authored by Eckhart Arnold
Browse files

- error.Error methods for JSON-serialization added (also needed for Node)

parent 91acfa10
......@@ -59,7 +59,7 @@ class ErrorCode(int):
class Error:
__slots__ = ['message', 'level', 'code', '_pos', 'orig_pos', 'line', 'column', '_node_keep']
__slots__ = ['message', 'code', '_pos', 'orig_pos', 'line', 'column']
# error levels
......@@ -133,6 +133,22 @@ class Error:
stop = document.find('\n', self.pos)
return document[start:stop] + '\n' + ' ' * (self.pos - start) + '^\n'
def to_json_obj(self) -> Dict:
"""Serialize Error object as json-object."""
return { '__class__': 'DHParser.Error',
'data': [self.message, self._pos, self.code, self.orig_pos,
self.line, self.column] }
@static
def from_json_obj(self, json_obj: Dict) -> Error:
"""Convert a json object representing an Error object back into an
Error object. Raises a ValueError, if json_obj does not represent
an error object"""
if json_obj.get('__class__', '') != 'DHParser.Error':
raise ValueError('JSON object: ' + str(json_obj) +
' does not represent an Error object.')
return Error(*json_obj['data'])
def is_warning(code: int) -> bool:
"""Returns True, if error is merely a warning."""
......
......@@ -38,7 +38,6 @@ import asyncio
from multiprocessing import Process, Value, Queue
from typing import Callable, Optional, Any
from DHParser.preprocess import BEGIN_TOKEN, END_TOKEN, TOKEN_DELIMITER
from DHParser.toolkit import get_config_value
......@@ -60,6 +59,7 @@ class CompilerServer:
self.compiler = compiler
self.max_source_size = get_config_value('max_source_size')
self.stage = Value('b', SERVER_OFFLINE)
self.server = None # type: Optional[asyncio.base_events.Server]
self.server_messages = Queue() # type: Queue
self.server_process = None # type: Optional[Process]
......@@ -75,13 +75,18 @@ class CompilerServer:
writer.write(data) # for now, only echo
await writer.drain()
writer.close()
# TODO: add these lines in case a terminate signal is received, i.e. exit server coroutine
# gracefully.
# self.server.cancel()
async def serve(self, address: str = '127.0.0.1', port: int = 8888):
server = await asyncio.start_server(self.handle_compilation_request, address, port)
async with server:
self.server = await asyncio.start_server(self.handle_compilation_request, address, port)
print(type(self.server))
async with self.server:
self.stage.value = SERVER_ONLINE
self.server_messages.put(SERVER_ONLINE)
await server.serve_forever()
await self.server.serve_forever()
# self.server.wait_until_closed()
def run_server(self, address: str = '127.0.0.1', port: int = 8888):
self.stage.value = SERVER_STARTING
......@@ -101,7 +106,7 @@ class CompilerServer:
    def terminate_server_process(self):
        """Forcibly stop the server's child process.

        Delegates to ``multiprocessing.Process.terminate()`` (sends SIGTERM
        on Unix).  Assumes the server process has already been started;
        if ``self.server_process`` is still None this raises AttributeError.
        """
        self.server_process.terminate()
def wait_for_termination(self):
def wait_for_termination_request(self):
assert self.server_process
# self.wait_until_server_online()
while self.server_messages.get() != SERVER_TERMINATE:
......
......@@ -170,7 +170,7 @@ class Node: # (collections.abc.Sized): Base class omitted for cython-compatibil
S-Expression-output.
"""
__slots__ = '_result', 'children', '_len', '_pos', 'tag_name', '_xml_attr'
__slots__ = '_result', 'children', '_pos', 'tag_name', '_xml_attr'
def __init__(self, tag_name: str, result: ResultType, leafhint: bool = False) -> None:
"""
......@@ -182,9 +182,7 @@ class Node: # (collections.abc.Sized): Base class omitted for cython-compatibil
# The following if-clause is merely an optimization, i.e. a fast-path for leaf-Nodes
if leafhint:
self._result = result # type: StrictResultType # cast(StrictResultType, result)
# self._content = None # type: Optional[str]
self.children = NoChildren # type: ChildrenType
self._len = -1 # type: int # lazy evaluation
else:
self.result = result
self.tag_name = tag_name # type: str
......@@ -195,7 +193,6 @@ class Node: # (collections.abc.Sized): Base class omitted for cython-compatibil
else:
duplicate = self.__class__(self.tag_name, self.result, True)
duplicate._pos = self._pos
duplicate._len = self._len
if self.attr_active():
duplicate.attr.update(copy.deepcopy(self._xml_attr))
# duplicate._xml_attr = copy.deepcopy(self._xml_attr) # this is not cython compatible
......@@ -223,28 +220,18 @@ class Node: # (collections.abc.Sized): Base class omitted for cython-compatibil
def __len__(self):
if self._len < 0:
self._len = sum(len(child) for child in self.children) \
if self.children else len(self._result)
return self._len
return (sum(len(child) for child in self.children)
if self.children else len(self._result))
def __bool__(self):
# A node that is not None is always True, even if it's empty
"""Returns the bool value of a node, which is always True. The reason
for this is that a boolean test on a variable that can contain a node
or None will only yield `False` in case of None.
"""
return True
# can lead to obscure mistakes, where default object comparison behaviour is expected
# def __eq__(self, other):
# """
# Equality of nodes: Two nodes are considered as equal, if their tag
# name is the same, if their results are equal and if their attributes
# and attribute values are the same.
# """
# return self.tag_name == other.tag_name and self.result == other.result \
# and self.compare_attr(other)
def __hash__(self):
return hash(self.tag_name)
......@@ -761,6 +748,20 @@ class Node: # (collections.abc.Sized): Base class omitted for cython-compatibil
return sum(child.tree_size() for child in self.children) + 1
def to_json_obj(self) -> Dict:
return { '__class__': 'DHParser.Nose',
'content': [ self.tag_name,
[child.to_json_obj() for child in self.children] if self.children
else self.result_,
self._pos,
self._xmlattr if self.attr_active() else None ] }
@static
def from_json_obj(self, json_obj: Dict) -> Error:
pass
def serialize(node: Node, how: str='default') -> str:
"""
Serializes the tree starting with `node` either as S-expression, XML
......@@ -899,7 +900,6 @@ class RootNode(Node):
duplicate.children = NoChildren
duplicate._result = self._result
duplicate._pos = self._pos
duplicate._len = self._len
if self.attr_active():
duplicate.attr.update(copy.deepcopy(self._xml_attr))
# duplicate._xml_attr = copy.deepcopy(self._xml_attr) # this is blocked by cython
......
......@@ -57,7 +57,7 @@ class LaTeXGrammar(Grammar):
paragraph = Forward()
tabular_config = Forward()
text_element = Forward()
source_hash__ = "ce7ad2f909c81bc5eb6aeee0c38874c0"
source_hash__ = "8ec9110209a992e0cf91b283b220fa83"
static_analysis_pending__ = [True]
parser_initialization__ = ["upon instantiation"]
resume_rules__ = {}
......@@ -154,7 +154,7 @@ class LaTeXGrammar(Grammar):
root__ = latexdoc
def get_grammar() -> LaTeXGrammar:
"""Returns a thread/process-exclusive LaTeXGrammar-singleton."""
global GLOBALS
try:
grammar = GLOBALS.LaTeX_00000001_grammar_singleton
except AttributeError:
......
......@@ -49,6 +49,7 @@ class TestServer:
data = await reader.read(100)
print(f'Received: {data.decode()!r}')
writer.close()
asyncio.run(compile('Test', ''))
cs.terminate_server_process()
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment