The expiration time for new job artifacts in CI/CD pipelines is now 30 days (GitLab default). Previously generated artifacts in already completed jobs will not be affected by the change. The latest artifacts for all jobs in the latest successful pipelines will be kept. More information: https://gitlab.lrz.de/help/user/admin_area/settings/continuous_integration.html#default-artifacts-expiration

Commit 43efd6a5 authored by Eckhart Arnold's avatar Eckhart Arnold
Browse files

some refactoring

parent 68c5ee9c
......@@ -38,12 +38,12 @@ import copy
import functools
import os
import traceback
from typing import Any, Optional, Tuple, List, Set, Union, Callable, cast
from typing import Any, Optional, Tuple, List, Set, Dict, Union, Callable, cast
from DHParser.configuration import get_config_value
from DHParser.preprocess import PreprocessorFunc
from DHParser.syntaxtree import Node, RootNode, EMPTY_PTYPE, TreeContext
from DHParser.transform import TransformationFunc
from DHParser.transform import TransformerCallable
from DHParser.parse import Grammar
from DHParser.preprocess import gen_neutral_srcmap_func
from DHParser.error import is_error, is_fatal, Error, FATAL, \
......@@ -54,8 +54,8 @@ from DHParser.toolkit import load_if_file, is_filename
__all__ = ('CompilerError',
'Compiler',
'GrammarCallable',
'CompilerCallable',
'TreeProcessorCallable',
'ResultTuple',
'compile_source',
'visitor_name',
......@@ -287,8 +287,7 @@ def logfile_basename(filename_or_text, function_or_class_or_instance) -> str:
return name[:i] + '_out' if i >= 0 else name
GrammarCallable = Union[Grammar, Callable[[str], RootNode], functools.partial]
CompilerCallable = Union[Compiler, Callable[[Node], Any], functools.partial]
CompilerCallable = Union[Compiler, Callable[[RootNode], Any], functools.partial]
ResultTuple = Tuple[Optional[Any], List[Error], Optional[Node]]
......@@ -305,13 +304,11 @@ def filter_stacktrace(stacktrace: List[str]) -> List[str]:
def compile_source(source: str,
preprocessor: Optional[PreprocessorFunc], # str -> str
parser: GrammarCallable, # str -> Node (concrete syntax tree (CST))
transformer: TransformationFunc, # Node (CST) -> Node (abstract ST (AST))
compiler: CompilerCallable, # Node (AST), Source -> Any
# out_source_data: list = NOPE, # Tuple[str, SourceMapFunc]
*, preserve_AST: bool = False) \
-> Tuple[Optional[Any], List[Error], Optional[Node]]:
preprocessor: Optional[PreprocessorFunc],
parser: Grammar,
transformer: TransformerCallable,
compiler: CompilerCallable,
*, preserve_AST: bool = False) -> ResultTuple:
"""Compiles a source in four stages:
1. Pre-Processing (if needed)
......@@ -459,7 +456,10 @@ class TreeProcessor(Compiler):
return cast(RootNode, result)
def process_tree(tp: TreeProcessor, tree: RootNode) -> Tuple[RootNode, List[Error]]:
TreeProcessorCallable = Union[TreeProcessor, Callable[[RootNode], RootNode], functools.partial]
def process_tree(tp: TreeProcessorCallable, tree: RootNode) -> Tuple[RootNode, List[Error]]:
"""Process a tree with the tree-processor `tp` only if no fatal error
    has occurred so far. Catch any Python exceptions in case
any normal errors have occurred earlier in the processing pipeline.
......@@ -509,17 +509,9 @@ def process_tree(tp: TreeProcessor, tree: RootNode) -> Tuple[RootNode, List[Erro
return tree, messages
# def compiler_factory(compiler_class: Compiler) -> CompilerCallable
#
# def get_compiler() -> CompilerCallable:
# """Returns a thread/process-exclusive Compiler-singleton."""
# THREAD_LOCALS = access_thread_locals()
# try:
# compiler = THREAD_LOCALS.{NAME}_{ID:08d}_compiler_singleton
# except AttributeError:
# THREAD_LOCALS.{NAME}_{ID:08d}_compiler_singleton = {NAME}Compiler()
# compiler = THREAD_LOCALS.{NAME}_{ID:08d}_compiler_singleton
# return compiler
# TODO: Verify compiler against grammar,
# i.e. make sure that for all on_X()-methods, `X` is the name of a parser
......
......@@ -38,7 +38,7 @@ from DHParser.log import suspend_logging, resume_logging, is_logging, log_dir, a
from DHParser.parse import Grammar
from DHParser.preprocess import nil_preprocessor, PreprocessorFunc
from DHParser.syntaxtree import Node
from DHParser.transform import TransformationFunc, TransformationDict
from DHParser.transform import TransformerCallable, TransformationDict
from DHParser.toolkit import DHPARSER_DIR, DHPARSER_PARENTDIR, load_if_file, is_python_code, \
compile_python_object, re, as_identifier, is_filename, relative_path
from typing import Any, cast, List, Tuple, Union, Iterator, Iterable, Optional, \
......@@ -183,7 +183,7 @@ def grammar_instance(grammar_representation) -> Tuple[Grammar, str]:
def compileDSL(text_or_file: str,
preprocessor: Optional[PreprocessorFunc],
dsl_grammar: Union[str, Grammar],
ast_transformation: TransformationFunc,
ast_transformation: TransformerCallable,
compiler: Compiler,
fail_when: ErrorCode = ERROR) -> Any:
"""
......
......@@ -1492,7 +1492,7 @@ from DHParser.syntaxtree import Node, RootNode, WHITESPACE_PTYPE, TOKEN_PTYPE, Z
from DHParser.toolkit import load_if_file, escape_re, escape_ctrl_chars, md5, \
sane_parser_name, re, expand_table, unrepr, compile_python_object, DHPARSER_PARENTDIR, \
cython
from DHParser.transform import TransformationFunc, traverse, remove_brackets, \
from DHParser.transform import TransformerCallable, traverse, remove_brackets, \
reduce_single_child, replace_by_single_child, is_empty, remove_children, \
remove_tokens, flatten, forbid, assert_content, remove_children_if, all_of, not_one_of, \
BLOCK_LEAVES
......@@ -1554,7 +1554,7 @@ from DHParser import start_logging, suspend_logging, resume_logging, is_filename
Option, NegativeLookbehind, OneOrMore, RegExp, Retrieve, Series, Capture, TreeReduction, \\
ZeroOrMore, Forward, NegativeLookahead, Required, CombinedParser, mixin_comment, \\
compile_source, grammar_changed, last_value, matching_bracket, PreprocessorFunc, is_empty, \\
remove_if, Node, TransformationFunc, TransformationDict, transformation_factory, traverse, \\
remove_if, Node, TransformationDict, TransformerCallable, transformation_factory, traverse, \\
remove_children_if, move_adjacent, normalize_whitespace, is_anonymous, matches_re, \\
reduce_single_child, replace_by_single_child, replace_or_reduce, remove_whitespace, \\
replace_by_children, remove_empty, remove_tokens, flatten, all_of, any_of, \\
......@@ -2260,11 +2260,11 @@ EBNF_AST_transformation_table = {
}
def EBNFTransform() -> TransformationFunc:
def EBNFTransform() -> TransformerCallable:
return partial(traverse, processing_table=EBNF_AST_transformation_table.copy())
def get_ebnf_transformer() -> TransformationFunc:
def get_ebnf_transformer() -> TransformerCallable:
THREAD_LOCALS = access_thread_locals()
try:
transformer = THREAD_LOCALS.EBNF_transformer_singleton
......@@ -2290,7 +2290,7 @@ def transform_ebnf(cst: Node) -> None:
PreprocessorFactoryFunc = Callable[[], PreprocessorFunc]
ParserFactoryFunc = Callable[[], Grammar]
TransformerFactoryFunc = Callable[[], TransformationFunc]
TransformerFactoryFunc = Callable[[], TransformerCallable]
CompilerFactoryFunc = Callable[[], Compiler]
PREPROCESSOR_FACTORY = '''
......@@ -2345,7 +2345,7 @@ def parse_{NAME}(document, start_parser = "root_parser__", *, complete_match=Tru
TRANSFORMER_FACTORY = '''
def {NAME}Transformer() -> TransformationFunc:
def {NAME}Transformer() -> TransformerCallable:
"""Creates a transformation function that does not share state with other
threads or processes."""
return partial(traverse, processing_table={NAME}_AST_transformation_table.copy())
......
......@@ -113,6 +113,7 @@ FindIncludeFunc = Union[Callable[[str, int], IncludeInfo], # (document: str,
PreprocessorFunc = Union[Callable[[str, str], PreprocessorResult], # text: str, filename: str
functools.partial]
Tokenizer = Union[Callable[[str], Tuple[str, List[Error]]], functools.partial]
# a function that merely adds preprocessor tokens to a source text
......
......@@ -42,9 +42,9 @@ from DHParser.toolkit import issubtype, isgenerictype, expand_table, smart_list,
__all__ = ('TransformationDict',
'TransformationProc',
'TransformationFunc',
'ConditionFunc',
'KeyFunc',
'TransformerCallable',
'transformation_factory',
'key_tag_name',
'Filter',
......@@ -147,11 +147,11 @@ class Filter:
TransformationProc = Callable[[TreeContext], None]
TransformationDict = Dict[str, Union[Callable, Sequence[Callable]]]
TransformationCache = Dict[str, Tuple[Sequence[Filter], Sequence[Callable]]]
TransformationFunc = Union[Callable[[TreeContext], Any], partial]
ProcessingTableType = Dict[str, Union[Sequence[Callable], TransformationDict]]
ConditionFunc = Callable # Callable[[TreeContext], bool]
KeyFunc = Callable[[Node], str]
CriteriaType = Union[int, str, Callable]
TransformerCallable = Union[Callable[[Node], None], partial]
def transformation_factory(t1=None, t2=None, t3=None, t4=None, t5=None):
......
......@@ -71,7 +71,10 @@ M6: """interface Color {
*/
readonly alpha: decimal;
}"""
M6: """export interface SemanticTokensRegistrationOptions extends
TextDocumentRegistrationOptions, SemanticTokensOptions,
StaticRegistrationOptions {
}"""
[ast:interface]
......
......@@ -191,8 +191,10 @@ def ts2dataclassTransformer() -> TransformationFunc:
threads or processes."""
return partial(traverse, processing_table=ts2dataclass_AST_transformation_table.copy())
get_transformer = ThreadLocalSingletonFactory(ts2dataclassTransformer, ident=1)
def transform_ts2dataclass(cst):
get_transformer()(cst)
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment