Commit 0cf9ea37 authored by Eckhart Arnold

- DSLsupport.run_compiler now produces callable DSL compiler script stubs; code reorganization: all DHParser modules moved to a subdirectory containing the DHParser package; the dhparser.py script remains in the main directory

parent 139d6128
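
In practice, compiling a grammar now not only writes the scanner/parser/AST/compiler skeleton sections to <name>_compiler.py, but also appends a callable stub (a compile_<name>() function plus an `if __name__ == "__main__"` block), so the generated script can be run or imported directly. A minimal usage sketch, assuming a hypothetical grammar file Arithmetic.ebnf and the new DHParser package layout introduced by this commit:

from DHParser.DSLsupport import run_compiler

# Writes Arithmetic_compiler.py next to the grammar file; with this commit
# the generated file ends with compile_Arithmetic() and a __main__ block.
errors = run_compiler("Arithmetic.ebnf")   # hypothetical file name
if errors:
    for error in errors:
        print(error)
else:
    # The generated module can then be used directly (mirroring the
    # updated PopRetrieve test further below):
    from Arithmetic_compiler import compile_Arithmetic
    result, errors, ast = compile_Arithmetic("ArithmeticExample.txt")  # hypothetical input
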
@@ -29,10 +29,11 @@ try:
except ImportError:
import re
from EBNFcompiler import EBNFGrammar, EBNF_ASTPipeline, EBNFCompiler
from toolkit import IS_LOGGING, load_if_file, is_python_code, md5, compile_python_object
from parsercombinators import GrammarBase, CompilerBase, full_compilation, nil_scanner
from syntaxtree import Node
from .__init__ import __version__
from .EBNFcompiler import EBNFGrammar, EBNF_ASTPipeline, EBNFCompiler
from .toolkit import IS_LOGGING, load_if_file, is_python_code, md5, compile_python_object
from .parsercombinators import GrammarBase, CompilerBase, full_compilation, nil_scanner
from .syntaxtree import Node
__all__ = ['GrammarError',
@@ -88,21 +89,44 @@ class CompilationError(Exception):
DHPARSER_IMPORTS = """
from functools import partial
import sys
try:
import regex as re
except ImportError:
import re
from parsercombinators import GrammarBase, CompilerBase, nil_scanner, \\
from DHParser.toolkit import load_if_file
from DHParser.parsercombinators import GrammarBase, CompilerBase, nil_scanner, \\
Lookbehind, Lookahead, Alternative, Pop, Required, Token, \\
Optional, NegativeLookbehind, OneOrMore, RegExp, Retrieve, Sequence, RE, Capture, \\
ZeroOrMore, Forward, NegativeLookahead, mixin_comment
from syntaxtree import Node, remove_enclosing_delimiters, remove_children_if, \\
ZeroOrMore, Forward, NegativeLookahead, mixin_comment, full_compilation
from DHParser.syntaxtree import Node, remove_enclosing_delimiters, remove_children_if, \\
reduce_single_child, replace_by_single_child, remove_whitespace, TOKEN_KEYWORD, \\
no_operation, remove_expendables, remove_tokens, flatten, WHITESPACE_KEYWORD, \\
is_whitespace, is_expendable
"""
DHPARSER_COMPILER = '''
def compile_{NAME}(source):
    """Compiles ``source`` and returns (result, errors, ast).
    """
    source_text = load_if_file(source)
    return full_compilation({NAME}Scanner(source_text),
                            {NAME}Grammar(), {NAME}_ASTPipeline, {NAME}Compiler())


if __name__ == "__main__":
    if len(sys.argv) > 1:
        result, errors, ast = compile_{NAME}(sys.argv[1])
        if errors:
            for error in errors:
                print(error)
            sys.exit(1)
        else:
            print(result)
    else:
        print("Usage: {NAME}_compiler.py [FILENAME]")
'''
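
For orientation, a minimal sketch of how this template is meant to be instantiated, using a hypothetical grammar name (run_compiler derives the real name from the source file's base name via compiler_name further below, and performs exactly this format() call when writing the suite):

from DHParser.DSLsupport import DHPARSER_COMPILER

# "Arithmetic" is a hypothetical name; the resulting stub defines
# compile_Arithmetic(source) and a __main__ block that prints either the
# compilation result or the collected error messages.
stub = DHPARSER_COMPILER.format(NAME="Arithmetic")
print(stub)
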
def get_grammar_instance(grammar):
"""Returns a grammar object and the source code of the grammar, from
@@ -141,14 +165,14 @@ def load_compiler_suite(compiler_suite):
source = load_if_file(compiler_suite)
if is_python_code(compiler_suite):
try:
intro, syms, scanner_py, parser_py, ast_py, compiler_py, outro = \
intro, imports, scanner_py, parser_py, ast_py, compiler_py, outro = \
RX_SECTION_MARKER.split(source)
except ValueError as error:
raise ValueError('File "' + compiler_suite + '" seems to be corrupted. '
'Please delete or repair file manually.')
scanner = compile_python_object(DHPARSER_IMPORTS + scanner_py, '\w*Scanner$')
ast = compile_python_object(DHPARSER_IMPORTS + ast_py, '\w*Pipeline$')
compiler = compile_python_object(DHPARSER_IMPORTS + compiler_py, '\w*Compiler$')
scanner = compile_python_object(imports + scanner_py, '\w*Scanner$')
ast = compile_python_object(imports + ast_py, '\w*Pipeline$')
compiler = compile_python_object(imports + compiler_py, '\w*Compiler$')
else:
# assume source is an ebnf grammar
parser_py, errors, AST = full_compilation(
@@ -215,6 +239,7 @@ def run_compiler(source_file, compiler_suite="", extension=".xml"):
with open(source_file, encoding="utf-8") as f:
source = f.read()
rootname = os.path.splitext(filepath)[0]
compiler_name = os.path.basename(rootname)
if compiler_suite:
scanner, parser, trans, cclass = load_compiler_suite(compiler_suite)
compiler = cclass()
@@ -222,7 +247,7 @@ def run_compiler(source_file, compiler_suite="", extension=".xml"):
scanner = nil_scanner
parser = EBNFGrammar()
trans = EBNF_ASTPipeline
compiler = EBNFCompiler(os.path.basename(rootname), source)
compiler = EBNFCompiler(compiler_name, source)
result, errors, ast = full_compilation(scanner(source), parser,
trans, compiler)
if errors:
@@ -236,10 +261,10 @@ def run_compiler(source_file, compiler_suite="", extension=".xml"):
try:
f = open(rootname + '_compiler.py', 'r', encoding="utf-8")
source = f.read()
intro, syms, scanner, parser, ast, compiler, outro = RX_SECTION_MARKER.split(source)
intro, imports, scanner, parser, ast, compiler, outro = RX_SECTION_MARKER.split(source)
except (PermissionError, FileNotFoundError, IOError) as error:
intro, outro = '', ''
syms = DHPARSER_IMPORTS
imports = DHPARSER_IMPORTS
scanner = compiler.gen_scanner_skeleton()
ast = compiler.gen_AST_skeleton()
compiler = compiler.gen_compiler_skeleton()
@@ -251,9 +276,10 @@ def run_compiler(source_file, compiler_suite="", extension=".xml"):
try:
f = open(rootname + '_compiler.py', 'w', encoding="utf-8")
f.write("#!/usr/bin/python")
f.write(intro)
f.write(SECTION_MARKER.format(marker=SYMBOLS_SECTION))
f.write(syms)
f.write(imports)
f.write(SECTION_MARKER.format(marker=SCANNER_SECTION))
f.write(scanner)
f.write(SECTION_MARKER.format(marker=PARSER_SECTION))
@@ -264,6 +290,7 @@ def run_compiler(source_file, compiler_suite="", extension=".xml"):
f.write(compiler)
f.write(SECTION_MARKER.format(marker=END_SECTIONS_MARKER))
f.write(outro)
f.write(DHPARSER_COMPILER.format(NAME=compiler_name))
except (PermissionError, FileNotFoundError, IOError) as error:
print('# Could not write file "' + rootname + '_compiler.py" because of: '
+ "\n# ".join(str(error).split('\n)')))
......
@@ -26,13 +26,13 @@ try:
except ImportError:
import re
from toolkit import load_if_file, escape_re, md5, sane_parser_name
from parsercombinators import GrammarBase, mixin_comment, Forward, RE, NegativeLookahead, \
from .__init__ import __version__
from .toolkit import load_if_file, escape_re, md5, sane_parser_name
from .parsercombinators import GrammarBase, mixin_comment, Forward, RE, NegativeLookahead, \
Alternative, Sequence, Optional, Required, OneOrMore, ZeroOrMore, Token, CompilerBase
from syntaxtree import Node, remove_enclosing_delimiters, reduce_single_child, \
from .syntaxtree import Node, remove_enclosing_delimiters, reduce_single_child, \
replace_by_single_child, TOKEN_KEYWORD, remove_expendables, remove_tokens, flatten, \
WHITESPACE_KEYWORD
from __init__ import __version__
__all__ = ['EBNFGrammar',
@@ -202,7 +202,7 @@ class EBNFCompiler(CompilerBase):
self.grammar_name + '-grammar']
for name in self.definition_names:
transtable.append(' "' + name + '": no_operation,')
transtable += [' "": no_operation', '}', '', pl_name + ' = [%s]' % tt_name]
transtable += [' "": no_operation', '}', '', pl_name + ' = [%s]' % tt_name, '']
return '\n'.join(transtable)
def gen_compiler_skeleton(self):
@@ -225,7 +225,7 @@ class EBNFCompiler(CompilerBase):
else:
compiler += [' def ' + name + '(self, node):',
' pass', '']
return '\n'.join(compiler + [''])
return '\n'.join(compiler)
def gen_parser(self, definitions):
# fix capture of variables that have been defined before usage [sic!]
......
@@ -22,3 +22,5 @@ import os
__version__ = '0.5.4' + '_dev' + str(os.stat(__file__).st_mtime)
__all__ = ['toolkit', 'syntaxtree', 'parsercombinators', 'EBNFcompiler', 'DSLsupport']
__author__ = "Eckhart Arnold <eckhart.arnold@posteo.de>"
__copyright__ = "http://www.apache.org/licenses/LICENSE-2.0"
@@ -59,8 +59,8 @@ try:
except ImportError:
import re
from toolkit import IS_LOGGING, LOGS_DIR, escape_re, sane_parser_name, sequence
from syntaxtree import WHITESPACE_KEYWORD, TOKEN_KEYWORD, ZOMBIE_PARSER, Node, \
from .toolkit import IS_LOGGING, LOGS_DIR, escape_re, sane_parser_name, sequence
from .syntaxtree import WHITESPACE_KEYWORD, TOKEN_KEYWORD, ZOMBIE_PARSER, Node, \
error_messages, traverse
......
@@ -29,7 +29,7 @@ except ImportError:
import re
from typing import NamedTuple
from toolkit import IS_LOGGING, LOGS_DIR, expand_table, line_col, sequence
from .toolkit import IS_LOGGING, LOGS_DIR, expand_table, line_col, sequence
__all__ = ['WHITESPACE_KEYWORD',
......
@@ -18,13 +18,15 @@ implied. See the License for the specific language governing
permissions and limitations under the License.
"""
#TODO: This is still a stub...
import os
import sys
from functools import partial
from DSLsupport import compileDSL, run_compiler
from EBNFcompiler import EBNFGrammar, EBNF_ASTPipeline, EBNFCompiler
from parsercombinators import full_compilation
from DHParser.DSLsupport import compileDSL, run_compiler
from DHParser.EBNFcompiler import EBNFGrammar, EBNF_ASTPipeline, EBNFCompiler
from DHParser.parsercombinators import full_compilation
def selftest(file_name):
@@ -34,8 +36,7 @@ def selftest(file_name):
compiler_name = os.path.basename(os.path.splitext(file_name)[0])
compiler = EBNFCompiler(compiler_name, grammar)
parser = EBNFGrammar()
result, errors, syntax_tree = full_compilation(grammar,
parser, EBNF_ASTPipeline, compiler)
result, errors, syntax_tree = full_compilation(grammar, parser, EBNF_ASTPipeline, compiler)
print(result)
if errors:
print(errors)
......
#!/usr/bin/python
#######################################################################
#
@@ -8,15 +8,17 @@
from functools import partial
import sys
try:
import regex as re
except ImportError:
import re
from parsercombinators import GrammarBase, CompilerBase, nil_scanner, \
from DHParser.toolkit import load_if_file
from DHParser.parsercombinators import GrammarBase, CompilerBase, nil_scanner, \
Lookbehind, Lookahead, Alternative, Pop, Required, Token, \
Optional, NegativeLookbehind, OneOrMore, RegExp, Retrieve, Sequence, RE, Capture, \
ZeroOrMore, Forward, NegativeLookahead, mixin_comment
from syntaxtree import Node, remove_enclosing_delimiters, remove_children_if, \
ZeroOrMore, Forward, NegativeLookahead, mixin_comment, full_compilation
from DHParser.syntaxtree import Node, remove_enclosing_delimiters, remove_children_if, \
reduce_single_child, replace_by_single_child, remove_whitespace, TOKEN_KEYWORD, \
no_operation, remove_expendables, remove_tokens, flatten, WHITESPACE_KEYWORD, \
is_whitespace, is_expendable
@@ -47,7 +49,7 @@ class PopRetrieveGrammar(GrammarBase):
delimiter_sign = /`+/
text = /[^`]+/
"""
source_hash__ = "48a3fd5a35aeaa7ce1729e09c65594b0"
source_hash__ = "a418b812a36733a4713eb4e06322e1b5"
parser_initialization__ = "upon instatiation"
COMMENT__ = r''
WSP__ = mixin_comment(whitespace=r'[ ]*', comment=r'')
@@ -79,6 +81,7 @@ PopRetrieve_ASTTransform = {
PopRetrieve_ASTPipeline = [PopRetrieve_ASTTransform]
#######################################################################
#
# COMPILER SECTION - Can be edited. Changes will be preserved.
@@ -109,10 +112,28 @@ class PopRetrieveCompiler(CompilerBase):
pass
#######################################################################
#
# END OF PYDSL-SECTIONS
# END OF DHPARSER-SECTIONS
#
#######################################################################
def compile_PopRetrieve(source):
    """Compiles ``source`` and returns (result, errors, ast).
    """
    source_text = load_if_file(source)
    return full_compilation(PopRetrieveScanner(source_text),
                            PopRetrieveGrammar(), PopRetrieve_ASTPipeline, PopRetrieveCompiler())


if __name__ == "__main__":
    if len(sys.argv) > 1:
        result, errors, ast = compile_PopRetrieve(sys.argv[1])
        if errors:
            for error in errors:
                print(error)
            sys.exit(1)
        else:
            print(result)
    else:
        print("Usage: PopRetrieve_compiler.py [FILENAME]")
@@ -23,7 +23,7 @@ limitations under the License.
import os
import sys
sys.path.append(os.path.abspath('../../'))
from DSLsupport import run_compiler, source_changed
from DHParser.DSLsupport import run_compiler, source_changed
if (not os.path.exists('PopRetrieve_compiler.py') or
source_changed('PopRetrieve.ebnf', 'PopRetrieve_compiler.py')):
@@ -33,12 +33,29 @@ if (not os.path.exists('PopRetrieve_compiler.py') or
print(errors)
sys.exit(1)
errors = run_compiler("PopRetrieveTest.txt", 'PopRetrieve_compiler.py')
from PopRetrieve_compiler import compile_PopRetrieve

result, errors, ast = compile_PopRetrieve("PopRetrieveTest.txt")
if errors:
    print(errors)
    sys.exit(1)
else:
    print(result)

errors = run_compiler("PopRetrieveTest2.txt", 'PopRetrieve_compiler.py')
result, errors, ast = compile_PopRetrieve("PopRetrieveTest2.txt")
if errors:
    print(errors)
    sys.exit(1)
else:
    print(result)
# errors = run_compiler("PopRetrieveTest.txt", 'PopRetrieve_compiler.py')
# if errors:
# print(errors)
# sys.exit(1)
#
# errors = run_compiler("PopRetrieveTest2.txt", 'PopRetrieve_compiler.py')
# if errors:
# print(errors)
# sys.exit(1)
@@ -23,7 +23,7 @@ limitations under the License.
import os
import sys
sys.path.append(os.path.abspath('../../'))
from DSLsupport import compileEBNF, run_compiler, source_changed
from DHParser.DSLsupport import compileEBNF, run_compiler, source_changed
WRITE_LOGS = True
......
@@ -24,7 +24,7 @@ import os
import re
import sys
sys.path.append(os.path.abspath('../../'))
from syntaxtree import Node, compact_sexpr
from DHParser.syntaxtree import Node, compact_sexpr
class DummyParser:
def __init__(self, name=''):
......