Commit 7e2ac2ed authored by di68kap

examples fixed

parent 0eba9702
......@@ -678,6 +678,7 @@ comments work as expected::
>>> syntax_tree = extended_parser('What{check this again!}is work?')
>>> print(syntax_tree.errors[0])
1:1: Error (1040): Parser "document = {PBR} [S] paragraph {PBR paragraph} {PBR} [S] _EOF" did not match!
Farthest fail 1:5: {check thi ...
The last error was to be expected, because we did not allow comments
to serve as substitutes for whitespace. The error message might not be
......@@ -936,7 +937,7 @@ example a simple DSL for writing definitions like::
... dog := carnivorous quadrupel that barks
... human := featherless biped'''
Now, let's try to draw up a grammar for definitions::
Now, let's try to draw up a grammar for "definitions"::
>>> def_DSL_first_try = ''' # WARNING: This grammar doesn't work, yet!
... @literalws = right
......@@ -948,6 +949,13 @@ Now, let's try to draw up a grammar for definitions::
... EOF = /$/ '''
>>> def_parser = create_parser(def_DSL_first_try, "defDSL")
Parsing our example with the generated parser yields an error, however::
>>> syntax_tree = def_parser(definitions)
>>> for e in syntax_tree.errors_sorted: print(e)
1:1: Error (1040): Parser "definitions = ~ definition {definition} EOF" did not match!
Farthest fail 2:8: := carn ...
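Incidentally, this commit also records the farthest fail on the grammar
object itself (attributes ``ff_pos__`` and ``ff_parser__``, see the changes
to ``parse.py`` below), so - assuming ``def_parser`` is the ``Grammar``-object
returned by ``create_parser`` - the failure location can presumably also be
queried programmatically::

>>> def_parser.ff_pos__ > 0  # position of the farthest fail  # doctest: +SKIP
True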
Fail-tolerant Parsing
......
......@@ -496,7 +496,7 @@ class Parser:
memoization_state = grammar.suspend_memoization__
grammar.suspend_memoization__ = False
grammar.farthest_failure__ = 0
grammar.ff_pos__ = 0
# now, the actual parser call!
try:
......@@ -551,8 +551,9 @@ class Parser:
rest=text, first_throw=False)
if node is None:
if location > grammar.farthest_failure__:
grammar.farthest_failure__ = location
if location > grammar.ff_pos__:
grammar.ff_pos__ = location
grammar.ff_parser__ = self
else:
node._pos = location
if not grammar.suspend_memoization__:
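This hunk is the core of the change: instead of only remembering the position
of the farthest failure (``farthest_failure__``), the grammar now also
remembers which parser failed there (``ff_parser__``). The bookkeeping
pattern itself is tiny; a standalone sketch of the idea (illustrative only,
not DHParser's actual code):

    # Illustrative sketch of "farthest fail" bookkeeping, not DHParser's code:
    class FarthestFailTracker:
        def __init__(self):
            self.ff_pos = 0        # highest position at which any parser failed
            self.ff_parser = None  # the parser that failed at that position

        def report_failure(self, parser, location):
            # Called on every parse failure; only the right-most failure is
            # kept, because it usually points closest to the actual error.
            if location > self.ff_pos:
                self.ff_pos = location
                self.ff_parser = parser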
......@@ -1046,6 +1047,14 @@ class Grammar:
location to which the parser backtracks. This is done by
calling method :func:`rollback_to__(location)`.
:ivar ff_pos__: The "farthest fail", i.e. the highest location in the
document where a parser failed. This gives a good indication of
where and why parsing failed, if the grammar did not match
a text.
:ivar ff_parser__: The parser that failed at the "farthest fail"-location
`ff_pos__`.
:ivar suspend_memoization__: A flag that, if set, suspends memoization of
results from returning parsers. This flag is needed by the
left-recursion handling algorithm (see `Parser.__call__`
......@@ -1316,7 +1325,8 @@ class Grammar:
# also needed for call stack tracing
self.moving_forward__ = False # type: bool
self.most_recent_error__ = None # type: Optional[ParserError]
self.farthest_failure__ = 0 # type: int
self.ff_pos__ = 0 # type: int
self.ff_parser__ = PARSER_PLACEHOLDER # type: Parser
@property
def reversed__(self) -> StringView:
......@@ -1445,10 +1455,16 @@ class Grammar:
fwd = rest.find("\n") + 1 or len(rest)
skip, rest = rest[:fwd], rest[fwd:]
if result is None:
err_info = '' if not self.history_tracking__ else \
'\n Most advanced fail: %s\n Last match: %s;' % \
(str(HistoryRecord.most_advanced_fail(self.history__)),
str(HistoryRecord.last_match(self.history__)))
if self.history_tracking__:
err_info = '\n Most advanced fail: %s\n Last match: %s;' % \
(str(HistoryRecord.most_advanced_fail(self.history__)),
str(HistoryRecord.last_match(self.history__)))
else:
i = self.ff_pos__
fs = self.document__[i:i + 10]
if i + 10 < len(self.document__) - 1: fs += ' ...'
l, c = line_col(linebreaks(self.document__), i)
err_info = f'\n    Farthest fail {l}:{c}: {fs}'
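# produces messages like "Farthest fail 1:5: {check thi ..." (cf. the doctest above)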
# Check if a Lookahead-Parser did match. Needed for testing, because
# in a test case this is not necessarily an error.
if lookahead_failure_only(parser):
......@@ -1472,7 +1488,7 @@ class Grammar:
error_code = PARSER_LOOKAHEAD_MATCH_ONLY
max_parser_dropouts = -1 # no further retries!
else:
i = self.farthest_failure__ or tail_pos(stitches)
i = self.ff_pos__ or tail_pos(stitches)
fs = self.document__[i:i + 10]
if i + 10 < len(self.document__) - 1: fs += ' ...'
error_msg = "Parser stopped before end, at: " + fs \
......@@ -2668,7 +2684,7 @@ class MandatoryNary(NaryParser):
error = Error(msg, location,
MANDATORY_CONTINUATION_AT_EOF if (failed_on_lookahead and not text_)
else MANDATORY_CONTINUATION,
length = max(self.grammar.farthest_failure__ - location, 1))
length = max(self.grammar.ff_pos__ - location, 1))
grammar.tree__.add_error(err_node, error)
if reloc >= 0:
# signal error to tracer directly, because this error is not raised!
......@@ -3163,7 +3179,7 @@ def _negative_match(grammar, bool_value) -> bool:
else:
# invert farthest failure, because, due to negation, it's not
# a failure any more and should be overwritten by any other failure
grammar.farthest_failure__ = - grammar.farthest_failure__
grammar.ff_pos__ = - grammar.ff_pos__
return True
......
......@@ -42,12 +42,11 @@ from DHParser import start_logging, suspend_logging, resume_logging, is_filename
remove_anonymous_empty, keep_nodes, traverse_locally, strip, lstrip, rstrip, \
transform_content, replace_content_with, forbid, assert_content, remove_infix_operator, \
add_error, error_on, recompile_grammar, left_associative, lean_left, set_config_value, \
get_config_value, XML_SERIALIZATION, SXPRESSION_SERIALIZATION, node_maker, \
INDENTED_SERIALIZATION, JSON_SERIALIZATION, access_thread_locals, access_presets, \
get_config_value, node_maker, access_thread_locals, access_presets, \
finalize_presets, ErrorCode, RX_NEVER_MATCH, set_tracer, resume_notices_on, \
trace_history, has_descendant, neg, has_ancestor, optional_last_value, insert, \
positions_of, replace_tag_names, add_attributes, delimit_children, merge_connected, \
has_attr, has_parent
has_attr, has_parent, ThreadLocalSingletonFactory
#######################################################################
......@@ -74,7 +73,7 @@ class ArithmeticGrammar(Grammar):
r"""Parser for an Arithmetic source file.
"""
expression = Forward()
source_hash__ = "2a01036cad49be914c8bb1cb13c532c7"
source_hash__ = "63317dae9799e961704579764f281fcd"
disposable__ = re.compile('..(?<=^)')
static_analysis_pending__ = [] # type: List[bool]
parser_initialization__ = ["upon instantiation"]
......@@ -100,21 +99,18 @@ class ArithmeticGrammar(Grammar):
root__ = expression
_raw_grammar = ThreadLocalSingletonFactory(ArithmeticGrammar, ident=1)
def get_grammar() -> ArithmeticGrammar:
"""Returns a thread/process-exclusive ArithmeticGrammar-singleton."""
THREAD_LOCALS = access_thread_locals()
try:
grammar = THREAD_LOCALS.Arithmetic_00000001_grammar_singleton
except AttributeError:
THREAD_LOCALS.Arithmetic_00000001_grammar_singleton = ArithmeticGrammar()
if hasattr(get_grammar, 'python_src__'):
THREAD_LOCALS.Arithmetic_00000001_grammar_singleton.python_src__ = get_grammar.python_src__
grammar = THREAD_LOCALS.Arithmetic_00000001_grammar_singleton
grammar = _raw_grammar()
if get_config_value('resume_notices'):
resume_notices_on(grammar)
elif get_config_value('history_tracking'):
set_tracer(grammar, trace_history)
return grammar
def parse_Arithmetic(document, start_parser = "root_parser__", *, complete_match=True):
return get_grammar()(document, start_parser, complete_match)
#######################################################################
......
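The hand-written thread-local singleton above is replaced by DHParser's
``ThreadLocalSingletonFactory``. Roughly, the factory caches one instance
per thread and creates it lazily on first call; a simplified sketch of the
idea (illustrative only, DHParser's real class has additional features such
as the ``ident`` parameter):

    # Simplified sketch of a thread-local singleton factory (illustrative only):
    import threading

    class ThreadLocalSingletonSketch:
        def __init__(self, factory):
            self.factory = factory           # class or zero-argument callable
            self._local = threading.local()  # one slot per thread

        def __call__(self):
            # Create the instance lazily, at most once per thread.
            if not hasattr(self._local, 'instance'):
                self._local.instance = self.factory()
            return self._local.instance

Within one thread, ``_raw_grammar()`` thus always returns the same
``ArithmeticGrammar`` instance, while different threads get separate instances.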
......@@ -75,7 +75,7 @@ class LameArithmeticGrammar(Grammar):
"""
expr = Forward()
term = Forward()
source_hash__ = "69ae2dadf5f31fee7d8ec0d09b3a8659"
source_hash__ = "d68d04adb34c7294d6508a62e1c269d9"
disposable__ = re.compile('..(?<=^)')
static_analysis_pending__ = [] # type: List[bool]
parser_initialization__ = ["upon instantiation"]
......@@ -91,21 +91,18 @@ class LameArithmeticGrammar(Grammar):
root__ = formula
_raw_grammar = ThreadLocalSingletonFactory(LameArithmeticGrammar, ident=1)
def get_grammar() -> LameArithmeticGrammar:
"""Returns a thread/process-exclusive LameArithmeticGrammar-singleton."""
THREAD_LOCALS = access_thread_locals()
try:
grammar = THREAD_LOCALS.LameArithmetic_00000001_grammar_singleton
except AttributeError:
THREAD_LOCALS.LameArithmetic_00000001_grammar_singleton = LameArithmeticGrammar()
if hasattr(get_grammar, 'python_src__'):
THREAD_LOCALS.LameArithmetic_00000001_grammar_singleton.python_src__ = get_grammar.python_src__
grammar = THREAD_LOCALS.LameArithmetic_00000001_grammar_singleton
grammar = _raw_grammar()
if get_config_value('resume_notices'):
resume_notices_on(grammar)
elif get_config_value('history_tracking'):
set_tracer(grammar, trace_history)
return grammar
def parse_LameArithmetic(document, start_parser = "root_parser__", *, complete_match=True):
return get_grammar()(document, start_parser, complete_match)
#######################################################################
......
......@@ -17,8 +17,8 @@ for path in (os.path.join('..', '..'), '.'):
try:
from DHParser import dsl
import DHParser.log
from DHParser import testing, create_test_templates, access_presets, finalize_presets, \
set_preset_value
from DHParser import testing, access_presets, finalize_presets, set_preset_value
from DHParser.testing import create_test_templates
except ModuleNotFoundError:
print('Could not import DHParser. Please adjust sys.path in file '
'"%s" manually' % __file__)
......
......@@ -80,22 +80,22 @@ class ArithmeticRightRecursiveGrammar(Grammar):
PLUS = RegExp('\\+')
i = Text("i")
e = Text("e")
pi = Alternative(Drop(Text("pi")), Drop(Text("π")))
pi = Alternative(Text("pi"), Text("π"))
special = Alternative(pi, e)
number = Synonym(NUMBER)
log = Series(Series(Drop(Text('log(')), dwsp__), expression, Drop(Text(")")), mandatory=1)
tan = Series(Series(Drop(Text('tan(')), dwsp__), expression, Drop(Text(")")), mandatory=1)
cos = Series(Series(Drop(Text('cos(')), dwsp__), expression, Drop(Text(")")), mandatory=1)
sin = Series(Series(Drop(Text('sin(')), dwsp__), expression, Drop(Text(")")), mandatory=1)
log = Series(Series(Drop(Text('log(')), dwsp__), expression, Text(")"), mandatory=1)
tan = Series(Series(Drop(Text('tan(')), dwsp__), expression, Text(")"), mandatory=1)
cos = Series(Series(Drop(Text('cos(')), dwsp__), expression, Text(")"), mandatory=1)
sin = Series(Series(Drop(Text('sin(')), dwsp__), expression, Text(")"), mandatory=1)
function = Alternative(sin, cos, tan, log)
group = Series(Drop(Text("(")), expression, Drop(Text(")")), mandatory=1)
group = Series(Text("("), expression, Text(")"), mandatory=1)
tail_value = Alternative(special, function, VARIABLE, group)
tail_pow = Series(tail_value, Option(i), Drop(Text("^")), element)
tail_pow = Series(tail_value, Option(i), Text("^"), element)
tail_elem = Alternative(tail_pow, tail_value)
value = Series(Alternative(number, tail_value), Option(i))
sign = Alternative(PLUS, MINUS)
add = Series(term, Series(Drop(Text("+")), dwsp__), expression)
pow = Series(value, Drop(Text("^")), Option(sign), element)
pow = Series(value, Text("^"), Option(sign), element)
seq = Series(tail_elem, tail)
sub = Series(term, Series(Drop(Text("-")), dwsp__), expression)
factor = Series(Option(sign), Alternative(Series(Option(element), tail), element), dwsp__)
......
......@@ -18,8 +18,9 @@ for path in (os.path.join('..', '..'), '.'):
try:
from DHParser import dsl
import DHParser.log
from DHParser import testing, create_test_templates, access_presets, finalize_presets, \
from DHParser import testing, access_presets, finalize_presets, \
set_preset_value
from DHParser.testing import create_test_templates
except ModuleNotFoundError:
print('Could not import DHParser. Please adjust sys.path in file '
'"%s" manually' % __file__)
......
......@@ -17,8 +17,9 @@ for path in (os.path.join('..', '..'), '.'):
try:
from DHParser import dsl
import DHParser.log
from DHParser import testing, create_test_templates, access_presets, finalize_presets, \
from DHParser import testing, access_presets, finalize_presets, \
set_preset_value
from DHParser.testing import create_test_templates
except ModuleNotFoundError:
print('Could not import DHParser. Please adjust sys.path in file '
'"%s" manually' % __file__)
......
......@@ -75,7 +75,7 @@ class EBNFGrammar(Grammar):
countable = Forward()
element = Forward()
expression = Forward()
source_hash__ = "d72459c32970e09870946ca46fb612a8"
source_hash__ = "a61d3b1b834c8e16f190d6e5e8b41cf0"
disposable__ = re.compile('component$|pure_elem$|countable$|FOLLOW_UP$|SYM_REGEX$|ANY_SUFFIX$|EOF$')
static_analysis_pending__ = [] # type: List[bool]
parser_initialization__ = ["upon instantiation"]
......
......@@ -42,8 +42,7 @@ from DHParser import start_logging, suspend_logging, resume_logging, is_filename
remove_anonymous_empty, keep_nodes, traverse_locally, strip, lstrip, rstrip, \
transform_content, replace_content_with, forbid, assert_content, remove_infix_operator, \
add_error, error_on, recompile_grammar, left_associative, lean_left, set_config_value, \
get_config_value, XML_SERIALIZATION, SXPRESSION_SERIALIZATION, node_maker, \
INDENTED_SERIALIZATION, JSON_SERIALIZATION, access_thread_locals, access_presets, \
get_config_value, node_maker, access_thread_locals, access_presets, \
finalize_presets, ErrorCode, RX_NEVER_MATCH, set_tracer, resume_notices_on, \
trace_history, has_descendant, neg, has_ancestor, optional_last_value, insert, \
positions_of, replace_tag_names, add_attributes, delimit_children, merge_connected, \
......@@ -76,7 +75,7 @@ class FixedEBNFGrammar(Grammar):
countable = Forward()
element = Forward()
expression = Forward()
source_hash__ = "3db954fa768f359924b256e32786fd0c"
source_hash__ = "8dbc09df6de2f2758e43fc351a3671c7"
disposable__ = re.compile('component$|pure_elem$|countable$|FOLLOW_UP$|SYM_REGEX$|ANY_SUFFIX$|EOF$')
static_analysis_pending__ = [] # type: List[bool]
parser_initialization__ = ["upon instantiation"]
......
......@@ -85,7 +85,7 @@ class jsonGrammar(Grammar):
wsp__ = Whitespace(WSP_RE__)
dwsp__ = Drop(Whitespace(WSP_RE__))
_EOF = NegativeLookahead(RegExp('.'))
EXP = Option(Series(Alternative(Drop(Text("E")), Drop(Text("e"))), Option(Alternative(Drop(Text("+")), Drop(Text("-")))), RegExp('[0-9]+')))
EXP = Option(Series(Alternative(Text("E"), Text("e")), Option(Alternative(Text("+"), Text("-"))), RegExp('[0-9]+')))
DOT = Text(".")
FRAC = Option(Series(DOT, RegExp('[0-9]+')))
NEG = Text("-")
......@@ -98,7 +98,7 @@ class jsonGrammar(Grammar):
null = Series(Text("null"), dwsp__)
bool = Alternative(Series(RegExp('true'), dwsp__), Series(RegExp('false'), dwsp__))
number = Series(INT, FRAC, EXP, dwsp__)
string = Series(Drop(Text('"')), _CHARACTERS, Drop(Text('"')), dwsp__, mandatory=1)
string = Series(Text('"'), _CHARACTERS, Text('"'), dwsp__, mandatory=1)
array = Series(Series(Drop(Text("[")), dwsp__), RegExp('[_element { "," _element }]'), Series(Drop(Text("]")), dwsp__))
member = Series(string, Series(Drop(Text(":")), dwsp__), _element, mandatory=1)
object = Series(Series(Drop(Text("{")), dwsp__), member, ZeroOrMore(Series(Series(Drop(Text(",")), dwsp__), member, mandatory=1)), Series(Drop(Text("}")), dwsp__), mandatory=3)
......
......@@ -75,7 +75,7 @@ class EBNFGrammar(Grammar):
countable = Forward()
element = Forward()
expression = Forward()
source_hash__ = "ddd0814f1e77d77a00fd019cd7349409"
source_hash__ = "ead36885ff2ed0c73538d02d8df5a817"
disposable__ = re.compile('pure_elem$|countable$|FOLLOW_UP$|SYM_REGEX$|ANY_SUFFIX$|EOF$')
static_analysis_pending__ = [] # type: List[bool]
parser_initialization__ = ["upon instantiation"]
......
......@@ -76,7 +76,7 @@ class FixedEBNFGrammar(Grammar):
countable = Forward()
element = Forward()
expression = Forward()
source_hash__ = "52c3a9b478e562f05a8838b06d47da79"
source_hash__ = "0c9d87738d12d9501bf90b9a88560efb"
disposable__ = re.compile('pure_elem$|countable$|FOLLOW_UP$|SYM_REGEX$|ANY_SUFFIX$|EOF$')
static_analysis_pending__ = [] # type: List[bool]
parser_initialization__ = ["upon instantiation"]
......
......@@ -85,7 +85,7 @@ class jsonGrammar(Grammar):
wsp__ = Whitespace(WSP_RE__)
dwsp__ = Drop(Whitespace(WSP_RE__))
_EOF = NegativeLookahead(RegExp('.'))
EXP = Option(Series(Alternative(Drop(Text("E")), Drop(Text("e"))), Option(Alternative(Drop(Text("+")), Drop(Text("-")))), RegExp('[0-9]+')))
EXP = Option(Series(Alternative(Text("E"), Text("e")), Option(Alternative(Text("+"), Text("-"))), RegExp('[0-9]+')))
DOT = Text(".")
FRAC = Option(Series(DOT, RegExp('[0-9]+')))
NEG = Text("-")
......@@ -98,7 +98,7 @@ class jsonGrammar(Grammar):
null = Series(Text("null"), dwsp__)
bool = Alternative(Series(RegExp('true'), dwsp__), Series(RegExp('false'), dwsp__))
number = Series(INT, FRAC, EXP, dwsp__)
string = Series(Drop(Text('"')), _CHARACTERS, Drop(Text('"')), dwsp__, mandatory=1)
string = Series(Text('"'), _CHARACTERS, Text('"'), dwsp__, mandatory=1)
array = Series(Series(Drop(Text("[")), dwsp__), RegExp('[_element { "," _element }]'), Series(Drop(Text("]")), dwsp__))
member = Series(string, Series(Drop(Text(":")), dwsp__), _element, mandatory=1)
object = Series(Series(Drop(Text("{")), dwsp__), member, ZeroOrMore(Series(Series(Drop(Text(",")), dwsp__), member, mandatory=1)), Series(Drop(Text("}")), dwsp__), mandatory=3)
......
......@@ -7,10 +7,11 @@
#######################################################################
import collections
from functools import partial
import os
import sys
from typing import Tuple, List
sys.path.extend([os.path.join('..', '..'), '..', '.'])
......@@ -48,7 +49,8 @@ from DHParser import start_logging, suspend_logging, resume_logging, is_filename
finalize_presets, ErrorCode, RX_NEVER_MATCH, set_tracer, resume_notices_on, \
trace_history, has_descendant, neg, has_ancestor, optional_last_value, insert, \
positions_of, replace_tag_names, add_attributes, delimit_children, merge_connected, \
has_attr, has_parent, ThreadLocalSingletonFactory
has_attr, has_parent, ThreadLocalSingletonFactory, NEVER_MATCH_PATTERN, gen_find_include_func, \
preprocess_includes, make_preprocessor, chain_preprocessors, Error
#######################################################################
......@@ -57,12 +59,26 @@ from DHParser import start_logging, suspend_logging, resume_logging, is_filename
#
#######################################################################
def LyrikPreprocessor(text):
return text, lambda i: i
RE_INCLUDE = NEVER_MATCH_PATTERN
# To capture includes, replace the NEVER_MATCH_PATTERN
# by a pattern with group "name" here, e.g. r'\input{(?P<name>.*)}'
def LyrikTokenizer(original_text) -> Tuple[str, List[Error]]:
# Here, a function body can be filled in that adds preprocessor tokens
# to the source code and returns the modified source.
return original_text, []
def preprocessor_factory() -> PreprocessorFunc:
# below, the second parameter must always be the same as LyrikGrammar.COMMENT__!
find_next_include = gen_find_include_func(RE_INCLUDE, '#.*')
include_prep = partial(preprocess_includes, find_next_include=find_next_include)
tokenizing_prep = make_preprocessor(LyrikTokenizer)
return chain_preprocessors(include_prep, tokenizing_prep)
def get_preprocessor() -> PreprocessorFunc:
return LyrikPreprocessor
get_preprocessor = ThreadLocalSingletonFactory(preprocessor_factory, ident=1)
#######################################################################
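The new scaffolding chains an include-resolving preprocessor with a
tokenizing one. Stripped of DHParser's source-mapping machinery, chaining
preprocessors is plain function composition; a simplified sketch
(illustrative only, the real ``PreprocessorFunc`` also returns
position-mapping information):

    # Simplified sketch: chaining text preprocessors by function composition.
    # DHParser's chain_preprocessors additionally threads source mappings through.
    from functools import reduce
    from typing import Callable

    TextPreprocessor = Callable[[str], str]

    def chain(*preps: TextPreprocessor) -> TextPreprocessor:
        def chained(text: str) -> str:
            return reduce(lambda txt, prep: prep(txt), preps, text)
        return chained

    # usage: prep = chain(include_prep, tokenizing_prep); prep(source_text)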
......@@ -74,7 +90,7 @@ def get_preprocessor() -> PreprocessorFunc:
class LyrikGrammar(Grammar):
r"""Parser for a Lyrik source file.
"""
source_hash__ = "673a4495cbfd5cfbb4c3541f59d2b35f"
source_hash__ = "26385fa0fbbe6e28b8b15d563a5407c9"
disposable__ = re.compile('JAHRESZAHL$|ZEICHENFOLGE$|ENDE$|LEERRAUM$|ziel$|wortfolge$')
static_analysis_pending__ = [] # type: List[bool]
parser_initialization__ = ["upon instantiation"]
......
......@@ -11,6 +11,8 @@ import collections
from functools import partial
import os
import sys
from typing import Tuple, List
sys.path.extend([os.path.join('..', '..'), '..', '.'])
......@@ -48,7 +50,8 @@ from DHParser import start_logging, suspend_logging, resume_logging, is_filename
finalize_presets, ErrorCode, RX_NEVER_MATCH, set_tracer, resume_notices_on, \
trace_history, has_descendant, neg, has_ancestor, optional_last_value, insert, \
positions_of, replace_tag_names, add_attributes, delimit_children, merge_connected, \
has_attr, has_parent
has_attr, has_parent, ThreadLocalSingletonFactory, NEVER_MATCH_PATTERN, gen_find_include_func, \
preprocess_includes, make_preprocessor, chain_preprocessors, Error
#######################################################################
......@@ -57,12 +60,26 @@ from DHParser import start_logging, suspend_logging, resume_logging, is_filename
#
#######################################################################
def LyrikPreprocessor(text):
return text, lambda i: i
RE_INCLUDE = NEVER_MATCH_PATTERN
# To capture includes, replace the NEVER_MATCH_PATTERN
# by a pattern with group "name" here, e.g. r'\input{(?P<name>.*)}'
def LyrikTokenizer(original_text) -> Tuple[str, List[Error]]:
# Here, a function body can be filled in that adds preprocessor tokens
# to the source code and returns the modified source.
return original_text, []
def preprocessor_factory() -> PreprocessorFunc:
# below, the second parameter must always be the same as LyrikGrammar.COMMENT__!
find_next_include = gen_find_include_func(RE_INCLUDE, '#.*')
include_prep = partial(preprocess_includes, find_next_include=find_next_include)
tokenizing_prep = make_preprocessor(LyrikTokenizer)
return chain_preprocessors(include_prep, tokenizing_prep)
def get_preprocessor() -> PreprocessorFunc:
return LyrikPreprocessor
get_preprocessor = ThreadLocalSingletonFactory(preprocessor_factory, ident=1)
#######################################################################
......
......@@ -92,7 +92,7 @@ class LaTeXGrammar(Grammar):
paragraph = Forward()
param_block = Forward()
tabular_config = Forward()
source_hash__ = "cc0e3104f5aa10387b05d19de9edde41"
source_hash__ = "d443c74c1540aca5ee7ed767a0da896e"
disposable__ = re.compile('_\\w+')
static_analysis_pending__ = [] # type: List[bool]
parser_initialization__ = ["upon instantiation"]
......
......@@ -79,7 +79,7 @@ class jsonGrammar(Grammar):
r"""Parser for a json source file.
"""
_element = Forward()
source_hash__ = "bd32b246b5aa5fbdb1e18ac24d1da53b"
source_hash__ = "69d5b7e2833481a1db43787199960580"
disposable__ = re.compile('_[A-Za-z]+|[A-Z]+')
static_analysis_pending__ = [] # type: List[bool]
parser_initialization__ = ["upon instantiation"]
......
......@@ -74,7 +74,7 @@ class json_fail_tolerantGrammar(Grammar):
r"""Parser for a json_fail_tolerant source file.
"""
_element = Forward()
source_hash__ = "d958a85b4f3c116848d2648ca5c1febd"
source_hash__ = "42cb00a4f8192986733859d4709c5b37"
disposable__ = re.compile('..(?<=^)')
static_analysis_pending__ = [] # type: List[bool]
parser_initialization__ = ["upon instantiation"]
......@@ -95,14 +95,14 @@ class json_fail_tolerantGrammar(Grammar):
WSP_RE__ = mixin_comment(whitespace=WHITESPACE__, comment=COMMENT__)
wsp__ = Whitespace(WSP_RE__)
dwsp__ = Drop(Whitespace(WSP_RE__))
_ARRAY_SEPARATOR = Series(NegativeLookahead(Drop(Text("]"))), Lookahead(Drop(Text(","))), Option(Series(Drop(Text(",")), dwsp__)), mandatory=1)
_OBJECT_SEPARATOR = Series(NegativeLookahead(Drop(Text("}"))), Lookahead(Drop(Text(","))), Option(Series(Drop(Text(",")), dwsp__)), mandatory=1)
_ARRAY_SEPARATOR = Series(NegativeLookahead(Text("]")), Lookahead(Text(",")), Option(Series(Drop(Text(",")), dwsp__)), mandatory=1)
_OBJECT_SEPARATOR = Series(NegativeLookahead(Text("}")), Lookahead(Text(",")), Option(Series(Drop(Text(",")), dwsp__)), mandatory=1)
_EOF = NegativeLookahead(RegExp('.'))
EXP = Option(Series(Alternative(Drop(Text("E")), Drop(Text("e"))), Option(Alternative(Drop(Text("+")), Drop(Text("-")))), RegExp('[0-9]+')))
EXP = Option(Series(Alternative(Text("E"), Text("e")), Option(Alternative(Text("+"), Text("-"))), RegExp('[0-9]+')))
DOT = Text(".")
FRAC = Option(Series(DOT, RegExp('[0-9]+')))
NEG = Text("-")
INT = Alternative(Series(Option(NEG), RegExp('[0-9]')), RegExp('[1-9][0-9]+'))
INT = Series(Option(NEG), Alternative(RegExp('[0-9]'), RegExp('[1-9][0-9]+')))
HEX = RegExp('[0-9a-fA-F][0-9a-fA-F]')
UNICODE = Series(Series(Drop(Text("\\u")), dwsp__), HEX, HEX)
ESCAPE = Alternative(RegExp('\\\\[/bnrt\\\\]'), UNICODE)
......@@ -111,7 +111,7 @@ class json_fail_tolerantGrammar(Grammar):
null = Series(Text("null"), dwsp__)
bool = Alternative(Series(RegExp('true'), dwsp__), Series(RegExp('false'), dwsp__))
number = Series(INT, FRAC, EXP, dwsp__)
string = Series(Drop(Text('"')), _CHARACTERS, Drop(Text('"')), dwsp__, mandatory=1)
string = Series(Text('"'), _CHARACTERS, Text('"'), dwsp__, mandatory=1)
array = Series(Series(Drop(Text("[")), dwsp__), Option(Series(_element, ZeroOrMore(Series(_ARRAY_SEPARATOR, _element, mandatory=1)))), Series(Drop(Text("]")), dwsp__))
member = Series(string, Series(Drop(Text(":")), dwsp__), _element, mandatory=1)
object = Series(Series(Drop(Text("{")), dwsp__), member, ZeroOrMore(Series(_OBJECT_SEPARATOR, member, mandatory=1)), Series(Drop(Text("}")), dwsp__), mandatory=3)
......
......@@ -75,16 +75,16 @@ class yamlGrammar(Grammar):
wsp__ = Whitespace(WSP_RE__)
dwsp__ = Drop(Whitespace(WSP_RE__))
EOF = NegativeLookahead(RegExp('.'))
EXP = Option(Series(Alternative(Drop(Text("E")), Drop(Text("e"))), Option(Alternative(Drop(Text("+")), Drop(Text("-")))), RegExp('[0-9]+')))
FRAC = Option(Series(Drop(Text(".")), RegExp('[0-9]+')))
INT = Alternative(Series(Option(Drop(Text("-"))), RegExp('[0-9]')), RegExp('[1-9][0-9]+'))
EXP = Option(Series(Alternative(Text("E"), Text("e")), Option(Alternative(Text("+"), Text("-"))), RegExp('[0-9]+')))
FRAC = Option(Series(Text("."), RegExp('[0-9]+')))
INT = Alternative(Series(Option(Text("-")), RegExp('[0-9]')), RegExp('[1-9][0-9]+'))
HEX = RegExp('[0-9a-fA-F]')
ESCAPE = Alternative(RegExp('\\\\[/bnrt\\\\]'), Series(RegExp('\\\\u'), HEX, HEX, HEX, HEX))
CHARACTERS = ZeroOrMore(Alternative(RegExp('[^"\\\\]+'), ESCAPE))
null = Series(Text("null"), dwsp__)
bool = Alternative(Series(RegExp('true'), dwsp__), Series(RegExp('false'), dwsp__))
number = Series(INT, FRAC, EXP, dwsp__)
string = Series(Drop(Text('"')), CHARACTERS, Drop(Text('"')), dwsp__)
string = Series(Text('"'), CHARACTERS, Text('"'), dwsp__)
array = Series(Series(Drop(Text("[")), dwsp__), Option(Series(value, ZeroOrMore(Series(Series(Drop(Text(",")), dwsp__), value)))), Series(Drop(Text("]")), dwsp__))
member = Series(string, Series(Drop(Text(":")), dwsp__), element)
object = Series(Series(Drop(Text("{")), dwsp__), Option(Series(member, ZeroOrMore(Series(Series(Drop(Text(",")), dwsp__), member)))), Series(Drop(Text("}")), dwsp__))
......