
Commit c74091ca authored by Eckhart Arnold

- added type annotations for better documentation and mypy type checks

parent 4589c6b6
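The diff below mixes two annotation styles: Python 3 signature annotations (`def f(x: str) -> Any:`) and comment-style `# type:` hints for attributes assigned in `__init__`. The fragment that follows is a sketch of that style only, not code from the commit; the class name and the exact mypy invocation are illustrative assumptions.

    # Sketch only -- not part of the commit. Mirrors the annotation style used below.
    from typing import List, Tuple

    def line_col(text: str, pos: int) -> Tuple[int, int]:
        """Signature annotations as in the diff; the body is a plausible stand-in."""
        line = text.count('\n', 0, pos) + 1       # 1-based line number
        column = pos - text.rfind('\n', 0, pos)   # 1-based column number
        return line, column

    class Demo:
        def __init__(self) -> None:
            self.errors = []  # type: List[str]   # comment-style hint, also checked by mypy

    # A type check could then be run on the annotated modules, e.g. (paths assumed):
    #     mypy DHParser/dsl.py DHParser/syntaxtree.py DHParser/toolkit.py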
@@ -20,17 +20,18 @@ compilation of domain specific languages based on an EBNF-grammar.
 """
 import os
 try:
     import regex as re
 except ImportError:
     import re
-from .ebnf import EBNFTransformer, grammar_changed, \
-    get_ebnf_scanner, get_ebnf_grammar, get_ebnf_transformer, get_ebnf_compiler
-from .toolkit import logging, load_if_file, is_python_code, compile_python_object
-from .parsers import Grammar, CompilerBase, compile_source, nil_scanner
-from .syntaxtree import Node
+from typing import Any, Tuple, cast
+from DHParser.ebnf import EBNFTransformer, EBNFCompiler, grammar_changed, \
+    get_ebnf_scanner, get_ebnf_grammar, get_ebnf_transformer, get_ebnf_compiler, \
+    ScannerFactoryFunc, ParserFactoryFunc, TransformerFactoryFunc, CompilerFactoryFunc
+from DHParser.toolkit import logging, load_if_file, is_python_code, compile_python_object
+from DHParser.parsers import Grammar, Compiler, compile_source, nil_scanner, ScannerFunc
+from DHParser.syntaxtree import Node, TransformerFunc
 
 __all__ = ['GrammarError',
@@ -71,7 +72,7 @@ try:
 except ImportError:
     import re
 from DHParser.toolkit import logging, is_filename, load_if_file
-from DHParser.parsers import Grammar, CompilerBase, nil_scanner, \\
+from DHParser.parsers import Grammar, Compiler, nil_scanner, \\
     Lookbehind, Lookahead, Alternative, Pop, Required, Token, \\
     Optional, NegativeLookbehind, OneOrMore, RegExp, Retrieve, Sequence, RE, Capture, \\
     ZeroOrMore, Forward, NegativeLookahead, mixin_comment, compile_source, \\
@@ -137,7 +138,7 @@ class CompilationError(Exception):
         return '\n'.join(self.error_messages)
 
 
-def grammar_instance(grammar_representation):
+def grammar_instance(grammar_representation) -> Tuple[Grammar, str]:
     """Returns a grammar object and the source code of the grammar, from
     the given `grammar`-data which can be either a file name, ebnf-code,
     python-code, a Grammar-derived grammar class or an instance of
@@ -167,7 +168,11 @@ def grammar_instance(grammar_representation):
     return parser_root, grammar_src
 
 
-def compileDSL(text_or_file, scanner, dsl_grammar, ast_transformation, compiler):
+def compileDSL(text_or_file: str,
+               scanner: ScannerFunc,
+               dsl_grammar: Grammar,
+               ast_transformation: TransformerFunc,
+               compiler: Compiler) -> Any:
     """Compiles a text in a domain specific language (DSL) with an
     EBNF-specified grammar. Returns the compiled text or raises a
     compilation error.
@@ -176,10 +181,10 @@ def compileDSL(text_or_file, scanner, dsl_grammar, ast_transformation, compiler)
         CompilationError if any errors occurred during compilation
     """
     assert isinstance(text_or_file, str)
-    assert isinstance(compiler, CompilerBase)
-    parser_root, grammar_src = grammar_instance(dsl_grammar)
-    result, errors, AST = compile_source(text_or_file, scanner, parser_root,
+    assert isinstance(compiler, Compiler)
+    parser, grammar_src = grammar_instance(dsl_grammar)
+    result, errors, AST = compile_source(text_or_file, scanner, parser,
                                          ast_transformation, compiler)
     if errors:
         src = load_if_file(text_or_file)
@@ -187,7 +192,7 @@ def compileDSL(text_or_file, scanner, dsl_grammar, ast_transformation, compiler)
     return result
 
 
-def raw_compileEBNF(ebnf_src, branding="DSL"):
+def raw_compileEBNF(ebnf_src: str, branding="DSL") -> EBNFCompiler:
     """Compiles an EBNF grammar file and returns the compiler object
     that was used and which can now be queried for the result as well
     as skeleton code for scanner, transformer and compiler objects.
@@ -208,7 +213,7 @@ def raw_compileEBNF(ebnf_src, branding="DSL"):
     return compiler
 
 
-def compileEBNF(ebnf_src, branding="DSL"):
+def compileEBNF(ebnf_src: str, branding="DSL") -> str:
     """Compiles an EBNF source file and returns the source code of a
     compiler suite with skeletons for scanner, transformer and
     compiler.
@@ -234,7 +239,7 @@ def compileEBNF(ebnf_src, branding="DSL"):
     return '\n'.join(src)
 
 
-def parser_factory(ebnf_src, branding="DSL"):
+def parser_factory(ebnf_src: str, branding="DSL") -> Grammar:
     """Compiles an EBNF grammar and returns a grammar-parser factory
     function for that grammar.
@@ -253,7 +258,8 @@ def parser_factory(ebnf_src, branding="DSL"):
     return compile_python_object(DHPARSER_IMPORTS + grammar_src, 'get_(?:\w+_)?grammar$')
 
 
-def load_compiler_suite(compiler_suite):
+def load_compiler_suite(compiler_suite: str) -> \
+        Tuple[ScannerFactoryFunc, ParserFactoryFunc, TransformerFactoryFunc, CompilerFactoryFunc]:
     """Extracts a compiler suite from file or string ``compiler suite``
     and returns it as a tuple (scanner, parser, ast, compiler).
@@ -282,13 +288,14 @@ def load_compiler_suite(compiler_suite):
         if errors:
             raise GrammarError('\n\n'.join(errors), source)
         scanner = get_ebnf_scanner
+        parser = get_ebnf_grammar
         ast = get_ebnf_transformer
     compiler = compile_python_object(imports + compiler_py, 'get_(?:\w+_)?compiler$')
     return scanner, parser, ast, compiler
 
 
-def is_outdated(compiler_suite, grammar_source):
+def is_outdated(compiler_suite: str, grammar_source: str) -> bool:
     """Returns ``True`` if the ``compile_suite`` needs to be updated.
     An update is needed, if either the grammar in the compieler suite
@@ -313,7 +320,7 @@ def is_outdated(compiler_suite, grammar_source):
         return True
 
 
-def run_compiler(text_or_file, compiler_suite):
+def run_compiler(text_or_file: str, compiler_suite: str) -> Any:
     """Compiles a source with a given compiler suite.
 
     Args:
@@ -336,7 +343,7 @@ def run_compiler(text_or_file, compiler_suite):
     return compileDSL(text_or_file, scanner(), parser(), ast(), compiler())
 
 
-def compile_on_disk(source_file, compiler_suite="", extension=".xml"):
+def compile_on_disk(source_file: str, compiler_suite="", extension=".xml"):
     """Compiles the a source file with a given compiler and writes the
     result to a file.
@@ -373,18 +380,20 @@ def compile_on_disk(source_file, compiler_suite="", extension=".xml"):
     rootname = os.path.splitext(filepath)[0]
     compiler_name = os.path.basename(rootname)
     if compiler_suite:
-        scanner, parser, trans, cfactory = load_compiler_suite(compiler_suite)
+        sfactory, pfactory, tfactory, cfactory = load_compiler_suite(compiler_suite)
     else:
-        scanner = get_ebnf_scanner
-        parser = get_ebnf_grammar
-        trans = get_ebnf_transformer
+        sfactory = get_ebnf_scanner
+        pfactory = get_ebnf_grammar
+        tfactory = get_ebnf_transformer
         cfactory = get_ebnf_compiler
-    compiler1 = cfactory(compiler_name, source_file)
-    result, errors, ast = compile_source(source_file, scanner(), parser(), trans(), compiler1)
+    compiler1 = cfactory()
+    compiler1.set_grammar_name(compiler_name, source_file)
+    result, errors, ast = compile_source(source_file, sfactory(), pfactory(), tfactory(), compiler1)
     if errors:
         return errors
     elif cfactory == get_ebnf_compiler:  # trans == get_ebnf_transformer or trans == EBNFTransformer:  # either an EBNF- or no compiler suite given
+        ebnf_compiler = cast(EBNFCompiler, compiler1)
         global SECTION_MARKER, RX_SECTION_MARKER, SCANNER_SECTION, PARSER_SECTION, \
             AST_SECTION, COMPILER_SECTION, END_SECTIONS_MARKER, RX_WHITESPACE, \
             DHPARSER_MAIN, DHPARSER_IMPORTS
@@ -412,11 +421,11 @@ def compile_on_disk(source_file, compiler_suite="", extension=".xml"):
         if RX_WHITESPACE.fullmatch(imports):
             imports = DHPARSER_IMPORTS
         if RX_WHITESPACE.fullmatch(scanner):
-            scanner = compiler1.gen_scanner_skeleton()
+            scanner = ebnf_compiler.gen_scanner_skeleton()
         if RX_WHITESPACE.fullmatch(ast):
-            ast = compiler1.gen_transformer_skeleton()
+            ast = ebnf_compiler.gen_transformer_skeleton()
         if RX_WHITESPACE.fullmatch(compiler):
-            compiler = compiler1.gen_compiler_skeleton()
+            compiler = ebnf_compiler.gen_compiler_skeleton()
 
         try:
             f = open(rootname + 'Compiler.py', 'w', encoding="utf-8")
@@ -441,6 +450,7 @@ def compile_on_disk(source_file, compiler_suite="", extension=".xml"):
             if f: f.close()
     else:
+        f = None
         try:
             f = open(rootname + extension, 'w', encoding="utf-8")
             if isinstance(result, Node):
...
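For orientation, here is a minimal usage sketch of the API whose signatures were annotated above. The module path `DHParser.dsl` and the file names are assumptions for illustration; the return-value handling follows the annotated signatures and the `if errors: return errors` branch visible in the diff.

    # Illustrative sketch, not part of the commit.
    from DHParser.dsl import compileEBNF, compile_on_disk   # assumed import path

    with open('example.ebnf', 'r', encoding='utf-8') as f:  # hypothetical grammar file
        ebnf_src = f.read()

    # compileEBNF(ebnf_src: str, branding="DSL") -> str
    # returns the source code of a compiler-suite skeleton for the grammar.
    suite_source = compileEBNF(ebnf_src, branding='Demo')
    with open('DemoCompiler.py', 'w', encoding='utf-8') as out:
        out.write(suite_source)

    # compile_on_disk(source_file: str, compiler_suite="", extension=".xml")
    # writes its result next to the source file and returns the error list on failure.
    errors = compile_on_disk('example.ebnf')
    if errors:
        print('\n'.join(errors))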
@@ -25,9 +25,9 @@ try:
     import regex as re
 except ImportError:
     import re
-from typing import NamedTuple
-from .toolkit import log_dir, expand_table, line_col, smart_list
+from typing import Any, Callable, cast, Iterator, NamedTuple, Union, Tuple, List
+from DHParser.toolkit import log_dir, expand_table, line_col, smart_list
 
 __all__ = ['WHITESPACE_PTYPE',
@@ -35,6 +35,7 @@ __all__ = ['WHITESPACE_PTYPE',
            'ZOMBIE_PARSER',
            'Error',
            'Node',
+           'TransformerFunc',
            'key_parser_name',
            'key_tag_name',
            'traverse',
@@ -117,6 +118,11 @@ ZOMBIE_PARSER = ZombieParser()
 Error = NamedTuple('Error', [('pos', int), ('msg', str)])
 
 
+ChildrenType = Tuple['Node', ...]
+ResultType = Union[ChildrenType, str]
+SloppyResultT = Union[ChildrenType, 'Node', str, None]
+
+
 class Node:
     """
     Represents a node in the concrete or abstract syntax tree.
@@ -157,19 +163,21 @@ class Node:
     parsing stage and never during or after the
     AST-transformation.
     """
 
-    def __init__(self, parser, result):
+    def __init__(self, parser, result: SloppyResultT) -> None:
         """Initializes the ``Node``-object with the ``Parser``-Instance
         that generated the node and the parser's result.
         """
+        self._result = ''  # type: ResultType
+        self._errors = []  # type: List[str]
+        self._children = ()  # type: ChildrenType
+        self._len = len(self.result) if not self.children else \
+            sum(child._len for child in self.children)  # type: int
+        # self.pos: int = 0  # continuous updating of pos values
+        self._pos = -1  # type: int
         self.result = result
         self.parser = parser or ZOMBIE_PARSER
-        self._errors = []
-        self.error_flag = any(r.error_flag for r in self.result) if self.children else False
-        self._len = len(self.result) if not self.children else \
-            sum(child._len for child in self.children)
-        # self.pos = 0  # continuous updating of pos values
-        self._pos = -1
+        self.error_flag = any(r.error_flag for r in self.children) \
+            if self.children else False  # type: bool
 
     def __str__(self):
         if self.children:
@@ -190,39 +198,41 @@ class Node:
         return other
 
     @property
-    def tag_name(self):
+    def tag_name(self) -> str:
         return self.parser.name or self.parser.ptype
         # ONLY FOR DEBUGGING: return self.parser.name + ':' + self.parser.ptype
 
     @property
-    def result(self):
+    def result(self) -> ResultType:
         return self._result
 
     @result.setter
-    def result(self, result):
-        assert ((isinstance(result, tuple) and all(isinstance(child, Node) for child in result))
-                or isinstance(result, Node)
-                or isinstance(result, str)), str(result)
+    def result(self, result: SloppyResultT):
+        # # made obsolete by static type checking with mypy is done
+        # assert ((isinstance(result, tuple) and all(isinstance(child, Node) for child in result))
+        #         or isinstance(result, Node)
+        #         or isinstance(result, str)), str(result)
         self._result = (result,) if isinstance(result, Node) else result or ''
-        self._children = self._result if isinstance(self._result, tuple) else ()
+        self._children = cast(ChildrenType, self._result) \
+            if isinstance(self._result, tuple) else cast(ChildrenType, ())
 
     @property
-    def children(self):
+    def children(self) -> ChildrenType:
         return self._children
 
     @property
-    def len(self):
+    def len(self) -> int:
         # DEBUGGING: print(self.tag_name, str(self.pos), str(self._len), str(self)[:10].replace('\n','.'))
         return self._len
 
     @property
-    def pos(self):
+    def pos(self) -> int:
         assert self._pos >= 0, "position value not initialized!"
         return self._pos
 
     @pos.setter
-    def pos(self, pos):
-        assert isinstance(pos, int)
+    def pos(self, pos: int):
+        # assert isinstance(pos, int)
         self._pos = pos
         offset = 0
         for child in self.children:
@@ -230,10 +240,10 @@ class Node:
             offset += child.len
 
     @property
-    def errors(self):
+    def errors(self) -> List[Error]:
         return [Error(self.pos, err) for err in self._errors]
 
-    def _tree_repr(self, tab, openF, closeF, dataF=lambda s: s):
+    def _tree_repr(self, tab, openF, closeF, dataF=lambda s: s) -> str:
         """
         Generates a tree representation of this node and its children
         in string from.
@@ -266,19 +276,19 @@ class Node:
         if self.children:
             content = []
-            for child in self.result:
+            for child in self.children:
                 subtree = child._tree_repr(tab, openF, closeF, dataF).split('\n')
                 content.append('\n'.join((tab + s) for s in subtree))
             return head + '\n'.join(content) + tail
 
-        if head[0] == "<" and self.result.find('\n') < 0:
+        res = cast(str, self.result)  # safe, because if there are no children, result is a string
+        if head[0] == "<" and res.find('\n') < 0:
             # for XML: place tags for leaf-nodes on one line if possible
             return head[:-1] + self.result + tail[1:]
         else:
-            return head + '\n'.join([tab + dataF(s)
-                                     for s in self.result.split('\n')]) + tail
+            return head + '\n'.join([tab + dataF(s) for s in res.split('\n')]) + tail
 
-    def as_sexpr(self, src=None):
+    def as_sexpr(self, src=None) -> str:
         """
         Returns content as S-expression, i.e. in lisp-like form.
@@ -290,7 +300,7 @@ class Node:
         of leaf nodes shall be applied for better readability.
         """
 
-        def opening(node):
+        def opening(node) -> str:
             s = '(' + node.tag_name
             # s += " '(pos %i)" % node.pos
             if src:
@@ -307,7 +317,7 @@ class Node:
         return self._tree_repr('    ', opening, lambda node: ')', pretty)  # pretty if prettyprint else lambda s: s)
 
-    def as_xml(self, src=None):
+    def as_xml(self, src=None) -> str:
         """
         Returns content as XML-tree.
@@ -317,7 +327,7 @@ class Node:
         column.
         """
 
-        def opening(node):
+        def opening(node) -> str:
            s = '<' + node.tag_name
            # s += ' pos="%i"' % node.pos
            if src:
@@ -333,7 +343,7 @@ class Node:
         return self._tree_repr('    ', opening, closing)
 
-    def add_error(self, error_str):
+    def add_error(self, error_str) -> 'Node':
         self._errors.append(error_str)
         self.error_flag = True
         return self
@@ -347,7 +357,7 @@ class Node:
             child.propagate_error_flags()
             self.error_flag |= child.error_flag
 
-    def collect_errors(self, clear_errors=False):
+    def collect_errors(self, clear_errors=False) -> List[Error]:
         """
         Returns all errors of this node or any child node in the form
         of a set of tuples (position, error_message), where position
@@ -358,7 +368,7 @@ class Node:
             self._errors = []
             self.error_flag = False
         if self.children:
-            for child in self.result:
+            for child in self.children:
                 errors.extend(child.collect_errors(clear_errors))
         return errors
@@ -367,7 +377,7 @@ class Node:
             with open(os.path.join(log_dir(), st_file_name), "w", encoding="utf-8") as f:
                 f.write(self.as_sexpr())
 
-    def find(self, match_function):
+    def find(self, match_function) -> Iterator['Node']:
         """Finds nodes in the tree that match a specific criterion.
 
         ``find`` is a generator that yields all nodes for which the
@@ -436,15 +446,18 @@
 ########################################################################
 
 
+TransformerFunc = Union[Callable[[Node], Any], partial]
+
+
 WHITESPACE_PTYPE = ':Whitespace'
 TOKEN_PTYPE = ':Token'
 
 
-def key_parser_name(node):
+def key_parser_name(node) -> str:
     return node.parser.name
 
 
-def key_tag_name(node):
+def key_tag_name(node) -> str:
     return node.tag_name
...
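The new aliases above make the `Node` result handling explicit: `ResultType` is what the `result` property returns (a tuple of child nodes or a string), while `SloppyResultT` is the looser type the setter accepts and normalizes. A small sketch follows, assuming `DHParser.syntaxtree` is importable as in the first diff; a parser of `None` falls back to `ZOMBIE_PARSER` per the `__init__` shown above.

    # Sketch only; the behaviour follows the result setter shown in the diff.
    from DHParser.syntaxtree import Node

    leaf = Node(None, "some text")            # str result -> leaf node, children == ()
    wrapped = Node(None, leaf)                # a single Node is normalized to the 1-tuple (leaf,)
    assert wrapped.children == (leaf,)
    branch = Node(None, (leaf, Node(None, "more")))   # tuple of Nodes -> children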
@@ -38,6 +38,7 @@ try:
     import regex as re
 except ImportError:
     import re
+from typing import List, Tuple
 
 __all__ = ['logging',
@@ -123,7 +124,7 @@ def is_logging():
         return False
 
 
-def line_col(text, pos):
+def line_col(text: str, pos: int) -> Tuple[int, int]:
     """Returns the position within a text as (line, column)-tuple.
     """
     assert pos < len(text), str(pos) + " >= " + str(len(text))
@@ -132,7 +133,7 @@ def line_col(text, pos):
     return line, column
 
 
-def error_messages(source_text, errors):
+def error_messages(source_text, errors) -> List[str]:
     """Returns the sequence or iterator of error objects as an intertor
     of error messages with line and column numbers at the beginning.
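A short usage sketch for the newly annotated toolkit helper; the import path is assumed from the other diffs, and the expected result assumes the conventional 1-based line/column counting that the docstring implies.

    # Sketch only, not from the commit.
    from DHParser.toolkit import line_col

    line, column = line_col("alpha\nbeta", 7)
    # expected (2, 2): position 7 is the second character of the second line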