Skip to content
GitLab
Menu
Projects
Groups
Snippets
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Sign in
Toggle navigation
Menu
Open sidebar
badw-it
DHParser
Commits
663e5268
Commit
663e5268
authored
Dec 26, 2017
by
Eckhart Arnold
Browse files
- source_map test
parent
d18f157c
Changes
2
Show whitespace changes
Inline
Side-by-side
DHParser/preprocess.py
View file @
663e5268
...
@@ -19,10 +19,10 @@ permissions and limitations under the License.
...
@@ -19,10 +19,10 @@ permissions and limitations under the License.
import
bisect
import
bisect
import
collections
import
collections
import
functools
import
functools
from
DHParser.toolkit
import
typing
,
re
from
typing
import
Union
,
Callable
from
typing
import
Union
,
Callable
from
DHParser.toolkit
import
re
__all__
=
(
'RX_TOKEN_NAME'
,
__all__
=
(
'RX_TOKEN_NAME'
,
'BEGIN_TOKEN'
,
'BEGIN_TOKEN'
,
'TOKEN_DELIMITER'
,
'TOKEN_DELIMITER'
,
...
@@ -102,9 +102,9 @@ def tokenized_to_original_mapping(tokenized_source: str) -> SourceMap:
...
@@ -102,9 +102,9 @@ def tokenized_to_original_mapping(tokenized_source: str) -> SourceMap:
d
=
tokenized_source
.
find
(
TOKEN_DELIMITER
,
i
)
d
=
tokenized_source
.
find
(
TOKEN_DELIMITER
,
i
)
e
=
tokenized_source
.
find
(
END_TOKEN
,
i
)
e
=
tokenized_source
.
find
(
END_TOKEN
,
i
)
assert
0
<=
d
<
e
assert
0
<=
d
<
e
o
-=
(
d
-
i
+
2
)
o
-=
(
d
-
i
+
3
)
positions
.
extend
([
d
+
1
,
e
+
1
])
positions
.
extend
([
d
+
1
,
e
+
1
])
offsets
.
extend
([
o
,
o
-
1
])
offsets
.
extend
([
o
+
1
,
o
])
i
=
tokenized_source
.
find
(
BEGIN_TOKEN
,
e
+
1
)
i
=
tokenized_source
.
find
(
BEGIN_TOKEN
,
e
+
1
)
# post conditions
# post conditions
...
...
test/test_preprocess.py
View file @
663e5268
...
@@ -22,10 +22,10 @@ limitations under the License.
...
@@ -22,10 +22,10 @@ limitations under the License.
# import sys
# import sys
# sys.path.append('../')
# sys.path.append('../')
from
DHParser.toolkit
import
re
,
lstrip_docstring
,
logging
from
DHParser.preprocess
import
make_token
,
tokenized_to_original_mapping
,
source_map
,
\
BEGIN_TOKEN
,
END_TOKEN
,
TOKEN_DELIMITER
,
pp_tokenized
from
DHParser.dsl
import
grammar_provider
from
DHParser.dsl
import
grammar_provider
from
DHParser.preprocess
import
make_token
,
tokenized_to_original_mapping
,
source_map
,
\
BEGIN_TOKEN
,
END_TOKEN
,
TOKEN_DELIMITER
from
DHParser.toolkit
import
lstrip_docstring
class
TestMakeToken
:
class
TestMakeToken
:
...
@@ -109,6 +109,14 @@ class TestTokenParsing:
...
@@ -109,6 +109,14 @@ class TestTokenParsing:
self
.
tokenized
=
self
.
preprocess_indentation
(
self
.
code
)
self
.
tokenized
=
self
.
preprocess_indentation
(
self
.
code
)
self
.
srcmap
=
tokenized_to_original_mapping
(
self
.
tokenized
)
self
.
srcmap
=
tokenized_to_original_mapping
(
self
.
tokenized
)
def verify_mapping(self, teststr, orig_text, preprocessed_text):
    """Assert that the position of `teststr` in the preprocessed text maps
    back (via `source_map` and `self.srcmap`) onto an occurrence of the very
    same string in the original text."""
    pos_in_processed = preprocessed_text.find(teststr)
    assert pos_in_processed >= 0
    pos_in_orig = source_map(pos_in_processed, self.srcmap)
    # slice of the original that the mapped position points at
    mapped_back = orig_text[pos_in_orig:pos_in_orig + len(teststr)]
    assert mapped_back == teststr, \
        '"%s" (%i) wrongly mapped onto "%s" (%i)' % \
        (teststr, pos_in_processed, mapped_back, pos_in_orig)
def
test_parse_tokenized
(
self
):
def
test_parse_tokenized
(
self
):
cst
=
self
.
grammar
(
self
.
tokenized
)
cst
=
self
.
grammar
(
self
.
tokenized
)
# for e in cst.collect_errors(self.tokenized):
# for e in cst.collect_errors(self.tokenized):
...
@@ -116,6 +124,13 @@ class TestTokenParsing:
...
@@ -116,6 +124,13 @@ class TestTokenParsing:
# print()
# print()
assert
not
cst
.
error_flag
assert
not
cst
.
error_flag
def test_source_mapping(self):
    """Spot-check several code snippets: each must map from its position in
    the tokenized text back to the matching position in the original code."""
    for sample in ("def func", "x > 0:", "if y > 0:", "print(x)", "print(y)"):
        self.verify_mapping(sample, self.code, self.tokenized)
if
__name__
==
"__main__"
:
if
__name__
==
"__main__"
:
# tp = TestTokenParsing()
# tp = TestTokenParsing()
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment