-
-
Notifications
You must be signed in to change notification settings - Fork 3.2k
Expand file tree
/
Copy pathtestexportjson.py
More file actions
86 lines (78 loc) · 3.49 KB
/
testexportjson.py
File metadata and controls
86 lines (78 loc) · 3.49 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
"""Test cases for the mypy cache JSON export tool."""
from __future__ import annotations
import json
import os
import re
import sys
from mypy import build
from mypy.errors import CompileError
from mypy.exportjson import convert_binary_cache_meta_to_json, convert_binary_cache_to_json
from mypy.modulefinder import BuildSource
from mypy.options import Options
from mypy.test.config import test_temp_dir
from mypy.test.data import DataDrivenTestCase, DataSuite
from mypy.test.helpers import assert_string_arrays_equal
class TypeExportSuite(DataSuite):
    """Data-driven test suite for the binary-cache-to-JSON export tool.

    Each test case type-checks a small program with the fixed-format cache
    enabled, converts the resulting cache files (data or meta, depending on
    the case name) back to JSON, and compares the sanitized JSON dump
    against the expected output section.
    """

    required_out_section = True
    files = ["exportjson.test"]

    # Library/support modules whose cache contents are not under test.
    _IGNORED_MODULES = (
        "builtins",
        "typing",
        "_typeshed",
        "__future__",
        "typing_extensions",
        "sys",
    )

    def run_case(self, testcase: DataDrivenTestCase) -> None:
        had_error = False
        program_text = "\n".join(testcase.input)
        try:
            options = Options()
            options.use_builtins_fixtures = True
            options.show_traceback = True
            options.allow_empty_bodies = True
            options.fixed_format_cache = True
            main_path = os.path.join(self.base_path, "main.py")
            with open(main_path, "w") as f:
                f.write(program_text)
            result = build.build(
                sources=[BuildSource(main_path, "main")],
                options=options,
                alt_lib_path=test_temp_dir,
            )
            output = result.errors
            had_error = bool(output)
            major, minor = sys.version_info[:2]
            cache_dir = os.path.join(".mypy_cache", f"{major}.{minor}")
            # Cases named "*_meta" exercise the meta-file converter instead
            # of the data-file converter.
            is_meta = testcase.name.endswith("_meta")
            for module in result.files:
                if module in self._IGNORED_MODULES:
                    continue
                data_path = os.path.join(cache_dir, f"{module}.data.ff")
                if is_meta:
                    meta_path = os.path.join(cache_dir, f"{module}.meta.ff")
                    with open(meta_path, "rb") as f:
                        json_data = convert_binary_cache_meta_to_json(f.read(), data_path)
                else:
                    with open(data_path, "rb") as f:
                        json_data = convert_binary_cache_to_json(f.read(), implicit_names=False)
                for line in json.dumps(json_data, indent=4).splitlines():
                    line = self._sanitize_line(line, is_meta)
                    assert "ERROR" not in line, line
                    output.append(line)
        except CompileError as e:
            output = e.messages
            had_error = True
        if had_error or "\n".join(testcase.output).strip() != "<not checked>":
            assert_string_arrays_equal(
                testcase.output, output, f"Invalid output ({testcase.file}, line {testcase.line})"
            )

    @staticmethod
    def _sanitize_line(line: str, is_meta: bool) -> str:
        """Mask environment-dependent values in one line of the JSON dump."""
        if '"path": ' in line:
            # The source file path is unpredictable, so filter it out
            line = re.sub(r'"[^"]+\.pyi?"', "...", line)
        if is_meta:
            if '"version_id"' in line:
                line = re.sub(r'"[0-9][^"]+"', "...", line)
            if '"mtime"' in line or '"data_mtime"' in line:
                line = re.sub(r": [0-9]+", ": ...", line)
            if '"platform"' in line:
                line = re.sub(': "[^"]+"', ": ...", line)
        if '"hash"' not in line:
            # Some hashes are unpredictable so filter them out
            line = re.sub(r'"[a-f0-9]{40}"', '"<hash>"', line)
        return line