 import re
 import shutil
 
-from typing import List, Tuple, Dict, Optional, Set
+from typing import List, Tuple, Optional, cast
 
 from mypy import build
 from mypy.build import BuildManager, BuildSource, Graph
-from mypy.errors import Errors, CompileError
-from mypy.nodes import Node, MypyFile, SymbolTable, SymbolTableNode, TypeInfo, Expression
+from mypy.errors import CompileError
 from mypy.options import Options
-from mypy.server.astmerge import merge_asts
-from mypy.server.subexpr import get_subexpressions
 from mypy.server.update import FineGrainedBuildManager
-from mypy.strconv import StrConv, indent
-from mypy.test.config import test_temp_dir, test_data_prefix
+from mypy.test.config import test_temp_dir
 from mypy.test.data import (
-    parse_test_cases, DataDrivenTestCase, DataSuite, UpdateFile, module_from_path
+    DataDrivenTestCase, DataSuite, UpdateFile, module_from_path
 )
 from mypy.test.helpers import assert_string_arrays_equal, parse_options
-from mypy.test.testtypegen import ignore_node
-from mypy.types import TypeStrVisitor, Type
-from mypy.util import short_type
 from mypy.server.mergecheck import check_consistency
+from mypy.dmypy_server import Server
+from mypy.main import expand_dir
 
 import pytest  # type: ignore  # no pytest in typeshed
 
+# TODO: This entire thing is a weird semi-duplication of testdmypy.
+# One of them should be eliminated and its remaining useful features
+# merged into the other.
 
 # Set to True to perform (somewhat expensive) checks for duplicate AST nodes after merge
 CHECK_CONSISTENCY = False
@@ -75,52 +73,56 @@ def run_case(self, testcase: DataDrivenTestCase) -> None:
             return
 
         main_src = '\n'.join(testcase.input)
+        main_path = os.path.join(test_temp_dir, 'main')
+        with open(main_path, 'w') as f:
+            f.write(main_src)
+
+        server = Server(self.get_options(main_src, testcase, build_cache=False),
+                        alt_lib_path=test_temp_dir)
+
         step = 1
-        sources_override = self.parse_sources(main_src, step)
-        messages, manager, graph = self.build(main_src, testcase, sources_override,
-                                              build_cache=self.use_cache,
-                                              enable_cache=self.use_cache)
+        sources = self.parse_sources(main_src, step)
+        if self.use_cache:
+            messages = self.build(self.get_options(main_src, testcase, build_cache=True), sources)
+        else:
+            messages = self.run_check(server, sources)
+
         a = []
         if messages:
             a.extend(normalize_messages(messages))
 
-        fine_grained_manager = None
-        if not self.use_cache:
-            fine_grained_manager = FineGrainedBuildManager(manager, graph)
+        if server.fine_grained_manager:
             if CHECK_CONSISTENCY:
-                check_consistency(fine_grained_manager)
+                check_consistency(server.fine_grained_manager)
 
         steps = testcase.find_steps()
         all_triggered = []
         for operations in steps:
             step += 1
-            modules = []
             for op in operations:
                 if isinstance(op, UpdateFile):
                     # Modify/create file
+
+                    # In some systems, mtime has a resolution of 1 second which can cause
+                    # annoying-to-debug issues when a file has the same size after a
+                    # change. We manually set the mtime to circumvent this.
+                    new_time = None
+                    if os.path.isfile(op.target_path):
+                        new_time = os.stat(op.target_path).st_mtime + 1
+
                     shutil.copy(op.source_path, op.target_path)
-                    modules.append((op.module, op.target_path))
+                    if new_time:
+                        os.utime(op.target_path, times=(new_time, new_time))
                 else:
                     # Delete file
                     os.remove(op.path)
-                    modules.append((op.module, op.path))
-            sources_override = self.parse_sources(main_src, step)
-            if sources_override is not None:
-                modules = [(module, path)
-                           for module, path in sources_override
-                           if any(m == module for m, _ in modules)]
-
-            # If this is the second iteration and we are using a
-            # cache, now we need to set it up
-            if fine_grained_manager is None:
-                messages, manager, graph = self.build(main_src, testcase, sources_override,
-                                                      build_cache=False, enable_cache=True)
-                fine_grained_manager = FineGrainedBuildManager(manager, graph)
-
-            new_messages = fine_grained_manager.update(modules)
-            if CHECK_CONSISTENCY:
-                check_consistency(fine_grained_manager)
-            all_triggered.append(fine_grained_manager.triggered)
+            sources = self.parse_sources(main_src, step)
+            new_messages = self.run_check(server, sources)
+
+            if server.fine_grained_manager:
+                if CHECK_CONSISTENCY:
+                    check_consistency(server.fine_grained_manager)
+                all_triggered.append(server.fine_grained_manager.triggered)
 
             new_messages = normalize_messages(new_messages)
 
             a.append('==')
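
The mtime workaround introduced in this hunk, shown in isolation as a minimal standalone sketch (hypothetical helper name and paths; not part of the diff):

import os
import shutil


def copy_with_mtime_bump(source_path: str, target_path: str) -> None:
    # If the target already exists, pick an mtime strictly newer than its
    # current one; with 1-second mtime resolution, an equal-size overwrite
    # could otherwise look unchanged to fine-grained change detection.
    new_time = None
    if os.path.isfile(target_path):
        new_time = os.stat(target_path).st_mtime + 1
    shutil.copy(source_path, target_path)
    if new_time is not None:
        # Apply the bumped timestamp to both atime and mtime.
        os.utime(target_path, times=(new_time, new_time))
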
@@ -141,39 +143,39 @@ def run_case(self, testcase: DataDrivenTestCase) -> None:
             'Invalid active triggers ({}, line {})'.format(testcase.file,
                                                            testcase.line))
 
-    def build(self,
-              source: str,
-              testcase: DataDrivenTestCase,
-              sources_override: Optional[List[Tuple[str, str]]],
-              build_cache: bool,
-              enable_cache: bool) -> Tuple[List[str], BuildManager, Graph]:
+    def get_options(self,
+                    source: str,
+                    testcase: DataDrivenTestCase,
+                    build_cache: bool) -> Options:
         # This handles things like '# flags: --foo'.
         options = parse_options(source, testcase, incremental_step=1)
         options.incremental = True
         options.use_builtins_fixtures = True
         options.show_traceback = True
         options.fine_grained_incremental = not build_cache
-        options.use_fine_grained_cache = enable_cache and not build_cache
-        options.cache_fine_grained = enable_cache
+        options.use_fine_grained_cache = self.use_cache and not build_cache
+        options.cache_fine_grained = self.use_cache
         options.local_partial_types = True
+        if options.follow_imports == 'normal':
+            options.follow_imports = 'error'
 
-        main_path = os.path.join(test_temp_dir, 'main')
-        with open(main_path, 'w') as f:
-            f.write(source)
-        if sources_override is not None:
-            sources = [BuildSource(path, module, None)
-                       for module, path in sources_override]
-        else:
-            sources = [BuildSource(main_path, None, None)]
+        return options
+
+    def run_check(self, server: Server, sources: List[BuildSource]) -> List[str]:
+        response = server.check(sources)
+        out = cast(str, response['out'] or response['err'])
+        return out.splitlines()
+
+    def build(self,
+              options: Options,
+              sources: List[BuildSource]) -> List[str]:
         try:
             result = build.build(sources=sources,
                                  options=options,
                                  alt_lib_path=test_temp_dir)
         except CompileError as e:
-            # TODO: We need a manager and a graph in this case as well
-            assert False, str('\n'.join(e.messages))
-            return e.messages, None, None
-        return result.errors, result.manager, result.graph
+            return e.messages
+        return result.errors
 
     def format_triggered(self, triggered: List[List[str]]) -> List[str]:
         result = []
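
For reference, the full shape of the options that get_options() produces, gathered into one place. This is a sketch that mirrors the flag assignments in the hunk above; parse_options() and any '# flags:' from the test case are left out, and the helper name is made up:

from mypy.options import Options


def sketch_options(use_cache: bool, build_cache: bool) -> Options:
    options = Options()
    options.incremental = True
    options.use_builtins_fixtures = True
    options.show_traceback = True
    # build_cache=True: a plain incremental build used to write the cache.
    # build_cache=False: the fine-grained configuration handed to the server.
    options.fine_grained_incremental = not build_cache
    options.use_fine_grained_cache = use_cache and not build_cache
    options.cache_fine_grained = use_cache
    options.local_partial_types = True
    if options.follow_imports == 'normal':
        options.follow_imports = 'error'
    return options
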
@@ -185,11 +187,22 @@ def format_triggered(self, triggered: List[List[str]]) -> List[str]:
         return result
 
     def parse_sources(self, program_text: str,
-                      incremental_step: int) -> Optional[List[Tuple[str, str]]]:
-        """Return target (module, path) tuples for a test case, if not using the defaults.
+                      incremental_step: int) -> List[BuildSource]:
+        """Return target BuildSources for a test case.
+
+        Normally, the unit tests will check all files included in the test
+        case. This differs from how testcheck works by default, as dmypy
+        doesn't currently support following imports.
+
+        You can override this behavior and instruct the tests to check
+        multiple modules by using a comment like this in the test case
+        input:
+
+          # cmd: mypy main a.py
+
+        You can also use `# cmdN:` to have a different cmd for incremental
+        step N (2, 3, ...).
 
-        These are defined through a comment like '# cmd: main a.py' in the test case
-        description.
         """
         m = re.search('# cmd: mypy ([a-zA-Z0-9_./ ]+)$', program_text, flags=re.MULTILINE)
         regex = '# cmd{}: mypy ([a-zA-Z0-9_./ ]+)$'.format(incremental_step)
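
A hypothetical test-case source illustrating the override comments described in the docstring (module names made up; not part of the diff):

# Hypothetical program_text for a test case.
program_text = '''\
# cmd2: mypy main a.py
import a
a.f()
'''

# Step 1: no '# cmd:' line matches, so parse_sources() falls back to the
# default of main plus everything discovered under the temp directory.
# Step 2: the '# cmd2:' line matches, so only main and a.py are checked.
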
@@ -209,9 +222,11 @@ def parse_sources(self, program_text: str,
                 module = module_from_path(path)
                 if module == 'main':
                     module = '__main__'
-                result.append((module, path))
+                result.append(BuildSource(path, module, None))
             return result
-        return None
+        else:
+            base = BuildSource(os.path.join(test_temp_dir, 'main'), '__main__', None)
+            return [base] + expand_dir(test_temp_dir)
 
 
 def normalize_messages(messages: List[str]) -> List[str]:
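
Putting the new pieces together, the minimal flow of a single check against the in-process dmypy Server, as exercised by run_check() above. This is a sketch under the same assumptions as the diff (Server constructor and response keys taken from the hunks), not a complete test harness:

from typing import List

from mypy.build import BuildSource
from mypy.dmypy_server import Server
from mypy.options import Options


def check_once(options: Options, sources: List[BuildSource]) -> List[str]:
    # Start an in-process server and run one fine-grained check; the response
    # is a dict whose 'out'/'err' entries carry mypy's textual output.
    server = Server(options, alt_lib_path='tmp')  # 'tmp' stands in for test_temp_dir
    response = server.check(sources)
    out = response['out'] or response['err']
    return out.splitlines()
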