@@ -315,10 +315,14 @@ def default_lib_path(data_dir: str, pyversion: Tuple[int, int],
315
315
('path' , str ),
316
316
('mtime' , float ),
317
317
('size' , int ),
318
- ('dependencies' , List [str ]),
318
+ ('dependencies' , List [str ]), # names of imported modules
319
319
('data_mtime' , float ), # mtime of data_json
320
320
('data_json' , str ), # path of <id>.data.json
321
+ ('suppressed' , List [str ]), # dependencies that weren't imported
321
322
])
323
+ # NOTE: dependencies + suppressed == all reachable imports;
324
+ # suppressed contains those reachable imports that were prevented by
325
+ # --silent-imports or simply not found.
322
326
323
327
324
328
class BuildManager :
@@ -684,9 +688,10 @@ def find_cache_meta(id: str, path: str, manager: BuildManager) -> Optional[Cache
684
688
meta .get ('path' ),
685
689
meta .get ('mtime' ),
686
690
meta .get ('size' ),
687
- meta .get ('dependencies' ),
691
+ meta .get ('dependencies' , [] ),
688
692
meta .get ('data_mtime' ),
689
693
data_json ,
694
+ meta .get ('suppressed' , []),
690
695
)
691
696
if (m .id != id or m .path != path or
692
697
m .mtime is None or m .size is None or
@@ -710,7 +715,8 @@ def random_string():
710
715
return binascii .hexlify (os .urandom (8 )).decode ('ascii' )
711
716
712
717
713
- def write_cache (id : str , path : str , tree : MypyFile , dependencies : List [str ],
718
+ def write_cache (id : str , path : str , tree : MypyFile ,
719
+ dependencies : List [str ], suppressed : List [str ],
714
720
manager : BuildManager ) -> None :
715
721
"""Write cache files for a module.
716
722
@@ -719,6 +725,7 @@ def write_cache(id: str, path: str, tree: MypyFile, dependencies: List[str],
719
725
path: module path
720
726
tree: the fully checked module data
721
727
dependencies: module IDs on which this module depends
728
+ suppressed: module IDs which were suppressed as dependencies
722
729
manager: the build manager (for pyversion, log/trace)
723
730
"""
724
731
path = os .path .abspath (path )
@@ -746,6 +753,7 @@ def write_cache(id: str, path: str, tree: MypyFile, dependencies: List[str],
746
753
'size' : size ,
747
754
'data_mtime' : data_mtime ,
748
755
'dependencies' : dependencies ,
756
+ 'suppressed' : suppressed ,
749
757
}
750
758
with open (meta_json_tmp , 'w' ) as f :
751
759
json .dump (meta , f , sort_keys = True )
@@ -919,6 +927,7 @@ class State:
919
927
data = None # type: Optional[str]
920
928
tree = None # type: Optional[MypyFile]
921
929
dependencies = None # type: List[str]
930
+ suppressed = None # type: List[str] # Suppressed/missing dependencies
922
931
923
932
# Map each dependency to the line number where it is first imported
924
933
dep_line_map = None # type: Dict[str, int]
@@ -1017,11 +1026,15 @@ def __init__(self,
1017
1026
# TODO: Get mtime if not cached.
1018
1027
self .add_ancestors ()
1019
1028
if self .meta :
1020
- self .dependencies = self .meta .dependencies
1029
+ # Make copies, since we may modify these and want to
1030
+ # compare them to the originals later.
1031
+ self .dependencies = list (self .meta .dependencies )
1032
+ self .suppressed = list (self .meta .suppressed )
1021
1033
self .dep_line_map = {}
1022
1034
else :
1023
1035
# Parse the file (and then some) to get the dependencies.
1024
1036
self .parse_file ()
1037
+ self .suppressed = []
1025
1038
1026
1039
def skipping_ancestor (self , id : str , path : str , ancestor_for : 'State' ) -> None :
1027
1040
# TODO: Read the path (the __init__.py file) and return
@@ -1064,9 +1077,13 @@ def add_ancestors(self) -> None:
1064
1077
1065
1078
def is_fresh (self ) -> bool :
1066
1079
"""Return whether the cache data for this file is fresh."""
1067
- return self .meta is not None
1080
+ # NOTE: self.dependencies may differ from
1081
+ # self.meta.dependencies when a dependency is dropped due to
1082
+ # suppression by --silent-imports. However when a suppressed
1083
+ # dependency is added back we find out later in the process.
1084
+ return self .meta is not None and self .dependencies == self .meta .dependencies
1068
1085
1069
- def clear_fresh (self ) -> None :
1086
+ def mark_stale (self ) -> None :
1070
1087
"""Throw away the cache data for this file, marking it as stale."""
1071
1088
self .meta = None
1072
1089
@@ -1147,17 +1164,24 @@ def parse_file(self) -> None:
1147
1164
# Add all direct imports (this is why we needed the first pass).
1148
1165
# Also keep track of each dependency's source line.
1149
1166
dependencies = []
1167
+ suppressed = []
1150
1168
dep_line_map = {} # type: Dict[str, int] # id -> line
1151
1169
for id , line in manager .all_imported_modules_in_file (self .tree ):
1170
+ if id == self .id :
1171
+ continue
1152
1172
# Omit missing modules, as otherwise we could not type-check
1153
1173
# programs with missing modules.
1154
- if id == self .id or id in manager .missing_modules :
1174
+ if id in manager .missing_modules :
1175
+ if id not in dep_line_map :
1176
+ suppressed .append (id )
1177
+ dep_line_map [id ] = line
1155
1178
continue
1156
1179
if id == '' :
1157
1180
# Must be from a relative import.
1158
1181
manager .errors .set_file (self .xpath )
1159
1182
manager .errors .report (line , "No parent module -- cannot perform relative import" ,
1160
1183
blocker = True )
1184
+ continue
1161
1185
if id not in dep_line_map :
1162
1186
dependencies .append (id )
1163
1187
dep_line_map [id ] = line
@@ -1172,6 +1196,7 @@ def parse_file(self) -> None:
1172
1196
# explicitly checked for this, but ran afoul of other reasons
1173
1197
# for differences (e.g. --silent-imports).
1174
1198
self .dependencies = dependencies
1199
+ self .suppressed = suppressed
1175
1200
self .dep_line_map = dep_line_map
1176
1201
self .check_blockers ()
1177
1202
@@ -1211,7 +1236,9 @@ def type_check(self) -> None:
1211
1236
1212
1237
def write_cache (self ) -> None :
1213
1238
if self .path and INCREMENTAL in self .manager .flags and not self .manager .errors .is_errors ():
1214
- write_cache (self .id , self .path , self .tree , list (self .dependencies ), self .manager )
1239
+ write_cache (self .id , self .path , self .tree ,
1240
+ list (self .dependencies ), list (self .suppressed ),
1241
+ self .manager )
1215
1242
1216
1243
1217
1244
Graph = Dict [str , State ]
@@ -1260,6 +1287,7 @@ def load_graph(sources: List[BuildSource], manager: BuildManager) -> Graph:
1260
1287
except ModuleNotFound :
1261
1288
if dep in st .dependencies :
1262
1289
st .dependencies .remove (dep )
1290
+ st .suppressed .append (dep )
1263
1291
else :
1264
1292
assert newst .id not in graph , newst .id
1265
1293
graph [newst .id ] = newst
@@ -1299,6 +1327,16 @@ def process_graph(graph: Graph, manager: BuildManager) -> None:
1299
1327
deps -= ascc
1300
1328
stale_deps = {id for id in deps if not graph [id ].is_fresh ()}
1301
1329
fresh = fresh and not stale_deps
1330
+ undeps = set ()
1331
+ if fresh :
1332
+ # Check if any dependencies that were suppressed according
1333
+ # to the cache have been added back in this run.
1334
+ # NOTE: Newly suppressed dependencies are handled by is_fresh().
1335
+ for id in scc :
1336
+ undeps .update (graph [id ].suppressed )
1337
+ undeps &= graph .keys ()
1338
+ if undeps :
1339
+ fresh = False
1302
1340
if fresh :
1303
1341
# All cache files are fresh. Check that no dependency's
1304
1342
# cache file is newer than any scc node's cache file.
@@ -1325,6 +1363,8 @@ def process_graph(graph: Graph, manager: BuildManager) -> None:
1325
1363
fresh_msg = "out of date by %.0f seconds" % (newest_in_deps - oldest_in_scc )
1326
1364
else :
1327
1365
fresh_msg = "fresh"
1366
+ elif undeps :
1367
+ fresh_msg = "stale due to changed suppression (%s)" % " " .join (sorted (undeps ))
1328
1368
elif stale_scc :
1329
1369
fresh_msg = "inherently stale (%s)" % " " .join (sorted (stale_scc ))
1330
1370
if stale_deps :
@@ -1357,7 +1397,7 @@ def process_fresh_scc(graph: Graph, scc: List[str]) -> None:
1357
1397
def process_stale_scc (graph : Graph , scc : List [str ]) -> None :
1358
1398
"""Process the modules in one SCC from source code."""
1359
1399
for id in scc :
1360
- graph [id ].clear_fresh ()
1400
+ graph [id ].mark_stale ()
1361
1401
for id in scc :
1362
1402
# We may already have parsed the module, or not.
1363
1403
# If the former, parse_file() is a no-op.
0 commit comments