 from mypy.nodes import (MODULE_REF, MypyFile, Node, ImportBase, Import, ImportFrom, ImportAll)
 from mypy.semanal_pass1 import SemanticAnalyzerPass1
-from mypy.semanal import SemanticAnalyzerPass2
+from mypy.semanal import SemanticAnalyzerPass2, apply_semantic_analyzer_patches
 from mypy.semanal_pass3 import SemanticAnalyzerPass3
 from mypy.checker import TypeChecker
 from mypy.indirection import TypeIndirectionVisitor
@@ -389,7 +389,6 @@ def default_lib_path(data_dir: str,
 CacheMeta = NamedTuple('CacheMeta',
                        [('id', str),
                         ('path', str),
-                        ('memory_only', bool),  # no corresponding json files (fine-grained only)
                         ('mtime', int),
                         ('size', int),
                         ('hash', str),
@@ -415,7 +414,6 @@ def cache_meta_from_dict(meta: Dict[str, Any], data_json: str) -> CacheMeta:
     return CacheMeta(
         meta.get('id', sentinel),
         meta.get('path', sentinel),
-        meta.get('memory_only', False),
         int(meta['mtime']) if 'mtime' in meta else sentinel,
         meta.get('size', sentinel),
         meta.get('hash', sentinel),
@@ -569,7 +567,7 @@ class BuildManager:
       plugin: Active mypy plugin(s)
       errors: Used for reporting all errors
       flush_errors: A function for processing errors after each SCC
-      saved_cache: Dict with saved cache state for dmypy and fine-grained incremental mode
+      saved_cache: Dict with saved cache state for coarse-grained dmypy
                    (read-write!)
       stats: Dict with various instrumentation numbers
     """
@@ -590,6 +588,7 @@ def __init__(self, data_dir: str,
         self.data_dir = data_dir
         self.errors = errors
         self.errors.set_ignore_prefix(ignore_prefix)
+        self.only_load_from_cache = options.use_fine_grained_cache
         self.lib_path = tuple(lib_path)
         self.source_set = source_set
         self.reports = reports
@@ -626,6 +625,8 @@ def all_imported_modules_in_file(self,
 
         Return list of tuples (priority, module id, import line number)
         for all modules imported in file; lower numbers == higher priority.
+
+        Can generate blocking errors on bogus relative imports.
         """
 
         def correct_rel_imp(imp: Union[ImportFrom, ImportAll]) -> str:
@@ -640,6 +641,12 @@ def correct_rel_imp(imp: Union[ImportFrom, ImportAll]) -> str:
                 file_id = ".".join(file_id.split(".")[:-rel])
             new_id = file_id + "." + imp.id if imp.id else file_id
 
+            if not new_id:
+                self.errors.set_file(file.path, file.name())
+                self.errors.report(imp.line, 0,
+                                   "No parent module -- cannot perform relative import",
+                                   blocker=True)
+
             return new_id
 
         res = []  # type: List[Tuple[int, str, int]]
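
As a hedged illustration of the new blocking error (the file pkg/mod.py below is hypothetical, not part of this change): a relative import that climbs above the top-level package leaves new_id empty, so the blocker is now reported here in correct_rel_imp rather than in the dependency loop that this commit removes from parse_file.

    # Hypothetical module pkg/mod.py, where pkg is a top-level package:
    from .. import helpers   # error: No parent module -- cannot perform relative import
    # file_id 'pkg.mod' trimmed by two levels becomes '', so new_id is empty
    # and the report above fires as a blocking error.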
@@ -1129,12 +1136,6 @@ def validate_meta(meta: Optional[CacheMeta], id: str, path: Optional[str],
         manager.log('Metadata abandoned for {}: errors were previously ignored'.format(id))
         return None
 
-    if meta.memory_only:
-        # Special case for fine-grained incremental mode when the JSON file is missing but
-        # we want to cache the module anyway.
-        manager.log('Memory-only metadata for {}'.format(id))
-        return meta
-
     assert path is not None, "Internal error: meta was provided without a path"
     # Check data_json; assume if its mtime matches it's good.
     # TODO: stat() errors
@@ -1623,7 +1624,8 @@ def __init__(self,
                         self.ignore_all = True
                     else:
                         # In 'error' mode, produce special error messages.
-                        manager.log("Skipping %s (%s)" % (path, id))
+                        if id not in manager.missing_modules:
+                            manager.log("Skipping %s (%s)" % (path, id))
                         if follow_imports == 'error':
                             if ancestor_for:
                                 self.skipping_ancestor(id, path, ancestor_for)
@@ -1673,9 +1675,16 @@ def __init__(self,
                                  for id, line in zip(self.meta.dependencies, self.meta.dep_lines)}
             self.child_modules = set(self.meta.child_modules)
         else:
+            # In fine-grained cache mode, pretend we only know about modules that
+            # have cache information and defer handling new modules until the
+            # fine-grained update.
+            if manager.only_load_from_cache:
+                manager.log("Deferring module to fine-grained update %s (%s)" % (path, id))
+                raise ModuleNotFound
+
             # Parse the file (and then some) to get the dependencies.
             self.parse_file()
-            self.suppressed = []
+            self.compute_dependencies()
             self.child_modules = set()
 
     def skipping_ancestor(self, id: str, path: str, ancestor_for: 'State') -> None:
@@ -1830,6 +1839,8 @@ def fix_suppressed_dependencies(self, graph: Graph) -> None:
         """
         # TODO: See if it's possible to move this check directly into parse_file in some way.
         # TODO: Find a way to write a test case for this fix.
+        # TODO: I suspect that splitting compute_dependencies() out from parse_file
+        # obviates the need for this but lacking a test case for the problem this fixed...
         silent_mode = (self.options.ignore_missing_imports or
                        self.options.follow_imports == 'skip')
         if not silent_mode:
@@ -1896,49 +1907,48 @@ def parse_file(self) -> None:
         # TODO: Why can't SemanticAnalyzerPass1.analyze() do this?
         self.tree.names = manager.semantic_analyzer.globals
 
+        self.check_blockers()
+
+    def compute_dependencies(self) -> None:
+        """Compute a module's dependencies after parsing it.
+
+        This is used when we parse a file that we didn't have
+        up-to-date cache information for. When we have an up-to-date
+        cache, we just use the cached info.
+        """
+        manager = self.manager
+        assert self.tree is not None
+
         # Compute (direct) dependencies.
         # Add all direct imports (this is why we needed the first pass).
         # Also keep track of each dependency's source line.
         dependencies = []
-        suppressed = []
         priorities = {}  # type: Dict[str, int]  # id -> priority
         dep_line_map = {}  # type: Dict[str, int]  # id -> line
         for pri, id, line in manager.all_imported_modules_in_file(self.tree):
             priorities[id] = min(pri, priorities.get(id, PRI_ALL))
             if id == self.id:
                 continue
-            # Omit missing modules, as otherwise we could not type-check
-            # programs with missing modules.
-            if id in manager.missing_modules:
-                if id not in dep_line_map:
-                    suppressed.append(id)
-                    dep_line_map[id] = line
-                continue
-            if id == '':
-                # Must be from a relative import.
-                manager.errors.set_file(self.xpath, self.id)
-                manager.errors.report(line, 0,
-                                      "No parent module -- cannot perform relative import",
-                                      blocker=True)
-                continue
             if id not in dep_line_map:
                 dependencies.append(id)
                 dep_line_map[id] = line
         # Every module implicitly depends on builtins.
         if self.id != 'builtins' and 'builtins' not in dep_line_map:
             dependencies.append('builtins')
 
-        # If self.dependencies is already set, it was read from the
-        # cache, but for some reason we're re-parsing the file.
         # NOTE: What to do about race conditions (like editing the
         # file while mypy runs)? A previous version of this code
         # explicitly checked for this, but ran afoul of other reasons
         # for differences (e.g. silent mode).
+
+        # Missing dependencies will be moved from dependencies to
+        # suppressed when they fail to be loaded in load_graph.
         self.dependencies = dependencies
-        self.suppressed = suppressed
+        self.suppressed = []
         self.priorities = priorities
         self.dep_line_map = dep_line_map
-        self.check_blockers()
+
+        self.check_blockers()  # Can fail due to bogus relative imports
 
     def semantic_analysis(self) -> None:
         assert self.tree is not None, "Internal error: method must be called on parsed file only"
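
As a rough, hedged illustration of what compute_dependencies() records (the module pkg/a.py below is hypothetical, priorities are omitted, and pkg.b is assumed to be a real submodule):

    # Hypothetical module pkg/a.py:
    #     import os
    #     from . import b
    # After parse_file() and compute_dependencies(), roughly:
    #     self.dependencies == ['os', 'pkg.b', 'builtins']
    #     self.suppressed   == []   # load_graph later moves modules it cannot load here
    #     self.dep_line_map == {'os': 1, 'pkg.b': 2}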
@@ -1958,9 +1968,7 @@ def semantic_analysis_pass_three(self) -> None:
         self.patches = patches + self.patches
 
     def semantic_analysis_apply_patches(self) -> None:
-        patches_by_priority = sorted(self.patches, key=lambda x: x[0])
-        for priority, patch_func in patches_by_priority:
-            patch_func()
+        apply_semantic_analyzer_patches(self.patches)
 
     def type_check_first_pass(self) -> None:
         if self.options.semantic_analysis_only:
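
The inlined loop removed here suggests what the new apply_semantic_analyzer_patches helper imported from mypy.semanal does; a minimal sketch under that assumption (the real helper's signature may differ):

    from typing import Callable, List, Tuple

    def apply_semantic_analyzer_patches(patches: List[Tuple[int, Callable[[], None]]]) -> None:
        # Mirrors the code removed above: run patch callbacks in priority order.
        for priority, patch_func in sorted(patches, key=lambda x: x[0]):
            patch_func()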
@@ -1992,7 +2000,9 @@ def finish_passes(self) -> None:
             return
         with self.wrap_context():
             # Some tests want to look at the set of all types.
-            if manager.options.use_builtins_fixtures or manager.options.dump_deps:
+            options = manager.options
+            if ((options.use_builtins_fixtures and not options.fine_grained_incremental) or
+                    manager.options.dump_deps):
                 manager.all_types.update(self.type_map())
 
             if self.options.incremental:
@@ -2091,6 +2101,15 @@ def dispatch(sources: List[BuildSource], manager: BuildManager) -> Graph:
     manager.log("Mypy version %s" % __version__)
     t0 = time.time()
     graph = load_graph(sources, manager)
+
+    # This is a kind of unfortunate hack to work around some of fine-grained's
+    # fragility: if we have loaded less than 50% of the specified files from
+    # cache in fine-grained cache mode, load the graph again honestly.
+    if manager.options.use_fine_grained_cache and len(graph) < 0.50 * len(sources):
+        manager.log("Redoing load_graph because too much was missing")
+        manager.only_load_from_cache = False
+        graph = load_graph(sources, manager)
+
     t1 = time.time()
     manager.add_stats(graph_size=len(graph),
                       stubs_found=sum(g.path is not None and g.path.endswith('.pyi')
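
For example (numbers illustrative only): with 200 source files specified but only 90 graph entries recoverable from the fine-grained cache, 90 < 0.50 * 200 holds, so only_load_from_cache is cleared and load_graph() runs a second time, this time parsing the files that had no usable cache entry.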
@@ -2193,13 +2212,19 @@ def dump_graph(graph: Graph) -> None:
     print("[" + ",\n ".join(node.dumps() for node in nodes) + "\n]")
 
 
-def load_graph(sources: List[BuildSource], manager: BuildManager) -> Graph:
+def load_graph(sources: List[BuildSource], manager: BuildManager,
+               old_graph: Optional[Graph] = None) -> Graph:
     """Given some source files, load the full dependency graph.
 
+    If an old_graph is passed in, it is used as the starting point and
+    modified during graph loading.
+
     As this may need to parse files, this can raise CompileError in case
     there are syntax errors.
     """
-    graph = {}  # type: Graph
+
+    graph = old_graph if old_graph is not None else {}  # type: Graph
+
     # The deque is used to implement breadth-first traversal.
     # TODO: Consider whether to go depth-first instead. This may
     # affect the order in which we process files within import cycles.
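
A hedged usage sketch of the widened signature (the second call below is hypothetical; within this change dispatch() still calls load_graph without old_graph):

    graph = load_graph(sources, manager)                        # build a fresh graph, as dispatch() does
    graph = load_graph(more_sources, manager, old_graph=graph)  # reuse and extend the existing graph in place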