
Commit 8a222da

Merge branch 'master' into import-cycle
* master:
  New files shouldn't trigger a coarse-grained rebuild in fg cache mode (python#4669)
  Bump version to 0.580-dev
  Update revision history for 0.570 (python#4662)
  Fine-grained: Fix crashes when refreshing synthetic types (python#4667)
  Fine-grained: Support NewType and reset subtype caches (python#4656)
  Fine-grained: Detect changes in additional TypeInfo attributes (python#4659)
  Fine-grained: Apply semantic analyzer patch callbacks (python#4658)
  Optimize fine-grained update by using Graph as the cache (python#4622)
  Cleanup check_reverse_op_method (python#4017)
  Fine-grained: Fix AST merge issues (python#4652)
  Optionally check that we don't have duplicate nodes after AST merge (python#4647)
2 parents 2a37e80 + ac90292 commit 8a222da

33 files changed: 1283 additions & 393 deletions

docs/source/revision_history.rst

Lines changed: 5 additions & 0 deletions
@@ -3,6 +3,11 @@ Revision history
 
 List of major changes:
 
+- March 2018
+   * Publish ``mypy`` version 0.570 on PyPI.
+
+   * Add support for :ref:`attrs_package`.
+
 - December 2017
    * Publish ``mypy`` version 0.560 on PyPI.
 

mypy/build.py

Lines changed: 62 additions & 37 deletions
@@ -35,7 +35,7 @@
 
 from mypy.nodes import (MODULE_REF, MypyFile, Node, ImportBase, Import, ImportFrom, ImportAll)
 from mypy.semanal_pass1 import SemanticAnalyzerPass1
-from mypy.semanal import SemanticAnalyzerPass2
+from mypy.semanal import SemanticAnalyzerPass2, apply_semantic_analyzer_patches
 from mypy.semanal_pass3 import SemanticAnalyzerPass3
 from mypy.checker import TypeChecker
 from mypy.indirection import TypeIndirectionVisitor
@@ -389,7 +389,6 @@ def default_lib_path(data_dir: str,
 CacheMeta = NamedTuple('CacheMeta',
                        [('id', str),
                         ('path', str),
-                        ('memory_only', bool),  # no corresponding json files (fine-grained only)
                         ('mtime', int),
                         ('size', int),
                         ('hash', str),
@@ -415,7 +414,6 @@ def cache_meta_from_dict(meta: Dict[str, Any], data_json: str) -> CacheMeta:
     return CacheMeta(
         meta.get('id', sentinel),
         meta.get('path', sentinel),
-        meta.get('memory_only', False),
         int(meta['mtime']) if 'mtime' in meta else sentinel,
         meta.get('size', sentinel),
         meta.get('hash', sentinel),
@@ -569,7 +567,7 @@ class BuildManager:
       plugin:          Active mypy plugin(s)
       errors:          Used for reporting all errors
       flush_errors:    A function for processing errors after each SCC
-      saved_cache:     Dict with saved cache state for dmypy and fine-grained incremental mode
+      saved_cache:     Dict with saved cache state for coarse-grained dmypy
                        (read-write!)
       stats:           Dict with various instrumentation numbers
     """
@@ -590,6 +588,7 @@ def __init__(self, data_dir: str,
         self.data_dir = data_dir
         self.errors = errors
         self.errors.set_ignore_prefix(ignore_prefix)
+        self.only_load_from_cache = options.use_fine_grained_cache
         self.lib_path = tuple(lib_path)
         self.source_set = source_set
         self.reports = reports
@@ -626,6 +625,8 @@ def all_imported_modules_in_file(self,
 
         Return list of tuples (priority, module id, import line number)
         for all modules imported in file; lower numbers == higher priority.
+
+        Can generate blocking errors on bogus relative imports.
         """
 
         def correct_rel_imp(imp: Union[ImportFrom, ImportAll]) -> str:
@@ -640,6 +641,12 @@ def correct_rel_imp(imp: Union[ImportFrom, ImportAll]) -> str:
                 file_id = ".".join(file_id.split(".")[:-rel])
             new_id = file_id + "." + imp.id if imp.id else file_id
 
+            if not new_id:
+                self.errors.set_file(file.path, file.name())
+                self.errors.report(imp.line, 0,
+                                   "No parent module -- cannot perform relative import",
+                                   blocker=True)
+
             return new_id
 
         res = []  # type: List[Tuple[int, str, int]]
@@ -1129,12 +1136,6 @@ def validate_meta(meta: Optional[CacheMeta], id: str, path: Optional[str],
         manager.log('Metadata abandoned for {}: errors were previously ignored'.format(id))
         return None
 
-    if meta.memory_only:
-        # Special case for fine-grained incremental mode when the JSON file is missing but
-        # we want to cache the module anyway.
-        manager.log('Memory-only metadata for {}'.format(id))
-        return meta
-
     assert path is not None, "Internal error: meta was provided without a path"
     # Check data_json; assume if its mtime matches it's good.
     # TODO: stat() errors
@@ -1623,7 +1624,8 @@ def __init__(self,
                 self.ignore_all = True
             else:
                 # In 'error' mode, produce special error messages.
-                manager.log("Skipping %s (%s)" % (path, id))
+                if id not in manager.missing_modules:
+                    manager.log("Skipping %s (%s)" % (path, id))
                 if follow_imports == 'error':
                     if ancestor_for:
                         self.skipping_ancestor(id, path, ancestor_for)
@@ -1673,9 +1675,16 @@ def __init__(self,
                                  for id, line in zip(self.meta.dependencies, self.meta.dep_lines)}
             self.child_modules = set(self.meta.child_modules)
         else:
+            # In fine-grained cache mode, pretend we only know about modules that
+            # have cache information and defer handling new modules until the
+            # fine-grained update.
+            if manager.only_load_from_cache:
+                manager.log("Deferring module to fine-grained update %s (%s)" % (path, id))
+                raise ModuleNotFound
+
             # Parse the file (and then some) to get the dependencies.
             self.parse_file()
-            self.suppressed = []
+            self.compute_dependencies()
             self.child_modules = set()
 
     def skipping_ancestor(self, id: str, path: str, ancestor_for: 'State') -> None:
@@ -1830,6 +1839,8 @@ def fix_suppressed_dependencies(self, graph: Graph) -> None:
         """
         # TODO: See if it's possible to move this check directly into parse_file in some way.
         # TODO: Find a way to write a test case for this fix.
+        # TODO: I suspect that splitting compute_dependencies() out from parse_file
+        # obviates the need for this but lacking a test case for the problem this fixed...
         silent_mode = (self.options.ignore_missing_imports or
                        self.options.follow_imports == 'skip')
         if not silent_mode:
@@ -1896,49 +1907,48 @@ def parse_file(self) -> None:
             # TODO: Why can't SemanticAnalyzerPass1.analyze() do this?
             self.tree.names = manager.semantic_analyzer.globals
 
+        self.check_blockers()
+
+    def compute_dependencies(self) -> None:
+        """Compute a module's dependencies after parsing it.
+
+        This is used when we parse a file that we didn't have
+        up-to-date cache information for. When we have an up-to-date
+        cache, we just use the cached info.
+        """
+        manager = self.manager
+        assert self.tree is not None
+
         # Compute (direct) dependencies.
         # Add all direct imports (this is why we needed the first pass).
         # Also keep track of each dependency's source line.
         dependencies = []
-        suppressed = []
         priorities = {}  # type: Dict[str, int]  # id -> priority
         dep_line_map = {}  # type: Dict[str, int]  # id -> line
         for pri, id, line in manager.all_imported_modules_in_file(self.tree):
             priorities[id] = min(pri, priorities.get(id, PRI_ALL))
             if id == self.id:
                 continue
-            # Omit missing modules, as otherwise we could not type-check
-            # programs with missing modules.
-            if id in manager.missing_modules:
-                if id not in dep_line_map:
-                    suppressed.append(id)
-                    dep_line_map[id] = line
-                continue
-            if id == '':
-                # Must be from a relative import.
-                manager.errors.set_file(self.xpath, self.id)
-                manager.errors.report(line, 0,
-                                      "No parent module -- cannot perform relative import",
-                                      blocker=True)
-                continue
             if id not in dep_line_map:
                 dependencies.append(id)
                 dep_line_map[id] = line
         # Every module implicitly depends on builtins.
         if self.id != 'builtins' and 'builtins' not in dep_line_map:
             dependencies.append('builtins')
 
-        # If self.dependencies is already set, it was read from the
-        # cache, but for some reason we're re-parsing the file.
         # NOTE: What to do about race conditions (like editing the
         # file while mypy runs)? A previous version of this code
         # explicitly checked for this, but ran afoul of other reasons
         # for differences (e.g. silent mode).
+
+        # Missing dependencies will be moved from dependencies to
+        # suppressed when they fail to be loaded in load_graph.
         self.dependencies = dependencies
-        self.suppressed = suppressed
+        self.suppressed = []
         self.priorities = priorities
         self.dep_line_map = dep_line_map
-        self.check_blockers()
+
+        self.check_blockers()  # Can fail due to bogus relative imports
 
     def semantic_analysis(self) -> None:
         assert self.tree is not None, "Internal error: method must be called on parsed file only"
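A toy model (assumed names, not mypy code) of the new division of labour introduced by this hunk: compute_dependencies records every import optimistically, and suppression of missing modules happens later, when a dependency fails to load during graph construction:

    from typing import List, Set

    def compute_dependencies(imports: List[str]) -> List[str]:
        """Parse-time step: record each import once, with no missing-module
        filtering (that now happens at graph-load time)."""
        deps = []  # type: List[str]
        for dep in imports:
            if dep not in deps:
                deps.append(dep)
        return deps

    def load_step(deps: List[str], loadable: Set[str]) -> List[str]:
        """Graph-load step: move dependencies that fail to load into suppressed."""
        suppressed = [dep for dep in deps if dep not in loadable]
        deps[:] = [dep for dep in deps if dep in loadable]
        return suppressed

    deps = compute_dependencies(["os", "missing_mod", "os"])
    assert deps == ["os", "missing_mod"]
    assert load_step(deps, {"os"}) == ["missing_mod"] and deps == ["os"]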
@@ -1958,9 +1968,7 @@ def semantic_analysis_pass_three(self) -> None:
             self.patches = patches + self.patches
 
     def semantic_analysis_apply_patches(self) -> None:
-        patches_by_priority = sorted(self.patches, key=lambda x: x[0])
-        for priority, patch_func in patches_by_priority:
-            patch_func()
+        apply_semantic_analyzer_patches(self.patches)
 
     def type_check_first_pass(self) -> None:
         if self.options.semantic_analysis_only:
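The new helper itself is not part of this diff; judging from the inlined loop it replaces, it presumably amounts to the following sketch (the signature is assumed from the import added at the top of the file; the body mirrors the removed lines):

    from typing import Callable, List, Tuple

    Patch = Tuple[int, Callable[[], None]]  # (priority, callback)

    def apply_semantic_analyzer_patches(patches: List[Patch]) -> None:
        """Apply patch callbacks in ascending priority order."""
        for priority, patch_func in sorted(patches, key=lambda x: x[0]):
            patch_func()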
@@ -1992,7 +2000,9 @@ def finish_passes(self) -> None:
             return
         with self.wrap_context():
             # Some tests want to look at the set of all types.
-            if manager.options.use_builtins_fixtures or manager.options.dump_deps:
+            options = manager.options
+            if ((options.use_builtins_fixtures and not options.fine_grained_incremental) or
+                    manager.options.dump_deps):
                 manager.all_types.update(self.type_map())
 
             if self.options.incremental:
@@ -2091,6 +2101,15 @@ def dispatch(sources: List[BuildSource], manager: BuildManager) -> Graph:
     manager.log("Mypy version %s" % __version__)
     t0 = time.time()
     graph = load_graph(sources, manager)
+
+    # This is a kind of unfortunate hack to work around some of fine-grained's
+    # fragility: if we have loaded less than 50% of the specified files from
+    # cache in fine-grained cache mode, load the graph again honestly.
+    if manager.options.use_fine_grained_cache and len(graph) < 0.50 * len(sources):
+        manager.log("Redoing load_graph because too much was missing")
+        manager.only_load_from_cache = False
+        graph = load_graph(sources, manager)
+
     t1 = time.time()
     manager.add_stats(graph_size=len(graph),
                       stubs_found=sum(g.path is not None and g.path.endswith('.pyi')
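To make the 50% threshold concrete, a quick illustration with made-up numbers:

    # Hypothetical run: 100 files requested, but only 49 had usable cache entries.
    num_sources = 100
    num_loaded = 49
    if num_loaded < 0.50 * num_sources:  # 49 < 50, so the graph is reloaded honestly
        print("Redoing load_graph because too much was missing")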
@@ -2193,13 +2212,19 @@ def dump_graph(graph: Graph) -> None:
     print("[" + ",\n ".join(node.dumps() for node in nodes) + "\n]")
 
 
-def load_graph(sources: List[BuildSource], manager: BuildManager) -> Graph:
+def load_graph(sources: List[BuildSource], manager: BuildManager,
+               old_graph: Optional[Graph] = None) -> Graph:
     """Given some source files, load the full dependency graph.
 
+    If an old_graph is passed in, it is used as the starting point and
+    modified during graph loading.
+
     As this may need to parse files, this can raise CompileError in case
     there are syntax errors.
     """
-    graph = {}  # type: Graph
+
+    graph = old_graph if old_graph is not None else {}  # type: Graph
+
     # The deque is used to implement breadth-first traversal.
     # TODO: Consider whether to go depth-first instead. This may
     # affect the order in which we process files within import cycles.
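A sketch of how a caller might use the new parameter (the calling code here is hypothetical; only the signature comes from this diff):

    # First pass: build the graph from scratch.
    graph = load_graph(sources, manager)

    # Later pass: seed load_graph with the existing graph so already-loaded
    # State objects are reused instead of being recreated.
    graph = load_graph(sources, manager, old_graph=graph)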
