diff --git a/mypy/build.py b/mypy/build.py
index 817cb5ea9a1f..ce80256eb2e2 100644
--- a/mypy/build.py
+++ b/mypy/build.py
@@ -281,6 +281,7 @@ def default_lib_path(data_dir: str, pyversion: Tuple[int, int]) -> List[str]:
                         ('data_mtime', float),  # mtime of data_json
                         ('data_json', str),  # path of .data.json
                         ('suppressed', List[str]),  # dependencies that weren't imported
+                        ('child_modules', List[str]),  # all submodules of the given module
                         ('options', Optional[Dict[str, bool]]),  # build options
                         ('dep_prios', List[int]),
                         ])
@@ -726,6 +727,7 @@ def find_cache_meta(id: str, path: str, manager: BuildManager) -> Optional[Cache
                   meta.get('data_mtime'),
                   data_json,
                   meta.get('suppressed', []),
+                  meta.get('child_modules', []),
                   meta.get('options'),
                   meta.get('dep_prios', []),
                   )
@@ -749,6 +751,7 @@ def find_cache_meta(id: str, path: str, manager: BuildManager) -> Optional[Cache
     if st.st_mtime != m.mtime or st.st_size != m.size:
         manager.log('Metadata abandoned because of modified file {}'.format(path))
         return None
 
+    # It's a match on (id, path, mtime, size).
     # Check data_json; assume if its mtime matches it's good.
     # TODO: stat() errors
@@ -777,7 +780,7 @@ def random_string() -> str:
 
 def write_cache(id: str, path: str, tree: MypyFile,
                 dependencies: List[str], suppressed: List[str],
-                dep_prios: List[int],
+                child_modules: List[str], dep_prios: List[int],
                 manager: BuildManager) -> None:
     """Write cache files for a module.
 
@@ -816,6 +819,7 @@
             'data_mtime': data_mtime,
             'dependencies': dependencies,
             'suppressed': suppressed,
+            'child_modules': child_modules,
             'options': select_options_affecting_cache(manager.options),
             'dep_prios': dep_prios,
             }
@@ -998,6 +1002,9 @@ class State:
     # Parent package, its parent, etc.
     ancestors = None  # type: Optional[List[str]]
 
+    # A list of all direct submodules of a given module
+    child_modules = None  # type: Optional[Set[str]]
+
     # List of (path, line number) tuples giving context for import
     import_context = None  # type: List[Tuple[str, int]]
 
@@ -1095,11 +1102,13 @@ def __init__(self,
             assert len(self.meta.dependencies) == len(self.meta.dep_prios)
             self.priorities = {id: pri for id, pri
                                in zip(self.meta.dependencies, self.meta.dep_prios)}
+            self.child_modules = set(self.meta.child_modules)
             self.dep_line_map = {}
         else:
             # Parse the file (and then some) to get the dependencies.
             self.parse_file()
             self.suppressed = []
+            self.child_modules = set()
 
     def skipping_ancestor(self, id: str, path: str, ancestor_for: 'State') -> None:
         # TODO: Read the path (the __init__.py file) and return
@@ -1146,7 +1155,13 @@ def is_fresh(self) -> bool:
         # self.meta.dependencies when a dependency is dropped due to
        # suppression by --silent-imports. However when a suppressed
         # dependency is added back we find out later in the process.
-        return self.meta is not None and self.dependencies == self.meta.dependencies
+        return (self.meta is not None
+                and self.dependencies == self.meta.dependencies
+                and self.child_modules == set(self.meta.child_modules))
+
+    def has_new_submodules(self) -> bool:
+        """Return if this module has new submodules after being loaded from a warm cache."""
+        return self.meta is not None and self.child_modules != set(self.meta.child_modules)
 
     def mark_stale(self) -> None:
         """Throw away the cache data for this file, marking it as stale."""
@@ -1307,7 +1322,7 @@ def write_cache(self) -> None:
         if self.path and self.manager.options.incremental and not self.manager.errors.is_errors():
             dep_prios = [self.priorities.get(dep, PRI_HIGH) for dep in self.dependencies]
             write_cache(self.id, self.path, self.tree,
-                        list(self.dependencies), list(self.suppressed),
+                        list(self.dependencies), list(self.suppressed), list(self.child_modules),
                         dep_prios,
                         self.manager)
 
@@ -1365,6 +1380,11 @@ def load_graph(sources: List[BuildSource], manager: BuildManager) -> Graph:
                     assert newst.id not in graph, newst.id
                     graph[newst.id] = newst
                     new.append(newst)
+            if dep in st.ancestors and dep in graph:
+                graph[dep].child_modules.add(st.id)
+    for id, g in graph.items():
+        if g.has_new_submodules():
+            g.parse_file()
     return graph
 
 
diff --git a/mypy/test/data.py b/mypy/test/data.py
index 9881ed27f40a..d2626a8c1a3a 100644
--- a/mypy/test/data.py
+++ b/mypy/test/data.py
@@ -124,6 +124,7 @@ def __init__(self, name, input, output, file, line, lastline,
 
     def set_up(self) -> None:
         super().set_up()
+        encountered_files = set()
         self.clean_up = []
         for path, content in self.files:
             dir = os.path.dirname(path)
@@ -133,6 +134,13 @@ def set_up(self) -> None:
             f.write(content)
             f.close()
             self.clean_up.append((False, path))
+            encountered_files.add(path)
+            if path.endswith(".next"):
+                # Make sure new files introduced in the second run are accounted for
+                renamed_path = path[:-5]
+                if renamed_path not in encountered_files:
+                    encountered_files.add(renamed_path)
+                    self.clean_up.append((False, renamed_path))
 
     def add_dirs(self, dir: str) -> List[str]:
         """Add all subdirectories required to create dir.
diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test
index c2b442c430f0..43592d6db2c7 100644
--- a/test-data/unit/check-incremental.test
+++ b/test-data/unit/check-incremental.test
@@ -155,3 +155,33 @@ import parent.a
 [builtins fixtures/args.py]
 [stale]
 [out]
+
+[case testIncrementalReferenceNewFileWithImportFrom]
+from parent import a
+
+[file parent/__init__.py]
+
+[file parent/a.py]
+
+[file parent/a.py.next]
+from parent import b
+
+[file parent/b.py.next]
+
+[stale parent, parent.a, parent.b]
+[out]
+
+[case testIncrementalReferenceExistingFileWithImportFrom]
+from parent import a, b
+
+[file parent/__init__.py]
+
+[file parent/a.py]
+
+[file parent/b.py]
+
+[file parent/a.py.next]
+from parent import b
+
+[stale parent.a]
+[out]
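For readers following the change, here is a minimal, self-contained sketch of the freshness rule the patch introduces: a module loaded from a warm cache stays fresh only if the submodules discovered while building the import graph match the child_modules recorded in its cache metadata, and has_new_submodules() flags the mismatch that forces a re-parse. This is illustrative only and not part of the patch; SimpleMeta and SimpleState are hypothetical stand-ins for mypy's CacheMeta and State.

# Illustrative sketch only -- not part of the patch above. SimpleMeta and
# SimpleState are hypothetical stand-ins for mypy's CacheMeta and State,
# reproducing the child_modules comparison added to is_fresh() and
# has_new_submodules().
from typing import List, Set


class SimpleMeta:
    def __init__(self, dependencies: List[str], child_modules: List[str]) -> None:
        self.dependencies = dependencies
        self.child_modules = child_modules


class SimpleState:
    def __init__(self, meta: SimpleMeta) -> None:
        self.meta = meta
        self.dependencies = list(meta.dependencies)
        # Filled in while the import graph is being built (load_graph in mypy).
        self.child_modules = set(meta.child_modules)  # type: Set[str]

    def is_fresh(self) -> bool:
        # A cached module stays fresh only if its dependencies and its set of
        # submodules both match what was recorded in the cache metadata.
        return (self.meta is not None
                and self.dependencies == self.meta.dependencies
                and self.child_modules == set(self.meta.child_modules))

    def has_new_submodules(self) -> bool:
        # True when graph construction discovered a submodule that the warm
        # cache does not know about, which triggers a re-parse of the module.
        return self.meta is not None and self.child_modules != set(self.meta.child_modules)


cached = SimpleMeta(dependencies=['parent.a'], child_modules=['parent.a'])
parent = SimpleState(cached)
assert parent.is_fresh() and not parent.has_new_submodules()

# Discovering parent.b (e.g. via 'from parent import b' appearing in a changed
# file) makes the cached parent stale, mirroring the behavior exercised by
# testIncrementalReferenceNewFileWithImportFrom above.
parent.child_modules.add('parent.b')
assert not parent.is_fresh() and parent.has_new_submodules()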