Skip to content

Commit 9a69107

Browse files
committed
Eliminate serializing into and out of SavedCache
1 parent 088a631 commit 9a69107

File tree

8 files changed

+49
-140
lines changed

8 files changed

+49
-140
lines changed

mypy/build.py

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2186,13 +2186,16 @@ def dump_graph(graph: Graph) -> None:
21862186
print("[" + ",\n ".join(node.dumps() for node in nodes) + "\n]")
21872187

21882188

2189-
def load_graph(sources: List[BuildSource], manager: BuildManager) -> Graph:
2189+
def load_graph(sources: List[BuildSource], manager: BuildManager,
2190+
old_graph: Optional[Graph] = None) -> Graph:
21902191
"""Given some source files, load the full dependency graph.
21912192
21922193
As this may need to parse files, this can raise CompileError in case
21932194
there are syntax errors.
21942195
"""
2195-
graph = {} # type: Graph
2196+
2197+
graph = old_graph or {} # type: Graph
2198+
21962199
# The deque is used to implement breadth-first traversal.
21972200
# TODO: Consider whether to go depth-first instead. This may
21982201
# affect the order in which we process files within import cycles.

mypy/checker.py

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -216,6 +216,16 @@ def __init__(self, errors: Errors, modules: Dict[str, MypyFile], options: Option
216216
# for processing module top levels in fine-grained incremental mode.
217217
self.recurse_into_functions = True
218218

219+
def reset(self) -> None:
220+
"""Clean up stale state that might be left over from a typechecking run.
221+
222+
This allows us to reuse TypeChecker objects in fine-grained
223+
incremental mode.
224+
"""
225+
self.partial_reported.clear()
226+
assert self.partial_types == []
227+
assert self.deferred_nodes == []
228+
219229
def check_first_pass(self) -> None:
220230
"""Type check the entire file, but defer functions with unresolved references.
221231

mypy/dmypy_server.py

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -282,10 +282,11 @@ def initialize_fine_grained(self, sources: List[mypy.build.BuildSource]) -> Dict
282282
if self.options.use_fine_grained_cache:
283283
# Pull times and hashes out of the saved_cache and stick them into
284284
# the fswatcher, so we pick up the changes.
285-
for meta, mypyfile, type_map in manager.saved_cache.values():
286-
if meta.mtime is None: continue
285+
for state in self.fine_grained_manager.graph.values():
286+
meta = state.meta
287+
if meta is None: continue
287288
self.fswatcher.set_file_data(
288-
mypyfile.path,
289+
state.xpath,
289290
FileData(st_mtime=float(meta.mtime), st_size=meta.size, md5=meta.hash))
290291

291292
# Run an update

mypy/server/update.py

Lines changed: 22 additions & 90 deletions
Original file line numberDiff line numberDiff line change
@@ -161,6 +161,7 @@ def __init__(self,
161161
self.previous_modules = get_module_to_path_map(manager)
162162
self.deps = get_all_dependencies(manager, graph, self.options)
163163
self.previous_targets_with_errors = manager.errors.targets()
164+
self.graph = graph
164165
# Module, if any, that had blocking errors in the last run as (id, path) tuple.
165166
# TODO: Handle blocking errors in the initial build
166167
self.blocking_error = None # type: Optional[Tuple[str, str]]
@@ -170,8 +171,7 @@ def __init__(self,
170171
# for the cache. This is kind of a hack and it might be better to have
171172
# this directly reflected in load_graph's interface.
172173
self.options.cache_dir = os.devnull
173-
mark_all_meta_as_memory_only(graph, manager)
174-
manager.saved_cache = preserve_full_cache(graph, manager)
174+
manager.saved_cache = {}
175175
self.type_maps = extract_type_maps(graph)
176176
# Active triggers during the last update
177177
self.triggered = [] # type: List[str]
@@ -261,7 +261,7 @@ def update_single(self, module: str, path: str) -> Tuple[List[str],
261261
old_snapshots[module] = snapshot
262262

263263
manager.errors.reset()
264-
result = update_single_isolated(module, path, manager, previous_modules)
264+
result = update_single_isolated(module, path, manager, previous_modules, self.graph)
265265
if isinstance(result, BlockedUpdate):
266266
# Blocking error -- just give up
267267
module, path, remaining, errors = result
@@ -294,23 +294,13 @@ def update_single(self, module: str, path: str) -> Tuple[List[str],
294294
if state.tree is None and id in manager.saved_cache:
295295
meta, tree, type_map = manager.saved_cache[id]
296296
state.tree = tree
297-
mark_all_meta_as_memory_only(graph, manager)
298-
manager.saved_cache = preserve_full_cache(graph, manager)
299297
self.previous_modules = get_module_to_path_map(manager)
300298
self.type_maps = extract_type_maps(graph)
301299

302-
return manager.errors.new_messages(), remaining, (module, path), False
303-
300+
# XXX: I want us to not need this
301+
self.graph = graph
304302

305-
def mark_all_meta_as_memory_only(graph: Dict[str, State],
306-
manager: BuildManager) -> None:
307-
for id, state in graph.items():
308-
if id in manager.saved_cache:
309-
# Don't look at disk.
310-
old = manager.saved_cache[id]
311-
manager.saved_cache[id] = (old[0]._replace(memory_only=True),
312-
old[1],
313-
old[2])
303+
return manager.errors.new_messages(), remaining, (module, path), False
314304

315305

316306
def get_all_dependencies(manager: BuildManager, graph: Dict[str, State],
@@ -350,7 +340,8 @@ def get_all_dependencies(manager: BuildManager, graph: Dict[str, State],
350340
def update_single_isolated(module: str,
351341
path: str,
352342
manager: BuildManager,
353-
previous_modules: Dict[str, str]) -> UpdateResult:
343+
previous_modules: Dict[str, str],
344+
graph: Graph) -> UpdateResult:
354345
"""Build a new version of one changed module only.
355346
356347
Don't propagate changes to elsewhere in the program. Raise CompileError on
@@ -371,11 +362,11 @@ def update_single_isolated(module: str,
371362

372363
old_modules = dict(manager.modules)
373364
sources = get_sources(previous_modules, [(module, path)])
374-
invalidate_stale_cache_entries(manager.saved_cache, [(module, path)])
365+
invalidate_stale_cache_entries(manager.saved_cache, graph, [(module, path)])
375366

376367
manager.missing_modules.clear()
377368
try:
378-
graph = load_graph(sources, manager)
369+
load_graph(sources, manager, graph)
379370
except CompileError as err:
380371
# Parse error somewhere in the program -- a blocker
381372
assert err.module_with_blocker
@@ -437,6 +428,7 @@ def update_single_isolated(module: str,
437428
replace_modules_with_new_variants(manager, graph, old_modules, new_modules)
438429

439430
# Perform type checking.
431+
state.type_checker().reset()
440432
state.type_check_first_pass()
441433
state.type_check_second_pass()
442434
state.compute_fine_grained_deps()
@@ -488,8 +480,9 @@ def delete_module(module_id: str,
488480
manager.log_fine_grained('delete module %r' % module_id)
489481
# TODO: Deletion of a package
490482
# TODO: Remove deps for the module (this only affects memory use, not correctness)
491-
assert module_id not in graph
492483
new_graph = graph.copy()
484+
if module_id in new_graph:
485+
del new_graph[module_id]
493486
if module_id in manager.modules:
494487
del manager.modules[module_id]
495488
if module_id in manager.saved_cache:
@@ -529,9 +522,11 @@ def get_sources(modules: Dict[str, str],
529522
sources = [BuildSource(path, id, None)
530523
for id, path in items
531524
if os.path.isfile(path)]
525+
sources = []
532526
for id, path in changed_modules:
533-
if os.path.isfile(path) and id not in modules:
527+
if os.path.isfile(path):# and id not in modules:
534528
sources.append(BuildSource(path, id, None))
529+
# print(changed_modules, sources)
535530
return sources
536531

537532

@@ -549,75 +544,14 @@ def get_all_changed_modules(root_module: str,
549544
return changed_modules
550545

551546

552-
def preserve_full_cache(graph: Graph, manager: BuildManager) -> SavedCache:
553-
"""Preserve every module with an AST in the graph, including modules with errors."""
554-
saved_cache = {}
555-
for id, state in graph.items():
556-
assert state.id == id
557-
if state.tree is not None:
558-
meta = state.meta
559-
if meta is None:
560-
# No metadata, likely because of an error. We still want to retain the AST.
561-
# There is no corresponding JSON so create partial "memory-only" metadata.
562-
assert state.path
563-
dep_prios = state.dependency_priorities()
564-
dep_lines = state.dependency_lines()
565-
meta = memory_only_cache_meta(
566-
id,
567-
state.path,
568-
state.dependencies,
569-
state.suppressed,
570-
list(state.child_modules),
571-
dep_prios,
572-
dep_lines,
573-
state.source_hash,
574-
state.ignore_all,
575-
manager)
576-
else:
577-
meta = meta._replace(memory_only=True)
578-
saved_cache[id] = (meta, state.tree, state.type_map())
579-
return saved_cache
580-
581-
582-
def memory_only_cache_meta(id: str,
583-
path: str,
584-
dependencies: List[str],
585-
suppressed: List[str],
586-
child_modules: List[str],
587-
dep_prios: List[int],
588-
dep_lines: List[int],
589-
source_hash: str,
590-
ignore_all: bool,
591-
manager: BuildManager) -> CacheMeta:
592-
"""Create cache metadata for module that doesn't have a JSON cache files.
593-
594-
JSON cache files aren't written for modules with errors, but we want to still
595-
cache them in fine-grained incremental mode.
596-
"""
597-
options = manager.options.clone_for_module(id)
598-
# Note that we omit attributes related to the JSON files.
599-
meta = {'id': id,
600-
'path': path,
601-
'memory_only': True, # Important bit: don't expect JSON files to exist
602-
'hash': source_hash,
603-
'dependencies': dependencies,
604-
'suppressed': suppressed,
605-
'child_modules': child_modules,
606-
'options': options.select_options_affecting_cache(),
607-
'dep_prios': dep_prios,
608-
'dep_lines': dep_lines,
609-
'interface_hash': '',
610-
'version_id': manager.version_id,
611-
'ignore_all': ignore_all,
612-
}
613-
return cache_meta_from_dict(meta, '')
614-
615-
616547
def invalidate_stale_cache_entries(cache: SavedCache,
548+
graph: Graph,
617549
changed_modules: List[Tuple[str, str]]) -> None:
618550
for name, _ in changed_modules:
619551
if name in cache:
620552
del cache[name]
553+
if name in graph:
554+
del graph[name]
621555

622556

623557
def verify_dependencies(state: State, manager: BuildManager) -> None:
@@ -809,8 +743,8 @@ def reprocess_nodes(manager: BuildManager,
809743
810744
Return fired triggers.
811745
"""
812-
if module_id not in manager.saved_cache or module_id not in graph:
813-
manager.log_fine_grained('%s not in saved cache or graph (blocking errors or deleted?)' %
746+
if module_id not in graph:
747+
manager.log_fine_grained('%s not in graph (blocking errors or deleted?)' %
814748
module_id)
815749
return set()
816750

@@ -863,10 +797,8 @@ def key(node: DeferredNode) -> int:
863797
merge_asts(file_node, old_symbols[name], file_node, new_symbols[name])
864798

865799
# Type check.
866-
meta, file_node, type_map = manager.saved_cache[module_id]
867-
graph[module_id].tree = file_node
868-
graph[module_id].type_checker().type_map = type_map
869800
checker = graph[module_id].type_checker()
801+
checker.reset()
870802
# We seem to need additional passes in fine-grained incremental mode.
871803
checker.pass_num = 0
872804
checker.last_pass = 3

mypy/test/testcheck.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -149,6 +149,7 @@ def run_case_once(self, testcase: DataDrivenTestCase, incremental_step: int = 0)
149149
options = parse_options(original_program_text, testcase, incremental_step)
150150
options.use_builtins_fixtures = True
151151
options.show_traceback = True
152+
options.verbosity = 1
152153
if 'optional' in testcase.file:
153154
options.strict_optional = True
154155
if incremental_step:

test-data/unit/fine-grained-cycles.test

Lines changed: 1 addition & 38 deletions
Original file line numberDiff line numberDiff line change
@@ -174,44 +174,7 @@ def h() -> None:
174174
==
175175
a.py:1: error: Module 'b' has no attribute 'C'
176176

177-
[case testReferenceToTypeThroughCycleAndReplaceWithFunction-skip-cache]
178-
-- Different cache/no-cache tests because:
179-
-- Cache mode has a "need type annotation" message (like coarse incremental does)
180-
181-
import a
182-
183-
[file a.py]
184-
from b import C
185-
186-
def f() -> C: pass
187-
188-
[file b.py]
189-
import a
190-
191-
class C:
192-
def g(self) -> None: pass
193-
194-
def h() -> None:
195-
c = a.f()
196-
c.g()
197-
198-
[file b.py.2]
199-
import a
200-
201-
def C() -> int: pass
202-
203-
def h() -> None:
204-
c = a.f()
205-
c.g()
206-
207-
[out]
208-
==
209-
a.py:3: error: Invalid type "b.C"
210-
211-
[case testReferenceToTypeThroughCycleAndReplaceWithFunction-skip-nocache]
212-
-- Different cache/no-cache tests because:
213-
-- Cache mode has a "need type annotation" message (like coarse incremental does)
214-
177+
[case testReferenceToTypeThroughCycleAndReplaceWithFunction]
215178
import a
216179

217180
[file a.py]

test-data/unit/fine-grained-modules.test

Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -236,24 +236,23 @@ main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports"
236236

237237
[case testDeletionOfSubmoduleTriggersImportFrom1-skip-cache]
238238
-- Different cache/no-cache tests because:
239-
-- Cache mode matches the message from regular mode and no-cache mode
240-
-- matches the message from coarse incremental mode...
239+
-- missing module error message mismatch
241240
from p import q
242241
[file p/__init__.py]
243242
[file p/q.py]
244243
[delete p/q.py.2]
245244
[file p/q.py.3]
246245
[out]
247246
==
248-
main:1: error: Cannot find module named 'p.q'
247+
main:1: error: Module 'p' has no attribute 'q'
249248
-- TODO: The following messages are different compared to non-incremental mode
249+
main:1: error: Cannot find module named 'p.q'
250250
main:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
251251
==
252252

253253
[case testDeletionOfSubmoduleTriggersImportFrom1-skip-nocache]
254254
-- Different cache/no-cache tests because:
255-
-- Cache mode matches the message from regular mode and no-cache mode
256-
-- matches the message from coarse incremental mode...
255+
-- missing module error message mismatch
257256
from p import q
258257
[file p/__init__.py]
259258
[file p/q.py]

test-data/unit/fine-grained.test

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1108,9 +1108,9 @@ def f() -> Iterator[None]:
11081108
[out]
11091109
main:2: error: Revealed type is 'contextlib.GeneratorContextManager[builtins.None]'
11101110
==
1111+
main:2: error: Revealed type is 'contextlib.GeneratorContextManager[builtins.None]'
11111112
a.py:3: error: Cannot find module named 'b'
11121113
a.py:3: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help)
1113-
main:2: error: Revealed type is 'contextlib.GeneratorContextManager[builtins.None]'
11141114
==
11151115
main:2: error: Revealed type is 'contextlib.GeneratorContextManager[builtins.None]'
11161116

@@ -2355,7 +2355,7 @@ def f() -> None:
23552355
d.py:3: error: Argument 1 to "A" has incompatible type "str"; expected "int"
23562356
==
23572357

2358-
[case testNonePartialType]
2358+
[case testNonePartialType1]
23592359
import a
23602360
a.y
23612361

0 commit comments

Comments
 (0)