
Commit 4da63be

Clean up a lot of cruft
1 parent 9a69107 commit 4da63be

File tree

4 files changed: +25 −72 lines changed

mypy/build.py

Lines changed: 0 additions & 8 deletions

@@ -389,7 +389,6 @@ def default_lib_path(data_dir: str,
 CacheMeta = NamedTuple('CacheMeta',
                        [('id', str),
                         ('path', str),
-                        ('memory_only', bool),  # no corresponding json files (fine-grained only)
                         ('mtime', int),
                         ('size', int),
                         ('hash', str),
@@ -415,7 +414,6 @@ def cache_meta_from_dict(meta: Dict[str, Any], data_json: str) -> CacheMeta:
     return CacheMeta(
         meta.get('id', sentinel),
         meta.get('path', sentinel),
-        meta.get('memory_only', False),
         int(meta['mtime']) if 'mtime' in meta else sentinel,
         meta.get('size', sentinel),
         meta.get('hash', sentinel),
@@ -1121,12 +1119,6 @@ def validate_meta(meta: Optional[CacheMeta], id: str, path: Optional[str],
         manager.log('Metadata abandoned for {}: errors were previously ignored'.format(id))
         return None

-    if meta.memory_only:
-        # Special case for fine-grained incremental mode when the JSON file is missing but
-        # we want to cache the module anyway.
-        manager.log('Memory-only metadata for {}'.format(id))
-        return meta
-
     assert path is not None, "Internal error: meta was provided without a path"
     # Check data_json; assume if its mtime matches it's good.
     # TODO: stat() errors
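
For context, the deleted memory_only flag let fine-grained mode keep a cache entry alive even though no JSON file backed it, and validate_meta accepted such entries unconditionally. A minimal sketch of the rule that remains, using hypothetical simplified names (mypy's real validate_meta checks many more fields, including sizes, hashes, and options):

import os
from typing import NamedTuple, Optional

class CacheMetaSketch(NamedTuple):
    # Simplified stand-in for mypy's CacheMeta after this commit: every
    # entry must be backed by an on-disk JSON data file.
    id: str
    data_json: str   # path to the cached JSON data file
    data_mtime: int  # mtime of data_json recorded when the cache was written

def validate_meta_sketch(meta: Optional[CacheMetaSketch]) -> Optional[CacheMetaSketch]:
    # With 'memory_only' gone there is no escape hatch: metadata whose
    # JSON file is missing or stale is simply discarded.
    if meta is None or not os.path.isfile(meta.data_json):
        return None
    if int(os.path.getmtime(meta.data_json)) != meta.data_mtime:
        return None
    return meta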

mypy/server/update.py

Lines changed: 16 additions & 54 deletions

@@ -115,15 +115,14 @@
 
 - Fully support multiple type checking passes
 - Use mypy.fscache to access file system
-- Don't use load_graph() and update the import graph incrementally
 """
 
 import os.path
 from typing import Dict, List, Set, Tuple, Iterable, Union, Optional, Mapping, NamedTuple
 
 from mypy.build import (
-    BuildManager, State, BuildSource, Graph, load_graph, SavedCache, CacheMeta,
-    cache_meta_from_dict, find_module_clear_caches, DEBUG_FINE_GRAINED
+    BuildManager, State, BuildSource, Graph, load_graph, find_module_clear_caches,
+    DEBUG_FINE_GRAINED,
 )
 from mypy.checker import DeferredNode
 from mypy.errors import Errors, CompileError
@@ -172,7 +171,6 @@ def __init__(self,
         # this directly reflected in load_graph's interface.
         self.options.cache_dir = os.devnull
         manager.saved_cache = {}
-        self.type_maps = extract_type_maps(graph)
         # Active triggers during the last update
         self.triggered = []  # type: List[str]
 
@@ -253,6 +251,7 @@ def update_single(self, module: str, path: str) -> Tuple[List[str],
         # TODO: If new module brings in other modules, we parse some files multiple times.
         manager = self.manager
         previous_modules = self.previous_modules
+        graph = self.graph
 
         # Record symbol table snaphot of old version the changed module.
         old_snapshots = {}  # type: Dict[str, Dict[str, SnapshotItem]]
@@ -261,14 +260,14 @@ def update_single(self, module: str, path: str) -> Tuple[List[str],
             old_snapshots[module] = snapshot
 
         manager.errors.reset()
-        result = update_single_isolated(module, path, manager, previous_modules, self.graph)
+        result = update_single_isolated(module, path, manager, previous_modules, graph)
         if isinstance(result, BlockedUpdate):
             # Blocking error -- just give up
             module, path, remaining, errors = result
             self.previous_modules = get_module_to_path_map(manager)
             return errors, remaining, (module, path), True
         assert isinstance(result, NormalUpdate)  # Work around #4124
-        module, path, remaining, tree, graph = result
+        module, path, remaining, tree = result
 
         # TODO: What to do with stale dependencies?
         triggered = calculate_active_triggers(manager, old_snapshots, {module: tree})
@@ -285,20 +284,7 @@ def update_single(self, module: str, path: str) -> Tuple[List[str],
 
         # Preserve state needed for the next update.
         self.previous_targets_with_errors = manager.errors.targets()
-        # If deleted, module won't be in the graph.
-        if module in graph:
-            # Generate metadata so that we can reuse the AST in the next run.
-            graph[module].write_cache()
-        for id, state in graph.items():
-            # Look up missing ASTs from saved cache.
-            if state.tree is None and id in manager.saved_cache:
-                meta, tree, type_map = manager.saved_cache[id]
-                state.tree = tree
         self.previous_modules = get_module_to_path_map(manager)
-        self.type_maps = extract_type_maps(graph)
-
-        # XXX: I want us to not need this
-        self.graph = graph
 
         return manager.errors.new_messages(), remaining, (module, path), False
 
@@ -317,15 +303,13 @@ def get_all_dependencies(manager: BuildManager, graph: Dict[str, State],
 # - Id of the changed module (can be different from the module argument)
 # - Path of the changed module
 # - New AST for the changed module (None if module was deleted)
-# - The entire updated build graph
 # - Remaining changed modules that are not processed yet as (module id, path)
 #   tuples (non-empty if the original changed module imported other new
 #   modules)
 NormalUpdate = NamedTuple('NormalUpdate', [('module', str),
                                            ('path', str),
                                            ('remaining', List[Tuple[str, str]]),
-                                           ('tree', Optional[MypyFile]),
-                                           ('graph', Graph)])
+                                           ('tree', Optional[MypyFile])])
 
 # The result of update_single_isolated when there is a blocking error. Items
 # are similar to NormalUpdate (but there are fewer).
@@ -362,10 +346,11 @@ def update_single_isolated(module: str,
 
     old_modules = dict(manager.modules)
     sources = get_sources(previous_modules, [(module, path)])
-    invalidate_stale_cache_entries(manager.saved_cache, graph, [(module, path)])
 
     manager.missing_modules.clear()
     try:
+        if module in graph:
+            del graph[module]
         load_graph(sources, manager, graph)
     except CompileError as err:
         # Parse error somewhere in the program -- a blocker
@@ -383,8 +368,8 @@ def update_single_isolated(module: str,
         return BlockedUpdate(err.module_with_blocker, path, remaining_modules, err.messages)
 
     if not os.path.isfile(path):
-        graph = delete_module(module, graph, manager)
-        return NormalUpdate(module, path, [], None, graph)
+        delete_module(module, graph, manager)
+        return NormalUpdate(module, path, [], None)
 
     # Find any other modules brought in by imports.
     changed_modules = get_all_changed_modules(module, path, previous_modules, graph)
@@ -438,7 +423,7 @@ def update_single_isolated(module: str,
 
     graph[module] = state
 
-    return NormalUpdate(module, path, remaining_modules, state.tree, graph)
+    return NormalUpdate(module, path, remaining_modules, state.tree)
 
 
 def find_relative_leaf_module(modules: List[Tuple[str, str]], graph: Graph) -> Tuple[str, str]:
@@ -475,14 +460,13 @@ def assert_equivalent_paths(path1: str, path2: str) -> None:
 
 
 def delete_module(module_id: str,
-                  graph: Dict[str, State],
-                  manager: BuildManager) -> Dict[str, State]:
+                  graph: Graph,
+                  manager: BuildManager) -> None:
     manager.log_fine_grained('delete module %r' % module_id)
     # TODO: Deletion of a package
     # TODO: Remove deps for the module (this only affects memory use, not correctness)
-    new_graph = graph.copy()
-    if module_id in new_graph:
-        del new_graph[module_id]
+    if module_id in graph:
+        del graph[module_id]
     if module_id in manager.modules:
         del manager.modules[module_id]
     if module_id in manager.saved_cache:
@@ -496,7 +480,6 @@ def delete_module(module_id: str,
         parent = manager.modules[parent_id]
         if components[-1] in parent.names:
             del parent.names[components[-1]]
-    return new_graph
 
 
 def dedupe_modules(modules: List[Tuple[str, str]]) -> List[Tuple[str, str]]:
@@ -518,15 +501,10 @@ def get_sources(modules: Dict[str, str],
                 changed_modules: List[Tuple[str, str]]) -> List[BuildSource]:
     # TODO: Race condition when reading from the file system; we should only read each
     #   bit of external state once during a build to have a consistent view of the world
-    items = sorted(modules.items(), key=lambda x: x[0])
-    sources = [BuildSource(path, id, None)
-               for id, path in items
-               if os.path.isfile(path)]
     sources = []
     for id, path in changed_modules:
-        if os.path.isfile(path):# and id not in modules:
+        if os.path.isfile(path):
            sources.append(BuildSource(path, id, None))
-    # print(changed_modules, sources)
     return sources
 
 
@@ -544,16 +522,6 @@ def get_all_changed_modules(root_module: str,
     return changed_modules
 
 
-def invalidate_stale_cache_entries(cache: SavedCache,
-                                   graph: Graph,
-                                   changed_modules: List[Tuple[str, str]]) -> None:
-    for name, _ in changed_modules:
-        if name in cache:
-            del cache[name]
-        if name in graph:
-            del graph[name]
-
-
 def verify_dependencies(state: State, manager: BuildManager) -> None:
     """Report errors for import targets in module that don't exist."""
     for dep in state.dependencies + state.suppressed:  # TODO: ancestors?
@@ -907,11 +875,5 @@ def lookup_target(modules: Dict[str, MypyFile], target: str) -> List[DeferredNod
     return [DeferredNode(node, active_class_name, active_class)]
 
 
-def extract_type_maps(graph: Graph) -> Dict[str, Dict[Expression, Type]]:
-    # This is used to export information used only by the testmerge harness.
-    return {id: state.type_map() for id, state in graph.items()
-            if state.tree}
-
-
 def is_verbose(manager: BuildManager) -> bool:
     return manager.options.verbosity >= 1 or DEBUG_FINE_GRAINED
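
The net effect on the update API: delete_module and update_single_isolated now mutate the one shared Graph in place, so NormalUpdate no longer has to hand an updated graph back to the caller. A rough sketch of the new calling convention, with stand-in types and a hypothetical driver function (not mypy code):

from typing import Dict, List, NamedTuple, Optional, Tuple

# Stand-ins so the sketch is self-contained; mypy's real Graph maps
# module ids to build.State objects.
State = object
Graph = Dict[str, State]

# Result shape after this commit: the 'graph' field is gone.
NormalUpdate = NamedTuple('NormalUpdate', [('module', str),
                                           ('path', str),
                                           ('remaining', List[Tuple[str, str]]),
                                           ('tree', Optional[object])])

def process_update(graph: Graph, module: str, path: str) -> NormalUpdate:
    # New convention: drop the stale entry in place (this stands in for
    # the removed invalidate_stale_cache_entries helper) ...
    graph.pop(module, None)
    # ... load_graph() would repopulate graph[module] here, and the
    # result carries no graph; callers keep their existing reference.
    return NormalUpdate(module, path, [], None)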

mypy/test/testcheck.py

Lines changed: 0 additions & 1 deletion

@@ -149,7 +149,6 @@ def run_case_once(self, testcase: DataDrivenTestCase, incremental_step: int = 0)
         options = parse_options(original_program_text, testcase, incremental_step)
         options.use_builtins_fixtures = True
         options.show_traceback = True
-        options.verbosity = 1
         if 'optional' in testcase.file:
             options.strict_optional = True
         if incremental_step:

mypy/test/testmerge.py

Lines changed: 9 additions & 9 deletions

@@ -5,7 +5,7 @@
 from typing import List, Tuple, Dict, Optional
 
 from mypy import build
-from mypy.build import BuildManager, BuildSource, State
+from mypy.build import BuildManager, BuildSource, State, Graph
 from mypy.errors import Errors, CompileError
 from mypy.nodes import (
     Node, MypyFile, SymbolTable, SymbolTableNode, TypeInfo, Expression, Var, UNBOUND_IMPORTED
@@ -77,13 +77,13 @@ def run_case(self, testcase: DataDrivenTestCase) -> None:
         target_path = os.path.join(test_temp_dir, 'target.py')
         shutil.copy(os.path.join(test_temp_dir, 'target.py.next'), target_path)
 
-        a.extend(self.dump(manager, kind))
+        a.extend(self.dump(fine_grained_manager, kind))
         old_subexpr = get_subexpressions(manager.modules['target'])
 
         a.append('==>')
 
         new_file, new_types = self.build_increment(fine_grained_manager, 'target', target_path)
-        a.extend(self.dump(manager, kind))
+        a.extend(self.dump(fine_grained_manager, kind))
 
         for expr in old_subexpr:
             # Verify that old AST nodes are removed from the expression type map.
@@ -119,13 +119,13 @@ def build_increment(self, manager: FineGrainedBuildManager,
                                                     Dict[Expression, Type]]:
         manager.update([(module_id, path)])
         module = manager.manager.modules[module_id]
-        type_map = manager.type_maps[module_id]
+        type_map = manager.graph[module_id].type_map()
         return module, type_map
 
     def dump(self,
-             manager: BuildManager,
+             manager: FineGrainedBuildManager,
             kind: str) -> List[str]:
-        modules = manager.modules
+        modules = manager.manager.modules
         if kind == AST:
             return self.dump_asts(modules)
         elif kind == TYPEINFO:
@@ -203,14 +203,14 @@ def dump_typeinfo(self, info: TypeInfo) -> List[str]:
                            type_str_conv=self.type_str_conv)
         return s.splitlines()
 
-    def dump_types(self, manager: BuildManager) -> List[str]:
+    def dump_types(self, manager: FineGrainedBuildManager) -> List[str]:
         a = []
         # To make the results repeatable, we try to generate unique and
         # deterministic sort keys.
-        for module_id in sorted(manager.modules):
+        for module_id in sorted(manager.manager.modules):
             if not is_dumped_module(module_id):
                 continue
-            type_map = manager.saved_cache[module_id][2]
+            type_map = manager.graph[module_id].type_map()
             if type_map:
                 a.append('## {}'.format(module_id))
                 for expr in sorted(type_map, key=lambda n: (n.line, short_type(n),
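
With extract_type_maps and the saved-cache tuples gone, the test harness now reads expression types directly off the graph's State objects via State.type_map(). A small sketch of that access pattern (dump_module_types is a hypothetical helper, assuming a FineGrainedBuildManager whose update() has already run):

from typing import List

from mypy.server.update import FineGrainedBuildManager

def dump_module_types(manager: FineGrainedBuildManager, module_id: str) -> List[str]:
    # After this commit the per-module type map lives on the graph's
    # State object, so we ask the State instead of a saved-cache tuple.
    type_map = manager.graph[module_id].type_map()
    lines = ['## {}'.format(module_id)]
    for expr in sorted(type_map, key=lambda n: n.line):
        lines.append('{}:{}: {}'.format(module_id, expr.line, type_map[expr]))
    return lines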
