Have -v enable DEBUG logging from fine-grained updates. #4550

Merged
2 commits merged on Feb 7, 2018
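This PR removes the module-level DEBUG flag in mypy/server/update.py and routes the fine-grained update tracing through the build manager's log method, so the output is controlled by the ordinary -v verbosity setting instead of an edit-the-source constant. As a rough illustration of the before/after pattern, here is a minimal, self-contained sketch; SimpleOptions and SimpleManager are stand-ins invented for this example, not mypy's real Options or BuildManager, whose log implementation may differ in detail.

# Before: every debug message sat behind a hard-coded module-level switch.
DEBUG = False

def update_before(module: str) -> None:
    if DEBUG:
        print('--- update single %r ---' % module)

# After (sketch): messages go through the manager and are gated by the
# verbosity that -v sets on the options object.
class SimpleOptions:
    def __init__(self, verbosity: int = 0) -> None:
        self.verbosity = verbosity

class SimpleManager:
    def __init__(self, options: SimpleOptions) -> None:
        self.options = options

    def log(self, message: str) -> None:
        # Assumption: output is emitted only when -v (verbosity >= 1) was given.
        if self.options.verbosity >= 1:
            print(message)

def update_after(manager: SimpleManager, module: str) -> None:
    manager.log('fine-grained: --- update single %r ---' % module)

# update_after(SimpleManager(SimpleOptions(verbosity=1)), 'pkg.mod') would print
# "fine-grained: --- update single 'pkg.mod' ---"; with verbosity=0 it stays silent.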
mypy/server/update.py: 55 changes (23 additions & 32 deletions)
@@ -143,10 +143,6 @@
 from mypy.server.trigger import make_trigger
 
 
-# If True, print out debug logging output.
-DEBUG = False
-
-
 MAX_ITER = 1000
 
 
@@ -205,25 +201,23 @@ def update(self, changed_modules: List[Tuple[str, str]]) -> List[str]:
         self.triggered = []
         changed_modules = dedupe_modules(changed_modules + self.stale)
         initial_set = {id for id, _ in changed_modules}
-        if DEBUG:
-            print('==== update %s ====' % ', '.join(repr(id)
-                                                    for id, _ in changed_modules))
-            if self.previous_targets_with_errors:
-                print('previous targets with errors: %s' %
-                      sorted(self.previous_targets_with_errors))
+        self.manager.log('fine-grained: ==== update %s ====' % ', '.join(
+            repr(id) for id, _ in changed_modules))
+        if self.previous_targets_with_errors and self.options.verbosity >= 1:
+            self.manager.log('fine-grained: previous targets with errors: %s' %
+                             sorted(self.previous_targets_with_errors))
 
         if self.blocking_error:
             # Handle blocking errors first. We'll exit as soon as we find a
             # module that still has blocking errors.
-            if DEBUG:
-                print('existing blocker: %s' % self.blocking_error[0])
+            self.manager.log('fine-grained: existing blocker: %s' % self.blocking_error[0])
             changed_modules = dedupe_modules([self.blocking_error] + changed_modules)
             self.blocking_error = None
 
         while changed_modules:
             next_id, next_path = changed_modules.pop(0)
             if next_id not in self.previous_modules and next_id not in initial_set:
-                print('skip %r (module not in import graph)' % next_id)
+                self.manager.log('fine-grained: skip %r (module not in import graph)' % next_id)
                 continue
             result = self.update_single(next_id, next_path)
             messages, remaining, (next_id, next_path), blocker = result
@@ -254,8 +248,7 @@ def update_single(self, module: str, path: str) -> Tuple[List[str],
             - Module which was actually processed as (id, path) tuple
             - Whether there was a blocking error in the module
         """
-        if DEBUG:
-            print('--- update single %r ---' % module)
+        self.manager.log('fine-grained: --- update single %r ---' % module)
 
         # TODO: If new module brings in other modules, we parse some files multiple times.
         manager = self.manager
@@ -279,10 +272,10 @@ def update_single(self, module: str, path: str) -> Tuple[List[str],
 
         # TODO: What to do with stale dependencies?
         triggered = calculate_active_triggers(manager, old_snapshots, {module: tree})
-        if DEBUG:
+        if self.options.verbosity >= 1:
             filtered = [trigger for trigger in triggered
                         if not trigger.endswith('__>')]
-            print('triggered:', sorted(filtered))
+            self.manager.log('fine-grained: triggered: %r' % sorted(filtered))
         self.triggered.extend(triggered | self.previous_targets_with_errors)
         collect_dependencies({module: tree}, self.deps, graph)
         remaining += propagate_changes_using_dependencies(
@@ -373,8 +366,8 @@ def update_single_isolated(module: str,
     """
     if module in manager.modules:
         assert_equivalent_paths(path, manager.modules[module].path)
-    elif DEBUG:
-        print('new module %r' % module)
+    else:
+        manager.log('fine-grained: new module %r' % module)
 
     old_modules = dict(manager.modules)
     sources = get_sources(previous_modules, [(module, path)])
@@ -418,8 +411,7 @@ def update_single_isolated(module: str,
             else:
                 del manager.modules[id]
                 del graph[id]
-        if DEBUG:
-            print('--> %r (newly imported)' % module)
+        manager.log('fine-grained: --> %r (newly imported)' % module)
     else:
         remaining_modules = []
 
@@ -493,8 +485,7 @@ def assert_equivalent_paths(path1: str, path2: str) -> None:
 def delete_module(module_id: str,
                   graph: Dict[str, State],
                   manager: BuildManager) -> Dict[str, State]:
-    if DEBUG:
-        print('delete module %r' % module_id)
+    manager.log('fine-grained: delete module %r' % module_id)
     # TODO: Deletion of a package
     # TODO: Remove deps for the module (this only affects memory use, not correctness)
     assert module_id not in graph
@@ -735,16 +726,16 @@ def propagate_changes_using_dependencies(
         if num_iter > MAX_ITER:
             raise RuntimeError('Max number of iterations (%d) reached (endless loop?)' % MAX_ITER)
 
-        todo = find_targets_recursive(triggered, deps, manager.modules, up_to_date_modules)
+        todo = find_targets_recursive(manager, triggered, deps,
+                                      manager.modules, up_to_date_modules)
         # Also process targets that used to have errors, as otherwise some
         # errors might be lost.
         for target in targets_with_errors:
             id = module_prefix(manager.modules, target)
             if id is not None and id not in up_to_date_modules:
                 if id not in todo:
                     todo[id] = set()
-                if DEBUG:
-                    print('process', target)
+                manager.log('fine-grained: process: %s' % target)
                 todo[id].update(lookup_target(manager.modules, target))
         triggered = set()
         # TODO: Preserve order (set is not optimal)
@@ -762,13 +753,14 @@ def propagate_changes_using_dependencies(
         # dependency loop that loops back to an originally processed module.
         up_to_date_modules = set()
         targets_with_errors = set()
-        if DEBUG:
-            print('triggered:', list(triggered))
+        if manager.options.verbosity >= 1:
+            manager.log('fine-grained: triggered: %r' % list(triggered))
 
     return remaining_modules
 
 
 def find_targets_recursive(
+        manager: BuildManager,
         triggers: Set[str],
         deps: Dict[str, Set[str]],
         modules: Dict[str, MypyFile],
@@ -801,8 +793,7 @@ def find_targets_recursive(
                 continue
             if module_id not in result:
                 result[module_id] = set()
-            if DEBUG:
-                print('process', target)
+            manager.log('fine-grained: process %s' % target)
             deferred = lookup_target(modules, target)
             result[module_id].update(deferred)
 
@@ -819,8 +810,8 @@ def reprocess_nodes(manager: BuildManager,
     Return fired triggers.
     """
     if module_id not in manager.saved_cache or module_id not in graph:
-        if DEBUG:
-            print('%s not in saved cache or graph (blocking errors or deleted?)' % module_id)
+        manager.log('fine-grained: %s not in saved cache or graph (blocking errors or deleted?)' %
+                    module_id)
         return set()
 
     file_node = manager.modules[module_id]
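One detail in the hunks above is worth calling out: a few call sites keep an explicit options.verbosity >= 1 check around the log call even though manager.log itself presumably does nothing below that level, so that comparatively expensive message arguments (sorting and filtering the trigger set) are not built at all on a quiet run. A self-contained sketch of that pattern, using made-up trigger strings purely for illustration:

from typing import Set

def log_triggered(verbosity: int, triggered: Set[str]) -> None:
    # Guard first, so the filtering and sorting below are skipped entirely
    # when nobody will see the message.
    if verbosity >= 1:
        filtered = [trigger for trigger in triggered
                    if not trigger.endswith('__>')]
        print('fine-grained: triggered: %r' % sorted(filtered))

log_triggered(0, {'<pkg.mod.f>', '<pkg.mod.__>'})  # quiet run: no work, no output
log_triggered(1, {'<pkg.mod.f>', '<pkg.mod.__>'})  # verbose run: logs ['<pkg.mod.f>']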