@@ -25,7 +25,7 @@
 import types
 
 from typing import (AbstractSet, Any, Dict, Iterable, Iterator, List,
-                    Mapping, NamedTuple, Optional, Set, Tuple, Union, Callable)
+                    Mapping, NamedTuple, Optional, Set, Tuple, Union, Callable, TextIO)
 MYPY = False
 if MYPY:
     from typing import ClassVar
@@ -128,6 +128,8 @@ def build(sources: List[BuildSource],
           alt_lib_path: Optional[str] = None,
           flush_errors: Optional[Callable[[List[str], bool], None]] = None,
           fscache: Optional[FileSystemCache] = None,
+          stdout: Optional[TextIO] = None,
+          stderr: Optional[TextIO] = None,
           ) -> BuildResult:
     """Analyze a program.
 
@@ -159,9 +161,11 @@ def default_flush_errors(new_messages: List[str], is_serious: bool) -> None:
         messages.extend(new_messages)
 
     flush_errors = flush_errors or default_flush_errors
+    stdout = stdout or sys.stdout
+    stderr = stderr or sys.stderr
 
     try:
-        result = _build(sources, options, alt_lib_path, flush_errors, fscache)
+        result = _build(sources, options, alt_lib_path, flush_errors, fscache, stdout, stderr)
         result.errors = messages
         return result
     except CompileError as e:
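With the "stdout = stdout or sys.stdout" fallback above, existing callers are unaffected, while embedders can now supply their own streams. A minimal usage sketch, hedged: it assumes this patch is applied, the file name is illustrative, and the import locations of BuildSource and Options can vary between mypy versions.

    import io
    from mypy.build import build, BuildSource
    from mypy.options import Options

    out, err = io.StringIO(), io.StringIO()
    result = build([BuildSource('example.py', None)], Options(),
                   stdout=out, stderr=err)
    # Anything the build printed is now in the buffers, not on the console.
    print(out.getvalue())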
@@ -180,6 +184,8 @@ def _build(sources: List[BuildSource],
            alt_lib_path: Optional[str],
            flush_errors: Callable[[List[str], bool], None],
            fscache: Optional[FileSystemCache],
+           stdout: TextIO,
+           stderr: TextIO,
            ) -> BuildResult:
     # This seems the most reasonable place to tune garbage collection.
     gc.set_threshold(150 * 1000)
@@ -197,7 +203,7 @@ def _build(sources: List[BuildSource],
 
     source_set = BuildSourceSet(sources)
     errors = Errors(options.show_error_context, options.show_column_numbers)
-    plugin, snapshot = load_plugins(options, errors)
+    plugin, snapshot = load_plugins(options, errors, stdout)
 
     # Construct a build manager object to hold state during the build.
     #
@@ -212,12 +218,14 @@ def _build(sources: List[BuildSource],
                            plugins_snapshot=snapshot,
                            errors=errors,
                            flush_errors=flush_errors,
-                           fscache=fscache)
+                           fscache=fscache,
+                           stdout=stdout,
+                           stderr=stderr)
     manager.trace(repr(options))
 
     reset_global_state()
     try:
-        graph = dispatch(sources, manager)
+        graph = dispatch(sources, manager, stdout)
         if not options.fine_grained_incremental:
             TypeState.reset_all_subtype_caches()
         return BuildResult(manager, graph)
@@ -319,7 +327,10 @@ def import_priority(imp: ImportBase, toplevel_priority: int) -> int:
     return toplevel_priority
 
 
-def load_plugins(options: Options, errors: Errors) -> Tuple[Plugin, Dict[str, str]]:
+def load_plugins(options: Options,
+                 errors: Errors,
+                 stdout: TextIO,
+                 ) -> Tuple[Plugin, Dict[str, str]]:
     """Load all configured plugins.
 
     Return a plugin that encapsulates all plugins chained together. Always
@@ -383,7 +394,8 @@ def plugin_error(message: str) -> None:
        try:
            plugin_type = getattr(module, func_name)(__version__)
        except Exception:
-            print('Error calling the plugin(version) entry point of {}\n'.format(plugin_path))
+            print('Error calling the plugin(version) entry point of {}\n'.format(plugin_path),
+                  file=stdout)
            raise  # Propagate to display traceback
 
        if not isinstance(plugin_type, type):
@@ -398,7 +410,8 @@ def plugin_error(message: str) -> None:
            custom_plugins.append(plugin_type(options))
            snapshot[module_name] = take_module_snapshot(module)
        except Exception:
-            print('Error constructing plugin instance of {}\n'.format(plugin_type.__name__))
+            print('Error constructing plugin instance of {}\n'.format(plugin_type.__name__),
+                  file=stdout)
            raise  # Propagate to display traceback
    # Custom plugins take precedence over the default plugin.
    return ChainedPlugin(options, custom_plugins + [default_plugin]), snapshot
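Routing these plugin diagnostics through the stdout parameter makes them capturable. A hedged sketch of what that enables (it assumes this patch is applied; the Errors constructor call mirrors the one in _build above):

    import io
    from mypy.build import load_plugins
    from mypy.errors import Errors
    from mypy.options import Options

    options = Options()
    errors = Errors(options.show_error_context, options.show_column_numbers)
    buf = io.StringIO()
    try:
        plugin, snapshot = load_plugins(options, errors, buf)
    except Exception:
        # The 'Error calling ...' / 'Error constructing ...' text is in buf
        # rather than on the real console.
        print(buf.getvalue())
        raise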
@@ -496,8 +509,10 @@ def __init__(self, data_dir: str,
                  errors: Errors,
                  flush_errors: Callable[[List[str], bool], None],
                  fscache: FileSystemCache,
+                 stdout: TextIO,
+                 stderr: TextIO,
                  ) -> None:
-        super().__init__()
+        super().__init__(stdout, stderr)
        self.start_time = time.time()
        self.data_dir = data_dir
        self.errors = errors
@@ -558,7 +573,7 @@ def __init__(self, data_dir: str,
        self.plugin = plugin
        self.plugins_snapshot = plugins_snapshot
        self.old_plugins_snapshot = read_plugins_snapshot(self)
-        self.quickstart_state = read_quickstart_file(options)
+        self.quickstart_state = read_quickstart_file(options, self.stdout)
 
    def dump_stats(self) -> None:
        self.log("Stats:")
@@ -904,7 +919,9 @@ def read_plugins_snapshot(manager: BuildManager) -> Optional[Dict[str, str]]:
     return snapshot
 
 
-def read_quickstart_file(options: Options) -> Optional[Dict[str, Tuple[float, int, str]]]:
+def read_quickstart_file(options: Options,
+                         stdout: TextIO,
+                         ) -> Optional[Dict[str, Tuple[float, int, str]]]:
     quickstart = None  # type: Optional[Dict[str, Tuple[float, int, str]]]
     if options.quickstart_file:
         # This is very "best effort". If the file is missing or malformed,
@@ -918,7 +935,7 @@ def read_quickstart_file(options: Options) -> Optional[Dict[str, Tuple[float, in
                 for file, (x, y, z) in raw_quickstart.items():
                     quickstart[file] = (x, y, z)
         except Exception as e:
-            print("Warning: Failed to load quickstart file: {}\n".format(str(e)))
+            print("Warning: Failed to load quickstart file: {}\n".format(str(e)), file=stdout)
     return quickstart
 
 
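For context, the loader above expects options.quickstart_file to contain a JSON object mapping file paths to three-element arrays matching Tuple[float, int, str]. A hedged illustration; only the types are visible in this hunk, so the field meanings (plausibly mtime, size, and a hash) and all values below are made up:

    import json

    # Illustrative quickstart content -- keys and values are invented.
    data = {"example.py": [1558000000.0, 2048, "d41d8cd98f00b204e9800998ecf8427e"]}
    with open("quickstart.json", "w") as f:  # illustrative name; mypy reads
        json.dump(data, f)                   # whatever options.quickstart_file names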
@@ -1769,7 +1786,8 @@ def wrap_context(self, check_blockers: bool = True) -> Iterator[None]:
        except CompileError:
            raise
        except Exception as err:
-            report_internal_error(err, self.path, 0, self.manager.errors, self.options)
+            report_internal_error(err, self.path, 0, self.manager.errors,
+                                  self.options, self.manager.stdout, self.manager.stderr)
        self.manager.errors.set_import_context(save_import_context)
        # TODO: Move this away once we've removed the old semantic analyzer?
        if check_blockers:
@@ -2429,7 +2447,10 @@ def log_configuration(manager: BuildManager) -> None:
 # The driver
 
 
-def dispatch(sources: List[BuildSource], manager: BuildManager) -> Graph:
+def dispatch(sources: List[BuildSource],
+             manager: BuildManager,
+             stdout: TextIO,
+             ) -> Graph:
     log_configuration(manager)
 
     t0 = time.time()
@@ -2454,11 +2475,11 @@ def dispatch(sources: List[BuildSource], manager: BuildManager) -> Graph:
                 fm_cache_size=len(manager.find_module_cache.results),
                 )
     if not graph:
-        print("Nothing to do?!")
+        print("Nothing to do?!", file=stdout)
         return graph
     manager.log("Loaded graph with %d nodes (%.3f sec)" % (len(graph), t1 - t0))
     if manager.options.dump_graph:
-        dump_graph(graph)
+        dump_graph(graph, stdout)
         return graph
 
     # Fine grained dependencies that didn't have an associated module in the build
@@ -2480,7 +2501,7 @@ def dispatch(sources: List[BuildSource], manager: BuildManager) -> Graph:
             manager.log("Error reading fine-grained dependencies cache -- aborting cache load")
             manager.cache_enabled = False
             manager.log("Falling back to full run -- reloading graph...")
-            return dispatch(sources, manager)
+            return dispatch(sources, manager, stdout)
 
     # If we are loading a fine-grained incremental mode cache, we
     # don't want to do a real incremental reprocess of the
@@ -2528,7 +2549,7 @@ def dumps(self) -> str:
                           json.dumps(self.deps))
 
 
-def dump_graph(graph: Graph) -> None:
+def dump_graph(graph: Graph, stdout: TextIO = sys.stdout) -> None:
     """Dump the graph as a JSON string to stdout.
 
     This copies some of the work by process_graph()
@@ -2562,7 +2583,7 @@ def dump_graph(graph: Graph) -> None:
         if (dep_id != node.node_id and
                 (dep_id not in node.deps or pri < node.deps[dep_id])):
             node.deps[dep_id] = pri
-    print("[" + ",\n".join(node.dumps() for node in nodes) + "\n]")
+    print("[" + ",\n".join(node.dumps() for node in nodes) + "\n]", file=stdout)
 
 
 def load_graph(sources: List[BuildSource], manager: BuildManager,
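Since dump_graph() now accepts an output stream (defaulting to sys.stdout), the graph dump can be captured programmatically. A minimal sketch, assuming a Graph obtained from dispatch() or load_graph():

    import io
    import json

    buf = io.StringIO()
    dump_graph(graph, buf)              # graph: Graph, built elsewhere
    nodes = json.loads(buf.getvalue())  # the dump is a JSON array of node dicts
    print("dumped %d nodes" % len(nodes))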