11import io
22import logging
3+ import os
34import os .path
45import re
56import sys
910 add_verbosity_cli ,
1011 add_traceback_cli ,
1112 add_sepval_cli ,
13+ add_progress_cli ,
1214 add_files_cli ,
1315 add_commands_cli ,
1416 process_args_by_key ,
1719 filter_filenames ,
1820 iter_marks ,
1921)
20- from c_parser .info import KIND , is_type_decl
22+ from c_parser .info import KIND
23+ from c_parser .match import is_type_decl
24+ from .match import filter_forward
2125from . import (
2226 analyze as _analyze ,
23- check_all as _check_all ,
2427 datafiles as _datafiles ,
28+ check_all as _check_all ,
2529)
2630
2731
4448TABLE_SECTIONS = {
4549 'types' : (
4650 ['kind' , 'name' , 'data' , 'file' ],
47- is_type_decl ,
51+ KIND . is_type_decl ,
4852 (lambda v : (v .kind .value , v .filename or '' , v .name )),
4953 ),
5054 'typedefs' : 'types' ,
@@ -167,9 +171,7 @@ def handle_failure(failure, data):
167171 print (f'{ data .filename } :{ name } - { failure } ' )
168172 elif fmt == 'summary' :
169173 def handle_failure (failure , data ):
170- parent = data .parent or ''
171- funcname = parent if isinstance (parent , str ) else parent .name
172- print (f'{ data .filename :35} \t { funcname or "-" :35} \t { data .name :40} \t { failure } ' )
174+ print (_fmt_one_summary (data , failure ))
173175 elif fmt == 'full' :
174176 div = ''
175177 def handle_failure (failure , data ):
@@ -230,6 +232,15 @@ def section(name):
230232 yield f'grand total: { total } '
231233
232234
235+ def _fmt_one_summary (item , extra = None ):
236+ parent = item .parent or ''
237+ funcname = parent if isinstance (parent , str ) else parent .name
238+ if extra :
239+ return f'{ item .filename :35} \t { funcname or "-" :35} \t { item .name :40} \t { extra } '
240+ else :
241+ return f'{ item .filename :35} \t { funcname or "-" :35} \t { item .name } '
242+
243+
233244def fmt_full (analysis ):
234245 # XXX Support sorting.
235246 items = sorted (analysis , key = lambda v : v .key )
@@ -272,10 +283,12 @@ def process_checks(args):
272283 args .checks = [check ]
273284 else :
274285 process_checks = add_checks_cli (parser , checks = checks )
286+ process_progress = add_progress_cli (parser )
275287 process_output = add_output_cli (parser , default = None )
276288 process_files = add_files_cli (parser , ** kwargs )
277289 return [
278290 process_checks ,
291+ process_progress ,
279292 process_output ,
280293 process_files ,
281294 ]
@@ -288,6 +301,7 @@ def cmd_check(filenames, *,
288301 relroot = None ,
289302 failfast = False ,
290303 iter_filenames = None ,
304+ track_progress = None ,
291305 verbosity = VERBOSITY ,
292306 _analyze = _analyze ,
293307 _CHECKS = CHECKS ,
@@ -304,36 +318,53 @@ def cmd_check(filenames, *,
304318 ) = _get_check_handlers (fmt , printer , verbosity )
305319
306320 filenames = filter_filenames (filenames , iter_filenames )
321+ if track_progress :
322+ filenames = track_progress (filenames )
307323
308- logger .info ('analyzing...' )
324+ logger .info ('analyzing files ...' )
309325 analyzed = _analyze (filenames , ** kwargs )
310326 if relroot :
311327 analyzed .fix_filenames (relroot )
328+ decls = filter_forward (analyzed , markpublic = True )
312329
313- logger .info ('checking...' )
314- numfailed = 0
315- for data , failure in _check_all (analyzed , checks , failfast = failfast ):
330+ logger .info ('checking analysis results ...' )
331+ failed = []
332+ for data , failure in _check_all (decls , checks , failfast = failfast ):
316333 if data is None :
317334 printer .info ('stopping after one failure' )
318335 break
319- if div is not None and numfailed > 0 :
336+ if div is not None and len ( failed ) > 0 :
320337 printer .info (div )
321- numfailed += 1
338+ failed . append ( data )
322339 handle_failure (failure , data )
323340 handle_after ()
324341
325342 printer .info ('-------------------------' )
326- logger .info (f'total failures: { numfailed } ' )
343+ logger .info (f'total failures: { len ( failed ) } ' )
327344 logger .info ('done checking' )
328345
329- if numfailed > 0 :
330- sys .exit (numfailed )
346+ if fmt == 'summary' :
347+ print ('Categorized by storage:' )
348+ print ()
349+ from .match import group_by_storage
350+ grouped = group_by_storage (failed , ignore_non_match = False )
351+ for group , decls in grouped .items ():
352+ print ()
353+ print (group )
354+ for decl in decls :
355+ print (' ' , _fmt_one_summary (decl ))
356+ print (f'subtotal: { len (decls )} ' )
357+
358+ if len (failed ) > 0 :
359+ sys .exit (len (failed ))
331360
332361
def _cli_analyze(parser, **kwargs):
    """Add the "analyze" command's CLI options to *parser*.

    Any extra keyword arguments are forwarded to add_files_cli().

    Returns the list of args-processing callables, in the same order
    as the corresponding options were added.
    """
    process_progress = add_progress_cli(parser)
    process_output = add_output_cli(parser)
    process_files = add_files_cli(parser, **kwargs)
    return [
        process_progress,
        process_output,
        process_files,
    ]
@@ -343,6 +374,7 @@ def _cli_analyze(parser, **kwargs):
343374def cmd_analyze (filenames , * ,
344375 fmt = None ,
345376 iter_filenames = None ,
377+ track_progress = None ,
346378 verbosity = None ,
347379 _analyze = _analyze ,
348380 formats = FORMATS ,
@@ -356,56 +388,54 @@ def cmd_analyze(filenames, *,
356388 raise ValueError (f'unsupported fmt { fmt !r} ' )
357389
358390 filenames = filter_filenames (filenames , iter_filenames )
359- if verbosity == 2 :
360- def iter_filenames (filenames = filenames ):
361- marks = iter_marks ()
362- for filename in filenames :
363- print (next (marks ), end = '' )
364- yield filename
365- filenames = iter_filenames ()
366- elif verbosity > 2 :
367- def iter_filenames (filenames = filenames ):
368- for filename in filenames :
369- print (f'<{ filename } >' )
370- yield filename
371- filenames = iter_filenames ()
372-
373- logger .info ('analyzing...' )
391+ if track_progress :
392+ filenames = track_progress (filenames )
393+
394+ logger .info ('analyzing files...' )
374395 analyzed = _analyze (filenames , ** kwargs )
396+ decls = filter_forward (analyzed , markpublic = True )
375397
376- for line in do_fmt (analyzed ):
398+ for line in do_fmt (decls ):
377399 print (line )
378400
379401
def _cli_data(parser, filenames=None, known=None):
    """Add the "data" command's sub-commands (show/dump/check) to *parser*.

    filenames - when None, sub-commands that read files add their own
                positional "filenames" argument.
    known - when None, each sub-command adds its own --known option
            (required except for "dump").

    Returns a callable that finishes processing the parsed args.
    """
    ArgumentParser = type(parser)
    common = ArgumentParser(add_help=False)
    # These flags will get processed by the top-level parse_args().
    add_verbosity_cli(common)
    add_traceback_cli(common)

    subs = parser.add_subparsers(dest='datacmd')

    sub = subs.add_parser('show', parents=[common])
    if known is None:
        sub.add_argument('--known', required=True)
    if filenames is None:
        sub.add_argument('filenames', metavar='FILE', nargs='+')

    sub = subs.add_parser('dump', parents=[common])
    if known is None:
        sub.add_argument('--known')
    sub.add_argument('--show', action='store_true')
    process_progress = add_progress_cli(sub)

    sub = subs.add_parser('check', parents=[common])
    if known is None:
        sub.add_argument('--known', required=True)

    def process_args(args):
        # Only the "dump" sub-command registered progress options,
        # so only resolve them in that case.
        if args.datacmd == 'dump':
            process_progress(args)
    return process_args
402431
403432
404433def cmd_data (datacmd , filenames , known = None , * ,
405434 _analyze = _analyze ,
406435 formats = FORMATS ,
407436 extracolumns = None ,
408437 relroot = None ,
438+ track_progress = None ,
409439 ** kwargs
410440 ):
411441 kwargs .pop ('verbosity' , None )
@@ -417,6 +447,8 @@ def cmd_data(datacmd, filenames, known=None, *,
417447 for line in do_fmt (known ):
418448 print (line )
419449 elif datacmd == 'dump' :
450+ if track_progress :
451+ filenames = track_progress (filenames )
420452 analyzed = _analyze (filenames , ** kwargs )
421453 if known is None or usestdout :
422454 outfile = io .StringIO ()
0 commit comments