@@ -122,14 +122,14 @@ def process_file(
122122 """
123123
124124 try :
125- # Check file size
125+ # Single os.stat() for both size check and cache signature
126126 try :
127- st_size = os .path . getsize (filepath )
128- if st_size > MAX_FILE_SIZE :
127+ st = os .stat (filepath )
128+ if st . st_size > MAX_FILE_SIZE :
129129 return ProcessingResult (
130130 filepath = filepath ,
131131 success = False ,
132- error = f"File too large: { st_size } bytes (max { MAX_FILE_SIZE } )" ,
132+ error = f"File too large: { st . st_size } bytes (max { MAX_FILE_SIZE } )" ,
133133 error_kind = "file_too_large" ,
134134 )
135135 except OSError as e :
@@ -140,6 +140,8 @@ def process_file(
140140 error_kind = "stat_error" ,
141141 )
142142
143+ stat : FileStat = {"mtime_ns" : st .st_mtime_ns , "size" : st .st_size }
144+
143145 try :
144146 source = Path (filepath ).read_text ("utf-8" )
145147 except UnicodeDecodeError as e :
@@ -157,7 +159,6 @@ def process_file(
157159 error_kind = "source_read_error" ,
158160 )
159161
160- stat = file_stat_signature (filepath )
161162 module_name = module_name_from_path (root , filepath )
162163
163164 units , blocks , segments = extract_units_from_source (
@@ -355,68 +356,44 @@ def _safe_future_result(
355356 return None , str (e )
356357
357358 # Discovery phase
358- try :
359- if args .quiet :
360- for fp in iter_py_files (str (root_path )):
361- files_found += 1
362- stat , cached , warn = _get_cached_entry (fp )
363- if warn :
364- console .print (warn )
365- files_skipped += 1
366- continue
367- if cached and cached .get ("stat" ) == stat :
368- cache_hits += 1
369- all_units .extend (
370- cast (
371- list [GroupItem ],
372- cast (object , cached .get ("units" , [])),
373- )
359+ def _discover_files () -> None :
360+ nonlocal files_found , cache_hits , files_skipped
361+ for fp in iter_py_files (str (root_path )):
362+ files_found += 1
363+ stat , cached , warn = _get_cached_entry (fp )
364+ if warn :
365+ console .print (warn )
366+ files_skipped += 1
367+ continue
368+ if cached and cached .get ("stat" ) == stat :
369+ cache_hits += 1
370+ all_units .extend (
371+ cast (
372+ list [GroupItem ],
373+ cast (object , cached .get ("units" , [])),
374374 )
375- all_blocks . extend (
376- cast (
377- list [ GroupItem ],
378- cast ( object , cached . get ( "blocks" , [])) ,
379- )
375+ )
376+ all_blocks . extend (
377+ cast (
378+ list [ GroupItem ] ,
379+ cast ( object , cached . get ( "blocks" , [])),
380380 )
381- all_segments . extend (
382- cast (
383- list [ GroupItem ],
384- cast ( object , cached . get ( "segments" , [])) ,
385- )
381+ )
382+ all_segments . extend (
383+ cast (
384+ list [ GroupItem ] ,
385+ cast ( object , cached . get ( "segments" , [])),
386386 )
387- else :
388- files_to_process .append (fp )
387+ )
388+ else :
389+ files_to_process .append (fp )
390+
# Run discovery once; show a spinner only when output is not suppressed.
try:
    if not args.quiet:
        with console.status(ui.STATUS_DISCOVERING, spinner="dots"):
            _discover_files()
    else:
        _discover_files()
except OSError as e:
    # An unreadable tree is a contract violation: report and abort.
    console.print(ui.fmt_contract_error(ui.ERR_SCAN_FAILED.format(error=e)))
    sys.exit(ExitCode.CONTRACT_ERROR)
0 commit comments