1- """Log buffer - Ring buffer with timestamps and file persistence."""
1+ """
2+ Log buffer - Ring buffer with timestamps and file persistence.
3+
4+ NOTE: Log file writes are performed asynchronously on a background thread.
5+ Qt delivers process output on the main (UI) thread; synchronous disk I/O here
6+ can cause UI stalls/freeze during heavy output or when stopping many processes.
7+ """
28from __future__ import annotations
9+
310from collections import deque
411from datetime import datetime
512from pathlib import Path
13+ import queue
14+ import threading
615from typing import Optional
16+
717from config import LOGS_DIR , MAX_LOG_LINES , HISTORY_CHUNK , normalize , strip_ansi
818
19+
20+ class _AsyncFileWriter :
21+ """Non-blocking log file appender.
22+
23+ UI thread enqueues writes; a worker thread does disk I/O.
24+ """
25+
26+ def __init__ (self , max_queue : int = 10000 ):
27+ self ._q : "queue.Queue[tuple[Path, str]]" = queue .Queue (maxsize = max_queue )
28+ self ._stop = threading .Event ()
29+ self ._dropped = 0
30+ self ._thread = threading .Thread (target = self ._run , name = "log-writer" , daemon = True )
31+ self ._thread .start ()
32+
33+ def write (self , path : Path , text : str ) -> None :
34+ """Enqueue text to append to a file. Never blocks the caller."""
35+ if not text :
36+ return
37+ try :
38+ self ._q .put_nowait ((path , text ))
39+ except queue .Full :
40+ # Never block the UI thread. Drop the chunk and count it.
41+ self ._dropped += 1
42+
43+ def _run (self ) -> None :
44+ while not self ._stop .is_set () or not self ._q .empty ():
45+ try :
46+ path , text = self ._q .get (timeout = 0.2 )
47+ except queue .Empty :
48+ continue
49+
50+ try :
51+ path .parent .mkdir (exist_ok = True )
52+ with open (path , "a" , encoding = "utf-8" , newline = "\n " ) as f :
53+ f .write (text )
54+
55+ # If we dropped anything, record it once we successfully write again.
56+ if self ._dropped :
57+ dropped = self ._dropped
58+ self ._dropped = 0
59+ try :
60+ with open (path , "a" , encoding = "utf-8" , newline = "\n " ) as f :
61+ f .write (f"[log-writer] dropped { dropped } chunks due to backpressure\n " )
62+ except Exception :
63+ pass
64+ except Exception :
65+ # Never crash the writer thread.
66+ pass
67+ finally :
68+ try :
69+ self ._q .task_done ()
70+ except Exception :
71+ pass
72+
73+ def close (self , timeout_s : float = 2.0 ) -> None :
74+ """Request stop and wait briefly for draining."""
75+ self ._stop .set ()
76+ try :
77+ self ._thread .join (timeout = timeout_s )
78+ except Exception :
79+ pass
80+
81+
# Lazily-created, process-wide writer shared by every LogBuffer instance;
# created on first use by _writer() and torn down by shutdown_log_writer().
_GLOBAL_WRITER: Optional[_AsyncFileWriter] = None
84+
def _writer() -> _AsyncFileWriter:
    """Return the shared async writer, creating it on first use."""
    global _GLOBAL_WRITER
    writer = _GLOBAL_WRITER
    if writer is None:
        writer = _GLOBAL_WRITER = _AsyncFileWriter()
    return writer
90+
91+
def shutdown_log_writer() -> None:
    """Flush pending async log writes and stop the writer thread."""
    global _GLOBAL_WRITER
    writer, _GLOBAL_WRITER = _GLOBAL_WRITER, None
    if writer is not None:
        writer.close()
99+
100+
9101class LogBuffer :
10102 __slots__ = ("name" , "lines" , "file" , "_cache" , "_mtime" , "_partial" )
11103
@@ -19,63 +111,92 @@ def __init__(self, name: str):
19111 self ._partial : str = ""
20112
21113 def append (self , text : str ) -> tuple [str , str ]:
22- if not text : return "" , ""
114+ if not text :
115+ return "" , ""
116+
23117 text = normalize (text )
24118 data = self ._partial + text
25119 self ._partial = ""
26- if "\n " not in data : self ._partial = data ; return "" , ""
27-
120+
121+ # No newline yet: keep buffering partial line
122+ if "\n " not in data :
123+ self ._partial = data
124+ return "" , ""
125+
28126 ts = datetime .now ().strftime ("%Y-%m-%d %H:%M:%S" )
29127 parts = data .splitlines (keepends = True )
30- if parts and not parts [- 1 ].endswith ("\n " ): self ._partial = parts .pop ()
31-
128+
129+ # Keep trailing partial (no newline)
130+ if parts and not parts [- 1 ].endswith ("\n " ):
131+ self ._partial = parts .pop ()
132+
32133 display , file_out = [], []
33134 for part in parts :
34135 content = part .rstrip ("\n " )
35136 disp = f"[\x1b [94m{ ts } \x1b [0m] { content } \n "
36137 self .lines .append (disp )
37138 display .append (disp )
38139 file_out .append (f"[{ ts } ] { strip_ansi (content )} \n " )
39-
140+
40141 self ._cache = None
142+
143+ # Persist asynchronously to keep UI thread responsive.
41144 try :
42- with open (self .file , "a" , encoding = "utf-8" , newline = "\n " ) as f : f .write ("" .join (file_out ))
43- except : pass
145+ _writer ().write (self .file , "" .join (file_out ))
146+ except Exception :
147+ pass
148+
44149 return "" .join (display ), "" .join (file_out )
45150
46- def get_recent (self ) -> str : return "" .join (self .lines )
151+ def get_recent (self ) -> str :
152+ return "" .join (self .lines )
47153
48154 def _read_file (self ) -> list [str ]:
49- if not self .file .exists (): return [l .rstrip ("\n " ) for l in self .lines ]
155+ if not self .file .exists ():
156+ return [l .rstrip ("\n " ) for l in self .lines ]
50157 try :
51158 mtime = self .file .stat ().st_mtime
52- if self ._cache and mtime == self ._mtime : return self ._cache
53- self ._cache = normalize (self .file .read_text (encoding = "utf-8" , errors = "replace" )).splitlines ()
159+ if self ._cache and mtime == self ._mtime :
160+ return self ._cache
161+ self ._cache = normalize (
162+ self .file .read_text (encoding = "utf-8" , errors = "replace" )
163+ ).splitlines ()
54164 self ._mtime = mtime
55165 return self ._cache
56- except : return [l .rstrip ("\n " ) for l in self .lines ]
166+ except Exception :
167+ return [l .rstrip ("\n " ) for l in self .lines ]
57168
58- def line_count (self ) -> int : return len (self ._read_file ())
169+ def line_count (self ) -> int :
170+ return len (self ._read_file ())
59171
60172 def _colorize (self , line : str ) -> str :
61173 if line .startswith ("[" ):
62174 b = line .find ("]" )
63- if b > 0 : return f"[\x1b [94m{ line [1 :b ]} \x1b [0m]{ line [b + 1 :]} "
175+ if b > 0 :
176+ return f"[\x1b [94m{ line [1 :b ]} \x1b [0m]{ line [b + 1 :]} "
64177 return line
65178
66179 def search (self , pattern : str ) -> tuple [str , int ]:
67180 p = pattern .lower ()
68181 matches = [l for l in self ._read_file () if p in l .lower ()]
69- return ("" .join (f"{ self ._colorize (l )} \n " for l in matches ), len (matches )) if matches else ("" , 0 )
182+ if not matches :
183+ return "" , 0
184+ return "" .join (f"{ self ._colorize (l )} \n " for l in matches ), len (matches )
70185
71186 def load_chunk (self , end : int , size : int = HISTORY_CHUNK ) -> tuple [str , int ]:
72187 lines = self ._read_file ()
73- if not lines or end <= 0 : return "" , 0
188+ if not lines or end <= 0 :
189+ return "" , 0
74190 start = max (0 , end - size )
75191 chunk = lines [start :end ]
76- return ("" .join (f"{ self ._colorize (l )} \n " for l in chunk ), start ) if chunk else ("" , 0 )
192+ if not chunk :
193+ return "" , 0
194+ return "" .join (f"{ self ._colorize (l )} \n " for l in chunk ), start
77195
78196 def clear (self ) -> None :
79- self .lines .clear (); self ._cache = None
80- try : self .file .write_text ("" , encoding = "utf-8" )
81- except : pass
197+ self .lines .clear ()
198+ self ._cache = None
199+ try :
200+ self .file .write_text ("" , encoding = "utf-8" )
201+ except Exception :
202+ pass
0 commit comments