1- """
2- Log buffer - Ring buffer with timestamps and file persistence.
3-
4- NOTE: Log file writes are performed asynchronously on a background thread.
5- Qt delivers process output on the main (UI) thread; synchronous disk I/O here
6- can cause UI stalls/freeze during heavy output or when stopping many processes.
7- """
1+ """Log buffer - Ring buffer with async file persistence (non-blocking UI)."""
82from __future__ import annotations
9-
3+ import atexit , queue , threading
104from collections import deque
115from datetime import datetime
126from pathlib import Path
13- import queue
14- import threading
157from typing import Optional
16-
178from config import LOGS_DIR , MAX_LOG_LINES , HISTORY_CHUNK , normalize , strip_ansi
189
10+ class _AsyncWriter :
11+ """Background thread for non-blocking log file writes."""
12+ _instance : Optional ["_AsyncWriter" ] = None
1913
20- class _AsyncFileWriter :
21- """Non-blocking log file appender.
22-
23- UI thread enqueues writes; a worker thread does disk I/O.
24- """
25-
26- def __init__ (self , max_queue : int = 10000 ):
27- self ._q : "queue.Queue[tuple[Path, str]]" = queue .Queue (maxsize = max_queue )
14+ def __init__ (self ):
15+ self ._q : queue .Queue [tuple [Path , str ]] = queue .Queue (maxsize = 10000 )
2816 self ._stop = threading .Event ()
29- self ._dropped = 0
30- self ._thread = threading .Thread (target = self ._run , name = "log-writer" , daemon = True )
17+ self ._thread = threading .Thread (target = self ._run , daemon = True )
3118 self ._thread .start ()
19+ atexit .register (self .close )
20+
21+ @classmethod
22+ def get (cls ) -> "_AsyncWriter" :
23+ if cls ._instance is None : cls ._instance = cls ()
24+ return cls ._instance
3225
3326 def write (self , path : Path , text : str ) -> None :
34- """Enqueue text to append to a file. Never blocks the caller."""
35- if not text :
36- return
37- try :
38- self ._q .put_nowait ((path , text ))
39- except queue .Full :
40- # Never block the UI thread. Drop the chunk and count it.
41- self ._dropped += 1
27+ if text :
28+ try : self ._q .put_nowait ((path , text ))
29+ except queue .Full : pass # Drop rather than block UI
4230
4331 def _run (self ) -> None :
4432 while not self ._stop .is_set () or not self ._q .empty ():
45- try :
46- path , text = self ._q .get (timeout = 0.2 )
47- except queue .Empty :
48- continue
49-
33+ try : path , text = self ._q .get (timeout = 0.2 )
34+ except queue .Empty : continue
5035 try :
5136 path .parent .mkdir (exist_ok = True )
52- with open (path , "a" , encoding = "utf-8" , newline = "\n " ) as f :
53- f .write (text )
54-
55- # If we dropped anything, record it once we successfully write again.
56- if self ._dropped :
57- dropped = self ._dropped
58- self ._dropped = 0
59- try :
60- with open (path , "a" , encoding = "utf-8" , newline = "\n " ) as f :
61- f .write (f"[log-writer] dropped { dropped } chunks due to backpressure\n " )
62- except Exception :
63- pass
64- except Exception :
65- # Never crash the writer thread.
66- pass
37+ with open (path , "a" , encoding = "utf-8" , newline = "\n " ) as f : f .write (text )
38+ except : pass
6739 finally :
68- try :
69- self ._q .task_done ()
70- except Exception :
71- pass
40+ try : self ._q .task_done ()
41+ except : pass
7242
73- def close (self , timeout_s : float = 2.0 ) -> None :
74- """Request stop and wait briefly for draining."""
43+ def close (self ) -> None :
7544 self ._stop .set ()
76- try :
77- self ._thread .join (timeout = timeout_s )
78- except Exception :
79- pass
80-
81-
82- _GLOBAL_WRITER : Optional [_AsyncFileWriter ] = None
83-
84-
85- def _writer () -> _AsyncFileWriter :
86- global _GLOBAL_WRITER
87- if _GLOBAL_WRITER is None :
88- _GLOBAL_WRITER = _AsyncFileWriter ()
89- return _GLOBAL_WRITER
90-
91-
92- def shutdown_log_writer () -> None :
93- """Flush pending async log writes and stop the writer thread."""
94- global _GLOBAL_WRITER
95- if _GLOBAL_WRITER is None :
96- return
97- _GLOBAL_WRITER .close ()
98- _GLOBAL_WRITER = None
99-
45+ try : self ._thread .join (timeout = 2.0 )
46+ except : pass
10047
10148class LogBuffer :
10249 __slots__ = ("name" , "lines" , "file" , "_cache" , "_mtime" , "_partial" )
@@ -111,92 +58,57 @@ def __init__(self, name: str):
11158 self ._partial : str = ""
11259
11360 def append (self , text : str ) -> tuple [str , str ]:
114- if not text :
115- return "" , ""
116-
117- text = normalize (text )
118- data = self ._partial + text
119- self ._partial = ""
120-
121- # No newline yet: keep buffering partial line
122- if "\n " not in data :
123- self ._partial = data
124- return "" , ""
125-
61+ if not text : return "" , ""
62+ data = normalize (self ._partial + text ); self ._partial = ""
63+ if "\n " not in data : self ._partial = data ; return "" , ""
64+
12665 ts = datetime .now ().strftime ("%Y-%m-%d %H:%M:%S" )
12766 parts = data .splitlines (keepends = True )
128-
129- # Keep trailing partial (no newline)
130- if parts and not parts [- 1 ].endswith ("\n " ):
131- self ._partial = parts .pop ()
132-
67+ if parts and not parts [- 1 ].endswith ("\n " ): self ._partial = parts .pop ()
68+
13369 display , file_out = [], []
13470 for part in parts :
13571 content = part .rstrip ("\n " )
13672 disp = f"[\x1b [94m{ ts } \x1b [0m] { content } \n "
137- self .lines .append (disp )
138- display .append (disp )
73+ self .lines .append (disp ); display .append (disp )
13974 file_out .append (f"[{ ts } ] { strip_ansi (content )} \n " )
140-
75+
14176 self ._cache = None
142-
143- # Persist asynchronously to keep UI thread responsive.
144- try :
145- _writer ().write (self .file , "" .join (file_out ))
146- except Exception :
147- pass
148-
77+ _AsyncWriter .get ().write (self .file , "" .join (file_out ))
14978 return "" .join (display ), "" .join (file_out )
15079
151- def get_recent (self ) -> str :
152- return "" .join (self .lines )
80+ def get_recent (self ) -> str : return "" .join (self .lines )
15381
15482 def _read_file (self ) -> list [str ]:
155- if not self .file .exists ():
156- return [l .rstrip ("\n " ) for l in self .lines ]
83+ if not self .file .exists (): return [l .rstrip ("\n " ) for l in self .lines ]
15784 try :
15885 mtime = self .file .stat ().st_mtime
159- if self ._cache and mtime == self ._mtime :
160- return self ._cache
161- self ._cache = normalize (
162- self .file .read_text (encoding = "utf-8" , errors = "replace" )
163- ).splitlines ()
86+ if self ._cache and mtime == self ._mtime : return self ._cache
87+ self ._cache = normalize (self .file .read_text (encoding = "utf-8" , errors = "replace" )).splitlines ()
16488 self ._mtime = mtime
16589 return self ._cache
166- except Exception :
167- return [l .rstrip ("\n " ) for l in self .lines ]
90+ except : return [l .rstrip ("\n " ) for l in self .lines ]
16891
169- def line_count (self ) -> int :
170- return len (self ._read_file ())
92+ def line_count (self ) -> int : return len (self ._read_file ())
17193
17294 def _colorize (self , line : str ) -> str :
173- if line .startswith ("[" ):
174- b = line .find ("]" )
175- if b > 0 :
176- return f"[\x1b [94m{ line [1 :b ]} \x1b [0m]{ line [b + 1 :]} "
95+ if line .startswith ("[" ) and (b := line .find ("]" )) > 0 :
96+ return f"[\x1b [94m{ line [1 :b ]} \x1b [0m]{ line [b + 1 :]} "
17797 return line
17898
17999 def search (self , pattern : str ) -> tuple [str , int ]:
180100 p = pattern .lower ()
181101 matches = [l for l in self ._read_file () if p in l .lower ()]
182- if not matches :
183- return "" , 0
184- return "" .join (f"{ self ._colorize (l )} \n " for l in matches ), len (matches )
102+ return ("" .join (f"{ self ._colorize (l )} \n " for l in matches ), len (matches )) if matches else ("" , 0 )
185103
186104 def load_chunk (self , end : int , size : int = HISTORY_CHUNK ) -> tuple [str , int ]:
187105 lines = self ._read_file ()
188- if not lines or end <= 0 :
189- return "" , 0
106+ if not lines or end <= 0 : return "" , 0
190107 start = max (0 , end - size )
191108 chunk = lines [start :end ]
192- if not chunk :
193- return "" , 0
194- return "" .join (f"{ self ._colorize (l )} \n " for l in chunk ), start
109+ return ("" .join (f"{ self ._colorize (l )} \n " for l in chunk ), start ) if chunk else ("" , 0 )
195110
196111 def clear (self ) -> None :
197- self .lines .clear ()
198- self ._cache = None
199- try :
200- self .file .write_text ("" , encoding = "utf-8" )
201- except Exception :
202- pass
112+ self .lines .clear (); self ._cache = None
113+ try : self .file .write_text ("" , encoding = "utf-8" )
114+ except : pass