Skip to content
Merged
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
38 changes: 30 additions & 8 deletions sentry_sdk/_batcher.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@
self._record_lost_func = record_lost_func
self._running = True
self._lock = threading.Lock()
self._active: "threading.local" = threading.local()

self._flush_event: "threading.Event" = threading.Event()

Expand Down Expand Up @@ -70,23 +71,40 @@
return True

def _flush_loop(self) -> None:
# Mark the flush-loop thread as active for its entire lifetime so
# that any re-entrant add() triggered by GC warnings during wait(),
# flush(), or Event operations is silently dropped instead of
# deadlocking on internal locks.
self._active.flag = True
while self._running:
self._flush_event.wait(self.FLUSH_WAIT_TIME + random.random())
self._flush_event.clear()
self._flush()

def add(self, item: "T") -> None:
    """Queue *item* for batched sending; may trigger an early flush.

    Drops the item (recording it as lost) once the buffer reaches
    MAX_BEFORE_DROP, and wakes the flush thread once it reaches
    MAX_BEFORE_FLUSH.
    """
    # Bail out if the current thread is already executing batcher code.
    # This prevents deadlocks when code running inside the batcher (e.g.
    # _add_to_envelope during flush, or _flush_event.wait/set) triggers
    # a GC-emitted warning that routes back through the logging
    # integration into add().
    if getattr(self._active, "flag", False):
        return None

    self._active.flag = True
    try:
        if not self._ensure_thread() or self._flusher is None:
            return None

        with self._lock:
            if len(self._buffer) >= self.MAX_BEFORE_DROP:
                # Buffer is full: record the item as lost rather than grow
                # without bound.
                self._record_lost(item)
                return None

            self._buffer.append(item)
            if len(self._buffer) >= self.MAX_BEFORE_FLUSH:
                self._flush_event.set()
    finally:
        # Always clear the re-entry marker so later calls on this thread
        # are not silently dropped.
        self._active.flag = False

def kill(self) -> None:
if self._flusher is None:
Expand All @@ -96,8 +114,12 @@
self._flush_event.set()
self._flusher = None

def flush(self) -> None:
    """Synchronously flush all buffered items.

    Guarded against re-entrant calls on the same thread: if batcher code
    is already running here (e.g. a GC-emitted warning routed back into
    the batcher while _flush() holds the non-reentrant lock), calling
    _flush() again would deadlock, and unconditionally resetting
    self._active.flag would clobber the guard set by an in-progress
    add() on this thread. Bail out early in that case.
    """
    if getattr(self._active, "flag", False):
        return None

    self._active.flag = True
    try:
        self._flush()
    finally:
        self._active.flag = False

Check warning on line 122 in sentry_sdk/_batcher.py

View check run for this annotation

@sentry/warden / warden: code-review

Missing re-entry check in flush() method could still cause deadlock

The `flush()` method sets `self._active.flag = True` but does not check if the flag is already True before proceeding, unlike the `add()` method. If code running during the locked section of `_flush()` (lines 146-151 where `self._lock` is held) triggers a re-entrant call to `flush()` on the same thread, it would attempt to acquire the non-reentrant `threading.Lock()` again, causing a deadlock. The fix adds protection for `add()` but leaves `flush()` vulnerable to the same class of re-entrant deadlock.

Check warning on line 122 in sentry_sdk/_batcher.py

View check run for this annotation

@sentry/warden / warden: find-bugs

Re-entry guard flag can be clobbered when flush() is called during add() execution

The `flush()` method unconditionally sets `_active.flag = True` and then resets it to `False` in its finally block. If `flush()` is called while `add()` is already executing on the same thread (e.g., via a callback triggered during `_ensure_thread()` or buffer operations), the `finally` block in `flush()` will reset the flag to `False` while `add()` is still running. This breaks the re-entry guard for the remainder of `add()`'s execution, allowing a subsequent re-entrant call to proceed and potentially cause the deadlock this PR aims to prevent.
Comment thread
sentry-warden[bot] marked this conversation as resolved.
Outdated
Comment thread
sentry-warden[bot] marked this conversation as resolved.
Outdated
Comment thread
cursor[bot] marked this conversation as resolved.
Outdated

def _add_to_envelope(self, envelope: "Envelope") -> None:
envelope.add_item(
Expand Down
Loading