-
Notifications
You must be signed in to change notification settings - Fork 610
Expand file tree
/
Copy path_span_batcher.py
More file actions
126 lines (104 loc) · 4.23 KB
/
_span_batcher.py
File metadata and controls
126 lines (104 loc) · 4.23 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
import threading
from collections import defaultdict
from datetime import datetime, timezone
from typing import TYPE_CHECKING
from sentry_sdk._batcher import Batcher
from sentry_sdk.consts import SPANSTATUS
from sentry_sdk.envelope import Envelope, Item, PayloadRef
from sentry_sdk.utils import format_timestamp, serialize_attribute, safe_repr
if TYPE_CHECKING:
from typing import Any, Callable, Optional
from sentry_sdk.traces import StreamedSpan
from sentry_sdk._types import SerializedAttributeValue
class SpanBatcher(Batcher["StreamedSpan"]):
    """Buffers streamed spans and flushes them to Sentry in envelopes.

    Spans are bucketed by ``trace_id`` because spans from different traces
    cannot be emitted in the same envelope: the envelope carries a single
    shared trace header. Each bucket is therefore sent in its own envelope.
    """

    # TODO[span-first]: size-based flushes
    # TODO[span-first]: adjust flush/drop defaults
    MAX_BEFORE_FLUSH = 1000  # buffered spans that trigger an early flush
    MAX_BEFORE_DROP = 5000  # hard cap; spans beyond this are reported lost
    FLUSH_WAIT_TIME = 5.0
    TYPE = "span"
    CONTENT_TYPE = "application/vnd.sentry.items.span.v2+json"

    def __init__(
        self,
        capture_func: "Callable[[Envelope], None]",
        record_lost_func: "Callable[..., None]",
    ) -> None:
        """Create the batcher.

        :param capture_func: called with each assembled ``Envelope`` to send it.
        :param record_lost_func: called to record client-side data loss
            (e.g. when the buffer overflows).
        """
        # trace_id -> list of buffered spans for that trace. defaultdict so
        # that add() can append without checking for a missing bucket first.
        self._span_buffer: dict[str, list["StreamedSpan"]] = defaultdict(list)
        self._capture_func = capture_func
        self._record_lost_func = record_lost_func
        self._running = True
        # Protects _span_buffer; every read/write of the buffer holds it.
        self._lock = threading.Lock()
        # Set to wake the flusher thread before its periodic timeout.
        self._flush_event: "threading.Event" = threading.Event()
        self._flusher: "Optional[threading.Thread]" = None
        self._flusher_pid: "Optional[int]" = None

    def get_size(self) -> int:
        """Return the total number of buffered spans across all traces.

        The caller is responsible for holding ``self._lock`` around this call.
        """
        return sum(len(buffer) for buffer in self._span_buffer.values())

    def add(self, span: "StreamedSpan") -> None:
        """Queue ``span`` for sending, dropping it if the buffer is full."""
        # _ensure_thread() (provided by the Batcher base — TODO confirm)
        # lazily starts the flusher; if no flusher exists, silently no-op.
        if not self._ensure_thread() or self._flusher is None:
            return None

        with self._lock:
            size = self.get_size()
            if size >= self.MAX_BEFORE_DROP:
                # Buffer is at the hard cap: record the span as lost rather
                # than letting memory grow without bound.
                self._record_lost_func(
                    reason="queue_overflow",
                    data_category="span",
                    quantity=1,
                )
                return None

            self._span_buffer[span.trace_id].append(span)

            if size + 1 >= self.MAX_BEFORE_FLUSH:
                # Wake the flusher early instead of waiting out the
                # periodic FLUSH_WAIT_TIME timeout.
                self._flush_event.set()

    @staticmethod
    def _to_transport_format(item: "StreamedSpan") -> "Any":
        """Serialize a single span into its JSON transport representation."""
        # TODO[span-first]
        res: "dict[str, Any]" = {
            "name": item.name,
        }

        if item._attributes:
            res["attributes"] = {
                k: serialize_attribute(v) for (k, v) in item._attributes.items()
            }

        return res

    def _flush(self) -> None:
        """Drain the buffer, assembling one envelope per trace, and send them."""
        with self._lock:
            if not self._span_buffer:
                return None

            envelopes = []
            # Each bucket already holds spans of exactly one trace, so the
            # trace_id key itself is not needed while assembling envelopes.
            for spans in self._span_buffer.values():
                if not spans:
                    continue

                # TODO[span-first]
                # dsc = spans[0].dynamic_sampling_context()
                dsc = None

                envelope = Envelope(
                    headers={
                        "sent_at": format_timestamp(datetime.now(timezone.utc)),
                        "trace": dsc,
                    }
                )
                envelope.add_item(
                    Item(
                        # Use the class constants so the item type and
                        # content type cannot drift from TYPE/CONTENT_TYPE.
                        type=self.TYPE,
                        content_type=self.CONTENT_TYPE,
                        headers={
                            "item_count": len(spans),
                        },
                        payload=PayloadRef(
                            json={
                                "items": [
                                    self._to_transport_format(span)
                                    for span in spans
                                ]
                            }
                        ),
                    )
                )
                envelopes.append(envelope)

            self._span_buffer.clear()

        # Capture outside the lock so a slow transport cannot block add().
        # NOTE(review): original indentation was lost in extraction — the
        # collect-then-send structure implies capture after lock release;
        # confirm against upstream.
        for envelope in envelopes:
            self._capture_func(envelope)