Skip to content

Commit 06b444a

Browse files
committed
Fix SENTRY_TRACES_BACKGROUND_SAMPLE_RATE not respected
1 parent 4fdb3a7 commit 06b444a

File tree

12 files changed

+47
-41
lines changed

12 files changed

+47
-41
lines changed

src/dstack/_internal/server/app.py

Lines changed: 1 addition & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,6 @@
1414
from fastapi.staticfiles import StaticFiles
1515
from packaging.version import Version
1616
from prometheus_client import Counter, Histogram
17-
from sentry_sdk.types import SamplingContext
1817

1918
from dstack._internal import settings as core_settings
2019
from dstack._internal.cli.utils.common import console
@@ -115,7 +114,7 @@ async def lifespan(app: FastAPI):
115114
release=core_settings.DSTACK_VERSION,
116115
environment=settings.SERVER_ENVIRONMENT,
117116
enable_tracing=True,
118-
traces_sampler=_sentry_traces_sampler,
117+
traces_sampler=sentry_utils.sentry_traces_sampler,
119118
profiles_sample_rate=settings.SENTRY_PROFILES_SAMPLE_RATE,
120119
before_send=sentry_utils.AsyncioCancelledErrorFilterEventProcessor(),
121120
)
@@ -426,18 +425,6 @@ def _is_prometheus_request(request: Request) -> bool:
426425
return request.url.path.startswith("/metrics")
427426

428427

429-
def _sentry_traces_sampler(sampling_context: SamplingContext) -> float:
430-
parent_sampling_decision = sampling_context["parent_sampled"]
431-
if parent_sampling_decision is not None:
432-
return float(parent_sampling_decision)
433-
transaction_context = sampling_context["transaction_context"]
434-
name = transaction_context.get("name")
435-
if name is not None:
436-
if name.startswith("background."):
437-
return settings.SENTRY_TRACES_BACKGROUND_SAMPLE_RATE
438-
return settings.SENTRY_TRACES_SAMPLE_RATE
439-
440-
441428
def _print_dstack_logo():
442429
console.print(
443430
r"""[purple] _ _ _

src/dstack/_internal/server/background/pipeline_tasks/compute_groups.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -120,7 +120,7 @@ def __init__(
120120
queue_check_delay=queue_check_delay,
121121
)
122122

123-
@sentry_utils.instrument_named_task("pipeline_tasks.ComputeGroupFetcher.fetch")
123+
@sentry_utils.instrument_pipeline_task("ComputeGroupFetcher.fetch")
124124
async def fetch(self, limit: int) -> list[PipelineItem]:
125125
compute_group_lock, _ = get_locker(get_db().dialect_name).get_lockset(
126126
ComputeGroupModel.__tablename__
@@ -188,7 +188,7 @@ def __init__(
188188
pipeline_hinter=pipeline_hinter,
189189
)
190190

191-
@sentry_utils.instrument_named_task("pipeline_tasks.ComputeGroupWorker.process")
191+
@sentry_utils.instrument_pipeline_task("ComputeGroupWorker.process")
192192
async def process(self, item: PipelineItem):
193193
async with get_session_ctx() as session:
194194
res = await session.execute(

src/dstack/_internal/server/background/pipeline_tasks/fleets.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -134,7 +134,7 @@ def __init__(
134134
queue_check_delay=queue_check_delay,
135135
)
136136

137-
@sentry_utils.instrument_named_task("pipeline_tasks.FleetFetcher.fetch")
137+
@sentry_utils.instrument_pipeline_task("FleetFetcher.fetch")
138138
async def fetch(self, limit: int) -> list[PipelineItem]:
139139
fleet_lock, _ = get_locker(get_db().dialect_name).get_lockset(FleetModel.__tablename__)
140140
async with fleet_lock:
@@ -203,7 +203,7 @@ def __init__(
203203
pipeline_hinter=pipeline_hinter,
204204
)
205205

206-
@sentry_utils.instrument_named_task("pipeline_tasks.FleetWorker.process")
206+
@sentry_utils.instrument_pipeline_task("FleetWorker.process")
207207
async def process(self, item: PipelineItem):
208208
process_context = await _load_process_context(item)
209209
if process_context is None:

src/dstack/_internal/server/background/pipeline_tasks/gateways.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -129,7 +129,7 @@ def __init__(
129129
queue_check_delay=queue_check_delay,
130130
)
131131

132-
@sentry_utils.instrument_named_task("pipeline_tasks.GatewayFetcher.fetch")
132+
@sentry_utils.instrument_pipeline_task("GatewayFetcher.fetch")
133133
async def fetch(self, limit: int) -> list[GatewayPipelineItem]:
134134
gateway_lock, _ = get_locker(get_db().dialect_name).get_lockset(GatewayModel.__tablename__)
135135
async with gateway_lock:
@@ -207,7 +207,7 @@ def __init__(
207207
pipeline_hinter=pipeline_hinter,
208208
)
209209

210-
@sentry_utils.instrument_named_task("pipeline_tasks.GatewayWorker.process")
210+
@sentry_utils.instrument_pipeline_task("GatewayWorker.process")
211211
async def process(self, item: GatewayPipelineItem):
212212
if item.to_be_deleted:
213213
await _process_to_be_deleted_item(item)

src/dstack/_internal/server/background/pipeline_tasks/instances/__init__.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -152,7 +152,7 @@ def __init__(
152152
queue_check_delay=queue_check_delay,
153153
)
154154

155-
@sentry_utils.instrument_named_task("pipeline_tasks.InstanceFetcher.fetch")
155+
@sentry_utils.instrument_pipeline_task("InstanceFetcher.fetch")
156156
async def fetch(self, limit: int) -> list[InstancePipelineItem]:
157157
instance_lock, _ = get_locker(get_db().dialect_name).get_lockset(
158158
InstanceModel.__tablename__
@@ -267,7 +267,7 @@ def __init__(
267267
pipeline_hinter=pipeline_hinter,
268268
)
269269

270-
@sentry_utils.instrument_named_task("pipeline_tasks.InstanceWorker.process")
270+
@sentry_utils.instrument_pipeline_task("InstanceWorker.process")
271271
async def process(self, item: InstancePipelineItem):
272272
process_context: Optional[_ProcessContext] = None
273273
if item.status == InstanceStatus.PENDING:

src/dstack/_internal/server/background/pipeline_tasks/jobs_running.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -194,7 +194,7 @@ def __init__(
194194
queue_check_delay=queue_check_delay,
195195
)
196196

197-
@sentry_utils.instrument_named_task("pipeline_tasks.JobRunningFetcher.fetch")
197+
@sentry_utils.instrument_pipeline_task("JobRunningFetcher.fetch")
198198
async def fetch(self, limit: int) -> list[JobRunningPipelineItem]:
199199
job_lock, _ = get_locker(get_db().dialect_name).get_lockset(JobModel.__tablename__)
200200
async with job_lock:
@@ -286,7 +286,7 @@ def __init__(
286286
pipeline_hinter=pipeline_hinter,
287287
)
288288

289-
@sentry_utils.instrument_named_task("pipeline_tasks.JobRunningWorker.process")
289+
@sentry_utils.instrument_pipeline_task("JobRunningWorker.process")
290290
async def process(self, item: JobRunningPipelineItem):
291291
context = await _load_process_context(item=item)
292292
if context is None:

src/dstack/_internal/server/background/pipeline_tasks/jobs_submitted.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -227,7 +227,7 @@ def __init__(
227227
queue_check_delay=queue_check_delay,
228228
)
229229

230-
@sentry_utils.instrument_named_task("pipeline_tasks.JobSubmittedFetcher.fetch")
230+
@sentry_utils.instrument_pipeline_task("JobSubmittedFetcher.fetch")
231231
async def fetch(self, limit: int) -> list[JobSubmittedPipelineItem]:
232232
now = get_current_datetime()
233233
if limit <= 0:
@@ -309,7 +309,7 @@ def __init__(
309309
pipeline_hinter=pipeline_hinter,
310310
)
311311

312-
@sentry_utils.instrument_named_task("pipeline_tasks.JobSubmittedWorker.process")
312+
@sentry_utils.instrument_pipeline_task("JobSubmittedWorker.process")
313313
async def process(self, item: JobSubmittedPipelineItem):
314314
context = await _load_process_context(item=item)
315315
if context is None:

src/dstack/_internal/server/background/pipeline_tasks/jobs_terminating.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -160,7 +160,7 @@ def __init__(
160160
queue_check_delay=queue_check_delay,
161161
)
162162

163-
@sentry_utils.instrument_named_task("pipeline_tasks.JobTerminatingFetcher.fetch")
163+
@sentry_utils.instrument_pipeline_task("JobTerminatingFetcher.fetch")
164164
async def fetch(self, limit: int) -> list[JobTerminatingPipelineItem]:
165165
job_lock, _ = get_locker(get_db().dialect_name).get_lockset(JobModel.__tablename__)
166166
async with job_lock:
@@ -243,7 +243,7 @@ def __init__(
243243
pipeline_hinter=pipeline_hinter,
244244
)
245245

246-
@sentry_utils.instrument_named_task("pipeline_tasks.JobTerminatingWorker.process")
246+
@sentry_utils.instrument_pipeline_task("JobTerminatingWorker.process")
247247
async def process(self, item: JobTerminatingPipelineItem):
248248
async with get_session_ctx() as session:
249249
job_model = await _refetch_locked_job(session=session, item=item)

src/dstack/_internal/server/background/pipeline_tasks/placement_groups.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -117,7 +117,7 @@ def __init__(
117117
queue_check_delay=queue_check_delay,
118118
)
119119

120-
@sentry_utils.instrument_named_task("pipeline_tasks.PlacementGroupFetcher.fetch")
120+
@sentry_utils.instrument_pipeline_task("PlacementGroupFetcher.fetch")
121121
async def fetch(self, limit: int) -> list[PipelineItem]:
122122
placement_group_lock, _ = get_locker(get_db().dialect_name).get_lockset(
123123
PlacementGroupModel.__tablename__
@@ -187,7 +187,7 @@ def __init__(
187187
pipeline_hinter=pipeline_hinter,
188188
)
189189

190-
@sentry_utils.instrument_named_task("pipeline_tasks.PlacementGroupWorker.process")
190+
@sentry_utils.instrument_pipeline_task("PlacementGroupWorker.process")
191191
async def process(self, item: PipelineItem):
192192
async with get_session_ctx() as session:
193193
res = await session.execute(

src/dstack/_internal/server/background/pipeline_tasks/runs/__init__.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -129,7 +129,7 @@ def __init__(
129129
queue_check_delay=queue_check_delay,
130130
)
131131

132-
@sentry_utils.instrument_named_task("pipeline_tasks.RunFetcher.fetch")
132+
@sentry_utils.instrument_pipeline_task("RunFetcher.fetch")
133133
async def fetch(self, limit: int) -> list[RunPipelineItem]:
134134
if limit <= 0:
135135
return []
@@ -243,7 +243,7 @@ def __init__(
243243
pipeline_hinter=pipeline_hinter,
244244
)
245245

246-
@sentry_utils.instrument_named_task("pipeline_tasks.RunWorker.process")
246+
@sentry_utils.instrument_pipeline_task("RunWorker.process")
247247
async def process(self, item: RunPipelineItem):
248248
# Currently `dstack` supports runs with
249249
# * one multi-node replica (multi-node tasks)

0 commit comments

Comments (0)