-
Notifications
You must be signed in to change notification settings - Fork 18
Expand file tree
/
Copy pathvalgrind.py
More file actions
201 lines (172 loc) · 6.89 KB
/
valgrind.py
File metadata and controls
201 lines (172 loc) · 6.89 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
from __future__ import annotations

import os
import warnings
from contextlib import contextmanager
from typing import TYPE_CHECKING

from pytest_codspeed import __semver_version__
from pytest_codspeed.instruments import Instrument
from pytest_codspeed.instruments.hooks import InstrumentHooks
from pytest_codspeed.utils import SUPPORTS_PERF_TRAMPOLINE

if TYPE_CHECKING:
    from collections.abc import Awaitable, Iterator
    from typing import Any, Callable

    from pytest import Session

    from pytest_codspeed.config import PedanticOptions
    from pytest_codspeed.instruments import P, T
    from pytest_codspeed.plugin import BenchmarkMarkerOptions, CodSpeedConfig
class ValgrindInstrument(Instrument):
    """Instrument that runs benchmarks under Valgrind's callgrind CPU simulation.

    When the CodSpeed instrument hooks cannot be loaded (e.g. when running
    outside the CodSpeed environment), benchmarks are still executed but no
    measurement is taken (`should_measure` is False).
    """

    instrument = "valgrind"
    # None when the hooks library failed to load outside the CodSpeed env.
    instrument_hooks: InstrumentHooks | None

    def __init__(self, config: CodSpeedConfig) -> None:
        self.benchmark_count = 0
        try:
            self.instrument_hooks = InstrumentHooks()
            self.instrument_hooks.set_integration("pytest-codspeed", __semver_version__)
        except RuntimeError as e:
            # Only fail hard when we are expected to run inside the CodSpeed
            # environment; otherwise degrade to a measurement-less dry run.
            if os.environ.get("CODSPEED_ENV") is not None:
                raise Exception(
                    "Failed to initialize CPU simulation instrument hooks"
                ) from e
            self.instrument_hooks = None
        self.should_measure = self.instrument_hooks is not None

    def get_instrument_config_str_and_warns(self) -> tuple[str, list[str]]:
        """Return a one-line config summary and a list of user-facing warnings."""
        config = (
            f"mode: simulation, "
            f"callgraph: {'enabled' if SUPPORTS_PERF_TRAMPOLINE else 'not supported'}"
        )
        # Named `warns` (not `warnings`) to avoid shadowing the stdlib
        # `warnings` module used elsewhere in this class.
        warns: list[str] = []
        if not self.should_measure:
            warns.append(
                "\033[1m"
                "NOTICE: codspeed is enabled, but no performance measurement"
                " will be made since it's running in an unknown environment."
                "\033[0m"
            )
        return config, warns

    @contextmanager
    def _measure_context(self, uri: str) -> Iterator[None]:
        """Wrap a single benchmark run with callgrind instrumentation.

        Always counts the benchmark, even when measurement is disabled.
        """
        self.benchmark_count += 1
        if not self.instrument_hooks:
            yield
            return
        # Manually call the library function to avoid an extra stack frame. Also
        # call the callgrind markers directly to avoid extra overhead.
        self.instrument_hooks.lib.callgrind_start_instrumentation()
        try:
            yield
        finally:
            # Ensure instrumentation is stopped even if the test failed
            self.instrument_hooks.lib.callgrind_stop_instrumentation()
            self.instrument_hooks.set_executed_benchmark(uri)

    def measure(
        self,
        marker_options: BenchmarkMarkerOptions,
        name: str,
        uri: str,
        fn: Callable[P, T],
        *args: P.args,
        **kwargs: P.kwargs,
    ) -> T:
        """Run `fn(*args, **kwargs)` once under instrumentation and return its result."""

        def __codspeed_root_frame__() -> T:
            return fn(*args, **kwargs)

        if SUPPORTS_PERF_TRAMPOLINE:
            # Warmup CPython performance map cache
            __codspeed_root_frame__()

        with self._measure_context(uri):
            return __codspeed_root_frame__()

    async def measure_async(
        self,
        marker_options: BenchmarkMarkerOptions,
        name: str,
        uri: str,
        fn: Callable[P, Awaitable[T]],
        *args: P.args,
        **kwargs: P.kwargs,
    ) -> T:
        """Async counterpart of `measure`: await `fn` once under instrumentation."""

        async def __codspeed_root_frame__() -> T:
            return await fn(*args, **kwargs)

        if SUPPORTS_PERF_TRAMPOLINE:
            # Warmup CPython performance map cache
            await __codspeed_root_frame__()

        with self._measure_context(uri):
            return await __codspeed_root_frame__()

    @contextmanager
    def _measure_pedantic_context(
        self,
        pedantic_options: PedanticOptions[T],
        uri: str,
    ) -> Iterator[tuple[tuple, dict[str, Any]]]:
        """Set up, instrument, and tear down a single pedantic benchmark round.

        Yields the `(args, kwargs)` produced by the pedantic setup so that the
        caller invokes the target with the arguments that match this setup
        call. (Previously the caller reused stale arguments from the last
        warmup round, which raised NameError when no warmup round ran and
        mismatched the setup/teardown pairing otherwise.)
        """
        if pedantic_options.rounds != 1 or pedantic_options.iterations != 1:
            warnings.warn(
                "Valgrind instrument ignores rounds and iterations settings "
                "in pedantic mode"
            )
        # Count pedantic benchmarks in the report, consistent with
        # `_measure_context`.
        self.benchmark_count += 1
        args, kwargs = pedantic_options.setup_and_get_args_kwargs()
        if not self.instrument_hooks:
            try:
                yield args, kwargs
            finally:
                if pedantic_options.teardown is not None:
                    pedantic_options.teardown(*args, **kwargs)
            return
        self.instrument_hooks.lib.callgrind_start_instrumentation()
        try:
            yield args, kwargs
        finally:
            # Ensure instrumentation is stopped and teardown runs even if the
            # benchmark raised.
            self.instrument_hooks.lib.callgrind_stop_instrumentation()
            self.instrument_hooks.set_executed_benchmark(uri)
            if pedantic_options.teardown is not None:
                pedantic_options.teardown(*args, **kwargs)

    def measure_pedantic(
        self,
        marker_options: BenchmarkMarkerOptions,
        pedantic_options: PedanticOptions[T],
        name: str,
        uri: str,
    ) -> T:
        """Run a pedantic benchmark: warmup rounds, then one measured round."""

        def __codspeed_root_frame__(*args, **kwargs) -> T:
            return pedantic_options.target(*args, **kwargs)

        # Warmup: at least one round when the perf trampoline is available,
        # to warm the CPython performance map cache.
        warmup_rounds = max(
            pedantic_options.warmup_rounds, 1 if SUPPORTS_PERF_TRAMPOLINE else 0
        )
        for _ in range(warmup_rounds):
            args, kwargs = pedantic_options.setup_and_get_args_kwargs()
            __codspeed_root_frame__(*args, **kwargs)
            if pedantic_options.teardown is not None:
                pedantic_options.teardown(*args, **kwargs)

        # Measured round: use the arguments from the context manager's own
        # setup call so setup, target invocation, and teardown stay paired.
        with self._measure_pedantic_context(pedantic_options, uri) as (args, kwargs):
            return __codspeed_root_frame__(*args, **kwargs)

    async def measure_pedantic_async(
        self,
        marker_options: BenchmarkMarkerOptions,
        pedantic_options: PedanticOptions[T],
        name: str,
        uri: str,
    ) -> T:
        """Async counterpart of `measure_pedantic`."""

        async def __codspeed_root_frame__(*args, **kwargs) -> T:
            return await pedantic_options.target(*args, **kwargs)

        # Warmup: at least one round when the perf trampoline is available,
        # to warm the CPython performance map cache.
        warmup_rounds = max(
            pedantic_options.warmup_rounds, 1 if SUPPORTS_PERF_TRAMPOLINE else 0
        )
        for _ in range(warmup_rounds):
            args, kwargs = pedantic_options.setup_and_get_args_kwargs()
            await __codspeed_root_frame__(*args, **kwargs)
            if pedantic_options.teardown is not None:
                pedantic_options.teardown(*args, **kwargs)

        # Measured round: use the arguments from the context manager's own
        # setup call so setup, target invocation, and teardown stay paired.
        with self._measure_pedantic_context(pedantic_options, uri) as (args, kwargs):
            return await __codspeed_root_frame__(*args, **kwargs)

    def report(self, session: Session) -> None:
        """Print the number of benchmarks run to pytest's terminal reporter."""
        reporter = session.config.pluginmanager.get_plugin("terminalreporter")
        assert reporter is not None, "terminalreporter not found"
        count_suffix = "benchmarked" if self.should_measure else "benchmark tested"
        reporter.write_sep(
            "=",
            f"{self.benchmark_count} {count_suffix}",
        )

    def get_result_dict(self) -> dict[str, Any]:
        """Return the instrument metadata for the results file."""
        return {
            "instrument": {"type": self.instrument},
            # bench results will be dumped by valgrind
        }