
Commit da53257

feat: use instrument hooks
1 parent 6667f17

2 files changed: +22 -17 lines

src/pytest_codspeed/instruments/valgrind.py (+8 -14)

@@ -6,36 +6,30 @@
 
 from pytest_codspeed import __semver_version__
 from pytest_codspeed.instruments import Instrument
-from pytest_codspeed.instruments.valgrind._wrapper import get_lib
+from pytest_codspeed.instruments.hooks import InstrumentHooks
 
 if TYPE_CHECKING:
     from typing import Any, Callable
 
     from pytest import Session
 
     from pytest_codspeed.instruments import P, T
-    from pytest_codspeed.instruments.valgrind._wrapper import LibType
     from pytest_codspeed.plugin import CodSpeedConfig
 
 SUPPORTS_PERF_TRAMPOLINE = sys.version_info >= (3, 12)
 
 
 class ValgrindInstrument(Instrument):
     instrument = "valgrind"
-    lib: LibType | None
 
     def __init__(self, config: CodSpeedConfig) -> None:
         self.benchmark_count = 0
         self.should_measure = os.environ.get("CODSPEED_ENV") is not None
         if self.should_measure:
-            self.lib = get_lib()
-            self.lib.dump_stats_at(
-                f"Metadata: pytest-codspeed {__semver_version__}".encode("ascii")
-            )
+            InstrumentHooks.set_integration("pytest-codspeed", __semver_version__)
+
             if SUPPORTS_PERF_TRAMPOLINE:
                 sys.activate_stack_trampoline("perf")  # type: ignore
-        else:
-            self.lib = None
 
     def get_instrument_config_str_and_warns(self) -> tuple[str, list[str]]:
         config = (
@@ -61,7 +55,8 @@ def measure(
         **kwargs: P.kwargs,
     ) -> T:
         self.benchmark_count += 1
-        if self.lib is None:  # Thus should_measure is False
+
+        if not InstrumentHooks.is_instrumented():  # Thus should_measure is False
             return fn(*args, **kwargs)
 
         def __codspeed_root_frame__() -> T:
@@ -71,14 +66,13 @@ def __codspeed_root_frame__() -> T:
         # Warmup CPython performance map cache
         __codspeed_root_frame__()
 
-        self.lib.zero_stats()
-        self.lib.start_instrumentation()
+        InstrumentHooks.start_benchmark()
         try:
             return __codspeed_root_frame__()
         finally:
             # Ensure instrumentation is stopped even if the test failed
-            self.lib.stop_instrumentation()
-            self.lib.dump_stats_at(uri.encode("ascii"))
+            InstrumentHooks.stop_benchmark()
+            InstrumentHooks.set_current_benchmark(uri)
 
     def report(self, session: Session) -> None:
         reporter = session.config.pluginmanager.get_plugin("terminalreporter")
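
This commit routes the Valgrind instrument through pytest_codspeed.instruments.hooks.InstrumentHooks instead of driving the wrapper library obtained from get_lib() directly. The hooks module itself is not part of this diff; the sketch below is only a guess at how its class-level calls could map onto the removed library calls (get_lib, zero_stats, start_instrumentation, stop_instrumentation, and dump_stats_at come from the removed lines; everything else is illustrative):

# Hypothetical sketch of pytest_codspeed/instruments/hooks.py -- not the real module.
# It only illustrates how the hook calls used in this diff could wrap the native
# library that valgrind.py previously drove through get_lib().
from __future__ import annotations

from pytest_codspeed.instruments.valgrind._wrapper import get_lib


class InstrumentHooks:
    _lib = None  # plays the role of the removed `self.lib` attribute

    @classmethod
    def set_integration(cls, name: str, version: str) -> None:
        # Replaces: self.lib = get_lib(); self.lib.dump_stats_at(b"Metadata: ...")
        cls._lib = get_lib()
        cls._lib.dump_stats_at(f"Metadata: {name} {version}".encode("ascii"))

    @classmethod
    def is_instrumented(cls) -> bool:
        # Replaces the old `self.lib is None` check (the lib was only loaded
        # when CODSPEED_ENV was set)
        return cls._lib is not None

    @classmethod
    def start_benchmark(cls) -> None:
        # Replaces: self.lib.zero_stats(); self.lib.start_instrumentation()
        cls._lib.zero_stats()
        cls._lib.start_instrumentation()

    @classmethod
    def stop_benchmark(cls) -> None:
        # Replaces: self.lib.stop_instrumentation()
        cls._lib.stop_instrumentation()

    @classmethod
    def set_current_benchmark(cls, uri: str) -> None:
        # Replaces: self.lib.dump_stats_at(uri.encode("ascii"))
        cls._lib.dump_stats_at(uri.encode("ascii"))

Read with that mapping, measure() keeps the same behaviour: stats are zeroed and instrumentation started just before the root frame runs, then stopped and dumped under the benchmark URI in the finally block even when the test raises.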

src/pytest_codspeed/instruments/walltime.py (+14 -3)

@@ -1,5 +1,6 @@
 from __future__ import annotations
 
+import sys
 from dataclasses import asdict, dataclass
 from math import ceil
 from statistics import mean, quantiles, stdev
@@ -11,7 +12,9 @@
 from rich.table import Table
 from rich.text import Text
 
+from pytest_codspeed import __semver_version__
 from pytest_codspeed.instruments import Instrument
+from pytest_codspeed.instruments.hooks import InstrumentHooks
 
 if TYPE_CHECKING:
     from typing import Any, Callable
@@ -133,15 +136,18 @@ class Benchmark:
 def run_benchmark(
     name: str, uri: str, fn: Callable[P, T], args, kwargs, config: BenchmarkConfig
 ) -> tuple[Benchmark, T]:
+    def __codspeed_root_frame__() -> T:
+        return fn(*args, **kwargs)
+
     # Compute the actual result of the function
-    out = fn(*args, **kwargs)
+    out = __codspeed_root_frame__()
 
     # Warmup
     times_per_round_ns: list[float] = []
     warmup_start = start = perf_counter_ns()
     while True:
         start = perf_counter_ns()
-        fn(*args, **kwargs)
+        __codspeed_root_frame__()
         end = perf_counter_ns()
         times_per_round_ns.append(end - start)
         if end - warmup_start > config.warmup_time_ns:
@@ -166,16 +172,19 @@ def run_benchmark(
     # Benchmark
     iter_range = range(iter_per_round)
     run_start = perf_counter_ns()
+    InstrumentHooks.start_benchmark()
     for _ in range(rounds):
         start = perf_counter_ns()
         for _ in iter_range:
-            fn(*args, **kwargs)
+            __codspeed_root_frame__()
         end = perf_counter_ns()
         times_per_round_ns.append(end - start)
 
         if end - run_start > config.max_time_ns:
             # TODO: log something
             break
+    InstrumentHooks.stop_benchmark()
+    InstrumentHooks.set_current_benchmark(uri)
     benchmark_end = perf_counter_ns()
     total_time = (benchmark_end - run_start) / 1e9
 
@@ -196,6 +205,8 @@ class WallTimeInstrument(Instrument):
     def __init__(self, config: CodSpeedConfig) -> None:
         self.config = config
         self.benchmarks: list[Benchmark] = []
+        sys.activate_stack_trampoline("perf")  # type: ignore
+        InstrumentHooks.set_integration("pytest-codspeed", __semver_version__)
 
     def get_instrument_config_str_and_warns(self) -> tuple[str, list[str]]:
         return f"mode: walltime, timer_resolution: {TIMER_RESOLUTION_NS:.1f}ns", []
