@@ -1,5 +1,6 @@
 from __future__ import annotations
 
+import sys
 from dataclasses import asdict, dataclass
 from math import ceil
 from statistics import mean, quantiles, stdev
@@ -11,7 +12,9 @@
 from rich.table import Table
 from rich.text import Text
 
+from pytest_codspeed import __semver_version__
 from pytest_codspeed.instruments import Instrument
+from pytest_codspeed.instruments.hooks import InstrumentHooks
 
 if TYPE_CHECKING:
     from typing import Any, Callable
@@ -131,17 +134,26 @@ class Benchmark:
 
 
 def run_benchmark(
-    name: str, uri: str, fn: Callable[P, T], args, kwargs, config: BenchmarkConfig
+    instrument_hooks: InstrumentHooks,
+    name: str,
+    uri: str,
+    fn: Callable[P, T],
+    args,
+    kwargs,
+    config: BenchmarkConfig,
 ) -> tuple[Benchmark, T]:
+    def __codspeed_root_frame__() -> T:
+        return fn(*args, **kwargs)
+
     # Compute the actual result of the function
-    out = fn(*args, **kwargs)
+    out = __codspeed_root_frame__()
 
     # Warmup
     times_per_round_ns: list[float] = []
     warmup_start = start = perf_counter_ns()
     while True:
         start = perf_counter_ns()
-        fn(*args, **kwargs)
+        __codspeed_root_frame__()
         end = perf_counter_ns()
         times_per_round_ns.append(end - start)
         if end - warmup_start > config.warmup_time_ns:
@@ -166,16 +178,19 @@ def run_benchmark(
     # Benchmark
     iter_range = range(iter_per_round)
     run_start = perf_counter_ns()
+    instrument_hooks.start_benchmark()
     for _ in range(rounds):
         start = perf_counter_ns()
         for _ in iter_range:
-            fn(*args, **kwargs)
+            __codspeed_root_frame__()
         end = perf_counter_ns()
         times_per_round_ns.append(end - start)
 
         if end - run_start > config.max_time_ns:
             # TODO: log something
             break
+    instrument_hooks.stop_benchmark()
+    instrument_hooks.set_current_benchmark(uri)
     benchmark_end = perf_counter_ns()
     total_time = (benchmark_end - run_start) / 1e9
 
@@ -192,10 +207,16 @@ def run_benchmark(
 
 class WallTimeInstrument(Instrument):
     instrument = "walltime"
+    instrument_hooks: InstrumentHooks
 
     def __init__(self, config: CodSpeedConfig) -> None:
+        self.instrument_hooks = InstrumentHooks()
+
         self.config = config
         self.benchmarks: list[Benchmark] = []
+        sys.activate_stack_trampoline("perf")  # type: ignore
+
+        self.instrument_hooks.set_integration("pytest-codspeed", __semver_version__)
 
     def get_instrument_config_str_and_warns(self) -> tuple[str, list[str]]:
         return f"mode: walltime, timer_resolution: {TIMER_RESOLUTION_NS:.1f}ns", []
@@ -209,6 +230,7 @@ def measure(
         **kwargs: P.kwargs,
     ) -> T:
         bench, out = run_benchmark(
+            instrument_hooks=self.instrument_hooks,
             name=name,
             uri=uri,
             fn=fn,