Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
39 changes: 37 additions & 2 deletions src/pytest_codspeed/instruments/hooks/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,8 +15,19 @@

from .dist_instrument_hooks import InstrumentHooksPointer, LibType

# Feature flags for instrument hooks
FEATURE_DISABLE_CALLGRIND_MARKERS = 0

def _load_lib_constant(name: str) -> int:
    """Look up an integer constant exported by the compiled instrument-hooks library.

    The constants originate in instrument-hooks/includes/core.h and are
    surfaced as attributes of the cffi ``lib`` object.
    """
    from .dist_instrument_hooks import lib  # type: ignore

    constant_value: int = getattr(lib, name)
    return constant_value


# Constants are defined in instrument-hooks/includes/core.h and exposed via cffi.
# Loaded eagerly at import time: if the compiled dist_instrument_hooks
# extension is unavailable, importing this module raises ImportError here.
FEATURE_DISABLE_CALLGRIND_MARKERS = _load_lib_constant("FEATURE_DISABLE_CALLGRIND_MARKERS")
# Marker type identifiers passed to instrument_hooks_add_marker (see
# add_marker / add_benchmark_timestamps on InstrumentHooks).
MARKER_TYPE_SAMPLE_START = _load_lib_constant("MARKER_TYPE_SAMPLE_START")
MARKER_TYPE_SAMPLE_END = _load_lib_constant("MARKER_TYPE_SAMPLE_END")
MARKER_TYPE_BENCHMARK_START = _load_lib_constant("MARKER_TYPE_BENCHMARK_START")
MARKER_TYPE_BENCHMARK_END = _load_lib_constant("MARKER_TYPE_BENCHMARK_END")


class InstrumentHooks:
Expand Down Expand Up @@ -79,6 +90,30 @@ def set_executed_benchmark(self, uri: str, pid: int | None = None) -> None:
if ret != 0:
warnings.warn("Failed to set executed benchmark", RuntimeWarning)

@staticmethod
def current_timestamp() -> int:
    """Return a monotonic timestamp in nanoseconds from the native library."""
    # Local import mirrors how the rest of this module accesses the compiled
    # cffi library.
    from .dist_instrument_hooks import lib  # type: ignore

    timestamp: int = lib.instrument_hooks_current_timestamp()
    return timestamp

def add_marker(
self, marker_type: int, timestamp: int, pid: int | None = None
) -> None:
"""Emit a single marker at the given timestamp."""
if pid is None:
pid = os.getpid()
ret = self.lib.instrument_hooks_add_marker(
self.instance, pid, marker_type, timestamp
)
if ret != 0:
warnings.warn("Failed to add marker", RuntimeWarning)

def add_benchmark_timestamps(self, start: int, end: int) -> None:
    """Emit a BenchmarkStart/BenchmarkEnd marker pair around a captured window.

    ``start`` and ``end`` are timestamps as produced by ``current_timestamp``.
    """
    marker_pairs = (
        (MARKER_TYPE_BENCHMARK_START, start),
        (MARKER_TYPE_BENCHMARK_END, end),
    )
    for marker_type, stamp in marker_pairs:
        self.add_marker(marker_type, stamp)

def set_integration(self, name: str, version: str) -> None:
"""Set the integration name and version."""
ret = self.lib.instrument_hooks_set_integration(
Expand Down
4 changes: 4 additions & 0 deletions src/pytest_codspeed/instruments/hooks/build.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,10 @@
void callgrind_start_instrumentation();
void callgrind_stop_instrumentation();

typedef enum {
FEATURE_DISABLE_CALLGRIND_MARKERS = 0,
} instrument_hooks_feature_t;

void instrument_hooks_set_feature(uint64_t feature, bool enabled);

uint8_t instrument_hooks_set_environment(InstrumentHooks *, const char *section_name,
Expand Down
51 changes: 37 additions & 14 deletions src/pytest_codspeed/instruments/walltime.py
Original file line number Diff line number Diff line change
Expand Up @@ -232,21 +232,33 @@ def __codspeed_root_frame__() -> T:
# Benchmark
iter_range = range(iter_per_round)
run_start = perf_counter_ns()
if self.instrument_hooks:
self.instrument_hooks.start_benchmark()
hooks = self.instrument_hooks
if hooks:
hooks.start_benchmark()
for _ in range(rounds):
start = perf_counter_ns()
if hooks:
start = hooks.current_timestamp()
else:
start = perf_counter_ns()

for _ in iter_range:
__codspeed_root_frame__()
end = perf_counter_ns()

if hooks:
end = hooks.current_timestamp()
hooks.current_timestamp()
hooks.add_benchmark_timestamps(start, end)
else:
end = perf_counter_ns()

times_per_round_ns.append(end - start)
Comment on lines +239 to 254
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We shouldn't move the `perf_counter_ns` call — it is the measuring code.

We need to both measure the time AND emit our markers.


if end - run_start > benchmark_config.max_time_ns:
# TODO: log something
break
if self.instrument_hooks:
self.instrument_hooks.stop_benchmark()
self.instrument_hooks.set_executed_benchmark(uri)
if hooks:
hooks.stop_benchmark()
hooks.set_executed_benchmark(uri)
benchmark_end = perf_counter_ns()
total_time = (benchmark_end - run_start) / 1e9

Expand Down Expand Up @@ -290,20 +302,31 @@ def __codspeed_root_frame__(*args, **kwargs) -> T:
# Benchmark
times_per_round_ns: list[float] = []
benchmark_start = perf_counter_ns()
if self.instrument_hooks:
self.instrument_hooks.start_benchmark()
hooks = self.instrument_hooks
if hooks:
hooks.start_benchmark()
for _ in range(pedantic_options.rounds):
args, kwargs = pedantic_options.setup_and_get_args_kwargs()
start = perf_counter_ns()
if hooks:
start = hooks.current_timestamp()
else:
start = perf_counter_ns()

for _ in iter_range:
__codspeed_root_frame__(*args, **kwargs)
end = perf_counter_ns()

if hooks:
end = hooks.current_timestamp()
hooks.add_benchmark_timestamps(start, end)
else:
end = perf_counter_ns()

Comment on lines +310 to +323
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

same here

times_per_round_ns.append(end - start)
if pedantic_options.teardown is not None:
pedantic_options.teardown(*args, **kwargs)
if self.instrument_hooks:
self.instrument_hooks.stop_benchmark()
self.instrument_hooks.set_executed_benchmark(uri)
if hooks:
hooks.stop_benchmark()
hooks.set_executed_benchmark(uri)
benchmark_end = perf_counter_ns()
total_time = (benchmark_end - benchmark_start) / 1e9
stats = BenchmarkStats.from_list(
Expand Down
Loading