diff --git a/.github/workflows/analysis_workflow.yml b/.github/workflows/analysis_workflow.yml
index 2fe6394474..b4ed146b42 100644
--- a/.github/workflows/analysis_workflow.yml
+++ b/.github/workflows/analysis_workflow.yml
@@ -161,6 +161,8 @@ jobs:
 
       - name: Configure sccache
         uses: mozilla-actions/sccache-action@v0.0.9
+        with:
+          disable_annotations: 'true' # suppress noisy report that pollutes the summary page
 
       - name: Extra envs
         shell: bash -l {0}
diff --git a/.github/workflows/benchmark_commits.yml b/.github/workflows/benchmark_commits.yml
index 09ebe9256e..aede931e48 100644
--- a/.github/workflows/benchmark_commits.yml
+++ b/.github/workflows/benchmark_commits.yml
@@ -54,6 +54,8 @@ jobs:
 
       - name: Configure sccache
         uses: mozilla-actions/sccache-action@v0.0.9
+        with:
+          disable_annotations: 'true' # suppress noisy report that pollutes the summary page
 
       - name: Extra envs
         shell: bash -l {0}
@@ -122,16 +124,22 @@ jobs:
 
       - name: Benchmark given commit
         if: github.event_name != 'pull_request_target' || inputs.run_all_benchmarks == true
-        shell: bash -el {0}
+        shell: bash -l {0}
         run: |
           git config --global --add safe.directory .
-          python -m asv run -v --show-stderr --bench $SUITE ${{ inputs.commit }}^!
-
+          python -m asv run --show-stderr --durations all --bench $SUITE ${{ inputs.commit }}^!
+          exit_code=$?
+          python build_tooling/summarize_asv_run.py --commit-hash ${{ inputs.commit }} || exit_code=$((exit_code == 0 ? 1 : exit_code))
+          exit $exit_code
+
       - name: Benchmark against master
         if: github.event_name == 'pull_request_target' && inputs.run_all_benchmarks == false
-        shell: bash -el {0}
+        shell: bash -l {0}
         run: |
-          python -m asv continuous -v --show-stderr --bench $SUITE origin/master HEAD -f 1.15
+          python -m asv continuous --show-stderr --bench $SUITE origin/master HEAD -f 1.15
+          exit_code=$?
+          python build_tooling/summarize_asv_run.py --commit-hash $(git rev-parse HEAD) || exit_code=$((exit_code == 0 ? 1 : exit_code))
+          exit $exit_code
 
       - name: Add results to ArcticDB database
         shell: bash -el {0}
diff --git a/.github/workflows/build_steps.yml b/.github/workflows/build_steps.yml
index 0237d9cc22..1b48d041cb 100644
--- a/.github/workflows/build_steps.yml
+++ b/.github/workflows/build_steps.yml
@@ -97,6 +97,7 @@ jobs:
         with:
           version: "v0.10.0"
           token: ${{ secrets.GITHUB_TOKEN }}
+          disable_annotations: 'true' # suppress noisy report that pollutes the summary page
 
       - name: Setup macOS build environment
         if: matrix.os == 'macos'
diff --git a/.github/workflows/build_with_conda.yml b/.github/workflows/build_with_conda.yml
index 30407a7455..52de213da0 100644
--- a/.github/workflows/build_with_conda.yml
+++ b/.github/workflows/build_with_conda.yml
@@ -87,6 +87,7 @@ jobs:
         uses: mozilla-actions/sccache-action@v0.0.9
         with:
           version: v0.12.0
+          disable_annotations: 'true' # suppress noisy report that pollutes the summary page
 
       - name: Get number of CPU cores
         uses: SimenB/github-actions-cpu-cores@v2.0.0
@@ -150,6 +151,7 @@ jobs:
         uses: mozilla-actions/sccache-action@v0.0.9
         with:
           version: v0.12.0
+          disable_annotations: 'true' # suppress noisy report that pollutes the summary page
 
       - name: Get number of CPU cores
         uses: SimenB/github-actions-cpu-cores@v2.0.0
@@ -213,6 +215,7 @@ jobs:
         uses: mozilla-actions/sccache-action@v0.0.9
         with:
           version: v0.12.0
+          disable_annotations: 'true' # suppress noisy report that pollutes the summary page
 
       - name: Get number of CPU cores
         uses: SimenB/github-actions-cpu-cores@v2.0.0
@@ -278,6 +281,7 @@ jobs:
         uses: mozilla-actions/sccache-action@v0.0.9
         with:
           version: v0.12.0
+          disable_annotations: 'true' # suppress noisy report that pollutes the summary page
 
       - name: Free Disk Space (Ubuntu)
         uses: jlumbroso/free-disk-space@v1.3.1
@@ -363,6 +367,7 @@ jobs:
         uses: mozilla-actions/sccache-action@v0.0.9
         with:
           version: v0.12.0
+          disable_annotations: 'true' # suppress noisy report that pollutes the summary page
 
       - name: Free Disk Space (Ubuntu)
         uses: jlumbroso/free-disk-space@v1.3.1
@@ -448,6 +453,7 @@ jobs:
         uses: mozilla-actions/sccache-action@v0.0.9
         with:
           version: v0.12.0
+          disable_annotations: 'true' # suppress noisy report that pollutes the summary page
 
       - name: Download build artifacts
         uses: actions/download-artifact@v4
@@ -538,6 +544,7 @@ jobs:
         uses: mozilla-actions/sccache-action@v0.0.9
         with:
           version: v0.12.0
+          disable_annotations: 'true' # suppress noisy report that pollutes the summary page
 
       - name: Download build artifacts
         uses: actions/download-artifact@v4
@@ -676,6 +683,7 @@ jobs:
         uses: mozilla-actions/sccache-action@v0.0.9
         with:
           version: v0.12.0
+          disable_annotations: 'true' # suppress noisy report that pollutes the summary page
 
       - name: Download build artifacts
         uses: actions/download-artifact@v4
@@ -810,6 +818,7 @@ jobs:
         uses: mozilla-actions/sccache-action@v0.0.9
         with:
           version: v0.12.0
+          disable_annotations: 'true' # suppress noisy report that pollutes the summary page
 
       - name: Download build artifacts
         uses: actions/download-artifact@v4
@@ -941,6 +950,7 @@ jobs:
         uses: mozilla-actions/sccache-action@v0.0.9
         with:
           version: v0.12.0
+          disable_annotations: 'true' # suppress noisy report that pollutes the summary page
 
       - name: Get number of CPU cores
         uses: SimenB/github-actions-cpu-cores@v2.0.0
@@ -1022,6 +1032,7 @@ jobs:
         uses: mozilla-actions/sccache-action@v0.0.9
         with:
           version: v0.12.0
+          disable_annotations: 'true' # suppress noisy report that pollutes the summary page
 
       - name: Install Conda environment from environment-dev.yml
         uses: mamba-org/setup-micromamba@v2.0.6
@@ -1122,6 +1133,7 @@ jobs:
         uses: mozilla-actions/sccache-action@v0.0.9
         with:
           version: v0.12.0
+          disable_annotations: 'true' # suppress noisy report that pollutes the summary page
 
       - name: Install Conda environment from environment-dev.yml
         uses: mamba-org/setup-micromamba@v2.0.6
diff --git a/build_tooling/summarize_asv_run.py b/build_tooling/summarize_asv_run.py
new file mode 100644
index 0000000000..8a84cd38ef
--- /dev/null
+++ b/build_tooling/summarize_asv_run.py
@@ -0,0 +1,61 @@
+"""
+Copyright 2025 Man Group Operations Limited
+
+Use of this software is governed by the Business Source License 1.1 included in the file licenses/BSL.txt.
+
+As of the Change Date specified in that file, in accordance with the Business Source License, use of this software will be governed by the Apache License, version 2.0.
+"""
+
+import json
+import os
+import glob
+import sys
+from argparse import ArgumentParser
+
+
+def main(commit_hash: str):
+    assert commit_hash, "commit_hash must be provided but was blank"
+    shortened_hash = commit_hash[:8]
+    failures = []
+    # Search for all result files in the asv results directory
+    results_pattern = f"*/.asv/results/*/{shortened_hash}*-*.json"
+    result_files = glob.glob(results_pattern, recursive=True)
+    assert len(result_files) == 1, f"Expected one result file matching pattern {results_pattern} but found {result_files}"
+    result_file = result_files[0]
+    with open(result_file, 'r') as f:
+        data = json.load(f)
+    # Results are stored in a dictionary; failures are null (the entry or its first element)
+    for bench_name, result in data.get('results', {}).items():
+        if result is None or result[0] is None:
+            failures.append(bench_name)
+
+    # Write to GitHub Step Summary
+    with open(os.environ['GITHUB_STEP_SUMMARY'], 'a') as summary:
+        summary.write("### 🚀 ASV Benchmark Report\n")
+        if failures:
+            summary.write("❌ **The following benchmarks failed:**\n\n")
+            summary.write("| Benchmark Name |\n")
+            summary.write("| :--- |\n")
+            for f in sorted(set(failures)):
+                summary.write(f"| `{f}` |\n")
+            summary.write("\n> Check the 'Benchmark given commit' step logs for specific tracebacks.")
+        else:
+            summary.write("✅ All benchmarks passed successfully!\n")
+
+    if failures:
+        print("Check the workflow Summary page for a report on ASV failures.")
+        sys.exit(1)
+    else:
+        print("There were no ASV failures to report.")
+        sys.exit(0)
+
+
+if __name__ == "__main__":
+    parser = ArgumentParser()
+    parser.add_argument(
+        "--commit-hash",
+        help="Commit hash to summarize results for",
+        required=True
+    )
+    args = parser.parse_args()
+    main(args.commit_hash)
diff --git a/build_tooling/transform_asv_results.py b/build_tooling/transform_asv_results.py
index 23fab720e4..08c08f20fe 100644
--- a/build_tooling/transform_asv_results.py
+++ b/build_tooling/transform_asv_results.py
@@ -5,6 +5,7 @@
 
 As of the Change Date specified in that file, in accordance with the Business Source License, use of this software will be governed by the Apache License, version 2.0.
 """
+import os
 
 import pandas as pd
 
@@ -165,11 +166,6 @@ def analyze_asv_results(lib, hash):
     cache_setup_df = cache_setup_df.reset_index().rename(columns={'index': 'Step'})
     cache_setup_df = cache_setup_df.sort_values(by="Duration (s)", ascending=False)
-    print("Time spent outside of benchmarks (excluding build):\n")
-    with pd.option_context('display.max_rows', None, 'display.max_colwidth', None):
-        print(cache_setup_df)
-    print("\n")
-
     def extract_time(r):
         """r looks like the "results" mentioned in the docstring above.
         Using eval as the results can contain nan, inf etc which json.loads cannot parse"""
         as_list = eval(r)
@@ -179,15 +175,28 @@
 
     benchmark_results = benchmark_results[["test_name", "Duration (s)"]]
     benchmark_results = benchmark_results.sort_values(by="Duration (s)", ascending=False)
-    print("Time spent in benchmarks:\n")
-    with pd.option_context('display.max_rows', None, 'display.max_colwidth', None):
-        print(benchmark_results)
-
-    print(f"\nSummary:")
     cache_setup_time = cache_setup_df["Duration (s)"].sum() / 60
-    print(f"Total time outside benchmarks (mins): {cache_setup_time}")
     benchmarks_run_time = benchmark_results["Duration (s)"].sum() / 60
-    print(f"Total time running benchmarks (mins): {benchmarks_run_time}")
+
+    summary_content = ["### Time spent outside of benchmarks (excluding build)\n",
+                       cache_setup_df.to_markdown(index=False),
+                       "\n### Time spent in benchmarks\n",
+                       benchmark_results.to_markdown(index=False),
+                       "\n### Summary\n",
+                       f"* **Total time outside benchmarks (mins):** {cache_setup_time:.2f}",
+                       f"* **Total time running benchmarks (mins):** {benchmarks_run_time:.2f}"]
+
+    final_output = "\n".join(summary_content)
+
+    summary_file_path = os.environ.get('GITHUB_STEP_SUMMARY')
+
+    if summary_file_path:
+        # If running in Github, write to the summary
+        print("Check the workflow Summary page for a report on the time spent running ASV benchmarks.")
+        with open(summary_file_path, "a") as f:
+            f.write(final_output + "\n")
+    else:
+        print(final_output)
 
 
 def get_result_json_path(json_path, sym, json_data):