benchadapt: asv adapter (#1590) #3
Workflow file for this run

name: ci
on:
push:
branches:
- main
pull_request:
env:
# secrets.GITHUB_TOKEN is provided by GitHub Actions and not bound to a
# personal user account. It allows for 1000 GitHub HTTP API requests per
# hour. A personal token allows for 5000 of those. The pool has N of these
# configured, allowing for automatic rotation after quota consumption.
# Also see https://github.com/conbench/conbench/issues/917.
# Future: potentially rename this variable to carry a _POOL suffix.
# We fall back to secrets.GITHUB_TOKEN in case this is a fork without the pool configured.
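# Note: in GitHub Actions expressions, `||` evaluates to the first truthy
# operand, so an unset or empty pool secret falls through to GITHUB_TOKEN.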
GITHUB_API_TOKEN: ${{ secrets.PERSONAL_GITHUB_API_TOKEN_POOL || secrets.GITHUB_TOKEN }}
# To enable the handful of tests that get info from the live GitHub HTTP API.
permissions:
contents: read
pull-requests: read
jobs:
lint:
runs-on: ubuntu-latest
steps:
- name: checkout
uses: actions/checkout@v3
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: "3.11"
- name: Install linting dependencies
# For now, naively add `requirements-webapp.txt` so that mypy can use
# all modules for type inspection.
run: pip install -r requirements-dev.txt -r requirements-webapp.txt
- name: lint
run: make lint-ci
testsuite:
runs-on: ubuntu-latest
steps:
- name: checkout
uses: actions/checkout@v3
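# Note: `make tests` presumably builds and runs the test suite in
# containers, which is why no setup-python step is needed here.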
- name: Run `make tests`
run: make tests
check-docs:
runs-on: ubuntu-latest
steps:
- name: checkout
uses: actions/checkout@v3
- name: Install sphinx dev dependencies and our packages (for autodoc)
run: |
pip install \
-r requirements-dev.txt \
-e ./benchclients/python \
-e ./benchalerts \
-e ./benchadapt/python \
-e ./benchconnect \
-e ./benchrun/python \
-e ./legacy
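# About SPHINXOPTS below: -W promotes sphinx warnings to errors, and
# --keep-going completes the build anyway so that all warnings are
# reported before the non-zero exit.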
- name: Build docs, exiting non-zero on warnings (which can be very bad)
run: make build-docs SPHINXOPTS='-W --keep-going'
# This is to confirm that commonly used developer commands still work.
dev-cmds:
runs-on: ubuntu-latest
steps:
- name: checkout
uses: actions/checkout@v3
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: "3.10"
- name: rebuild-expected-api-docs
run: |
pip install black
make rebuild-expected-api-docs
# Launch application as containerized stack. This terminates after
# health checks confirm that the stack is running.
- name: run-app-bg
run: make run-app-bg
# Run `db-populate` twice to see that this can be repeated w/o failing.
- name: test `make db-populate`, twice
run: |
pip install requests # the only non-stdlib dependency as of now
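# `docker compose port app 5000` prints the published host:port mapping
# for the app container's port 5000, e.g. `0.0.0.0:49153`.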
export CONBENCH_BASE_URL=http://$(docker compose port app 5000) && \
make db-populate && make db-populate
docker compose logs # view logs to see why transient errors happen
- name: test `make teardown-app`
run: make teardown-app
# The goal of this is largely to test `make run-app-dev`.
- name: test-run-app-dev
run: make test-run-app-dev
ui-screenshots:
runs-on: ubuntu-latest
steps:
- name: checkout
uses: actions/checkout@v3
- name: run-app-bg
run: make run-app-bg
- name: populate DB & take screenshots
run: |
pip install requests # the only non-stdlib dependency as of now
export CONBENCH_BASE_URL=http://$(docker compose port app 5000)
# Get first line of stdout, interpret as benchmark ID
export SOME_BENCHMARK_ID="$(make -s db-populate | head -1)"
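# (make -s suppresses make's own command echoing, so stdout contains only
# what the db-populate target prints)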
echo "conbench base URL: $CONBENCH_BASE_URL"
echo "a valid benchmark ID: $SOME_BENCHMARK_ID"
cd ci
docker build . -t conbench-screenshot -f screenshot.Dockerfile
mkdir ci-artifacts
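# --net=host lets the screenshot container reach the compose stack via the
# host-published port in CONBENCH_BASE_URL; the bind mount collects the
# screenshots into ci-artifacts for the upload step below.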
docker run --net=host -v $(pwd)/ci-artifacts:/artifacts conbench-screenshot \
python screenshot.py ${CONBENCH_BASE_URL} /artifacts frontpage
docker run --net=host -v $(pwd)/ci-artifacts:/artifacts conbench-screenshot \
python screenshot.py "${CONBENCH_BASE_URL}/benchmarks/${SOME_BENCHMARK_ID}/" \
/artifacts benchmark-result-view --wait-for-canvas
/bin/ls -ahltr ci-artifacts/
- name: upload-artifacts
uses: actions/upload-artifact@v3
# Upload screenshots _especially_ when the result was unexpected.
if: always()
with:
name: screenshots
path: ci/ci-artifacts/
- name: get container logs
if: always()
# If the browser-initiated GET requests led to e.g. an Internal Server
# Error, we see that in the screenshot(s). For debugging that, however,
# it's crucial to get web application logs. Note that this shows container
# logs interleaved, but each line is prefixed so that it is unambiguous
# which container it came from.
run: docker compose logs --since 30m
- name: test `make teardown-app`
# Run teardown especially when previous step(s) failed, because it
# yields log output that is useful for debugging.
if: always()
run: make teardown-app
db-migrations:
runs-on: ubuntu-latest
steps:
- name: checkout
uses: actions/checkout@v3
- name: Start PostgreSQL DB
run: docker compose run --detach db
# Run `alembic upgrade head` via docker compose, in the web application
# container image. Running it via docker compose puts it in the same
# virtual network as the DB, which is reachable via the DNS name `db`.
# Set CREATE_ALL_TABLES=false to defuse some webapp bootstrap code (not
# needed here, because these Python modules run in the context of alembic,
# not of the actual web application bootstrapping itself).
- name: Test database migrations
run: make set-build-info && docker compose run -e CREATE_ALL_TABLES app alembic upgrade head
env:
CREATE_ALL_TABLES: false
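# Note: `docker compose run -e CREATE_ALL_TABLES` (without a value)
# forwards the variable from the step environment defined above into
# the container.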
# A better way to test this would be to change the source code so that
# a non-noop migration is generated.
- name: Test make alembic-new-migration
run: |
make alembic-new-migration
cat migrations/versions/*dummy_migr_name*
env:
ALEMBIC_MIGRATION_NAME: dummy_migr_name
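# The make target presumably passes ALEMBIC_MIGRATION_NAME to
# `alembic revision -m ...`; alembic embeds that slug in the generated
# file name, which is what the `cat` glob above relies on.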
libraries:
runs-on: ubuntu-latest
steps:
- name: checkout
uses: actions/checkout@v3
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: "3.10"
- name: Install dependencies
run: |
# in order of dependency
pip install \
-e './benchclients/python[dev]' \
-e './benchalerts[dev]' \
-e ./benchadapt/python \
-e ./benchconnect \
-e ./benchrun/python \
-e './legacy[dev]'
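# The [dev] extras are assumed to pull in each package's test
# dependencies (e.g. pytest), as declared in its setup config.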
- name: Test benchclients
run: |
pytest -vv benchclients/python/tests
- name: Test benchalerts
env:
GITHUB_APP_ID: ${{ secrets.CONBENCH_APP_ID }}
GITHUB_APP_PRIVATE_KEY: ${{ secrets.CONBENCH_APP_PRIVATE_KEY }}
run: |
pytest -vv --log-level DEBUG benchalerts/tests
- name: Test benchadapt
run: |
pytest -vv benchadapt/python/tests
- name: Test benchrun
run: |
pytest -vv benchrun/python/tests
- name: Test benchconnect
run: |
pytest -vv benchconnect
- name: Test conbenchlegacy
run: |
pytest -vv legacy
cb-on-minikube:
runs-on: ubuntu-latest
steps:
- name: checkout
uses: actions/checkout@v3
- name: start minikube
id: minikube
uses: medyagh/setup-minikube@e1ee887a96c50e34066a4bf9f172eb94ae69d454
with:
kubernetes-version: v1.26.7
# By convention, both locally and in CI we call this profile mk-conbench.
start-args: '--profile mk-conbench --extra-config=kubelet.cgroup-driver=systemd'
cpus: max
memory: max
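# cpus/memory "max" hand minikube all CPU and memory available on
# the runner VM.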
- name: ci/minikube/test-conbench-on-mk.sh
run: export CONBENCH_REPO_ROOT_DIR=$(pwd) && bash ci/minikube/test-conbench-on-mk.sh