feat(ci): only re-run failed tests (#11925)
* fix(tests): only run failed tests when rerunning

* fix(ci): when all tests pass, create empty 'failed' file

* fix(ci): scope 'failed tests file' artifact to current workflow run

* fix(tests): remove test batch balancing
hanshuebner authored Nov 20, 2023
1 parent c75c7e0 commit 67970ea
Showing 3 changed files with 87 additions and 21 deletions.
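Taken together, the three changed files implement a simple rerun loop: the busted helper records every spec file that failed or errored into the file named by FAILED_TEST_FILES_FILE, the workflow passes that file between runs as an artifact scoped to the current workflow run, and run_tests.sh narrows the spec list to those files on a rerun. A rough local sketch of the same loop, assuming a working test environment, execution from the repository root, and an arbitrary file name failed-specs.txt:

    # First attempt: the busted helper logs failing spec files
    # (or writes an empty file when everything passes).
    export FAILED_TEST_FILES_FILE=failed-specs.txt
    TEST_SUITE=integration .ci/run_tests.sh || true

    # Second attempt: get_failed substitutes the recorded failures for the
    # full spec list, so only the previously failing files are run again.
    TEST_SUITE=integration .ci/run_tests.sh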
54 changes: 34 additions & 20 deletions .ci/run_tests.sh
@@ -4,11 +4,25 @@ set -e
function cyan() {
    echo -e "\033[1;36m$*\033[0m"
}

function red() {
    echo -e "\033[1;31m$*\033[0m"
}

export BUSTED_ARGS="--no-k -o htest -v --exclude-tags=flaky,ipv6"
function get_failed {
    if [ ! -z "$FAILED_TEST_FILES_FILE" -a -f "$FAILED_TEST_FILES_FILE" ]
    then
        cat < $FAILED_TEST_FILES_FILE
    else
        echo "$@"
    fi
}

BUSTED_ARGS="--keep-going -o htest -v --exclude-tags=flaky,ipv6"
if [ ! -z "$FAILED_TEST_FILES_FILE" ]
then
    BUSTED_ARGS="--helper=spec/busted-log-failed.lua $BUSTED_ARGS"
fi

if [ "$KONG_TEST_DATABASE" == "postgres" ]; then
export TEST_CMD="bin/busted $BUSTED_ARGS,off"
@@ -29,56 +43,56 @@ else
export TEST_CMD="bin/busted $BUSTED_ARGS,postgres,db"
fi

if [[ "$KONG_TEST_COVERAGE" = true ]]; then
export TEST_CMD="$TEST_CMD --keep-going"
fi

if [ "$TEST_SUITE" == "integration" ]; then
if [[ "$TEST_SPLIT" == first* ]]; then
# GitHub Actions, run first batch of integration tests
eval "$TEST_CMD" $(ls -d spec/02-integration/* | sort | grep -v 05-proxy)
files=$(ls -d spec/02-integration/* | sort | grep -v 05-proxy)
files=$(get_failed $files)
eval "$TEST_CMD" $files

elif [[ "$TEST_SPLIT" == second* ]]; then
# GitHub Actions, run second batch of integration tests
# Note that the split here is chosen carefully to result
# in a similar run time between the two batches, and should
# be adjusted if imbalance become significant in the future
eval "$TEST_CMD" $(ls -d spec/02-integration/* | sort | grep 05-proxy)
files=$(ls -d spec/02-integration/* | sort | grep 05-proxy)
files=$(get_failed $files)
eval "$TEST_CMD" $files

else
# Non GitHub Actions
eval "$TEST_CMD" spec/02-integration/
eval "$TEST_CMD" $(get_failed spec/02-integration/)
fi
fi

if [ "$TEST_SUITE" == "dbless" ]; then
eval "$TEST_CMD" spec/02-integration/02-cmd \
spec/02-integration/05-proxy \
spec/02-integration/04-admin_api/02-kong_routes_spec.lua \
spec/02-integration/04-admin_api/15-off_spec.lua \
spec/02-integration/08-status_api/01-core_routes_spec.lua \
spec/02-integration/08-status_api/03-readiness_endpoint_spec.lua \
spec/02-integration/11-dbless \
spec/02-integration/20-wasm
eval "$TEST_CMD" $(get_failed spec/02-integration/02-cmd \
spec/02-integration/05-proxy \
spec/02-integration/04-admin_api/02-kong_routes_spec.lua \
spec/02-integration/04-admin_api/15-off_spec.lua \
spec/02-integration/08-status_api/01-core_routes_spec.lua \
spec/02-integration/08-status_api/03-readiness_endpoint_spec.lua \
spec/02-integration/11-dbless \
spec/02-integration/20-wasm)
fi
if [ "$TEST_SUITE" == "plugins" ]; then
set +ex
rm -f .failed

if [[ "$TEST_SPLIT" == first* ]]; then
# GitHub Actions, run first batch of plugin tests
PLUGINS=$(ls -d spec/03-plugins/* | head -n22)
PLUGINS=$(get_failed $(ls -d spec/03-plugins/* | head -n22))

elif [[ "$TEST_SPLIT" == second* ]]; then
# GitHub Actions, run second batch of plugin tests
# Note that the split here is chosen carefully to result
# in a similar run time between the two batches, and should
# be adjusted if imbalance become significant in the future
PLUGINS=$(ls -d spec/03-plugins/* | tail -n+23)
PLUGINS=$(get_failed $(ls -d spec/03-plugins/* | tail -n+23))

else
# Non GitHub Actions
PLUGINS=$(ls -d spec/03-plugins/*)
PLUGINS=$(get_failed $(ls -d spec/03-plugins/*))
fi

for p in $PLUGINS; do
@@ -91,7 +105,7 @@ if [ "$TEST_SUITE" == "plugins" ]; then
        $TEST_CMD $p || echo "* $p" >> .failed
    done

    if [[ "$TEST_SPLIT" == second* ]] || [[ "$TEST_SPLIT" != first* ]]; then
    if [[ "$TEST_SPLIT" != first* ]]; then
        cat kong-*.rockspec | grep kong- | grep -v zipkin | grep -v sidecar | grep "~" | grep -v kong-prometheus-plugin | while read line ; do
            REPOSITORY=`echo $line | sed "s/\"/ /g" | awk -F" " '{print $1}'`
            VERSION=`luarocks show $REPOSITORY | grep $REPOSITORY | head -1 | awk -F" " '{print $2}' | cut -f1 -d"-"`
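As a quick illustration of the get_failed helper added above (the file name failed.txt and the spec path are only examples), if one pastes the function into a shell at the repository root:

    export FAILED_TEST_FILES_FILE=failed.txt

    # When the log file exists, its contents replace the argument list:
    printf 'spec/02-integration/05-proxy/01-proxy_spec.lua\n' > failed.txt
    get_failed spec/02-integration/*    # prints only the recorded spec file

    # When it does not exist, the arguments pass through unchanged:
    rm failed.txt
    get_failed spec/02-integration/*    # prints the full directory listing

An empty log file (written when every test passed) therefore yields an empty spec list on the rerun.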
21 changes: 20 additions & 1 deletion .github/workflows/build_and_test.yml
@@ -127,7 +127,7 @@ jobs:
      fail-fast: false
      matrix:
        suite: [integration, plugins]
        split: [first (01-04), second (>= 05)]
        split: [first, second]

    services:
      postgres:
@@ -231,6 +231,17 @@ jobs:
          # arm64 runners may use more connections due to more worker cores
          psql -hlocalhost -Ukong kong -tAc 'alter system set max_connections = 5000;'
      - name: Generate test rerun filename
        run: |
          echo FAILED_TEST_FILES_FILE=$(echo '${{ github.run_id }}-${{ matrix.suite }}-${{ matrix.split }}' | tr A-Z a-z | sed -Ee 's/[^a-z0-9]+/-/g').txt >> $GITHUB_ENV
      - name: Download test rerun information
        uses: actions/download-artifact@v3
        continue-on-error: true
        with:
          name: ${{ env.FAILED_TEST_FILES_FILE }}

      - name: Tests
        env:
          KONG_TEST_PG_DATABASE: kong
@@ -246,6 +257,14 @@
          source ${{ env.BUILD_ROOT }}/kong-dev-venv.sh
          .ci/run_tests.sh
      - name: Upload test rerun information
        if: always()
        uses: actions/upload-artifact@v3
        with:
          name: ${{ env.FAILED_TEST_FILES_FILE }}
          path: ${{ env.FAILED_TEST_FILES_FILE }}
          retention-days: 2

      - name: Archive coverage stats file
        uses: actions/upload-artifact@v3
        if: ${{ always() && (inputs.coverage == true || github.event_name == 'schedule') }}
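The 'Generate test rerun filename' step above derives one artifact name per run and matrix cell, which is what scopes the failure log to the current workflow run; continue-on-error on the download step lets the first attempt proceed when no such artifact exists yet. As a worked example, using the run id 6927567512 from the build comment below and the integration/first matrix cell:

    echo '6927567512-integration-first' | tr A-Z a-z | sed -Ee 's/[^a-z0-9]+/-/g'
    # -> 6927567512-integration-first
    # so FAILED_TEST_FILES_FILE is set to 6927567512-integration-first.txt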
33 changes: 33 additions & 0 deletions spec/busted-log-failed.lua
@@ -0,0 +1,33 @@
-- busted-log-failed.lua

-- Log which test files run by busted had failures or errors in a
-- file. The file to use for logging is specified in the
-- FAILED_TEST_FILES_FILE environment variable. This is used to
-- reduce test rerun times for flaky tests.

local busted = require 'busted'
local failed_files_file = assert(os.getenv("FAILED_TEST_FILES_FILE"),
                                 "FAILED_TEST_FILES_FILE environment variable not set")

local FAILED_FILES = {}

busted.subscribe({ 'failure' }, function(element, parent, message, debug)
  FAILED_FILES[element.trace.source] = true
end)

busted.subscribe({ 'error' }, function(element, parent, message, debug)
  FAILED_FILES[element.trace.source] = true
end)

busted.subscribe({ 'suite', 'end' }, function(suite, count, total)
  local output = assert(io.open(failed_files_file, "w"))
  if next(FAILED_FILES) then
    for failed_file in pairs(FAILED_FILES) do
      if failed_file:sub(1, 1) == '@' then
        failed_file = failed_file:sub(2)
      end
      assert(output:write(failed_file .. "\n"))
    end
  end
  output:close()
end)
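The helper only takes effect when run_tests.sh injects it via --helper, as shown in the first file of this diff. A minimal stand-alone invocation would look roughly like this (the output file name is just an example):

    FAILED_TEST_FILES_FILE=failed.txt \
        bin/busted --helper=spec/busted-log-failed.lua -o htest -v spec/02-integration/02-cmd
    # failed.txt now lists the spec files that had failures or errors,
    # and is empty when everything passed.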

1 comment on commit 67970ea

@khcp-gha-bot

Bazel Build

Docker image available kong/kong:67970ea2b03a8b1538c76b1ede0ace05bff294bf
Artifacts available https://github.com/Kong/kong/actions/runs/6927567512
