Skip to content
Merged
Show file tree
Hide file tree
Changes from 33 commits
Commits
Show all changes
34 commits
Select commit Hold shift + click to select a range
5f2198a
ci: use matrix jobs for cumulus tests
lrubasze Jul 11, 2025
293a36c
ci: tweaks
lrubasze Jul 11, 2025
bc2228d
ci: more tweaks
lrubasze Jul 11, 2025
592e05e
ci: fix artifact naming
lrubasze Jul 11, 2025
6ad19c9
ci: use matrix jobs for polkadot tests
lrubasze Jul 11, 2025
73c33fd
ci: use matrix jobs for parachain template tests
lrubasze Jul 11, 2025
df33b5e
ci: use matrix jobs for substrate tests
lrubasze Jul 11, 2025
a32ff98
cleanup
lrubasze Jul 11, 2025
b63a19f
ci: some fixes
lrubasze Jul 11, 2025
dae6ad1
ci: flaky handling
lrubasze Jul 11, 2025
d8cf342
ci: generate test matrix
lrubasze Jul 14, 2025
622ba2c
ci: generate test matrix for parachain-template tests
lrubasze Jul 14, 2025
a2d4f12
ci: generate test matrix for substrate tests
lrubasze Jul 14, 2025
0f3bb8d
ci: generate test matrix for polkadot tests
lrubasze Jul 14, 2025
b5ecb02
Merge branch 'master' into lrubasze-zombienet-ci-improviements
lrubasze Jul 14, 2025
ec2a6b7
ci: fix test matrix setting
lrubasze Jul 14, 2025
a07bd16
ci: fix #2 test matrix setting
lrubasze Jul 14, 2025
218632c
ci: fix #3 test matrix setting
lrubasze Jul 14, 2025
084892c
ci: remove debug
lrubasze Jul 14, 2025
77f0cca
ci: fix test-filter and job-name setting
lrubasze Jul 14, 2025
991bf50
ci: unset rust envs
lrubasze Jul 14, 2025
948f199
ci: cleanup
lrubasze Jul 15, 2025
8efbb76
ci: zombienet readme added
lrubasze Jul 15, 2025
6f85b7b
ci: zombienet readme tweaks
lrubasze Jul 15, 2025
878cd39
Merge branch 'master' into lrubasze-zombienet-ci-improviements
lrubasze Jul 15, 2025
743acb2
ci: add some comments on scripts
lrubasze Jul 15, 2025
faf8395
ci: zombienet readme improvements
lrubasze Jul 16, 2025
3ac7075
Update .github/workflows/zombienet-reusable-preflight.yml
pepoviola Jul 16, 2025
e01a023
ci, try permissions
pepoviola Jul 20, 2025
5acb494
ci, try permissions
pepoviola Jul 21, 2025
8794302
add permissions
pepoviola Jul 21, 2025
ceaabca
Merge branch 'master' into lrubasze-zombienet-ci-improviements
lrubasze Jul 21, 2025
67e6576
Merge branch 'master' into lrubasze-zombienet-ci-improviements
lrubasze Jul 21, 2025
869031a
test update
lrubasze Jul 21, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 9 additions & 6 deletions .github/actions/zombienet-sdk/action.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,12 @@ inputs:
ref-slug:
description: "Ref slug (e.g. branch-name-short)"
required: true
test:
test-filter:
description: "test filter to pass to nextest (e.g: functional::spam_statement_distribution_requests::spam_statement_distribution_requests_test)"
required: true
job-name:
description: "Job name to use for artifact uploading"
required: true
prefix:
description: "Archive prefix for tests files (e.g polkadot, cumulus or substrate)"
required: true
Expand All @@ -25,7 +28,7 @@ runs:
- name: common_vars
shell: bash
env:
TEST_NAME: ${{ inputs.test }}
TEST_FILTER: ${{ inputs.test-filter }}
PREFIX: ${{ inputs.prefix }}
run: |
echo "Vars"
Expand All @@ -36,7 +39,7 @@ runs:
echo "COL_IMAGE: $COL_IMAGE"
echo "MALUS_IMAGE: $MALUS_IMAGE"
echo "Inputs"
echo "test: $TEST_NAME"
echo "test: $TEST_FILTER"
echo "prefix: $PREFIX"

- name: Download binaries for zombienet native tests
Expand Down Expand Up @@ -66,7 +69,7 @@ runs:
env:
# don't retry sdk tests
NEXTEST_RETRIES: 0
TEST_NAME: ${{ inputs.test }}
TEST_FILTER: ${{ inputs.test-filter }}
PREFIX: ${{ inputs.prefix }}
run: |
# RUN_IN_CI=1 shall be set only for k8s provider
Expand All @@ -87,7 +90,7 @@ runs:
# We want to run tests sequentially, '--no-capture' ensures that.
# If we want to get rid of '--no-capture' some day, please use '--test-threads 1' or NEXTEST_TEST_THREADS=1
# Both options cannot coexist for cargo-nextest below v0.9.94
cargo nextest run --archive-file ./artifacts/${PREFIX}-zombienet-tests.tar.zst --no-capture -- ${TEST_NAME}
cargo nextest run --archive-file ./artifacts/${PREFIX}-zombienet-tests.tar.zst --no-capture -- ${TEST_FILTER}

- name: process_logs
if: ${{ ! cancelled() }}
Expand All @@ -108,7 +111,7 @@ runs:
uses: actions/upload-artifact@v4
if: ${{ ! cancelled() }}
with:
name: zombienet-logs-${{ github.job }}-${{ github.sha }}
name: zombienet-logs-${{ inputs.job-name }}-${{ github.sha }}
path: |
/tmp/zombie*/logs/*

18 changes: 11 additions & 7 deletions .github/actions/zombienet/action.yml
Original file line number Diff line number Diff line change
@@ -1,9 +1,12 @@
name: "Zombienet test v1"
description: "Runs zombienet tests"
inputs:
test:
test-definition:
description: "test definition (zndsl file)"
required: true
job-name:
description: "Job name to use for artifact uploading"
required: true
local-dir:
description: "Path to the directory that contains the test file (.zndsl)"
required: true
Expand All @@ -27,7 +30,7 @@ runs:
- name: common_vars
shell: bash
env:
TEST_NAME: ${{ inputs.test }}
TEST_DEFINITION: ${{ inputs.test-definition }}
LOCAL_PATH: ${{ inputs.local-dir }}
CONCURRENCY: ${{ inputs.concurrency }}
run: |
Expand All @@ -36,7 +39,8 @@ runs:
echo "ZOMBIENET_PROVIDER: $ZOMBIENET_PROVIDER"
echo "COL_IMAGE: $COL_IMAGE"
echo "Inputs"
echo "test: $TEST_NAME"
echo "test-definition: $TEST_DEFINITION"
echo "job-name: ${{ inputs.job-name }}"
echo "local-dir: $LOCAL_PATH"
echo "concurrency: $CONCURRENCY"

Expand All @@ -59,7 +63,7 @@ runs:
- name: zombie_test
shell: bash
env:
TEST_NAME: ${{ inputs.test }}
TEST_DEFINITION: ${{ inputs.test-definition }}
LOCAL_PATH: ${{ inputs.local-dir }}
CONCURRENCY: ${{ inputs.concurrency }}
run: |
Expand All @@ -71,7 +75,7 @@ runs:
./.github/scripts/run-zombienet-test.sh \
"$(pwd)/$LOCAL_PATH" \
$CONCURRENCY \
"$TEST_NAME"
"$TEST_DEFINITION"
else
# no need to check other runner variables. for k8s they shall store the same value
if [[ $ZOMBIENET_DEFAULT_RUNNER == "parity-zombienet" ]]; then
Expand All @@ -81,7 +85,7 @@ runs:
/home/nonroot/zombie-net/scripts/ci/run-test-local-env-manager.sh \
--local-dir="$(pwd)/$LOCAL_PATH" \
--concurrency=$CONCURRENCY \
--test="$TEST_NAME"
--test="$TEST_DEFINITION"
fi

- name: process_logs
Expand All @@ -102,7 +106,7 @@ runs:
uses: actions/upload-artifact@v4
if: ${{ ! cancelled() }}
with:
name: zombienet-logs-${{ github.job }}-${{ github.sha }}
name: zombienet-logs-${{ inputs.job-name }}-${{ github.sha }}
path: |
/tmp/zombie*/logs/*

123 changes: 123 additions & 0 deletions .github/scripts/dispatch-zombienet-workflow.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,123 @@
#!/bin/bash

# Zombienet Workflow Dispatcher
#
# This script triggers GitHub Actions workflows for zombienet tests and monitors their execution.
# It can run workflows multiple times for reliability testing and optionally filter tests by pattern.
# Results are automatically saved to a timestamped CSV file for analysis.
#
# Features:
# - Trigger workflows on specific branches
# - Filter tests by pattern (useful for debugging specific tests)
# - Run workflows multiple times for flaky test detection
# - Monitor workflow completion and collect results
# - Export results to CSV with job details (ID, name, conclusion, timing, URLs)
#
# Requirements:
# - GitHub CLI (gh) must be installed and authenticated
# - Must be run from polkadot-sdk repository root
# - Target branch must have corresponding PR with CI enabled

# Exit on error
# set -e

# Print a message to stdout prefixed with a "YYYY-MM-DD HH:MM:SS - " timestamp.
# All positional arguments are joined into a single message string.
function dbg {
  # "$*" joins the args into one word; "$@" in an assignment has
  # surprising semantics (shellcheck SC2124).
  local msg="$*"

  # Declare and assign separately so the assignment does not mask the
  # exit status of date (shellcheck SC2155).
  local tstamp
  tstamp=$(date "+%Y-%m-%d %T")
  printf "%s - %s\n" "$tstamp" "$msg"
}

# Append one CSV row per finished "zombienet-*" job of a workflow run to the
# results file. Columns: job_id, job_name, conclusion, started_at, branch, job_url.
#   $1 - workflow run id (gh jobs "databaseId")
#   $2 - branch name, recorded verbatim in each row
#   $3 - path to the CSV file to append to
function write_job_results_to_csv {
local run_id="$1"
local branch="$2"
local csv_file="$3"

dbg "Writing job results for run $run_id to $csv_file"

# Get job details for the completed run, filtering only jobs starting with 'zombienet-' and with success or failure conclusions
# (jobs still running, cancelled, or skipped are excluded).
# The branch value is spliced into the jq program via shell quoting
# ('"'"$branch"'"'), so it appears as a literal string in the @csv row.
gh run view "$run_id" --json jobs --jq \
'.jobs[] | select(.name | startswith("zombienet-")) |
select(.conclusion == "success" or .conclusion == "failure") |
[.databaseId, .name, .conclusion, .startedAt, "'"$branch"'", .url] | @csv' >> "$csv_file"
}

# Parse command line arguments
WORKFLOW_FILE=""
BRANCH=""
MAX_RESULT_CNT=-1   # -1 means: keep looping forever
TEST_PATTERN=""

while getopts "w:b:m:p:h" opt; do
  case $opt in
    w) WORKFLOW_FILE="$OPTARG" ;;
    b) BRANCH="$OPTARG" ;;
    m) MAX_RESULT_CNT="$OPTARG" ;;
    p) TEST_PATTERN="$OPTARG" ;;
    h) echo "Usage: $0 -w <workflow-file> -b <branch> [-m max-triggers] [-p test-pattern]"
       echo "  -w: Workflow file (required)"
       echo "  -b: Branch name (required)"
       echo "  -m: Maximum number of triggers (optional, default: infinite)"
       echo "  -p: Test pattern for workflow input (optional)"
       exit 0 ;;
    \?) echo "Invalid option -$OPTARG" >&2
        echo "Use -h for help"
        exit 1 ;;
  esac
done

if [[ -z "$WORKFLOW_FILE" || -z "$BRANCH" ]]; then
  echo "Error: Both workflow file (-w) and branch (-b) are required"
  echo "Usage: $0 -w <workflow-file> -b <branch> [-m max-triggers] [-p test-pattern]"
  echo "Use -h for help"
  exit 1
fi

# Create CSV file with headers
CSV_FILE="workflow_results_$(date +%Y%m%d_%H%M%S).csv"
echo "job_id,job_name,conclusion,started_at,branch,job_url" > "$CSV_FILE"
dbg "Created CSV file: $CSV_FILE"

dbg "Starting loop for workflow: $WORKFLOW_FILE on branch: $BRANCH"

TRIGGER_CNT=0
RESULT_CNT=0

# Each iteration: wait for the previous dispatch to finish, record its job
# results (except on the very first pass), then dispatch the workflow again.
while [[ $MAX_RESULT_CNT -eq -1 || $RESULT_CNT -lt $MAX_RESULT_CNT ]]; do

  dbg "Waiting until workflow $WORKFLOW_FILE (branch: $BRANCH) jobs are completed"

  while true; do
    echo ""
    # Expansions are quoted so workflow/branch names never word-split or
    # glob (shellcheck SC2086).
    gh run list --workflow="$WORKFLOW_FILE" -e workflow_dispatch -b "$BRANCH" -L 5
    sleep 2
    # if job is completed it should have non-empty conclusion field
    ALL_JOBS_COMPLETED=$(gh run list --workflow="$WORKFLOW_FILE" -e workflow_dispatch -b "$BRANCH" --json conclusion --jq 'all(.[]; .conclusion != "")')
    if [[ "$ALL_JOBS_COMPLETED" == "true" ]]; then
      break
    fi
    sleep 60
  done
  dbg "Workflow $WORKFLOW_FILE (branch: $BRANCH) jobs completed"

  # Skip the first iteration - latest run id is not the one we triggered here
  if [[ $TRIGGER_CNT -gt 0 ]]; then
    # Get the most recent completed run ID and write job results to CSV
    LATEST_RUN_ID=$(gh run list --workflow="$WORKFLOW_FILE" -e workflow_dispatch -b "$BRANCH" -L 1 --json databaseId --jq '.[0].databaseId')
    write_job_results_to_csv "$LATEST_RUN_ID" "$BRANCH" "$CSV_FILE"
    RESULT_CNT=$(( RESULT_CNT + 1 ))
  fi

  TRIGGER_CNT=$(( TRIGGER_CNT + 1 ))
  dbg "Triggering #$TRIGGER_CNT workflow $WORKFLOW_FILE (branch: $BRANCH)"

  # Forward the test pattern to the workflow only when one was supplied.
  if [[ -n "$TEST_PATTERN" ]]; then
    gh workflow run "$WORKFLOW_FILE" --ref "$BRANCH" -f test_pattern="$TEST_PATTERN"
  else
    gh workflow run "$WORKFLOW_FILE" --ref "$BRANCH"
  fi

  dbg "Sleeping 60s"
  sleep 60
done

63 changes: 63 additions & 0 deletions .github/scripts/parse-zombienet-tests.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,63 @@
#!/usr/bin/env python3

"""
Zombienet Test Matrix Parser

This script parses YAML test definition files and converts them to JSON format
for use as GitHub Actions matrix jobs. It provides filtering capabilities to:

1. Exclude flaky tests (unless a specific test pattern is provided)
2. Filter tests by name pattern for targeted execution
3. Convert YAML test definitions to JSON matrix format

The script is used by GitHub Actions workflows to dynamically generate
test matrices based on YAML configuration files, enabling flexible
test execution and maintenance.

Usage:
python parse-zombienet-tests.py --matrix tests.yml [--flaky-tests flaky.txt] [--test-pattern pattern]

Output:
JSON array of test job objects suitable for GitHub Actions matrix strategy
"""

import argparse
import yaml
import json
import fnmatch

def parse_args():
    """Build and evaluate the command-line interface.

    Returns:
        argparse.Namespace with attributes ``matrix`` (required YAML path),
        ``flaky_tests`` (newline-separated job names, default ``""``) and
        ``test_pattern`` (substring or glob, default ``""``).
    """
    cli = argparse.ArgumentParser(
        description="Parse test matrix YAML file with optional filtering"
    )
    cli.add_argument(
        "--matrix", required=True, help="Path to the YAML matrix file"
    )
    cli.add_argument(
        "--flaky-tests",
        default="",
        help="Newline-separated list of flaky job names",
    )
    cli.add_argument(
        "--test-pattern",
        default="",
        help="Pattern to match job_name (substring or glob)",
    )
    return cli.parse_args()

def load_jobs(matrix_path):
    """Load job definitions from a YAML matrix file.

    Returns whatever ``yaml.safe_load`` produces for the file; the callers
    treat it as a list of job mappings.
    NOTE(review): ``safe_load`` returns None for an empty file rather than
    an empty list -- confirm matrix files are never empty.
    """
    with open(matrix_path, "r") as f:
        return yaml.safe_load(f)

def filter_jobs(jobs, flaky_tests, test_pattern):
    """Select the matrix jobs to run.

    Args:
        jobs: Iterable of job mappings (each may carry a ``"job-name"`` key).
            ``None`` is tolerated and treated as an empty list, since
            ``yaml.safe_load`` yields ``None`` for an empty matrix file.
        flaky_tests: Newline-separated job names to exclude when no pattern
            is given; blank lines are ignored.
        test_pattern: Optional substring/glob. When non-empty it takes
            precedence: the flaky list is bypassed and only jobs whose name
            matches ``*pattern*`` are kept.

    Returns:
        list: the selected job mappings, in their original order.
    """
    flaky_set = {name.strip() for name in flaky_tests.splitlines() if name.strip()}
    filtered = []

    # `or []` guards against a None jobs value (empty YAML file).
    for job in jobs or []:
        name = job.get("job-name", "")

        # If test_pattern provided then don't care about flaky tests,
        # just check test_pattern. (A non-empty string is truthy, so the
        # former `len(test_pattern) > 0` check was redundant.)
        if test_pattern:
            if fnmatch.fnmatch(name, f"*{test_pattern}*"):
                filtered.append(job)
        elif name not in flaky_set:
            filtered.append(job)

    return filtered

def main():
    """CLI entry point: load the matrix, filter it, print JSON on stdout."""
    args = parse_args()
    matrix_jobs = load_jobs(args.matrix)
    selected = filter_jobs(matrix_jobs, args.flaky_tests, args.test_pattern)
    print(json.dumps(selected))

if __name__ == "__main__":
    main()
26 changes: 26 additions & 0 deletions .github/workflows/zombienet-reusable-preflight.yml
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,13 @@ name: Zombienet Preflight

on:
workflow_call:
inputs:
tests_yaml:
required: true
type: string
test_pattern:
required: false
type: string
# Map the workflow outputs to job outputs
outputs:
changes_substrate:
Expand Down Expand Up @@ -133,6 +140,10 @@ on:
description: |
comma separated list of flaky tests to skip.

TEST_MATRIX:
value: ${{ jobs.preflight.outputs.TEST_MATRIX }}
description: |
JSON formatted test matrix parsed from test yaml
jobs:
#
#
Expand Down Expand Up @@ -166,6 +177,7 @@ jobs:
KUBERNETES_MEMORY_REQUEST: ${{ steps.set_vars.outputs.KUBERNETES_MEMORY_REQUEST }}
TEMP_IMAGES_BASE: ${{ steps.set_vars.outputs.TEMP_IMAGES_BASE }}
FLAKY_TESTS: ${{ steps.set_vars.outputs.FLAKY_TESTS }}
TEST_MATRIX: ${{ steps.generate_test_matrix.outputs.TEST_MATRIX }}

# zombienet-sdk vars
RUST_LOG: ${{ steps.set_vars.outputs.RUST_LOG }}
Expand Down Expand Up @@ -282,6 +294,20 @@ jobs:
echo "ZOMBIENET_IMAGE: ${{ steps.set_vars.outputs.ZOMBIENET_IMAGE }}"
echo "ZOMBIENET_SDK_IMAGE: ${{ steps.set_vars.outputs.ZOMBIENET_SDK_IMAGE }}"

- name: Generate test matrix
id: generate_test_matrix
shell: bash
env:
TESTS_YAML: ${{ inputs.tests_yaml }}
TEST_PATTERN: ${{ inputs.test_pattern || '' }}
run: |
python3 .github/scripts/parse-zombienet-tests.py \
--matrix ${TESTS_YAML} \
--flaky-tests "${{ steps.set_vars.outputs.FLAKY_TESTS }}" \
--test-pattern "${TEST_PATTERN}" > matrix.json
echo "TEST_MATRIX=$(cat matrix.json)" >> $GITHUB_OUTPUT
echo "TEST_MATRIX:"
cat matrix.json | jq '.'
#
#
#
Expand Down
Loading
Loading