Merged
192 changes: 192 additions & 0 deletions .github/scripts/check-hive-results.sh
@@ -0,0 +1,192 @@
#!/usr/bin/env bash
Collaborator Author:
This script collects the log files of failed tests so they can be uploaded as an artifact.


# Verifies Hive JSON results, prints failing tests, copies related logs,
# and updates the GitHub summary to surface the failures in the workflow UI.
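#
# Example invocation (matches the workflow step wired up below; the results
# path defaults to src/results when the argument is omitted):
#   ./.github/scripts/check-hive-results.sh src/results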

set -euo pipefail

if ! command -v jq >/dev/null 2>&1; then
  echo "jq is required to parse Hive results but was not found in PATH"
  exit 1
fi

results_dir="${1:-src/results}"

if [ ! -d "$results_dir" ]; then
  echo "Hive results directory '${results_dir}' not found"
  exit 1
fi

if ! results_dir="$(cd "${results_dir}" >/dev/null 2>&1 && pwd -P)"; then
  echo "Failed to resolve absolute path for Hive results directory"
  exit 1
fi

results_parent="$(dirname "${results_dir}")"
workspace_logs_dir=""
if [ -d "${results_parent}/workspace/logs" ]; then
  workspace_logs_dir="$(cd "${results_parent}/workspace/logs" >/dev/null 2>&1 && pwd -P)"
fi

shopt -s nullglob
json_files=("${results_dir}"/*.json)
shopt -u nullglob

if [ ${#json_files[@]} -eq 0 ]; then
  echo "No Hive JSON result files found in ${results_dir}"
  exit 1
fi

failures=0
failed_logs_root="${results_dir}/failed_logs"
rm -rf "${failed_logs_root}"
mkdir -p "${failed_logs_root}"

for json_file in "${json_files[@]}"; do
  if [[ "${json_file}" == *"hive.json" ]]; then
    continue
  fi

  suite_name="$(jq -r '.name // empty' "${json_file}")"
  failed_cases="$(jq '[.testCases[]? | select(.summaryResult.pass != true)] | length' "${json_file}")"

  if [ "${failed_cases}" -gt 0 ]; then
    echo "Detected ${failed_cases} failing test case(s) in ${suite_name:-$(basename "${json_file}")}"
    failure_list="$(
      jq -r '
        .testCases[]?
        | select(.summaryResult.pass != true)
        | . as $case
        | ($case.summaryResult // {}) as $summary
        | ($summary.message // $summary.reason // $summary.error // "") as $message
        | (if $summary.log?
           then "log lines "
             + (($summary.log.begin // "?") | tostring)
             + "-"
             + (($summary.log.end // "?") | tostring)
           else ""
           end) as $log_hint
        | (if $message != "" then $message else $log_hint end) as $detail
        | (if $case.clientInfo?
           then ($case.clientInfo
             | to_entries
             | map((.value.name // .key) + ": " + (.value.logFile // "unknown log"))
             | join("; "))
           else ""
           end) as $clients
        | "- " + ($case.name // "unknown test")
          + (if $detail != "" then ": " + $detail else "" end)
          + (if $clients != "" then " (client logs: " + $clients + ")" else "" end)
      ' "${json_file}"
    )"

    printf '%s\n' "${failure_list}"

    if [ -n "${GITHUB_STEP_SUMMARY:-}" ]; then
      {
        echo "### Hive failures: ${suite_name:-$(basename "${json_file}" .json)}"
        printf '%s\n' "${failure_list}"
        echo
      } >> "${GITHUB_STEP_SUMMARY}"
    fi

    suite_slug_raw="${suite_name:-$(basename "${json_file}" .json)}"
    suite_slug="$(printf '%s' "${suite_slug_raw}" | tr '[:upper:]' '[:lower:]')"
    suite_slug="$(printf '%s' "${suite_slug}" | sed -E 's/[^a-z0-9._-]+/-/g')"
    suite_slug="${suite_slug#-}"
    suite_slug="${suite_slug%-}"
Comment on lines +94 to +97

Copilot AI (Oct 14, 2025):
Multiple separate calls to external commands for string manipulation could be combined into a single operation, or replaced with bash parameter expansion, for better performance.

Suggested change
suite_slug="$(printf '%s' "${suite_slug_raw}" | tr '[:upper:]' '[:lower:]')"
suite_slug="$(printf '%s' "${suite_slug}" | sed -E 's/[^a-z0-9._-]+/-/g')"
suite_slug="${suite_slug#-}"
suite_slug="${suite_slug%-}"
suite_slug="${suite_slug_raw,,}" # lowercase
suite_slug="${suite_slug//[^a-z0-9._-]/-}" # replace non-matching chars with '-'
suite_slug="${suite_slug#-}" # trim leading dash
suite_slug="${suite_slug%-}" # trim trailing dash

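One behavioral difference worth noting: ${var//[^a-z0-9._-]/-} replaces each disallowed character individually, while the sed call collapses a whole run into a single dash. A minimal sketch with a made-up suite name:

s='Engine  Tests!!'
lower="${s,,}"                                           # bash 4+ lowercasing
echo "${lower//[^a-z0-9._-]/-}"                          # per-char:  engine--tests--
printf '%s\n' "${lower}" | sed -E 's/[^a-z0-9._-]+/-/g'  # collapsed: engine-tests-

The later #- / %- trims only strip a single dash, so the two approaches can still yield different slugs.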

    suite_dir="${failed_logs_root}/${suite_slug:-suite}"
    mkdir -p "${suite_dir}"

    {
      printf '%s\n' "Detected ${failed_cases} failing test case(s) in ${suite_name:-$(basename "${json_file}")}"
      printf '%s\n' "${failure_list}"
      echo
    } >> "${suite_dir}/failed-tests.txt"

    cp "${json_file}" "${suite_dir}/"

    suite_logs_output="$(
      jq -r '
        [
          .simLog?,
          .testDetailsLog?,
          (.testCases[]? | select(.summaryResult.pass != true) | .clientInfo? | to_entries? // [] | map(.value.logFile? // empty) | .[]),
          (.testCases[]? | select(.summaryResult.pass != true) | .summaryResult.logFile?),
          (.testCases[]? | select(.summaryResult.pass != true) | .logFile?)
        ]
        | map(select(. != null and . != ""))
        | unique
        | .[]
      ' "${json_file}" 2>/dev/null || true
    )"

    if [ -n "${suite_logs_output}" ]; then
      while IFS= read -r log_rel; do
        [ -z "${log_rel}" ] && continue

        log_path=""
        if [[ "${log_rel}" == /* ]]; then
          if [ -f "${log_rel}" ]; then
            log_path="${log_rel}"
          fi
        else
          candidate_paths=(
            "${results_dir}/${log_rel}"
            "${results_dir}/logs/${log_rel}"
          )
          if [ -n "${workspace_logs_dir}" ]; then
            candidate_paths+=("${workspace_logs_dir}/${log_rel}")
          fi

          for candidate in "${candidate_paths[@]}"; do
            if [ -f "${candidate}" ]; then
              log_path="${candidate}"
              break
            fi
          done
        fi

        if [ -z "${log_path}" ] && [[ "${log_rel}" != /* ]]; then
          search_roots=("${results_dir}")
          if [ -d "${results_dir}/logs" ]; then
            search_roots+=("${results_dir}/logs")
          fi
          if [ -n "${workspace_logs_dir}" ]; then
            search_roots+=("${workspace_logs_dir}")
          fi

          for search_root in "${search_roots[@]}"; do
            [ -d "${search_root}" ] || continue
            found_log="$(find "${search_root}" -type f -name "$(basename "${log_rel}")" -print -quit 2>/dev/null || true)"
Copilot AI (Oct 14, 2025):
The find command with basename could be inefficient for large directory trees. Consider using more specific search patterns or limiting search depth if the log structure is known.

Suggested change
            found_log="$(find "${search_root}" -type f -name "$(basename "${log_rel}")" -print -quit 2>/dev/null || true)"
            found_log="$(find "${search_root}" -maxdepth 2 -type f -name "$(basename "${log_rel}")" -print -quit 2>/dev/null || true)"

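For context on the suggested -maxdepth 2: it bounds how deep find recurses below each search root. A quick sketch with throwaway paths (hypothetical layout, not the real Hive structure):

mkdir -p /tmp/hive-find-demo/logs/nested/deep
touch /tmp/hive-find-demo/logs/client.log /tmp/hive-find-demo/logs/nested/deep/client.log
find /tmp/hive-find-demo -type f -name client.log -print -quit              # may traverse the full tree
find /tmp/hive-find-demo -maxdepth 2 -type f -name client.log -print -quit  # only reaches logs/client.log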

if [ -n "${found_log}" ]; then
log_path="${found_log}"
break
fi
done
fi

if [ -n "${log_path}" ]; then
target_path="${suite_dir}/${log_rel}"
mkdir -p "$(dirname "${target_path}")"
if [ ! -f "${target_path}" ]; then
cp "${log_path}" "${target_path}"
fi
else
echo "Referenced log '${log_rel}' not found for suite ${suite_name:-$(basename "${json_file}")}"
fi
done <<< "${suite_logs_output}"
fi

echo "Saved Hive failure artifacts to ${suite_dir}"

failures=$((failures + failed_cases))
fi
done

if [ "${failures}" -gt 0 ]; then
echo "Hive reported ${failures} failing test cases in total"
exit 1
fi

echo "Hive reported no failing test cases."
63 changes: 35 additions & 28 deletions .github/workflows/pr-main_l1.yaml
@@ -64,6 +64,7 @@ jobs:
          make test

Copilot AI (Oct 14, 2025):
The conditional logic skipping EF tests for merge groups lacks documentation. Consider adding a comment explaining why EF tests are skipped for merge group events.

Suggested change
      # EF tests are skipped for merge group events to reduce CI load and avoid redundant test runs,
      # since merge group events are used for speculative merges and do not require full EF test coverage.


      - name: Run Blockchain EF tests
        if: ${{ github.event_name != 'merge_group' }}
        run: |
          make -C tooling/ef_tests/blockchain test
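For local reproduction, a hedged shell equivalent of that if: guard, using the GITHUB_EVENT_NAME variable that Actions sets:

# Mirrors the workflow condition: skip EF tests on merge-group runs.
if [ "${GITHUB_EVENT_NAME:-}" != "merge_group" ]; then
  make -C tooling/ef_tests/blockchain test
fi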

@@ -143,55 +144,49 @@ jobs:
        include:
          - name: "Rpc Compat tests"
            simulation: ethereum/rpc-compat
            limit: ""
            limit: "rpc-compat/(debug_[^/]+/.*|eth_blobBaseFee/.*|eth_blockNumber/.*|eth_call/.*|eth_chainId/.*|eth_createAccessList/.*|eth_estimateGas/.*|eth_feeHistory/.*|eth_getBalance/.*|eth_getBlockByHash/.*|eth_getBlockByNumber/.*|eth_getBlockReceipts/.*|eth_getBlockTransactionCountByHash/.*|eth_getBlockTransactionCountByNumber/.*|eth_getCode/.*|eth_getLogs/.*|eth_getProof/.*|eth_getStorageAt/.*|eth_getTransactionByBlockHashAndIndex/.*|eth_getTransactionByBlockNumberAndIndex/.*|eth_getTransactionByHash/.*|eth_getTransactionCount/.*|eth_getTransactionReceipt/.*|eth_sendRawTransaction/.*)"
Collaborator Author:
eth-syncing is now failing

Collaborator:
Can we add a comment mentioning this?

            hive_repository: lambdaclass/hive
            hive_version: 115f4d6ef1bdd2bfcabe29ec60424f6327e92f43
            artifact_prefix: rpc_compat
          - name: "Devp2p tests"
            simulation: devp2p
            limit: discv4|eth|snap/Ping|Findnode/WithoutEndpointProof|Findnode/PastExpiration|Amplification|Status|StorageRanges|ByteCodes|GetBlockHeaders|SimultaneousRequests|SameRequestID|ZeroRequestID|GetBlockBodies|MaliciousHandshake|MaliciousStatus|Transaction|NewPooledTxs|GetBlockReceipts|LargeTxRequest|InvalidTxs|BlockRangeUpdate
            # AccountRange and GetTrieNodes don't pass anymore.
            limit: discv4|eth|snap/Ping|Findnode/WithoutEndpointProof|Findnode/PastExpiration|Amplification|Status|StorageRanges|ByteCodes|GetBlockHeaders|SimultaneousRequests|SameRequestID|ZeroRequestID|GetBlockBodies|MaliciousHandshake|MaliciousStatus|NewPooledTxs|GetBlockReceipts|BlockRangeUpdate|GetTrieNodes
Collaborator Author:
there were some regressions

            # Findnode/BasicFindnode fails due to packets being processed out of order
            # Findnode/UnsolicitedNeighbors flaky in CI very occasionally. When fixed replace all "Findnode/<test>" with "Findnode"
            hive_repository: lambdaclass/hive
            hive_version: 115f4d6ef1bdd2bfcabe29ec60424f6327e92f43
            hive_repository: ethereum/hive
            hive_version: c7deebe0c604248e90f27de51f6037d4b7b8c5b5
            artifact_prefix: devp2p
          - name: "Engine Auth and EC tests"
            simulation: ethereum/engine
            limit: engine-(auth|exchange-capabilities)/
            hive_repository: ethereum/hive
            hive_version: 2b7a9c007770b10cb1a242a6c6de88c87a383e5a
            hive_version: c7deebe0c604248e90f27de51f6037d4b7b8c5b5
            artifact_prefix: engine_auth_ec
          - name: "Cancun Engine tests"
            simulation: ethereum/engine
            limit: "engine-cancun"
            hive_repository: ethereum/hive
            hive_version: 2b7a9c007770b10cb1a242a6c6de88c87a383e5a
            artifact_prefix: engine_cancun
          # - name: "Cancun Engine tests"
Collaborator Author:
regression

          # simulation: ethereum/engine
          # limit: "engine-cancun"
          # hive_repository: ethereum/hive
          # hive_version: c7deebe0c604248e90f27de51f6037d4b7b8c5b5
          # artifact_prefix: engine_cancun
          - name: "Paris Engine tests"
            simulation: ethereum/engine
            limit: "engine-api"
            hive_repository: ethereum/hive
            hive_version: 2b7a9c007770b10cb1a242a6c6de88c87a383e5a
            hive_version: c7deebe0c604248e90f27de51f6037d4b7b8c5b5
            artifact_prefix: engine_paris
          - name: "Engine withdrawal tests"
            simulation: ethereum/engine
            limit: "engine-withdrawals/Corrupted Block Hash Payload|Empty Withdrawals|engine-withdrawals test loader|GetPayloadBodies|GetPayloadV2 Block Value|Max Initcode Size|Sync after 2 blocks - Withdrawals on Genesis|Withdraw many accounts|Withdraw to a single account|Withdraw to two accounts|Withdraw zero amount|Withdraw many accounts|Withdrawals Fork on Block 1 - 1 Block Re-Org|Withdrawals Fork on Block 1 - 8 Block Re-Org NewPayload|Withdrawals Fork on Block 2|Withdrawals Fork on Block 3|Withdrawals Fork on Block 8 - 10 Block Re-Org NewPayload|Withdrawals Fork on Canonical Block 8 / Side Block 7 - 10 Block Re-Org [^S]|Withdrawals Fork on Canonical Block 8 / Side Block 9 - 10 Block Re-Org [^S]"
            hive_repository: ethereum/hive
            hive_version: 2b7a9c007770b10cb1a242a6c6de88c87a383e5a
            hive_version: c7deebe0c604248e90f27de51f6037d4b7b8c5b5
            artifact_prefix: engine_withdrawals
          - name: "Sync full"
            simulation: ethereum/sync
            limit: ""
            hive_repository: ethereum/hive
            hive_version: 2b7a9c007770b10cb1a242a6c6de88c87a383e5a
            artifact_prefix: sync_full
          - name: "Sync snap"
            simulation: ethereum/sync
            limit: ""
            hive_repository: ethereum/hive
            hive_version: 2b7a9c007770b10cb1a242a6c6de88c87a383e5a
            artifact_prefix: sync_snap
          # Investigate this test
Collaborator Author:
regression

# - name: "Sync"
# simulation: ethereum/sync
# limit: ""
# hive_repository: ethereum/hive
# hive_version: c7deebe0c604248e90f27de51f6037d4b7b8c5b5
# artifact_prefix: sync
steps:
      - name: Free Disk Space (Ubuntu)
        uses: jlumbroso/[email protected]
@@ -245,8 +240,20 @@ jobs:
          client: ethrex
          client_config: ${{ steps.client-config.outputs.config }}
          extra_flags: ${{ steps.hive-flags.outputs.flags }}
          workflow_artifact_upload: true
          workflow_artifact_prefix: ${{ matrix.artifact_prefix }}

      - name: Check Hive Results For Failures
        id: verify-hive-results
        if: ${{ success() }}
        shell: bash
        run: ./.github/scripts/check-hive-results.sh src/results

      - name: Upload Hive Failure Logs
        if: ${{ failure() && steps.verify-hive-results.conclusion == 'failure' }}
        uses: actions/upload-artifact@v4
        with:
          name: hive_failed_logs_${{ matrix.artifact_prefix }}
          path: src/results/failed_logs
          if-no-files-found: warn

  # The purpose of this job is to add it as a required check in GitHub so that we don't have to add every individual job as a required check
  all-tests: