From 651567dffee455301edd063f09a18e6385b29443 Mon Sep 17 00:00:00 2001
From: Jimmy Chen
Date: Fri, 11 Jul 2025 16:53:09 +1000
Subject: [PATCH 1/6] Add release helper script.

---
 scripts/print_release_diffs.py | 72 ++++++++++++++++++++++++++++++++++
 1 file changed, 72 insertions(+)
 create mode 100644 scripts/print_release_diffs.py

diff --git a/scripts/print_release_diffs.py b/scripts/print_release_diffs.py
new file mode 100644
index 00000000000..d910b1be5bd
--- /dev/null
+++ b/scripts/print_release_diffs.py
@@ -0,0 +1,72 @@
+"""
+Summarise pull requests between two Lighthouse releases.
+
+Usage:
+    export GITHUB_TOKEN=your_token
+    python -m pip install requests==2.32.4
+    python print_release_diffs.py --base v7.0.1 --head release-v7.1.0
+
+Shows commit SHA, PR number, 'backwards-incompat' label status, and PR title.
+"""
+
+import requests
+import re
+import argparse
+import os
+
+GITHUB_TOKEN = os.environ.get("GITHUB_TOKEN")
+if not GITHUB_TOKEN:
+    raise SystemExit("Error: Please set the GITHUB_TOKEN environment variable.")
+
+parser = argparse.ArgumentParser(description="Summarise PRs between two Lighthouse versions.")
+parser.add_argument("--base", required=True, help="Base tag or branch (older release)")
+parser.add_argument("--head", required=True, help="Head tag or branch (newer release)")
+args = parser.parse_args()
+
+BASE = args.base
+HEAD = args.head
+OWNER = 'sigp'
+REPO = 'lighthouse'
+
+HEADERS = {
+    'Authorization': f'token {GITHUB_TOKEN}',
+    'Accept': 'application/vnd.github+json'
+}
+
+def get_commits_between(base, head):
+    url = f'https://api.github.com/repos/{OWNER}/{REPO}/compare/{base}...{head}'
+    response = requests.get(url, headers=HEADERS)
+    response.raise_for_status()
+    return response.json()['commits']
+
+def has_backwards_incompat_label(pr_number):
+    url = f'https://api.github.com/repos/{OWNER}/{REPO}/issues/{pr_number}'
+    response = requests.get(url, headers=HEADERS)
+    if response.status_code != 200:
+        raise Exception(f"Failed to fetch PR #{pr_number}")
+    labels = response.json().get('labels', [])
+    return any(label['name'] == 'backwards-incompat' for label in labels)
+
+def main():
+    commits = get_commits_between(BASE, HEAD)
+    print("  # Commit SHA   PR Number   Has backwards-incompat Label   PR Title")
+    print("--- ------------ ----------- ------------------------------ --------------------------------------------")
+
+    for i, commit in enumerate(commits, 1):
+        sha = commit['sha'][:12]
+        message = commit['commit']['message']
+        pr_match = re.search(r"\(#(\d+)\)", message)
+
+        if not pr_match:
+            print(f"{i:<3} {sha} {'-':<11} {'-':<30} [NO PR MATCH]: {message.splitlines()[0]}")
+            continue
+
+        pr_number = int(pr_match.group(1))
+        try:
+            has_label = has_backwards_incompat_label(pr_number)
+            print(f"{i:<3} {sha} {pr_number:<11} {str(has_label):<30} {message.splitlines()[0]}")
+        except Exception as e:
+            print(f"{i:<3} {sha} {pr_number:<11} {'ERROR':<30} [ERROR FETCHING PR]: {e}")
+
+if __name__ == '__main__':
+    main()

From 19688dfbb2274dc447de594c8b958464f8b2b8ee Mon Sep 17 00:00:00 2001
From: Eitan Seri- Levi
Date: Tue, 9 Sep 2025 23:43:43 -0700
Subject: [PATCH 2/6] Prevent silently dropping cell proof chunks

---
 beacon_node/beacon_chain/src/kzg_utils.rs  | 41 +++++++++++++++++++---
 consensus/types/src/data_column_sidecar.rs |  4 +++
 2 files changed, 41 insertions(+), 4 deletions(-)

diff --git a/beacon_node/beacon_chain/src/kzg_utils.rs b/beacon_node/beacon_chain/src/kzg_utils.rs
index 3063e78337b..ad669e17291 100644
--- a/beacon_node/beacon_chain/src/kzg_utils.rs
+++ b/beacon_node/beacon_chain/src/kzg_utils.rs
@@ -174,6 +174,13 @@ pub fn blobs_to_data_column_sidecars(
 
     let kzg_commitments_inclusion_proof = block.message().body().kzg_commitments_merkle_proof()?;
     let signed_block_header = block.signed_block_header();
+    if cell_proofs.len() != blobs.len() * E::number_of_columns() {
+        return Err(DataColumnSidecarError::InvalidCellProofLength {
+            expected: blobs.len() * E::number_of_columns(),
+            actual: cell_proofs.len(),
+        });
+    }
+
     let proof_chunks = cell_proofs
         .chunks_exact(E::number_of_columns())
         .collect::<Vec<_>>();
@@ -365,14 +372,18 @@ pub fn reconstruct_blobs(
 /// Reconstruct all data columns from a subset of data column sidecars (requires at least 50%).
 pub fn reconstruct_data_columns<E: EthSpec>(
     kzg: &Kzg,
-    data_columns: &[Arc<DataColumnSidecar<E>>],
+    mut data_columns: Vec<Arc<DataColumnSidecar<E>>>,
     spec: &ChainSpec,
 ) -> Result<DataColumnSidecarList<E>, KzgError> {
+    // Sort data columns by index to ensure ascending order for KZG operations
+    data_columns.sort_unstable_by_key(|dc| dc.index);
+
     let first_data_column = data_columns
         .first()
         .ok_or(KzgError::InconsistentArrayLength(
            "data_columns should have at least one element".to_string(),
         ))?;
+
     let num_of_blobs = first_data_column.kzg_commitments.len();
 
     let blob_cells_and_proofs_vec =
@@ -381,7 +392,7 @@ pub fn reconstruct_data_columns(
         .map(|row_index| {
             let mut cells: Vec<KzgCellRef> = vec![];
             let mut cell_ids: Vec<u64> = vec![];
-            for data_column in data_columns {
+            for data_column in &data_columns {
                 let cell = data_column.column.get(row_index).ok_or(
                     KzgError::InconsistentArrayLength(format!(
                         "Missing data column at row index {row_index}"
@@ -433,6 +444,7 @@ mod test {
         test_build_data_columns_empty(&kzg, &spec);
         test_build_data_columns(&kzg, &spec);
         test_reconstruct_data_columns(&kzg, &spec);
+        test_reconstruct_data_columns_unordered(&kzg, &spec);
         test_reconstruct_blobs_from_data_columns(&kzg, &spec);
         test_validate_data_columns(&kzg, &spec);
     }
@@ -505,7 +517,7 @@
 
     #[track_caller]
     fn test_reconstruct_data_columns(kzg: &Kzg, spec: &ChainSpec) {
-        let num_of_blobs = 6;
+        let num_of_blobs = 2;
         let (signed_block, blobs, proofs) =
             create_test_fulu_block_and_blobs::<E>(num_of_blobs, spec);
         let blob_refs = blobs.iter().collect::<Vec<_>>();
@@ -516,7 +528,7 @@
         // Now reconstruct
         let reconstructed_columns = reconstruct_data_columns(
             kzg,
-            &column_sidecars.iter().as_slice()[0..column_sidecars.len() / 2],
+            column_sidecars.iter().as_slice()[0..column_sidecars.len() / 2].to_vec(),
             spec,
         )
         .unwrap();
@@ -526,6 +538,27 @@
         for i in 0..E::number_of_columns() {
             assert_eq!(reconstructed_columns.get(i), column_sidecars.get(i), "{i}");
         }
     }
 
+    #[track_caller]
+    fn test_reconstruct_data_columns_unordered(kzg: &Kzg, spec: &ChainSpec) {
+        let num_of_blobs = 2;
+        let (signed_block, blobs, proofs) =
+            create_test_fulu_block_and_blobs::<E>(num_of_blobs, spec);
+        let blob_refs = blobs.iter().collect::<Vec<_>>();
+        let column_sidecars =
+            blobs_to_data_column_sidecars(&blob_refs, proofs.to_vec(), &signed_block, kzg, spec)
+                .unwrap();
+
+        // Test reconstruction with columns in reverse order (non-ascending)
+        let mut subset_columns: Vec<_> =
+            column_sidecars.iter().as_slice()[0..column_sidecars.len() / 2].to_vec();
+        subset_columns.reverse(); // This would fail without proper sorting in reconstruct_data_columns
+        let reconstructed_columns = reconstruct_data_columns(kzg, subset_columns, spec).unwrap();
+
+        for i in 0..E::number_of_columns() {
+            assert_eq!(reconstructed_columns.get(i), column_sidecars.get(i), "{i}");
+        }
+    }
+
     #[track_caller]
     fn test_reconstruct_blobs_from_data_columns(kzg: &Kzg, spec: &ChainSpec) {
         let num_of_blobs = 6;
diff --git a/consensus/types/src/data_column_sidecar.rs b/consensus/types/src/data_column_sidecar.rs
index 57f7a88e193..c86a6d417e9 100644
--- a/consensus/types/src/data_column_sidecar.rs
+++ b/consensus/types/src/data_column_sidecar.rs
@@ -143,6 +143,10 @@ pub enum DataColumnSidecarError {
     PreDeneb,
     SszError(SszError),
     BuildSidecarFailed(String),
+    InvalidCellProofLength {
+        expected: usize,
+        actual: usize
+    }
 }
 
 impl From<SszError> for DataColumnSidecarError {

From 3cfc1189d7e2fdcc778438154242f90bebe017b0 Mon Sep 17 00:00:00 2001
From: Eitan Seri- Levi
Date: Tue, 9 Sep 2025 23:45:27 -0700
Subject: [PATCH 3/6] fix

---
 beacon_node/beacon_chain/src/kzg_utils.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/beacon_node/beacon_chain/src/kzg_utils.rs b/beacon_node/beacon_chain/src/kzg_utils.rs
index ad669e17291..11862049934 100644
--- a/beacon_node/beacon_chain/src/kzg_utils.rs
+++ b/beacon_node/beacon_chain/src/kzg_utils.rs
@@ -174,7 +174,7 @@ pub fn blobs_to_data_column_sidecars(
 
     let kzg_commitments_inclusion_proof = block.message().body().kzg_commitments_merkle_proof()?;
     let signed_block_header = block.signed_block_header();
-    if cell_proofs.len() != blobs.len() * E::number_of_columns() {
+    if cell_proofs.len() != E::number_of_columns() {
         return Err(DataColumnSidecarError::InvalidCellProofLength {
             expected: blobs.len() * E::number_of_columns(),
             actual: cell_proofs.len(),

From b113e00f8987a1185f73fb84927ecfb5bf5ce0ee Mon Sep 17 00:00:00 2001
From: Eitan Seri- Levi
Date: Tue, 9 Sep 2025 23:51:39 -0700
Subject: [PATCH 4/6] fmt

---
 consensus/types/src/data_column_sidecar.rs | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/consensus/types/src/data_column_sidecar.rs b/consensus/types/src/data_column_sidecar.rs
index c86a6d417e9..2272b1695c9 100644
--- a/consensus/types/src/data_column_sidecar.rs
+++ b/consensus/types/src/data_column_sidecar.rs
@@ -143,10 +143,7 @@ pub enum DataColumnSidecarError {
     PreDeneb,
     SszError(SszError),
     BuildSidecarFailed(String),
-    InvalidCellProofLength {
-        expected: usize,
-        actual: usize
-    }
+    InvalidCellProofLength { expected: usize, actual: usize },
 }
 
 impl From<SszError> for DataColumnSidecarError {

From d349a1b949d462f754abbdf910edfc5fdb7ec1fd Mon Sep 17 00:00:00 2001
From: Eitan Seri- Levi
Date: Wed, 10 Sep 2025 14:19:22 -0700
Subject: [PATCH 5/6] fix test

---
 beacon_node/beacon_chain/src/kzg_utils.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/beacon_node/beacon_chain/src/kzg_utils.rs b/beacon_node/beacon_chain/src/kzg_utils.rs
index 11862049934..ad669e17291 100644
--- a/beacon_node/beacon_chain/src/kzg_utils.rs
+++ b/beacon_node/beacon_chain/src/kzg_utils.rs
@@ -174,7 +174,7 @@ pub fn blobs_to_data_column_sidecars(
 
     let kzg_commitments_inclusion_proof = block.message().body().kzg_commitments_merkle_proof()?;
     let signed_block_header = block.signed_block_header();
-    if cell_proofs.len() != E::number_of_columns() {
+    if cell_proofs.len() != blobs.len() * E::number_of_columns() {
         return Err(DataColumnSidecarError::InvalidCellProofLength {
             expected: blobs.len() * E::number_of_columns(),
             actual: cell_proofs.len(),

From 7dfb07285d75bb6cf0f085bf4e8fd5038851eb49 Mon Sep 17 00:00:00 2001
From: Eitan Seri- Levi
Date: Wed, 17 Sep 2025 17:42:39 -0700
Subject: [PATCH 6/6] Fix log message

---
 beacon_node/http_api/src/publish_blocks.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/beacon_node/http_api/src/publish_blocks.rs b/beacon_node/http_api/src/publish_blocks.rs
index b6411167d92..05a4a4b7a4a 100644
--- a/beacon_node/http_api/src/publish_blocks.rs
+++ b/beacon_node/http_api/src/publish_blocks.rs
@@ -412,7 +412,7 @@ fn build_data_columns(
             error!(
                 error = ?e,
                 %slot,
-                "Invalid data column - not publishing block"
+                "Invalid data column - not publishing data columns"
             );
             warp_utils::reject::custom_bad_request(format!("{e:?}"))
         })?;