Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,10 @@

- [#6166](https://github.com/ChainSafe/forest/pull/6166) Gate `JWT` expiration validation behind environment variable `FOREST_JWT_DISABLE_EXP_VALIDATION`.

- [#6167](https://github.com/ChainSafe/forest/pull/6167) Added `forest-tool state compute` subcommand to generate a database snapshot for tipset validation.

- [#6167](https://github.com/ChainSafe/forest/pull/6167) Added `forest-tool state replay-compute` subcommand to replay tipset validation with a minimal database snapshot.

- [#6171](https://github.com/ChainSafe/forest/pull/6171) Enable V2 API support for basic Eth RPC methods: `EthChainId`, `EthProtocolVersion`, `EthSyncing`, `EthAccounts`.

### Changed
Expand Down
7 changes: 6 additions & 1 deletion docs/docs/users/reference/cli.sh
Original file line number Diff line number Diff line change
Expand Up @@ -94,16 +94,21 @@ generate_markdown_section "forest-cli" "f3 ready"
generate_markdown_section "forest-tool" ""

generate_markdown_section "forest-tool" "backup"
generate_markdown_section "forest-tool" "completion"
generate_markdown_section "forest-tool" "backup create"
generate_markdown_section "forest-tool" "backup restore"

generate_markdown_section "forest-tool" "completion"

generate_markdown_section "forest-tool" "benchmark"
generate_markdown_section "forest-tool" "benchmark car-streaming"
generate_markdown_section "forest-tool" "benchmark graph-traversal"
generate_markdown_section "forest-tool" "benchmark forest-encoding"
generate_markdown_section "forest-tool" "benchmark export"

generate_markdown_section "forest-tool" "state"
generate_markdown_section "forest-tool" "state compute"
generate_markdown_section "forest-tool" "state replay-compute"

generate_markdown_section "forest-tool" "state-migration"
generate_markdown_section "forest-tool" "state-migration actor-bundle"

Expand Down
9 changes: 8 additions & 1 deletion src/db/car/any.rs
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ use itertools::Either;
use positioned_io::ReadAt;
use std::borrow::Cow;
use std::io::{Error, ErrorKind, Read, Result};
use std::path::Path;
use std::path::{Path, PathBuf};
use std::sync::Arc;

pub enum AnyCar<ReaderT> {
Expand Down Expand Up @@ -138,6 +138,13 @@ impl TryFrom<&Path> for AnyCar<EitherMmapOrRandomAccessFile> {
}
}

impl TryFrom<&PathBuf> for AnyCar<EitherMmapOrRandomAccessFile> {
    type Error = std::io::Error;

    /// Convenience wrapper that forwards to the `TryFrom<&Path>` implementation.
    fn try_from(path: &PathBuf) -> std::io::Result<Self> {
        path.as_path().try_into()
    }
}

impl<ReaderT> Blockstore for AnyCar<ReaderT>
where
ReaderT: ReadAt,
Expand Down
1 change: 1 addition & 0 deletions src/tool/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@ where
Subcommand::Api(cmd) => cmd.run().await,
Subcommand::Net(cmd) => cmd.run().await,
Subcommand::Shed(cmd) => cmd.run(client).await,
Subcommand::State(cmd) => cmd.run().await,
Subcommand::Completion(cmd) => cmd.run(&mut std::io::stdout()),
}
})
Expand Down
2 changes: 1 addition & 1 deletion src/tool/subcommands/api_cmd.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
// SPDX-License-Identifier: Apache-2.0, MIT

mod api_compare_tests;
mod generate_test_snapshot;
pub(super) mod generate_test_snapshot;
mod report;
mod state_decode_params_tests;
mod stateful_tests;
Expand Down
2 changes: 1 addition & 1 deletion src/tool/subcommands/api_cmd/api_compare_tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -143,7 +143,7 @@ impl TestSummary {

/// Data about a failed test. Used for debugging.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub(super) struct TestDump {
pub struct TestDump {
pub request: rpc::Request,
pub forest_response: Result<Value, String>,
pub lotus_response: Result<Value, String>,
Expand Down
6 changes: 2 additions & 4 deletions src/tool/subcommands/api_cmd/generate_test_snapshot.rs
Original file line number Diff line number Diff line change
Expand Up @@ -70,9 +70,7 @@ pub async fn run_test_with_dump(
Ok(())
}

pub(super) fn load_db(
db_root: &Path,
) -> anyhow::Result<Arc<ReadOpsTrackingStore<ManyCar<ParityDb>>>> {
pub fn load_db(db_root: &Path) -> anyhow::Result<Arc<ReadOpsTrackingStore<ManyCar<ParityDb>>>> {
let db_writer = open_db(db_root.into(), &Default::default())?;
let db = ManyCar::new(db_writer);
let forest_car_db_dir = db_root.join(CAR_DB_DIR_NAME);
Expand Down Expand Up @@ -123,7 +121,7 @@ async fn ctx(
db.clone(),
db,
chain_config,
genesis_header.clone(),
genesis_header,
)
.unwrap(),
);
Expand Down
4 changes: 4 additions & 0 deletions src/tool/subcommands/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ mod index_cmd;
mod net_cmd;
mod shed_cmd;
mod snapshot_cmd;
mod state_compute_cmd;
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🔴 Critical

Fix “private type in public interface” for Subcommand::State

Subcommand is pub, but it exposes state_compute_cmd::StateCommand from a private module, which breaks visibility (E0446). Re-export the type and reference the re-export, or make the module public. Minimal fix:

 mod state_compute_cmd;
+pub use self::state_compute_cmd::StateCommand;

 ...
-    #[command(subcommand)]
-    State(state_compute_cmd::StateCommand),
+    #[command(subcommand)]
+    State(StateCommand),

Also applies to: 85-87

🤖 Prompt for AI Agents
In src/tool/subcommands/mod.rs around lines 15 and also affecting lines 85-87,
Subcommand::State exposes StateCommand from a private module causing a "private
type in public interface" error; fix by either making the module public (change
the module declaration to pub mod state_compute_cmd) or re-exporting the type
(add pub use state_compute_cmd::StateCommand) and update Subcommand::State to
reference the re-exported StateCommand (e.g., use
crate::tool::subcommands::StateCommand) so the public enum only exposes public
types.

mod state_migration_cmd;

use crate::cli_shared::cli::*;
Expand Down Expand Up @@ -81,5 +82,8 @@ pub enum Subcommand {
#[command(subcommand)]
Shed(shed_cmd::ShedCommands),

#[command(subcommand)]
State(state_compute_cmd::StateCommand),

Completion(CompletionCommand),
}
163 changes: 163 additions & 0 deletions src/tool/subcommands/state_compute_cmd.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,163 @@
// Copyright 2019-2025 ChainSafe Systems
// SPDX-License-Identifier: Apache-2.0, MIT

use crate::{
chain::{ChainStore, index::ResolveNullTipset},
cli_shared::{chain_path, read_config},
db::{
MemoryDB, SettingsStoreExt,
car::{AnyCar, ManyCar},
db_engine::db_root,
},
genesis::read_genesis_header,
interpreter::VMTrace,
networks::{ChainConfig, NetworkChain},
shim::clock::ChainEpoch,
state_manager::{StateManager, StateOutput},
};
use std::{num::NonZeroUsize, path::PathBuf, sync::Arc, time::Instant};

/// Interact with Filecoin chain state
#[derive(Debug, clap::Subcommand)]
pub enum StateCommand {
    // `forest-tool state compute`: compute the state tree at an epoch and
    // optionally export a minimal database snapshot of the blocks read.
    Compute(ComputeCommand),
    // `forest-tool state replay-compute`: re-run a state computation from a
    // snapshot previously produced by `state compute`.
    ReplayCompute(ReplayComputeCommand),
}

impl StateCommand {
pub async fn run(self) -> anyhow::Result<()> {
match self {
Self::Compute(cmd) => cmd.run().await,
Self::ReplayCompute(cmd) => cmd.run().await,
}
}
}

/// Compute state tree for an epoch
#[derive(Debug, clap::Args)]
pub struct ComputeCommand {
    /// Which epoch to compute the state transition for
    // Null tipsets at this height resolve to an older tipset (see `run`).
    #[arg(long, required = true)]
    epoch: ChainEpoch,
    /// Filecoin network chain
    #[arg(long, required = true)]
    chain: NetworkChain,
    /// Optional path to the database folder
    // When omitted, the database root is derived from the configuration for
    // the selected chain.
    #[arg(long)]
    db: Option<PathBuf>,
    /// Optional path to the database snapshot `CAR` file to write to for reproducing the computation
    // When omitted, the snapshot is still built in memory (its size is
    // reported) but not persisted.
    #[arg(long)]
    export_db_to: Option<PathBuf>,
}

impl ComputeCommand {
pub async fn run(self) -> anyhow::Result<()> {
let Self {
epoch,
chain,
db,
export_db_to,
} = self;
let db_root_path = if let Some(db) = db {
db
} else {
let (_, config) = read_config(None, Some(chain.clone()))?;
db_root(&chain_path(&config))?
};
let db = super::api_cmd::generate_test_snapshot::load_db(&db_root_path)?;
let chain_config = Arc::new(ChainConfig::from_chain(&chain));
let genesis_header =
read_genesis_header(None, chain_config.genesis_bytes(&db).await?.as_deref(), &db)
.await?;
let chain_store = Arc::new(ChainStore::new(
db.clone(),
db.clone(),
db.clone(),
db.clone(),
chain_config,
genesis_header,
)?);
let ts = chain_store.chain_index().tipset_by_height(
epoch,
chain_store.heaviest_tipset(),
ResolveNullTipset::TakeOlder,
)?;
let epoch = ts.epoch();
SettingsStoreExt::write_obj(&db.tracker, crate::db::setting_keys::HEAD_KEY, ts.key())?;
let state_manager = Arc::new(StateManager::new(chain_store.clone())?);

Comment on lines +86 to +88
Copy link
Copy Markdown
Contributor

@coderabbitai coderabbitai Bot Oct 17, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🔴 Critical

Wrong store used when writing HEAD; likely compile error and unintended DB mutation

SettingsStoreExt::write_obj(&db.tracker, ...) targets the read-ops tracker, not the settings store. This won’t compile and, if pointed to the real DB, could also mutate a live DB’s HEAD. Use the DB wrapper itself, or drop the HEAD write entirely (not needed when passing ts to compute_tipset_state).

-        SettingsStoreExt::write_obj(&db.tracker, crate::db::setting_keys::HEAD_KEY, ts.key())?;
+        // Optional: this write is not strictly necessary for compute; remove if you want no side-effects.
+        SettingsStoreExt::write_obj(&*db, crate::db::setting_keys::HEAD_KEY, &ts.key())?;

If you prefer method syntax:

-        SettingsStoreExt::write_obj(&db.tracker, crate::db::setting_keys::HEAD_KEY, ts.key())?;
+        db.write_obj(crate::db::setting_keys::HEAD_KEY, &ts.key())?;
📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
SettingsStoreExt::write_obj(&db.tracker, crate::db::setting_keys::HEAD_KEY, ts.key())?;
let state_manager = Arc::new(StateManager::new(chain_store.clone()).unwrap());
// Optional: this write is not strictly necessary for compute; remove if you want no side-effects.
SettingsStoreExt::write_obj(&*db, crate::db::setting_keys::HEAD_KEY, &ts.key())?;
let state_manager = Arc::new(StateManager::new(chain_store.clone()).unwrap());
🤖 Prompt for AI Agents
In src/tool/subcommands/state_compute_cmd.rs around lines 89 to 91, the call
uses SettingsStoreExt::write_obj(&db.tracker, crate::db::setting_keys::HEAD_KEY,
ts.key()) which targets the tracker store (causing a compile error and risking
mutating the wrong DB); change the call to use the DB wrapper (e.g.
SettingsStoreExt::write_obj(&db, crate::db::setting_keys::HEAD_KEY, ts.key()))
or simply remove the HEAD write entirely since ts is passed into
compute_tipset_state; ensure you use the correct settings store reference or
drop the write to avoid unintended DB mutation.

Copy link
Copy Markdown
Contributor Author

@hanabi1224 hanabi1224 Oct 17, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This is intended. Setting the head for exporting the snapshot from the tracker

Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

🧩 Analysis chain

🏁 Script executed:

#!/bin/bash
# Find the ReadOpsTrackingStore definition and understand its structure
ast-grep --pattern 'struct ReadOpsTrackingStore'

Length of output: 0


🏁 Script executed:

#!/bin/bash
# Search for how the snapshot/export mechanism works
rg -nP -C5 'export.*snapshot|snapshot.*export' --type=rust

Length of output: 19887


🏁 Script executed:

#!/bin/bash
# Look for the tracker field usage in the codebase
rg -nP '\btracker\b' --type=rust -C3 -g '!target/**'

Length of output: 17067


🏁 Script executed:

#!/bin/bash
# Find where export_snapshot_to_forest_car is implemented/called
rg -nP 'export_snapshot_to_forest_car|export.*forest.*car' --type=rust -C5

Length of output: 7646


@hanabi1224, thank you for the clarification! You're absolutely right. I now understand the architecture:

The ReadOpsTrackingStore uses its tracker (a MemoryDB) to accumulate all data accessed during the state computation. When export_forest_car is called, it exports from this tracker, creating a minimal snapshot containing only the blocks that were actually read.

Writing HEAD_KEY to db.tracker is indeed necessary because MemoryDB::export_forest_car reads the HEAD_KEY from the store it's exporting. Without this write, the export would fail.

I apologize for the incorrect initial assessment. The code is working as designed.

🐰

  /)_/)
 (•.•)
 />📚  Learning noted!

✏️ Learnings added
Learnt from: hanabi1224
PR: ChainSafe/forest#6167
File: src/tool/subcommands/state_compute_cmd.rs:89-91
Timestamp: 2025-10-17T14:24:47.006Z
Learning: In `src/tool/subcommands/state_compute_cmd.rs`, when using `ReadOpsTrackingStore` to generate minimal snapshots, `HEAD_KEY` should be written to `db.tracker` (not `db` itself) before calling `export_forest_car()`, because the export reads from the tracker MemoryDB which accumulates only the accessed data during computation.

Note: Learnings are effective only in the context of similar code segments. To apply general coding guidelines, please configure review instructions. You can manage existing learnings in the app.

let StateOutput {
state_root,
receipt_root,
..
} = state_manager
.compute_tipset_state(ts, crate::state_manager::NO_CALLBACK, VMTrace::NotTraced)
.await?;
let mut db_snapshot = vec![];
db.export_forest_car(&mut db_snapshot).await?;
println!(
"epoch: {epoch}, state_root: {state_root}, receipt_root: {receipt_root}, db_snapshot_size: {}",
human_bytes::human_bytes(db_snapshot.len() as f64)
);
if let Some(export_db_to) = export_db_to {
std::fs::write(export_db_to, db_snapshot)?;
}
Ok(())
}
}

/// Replay state computation with a db snapshot
Comment thread
hanabi1224 marked this conversation as resolved.
/// To be used in conjunction with `forest-tool state compute`.
#[derive(Debug, clap::Args)]
pub struct ReplayComputeCommand {
    /// Path to the database snapshot `CAR` file generated by `forest-tool state compute`
    // Positional argument — unlike the flags below, it carries no `long` name.
    snapshot: PathBuf,
    /// Filecoin network chain
    #[arg(long, required = true)]
    chain: NetworkChain,
    /// Number of times to repeat the state computation
    // `NonZeroUsize` guarantees at least one run; defaults to a single pass.
    #[arg(short, long, default_value_t = NonZeroUsize::new(1).unwrap())]
    n: NonZeroUsize,
}

impl ReplayComputeCommand {
pub async fn run(self) -> anyhow::Result<()> {
let Self { snapshot, chain, n } = self;
let snap_car = AnyCar::try_from(&snapshot)?;
let ts = Arc::new(snap_car.heaviest_tipset()?);
let epoch = ts.epoch();
let db = Arc::new(ManyCar::new(MemoryDB::default()).with_read_only(snap_car)?);
let chain_config = Arc::new(ChainConfig::from_chain(&chain));
let genesis_header =
read_genesis_header(None, chain_config.genesis_bytes(&db).await?.as_deref(), &db)
.await?;
let chain_store = Arc::new(ChainStore::new(
db.clone(),
db.clone(),
db.clone(),
db.clone(),
chain_config,
genesis_header,
)?);
let state_manager = Arc::new(StateManager::new(chain_store.clone())?);
for _ in 0..n.get() {
let start = Instant::now();
let StateOutput {
state_root,
receipt_root,
..
} = state_manager
.compute_tipset_state(
ts.clone(),
crate::state_manager::NO_CALLBACK,
VMTrace::NotTraced,
)
.await?;
println!(
"epoch: {epoch}, state_root: {state_root}, receipt_root: {receipt_root}, took {}.",
humantime::format_duration(start.elapsed())
);
}
Ok(())
}
}
Loading