Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
19 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion fork-off/src/account_setting.rs
Original file line number Diff line number Diff line change
Expand Up @@ -135,7 +135,7 @@ pub fn apply_account_setting(mut state: Storage, setting: AccountSetting) -> Sto
let account_hash = account.clone().into();
let key = &account_map.join(&account_hash);

state.insert(key.clone(), info.clone().into());
state.top.insert(key.clone(), info.clone().into());
info!(target: "fork-off", "Account info of `{:?}` set to `{:?}`", account, info);
}
state
Expand Down
7 changes: 4 additions & 3 deletions fork-off/src/chainspec_combining.rs
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ impl Index<&StoragePath> for PathCounter {
}
}

/// Combines states - omitting child state, as we assume that it is empty in the initial chainspec
pub fn combine_states(
mut state: Storage,
initial_state: Storage,
Expand All @@ -40,7 +41,7 @@ pub fn combine_states(
let mut removed_per_path_count = PathCounter::default();
let mut added_per_path_cnt = PathCounter::default();

state.retain(|k, _v| {
state.top.retain(|k, _v| {
match storage_prefixes
.iter()
.find(|(_, prefix)| prefix.is_prefix_of(k))
Expand All @@ -53,13 +54,13 @@ pub fn combine_states(
}
});

for (k, v) in initial_state.iter() {
for (k, v) in initial_state.top.iter() {
if let Some((path, _)) = storage_prefixes
.iter()
.find(|(_, prefix)| prefix.is_prefix_of(k))
{
added_per_path_cnt.bump(path);
state.insert(k.clone(), v.clone());
state.top.insert(k.clone(), v.clone());
}
}

Expand Down
23 changes: 18 additions & 5 deletions fork-off/src/fetching.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
use std::{collections::HashMap, iter::repeat_with, sync::Arc};
use std::{iter::repeat_with, sync::Arc};

use async_channel::Receiver;
use futures::{future::join_all, join};
Expand Down Expand Up @@ -37,10 +37,23 @@ impl StateFetcher {
.await
.unwrap();

let child_storage_map_res = self
.client
.get_child_storage_for_key(key.clone(), &block)
.await
.unwrap();

let mut output_guard = output.lock();
output_guard.insert(key, value);
if output_guard.len() % LOG_PROGRESS_FREQUENCY == 0 {
info!("Fetched {} values", output_guard.len());
output_guard.top.insert(key.clone(), value);
if let Some(child_storage_map) = child_storage_map_res {
info!("Fetched child trie with {} keys", child_storage_map.len());
output_guard
.child_storage
.insert(key.without_child_storage_prefix(), child_storage_map);
}

if output_guard.top.len() % LOG_PROGRESS_FREQUENCY == 0 {
info!("Fetched {} values", output_guard.top.len());
}
}
}
Expand All @@ -49,7 +62,7 @@ impl StateFetcher {
info!("Fetching state at block {:?}", block_hash);

let (input, key_fetcher) = self.client.stream_all_keys(&block_hash);
let output = Arc::new(Mutex::new(HashMap::new()));
let output = Arc::new(Mutex::new(Storage::default()));

let workers = repeat_with(|| {
self.value_fetching_worker(block_hash.clone(), input.clone(), output.clone())
Expand Down
4 changes: 2 additions & 2 deletions fork-off/src/fsio.rs
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ pub fn save_snapshot_to_file(snapshot: Storage, path: String) {
let data = serde_json::to_vec_pretty(&snapshot).unwrap();
info!(
"Writing snapshot of {} key-val pairs and {} total bytes",
snapshot.len(),
snapshot.top.len(),
data.len()
);
write_to_file(path, &data);
Expand All @@ -50,6 +50,6 @@ pub fn read_snapshot_from_file(path: String) -> Storage {
let snapshot: Storage =
serde_json::from_str(&fs::read_to_string(path).expect("Could not read snapshot file"))
.expect("could not parse from snapshot");
info!("Read snapshot of {} key-val pairs", snapshot.len());
info!("Read snapshot of {} key-val pairs", snapshot.top.len());
snapshot
}
82 changes: 81 additions & 1 deletion fork-off/src/jsonrpc_client.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ use jsonrpc_core::Error;
use jsonrpc_core_client::{transports::ws, RpcError};
use jsonrpc_derive::rpc;

use crate::types::{BlockHash, StorageKey, StorageValue};
use crate::types::{BlockHash, ChildStorageMap, StorageKey, StorageValue};

#[rpc]
pub trait Rpc {
Expand All @@ -28,6 +28,24 @@ pub trait Rpc {
start_key: Option<StorageKey>,
at: Option<BlockHash>,
) -> Result<Vec<StorageKey>, Error>;

#[rpc(name = "childstate_getKeysPaged")]
fn get_child_keys_paged(
&self,
child_storage: StorageKey,
prefix: StorageKey,
count: usize,
start_key: Option<StorageKey>,
at: Option<BlockHash>,
) -> Result<Vec<StorageKey>, Error>;

#[rpc(name = "childstate_getStorageEntries")]
fn get_child_storage_entries(
&self,
child_storage: StorageKey,
keys: Vec<StorageKey>,
at: Option<BlockHash>,
) -> Result<Vec<StorageValue>, Error>;
}

type RpcResult<T> = Result<T, RpcError>;
Expand Down Expand Up @@ -71,6 +89,68 @@ impl Client {
(receiver, self.do_stream_all_keys(sender, at.clone()))
}

/// Returns a map representing a single child trie, or `None` when the node
/// reports that no child storage exists under `child_key`.
pub async fn get_child_storage_for_key(
    &self,
    child_key: StorageKey,
    at: &BlockHash,
) -> RpcResult<Option<ChildStorageMap>> {
    match self.get_child_storage_for_key_inner(child_key, at).await {
        Ok(map) => Ok(Some(map)),
        // Empty child storage is returned as error
        Err(RpcError::JsonRpcError(err))
            if err.message == "Client error: Invalid child storage key" =>
        {
            Ok(None)
        }
        Err(err) => Err(err),
    }
}

/// Fetches the full contents of the child trie registered under `child_key`
/// at block `at`, paging through `childstate_getKeysPaged` in chunks of
/// `CHUNK_SIZE` and resolving the values via `childstate_getStorageEntries`.
async fn get_child_storage_for_key_inner(
    &self,
    child_key: StorageKey,
    at: &BlockHash,
) -> RpcResult<ChildStorageMap> {
    let empty_prefix = StorageKey::new("0x");
    let mut child_storage_map = ChildStorageMap::new();
    let mut start_key = None;

    loop {
        // Next page of keys; `start_key` resumes after the last key seen.
        let keys = self
            .client
            .get_child_keys_paged(
                child_key.clone(),
                empty_prefix.clone(),
                CHUNK_SIZE,
                start_key,
                Some(at.clone()),
            )
            .await?;

        let values = self
            .client
            .get_child_storage_entries(child_key.clone(), keys.clone(), Some(at.clone()))
            .await?;

        // Insert the pairs directly instead of collecting them into a
        // temporary `BTreeMap` only to `append` (and discard) it.
        child_storage_map.extend(keys.iter().cloned().zip(values));

        let fetched = keys.len();
        start_key = keys.last().cloned();

        // A short page means we have reached the end of the child trie.
        if fetched < CHUNK_SIZE {
            break;
        }
    }

    Ok(child_storage_map)
}

async fn do_stream_all_keys(&self, sender: Sender<StorageKey>, at: BlockHash) -> RpcResult<()> {
let empty_prefix = StorageKey::new("0x");
let mut start_key = None;
Expand Down
11 changes: 7 additions & 4 deletions fork-off/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -56,9 +56,8 @@ async fn main() -> anyhow::Result<()> {
}
let state = read_snapshot_from_file(snapshot_path);

let initial_state: Storage =
serde_json::from_value(initial_spec["genesis"]["raw"]["top"].take())
.expect("Deserialization of state from given chainspec file failed");
// Initialize with state from chainspec + empty child storage
let initial_state = Storage::new(&initial_spec);

let state = combine_states(state, initial_state, storage_keep_state);

Expand All @@ -77,8 +76,12 @@ async fn main() -> anyhow::Result<()> {
};
let state = apply_account_setting(state, account_setting);

let json_state = serde_json::to_value(state).expect("Failed to convert a storage map to json");
let json_state =
serde_json::to_value(state.top).expect("Failed to convert a storage map to json");
let json_child_state =
serde_json::to_value(state.child_storage).expect("Failed to convert a storage map to json");
initial_spec["genesis"]["raw"]["top"] = json_state;
initial_spec["genesis"]["raw"]["childrenDefault"] = json_child_state;
let new_spec = serde_json::to_vec_pretty(&initial_spec)?;

info!(target: "fork-off", "Writing new chainspec to {}", &combined_spec_path);
Expand Down
48 changes: 43 additions & 5 deletions fork-off/src/types.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,19 +5,24 @@
//! to a function, as `clippy` screams outrageously about changing it to `&str` and then the alias
//! is useless.

use std::{collections::HashMap, fmt::Debug, str::FromStr};
use std::{
collections::{BTreeMap, HashMap},
fmt::Debug,
str::FromStr,
};

use codec::Encode;
use frame_support::{sp_runtime::AccountId32, Blake2_128Concat, StorageHasher, Twox128};
use hex::ToHex;
use hex::{encode, ToHex};
use jsonrpc_core::Value;
use serde::{Deserialize, Serialize};

pub trait Get<T = String> {
fn get(self) -> T;
}

/// Remove leading `"0x"`.
fn strip_hex<T: ToString + ?Sized>(t: &T) -> String {
/// Remove a leading `"0x"` from the string form of `t`, if present;
/// otherwise return the string unchanged.
pub fn strip_hex<T: ToString + ?Sized>(t: &T) -> String {
    let s = t.to_string();
    match s.strip_prefix("0x") {
        Some(rest) => rest.to_owned(),
        None => s,
    }
}
Expand Down Expand Up @@ -69,7 +74,7 @@ impl Get for StoragePath {
}

/// Hex-encoded key in raw chainspec.
#[derive(Clone, Debug, Eq, PartialEq, Hash, Serialize, Deserialize)]
#[derive(Clone, Debug, Eq, PartialEq, Hash, Serialize, Deserialize, PartialOrd, Ord)]
pub struct StorageKey(String);

impl From<&StorageKey> for Vec<u8> {
Expand All @@ -78,6 +83,8 @@ impl From<&StorageKey> for Vec<u8> {
}
}

/// Raw byte prefix marking keys of default child tries in the top storage.
pub const CHILD_STORAGE_PREFIX: &[u8] = b":child_storage:default:";

impl StorageKey {
pub fn new<T: ToString + ?Sized>(content: &T) -> Self {
Self(as_hex(content))
Expand All @@ -96,6 +103,16 @@ impl StorageKey {
let longer = as_hex(&other.0);
longer.starts_with(&shorter)
}

// Strips the well-known `:child_storage:default:` prefix from this key.
// `split_off(n)` returns the tail after the first `n` characters, i.e.
// everything following the hex-encoded prefix, which is then re-wrapped
// as a `StorageKey`.
// NOTE(review): assumes `as_hex`/`get` normalize both strings to the same
// `0x`-prefix convention so the computed offset lines up — confirm against
// their definitions.
pub fn without_child_storage_prefix(self) -> Self {
StorageKey::new(
&(as_hex(
&self
.get()
.split_off(as_hex(&encode(CHILD_STORAGE_PREFIX)).len()),
)),
)
}
}

/// Convert `AccountId` to `StorageKey` using `Blake2_128Concat` hashing algorithm.
Expand Down Expand Up @@ -169,6 +186,27 @@ impl FromStr for BlockHash {
}

/// Content of `chainspec["genesis"]["raw"]["top"]`.
pub type Storage = HashMap<StorageKey, StorageValue>;
pub type TopStorage = HashMap<StorageKey, StorageValue>;

/// Content of `chainspec["genesis"]["raw"]["childrenDefault"]`.
pub type ChildStorage = HashMap<StorageKey, ChildStorageMap>;

pub type ChildStorageMap = BTreeMap<StorageKey, StorageValue>;

// Full chain state: the main trie plus all default child tries.
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct Storage {
// Content of `chainspec["genesis"]["raw"]["top"]`.
pub top: TopStorage,
// Content of `chainspec["genesis"]["raw"]["childrenDefault"]`.
pub child_storage: ChildStorage,
}

pub type Balance = u128;

impl Storage {
    /// Builds the initial `Storage`: `top` is deserialized from
    /// `chainspec["genesis"]["raw"]["top"]`, while the child storage
    /// starts out empty.
    pub fn new(initial_spec: &Value) -> Self {
        let raw_top = initial_spec["genesis"]["raw"]["top"].clone();
        let top = serde_json::from_value(raw_top)
            .expect("Deserialization of state from initial chainspec has failed");
        Self {
            top,
            child_storage: ChildStorage::new(),
        }
    }
}