8 changes: 6 additions & 2 deletions bin/node-template/node/src/service.rs
@@ -2,7 +2,7 @@

use std::sync::Arc;
use std::time::Duration;
use sc_client_api::{ExecutorProvider, RemoteBackend};
use sc_client_api::{ExecutorProvider, RemoteBackend, SharedPruningRequirements};
use node_template_runtime::{self, opaque::Block, RuntimeApi};
use sc_service::{error::Error as ServiceError, Configuration, TaskManager};
use sp_inherents::InherentDataProviders;
@@ -228,8 +228,12 @@ pub fn new_full(config: Configuration) -> Result<TaskManager, ServiceError> {

/// Builds a new service for a light client.
pub fn new_light(config: Configuration) -> Result<TaskManager, ServiceError> {
let shared_pruning_requirements = SharedPruningRequirements::default();
let (client, backend, keystore, mut task_manager, on_demand) =
sc_service::new_light_parts::<Block, RuntimeApi, Executor>(&config)?;
sc_service::new_light_parts::<Block, RuntimeApi, Executor>(
&config,
&shared_pruning_requirements,
)?;

let transaction_pool = Arc::new(sc_transaction_pool::BasicPool::new_light(
config.transaction_pool.clone(),
18 changes: 15 additions & 3 deletions bin/node/cli/src/service.rs
@@ -33,7 +33,7 @@ use sp_inherents::InherentDataProviders;
use sc_network::{Event, NetworkService};
use sp_runtime::traits::Block as BlockT;
use futures::prelude::*;
use sc_client_api::{ExecutorProvider, RemoteBackend};
use sc_client_api::{ExecutorProvider, RemoteBackend, SharedPruningRequirements};
use sp_core::traits::BareCryptoStorePtr;
use node_executor::Executor;

@@ -86,6 +86,7 @@ pub fn new_partial(config: &Configuration) -> Result<sc_service::PartialComponen
sc_consensus_babe::Config::get_or_compute(&*client)?,
grandpa_block_import,
client.clone(),
None,
)?;

let inherent_data_providers = sp_inherents::InherentDataProviders::new();
@@ -358,8 +359,18 @@ pub fn new_light_base(config: Configuration) -> Result<(
Arc<NetworkService<Block, <Block as BlockT>::Hash>>,
Arc<sc_transaction_pool::LightPool<Block, LightClient, sc_network::config::OnDemand<Block>>>
), ServiceError> {
let (client, backend, keystore, mut task_manager, on_demand) =
sc_service::new_light_parts::<Block, RuntimeApi, Executor>(&config)?;
let mut shared_pruning_requirements = SharedPruningRequirements::default();
sc_consensus_babe::light_pruning_requirements(&mut shared_pruning_requirements);
let (
client,
backend,
keystore,
mut task_manager,
on_demand,
) = sc_service::new_light_parts::<Block, RuntimeApi, Executor>(
&config,
&shared_pruning_requirements,
)?;

let select_chain = sc_consensus::LongestChain::new(backend.clone());

Expand All @@ -384,6 +395,7 @@ pub fn new_light_base(config: Configuration) -> Result<(
sc_consensus_babe::Config::get_or_compute(&*client)?,
grandpa_block_import,
client.clone(),
Some(&shared_pruning_requirements),
)?;

let inherent_data_providers = sp_inherents::InherentDataProviders::new();
9 changes: 9 additions & 0 deletions client/api/src/backend.rs
@@ -537,6 +537,15 @@ pub fn changes_tries_state_at_block<'a, Block: BlockT>(
}
}

/// Pruning requirements to share between multiple client components.
#[derive(Clone, Default)]
pub struct SharedPruningRequirements {
/// Some components, such as BABE, need to resolve the canonical branch;
/// for them we keep the key lookup mapping when pruning the CHT on a
/// light client.
pub need_mapping_for_light_pruning: bool,
}

/// Provide CHT roots. These are stored on a light client and generated dynamically on a full
/// client.
pub trait ProvideChtRoots<Block: BlockT> {
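As a rough sketch of how the new struct is meant to be assembled (it mirrors the light-client service wiring in the files above; the standalone helper function here is only illustrative):

```rust
use sc_client_api::SharedPruningRequirements;

// Illustrative only: each consensus component that needs extra data kept
// around during light-client CHT pruning flips its flag before the client
// parts are built, and the resulting value is passed to `new_light_parts`.
fn light_client_pruning_requirements() -> SharedPruningRequirements {
    let mut requirements = SharedPruningRequirements::default();
    // BABE needs the number-to-hash key lookup retained so that it can later
    // tell whether a pruned header was on the canonical chain.
    sc_consensus_babe::light_pruning_requirements(&mut requirements);
    requirements
}
```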
13 changes: 13 additions & 0 deletions client/api/src/in_mem.rs
@@ -339,6 +339,19 @@ impl<Block: BlockT> HeaderBackend<Block> for Blockchain<Block> {
fn hash(&self, number: <<Block as BlockT>::Header as HeaderT>::Number) -> sp_blockchain::Result<Option<Block::Hash>> {
Ok(self.id(BlockId::Number(number)))
}

fn pruned_header_was_canonical(
&self,
number: &NumberFor<Block>,
hash: &Block::Hash,
) -> sp_blockchain::Result<bool> {
Ok(self.storage.read().hashes.get(number) == Some(hash))
}

fn pruned_header_clean_up(&self, number: &NumberFor<Block>) -> sp_blockchain::Result<()> {
self.storage.write().hashes.remove(number);
Ok(())
}
}

impl<Block: BlockT> HeaderMetadata<Block> for Blockchain<Block> {
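The contract of the two new `HeaderBackend` methods is easiest to see against a plain number-to-hash map, which is exactly what the in-memory backend uses above; a self-contained sketch of the same semantics (the types are illustrative stand-ins, not the real `Blockchain` storage):

```rust
use std::collections::HashMap;

/// Illustrative stand-in for a backend's number -> canonical-hash mapping.
struct NumberToHash(HashMap<u64, [u8; 32]>);

impl NumberToHash {
    /// A pruned header counts as canonical iff its hash is the one recorded
    /// for its number.
    fn pruned_header_was_canonical(&self, number: u64, hash: &[u8; 32]) -> bool {
        self.0.get(&number) == Some(hash)
    }

    /// Once consensus code no longer needs the entry, the mapping is dropped.
    fn pruned_header_clean_up(&mut self, number: u64) {
        self.0.remove(&number);
    }
}
```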
15 changes: 15 additions & 0 deletions client/authority-discovery/src/worker/tests.rs
@@ -150,6 +150,21 @@ impl<Block: BlockT> HeaderBackend<Block> for TestApi {
) -> std::result::Result<Option<Block::Hash>, sp_blockchain::Error> {
Ok(None)
}

fn pruned_header_was_canonical(
&self,
_number: &NumberFor<Block>,
_hash: &Block::Hash,
) -> std::result::Result<bool, sp_blockchain::Error> {
Ok(false)
}

fn pruned_header_clean_up(
&self,
_number: &NumberFor<Block>,
) -> std::result::Result<(), sp_blockchain::Error> {
Ok(())
}
}

pub(crate) struct RuntimeApi {
1 change: 1 addition & 0 deletions client/consensus/babe/rpc/src/lib.rs
@@ -264,6 +264,7 @@ mod tests {
config.clone(),
client.clone(),
client.clone(),
None,
).expect("can initialize block-import");

let epoch_changes = link.epoch_changes().clone();
55 changes: 47 additions & 8 deletions client/consensus/babe/src/lib.rs
@@ -104,6 +104,7 @@ use sp_consensus::import_queue::{Verifier, BasicQueue, DefaultImportQueue, Cache
use sc_client_api::{
backend::AuxStore,
BlockchainEvents, ProvideUncles,
SharedPruningRequirements,
};
use sp_block_builder::BlockBuilder as BlockBuilderApi;
use futures::channel::mpsc::{channel, Sender, Receiver};
@@ -1080,6 +1081,7 @@ pub struct BabeBlockImport<Block: BlockT, Client, I> {
client: Arc<Client>,
epoch_changes: SharedEpochChanges<Block, Epoch>,
config: Config,
light_pruning: bool,
}

impl<Block: BlockT, I: Clone, Client> Clone for BabeBlockImport<Block, Client, I> {
@@ -1089,6 +1091,7 @@ impl<Block: BlockT, I: Clone, Client> Clone for BabeBlockImport<Block, Client, I
client: self.client.clone(),
epoch_changes: self.epoch_changes.clone(),
config: self.config.clone(),
light_pruning: self.light_pruning,
}
}
}
@@ -1099,12 +1102,15 @@ impl<Block: BlockT, Client, I> BabeBlockImport<Block, Client, I> {
epoch_changes: SharedEpochChanges<Block, Epoch>,
block_import: I,
config: Config,
shared_pruning_requirements: Option<&SharedPruningRequirements>,
) -> Self {
let light_pruning = shared_pruning_requirements.is_some();
BabeBlockImport {
client,
inner: block_import,
epoch_changes,
config,
light_pruning,
}
}
}
@@ -1278,6 +1284,7 @@ impl<Block, Client, Inner> BlockImport<Block> for BabeBlockImport<Block, Client,
prune_finalized(
self.client.clone(),
&mut epoch_changes,
self.light_pruning,
)?;

epoch_changes.import(
@@ -1365,9 +1372,11 @@ impl<Block, Client, Inner> BlockImport<Block> for BabeBlockImport<Block, Client,
fn prune_finalized<Block, Client>(
client: Arc<Client>,
epoch_changes: &mut EpochChangesFor<Block, Epoch>,
prune_light: bool,
) -> Result<(), ConsensusError> where
Block: BlockT,
Client: HeaderBackend<Block> + HeaderMetadata<Block, Error = sp_blockchain::Error>,
Client: HeaderBackend<Block>
+ HeaderMetadata<Block, Error = sp_blockchain::Error>
{
let info = client.info();

@@ -1383,16 +1392,43 @@ fn prune_finalized<Block, Client>(
.slot_number()
};

epoch_changes.prune_finalized(
descendent_query(&*client),
&info.finalized_hash,
info.finalized_number,
finalized_slot,
).map_err(|e| ConsensusError::ClientImport(format!("{:?}", e)))?;
if prune_light {
let is_canonical = |
hash: &Block::Hash,
number: &NumberFor<Block>,
finalized_number: &NumberFor<Block>,
| {
Ok(number <= finalized_number && client.pruned_header_was_canonical(number, hash)?)
};
let clean_up = |number: &NumberFor<Block>| {
client.pruned_header_clean_up(number)?;
Ok(())
};

epoch_changes.prune_finalized_light::<sp_blockchain::Error, _, _>(
&info.finalized_hash,
info.finalized_number,
finalized_slot,
is_canonical,
clean_up,
).map_err(|e| ConsensusError::ClientImport(format!("{:?}", e)))?;
} else {
epoch_changes.prune_finalized(
descendent_query(&*client),
&info.finalized_hash,
info.finalized_number,
finalized_slot,
).map_err(|e| ConsensusError::ClientImport(format!("{:?}", e)))?;
}

Ok(())
}

/// Register BABE's light-client pruning requirements: the number-to-hash
/// key lookup mapping must be kept when pruning.
pub fn light_pruning_requirements(shared_pruning_requirements: &mut SharedPruningRequirements) {
shared_pruning_requirements.need_mapping_for_light_pruning = true;
}

/// Produce a BABE block-import object to be used later on in the construction of
/// an import-queue.
///
@@ -1402,8 +1438,9 @@ pub fn block_import<Client, Block: BlockT, I>(
config: Config,
wrapped_block_import: I,
client: Arc<Client>,
shared_pruning_requirements: Option<&SharedPruningRequirements>,
) -> ClientResult<(BabeBlockImport<Block, Client, I>, BabeLink<Block>)> where
Client: AuxStore + HeaderBackend<Block> + HeaderMetadata<Block, Error = sp_blockchain::Error>,
Client: AuxStore + HeaderBackend<Block> + HeaderMetadata<Block, Error = sp_blockchain::Error>
{
let epoch_changes = aux_schema::load_epoch_changes::<Block, _>(&*client, &config)?;
let link = BabeLink {
@@ -1418,13 +1455,15 @@ pub fn block_import<Client, Block: BlockT, I>(
prune_finalized(
client.clone(),
&mut epoch_changes.lock(),
shared_pruning_requirements.is_some(),
)?;

let import = BabeBlockImport::new(
client,
epoch_changes,
wrapped_block_import,
config,
shared_pruning_requirements,
);

Ok((import, link))
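Condensed into one place, the light-client path added above looks roughly like this (a sketch assuming `client` and `grandpa_block_import` already exist inside a service-building function that returns `Result`, as in `new_light_base`):

```rust
// Sketch of the light-client wiring, condensing the service changes above.
let mut requirements = SharedPruningRequirements::default();
sc_consensus_babe::light_pruning_requirements(&mut requirements);

let (babe_block_import, babe_link) = sc_consensus_babe::block_import(
    sc_consensus_babe::Config::get_or_compute(&*client)?,
    grandpa_block_import,
    client.clone(),
    // Passing `Some` makes `prune_finalized` take the light path, which checks
    // canonicality via `pruned_header_was_canonical` and afterwards calls
    // `pruned_header_clean_up` on the retained key lookup entries.
    Some(&requirements),
)?;
```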
1 change: 1 addition & 0 deletions client/consensus/babe/src/tests.rs
@@ -278,6 +278,7 @@ impl TestNetFactory for BabeTestNet {
config,
client.clone(),
client.clone(),
None,
).expect("can initialize block-import");

let block_import = PanickingBlockImport(block_import);
37 changes: 37 additions & 0 deletions client/consensus/epochs/src/lib.rs
@@ -421,6 +421,43 @@ impl<Hash, Number, E: Epoch> EpochChanges<Hash, Number, E> where
Ok(())
}

/// Light-client variant of `prune_finalized`: prune out finalized epochs,
/// except for the ancestor of the finalized block, using the provided
/// `is_canonical` check and `clean_up` callback instead of a descendent
/// query. The given slot should be the slot number at which the finalized
/// block was authored.
pub fn prune_finalized_light<Error, F, C>(
&mut self,
hash: &Hash,
number: Number,
slot: E::SlotNumber,
is_canonical: F,
clean_up: C,
) -> Result<(), fork_tree::Error<Error>>
where Error: std::error::Error,
F: Fn(&Hash, &Number, &Number) -> Result<bool, Error>,
C: Fn(&Number) -> Result<(), Error>,
{

let predicate = |epoch: &PersistedEpochHeader<E>| match *epoch {
PersistedEpochHeader::Genesis(_, ref epoch_1) =>
slot >= epoch_1.end_slot,
PersistedEpochHeader::Regular(ref epoch_n) =>
slot >= epoch_n.end_slot,
};
let removed = self.inner.prune_non_cannonical(
hash,
&number,
&is_canonical,
&predicate,
&clean_up,
)?;

for (hash, number, _) in removed {
self.epochs.remove(&(hash, number));
}

Ok(())
}

/// Get a reference to an epoch with given identifier.
pub fn epoch(&self, id: &EpochIdentifier<Hash, Number>) -> Option<&E> {
self.epochs.get(&(id.hash, id.number))
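The two closures are the only coupling back to the client: `is_canonical` decides whether a pruned fork node sat on the finalized chain, and `clean_up` lets the backend drop its lookup entry afterwards. A minimal sketch of closures with the expected shapes, using simple types in place of `Block::Hash` and `NumberFor<Block>`:

```rust
use std::collections::HashMap;

type Hash = [u8; 32];
type Number = u64;

fn example_closures(canonical: &HashMap<Number, Hash>) {
    // A node at or below the finalized number is canonical iff its hash
    // matches the recorded canonical hash for that number.
    let is_canonical = |hash: &Hash, number: &Number, finalized_number: &Number|
        -> Result<bool, std::io::Error>
    {
        Ok(number <= finalized_number && canonical.get(number) == Some(hash))
    };

    // A real backend would remove its number -> hash lookup entry here.
    let clean_up = |_number: &Number| -> Result<(), std::io::Error> { Ok(()) };

    // These would be handed to `prune_finalized_light::<std::io::Error, _, _>`
    // together with the finalized hash, number, and slot.
    let _ = (is_canonical, clean_up);
}
```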
30 changes: 30 additions & 0 deletions client/db/src/lib.rs
@@ -448,6 +448,36 @@ impl<Block: BlockT> sc_client_api::blockchain::HeaderBackend<Block> for Blockcha
None => Ok(None),
})
}

fn pruned_header_was_canonical(
&self,
number: &NumberFor<Block>,
hash: &Block::Hash,
) -> sp_blockchain::Result<bool> {
let lookup_key = utils::block_id_to_lookup_key::<Block>(
&*self.db,
columns::KEY_LOOKUP,
BlockId::Number(number.clone()),
)?;
Ok(if let Some(lookup_key) = lookup_key {
utils::lookup_key_to_hash(lookup_key.as_ref())? == hash.as_ref()
} else {
false
})
}

fn pruned_header_clean_up(&self, number: &NumberFor<Block>) -> sp_blockchain::Result<()> {
let mut transaction = Transaction::new();
utils::remove_number_to_key_mapping(
&mut transaction,
columns::KEY_LOOKUP,
number.clone(),
)?;

self.db.commit(transaction)?;

Ok(())
}
}

impl<Block: BlockT> sc_client_api::blockchain::Backend<Block> for BlockchainDb<Block> {