-
Notifications
You must be signed in to change notification settings - Fork 132
Parachain loop metrics #1484
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Parachain loop metrics #1484
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -23,7 +23,10 @@ use async_trait::async_trait; | |
| use bp_parachains::parachain_head_storage_key_at_source; | ||
| use bp_polkadot_core::parachains::{ParaHash, ParaHead, ParaHeadsProof, ParaId}; | ||
| use codec::Decode; | ||
| use parachains_relay::parachains_loop::{ParaHashAtSource, SourceClient}; | ||
| use parachains_relay::{ | ||
| parachains_loop::{ParaHashAtSource, SourceClient}, | ||
| parachains_loop_metrics::ParachainsLoopMetrics, | ||
| }; | ||
| use relay_substrate_client::{ | ||
| Chain, Client, Error as SubstrateError, HeaderIdOf, HeaderOf, RelayChain, | ||
| }; | ||
|
|
@@ -100,6 +103,7 @@ where | |
| async fn parachain_head( | ||
| &self, | ||
| at_block: HeaderIdOf<P::SourceRelayChain>, | ||
| metrics: Option<&ParachainsLoopMetrics>, | ||
| para_id: ParaId, | ||
| ) -> Result<ParaHashAtSource, Self::Error> { | ||
| // we don't need to support many parachains now | ||
|
|
@@ -111,9 +115,11 @@ where | |
| ))) | ||
| } | ||
|
|
||
| Ok(match self.on_chain_parachain_header(at_block, para_id).await? { | ||
| let mut para_header_number_at_source = None; | ||
| let para_hash_at_source = match self.on_chain_parachain_header(at_block, para_id).await? { | ||
| Some(parachain_header) => { | ||
| let mut parachain_head = ParaHashAtSource::Some(parachain_header.hash()); | ||
| para_header_number_at_source = Some(*parachain_header.number()); | ||
|
Comment on lines
+119
to
+122
Collaborator
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Nit:
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Yeah, but they have different scopes. Feel free to change the code if you think your version is better, though :) |
||
| // never return head that is larger than requested. This way we'll never sync | ||
| // headers past `maximal_header_id` | ||
| if let Some(ref maximal_header_id) = self.maximal_header_id { | ||
|
|
@@ -125,19 +131,29 @@ where | |
| // we don't want this header yet => let's report previously requested | ||
| // header | ||
| parachain_head = ParaHashAtSource::Some(maximal_header_id.1); | ||
| para_header_number_at_source = Some(maximal_header_id.0); | ||
| }, | ||
| Some(_) => (), | ||
| None => { | ||
| // on-demand relay has not yet asked us to sync anything let's do that | ||
| parachain_head = ParaHashAtSource::Unavailable; | ||
| para_header_number_at_source = None; | ||
| }, | ||
| } | ||
| } | ||
|
|
||
| parachain_head | ||
| }, | ||
| None => ParaHashAtSource::None, | ||
| }) | ||
| }; | ||
|
|
||
| if let (Some(metrics), Some(para_header_number_at_source)) = | ||
| (metrics, para_header_number_at_source) | ||
| { | ||
| metrics.update_best_parachain_block_at_source(para_id, para_header_number_at_source); | ||
| } | ||
|
|
||
| Ok(para_hash_at_source) | ||
| } | ||
|
|
||
| async fn prove_parachain_heads( | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -24,13 +24,19 @@ use crate::{ | |
| }; | ||
|
|
||
| use async_trait::async_trait; | ||
| use bp_parachains::{parachain_head_storage_key_at_target, BestParaHeadHash}; | ||
| use bp_polkadot_core::parachains::{ParaHeadsProof, ParaId}; | ||
| use bp_parachains::{ | ||
| best_parachain_head_hash_storage_key_at_target, imported_parachain_head_storage_key_at_target, | ||
| BestParaHeadHash, | ||
| }; | ||
| use bp_polkadot_core::parachains::{ParaHead, ParaHeadsProof, ParaId}; | ||
| use codec::{Decode, Encode}; | ||
| use parachains_relay::parachains_loop::TargetClient; | ||
| use parachains_relay::{ | ||
| parachains_loop::TargetClient, parachains_loop_metrics::ParachainsLoopMetrics, | ||
| }; | ||
| use relay_substrate_client::{ | ||
| AccountIdOf, AccountKeyPairOf, BlockNumberOf, Chain, Client, Error as SubstrateError, HashOf, | ||
| HeaderIdOf, RelayChain, SignParam, TransactionEra, TransactionSignScheme, UnsignedTransaction, | ||
| HeaderIdOf, HeaderOf, RelayChain, SignParam, TransactionEra, TransactionSignScheme, | ||
| UnsignedTransaction, | ||
| }; | ||
| use relay_utils::{relay_loop::Client as RelayClient, HeaderId}; | ||
| use sp_core::{Bytes, Pair}; | ||
|
|
@@ -115,15 +121,46 @@ where | |
| async fn parachain_head( | ||
| &self, | ||
| at_block: HeaderIdOf<P::TargetChain>, | ||
| metrics: Option<&ParachainsLoopMetrics>, | ||
| para_id: ParaId, | ||
| ) -> Result<Option<BestParaHeadHash>, Self::Error> { | ||
| let storage_key = parachain_head_storage_key_at_target( | ||
| let best_para_head_hash_key = best_parachain_head_hash_storage_key_at_target( | ||
| P::SourceRelayChain::PARACHAINS_FINALITY_PALLET_NAME, | ||
| para_id, | ||
| ); | ||
| let para_head = self.client.storage_value(storage_key, Some(at_block.1)).await?; | ||
| let best_para_head_hash: Option<BestParaHeadHash> = | ||
| self.client.storage_value(best_para_head_hash_key, Some(at_block.1)).await?; | ||
| if let (Some(metrics), &Some(ref best_para_head_hash)) = (metrics, &best_para_head_hash) { | ||
| let imported_para_head_key = imported_parachain_head_storage_key_at_target( | ||
| P::SourceRelayChain::PARACHAINS_FINALITY_PALLET_NAME, | ||
| para_id, | ||
| best_para_head_hash.head_hash, | ||
| ); | ||
| let imported_para_header = self | ||
| .client | ||
| .storage_value::<ParaHead>(imported_para_head_key, Some(at_block.1)) | ||
| .await? | ||
|
Collaborator
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Sorry, one more thing.
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. If you're going to make a new method for double-encoded storage values, then let's log it here, where we know what the storage value actually means and where we may log a meaningful error. Ideally (imo) we'll need to migrate to some crate that would allow us to propagate an error with context and log it once at the upper level (inside the relay loop). But that's not a part of your follow-up PR of course :)
Collaborator
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Sure, I will log the error here, but I was wondering if we should log an error in both cases. The idea with having a new method for double-encoded storage values is not going very well so far. I think I might have to give it up.
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more.
Imo more detailed logging = better :) The fact that logging occurs at one branch only is probably the outcome of some bug that I've met. In the end the error is logged at the top level anyway, but this additional trace adds some context. So I'd add the second logging call here. |
||
| .and_then(|h| match HeaderOf::<P::SourceParachain>::decode(&mut &h.0[..]) { | ||
|
Collaborator
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Nit: Would it make sense to add a helper function similar to
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. I don't mind - feel free to add :) |
||
| Ok(header) => Some(header), | ||
| Err(e) => { | ||
| log::error!( | ||
| target: "bridge-metrics", | ||
| "Failed to decode {} parachain header at {}: {:?}. Metric will have obsolete value", | ||
| P::SourceParachain::NAME, | ||
| P::TargetChain::NAME, | ||
| e, | ||
| ); | ||
|
|
||
| None | ||
| }, | ||
| }); | ||
| if let Some(imported_para_header) = imported_para_header { | ||
| metrics | ||
| .update_best_parachain_block_at_target(para_id, *imported_para_header.number()); | ||
| } | ||
| } | ||
|
|
||
| Ok(para_head) | ||
| Ok(best_para_head_hash) | ||
| } | ||
|
|
||
| async fn submit_parachain_heads_proof( | ||
|
|
||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
I'm not very familiar with storage values, but considering these fixes, I was wondering if we could use a safer approach here. I'm thinking of the following issues:
`StorageDoubleMap` that is defined in the pallet. This seems to be covered by unit tests, however. `ParaId`, `ParaHash`, but we might by mistake use something else when decoding. Not sure if this is covered by unit tests. So I was wondering if we could use a safer approach to cover these issues. Maybe define a new structure to hold all these hashers and types and use it both when defining the
`StorageDoubleMap` and when computing the key and reading the value. A scaled-down version of `StorageDoubleMap`, maybe.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Yep, that's a good idea. I'm not sure, though, that you'll be able to do that - all storage declarations are handled by a procedural macro and it may happen that you can't specify, e.g.,
`MyMap::Hasher` there. But you can try to experiment, of course :)