Commit: Setting up tide-disco bindings
nyospe committed Feb 1, 2024
1 parent 1a4d85a commit 7b9b0c8
Showing 6 changed files with 215 additions and 16 deletions.
8 changes: 8 additions & 0 deletions Cargo.toml
@@ -7,7 +7,15 @@ edition = "2021"

[dependencies]
async-trait = "0.1"
clap = { version = "4.4", features = ["derive", "env"] }
commit = { git = "https://github.com/EspressoSystems/commit.git" }
derive_more = "0.99"
futures = "0.3"
hotshot-types = { git = "https://github.com/EspressoSystems/HotShot.git", tag = "0.5.7.1" }
serde = { version = "1.0", features = ["derive"] }
sha2 = "0.10"
snafu = { version = "0.7", features = ["backtraces"] }
tagged-base64 = { git = "https://github.com/EspressoSystems/tagged-base64", tag = "0.3.4" }
tide-disco = { git = "https://github.com/EspressoSystems/tide-disco.git", tag = "v0.4.6" }
toml = "0.8"

58 changes: 58 additions & 0 deletions src/api.rs
@@ -0,0 +1,58 @@
// Copyright (c) 2022 Espresso Systems (espressosys.com)
// This file is part of the HotShot Query Service library.
//
// This program is free software: you can redistribute it and/or modify it under the terms of the GNU
// General Public License as published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
// This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
// even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// General Public License for more details.
// You should have received a copy of the GNU General Public License along with this program. If not,
// see <https://www.gnu.org/licenses/>.

use std::fs;
use std::path::Path;
use tide_disco::api::{Api, ApiError};
use toml::{map::Entry, Value};

pub(crate) fn load_api<State, Error>(
    path: Option<impl AsRef<Path>>,
    default: &str,
    extensions: impl IntoIterator<Item = Value>,
) -> Result<Api<State, Error>, ApiError> {
    let mut toml = match path {
        Some(path) => load_toml(path.as_ref())?,
        None => toml::from_str(default).map_err(|err| ApiError::CannotReadToml {
            reason: err.to_string(),
        })?,
    };
    for extension in extensions {
        merge_toml(&mut toml, extension);
    }
    Api::new(toml)
}

fn merge_toml(into: &mut Value, from: Value) {
    if let (Value::Table(into), Value::Table(from)) = (into, from) {
        for (key, value) in from {
            match into.entry(key) {
                Entry::Occupied(mut entry) => merge_toml(entry.get_mut(), value),
                Entry::Vacant(entry) => {
                    entry.insert(value);
                }
            }
        }
    }
}

fn load_toml(path: &Path) -> Result<Value, ApiError> {
    let bytes = fs::read(path).map_err(|err| ApiError::CannotReadToml {
        reason: err.to_string(),
    })?;
    let string = std::str::from_utf8(&bytes).map_err(|err| ApiError::CannotReadToml {
        reason: err.to_string(),
    })?;
    toml::from_str(string).map_err(|err| ApiError::CannotReadToml {
        reason: err.to_string(),
    })
}
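
Editor's note: the deep merge above is what lets `--builder-extension` files add routes on top of the bundled API spec instead of replacing it. A minimal sketch of that behavior, assuming a test module inside this crate; the route names are hypothetical and do not come from this commit:

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn extension_adds_routes_without_clobbering_base() {
        // Hypothetical route tables; the real route names live in api/builder.toml.
        let mut base: Value =
            toml::from_str("[route.claim_block]\nPATH = [\"claimblock/:block_hash\"]\n").unwrap();
        let extension: Value =
            toml::from_str("[route.my_custom_route]\nPATH = [\"mycustomroute\"]\n").unwrap();

        merge_toml(&mut base, extension);

        // Both the original route and the extension route survive the merge.
        assert!(base.get("route").and_then(|r| r.get("claim_block")).is_some());
        assert!(base.get("route").and_then(|r| r.get("my_custom_route")).is_some());
    }
}
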
35 changes: 29 additions & 6 deletions src/block_metadata.rs
@@ -1,15 +1,38 @@
-use std::marker::PhantomData;
+use std::{hash::Hash, marker::PhantomData};

-use hotshot_types::traits::node_implementation::NodeType;
+use commit::{Commitment, Committable};
+use hotshot_types::traits::{node_implementation::NodeType, BlockPayload};
 use serde::{Deserialize, Serialize};
-use sha2::digest::{generic_array::GenericArray, typenum};
+use sha2::{Digest, Sha256};

-pub type BlockHash = GenericArray<u8, typenum::consts::U32>;
+pub struct HashableBlock<I: NodeType>(
+    <I as NodeType>::BlockPayload,
+    <<I as NodeType>::BlockPayload as BlockPayload>::Metadata,
+);
+pub type BlockHash<I: NodeType> = Commitment<HashableBlock<I>>;
+impl<I: NodeType> Default for HashableBlock<I> {
+    fn default() -> Self {
+        let (bp, bm) = <I as NodeType>::BlockPayload::from_transactions(Vec::new())
+            .unwrap_or_else(|_| <I as NodeType>::BlockPayload::genesis());
+        Self(bp, bm)
+    }
+}
+
+impl<I: NodeType> Committable for HashableBlock<I> {
+    fn commit(&self) -> Commitment<Self> {
+        let builder = commit::RawCommitmentBuilder::new("Hashable Block Payload");
+        let mut hasher = Sha256::new();
+        let encoded = if let Ok(encoder) = self.0.encode() {
+            encoder.collect()
+        } else {
+            Vec::new()
+        };
+        hasher.update(&encoded);
+        let generic_array = hasher.finalize();
+        builder.generic_byte_array(&generic_array).finalize()
+    }
+}
+
-#[derive(Clone, Debug, Default, Deserialize, Serialize, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq, PartialOrd, Ord, Hash)]
 #[serde(bound = "")]
 pub struct BlockMetadata<I: NodeType> {
-    block_hash: BlockHash,
+    block_hash: BlockHash<I>,
     block_size: u64,
     offered_fee: u64,
     _phantom: PhantomData<I>,
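
Editor's note: with this change a block hash is no longer a raw 32-byte digest but a typed `Commitment<HashableBlock<I>>`, obtained by SHA-256-hashing the encoded payload. A rough sketch of how such a hash could be produced for a payload; the helper is hypothetical, not part of the commit, and assumes it lives in this module so it can use the private tuple fields:

// Hypothetical helper inside block_metadata.rs: wrap a payload and its metadata
// and take the commitment defined by the Committable impl above.
pub(crate) fn hash_block<I: NodeType>(
    payload: <I as NodeType>::BlockPayload,
    metadata: <<I as NodeType>::BlockPayload as BlockPayload>::Metadata,
) -> BlockHash<I> {
    HashableBlock(payload, metadata).commit()
}
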
103 changes: 103 additions & 0 deletions src/builder.rs
@@ -0,0 +1,103 @@
use std::{fmt::Display, path::PathBuf};

use clap::Args;
use derive_more::From;
use futures::FutureExt;
use hotshot_types::{data::VidCommitment, traits::{node_implementation::NodeType, signature_key::SignatureKey}};
use serde::{Deserialize, Serialize};
use snafu::{OptionExt, ResultExt, Snafu};
use tagged_base64::TaggedBase64;
use tide_disco::{api::ApiError, method::ReadState, Api, RequestError, StatusCode};

use crate::{api::load_api, block_metadata::BlockHash, data_source::{self, BuilderDataSource}};

#[derive(Args, Default)]
pub struct Options {
    #[arg(long = "builder-api-path", env = "HOTSHOT_BUILDER_API_PATH")]
    pub api_path: Option<PathBuf>,

    /// Additional API specification files to merge with `builder-api-path`.
    ///
    /// These optional files may contain route definitions for application-specific routes that have
    /// been added as extensions to the basic builder API.
    #[arg(
        long = "builder-extension",
        env = "HOTSHOT_BUILDER_EXTENSIONS",
        value_delimiter = ','
    )]
    pub extensions: Vec<toml::Value>,
}

#[derive(Clone, Debug, Snafu, Deserialize, Serialize)]
#[snafu(visibility(pub))]
pub enum BuildError {
    /// The requested resource does not exist or is not known to this builder service.
    NotFound,
    /// The requested resource exists but is not currently available.
    Missing,
    /// There was an error while trying to fetch the requested resource.
    #[snafu(display("Failed to fetch requested resource: {message}"))]
    Error { message: String },
}

#[derive(Clone, Debug, From, Snafu, Deserialize, Serialize)]
#[snafu(visibility(pub))]
pub enum Error {
    Request {
        source: RequestError,
    },
    #[snafu(display("error building block from {resource}: {source}"))]
    #[from(ignore)]
    BlockAvailable {
        source: BuildError,
        resource: String,
    },
    #[snafu(display("error claiming block {resource}: {source}"))]
    #[from(ignore)]
    BlockClaim {
        source: BuildError,
        resource: String,
    },
    Custom {
        message: String,
        status: StatusCode,
    },
}

pub fn define_api<State, Types: NodeType>(options: &Options) -> Result<Api<State, Error>, ApiError>
where
    State: 'static + Send + Sync + ReadState,
    <State as ReadState>::State: Send + Sync + BuilderDataSource<Types>,
    Types: NodeType,
    <<Types as NodeType>::SignatureKey as SignatureKey>::PureAssembledSignatureType:
        for<'a> TryFrom<&'a TaggedBase64> + Into<TaggedBase64> + Display,
    for<'a> <<<Types as NodeType>::SignatureKey as SignatureKey>::PureAssembledSignatureType as TryFrom<&'a TaggedBase64>>::Error: Display,
{
    let mut api = load_api::<State, Error>(
        options.api_path.as_ref(),
        include_str!("../api/builder.toml"),
        options.extensions.clone(),
    )?;
    api.with_version("0.0.1".parse().unwrap())
        .get("available_blocks", |req, state| {
            async move {
                let hash = req.blob_param("parent_hash")?;
                state
                    .get_available_blocks(&hash)
                    .await
                    .context(BlockAvailableSnafu {
                        resource: hash.to_string(),
                    })
            }
            .boxed()
        })?
        .get("claim_block", |req, state| {
            async move {
                let hash = req.blob_param("block_hash")?;
                let signature = req.blob_param("signature")?;
                state
                    .claim_block(&hash, &signature)
                    .await
                    .context(BlockClaimSnafu {
                        resource: hash.to_string(),
                    })
            }
            .boxed()
        })?;
    Ok(api)
}
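
Editor's note: for context, a rough sketch of how this module would be wired into a tide-disco app. Everything here is an assumption for illustration: `MySource` and `MyTypes` stand in for real `BuilderDataSource` and `NodeType` implementations, and the exact `App`/`serve` signatures may differ between tide-disco releases.

// Hypothetical wiring, not part of this commit.
use async_std::sync::RwLock;
use tide_disco::App;

async fn serve_builder_api(source: MySource) -> std::io::Result<()> {
    let options = Options::default();
    // App state must implement ReadState; RwLock<MySource> does, and reads
    // delegate to the BuilderDataSource impl on MySource.
    let mut app = App::<RwLock<MySource>, Error>::with_state(RwLock::new(source));
    let api = define_api::<RwLock<MySource>, MyTypes>(&options)
        .expect("failed to load builder API spec");
    app.register_module("builder", api)
        .expect("failed to register builder module");
    app.serve("0.0.0.0:8080").await
}
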
18 changes: 12 additions & 6 deletions src/data_source.rs
@@ -1,13 +1,19 @@
 use std::sync::Arc;

 use async_trait::async_trait;
-use hotshot_types::{data::VidCommitment, traits::{node_implementation::NodeType, signature_key::SignatureKey}};
+use commit::Committable;
+use hotshot_types::{data::VidCommitment, traits::{node_implementation::NodeType, signature_key::SignatureKey, BlockPayload}};
+use tagged_base64::TaggedBase64;

-use crate::block_metadata::{BlockHash, BlockMetadata};
+use crate::{block_metadata::{BlockHash, BlockMetadata}, builder::BuildError};

 #[async_trait]
-pub trait BuilderDataSource<I: NodeType> {
-    async fn get_available_blocks(&self, for_parent: &VidCommitment) -> Vec<BlockMetadata<I>>;
-    async fn claim_block(&self, block_hash: BlockHash, signature: <<I as NodeType>::SignatureKey as SignatureKey>::PureAssembledSignatureType) -> Arc<Vec<u8>>;
-    async fn submit_txn(&self, txn: <I as NodeType>::Transaction);
+pub trait BuilderDataSource<I>
+where
+    I: NodeType,
+    <<I as NodeType>::SignatureKey as SignatureKey>::PureAssembledSignatureType:
+        for<'a> TryFrom<&'a TaggedBase64> + Into<TaggedBase64>,
+{
+    async fn get_available_blocks(&self, for_parent: &VidCommitment) -> Result<Vec<BlockMetadata<I>>, BuildError>;
+    async fn claim_block(&self, block_hash: &BlockHash<I>, signature: &<<I as NodeType>::SignatureKey as SignatureKey>::PureAssembledSignatureType) -> Result<I::BlockPayload, BuildError>;
+    async fn submit_txn(&self, txn: <I as NodeType>::Transaction) -> Result<(), BuildError>;
 }
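
Editor's note: this trait is what the tide-disco handlers in src/builder.rs call into. As a sanity check on the bounds, a sketch (not in the commit) of a generic helper that forwards to a data source; the where-clause mirrors what `define_api` requires of its state type:

// Hypothetical generic helper, for illustration only.
async fn available_blocks_for_parent<I, D>(
    source: &D,
    parent: &VidCommitment,
) -> Result<Vec<BlockMetadata<I>>, BuildError>
where
    I: NodeType,
    <<I as NodeType>::SignatureKey as SignatureKey>::PureAssembledSignatureType:
        for<'a> TryFrom<&'a TaggedBase64> + Into<TaggedBase64>,
    D: BuilderDataSource<I> + Sync,
{
    source.get_available_blocks(parent).await
}
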

9 changes: 5 additions & 4 deletions src/lib.rs
@@ -1,4 +1,5 @@
-mod block_metadata;
-mod data_source;
-mod query_data;

+pub mod block_metadata;
+pub mod builder;
+pub mod data_source;
+pub mod query_data;
+mod api;
