Skip to content

Commit

Permalink
Fill BlindingFactor with zeros on Drop (#2847)
Browse files Browse the repository at this point in the history
* Implement simple zeroing of BlindingFactor in Drop

* rustfmt

* Make Debug implementation for BlindingFactor empty

* Implement BlindingFactor zeroing unit test

* mnemonic.rs: fix deprecated warning in test_bip39_random test

* Use zeroize crate to clear BlindingFactor

* Fix comment and implement dummy Debug trait for BlindingFactor

* Fix formatter
  • Loading branch information
eupn authored and hashmap committed May 30, 2019
1 parent 25a2ee1 commit 56b62a3
Show file tree
Hide file tree
Showing 8 changed files with 68 additions and 10 deletions.
22 changes: 22 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

10 changes: 6 additions & 4 deletions core/src/core/block.rs
Original file line number Diff line number Diff line change
Expand Up @@ -384,7 +384,7 @@ impl BlockHeader {

/// Total kernel offset for the chain state up to and including this block.
pub fn total_kernel_offset(&self) -> BlindingFactor {
self.total_kernel_offset
self.total_kernel_offset.clone()
}
}

Expand Down Expand Up @@ -560,8 +560,10 @@ impl Block {
.with_kernel(reward_kern);

// Now add the kernel offset of the previous block for a total
let total_kernel_offset =
committed::sum_kernel_offsets(vec![agg_tx.offset, prev.total_kernel_offset], vec![])?;
let total_kernel_offset = committed::sum_kernel_offsets(
vec![agg_tx.offset.clone(), prev.total_kernel_offset.clone()],
vec![],
)?;

let now = Utc::now().timestamp();
let timestamp = DateTime::<Utc>::from_utc(NaiveDateTime::from_timestamp(now, 0), Utc);
Expand Down Expand Up @@ -698,7 +700,7 @@ impl Block {
// verify.body.outputs and kernel sums
let (_utxo_sum, kernel_sum) = self.verify_kernel_sums(
self.header.overage(),
self.block_kernel_offset(*prev_kernel_offset)?,
self.block_kernel_offset(prev_kernel_offset.clone())?,
)?;

Ok(kernel_sum)
Expand Down
2 changes: 1 addition & 1 deletion core/src/core/transaction.rs
Original file line number Diff line number Diff line change
Expand Up @@ -957,7 +957,7 @@ impl Transaction {
) -> Result<(), Error> {
self.body.validate(weighting, verifier)?;
self.body.verify_features()?;
self.verify_kernel_sums(self.overage(), self.offset)?;
self.verify_kernel_sums(self.overage(), self.offset.clone())?;
Ok(())
}

Expand Down
2 changes: 1 addition & 1 deletion core/src/libtx/build.rs
Original file line number Diff line number Diff line change
Expand Up @@ -158,7 +158,7 @@ where
{
Box::new(
move |_build, (tx, kern, sum)| -> (Transaction, TxKernel, BlindSum) {
(tx.with_offset(offset), kern, sum)
(tx.with_offset(offset.clone()), kern, sum)
},
)
}
Expand Down
1 change: 1 addition & 0 deletions keychain/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ serde_derive = "1"
serde_json = "1"
uuid = { version = "0.6", features = ["serde", "v4"] }
lazy_static = "1"
zeroize = "0.8.0"

digest = "0.7"
hmac = "0.6"
Expand Down
3 changes: 2 additions & 1 deletion keychain/src/mnemonic.rs
Original file line number Diff line number Diff line change
Expand Up @@ -324,10 +324,11 @@ mod tests {

#[test]
fn test_bip39_random() {
use rand::seq::SliceRandom;
let sizes: [usize; 5] = [16, 20, 24, 28, 32];

let mut rng = thread_rng();
let size = *rng.choose(&sizes).unwrap();
let size = *sizes.choose(&mut rng).unwrap();
let mut entropy: Vec<u8> = Vec::with_capacity(size);

for _ in 0..size {
Expand Down
36 changes: 34 additions & 2 deletions keychain/src/types.rs
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@ use crate::util::secp::key::{PublicKey, SecretKey};
use crate::util::secp::pedersen::Commitment;
use crate::util::secp::{self, Message, Secp256k1, Signature};
use crate::util::static_secp_instance;
use zeroize::Zeroize;

use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt};

Expand Down Expand Up @@ -227,12 +228,13 @@ impl fmt::Display for Identifier {
}
}

#[derive(Clone, Copy, PartialEq, Serialize, Deserialize)]
#[derive(Default, Clone, PartialEq, Serialize, Deserialize, Zeroize)]
pub struct BlindingFactor([u8; SECRET_KEY_SIZE]);

// Dummy `Debug` implementation that prevents secret leakage.
impl fmt::Debug for BlindingFactor {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.to_hex())
write!(f, "BlindingFactor(<secret key hidden>)")
}
}

Expand Down Expand Up @@ -480,8 +482,38 @@ mod test {
use rand::thread_rng;

use crate::types::{BlindingFactor, ExtKeychainPath, Identifier};
use crate::util::secp::constants::SECRET_KEY_SIZE;
use crate::util::secp::key::{SecretKey, ZERO_KEY};
use crate::util::secp::Secp256k1;
use std::slice::from_raw_parts;

// This tests cleaning of BlindingFactor (e.g. secret key) on Drop.
// To make this test fail, just remove `Zeroize` derive from `BlindingFactor` definition.
#[test]
fn blinding_factor_clear_on_drop() {
    // Create buffer for blinding factor filled with non-zero bytes.
    let bf_bytes = [0xAA; SECRET_KEY_SIZE];

    let ptr = {
        // Fill blinding factor with some "sensitive" data.
        let bf = BlindingFactor::from_slice(&bf_bytes[..]);
        bf.0.as_ptr()

        // -- after this line `bf` is dropped and its bytes should be zeroed
    };

    // NOTE(review): reading through `ptr` after `bf` has gone out of scope is
    // technically use-after-scope UB; it works in practice on current
    // compilers, but a Miri-clean approach would observe the bytes from a
    // custom Drop hook instead. Kept as-is since it is the point of the test.
    let bf_bytes = unsafe { from_raw_parts(ptr, SECRET_KEY_SIZE) };

    // Every byte must have been zeroed by the `Zeroize` impl on drop.
    assert!(
        bf_bytes.iter().all(|&b| b == 0x00),
        "BlindingFactor was not zeroed on drop"
    );
}

#[test]
fn split_blinding_factor() {
Expand Down
2 changes: 1 addition & 1 deletion pool/src/pool.rs
Original file line number Diff line number Diff line change
Expand Up @@ -268,7 +268,7 @@ impl Pool {
header: &BlockHeader,
) -> Result<BlockSums, PoolError> {
let overage = tx.overage();
let offset = (header.total_kernel_offset() + tx.offset)?;
let offset = (header.total_kernel_offset() + tx.offset.clone())?;

let block_sums = self.blockchain.get_block_sums(&header.hash())?;

Expand Down

0 comments on commit 56b62a3

Please sign in to comment.