feat: reveal multiple rangesets with one rangeset. #664

Open · wants to merge 25 commits into base: dev · Changes from 22 commits
39 changes: 25 additions & 14 deletions crates/core/src/index.rs
@@ -6,18 +6,19 @@
     attestation::{Field, FieldId},
     transcript::{
         hash::{PlaintextHash, PlaintextHashSecret},
-        Idx,
+        Direction, Idx,
     },
 };
 
-/// Index for items which can be looked up by transcript index or field id.
+/// Index for items which can be looked up by transcript's (direction and index)
+/// or field id.
 #[derive(Debug, Clone)]
 pub(crate) struct Index<T> {
     items: Vec<T>,
     // Lookup by field id.
     field_ids: HashMap<FieldId, usize>,
-    // Lookup by transcript index.
-    transcript_idxs: HashMap<Idx, usize>,
+    // Lookup by transcript direction and index.
+    transcript_idxs: HashMap<(Direction, Idx), usize>,
 }
 
 impl<T> Default for Index<T> {
@@ -60,14 +61,14 @@
 impl<T> Index<T> {
     pub(crate) fn new<F>(items: Vec<T>, f: F) -> Self
     where
-        F: Fn(&T) -> (&FieldId, &Idx),
+        F: Fn(&T) -> (&FieldId, Direction, &Idx),
     {
         let mut field_ids = HashMap::new();
         let mut transcript_idxs = HashMap::new();
         for (i, item) in items.iter().enumerate() {
-            let (id, idx) = f(item);
+            let (id, dir, idx) = f(item);
             field_ids.insert(*id, i);
-            transcript_idxs.insert(idx.clone(), i);
+            transcript_idxs.insert((dir, idx.clone()), i);
         }
         Self {
             items,
@@ -84,23 +85,23 @@
         self.field_ids.get(id).map(|i| &self.items[*i])
     }
 
-    pub(crate) fn get_by_transcript_idx(&self, idx: &Idx) -> Option<&T> {
-        self.transcript_idxs.get(idx).map(|i| &self.items[*i])
+    pub(crate) fn get_by_transcript_idx(&self, dir_idx: &(Direction, Idx)) -> Option<&T> {
+        self.transcript_idxs.get(dir_idx).map(|i| &self.items[*i])
     }
 }
 
 impl From<Vec<Field<PlaintextHash>>> for Index<Field<PlaintextHash>> {
     fn from(items: Vec<Field<PlaintextHash>>) -> Self {
         Self::new(items, |field: &Field<PlaintextHash>| {
-            (&field.id, &field.data.idx)
+            (&field.id, field.data.direction, &field.data.idx)
         })
     }
 }
 
 impl From<Vec<PlaintextHashSecret>> for Index<PlaintextHashSecret> {
     fn from(items: Vec<PlaintextHashSecret>) -> Self {
         Self::new(items, |item: &PlaintextHashSecret| {
-            (&item.commitment, &item.idx)
+            (&item.commitment, item.direction, &item.idx)
         })
     }
 }
@@ -114,23 +115,28 @@
     #[derive(PartialEq, Debug, Clone)]
     struct Stub {
         field_index: FieldId,
+        direction: Direction,
         index: Idx,
     }
 
     impl From<Vec<Stub>> for Index<Stub> {
         fn from(items: Vec<Stub>) -> Self {
-            Self::new(items, |item: &Stub| (&item.field_index, &item.index))
+            Self::new(items, |item: &Stub| {
+                (&item.field_index, item.direction, &item.index)
+            })
         }
     }
 
     fn stubs() -> Vec<Stub> {
         vec![
             Stub {
                 field_index: FieldId(1),
+                direction: Direction::Sent,
                 index: Idx::new(RangeSet::from([0..1, 18..21])),
             },
             Stub {
                 field_index: FieldId(2),
+                direction: Direction::Received,
                 index: Idx::new(RangeSet::from([1..5, 8..11])),
             },
         ]
@@ -144,10 +150,12 @@
         let stubs = vec![
             Stub {
                 field_index: FieldId(1),
+                direction: Direction::Sent,
                 index: stub_a_index.clone(),
             },
             Stub {
                 field_index: stub_b_field_index,
+                direction: Direction::Received,
                 index: Idx::new(RangeSet::from([1..5, 8..11])),
             },
         ];
@@ -158,7 +166,7 @@
             Some(&stubs[1])
         );
         assert_eq!(
-            stubs_index.get_by_transcript_idx(&stub_a_index),
+            stubs_index.get_by_transcript_idx(&(Direction::Sent, stub_a_index)),
             Some(&stubs[0])
         );
     }
@@ -172,6 +180,9 @@
         let wrong_field_index = FieldId(200);
 
         assert_eq!(stubs_index.get_by_field_id(&wrong_field_index), None);
-        assert_eq!(stubs_index.get_by_transcript_idx(&wrong_index), None);
+        assert_eq!(
+            stubs_index.get_by_transcript_idx(&(Direction::Sent, wrong_index)),
+            None
+        );
     }
 }
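
The gist of the index.rs change: `Index` previously keyed lookups by `Idx` alone, so a sent range and a received range covering the same byte positions could not coexist in the map. Keying by `(Direction, Idx)` removes that collision. A minimal standalone sketch of the idea (the `Direction` and `Idx` below are simplified stand-ins for illustration, not the crate's actual types):

use std::collections::HashMap;

// Simplified stand-ins for the crate's types, for illustration only.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
enum Direction {
    Sent,
    Received,
}

// A transcript index, reduced here to a single byte range.
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
struct Idx(std::ops::Range<usize>);

fn main() {
    // Keyed by (Direction, Idx): identical ranges in opposite directions
    // index different items instead of overwriting each other.
    let mut transcript_idxs: HashMap<(Direction, Idx), usize> = HashMap::new();
    transcript_idxs.insert((Direction::Sent, Idx(0..10)), 0);
    transcript_idxs.insert((Direction::Received, Idx(0..10)), 1);

    assert_eq!(transcript_idxs.get(&(Direction::Sent, Idx(0..10))), Some(&0));
    assert_eq!(transcript_idxs.get(&(Direction::Received, Idx(0..10))), Some(&1));
}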
12 changes: 11 additions & 1 deletion crates/core/src/transcript.rs
@@ -40,7 +40,7 @@
 use std::{fmt, ops::Range};
 
 use serde::{Deserialize, Serialize};
-use utils::range::{Difference, IndexRanges, RangeSet, ToRangeSet, Union};
+use utils::range::{Difference, IndexRanges, RangeSet, Subset, ToRangeSet, Union};
 
 use crate::connection::TranscriptLength;
 
@@ -494,6 +494,11 @@
         self.0.len()
     }
 
+    /// Returns the number of ranges in the index.
+    pub fn len_ranges(&self) -> usize {
+        self.0.len_ranges()
+    }
+

     /// Returns whether the index is empty.
     pub fn is_empty(&self) -> bool {
         self.0.is_empty()
@@ -508,6 +513,11 @@
     pub fn union(&self, other: &Idx) -> Idx {
         Idx(self.0.union(&other.0))
     }
+
+    /// Checks if this index is a subset of another.
+    pub fn is_subset(&self, other: &Idx) -> bool {
+        self.0.is_subset(&other.0)
+    }
 }
 
 /// Builder for [`Idx`].
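
The new `Idx::is_subset` is the primitive behind revealing multiple committed rangesets with one request: a reveal is acceptable when its index lies inside the union of the committed indices. A rough standalone rendition of that containment check, assuming sorted, canonical (disjoint and non-adjacent) range lists; the actual implementation lives in the `utils::range` crate:

use std::ops::Range;

/// Returns true if every byte of `needle` is covered by `haystack`.
/// Both lists are assumed sorted, disjoint, and non-adjacent (canonical),
/// so a contiguous needle range must fit inside a single haystack range.
fn is_subset(needle: &[Range<usize>], haystack: &[Range<usize>]) -> bool {
    needle
        .iter()
        .all(|n| haystack.iter().any(|h| h.start <= n.start && n.end <= h.end))
}

fn main() {
    let committed = vec![0..5, 8..20];
    // One reveal rangeset spanning pieces of several committed ranges.
    assert!(is_subset(&[1..3, 10..12, 15..19], &committed));
    // 5..8 was never committed, so 4..9 must be rejected.
    assert!(!is_subset(&[4..9], &committed));
}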
5 changes: 5 additions & 0 deletions crates/core/src/transcript/encoding/tree.rs
@@ -192,6 +192,11 @@ impl EncodingTree
     pub fn contains(&self, idx: &(Direction, Idx)) -> bool {
         self.idxs.contains_right(idx)
     }
+
+    /// Returns the committed transcript indices.
+    pub(crate) fn transcript_indices(&self) -> impl IntoIterator<Item = &(Direction, Idx)> {
+        self.idxs.right_values()
+    }
 }
 
 #[cfg(test)]
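
`transcript_indices` exposes every committed `(Direction, Idx)` pair, which is what a caller needs in order to build the union of commitments that a single requested rangeset is checked against. A standalone sketch of that union step (types and names here are illustrative, not the PR's actual call sites):

use std::ops::Range;

// Stand-in direction type for illustration.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Direction {
    Sent,
    Received,
}

// Merge all committed ranges for one direction into a single sorted,
// coalesced list: the "one rangeset" a reveal is then checked against.
fn committed_union(
    committed: &[(Direction, Vec<Range<usize>>)],
    dir: Direction,
) -> Vec<Range<usize>> {
    let mut ranges: Vec<Range<usize>> = committed
        .iter()
        .filter(|(d, _)| *d == dir)
        .flat_map(|(_, rs)| rs.iter().cloned())
        .collect();
    ranges.sort_by_key(|r| r.start);

    let mut merged: Vec<Range<usize>> = Vec::new();
    for r in ranges {
        match merged.last_mut() {
            // Overlapping or touching ranges coalesce.
            Some(last) if r.start <= last.end => last.end = last.end.max(r.end),
            _ => merged.push(r),
        }
    }
    merged
}

fn main() {
    let committed = vec![
        (Direction::Sent, vec![0..4, 10..14]),
        (Direction::Sent, vec![4..6]),
        (Direction::Received, vec![0..8]),
    ];
    // 0..4 and 4..6 coalesce into 0..6 for the sent direction.
    assert_eq!(committed_union(&committed, Direction::Sent), vec![0..6, 10..14]);
}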