Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
114 changes: 57 additions & 57 deletions acvm-repo/acir_field/src/generic_ark.rs
Original file line number Diff line number Diff line change
Expand Up @@ -429,63 +429,6 @@ impl<F: PrimeField> SubAssign for FieldElement<F> {
}
}

// Unit tests for the arkworks-backed `FieldElement` wrapper, instantiated
// here with the BN254 scalar field (`ark_bn254::Fr`).
#[cfg(test)]
mod tests {
    // `x AND x` over a bit width that covers `x` is the identity, so the
    // result must serialize to exactly the same big-endian bytes as `x`.
    #[test]
    fn and() {
        let max = 10_000u32;

        // Smallest bit width sufficient to represent every value below `max`.
        let num_bits = (std::mem::size_of::<u32>() * 8) as u32 - max.leading_zeros();

        for x in 0..max {
            let x = crate::generic_ark::FieldElement::<ark_bn254::Fr>::from(x as i128);
            let res = x.and(&x, num_bits);
            assert_eq!(res.to_be_bytes(), x.to_be_bytes());
        }
    }

    // Pins the hex serialization of small negative elements against fixed
    // vectors so the wire format cannot change silently.
    #[test]
    fn serialize_fixed_test_vectors() {
        // Serialized field elements of 0, -1, -2, -3
        let hex_strings = vec![
            "0000000000000000000000000000000000000000000000000000000000000000",
            "30644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000000",
            "30644e72e131a029b85045b68181585d2833e84879b9709143e1f593efffffff",
            "30644e72e131a029b85045b68181585d2833e84879b9709143e1f593effffffe",
        ];

        for (i, string) in hex_strings.into_iter().enumerate() {
            let minus_i_field_element =
                -crate::generic_ark::FieldElement::<ark_bn254::Fr>::from(i as i128);
            assert_eq!(minus_i_field_element.to_hex(), string);
        }
    }

    // `from_hex` must accept both odd- and even-length hex digit strings
    // (with the `0x` prefix) and leading zeros, yielding the same element.
    #[test]
    fn deserialize_even_and_odd_length_hex() {
        // Test cases of (odd, even) length hex strings
        let hex_strings =
            vec![("0x0", "0x00"), ("0x1", "0x01"), ("0x002", "0x0002"), ("0x00003", "0x000003")];
        for (i, case) in hex_strings.into_iter().enumerate() {
            let i_field_element =
                crate::generic_ark::FieldElement::<ark_bn254::Fr>::from(i as i128);
            let odd_field_element =
                crate::generic_ark::FieldElement::<ark_bn254::Fr>::from_hex(case.0).unwrap();
            let even_field_element =
                crate::generic_ark::FieldElement::<ark_bn254::Fr>::from_hex(case.1).unwrap();

            assert_eq!(i_field_element, odd_field_element);
            assert_eq!(odd_field_element, even_field_element);
        }
    }

    // The BN254 scalar field modulus is 254 bits wide.
    #[test]
    fn max_num_bits_smoke() {
        let max_num_bits_bn254 = crate::generic_ark::FieldElement::<ark_bn254::Fr>::max_num_bits();
        assert_eq!(max_num_bits_bn254, 254);
    }
}

fn mask_vector_le(bytes: &mut [u8], num_bits: usize) {
// reverse to big endian format
bytes.reverse();
Expand Down Expand Up @@ -543,3 +486,60 @@ fn superscript(n: u64) -> String {
panic!("{}", n.to_string() + " can't be converted to superscript.");
}
}

// Unit tests for the arkworks-backed `FieldElement` wrapper, instantiated
// here with the BN254 scalar field (`ark_bn254::Fr`).
#[cfg(test)]
mod tests {
    // `x AND x` over a bit width that covers `x` is the identity, so the
    // result must serialize to exactly the same big-endian bytes as `x`.
    #[test]
    fn and() {
        let max = 10_000u32;

        // Smallest bit width sufficient to represent every value below `max`.
        let num_bits = (std::mem::size_of::<u32>() * 8) as u32 - max.leading_zeros();

        for x in 0..max {
            let x = crate::generic_ark::FieldElement::<ark_bn254::Fr>::from(x as i128);
            let res = x.and(&x, num_bits);
            assert_eq!(res.to_be_bytes(), x.to_be_bytes());
        }
    }

    // Pins the hex serialization of small negative elements against fixed
    // vectors so the wire format cannot change silently.
    #[test]
    fn serialize_fixed_test_vectors() {
        // Serialized field elements of 0, -1, -2, -3.
        // A fixed-size array avoids the needless heap allocation of `vec!`
        // (clippy: `useless_vec`); arrays iterate by value since Rust 2021.
        let hex_strings = [
            "0000000000000000000000000000000000000000000000000000000000000000",
            "30644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000000",
            "30644e72e131a029b85045b68181585d2833e84879b9709143e1f593efffffff",
            "30644e72e131a029b85045b68181585d2833e84879b9709143e1f593effffffe",
        ];

        for (i, string) in hex_strings.into_iter().enumerate() {
            let minus_i_field_element =
                -crate::generic_ark::FieldElement::<ark_bn254::Fr>::from(i as i128);
            assert_eq!(minus_i_field_element.to_hex(), string);
        }
    }

    // `from_hex` must accept both odd- and even-length hex digit strings
    // (with the `0x` prefix) and leading zeros, yielding the same element.
    #[test]
    fn deserialize_even_and_odd_length_hex() {
        // Test cases of (odd, even) length hex strings.
        let hex_strings =
            [("0x0", "0x00"), ("0x1", "0x01"), ("0x002", "0x0002"), ("0x00003", "0x000003")];
        for (i, case) in hex_strings.into_iter().enumerate() {
            let i_field_element =
                crate::generic_ark::FieldElement::<ark_bn254::Fr>::from(i as i128);
            let odd_field_element =
                crate::generic_ark::FieldElement::<ark_bn254::Fr>::from_hex(case.0).unwrap();
            let even_field_element =
                crate::generic_ark::FieldElement::<ark_bn254::Fr>::from_hex(case.1).unwrap();

            assert_eq!(i_field_element, odd_field_element);
            assert_eq!(odd_field_element, even_field_element);
        }
    }

    // The BN254 scalar field modulus is 254 bits wide.
    #[test]
    fn max_num_bits_smoke() {
        let max_num_bits_bn254 = crate::generic_ark::FieldElement::<ark_bn254::Fr>::max_num_bits();
        assert_eq!(max_num_bits_bn254, 254);
    }
}
5 changes: 1 addition & 4 deletions acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs
Original file line number Diff line number Diff line change
Expand Up @@ -72,12 +72,9 @@ impl RangeOptimizer {
}
}


Opcode::BlackBoxFuncCall(BlackBoxFuncCall::RANGE {
input: FunctionInput { witness, num_bits },
}) => {
Some((*witness, *num_bits))
}
}) => Some((*witness, *num_bits)),

_ => None,
}) else {
Expand Down
2 changes: 1 addition & 1 deletion acvm-repo/acvm/src/pwg/blackbox/bigint.rs
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,7 @@ impl BigIntSolver {
pub(crate) fn bigint_to_bytes(
&self,
input: u32,
outputs: &Vec<Witness>,
outputs: &[Witness],
initial_witness: &mut WitnessMap,
) -> Result<(), OpcodeResolutionError> {
let bigint = self.get_bigint(input, BlackBoxFunc::BigIntToLeBytes)?;
Expand Down
8 changes: 3 additions & 5 deletions aztec_macros/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -809,7 +809,7 @@ fn get_serialized_length(
) -> Result<u64, AztecMacroError> {
let (struct_name, maybe_stored_in_state) = match typ {
Type::Struct(struct_type, generics) => {
Ok((struct_type.borrow().name.0.contents.clone(), generics.get(0)))
Ok((struct_type.borrow().name.0.contents.clone(), generics.first()))
}
_ => Err(AztecMacroError::CouldNotAssignStorageSlots {
secondary_message: Some("State storage variable must be a struct".to_string()),
Expand Down Expand Up @@ -859,7 +859,7 @@ fn get_serialized_length(
let serialized_trait_impl_shared = interner.get_trait_implementation(*serialized_trait_impl_id);
let serialized_trait_impl = serialized_trait_impl_shared.borrow();

match serialized_trait_impl.trait_generics.get(0).unwrap() {
match serialized_trait_impl.trait_generics.first().unwrap() {
Type::Constant(value) => Ok(*value),
_ => Err(AztecMacroError::CouldNotAssignStorageSlots { secondary_message: None }),
}
Expand Down Expand Up @@ -946,9 +946,7 @@ fn assign_storage_slots(
let slot_arg_expression = interner.expression(&new_call_expression.arguments[1]);

let current_storage_slot = match slot_arg_expression {
HirExpression::Literal(HirLiteral::Integer(slot, _)) => {
Ok(slot.borrow().to_u128())
}
HirExpression::Literal(HirLiteral::Integer(slot, _)) => Ok(slot.to_u128()),
_ => Err((
AztecMacroError::CouldNotAssignStorageSlots {
secondary_message: Some(
Expand Down
2 changes: 1 addition & 1 deletion compiler/noirc_errors/src/debug_info.rs
Original file line number Diff line number Diff line change
Expand Up @@ -90,7 +90,7 @@ impl DebugInfo {

for (opcode_location, locations) in self.locations.iter() {
for location in locations.iter() {
let opcodes = accumulator.entry(*location).or_insert(Vec::new());
let opcodes = accumulator.entry(*location).or_default();
opcodes.push(opcode_location);
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -269,7 +269,7 @@ impl<'block> BrilligBlock<'block> {
unreachable!("expected a call instruction")
};

let Value::Function(func_id) = &dfg[*func] else {
let Value::Function(func_id) = &dfg[*func] else {
unreachable!("expected a function value")
};

Expand Down
4 changes: 1 addition & 3 deletions compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -684,9 +684,7 @@ impl<'a> FunctionContext<'a> {
&mut self,
assert_message: &Option<Box<Expression>>,
) -> Result<Option<Box<ConstrainError>>, RuntimeError> {
let Some(assert_message_expr) = assert_message else {
return Ok(None)
};
let Some(assert_message_expr) = assert_message else { return Ok(None) };

if let ast::Expression::Literal(ast::Literal::Str(assert_message)) =
assert_message_expr.as_ref()
Expand Down
4 changes: 2 additions & 2 deletions compiler/noirc_frontend/src/debug/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -96,7 +96,7 @@ impl DebugInstrumenter {
self.walk_scope(&mut func.body.0, func.span);

// prepend fn params:
func.body.0 = vec![set_fn_params, func.body.0.clone()].concat();
func.body.0 = [set_fn_params, func.body.0.clone()].concat();
}

// Modify a vector of statements in-place, adding instrumentation for sets and drops.
Expand Down Expand Up @@ -130,7 +130,7 @@ impl DebugInstrumenter {
let span = Span::empty(span.end());

// drop scope variables
let scope_vars = self.scope.pop().unwrap_or(HashMap::default());
let scope_vars = self.scope.pop().unwrap_or_default();
let drop_vars_stmts = scope_vars.values().map(|var_id| build_drop_var_stmt(*var_id, span));
statements.extend(drop_vars_stmts);

Expand Down
2 changes: 1 addition & 1 deletion compiler/noirc_frontend/src/hir/type_check/expr.rs
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@ impl<'interner> TypeChecker<'interner> {
let elem_types = vecmap(&arr, |arg| self.check_expression(arg));

let first_elem_type = elem_types
.get(0)
.first()
.cloned()
.unwrap_or_else(|| self.interner.next_type_variable());

Expand Down
42 changes: 21 additions & 21 deletions compiler/noirc_frontend/src/lexer/token.rs
Original file line number Diff line number Diff line change
Expand Up @@ -774,6 +774,27 @@ impl Keyword {
}
}

/// A sequence of lexed tokens, each paired with its source span.
pub struct Tokens(pub Vec<SpannedToken>);

// Iterator type handed to chumsky: converts each `SpannedToken` into the
// `(Token, Span)` pair the parser consumes.
type TokenMapIter = Map<IntoIter<SpannedToken>, fn(SpannedToken) -> (Token, Span)>;

impl<'a> From<Tokens> for chumsky::Stream<'a, Token, Span, TokenMapIter> {
    fn from(tokens: Tokens) -> Self {
        // Split a spanned token into the (token, span) pair chumsky expects.
        fn get_span(spanned: SpannedToken) -> (Token, Span) {
            let span = spanned.to_span();
            (spanned.into_token(), span)
        }

        // chumsky requires an explicit end-of-input span: the span of the
        // final token, or a zero-position single-character span when the
        // token stream is empty.
        let end_of_input = tokens.0.last().map_or(Span::single_char(0), |last| last.to_span());

        chumsky::Stream::from_iter(end_of_input, tokens.0.into_iter().map(get_span as fn(_) -> _))
    }
}

#[cfg(test)]
mod keywords {
use strum::IntoEnumIterator;
Expand All @@ -796,24 +817,3 @@ mod keywords {
}
}
}

/// A sequence of lexed tokens, each paired with its source span.
pub struct Tokens(pub Vec<SpannedToken>);

// Iterator type handed to chumsky: converts each `SpannedToken` into the
// `(Token, Span)` pair the parser consumes.
type TokenMapIter = Map<IntoIter<SpannedToken>, fn(SpannedToken) -> (Token, Span)>;

impl<'a> From<Tokens> for chumsky::Stream<'a, Token, Span, TokenMapIter> {
    fn from(tokens: Tokens) -> Self {
        // chumsky requires an explicit end-of-input span: the span of the
        // final token, or a zero-position single-character span when the
        // token stream is empty.
        let end_of_input = match tokens.0.last() {
            Some(spanned_token) => spanned_token.to_span(),
            None => Span::single_char(0),
        };

        // Split a spanned token into the (token, span) pair chumsky expects.
        fn get_span(token: SpannedToken) -> (Token, Span) {
            let span = token.to_span();
            (token.into_token(), span)
        }

        let iter = tokens.0.into_iter().map(get_span as fn(_) -> _);
        chumsky::Stream::from_iter(end_of_input, iter)
    }
}
4 changes: 3 additions & 1 deletion compiler/noirc_frontend/src/monomorphization/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -733,7 +733,9 @@ impl<'interner> Monomorphizer<'interner> {
}
DefinitionKind::Global(global_id) => {
let Some(let_) = self.interner.get_global_let_statement(*global_id) else {
unreachable!("Globals should have a corresponding let statement by monomorphization")
unreachable!(
"Globals should have a corresponding let statement by monomorphization"
)
};
self.expr(let_.expression)
}
Expand Down
4 changes: 2 additions & 2 deletions compiler/noirc_frontend/src/node_interner.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1147,7 +1147,7 @@ impl NodeInterner {
})
.collect()
})
.unwrap_or(vec![])
.unwrap_or_default()
}

/// Similar to `lookup_trait_implementation` but does not apply any type bindings on success.
Expand Down Expand Up @@ -1670,7 +1670,7 @@ impl Methods {
for method in self.iter() {
match interner.function_meta(&method).typ.instantiate(interner).0 {
Type::Function(args, _, _) => {
if let Some(object) = args.get(0) {
if let Some(object) = args.first() {
let mut bindings = TypeBindings::new();

if object.try_unify(typ, &mut bindings).is_ok() {
Expand Down
Loading