Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion compiler/noirc_frontend/src/hir/comptime/display.rs
Original file line number Diff line number Diff line change
Expand Up @@ -262,7 +262,8 @@ impl<'interner> TokenPrettyPrinter<'interner> {
| Token::Ampersand
| Token::SliceStart
| Token::ShiftLeft
| Token::ShiftRight => {
| Token::ShiftRight
| Token::LogicalAnd => {
self.last_was_op = true;
write!(f, " {token}")
}
Expand Down
8 changes: 0 additions & 8 deletions compiler/noirc_frontend/src/lexer/errors.rs
Original file line number Diff line number Diff line change
Expand Up @@ -26,8 +26,6 @@ pub enum LexerErrorKind {
MalformedFuzzAttribute { location: Location },
#[error("{:?} is not a valid inner attribute", found)]
InvalidInnerAttribute { location: Location, found: String },
#[error("Logical and used instead of bitwise and")]
LogicalAnd { location: Location },
#[error("Unterminated block comment")]
UnterminatedBlockComment { location: Location },
#[error("Unterminated string literal")]
Expand Down Expand Up @@ -72,7 +70,6 @@ impl LexerErrorKind {
LexerErrorKind::MalformedTestAttribute { location, .. } => *location,
LexerErrorKind::MalformedFuzzAttribute { location, .. } => *location,
LexerErrorKind::InvalidInnerAttribute { location, .. } => *location,
LexerErrorKind::LogicalAnd { location } => *location,
LexerErrorKind::UnterminatedBlockComment { location } => *location,
LexerErrorKind::UnterminatedStringLiteral { location } => *location,
LexerErrorKind::InvalidFormatString { location, .. } => *location,
Expand Down Expand Up @@ -136,11 +133,6 @@ impl LexerErrorKind {
format!(" {found} is not a valid inner attribute"),
*location,
),
LexerErrorKind::LogicalAnd { location } => (
"Noir has no logical-and (&&) operator since short-circuiting is much less efficient when compiling to circuits".to_string(),
"Try `&` instead, or use `if` only if you require short-circuiting".to_string(),
*location,
),
LexerErrorKind::UnterminatedBlockComment { location } => ("Unterminated block comment".to_string(), "Unterminated block comment".to_string(), *location),
LexerErrorKind::UnterminatedStringLiteral { location } =>
("Unterminated string literal".to_string(), "Unterminated string literal".to_string(), *location),
Expand Down
7 changes: 3 additions & 4 deletions compiler/noirc_frontend/src/lexer/lexer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@
done: false,
skip_comments: true,
skip_whitespaces: true,
max_integer: BigInt::from_biguint(num_bigint::Sign::Plus, FieldElement::modulus())

Check warning on line 55 in compiler/noirc_frontend/src/lexer/lexer.rs

View workflow job for this annotation

GitHub Actions / Code

Unknown word (biguint)
- BigInt::one(),
}
}
Expand Down Expand Up @@ -102,10 +102,9 @@

fn ampersand(&mut self) -> SpannedTokenResult {
if self.peek_char_is('&') {
// When we issue this error the first '&' will already be consumed
// and the next token issued will be the next '&'.
let span = Span::inclusive(self.position, self.position + 1);
Err(LexerErrorKind::LogicalAnd { location: self.location(span) })
let start = self.position;
self.next_char();
Ok(Token::LogicalAnd.into_span(start, start + 1))
} else if self.peek_char_is('[') {
self.single_char_token(Token::SliceStart)
} else {
Expand Down Expand Up @@ -1462,7 +1461,7 @@
// (expected_token_discriminator, strings_to_lex)
// expected_token_discriminator matches a given token when
// std::mem::discriminant returns the same discriminant for both.
fn blns_base64_to_statements(base64_str: String) -> Vec<(Option<Token>, Vec<String>)> {

Check warning on line 1464 in compiler/noirc_frontend/src/lexer/lexer.rs

View workflow job for this annotation

GitHub Actions / Code

Unknown word (blns)
use base64::engine::general_purpose;
use std::borrow::Cow;
use std::io::Cursor;
Expand Down Expand Up @@ -1520,13 +1519,13 @@
fn test_big_list_of_naughty_strings() {
use std::mem::discriminant;

let blns_contents = include_str!("./blns/blns.base64.json");

Check warning on line 1522 in compiler/noirc_frontend/src/lexer/lexer.rs

View workflow job for this annotation

GitHub Actions / Code

Unknown word (blns)

Check warning on line 1522 in compiler/noirc_frontend/src/lexer/lexer.rs

View workflow job for this annotation

GitHub Actions / Code

Unknown word (blns)

Check warning on line 1522 in compiler/noirc_frontend/src/lexer/lexer.rs

View workflow job for this annotation

GitHub Actions / Code

Unknown word (blns)
let blns_base64: Vec<String> =

Check warning on line 1523 in compiler/noirc_frontend/src/lexer/lexer.rs

View workflow job for this annotation

GitHub Actions / Code

Unknown word (blns)
serde_json::from_str(blns_contents).expect("BLNS json invalid");

Check warning on line 1524 in compiler/noirc_frontend/src/lexer/lexer.rs

View workflow job for this annotation

GitHub Actions / Code

Unknown word (BLNS)
for blns_base64_str in blns_base64 {
let statements = blns_base64_to_statements(blns_base64_str);
for (token_discriminator_opt, blns_program_strs) in statements {

Check warning on line 1527 in compiler/noirc_frontend/src/lexer/lexer.rs

View workflow job for this annotation

GitHub Actions / Code

Unknown word (strs)
for blns_program_str in blns_program_strs {

Check warning on line 1528 in compiler/noirc_frontend/src/lexer/lexer.rs

View workflow job for this annotation

GitHub Actions / Code

Unknown word (strs)
let mut expected_token_found = false;
let mut lexer = Lexer::new_with_dummy_file(&blns_program_str);
let mut result_tokens = Vec::new();
Expand Down Expand Up @@ -1620,7 +1619,7 @@

#[test]
fn test_non_ascii_comments() {
let cases = vec!["// 🙂", "// schön", "/* in the middle 🙂 of a comment */"];

Check warning on line 1622 in compiler/noirc_frontend/src/lexer/lexer.rs

View workflow job for this annotation

GitHub Actions / Code

Unknown word (schön)

for source in cases {
let mut lexer = Lexer::new_with_dummy_file(source);
Expand Down
6 changes: 6 additions & 0 deletions compiler/noirc_frontend/src/lexer/token.rs
Original file line number Diff line number Diff line change
Expand Up @@ -111,6 +111,8 @@ pub enum BorrowedToken<'input> {
DollarSign,
/// =
Assign,
/// &&
LogicalAnd,
#[allow(clippy::upper_case_acronyms)]
EOF,

Expand Down Expand Up @@ -238,6 +240,8 @@ pub enum Token {
Assign,
/// $
DollarSign,
/// &&
LogicalAnd,
#[allow(clippy::upper_case_acronyms)]
EOF,

Expand Down Expand Up @@ -315,6 +319,7 @@ pub fn token_to_borrowed_token(token: &Token) -> BorrowedToken<'_> {
Token::Assign => BorrowedToken::Assign,
Token::Bang => BorrowedToken::Bang,
Token::DollarSign => BorrowedToken::DollarSign,
Token::LogicalAnd => BorrowedToken::LogicalAnd,
Token::EOF => BorrowedToken::EOF,
Token::Invalid(c) => BorrowedToken::Invalid(*c),
Token::Whitespace(s) => BorrowedToken::Whitespace(s),
Expand Down Expand Up @@ -551,6 +556,7 @@ impl fmt::Display for Token {
Token::Assign => write!(f, "="),
Token::Bang => write!(f, "!"),
Token::DollarSign => write!(f, "$"),
Token::LogicalAnd => write!(f, "&&"),
Token::EOF => write!(f, "end of input"),
Token::Invalid(c) => write!(f, "{c}"),
Token::Whitespace(ref s) => write!(f, "{s}"),
Expand Down
9 changes: 9 additions & 0 deletions compiler/noirc_frontend/src/parser/errors.rs
Original file line number Diff line number Diff line change
Expand Up @@ -123,6 +123,8 @@ pub enum ParserErrorReason {
MissingAngleBrackets,
#[error("Expected value, found built-in type `{typ}`")]
ExpectedValueFoundBuiltInType { typ: UnresolvedType },
#[error("Logical and used instead of bitwise and")]
LogicalAnd,
}

/// Represents a parsing error, or a parsing error in the making.
Expand Down Expand Up @@ -321,6 +323,13 @@ impl<'a> From<&'a ParserError> for Diagnostic {
let secondary = "Types that don't start with an identifier need to be surrounded with angle brackets: `<`, `>`".to_string();
Diagnostic::simple_error(format!("{reason}"), secondary, error.location())
}
ParserErrorReason::LogicalAnd => {
let primary = "Noir has no logical-and (&&) operator since short-circuiting is much less efficient when compiling to circuits".to_string();
let secondary =
"Try `&` instead, or use `if` only if you require short-circuiting"
.to_string();
Diagnostic::simple_error(primary, secondary, error.location)
}
other => {
Diagnostic::simple_error(format!("{other}"), String::new(), error.location())
}
Expand Down
15 changes: 15 additions & 0 deletions compiler/noirc_frontend/src/parser/parser/expression.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1836,6 +1836,21 @@ mod tests {
assert_eq!(infix_expr.to_string(), expected_src);
}

#[test]
fn errors_on_logical_and() {
let src = "
1 && 2
^^
";
let (src, span) = get_source_with_error_span(src);
let mut parser = Parser::for_str_with_dummy_file(&src);

// `&&` is recovered from as bitwise `&`, so parsing still produces an expression.
let expr = parser.parse_expression_or_error();
assert_eq!(expr.to_string(), "(1 & 2)");

// Exactly one error is expected, pointing at the `&&` span.
let reason = get_single_error_reason(&parser.errors, span);
assert!(matches!(reason, ParserErrorReason::LogicalAnd));
}

#[test]
fn parses_empty_lambda() {
let src = "|| 1";
Expand Down
4 changes: 4 additions & 0 deletions compiler/noirc_frontend/src/parser/parser/infix.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ use noirc_errors::{Located, Location};

use crate::{
ast::{BinaryOpKind, Expression, ExpressionKind, InfixExpression},
parser::ParserErrorReason,
token::Token,
};

Expand Down Expand Up @@ -49,6 +50,9 @@ impl<'a> Parser<'a> {
None
} else if parser.eat(Token::Ampersand) {
Some(BinaryOpKind::And)
} else if parser.eat(Token::LogicalAnd) {
parser.push_error(ParserErrorReason::LogicalAnd, parser.previous_token_location);
Some(BinaryOpKind::And)
} else {
None
}
Expand Down
51 changes: 49 additions & 2 deletions compiler/noirc_frontend/src/parser/parser/types.rs
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ impl Parser<'_> {
return Some(typ);
}

if let Some(typ) = self.parses_mutable_reference_type() {
if let Some(typ) = self.parse_reference_type() {
return Some(typ);
}

Expand Down Expand Up @@ -371,7 +371,20 @@ impl Parser<'_> {
None
}

fn parses_mutable_reference_type(&mut self) -> Option<UnresolvedTypeData> {
fn parse_reference_type(&mut self) -> Option<UnresolvedTypeData> {
let start_location = self.current_token_location;

// This is '&&', which in this context is a double reference type
if self.eat(Token::LogicalAnd) {
let mutable = self.eat_keyword(Keyword::Mut);
let inner_type =
UnresolvedTypeData::Reference(Box::new(self.parse_type_or_error()), mutable);
let inner_type =
UnresolvedType { typ: inner_type, location: self.location_since(start_location) };
let typ = UnresolvedTypeData::Reference(Box::new(inner_type), false /* mutable */);
return Some(typ);
}

// The `&` may be lexed as a slice start if this is an array or slice type
if self.eat(Token::Ampersand) || self.eat(Token::SliceStart) {
let mutable = self.eat_keyword(Keyword::Mut);
Expand Down Expand Up @@ -627,6 +640,26 @@ mod tests {
assert!(matches!(typ.typ, UnresolvedTypeData::FieldElement));
}

#[test]
fn parses_double_reference_type() {
// `&&T` lexes as a single token but must parse as a reference to a reference.
let typ = parse_type_no_errors("&&Field");
match typ.typ {
UnresolvedTypeData::Reference(inner, false) => {
assert_eq!(inner.typ.to_string(), "&Field");
}
_ => panic!("Expected a reference type"),
}
}

#[test]
fn parses_double_reference_mutable_type() {
// In `&&mut T` the outer reference is immutable; only the inner one is `mut`.
let typ = parse_type_no_errors("&&mut Field");
match typ.typ {
UnresolvedTypeData::Reference(inner, false) => {
assert_eq!(inner.typ.to_string(), "&mut Field");
}
_ => panic!("Expected a reference type"),
}
}

#[test]
fn parses_named_type_no_generics() {
let src = "foo::Bar";
Expand Down Expand Up @@ -670,6 +703,20 @@ mod tests {
assert_eq!(expr.to_string(), "10");
}

#[test]
fn parses_reference_to_array_type() {
// The `&` of `&[...]` may initially lex as a slice-start token; this checks it
// is still parsed as an immutable reference wrapping an array type.
let src = "&[Field; 10]";
let typ = parse_type_no_errors(src);
// Fixed panic-message typo ("typ" -> "type") to match the sibling tests.
let UnresolvedTypeData::Reference(typ, false) = typ.typ else {
panic!("Expected a reference type")
};
let UnresolvedTypeData::Array(expr, typ) = typ.typ else {
panic!("Expected an array type")
};
assert!(matches!(typ.typ, UnresolvedTypeData::FieldElement));
assert_eq!(expr.to_string(), "10");
}

#[test]
fn parses_empty_function_type() {
let src = "fn() -> Field";
Expand Down
Loading