Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
133 changes: 5 additions & 128 deletions compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,7 @@
//!
//! Repeating this algorithm for each block in the function in program order should result in
//! optimizing out most known loads. However, identifying all aliases correctly has been proven
//! undecidable in general (Landi, 1992). So this pass will not always optimize out all loads

Check warning on line 71 in compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs

View workflow job for this annotation

GitHub Actions / Code

Unknown word (Landi)
//! that could theoretically be optimized out. This pass can be performed at any time in the
//! SSA optimization pipeline, although it will be more successful the simpler the program's CFG is.
//! This pass is currently performed several times to enable other passes - most notably being
Expand Down Expand Up @@ -136,15 +136,6 @@
/// Track a value's last load across all blocks.
/// If a value is not used in any more loads we can remove the last store to that value.
last_loads: HashMap<ValueId, (InstructionId, BasicBlockId)>,

/// Track whether a reference was passed into another entry point
/// This is needed to determine whether we can remove a store.
calls_reference_input: HashSet<ValueId>,

/// Track whether a reference has been aliased, and store the respective
/// instruction that aliased that reference.
/// If that store has been set for removal, we can also remove this instruction.
aliased_references: HashMap<ValueId, HashSet<InstructionId>>,
}

impl<'f> PerFunctionContext<'f> {
Expand All @@ -159,8 +150,6 @@
blocks: BTreeMap::new(),
instructions_to_remove: HashSet::default(),
last_loads: HashMap::default(),
calls_reference_input: HashSet::default(),
aliased_references: HashMap::default(),
}
}

Expand All @@ -177,94 +166,6 @@
let references = self.find_starting_references(block);
self.analyze_block(block, references);
}

let mut all_terminator_values = HashSet::default();
let mut per_func_block_params: HashSet<ValueId> = HashSet::default();
for (block_id, _) in self.blocks.iter() {
let block_params = self.inserter.function.dfg.block_parameters(*block_id);
per_func_block_params.extend(block_params.iter());
let terminator = self.inserter.function.dfg[*block_id].unwrap_terminator();
terminator.for_each_value(|value| all_terminator_values.insert(value));
}

// If we never load from an address within a function we can remove all stores to that address.
// This rule does not apply to reference parameters, which we must also check for before removing these stores.
for (_, block) in self.blocks.iter() {
for (store_address, store_instruction) in block.last_stores.iter() {
let store_alias_used = self.is_store_alias_used(
store_address,
block,
&all_terminator_values,
&per_func_block_params,
);

let is_dereference = block
.expressions
.get(store_address)
.is_some_and(|expression| matches!(expression, Expression::Dereference(_)));

if !self.last_loads.contains_key(store_address)
&& !store_alias_used
&& !is_dereference
{
self.instructions_to_remove.insert(*store_instruction);
}
}
}
}

// Extra checks on where a reference can be used aside from a load instruction.
// Even if all loads to a reference have been removed we need to make sure that
// an allocation did not come from an entry point or was passed to an entry point.
//
// Returns `true` when the store at `store_address` must be kept because the
// referenced allocation may escape or be observed outside this block:
// via the function's reference parameters, block parameters, call arguments,
// terminator values, or another aliasing instruction that is still live.
fn is_store_alias_used(
&self,
store_address: &ValueId,
block: &Block,
all_terminator_values: &HashSet<ValueId>,
per_func_block_params: &HashSet<ValueId>,
) -> bool {
// Reference-typed function parameters are escape points: stores to them
// are observable by the caller, so they can never be removed here.
let reference_parameters = self.reference_parameters();

// NOTE(review): `aliases.any(..)` appears to return `Option<bool>`, where
// `None` presumably means the alias set is unknown/top — TODO confirm
// against the `AliasSet` definition. Only an explicit `Some(true)` is
// treated as a proven alias below.
if let Some(expression) = block.expressions.get(store_address) {
if let Some(aliases) = block.aliases.get(expression) {
// The store aliases one of the function's reference parameters.
let allocation_aliases_parameter =
aliases.any(|alias| reference_parameters.contains(&alias));
if allocation_aliases_parameter == Some(true) {
return true;
}

// The store aliases a block parameter, so the reference may flow in
// from (or out to) another block.
let allocation_aliases_parameter =
aliases.any(|alias| per_func_block_params.contains(&alias));
if allocation_aliases_parameter == Some(true) {
return true;
}

// The aliased reference was passed as an argument to a call.
let allocation_aliases_parameter =
aliases.any(|alias| self.calls_reference_input.contains(&alias));
if allocation_aliases_parameter == Some(true) {
return true;
}

// The aliased reference appears in a block terminator (e.g. it is
// returned or passed along a jump).
let allocation_aliases_parameter =
aliases.any(|alias| all_terminator_values.contains(&alias));
if allocation_aliases_parameter == Some(true) {
return true;
}

// Some instruction aliasing this reference is NOT itself scheduled
// for removal (`is_disjoint` means none of the aliasing instructions
// are in `instructions_to_remove`), so the store may still be observed.
let allocation_aliases_parameter = aliases.any(|alias| {
if let Some(alias_instructions) = self.aliased_references.get(&alias) {
self.instructions_to_remove.is_disjoint(alias_instructions)
} else {
false
}
});
if allocation_aliases_parameter == Some(true) {
return true;
}
}
}

// No evidence the stored-to reference escapes: the store is removable
// (subject to the caller's additional last-load / dereference checks).
false
}

/// Collect the input parameters of the function which are of reference type.
Expand Down Expand Up @@ -459,19 +360,6 @@
self.instructions_to_remove.insert(*last_store);
}

if self.inserter.function.dfg.value_is_reference(value) {
if let Some(expression) = references.expressions.get(&value) {
if let Some(aliases) = references.aliases.get(expression) {
aliases.for_each(|alias| {
self.aliased_references
.entry(alias)
.or_default()
.insert(instruction);
});
}
}
}

references.set_known_value(address, value);
// If we see a store to an address, the last load to that address needs to remain.
references.keep_last_load_for(address);
Expand Down Expand Up @@ -526,17 +414,6 @@
}
}
Instruction::Call { arguments, .. } => {
for arg in arguments {
if self.inserter.function.dfg.value_is_reference(*arg) {
if let Some(expression) = references.expressions.get(arg) {
if let Some(aliases) = references.aliases.get(expression) {
aliases.for_each(|alias| {
self.calls_reference_input.insert(alias);
});
}
}
}
}
self.mark_all_unknown(arguments, references);
}
Instruction::MakeArray { elements, typ } => {
Expand Down Expand Up @@ -870,7 +747,7 @@
// return v3, Field 5, Field 6
// }
let ssa = ssa.mem2reg();

println!("{}", ssa);
let main = ssa.main();
assert_eq!(main.reachable_blocks().len(), 2);

Expand All @@ -879,7 +756,7 @@
assert_eq!(count_loads(b1, &main.dfg), 0);

// All stores are removed as there are no loads to the values being stored anywhere in the function.
assert_eq!(count_stores(main.entry_block(), &main.dfg), 0);
assert_eq!(count_stores(main.entry_block(), &main.dfg), 1);
assert_eq!(count_stores(b1, &main.dfg), 0);

// The jmp to b1 should also be a constant 5 now
Expand Down Expand Up @@ -927,18 +804,18 @@
// The first store is not removed as it is used as a nested reference in another store.
// We would need to track whether the store where `v0` is the store value gets removed to know whether
// to remove it.
// The first store in b1 is removed since there is another store to the same reference
// The first store in b2 is removed since there is another store to the same reference
// in the same block, and the store is not needed before the later store.
// The rest of the stores are also removed as no loads are done within any blocks
// to the stored values.
let expected = "
acir(inline) fn main f0 {
b0():
v0 = allocate -> &mut Field
store Field 0 at v0
v2 = allocate -> &mut &mut Field
store v0 at v2
jmp b1()
b1():
store Field 2 at v0
return
}
";
Expand Down
6 changes: 6 additions & 0 deletions test_programs/execution_success/regression_8739/Nargo.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
[package]
name = "regression_8739"
type = "bin"
authors = [""]

[dependencies]
15 changes: 15 additions & 0 deletions test_programs/execution_success/regression_8739/src/main.nr
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
// Entry point of the regression test: simply invokes the unconstrained
// reproducer `func_2` from an unsafe block (required to call unconstrained
// code from a constrained context).
fn main() {
// Safety: testing context
unsafe {
func_2()
}
}
// Minimized reproducer (regression_8739). `a[0]` is a mutable reference to
// `true`, so the `while` loop is entered; `idx_b` stays 0, so the
// `idx_b == 0` branch breaks out on the first iteration. The code exists
// only to exercise reference/aliasing handling in the compiler passes —
// do not simplify it, or it may no longer trigger the original bug.
unconstrained fn func_2() {
let mut a: [&mut bool; 1] = [(&mut true)];
let mut idx_b: u32 = 0;
while (*a[0]) {
if (idx_b == 0) {
break;
}
}
}
Loading