fix(ssa): Check arrays used as a terminator argument in the RC tracker #7165

Merged · 13 commits · Jan 23, 2025
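In short: the old `keep_rcs_of_parameters` flag is removed, and the RC tracker instead marks every array or slice that appears as a terminator argument (e.g. `return v0, v2` where `v0` is an array) as possibly mutated, so its `inc_rc` instructions survive dead-instruction elimination. Below is a minimal, self-contained Rust sketch of that idea; the `Type`, `Terminator`, and `RcTracker` definitions here are simplified stand-ins for illustration, not the real SSA types from `noirc_evaluator`.

use std::collections::HashSet;

// Simplified placeholder for the SSA type system used in this sketch only.
#[allow(dead_code)] // `Slice` is listed for parity with the real pass but not constructed below.
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
enum Type {
    Field,
    Array(usize),
    Slice,
}

// Stand-in for a block terminator: just the types of the values it passes on.
struct Terminator {
    arg_types: Vec<Type>,
}

#[derive(Default)]
struct RcTracker {
    mutated_array_types: HashSet<Type>,
}

impl RcTracker {
    // Record every array/slice type used by the terminator as possibly mutated,
    // so the tracker will not discard the corresponding inc_rc instructions.
    fn mark_terminator_arrays_as_used(&mut self, terminator: &Terminator) {
        for typ in &terminator.arg_types {
            if matches!(typ, Type::Array(_) | Type::Slice) {
                self.mutated_array_types.insert(typ.clone());
            }
        }
    }
}

fn main() {
    let mut tracker = RcTracker::default();
    // Mirrors the new test in the diff: `return v0, v2` with v0: [Field; 2], v2: Field.
    let terminator = Terminator { arg_types: vec![Type::Array(2), Type::Field] };
    tracker.mark_terminator_arrays_as_used(&terminator);
    // The array type is now considered mutated, so its inc_rc instructions are kept.
    assert!(tracker.mutated_array_types.contains(&Type::Array(2)));
    println!("RC types kept alive: {:?}", tracker.mutated_array_types);
}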
64 changes: 21 additions & 43 deletions compiler/noirc_evaluator/src/ssa/opt/die.rs
@@ -26,20 +26,14 @@ impl Ssa {
/// This step should come after the flattening of the CFG and mem2reg.
#[tracing::instrument(level = "trace", skip(self))]
pub(crate) fn dead_instruction_elimination(self) -> Ssa {
self.dead_instruction_elimination_inner(true, false)
self.dead_instruction_elimination_inner(true)
}

fn dead_instruction_elimination_inner(
mut self,
flattened: bool,
keep_rcs_of_parameters: bool,
) -> Ssa {
fn dead_instruction_elimination_inner(mut self, flattened: bool) -> Ssa {
let mut used_global_values: HashSet<_> = self
.functions
.par_iter_mut()
.flat_map(|(_, func)| {
func.dead_instruction_elimination(true, flattened, keep_rcs_of_parameters)
})
.flat_map(|(_, func)| func.dead_instruction_elimination(true, flattened))
.collect();

let globals = &self.functions[&self.main_id].dfg.globals;
@@ -75,7 +69,6 @@ impl Function {
&mut self,
insert_out_of_bounds_checks: bool,
flattened: bool,
keep_rcs_of_parameters: bool,
) -> HashSet<ValueId> {
let mut context = Context { flattened, ..Default::default() };

@@ -91,15 +84,14 @@
self,
*block,
insert_out_of_bounds_checks,
keep_rcs_of_parameters,
);
}

// If we inserted out-of-bounds checks, let's run the pass again with those new
// instructions (we don't want to remove those checks, or instructions that are
// dependencies of those checks)
if inserted_out_of_bounds_checks {
return self.dead_instruction_elimination(false, flattened, keep_rcs_of_parameters);
return self.dead_instruction_elimination(false, flattened);
}

context.remove_rc_instructions(&mut self.dfg);
@@ -148,20 +140,14 @@ impl Context {
function: &mut Function,
block_id: BasicBlockId,
insert_out_of_bounds_checks: bool,
keep_rcs_of_parameters: bool,
) -> bool {
let block = &function.dfg[block_id];
self.mark_terminator_values_as_used(function, block);

let instructions_len = block.instructions().len();

let mut rc_tracker = RcTracker::default();
rc_tracker.mark_terminator_arrays_as_used(function, block);

// During the preprocessing of functions in isolation we don't want to
// get rid of IncRCs arrays that can potentially be mutated outside.
if keep_rcs_of_parameters {
rc_tracker.track_function_parameters(function);
}
let instructions_len = block.instructions().len();

// Indexes of instructions that might be out of bounds.
// We'll remove those, but before that we'll insert bounds checks for them.
@@ -613,17 +599,13 @@ struct RcTracker {
}

impl RcTracker {
/// Mark any array parameters to the function itself as possibly mutated,
/// so we don't get rid of RC instructions just because we don't mutate
/// them in this function, which could potentially cause them to be
/// mutated outside the function without our consent.
fn track_function_parameters(&mut self, function: &Function) {
for parameter in function.parameters() {
let typ = function.dfg.type_of_value(*parameter);
if typ.contains_an_array() {
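/// Mark any array or slice used as a terminator argument as possibly mutated,
/// so that its RC instructions are not removed by this pass.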
fn mark_terminator_arrays_as_used(&mut self, function: &Function, block: &BasicBlock) {
block.unwrap_terminator().for_each_value(|value| {
let typ = function.dfg.type_of_value(value);
if matches!(&typ, Type::Array(_, _) | Type::Slice(_)) {
self.mutated_array_types.insert(typ);
}
}
});
}

fn track_inc_rcs_to_remove(&mut self, instruction_id: InstructionId, function: &Function) {
@@ -686,6 +668,8 @@ impl RcTracker {
}
Instruction::Call { arguments, .. } => {
// Treat any array-type arguments to calls as possible sources of mutation.
// During the preprocessing of functions in isolation we don't want to
// get rid of IncRCs of arrays that can potentially be mutated outside.
for arg in arguments {
let typ = function.dfg.type_of_value(*arg);
if matches!(&typ, Type::Array(..) | Type::Slice(..)) {
@@ -949,7 +933,7 @@ mod test {
}

#[test]
fn not_remove_inc_rcs_for_input_parameters() {
fn remove_inc_rcs_that_are_never_mutably_borrowed() {
let src = "
brillig(inline) fn main f0 {
b0(v0: [Field; 2]):
@@ -971,21 +955,17 @@
let expected = "
brillig(inline) fn main f0 {
b0(v0: [Field; 2]):
inc_rc v0
v2 = array_get v0, index u32 0 -> Field
inc_rc v0
return v2
}
";

// We want to be able to switch this on during preprocessing.
let keep_rcs_of_parameters = true;
let ssa = ssa.dead_instruction_elimination_inner(true, keep_rcs_of_parameters);
let ssa = ssa.dead_instruction_elimination();
assert_normalized_ssa_equals(ssa, expected);
}

#[test]
fn remove_inc_rcs_that_are_never_mutably_borrowed() {
fn do_not_remove_inc_rcs_for_arrays_in_terminator() {
let src = "
brillig(inline) fn main f0 {
b0(v0: [Field; 2]):
@@ -994,21 +974,19 @@
inc_rc v0
v2 = array_get v0, index u32 0 -> Field
inc_rc v0
return v2
return v0, v2
}
";

let ssa = Ssa::from_str(src).unwrap();
let main = ssa.main();

// The instruction count never includes the terminator instruction
assert_eq!(main.dfg[main.entry_block()].instructions().len(), 5);

let expected = "
brillig(inline) fn main f0 {
b0(v0: [Field; 2]):
inc_rc v0
v2 = array_get v0, index u32 0 -> Field
return v2
inc_rc v0
return v0, v2
}
";

@@ -1075,7 +1053,7 @@ mod test {
let ssa = Ssa::from_str(src).unwrap();

// Even though these ACIR functions only have 1 block, we have not inlined and flattened anything yet.
let ssa = ssa.dead_instruction_elimination_inner(false, false);
let ssa = ssa.dead_instruction_elimination_inner(false);

let expected = "
acir(inline) fn main f0 {
2 changes: 1 addition & 1 deletion compiler/noirc_evaluator/src/ssa/opt/preprocess_fns.rs
@@ -58,7 +58,7 @@ impl Ssa {
// Try to reduce the number of blocks.
function.simplify_function();
// Remove leftover instructions.
function.dead_instruction_elimination(true, false, true);
function.dead_instruction_elimination(true, false);

// Put it back into the SSA, so the next functions can pick it up.
self.functions.insert(id, function);
2 changes: 1 addition & 1 deletion compiler/noirc_evaluator/src/ssa/opt/unrolling.rs
@@ -1035,7 +1035,7 @@
simplify_between_unrolls(&mut temp);

// This is to try to prevent hitting an ICE.
temp.dead_instruction_elimination(false, true, false);
temp.dead_instruction_elimination(false, true);

convert_ssa_function(&temp, false, globals).byte_code.len()
}
@@ -1061,7 +1061,7 @@
use super::{is_new_size_ok, BoilerplateStats, Loops};

/// Tries to unroll all loops in each SSA function once, calling the `Function` directly,
/// bypassing the iterative loop done by the SSA which does further optimisations.
///
/// If any loop cannot be unrolled, it is left as-is or in a partially unrolled state.
fn try_unroll_loops(mut ssa: Ssa) -> (Ssa, Vec<RuntimeError>) {
6 changes: 3 additions & 3 deletions tooling/nargo_cli/src/cli/info_cmd.rs
@@ -13,11 +13,11 @@ use prettytable::{row, table, Row};
use rayon::prelude::*;
use serde::Serialize;

use crate::{cli::fs::inputs::read_inputs_from_file, errors::CliError};
use crate::errors::CliError;

use super::{
compile_cmd::{compile_workspace_full, get_target_width},
fs::program::read_program_from_file,
fs::{inputs::read_inputs_from_file, program::read_program_from_file},
NargoConfig, PackageOptions,
};

@@ -243,7 +243,7 @@ fn profile_brillig_execution(
) -> Result<Vec<ProgramInfo>, CliError> {
let mut program_info = Vec::new();
for (package, program_artifact) in binary_packages.iter() {
// Parse the initial witness values from Prover.toml
// Parse the initial witness values from Prover.toml or Prover.json
let (inputs_map, _) = read_inputs_from_file(
&package.root_dir,
prover_name,