From 87294f4339a1fdd471fb306a563ead47f8539e4c Mon Sep 17 00:00:00 2001 From: vezenovm Date: Thu, 1 Feb 2024 04:15:08 +0000 Subject: [PATCH 01/17] check for outer_block stores in Jmp blocks --- .../noirc_evaluator/src/ssa/opt/flatten_cfg.rs | 2 +- .../src/ssa/opt/flatten_cfg/value_merger.rs | 5 ++++- .../execution_success/regression_4202/Nargo.toml | 7 +++++++ .../execution_success/regression_4202/src/main.nr | 14 ++++++++++++++ 4 files changed, 26 insertions(+), 2 deletions(-) create mode 100644 test_programs/execution_success/regression_4202/Nargo.toml create mode 100644 test_programs/execution_success/regression_4202/src/main.nr diff --git a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs index 1059994b9be..3f840a7c883 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs @@ -262,7 +262,7 @@ impl<'f> Context<'f> { /// Returns the last block to be inlined. This is either the return block of the function or, /// if self.conditions is not empty, the end block of the most recent condition. fn handle_terminator(&mut self, block: BasicBlockId) -> BasicBlockId { - if let TerminatorInstruction::JmpIf { .. } = + if let TerminatorInstruction::JmpIf { .. } | TerminatorInstruction::Jmp { .. } = self.inserter.function.dfg[block].unwrap_terminator() { // Find stores in the outer block and insert into the `outer_block_stores` map. diff --git a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs index 446560f45f1..a34d01983ee 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs @@ -257,7 +257,6 @@ impl<'a> ValueMerger<'a> { let store_value = outer_block_stores .get(address) .expect("ICE: load in merger should have store from outer block"); - if let Some(len) = self.slice_sizes.get(store_value) { return *len; } @@ -269,6 +268,10 @@ impl<'a> ValueMerger<'a> { store.new_value } else { + // Still panics here + // let store_value = outer_block_stores + // .get(address) + // .expect("ICE: load in merger should have store from outer block"); *store_value }; diff --git a/test_programs/execution_success/regression_4202/Nargo.toml b/test_programs/execution_success/regression_4202/Nargo.toml new file mode 100644 index 00000000000..acfba12dd4f --- /dev/null +++ b/test_programs/execution_success/regression_4202/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "regression_4202" +type = "bin" +authors = [""] +compiler_version = ">=0.23.0" + +[dependencies] \ No newline at end of file diff --git a/test_programs/execution_success/regression_4202/src/main.nr b/test_programs/execution_success/regression_4202/src/main.nr new file mode 100644 index 00000000000..0e79da84d20 --- /dev/null +++ b/test_programs/execution_success/regression_4202/src/main.nr @@ -0,0 +1,14 @@ +fn main() { + let mut slice1: [u32] = [1, 2, 3, 4]; + if slice1[0] == 3 { + slice1[1] = 4; + } + + if slice1[1] == 5 { + slice1[3] = 6; + } + + for i in 0..4 { + assert(slice1[i] == i + 1); + } +} From 3ed2435d0a539000163286a5b81d93205d0d5a3f Mon Sep 17 00:00:00 2001 From: vezenovm Date: Thu, 1 Feb 2024 04:15:19 +0000 Subject: [PATCH 02/17] remove old comments' --- .../noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs 
b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs index a34d01983ee..446560f45f1 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs @@ -257,6 +257,7 @@ impl<'a> ValueMerger<'a> { let store_value = outer_block_stores .get(address) .expect("ICE: load in merger should have store from outer block"); + if let Some(len) = self.slice_sizes.get(store_value) { return *len; } @@ -268,10 +269,6 @@ impl<'a> ValueMerger<'a> { store.new_value } else { - // Still panics here - // let store_value = outer_block_stores - // .get(address) - // .expect("ICE: load in merger should have store from outer block"); *store_value }; From 3a05664bca6ac7c8d4a602f67ecedf0777599e7a Mon Sep 17 00:00:00 2001 From: vezenovm Date: Thu, 1 Feb 2024 04:15:28 +0000 Subject: [PATCH 03/17] cargo fmt --- compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs index 3f840a7c883..29d3d6cbd2d 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs @@ -262,7 +262,7 @@ impl<'f> Context<'f> { /// Returns the last block to be inlined. This is either the return block of the function or, /// if self.conditions is not empty, the end block of the most recent condition. fn handle_terminator(&mut self, block: BasicBlockId) -> BasicBlockId { - if let TerminatorInstruction::JmpIf { .. } | TerminatorInstruction::Jmp { .. } = + if let TerminatorInstruction::JmpIf { .. } | TerminatorInstruction::Jmp { .. } = self.inserter.function.dfg[block].unwrap_terminator() { // Find stores in the outer block and insert into the `outer_block_stores` map. From 67e63db89eda73184afb1e3edb1c2aa3b1b3a594 Mon Sep 17 00:00:00 2001 From: vezenovm Date: Thu, 1 Feb 2024 21:15:31 +0000 Subject: [PATCH 04/17] use capacity tracker rather than backtracking in flattening, still a draft --- compiler/noirc_evaluator/src/errors.rs | 2 +- .../noirc_evaluator/src/ssa/acir_gen/mod.rs | 90 +++++- .../noirc_evaluator/src/ssa/ir/instruction.rs | 4 + .../src/ssa/ir/instruction/call.rs | 11 +- compiler/noirc_evaluator/src/ssa/ir/types.rs | 3 +- .../src/ssa/opt/flatten_cfg.rs | 60 +++- .../ssa/opt/flatten_cfg/capacity_tracker.rs | 306 ++++++++++++++++++ .../src/ssa/opt/flatten_cfg/value_merger.rs | 45 ++- .../slice_dynamic_index/src/main.nr | 24 +- 9 files changed, 506 insertions(+), 39 deletions(-) create mode 100644 compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs diff --git a/compiler/noirc_evaluator/src/errors.rs b/compiler/noirc_evaluator/src/errors.rs index 73b6e671bd5..c3bf40cc659 100644 --- a/compiler/noirc_evaluator/src/errors.rs +++ b/compiler/noirc_evaluator/src/errors.rs @@ -163,7 +163,7 @@ impl RuntimeError { _ => { let message = self.to_string(); let location = - self.call_stack().back().expect("Expected RuntimeError to have a location"); + self.call_stack().back().unwrap_or_else(|| panic!("Expected RuntimeError to have a location. 
Error message: {message}")); Diagnostic::simple_error(message, String::new(), location.span) } diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs index 120c5bf25df..38f83cc1193 100644 --- a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs @@ -621,11 +621,11 @@ impl Context { instruction: InstructionId, dfg: &DataFlowGraph, index: ValueId, - array: ValueId, + array_id: ValueId, store_value: Option, ) -> Result { let index_const = dfg.get_numeric_constant(index); - let value_type = dfg.type_of_value(array); + let value_type = dfg.type_of_value(array_id); // Compiler sanity checks assert!( !value_type.is_nested_slice(), @@ -635,7 +635,7 @@ impl Context { unreachable!("ICE: expected array or slice type"); }; - match self.convert_value(array, dfg) { + match self.convert_value(array_id, dfg) { AcirValue::Var(acir_var, _) => { return Err(RuntimeError::InternalError(InternalError::Unexpected { expected: "an array value".to_string(), @@ -659,7 +659,7 @@ impl Context { }; if self.acir_context.is_constant_one(&self.current_side_effects_enabled_var) { // Report the error if side effects are enabled. - if index >= array_size { + if index >= array_size && !matches!(value_type, Type::Slice(_)) { let call_stack = self.acir_context.get_call_stack(); return Err(RuntimeError::IndexOutOfBounds { index, @@ -672,7 +672,13 @@ impl Context { let store_value = self.convert_value(store_value, dfg); AcirValue::Array(array.update(index, store_value)) } - None => array[index].clone(), + None => { + if index >= array_size { + self.construct_dummy_slice_value(instruction, dfg, array_id) + } else { + array[index].clone() + } + } }; self.define_result(dfg, instruction, value); @@ -684,14 +690,86 @@ impl Context { self.define_result(dfg, instruction, array[index].clone()); return Ok(true); } + // If there is a non constant predicate and the index is out of range for a slice we should still directly create dummy data + // These situations should only happen during flattening of slices when the same slice after an if statement + // would have different lengths depending upon the condition + else if index >= array_size + && value_type.contains_slice_element() + && store_value.is_none() + { + if index >= array_size { + let value = + self.construct_dummy_slice_value(instruction, dfg, array_id); + + self.define_result(dfg, instruction, value); + return Ok(true); + } else { + return Ok(false); + } + } + } + } + AcirValue::DynamicArray(AcirDynamicArray { len, .. 
}) => { + if let Some(index_const) = index_const { + let index = match index_const.try_to_u64() { + Some(index_const) => index_const as usize, + None => { + let call_stack = self.acir_context.get_call_stack(); + return Err(RuntimeError::TypeConversion { + from: "array index".to_string(), + into: "u64".to_string(), + call_stack, + }); + } + }; + + if index >= len { + let value = self.construct_dummy_slice_value(instruction, dfg, array_id); + + self.define_result(dfg, instruction, value); + return Ok(true); + } } + + return Ok(false); } - AcirValue::DynamicArray(_) => (), + // AcirValue::DynamicArray(_) => (), }; Ok(false) } + fn construct_dummy_slice_value( + &mut self, + instruction: InstructionId, + dfg: &DataFlowGraph, + array_id: ValueId, + ) -> AcirValue { + let results = dfg.instruction_results(instruction); + let res_typ = dfg.type_of_value(results[0]); + self.construct_dummy_array_value(&res_typ) + } + + fn construct_dummy_array_value(&mut self, ssa_type: &Type) -> AcirValue { + match ssa_type.clone() { + Type::Numeric(numeric_type) => { + let zero = self.acir_context.add_constant(FieldElement::zero()); + let typ = AcirType::NumericType(numeric_type); + AcirValue::Var(zero, typ) + } + Type::Array(element_types, len) => { + let mut values = Vector::new(); + for _ in 0..len { + for typ in element_types.as_ref() { + values.push_back(self.construct_dummy_array_value(typ)); + } + } + AcirValue::Array(values) + } + _ => unreachable!("ICE - expected an array or numeric type"), + } + } + /// We need to properly setup the inputs for array operations in ACIR. /// From the original SSA values we compute the following AcirVars: /// - new_index is the index of the array. ACIR memory operations work with a flat memory, so we fully flattened the specified index diff --git a/compiler/noirc_evaluator/src/ssa/ir/instruction.rs b/compiler/noirc_evaluator/src/ssa/ir/instruction.rs index 331a02a6974..de428d2585c 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/instruction.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/instruction.rs @@ -246,6 +246,10 @@ impl Instruction { bin.operator != BinaryOp::Div } Cast(_, _) | Truncate { .. } | Not(_) | ArrayGet { .. } | ArraySet { .. } => true, + // Cast(_, _) | Truncate { .. } | Not(_) => true, + + // These are not pure when working with nested slices based upon witness conditions + // ArrayGet { .. } | ArraySet { .. } => false, // These either have side-effects or interact with memory Constrain(..) diff --git a/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs b/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs index 0178ae9dba1..f402902cb83 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs @@ -1,3 +1,4 @@ +use fxhash::FxHashMap as HashMap; use std::{collections::VecDeque, rc::Rc}; use acvm::{acir::BlackBoxFunc, BlackBoxResolutionError, FieldElement}; @@ -319,7 +320,11 @@ fn simplify_slice_push_back( for elem in &arguments[2..] 
{ slice.push_back(*elem); } + let slice_size = slice.len(); + let element_size = element_type.element_size(); let new_slice = dfg.make_array(slice, element_type); + // dbg!(slice_size); + // dbg!(element_size); let set_last_slice_value_instr = Instruction::ArraySet { array: new_slice, index: arguments[0], value: arguments[2] }; @@ -327,7 +332,11 @@ fn simplify_slice_push_back( .insert_instruction_and_results(set_last_slice_value_instr, block, None, call_stack) .first(); - let mut value_merger = ValueMerger::new(dfg, block, None, None); + let mut slice_sizes = HashMap::default(); + slice_sizes.insert(set_last_slice_value, (slice_size / element_size, vec![])); + slice_sizes.insert(new_slice, (slice_size / element_size, vec![])); + + let mut value_merger = ValueMerger::new(dfg, block, None, None, &mut slice_sizes); let new_slice = value_merger.merge_values( len_not_equals_capacity, len_equals_capacity, diff --git a/compiler/noirc_evaluator/src/ssa/ir/types.rs b/compiler/noirc_evaluator/src/ssa/ir/types.rs index f412def1e76..fcd48281a72 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/types.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/types.rs @@ -120,7 +120,8 @@ impl Type { } Type::Slice(_) => true, Type::Numeric(_) => false, - Type::Reference(_) => false, + Type::Reference(element) => element.contains_slice_element(), + // Type::Reference(_) => false, Type::Function => false, } } diff --git a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs index 29d3d6cbd2d..0371ee2a26d 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs @@ -152,8 +152,10 @@ use crate::ssa::{ }; mod branch_analysis; +mod capacity_tracker; pub(crate) mod value_merger; +use capacity_tracker::SliceCapacityTracker; use value_merger::ValueMerger; impl Ssa { @@ -209,6 +211,8 @@ struct Context<'f> { /// condition. If we are under multiple conditions (a nested if), the topmost condition is /// the most recent condition combined with all previous conditions via `And` instructions. 
conditions: Vec<(BasicBlockId, ValueId)>, + + slice_sizes: HashMap)>, } pub(crate) struct Store { @@ -240,6 +244,7 @@ fn flatten_function_cfg(function: &mut Function) { branch_ends, conditions: Vec::new(), outer_block_stores: HashMap::default(), + slice_sizes: HashMap::default(), }; context.flatten(); } @@ -279,6 +284,20 @@ impl<'f> Context<'f> { } } + // As we recursively flatten inner blocks, we need to track the slice information + // for the outer block before we start recursively inlining + let outer_block_instructions = self.inserter.function.dfg[block].instructions(); + let mut capacity_tracker = SliceCapacityTracker::new(&self.inserter.function.dfg); + for instruction in outer_block_instructions { + let results = self.inserter.function.dfg.instruction_results(*instruction); + let instruction = &self.inserter.function.dfg[*instruction]; + capacity_tracker.collect_slice_information( + instruction, + &mut self.slice_sizes, + results.to_vec(), + ); + } + match self.inserter.function.dfg[block].unwrap_terminator() { TerminatorInstruction::JmpIf { condition, then_destination, else_destination } => { let old_condition = *condition; @@ -494,11 +513,20 @@ impl<'f> Context<'f> { }); let block = self.inserter.function.entry_block(); + + // Make sure we have tracked the slice sizes of any block arguments + let capacity_tracker = SliceCapacityTracker::new(&self.inserter.function.dfg); + for (then_arg, else_arg) in args.iter() { + capacity_tracker.compute_slice_sizes(*then_arg, &mut self.slice_sizes); + capacity_tracker.compute_slice_sizes(*else_arg, &mut self.slice_sizes); + } + let mut value_merger = ValueMerger::new( &mut self.inserter.function.dfg, block, Some(&self.store_values), Some(&self.outer_block_stores), + &mut self.slice_sizes, ); // Cannot include this in the previous vecmap since it requires exclusive access to self @@ -548,6 +576,7 @@ impl<'f> Context<'f> { block, Some(&self.store_values), Some(&self.outer_block_stores), + &mut self.slice_sizes, ); // Merging must occur in a separate loop as we cannot borrow `self` as mutable while `value_merger` does @@ -571,6 +600,16 @@ impl<'f> Context<'f> { .insert(address, Store { old_value: *old_value, new_value: value }); } } + + // Collect any potential slice information on the stores we are merging + for (address, (_, _, _)) in &new_map { + let value = new_values[address]; + let address = *address; + let instruction = Instruction::Store { address, value }; + + let mut capacity_tracker = SliceCapacityTracker::new(&self.inserter.function.dfg); + capacity_tracker.collect_slice_information(&instruction, &mut self.slice_sizes, vec![]); + } } fn remember_store(&mut self, address: ValueId, new_value: ValueId) { @@ -582,6 +621,11 @@ impl<'f> Context<'f> { let load_type = Some(vec![self.inserter.function.dfg.type_of_value(new_value)]); let old_value = self.insert_instruction_with_typevars(load, load_type).first(); + let mut capacity_tracker = SliceCapacityTracker::new(&self.inserter.function.dfg); + // Need this or else we will be missing slice stores that we wish to merge + let store = Instruction::Store { address: old_value, value: new_value }; + capacity_tracker.collect_slice_information(&store, &mut self.slice_sizes, vec![]); + self.store_values.insert(address, Store { old_value, new_value }); } } @@ -602,8 +646,16 @@ impl<'f> Context<'f> { // unnecessary, when removing it actually causes an aliasing/mutability error. 
        let instructions = self.inserter.function.dfg[destination].instructions().to_vec();
 
-        for instruction in instructions {
-            self.push_instruction(instruction);
+        for instruction in instructions.iter() {
+            // self.push_instruction(instruction);
+            let results = self.push_instruction(*instruction);
+            let (instruction, _) = self.inserter.map_instruction(*instruction);
+            let mut capacity_tracker = SliceCapacityTracker::new(&self.inserter.function.dfg);
+            capacity_tracker.collect_slice_information(
+                &instruction,
+                &mut self.slice_sizes,
+                results,
+            );
         }
 
         self.handle_terminator(destination)
@@ -615,7 +667,7 @@ impl<'f> Context<'f> {
     /// As a result, the instruction that will be pushed will actually be a new instruction
     /// with a different InstructionId from the original. The results of the given instruction
     /// will also be mapped to the results of the new instruction.
-    fn push_instruction(&mut self, id: InstructionId) {
+    fn push_instruction(&mut self, id: InstructionId) -> Vec<ValueId> {
         let (instruction, call_stack) = self.inserter.map_instruction(id);
         let instruction = self.handle_instruction_side_effects(instruction, call_stack.clone());
         let is_allocate = matches!(instruction, Instruction::Allocate);
@@ -628,6 +680,8 @@ impl<'f> Context<'f> {
         if is_allocate {
             self.local_allocations.insert(results.first());
         }
+
+        results.results().into_owned()
     }
 
     /// If we are currently in a branch, we need to modify constrain instructions
diff --git a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs
new file mode 100644
index 00000000000..82cef63854a
--- /dev/null
+++ b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs
@@ -0,0 +1,306 @@
+use crate::ssa::ir::{
+    dfg::DataFlowGraph,
+    instruction::{Instruction, Intrinsic},
+    types::Type,
+    value::{Value, ValueId},
+};
+
+use fxhash::FxHashMap as HashMap;
+
+pub(crate) struct SliceCapacityTracker<'a> {
+    dfg: &'a DataFlowGraph,
+    /// Maps SSA array values representing a slice's contents to its updated array value
+    /// after an array set or a slice intrinsic operation.
+    /// Maps original value -> result
+    mapped_slice_values: HashMap<ValueId, ValueId>,
+
+    /// Maps an updated array value following an array operation to its previous value.
+    /// When used in conjunction with `mapped_slice_values` we form a two way map of all array
+    /// values being used in array operations.
+    /// Maps result -> original value
+    slice_parents: HashMap<ValueId, ValueId>,
+
+    // Values containing nested slices to be replaced
+    slice_values: Vec<ValueId>,
+}
+
+impl<'a> SliceCapacityTracker<'a> {
+    pub(crate) fn new(dfg: &'a DataFlowGraph) -> Self {
+        SliceCapacityTracker {
+            dfg,
+            mapped_slice_values: HashMap::default(),
+            slice_parents: HashMap::default(),
+            slice_values: Vec::new(),
+        }
+    }
+
+    /// Determine how the slice sizes map needs to be updated according to the provided instruction.
+    pub(crate) fn collect_slice_information(
+        &mut self,
+        instruction: &Instruction,
+        slice_sizes: &mut HashMap<ValueId, (usize, Vec<ValueId>)>,
+        results: Vec<ValueId>,
+    ) {
+        match instruction {
+            Instruction::ArrayGet { array, .. } => {
+                let array_typ = self.dfg.type_of_value(*array);
+                let array_value = &self.dfg[*array];
+                // If we have an SSA value containing nested slices we should mark it
+                // as a slice that potentially requires to be filled with dummy data.
+                if matches!(array_value, Value::Array { ..
}) && array_typ.contains_slice_element() + { + self.slice_values.push(*array); + // Initial insertion into the slice sizes map + // Any other insertions should only occur if the value is already + // a part of the map. + self.compute_slice_sizes(*array, slice_sizes); + } + + let res_typ = self.dfg.type_of_value(results[0]); + if res_typ.contains_slice_element() { + if let Some(inner_sizes) = slice_sizes.get_mut(array) { + // Include the result in the parent array potential children + // If the result has internal slices and is called in an array set + // we could potentially have a new larger slice which we need to account for + inner_sizes.1.push(results[0]); + self.slice_parents.insert(results[0], *array); + + let inner_sizes_iter = inner_sizes.1.clone(); + for slice_value in inner_sizes_iter { + let inner_slice = slice_sizes.get(&slice_value).unwrap_or_else(|| { + panic!("ICE: should have inner slice set for {slice_value}") + }); + let previous_res_size = slice_sizes.get(&results[0]); + if let Some(previous_res_size) = previous_res_size { + if inner_slice.0 > previous_res_size.0 { + slice_sizes.insert(results[0], inner_slice.clone()); + } + } else { + slice_sizes.insert(results[0], inner_slice.clone()); + } + let resolved_result = self.resolve_slice_value(results[0]); + if resolved_result != slice_value { + self.mapped_slice_values.insert(slice_value, results[0]); + } + } + } + } + } + Instruction::ArraySet { array, value, .. } => { + let array_typ = self.dfg.type_of_value(*array); + let array_value = &self.dfg[*array]; + // If we have an SSA value containing nested slices we should mark it + // as a slice that potentially requires to be filled with dummy data. + if matches!(array_value, Value::Array { .. }) && array_typ.contains_slice_element() + { + self.slice_values.push(*array); + // Initial insertion into the slice sizes map + // Any other insertions should only occur if the value is already + // a part of the map. + self.compute_slice_sizes(*array, slice_sizes); + } + + let value_typ = self.dfg.type_of_value(*value); + if value_typ.contains_slice_element() { + self.compute_slice_sizes(*value, slice_sizes); + + let inner_sizes = slice_sizes.get_mut(array).expect("ICE expected slice sizes"); + inner_sizes.1.push(*value); + } + + if let Some(inner_sizes) = slice_sizes.get_mut(array) { + let inner_sizes = inner_sizes.clone(); + + slice_sizes.insert(results[0], inner_sizes); + + if let Some(fetched_val) = self.mapped_slice_values.get(&results[0]) { + if *fetched_val != *array { + self.mapped_slice_values.insert(*array, results[0]); + } + } else if *array != results[0] { + self.mapped_slice_values.insert(*array, results[0]); + } + + self.slice_parents.insert(results[0], *array); + } + } + Instruction::Call { func, arguments } => { + let func = &self.dfg[*func]; + if let Value::Intrinsic(intrinsic) = func { + let (argument_index, result_index) = match intrinsic { + Intrinsic::SlicePushBack + | Intrinsic::SlicePushFront + | Intrinsic::SlicePopBack + | Intrinsic::SliceInsert + | Intrinsic::SliceRemove => (1, 1), + // `pop_front` returns the popped element, and then the respective slice. + // This means in the case of a slice with structs, the result index of the popped slice + // will change depending on the number of elements in the struct. + // For example, a slice with four elements will look as such in SSA: + // v3, v4, v5, v6, v7, v8 = call slice_pop_front(v1, v2) + // where v7 is the slice length and v8 is the popped slice itself. 
+ Intrinsic::SlicePopFront => (1, results.len() - 1), + _ => return, + }; + let slice_contents = arguments[argument_index]; + match intrinsic { + Intrinsic::SlicePushBack + | Intrinsic::SlicePushFront + | Intrinsic::SliceInsert => { + for arg in &arguments[(argument_index + 1)..] { + let element_typ = self.dfg.type_of_value(*arg); + if element_typ.contains_slice_element() { + self.slice_values.push(*arg); + self.compute_slice_sizes(*arg, slice_sizes); + } + } + if let Some(inner_sizes) = slice_sizes.get_mut(&slice_contents) { + inner_sizes.0 += 1; + + let inner_sizes = inner_sizes.clone(); + slice_sizes.insert(results[result_index], inner_sizes); + + if let Some(fetched_val) = + self.mapped_slice_values.get(&results[result_index]) + { + if *fetched_val != slice_contents { + self.mapped_slice_values + .insert(slice_contents, results[result_index]); + } + } else if slice_contents != results[result_index] { + self.mapped_slice_values + .insert(slice_contents, results[result_index]); + } + + self.slice_parents.insert(results[result_index], slice_contents); + } + } + Intrinsic::SlicePopBack + | Intrinsic::SliceRemove + | Intrinsic::SlicePopFront => { + // We do not decrement the size on intrinsics that could remove values from a slice. + // This is because we could potentially go back to the smaller slice and not fill in dummies. + // This pass should be tracking the potential max that a slice ***could be*** + if let Some(inner_sizes) = slice_sizes.get(&slice_contents) { + let inner_sizes = inner_sizes.clone(); + slice_sizes.insert(results[result_index], inner_sizes); + + if let Some(fetched_val) = + self.mapped_slice_values.get(&results[result_index]) + { + if *fetched_val != slice_contents { + self.mapped_slice_values + .insert(slice_contents, results[result_index]); + } + } else if slice_contents != results[result_index] { + self.mapped_slice_values + .insert(slice_contents, results[result_index]); + } + + self.slice_parents.insert(results[result_index], slice_contents); + } + } + _ => {} + } + } + } + Instruction::Store { address, value } => { + let value_typ = self.dfg.type_of_value(*value); + if value_typ.contains_slice_element() { + self.compute_slice_sizes(*value, slice_sizes); + + let mut inner_sizes = slice_sizes.get(value).unwrap_or_else(|| { + panic!("ICE: should have inner slice set for value {value} being stored at {address}") + }).clone(); + + if let Some(previous_store) = slice_sizes.get(address) { + inner_sizes.1.append(&mut previous_store.1.clone()); + } + + slice_sizes.insert(*address, inner_sizes); + } + } + Instruction::Load { address } => { + let load_typ = self.dfg.type_of_value(*address); + if load_typ.contains_slice_element() { + let result = results[0]; + let mut inner_sizes = slice_sizes.get(address).unwrap_or_else(|| { + panic!("ICE: should have inner slice set at addres {address} being loaded into {result}") + }).clone(); + + if let Some(previous_load_value) = slice_sizes.get(&result) { + inner_sizes.1.append(&mut previous_load_value.1.clone()); + } + + slice_sizes.insert(result, inner_sizes); + } + } + _ => {} + } + } + + // This methods computes a map representing a nested slice. + // The method also automatically computes the given max slice size + // at each depth of the recursive type. 
+    // For example if we had a next slice
+    pub(crate) fn compute_slice_sizes(
+        &self,
+        array_id: ValueId,
+        slice_sizes: &mut HashMap<ValueId, (usize, Vec<ValueId>)>,
+    ) {
+        if let Value::Array { array, typ } = &self.dfg[array_id].clone() {
+            if let Type::Slice(_) = typ {
+                let element_size = typ.element_size();
+                let len = array.len() / element_size;
+                let mut slice_value = (len, vec![]);
+                for value in array {
+                    let typ = self.dfg.type_of_value(*value);
+                    if let Type::Slice(_) = typ {
+                        slice_value.1.push(*value);
+                        self.compute_slice_sizes(*value, slice_sizes);
+                    }
+                }
+                // Mark the correct max size based upon an array values internal structure
+                let mut max_size = 0;
+                for inner_value in slice_value.1.iter() {
+                    let inner_slice =
+                        slice_sizes.get(inner_value).expect("ICE: should have inner slice set");
+                    if inner_slice.0 > max_size {
+                        max_size = inner_slice.0;
+                    }
+                }
+                for inner_value in slice_value.1.iter() {
+                    let inner_slice =
+                        slice_sizes.get_mut(inner_value).expect("ICE: should have inner slice set");
+                    if inner_slice.0 < max_size {
+                        inner_slice.0 = max_size;
+                    }
+                }
+                slice_sizes.insert(array_id, slice_value);
+            }
+        }
+    }
+
+    /// Resolves a ValueId representing a slice's contents to its updated value.
+    /// If there is no resolved value for the supplied value, the value which
+    /// was passed to the method is returned.
+    fn resolve_slice_value(&self, array_id: ValueId) -> ValueId {
+        let val = match self.mapped_slice_values.get(&array_id) {
+            Some(value) => self.resolve_slice_value(*value),
+            None => array_id,
+        };
+        val
+    }
+
+    pub(crate) fn constant_nested_slices(&mut self) -> Vec<ValueId> {
+        std::mem::take(&mut self.slice_values)
+    }
+
+    pub(crate) fn slice_parents_map(&mut self) -> HashMap<ValueId, ValueId> {
+        std::mem::take(&mut self.slice_parents)
+    }
+
+    pub(crate) fn slice_values_map(&mut self) -> HashMap<ValueId, ValueId> {
+        std::mem::take(&mut self.mapped_slice_values)
+    }
+}
diff --git a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs
index 446560f45f1..8140c1d23be 100644
--- a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs
+++ b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs
@@ -16,7 +16,9 @@ pub(crate) struct ValueMerger<'a> {
     block: BasicBlockId,
     store_values: Option<&'a HashMap<ValueId, Store>>,
     outer_block_stores: Option<&'a HashMap<ValueId, ValueId>>,
-    slice_sizes: HashMap<ValueId, usize>,
+    old_slice_sizes: HashMap<ValueId, usize>,
+    // Maps SSA array values to their size and any respective nested slice children it may have
+    slice_sizes: &'a mut HashMap<ValueId, (usize, Vec<ValueId>)>,
 }
 
 impl<'a> ValueMerger<'a> {
@@ -25,13 +27,15 @@ impl<'a> ValueMerger<'a> {
         block: BasicBlockId,
         store_values: Option<&'a HashMap<ValueId, Store>>,
         outer_block_stores: Option<&'a HashMap<ValueId, ValueId>>,
+        slice_sizes: &'a mut HashMap<ValueId, (usize, Vec<ValueId>)>,
     ) -> Self {
         ValueMerger {
             dfg,
             block,
             store_values,
             outer_block_stores,
-            slice_sizes: HashMap::default(),
+            slice_sizes: slice_sizes,
+            old_slice_sizes: HashMap::default(),
         }
     }
 
@@ -185,13 +189,21 @@ impl<'a> ValueMerger<'a> {
         };
 
         let then_len = self.get_slice_length(then_value_id);
-        self.slice_sizes.insert(then_value_id, then_len);
+        self.old_slice_sizes.insert(then_value_id, then_len);
 
         let else_len = self.get_slice_length(else_value_id);
-        self.slice_sizes.insert(else_value_id, else_len);
+        self.old_slice_sizes.insert(else_value_id, else_len);
 
-        let len = then_len.max(else_len);
+        // let then_len = self.slice_sizes.get(&then_value_id).unwrap_or_else(|| {
+        //     panic!("ICE: Merging values during flattening encountered slice {then_value_id} without a preset size");
+        // }).0;
+
+        // let else_len =
self.slice_sizes.get(&else_value_id).unwrap_or_else(|| { + // panic!("ICE: Merging values during flattening encountered slice {else_value_id} without a preset size"); + // }).0; + let len = then_len.max(else_len); + for i in 0..len { for (element_index, element_type) in element_types.iter().enumerate() { let index_usize = i * element_types.len() + element_index; @@ -221,6 +233,11 @@ impl<'a> ValueMerger<'a> { let then_element = get_element(then_value_id, typevars.clone(), then_len); let else_element = get_element(else_value_id, typevars, else_len); + // let then_element = + // get_element(then_value_id, typevars.clone(), then_len * element_types.len()); + // let else_element = + // get_element(else_value_id, typevars, else_len * element_types.len()); + merged.push_back(self.merge_values( then_condition, else_condition, @@ -230,7 +247,9 @@ impl<'a> ValueMerger<'a> { } } - self.dfg.make_array(merged, typ) + let merged = self.dfg.make_array(merged, typ); + println!("Merged value ID: {merged}"); + merged } fn get_slice_length(&mut self, value_id: ValueId) -> usize { @@ -248,7 +267,7 @@ impl<'a> ValueMerger<'a> { Instruction::ArraySet { array, .. } => { let array = *array; let len = self.get_slice_length(array); - self.slice_sizes.insert(array, len); + self.old_slice_sizes.insert(array, len); len } Instruction::Load { address } => { @@ -257,21 +276,17 @@ impl<'a> ValueMerger<'a> { let store_value = outer_block_stores .get(address) .expect("ICE: load in merger should have store from outer block"); - - if let Some(len) = self.slice_sizes.get(store_value) { + if let Some(len) = self.old_slice_sizes.get(store_value) { return *len; } - let store_value = if let Some(store) = store_values.get(address) { - if let Some(len) = self.slice_sizes.get(&store.new_value) { + if let Some(len) = self.old_slice_sizes.get(&store.new_value) { return *len; } - store.new_value } else { *store_value }; - self.get_slice_length(store_value) } Instruction::Call { func, arguments } => { @@ -284,7 +299,7 @@ impl<'a> ValueMerger<'a> { | Intrinsic::SliceInsert => { // `get_slice_length` needs to be called here as it is borrows self as mutable let initial_len = self.get_slice_length(slice_contents); - self.slice_sizes.insert(slice_contents, initial_len); + self.old_slice_sizes.insert(slice_contents, initial_len); initial_len + 1 } Intrinsic::SlicePopBack @@ -292,7 +307,7 @@ impl<'a> ValueMerger<'a> { | Intrinsic::SliceRemove => { // `get_slice_length` needs to be called here as it is borrows self as mutable let initial_len = self.get_slice_length(slice_contents); - self.slice_sizes.insert(slice_contents, initial_len); + self.old_slice_sizes.insert(slice_contents, initial_len); initial_len - 1 } _ => { diff --git a/test_programs/execution_success/slice_dynamic_index/src/main.nr b/test_programs/execution_success/slice_dynamic_index/src/main.nr index 374d2ba4c26..4d628efe841 100644 --- a/test_programs/execution_success/slice_dynamic_index/src/main.nr +++ b/test_programs/execution_success/slice_dynamic_index/src/main.nr @@ -10,19 +10,19 @@ fn regression_dynamic_slice_index(x: Field, y: Field) { } assert(slice.len() == 5); - dynamic_slice_index_set_if(slice, x, y); - dynamic_slice_index_set_else(slice, x, y); + // dynamic_slice_index_set_if(slice, x, y); + // dynamic_slice_index_set_else(slice, x, y); dynamic_slice_index_set_nested_if_else_else(slice, x, y); - dynamic_slice_index_set_nested_if_else_if(slice, x, y + 1); - dynamic_slice_index_if(slice, x); - dynamic_array_index_if([0, 1, 2, 3, 4], x); - 
dynamic_slice_index_else(slice, x); - - dynamic_slice_merge_if(slice, x); - dynamic_slice_merge_else(slice, x); - dynamic_slice_merge_two_ifs(slice, x); - dynamic_slice_merge_mutate_between_ifs(slice, x, y); - dynamic_slice_merge_push_then_pop(slice, x, y); + // dynamic_slice_index_set_nested_if_else_if(slice, x, y + 1); + // dynamic_slice_index_if(slice, x); + // dynamic_array_index_if([0, 1, 2, 3, 4], x); + // dynamic_slice_index_else(slice, x); + + // dynamic_slice_merge_if(slice, x); + // dynamic_slice_merge_else(slice, x); + // dynamic_slice_merge_two_ifs(slice, x); + // dynamic_slice_merge_mutate_between_ifs(slice, x, y); + // dynamic_slice_merge_push_then_pop(slice, x, y); } fn dynamic_slice_index_set_if(mut slice: [Field], x: Field, y: Field) { From 64d5fffe9210220189a2c89e6c2eb8a5fb2a2186 Mon Sep 17 00:00:00 2001 From: vezenovm Date: Thu, 1 Feb 2024 22:05:24 +0000 Subject: [PATCH 05/17] handle out of bounds slice index from flattening --- compiler/noirc_evaluator/src/errors.rs | 2 +- .../noirc_evaluator/src/ssa/acir_gen/mod.rs | 87 +++++++++++++++++-- 2 files changed, 82 insertions(+), 7 deletions(-) diff --git a/compiler/noirc_evaluator/src/errors.rs b/compiler/noirc_evaluator/src/errors.rs index 73b6e671bd5..c3bf40cc659 100644 --- a/compiler/noirc_evaluator/src/errors.rs +++ b/compiler/noirc_evaluator/src/errors.rs @@ -163,7 +163,7 @@ impl RuntimeError { _ => { let message = self.to_string(); let location = - self.call_stack().back().expect("Expected RuntimeError to have a location"); + self.call_stack().back().unwrap_or_else(|| panic!("Expected RuntimeError to have a location. Error message: {message}")); Diagnostic::simple_error(message, String::new(), location.span) } diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs index 120c5bf25df..a819262f2ca 100644 --- a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs @@ -621,11 +621,11 @@ impl Context { instruction: InstructionId, dfg: &DataFlowGraph, index: ValueId, - array: ValueId, + array_id: ValueId, store_value: Option, ) -> Result { let index_const = dfg.get_numeric_constant(index); - let value_type = dfg.type_of_value(array); + let value_type = dfg.type_of_value(array_id); // Compiler sanity checks assert!( !value_type.is_nested_slice(), @@ -635,7 +635,7 @@ impl Context { unreachable!("ICE: expected array or slice type"); }; - match self.convert_value(array, dfg) { + match self.convert_value(array_id, dfg) { AcirValue::Var(acir_var, _) => { return Err(RuntimeError::InternalError(InternalError::Unexpected { expected: "an array value".to_string(), @@ -659,7 +659,7 @@ impl Context { }; if self.acir_context.is_constant_one(&self.current_side_effects_enabled_var) { // Report the error if side effects are enabled. 
- if index >= array_size { + if index >= array_size && !matches!(value_type, Type::Slice(_)) { let call_stack = self.acir_context.get_call_stack(); return Err(RuntimeError::IndexOutOfBounds { index, @@ -672,7 +672,13 @@ impl Context { let store_value = self.convert_value(store_value, dfg); AcirValue::Array(array.update(index, store_value)) } - None => array[index].clone(), + None => { + if index >= array_size { + self.construct_dummy_slice_value(instruction, dfg) + } else { + array[index].clone() + } + } }; self.define_result(dfg, instruction, value); @@ -684,14 +690,83 @@ impl Context { self.define_result(dfg, instruction, array[index].clone()); return Ok(true); } + // If there is a non constant predicate and the index is out of range for a slice we should still directly create dummy data + // These situations should only happen during flattening of slices when the same slice after an if statement + // would have different lengths depending upon the condition + else if index >= array_size + && value_type.contains_slice_element() + && store_value.is_none() + { + if index >= array_size { + let value = self.construct_dummy_slice_value(instruction, dfg); + + self.define_result(dfg, instruction, value); + return Ok(true); + } else { + return Ok(false); + } + } } } - AcirValue::DynamicArray(_) => (), + AcirValue::DynamicArray(AcirDynamicArray { len, .. }) => { + if let Some(index_const) = index_const { + let index = match index_const.try_to_u64() { + Some(index_const) => index_const as usize, + None => { + let call_stack = self.acir_context.get_call_stack(); + return Err(RuntimeError::TypeConversion { + from: "array index".to_string(), + into: "u64".to_string(), + call_stack, + }); + } + }; + + if index >= len { + let value = self.construct_dummy_slice_value(instruction, dfg); + + self.define_result(dfg, instruction, value); + return Ok(true); + } + } + + return Ok(false); + } }; Ok(false) } + fn construct_dummy_slice_value( + &mut self, + instruction: InstructionId, + dfg: &DataFlowGraph, + ) -> AcirValue { + let results = dfg.instruction_results(instruction); + let res_typ = dfg.type_of_value(results[0]); + self.construct_dummy_array_value(&res_typ) + } + + fn construct_dummy_array_value(&mut self, ssa_type: &Type) -> AcirValue { + match ssa_type.clone() { + Type::Numeric(numeric_type) => { + let zero = self.acir_context.add_constant(FieldElement::zero()); + let typ = AcirType::NumericType(numeric_type); + AcirValue::Var(zero, typ) + } + Type::Array(element_types, len) => { + let mut values = Vector::new(); + for _ in 0..len { + for typ in element_types.as_ref() { + values.push_back(self.construct_dummy_array_value(typ)); + } + } + AcirValue::Array(values) + } + _ => unreachable!("ICE - expected an array or numeric type"), + } + } + /// We need to properly setup the inputs for array operations in ACIR. /// From the original SSA values we compute the following AcirVars: /// - new_index is the index of the array. 
ACIR memory operations work with a flat memory, so we fully flattened the specified index From 92f02a01b72030d8790a27887cf84673f901514a Mon Sep 17 00:00:00 2001 From: vezenovm Date: Thu, 1 Feb 2024 22:26:56 +0000 Subject: [PATCH 06/17] make regression_4202 have opcodes --- compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs | 2 +- test_programs/execution_success/regression_4202/Prover.toml | 1 + test_programs/execution_success/regression_4202/src/main.nr | 4 ++-- .../execution_success/slice_dynamic_index/src/main.nr | 2 +- 4 files changed, 5 insertions(+), 4 deletions(-) create mode 100644 test_programs/execution_success/regression_4202/Prover.toml diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs index a819262f2ca..6a783c8c3c9 100644 --- a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs @@ -722,7 +722,7 @@ impl Context { } }; - if index >= len { + if index >= len && matches!(value_type, Type::Slice(_)) { let value = self.construct_dummy_slice_value(instruction, dfg); self.define_result(dfg, instruction, value); diff --git a/test_programs/execution_success/regression_4202/Prover.toml b/test_programs/execution_success/regression_4202/Prover.toml new file mode 100644 index 00000000000..e9319802dfd --- /dev/null +++ b/test_programs/execution_success/regression_4202/Prover.toml @@ -0,0 +1 @@ +input = [1, 2, 3, 4] diff --git a/test_programs/execution_success/regression_4202/src/main.nr b/test_programs/execution_success/regression_4202/src/main.nr index 0e79da84d20..37d2ee4578d 100644 --- a/test_programs/execution_success/regression_4202/src/main.nr +++ b/test_programs/execution_success/regression_4202/src/main.nr @@ -1,4 +1,4 @@ -fn main() { +fn main(input: [u32; 4]) { let mut slice1: [u32] = [1, 2, 3, 4]; if slice1[0] == 3 { slice1[1] = 4; @@ -9,6 +9,6 @@ fn main() { } for i in 0..4 { - assert(slice1[i] == i + 1); + assert(slice1[i] == input[i]); } } diff --git a/test_programs/execution_success/slice_dynamic_index/src/main.nr b/test_programs/execution_success/slice_dynamic_index/src/main.nr index 374d2ba4c26..b16cec732a2 100644 --- a/test_programs/execution_success/slice_dynamic_index/src/main.nr +++ b/test_programs/execution_success/slice_dynamic_index/src/main.nr @@ -8,7 +8,7 @@ fn regression_dynamic_slice_index(x: Field, y: Field) { for i in 0..5 { slice = slice.push_back(i); } - assert(slice.len() == 5); + // assert(slice.len() == 5); dynamic_slice_index_set_if(slice, x, y); dynamic_slice_index_set_else(slice, x, y); From adeb02d01895ca41ff4936f8d6ba732b7c28b249 Mon Sep 17 00:00:00 2001 From: vezenovm Date: Thu, 1 Feb 2024 22:27:05 +0000 Subject: [PATCH 07/17] bring back assert in slice_dynamic_index --- test_programs/execution_success/slice_dynamic_index/src/main.nr | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test_programs/execution_success/slice_dynamic_index/src/main.nr b/test_programs/execution_success/slice_dynamic_index/src/main.nr index b16cec732a2..374d2ba4c26 100644 --- a/test_programs/execution_success/slice_dynamic_index/src/main.nr +++ b/test_programs/execution_success/slice_dynamic_index/src/main.nr @@ -8,7 +8,7 @@ fn regression_dynamic_slice_index(x: Field, y: Field) { for i in 0..5 { slice = slice.push_back(i); } - // assert(slice.len() == 5); + assert(slice.len() == 5); dynamic_slice_index_set_if(slice, x, y); dynamic_slice_index_set_else(slice, x, y); From a3b79c557ccb3ddebee868cdfdb841a5263bd392 Mon Sep 17 00:00:00 2001 From: vezenovm 
Date: Thu, 1 Feb 2024 22:34:29 +0000 Subject: [PATCH 08/17] improve comments --- .../noirc_evaluator/src/ssa/acir_gen/mod.rs | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs index 7ea3a6bf4f3..f5dfc96b2f8 100644 --- a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs @@ -713,6 +713,10 @@ impl Context { AcirValue::Array(array.update(index, store_value)) } None => { + // If the index is out of range for a slice we should directly create dummy data. + // This situation should only happen during flattening of slices when the same slice after an if statement + // would have different lengths depending upon the predicate. + // Otherwise, we can potentially hit an index out of bounds error. if index >= array_size { self.construct_dummy_slice_value(instruction, dfg) } else { @@ -730,9 +734,10 @@ impl Context { self.define_result(dfg, instruction, array[index].clone()); return Ok(true); } - // If there is a non constant predicate and the index is out of range for a slice we should still directly create dummy data - // These situations should only happen during flattening of slices when the same slice after an if statement - // would have different lengths depending upon the condition + // If there is a non constant predicate and the index is out of range for a slice, we should directly create dummy data. + // This situation should only happen during flattening of slices when the same slice after an if statement + // would have different lengths depending upon the predicate. + // Otherwise, we can potentially hit an index out of bounds error. else if index >= array_size && value_type.contains_slice_element() && store_value.is_none() @@ -784,10 +789,10 @@ impl Context { ) -> AcirValue { let results = dfg.instruction_results(instruction); let res_typ = dfg.type_of_value(results[0]); - self.construct_dummy_array_value(&res_typ) + self.construct_dummy_slice_value_inner(&res_typ) } - fn construct_dummy_array_value(&mut self, ssa_type: &Type) -> AcirValue { + fn construct_dummy_slice_value_inner(&mut self, ssa_type: &Type) -> AcirValue { match ssa_type.clone() { Type::Numeric(numeric_type) => { let zero = self.acir_context.add_constant(FieldElement::zero()); @@ -798,7 +803,7 @@ impl Context { let mut values = Vector::new(); for _ in 0..len { for typ in element_types.as_ref() { - values.push_back(self.construct_dummy_array_value(typ)); + values.push_back(self.construct_dummy_slice_value_inner(typ)); } } AcirValue::Array(values) From c67776d8de23d3e6836e907671c4ef0ebb98ce78 Mon Sep 17 00:00:00 2001 From: vezenovm Date: Fri, 2 Feb 2024 16:33:11 +0000 Subject: [PATCH 09/17] use capacity tracker rather than backtracking --- .../src/ssa/ir/instruction/call.rs | 2 +- .../src/ssa/opt/flatten_cfg.rs | 51 +------ .../src/ssa/opt/flatten_cfg/value_merger.rs | 127 +++--------------- .../slice_dynamic_index/src/main.nr | 24 ++-- 4 files changed, 35 insertions(+), 169 deletions(-) diff --git a/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs b/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs index f402902cb83..f8f034e5bc9 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs @@ -336,7 +336,7 @@ fn simplify_slice_push_back( slice_sizes.insert(set_last_slice_value, (slice_size / element_size, vec![])); slice_sizes.insert(new_slice, (slice_size / 
element_size, vec![])); - let mut value_merger = ValueMerger::new(dfg, block, None, None, &mut slice_sizes); + let mut value_merger = ValueMerger::new(dfg, block, &mut slice_sizes); let new_slice = value_merger.merge_values( len_not_equals_capacity, len_equals_capacity, diff --git a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs index 0371ee2a26d..0b035eefa8c 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs @@ -186,17 +186,6 @@ struct Context<'f> { /// between inlining of branches. store_values: HashMap, - /// Maps an address to the old and new value of the element at that address - /// The difference between this map and store_values is that this stores - /// the old and new value of an element from the outer block whose jmpif - /// terminator is being flattened. - /// - /// This map persists throughout the flattening process, where addresses - /// are overwritten as new stores are found. This overwriting is the desired behavior, - /// as we want the most update to date value to be stored at a given address as - /// we walk through blocks to flatten. - outer_block_stores: HashMap, - /// Stores all allocations local to the current branch. /// Since these branches are local to the current branch (ie. only defined within one branch of /// an if expression), they should not be merged with their previous value or stored value in @@ -212,6 +201,8 @@ struct Context<'f> { /// the most recent condition combined with all previous conditions via `And` instructions. conditions: Vec<(BasicBlockId, ValueId)>, + /// Maps SSA array values to their size and any respective nested slice children it may have + /// This is maintained by appropriate calls to the `SliceCapacityTracker` slice_sizes: HashMap)>, } @@ -243,7 +234,6 @@ fn flatten_function_cfg(function: &mut Function) { local_allocations: HashSet::new(), branch_ends, conditions: Vec::new(), - outer_block_stores: HashMap::default(), slice_sizes: HashMap::default(), }; context.flatten(); @@ -267,23 +257,6 @@ impl<'f> Context<'f> { /// Returns the last block to be inlined. This is either the return block of the function or, /// if self.conditions is not empty, the end block of the most recent condition. fn handle_terminator(&mut self, block: BasicBlockId) -> BasicBlockId { - if let TerminatorInstruction::JmpIf { .. } | TerminatorInstruction::Jmp { .. } = - self.inserter.function.dfg[block].unwrap_terminator() - { - // Find stores in the outer block and insert into the `outer_block_stores` map. - // Not using this map can lead to issues when attempting to merge slices. - // When inlining a branch end, only the then branch and the else branch are checked for stores. - // However, there are cases where we want to load a value that comes from the outer block - // that we are handling the terminator for here. 
- let instructions = self.inserter.function.dfg[block].instructions().to_vec(); - for instruction in instructions { - let (instruction, _) = self.inserter.map_instruction(instruction); - if let Instruction::Store { address, value } = instruction { - self.outer_block_stores.insert(address, value); - } - } - } - // As we recursively flatten inner blocks, we need to track the slice information // for the outer block before we start recursively inlining let outer_block_instructions = self.inserter.function.dfg[block].instructions(); @@ -521,13 +494,8 @@ impl<'f> Context<'f> { capacity_tracker.compute_slice_sizes(*else_arg, &mut self.slice_sizes); } - let mut value_merger = ValueMerger::new( - &mut self.inserter.function.dfg, - block, - Some(&self.store_values), - Some(&self.outer_block_stores), - &mut self.slice_sizes, - ); + let mut value_merger = + ValueMerger::new(&mut self.inserter.function.dfg, block, &mut self.slice_sizes); // Cannot include this in the previous vecmap since it requires exclusive access to self let args = vecmap(args, |(then_arg, else_arg)| { @@ -571,13 +539,8 @@ impl<'f> Context<'f> { let block = self.inserter.function.entry_block(); - let mut value_merger = ValueMerger::new( - &mut self.inserter.function.dfg, - block, - Some(&self.store_values), - Some(&self.outer_block_stores), - &mut self.slice_sizes, - ); + let mut value_merger = + ValueMerger::new(&mut self.inserter.function.dfg, block, &mut self.slice_sizes); // Merging must occur in a separate loop as we cannot borrow `self` as mutable while `value_merger` does let mut new_values = HashMap::default(); @@ -667,7 +630,7 @@ impl<'f> Context<'f> { /// As a result, the instruction that will be pushed will actually be a new instruction /// with a different InstructionId from the original. The results of the given instruction /// will also be mapped to the results of the new instruction. 
-    fn push_instruction(&mut self, id: InstructionId) -> Vec<ValueId> {
+    fn push_instruction(&mut self, id: InstructionId) -> Vec<ValueId> {
         let (instruction, call_stack) = self.inserter.map_instruction(id);
         let instruction = self.handle_instruction_side_effects(instruction, call_stack.clone());
         let is_allocate = matches!(instruction, Instruction::Allocate);
diff --git a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs
index 8140c1d23be..05500bd5701 100644
--- a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs
+++ b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs
@@ -4,19 +4,14 @@ use fxhash::FxHashMap as HashMap;
 use crate::ssa::ir::{
     basic_block::BasicBlockId,
     dfg::{CallStack, DataFlowGraph},
-    instruction::{BinaryOp, Instruction, Intrinsic},
+    instruction::{BinaryOp, Instruction},
     types::Type,
-    value::{Value, ValueId},
+    value::ValueId,
 };
 
-use crate::ssa::opt::flatten_cfg::Store;
-
 pub(crate) struct ValueMerger<'a> {
     dfg: &'a mut DataFlowGraph,
     block: BasicBlockId,
-    store_values: Option<&'a HashMap<ValueId, Store>>,
-    outer_block_stores: Option<&'a HashMap<ValueId, ValueId>>,
-    old_slice_sizes: HashMap<ValueId, usize>,
     // Maps SSA array values to their size and any respective nested slice children it may have
     slice_sizes: &'a mut HashMap<ValueId, (usize, Vec<ValueId>)>,
 }
@@ -25,18 +20,9 @@ impl<'a> ValueMerger<'a> {
     pub(crate) fn new(
         dfg: &'a mut DataFlowGraph,
         block: BasicBlockId,
-        store_values: Option<&'a HashMap<ValueId, Store>>,
-        outer_block_stores: Option<&'a HashMap<ValueId, ValueId>>,
         slice_sizes: &'a mut HashMap<ValueId, (usize, Vec<ValueId>)>,
     ) -> Self {
-        ValueMerger {
-            dfg,
-            block,
-            store_values,
-            outer_block_stores,
-            slice_sizes: slice_sizes,
-            old_slice_sizes: HashMap::default(),
-        }
+        ValueMerger { dfg, block, slice_sizes }
     }
 
     /// Merge two values a and b from separate basic blocks to a single value.
@@ -188,22 +174,16 @@ impl<'a> ValueMerger<'a> { _ => panic!("Expected slice type"), }; - let then_len = self.get_slice_length(then_value_id); - self.old_slice_sizes.insert(then_value_id, then_len); - - let else_len = self.get_slice_length(else_value_id); - self.old_slice_sizes.insert(else_value_id, else_len); - - // let then_len = self.slice_sizes.get(&then_value_id).unwrap_or_else(|| { - // panic!("ICE: Merging values during flattening encountered slice {then_value_id} without a preset size"); - // }).0; + let then_len = self.slice_sizes.get(&then_value_id).unwrap_or_else(|| { + panic!("ICE: Merging values during flattening encountered slice {then_value_id} without a preset size"); + }).0; - // let else_len = self.slice_sizes.get(&else_value_id).unwrap_or_else(|| { - // panic!("ICE: Merging values during flattening encountered slice {else_value_id} without a preset size"); - // }).0; + let else_len = self.slice_sizes.get(&else_value_id).unwrap_or_else(|| { + panic!("ICE: Merging values during flattening encountered slice {else_value_id} without a preset size"); + }).0; let len = then_len.max(else_len); - + for i in 0..len { for (element_index, element_type) in element_types.iter().enumerate() { let index_usize = i * element_types.len() + element_index; @@ -230,13 +210,10 @@ impl<'a> ValueMerger<'a> { } }; - let then_element = get_element(then_value_id, typevars.clone(), then_len); - let else_element = get_element(else_value_id, typevars, else_len); - - // let then_element = - // get_element(then_value_id, typevars.clone(), then_len * element_types.len()); - // let else_element = - // get_element(else_value_id, typevars, else_len * element_types.len()); + let then_element = + get_element(then_value_id, typevars.clone(), then_len * element_types.len()); + let else_element = + get_element(else_value_id, typevars, else_len * element_types.len()); merged.push_back(self.merge_values( then_condition, @@ -247,81 +224,7 @@ impl<'a> ValueMerger<'a> { } } - let merged = self.dfg.make_array(merged, typ); - println!("Merged value ID: {merged}"); - merged - } - - fn get_slice_length(&mut self, value_id: ValueId) -> usize { - let value = &self.dfg[value_id]; - match value { - Value::Array { array, .. } => array.len(), - Value::Instruction { instruction: instruction_id, .. } => { - let instruction = &self.dfg[*instruction_id]; - match instruction { - // TODO(#3188): A slice can be the result of an ArrayGet when it is the - // fetched from a slice of slices or as a struct field. - // However, we need to incorporate nested slice support in flattening - // in order for this to be valid - // Instruction::ArrayGet { array, .. } => {} - Instruction::ArraySet { array, .. 
} => { - let array = *array; - let len = self.get_slice_length(array); - self.old_slice_sizes.insert(array, len); - len - } - Instruction::Load { address } => { - let outer_block_stores = self.outer_block_stores.expect("ICE: A map of previous stores is required in order to resolve a slice load"); - let store_values = self.store_values.expect("ICE: A map of previous stores is required in order to resolve a slice load"); - let store_value = outer_block_stores - .get(address) - .expect("ICE: load in merger should have store from outer block"); - if let Some(len) = self.old_slice_sizes.get(store_value) { - return *len; - } - let store_value = if let Some(store) = store_values.get(address) { - if let Some(len) = self.old_slice_sizes.get(&store.new_value) { - return *len; - } - store.new_value - } else { - *store_value - }; - self.get_slice_length(store_value) - } - Instruction::Call { func, arguments } => { - let slice_contents = arguments[1]; - let func = &self.dfg[*func]; - match func { - Value::Intrinsic(intrinsic) => match intrinsic { - Intrinsic::SlicePushBack - | Intrinsic::SlicePushFront - | Intrinsic::SliceInsert => { - // `get_slice_length` needs to be called here as it is borrows self as mutable - let initial_len = self.get_slice_length(slice_contents); - self.old_slice_sizes.insert(slice_contents, initial_len); - initial_len + 1 - } - Intrinsic::SlicePopBack - | Intrinsic::SlicePopFront - | Intrinsic::SliceRemove => { - // `get_slice_length` needs to be called here as it is borrows self as mutable - let initial_len = self.get_slice_length(slice_contents); - self.old_slice_sizes.insert(slice_contents, initial_len); - initial_len - 1 - } - _ => { - unreachable!("ICE: Intrinsic not supported, got {intrinsic:?}") - } - }, - _ => unreachable!("ICE: Expected intrinsic value but got {func:?}"), - } - } - _ => unreachable!("ICE: Got unexpected instruction: {instruction:?}"), - } - } - _ => unreachable!("ICE: Got unexpected value when resolving slice length {value:?}"), - } + self.dfg.make_array(merged, typ) } /// Construct a dummy value to be attached to the smaller of two slices being merged. 
diff --git a/test_programs/execution_success/slice_dynamic_index/src/main.nr b/test_programs/execution_success/slice_dynamic_index/src/main.nr index 4d628efe841..374d2ba4c26 100644 --- a/test_programs/execution_success/slice_dynamic_index/src/main.nr +++ b/test_programs/execution_success/slice_dynamic_index/src/main.nr @@ -10,19 +10,19 @@ fn regression_dynamic_slice_index(x: Field, y: Field) { } assert(slice.len() == 5); - // dynamic_slice_index_set_if(slice, x, y); - // dynamic_slice_index_set_else(slice, x, y); + dynamic_slice_index_set_if(slice, x, y); + dynamic_slice_index_set_else(slice, x, y); dynamic_slice_index_set_nested_if_else_else(slice, x, y); - // dynamic_slice_index_set_nested_if_else_if(slice, x, y + 1); - // dynamic_slice_index_if(slice, x); - // dynamic_array_index_if([0, 1, 2, 3, 4], x); - // dynamic_slice_index_else(slice, x); - - // dynamic_slice_merge_if(slice, x); - // dynamic_slice_merge_else(slice, x); - // dynamic_slice_merge_two_ifs(slice, x); - // dynamic_slice_merge_mutate_between_ifs(slice, x, y); - // dynamic_slice_merge_push_then_pop(slice, x, y); + dynamic_slice_index_set_nested_if_else_if(slice, x, y + 1); + dynamic_slice_index_if(slice, x); + dynamic_array_index_if([0, 1, 2, 3, 4], x); + dynamic_slice_index_else(slice, x); + + dynamic_slice_merge_if(slice, x); + dynamic_slice_merge_else(slice, x); + dynamic_slice_merge_two_ifs(slice, x); + dynamic_slice_merge_mutate_between_ifs(slice, x, y); + dynamic_slice_merge_push_then_pop(slice, x, y); } fn dynamic_slice_index_set_if(mut slice: [Field], x: Field, y: Field) { From 254e724522742fb07fa2d596edb95469e3bc473b Mon Sep 17 00:00:00 2001 From: vezenovm Date: Fri, 2 Feb 2024 16:35:08 +0000 Subject: [PATCH 10/17] remove old slice mapping for resolution --- .../ssa/opt/flatten_cfg/capacity_tracker.rs | 83 +------------------ 1 file changed, 1 insertion(+), 82 deletions(-) diff --git a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs index 82cef63854a..1e19b7da542 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs @@ -9,16 +9,6 @@ use fxhash::FxHashMap as HashMap; pub(crate) struct SliceCapacityTracker<'a> { dfg: &'a DataFlowGraph, - /// Maps SSA array values representing a slice's contents to its updated array value - /// after an array set or a slice intrinsic operation. - /// Maps original value -> result - mapped_slice_values: HashMap, - - /// Maps an updated array value following an array operation to its previous value. - /// When used in conjunction with `mapped_slice_values` we form a two way map of all array - /// values being used in array operations. - /// Maps result -> original value - slice_parents: HashMap, // Values containing nested slices to be replaced slice_values: Vec, @@ -26,12 +16,7 @@ pub(crate) struct SliceCapacityTracker<'a> { impl<'a> SliceCapacityTracker<'a> { pub(crate) fn new(dfg: &'a DataFlowGraph) -> Self { - SliceCapacityTracker { - dfg, - mapped_slice_values: HashMap::default(), - slice_parents: HashMap::default(), - slice_values: Vec::new(), - } + SliceCapacityTracker { dfg, slice_values: Vec::new() } } /// Determine how the slice sizes map needs to be updated according to the provided instruction. 
@@ -63,7 +48,6 @@ impl<'a> SliceCapacityTracker<'a> { // If the result has internal slices and is called in an array set // we could potentially have a new larger slice which we need to account for inner_sizes.1.push(results[0]); - self.slice_parents.insert(results[0], *array); let inner_sizes_iter = inner_sizes.1.clone(); for slice_value in inner_sizes_iter { @@ -78,10 +62,6 @@ impl<'a> SliceCapacityTracker<'a> { } else { slice_sizes.insert(results[0], inner_slice.clone()); } - let resolved_result = self.resolve_slice_value(results[0]); - if resolved_result != slice_value { - self.mapped_slice_values.insert(slice_value, results[0]); - } } } } @@ -112,16 +92,6 @@ impl<'a> SliceCapacityTracker<'a> { let inner_sizes = inner_sizes.clone(); slice_sizes.insert(results[0], inner_sizes); - - if let Some(fetched_val) = self.mapped_slice_values.get(&results[0]) { - if *fetched_val != *array { - self.mapped_slice_values.insert(*array, results[0]); - } - } else if *array != results[0] { - self.mapped_slice_values.insert(*array, results[0]); - } - - self.slice_parents.insert(results[0], *array); } } Instruction::Call { func, arguments } => { @@ -159,20 +129,6 @@ impl<'a> SliceCapacityTracker<'a> { let inner_sizes = inner_sizes.clone(); slice_sizes.insert(results[result_index], inner_sizes); - - if let Some(fetched_val) = - self.mapped_slice_values.get(&results[result_index]) - { - if *fetched_val != slice_contents { - self.mapped_slice_values - .insert(slice_contents, results[result_index]); - } - } else if slice_contents != results[result_index] { - self.mapped_slice_values - .insert(slice_contents, results[result_index]); - } - - self.slice_parents.insert(results[result_index], slice_contents); } } Intrinsic::SlicePopBack @@ -184,20 +140,6 @@ impl<'a> SliceCapacityTracker<'a> { if let Some(inner_sizes) = slice_sizes.get(&slice_contents) { let inner_sizes = inner_sizes.clone(); slice_sizes.insert(results[result_index], inner_sizes); - - if let Some(fetched_val) = - self.mapped_slice_values.get(&results[result_index]) - { - if *fetched_val != slice_contents { - self.mapped_slice_values - .insert(slice_contents, results[result_index]); - } - } else if slice_contents != results[result_index] { - self.mapped_slice_values - .insert(slice_contents, results[result_index]); - } - - self.slice_parents.insert(results[result_index], slice_contents); } } _ => {} @@ -280,27 +222,4 @@ impl<'a> SliceCapacityTracker<'a> { } } } - - /// Resolves a ValueId representing a slice's contents to its updated value. - /// If there is no resolved value for the supplied value, the value which - /// was passed to the method is returned. 
-    fn resolve_slice_value(&self, array_id: ValueId) -> ValueId {
-        let val = match self.mapped_slice_values.get(&array_id) {
-            Some(value) => self.resolve_slice_value(*value),
-            None => array_id,
-        };
-        val
-    }
-
-    pub(crate) fn constant_nested_slices(&mut self) -> Vec<ValueId> {
-        std::mem::take(&mut self.slice_values)
-    }
-
-    pub(crate) fn slice_parents_map(&mut self) -> HashMap<ValueId, ValueId> {
-        std::mem::take(&mut self.slice_parents)
-    }
-
-    pub(crate) fn slice_values_map(&mut self) -> HashMap<ValueId, ValueId> {
-        std::mem::take(&mut self.mapped_slice_values)
-    }
 }

From e74968606a30be5ac33678e14f88c66a698a7fc3 Mon Sep 17 00:00:00 2001
From: vezenovm
Date: Fri, 2 Feb 2024 17:02:17 +0000
Subject: [PATCH 11/17] cleanup and simplification for non-nested slices

---
 .../src/ssa/ir/instruction/call.rs            |   4 +-
 .../src/ssa/opt/flatten_cfg.rs                |   8 +-
 .../ssa/opt/flatten_cfg/capacity_tracker.rs   | 137 ++++--------------
 .../src/ssa/opt/flatten_cfg/value_merger.rs   |  12 +-
 4 files changed, 42 insertions(+), 119 deletions(-)

diff --git a/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs b/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs
index f8f034e5bc9..c2484d51580 100644
--- a/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs
+++ b/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs
@@ -333,8 +333,8 @@ fn simplify_slice_push_back(
         .first();
 
     let mut slice_sizes = HashMap::default();
-    slice_sizes.insert(set_last_slice_value, (slice_size / element_size, vec![]));
-    slice_sizes.insert(new_slice, (slice_size / element_size, vec![]));
+    slice_sizes.insert(set_last_slice_value, slice_size / element_size);
+    slice_sizes.insert(new_slice, slice_size / element_size);
 
     let mut value_merger = ValueMerger::new(dfg, block, &mut slice_sizes);
     let new_slice = value_merger.merge_values(
diff --git a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs
index 0b035eefa8c..da742dc39a6 100644
--- a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs
+++ b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs
@@ -201,9 +201,9 @@ struct Context<'f> {
     /// the most recent condition combined with all previous conditions via `And` instructions.
conditions: Vec<(BasicBlockId, ValueId)>, - /// Maps SSA array values to their size and any respective nested slice children it may have + /// Maps SSA array values to their size /// This is maintained by appropriate calls to the `SliceCapacityTracker` - slice_sizes: HashMap)>, + slice_sizes: HashMap, } pub(crate) struct Store { @@ -490,8 +490,8 @@ impl<'f> Context<'f> { // Make sure we have tracked the slice sizes of any block arguments let capacity_tracker = SliceCapacityTracker::new(&self.inserter.function.dfg); for (then_arg, else_arg) in args.iter() { - capacity_tracker.compute_slice_sizes(*then_arg, &mut self.slice_sizes); - capacity_tracker.compute_slice_sizes(*else_arg, &mut self.slice_sizes); + capacity_tracker.compute_slice_capacity(*then_arg, &mut self.slice_sizes); + capacity_tracker.compute_slice_capacity(*else_arg, &mut self.slice_sizes); } let mut value_merger = diff --git a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs index 1e19b7da542..7411c023178 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs @@ -9,89 +9,49 @@ use fxhash::FxHashMap as HashMap; pub(crate) struct SliceCapacityTracker<'a> { dfg: &'a DataFlowGraph, - - // Values containing nested slices to be replaced - slice_values: Vec, } impl<'a> SliceCapacityTracker<'a> { pub(crate) fn new(dfg: &'a DataFlowGraph) -> Self { - SliceCapacityTracker { dfg, slice_values: Vec::new() } + SliceCapacityTracker { dfg } } /// Determine how the slice sizes map needs to be updated according to the provided instruction. pub(crate) fn collect_slice_information( &mut self, instruction: &Instruction, - slice_sizes: &mut HashMap)>, + slice_sizes: &mut HashMap, results: Vec, ) { match instruction { Instruction::ArrayGet { array, .. } => { let array_typ = self.dfg.type_of_value(*array); let array_value = &self.dfg[*array]; - // If we have an SSA value containing nested slices we should mark it - // as a slice that potentially requires to be filled with dummy data. if matches!(array_value, Value::Array { .. }) && array_typ.contains_slice_element() { - self.slice_values.push(*array); // Initial insertion into the slice sizes map // Any other insertions should only occur if the value is already // a part of the map. - self.compute_slice_sizes(*array, slice_sizes); - } - - let res_typ = self.dfg.type_of_value(results[0]); - if res_typ.contains_slice_element() { - if let Some(inner_sizes) = slice_sizes.get_mut(array) { - // Include the result in the parent array potential children - // If the result has internal slices and is called in an array set - // we could potentially have a new larger slice which we need to account for - inner_sizes.1.push(results[0]); - - let inner_sizes_iter = inner_sizes.1.clone(); - for slice_value in inner_sizes_iter { - let inner_slice = slice_sizes.get(&slice_value).unwrap_or_else(|| { - panic!("ICE: should have inner slice set for {slice_value}") - }); - let previous_res_size = slice_sizes.get(&results[0]); - if let Some(previous_res_size) = previous_res_size { - if inner_slice.0 > previous_res_size.0 { - slice_sizes.insert(results[0], inner_slice.clone()); - } - } else { - slice_sizes.insert(results[0], inner_slice.clone()); - } - } - } + self.compute_slice_capacity(*array, slice_sizes); } } Instruction::ArraySet { array, value, .. 
} => { let array_typ = self.dfg.type_of_value(*array); let array_value = &self.dfg[*array]; - // If we have an SSA value containing nested slices we should mark it - // as a slice that potentially requires to be filled with dummy data. if matches!(array_value, Value::Array { .. }) && array_typ.contains_slice_element() { - self.slice_values.push(*array); // Initial insertion into the slice sizes map // Any other insertions should only occur if the value is already // a part of the map. - self.compute_slice_sizes(*array, slice_sizes); + self.compute_slice_capacity(*array, slice_sizes); } let value_typ = self.dfg.type_of_value(*value); - if value_typ.contains_slice_element() { - self.compute_slice_sizes(*value, slice_sizes); - - let inner_sizes = slice_sizes.get_mut(array).expect("ICE expected slice sizes"); - inner_sizes.1.push(*value); - } - - if let Some(inner_sizes) = slice_sizes.get_mut(array) { - let inner_sizes = inner_sizes.clone(); + // Compiler sanity check + assert!(!value_typ.contains_slice_element(), "ICE: Nested slices are not allowed and should not have reached the flattening pass of SSA"); - slice_sizes.insert(results[0], inner_sizes); + if let Some(capacity) = slice_sizes.get(array) { + slice_sizes.insert(results[0], *capacity); } } Instruction::Call { func, arguments } => { @@ -120,15 +80,12 @@ impl<'a> SliceCapacityTracker<'a> { for arg in &arguments[(argument_index + 1)..] { let element_typ = self.dfg.type_of_value(*arg); if element_typ.contains_slice_element() { - self.slice_values.push(*arg); - self.compute_slice_sizes(*arg, slice_sizes); + self.compute_slice_capacity(*arg, slice_sizes); } } - if let Some(inner_sizes) = slice_sizes.get_mut(&slice_contents) { - inner_sizes.0 += 1; - - let inner_sizes = inner_sizes.clone(); - slice_sizes.insert(results[result_index], inner_sizes); + if let Some(contents_capacity) = slice_sizes.get(&slice_contents) { + let new_capacity = *contents_capacity + 1; + slice_sizes.insert(results[result_index], new_capacity); } } Intrinsic::SlicePopBack @@ -137,9 +94,8 @@ impl<'a> SliceCapacityTracker<'a> { // We do not decrement the size on intrinsics that could remove values from a slice. // This is because we could potentially go back to the smaller slice and not fill in dummies. 
// This pass should be tracking the potential max that a slice ***could be*** - if let Some(inner_sizes) = slice_sizes.get(&slice_contents) { - let inner_sizes = inner_sizes.clone(); - slice_sizes.insert(results[result_index], inner_sizes); + if let Some(contents_capacity) = slice_sizes.get(&slice_contents) { + slice_sizes.insert(results[result_index], *contents_capacity); } } _ => {} @@ -149,76 +105,43 @@ impl<'a> SliceCapacityTracker<'a> { Instruction::Store { address, value } => { let value_typ = self.dfg.type_of_value(*value); if value_typ.contains_slice_element() { - self.compute_slice_sizes(*value, slice_sizes); - - let mut inner_sizes = slice_sizes.get(value).unwrap_or_else(|| { - panic!("ICE: should have inner slice set for value {value} being stored at {address}") - }).clone(); + self.compute_slice_capacity(*value, slice_sizes); - if let Some(previous_store) = slice_sizes.get(address) { - inner_sizes.1.append(&mut previous_store.1.clone()); - } + let value_capacity = slice_sizes.get(value).unwrap_or_else(|| { + panic!("ICE: should have slice capacity set for value {value} being stored at {address}") + }); - slice_sizes.insert(*address, inner_sizes); + slice_sizes.insert(*address, *value_capacity); } } Instruction::Load { address } => { let load_typ = self.dfg.type_of_value(*address); if load_typ.contains_slice_element() { let result = results[0]; - let mut inner_sizes = slice_sizes.get(address).unwrap_or_else(|| { - panic!("ICE: should have inner slice set at addres {address} being loaded into {result}") - }).clone(); + let address_capacity = slice_sizes.get(address).unwrap_or_else(|| { + panic!("ICE: should have slice capacity set at address {address} being loaded into {result}") + }); - if let Some(previous_load_value) = slice_sizes.get(&result) { - inner_sizes.1.append(&mut previous_load_value.1.clone()); - } - - slice_sizes.insert(result, inner_sizes); + slice_sizes.insert(result, *address_capacity); } } _ => {} } } - // This methods computes a map representing a nested slice. - // The method also automatically computes the given max slice size - // at each depth of the recursive type. 
- // For example if we had a next slice - pub(crate) fn compute_slice_sizes( + /// Computes the starting capacity of a slice which is still a `Value::Array` + pub(crate) fn compute_slice_capacity( &self, array_id: ValueId, - slice_sizes: &mut HashMap)>, + slice_sizes: &mut HashMap, ) { - if let Value::Array { array, typ } = &self.dfg[array_id].clone() { + if let Value::Array { array, typ } = &self.dfg[array_id] { + // Compiler sanity check + assert!(!typ.is_nested_slice(), "ICE: Nested slices are not allowed and should not have reached the flattening pass of SSA"); if let Type::Slice(_) = typ { let element_size = typ.element_size(); let len = array.len() / element_size; - let mut slice_value = (len, vec![]); - for value in array { - let typ = self.dfg.type_of_value(*value); - if let Type::Slice(_) = typ { - slice_value.1.push(*value); - self.compute_slice_sizes(*value, slice_sizes); - } - } - // Mark the correct max size based upon an array values internal structure - let mut max_size = 0; - for inner_value in slice_value.1.iter() { - let inner_slice = - slice_sizes.get(inner_value).expect("ICE: should have inner slice set"); - if inner_slice.0 > max_size { - max_size = inner_slice.0; - } - } - for inner_value in slice_value.1.iter() { - let inner_slice = - slice_sizes.get_mut(inner_value).expect("ICE: should have inner slice set"); - if inner_slice.0 < max_size { - inner_slice.0 = max_size; - } - } - slice_sizes.insert(array_id, slice_value); + slice_sizes.insert(array_id, len); } } } diff --git a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs index 05500bd5701..58a9827c3fa 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs @@ -13,14 +13,14 @@ pub(crate) struct ValueMerger<'a> { dfg: &'a mut DataFlowGraph, block: BasicBlockId, // Maps SSA array values to their size and any respective nested slice children it may have - slice_sizes: &'a mut HashMap)>, + slice_sizes: &'a mut HashMap, } impl<'a> ValueMerger<'a> { pub(crate) fn new( dfg: &'a mut DataFlowGraph, block: BasicBlockId, - slice_sizes: &'a mut HashMap)>, + slice_sizes: &'a mut HashMap, ) -> Self { ValueMerger { dfg, block, slice_sizes } } @@ -174,13 +174,13 @@ impl<'a> ValueMerger<'a> { _ => panic!("Expected slice type"), }; - let then_len = self.slice_sizes.get(&then_value_id).unwrap_or_else(|| { + let then_len = *self.slice_sizes.get(&then_value_id).unwrap_or_else(|| { panic!("ICE: Merging values during flattening encountered slice {then_value_id} without a preset size"); - }).0; + }); - let else_len = self.slice_sizes.get(&else_value_id).unwrap_or_else(|| { + let else_len = *self.slice_sizes.get(&else_value_id).unwrap_or_else(|| { panic!("ICE: Merging values during flattening encountered slice {else_value_id} without a preset size"); - }).0; + }); let len = then_len.max(else_len); From ee5e0218345631c9e0826544fe5075a56427e4b9 Mon Sep 17 00:00:00 2001 From: vezenovm Date: Fri, 2 Feb 2024 17:07:55 +0000 Subject: [PATCH 12/17] cleanup old comment --- compiler/noirc_evaluator/src/ssa/ir/instruction.rs | 4 ---- 1 file changed, 4 deletions(-) diff --git a/compiler/noirc_evaluator/src/ssa/ir/instruction.rs b/compiler/noirc_evaluator/src/ssa/ir/instruction.rs index 5ef9fda390b..fa5bc0f6697 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/instruction.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/instruction.rs @@ -246,10 +246,6 @@ impl Instruction { 
                bin.operator != BinaryOp::Div
            }
            Cast(_, _) | Truncate { .. } | Not(_) | ArrayGet { .. } | ArraySet { .. } => true,
-            // Cast(_, _) | Truncate { .. } | Not(_) => true,
-
-            // These are not pure when working with nested slices based upon witness conditions
-            // ArrayGet { .. } | ArraySet { .. } => false,
 
             // These either have side-effects or interact with memory
             Constrain(..)

From 10f584490671f56338fd56fb6d309fd1af0a2eed Mon Sep 17 00:00:00 2001
From: vezenovm
Date: Fri, 2 Feb 2024 17:08:22 +0000
Subject: [PATCH 13/17] remove old comments

---
 compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs | 2 --
 1 file changed, 2 deletions(-)

diff --git a/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs b/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs
index c2484d51580..86e4b22f034 100644
--- a/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs
+++ b/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs
@@ -323,8 +323,6 @@ fn simplify_slice_push_back(
     let slice_size = slice.len();
     let element_size = element_type.element_size();
     let new_slice = dfg.make_array(slice, element_type);
-    // dbg!(slice_size);
-    // dbg!(element_size);
 
     let set_last_slice_value_instr =
         Instruction::ArraySet { array: new_slice, index: arguments[0], value: arguments[2] };

From 20e9a9e2fa4cc2707f1eb52b83df34b5d12ab3e0 Mon Sep 17 00:00:00 2001
From: vezenovm
Date: Fri, 2 Feb 2024 17:10:11 +0000
Subject: [PATCH 14/17] more comments cleanup

---
 compiler/noirc_evaluator/src/ssa/ir/types.rs                | 1 -
 compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs         | 1 -
 .../noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs | 3 ++-
 3 files changed, 2 insertions(+), 3 deletions(-)

diff --git a/compiler/noirc_evaluator/src/ssa/ir/types.rs b/compiler/noirc_evaluator/src/ssa/ir/types.rs
index fcd48281a72..8dc9e67db79 100644
--- a/compiler/noirc_evaluator/src/ssa/ir/types.rs
+++ b/compiler/noirc_evaluator/src/ssa/ir/types.rs
@@ -121,7 +121,6 @@ impl Type {
             Type::Slice(_) => true,
             Type::Numeric(_) => false,
             Type::Reference(element) => element.contains_slice_element(),
-            // Type::Reference(_) => false,
             Type::Function => false,
         }
     }
diff --git a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs
index da742dc39a6..2899304f24e 100644
--- a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs
+++ b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs
@@ -610,7 +610,6 @@ impl<'f> Context<'f> {
         let instructions = self.inserter.function.dfg[destination].instructions().to_vec();
 
         for instruction in instructions.iter() {
-            // self.push_instruction(instruction);
             let results = self.push_instruction(*instruction);
             let (instruction, _) = self.inserter.map_instruction(*instruction);
             let mut capacity_tracker = SliceCapacityTracker::new(&self.inserter.function.dfg);
diff --git a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs
index 58a9827c3fa..6b923a2e42d 100644
--- a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs
+++ b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs
@@ -12,7 +12,8 @@ use crate::ssa::ir::{
 
 pub(crate) struct ValueMerger<'a> {
     dfg: &'a mut DataFlowGraph,
     block: BasicBlockId,
-    // Maps SSA array values to their size and any respective nested slice children it may have
+    // Maps SSA array values with a slice type to their size.
+    // This must be computed before merging values.
slice_sizes: &'a mut HashMap, } From 27ef641a6c3a952f61cfb1c68338e5034833f18e Mon Sep 17 00:00:00 2001 From: vezenovm Date: Fri, 2 Feb 2024 17:11:20 +0000 Subject: [PATCH 15/17] expand comment on slice_sizes field of flattening context --- compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs index 2899304f24e..dc37174869e 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs @@ -201,8 +201,8 @@ struct Context<'f> { /// the most recent condition combined with all previous conditions via `And` instructions. conditions: Vec<(BasicBlockId, ValueId)>, - /// Maps SSA array values to their size - /// This is maintained by appropriate calls to the `SliceCapacityTracker` + /// Maps SSA array values with a slice type to their size. + /// This is maintained by appropriate calls to the `SliceCapacityTracker` and is used by the `ValueMerger`. slice_sizes: HashMap, } From a57530e7c4fe34a1902d69c11d31e3d617fbba83 Mon Sep 17 00:00:00 2001 From: vezenovm Date: Fri, 2 Feb 2024 20:50:19 +0000 Subject: [PATCH 16/17] fully accurate tracking of slice capacities across blocks --- .../noirc_evaluator/src/ssa/acir_gen/mod.rs | 86 +------------------ .../src/ssa/opt/flatten_cfg.rs | 25 ++++-- .../ssa/opt/flatten_cfg/capacity_tracker.rs | 4 +- 3 files changed, 25 insertions(+), 90 deletions(-) diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs index f5dfc96b2f8..0db717153bd 100644 --- a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs @@ -699,7 +699,7 @@ impl Context { }; if self.acir_context.is_constant_one(&self.current_side_effects_enabled_var) { // Report the error if side effects are enabled. - if index >= array_size && !matches!(value_type, Type::Slice(_)) { + if index >= array_size { let call_stack = self.acir_context.get_call_stack(); return Err(RuntimeError::IndexOutOfBounds { index, @@ -712,17 +712,7 @@ impl Context { let store_value = self.convert_value(store_value, dfg); AcirValue::Array(array.update(index, store_value)) } - None => { - // If the index is out of range for a slice we should directly create dummy data. - // This situation should only happen during flattening of slices when the same slice after an if statement - // would have different lengths depending upon the predicate. - // Otherwise, we can potentially hit an index out of bounds error. - if index >= array_size { - self.construct_dummy_slice_value(instruction, dfg) - } else { - array[index].clone() - } - } + None => array[index].clone(), }; self.define_result(dfg, instruction, value); @@ -734,84 +724,14 @@ impl Context { self.define_result(dfg, instruction, array[index].clone()); return Ok(true); } - // If there is a non constant predicate and the index is out of range for a slice, we should directly create dummy data. - // This situation should only happen during flattening of slices when the same slice after an if statement - // would have different lengths depending upon the predicate. - // Otherwise, we can potentially hit an index out of bounds error. 
- else if index >= array_size - && value_type.contains_slice_element() - && store_value.is_none() - { - if index >= array_size { - let value = self.construct_dummy_slice_value(instruction, dfg); - - self.define_result(dfg, instruction, value); - return Ok(true); - } else { - return Ok(false); - } - } } } - AcirValue::DynamicArray(AcirDynamicArray { len, .. }) => { - if let Some(index_const) = index_const { - let index = match index_const.try_to_u64() { - Some(index_const) => index_const as usize, - None => { - let call_stack = self.acir_context.get_call_stack(); - return Err(RuntimeError::TypeConversion { - from: "array index".to_string(), - into: "u64".to_string(), - call_stack, - }); - } - }; - - if index >= len && matches!(value_type, Type::Slice(_)) { - let value = self.construct_dummy_slice_value(instruction, dfg); - - self.define_result(dfg, instruction, value); - return Ok(true); - } - } - - return Ok(false); - } + AcirValue::DynamicArray(_) => {} }; Ok(false) } - fn construct_dummy_slice_value( - &mut self, - instruction: InstructionId, - dfg: &DataFlowGraph, - ) -> AcirValue { - let results = dfg.instruction_results(instruction); - let res_typ = dfg.type_of_value(results[0]); - self.construct_dummy_slice_value_inner(&res_typ) - } - - fn construct_dummy_slice_value_inner(&mut self, ssa_type: &Type) -> AcirValue { - match ssa_type.clone() { - Type::Numeric(numeric_type) => { - let zero = self.acir_context.add_constant(FieldElement::zero()); - let typ = AcirType::NumericType(numeric_type); - AcirValue::Var(zero, typ) - } - Type::Array(element_types, len) => { - let mut values = Vector::new(); - for _ in 0..len { - for typ in element_types.as_ref() { - values.push_back(self.construct_dummy_slice_value_inner(typ)); - } - } - AcirValue::Array(values) - } - _ => unreachable!("ICE - expected an array or numeric type"), - } - } - /// We need to properly setup the inputs for array operations in ACIR. /// From the original SSA values we compute the following AcirVars: /// - new_index is the index of the array. ACIR memory operations work with a flat memory, so we fully flattened the specified index diff --git a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs index dc37174869e..943a57c1bc0 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs @@ -487,7 +487,7 @@ impl<'f> Context<'f> { let block = self.inserter.function.entry_block(); - // Make sure we have tracked the slice sizes of any block arguments + // Make sure we have tracked the slice capacities of any block arguments let capacity_tracker = SliceCapacityTracker::new(&self.inserter.function.dfg); for (then_arg, else_arg) in args.iter() { capacity_tracker.compute_slice_capacity(*then_arg, &mut self.slice_sizes); @@ -534,6 +534,15 @@ impl<'f> Context<'f> { } } + // Most slice information is collected when instructions are inlined. + // We need to collect information on slice values here as we may possibly merge stores + // before any inlining occurs. 
+        let capacity_tracker = SliceCapacityTracker::new(&self.inserter.function.dfg);
+        for (then_case, else_case, _) in new_map.values() {
+            capacity_tracker.compute_slice_capacity(*then_case, &mut self.slice_sizes);
+            capacity_tracker.compute_slice_capacity(*else_case, &mut self.slice_sizes);
+        }
+
         let then_condition = then_branch.condition;
         let else_condition = else_branch.condition;
@@ -541,7 +550,6 @@ impl<'f> Context<'f> {
 
         let mut value_merger =
             ValueMerger::new(&mut self.inserter.function.dfg, block, &mut self.slice_sizes);
 
-        // Merging must occur in a separate loop as we cannot borrow `self` as mutable while `value_merger` does
         let mut new_values = HashMap::default();
         for (address, (then_case, else_case, _)) in &new_map {
@@ -581,13 +589,18 @@ impl<'f> Context<'f> {
             store_value.new_value = new_value;
         } else {
             let load = Instruction::Load { address };
+
             let load_type = Some(vec![self.inserter.function.dfg.type_of_value(new_value)]);
-            let old_value = self.insert_instruction_with_typevars(load, load_type).first();
+            let old_value =
+                self.insert_instruction_with_typevars(load.clone(), load_type).first();
 
+            // Need this or else we will be missing the previous value of a slice that we wish to merge
             let mut capacity_tracker = SliceCapacityTracker::new(&self.inserter.function.dfg);
-            // Need this or else we will be missing slice stores that we wish to merge
-            let store = Instruction::Store { address: old_value, value: new_value };
-            capacity_tracker.collect_slice_information(&store, &mut self.slice_sizes, vec![]);
+            capacity_tracker.collect_slice_information(
+                &load,
+                &mut self.slice_sizes,
+                vec![old_value],
+            );
 
             self.store_values.insert(address, Store { old_value, new_value });
         }
diff --git a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs
index 7411c023178..7cd0fe3084e 100644
--- a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs
+++ b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs
@@ -95,7 +95,8 @@ impl<'a> SliceCapacityTracker<'a> {
                             // We do not decrement the size on intrinsics that could remove values from a slice.
                             // This is because we could potentially go back to the smaller slice and not fill in dummies.
                             // This pass should be tracking the potential max that a slice ***could be***
                             if let Some(contents_capacity) = slice_sizes.get(&slice_contents) {
-                                slice_sizes.insert(results[result_index], *contents_capacity);
+                                let new_capacity = *contents_capacity - 1;
+                                slice_sizes.insert(results[result_index], new_capacity);
                             }
                         _ => {}
@@ -118,6 +119,7 @@ impl<'a> SliceCapacityTracker<'a> {
                 let load_typ = self.dfg.type_of_value(*address);
                 if load_typ.contains_slice_element() {
                     let result = results[0];
+
                     let address_capacity = slice_sizes.get(address).unwrap_or_else(|| {
                         panic!("ICE: should have slice capacity set at address {address} being loaded into {result}")
                     });

From 058f5d38aa12a17f9fb3bd51f86af9dbb7467ab9 Mon Sep 17 00:00:00 2001
From: vezenovm
Date: Fri, 2 Feb 2024 20:52:40 +0000
Subject: [PATCH 17/17] a little cleanup

---
 compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs
index 0db717153bd..f75eb4be775 100644
--- a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs
+++ b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs
@@ -726,7 +726,7 @@ impl Context {
                     }
                 }
             }
-            AcirValue::DynamicArray(_) => {}
+            AcirValue::DynamicArray(_) => (),
         };
 
         Ok(false)