Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix: Accurate tracking of slice capacities across blocks #4240

Merged
merged 24 commits into from
Feb 8, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
24 commits
Select commit Hold shift + click to select a range
87294f4
check for outer_block stores in Jmp blocks
vezenovm Feb 1, 2024
3ed2435
remove old comments
vezenovm Feb 1, 2024
3a05664
cargo fmt
vezenovm Feb 1, 2024
67e63db
use capacity tracker rather than backtracking in flattening, still a …
vezenovm Feb 1, 2024
64d5fff
handle out of bounds slice index from flattening
vezenovm Feb 1, 2024
92f02a0
make regression_4202 have opcodes
vezenovm Feb 1, 2024
adeb02d
bring back assert in slice_dynamic_index
vezenovm Feb 1, 2024
dfe904e
Merge branch 'master' into mv/fix-4202
vezenovm Feb 1, 2024
a3b79c5
improve comments
vezenovm Feb 1, 2024
2ef2dcb
merge conflicts w/ parent mv/fix-4202
vezenovm Feb 1, 2024
c67776d
use capacity tracker rather than backtracking
vezenovm Feb 2, 2024
254e724
remove old slice mapping for resolution
vezenovm Feb 2, 2024
e749686
cleanup and simplification for non-nested slices
vezenovm Feb 2, 2024
ee5e021
cleanup old comment
vezenovm Feb 2, 2024
10f5844
remove old comments
vezenovm Feb 2, 2024
20e9a9e
more comments cleanup
vezenovm Feb 2, 2024
27ef641
expand comment on slice_sizes field of flattening context
vezenovm Feb 2, 2024
a57530e
fully accurate tracking of slice capacities across blocks
vezenovm Feb 2, 2024
058f5d3
a little cleanup
vezenovm Feb 2, 2024
bb125fc
Merge branch 'master' into mv/capacity-tracker
vezenovm Feb 2, 2024
218f5c4
Merge branch 'master' into mv/capacity-tracker
vezenovm Feb 5, 2024
769f2ef
Merge branch 'master' into mv/capacity-tracker
vezenovm Feb 6, 2024
90d6317
Merge branch 'master' into mv/capacity-tracker
vezenovm Feb 6, 2024
67ea0d0
Merge branch 'master' into mv/capacity-tracker
TomAFrench Feb 7, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion compiler/noirc_evaluator/src/errors.rs
Original file line number Diff line number Diff line change
Expand Up @@ -177,7 +177,7 @@ impl RuntimeError {
_ => {
let message = self.to_string();
let location =
self.call_stack().back().expect("Expected RuntimeError to have a location");
self.call_stack().back().unwrap_or_else(|| panic!("Expected RuntimeError to have a location. Error message: {message}"));

Diagnostic::simple_error(message, String::new(), location.span)
}
Expand Down
6 changes: 3 additions & 3 deletions compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -679,11 +679,11 @@ impl Context {
instruction: InstructionId,
dfg: &DataFlowGraph,
index: ValueId,
array: ValueId,
array_id: ValueId,
store_value: Option<ValueId>,
) -> Result<bool, RuntimeError> {
let index_const = dfg.get_numeric_constant(index);
let value_type = dfg.type_of_value(array);
let value_type = dfg.type_of_value(array_id);
// Compiler sanity checks
assert!(
!value_type.is_nested_slice(),
Expand All @@ -693,7 +693,7 @@ impl Context {
unreachable!("ICE: expected array or slice type");
};

match self.convert_value(array, dfg) {
match self.convert_value(array_id, dfg) {
AcirValue::Var(acir_var, _) => {
return Err(RuntimeError::InternalError(InternalError::Unexpected {
expected: "an array value".to_string(),
Expand Down
9 changes: 8 additions & 1 deletion compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
use fxhash::FxHashMap as HashMap;
use std::{collections::VecDeque, rc::Rc};

use acvm::{acir::BlackBoxFunc, BlackBoxResolutionError, FieldElement};
Expand Down Expand Up @@ -318,6 +319,8 @@ fn simplify_slice_push_back(
for elem in &arguments[2..] {
slice.push_back(*elem);
}
let slice_size = slice.len();
let element_size = element_type.element_size();
let new_slice = dfg.make_array(slice, element_type);

let set_last_slice_value_instr =
Expand All @@ -326,7 +329,11 @@ fn simplify_slice_push_back(
.insert_instruction_and_results(set_last_slice_value_instr, block, None, call_stack)
.first();

let mut value_merger = ValueMerger::new(dfg, block, None, None);
let mut slice_sizes = HashMap::default();
slice_sizes.insert(set_last_slice_value, slice_size / element_size);
slice_sizes.insert(new_slice, slice_size / element_size);

let mut value_merger = ValueMerger::new(dfg, block, &mut slice_sizes);
let new_slice = value_merger.merge_values(
len_not_equals_capacity,
len_equals_capacity,
Expand Down
2 changes: 1 addition & 1 deletion compiler/noirc_evaluator/src/ssa/ir/types.rs
Original file line number Diff line number Diff line change
Expand Up @@ -120,7 +120,7 @@ impl Type {
}
Type::Slice(_) => true,
Type::Numeric(_) => false,
Type::Reference(_) => false,
Type::Reference(element) => element.contains_slice_element(),
Type::Function => false,
}
}
Expand Down
117 changes: 73 additions & 44 deletions compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs
Original file line number Diff line number Diff line change
Expand Up @@ -152,8 +152,10 @@ use crate::ssa::{
};

mod branch_analysis;
mod capacity_tracker;
pub(crate) mod value_merger;

use capacity_tracker::SliceCapacityTracker;
use value_merger::ValueMerger;

impl Ssa {
Expand Down Expand Up @@ -184,17 +186,6 @@ struct Context<'f> {
/// between inlining of branches.
store_values: HashMap<ValueId, Store>,

/// Maps an address to the old and new value of the element at that address
/// The difference between this map and store_values is that this stores
/// the old and new value of an element from the outer block whose jmpif
/// terminator is being flattened.
///
/// This map persists throughout the flattening process, where addresses
/// are overwritten as new stores are found. This overwriting is the desired behavior,
as we want the most up-to-date value to be stored at a given address as
/// we walk through blocks to flatten.
outer_block_stores: HashMap<ValueId, ValueId>,

/// Stores all allocations local to the current branch.
/// Since these branches are local to the current branch (ie. only defined within one branch of
/// an if expression), they should not be merged with their previous value or stored value in
Expand All @@ -209,6 +200,10 @@ struct Context<'f> {
/// condition. If we are under multiple conditions (a nested if), the topmost condition is
/// the most recent condition combined with all previous conditions via `And` instructions.
conditions: Vec<(BasicBlockId, ValueId)>,

/// Maps SSA array values with a slice type to their size.
/// This is maintained by appropriate calls to the `SliceCapacityTracker` and is used by the `ValueMerger`.
slice_sizes: HashMap<ValueId, usize>,
}

pub(crate) struct Store {
Expand Down Expand Up @@ -239,7 +234,7 @@ fn flatten_function_cfg(function: &mut Function) {
local_allocations: HashSet::new(),
branch_ends,
conditions: Vec::new(),
outer_block_stores: HashMap::default(),
slice_sizes: HashMap::default(),
};
context.flatten();
}
Expand All @@ -262,21 +257,18 @@ impl<'f> Context<'f> {
/// Returns the last block to be inlined. This is either the return block of the function or,
/// if self.conditions is not empty, the end block of the most recent condition.
fn handle_terminator(&mut self, block: BasicBlockId) -> BasicBlockId {
if let TerminatorInstruction::JmpIf { .. } =
self.inserter.function.dfg[block].unwrap_terminator()
{
// Find stores in the outer block and insert into the `outer_block_stores` map.
// Not using this map can lead to issues when attempting to merge slices.
// When inlining a branch end, only the then branch and the else branch are checked for stores.
// However, there are cases where we want to load a value that comes from the outer block
// that we are handling the terminator for here.
let instructions = self.inserter.function.dfg[block].instructions().to_vec();
for instruction in instructions {
let (instruction, _) = self.inserter.map_instruction(instruction);
if let Instruction::Store { address, value } = instruction {
self.outer_block_stores.insert(address, value);
}
}
// As we recursively flatten inner blocks, we need to track the slice information
// for the outer block before we start recursively inlining
let outer_block_instructions = self.inserter.function.dfg[block].instructions();
let mut capacity_tracker = SliceCapacityTracker::new(&self.inserter.function.dfg);
for instruction in outer_block_instructions {
let results = self.inserter.function.dfg.instruction_results(*instruction);
let instruction = &self.inserter.function.dfg[*instruction];
capacity_tracker.collect_slice_information(
instruction,
&mut self.slice_sizes,
results.to_vec(),
);
}

match self.inserter.function.dfg[block].unwrap_terminator() {
Expand Down Expand Up @@ -494,12 +486,16 @@ impl<'f> Context<'f> {
});

let block = self.inserter.function.entry_block();
let mut value_merger = ValueMerger::new(
&mut self.inserter.function.dfg,
block,
Some(&self.store_values),
Some(&self.outer_block_stores),
);

// Make sure we have tracked the slice capacities of any block arguments
let capacity_tracker = SliceCapacityTracker::new(&self.inserter.function.dfg);
for (then_arg, else_arg) in args.iter() {
capacity_tracker.compute_slice_capacity(*then_arg, &mut self.slice_sizes);
capacity_tracker.compute_slice_capacity(*else_arg, &mut self.slice_sizes);
}

let mut value_merger =
ValueMerger::new(&mut self.inserter.function.dfg, block, &mut self.slice_sizes);

// Cannot include this in the previous vecmap since it requires exclusive access to self
let args = vecmap(args, |(then_arg, else_arg)| {
Expand Down Expand Up @@ -538,18 +534,22 @@ impl<'f> Context<'f> {
}
}

// Most slice information is collected when instructions are inlined.
// We need to collect information on slice values here as we may possibly merge stores
// before any inlining occurs.
let capacity_tracker = SliceCapacityTracker::new(&self.inserter.function.dfg);
for (then_case, else_case, _) in new_map.values() {
capacity_tracker.compute_slice_capacity(*then_case, &mut self.slice_sizes);
capacity_tracker.compute_slice_capacity(*else_case, &mut self.slice_sizes);
}

let then_condition = then_branch.condition;
let else_condition = else_branch.condition;

let block = self.inserter.function.entry_block();

let mut value_merger = ValueMerger::new(
&mut self.inserter.function.dfg,
block,
Some(&self.store_values),
Some(&self.outer_block_stores),
);

let mut value_merger =
ValueMerger::new(&mut self.inserter.function.dfg, block, &mut self.slice_sizes);
// Merging must occur in a separate loop as we cannot borrow `self` as mutable while `value_merger` does
let mut new_values = HashMap::default();
for (address, (then_case, else_case, _)) in &new_map {
Expand All @@ -571,6 +571,16 @@ impl<'f> Context<'f> {
.insert(address, Store { old_value: *old_value, new_value: value });
}
}

// Collect any potential slice information on the stores we are merging
for (address, (_, _, _)) in &new_map {
let value = new_values[address];
let address = *address;
let instruction = Instruction::Store { address, value };

let mut capacity_tracker = SliceCapacityTracker::new(&self.inserter.function.dfg);
capacity_tracker.collect_slice_information(&instruction, &mut self.slice_sizes, vec![]);
}
}

fn remember_store(&mut self, address: ValueId, new_value: ValueId) {
Expand All @@ -579,8 +589,18 @@ impl<'f> Context<'f> {
store_value.new_value = new_value;
} else {
let load = Instruction::Load { address };

let load_type = Some(vec![self.inserter.function.dfg.type_of_value(new_value)]);
let old_value = self.insert_instruction_with_typevars(load, load_type).first();
let old_value =
self.insert_instruction_with_typevars(load.clone(), load_type).first();

// Need this or else we will be missing the previous value of a slice that we wish to merge
let mut capacity_tracker = SliceCapacityTracker::new(&self.inserter.function.dfg);
capacity_tracker.collect_slice_information(
&load,
&mut self.slice_sizes,
vec![old_value],
);

self.store_values.insert(address, Store { old_value, new_value });
}
Expand All @@ -602,8 +622,15 @@ impl<'f> Context<'f> {
// unnecessary, when removing it actually causes an aliasing/mutability error.
let instructions = self.inserter.function.dfg[destination].instructions().to_vec();

for instruction in instructions {
self.push_instruction(instruction);
for instruction in instructions.iter() {
let results = self.push_instruction(*instruction);
let (instruction, _) = self.inserter.map_instruction(*instruction);
let mut capacity_tracker = SliceCapacityTracker::new(&self.inserter.function.dfg);
capacity_tracker.collect_slice_information(
&instruction,
&mut self.slice_sizes,
results,
);
}

self.handle_terminator(destination)
Expand All @@ -615,7 +642,7 @@ impl<'f> Context<'f> {
/// As a result, the instruction that will be pushed will actually be a new instruction
/// with a different InstructionId from the original. The results of the given instruction
/// will also be mapped to the results of the new instruction.
fn push_instruction(&mut self, id: InstructionId) {
fn push_instruction(&mut self, id: InstructionId) -> Vec<ValueId> {
let (instruction, call_stack) = self.inserter.map_instruction(id);
let instruction = self.handle_instruction_side_effects(instruction, call_stack.clone());
let is_allocate = matches!(instruction, Instruction::Allocate);
Expand All @@ -628,6 +655,8 @@ impl<'f> Context<'f> {
if is_allocate {
self.local_allocations.insert(results.first());
}

results.results().into_owned()
}

/// If we are currently in a branch, we need to modify constrain instructions
Expand Down
Loading
Loading