diff --git a/src/base.rs b/src/base.rs
index 342c12ffc4118..01f7ecd86f49f 100644
--- a/src/base.rs
+++ b/src/base.rs
@@ -110,6 +110,7 @@ pub(crate) fn trans_fn<'tcx, B: Backend + 'static>(
     context.compute_cfg();
     context.compute_domtree();
     context.eliminate_unreachable_code(cx.module.isa()).unwrap();
+    context.dce(cx.module.isa()).unwrap();
 
     // Define function
     let module = &mut cx.module;
@@ -315,18 +316,45 @@ fn codegen_fn_content(fx: &mut FunctionCx<'_, '_, impl Backend>) {
             TerminatorKind::SwitchInt {
                 discr,
-                switch_ty: _,
+                switch_ty,
                 values,
                 targets,
             } => {
                 let discr = trans_operand(fx, discr).load_scalar(fx);
-                let mut switch = ::cranelift_frontend::Switch::new();
-                for (i, value) in values.iter().enumerate() {
-                    let block = fx.get_block(targets[i]);
-                    switch.set_entry(*value, block);
+
+                if switch_ty.kind == fx.tcx.types.bool.kind {
+                    assert_eq!(targets.len(), 2);
+                    let then_block = fx.get_block(targets[0]);
+                    let else_block = fx.get_block(targets[1]);
+                    let test_zero = match **values {
+                        [0] => true,
+                        [1] => false,
+                        _ => unreachable!("{:?}", values),
+                    };
+
+                    let discr = crate::optimize::peephole::maybe_unwrap_bint(&mut fx.bcx, discr);
+                    let (discr, is_inverted) =
+                        crate::optimize::peephole::maybe_unwrap_bool_not(&mut fx.bcx, discr);
+                    let test_zero = if is_inverted { !test_zero } else { test_zero };
+                    let discr = crate::optimize::peephole::maybe_unwrap_bint(&mut fx.bcx, discr);
+                    let discr =
+                        crate::optimize::peephole::make_branchable_value(&mut fx.bcx, discr);
+                    if test_zero {
+                        fx.bcx.ins().brz(discr, then_block, &[]);
+                        fx.bcx.ins().jump(else_block, &[]);
+                    } else {
+                        fx.bcx.ins().brnz(discr, then_block, &[]);
+                        fx.bcx.ins().jump(else_block, &[]);
+                    }
+                } else {
+                    let mut switch = ::cranelift_frontend::Switch::new();
+                    for (i, value) in values.iter().enumerate() {
+                        let block = fx.get_block(targets[i]);
+                        switch.set_entry(*value, block);
+                    }
+                    let otherwise_block = fx.get_block(targets[targets.len() - 1]);
+                    switch.emit(&mut fx.bcx, discr, otherwise_block);
                 }
-                let otherwise_block = fx.get_block(targets[targets.len() - 1]);
-                switch.emit(&mut fx.bcx, discr, otherwise_block);
             }
             TerminatorKind::Call {
                 func,
diff --git a/src/optimize/mod.rs b/src/optimize/mod.rs
index 33d75b36d9651..ae9692790896c 100644
--- a/src/optimize/mod.rs
+++ b/src/optimize/mod.rs
@@ -1,6 +1,7 @@
 use crate::prelude::*;
 
 mod code_layout;
+pub(crate) mod peephole;
 mod stack2reg;
 
 pub(crate) fn optimize_function<'tcx>(
diff --git a/src/optimize/peephole.rs b/src/optimize/peephole.rs
new file mode 100644
index 0000000000000..f8e0f3af3d0ad
--- /dev/null
+++ b/src/optimize/peephole.rs
@@ -0,0 +1,83 @@
+//! Peephole optimizations that can be performed while creating clif ir.
+
+use cranelift_codegen::ir::{
+    condcodes::IntCC, types, InstBuilder, InstructionData, Opcode, Value, ValueDef,
+};
+use cranelift_frontend::FunctionBuilder;
+
+/// If the given value was produced by a `bint` instruction, return its input, otherwise return the
+/// given value.
+pub(crate) fn maybe_unwrap_bint(bcx: &mut FunctionBuilder<'_>, arg: Value) -> Value {
+    if let ValueDef::Result(arg_inst, 0) = bcx.func.dfg.value_def(arg) {
+        match bcx.func.dfg[arg_inst] {
+            InstructionData::Unary {
+                opcode: Opcode::Bint,
+                arg,
+            } => arg,
+            _ => arg,
+        }
+    } else {
+        arg
+    }
+}
+
+/// If the given value was produced by the lowering of `Rvalue::Not`, return its input and `true`;
+/// otherwise return the given value and `false`.
+pub(crate) fn maybe_unwrap_bool_not(bcx: &mut FunctionBuilder<'_>, arg: Value) -> (Value, bool) {
+    if let ValueDef::Result(arg_inst, 0) = bcx.func.dfg.value_def(arg) {
+        match bcx.func.dfg[arg_inst] {
+            // This is the lowering of `Rvalue::Not`
+            InstructionData::IntCompareImm {
+                opcode: Opcode::IcmpImm,
+                cond: IntCC::Equal,
+                arg,
+                imm,
+            } if imm.bits() == 0 => (arg, true),
+            _ => (arg, false),
+        }
+    } else {
+        (arg, false)
+    }
+}
+
+pub(crate) fn make_branchable_value(bcx: &mut FunctionBuilder<'_>, arg: Value) -> Value {
+    if bcx.func.dfg.value_type(arg).is_bool() {
+        return arg;
+    }
+
+    (|| {
+        let arg_inst = if let ValueDef::Result(arg_inst, 0) = bcx.func.dfg.value_def(arg) {
+            arg_inst
+        } else {
+            return None;
+        };
+
+        match bcx.func.dfg[arg_inst] {
+            // A load of a small integer (e.g. a `bool` stored as `i8`) can be widened while loading.
+            InstructionData::Load {
+                opcode: Opcode::Load,
+                arg: ptr,
+                flags,
+                offset,
+            } => {
+                // Using `load.i8 + uextend.i32` would legalize to `uload8 + ireduce.i8 +
+                // uextend.i32`. Just `uload8` is much faster.
+                match bcx.func.dfg.ctrl_typevar(arg_inst) {
+                    types::I8 => Some(bcx.ins().uload8(types::I32, flags, ptr, offset)),
+                    types::I16 => Some(bcx.ins().uload16(types::I32, flags, ptr, offset)),
+                    _ => None,
+                }
+            }
+            _ => None,
+        }
+    })()
+    .unwrap_or_else(|| {
+        match bcx.func.dfg.value_type(arg) {
+            types::I8 | types::I16 => {
+                // WORKAROUND for brz.i8 and brnz.i8 not yet being implemented
+                bcx.ins().uextend(types::I32, arg)
+            }
+            _ => arg,
+        }
+    })
+}
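
A minimal sketch of the kind of source pattern this fast path targets (the example below is not part of the patch; the function name and body are hypothetical): an `if` on a `bool`, possibly negated, lowers to a MIR `SwitchInt`, and the new code branches on its discriminant directly with `brz`/`brnz` after `maybe_unwrap_bint` and `maybe_unwrap_bool_not` have peeled off a preceding `bint` or `x == 0` comparison.

    // Hypothetical example, not part of the patch: `!cond` is lowered to an
    // `icmp_imm eq 0`, which `maybe_unwrap_bool_not` unwraps (flipping the
    // branch polarity) so the emitted branch tests `cond` directly instead of
    // going through `cranelift_frontend::Switch`.
    fn select(cond: bool, a: u32, b: u32) -> u32 {
        if !cond { a } else { b }
    }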