Merge pull request #971 from vext01/gen-code
New codegen backend: Start emitting code.
ltratt authored Feb 21, 2024
2 parents df8e373 + 8d974eb commit 7c6b99e
Showing 9 changed files with 755 additions and 4 deletions.
7 changes: 4 additions & 3 deletions .buildbot.sh
@@ -96,9 +96,10 @@ done

 # Test with LLVM sanitisers
 rustup component add rust-src
-RUSTFLAGS="-Z sanitizer=address" cargo test \
-    -Z build-std \
-    --target x86_64-unknown-linux-gnu
+# FIXME: asan disabled for now: https://github.com/ykjit/yk/issues/981
+#RUSTFLAGS="-Z sanitizer=address" cargo test \
+#    -Z build-std \
+#    --target x86_64-unknown-linux-gnu
 # The thread sanitiser does have false positives (albeit much reduced by `-Z
 # build-std`), so we have to add a suppression file to avoid those stopping
 # this script from succeeding. This does mean that we might suppress some true
3 changes: 3 additions & 0 deletions ykrt/Cargo.toml
@@ -23,6 +23,8 @@ yktracec = { path = "../yktracec" }
static_assertions = "1.1.0"
typed-index-collections = "3.1.0"
thiserror = "1.0.56"
dynasmrt = "2.0.0"
iced-x86 = { version = "1.21.0", features = ["decoder", "std"] }

[dependencies.llvm-sys]
# note: using a git version to get llvm linkage features in llvm-sys (not in a
@@ -46,4 +48,5 @@ yk_jitstate_debug = []
yk_testing = []

[dev-dependencies]
fm = "0.2.2"
num-traits = "0.2.16"
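
The two new runtime dependencies are presumably what the new backend (not all of it is shown in the loaded portion of this diff) builds on: dynasmrt assembles x86_64 machine code into an executable buffer at run time, and iced-x86 decodes those bytes back into text for the debug/test-only `disassemble()` further down. For orientation only, here is a minimal standalone sketch of that round trip. It is not code from this commit, and it assumes iced-x86's default formatter features are enabled (this Cargo.toml only lists "decoder" and "std" explicitly).

use dynasmrt::{dynasm, DynasmApi};
use iced_x86::{Decoder, DecoderOptions, Formatter, Instruction, NasmFormatter};

fn main() {
    // Assemble a trivial `mov rax, 42; ret` sequence into an executable buffer.
    let mut asm = dynasmrt::x64::Assembler::new().unwrap();
    dynasm!(asm
        ; .arch x64
        ; mov rax, 42
        ; ret
    );
    let buf = asm.finalize().unwrap();

    // Decode the emitted bytes back into textual assembly, one line per instruction.
    let start = buf.ptr(dynasmrt::AssemblyOffset(0)) as u64;
    let mut decoder = Decoder::with_ip(64, &buf, start, DecoderOptions::NONE);
    let mut formatter = NasmFormatter::new();
    let mut instr = Instruction::default();
    while decoder.can_decode() {
        decoder.decode_out(&mut instr);
        let mut line = String::new();
        formatter.format(&instr, &mut line);
        println!("{:016x} {}", instr.ip(), line);
    }
}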
81 changes: 81 additions & 0 deletions ykrt/src/compile/jitc_yk/codegen/abs_stack.rs
@@ -0,0 +1,81 @@
//! The abstract stack.
/// This data structure keeps track of an abstract stack pointer for a JIT frame during code
/// generation. The abstract stack pointer is zero-based, so the stack pointer value also serves as
/// the size of the stack.
///
/// The implementation is platform agnostic: as the abstract stack gets bigger, the abstract stack
/// pointer grows upwards, even on architectures where the stack grows downwards.
#[derive(Debug, Default)]
pub(crate) struct AbstractStack(usize);

impl AbstractStack {
/// Aligns the abstract stack pointer to the specified number of bytes.
///
/// Returns the newly aligned stack pointer.
pub(crate) fn align(&mut self, to: usize) -> usize {
let rem = self.0 % to;
if rem != 0 {
self.0 += to - rem;
}
self.0
}

/// Makes the stack bigger by `nbytes` bytes.
///
/// Returns the new stack pointer.
pub(crate) fn grow(&mut self, nbytes: usize) -> usize {
self.0 += nbytes;
self.0
}

/// Returns the stack pointer value.
pub(crate) fn size(&self) -> usize {
self.0
}
}

#[cfg(test)]
mod tests {
use super::AbstractStack;

#[test]
fn grow() {
let mut s = AbstractStack::default();
assert_eq!(s.grow(8), 8);
assert_eq!(s.grow(8), 16);
assert_eq!(s.grow(1), 17);
assert_eq!(s.grow(0), 17);
assert_eq!(s.grow(1000), 1017);
}

#[test]
fn align() {
let mut s = AbstractStack::default();
for i in 1..100 {
assert_eq!(s.align(i), 0);
assert_eq!(s.align(i), 0);
}
for i in 1..100 {
s.grow(1);
assert_eq!(s.align(1), i);
assert_eq!(s.align(1), i);
}
assert_eq!(s.align(8), 104);
for i in 105..205 {
assert_eq!(s.align(i), i);
assert_eq!(s.align(i), i);
}
assert_eq!(s.align(12345678), 12345678);
assert_eq!(s.align(12345678), 12345678);
}

#[test]
fn size() {
let mut s = AbstractStack::default();
for i in 1..100 {
s.grow(1);
assert_eq!(s.size(), i);
}
}
}
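
For orientation, a short invented sketch of how a code generator might interleave these calls while laying out a frame; it uses only the `AbstractStack` API above:

    let mut stack = AbstractStack::default();
    stack.align(8);              // Still 0: nothing has been allocated yet.
    let first = stack.grow(8);   // first == 8: an 8-byte slot at abstract offsets [0, 8).
    let second = stack.grow(1);  // second == 9: a 1-byte slot at abstract offset 8.
    stack.align(16);             // Round the final frame size up to 16 bytes.
    assert_eq!(stack.size(), 16);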
50 changes: 50 additions & 0 deletions ykrt/src/compile/jitc_yk/codegen/mod.rs
@@ -0,0 +1,50 @@
//! The JIT's Code Generator.
// FIXME: eventually delete.
#![allow(dead_code)]

use super::{jit_ir, CompilationError};
use reg_alloc::RegisterAllocator;

mod abs_stack;
mod reg_alloc;
mod x86_64;

/// A trait that defines access to JIT compiled code.
pub(crate) trait CodeGenOutput {
/// Disassemble the code-genned trace into a string.
#[cfg(any(debug_assertions, test))]
fn disassemble(&self) -> String;
}

/// All code generators conform to this contract.
trait CodeGen<'a> {
/// Instantiate a code generator for the specified JIT module.
fn new(
jit_mod: &'a jit_ir::Module,
ra: &'a mut dyn RegisterAllocator,
) -> Result<Self, CompilationError>
where
Self: Sized;

/// Perform code generation.
fn codegen(self) -> Result<Box<dyn CodeGenOutput>, CompilationError>;
}

#[cfg(test)]
mod tests {
use super::CodeGenOutput;
use fm::FMatcher;

/// Test helper to use `fm` to match a disassembled trace.
pub(crate) fn match_asm(cgo: Box<dyn CodeGenOutput>, pattern: &str) {
let dis = cgo.disassemble();
match FMatcher::new(pattern).unwrap().matches(&dis) {
Ok(()) => (),
Err(e) => panic!(
"\n!!! Emitted code didn't match !!!\n\n{}\nFull asm:\n{}\n",
e, dis
),
}
}
}
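
To make the contract concrete, here is a hypothetical backend wired through both traits. The `Toy*` names are invented for illustration; the real backend is the `x86_64` module declared above. The sketch assumes it lives inside this `codegen` module so that `jit_ir`, `CompilationError` and `RegisterAllocator` are in scope.

struct ToyOutput {
    asm: String,
}

impl CodeGenOutput for ToyOutput {
    #[cfg(any(debug_assertions, test))]
    fn disassemble(&self) -> String {
        self.asm.clone()
    }
}

struct ToyBackend<'a> {
    jit_mod: &'a jit_ir::Module,
    ra: &'a mut dyn RegisterAllocator,
}

impl<'a> CodeGen<'a> for ToyBackend<'a> {
    fn new(
        jit_mod: &'a jit_ir::Module,
        ra: &'a mut dyn RegisterAllocator,
    ) -> Result<Self, CompilationError> {
        Ok(Self { jit_mod, ra })
    }

    fn codegen(self) -> Result<Box<dyn CodeGenOutput>, CompilationError> {
        // A real backend walks `self.jit_mod`'s instructions, asks `self.ra` where each
        // local lives, and emits machine code for each instruction.
        Ok(Box::new(ToyOutput { asm: String::new() }))
    }
}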
68 changes: 68 additions & 0 deletions ykrt/src/compile/jitc_yk/codegen/reg_alloc/mod.rs
@@ -0,0 +1,68 @@
//! Register allocation.
//!
//! This module:
//! - describes the generic interface to register allocators.
//! - contains concrete implementations of register allocators.
use super::{super::jit_ir, abs_stack::AbstractStack};

mod spill_alloc;
#[cfg(test)]
pub(crate) use spill_alloc::SpillAllocator;

/// Describes a local variable allocation.
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
pub(crate) enum LocalAlloc {
/// The local variable is on the stack.
Stack {
/// The offset (from the base pointer) of the allocation.
///
/// This is independent of which direction the stack grows. In other words, for
/// architectures where the stack grows downwards, you'd subtract this from the base
/// pointer to find the address of the allocation.
///
/// OPT: consider addressing relative to the stack pointer, thus freeing up the base
/// pointer for general purpose use.
frame_off: usize,
},
/// The local variable is in a register.
///
/// FIXME: unimplemented.
Register,
}

impl LocalAlloc {
/// Create a [Self::Stack] allocation.
pub(crate) fn new_stack(frame_off: usize) -> Self {
Self::Stack { frame_off }
}
}

/// Indicates the direction of stack growth.
pub(crate) enum StackDirection {
GrowsUp,
GrowsDown,
}

/// The API to register allocators.
///
/// Register allocators are responsible for assigning storage for local variables.
pub(crate) trait RegisterAllocator {
/// Creates a register allocator for a stack growing in the specified direction.
fn new(stack_dir: StackDirection) -> Self
where
Self: Sized;

/// Allocates `size` bytes of storage space for the local variable defined by the instruction
/// with index `local`.
fn allocate(
&mut self,
local: jit_ir::InstrIdx,
size: usize,
stack: &mut AbstractStack,
) -> LocalAlloc;

/// Return the allocation for the value computed by the instruction at the specified
/// instruction index.
fn allocation<'a>(&'a self, idx: jit_ir::InstrIdx) -> &'a LocalAlloc;
}
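
What varies per target is only how a backend turns a `frame_off` into an address. As the doc comment above says, on a downward-growing stack the offset is subtracted from the base pointer; a hedged sketch of that step (`addr_of` is an invented helper, not part of this interface):

/// Hypothetical helper: the runtime address of an allocation, given the frame's base
/// pointer, on a target whose stack grows downwards (e.g. x86_64).
fn addr_of(base_ptr: usize, alloc: &LocalAlloc) -> usize {
    match alloc {
        // On a downward-growing stack the slot lives below the base pointer.
        LocalAlloc::Stack { frame_off } => base_ptr - *frame_off,
        // FIXME: register allocations are not implemented yet.
        LocalAlloc::Register => todo!(),
    }
}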
142 changes: 142 additions & 0 deletions ykrt/src/compile/jitc_yk/codegen/reg_alloc/spill_alloc.rs
@@ -0,0 +1,142 @@
//! The spill allocator.
//!
//! This is a register allocator that always allocates to the stack, so in fact it's not much of a
//! register allocator at all.
use super::{
super::{abs_stack::AbstractStack, jit_ir},
LocalAlloc, RegisterAllocator, StackDirection,
};
use typed_index_collections::TiVec;

pub(crate) struct SpillAllocator {
allocs: TiVec<jit_ir::InstrIdx, LocalAlloc>,
stack_dir: StackDirection,
}

impl RegisterAllocator for SpillAllocator {
fn new(stack_dir: StackDirection) -> SpillAllocator {
Self {
allocs: Default::default(),
stack_dir,
}
}

fn allocate(
&mut self,
local: jit_ir::InstrIdx,
size: usize,
stack: &mut AbstractStack,
) -> LocalAlloc {
// Under the current design, there can't be gaps in [self.allocs] and local variable
// allocations happen sequentially. So the local we are currently allocating should be the
// next unallocated index.
debug_assert!(jit_ir::InstrIdx::new(self.allocs.len()).unwrap() == local);

// Align the stack to the size of the allocation.
//
// FIXME: perhaps we should align to the largest alignment of the constituent fields?
// To do this we need to first finish proper type sizes.
let post_align_off = stack.align(size);

// Make space for the allocation.
let post_grow_off = stack.grow(size);

// If the stack grows up, then the allocation's offset is the stack height *before* we've
// made space on the stack, otherwise it's the stack height *after*.
let alloc_off = match self.stack_dir {
StackDirection::GrowsUp => post_align_off,
StackDirection::GrowsDown => post_grow_off,
};

let alloc = LocalAlloc::new_stack(alloc_off);
self.allocs.push(alloc);
alloc
}

fn allocation<'a>(&'a self, idx: jit_ir::InstrIdx) -> &'a LocalAlloc {
&self.allocs[idx]
}
}

#[cfg(test)]
mod tests {
use crate::compile::jitc_yk::{
codegen::{
abs_stack::AbstractStack,
reg_alloc::{LocalAlloc, RegisterAllocator, SpillAllocator, StackDirection},
},
jit_ir::InstrIdx,
};

#[test]
fn grow_down() {
let mut stack = AbstractStack::default();
let mut sa = SpillAllocator::new(StackDirection::GrowsDown);

let idx = InstrIdx::new(0).unwrap();
sa.allocate(idx, 8, &mut stack);
debug_assert_eq!(stack.size(), 8);
debug_assert_eq!(sa.allocation(idx), &LocalAlloc::Stack { frame_off: 8 });

let idx = InstrIdx::new(1).unwrap();
sa.allocate(idx, 1, &mut stack);
debug_assert_eq!(stack.size(), 9);
debug_assert_eq!(sa.allocation(idx), &LocalAlloc::Stack { frame_off: 9 });
}

#[test]
fn grow_up() {
let mut stack = AbstractStack::default();
let mut sa = SpillAllocator::new(StackDirection::GrowsUp);

let idx = InstrIdx::new(0).unwrap();
sa.allocate(idx, 8, &mut stack);
debug_assert_eq!(stack.size(), 8);
debug_assert_eq!(sa.allocation(idx), &LocalAlloc::Stack { frame_off: 0 });

let idx = InstrIdx::new(1).unwrap();
sa.allocate(idx, 1, &mut stack);
debug_assert_eq!(stack.size(), 9);
debug_assert_eq!(sa.allocation(idx), &LocalAlloc::Stack { frame_off: 8 });
}

#[cfg(debug_assertions)]
#[should_panic]
#[test]
fn allocate_out_of_order() {
let mut stack = AbstractStack::default();
let mut sa = SpillAllocator::new(StackDirection::GrowsUp);
// Panics because the backing store for local allocations is a dense (non-sparse) vector
// and Local 0 hasn't been allocated yet.
sa.allocate(InstrIdx::new(1).unwrap(), 1, &mut stack);
}

#[test]
fn compose_alloc_and_align_down() {
let mut stack = AbstractStack::default();
let mut sa = SpillAllocator::new(StackDirection::GrowsDown);

sa.allocate(InstrIdx::new(0).unwrap(), 8, &mut stack);
stack.align(32);

let idx = InstrIdx::new(1).unwrap();
sa.allocate(idx, 1, &mut stack);
debug_assert_eq!(stack.size(), 33);
debug_assert_eq!(sa.allocation(idx), &LocalAlloc::Stack { frame_off: 33 });
}

#[test]
fn compose_alloc_and_align_up() {
let mut stack = AbstractStack::default();
let mut sa = SpillAllocator::new(StackDirection::GrowsUp);

sa.allocate(InstrIdx::new(0).unwrap(), 8, &mut stack);
stack.align(32);

let idx = InstrIdx::new(1).unwrap();
sa.allocate(idx, 1, &mut stack);
debug_assert_eq!(stack.size(), 33);
debug_assert_eq!(sa.allocation(idx), &LocalAlloc::Stack { frame_off: 32 });
}
}
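
The reason `GrowsDown` records the post-grow offset while `GrowsUp` records the pre-grow one is easiest to see with concrete numbers: on a downward stack with base pointer BP, the first 8-byte slot occupies [BP-8, BP) and is addressed as BP - 8, whereas on an upward stack it occupies [BP, BP+8) and is addressed as BP + 0. A short sketch restating that (mirroring the `grow_down`/`grow_up` tests above, and assuming the same imports):

    let idx = InstrIdx::new(0).unwrap();

    // Downward-growing stack: the 8-byte slot ends up at BP - 8.
    let mut stack = AbstractStack::default();
    let mut down = SpillAllocator::new(StackDirection::GrowsDown);
    assert_eq!(
        down.allocate(idx, 8, &mut stack),
        LocalAlloc::Stack { frame_off: 8 }
    );

    // Upward-growing stack: the same slot starts at BP + 0.
    let mut stack = AbstractStack::default();
    let mut up = SpillAllocator::new(StackDirection::GrowsUp);
    assert_eq!(
        up.allocate(idx, 8, &mut stack),
        LocalAlloc::Stack { frame_off: 0 }
    );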
[The remaining three changed files were not loaded and are not shown here.]
