polkavm-linker: Fix R_RISCV_ADD* && R_RISCV_SUB* relocation
We do not calculate the final offset correctly because the range endpoints are reversed.

Also add a test to verify that the relocation works properly.
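
For illustration, a minimal sketch of the corrected computation (hypothetical names, not the linker's actual code): the value written at the relocation site is the target address minus the origin address, so building the range the other way round negates the offset.

    // Sketch only: an R_RISCV_ADD*/R_RISCV_SUB* pair resolves to
    // target - origin; with the range endpoints reversed, the same
    // end - start subtraction produced the (wrapped) negated offset.
    fn resolve_add_sub(origin_address: u64, target_address: u64) -> u64 {
        let range = origin_address..target_address;
        range.end.wrapping_sub(range.start)
    }

    fn main() {
        // A target 16 bytes past the origin resolves to +16.
        assert_eq!(resolve_add_sub(0x1_0000, 0x1_0010), 16);
    }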

Signed-off-by: Aman <[email protected]>
aman4150 committed Feb 5, 2025
1 parent 3e99c89 commit 54830f4
Showing 6 changed files with 120 additions and 25 deletions.
2 changes: 1 addition & 1 deletion crates/polkavm-linker/src/program_from_elf.rs
@@ -9334,7 +9334,7 @@ where
)));
};

let range = target_section_address.wrapping_add(target.offset)..origin_section_address.wrapping_add(origin.offset);
let range = origin_section_address.wrapping_add(origin.offset)..target_section_address.wrapping_add(target.offset);
let data = elf.section_data_mut(relocation_target.section_index);
let mut value = range.end.wrapping_sub(range.start);
match size {
85 changes: 61 additions & 24 deletions crates/polkavm/src/tests.rs
@@ -2044,10 +2044,9 @@ const TEST_BLOB_32_ELF_ZST: &[u8] = include_bytes!("../../../test-data/test-blob
const TEST_BLOB_64_ELF_ZST: &[u8] = include_bytes!("../../../test-data/test-blob_64.elf.zst");

impl TestInstance {
fn new(config: &Config, optimize: bool, is_64_bit: bool) -> Self {
fn new(config: &Config, elf: &'static [u8], optimize: bool, is_64_bit: bool) -> Self {
let _ = env_logger::try_init();
let blob = if is_64_bit { TEST_BLOB_64_ELF_ZST } else { TEST_BLOB_32_ELF_ZST };
let blob = get_blob_impl(optimize, is_64_bit, blob);
let blob = get_blob_impl(optimize, is_64_bit, elf);

let engine = Engine::new(config).unwrap();
let module = Module::from_blob(&engine, &Default::default(), blob).unwrap();
@@ -2126,14 +2125,16 @@ impl TestInstance {
}

fn test_blob_basic_test(config: Config, optimize: bool, is_64_bit: bool) {
let mut i = TestInstance::new(&config, optimize, is_64_bit);
let elf = if is_64_bit { TEST_BLOB_64_ELF_ZST } else { TEST_BLOB_32_ELF_ZST };
let mut i = TestInstance::new(&config, elf, optimize, is_64_bit);
assert_eq!(i.call::<(), u32>("push_one_to_global_vec", ()).unwrap(), 1);
assert_eq!(i.call::<(), u32>("push_one_to_global_vec", ()).unwrap(), 2);
assert_eq!(i.call::<(), u32>("push_one_to_global_vec", ()).unwrap(), 3);
}

fn test_blob_atomic_fetch_add(config: Config, optimize: bool, is_64_bit: bool) {
let mut i = TestInstance::new(&config, optimize, is_64_bit);
let elf = if is_64_bit { TEST_BLOB_64_ELF_ZST } else { TEST_BLOB_32_ELF_ZST };
let mut i = TestInstance::new(&config, elf, optimize, is_64_bit);
assert_eq!(i.call::<(u32,), u32>("atomic_fetch_add", (1,)).unwrap(), 0);
assert_eq!(i.call::<(u32,), u32>("atomic_fetch_add", (1,)).unwrap(), 1);
assert_eq!(i.call::<(u32,), u32>("atomic_fetch_add", (1,)).unwrap(), 2);
@@ -2144,7 +2145,8 @@ fn test_blob_atomic_fetch_add(config: Config, optimize: bool, is_64_bit: bool) {
}

fn test_blob_atomic_fetch_swap(config: Config, optimize: bool, is_64_bit: bool) {
let mut i = TestInstance::new(&config, optimize, is_64_bit);
let elf = if is_64_bit { TEST_BLOB_64_ELF_ZST } else { TEST_BLOB_32_ELF_ZST };
let mut i = TestInstance::new(&config, elf, optimize, is_64_bit);
assert_eq!(i.call::<(u32,), u32>("atomic_fetch_swap", (10,)).unwrap(), 0);
assert_eq!(i.call::<(u32,), u32>("atomic_fetch_swap", (100,)).unwrap(), 10);
assert_eq!(i.call::<(u32,), u32>("atomic_fetch_swap", (1000,)).unwrap(), 100);
@@ -2169,7 +2171,8 @@ fn test_blob_atomic_fetch_minmax(config: Config, optimize: bool, is_64_bit: bool
("atomic_fetch_min_unsigned", minu),
];

let mut i = TestInstance::new(&config, optimize, is_64_bit);
let elf = if is_64_bit { TEST_BLOB_64_ELF_ZST } else { TEST_BLOB_32_ELF_ZST };
let mut i = TestInstance::new(&config, elf, optimize, is_64_bit);
for (name, cb) in list {
for a in [-10, 0, 10] {
for b in [-10, 0, 10] {
@@ -2183,17 +2186,20 @@
}

fn test_blob_hostcall(config: Config, optimize: bool, is_64_bit: bool) {
let mut i = TestInstance::new(&config, optimize, is_64_bit);
let elf = if is_64_bit { TEST_BLOB_64_ELF_ZST } else { TEST_BLOB_32_ELF_ZST };
let mut i = TestInstance::new(&config, elf, optimize, is_64_bit);
assert_eq!(i.call::<(u32,), u32>("test_multiply_by_6", (10,)).unwrap(), 60);
}

fn test_blob_define_abi(config: Config, optimize: bool, is_64_bit: bool) {
let mut i = TestInstance::new(&config, optimize, is_64_bit);
let elf = if is_64_bit { TEST_BLOB_64_ELF_ZST } else { TEST_BLOB_32_ELF_ZST };
let mut i = TestInstance::new(&config, elf, optimize, is_64_bit);
assert!(i.call::<(), ()>("test_define_abi", ()).is_ok());
}

fn test_blob_input_registers(config: Config, optimize: bool, is_64_bit: bool) {
let mut i = TestInstance::new(&config, optimize, is_64_bit);
let elf = if is_64_bit { TEST_BLOB_64_ELF_ZST } else { TEST_BLOB_32_ELF_ZST };
let mut i = TestInstance::new(&config, elf, optimize, is_64_bit);
assert!(i.call::<(), ()>("test_input_registers", ()).is_ok());
}

@@ -2214,7 +2220,8 @@ fn test_blob_call_sbrk_from_host_function(config: Config, optimize: bool, is_64_
}

fn test_blob_program_memory_can_be_reused_and_cleared(config: Config, optimize: bool, is_64_bit: bool) {
let mut i = TestInstance::new(&config, optimize, is_64_bit);
let elf = if is_64_bit { TEST_BLOB_64_ELF_ZST } else { TEST_BLOB_32_ELF_ZST };
let mut i = TestInstance::new(&config, elf, optimize, is_64_bit);
let address = i.call::<(), u32>("get_global_address", ()).unwrap();

assert_eq!(i.instance.read_memory(address, 4).unwrap(), [0x00, 0x00, 0x00, 0x00]);
@@ -2233,7 +2240,8 @@ fn test_blob_program_memory_can_be_reused_and_cleared(config: Config, optimize:
}

fn test_blob_out_of_bounds_memory_access_generates_a_trap(config: Config, optimize: bool, is_64_bit: bool) {
let mut i = TestInstance::new(&config, optimize, is_64_bit);
let elf = if is_64_bit { TEST_BLOB_64_ELF_ZST } else { TEST_BLOB_32_ELF_ZST };
let mut i = TestInstance::new(&config, elf, optimize, is_64_bit);
let address = i.call::<(), u32>("get_global_address", ()).unwrap();
assert_eq!(i.call::<(u32,), u32>("read_u32", (address,)).unwrap(), 0);
i.call::<(), ()>("increment_global", ()).unwrap();
@@ -2246,7 +2254,8 @@ fn test_blob_out_of_bounds_memory_access_generates_a_trap(config: Config, optimi
}

fn test_blob_call_sbrk_impl(config: Config, optimize: bool, is_64_bit: bool, mut call_sbrk: impl FnMut(&mut TestInstance, u32) -> u32) {
let mut i = TestInstance::new(&config, optimize, is_64_bit);
let elf = if is_64_bit { TEST_BLOB_64_ELF_ZST } else { TEST_BLOB_32_ELF_ZST };
let mut i = TestInstance::new(&config, elf, optimize, is_64_bit);
let memory_map = i.module.memory_map().clone();
let heap_base = memory_map.heap_base();
let page_size = memory_map.page_size();
@@ -2301,7 +2310,8 @@ fn test_blob_call_sbrk_impl(config: Config, optimize: bool, is_64_bit: bool, mut
}

fn test_blob_add_u32(config: Config, optimize: bool, is_64_bit: bool) {
let mut i = TestInstance::new(&config, optimize, is_64_bit);
let elf = if is_64_bit { TEST_BLOB_64_ELF_ZST } else { TEST_BLOB_32_ELF_ZST };
let mut i = TestInstance::new(&config, elf, optimize, is_64_bit);
assert_eq!(i.call::<(u32, u32), u32>("add_u32", (1, 2,)).unwrap(), 3);
assert_eq!(i.instance.reg(Reg::A0), 3);

@@ -2318,7 +2328,8 @@ fn test_blob_add_u32(config: Config, optimize: bool, is_64_bit: bool) {
}

fn test_blob_add_u64(config: Config, optimize: bool, is_64_bit: bool) {
let mut i = TestInstance::new(&config, optimize, is_64_bit);
let elf = if is_64_bit { TEST_BLOB_64_ELF_ZST } else { TEST_BLOB_32_ELF_ZST };
let mut i = TestInstance::new(&config, elf, optimize, is_64_bit);
assert_eq!(i.call::<(u64, u64), u64>("add_u64", (1, 2,)).unwrap(), 3);
assert_eq!(i.instance.reg(Reg::A0), 3);
assert_eq!(
@@ -2328,19 +2339,22 @@ fn test_blob_add_u64(config: Config, optimize: bool, is_64_bit: bool) {
}

fn test_blob_xor_imm_u32(config: Config, optimize: bool, is_64_bit: bool) {
let mut i = TestInstance::new(&config, optimize, is_64_bit);
let elf = if is_64_bit { TEST_BLOB_64_ELF_ZST } else { TEST_BLOB_32_ELF_ZST };
let mut i = TestInstance::new(&config, elf, optimize, is_64_bit);
for value in [0, 0xaaaaaaaa, 0x55555555, 0x12345678, 0xffffffff] {
assert_eq!(i.call::<(u32,), u32>("xor_imm_u32", (value,)).unwrap(), value ^ 0xfb8f5c1e);
}
}

fn test_blob_branch_less_than_zero(config: Config, optimize: bool, is_64_bit: bool) {
let mut i = TestInstance::new(&config, optimize, is_64_bit);
let elf = if is_64_bit { TEST_BLOB_64_ELF_ZST } else { TEST_BLOB_32_ELF_ZST };
let mut i = TestInstance::new(&config, elf, optimize, is_64_bit);
i.call::<(), ()>("test_branch_less_than_zero", ()).unwrap();
}

fn test_blob_fetch_add_atomic_u64(config: Config, optimize: bool, is_64_bit: bool) {
let mut i = TestInstance::new(&config, optimize, is_64_bit);
let elf = if is_64_bit { TEST_BLOB_64_ELF_ZST } else { TEST_BLOB_32_ELF_ZST };
let mut i = TestInstance::new(&config, elf, optimize, is_64_bit);
assert_eq!(i.call::<(u64,), u64>("fetch_add_atomic_u64", (1,)).unwrap(), 0);
assert_eq!(i.call::<(u64,), u64>("fetch_add_atomic_u64", (0,)).unwrap(), 1);
assert_eq!(i.call::<(u64,), u64>("fetch_add_atomic_u64", (0,)).unwrap(), 1);
@@ -2349,22 +2363,26 @@ fn test_blob_fetch_add_atomic_u64(config: Config, optimize: bool, is_64_bit: boo
}

fn test_blob_cmov_if_zero_with_zero_reg(config: Config, optimize: bool, is_64_bit: bool) {
let mut i = TestInstance::new(&config, optimize, is_64_bit);
let elf = if is_64_bit { TEST_BLOB_64_ELF_ZST } else { TEST_BLOB_32_ELF_ZST };
let mut i = TestInstance::new(&config, elf, optimize, is_64_bit);
i.call::<(), ()>("cmov_if_zero_with_zero_reg", ()).unwrap();
}

fn test_blob_cmov_if_not_zero_with_zero_reg(config: Config, optimize: bool, is_64_bit: bool) {
let mut i = TestInstance::new(&config, optimize, is_64_bit);
let elf = if is_64_bit { TEST_BLOB_64_ELF_ZST } else { TEST_BLOB_32_ELF_ZST };
let mut i = TestInstance::new(&config, elf, optimize, is_64_bit);
i.call::<(), ()>("cmov_if_not_zero_with_zero_reg", ()).unwrap();
}

fn test_blob_min_stack_size(config: Config, optimize: bool, is_64_bit: bool) {
let i = TestInstance::new(&config, optimize, is_64_bit);
let elf = if is_64_bit { TEST_BLOB_64_ELF_ZST } else { TEST_BLOB_32_ELF_ZST };
let i = TestInstance::new(&config, elf, optimize, is_64_bit);
assert_eq!(i.instance.module().memory_map().stack_size(), 65536);
}

fn test_blob_negate_and_add(config: Config, optimize: bool, is_64_bit: bool) {
let mut i = TestInstance::new(&config, optimize, is_64_bit);
let elf = if is_64_bit { TEST_BLOB_64_ELF_ZST } else { TEST_BLOB_32_ELF_ZST };
let mut i = TestInstance::new(&config, elf, optimize, is_64_bit);
if !is_64_bit {
assert_eq!(i.call::<(u32, u32), u32>("negate_and_add", (123, 1,)).unwrap(), 15);
} else {
@@ -2373,12 +2391,14 @@ fn test_blob_negate_and_add(config: Config, optimize: bool, is_64_bit: bool) {
}

fn test_blob_return_tuple_from_import(config: Config, optimize: bool, is_64_bit: bool) {
let mut i = TestInstance::new(&config, optimize, is_64_bit);
let elf = if is_64_bit { TEST_BLOB_64_ELF_ZST } else { TEST_BLOB_32_ELF_ZST };
let mut i = TestInstance::new(&config, elf, optimize, is_64_bit);
i.call::<(), ()>("test_return_tuple", ()).unwrap();
}

fn test_blob_return_tuple_from_export(config: Config, optimize: bool, is_64_bit: bool) {
let mut i = TestInstance::new(&config, optimize, is_64_bit);
let elf = if is_64_bit { TEST_BLOB_64_ELF_ZST } else { TEST_BLOB_32_ELF_ZST };
let mut i = TestInstance::new(&config, elf, optimize, is_64_bit);
if is_64_bit {
let a0 = 0x123456789abcdefe_u64;
let a1 = 0x1122334455667788_u64;
@@ -2406,6 +2426,22 @@ fn test_blob_return_tuple_from_export(config: Config, optimize: bool, is_64_bit:
}
}

fn test_asm_reloc_add_sub(config: Config, optimize: bool, _is_64_bit: bool) {
const BLOB_64: &[u8] = include_bytes!("../../../guest-programs/asm-tests/output/reloc_add_sub_64.elf.zst");

let elf = BLOB_64;
let mut i = TestInstance::new(&config, elf, optimize, true);

let address = i.call::<(u32, ), u32>("get_string", (0, )).unwrap();
assert_eq!(i.instance.read_u32(address).unwrap(), 0x01010101);

let address = i.call::<(u32, ), u32>("get_string", (1, )).unwrap();
assert_eq!(i.instance.read_u32(address).unwrap(), 0x02020202);

let address = i.call::<(u32, ), u32>("get_string", (2, )).unwrap();
assert_eq!(i.instance.read_u32(address).unwrap(), 0x03030303);
}

fn basic_gas_metering(config: Config, gas_metering_kind: GasMeteringKind) {
let _ = env_logger::try_init();

@@ -3196,6 +3232,7 @@ run_test_blob_tests! {
test_blob_negate_and_add
test_blob_return_tuple_from_import
test_blob_return_tuple_from_export
test_asm_reloc_add_sub
}

macro_rules! assert_impl {
15 changes: 15 additions & 0 deletions guest-programs/asm-tests/build-asm-tests.sh
@@ -0,0 +1,15 @@
#!/bin/bash

set -euo pipefail
cd "${0%/*}/"

function build_asm_tests_64bit() {
output_path="output/$1.elf"
echo "> Building: '$1' (-> $output_path)"
riscv64-linux-gnu-as -fPIC -march "rv64ima_zifencei_zbb" -mabi="lp64" $1.S -o $output_path
zstd -f -q -19 -o $output_path.zst $output_path
chmod -x $output_path*
}

build_asm_tests_64bit "reloc_add_sub_64"

Binary file not shown.
Binary file not shown.
43 changes: 43 additions & 0 deletions guest-programs/asm-tests/reloc_add_sub_64.S
@@ -0,0 +1,43 @@
.global get_string

get_string:
slli a0, a0, 2
la a1, magic_table
mv a2, a1
add a1, a1, a0
lw a0, 0(a1)
add a0, a0, a2
ret

.pushsection .metadata,"",@progbits
_entry_point_name:
.asciz "get_string"

_metadata:
.byte 1
.word 0
.word 10
.quad _entry_point_name
.byte 1
.byte 1
.popsection

.pushsection .polkavm_exports,"R",@note
.byte 1
.quad _metadata
.quad get_string
.popsection

.pushsection .rodata.secrets
magic0: .word 0x01010101
magic1: .word 0x02020202
magic2: .word 0x03030303
.popsection

.pushsection .rodata.table
magic_table:
.word magic0 - magic_table
.word magic1 - magic_table
.word magic2 - magic_table
.popsection
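
A note on the test (my reading, not spelled out in the commit): each ".word magicN - magic_table" entry is a cross-section symbol difference, which the assembler emits as a paired R_RISCV_ADD32/R_RISCV_SUB32 relocation, so the table exercises the code path fixed above. A rough Rust equivalent of what get_string computes at run time (hypothetical names and signature):

    // table holds the linker-resolved magicN - magic_table offsets;
    // get_string adds the table's own address back to recover the
    // absolute address of the selected magic word.
    fn get_string(table_address: u32, table: &[u32; 3], index: usize) -> u32 {
        table_address.wrapping_add(table[index])
    }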
