Commit 4aaa8c9
wip
GCdePaula committed Jan 29, 2025
1 parent a098784 commit 4aaa8c9
Showing 31 changed files with 2,484 additions and 1,457 deletions.
551 changes: 305 additions & 246 deletions Cargo.lock

Large diffs are not rendered by default.

5 changes: 3 additions & 2 deletions Cargo.toml
@@ -72,12 +72,13 @@ cartesi-prt-core = { path = "prt/client-rs/core" }
 cartesi-rollups-contracts = "=2.0.0-rc.13"
 
 # eth
-alloy = { version = "0.8", features = ["sol-types", "contract", "network", "reqwest", "signers", "signer-local"] }
+alloy = { version = "0.8", features = ["contract", "network", "reqwest", "rpc-types", "signers", "signer-local", "sol-types"] }
 ruint = "1.12"
+tiny-keccak = { version = "2.0", features = ["keccak"] }
 
 # error handling
 anyhow = "1.0"
-thiserror = "1.0"
+thiserror = "2.0"
 
 # async
 async-recursion = "1"
3 changes: 1 addition & 2 deletions cartesi-rollups/node/blockchain-reader/Cargo.toml
@@ -16,7 +16,6 @@ cartesi-dave-contracts = { workspace = true }
 cartesi-rollups-contracts = { workspace = true }
 
 alloy = { workspace = true }
-alloy-rpc-types-eth = "0.8.0"
 async-recursion = { workspace = true }
 clap = { workspace = true }
 log = { workspace = true }
@@ -25,7 +24,7 @@ tokio = { workspace = true }
 num-traits = { workspace = true }
 
 [dev-dependencies]
-alloy = { workspace = true, features = ["node-bindings"] }
+alloy = { workspace = true, features = ["node-bindings", "rpc-types"] }
 cartesi-dave-merkle = { workspace = true }
 cartesi-prt-core = { workspace = true }
 cartesi-prt-contracts = { workspace = true }
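For context, the dropped `alloy-rpc-types-eth` dependency is covered by the workspace-level `rpc-types` feature on `alloy`, which re-exports the same types through the facade crate. A minimal sketch of the resulting import (illustrative only, not code from this repository):

```rust
use alloy::{primitives::B256, rpc::types::Topic};

fn main() {
    // A Topic filters one indexed event parameter; building one from a fixed
    // 32-byte word is enough to exercise the re-exported path.
    let topic: Topic = B256::ZERO.into();
    let _ = topic;
}
```

The previous import, `use alloy_rpc_types_eth::Topic;`, needed the standalone crate that this change removes.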
22 changes: 8 additions & 14 deletions cartesi-rollups/node/blockchain-reader/src/lib.rs
@@ -8,19 +8,18 @@ use alloy::{
     contract::{Error, Event},
     eips::BlockNumberOrTag::Finalized,
     hex::ToHexExt,
-    primitives::Address,
+    primitives::{Address, U256},
     providers::{
         network::primitives::BlockTransactionsKind, Provider, ProviderBuilder, RootProvider,
     },
+    rpc::types::Topic,
     sol_types::SolEvent,
     transports::http::{reqwest::Url, Client, Http},
 };
-use alloy_rpc_types_eth::Topic;
 use async_recursion::async_recursion;
 use clap::Parser;
 use error::BlockchainReaderError;
 use log::{info, trace};
-use num_traits::cast::ToPrimitive;
 use std::{
     iter::Peekable,
     marker::{Send, Sync},
@@ -167,11 +166,11 @@ where
             let epoch = Epoch {
                 epoch_number: e
                     .epochNumber
-                    .to_u64()
+                    .try_into()
                     .expect("fail to convert epoch number"),
                 input_index_boundary: e
                     .inputIndexUpperBound
-                    .to_u64()
+                    .try_into()
                     .expect("fail to convert epoch boundary"),
                 root_tournament: e.tournament.to_string(),
             };
@@ -257,12 +256,7 @@ where
         let mut inputs = vec![];
 
         while let Some(input_added) = input_events_peekable.peek() {
-            if input_added
-                .index
-                .to_u64()
-                .expect("fail to convert input index")
-                >= input_index_boundary
-            {
+            if input_added.index >= U256::from(input_index_boundary) {
                 break;
             }
             let input = Input {
@@ -319,9 +313,9 @@ impl<E: SolEvent + Send + Sync> EventReader<E> {
                 current_finalized,
            )
            .await
-            .map_err(|err_arr| ProviderErrors(err_arr))?;
+            .map_err(ProviderErrors)?;
 
-        return Ok(logs);
+        Ok(logs)
     }
 }
 
@@ -362,7 +356,7 @@ impl PartitionProvider {
         let mut e = Event::new_sol(&self.inner, read_from)
             .from_block(start_block)
             .to_block(end_block)
-            .event(&E::SIGNATURE);
+            .event(E::SIGNATURE);
 
         if let Some(t) = topic1 {
             e = e.topic1(t.clone());
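As an aside on the conversion pattern the rewritten reader relies on (a standalone sketch, not taken from this repository): alloy's `U256` supports fallible narrowing to native integers via `TryInto`, which replaces the earlier `num_traits::ToPrimitive` calls, and widening a native boundary with `U256::from` lets it be compared directly against an on-chain index.

```rust
use alloy::primitives::U256;

fn main() {
    // Fallible narrowing: fails only if the value does not fit in a u64.
    let epoch_number = U256::from(7u64);
    let as_u64: u64 = epoch_number
        .try_into()
        .expect("fail to convert epoch number");
    assert_eq!(as_u64, 7);

    // Widening the native boundary before comparing, as the input loop now does.
    let input_index_boundary: u64 = 10;
    let index = U256::from(3u64);
    assert!(index < U256::from(input_index_boundary));
}
```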
4 changes: 2 additions & 2 deletions common-rs/merkle/Cargo.toml
@@ -15,5 +15,5 @@ alloy = { workspace = true, features = ["sol-types"] }
 ruint = { workspace = true }
 
 hex = "0.4"
-sha3 = "0.10"
-thiserror = "1.0"
+tiny-keccak = { workspace = true }
+thiserror = { workspace = true }
50 changes: 43 additions & 7 deletions common-rs/merkle/src/digest/keccak.rs
@@ -1,24 +1,60 @@
 //! Keccak256 hash for the Digest Type. It's used to hash the data in the Digest.
-use sha3::{Digest as Keccak256Digest, Keccak256};
+use tiny_keccak::{Hasher, Keccak};
 
 use super::Digest;
 
 impl Digest {
     /// Computes the Keccak256 hash of the given data and returns a new Digest.
     pub fn from_data(data: &[u8]) -> Digest {
-        let mut keccak = Keccak256::new();
+        let mut keccak = Keccak::v256();
         keccak.update(data);
-        let digest: [u8; 32] = keccak.finalize().into();
+        let mut digest: [u8; 32] = [0; 32];
+        keccak.finalize(&mut digest);
         Digest::from(digest)
     }
 
     /// Joins the current Digest with another Digest to create a new Digest.
     pub fn join(&self, digest: &Digest) -> Digest {
-        let mut keccak = Keccak256::new();
-        keccak.update(self.data);
-        keccak.update(digest.data);
-        let digest: [u8; 32] = keccak.finalize().into();
+        let mut keccak = Keccak::v256();
+        keccak.update(&self.data);
+        keccak.update(&digest.data);
+        let mut digest: [u8; 32] = [0; 32];
+        keccak.finalize(&mut digest);
         Digest::from(digest)
     }
 }
 
+#[cfg(test)]
+mod tests {
+    use super::Digest;
+
+    fn assert_data_eq(expected_digest_hex: &str, digest: Digest) {
+        assert_eq!(
+            Digest::from_digest_hex(expected_digest_hex).expect("invalid hex"),
+            digest
+        );
+    }
+
+    #[test]
+    fn test_from_data() {
+        assert_data_eq(
+            "0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470",
+            Digest::from_data(&[]), // cast keccak ""
+        );
+
+        assert_data_eq(
+            "0x6228290203658fd4987e40cbb257cabf258f9c288cdee767eaba6b234a73a2f9",
+            Digest::from_data("bananas".as_bytes()), // cast keccak "bananas"
+        );
+    }
+
+    #[test]
+    fn test_join() {
+        assert_data_eq(
+            "0x4441036546894c6fcf905b48b722f6b149ec0902955a6445c63cfec478568268",
+            // cast keccak (cast concat-hex (cast keccak "minhas") (cast keccak "bananas"))
+            Digest::from_data("minhas".as_bytes()).join(&Digest::from_data("bananas".as_bytes())),
+        );
+    }
+}
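The API difference behind this rewrite: `sha3::Keccak256::finalize()` returns the digest, while `tiny_keccak`'s `Hasher::finalize` consumes the hasher and writes into a caller-supplied buffer. A standalone sketch of the new pattern (assuming only the `tiny-keccak` crate with the `keccak` feature, plus `hex` for display; not code from this repository):

```rust
use tiny_keccak::{Hasher, Keccak};

fn keccak256(data: &[u8]) -> [u8; 32] {
    let mut keccak = Keccak::v256();
    keccak.update(data);
    let mut output = [0u8; 32];
    // finalize takes the hasher by value and fills the output buffer.
    keccak.finalize(&mut output);
    output
}

fn main() {
    // Keccak-256 of the empty input, matching the `cast keccak ""` vector in the new test.
    assert_eq!(
        hex::encode(keccak256(&[])),
        "c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470"
    );
}
```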
6 changes: 2 additions & 4 deletions common-rs/merkle/src/digest/mod.rs
@@ -1,15 +1,13 @@
 //! Definition of the [Digest] type and its associated methods. A digest is the output of a hash
 //! function. It's used to identify the data in the MerkleTree.
-use alloy::sol_types::private::B256;
+use alloy::primitives::B256;
 use hex::FromHex;
 use std::fmt;
+use thiserror::Error;
 
 pub mod keccak;
 
-use hex;
-use thiserror::Error;
-
 const HASH_SIZE: usize = 32;
 
 #[derive(Error, Debug)]
2 changes: 1 addition & 1 deletion machine/emulator
Submodule emulator updated 208 files
5 changes: 3 additions & 2 deletions machine/rust-bindings/cartesi-machine-sys/Cargo.toml
@@ -26,15 +26,16 @@ copy_uarch = []
 # requires setting env vars LIBCARTESI_PATH and INCLUDECARTESI_PATH
 external_cartesi = []
 
-default = ["download_uarch"]
+# default = ["download_uarch"]
+default = ["build_uarch"] # TODO remove
 
 
 [dependencies]
 link-cplusplus = "1.0"
 
 
 [build-dependencies]
-bindgen = "0.69"
+bindgen = "0.71"
 cfg-if = "1.0"
 
 hex-literal = "0.4.1"
68 changes: 30 additions & 38 deletions machine/rust-bindings/cartesi-machine-sys/build.rs
@@ -1,27 +1,7 @@
 // (c) Cartesi and individual authors (see AUTHORS)
 // SPDX-License-Identifier: Apache-2.0 (see LICENSE)
 
 use std::{env, path::PathBuf, process::Command};
 
-mod feature_checks {
-    #[cfg(all(feature = "build_uarch", feature = "copy_uarch",))]
-    compile_error!("Features `build_uarch` and `copy_uarch` are mutually exclusive");
-
-    #[cfg(all(feature = "build_uarch", feature = "download_uarch"))]
-    compile_error!("Features `build_uarch` and `download_uarch` are mutually exclusive");
-
-    #[cfg(all(feature = "copy_uarch", feature = "download_uarch"))]
-    compile_error!("Features `copy_uarch`, and `download_uarch` are mutually exclusive");
-
-    #[cfg(not(any(
-        feature = "copy_uarch",
-        feature = "download_uarch",
-        feature = "build_uarch",
-        feature = "external_cartesi",
-    )))]
-    compile_error!("At least one of `build_uarch`, `copy_uarch`, `download_uarch`, and `external_cartesi` must be set");
-}
-
 fn main() {
     let out_path = PathBuf::from(env::var("OUT_DIR").unwrap());
 
@@ -62,6 +42,7 @@ fn main() {
     // Generate bindings
     //
 
+    // find headers
     #[allow(clippy::needless_late_init)]
     let include_path;
     cfg_if::cfg_if! {
@@ -75,32 +56,21 @@
         }
     };
 
-    // machine api
+    // generate machine api
     let machine_bindings = bindgen::Builder::default()
         .header(include_path.join("machine-c-api.h").to_str().unwrap())
+        .allowlist_item("^cm_.*")
+        .allowlist_item("^CM_.*")
+        .merge_extern_blocks(true)
+        .prepend_enum_name(false)
+        .translate_enum_integer_types(true)
         .generate()
         .expect("Unable to generate machine bindings");
 
-    // htif constants
-    let htif = bindgen::Builder::default()
-        .header(include_path.join("htif-defines.h").to_str().unwrap())
-        .generate()
-        .expect("Unable to generate htif bindings");
-
-    // pma constants
-    let pma = bindgen::Builder::default()
-        .header(include_path.join("pma-defines.h").to_str().unwrap())
-        .generate()
-        .expect("Unable to generate pma bindings");
-
     // Write the bindings to the `$OUT_DIR/bindings.rs` and `$OUT_DIR/htif.rs` files.
     machine_bindings
         .write_to_file(out_path.join("bindings.rs"))
         .expect("Couldn't write machine bindings");
-    htif.write_to_file(out_path.join("htif.rs"))
-        .expect("Couldn't write htif defines");
-    pma.write_to_file(out_path.join("pma.rs"))
-        .expect("Couldn't write pma defines");
 
     // Setup reruns
     println!("cargo:rerun-if-changed=build.rs");
@@ -120,6 +90,7 @@ mod build_cm {
         // Get uarch
         cfg_if::cfg_if! {
             if #[cfg(feature = "build_uarch")] {
+                // requires docker
                 ()
             } else if #[cfg(feature = "copy_uarch")] {
                 let uarch_path = machine_dir_path.join("uarch");
@@ -131,7 +102,9 @@
             }
         }
 
+        //
         // Build and link emulator
+        //
 
         // build dependencies
         Command::new("make")
@@ -282,7 +255,26 @@ mod build_cm {
     }
 }
 
+#[allow(dead_code)]
+mod feature_checks {
+    #[cfg(all(feature = "build_uarch", feature = "copy_uarch",))]
+    compile_error!("Features `build_uarch` and `copy_uarch` are mutually exclusive");
+
+    #[cfg(all(feature = "build_uarch", feature = "download_uarch"))]
+    compile_error!("Features `build_uarch` and `download_uarch` are mutually exclusive");
+
+    #[cfg(all(feature = "copy_uarch", feature = "download_uarch"))]
+    compile_error!("Features `copy_uarch`, and `download_uarch` are mutually exclusive");
+
+    #[cfg(not(any(
+        feature = "copy_uarch",
+        feature = "download_uarch",
+        feature = "build_uarch",
+        feature = "external_cartesi",
+    )))]
+    compile_error!("At least one of `build_uarch`, `copy_uarch`, `download_uarch`, and `external_cartesi` must be set");
+}
+
+#[allow(unused)]
 fn clean(path: &PathBuf) {
     // clean build artifacts
     Command::new("make")
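As a rough guide to the new builder options (a sketch under assumed names, not this repository's build script; `wrapper.h` is a placeholder header): `allowlist_item` restricts generation to items whose names match the given regexes, `merge_extern_blocks` emits a single merged `extern` block, `prepend_enum_name(false)` drops the enum-name prefix from variant constants, and `translate_enum_integer_types(true)` uses each enum's actual underlying integer type.

```rust
// build.rs sketch; `wrapper.h` is a placeholder, not part of this repository.
use std::{env, path::PathBuf};

fn main() {
    let out_path = PathBuf::from(env::var("OUT_DIR").unwrap());

    let bindings = bindgen::Builder::default()
        .header("wrapper.h")
        // Keep only items named cm_* / CM_* instead of everything reachable.
        .allowlist_item("^cm_.*")
        .allowlist_item("^CM_.*")
        .merge_extern_blocks(true)
        .prepend_enum_name(false)
        .translate_enum_integer_types(true)
        .generate()
        .expect("unable to generate bindings");

    bindings
        .write_to_file(out_path.join("bindings.rs"))
        .expect("couldn't write bindings");
}
```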
4 changes: 2 additions & 2 deletions machine/rust-bindings/cartesi-machine-sys/src/lib.rs
@@ -5,5 +5,5 @@
 
 extern crate link_cplusplus;
 include!(concat!(env!("OUT_DIR"), "/bindings.rs"));
-include!(concat!(env!("OUT_DIR"), "/htif.rs"));
-include!(concat!(env!("OUT_DIR"), "/pma.rs"));
+// include!(concat!(env!("OUT_DIR"), "/htif.rs"));
+// include!(concat!(env!("OUT_DIR"), "/pma.rs"));
11 changes: 9 additions & 2 deletions machine/rust-bindings/cartesi-machine/Cargo.toml
@@ -17,5 +17,12 @@ remote_machine = ["cartesi-machine-sys/remote_machine"]
 [dependencies]
 cartesi-machine-sys = { path = "../cartesi-machine-sys" }
 
-hex = "0.4.3"
-thiserror = "1.0"
+base64 = "0.22"
+derive_builder = "0.20"
+hex = "0.4"
+serde = { version = "1.0", features = ["derive"] }
+serde_json = "1.0"
+thiserror = "2.0"
+
+[dev-dependencies]
+tempfile = "3.16"
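The added `derive_builder`, `serde`, and `serde_json` dependencies point at builder-style, serializable configuration types; the sketch below shows the general pattern those crates enable, with a hypothetical `MachineConfig` struct whose fields are not this crate's API.

```rust
use derive_builder::Builder;
use serde::{Deserialize, Serialize};

// Hypothetical config type, for illustration only.
#[derive(Builder, Serialize, Deserialize, Debug, Clone, Default)]
#[builder(default, setter(into))]
struct MachineConfig {
    ram_length: u64,
    image_filename: String,
}

fn main() {
    // derive_builder generates MachineConfigBuilder with fluent setters.
    let config = MachineConfigBuilder::default()
        .ram_length(64u64 * 1024 * 1024)
        .image_filename("linux.bin")
        .build()
        .expect("failed to build config");

    // serde round-trip of the builder-produced value.
    let json = serde_json::to_string(&config).expect("serialize");
    println!("{json}");
}
```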