Update trussed #34

Merged
2 commits merged on Mar 3, 2025
Changes from all commits
5 changes: 3 additions & 2 deletions Cargo.toml
@@ -58,7 +58,7 @@ trussed-fs-info = { version = "0.2.0", optional = true }
[dev-dependencies]
hex-literal = "0.4.0"
hmac = "0.12.0"
trussed = { version = "0.1.0", default-features = false, features = ["aes256-cbc", "clients-2", "crypto-client", "filesystem-client", "hmac-sha256", "virt", "x255"] }
trussed = { version = "0.1.0", default-features = false, features = ["aes256-cbc", "crypto-client", "filesystem-client", "hmac-sha256", "virt", "x255"] }

[features]
default = []
@@ -76,13 +76,14 @@ std = []
# used for delog
log-all = []
log-none = []
log-trace = []
log-info = []
log-debug = []
log-warn = []
log-error = []

[patch.crates-io]
trussed = { git = "https://github.com/trussed-dev/trussed.git", rev = "6bba8fde36d05c0227769eb63345744e87d84b2b" }
trussed = { git = "https://github.com/trussed-dev/trussed.git", rev = "5003249c3187dca841f83551ba625921611a5ace" }

trussed-chunked = { path = "extensions/chunked" }
trussed-hkdf = { path = "extensions/hkdf" }
40 changes: 23 additions & 17 deletions src/chunked/mod.rs
@@ -79,12 +79,12 @@ impl ExtensionImpl<ChunkedExtension> for super::StagingBackend {
let read_state = match &mut backend_ctx.chunked_io_state {
Some(ChunkedIoState::Read(read_state)) => read_state,
Some(ChunkedIoState::EncryptedRead(_)) => {
return read_encrypted_chunk(store, client_id, backend_ctx)
return read_encrypted_chunk(&store, client_id, backend_ctx)
}
_ => return Err(Error::MechanismNotAvailable),
};
let (data, len) = store::filestore_read_chunk(
store,
&store,
client_id,
&read_state.path,
read_state.location,
@@ -96,9 +96,9 @@ impl ExtensionImpl<ChunkedExtension> for super::StagingBackend {
Ok(reply::ReadChunk { data, len }.into())
}
ChunkedRequest::StartChunkedRead(request) => {
clear_chunked_state(store, client_id, backend_ctx)?;
clear_chunked_state(&store, client_id, backend_ctx)?;
let (data, len) = store::filestore_read_chunk(
store,
&store,
client_id,
&request.path,
request.location,
@@ -114,9 +114,9 @@ impl ExtensionImpl<ChunkedExtension> for super::StagingBackend {
ChunkedRequest::WriteChunk(request) => {
let is_last = !request.data.is_full();
if is_last {
write_last_chunk(store, client_id, backend_ctx, &request.data)?;
write_last_chunk(&store, client_id, backend_ctx, &request.data)?;
} else {
write_chunk(store, client_id, backend_ctx, &request.data)?;
write_chunk(&store, client_id, backend_ctx, &request.data)?;
}
Ok(reply::WriteChunk {}.into())
}
@@ -126,7 +126,7 @@ impl ExtensionImpl<ChunkedExtension> for super::StagingBackend {
return Ok(reply::AbortChunkedWrite { aborted: false }.into());
};
let aborted = store::abort_chunked_write(
store,
&store,
client_id,
&write_state.path,
write_state.location,
@@ -138,12 +138,18 @@ impl ExtensionImpl<ChunkedExtension> for super::StagingBackend {
path: request.path.clone(),
location: request.location,
}));
store::start_chunked_write(store, client_id, &request.path, request.location, &[])?;
store::start_chunked_write(
&store,
client_id,
&request.path,
request.location,
&[],
)?;
Ok(reply::StartChunkedWrite {}.into())
}
ChunkedRequest::PartialReadFile(request) => {
let (data, file_length) = store::partial_read_file(
store,
&store,
client_id,
&request.path,
request.location,
@@ -154,7 +160,7 @@ impl ExtensionImpl<ChunkedExtension> for super::StagingBackend {
}
ChunkedRequest::AppendFile(request) => {
let file_length = store::append_file(
store,
&store,
client_id,
&request.path,
request.location,
@@ -163,7 +169,7 @@ impl ExtensionImpl<ChunkedExtension> for super::StagingBackend {
Ok(reply::AppendFile { file_length }.into())
}
ChunkedRequest::StartEncryptedChunkedWrite(request) => {
clear_chunked_state(store, client_id, backend_ctx)?;
clear_chunked_state(&store, client_id, backend_ctx)?;
let key = keystore.load_key(
Secrecy::Secret,
Some(Kind::Symmetric(CHACHA8_KEY_LEN)),
@@ -179,7 +185,7 @@ impl ExtensionImpl<ChunkedExtension> for super::StagingBackend {
let aead = ChaCha8Poly1305::new((&*key.material).into());
let encryptor = EncryptorLE31::<ChaCha8Poly1305>::from_aead(aead, nonce);
store::start_chunked_write(
store,
&store,
client_id,
&request.path,
request.location,
@@ -194,7 +200,7 @@ impl ExtensionImpl<ChunkedExtension> for super::StagingBackend {
Ok(reply::StartEncryptedChunkedWrite {}.into())
}
ChunkedRequest::StartEncryptedChunkedRead(request) => {
clear_chunked_state(store, client_id, backend_ctx)?;
clear_chunked_state(&store, client_id, backend_ctx)?;
let key = keystore.load_key(
Secrecy::Secret,
Some(Kind::Symmetric(CHACHA8_KEY_LEN)),
@@ -220,7 +226,7 @@ impl ExtensionImpl<ChunkedExtension> for super::StagingBackend {
}

fn clear_chunked_state(
store: impl Store,
store: &impl Store,
client_id: &Path,
ctx: &mut StagingContext,
) -> Result<(), Error> {
@@ -240,7 +246,7 @@ fn clear_chunked_state(
}

fn write_chunk(
store: impl Store,
store: &impl Store,
client_id: &Path,
ctx: &mut StagingContext,
data: &Message,
@@ -279,7 +285,7 @@ fn write_chunk(
}

fn write_last_chunk(
store: impl Store,
store: &impl Store,
client_id: &Path,
ctx: &mut StagingContext,
data: &Message,
@@ -321,7 +327,7 @@ fn write_last_chunk(
}

fn read_encrypted_chunk(
store: impl Store,
store: &impl Store,
client_id: &Path,
ctx: &mut StagingContext,
) -> Result<ChunkedReply, Error> {
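Every helper in this file now borrows the store (`store: &impl Store`) instead of taking it by value, and the call sites pass `&store` accordingly. Below is a minimal standalone sketch of that by-value-to-by-reference pattern; `DemoStore` and `RamStore` are made-up stand-ins rather than trussed's actual `Store` trait, and the stated rationale (the updated trussed revision no longer handing out a `Copy`-able store handle) is an assumption, not part of this diff.

// Standalone illustration only: `DemoStore` is a hypothetical stand-in for a
// store handle that is cheap to borrow but not `Copy`.
trait DemoStore {
    fn read(&self, path: &str) -> Vec<u8>;
}

struct RamStore;

impl DemoStore for RamStore {
    fn read(&self, _path: &str) -> Vec<u8> {
        b"chunk".to_vec()
    }
}

// Before: `store: impl Store` consumed (or implicitly copied) the handle.
// After: `store: &impl Store` borrows it, so one handle can be threaded
// through several helpers without requiring `Copy`.
fn read_twice(store: &impl DemoStore, path: &str) -> (Vec<u8>, Vec<u8>) {
    (store.read(path), store.read(path))
}

fn main() {
    let store = RamStore;
    let (a, b) = read_twice(&store, "file");
    assert_eq!(a, b);
}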
20 changes: 10 additions & 10 deletions src/chunked/store.rs
@@ -54,7 +54,7 @@ pub fn fs_read_chunk<const N: usize>(
/// Reads contents from path in location of store.
#[inline(never)]
pub fn read_chunk<const N: usize>(
store: impl Store,
store: &impl Store,
location: Location,
path: &Path,
pos: OpenSeekFrom,
@@ -83,7 +83,7 @@ pub fn fs_write_chunk(
/// Writes contents to path in location of store.
#[inline(never)]
pub fn write_chunk(
store: impl Store,
store: &impl Store,
location: Location,
path: &Path,
contents: &[u8],
@@ -95,7 +95,7 @@ pub fn write_chunk(
}

pub fn move_file(
store: impl Store,
store: &impl Store,
from_location: Location,
from_path: &Path,
to_location: Location,
@@ -182,7 +182,7 @@ fn actual_path(client_id: &Path, client_path: &Path) -> Result<PathBuf, Error> {
}

pub fn start_chunked_write(
store: impl Store,
store: &impl Store,
client_id: &Path,
path: &PathBuf,
location: Location,
@@ -193,7 +193,7 @@ pub fn start_chunked_write(
}

pub fn filestore_write_chunk(
store: impl Store,
store: &impl Store,
client_id: &Path,
path: &Path,
location: Location,
@@ -204,7 +204,7 @@ pub fn filestore_write_chunk(
}

pub fn filestore_read_chunk<const N: usize>(
store: impl Store,
store: &impl Store,
client_id: &Path,
path: &PathBuf,
location: Location,
@@ -216,7 +216,7 @@ pub fn filestore_read_chunk<const N: usize>(
}

pub fn abort_chunked_write(
store: impl Store,
store: &impl Store,
client_id: &Path,
path: &PathBuf,
location: Location,
@@ -228,7 +228,7 @@ pub fn abort_chunked_write(
}

pub fn flush_chunks(
store: impl Store,
store: &impl Store,
client_id: &Path,
path: &PathBuf,
location: Location,
@@ -245,7 +245,7 @@ pub fn flush_chunks(
}

pub fn partial_read_file(
store: impl Store,
store: &impl Store,
client_id: &Path,
path: &PathBuf,
location: Location,
@@ -259,7 +259,7 @@ pub fn partial_read_file(
}

pub fn append_file(
store: impl Store,
store: &impl Store,
client_id: &Path,
path: &PathBuf,
location: Location,
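Aside from the same `&impl Store` signature change, the helpers in this module implement chunked reads and writes at a seek offset on the backing filesystem. As a rough std::io analogy (not the littlefs-backed trussed store API), this sketch shows the kind of result `filestore_read_chunk` reports: a chunk of at most N bytes starting at a position, plus the total file length.

use std::io::{Cursor, Read, Seek, SeekFrom};

// Rough standalone analogy: read up to N bytes starting at `pos` and report
// the total length, mirroring the (data, len) shape returned by the chunked
// read helpers.
fn read_chunk_at<const N: usize>(data: &[u8], pos: u64) -> std::io::Result<(Vec<u8>, usize)> {
    let mut cursor = Cursor::new(data);
    cursor.seek(SeekFrom::Start(pos))?;
    let mut buf = vec![0u8; N];
    let read = cursor.read(&mut buf)?;
    buf.truncate(read);
    Ok((buf, data.len()))
}

fn main() -> std::io::Result<()> {
    let contents = b"hello chunked world";
    let (chunk, len) = read_chunk_at::<5>(contents, 6)?;
    assert_eq!(&chunk, b"chunk");
    assert_eq!(len, contents.len());
    Ok(())
}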
17 changes: 8 additions & 9 deletions src/virt.rs
@@ -220,12 +220,11 @@ use trussed::{
Error, Platform,
};

pub type Client<S, D = Dispatcher> = virt::Client<S, D>;
pub type MultiClient<S, D = Dispatcher> = virt::MultiClient<S, D>;
pub type Client<'a, D = Dispatcher> = virt::Client<'a, D>;

pub fn with_client<S, R, F>(store: S, client_id: &str, f: F) -> R
where
F: FnOnce(Client<S>) -> R,
F: FnOnce(Client) -> R,
S: StoreProvider,
{
virt::with_platform(store, |platform| {
@@ -249,7 +248,7 @@ pub fn with_client_and_preserve<S, R, F>(
should_preserve_file: fn(&Path, location: Location) -> bool,
) -> R
where
F: FnOnce(Client<S>) -> R,
F: FnOnce(Client) -> R,
S: StoreProvider,
{
let mut dispatcher = Dispatcher::default();
@@ -276,7 +275,7 @@ pub fn with_clients_and_preserve<S, R, F, const N: usize>(
should_preserve_file: fn(&Path, location: Location) -> bool,
) -> R
where
F: FnOnce([MultiClient<S>; N]) -> R,
F: FnOnce([Client; N]) -> R,
S: StoreProvider,
{
let mut dispatcher = Dispatcher::default();
@@ -299,15 +298,15 @@ where

pub fn with_fs_client<P, R, F>(internal: P, client_id: &str, f: F) -> R
where
F: FnOnce(Client<Filesystem>) -> R,
F: FnOnce(Client) -> R,
P: Into<PathBuf>,
{
with_client(Filesystem::new(internal), client_id, f)
}

pub fn with_ram_client<R, F>(client_id: &str, f: F) -> R
where
F: FnOnce(Client<Ram>) -> R,
F: FnOnce(Client) -> R,
{
with_client(Ram::default(), client_id, f)
}
@@ -319,7 +318,7 @@ pub fn with_ram_client_and_preserve<R, F>(
f: F,
) -> R
where
F: FnOnce(Client<Ram>) -> R,
F: FnOnce(Client) -> R,
{
with_client_and_preserve(Ram::default(), client_id, f, should_preserve_file)
}
@@ -331,7 +330,7 @@ pub fn with_ram_clients_and_preserve<R, F, const N: usize>(
f: F,
) -> R
where
F: FnOnce([MultiClient<Ram>; N]) -> R,
F: FnOnce([Client; N]) -> R,
{
with_clients_and_preserve(Ram::default(), client_ids, f, should_preserve_file)
}