refactor!: Use different Id types #256

Merged: 18 commits, Sep 23, 2024 (showing changes from 14 commits)
5 changes: 2 additions & 3 deletions crates/backend/src/local.rs
@@ -171,8 +171,7 @@ impl ReadBackend for LocalBackend {
         .into_iter()
         .filter_map(walkdir::Result::ok)
         .filter(|e| e.file_type().is_file())
-        .map(|e| Id::from_hex(&e.file_name().to_string_lossy()))
-        .filter_map(std::result::Result::ok);
+        .filter_map(|e| e.file_name().to_string_lossy().parse::<Id>().ok());
     Ok(walker.collect())
 }

@@ -216,7 +215,7 @@ impl ReadBackend for LocalBackend {
         .filter(|e| e.file_type().is_file())
         .map(|e| -> Result<_> {
             Ok((
-                Id::from_hex(&e.file_name().to_string_lossy())?,
+                e.file_name().to_string_lossy().parse()?,
                 e.metadata()
                     .map_err(LocalBackendErrorKind::QueryingWalkDirMetadataFailed)?
                     .len()
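
Throughout this PR, calls to the `Id::from_hex(...)` constructor give way to the standard library's `FromStr`/`parse` idiom, enabled by the `from_str` derive added in Cargo.toml below. A minimal sketch of the idiom with a stand-in `Id` type (the real `Id` wraps a 32-byte hash and is not defined like this):

```rust
use std::str::FromStr;

// Stand-in for rustic_core's Id; the real type wraps a 32-byte hash.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct Id([u8; 32]);

#[derive(Debug)]
struct ParseIdError;

impl FromStr for Id {
    type Err = ParseIdError;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // Hex-decode exactly 64 ASCII characters into a 32-byte array.
        if s.len() != 64 || !s.is_ascii() {
            return Err(ParseIdError);
        }
        let mut bytes = [0u8; 32];
        for (i, chunk) in s.as_bytes().chunks(2).enumerate() {
            let hi = (chunk[0] as char).to_digit(16).ok_or(ParseIdError)?;
            let lo = (chunk[1] as char).to_digit(16).ok_or(ParseIdError)?;
            bytes[i] = (hi as u8) << 4 | lo as u8;
        }
        Ok(Id(bytes))
    }
}

fn main() {
    let name = "ab".repeat(32);
    // Old style: Id::from_hex(&name); new style works on any FromStr type:
    let id: Id = name.parse().expect("valid hex file name");
    // The .parse::<Id>().ok() form above fits filter_map directly.
    assert_eq!(Some(id), name.parse::<Id>().ok());
}
```

The same substitution recurs in the opendal, rest, backend, and cache changes below.
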
8 changes: 2 additions & 6 deletions crates/backend/src/opendal.rs
@@ -171,8 +171,7 @@ impl ReadBackend for OpenDALBackend {
         .call()?
         .into_iter()
         .filter(|e| e.metadata().is_file())
-        .map(|e| Id::from_hex(e.name()))
-        .filter_map(Result::ok)
+        .filter_map(|e| e.name().parse().ok())
         .collect())
 }

@@ -201,10 +200,7 @@ impl ReadBackend for OpenDALBackend {
         .into_iter()
         .filter(|e| e.metadata().is_file())
         .map(|e| -> Result<(Id, u32)> {
-            Ok((
-                Id::from_hex(e.name())?,
-                e.metadata().content_length().try_into()?,
-            ))
+            Ok((e.name().parse()?, e.metadata().content_length().try_into()?))
         })
         .filter_map(Result::ok)
         .collect())
2 changes: 1 addition & 1 deletion crates/backend/src/rest.rs
@@ -289,7 +289,7 @@ impl ReadBackend for RestBackend {
         .unwrap_or_default();
     Ok(list
         .into_iter()
-        .filter_map(|i| match Id::from_hex(&i.name) {
+        .filter_map(|i| match i.name.parse::<Id>() {
             Ok(id) => Some((id, i.size)),
             Err(_) => None,
         })
2 changes: 1 addition & 1 deletion crates/core/Cargo.toml
@@ -46,7 +46,7 @@ thiserror = "1.0.63"

 # macros
 derivative = "2.2.0"
-derive_more = { version = "1.0.0", features = ["add", "constructor", "display"] }
+derive_more = { version = "1.0.0", features = ["add", "constructor", "display", "from", "deref", "from_str"] }
 derive_setters = "0.1.6"

 # logging
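
The three added `derive_more` features power the new Id newtypes: `from` derives `From`, `deref` derives `Deref` to the inner `Id`, and `from_str` derives a delegating `FromStr`, which is what lets file names be `.parse()`d in the backend changes above. A hedged sketch of such a newtype (the names and the `u64` inner type are illustrative only, not the crate's actual definitions):

```rust
use derive_more::{Deref, Display, From, FromStr};

// Stand-in inner type; the crate's Id really wraps a 32-byte hash.
#[derive(Clone, Copy, Debug, Display, FromStr, PartialEq, Eq)]
struct Id(u64);

// Illustrative typed wrapper, roughly what a new_id!-style macro generates.
#[derive(Clone, Copy, Debug, Deref, Display, From, FromStr, PartialEq, Eq)]
struct SnapshotId(Id);

fn main() {
    // from_str: the newtype delegates parsing to the inner type.
    let id: SnapshotId = "42".parse().expect("valid");
    // deref: *id yields the inner Id; from: an Id converts back in.
    let roundtrip = SnapshotId::from(*id);
    assert_eq!(id, roundtrip);
}
```
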
2 changes: 1 addition & 1 deletion crates/core/src/archiver.rs
@@ -218,7 +218,7 @@ impl<'a, BE: DecryptFullBackend, I: ReadGlobalIndex> Archiver<'a, BE, I> {

     if !skip_identical_parent || Some(self.snap.tree) != self.parent.tree_id() {
         let id = self.be.save_file(&self.snap)?;
-        self.snap.id = id;
+        self.snap.id = id.into();
     }

     p.finish();
8 changes: 4 additions & 4 deletions crates/core/src/archiver/file_archiver.rs
@@ -13,7 +13,7 @@ use crate::{
     },
     blob::{
         packer::{Packer, PackerStats},
-        BlobType,
+        BlobId, BlobType, DataId,
     },
     cdc::rolling_hash::Rabin64,
     chunker::ChunkIter,

@@ -147,11 +147,11 @@ impl<'a, BE: DecryptWriteBackend, I: ReadGlobalIndex> FileArchiver<'a, BE, I> {
     let id = hash(&chunk);
     let size = chunk.len() as u64;

-    if !self.index.has_data(&id) {
-        self.data_packer.add(chunk.into(), id)?;
+    if !self.index.has_data(&DataId::from(id)) {
+        self.data_packer.add(chunk.into(), BlobId::from(id))?;
     }
     p.inc(size);
-    Ok((id, size))
+    Ok((DataId::from(id), size))
 })
 .collect::<RusticResult<_>>()?;
11 changes: 5 additions & 6 deletions crates/core/src/archiver/parent.rs
@@ -8,9 +8,8 @@ use log::warn;
 use crate::{
     archiver::tree::TreeType,
     backend::{decrypt::DecryptReadBackend, node::Node},
-    blob::tree::Tree,
+    blob::tree::{Tree, TreeId},
     error::{ArchiverErrorKind, RusticResult},
-    id::Id,
     index::ReadGlobalIndex,
 };

@@ -19,13 +18,13 @@ use crate::{
 /// # Type Parameters
 ///
 /// * `O` - The type of the `TreeType`.
-pub(crate) type ItemWithParent<O> = TreeType<(O, ParentResult<()>), ParentResult<Id>>;
+pub(crate) type ItemWithParent<O> = TreeType<(O, ParentResult<()>), ParentResult<TreeId>>;

 /// The `Parent` is responsible for finding the parent tree of a given tree.
 #[derive(Debug)]
 pub struct Parent {
     /// The tree id of the parent tree.
-    tree_id: Option<Id>,
+    tree_id: Option<TreeId>,
     /// The parent tree.
     tree: Option<Tree>,
     /// The current node index.

@@ -92,7 +91,7 @@ impl Parent {
     pub(crate) fn new(
         be: &impl DecryptReadBackend,
         index: &impl ReadGlobalIndex,
-        tree_id: Option<Id>,
+        tree_id: Option<TreeId>,
         ignore_ctime: bool,
         ignore_inode: bool,
     ) -> Self {

@@ -235,7 +234,7 @@ impl Parent {
 }

 // TODO: add documentation!
-pub(crate) fn tree_id(&self) -> Option<Id> {
+pub(crate) fn tree_id(&self) -> Option<TreeId> {
     self.tree_id
 }

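
Switching `Parent` from `Id` to `TreeId` is where the refactor pays off: mixing Id domains now fails at compile time instead of surfacing as a confusing runtime lookup failure. A tiny illustrative sketch (types simplified, not the crate's definitions):

```rust
// Illustrative newtypes; the crate generates its real ones via new_id!.
#[derive(Clone, Copy, Debug)]
struct TreeId(u64);
#[derive(Clone, Copy, Debug)]
struct DataId(u64);

fn parent_tree(id: TreeId) -> Option<TreeId> {
    Some(id) // stand-in body
}

fn main() {
    let data = DataId(7);
    let tree = TreeId(7);
    // parent_tree(data); // error[E0308]: expected `TreeId`, found `DataId`
    let _ = parent_tree(tree); // only tree ids are accepted
    let _ = data;
}
```
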
19 changes: 11 additions & 8 deletions crates/core/src/archiver/tree_archiver.rs
@@ -6,14 +6,17 @@ use log::{debug, trace};
 use crate::{
     archiver::{parent::ParentResult, tree::TreeType},
     backend::{decrypt::DecryptWriteBackend, node::Node},
-    blob::{packer::Packer, tree::Tree, BlobType},
+    blob::{
+        packer::Packer,
+        tree::{Tree, TreeId},
+        BlobType,
+    },
     error::{ArchiverErrorKind, RusticResult},
-    id::Id,
     index::{indexer::SharedIndexer, ReadGlobalIndex},
     repofile::{configfile::ConfigFile, snapshotfile::SnapshotSummary},
 };

-pub(crate) type TreeItem = TreeType<(ParentResult<()>, u64), ParentResult<Id>>;
+pub(crate) type TreeItem = TreeType<(ParentResult<()>, u64), ParentResult<TreeId>>;

 /// The `TreeArchiver` is responsible for archiving trees.
 ///

@@ -27,7 +30,7 @@ pub(crate) struct TreeArchiver<'a, BE: DecryptWriteBackend, I: ReadGlobalIndex>
     /// The current tree.
     tree: Tree,
     /// The stack of trees.
-    stack: Vec<(PathBuf, Node, ParentResult<Id>, Tree)>,
+    stack: Vec<(PathBuf, Node, ParentResult<TreeId>, Tree)>,
     /// The index to read from.
     index: &'a I,
     /// The packer to write to.

@@ -168,7 +171,7 @@ impl<'a, BE: DecryptWriteBackend, I: ReadGlobalIndex> TreeArchiver<'a, BE, I> {
     /// The id of the tree.
     ///
     /// [`PackerErrorKind::SendingCrossbeamMessageFailed`]: crate::error::PackerErrorKind::SendingCrossbeamMessageFailed
-    fn backup_tree(&mut self, path: &Path, parent: &ParentResult<Id>) -> RusticResult<Id> {
+    fn backup_tree(&mut self, path: &Path, parent: &ParentResult<TreeId>) -> RusticResult<TreeId> {
         let (chunk, id) = self.tree.serialize()?;
         let dirsize = chunk.len() as u64;
         let dirsize_bytes = ByteSize(dirsize).to_string_as(true);

@@ -193,7 +196,7 @@ impl<'a, BE: DecryptWriteBackend, I: ReadGlobalIndex> TreeArchiver<'a, BE, I> {
     }

     if !self.index.has_tree(&id) {
-        self.tree_packer.add(chunk.into(), id)?;
+        self.tree_packer.add(chunk.into(), id.into())?;
     }
     Ok(id)
 }

@@ -219,8 +222,8 @@ impl<'a, BE: DecryptWriteBackend, I: ReadGlobalIndex> TreeArchiver<'a, BE, I> {
     /// [`PackerErrorKind::SendingCrossbeamMessageFailed`]: crate::error::PackerErrorKind::SendingCrossbeamMessageFailed
     pub(crate) fn finalize(
         mut self,
-        parent_tree: Option<Id>,
-    ) -> RusticResult<(Id, SnapshotSummary)> {
+        parent_tree: Option<TreeId>,
+    ) -> RusticResult<(TreeId, SnapshotSummary)> {
         let parent = parent_tree.map_or(ParentResult::NotFound, ParentResult::Matched);
         let id = self.backup_tree(&PathBuf::new(), &parent)?;
         let stats = self.tree_packer.finalize()?;
2 changes: 1 addition & 1 deletion crates/core/src/backend.rs
@@ -277,7 +277,7 @@ pub trait FindInBackend: ReadBackend {
     /// [`BackendAccessErrorKind::IdNotUnique`]: crate::error::BackendAccessErrorKind::IdNotUnique
     fn find_ids<T: AsRef<str>>(&self, tpe: FileType, ids: &[T]) -> RusticResult<Vec<Id>> {
         ids.iter()
-            .map(|id| Id::from_hex(id.as_ref()))
+            .map(|id| id.as_ref().parse())
             .collect::<RusticResult<Vec<_>>>()
             .or_else(|err|{
                 trace!("no valid IDs given: {err}, searching for ID starting with given strings instead");
5 changes: 3 additions & 2 deletions crates/core/src/backend/cache.rs
@@ -16,6 +16,7 @@ use crate::{
     backend::{FileType, ReadBackend, WriteBackend},
     error::{CacheBackendErrorKind, RusticResult},
     id::Id,
+    repofile::configfile::RepositoryId,
 };

 /// Backend that caches data.

@@ -235,7 +236,7 @@ impl Cache {
     ///
     /// [`CacheBackendErrorKind::NoCacheDirectory`]: crate::error::CacheBackendErrorKind::NoCacheDirectory
     /// [`CacheBackendErrorKind::FromIoError`]: crate::error::CacheBackendErrorKind::FromIoError
-    pub fn new(id: Id, path: Option<PathBuf>) -> RusticResult<Self> {
+    pub fn new(id: RepositoryId, path: Option<PathBuf>) -> RusticResult<Self> {
         let mut path = path.unwrap_or({
             let mut dir = cache_dir().ok_or_else(|| CacheBackendErrorKind::NoCacheDirectory)?;
             dir.push("rustic");

@@ -318,7 +319,7 @@ impl Cache {
         })
         .map(|e| {
             (
-                Id::from_hex(e.file_name().to_str().unwrap()).unwrap(),
+                e.file_name().to_str().unwrap().parse().unwrap(),
                 // handle errors in metadata by returning a size of 0
                 e.metadata().map_or(0, |m| m.len().try_into().unwrap_or(0)),
             )
22 changes: 8 additions & 14 deletions crates/core/src/backend/decrypt.rs
@@ -19,7 +19,7 @@ use crate::{
     crypto::{hasher::hash, CryptoKey},
     error::{CryptBackendErrorKind, RusticErrorKind},
     id::Id,
-    repofile::RepoFile,
+    repofile::{RepoFile, RepoId},
     Progress, RusticResult,
 };

@@ -31,6 +31,8 @@ pub trait DecryptFullBackend: DecryptWriteBackend + DecryptReadBackend {}

 impl<T: DecryptWriteBackend + DecryptReadBackend> DecryptFullBackend for T {}

+pub type StreamResult<Id, F> = RusticResult<Receiver<RusticResult<(Id, F)>>>;
+
 pub trait DecryptReadBackend: ReadBackend + Clone + 'static {
     /// Decrypts the given data.
     ///

@@ -142,10 +144,7 @@ pub trait DecryptReadBackend: ReadBackend + Clone + 'static {
     /// # Errors
     ///
     /// If the files could not be read.
-    fn stream_all<F: RepoFile>(
-        &self,
-        p: &impl Progress,
-    ) -> RusticResult<Receiver<RusticResult<(Id, F)>>> {
+    fn stream_all<F: RepoFile>(&self, p: &impl Progress) -> StreamResult<F::Id, F> {
         let list = self.list(F::TYPE).map_err(RusticErrorKind::Backend)?;
         self.stream_list(&list, p)
     }

@@ -162,17 +161,13 @@ pub trait DecryptReadBackend: ReadBackend + Clone + 'static {
     /// # Errors
     ///
     /// If the files could not be read.
-    fn stream_list<F: RepoFile>(
-        &self,
-        list: &[Id],
-        p: &impl Progress,
-    ) -> RusticResult<Receiver<RusticResult<(Id, F)>>> {
+    fn stream_list<F: RepoFile>(&self, list: &[Id], p: &impl Progress) -> StreamResult<F::Id, F> {
         p.set_length(list.len() as u64);
         let (tx, rx) = unbounded();

         list.into_par_iter()
             .for_each_with((self, p, tx), |(be, p, tx), id| {
-                let file = be.get_file::<F>(id).map(|file| (*id, file));
+                let file = be.get_file::<F>(id).map(|file| (F::Id::from(*id), file));
                 p.inc(1);
                 tx.send(file).unwrap();
             });

@@ -311,17 +306,16 @@ pub trait DecryptWriteBackend: WriteBackend + Clone + 'static {
     /// # Panics
     ///
     /// If the files could not be deleted.
-    fn delete_list<'a, I: ExactSizeIterator<Item = &'a Id> + Send>(
+    fn delete_list<'a, ID: RepoId, I: ExactSizeIterator<Item = &'a ID> + Send>(
         &self,
-        tpe: FileType,
         cacheable: bool,
         list: I,
         p: impl Progress,
     ) -> RusticResult<()> {
         p.set_length(list.len() as u64);
         list.par_bridge().try_for_each(|id| -> RusticResult<_> {
             // TODO: Don't panic on file not being able to be deleted.
-            self.remove(tpe, id, cacheable).unwrap();
+            self.remove(ID::TYPE, id, cacheable).unwrap();
             p.inc(1);
             Ok(())
         })?;
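
The `F::Id` projection in `StreamResult` implies that `RepoFile` now carries an associated Id type convertible from a raw `Id`. The exact trait shape is not visible in this diff, so the following is an assumption-laden sketch of that relationship, not the crate's definition:

```rust
// Stand-in raw Id; the real one wraps a 32-byte hash.
#[derive(Clone, Copy, Debug)]
struct Id(u64);

// Assumed shape after this PR: each repo file kind names its own
// typed Id, constructible from a raw Id (here, `From<Id>` refers to
// the raw struct above, not to the associated type).
trait RepoFile {
    type Id: From<Id> + Copy;
}

#[derive(Clone, Copy, Debug)]
struct SnapshotId(Id);

impl From<Id> for SnapshotId {
    fn from(id: Id) -> Self {
        Self(id)
    }
}

struct SnapshotFile;

impl RepoFile for SnapshotFile {
    type Id = SnapshotId;
}

// Mirrors the `F::Id::from(*id)` call in stream_list above.
fn typed_id<F: RepoFile>(raw: Id) -> F::Id {
    F::Id::from(raw)
}

fn main() {
    let t: SnapshotId = typed_id::<SnapshotFile>(Id(1));
    println!("{t:?}");
}
```
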
6 changes: 3 additions & 3 deletions crates/core/src/backend/node.rs
@@ -27,7 +27,7 @@ use serde_with::{
 #[cfg(not(windows))]
 use crate::error::NodeErrorKind;

-use crate::id::Id;
+use crate::blob::{tree::TreeId, DataId};

 #[derive(
     Default, Serialize, Deserialize, Clone, Debug, PartialEq, Eq, Constructor, PartialOrd, Ord,

@@ -53,14 +53,14 @@ pub struct Node {
     /// # Note
     ///
     /// This should be only set for regular files.
-    pub content: Option<Vec<Id>>,
+    pub content: Option<Vec<DataId>>,
     #[serde(default, skip_serializing_if = "Option::is_none")]
     /// Subtree of the Node.
     ///
     /// # Note
     ///
     /// This should be only set for directories. (TODO: Check if this is correct)
-    pub subtree: Option<Id>,
+    pub subtree: Option<TreeId>,
 }

 #[serde_as]
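
Since `Node` is (de)serialized into repository metadata, the typed fields only preserve the on-disk format if `DataId`/`TreeId` serialize exactly as the plain hex `Id` did; presumably the `new_id!` macro emits a transparent serde representation. A sketch of that assumption, using `String` as a stand-in for the hex-encoded hash:

```rust
use serde::{Deserialize, Serialize};

// Assumption: Id newtypes are serde-transparent, so on-disk JSON is unchanged.
#[derive(Debug, Serialize, Deserialize)]
#[serde(transparent)]
struct TreeId(String); // String stands in for the hex-encoded hash

#[derive(Debug, Serialize, Deserialize)]
struct Node {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    subtree: Option<TreeId>,
}

fn main() {
    let node: Node = serde_json::from_str(r#"{"subtree":"abcd"}"#).unwrap();
    // Round-trips to the same shape a plain Id field would have produced.
    println!("{}", serde_json::to_string(&node).unwrap());
}
```
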
37 changes: 33 additions & 4 deletions crates/core/src/blob.rs
@@ -5,7 +5,7 @@
 use enum_map::{Enum, EnumMap};
 use serde_derive::{Deserialize, Serialize};

-use crate::id::Id;
+use crate::new_id;

 /// All [`BlobType`]s which are supported by the repository
 pub const ALL_BLOB_TYPES: [BlobType; 2] = [BlobType::Tree, BlobType::Data];

@@ -66,17 +66,46 @@
     }
 }

+new_id!(BlobId, "blob");
+
+pub trait PackedId: Copy + Into<BlobId> + From<BlobId> {
+    const TYPE: BlobType;
+}
+
+#[macro_export]
+/// Generate newtypes for `Id`s identifying packed blobs
+macro_rules! new_blobid {
+    ($a:ident, $b: expr) => {
+        $crate::new_id!($a, concat!("blob of type", stringify!($b)));
+        impl From<$crate::blob::BlobId> for $a {
+            fn from(id: $crate::blob::BlobId) -> Self {
+                (*id).into()
+            }
+        }
+        impl From<$a> for $crate::blob::BlobId {
+            fn from(id: $a) -> Self {
+                (*id).into()
+            }
+        }
+        impl $crate::blob::PackedId for $a {
+            const TYPE: $crate::blob::BlobType = $b;
+        }
+    };
+}
+
+new_blobid!(DataId, BlobType::Data);
+
 /// A `Blob` is a file that is stored in the backend.
 ///
 /// It can be a `tree` or a `data` blob.
 ///
 /// A `tree` blob is a file that contains a list of other blobs.
 /// A `data` blob is a file that contains the actual data.
-#[derive(Debug, PartialEq, Eq, Clone, Constructor)]
-pub(crate) struct Blob {
+#[derive(Debug, PartialEq, Eq, Copy, Clone, Constructor)]
+pub struct Blob {
     /// The type of the blob
     tpe: BlobType,

     /// The id of the blob
-    id: Id,
+    id: BlobId,
 }
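
For readers following the macro: `new_blobid!(DataId, BlobType::Data)` boils down to a newtype with conversions in both directions plus a `PackedId` impl pinning the blob type. A self-contained hand-expansion sketch (the real `new_id!` also derives serde, `Display`, `FromStr`, and more, which this sketch omits):

```rust
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum BlobType { Tree, Data }

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct Id([u8; 32]);

// Roughly what new_id!(BlobId, "blob") produces: a newtype over Id.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct BlobId(Id);

pub trait PackedId: Copy + Into<BlobId> + From<BlobId> {
    const TYPE: BlobType;
}

// Hand-expansion of new_blobid!(DataId, BlobType::Data); details assumed.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct DataId(Id);

impl From<BlobId> for DataId {
    fn from(id: BlobId) -> Self {
        Self(id.0) // the macro writes (*id).into(), derefing to the raw Id
    }
}

impl From<DataId> for BlobId {
    fn from(id: DataId) -> Self {
        Self(id.0)
    }
}

impl PackedId for DataId {
    const TYPE: BlobType = BlobType::Data;
}

fn main() {
    let blob = BlobId(Id([0; 32]));
    let data = DataId::from(blob); // untyped -> typed
    assert_eq!(BlobId::from(data), blob); // typed -> untyped
    assert!(matches!(DataId::TYPE, BlobType::Data));
}
```
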