refactor: migrate some ideas and move the compression logic to roast internally

Signed-off-by: Soc Virnyl Estela <[email protected]>
uncomfyhalomacro committed Nov 1, 2024
1 parent a1669f3 commit 901e2f7
Showing 3 changed files with 76 additions and 178 deletions.
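
At a high level, this commit drops the hand-rolled per-format compress() in cargo/src/vendor.rs: the vendored tree is now staged in a temporary workdir and handed to libroast's roast_opts, which produces the tarball. A minimal sketch of the new flow, using only the RoastArgs fields and calls visible in the diff below (the boolean passed to roast_opts is copied from the call site; its meaning is not shown in this diff):

use std::path::PathBuf;

use libroast::operations::cli::RoastArgs;
use libroast::operations::roast::roast_opts;

// Sketch only: mirrors the RoastArgs construction added in cargo/src/utils/mod.rs.
fn archive_with_roast(staged_tree: PathBuf, outdir: PathBuf, outfile: &str) -> Result<(), String> {
    let roast_args = RoastArgs {
        target: staged_tree,             // the directory tree to archive
        include: None,
        exclude: None,
        additional_paths: None,
        outfile: PathBuf::from(outfile), // e.g. "vendor.tar.zst"
        outdir: Some(outdir),            // where the tarball is written
        preserve_root: false,
        reproducible: true,              // ask roast for a deterministic archive
        ignore_git: true,
        ignore_hidden: false,
    };
    // The diff converts errors with to_string(), so Display is known to exist;
    // the Ok value is discarded here, as at the call site.
    roast_opts(roast_args, false)
        .map(|_| ())
        .map_err(|err| err.to_string())
}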
4 changes: 2 additions & 2 deletions cargo/src/cli.rs
@@ -117,7 +117,7 @@ impl Vendor for Src {
if let Ok(actual_src) = utils::process_globs(&self.src) {
debug!(?actual_src, "Source got from glob pattern");
if actual_src.is_file() {
- libroast::is_supported_format(&actual_src)
+ libroast::utils::is_supported_format(&actual_src)
} else {
Ok(SupportedFormat::Dir(actual_src))
}
@@ -206,7 +206,7 @@ impl Vendor for Src {
}
}
}
- SupportedFormat::Dir(srcpath) => match utils::copy_dir_all(
+ SupportedFormat::Dir(srcpath) => match libroast::utils::copy_dir_all(
&srcpath,
&workdir.join(srcpath.file_name().unwrap_or(srcpath.as_os_str())),
) {
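
With that, the format detection in cli.rs reduces to the shape below. A hedged sketch, not the crate's definitive API: the module path of SupportedFormat is an assumption (the diff uses the name unqualified), and the error type is assumed to convert into Box<dyn Error>:

use std::path::PathBuf;

use libroast::common::SupportedFormat; // assumed module path
use libroast::utils::is_supported_format;

// Hypothetical wrapper: files go through libroast's format detection,
// directories pass through unchanged as SupportedFormat::Dir.
fn classify(actual_src: PathBuf) -> Result<SupportedFormat, Box<dyn std::error::Error>> {
    if actual_src.is_file() {
        Ok(is_supported_format(&actual_src)?) // `?` assumes the error boxes cleanly
    } else {
        Ok(SupportedFormat::Dir(actual_src))
    }
}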
116 changes: 74 additions & 42 deletions cargo/src/utils/mod.rs
@@ -9,7 +9,6 @@

use std::ffi::OsStr;
use std::fmt::{self, Debug, Display};
- use std::fs;
use std::io;
use std::path::Path;
use std::path::PathBuf;
@@ -23,44 +22,23 @@ use crate::audit::{perform_cargo_audit, process_reports};

use glob::glob;
use libroast::common::Compression;
+ use libroast::operations::cli::RoastArgs;
+ use libroast::operations::roast::roast_opts;
+ use libroast::utils::copy_dir_all;

#[allow(unused_imports)]
use tracing::{debug, error, info, trace, warn, Level};

- pub fn copy_dir_all(src: impl AsRef<Path>, dst: &Path) -> Result<(), io::Error> {
- debug!("Copying sources");
- debug!(?dst);
- fs::create_dir_all(dst)?;
- Ok(for entry in fs::read_dir(src)? {
- let entry = entry?;
- let ty = entry.file_type()?;
- trace!(?entry);
- trace!(?ty);
- if ty.is_dir() {
- trace!(?ty, "Is directory?");
- copy_dir_all(entry.path(), &dst.join(entry.file_name()))?;
-
- // Should we respect symlinks?
- // } else if ty.is_symlink() {
- // debug!("Is symlink");
- // let path = fs::read_link(&entry.path())?;
- // let path = fs::canonicalize(&path).unwrap();
- // debug!(?path);
- // let pathfilename = path.file_name().unwrap_or(OsStr::new("."));
- // if path.is_dir() {
- // copy_dir_all(&path, &dst.join(pathfilename))?;
- // } else {
- // fs::copy(&path, &mut dst.join(pathfilename))?;
- // }
-
- // Be pedantic or you get symlink error
- } else if ty.is_file() {
- trace!(?ty, "Is file?");
- fs::copy(entry.path(), dst.join(entry.file_name()))?;
- };
- })
- }

pub fn process_src(args: &Opts, prjdir: &Path) -> Result<(), OBSCargoError> {
+ let v_workdir = tempfile::Builder::new()
+ .prefix(".vendor")
+ .rand_bytes(12)
+ .tempdir()
+ .map_err(|err| {
+ error!(?err);
+ OBSCargoError::new(OBSCargoErrorKind::VendorError, err.to_string())
+ })?;
+ let vendor_workdir = v_workdir.path();
let mut manifest_files: Vec<PathBuf> = if !args.cargotoml.is_empty() {
debug!("Using manually specified Cargo.toml files.");
debug!(?args.cargotoml);
@@ -274,13 +252,67 @@ pub fn process_src(args: &Opts, prjdir: &Path) -> Result<(), OBSCargoError> {
debug!("All paths to archive {:#?}", paths_to_archive);

if vendor_dir.exists() {
- vendor::compress(
- outdir,
- prjdir,
- &paths_to_archive,
- compression,
- args.tag.as_deref(),
- )?;
+ let vendor_filename = match &args.tag {
+ Some(suffix) => format!("vendor-{}", suffix),
+ None => "vendor".to_string(),
+ };
+ let vendor_filename_with_extension = match &args.compression {
+ Compression::Gz => format!("{}{}", &vendor_filename, ".tar.gz"),
+ Compression::Xz => format!("{}{}", &vendor_filename, ".tar.xz"),
+ Compression::Zst => format!("{}{}", &vendor_filename, ".tar.zst"),
+ Compression::Bz2 => format!("{}{}", &vendor_filename, ".tar.bz"),
+ Compression::Not => format!("{}{}", &vendor_filename, ".tar"),
+ };
+ let vendor_doppel = vendor_workdir.join(&vendor_filename);
+ copy_dir_all(vendor_dir, &vendor_doppel).map_err(|err| {
+ error!(?err);
+ OBSCargoError::new(OBSCargoErrorKind::VendorError, err.to_string())
+ })?;
+
+ for p in paths_to_archive {
+ let canon_p = p.canonicalize().unwrap_or(p.to_path_buf());
+ let stripped_canon_p = canon_p
+ .strip_prefix(prjdir)
+ .unwrap_or(Path::new(canon_p.file_stem().unwrap_or_default()));
+ let p_to_vendor_workdir = vendor_workdir.join(stripped_canon_p);
+ let p_to_vendor_workdir_parent =
+ p_to_vendor_workdir.parent().unwrap_or(Path::new(""));
+ std::fs::create_dir_all(p_to_vendor_workdir_parent).map_err(|err| {
+ error!(?err);
+ OBSCargoError::new(
+ OBSCargoErrorKind::VendorError,
+ "Failed to create a directory".to_string(),
+ )
+ })?;
+ if canon_p.is_file() {
+ std::fs::copy(canon_p, p_to_vendor_workdir).map_err(|err| {
+ error!(?err);
+ OBSCargoError::new(OBSCargoErrorKind::VendorError, err.to_string())
+ })?;
+ } else if canon_p.is_dir() {
+ copy_dir_all(canon_p, &p_to_vendor_workdir).map_err(|err| {
+ error!(?err);
+ OBSCargoError::new(OBSCargoErrorKind::VendorError, err.to_string())
+ })?;
+ };
+ }
+
+ let roast_args = RoastArgs {
+ target: vendor_workdir.to_path_buf(),
+ include: None,
+ exclude: None,
+ additional_paths: None,
+ outfile: PathBuf::from(vendor_filename_with_extension),
+ outdir: Some(outdir),
+ preserve_root: false,
+ reproducible: true,
+ ignore_git: true,
+ ignore_hidden: false,
+ };
+ roast_opts(roast_args, false).map_err(|err| {
+ error!(?err);
+ OBSCargoError::new(OBSCargoErrorKind::VendorCompressionFailed, err.to_string())
+ })?;
} else {
error!("Vendor dir does not exist! This is a bug!");
return Err(OBSCargoError::new(
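
Before moving on, note how the tarball name is decided above: an optional --tag suffix plus a compression-specific extension, computed before roast is invoked. A small self-contained sketch of that mapping (the variants come from the exhaustive match in the diff; Bz2 maps to ".tar.bz", not ".tar.bz2", and is mirrored verbatim):

use libroast::common::Compression;

fn vendor_tarball_name(tag: Option<&str>, compression: &Compression) -> String {
    let stem = match tag {
        Some(suffix) => format!("vendor-{}", suffix),
        None => "vendor".to_string(),
    };
    let extension = match compression {
        Compression::Gz => ".tar.gz",
        Compression::Xz => ".tar.xz",
        Compression::Zst => ".tar.zst",
        Compression::Bz2 => ".tar.bz", // as committed; not ".tar.bz2"
        Compression::Not => ".tar",
    };
    format!("{}{}", stem, extension)
}

// Example: vendor_tarball_name(Some("1.2.3"), &Compression::Zst) yields "vendor-1.2.3.tar.zst".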
134 changes: 0 additions & 134 deletions cargo/src/vendor.rs
@@ -15,8 +15,6 @@ use std::path::Path;
use crate::errors::OBSCargoError;
use crate::errors::OBSCargoErrorKind;
use crate::utils::cargo_command;
- use libroast::common::Compression;
- use libroast::compress;

use serde::Deserialize;
use serde::Serialize;
@@ -168,138 +166,6 @@ pub fn vendor(
})
}

- pub fn compress(
- outpath: impl AsRef<Path>,
- prjdir: impl AsRef<Path>,
- paths_to_archive: &[impl AsRef<Path>],
- compression: &Compression,
- tag: Option<&str>,
- ) -> Result<(), OBSCargoError> {
- info!("📦 Archiving vendored dependencies...");
-
- // RATIONALE: We copy Cargo.lock by default, updated or not updated
- // `../` relative to `vendor/` directory.
- // CONSIDERATIONS:
- // Maybe in the future we can check if Cargo.toml points to a workspace
- // using the `toml` crate. For now, we aggressively just copy `../Cargo.lock`
- // relative to vendor directory if it exists. Even if it is a workspace,
- // it will still copy the project's `Cargo.lock` because we still run
- // `vendor` anyway starting at the root of the project where the lockfile resides.
- // NOTE: 1. The members in that workspace still requires that root lockfile.
- // NOTE: 2. Members of that workspace cannot generate their own lockfiles.
- // NOTE: 3. If they are not members, we slap that file into their own compressed vendored
- // tarball
-
- let tar_name = match tag {
- Some(t) => format!("vendor-{}", t),
- None => "vendor".to_string(),
- };
-
- let mut vendor_out = outpath.as_ref().join(tar_name);
- Ok({
- match compression {
- Compression::Gz => {
- vendor_out.set_extension("tar.gz");
- if vendor_out.exists() {
- warn!(
- replacing = ?vendor_out,
- "🔦 Compressed tarball for vendor exists AND will be replaced."
- );
- }
- compress::targz(&vendor_out, &prjdir, paths_to_archive, true).map_err(|err| {
- error!(?err, "gz compression failed");
- OBSCargoError::new(
- OBSCargoErrorKind::VendorCompressionFailed,
- "gz compression failed".to_string(),
- )
- })?;
- debug!(
- "Compressed and archived to {}",
- vendor_out.to_string_lossy()
- );
- }
- Compression::Xz => {
- vendor_out.set_extension("tar.xz");
- if vendor_out.exists() {
- warn!(
- replacing = ?vendor_out,
- "🔦 Compressed tarball for vendor exists AND will be replaced."
- );
- }
- compress::tarxz(&vendor_out, &prjdir, paths_to_archive, true).map_err(|err| {
- error!(?err, "xz compression failed");
- OBSCargoError::new(
- OBSCargoErrorKind::VendorCompressionFailed,
- "xz compression failed".to_string(),
- )
- })?;
- debug!(
- "Compressed and archived to {}",
- vendor_out.to_string_lossy()
- );
- }
- Compression::Zst => {
- vendor_out.set_extension("tar.zst");
- if vendor_out.exists() {
- warn!(
- replacing = ?vendor_out,
- "🔦 Compressed tarball for vendor exists AND will be replaced."
- );
- }
- compress::tarzst(&vendor_out, &prjdir, paths_to_archive, true).map_err(|err| {
- error!(?err, "zst compression failed");
- OBSCargoError::new(
- OBSCargoErrorKind::VendorCompressionFailed,
- "zst compression failed".to_string(),
- )
- })?;
- debug!(
- "Compressed and archived to {}",
- vendor_out.to_string_lossy()
- );
- }
- Compression::Bz2 => {
- vendor_out.set_extension("tar.bz2");
- if vendor_out.exists() {
- warn!(
- replacing = ?vendor_out,
- "🔦 Compressed tarball for vendor exists AND will be replaced."
- );
- }
- compress::tarbz2(&vendor_out, &prjdir, paths_to_archive, true).map_err(|err| {
- error!(?err, "bz2 compression failed");
- OBSCargoError::new(
- OBSCargoErrorKind::VendorCompressionFailed,
- "bz2 compression failed".to_string(),
- )
- })?;
- debug!(
- "Compressed and archived to {}",
- vendor_out.to_string_lossy()
- );
- }
- Compression::Not => {
- vendor_out.set_extension("tar");
- if vendor_out.exists() {
- warn!(
- replacing = ?vendor_out,
- "🔦 Uncompressed vanilla tarball for vendor exists AND will be replaced."
- );
- }
- compress::tarbz2(&vendor_out, &prjdir, paths_to_archive, true).map_err(|err| {
- error!(?err, "bz2 compression failed");
- OBSCargoError::new(
- OBSCargoErrorKind::VendorCompressionFailed,
- "archiving vendor source failed".to_string(),
- )
- })?;
- debug!("Archived to {}", vendor_out.to_string_lossy());
- }
- }
- debug!("Finished creating {} compressed tarball", compression);
- })
- }
-
pub fn is_workspace(src: &Path) -> Result<bool, OBSCargoError> {
if let Ok(manifest) = fs::read_to_string(src) {
if let Ok(manifest_data) = toml::from_str::<toml::Value>(&manifest) {
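
The RATIONALE comment in the removed compress() defers workspace detection to the toml crate; the surviving is_workspace above already reads the manifest and parses it as TOML. A minimal sketch of that idea, assuming only the toml crate (the "workspace" key check is an assumption, since the diff truncates before the function body):

use std::fs;
use std::path::Path;

// Returns true when the manifest at `src` parses as TOML and declares a
// [workspace] table; any read or parse failure counts as "not a workspace".
fn manifest_declares_workspace(src: &Path) -> bool {
    fs::read_to_string(src)
        .ok()
        .and_then(|raw| toml::from_str::<toml::Value>(&raw).ok())
        .map(|manifest| manifest.get("workspace").is_some())
        .unwrap_or(false)
}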
