diff --git a/Cargo.lock b/Cargo.lock index 95092ef..78b5f84 100755 --- a/Cargo.lock +++ b/Cargo.lock @@ -17,6 +17,21 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" +[[package]] +name = "android-tzdata" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + [[package]] name = "anstream" version = "0.6.5" @@ -69,20 +84,25 @@ dependencies = [ name = "arx" version = "0.1.0" dependencies = [ + "base32", + "chrono", "clap", "crossterm 0.27.0", "flate2", "git2", "glob-match", + "home", "indicatif", "inquire", "kdl", "miette", "reqwest", "run_script", + "serde", "tar", "thiserror", "tokio", + "toml", "unindent", "walkdir", ] @@ -117,6 +137,12 @@ dependencies = [ "backtrace", ] +[[package]] +name = "base32" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23ce669cd6c8588f79e15cf450314f9638f967fc5770ff1c7c1deb0925ea7cfa" + [[package]] name = "base64" version = "0.21.5" @@ -155,11 +181,12 @@ checksum = "ec8a7b6a70fde80372154c65702f00a0f56f3e1c36abbc6c440484be248856db" [[package]] name = "cc" -version = "1.0.73" +version = "1.0.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2fff2a6927b3bb87f9595d67196a70493f627687a71d87a0d692242c33f58c11" +checksum = "8cd6604a82acf3039f1144f54b8eb34e91ffba622051189e71b781822d5ee1f5" dependencies = [ "jobserver", + "libc", ] [[package]] @@ -168,6 +195,20 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" +[[package]] +name = "chrono" +version = "0.4.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8eaf5903dcbc0a39312feb77df2ff4c76387d591b9fc7b04a238dcf8bb62639a" +dependencies = [ + "android-tzdata", + "iana-time-zone", + "js-sys", + "num-traits", + "wasm-bindgen", + "windows-targets 0.52.0", +] + [[package]] name = "clap" version = "4.4.11" @@ -549,6 +590,15 @@ version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" +[[package]] +name = "home" +version = "0.5.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" +dependencies = [ + "windows-sys 0.52.0", +] + [[package]] name = "http" version = "0.2.8" @@ -620,6 +670,29 @@ dependencies = [ "tokio-native-tls", ] +[[package]] +name = "iana-time-zone" +version = "0.1.60" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7ffbb5a1b541ea2561f8c41c087286cc091e21e556a4f09a8f6cbf17b69b141" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "wasm-bindgen", + "windows-core", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + [[package]] name = "idna" version = 
"0.2.3" @@ -930,6 +1003,15 @@ dependencies = [ "minimal-lexical", ] +[[package]] +name = "num-traits" +version = "0.2.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da0df0e5185db44f69b44f26786fe401b6c293d1907744beaa7fa62b2e5a517a" +dependencies = [ + "autocfg", +] + [[package]] name = "num_cpus" version = "1.13.1" @@ -1268,9 +1350,23 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.143" +version = "1.0.197" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53e8e5d5b70924f74ff5c6d64d9a5acd91422117c60f48c4e07855238a254553" +checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.197" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", +] [[package]] name = "serde_json" @@ -1283,6 +1379,15 @@ dependencies = [ "serde", ] +[[package]] +name = "serde_spanned" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb3622f419d1296904700073ea6cc23ad690adbd66f13ea683df73298736f0c1" +dependencies = [ + "serde", +] + [[package]] name = "serde_urlencoded" version = "0.7.1" @@ -1586,6 +1691,40 @@ dependencies = [ "tracing", ] +[[package]] +name = "toml" +version = "0.8.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af06656561d28735e9c1cd63dfd57132c8155426aa6af24f36a00a351f88c48e" +dependencies = [ + "serde", + "serde_spanned", + "toml_datetime", + "toml_edit", +] + +[[package]] +name = "toml_datetime" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3550f4e9685620ac18a50ed434eb3aec30db8ba93b0287467bca5826ea25baf1" +dependencies = [ + "serde", +] + +[[package]] +name = "toml_edit" +version = "0.22.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18769cd1cec395d70860ceb4d932812a0b4d06b1a4bb336745a4d21b9496e992" +dependencies = [ + "indexmap", + "serde", + "serde_spanned", + "toml_datetime", + "winnow", +] + [[package]] name = "tower-service" version = "0.3.2" @@ -1820,6 +1959,15 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" +[[package]] +name = "windows-core" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" +dependencies = [ + "windows-targets 0.52.0", +] + [[package]] name = "windows-sys" version = "0.36.1" @@ -1995,6 +2143,15 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04" +[[package]] +name = "winnow" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dffa400e67ed5a4dd237983829e66475f0a4a26938c4b04c21baede6262215b8" +dependencies = [ + "memchr", +] + [[package]] name = "winreg" version = "0.50.0" diff --git a/Cargo.toml b/Cargo.toml index a7ffcd0..2b32b0e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -8,20 +8,25 @@ repository = "https://github.com/norskeld/arx" publish = false [dependencies] +base32 = "0.4.0" +chrono = "0.4.35" clap = { version = "4.4.11", features = ["cargo", "derive"] } crossterm = "0.27.0" 
flate2 = { version = "1.0.28" } git2 = { version = "0.18.1", features = ["vendored-libgit2"] } glob-match = { version = "0.2.1" } +home = "0.5.9" indicatif = "0.17.8" inquire = { version = "0.7.0", features = ["editor"] } kdl = "=4.6.0" miette = { version = "=5.10.0", features = ["fancy"] } reqwest = { version = "0.11.22", features = ["json"] } run_script = { version = "0.10.1" } +serde = { version = "1.0.197", features = ["derive"] } tar = { version = "0.4.40" } thiserror = { version = "1.0.51" } tokio = { version = "1.35.0", features = ["macros", "fs", "rt-multi-thread"] } +toml = "0.8.11" unindent = "0.2.3" walkdir = { version = "2.4.0" } diff --git a/src/actions/actions.rs b/src/actions/actions.rs index 9900278..820f968 100644 --- a/src/actions/actions.rs +++ b/src/actions/actions.rs @@ -246,7 +246,7 @@ impl Run { .name .clone() .or_else(|| { - let lines = command.trim().lines().count(); + let lines = command.lines().count(); if lines > 1 { Some(command.trim().lines().next().unwrap().to_string() + "...") @@ -277,10 +277,25 @@ impl Run { if has_failed { if !err.is_empty() { - eprintln!("{err}"); + // Multiline scripts are run using a temporary shell script, so the errror messages + // sometimes don't look nice, containing the absolute path to that temporary script, e.g.: + // + // /var/folders/81/48f1l9956vjfqzmf9yy24g1c0000gn/T/fsio_1iyUIEI1GJ.sh: line 2: + // + // So here I'm doing dirty string manipulation to clean up the message a bit. + let message = if let Some((_, trailing)) = err.split_once(".sh:") { + trailing.trim().to_string() + } + // TODO: Check error messages on windows (e.g. when trying to run a non-existing command), + // and clean up the message if necessary as well. + else { + err + }; + + eprintln!("{message}"); } - process::exit(1); + process::exit(code); } Ok(println!("{}", output.trim())) @@ -380,7 +395,7 @@ impl Replace { "✗".red() }; - println!("└─ {state} ╌ {replacement}"); + println!("└─ {state} {replacement}"); } } diff --git a/src/actions/executor.rs b/src/actions/executor.rs index dce5bbe..ad9081f 100644 --- a/src/actions/executor.rs +++ b/src/actions/executor.rs @@ -66,7 +66,7 @@ impl Executor { match &self.config.actions { | Actions::Suite(suites) => self.suite(suites).await?, | Actions::Flat(actions) => self.flat(actions).await?, - | Actions::Empty => println!("No actions found."), + | Actions::Empty => return Ok(()), }; // Delete the config file if needed. 
@@ -105,6 +105,7 @@ impl Executor { if !matches!( (action, it.peek()), (ActionSingle::Prompt(_), Some(ActionSingle::Prompt(_))) + | (ActionSingle::Unknown(_), Some(ActionSingle::Unknown(_))) ) { println!(); } diff --git a/src/actions/prompts.rs b/src/actions/prompts.rs index 486b5af..d49e6d7 100644 --- a/src/actions/prompts.rs +++ b/src/actions/prompts.rs @@ -4,8 +4,7 @@ use std::process; use crossterm::style::Stylize; use inquire::formatter::StringFormatter; use inquire::ui::{Color, RenderConfig, StyleSheet, Styled}; -use inquire::{required, CustomType}; -use inquire::{Confirm, Editor, InquireError, Select, Text}; +use inquire::{Confirm, CustomType, Editor, InquireError, Select, Text}; use crate::actions::State; use crate::config::prompts::*; @@ -102,7 +101,7 @@ impl InputPrompt { if let Some(default) = &self.default { prompt = prompt.with_default(default); } else { - prompt = prompt.with_validator(required!("This field is required.")); + prompt = prompt.with_validator(inquire::required!("This field is required.")); } match prompt.prompt() { diff --git a/src/app.rs b/src/app.rs index 15508e0..dd633ec 100644 --- a/src/app.rs +++ b/src/app.rs @@ -1,13 +1,14 @@ use std::fs; use std::io; -use std::path::PathBuf; +use std::path::{Path, PathBuf}; -use clap::{Parser, Subcommand}; +use clap::{Args, Parser, Subcommand}; use crossterm::style::Stylize; use miette::Diagnostic; use thiserror::Error; use crate::actions::Executor; +use crate::cache::Cache; use crate::config::{Config, ConfigOptionsOverrides}; use crate::report; use crate::repository::{LocalRepository, RemoteRepository}; @@ -26,62 +27,66 @@ pub enum AppError { #[derive(Debug, Default)] pub struct AppState { - /// Whether to cleanup on failure or not. + /// Whether to clean up on failure or not. pub cleanup: bool, - /// Cleanup path, will be set to the destination acquired after creating [RemoteRepository] or + /// Clean up path, will be set to the destination acquired after creating [RemoteRepository] or /// [LocalRepository]. pub cleanup_path: Option, } -#[derive(Parser, Debug)] +#[derive(Clone, Debug, Parser)] #[command(version, about, long_about = None)] -pub struct Cli { - #[command(subcommand)] - pub command: BaseCommand, - - /// Cleanup on failure, i.e. delete target directory. No-op if failed because target directory - /// does not exist. - #[arg(global = true, short, long)] - cleanup: bool, - - /// Delete arx config after scaffolding is complete. - #[arg(global = true, short, long)] - delete: Option, -} - -#[derive(Clone, Debug, Subcommand)] -pub enum BaseCommand { +pub enum Cli { /// Scaffold from a remote repository. #[command(visible_alias = "r")] - Remote { - /// Repository to use for scaffolding. - src: String, - - /// Directory to scaffold to. - path: Option, - - /// Scaffold from a specified ref (branch, tag, or commit). - #[arg(name = "REF", short = 'r', long = "ref")] - meta: Option, - }, + Remote(RepositoryArgs), /// Scaffold from a local repository. #[command(visible_alias = "l")] - Local { - /// Repository to use for scaffolding. - src: String, + Local(RepositoryArgs), + /// Commands for interacting with the cache. + #[command(visible_alias = "c")] + Cache { + #[command(subcommand)] + command: CacheCommand, + }, +} - /// Directory to scaffold to. - path: Option, +#[derive(Clone, Debug, Args)] +pub struct RepositoryArgs { + /// Repository to use for scaffolding. + src: String, + /// Directory to scaffold to. + path: Option, + /// Scaffold from a specified ref (branch, tag, or commit). 
+ #[arg(name = "REF", short = 'r', long = "ref")] + meta: Option, + /// Clean up on failure. No-op if failed because target directory already exists. + #[arg(short = 'C', long)] + cleanup: bool, + /// Delete config after scaffolding is complete. + #[arg(short, long)] + delete: Option, + /// Skip reading config and running actions. + #[arg(short, long)] + skip: bool, + /// Use cached template if available. + #[arg(short = 'c', long, default_value = "true")] + cache: bool, +} - /// Scaffold from a specified ref (branch, tag, or commit). - #[arg(name = "REF", short = 'r', long = "ref")] - meta: Option, - }, +#[derive(Clone, Debug, Subcommand)] +pub enum CacheCommand { + /// List cache entries. + List, + /// Remove all cache entries. + Clear, } #[derive(Debug)] pub struct App { + /// Parsed CLI options and commands. cli: Cli, + /// Current state of the application. state: AppState, } @@ -95,6 +100,17 @@ impl App { /// Runs the app and prints any errors. pub async fn run(&mut self) { + miette::set_hook(Box::new(|_| { + Box::new( + miette::MietteHandlerOpts::new() + .terminal_links(false) + .context_lines(3) + .tab_width(4) + .build(), + ) + })) + .expect("Failed to set up the miette hook"); + let scaffold_res = self.scaffold().await; if scaffold_res.is_err() { @@ -105,129 +121,187 @@ impl App { /// Kicks of the scaffolding process. pub async fn scaffold(&mut self) -> miette::Result<()> { - // Slightly tweak miette. - miette::set_hook(Box::new(|_| { - Box::new( - miette::MietteHandlerOpts::new() - .terminal_links(false) - .context_lines(3) - .tab_width(4) - .build(), - ) - }))?; + match self.cli.clone() { + | Cli::Remote(args) => self.scaffold_remote(args).await, + | Cli::Local(args) => self.scaffold_local(args).await, + | Cli::Cache { command } => self.handle_cache(command), + } + } + + async fn scaffold_remote(&mut self, args: RepositoryArgs) -> miette::Result<()> { + let mut remote = RemoteRepository::new(args.src, args.meta)?; + + // Try to fetch refs early. If we can't get them, there's no point in continuing. + remote.fetch_refs()?; + + // Try to resolve a ref to specific hash. + let hash = remote.resolve_hash()?; - // Build override options. - let overrides = ConfigOptionsOverrides { delete: self.cli.delete }; + let name = args.path.as_ref().unwrap_or(&remote.repo); + let destination = PathBuf::from(name); // Cleanup on failure. - self.state.cleanup = self.cli.cleanup; + self.state.cleanup = args.cleanup; + self.state.cleanup_path = Some(destination.clone()); + + // Check if destination already exists before downloading. + if let Ok(true) = &destination.try_exists() { + // We do not want to remove already existing directory. + self.state.cleanup = false; + + miette::bail!( + "Failed to scaffold: '{}' already exists.", + destination.display() + ); + } - // Load the config. - let destination = match self.cli.command.clone() { - // Preparation flow for remote repositories. - | BaseCommand::Remote { src, path, meta } => { - let remote = RemoteRepository::new(src, meta)?; + let mut cache = Cache::init()?; + let mut bytes = None; - let name = path.as_ref().unwrap_or(&remote.repo); - let destination = PathBuf::from(name); + let source = remote.get_source(); + let mut should_fetch = !args.cache; - // Set cleanup path to the destination. - self.state.cleanup_path = Some(destination.clone()); + if args.cache { + println!("{}", "~ Attempting to read from cache".dim()); - // Check if destination already exists before downloading. 
- if let Ok(true) = &destination.try_exists() { - // We do not want to remove already existing directory. - self.state.cleanup = false; + if let Some(cached) = cache.read(&source, &hash)? { + println!("{}", "~ Found in cache, reading".dim()); + bytes = Some(cached); + } else { + println!("{}", "~ Nothing found in cache, fetching".dim()); + should_fetch = true; + } + } - miette::bail!( - "Failed to scaffold: '{}' already exists.", - destination.display() - ); - } + if should_fetch { + bytes = Some(remote.fetch().await?); + } - // Fetch the tarball as bytes (compressed). - let tarball = remote.fetch().await?; - - // Decompress and unpack the tarball. - let unpacker = Unpacker::new(tarball); - unpacker.unpack_to(&destination)?; - - destination - }, - // Preparation flow for local repositories. - | BaseCommand::Local { src, path, meta } => { - let local = LocalRepository::new(src, meta); - - let destination = if let Some(destination) = path { - PathBuf::from(destination) - } else { - local - .source - .file_name() - .map(PathBuf::from) - .unwrap_or_default() - }; - - // Set cleanup path to the destination. - self.state.cleanup_path = Some(destination.clone()); - - // Check if destination already exists before performing local clone. - if let Ok(true) = &destination.try_exists() { - // We do not want to remove already existing directory. - self.state.cleanup = false; - - miette::bail!( - "Failed to scaffold: '{}' already exists.", - destination.display() - ); - } + // Decompress and unpack the tarball. If somehow the tarball is empty, bail. + if let Some(bytes) = bytes { + if should_fetch { + cache.write(&source, &remote.meta.to_string(), &hash, &bytes)?; + } + + let unpacker = Unpacker::new(bytes); + unpacker.unpack_to(&destination)?; + } else { + miette::bail!("Failed to scaffold: zero bytes."); + } + + self + .scaffold_execute( + &destination, + args.skip, + ConfigOptionsOverrides { delete: args.delete }, + ) + .await + } - // Copy the directory. - local.copy(&destination)?; + async fn scaffold_local(&mut self, args: RepositoryArgs) -> miette::Result<()> { + let local = LocalRepository::new(args.src, args.meta); + + let destination = if let Some(destination) = args.path { + PathBuf::from(destination) + } else { + local + .source + .file_name() + .map(PathBuf::from) + .unwrap_or_default() + }; - // .git directory path. - let inner_git = destination.join(".git"); + // Cleanup on failure. + self.state.cleanup = args.cleanup; + self.state.cleanup_path = Some(destination.clone()); + + // Check if destination already exists before performing local clone. + if let Ok(true) = &destination.try_exists() { + // We do not want to remove already existing directory. + self.state.cleanup = false; + + miette::bail!( + "Failed to scaffold: '{}' already exists.", + destination.display() + ); + } - // If we copied a repository, we also need to checkout the ref. - if let Ok(true) = inner_git.try_exists() { - println!("{}", "~ Cloned repository".dim()); + // Copy the directory. + local.copy(&destination)?; - // Checkout the ref. - local.checkout(&destination)?; + // .git directory path. + let inner_git = destination.join(".git"); - println!("{} {}", "~ Checked out ref:".dim(), local.meta.0.dim()); + // If we copied a repository, we also need to checkout the ref. + if let Ok(true) = inner_git.try_exists() { + println!("{}", "~ Cloned repository".dim()); - // At last, remove the inner .git directory. 
- fs::remove_dir_all(inner_git).map_err(|source| { - AppError::Io { - message: "Failed to remove inner .git directory.".to_string(), - source, - } - })?; + // Checkout the ref. + local.checkout(&destination)?; - println!("{}", "~ Removed inner .git directory\n".dim()); - } else { - println!("{}", "~ Copied directory\n".dim()); + println!("{} {}", "~ Checked out ref:".dim(), local.meta.0.dim()); + + // At last, remove the inner .git directory. + fs::remove_dir_all(inner_git).map_err(|source| { + AppError::Io { + message: "Failed to remove inner .git directory.".to_string(), + source, } + })?; - destination - }, - }; + println!("{}", "~ Removed inner .git directory".dim()); + } else { + println!("{}", "~ Copied directory".dim()); + } + + self + .scaffold_execute( + &destination, + args.skip, + ConfigOptionsOverrides { delete: args.delete }, + ) + .await + } + + async fn scaffold_execute( + &mut self, + destination: &Path, + should_skip: bool, + overrides: ConfigOptionsOverrides, + ) -> miette::Result<()> { + if should_skip { + println!("{}", "~ Skipping running actions".dim()); + return Ok(()); + } // Read the config (if it is present). - let mut config = Config::new(&destination); + let mut config = Config::new(destination); + + if config.load()? { + println!(); + + config.override_with(overrides); - config.load()?; - config.override_with(overrides); + // Create executor and kick off execution. + let executor = Executor::new(config); - // Create executor and kick off execution. - let executor = Executor::new(config); + executor.execute().await + } else { + Ok(()) + } + } - executor.execute().await + fn handle_cache(&mut self, command: CacheCommand) -> miette::Result<()> { + let mut cache = Cache::init()?; + + match command { + | CacheCommand::List => Ok(cache.list()?), + | CacheCommand::Clear => Ok(cache.clear()?), + } } - /// Cleanup on failure. - pub fn cleanup(&self) -> miette::Result<()> { + /// Clean up on failure. + fn cleanup(&self) -> miette::Result<()> { if self.state.cleanup { if let Some(destination) = &self.state.cleanup_path { fs::remove_dir_all(destination).map_err(|source| { diff --git a/src/cache.rs b/src/cache.rs new file mode 100644 index 0000000..68c4865 --- /dev/null +++ b/src/cache.rs @@ -0,0 +1,315 @@ +use std::cmp::Ordering; +use std::collections::HashMap; +use std::fs; +use std::io; +use std::path::{Path, PathBuf}; +use std::str::FromStr; + +use base32::Alphabet; +use chrono::{DateTime, Utc}; +use crossterm::style::Stylize; +use miette::{Diagnostic, Report}; +use serde::{Deserialize, Serialize}; +use thiserror::Error; + +use crate::repository::RemoteRepository; + +/// Unpadded Base 32 alphabet. 
+const BASE32_ALPHABET: Alphabet = Alphabet::RFC4648 { padding: false }; + +/// `%userprofile%/AppData/Local/arx/.cache` +#[cfg(target_os = "windows")] +const CACHE_ROOT: &str = "AppData/Local/arx/.cache"; + +/// `$HOME/.cache/arx` +#[cfg(not(target_os = "windows"))] +const CACHE_ROOT: &str = ".cache/arx"; + +/// `/tarballs/.tar.gz` +const CACHE_TARBALLS_DIR: &str = "tarballs"; + +/// `/manifest.toml` +const CACHE_MANIFEST: &str = "manifest.toml"; + +#[derive(Debug, Diagnostic, Error)] +pub enum CacheError { + #[error("{message}")] + #[diagnostic(code(arx::cache::io))] + Io { + message: String, + #[source] + source: io::Error, + }, + #[error(transparent)] + #[diagnostic(code(arx::cache::manifest::serialize))] + TomlSerialize(toml::ser::Error), + #[error(transparent)] + #[diagnostic(code(arx::cache::manifest::deserialize))] + TomlDeserialize(toml::de::Error), + #[error("{0}")] + #[diagnostic(transparent)] + Diagnostic(Report), +} + +/// Entry name in the form of Base 32 encoded source string. +type Entry = String; + +/// Cache manifest. +/// +/// # Structure +/// +/// ```toml +/// [[templates..items]] +/// name = "" +/// hash = "" +/// timestamp = +/// ``` +/// +/// Where: +/// +/// - `` - Base 32 encoded source string in the form of: `:/`. +/// - `` - Ref name or commit hash. +/// - `` - Ref/commit hash, either short of full. Used in filenames. +/// - `` - Unix timestamp in milliseconds. +#[derive(Debug, Default, Serialize, Deserialize)] +pub struct Manifest { + templates: HashMap, +} + +/// Represents a template table. +#[derive(Debug, Serialize, Deserialize)] +pub struct Template { + /// List of linked items in the template table. + items: Vec, +} + +/// Represents a linked item in the template table. +#[derive(Debug, Serialize, Deserialize)] +pub struct Item { + /// Ref name or commit hash. + name: String, + /// Ref/commit hash, either short of full. + hash: String, + /// Unix timestamp in milliseconds. + timestamp: i64, +} + +#[derive(Debug)] +pub struct Cache { + /// Root cache directory. + root: PathBuf, + /// Manifest. + manifest: Manifest, +} + +impl Cache { + /// Initializes cache and creates manifest if it doesn't exist. + pub fn init() -> miette::Result { + let root = Self::get_root()?; + let manifest = Self::read_manifest(&root)?; + + Ok(Self { root, manifest }) + } + + /// Returns the root cache directory. + fn get_root() -> miette::Result { + home::home_dir() + .map(|home| home.join(CACHE_ROOT)) + .ok_or(miette::miette!("Failed to resolve home directory.")) + } + + /// Checks if two hashes match. + fn compare_hashes(left: &str, right: &str) -> bool { + match left.len().cmp(&right.len()) { + | Ordering::Less => right.starts_with(left), + | Ordering::Greater => left.starts_with(right), + | Ordering::Equal => left == right, + } + } + + /// Reads manifest from disk. + fn read_manifest>(root: P) -> miette::Result { + let location = root.as_ref().join(CACHE_MANIFEST); + + if !location.is_file() { + // If the manifest file does not exist, we do not return an error. + return Ok(Manifest::default()); + } + + let contents = fs::read_to_string(&location).map_err(|source| { + CacheError::Io { + message: "Failed to read the manifest.".to_string(), + source, + } + })?; + + let manifest = toml::from_str(&contents).map_err(CacheError::TomlDeserialize)?; + + Ok(manifest) + } + + /// Writes manifest to disk. 
+ fn write_manifest(&mut self) -> miette::Result<()> { + let manifest = toml::to_string(&self.manifest).map_err(CacheError::TomlSerialize)?; + + fs::write(self.root.join(CACHE_MANIFEST), manifest).map_err(|source| { + CacheError::Io { + message: "Failed to write the manifest to disk.".to_string(), + source, + } + })?; + + Ok(()) + } + + /// Writes contents to cache. + pub fn write( + &mut self, + source: &str, + name: &str, + hash: &str, + contents: &[u8], + ) -> miette::Result<()> { + let entry = base32::encode(BASE32_ALPHABET, source.as_bytes()); + let timestamp = Utc::now().timestamp_millis(); + + self + .manifest + .templates + .entry(entry) + .and_modify(|template| { + let hash = hash.to_string(); + let name = name.to_string(); + + if !template + .items + .iter() + .any(|item| Self::compare_hashes(&hash, &item.hash)) + { + template.items.push(Item { name, hash, timestamp }); + } + }) + .or_insert_with(|| { + Template { + items: vec![Item { + name: name.to_string(), + hash: hash.to_string(), + timestamp, + }], + } + }); + + self.write_manifest()?; + + let tarballs_dir = self.root.join(CACHE_TARBALLS_DIR); + let tarball = tarballs_dir.join(format!("{hash}.tar.gz")); + + fs::create_dir_all(&tarballs_dir).map_err(|source| { + CacheError::Io { + message: format!("Failed to create the '{CACHE_TARBALLS_DIR}' directory."), + source, + } + })?; + + fs::write(tarball, contents).map_err(|source| { + CacheError::Io { + message: "Failed to write the tarball contents to disk.".to_string(), + source, + } + })?; + + Ok(()) + } + + /// Reads from cache. + pub fn read(&self, source: &str, hash: &str) -> miette::Result>> { + let entry = base32::encode(BASE32_ALPHABET, source.as_bytes()); + + if let Some(template) = self.manifest.templates.get(&entry) { + let item = template + .items + .iter() + .find(|item| Self::compare_hashes(hash, &item.hash)); + + if let Some(item) = item { + let tarball = self + .root + .join(CACHE_TARBALLS_DIR) + .join(format!("{}.tar.gz", item.hash)); + + let contents = fs::read(tarball).map_err(|source| { + CacheError::Io { + message: "Failed to read the cached tarball.".to_string(), + source, + } + })?; + + return Ok(Some(contents)); + } + } + + Ok(None) + } + + /// Lists cache entries. + pub fn list(&self) -> Result<(), CacheError> { + for (key, template) in &self.manifest.templates { + if let Some(bytes) = base32::decode(BASE32_ALPHABET, key) { + let entry = String::from_utf8(bytes).map_err(|_| { + CacheError::Diagnostic(miette::miette!( + code = "arx::cache::invalid_utf8", + help = "Manifest may be malformed, clear the cache and try again.", + "Couldn't decode entry due to invalid UTF-8 in the string: `{key}`." + )) + })?; + + let repo = RemoteRepository::from_str(&entry).map_err(|_| { + CacheError::Diagnostic(miette::miette!( + code = "arx::cache::malformed_entry", + help = "Manifest may be malformed, clear the cache and try again.", + "Couldn't parse entry: `{key}`." 
+ )) + })?; + + let host = repo.host.to_string().cyan(); + let name = format!("{}/{}", repo.user, repo.repo).green(); + + println!("⋅ {host}:{name}"); + + for item in &template.items { + if let Some(date) = DateTime::from_timestamp_millis(item.timestamp) { + let date = date.format("%d/%m/%Y %H:%M").to_string().dim(); + let name = item.name.clone().cyan(); + let hash = item.hash.clone().yellow(); + + println!("└─ {date} @ {name} ╌╌ {hash}"); + } + } + } else { + return Err(CacheError::Diagnostic(miette::miette!( + code = "arx::cache::malformed_entry", + help = "Manifest may be malformed, clear the cache and try again.", + "Couldn't decode entry: `{key}`." + ))); + } + } + + Ok(()) + } + + /// Clears cache. + pub fn clear(&mut self) -> miette::Result<()> { + self.manifest.templates.clear(); + + fs::remove_dir_all(self.root.join(CACHE_TARBALLS_DIR)).map_err(|source| { + CacheError::Io { + message: format!("Failed to clear the '{CACHE_TARBALLS_DIR}' directory."), + source, + } + })?; + + self.write_manifest()?; + + Ok(()) + } +} diff --git a/src/config/config.rs b/src/config/config.rs index f230152..9d93871 100644 --- a/src/config/config.rs +++ b/src/config/config.rs @@ -35,11 +35,9 @@ pub enum ConfigError { #[source] source: io::Error, }, - #[error(transparent)] #[diagnostic(transparent)] Kdl(kdl::KdlError), - #[error("{0}")] #[diagnostic(transparent)] Diagnostic(Report), @@ -88,8 +86,11 @@ pub struct ConfigOptionsOverrides { /// ``` #[derive(Debug)] pub enum Actions { + /// Suites of actions to run. Suite(Vec), + /// Flat list of actions to run. Flat(Vec), + /// No actions to run. Empty, } @@ -170,14 +171,16 @@ impl Config { } /// Tries to load and parse the config. - pub fn load(&mut self) -> Result<(), ConfigError> { + pub fn load(&mut self) -> Result { if self.exists() { let doc = self.parse()?; self.options = self.get_config_options(&doc)?; self.actions = self.get_config_actions(&doc)?; - } - Ok(()) + Ok(true) + } else { + Ok(false) + } } /// Checks if the config exists under `self.root`. diff --git a/src/lib.rs b/src/lib.rs index fff5b60..bc38c80 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -2,6 +2,7 @@ pub(crate) mod actions; pub mod app; +pub(crate) mod cache; pub(crate) mod config; pub(crate) mod path; pub(crate) mod report; diff --git a/src/repository.rs b/src/repository.rs index c24c92c..1976fe2 100644 --- a/src/repository.rs +++ b/src/repository.rs @@ -1,3 +1,5 @@ +use std::collections::HashMap; +use std::fmt::{self, Display}; use std::fs; use std::io; use std::path::{Path, PathBuf}; @@ -42,12 +44,28 @@ pub struct ParseError(Report); pub enum FetchError { #[error("Request failed.")] RequestFailed, - #[error("Repository download failed with code {code}.\n\n{url}")] - RequestFailedWithCode { code: u16, url: Report }, + #[error("Repository download failed with code {code}. 
{report}")] + RequestFailedWithCode { code: u16, report: Report }, #[error("Couldn't get the response body as bytes.")] RequestBodyFailed, } +#[derive(Debug, Diagnostic, Error)] +#[diagnostic(code(arx::repository::remote))] +pub enum RemoteError { + #[error("Failed to create a detached in-memory remote.\n\n{url}")] + CreateDetachedRemoteFailed { url: Report }, + #[error("Failed to connect the given remote.\n\n{url}")] + ConnectionFailed { url: Report }, +} + +#[derive(Debug, Diagnostic, Error)] +#[diagnostic(code(arx::repository::reference))] +pub enum ReferenceError { + #[error("Invalid reference: `{0}`.")] + InvalidSelector(String), +} + #[derive(Debug, Diagnostic, Error)] #[diagnostic(code(arx::repository::checkout))] pub enum CheckoutError { @@ -74,7 +92,19 @@ pub enum RepositoryHost { BitBucket, } -/// Repository meta or *ref*, i.e. branch, tag or commit. +impl Display for RepositoryHost { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let host = match self { + | RepositoryHost::GitHub => "github", + | RepositoryHost::GitLab => "gitlab", + | RepositoryHost::BitBucket => "bitbucket", + }; + + write!(f, "{host}") + } +} + +/// Repository meta or *ref*, i.e. branch, tag or commit hash. /// /// This newtype exists solely for providing the default value. #[derive(Clone, Debug, PartialEq)] @@ -84,7 +114,13 @@ impl Default for RepositoryMeta { fn default() -> Self { // Using "HEAD" instead of hardcoding the default branch name like "master" or "main". // Suprisingly, works just fine. - RepositoryMeta("HEAD".to_string()) + Self("HEAD".to_string()) + } +} + +impl Display for RepositoryMeta { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.0) } } @@ -95,6 +131,7 @@ pub struct RemoteRepository { pub user: String, pub repo: String, pub meta: RepositoryMeta, + pub refs: HashMap, } impl RemoteRepository { @@ -108,9 +145,7 @@ impl RemoteRepository { /// Resolves a URL depending on the host and other repository fields. pub fn get_tar_url(&self) -> String { - let RemoteRepository { host, user, repo, meta } = self; - - let RepositoryMeta(meta) = meta; + let RemoteRepository { host, user, repo, meta, .. } = self; match host { | RepositoryHost::GitHub => { @@ -125,7 +160,92 @@ impl RemoteRepository { } } - /// Fetches the tarball using the resolved URL, and reads it into bytes (`Vec`). + /// Resolves a git repository URL depending on the host and other repository fields. + pub fn get_git_url(&self) -> String { + let RemoteRepository { host, user, repo, .. } = self; + + match host { + | RepositoryHost::GitHub => format!("https://github.com/{user}/{repo}.git"), + | RepositoryHost::GitLab => format!("https://gitlab.com/{user}/{repo}.git"), + | RepositoryHost::BitBucket => format!("https://bitbucket.org/{user}/{repo}.git"), + } + } + + /// Returns the source string of the repository. + pub fn get_source(&self) -> String { + let host = match self.host { + | RepositoryHost::GitHub => "github", + | RepositoryHost::GitLab => "gitlab", + | RepositoryHost::BitBucket => "bitbucket", + }; + + let user = &self.user; + let repo = &self.repo; + + format!("{host}:{user}/{repo}") + } + + /// Fetches the refs of the remote repository. 
+ pub fn fetch_refs(&mut self) -> Result<(), RemoteError> { + let git_url = self.get_git_url(); + + let mut remote = git2::Remote::create_detached(git_url.as_bytes()).map_err(|_| { + RemoteError::CreateDetachedRemoteFailed { url: miette::miette!("URL: {git_url}") } + })?; + + let connection = remote + .connect_auth(git2::Direction::Fetch, None, None) + .map_err(|_| RemoteError::ConnectionFailed { url: miette::miette!("URL: {git_url}") })?; + + for head in connection.list().unwrap() { + let original = head.name(); + + let name = (original == "HEAD") + .then_some("HEAD") + .or_else(|| original.strip_prefix("refs/heads/")) + .or_else(|| original.strip_prefix("refs/tags/")) + .map(str::to_string); + + if let Some(name) = name { + self.refs.insert(name, head.oid().to_string()); + } + } + + Ok(()) + } + + /// Resolves a given reference to a commit hash. + pub fn resolve_hash(&self) -> Result { + let selector = self.meta.to_string(); + + // If selector is a branch or tag. + if let Some(hash) = self.refs.get(&selector) { + Ok(hash.to_owned()) + } + // Or it might be a (short) commit hash. + else if selector.len() >= 7 { + git2::Oid::from_str(&selector) + .map(|oid| { + let oid = oid.to_string(); + + // Try to find a full commit hash. + if let Some(full_hash) = self.refs.values().find(|hash| hash.starts_with(&oid)) { + full_hash.to_owned() + } + // At this point this is most likely a commit that's not a tip of any branch. + else { + selector.clone() + } + }) + .map_err(|_| ReferenceError::InvalidSelector(selector)) + } + // Otherwise this is not a valid ref. + else { + Err(ReferenceError::InvalidSelector(selector)) + } + } + + /// Fetches the tarball using the resolved URL, and reads it into a vector of bytes. pub async fn fetch(&self) -> Result, FetchError> { let url = self.get_tar_url(); @@ -133,7 +253,7 @@ impl RemoteRepository { err.status().map_or(FetchError::RequestFailed, |status| { FetchError::RequestFailedWithCode { code: status.as_u16(), - url: miette::miette!("URL: {}", url.clone()), + report: miette::miette!("\n\nURL: {}", url.clone()), } }) })?; @@ -141,10 +261,15 @@ impl RemoteRepository { let status = response.status(); if !status.is_success() { - return Err(FetchError::RequestFailedWithCode { - code: status.as_u16(), - url: miette::miette!("URL: {}", url), - }); + let code = status.as_u16(); + + let report = if code == 404 { + miette::miette!("The requested branch, tag or commit was not found.\n\nURL: {url}") + } else { + miette::miette!("\n\nURL: {url}") + }; + + return Err(FetchError::RequestFailedWithCode { code, report }); } response @@ -258,7 +383,9 @@ impl FromStr for RemoteRepository { RepositoryMeta(input.to_string()) }); - Ok(RemoteRepository { host, user, repo, meta }) + let refs = HashMap::default(); + + Ok(RemoteRepository { host, user, repo, meta, refs }) } } @@ -321,7 +448,8 @@ impl LocalRepository { /// Checks out the repository located at the `destination`. pub fn checkout(&self, destination: &Path) -> Result<(), CheckoutError> { - let RepositoryMeta(meta) = &self.meta; + let meta = self.meta.to_string(); + let head = "HEAD".to_string(); // First, try to create Repository. 
let repository = GitRepository::open(destination).map_err(CheckoutError::OpenFailed)?; @@ -336,9 +464,9 @@ impl LocalRepository { .ok() .and_then(|(_, reference)| reference) .and_then(|reference| reference.name().map(str::to_string)) - .unwrap_or("HEAD".to_string()) + .unwrap_or(head) } else { - "HEAD".to_string() + head }; // Try to find (parse revision) the desired reference: branch, tag or commit. They are encoded @@ -365,7 +493,7 @@ impl LocalRepository { .map_err(|_| CheckoutError::TreeCheckoutFailed)?; match reference { - // Here `gref`` is an actual reference like branch or tag. + // Here `gref` is an actual reference like branch or tag. | Some(gref) => { let ref_name = gref.name().ok_or(CheckoutError::InvalidRefName)?; @@ -399,7 +527,8 @@ mod tests { host: RepositoryHost::GitHub, user: "foo".to_string(), repo: "bar".to_string(), - meta: RepositoryMeta::default() + meta: RepositoryMeta::default(), + refs: HashMap::default() }) ); } @@ -466,7 +595,8 @@ mod tests { host: RepositoryHost::GitHub, user: "foo".to_string(), repo: "bar".to_string(), - meta + refs: HashMap::default(), + meta, }) ); } @@ -490,7 +620,8 @@ mod tests { host, user: "foo".to_string(), repo: "bar".to_string(), - meta: RepositoryMeta::default() + meta: RepositoryMeta::default(), + refs: HashMap::default() }) ); } @@ -504,7 +635,8 @@ mod tests { host: RepositoryHost::GitHub, user: "foo".to_string(), repo: "bar".to_string(), - meta: RepositoryMeta::default() + meta: RepositoryMeta::default(), + refs: HashMap::default() }) ); } @@ -527,7 +659,8 @@ mod tests { host: RepositoryHost::default(), user: user.to_string(), repo: repo.to_string(), - meta: RepositoryMeta::default() + meta: RepositoryMeta::default(), + refs: HashMap::default() }) ); }
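
The new `src/cache.rs` module keys cache entries by the unpadded Base 32 encoding of the template source string (`<host>:<user>/<repo>`, as produced by `RemoteRepository::get_source`), stores tarballs under `<cache_root>/tarballs/<hash>.tar.gz`, and records `name`/`hash`/`timestamp` items per entry in `manifest.toml`, where `<cache_root>` resolves to `$HOME/.cache/arx` (or `AppData/Local/arx/.cache` on Windows). The sketch below is a standalone approximation of that keying scheme and of the prefix-based matching done by `Cache::compare_hashes`; it is not part of arx — the source string and hash are made up, and the helper names (`cache_paths`, `hashes_match`) are hypothetical.

```rust
// Standalone sketch; requires `base32 = "0.4"` as a dependency.
use base32::Alphabet;

/// Unpadded Base 32 alphabet, mirroring BASE32_ALPHABET in src/cache.rs.
const BASE32_ALPHABET: Alphabet = Alphabet::RFC4648 { padding: false };

/// Derives the manifest key and tarball filename for a cached template.
/// The `source` format ("<host>:<user>/<repo>") follows RemoteRepository::get_source.
fn cache_paths(source: &str, hash: &str) -> (String, String) {
  let entry = base32::encode(BASE32_ALPHABET, source.as_bytes());
  let tarball = format!("tarballs/{hash}.tar.gz");

  (entry, tarball)
}

/// Prefix-based hash comparison in the spirit of Cache::compare_hashes:
/// a short hash matches a full hash if one is a prefix of the other.
fn hashes_match(left: &str, right: &str) -> bool {
  if left.len() <= right.len() {
    right.starts_with(left)
  } else {
    left.starts_with(right)
  }
}

fn main() {
  // Hypothetical source string and commit hash, used purely for illustration.
  let source = "github:norskeld/arx";
  let full_hash = "0a1b2c3d4e5f60718293a4b5c6d7e8f901234567";

  let (entry, tarball) = cache_paths(source, full_hash);

  println!("manifest key: {entry}");
  println!("tarball path: <cache_root>/{tarball}");

  // A short selector resolves against the full hash stored in the manifest.
  assert!(hashes_match("0a1b2c3", full_hash));
}
```

Matching by prefix lets a short commit hash given on the command line resolve against the full hashes collected by `fetch_refs`, which is also why `resolve_hash` only treats selectors of at least seven characters as candidate commit hashes.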