diff --git a/.cargo/config.toml b/.cargo/config.toml
new file mode 100644
index 0000000..8af59dd
--- /dev/null
+++ b/.cargo/config.toml
@@ -0,0 +1,2 @@
+[env]
+RUST_TEST_THREADS = "1"
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index b35b27b..74b215d 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -66,6 +66,6 @@ jobs:
           toolchain: "1.85"
           components: clippy, rustfmt
       - name: Run tests
-        run: make tests
+        run: cargo test
         env:
           RUST_LOG: debug
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 8427897..841f23f 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -14,7 +14,7 @@ _Используется стандартный процесс открытия
 1. **Форкните репозиторий** и создайте новую ветку (`git checkout -b feature-branch`).
 2. Внесите изменения и убедитесь, что код проходит проверки (`make style-check`), компилируется и запускается.
-3. Запустите тесты (`make tests`).
+3. Запустите тесты (`cargo test`).
 4. Добавьте информацию о своём изменении в `CHANGELOG.md`.
 5. При необходимости внесите изменения в `README.md`.
 6. Сделайте коммит, следуя соглашению [Conventional Commits](https://www.conventionalcommits.org/en/v1.0.0/) (см. ниже).
diff --git a/Cargo.toml b/Cargo.toml
index c6d63ff..cbe0db0 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -15,6 +15,7 @@ keywords = ["picodata", "cargo", "plugin"]
 categories = ["development-tools::cargo-plugins"]
 readme = "README.md"
 rust-version = "1.85"
+resolver = "3"
 
 [dependencies]
 clap = { version = "4", features = ["derive"] }
diff --git a/Makefile b/Makefile
index 472151b..d34acd2 100644
--- a/Makefile
+++ b/Makefile
@@ -1,7 +1,3 @@
-.PHONY: tests
-tests:
-	cargo test -- --test-threads=1
-
 .PHONY: style-check
 style-check:
 	cargo check --all --bins --tests --benches
diff --git a/plugin_template/_Cargo.toml b/plugin_template/_Cargo.toml
index 78c20f9..c60307d 100644
--- a/plugin_template/_Cargo.toml
+++ b/plugin_template/_Cargo.toml
@@ -3,6 +3,7 @@ name = "{{ project_name }}"
 version = "0.1.0"
 edition = "2024"
 publish = false
+resolver = "3"
 
 [dependencies]
 picodata-plugin = "25.1.1"
@@ -10,11 +11,11 @@ serde = { version = "1", features = ["derive"] }
 log = "0.4"
 
 [dev-dependencies]
-picodata-pike = { git = "https://github.com/picodata/pike.git", branch = "sng_features" } # TODO: change after publish on crates.io
+picodata-pike = { git = "https://github.com/picodata/pike.git", branch = "master" } # TODO: change after publish on crates.io
 reqwest = { version = "0.12", features = ["blocking"] }
 
 [build-dependencies]
-picodata-pike = { git = "https://github.com/picodata/pike.git", branch = "sng_features" } # TODO: change after publish on crates.io
+picodata-pike = { git = "https://github.com/picodata/pike.git", branch = "master" } # TODO: change after publish on crates.io
 liquid = "0.26"
 fs_extra = "1"
diff --git a/src/commands/plugin/new.rs b/src/commands/plugin/new.rs
index a6e37b8..9ffd91a 100644
--- a/src/commands/plugin/new.rs
+++ b/src/commands/plugin/new.rs
@@ -11,6 +11,12 @@ use std::{
 use include_dir::{include_dir, Dir, DirEntry};
 
 static PLUGIN_TEMPLATE: Dir<'_> = include_dir!("$CARGO_MANIFEST_DIR/plugin_template");
+static WS_CARGO_MANIFEST_TEMPLATE: &str = r#"[workspace]
+resolver = "3"
+members = [
+    "{{ project_name }}",
+]
+"#;
 
 fn place_file(target_path: &Path, t_ctx: &liquid::Object, entries: &[DirEntry<'_>]) -> Result<()> {
     for entry in entries {
@@ -71,14 +77,19 @@ where
     Ok(())
 }
 
-fn workspace_init(root_path: &Path, project_name: &str) -> Result<()> {
+fn workspace_init(root_path: &Path, project_name: &str, t_ctx: &liquid::Object) -> Result<()> {
     let cargo_toml_path = root_path.join("Cargo.toml");
     let mut cargo_toml =
         File::create(cargo_toml_path).context("failed to create Cargo.toml for workspace")?;
 
-    cargo_toml
-        .write_all(format!("[workspace]\nmembers = [\n    \"{project_name}\",\n]").as_bytes())?;
+    let ws_template = liquid::ParserBuilder::with_stdlib()
+        .build()
+        .context("couldn't build from template")?
+        .parse(WS_CARGO_MANIFEST_TEMPLATE)
+        .unwrap();
+
+    cargo_toml.write_all(ws_template.render(&t_ctx).unwrap().as_bytes())?;
 
     fs::copy(
         root_path.join(project_name).join("topology.toml"),
@@ -138,7 +149,8 @@ pub fn cmd(path: Option<&Path>, without_git: bool, init_workspace: bool) -> Resu
     }
 
     if init_workspace {
-        workspace_init(&path, project_name).context("failed to initiate workspace")?;
+        workspace_init(&path, project_name, &templates_ctx)
+            .context("failed to initiate workspace")?;
     }
 
     Ok(())
diff --git a/src/commands/run.rs b/src/commands/run.rs
index 5eddbf0..1e73599 100644
--- a/src/commands/run.rs
+++ b/src/commands/run.rs
@@ -173,6 +173,26 @@ fn enable_plugins(topology: &Topology, data_dir: &Path, picodata_path: &PathBuf)
     Ok(())
 }
 
+fn is_plugin_dir(path: &Path) -> bool {
+    if !path.is_dir() {
+        return false;
+    }
+    if !path.join("Cargo.toml").exists() {
+        return false;
+    }
+
+    if path.join("manifest.yaml.template").exists() {
+        return true;
+    }
+
+    fs::read_dir(path)
+        .unwrap()
+        .filter(Result::is_ok)
+        .map(|e| e.unwrap().path())
+        .filter(|e| e.is_dir())
+        .any(|dir| dir.join("manifest.yaml.template").exists())
+}
+
 pub struct PicodataInstance {
     instance_name: String,
     tier: String,
@@ -192,7 +212,7 @@ impl PicodataInstance {
         http_port: u16,
         pg_port: u16,
         first_instance_bin_port: u16,
-        plugins_dir: &Path,
+        plugins_dir: Option<&Path>,
         replication_factor: u8,
         tier: &str,
         run_params: &Params,
@@ -232,8 +252,6 @@ impl PicodataInstance {
             "run",
             data_dir_flag,
             instance_data_dir.to_str().expect("unreachable"),
-            "--plugin-dir",
-            plugins_dir.to_str().unwrap_or("target/debug"),
             listen_flag,
             &format!("127.0.0.1:{bin_port}"),
             "--peer",
@@ -248,6 +266,13 @@ impl PicodataInstance {
             tier,
         ]);
 
+        if let Some(plugins_dir) = plugins_dir {
+            child.args([
+                "--plugin-dir",
+                plugins_dir.to_str().unwrap_or("target/debug"),
+            ]);
+        }
+
         if run_params.daemon {
             child.stdout(Stdio::null()).stderr(Stdio::null());
             child.args(["--log", log_file_path.to_str().expect("unreachable")]);
@@ -417,22 +442,28 @@ pub fn cluster(params: &Params) -> Result<Vec<PicodataInstance>> {
     let mut params = params.clone();
     params.data_dir = params.plugin_path.join(&params.data_dir);
 
-    let plugins_dir = if params.use_release {
-        cargo_build(
-            lib::BuildType::Release,
-            &params.target_dir,
-            &params.plugin_path,
-        )?;
-        params.plugin_path.join(params.target_dir.join("release"))
-    } else {
-        cargo_build(
-            lib::BuildType::Debug,
-            &params.target_dir,
-            &params.plugin_path,
-        )?;
-        params.plugin_path.join(params.target_dir.join("debug"))
-    };
-    params.topology.find_plugin_versions(&plugins_dir)?;
+    let mut plugins_dir = None;
+    if is_plugin_dir(&params.plugin_path) {
+        plugins_dir = if params.use_release {
+            cargo_build(
+                lib::BuildType::Release,
+                &params.target_dir,
+                &params.plugin_path,
+            )?;
+            Some(params.plugin_path.join(params.target_dir.join("release")))
+        } else {
+            cargo_build(
+                lib::BuildType::Debug,
+                &params.target_dir,
+                &params.plugin_path,
+            )?;
+            Some(params.plugin_path.join(params.target_dir.join("debug")))
+        };
+
+        params
+            .topology
+            .find_plugin_versions(plugins_dir.as_ref().unwrap())?;
+    }
 
     info!("Running the cluster...");
 
@@ -451,7 +482,7 @@ pub fn cluster(params: &Params) -> Result<Vec<PicodataInstance>> {
                     params.base_http_port + instance_id,
                     params.base_pg_port + instance_id,
                     first_instance_bin_port,
-                    &plugins_dir,
+                    plugins_dir.as_deref(),
                     tier.replication_factor,
                     tier_name,
                     &params,
@@ -477,14 +508,16 @@ pub fn cluster(params: &Params) -> Result<Vec<PicodataInstance>> {
         thread::sleep(Duration::from_secs(5));
     }
 
-    let result = enable_plugins(&params.topology, &params.data_dir, &params.picodata_path);
-    if let Err(e) = result {
-        for process in &mut picodata_processes {
-            process.kill().unwrap_or_else(|e| {
-                error!("failed to kill picodata instances: {:#}", e);
-            });
+    if plugins_dir.is_some() {
+        let result = enable_plugins(&params.topology, &params.data_dir, &params.picodata_path);
+        if let Err(e) = result {
+            for process in &mut picodata_processes {
+                process.kill().unwrap_or_else(|e| {
+                    error!("failed to kill picodata instances: {:#}", e);
+                });
+            }
+            return Err(e.context("failed to enable plugins"));
         }
-        return Err(e.context("failed to enable plugins"));
     }
 };
diff --git a/src/main.rs b/src/main.rs
index f7bc4bc..ffa94a5 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -224,12 +224,15 @@ fn run_child_killer() {
     process::exit(0)
 }
 
-fn check_plugin_directory(plugin_dir: &Path) {
-    if !plugin_dir.join("./topology.toml").exists() {
-        println!("{CARING_PIKE}");
-
-        process::exit(1);
+fn is_required_path_exists(plugin_dir: &Path, required_path: &Path) {
+    if required_path.exists() {
+        return;
+    }
+    if plugin_dir.join(required_path).exists() {
+        return;
     }
+    println!("{CARING_PIKE}");
+    process::exit(1);
 }
 
 // Add new member to Cargo.toml, additionally checks proper
@@ -289,7 +292,7 @@ fn main() -> Result<()> {
             disable_colors,
             plugin_path,
         } => {
-            check_plugin_directory(&plugin_path);
+            is_required_path_exists(&plugin_path, &topology);
 
             if !daemon {
                 run_child_killer();
@@ -323,7 +326,7 @@ fn main() -> Result<()> {
             data_dir,
             plugin_path,
         } => {
-            check_plugin_directory(&plugin_path);
+            is_required_path_exists(&plugin_path, &data_dir);
             run_child_killer();
 
             let params = commands::stop::ParamsBuilder::default()
@@ -345,7 +348,7 @@ fn main() -> Result<()> {
             target_dir,
             plugin_path,
         } => {
-            check_plugin_directory(&plugin_path);
+            is_required_path_exists(&plugin_path, Path::new("Cargo.toml"));
 
             commands::plugin::pack::cmd(debug, &target_dir, &plugin_path)
                 .context("failed to execute \"pack\" command")?;
@@ -355,7 +358,7 @@ fn main() -> Result<()> {
             target_dir,
             plugin_path,
         } => {
-            check_plugin_directory(&plugin_path);
+            is_required_path_exists(&plugin_path, Path::new("Cargo.toml"));
 
             commands::plugin::build::cmd(release, &target_dir, &plugin_path)
                 .context("failed to execute \"build\" command")?;
@@ -372,7 +375,7 @@ fn main() -> Result<()> {
         } => commands::plugin::new::cmd(None, without_git, workspace)
             .context("failed to execute \"init\" command")?,
         Plugin::Add { path, plugin_path } => {
-            check_plugin_directory(&plugin_path);
+            is_required_path_exists(&plugin_path, Path::new("Cargo.toml"));
 
             modify_workspace(path.file_name().unwrap().to_str().unwrap(), &plugin_path)
                 .context("failed to add new plugin to workspace")?;
diff --git a/tests/build.rs b/tests/build.rs
index dd25535..ce68499 100644
--- a/tests/build.rs
+++ b/tests/build.rs
@@ -1,6 +1,6 @@
 mod helpers;
 
-use helpers::{build_plugin, check_plugin_version_artefacts, exec_pike, PLUGIN_DIR, TESTS_DIR};
+use helpers::{assert_plugin_build_artefacts, build_plugin, exec_pike, PLUGIN_DIR, TESTS_DIR};
 use std::{
     fs::{self},
     path::Path,
@@ -25,39 +25,39 @@ fn test_cargo_build() {
     build_plugin(&helpers::BuildType::Release, "0.1.0");
     build_plugin(&helpers::BuildType::Release, "0.1.1");
 
-    assert!(check_plugin_version_artefacts(
+    assert_plugin_build_artefacts(
         &Path::new(PLUGIN_DIR)
             .join("target")
             .join("debug")
             .join("test-plugin")
             .join("0.1.0"),
-        false
-    ));
+        false,
+    );
 
-    assert!(check_plugin_version_artefacts(
+    assert_plugin_build_artefacts(
         &Path::new(PLUGIN_DIR)
             .join("target")
             .join("debug")
             .join("test-plugin")
            .join("0.1.1"),
-        true
-    ));
+        true,
+    );
 
-    assert!(check_plugin_version_artefacts(
+    assert_plugin_build_artefacts(
         &Path::new(PLUGIN_DIR)
             .join("target")
            .join("release")
            .join("test-plugin")
            .join("0.1.0"),
-        false
-    ));
+        false,
+    );
 
-    assert!(check_plugin_version_artefacts(
+    assert_plugin_build_artefacts(
         &Path::new(PLUGIN_DIR)
            .join("target")
            .join("release")
            .join("test-plugin")
            .join("0.1.1"),
-        true
-    ));
+        true,
+    );
 }
diff --git a/tests/helpers/mod.rs b/tests/helpers/mod.rs
index 671a922..ba6f981 100644
--- a/tests/helpers/mod.rs
+++ b/tests/helpers/mod.rs
@@ -3,7 +3,7 @@
 use constcat::concat;
 use log::info;
 use std::ffi::OsStr;
-use std::io::{BufRead, BufReader, Write};
+use std::io::{self, BufRead, BufReader, Write};
 use std::path::PathBuf;
 use std::process::ExitStatus;
 use std::thread;
@@ -83,16 +83,16 @@ impl Cluster {
     }
 }
 
-pub fn check_plugin_version_artefacts(plugin_path: &Path, check_symlinks: bool) -> bool {
-    let symlink_path = plugin_path.join("libtest_plugin.so");
+pub fn assert_plugin_build_artefacts(plugin_path: &Path, must_be_symlinks: bool) {
+    let lib_path = plugin_path.join("libtest_plugin.so");
 
-    if check_symlinks && !validate_symlink(&symlink_path) {
-        return false;
+    if must_be_symlinks {
+        assert!(validate_symlink(&lib_path));
     }
 
-    check_existance(&plugin_path.join("manifest.yaml"), false)
-        && check_existance(&plugin_path.join("libtest_plugin.so"), check_symlinks)
-        && check_existance(&plugin_path.join("migrations"), false)
+    assert_path_existance(&plugin_path.join("manifest.yaml"), false);
+    assert_path_existance(&lib_path, must_be_symlinks);
+    assert_path_existance(&plugin_path.join("migrations"), false);
 }
 
 fn validate_symlink(symlink_path: &PathBuf) -> bool {
@@ -107,19 +107,18 @@ fn validate_symlink(symlink_path: &PathBuf) -> bool {
     false
 }
 
-fn check_existance(path: &Path, check_symlinks: bool) -> bool {
-    if !path.exists() {
-        return false;
-    };
+fn assert_path_existance(path: &Path, must_be_symlink: bool) {
+    assert!(path.exists());
 
     let is_symlink = path
         .symlink_metadata()
         .map(|m| m.file_type().is_symlink())
        .unwrap_or(false);
-    if check_symlinks {
-        is_symlink
+
+    if must_be_symlink {
+        assert!(is_symlink);
     } else {
-        !is_symlink
+        assert!(!is_symlink);
     }
 }
 
@@ -140,7 +139,7 @@ pub fn build_plugin(build_type: &BuildType, new_version: &str) {
     fs::write(cargo_toml_path, doc.to_string()).unwrap();
 
     // Build according version
-    match build_type {
+    let output = match build_type {
         BuildType::Debug => Command::new("cargo")
             .args(vec!["build"])
             .current_dir(PLUGIN_DIR)
@@ -153,6 +152,13 @@ pub fn build_plugin(build_type: &BuildType, new_version: &str) {
             .output()
             .unwrap(),
     };
+
+    if !output.status.success() {
+        io::stdout().write_all(&output.stdout).unwrap();
+        io::stderr().write_all(&output.stderr).unwrap();
+
+        assert!(output.status.code().unwrap() != 0);
+    }
 }
 
 pub fn run_cluster(
@@ -305,6 +311,8 @@ where
         .args(args)
         .args(cmd_args)
         .current_dir(current_dir)
+        .stdout(Stdio::piped())
+        .stderr(Stdio::piped())
         .status()
 }
diff --git a/tests/run.rs b/tests/run.rs
index 9db414a..b94c0de 100644
--- a/tests/run.rs
+++ b/tests/run.rs
@@ -11,6 +11,7 @@ use pike::cluster::{run, MigrationContextVar};
 use std::collections::BTreeMap;
 use std::process::Command;
 use std::time::Instant;
+use std::{env, thread};
 use std::{
     fs::{self},
     path::Path,
@@ -605,3 +606,63 @@ fn unpack_archive(path: &Path, unpack_to: &Path) {
 
     archive.unpack(unpack_to).unwrap();
 }
+
+#[test]
+fn test_run_without_plugin_directory() {
+    let run_dir = Path::new(TESTS_DIR);
+    let plugin_dir = Path::new("test_run_without_plugin_directory");
+    let data_dir = plugin_dir.join("tmp");
+
+    // Cleaning up metadata from past run
+    let _ = fs::remove_dir_all(run_dir.join(plugin_dir));
+
+    let tiers = BTreeMap::from([(
+        "default".to_string(),
+        Tier {
+            replicasets: 2,
+            replication_factor: 2,
+        },
+    )]);
+
+    let topology = Topology {
+        tiers,
+        ..Default::default()
+    };
+
+    let params = RunParamsBuilder::default()
+        .topology(topology)
+        .data_dir(run_dir.join(&data_dir))
+        .daemon(true)
+        .build()
+        .unwrap();
+
+    run(&params).unwrap();
+
+    let start = Instant::now();
+    let mut cluster_started = false;
+    while Instant::now().duration_since(start) < Duration::from_secs(60) {
+        let pico_instance = get_picodata_table(run_dir, &data_dir, "_pico_instance");
+
+        // Compare with 8, because table gives current state and target state
+        // both of them should be online
+        if pico_instance.matches("Online").count() == 8 {
+            cluster_started = true;
+            break;
+        }
+
+        thread::sleep(Duration::from_secs(1));
+    }
+
+    assert!(exec_pike(
+        vec!["stop"],
+        env::current_dir().unwrap(),
+        &vec![
+            "--data-dir".to_string(),
+            run_dir.join(&data_dir).to_str().unwrap().to_string()
+        ],
+    )
+    .unwrap()
+    .success());
+
+    assert!(cluster_started);
+}