Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Remove the --build-plan option #7902

Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
1 change: 0 additions & 1 deletion src/bin/cargo/commands/build.rs
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,6 @@ pub fn cli() -> App {
)
.arg_manifest_path()
.arg_message_format()
.arg_build_plan()
.after_help(
"\
All packages in the workspace are built if the `--workspace` flag is supplied. The
Expand Down
3 changes: 0 additions & 3 deletions src/cargo/core/compiler/build_config.rs
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,6 @@ pub struct BuildConfig {
pub message_format: MessageFormat,
/// Force Cargo to do a full rebuild and treat each target as changed.
pub force_rebuild: bool,
/// Output a build plan to stdout instead of actually compiling.
pub build_plan: bool,
/// An optional override of the rustc path for primary units only
pub primary_unit_rustc: Option<ProcessBuilder>,
pub rustfix_diagnostic_server: RefCell<Option<RustfixDiagnosticServer>>,
Expand Down Expand Up @@ -78,7 +76,6 @@ impl BuildConfig {
mode,
message_format: MessageFormat::Human,
force_rebuild: false,
build_plan: false,
primary_unit_rustc: None,
rustfix_diagnostic_server: RefCell::new(None),
})
Expand Down
162 changes: 0 additions & 162 deletions src/cargo/core/compiler/build_plan.rs

This file was deleted.

26 changes: 3 additions & 23 deletions src/cargo/core/compiler/context/mod.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
#![allow(deprecated)]
use std::collections::{BTreeSet, HashMap, HashSet};
use std::collections::{HashMap, HashSet};
use std::path::PathBuf;
use std::sync::{Arc, Mutex};

Expand All @@ -11,7 +11,6 @@ use crate::core::PackageId;
use crate::util::errors::{CargoResult, CargoResultExt};
use crate::util::{profile, Config};

use super::build_plan::BuildPlan;
use super::custom_build::{self, BuildDeps, BuildScriptOutputs, BuildScripts};
use super::fingerprint::Fingerprint;
use super::job_queue::JobQueue;
Expand Down Expand Up @@ -131,8 +130,6 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
exec: &Arc<dyn Executor>,
) -> CargoResult<Compilation<'cfg>> {
let mut queue = JobQueue::new(self.bcx, units);
let mut plan = BuildPlan::new();
let build_plan = self.bcx.build_config.build_plan;
self.prepare_units(export_dir, units)?;
self.prepare()?;
custom_build::build_map(&mut self, units)?;
Expand All @@ -145,7 +142,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
// function which will run everything in order with proper
// parallelism.
let force_rebuild = self.bcx.build_config.force_rebuild;
super::compile(&mut self, &mut queue, &mut plan, unit, exec, force_rebuild)?;
super::compile(&mut self, &mut queue, unit, exec, force_rebuild)?;
}

// Now that we've got the full job queue and we've done all our
Expand All @@ -159,12 +156,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
}

// Now that we've figured out everything that we're going to do, do it!
queue.execute(&mut self, &mut plan)?;

if build_plan {
plan.set_inputs(self.build_plan_inputs()?);
plan.output_plan();
}
queue.execute(&mut self)?;

// Collect the result of the build into `self.compilation`.
for unit in units.iter() {
Expand Down Expand Up @@ -384,18 +376,6 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
self.primary_packages.contains(&unit.pkg.package_id())
}

/// Returns the list of filenames read by cargo to generate the `BuildContext`
/// (all `Cargo.toml`, etc.).
pub fn build_plan_inputs(&self) -> CargoResult<Vec<PathBuf>> {
// Keep sorted for consistency.
let mut inputs = BTreeSet::new();
// Note: dev-deps are skipped if they are not present in the unit graph.
for unit in self.unit_dependencies.keys() {
inputs.insert(unit.pkg.manifest_path().to_path_buf());
}
Ok(inputs.into_iter().collect())
}

fn check_collistions(&self) -> CargoResult<()> {
let mut output_collisions = HashMap::new();
let describe_collision =
Expand Down
64 changes: 26 additions & 38 deletions src/cargo/core/compiler/custom_build.rs
Original file line number Diff line number Diff line change
Expand Up @@ -157,8 +157,6 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRes
let script_dir = cx.files().build_script_dir(build_script_unit);
let script_out_dir = cx.files().build_script_out_dir(unit);
let script_run_dir = cx.files().build_script_run_dir(unit);
let build_plan = bcx.build_config.build_plan;
let invocation_name = unit.buildkey();

if let Some(deps) = unit.pkg.manifest().metabuild() {
prepare_metabuild(cx, build_script_unit, deps)?;
Expand Down Expand Up @@ -299,40 +297,34 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRes
// along to this custom build command. We're also careful to augment our
// dynamic library search path in case the build script depended on any
// native dynamic libraries.
if !build_plan {
let build_script_outputs = build_script_outputs.lock().unwrap();
for (name, dep_id, dep_metadata) in lib_deps {
let script_output =
build_script_outputs
.get(dep_id, dep_metadata)
.ok_or_else(|| {
internal(format!(
"failed to locate build state for env vars: {}/{}",
dep_id, dep_metadata
))
})?;
let data = &script_output.metadata;
for &(ref key, ref value) in data.iter() {
cmd.env(
&format!("DEP_{}_{}", super::envify(&name), super::envify(key)),
value,
);
}
}
if let Some(build_scripts) = build_scripts {
super::add_plugin_deps(
&mut cmd,
&build_script_outputs,
&build_scripts,
&host_target_root,
)?;
let outputs = build_script_outputs.lock().unwrap();
for (name, dep_id, dep_metadata) in lib_deps {
let script_output =
outputs
.get(dep_id, dep_metadata)
.ok_or_else(|| {
internal(format!(
"failed to locate build state for env vars: {}/{}",
dep_id, dep_metadata
))
})?;
let data = &script_output.metadata;
for &(ref key, ref value) in data.iter() {
cmd.env(
&format!("DEP_{}_{}", super::envify(&name), super::envify(key)),
value,
);
}
}

if build_plan {
state.build_plan(invocation_name, cmd.clone(), Arc::new(Vec::new()));
return Ok(());
if let Some(build_scripts) = build_scripts {
super::add_plugin_deps(
&mut cmd,
&outputs,
&build_scripts,
&host_target_root,
)?;
}
drop(outputs);

// And now finally, run the build command itself!
state.running(&cmd);
Expand Down Expand Up @@ -406,11 +398,7 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRes
Ok(())
});

let mut job = if cx.bcx.build_config.build_plan {
Job::new(Work::noop(), Freshness::Dirty)
} else {
fingerprint::prepare_target(cx, unit, false)?
};
let mut job = fingerprint::prepare_target(cx, unit, false)?;
if job.freshness() == Freshness::Dirty {
job.before(dirty);
} else {
Expand Down
Loading