Rollup of 13 pull requests #44164

Closed
wants to merge 30 commits into from

Commits (30)
51b29d6
libcore: Implement cloned() for Option<&mut T>
panicbit Aug 6, 2017
5383205
Fix Option<&mut T>::cloned doc test
panicbit Aug 7, 2017
9618299
Assign tracking issue to option_ref_mut_cloned
panicbit Aug 8, 2017
e83c808
Don't highlight # which does not start an attribute in rustdoc
mystor Aug 17, 2017
601e3da
Add reset_err_count() to errors::Handler
topecongiro Aug 10, 2017
8cd4cac
include Cargo.{toml,lock} in rust-src tarball
RalfJung Aug 24, 2017
bd24325
Do not include the src/Cargo.toml
RalfJung Aug 26, 2017
6fc35de
Fail ./x.py on invalid command
vorner Aug 27, 2017
45d31ac
bootstrap: remove unneeded extern crate
ishitatsuyuki Aug 28, 2017
cc5ea04
un-regress behavior of `unused_results` lint for booleans
zackmdavis Aug 28, 2017
2bffa31
compiletest: Change Config comments to doc comments
Aug 28, 2017
10bd39e
Rewrite `std::net::ToSocketAddrs` doc examples.
frewsxcv Aug 27, 2017
2f19383
Update test issue-41783.rs for new attribute highlighting behaviour
mystor Aug 28, 2017
f50bf86
Fix invalid linker position
GuillaumeGomez Aug 28, 2017
ecd127d
rustbuild: Fix dependencies of build-manifest
alexcrichton Aug 29, 2017
27c4ff6
rustc: Remove `specailization_cache` in favor of a query
alexcrichton Aug 29, 2017
4312ed7
Use a byte literal ASCII 0 instead of its decimal value
dtolnay Aug 29, 2017
f2d7045
Rollup merge of #43705 - panicbit:option_ref_mut_cloned, r=aturon
Aug 29, 2017
0f2bbed
Rollup merge of #43778 - topecongiro:handler-reset-err-count, r=arielb1
Aug 29, 2017
e614fd4
Rollup merge of #43918 - mystor:rustdoc-pound, r=QuietMisdreavus
Aug 29, 2017
7c15afb
Rollup merge of #44076 - RalfJung:src, r=alexcrichton
Aug 29, 2017
f550c25
Rollup merge of #44117 - frewsxcv:frewsxcv-to-socket-addrs-examples, …
Aug 29, 2017
f33803d
Rollup merge of #44121 - ishitatsuyuki:bootstrap-deps-purge, r=Mark-S…
Aug 29, 2017
ff75c01
Rollup merge of #44122 - zackmdavis:booleans_were_not_unused_results,…
Aug 29, 2017
e5be927
Rollup merge of #44126 - laumann:config-doc-comments, r=nikomatsakis
Aug 29, 2017
35fbe62
Rollup merge of #44134 - vorner:x-py-unknown-cmd, r=nikomatsakis
Aug 29, 2017
b7b5dbc
Rollup merge of #44135 - GuillaumeGomez:fix-css-links, r=QuietMisdreavus
Aug 29, 2017
3159cee
Rollup merge of #44144 - alexcrichton:faster-hash-and-sign, r=Mark-Si…
Aug 29, 2017
df7d744
Rollup merge of #44157 - alexcrichton:no-specializes-cache, r=eddyb
Aug 29, 2017
a5e3484
Rollup merge of #44158 - dtolnay:zero48, r=sfackler
Aug 29, 2017
6 changes: 6 additions & 0 deletions src/bootstrap/dist.rs
@@ -724,6 +724,9 @@ impl Step for Src {
let dst_src = dst.join("rust");
t!(fs::create_dir_all(&dst_src));

let src_files = [
"src/Cargo.lock",
];
// This is the reduced set of paths which will become the rust-src component
// (essentially libstd and all of its path dependencies)
let std_src_dirs = [
@@ -759,6 +762,9 @@
];

copy_src_dirs(build, &std_src_dirs[..], &std_src_dirs_exclude[..], &dst_src);
for file in src_files.iter() {
copy(&build.src.join(file), &dst_src.join(file));
}

// Create source tarball in rust-installer format
let mut cmd = rust_installer(builder);
2 changes: 1 addition & 1 deletion src/bootstrap/flags.rs
@@ -136,7 +136,7 @@ To learn more about a subcommand, run `./x.py <subcommand> -h`");
None => {
// No subcommand -- show the general usage and subcommand help
println!("{}\n", subcommand_help);
process::exit(0);
process::exit(1);
}
};

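The hunk above makes `./x.py` exit with status 1 when no subcommand is given, instead of printing the usage text and reporting success. A small, hypothetical caller-side check of the new behaviour (assuming it is run from the repository root; not part of the diff):

```rust
use std::process::Command;

fn main() {
    // Invoke x.py with no subcommand; after this change the general help
    // text is still printed, but the process reports failure.
    let status = Command::new("./x.py")
        .status()
        .expect("failed to spawn ./x.py");
    assert!(!status.success(), "expected a non-zero exit code");
}
```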
1 change: 0 additions & 1 deletion src/bootstrap/lib.rs
@@ -123,7 +123,6 @@ extern crate build_helper;
extern crate serde_derive;
#[macro_use]
extern crate lazy_static;
extern crate serde;
extern crate serde_json;
extern crate cmake;
extern crate filetime;
2 changes: 1 addition & 1 deletion src/bootstrap/tool.rs
@@ -198,7 +198,7 @@ tool!(
Linkchecker, "src/tools/linkchecker", "linkchecker", Mode::Libstd;
CargoTest, "src/tools/cargotest", "cargotest", Mode::Libstd;
Compiletest, "src/tools/compiletest", "compiletest", Mode::Libtest;
BuildManifest, "src/tools/build-manifest", "build-manifest", Mode::Librustc;
BuildManifest, "src/tools/build-manifest", "build-manifest", Mode::Libstd;
RemoteTestClient, "src/tools/remote-test-client", "remote-test-client", Mode::Libstd;
RustInstaller, "src/tools/rust-installer", "rust-installer", Mode::Libstd;
);
2 changes: 1 addition & 1 deletion src/libcore/fmt/num.rs
@@ -242,7 +242,7 @@ macro_rules! impl_Display {
// decode last 1 or 2 chars
if n < 10 {
curr -= 1;
*buf_ptr.offset(curr) = (n as u8) + 48;
*buf_ptr.offset(curr) = (n as u8) + b'0';
} else {
let d1 = n << 1;
curr -= 2;
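The one-line change above replaces the magic number 48 with the byte literal `b'0'`; the two are identical at runtime, as this standalone sketch (not part of the diff) shows:

```rust
fn main() {
    // b'0' is the ASCII code of the character '0', i.e. 48.
    assert_eq!(b'0', 48u8);

    // Converting a single decimal digit to its ASCII representation reads
    // more clearly with the byte literal than with the bare constant.
    let digit: u8 = 7;
    assert_eq!((digit + b'0') as char, '7');
}
```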
20 changes: 20 additions & 0 deletions src/libcore/option.rs
@@ -774,6 +774,26 @@ impl<'a, T: Clone> Option<&'a T> {
}
}

impl<'a, T: Clone> Option<&'a mut T> {
/// Maps an `Option<&mut T>` to an `Option<T>` by cloning the contents of the
/// option.
///
/// # Examples
///
/// ```
/// #![feature(option_ref_mut_cloned)]
/// let mut x = 12;
/// let opt_x = Some(&mut x);
/// assert_eq!(opt_x, Some(&mut 12));
/// let cloned = opt_x.cloned();
/// assert_eq!(cloned, Some(12));
/// ```
#[unstable(feature = "option_ref_mut_cloned", issue = "43738")]
pub fn cloned(self) -> Option<T> {
self.map(|t| t.clone())
}
}

impl<T: Default> Option<T> {
/// Returns the contained value or a default
///
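A short nightly-only usage sketch of the `Option<&mut T>::cloned` method added in this hunk (the feature gate matches the one in the doc test; the variable names are illustrative):

```rust
#![feature(option_ref_mut_cloned)]

fn main() {
    let mut value = String::from("hello");
    let borrowed: Option<&mut String> = Some(&mut value);

    // Like the existing Option<&T>::cloned, this clones out of the
    // reference and yields an owned Option<T>.
    let owned: Option<String> = borrowed.cloned();
    assert_eq!(owned, Some(String::from("hello")));
}
```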
1 change: 1 addition & 0 deletions src/librustc/dep_graph/dep_node.rs
@@ -527,6 +527,7 @@ define_dep_nodes!( <'tcx>
[] HasGlobalAllocator(DefId),
[] ExternCrate(DefId),
[] LintLevels,
[] Specializes { impl1: DefId, impl2: DefId },
);

trait DepNodeParams<'a, 'gcx: 'tcx + 'a, 'tcx: 'a> : fmt::Debug {
4 changes: 3 additions & 1 deletion src/librustc/traits/mod.rs
@@ -38,7 +38,7 @@ pub use self::project::{ProjectionCache, ProjectionCacheSnapshot, Reveal};
pub use self::object_safety::ObjectSafetyViolation;
pub use self::object_safety::MethodViolationCode;
pub use self::select::{EvaluationCache, SelectionContext, SelectionCache};
pub use self::specialize::{OverlapError, specialization_graph, specializes, translate_substs};
pub use self::specialize::{OverlapError, specialization_graph, translate_substs};
pub use self::specialize::{SpecializesCache, find_associated_item};
pub use self::util::elaborate_predicates;
pub use self::util::supertraits;
@@ -831,6 +831,7 @@ pub fn provide(providers: &mut ty::maps::Providers) {
*providers = ty::maps::Providers {
is_object_safe: object_safety::is_object_safe_provider,
specialization_graph_of: specialize::specialization_graph_provider,
specializes: specialize::specializes,
..*providers
};
}
@@ -839,6 +840,7 @@ pub fn provide_extern(providers: &mut ty::maps::Providers) {
*providers = ty::maps::Providers {
is_object_safe: object_safety::is_object_safe_provider,
specialization_graph_of: specialize::specialization_graph_provider,
specializes: specialize::specializes,
..*providers
};
}
3 changes: 1 addition & 2 deletions src/librustc/traits/select.rs
@@ -36,7 +36,6 @@ use infer;
use infer::{InferCtxt, InferOk, TypeFreshener};
use ty::subst::{Kind, Subst, Substs};
use ty::{self, ToPredicate, ToPolyTraitRef, Ty, TyCtxt, TypeFoldable};
use traits;
use ty::fast_reject;
use ty::relate::TypeRelation;
use middle::lang_items;
@@ -1927,7 +1926,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
if other.evaluation == EvaluatedToOk {
if let ImplCandidate(victim_def) = victim.candidate {
let tcx = self.tcx().global_tcx();
return traits::specializes(tcx, other_def, victim_def) ||
return tcx.specializes((other_def, victim_def)) ||
tcx.impls_are_allowed_to_overlap(other_def, victim_def);
}
}
18 changes: 6 additions & 12 deletions src/librustc/traits/specialize/mod.rs
@@ -150,15 +150,12 @@ pub fn find_associated_item<'a, 'tcx>(
/// Specialization is determined by the sets of types to which the impls apply;
/// impl1 specializes impl2 if it applies to a subset of the types impl2 applies
/// to.
pub fn specializes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
impl1_def_id: DefId,
impl2_def_id: DefId) -> bool {
pub(super) fn specializes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
(impl1_def_id, impl2_def_id): (DefId, DefId))
-> bool
{
debug!("specializes({:?}, {:?})", impl1_def_id, impl2_def_id);

if let Some(r) = tcx.specializes_cache.borrow().check(impl1_def_id, impl2_def_id) {
return r;
}

// The feature gate should prevent introducing new specializations, but not
// taking advantage of upstream ones.
if !tcx.sess.features.borrow().specialization &&
@@ -188,7 +185,7 @@ pub fn specializes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
let impl1_trait_ref = tcx.impl_trait_ref(impl1_def_id).unwrap();

// Create a infcx, taking the predicates of impl1 as assumptions:
let result = tcx.infer_ctxt().enter(|infcx| {
tcx.infer_ctxt().enter(|infcx| {
// Normalize the trait reference. The WF rules ought to ensure
// that this always succeeds.
let impl1_trait_ref =
@@ -204,10 +201,7 @@ pub fn specializes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,

// Attempt to prove that impl2 applies, given all of the above.
fulfill_implication(&infcx, penv, impl1_trait_ref, impl2_def_id).is_ok()
});

tcx.specializes_cache.borrow_mut().insert(impl1_def_id, impl2_def_id, result);
result
})
}

/// Attempt to fulfill all obligations of `target_impl` after unification with
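For readers unfamiliar with the relation that `specializes` computes (now as a query rather than through the removed `SpecializesCache`), here is a minimal, nightly-only illustration of the doc comment above: the `String` impl applies to a subset of the types the blanket impl applies to, so it specializes it. The trait and names are invented for the example.

```rust
#![feature(specialization)]

trait Greet {
    fn greet(&self) -> &'static str;
}

// "impl2": applies to every type T.
impl<T> Greet for T {
    default fn greet(&self) -> &'static str { "hello" }
}

// "impl1": applies only to String, a subset of "every T",
// so it specializes the blanket impl above.
impl Greet for String {
    fn greet(&self) -> &'static str { "hello, String" }
}

fn main() {
    assert_eq!(42.greet(), "hello");
    assert_eq!(String::from("x").greet(), "hello, String");
}
```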
6 changes: 3 additions & 3 deletions src/librustc/traits/specialize/specialization_graph.rs
@@ -8,7 +8,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use super::{OverlapError, specializes};
use super::OverlapError;

use hir::def_id::DefId;
use traits;
@@ -118,8 +118,8 @@ impl<'a, 'gcx, 'tcx> Children {
return Ok((false, false));
}

let le = specializes(tcx, impl_def_id, possible_sibling);
let ge = specializes(tcx, possible_sibling, impl_def_id);
let le = tcx.specializes((impl_def_id, possible_sibling));
let ge = tcx.specializes((possible_sibling, impl_def_id));

if le == ge {
// overlap, but no specialization; error out
3 changes: 0 additions & 3 deletions src/librustc/ty/context.rs
@@ -808,8 +808,6 @@ pub struct GlobalCtxt<'tcx> {

pub sess: &'tcx Session,

pub specializes_cache: RefCell<traits::SpecializesCache>,

pub trans_trait_caches: traits::trans::TransTraitCaches<'tcx>,

pub dep_graph: DepGraph,
@@ -1072,7 +1070,6 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
tls::enter_global(GlobalCtxt {
sess: s,
trans_trait_caches: traits::trans::TransTraitCaches::new(dep_graph.clone()),
specializes_cache: RefCell::new(traits::SpecializesCache::new()),
global_arenas: arenas,
global_interners: interners,
dep_graph: dep_graph.clone(),
12 changes: 12 additions & 0 deletions src/librustc/ty/maps.rs
@@ -540,6 +540,12 @@ impl<'tcx> QueryDescription for queries::lint_levels<'tcx> {
}
}

impl<'tcx> QueryDescription for queries::specializes<'tcx> {
fn describe(_tcx: TyCtxt, _: (DefId, DefId)) -> String {
format!("computing whether impls specialize one another")
}
}

// If enabled, send a message to the profile-queries thread
macro_rules! profq_msg {
($tcx:expr, $msg:expr) => {
@@ -1108,6 +1114,8 @@ define_maps! { <'tcx>
[] extern_crate: ExternCrate(DefId) -> Rc<Option<ExternCrate>>,

[] lint_levels: lint_levels(CrateNum) -> Rc<lint::LintLevelMap>,

[] specializes: specializes_node((DefId, DefId)) -> bool,
}

fn type_param_predicates<'tcx>((item_id, param_id): (DefId, DefId)) -> DepConstructor<'tcx> {
@@ -1183,3 +1191,7 @@ fn layout_dep_node<'tcx>(_: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> DepConstructor<'
fn lint_levels<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
DepConstructor::LintLevels
}

fn specializes_node<'tcx>((a, b): (DefId, DefId)) -> DepConstructor<'tcx> {
DepConstructor::Specializes { impl1: a, impl2: b }
}
6 changes: 6 additions & 0 deletions src/librustc_errors/lib.rs
@@ -306,6 +306,12 @@ impl Handler {
self.continue_after_error.set(continue_after_error);
}

// NOTE: DO NOT call this function from rustc, as it relies on `err_count` being non-zero
// if an error happened to avoid ICEs. This function should only be called from tools.
pub fn reset_err_count(&self) {
self.err_count.set(0);
}

pub fn struct_dummy<'a>(&'a self) -> DiagnosticBuilder<'a> {
DiagnosticBuilder::new(self, Level::Cancelled, "")
}
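A hedged sketch of the intended tool-side use of `reset_err_count` (the surrounding types and helpers are hypothetical, not part of this PR; only `has_errors` and the new `reset_err_count` are real `Handler` methods):

```rust
// Reuse one diagnostics Handler across many independent inputs, as an
// external tool (not rustc itself) might do.
fn check_all(handler: &rustc_errors::Handler, inputs: &[String]) -> usize {
    let mut failures = 0;
    for input in inputs {
        check_one(handler, input); // hypothetical per-input work
        if handler.has_errors() {
            failures += 1;
        }
        // Clear the count so errors from this input do not make every
        // later input look like it failed as well.
        handler.reset_err_count();
    }
    failures
}

fn check_one(_handler: &rustc_errors::Handler, _input: &str) {
    // Placeholder: a real tool would parse and emit diagnostics here.
}
```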
9 changes: 8 additions & 1 deletion src/librustc_lint/unused.rs
@@ -182,7 +182,14 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnusedResults {
}

if !(ty_warned || fn_warned) {
cx.span_lint(UNUSED_RESULTS, s.span, "unused result");
match t.sty {
// Historically, booleans have not been considered unused
// results. (See Issue #44119.)
ty::TyBool => return,
_ => {
cx.span_lint(UNUSED_RESULTS, s.span, "unused result");
}
}
}

fn check_must_use(cx: &LateContext, def_id: DefId, sp: Span, describe_path: &str) -> bool {
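The lint change above means a crate like the following sketch warns about a discarded non-boolean result but stays quiet about a discarded `bool`, restoring the lint's historical behaviour (see issue #44119); the functions are invented for illustration:

```rust
#![warn(unused_results)]

fn answer() -> i32 { 42 }
fn is_even(n: i32) -> bool { n % 2 == 0 }

fn main() {
    // Still linted: a non-unit, non-bool value is discarded.
    answer();

    // No longer linted after this change: booleans are exempt.
    is_even(2);

    // Explicitly discarding silences the lint in either case.
    let _ = answer();
}
```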
2 changes: 1 addition & 1 deletion src/librustdoc/html/format.rs
@@ -228,7 +228,7 @@ impl<'a> fmt::Display for WhereClause<'a> {
}

if end_newline {
//add a space so stripping <br> tags and breaking spaces still renders properly
// add a space so stripping <br> tags and breaking spaces still renders properly
if f.alternate() {
clause.push(' ');
} else {
58 changes: 43 additions & 15 deletions src/librustdoc/html/highlight.rs
@@ -172,6 +172,21 @@ impl<'a> Classifier<'a> {
}
}

/// Gets the next token out of the lexer, emitting fatal errors if lexing fails.
fn try_next_token(&mut self) -> io::Result<TokenAndSpan> {
match self.lexer.try_next_token() {
Ok(tas) => Ok(tas),
Err(_) => {
self.lexer.emit_fatal_errors();
self.lexer.sess.span_diagnostic
.struct_warn("Backing out of syntax highlighting")
.note("You probably did not intend to render this as a rust code-block")
.emit();
Err(io::Error::new(io::ErrorKind::Other, ""))
}
}
}

/// Exhausts the `lexer` writing the output into `out`.
///
/// The general structure for this method is to iterate over each token,
@@ -183,18 +198,7 @@ impl<'a> Classifier<'a> {
out: &mut W)
-> io::Result<()> {
loop {
let next = match self.lexer.try_next_token() {
Ok(tas) => tas,
Err(_) => {
self.lexer.emit_fatal_errors();
self.lexer.sess.span_diagnostic
.struct_warn("Backing out of syntax highlighting")
.note("You probably did not intend to render this as a rust code-block")
.emit();
return Err(io::Error::new(io::ErrorKind::Other, ""));
}
};

let next = self.try_next_token()?;
if next.tok == token::Eof {
break;
}
@@ -255,13 +259,37 @@ impl<'a> Classifier<'a> {
}
}

// This is the start of an attribute. We're going to want to
// This might be the start of an attribute. We're going to want to
// continue highlighting it as an attribute until the ending ']' is
// seen, so skip out early. Down below we terminate the attribute
// span when we see the ']'.
token::Pound => {
self.in_attribute = true;
out.enter_span(Class::Attribute)?;
// We can't be sure that our # begins an attribute (it could
// just be appearing in a macro) until we read either `#![` or
// `#[` from the input stream.
//
// We don't want to start highlighting as an attribute until
// we're confident there is going to be a ] coming up, as
// otherwise # tokens in macros highlight the rest of the input
// as an attribute.

// Case 1: #![inner_attribute]
if self.lexer.peek().tok == token::Not {
self.try_next_token()?; // NOTE: consumes `!` token!
if self.lexer.peek().tok == token::OpenDelim(token::Bracket) {
self.in_attribute = true;
out.enter_span(Class::Attribute)?;
}
out.string("#", Class::None, None)?;
out.string("!", Class::None, None)?;
return Ok(());
}

// Case 2: #[outer_attribute]
if self.lexer.peek().tok == token::OpenDelim(token::Bracket) {
self.in_attribute = true;
out.enter_span(Class::Attribute)?;
}
out.string("#", Class::None, None)?;
return Ok(());
}
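The new peek-based logic above only enters an attribute span after seeing `#[` or `#![`. The motivating case is a bare `#` inside a macro; in an illustrative snippet like the one below (not taken from the PR's test), the old highlighter would treat everything after the `#` separator as one attribute, while the real attributes still highlight correctly:

```rust
#![allow(dead_code)]          // inner attribute: `#![` is detected

#[derive(Debug)]              // outer attribute: `#[` is detected
struct Point { x: i32, y: i32 }

macro_rules! count {
    // A bare `#` acting as a repetition separator: it is not followed by
    // `[` or `![`, so it is no longer highlighted as an attribute.
    ($($x:tt)#*) => { [$($x),*].len() };
}

fn main() {
    let p = Point { x: 1, y: 2 };
    println!("{:?} has {} coordinates", p, count!(1 # 2));
}
```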
6 changes: 2 additions & 4 deletions src/librustdoc/html/render.rs
@@ -1523,17 +1523,15 @@ impl<'a> fmt::Display for Item<'a> {
} else {
write!(fmt, "Module ")?;
},
clean::FunctionItem(..) | clean::ForeignFunctionItem(..) =>
write!(fmt, "Function ")?,
clean::FunctionItem(..) | clean::ForeignFunctionItem(..) => write!(fmt, "Function ")?,
clean::TraitItem(..) => write!(fmt, "Trait ")?,
clean::StructItem(..) => write!(fmt, "Struct ")?,
clean::UnionItem(..) => write!(fmt, "Union ")?,
clean::EnumItem(..) => write!(fmt, "Enum ")?,
clean::TypedefItem(..) => write!(fmt, "Type Definition ")?,
clean::MacroItem(..) => write!(fmt, "Macro ")?,
clean::PrimitiveItem(..) => write!(fmt, "Primitive Type ")?,
clean::StaticItem(..) | clean::ForeignStaticItem(..) =>
write!(fmt, "Static ")?,
clean::StaticItem(..) | clean::ForeignStaticItem(..) => write!(fmt, "Static ")?,
clean::ConstantItem(..) => write!(fmt, "Constant ")?,
_ => {
// We don't generate pages for any other type.