diff --git a/compiler/rustc_ast/src/ast.rs b/compiler/rustc_ast/src/ast.rs
index fa0ef165cb4f3..cf31e566c384e 100644
--- a/compiler/rustc_ast/src/ast.rs
+++ b/compiler/rustc_ast/src/ast.rs
@@ -630,16 +630,16 @@ impl Pat {
     }
 }
 
-/// A single field in a struct pattern
+/// A single field in a struct pattern.
 ///
-/// Patterns like the fields of Foo `{ x, ref y, ref mut z }`
-/// are treated the same as` x: x, y: ref y, z: ref mut z`,
-/// except is_shorthand is true
+/// Patterns like the fields of `Foo { x, ref y, ref mut z }`
+/// are treated the same as `x: x, y: ref y, z: ref mut z`,
+/// except when `is_shorthand` is true.
 #[derive(Clone, Encodable, Decodable, Debug)]
 pub struct FieldPat {
-    /// The identifier for the field
+    /// The identifier for the field.
     pub ident: Ident,
-    /// The pattern the field is destructured to
+    /// The pattern the field is destructured to.
     pub pat: P<Pat>,
     pub is_shorthand: bool,
     pub attrs: AttrVec,
diff --git a/compiler/rustc_ast_lowering/src/lib.rs b/compiler/rustc_ast_lowering/src/lib.rs
index f08f514f6f7f6..d17b29089d694 100644
--- a/compiler/rustc_ast_lowering/src/lib.rs
+++ b/compiler/rustc_ast_lowering/src/lib.rs
@@ -1767,7 +1767,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
         }
         self.arena.alloc_from_iter(inputs.iter().map(|param| match param.pat.kind {
             PatKind::Ident(_, ident, _) => ident,
-            _ => Ident::new(kw::Invalid, param.pat.span),
+            _ => Ident::new(kw::Empty, param.pat.span),
         }))
     }
diff --git a/compiler/rustc_ast_passes/src/ast_validation.rs b/compiler/rustc_ast_passes/src/ast_validation.rs
index 686300c7c5fec..f9eb69bb43815 100644
--- a/compiler/rustc_ast_passes/src/ast_validation.rs
+++ b/compiler/rustc_ast_passes/src/ast_validation.rs
@@ -184,7 +184,7 @@ impl<'a> AstValidator<'a> {
     }
 
     fn check_lifetime(&self, ident: Ident) {
-        let valid_names = [kw::UnderscoreLifetime, kw::StaticLifetime, kw::Invalid];
+        let valid_names = [kw::UnderscoreLifetime, kw::StaticLifetime, kw::Empty];
         if !valid_names.contains(&ident.name) && ident.without_first_quote().is_reserved() {
             self.err_handler().span_err(ident.span, "lifetimes cannot use keyword names");
         }
diff --git a/compiler/rustc_ast_pretty/src/pprust/state.rs b/compiler/rustc_ast_pretty/src/pprust/state.rs
index dcb6e115eda05..333a396a0b4fc 100644
--- a/compiler/rustc_ast_pretty/src/pprust/state.rs
+++ b/compiler/rustc_ast_pretty/src/pprust/state.rs
@@ -2787,7 +2787,7 @@ impl<'a> State<'a> {
             self.print_explicit_self(&eself);
         } else {
             let invalid = if let PatKind::Ident(_, ident, _) = input.pat.kind {
-                ident.name == kw::Invalid
+                ident.name == kw::Empty
             } else {
                 false
             };
diff --git a/compiler/rustc_builtin_macros/src/llvm_asm.rs b/compiler/rustc_builtin_macros/src/llvm_asm.rs
index d203b5bc5429e..d72bfa660e58f 100644
--- a/compiler/rustc_builtin_macros/src/llvm_asm.rs
+++ b/compiler/rustc_builtin_macros/src/llvm_asm.rs
@@ -95,7 +95,7 @@ fn parse_inline_asm<'a>(
         })
         .unwrap_or(tts.len());
     let mut p = cx.new_parser_from_tts(tts.trees().skip(first_colon).collect());
-    let mut asm = kw::Invalid;
+    let mut asm = kw::Empty;
     let mut asm_str_style = None;
     let mut outputs = Vec::new();
     let mut inputs = Vec::new();
diff --git a/compiler/rustc_codegen_ssa/src/mir/debuginfo.rs b/compiler/rustc_codegen_ssa/src/mir/debuginfo.rs
index d5b2cbaa55843..f1eae605da018 100644
--- a/compiler/rustc_codegen_ssa/src/mir/debuginfo.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/debuginfo.rs
@@ -170,7 +170,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
             // (after #67586 gets fixed).
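// NOTE (illustrative sketch, not part of this patch): the renamed keyword
// interns the empty string, so it marks "this binding has no source-level
// name" rather than an error state. Under that assumption:
//
//     use rustc_span::symbol::{kw, Ident};
//     let anon = Ident::new(kw::Empty, rustc_span::DUMMY_SP);
//     assert_eq!(anon.name.as_str(), "");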
None } else { - let name = kw::Invalid; + let name = kw::Empty; let decl = &self.mir.local_decls[local]; let dbg_var = if full_debug_info { self.adjusted_span_and_dbg_scope(decl.source_info).map( @@ -204,7 +204,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { None } else { Some(match whole_local_var.or(fallback_var) { - Some(var) if var.name != kw::Invalid => var.name.to_string(), + Some(var) if var.name != kw::Empty => var.name.to_string(), _ => format!("{:?}", local), }) }; diff --git a/compiler/rustc_hir/src/hir.rs b/compiler/rustc_hir/src/hir.rs index 23999a8dca00f..4ba1416be540e 100644 --- a/compiler/rustc_hir/src/hir.rs +++ b/compiler/rustc_hir/src/hir.rs @@ -28,7 +28,7 @@ pub struct Lifetime { pub span: Span, /// Either "`'a`", referring to a named lifetime definition, - /// or "``" (i.e., `kw::Invalid`), for elision placeholders. + /// or "``" (i.e., `kw::Empty`), for elision placeholders. /// /// HIR lowering inserts these placeholders in type paths that /// refer to type definitions needing lifetime parameters, diff --git a/compiler/rustc_lint/src/builtin.rs b/compiler/rustc_lint/src/builtin.rs index fa943d0d748a3..a8371274f61f8 100644 --- a/compiler/rustc_lint/src/builtin.rs +++ b/compiler/rustc_lint/src/builtin.rs @@ -868,7 +868,7 @@ impl EarlyLintPass for AnonymousParameters { if let ast::AssocItemKind::Fn(_, ref sig, _, _) = it.kind { for arg in sig.decl.inputs.iter() { if let ast::PatKind::Ident(_, ident, None) = arg.pat.kind { - if ident.name == kw::Invalid { + if ident.name == kw::Empty { cx.struct_span_lint(ANONYMOUS_PARAMETERS, arg.pat.span, |lint| { let ty_snip = cx.sess.source_map().span_to_snippet(arg.ty.span); diff --git a/compiler/rustc_lint/src/context.rs b/compiler/rustc_lint/src/context.rs index bfeef4904893a..ea4f7daec43d1 100644 --- a/compiler/rustc_lint/src/context.rs +++ b/compiler/rustc_lint/src/context.rs @@ -728,7 +728,7 @@ impl<'tcx> LateContext<'tcx> { /// Check if a `DefId`'s path matches the given absolute type path usage. /// - /// Anonymous scopes such as `extern` imports are matched with `kw::Invalid`; + /// Anonymous scopes such as `extern` imports are matched with `kw::Empty`; /// inherent `impl` blocks are matched with the name of the type. 
/// /// Instead of using this method, it is often preferable to instead use diff --git a/compiler/rustc_metadata/src/native_libs.rs b/compiler/rustc_metadata/src/native_libs.rs index fe29f9d177f92..744fdc83a91ec 100644 --- a/compiler/rustc_metadata/src/native_libs.rs +++ b/compiler/rustc_metadata/src/native_libs.rs @@ -132,7 +132,7 @@ impl ItemLikeVisitor<'tcx> for Collector<'tcx> { impl Collector<'tcx> { fn register_native_lib(&mut self, span: Option, lib: NativeLib) { - if lib.name.as_ref().map(|&s| s == kw::Invalid).unwrap_or(false) { + if lib.name.as_ref().map(|&s| s == kw::Empty).unwrap_or(false) { match span { Some(span) => { struct_span_err!( diff --git a/compiler/rustc_middle/src/hir/map/mod.rs b/compiler/rustc_middle/src/hir/map/mod.rs index 598e28c1a3ab0..09d5b10210312 100644 --- a/compiler/rustc_middle/src/hir/map/mod.rs +++ b/compiler/rustc_middle/src/hir/map/mod.rs @@ -379,7 +379,7 @@ impl<'hir> Map<'hir> { pub fn body_param_names(&self, id: BodyId) -> impl Iterator + 'hir { self.body(id).params.iter().map(|arg| match arg.pat.kind { PatKind::Binding(_, _, ident, _) => ident, - _ => Ident::new(kw::Invalid, rustc_span::DUMMY_SP), + _ => Ident::new(kw::Empty, rustc_span::DUMMY_SP), }) } diff --git a/compiler/rustc_middle/src/ty/print/pretty.rs b/compiler/rustc_middle/src/ty/print/pretty.rs index 9b178d9d2bd00..893572785f76b 100644 --- a/compiler/rustc_middle/src/ty/print/pretty.rs +++ b/compiler/rustc_middle/src/ty/print/pretty.rs @@ -1481,7 +1481,7 @@ impl Printer<'tcx> for FmtPrinter<'_, 'tcx, F> { // FIXME(eddyb) `name` should never be empty, but it // currently is for `extern { ... }` "foreign modules". let name = disambiguated_data.data.name(); - if name != DefPathDataName::Named(kw::Invalid) { + if name != DefPathDataName::Named(kw::Empty) { if !self.empty_path { write!(self, "::")?; } @@ -1608,14 +1608,14 @@ impl PrettyPrinter<'tcx> for FmtPrinter<'_, 'tcx, F> { match *region { ty::ReEarlyBound(ref data) => { - data.name != kw::Invalid && data.name != kw::UnderscoreLifetime + data.name != kw::Empty && data.name != kw::UnderscoreLifetime } ty::ReLateBound(_, ty::BoundRegion { kind: br }) | ty::ReFree(ty::FreeRegion { bound_region: br, .. }) | ty::RePlaceholder(ty::Placeholder { name: br, .. }) => { if let ty::BrNamed(_, name) = br { - if name != kw::Invalid && name != kw::UnderscoreLifetime { + if name != kw::Empty && name != kw::UnderscoreLifetime { return true; } } @@ -1685,7 +1685,7 @@ impl FmtPrinter<'_, '_, F> { // `explain_region()` or `note_and_explain_region()`. match *region { ty::ReEarlyBound(ref data) => { - if data.name != kw::Invalid { + if data.name != kw::Empty { p!(write("{}", data.name)); return Ok(self); } @@ -1694,7 +1694,7 @@ impl FmtPrinter<'_, '_, F> { | ty::ReFree(ty::FreeRegion { bound_region: br, .. }) | ty::RePlaceholder(ty::Placeholder { name: br, .. 
}) => { if let ty::BrNamed(_, name) = br { - if name != kw::Invalid && name != kw::UnderscoreLifetime { + if name != kw::Empty && name != kw::UnderscoreLifetime { p!(write("{}", name)); return Ok(self); } diff --git a/compiler/rustc_mir/src/interpret/intrinsics.rs b/compiler/rustc_mir/src/interpret/intrinsics.rs index c182fa35ee24a..58858c09f44ef 100644 --- a/compiler/rustc_mir/src/interpret/intrinsics.rs +++ b/compiler/rustc_mir/src/interpret/intrinsics.rs @@ -141,9 +141,11 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { } sym::min_align_of_val | sym::size_of_val => { - let place = self.deref_operand(args[0])?; + // Avoid `deref_operand` -- this is not a deref, the ptr does not have to be + // dereferencable! + let place = self.ref_to_mplace(self.read_immediate(args[0])?)?; let (size, align) = self - .size_and_align_of(place.meta, place.layout)? + .size_and_align_of_mplace(place)? .ok_or_else(|| err_unsup_format!("`extern type` does not have known layout"))?; let result = match intrinsic_name { diff --git a/compiler/rustc_mir/src/interpret/validity.rs b/compiler/rustc_mir/src/interpret/validity.rs index a6ea039f278a1..423d1270ac865 100644 --- a/compiler/rustc_mir/src/interpret/validity.rs +++ b/compiler/rustc_mir/src/interpret/validity.rs @@ -391,7 +391,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, ' } // Make sure this is dereferenceable and all. let size_and_align = try_validation!( - self.ecx.size_and_align_of(place.meta, place.layout), + self.ecx.size_and_align_of_mplace(place), self.path, err_ub!(InvalidMeta(msg)) => { "invalid {} metadata: {}", kind, msg }, ); diff --git a/compiler/rustc_mir/src/transform/promote_consts.rs b/compiler/rustc_mir/src/transform/promote_consts.rs index 8d5ed747c3f8f..ea92e23e9bffb 100644 --- a/compiler/rustc_mir/src/transform/promote_consts.rs +++ b/compiler/rustc_mir/src/transform/promote_consts.rs @@ -90,7 +90,7 @@ pub enum TempState { impl TempState { pub fn is_promotable(&self) -> bool { debug!("is_promotable: self={:?}", self); - matches!(self, TempState::Defined { .. } ) + matches!(self, TempState::Defined { .. }) } } @@ -309,50 +309,26 @@ impl<'tcx> Validator<'_, 'tcx> { let statement = &self.body[loc.block].statements[loc.statement_index]; match &statement.kind { StatementKind::Assign(box (_, Rvalue::Ref(_, kind, place))) => { - match kind { - BorrowKind::Shared | BorrowKind::Mut { .. } => {} - - // FIXME(eddyb) these aren't promoted here but *could* - // be promoted as part of a larger value because - // `validate_rvalue` doesn't check them, need to - // figure out what is the intended behavior. - BorrowKind::Shallow | BorrowKind::Unique => return Err(Unpromotable), - } - // We can only promote interior borrows of promotable temps (non-temps // don't get promoted anyway). self.validate_local(place.local)?; + // The reference operation itself must be promotable. + // (Needs to come after `validate_local` to avoid ICEs.) + self.validate_ref(*kind, place)?; + + // We do not check all the projections (they do not get promoted anyway), + // but we do stay away from promoting anything involving a dereference. if place.projection.contains(&ProjectionElem::Deref) { return Err(Unpromotable); } - if self.qualif_local::(place.local) { - return Err(Unpromotable); - } - // FIXME(eddyb) this duplicates part of `validate_rvalue`. 
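// NOTE (sketch, not part of this patch): the zero-length-array carve-out
// validated below is what keeps the long-standing special case compiling,
// since a `&mut` of a length-0 array can be promoted without any observable
// mutation ever being possible:
//
//     let empty: &'static mut [i32] = &mut [];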
- let has_mut_interior = - self.qualif_local::(place.local); - if has_mut_interior { + // We cannot promote things that need dropping, since the promoted value + // would not get dropped. + if self.qualif_local::(place.local) { return Err(Unpromotable); } - if let BorrowKind::Mut { .. } = kind { - let ty = place.ty(self.body, self.tcx).ty; - - // In theory, any zero-sized value could be borrowed - // mutably without consequences. However, only &mut [] - // is allowed right now. - if let ty::Array(_, len) = ty.kind() { - match len.try_eval_usize(self.tcx, self.param_env) { - Some(0) => {} - _ => return Err(Unpromotable), - } - } else { - return Err(Unpromotable); - } - } - Ok(()) } _ => bug!(), @@ -572,58 +548,115 @@ impl<'tcx> Validator<'_, 'tcx> { } } - fn validate_rvalue(&self, rvalue: &Rvalue<'tcx>) -> Result<(), Unpromotable> { - match *rvalue { - Rvalue::Cast(CastKind::Misc, ref operand, cast_ty) => { - let operand_ty = operand.ty(self.body, self.tcx); - let cast_in = CastTy::from_ty(operand_ty).expect("bad input type for cast"); - let cast_out = CastTy::from_ty(cast_ty).expect("bad output type for cast"); - if let (CastTy::Ptr(_) | CastTy::FnPtr, CastTy::Int(_)) = (cast_in, cast_out) { - // ptr-to-int casts are not possible in consts and thus not promotable + fn validate_ref(&self, kind: BorrowKind, place: &Place<'tcx>) -> Result<(), Unpromotable> { + match kind { + // Reject these borrow types just to be safe. + // FIXME(RalfJung): could we allow them? Should we? No point in it until we have a usecase. + BorrowKind::Shallow | BorrowKind::Unique => return Err(Unpromotable), + + BorrowKind::Shared => { + let has_mut_interior = self.qualif_local::(place.local); + if has_mut_interior { return Err(Unpromotable); } } - Rvalue::BinaryOp(op, ref lhs, _) => { - if let ty::RawPtr(_) | ty::FnPtr(..) = lhs.ty(self.body, self.tcx).kind() { - assert!( - op == BinOp::Eq - || op == BinOp::Ne - || op == BinOp::Le - || op == BinOp::Lt - || op == BinOp::Ge - || op == BinOp::Gt - || op == BinOp::Offset - ); + BorrowKind::Mut { .. } => { + let ty = place.ty(self.body, self.tcx).ty; - // raw pointer operations are not allowed inside consts and thus not promotable + // In theory, any zero-sized value could be borrowed + // mutably without consequences. However, only &mut [] + // is allowed right now. + if let ty::Array(_, len) = ty.kind() { + match len.try_eval_usize(self.tcx, self.param_env) { + Some(0) => {} + _ => return Err(Unpromotable), + } + } else { return Err(Unpromotable); } } - - Rvalue::NullaryOp(NullOp::Box, _) => return Err(Unpromotable), - - // FIXME(RalfJung): the rest is *implicitly considered promotable*... that seems dangerous. - _ => {} } + Ok(()) + } + + fn validate_rvalue(&self, rvalue: &Rvalue<'tcx>) -> Result<(), Unpromotable> { match rvalue { - Rvalue::ThreadLocalRef(_) => Err(Unpromotable), + Rvalue::Use(operand) + | Rvalue::Repeat(operand, _) + | Rvalue::UnaryOp(UnOp::Not | UnOp::Neg, operand) => { + self.validate_operand(operand)?; + } - Rvalue::NullaryOp(..) => Ok(()), + Rvalue::Discriminant(place) | Rvalue::Len(place) => { + self.validate_place(place.as_ref())? 
+ } - Rvalue::Discriminant(place) | Rvalue::Len(place) => self.validate_place(place.as_ref()), + Rvalue::ThreadLocalRef(_) => return Err(Unpromotable), - Rvalue::Use(operand) - | Rvalue::Repeat(operand, _) - | Rvalue::UnaryOp(_, operand) - | Rvalue::Cast(_, operand, _) => self.validate_operand(operand), + Rvalue::Cast(kind, operand, cast_ty) => { + if matches!(kind, CastKind::Misc) { + let operand_ty = operand.ty(self.body, self.tcx); + let cast_in = CastTy::from_ty(operand_ty).expect("bad input type for cast"); + let cast_out = CastTy::from_ty(cast_ty).expect("bad output type for cast"); + if let (CastTy::Ptr(_) | CastTy::FnPtr, CastTy::Int(_)) = (cast_in, cast_out) { + // ptr-to-int casts are not possible in consts and thus not promotable + return Err(Unpromotable); + } + // int-to-ptr casts are fine, they just use the integer value at pointer type. + } + + self.validate_operand(operand)?; + } + + Rvalue::BinaryOp(op, lhs, rhs) | Rvalue::CheckedBinaryOp(op, lhs, rhs) => { + let op = *op; + if let ty::RawPtr(_) | ty::FnPtr(..) = lhs.ty(self.body, self.tcx).kind() { + // raw pointer operations are not allowed inside consts and thus not promotable + assert!(matches!( + op, + BinOp::Eq + | BinOp::Ne + | BinOp::Le + | BinOp::Lt + | BinOp::Ge + | BinOp::Gt + | BinOp::Offset + )); + return Err(Unpromotable); + } + + match op { + // FIXME: reject operations that can fail -- namely, division and modulo. + BinOp::Eq + | BinOp::Ne + | BinOp::Le + | BinOp::Lt + | BinOp::Ge + | BinOp::Gt + | BinOp::Offset + | BinOp::Add + | BinOp::Sub + | BinOp::Mul + | BinOp::Div + | BinOp::Rem + | BinOp::BitXor + | BinOp::BitAnd + | BinOp::BitOr + | BinOp::Shl + | BinOp::Shr => {} + } - Rvalue::BinaryOp(_, lhs, rhs) | Rvalue::CheckedBinaryOp(_, lhs, rhs) => { self.validate_operand(lhs)?; - self.validate_operand(rhs) + self.validate_operand(rhs)?; } + Rvalue::NullaryOp(op, _) => match op { + NullOp::Box => return Err(Unpromotable), + NullOp::SizeOf => {} + }, + Rvalue::AddressOf(_, place) => { // We accept `&raw *`, i.e., raw reborrows -- creating a raw pointer is // no problem, only using it is. @@ -636,53 +669,36 @@ impl<'tcx> Validator<'_, 'tcx> { }); } } - Err(Unpromotable) + return Err(Unpromotable); } Rvalue::Ref(_, kind, place) => { - if let BorrowKind::Mut { .. } = kind { - let ty = place.ty(self.body, self.tcx).ty; - - // In theory, any zero-sized value could be borrowed - // mutably without consequences. However, only &mut [] - // is allowed right now. - if let ty::Array(_, len) = ty.kind() { - match len.try_eval_usize(self.tcx, self.param_env) { - Some(0) => {} - _ => return Err(Unpromotable), - } - } else { - return Err(Unpromotable); - } - } - // Special-case reborrows to be more like a copy of the reference. - let mut place = place.as_ref(); - if let [proj_base @ .., ProjectionElem::Deref] = &place.projection { - let base_ty = Place::ty_from(place.local, proj_base, self.body, self.tcx).ty; + let mut place_simplified = place.as_ref(); + if let [proj_base @ .., ProjectionElem::Deref] = &place_simplified.projection { + let base_ty = + Place::ty_from(place_simplified.local, proj_base, self.body, self.tcx).ty; if let ty::Ref(..) 
= base_ty.kind() { - place = PlaceRef { local: place.local, projection: proj_base }; + place_simplified = + PlaceRef { local: place_simplified.local, projection: proj_base }; } } - self.validate_place(place)?; - - let has_mut_interior = self.qualif_local::(place.local); - if has_mut_interior { - return Err(Unpromotable); - } + self.validate_place(place_simplified)?; - Ok(()) + // Check that the reference is fine (using the original place!). + // (Needs to come after `validate_place` to avoid ICEs.) + self.validate_ref(*kind, place)?; } - Rvalue::Aggregate(_, ref operands) => { + Rvalue::Aggregate(_, operands) => { for o in operands { self.validate_operand(o)?; } - - Ok(()) } } + + Ok(()) } fn validate_call( diff --git a/compiler/rustc_mir_build/src/build/mod.rs b/compiler/rustc_mir_build/src/build/mod.rs index e041464381220..996615995259d 100644 --- a/compiler/rustc_mir_build/src/build/mod.rs +++ b/compiler/rustc_mir_build/src/build/mod.rs @@ -854,7 +854,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let mut mutability = Mutability::Not; // FIXME(project-rfc-2229#8): Store more precise information - let mut name = kw::Invalid; + let mut name = kw::Empty; if let Some(Node::Binding(pat)) = tcx_hir.find(var_id) { if let hir::PatKind::Binding(_, _, ident, _) = pat.kind { name = ident.name; diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs index 634cce403df96..c6669f0468296 100644 --- a/compiler/rustc_parse/src/parser/item.rs +++ b/compiler/rustc_parse/src/parser/item.rs @@ -494,7 +494,7 @@ impl<'a> Parser<'a> { let polarity = self.parse_polarity(); // Parse both types and traits as a type, then reinterpret if necessary. - let err_path = |span| ast::Path::from_ident(Ident::new(kw::Invalid, span)); + let err_path = |span| ast::Path::from_ident(Ident::new(kw::Empty, span)); let ty_first = if self.token.is_keyword(kw::For) && self.look_ahead(1, |t| t != &token::Lt) { let span = self.prev_token.span.between(self.token.span); @@ -1699,7 +1699,7 @@ impl<'a> Parser<'a> { // Skip every token until next possible arg or end. p.eat_to_tokens(&[&token::Comma, &token::CloseDelim(token::Paren)]); // Create a placeholder argument for proper arg count (issue #34264). - Ok(dummy_arg(Ident::new(kw::Invalid, lo.to(p.prev_token.span)))) + Ok(dummy_arg(Ident::new(kw::Empty, lo.to(p.prev_token.span)))) }); // ...now that we've parsed the first argument, `self` is no longer allowed. 
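// NOTE (sketch, not part of this patch): recovery keeps the arity intact so
// later passes still report accurate argument-count errors (issue #34264);
// the placeholder is just a parameter whose identifier is the empty symbol,
// exactly as built above:
//
//     let recovered = dummy_arg(Ident::new(kw::Empty, lo.to(p.prev_token.span)));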
first_param = false; @@ -1759,7 +1759,7 @@ impl<'a> Parser<'a> { } match ty { Ok(ty) => { - let ident = Ident::new(kw::Invalid, self.prev_token.span); + let ident = Ident::new(kw::Empty, self.prev_token.span); let bm = BindingMode::ByValue(Mutability::Not); let pat = self.mk_pat_ident(ty.span, bm, ident); (pat, ty) diff --git a/compiler/rustc_passes/src/liveness.rs b/compiler/rustc_passes/src/liveness.rs index 54e3cc69aea9a..fcea1b29ec367 100644 --- a/compiler/rustc_passes/src/liveness.rs +++ b/compiler/rustc_passes/src/liveness.rs @@ -1382,7 +1382,7 @@ impl<'tcx> Liveness<'_, 'tcx> { fn should_warn(&self, var: Variable) -> Option { let name = self.ir.variable_name(var); - if name == kw::Invalid { + if name == kw::Empty { return None; } let name: &str = &name.as_str(); diff --git a/compiler/rustc_privacy/src/lib.rs b/compiler/rustc_privacy/src/lib.rs index 708563f2ee981..1bcfdf0faf66a 100644 --- a/compiler/rustc_privacy/src/lib.rs +++ b/compiler/rustc_privacy/src/lib.rs @@ -957,7 +957,7 @@ impl<'tcx> NamePrivacyVisitor<'tcx> { in_update_syntax: bool, ) { // definition of the field - let ident = Ident::new(kw::Invalid, use_ctxt); + let ident = Ident::new(kw::Empty, use_ctxt); let current_hir = self.current_item.unwrap(); let def_id = self.tcx.adjust_ident_and_get_scope(ident, def.did, current_hir).1; if !def.is_enum() && !field.vis.is_accessible_from(def_id, self.tcx) { diff --git a/compiler/rustc_resolve/src/build_reduced_graph.rs b/compiler/rustc_resolve/src/build_reduced_graph.rs index 907a08cee2a4c..c5f783e84a91e 100644 --- a/compiler/rustc_resolve/src/build_reduced_graph.rs +++ b/compiler/rustc_resolve/src/build_reduced_graph.rs @@ -342,7 +342,7 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> { let field_names = vdata .fields() .iter() - .map(|field| respan(field.span, field.ident.map_or(kw::Invalid, |ident| ident.name))) + .map(|field| respan(field.span, field.ident.map_or(kw::Empty, |ident| ident.name))) .collect(); self.insert_field_names(def_id, field_names); } @@ -527,7 +527,7 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> { // HACK(eddyb) unclear how good this is, but keeping `$crate` // in `source` breaks `src/test/ui/imports/import-crate-var.rs`, // while the current crate doesn't have a valid `crate_name`. - if crate_name != kw::Invalid { + if crate_name != kw::Empty { // `crate_name` should not be interpreted as relative. module_path.push(Segment { ident: Ident { name: kw::PathRoot, span: source.ident.span }, @@ -656,7 +656,7 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> { /// Constructs the reduced graph for one item. fn build_reduced_graph_for_item(&mut self, item: &'b Item) { - if matches!(item.kind, ItemKind::Mod(..)) && item.ident.name == kw::Invalid { + if matches!(item.kind, ItemKind::Mod(..)) && item.ident.name == kw::Empty { // Fake crate root item from expand. return; } diff --git a/compiler/rustc_resolve/src/def_collector.rs b/compiler/rustc_resolve/src/def_collector.rs index 69773ba1d7221..48bce88439424 100644 --- a/compiler/rustc_resolve/src/def_collector.rs +++ b/compiler/rustc_resolve/src/def_collector.rs @@ -74,7 +74,7 @@ impl<'a, 'b> visit::Visitor<'a> for DefCollector<'a, 'b> { // information we encapsulate into, the better let def_data = match &i.kind { ItemKind::Impl { .. } => DefPathData::Impl, - ItemKind::Mod(..) if i.ident.name == kw::Invalid => { + ItemKind::Mod(..) if i.ident.name == kw::Empty => { // Fake crate root item from expand. 
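// NOTE (hypothetical helper, for illustration only): this visitor and
// `build_reduced_graph_for_item` skip the synthetic root module that
// expansion produces using the same test, which could be written as:
//
//     fn is_fake_crate_root(item: &ast::Item) -> bool {
//         matches!(item.kind, ast::ItemKind::Mod(..)) && item.ident.name == kw::Empty
//     }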
return visit::walk_item(self, i); } diff --git a/compiler/rustc_resolve/src/late.rs b/compiler/rustc_resolve/src/late.rs index 21def3e142910..dd1874debbd96 100644 --- a/compiler/rustc_resolve/src/late.rs +++ b/compiler/rustc_resolve/src/late.rs @@ -1639,7 +1639,7 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> { } // Record as bound if it's valid: - let ident_valid = ident.name != kw::Invalid; + let ident_valid = ident.name != kw::Empty; if ident_valid { bindings.last_mut().unwrap().1.insert(ident); } diff --git a/compiler/rustc_resolve/src/lib.rs b/compiler/rustc_resolve/src/lib.rs index 5eb30eacf0784..ca30d90e6ad1d 100644 --- a/compiler/rustc_resolve/src/lib.rs +++ b/compiler/rustc_resolve/src/lib.rs @@ -1182,12 +1182,12 @@ impl<'a> Resolver<'a> { ) -> Resolver<'a> { let root_local_def_id = LocalDefId { local_def_index: CRATE_DEF_INDEX }; let root_def_id = root_local_def_id.to_def_id(); - let root_module_kind = ModuleKind::Def(DefKind::Mod, root_def_id, kw::Invalid); + let root_module_kind = ModuleKind::Def(DefKind::Mod, root_def_id, kw::Empty); let graph_root = arenas.alloc_module(ModuleData { no_implicit_prelude: session.contains_name(&krate.attrs, sym::no_implicit_prelude), ..ModuleData::new(None, root_module_kind, root_def_id, ExpnId::root(), krate.span) }); - let empty_module_kind = ModuleKind::Def(DefKind::Mod, root_def_id, kw::Invalid); + let empty_module_kind = ModuleKind::Def(DefKind::Mod, root_def_id, kw::Empty); let empty_module = arenas.alloc_module(ModuleData { no_implicit_prelude: true, ..ModuleData::new( @@ -1797,7 +1797,7 @@ impl<'a> Resolver<'a> { ribs: &[Rib<'a>], ) -> Option> { assert!(ns == TypeNS || ns == ValueNS); - if ident.name == kw::Invalid { + if ident.name == kw::Empty { return Some(LexicalScopeBinding::Res(Res::Err)); } let (general_span, normalized_span) = if ident.name == kw::SelfUpper { diff --git a/compiler/rustc_resolve/src/macros.rs b/compiler/rustc_resolve/src/macros.rs index c8dbe68512857..5ad7c83ca36af 100644 --- a/compiler/rustc_resolve/src/macros.rs +++ b/compiler/rustc_resolve/src/macros.rs @@ -160,7 +160,7 @@ impl<'a> ResolverExpand for Resolver<'a> { hygiene::update_dollar_crate_names(|ctxt| { let ident = Ident::new(kw::DollarCrate, DUMMY_SP.with_ctxt(ctxt)); match self.resolve_crate_root(ident).kind { - ModuleKind::Def(.., name) if name != kw::Invalid => name, + ModuleKind::Def(.., name) if name != kw::Empty => name, _ => kw::Crate, } }); diff --git a/compiler/rustc_span/src/hygiene.rs b/compiler/rustc_span/src/hygiene.rs index 0f82db1d05aa1..fdc0d225bb82d 100644 --- a/compiler/rustc_span/src/hygiene.rs +++ b/compiler/rustc_span/src/hygiene.rs @@ -1065,7 +1065,7 @@ pub fn decode_syntax_context< parent: SyntaxContext::root(), opaque: SyntaxContext::root(), opaque_and_semitransparent: SyntaxContext::root(), - dollar_crate_name: kw::Invalid, + dollar_crate_name: kw::Empty, }); let mut ctxts = outer_ctxts.lock(); let new_len = raw_id as usize + 1; @@ -1092,7 +1092,7 @@ pub fn decode_syntax_context< ctxt_data, ); // Make sure nothing weird happening while `decode_data` was running - assert_eq!(dummy.dollar_crate_name, kw::Invalid); + assert_eq!(dummy.dollar_crate_name, kw::Empty); }); Ok(new_ctxt) diff --git a/compiler/rustc_span/src/symbol.rs b/compiler/rustc_span/src/symbol.rs index bc57a00e31b17..7ca2ff4f1fb71 100644 --- a/compiler/rustc_span/src/symbol.rs +++ b/compiler/rustc_span/src/symbol.rs @@ -25,7 +25,7 @@ symbols! 
{
     Keywords {
         // Special reserved identifiers used internally for elided lifetimes,
         // unnamed method parameters, crate root module, error recovery etc.
-        Invalid:            "",
+        Empty:              "",
         PathRoot:           "{{root}}",
         DollarCrate:        "$crate",
         Underscore:         "_",
@@ -1273,7 +1273,7 @@ impl Ident {
     #[inline]
     pub fn invalid() -> Ident {
-        Ident::with_dummy_span(kw::Invalid)
+        Ident::with_dummy_span(kw::Empty)
     }
 
     /// Maps a string to an identifier with a dummy span.
@@ -1464,7 +1464,7 @@ impl Symbol {
     }
 
     pub fn is_empty(self) -> bool {
-        self == kw::Invalid
+        self == kw::Empty
     }
 
     /// This method is supposed to be used in error messages, so it's expected to be
@@ -1648,7 +1648,7 @@ impl Symbol {
     /// Returns `true` if this symbol can be a raw identifier.
     pub fn can_be_raw(self) -> bool {
-        self != kw::Invalid && self != kw::Underscore && !self.is_path_segment_keyword()
+        self != kw::Empty && self != kw::Underscore && !self.is_path_segment_keyword()
     }
 }
diff --git a/compiler/rustc_typeck/src/check/expr.rs b/compiler/rustc_typeck/src/check/expr.rs
index ec0e039b5d29d..93eb2cfc72a80 100644
--- a/compiler/rustc_typeck/src/check/expr.rs
+++ b/compiler/rustc_typeck/src/check/expr.rs
@@ -883,7 +883,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                 Ok(method)
             }
             Err(error) => {
-                if segment.ident.name != kw::Invalid {
+                if segment.ident.name != kw::Empty {
                     self.report_extended_method_error(segment, span, args, rcvr_t, error);
                 }
                 Err(())
@@ -1547,7 +1547,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             return field_ty;
         }
 
-        if field.name == kw::Invalid {
+        if field.name == kw::Empty {
         } else if self.method_exists(field, expr_t, expr.hir_id, true) {
             self.ban_take_value_of_method(expr, expr_t, field);
         } else if !expr_t.is_primitive_ty() {
diff --git a/compiler/rustc_typeck/src/check/fn_ctxt/_impl.rs b/compiler/rustc_typeck/src/check/fn_ctxt/_impl.rs
index 41a403a010ee6..2a8b77da44fc4 100644
--- a/compiler/rustc_typeck/src/check/fn_ctxt/_impl.rs
+++ b/compiler/rustc_typeck/src/check/fn_ctxt/_impl.rs
@@ -914,7 +914,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             method::MethodError::PrivateMatch(kind, def_id, _) => Ok((kind, def_id)),
             _ => Err(ErrorReported),
         };
-        if item_name.name != kw::Invalid {
+        if item_name.name != kw::Empty {
             if let Some(mut e) = self.report_method_error(
                 span,
                 ty,
diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs
index 73d12f0a5f45d..57df28a15c85a 100644
--- a/library/alloc/src/rc.rs
+++ b/library/alloc/src/rc.rs
@@ -2042,7 +2042,7 @@ impl<T: ?Sized> Drop for Weak<T> {
         // the strong pointers have disappeared.
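// NOTE (sketch, not part of this patch): by the time the last `Weak<T>` is
// dropped, the pointed-to `T` has already been dropped in place, so calling
// `Layout::for_value(self.ptr.as_ref())` would materialize a `&T` to dead
// data. `for_value_raw` computes the same layout from the raw pointer and
// its metadata alone:
//
//     let layout = unsafe { Layout::for_value_raw(self.ptr.as_ptr()) };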
         if inner.weak() == 0 {
             unsafe {
-                Global.deallocate(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()));
+                Global.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr()));
             }
         }
     }
diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs
index 7c03345aba507..85c0a9f085729 100644
--- a/library/alloc/src/sync.rs
+++ b/library/alloc/src/sync.rs
@@ -1927,7 +1927,7 @@ impl<T: ?Sized> Drop for Weak<T> {
 
         if inner.weak.fetch_sub(1, Release) == 1 {
             acquire!(inner.weak);
-            unsafe { Global.deallocate(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())) }
+            unsafe { Global.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr())) }
         }
     }
 }
diff --git a/library/alloc/src/vec/cow.rs b/library/alloc/src/vec/cow.rs
new file mode 100644
index 0000000000000..73d15d3064789
--- /dev/null
+++ b/library/alloc/src/vec/cow.rs
@@ -0,0 +1,35 @@
+use crate::borrow::Cow;
+use core::iter::FromIterator;
+
+use super::Vec;
+
+#[stable(feature = "cow_from_vec", since = "1.8.0")]
+impl<'a, T: Clone> From<&'a [T]> for Cow<'a, [T]> {
+    fn from(s: &'a [T]) -> Cow<'a, [T]> {
+        Cow::Borrowed(s)
+    }
+}
+
+#[stable(feature = "cow_from_vec", since = "1.8.0")]
+impl<'a, T: Clone> From<Vec<T>> for Cow<'a, [T]> {
+    fn from(v: Vec<T>) -> Cow<'a, [T]> {
+        Cow::Owned(v)
+    }
+}
+
+#[stable(feature = "cow_from_vec_ref", since = "1.28.0")]
+impl<'a, T: Clone> From<&'a Vec<T>> for Cow<'a, [T]> {
+    fn from(v: &'a Vec<T>) -> Cow<'a, [T]> {
+        Cow::Borrowed(v.as_slice())
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> FromIterator<T> for Cow<'a, [T]>
+where
+    T: Clone,
+{
+    fn from_iter<I: IntoIterator<Item = T>>(it: I) -> Cow<'a, [T]> {
+        Cow::Owned(FromIterator::from_iter(it))
+    }
+}
diff --git a/library/alloc/src/vec/drain.rs b/library/alloc/src/vec/drain.rs
new file mode 100644
index 0000000000000..fb32d144f872c
--- /dev/null
+++ b/library/alloc/src/vec/drain.rs
@@ -0,0 +1,155 @@
+use crate::alloc::{Allocator, Global};
+use core::fmt;
+use core::iter::{FusedIterator, TrustedLen};
+use core::mem::{self};
+use core::ptr::{self, NonNull};
+use core::slice::{self};
+
+use super::Vec;
+
+/// A draining iterator for `Vec<T>`.
+///
+/// This `struct` is created by [`Vec::drain`].
+/// See its documentation for more.
+///
+/// # Example
+///
+/// ```
+/// let mut v = vec![0, 1, 2];
+/// let iter: std::vec::Drain<_> = v.drain(..);
+/// ```
+#[stable(feature = "drain", since = "1.6.0")]
+pub struct Drain<
+    'a,
+    T: 'a,
+    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + 'a = Global,
+> {
+    /// Index of tail to preserve
+    pub(super) tail_start: usize,
+    /// Length of tail
+    pub(super) tail_len: usize,
+    /// Current remaining range to remove
+    pub(super) iter: slice::Iter<'a, T>,
+    pub(super) vec: NonNull<Vec<T, A>>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<T: fmt::Debug, A: Allocator> fmt::Debug for Drain<'_, T, A> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_tuple("Drain").field(&self.iter.as_slice()).finish()
+    }
+}
+
+impl<'a, T, A: Allocator> Drain<'a, T, A> {
+    /// Returns the remaining items of this iterator as a slice.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let mut vec = vec!['a', 'b', 'c'];
+    /// let mut drain = vec.drain(..);
+    /// assert_eq!(drain.as_slice(), &['a', 'b', 'c']);
+    /// let _ = drain.next().unwrap();
+    /// assert_eq!(drain.as_slice(), &['b', 'c']);
+    /// ```
+    #[stable(feature = "vec_drain_as_slice", since = "1.46.0")]
+    pub fn as_slice(&self) -> &[T] {
+        self.iter.as_slice()
+    }
+
+    /// Returns a reference to the underlying allocator.
+ #[unstable(feature = "allocator_api", issue = "32838")] + #[inline] + pub fn allocator(&self) -> &A { + unsafe { self.vec.as_ref().allocator() } + } +} + +#[stable(feature = "vec_drain_as_slice", since = "1.46.0")] +impl<'a, T, A: Allocator> AsRef<[T]> for Drain<'a, T, A> { + fn as_ref(&self) -> &[T] { + self.as_slice() + } +} + +#[stable(feature = "drain", since = "1.6.0")] +unsafe impl Sync for Drain<'_, T, A> {} +#[stable(feature = "drain", since = "1.6.0")] +unsafe impl Send for Drain<'_, T, A> {} + +#[stable(feature = "drain", since = "1.6.0")] +impl Iterator for Drain<'_, T, A> { + type Item = T; + + #[inline] + fn next(&mut self) -> Option { + self.iter.next().map(|elt| unsafe { ptr::read(elt as *const _) }) + } + + fn size_hint(&self) -> (usize, Option) { + self.iter.size_hint() + } +} + +#[stable(feature = "drain", since = "1.6.0")] +impl DoubleEndedIterator for Drain<'_, T, A> { + #[inline] + fn next_back(&mut self) -> Option { + self.iter.next_back().map(|elt| unsafe { ptr::read(elt as *const _) }) + } +} + +#[stable(feature = "drain", since = "1.6.0")] +impl Drop for Drain<'_, T, A> { + fn drop(&mut self) { + /// Continues dropping the remaining elements in the `Drain`, then moves back the + /// un-`Drain`ed elements to restore the original `Vec`. + struct DropGuard<'r, 'a, T, A: Allocator>(&'r mut Drain<'a, T, A>); + + impl<'r, 'a, T, A: Allocator> Drop for DropGuard<'r, 'a, T, A> { + fn drop(&mut self) { + // Continue the same loop we have below. If the loop already finished, this does + // nothing. + self.0.for_each(drop); + + if self.0.tail_len > 0 { + unsafe { + let source_vec = self.0.vec.as_mut(); + // memmove back untouched tail, update to new length + let start = source_vec.len(); + let tail = self.0.tail_start; + if tail != start { + let src = source_vec.as_ptr().add(tail); + let dst = source_vec.as_mut_ptr().add(start); + ptr::copy(src, dst, self.0.tail_len); + } + source_vec.set_len(start + self.0.tail_len); + } + } + } + } + + // exhaust self first + while let Some(item) = self.next() { + let guard = DropGuard(self); + drop(item); + mem::forget(guard); + } + + // Drop a `DropGuard` to move back the non-drained tail of `self`. + DropGuard(self); + } +} + +#[stable(feature = "drain", since = "1.6.0")] +impl ExactSizeIterator for Drain<'_, T, A> { + fn is_empty(&self) -> bool { + self.iter.is_empty() + } +} + +#[unstable(feature = "trusted_len", issue = "37572")] +unsafe impl TrustedLen for Drain<'_, T, A> {} + +#[stable(feature = "fused", since = "1.26.0")] +impl FusedIterator for Drain<'_, T, A> {} diff --git a/library/alloc/src/vec/drain_filter.rs b/library/alloc/src/vec/drain_filter.rs new file mode 100644 index 0000000000000..3c37c92ae44b0 --- /dev/null +++ b/library/alloc/src/vec/drain_filter.rs @@ -0,0 +1,143 @@ +use crate::alloc::{Allocator, Global}; +use core::ptr::{self}; +use core::slice::{self}; + +use super::Vec; + +/// An iterator which uses a closure to determine if an element should be removed. +/// +/// This struct is created by [`Vec::drain_filter`]. +/// See its documentation for more. 
+/// +/// # Example +/// +/// ``` +/// #![feature(drain_filter)] +/// +/// let mut v = vec![0, 1, 2]; +/// let iter: std::vec::DrainFilter<_, _> = v.drain_filter(|x| *x % 2 == 0); +/// ``` +#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] +#[derive(Debug)] +pub struct DrainFilter< + 'a, + T, + F, + #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, +> where + F: FnMut(&mut T) -> bool, +{ + pub(super) vec: &'a mut Vec, + /// The index of the item that will be inspected by the next call to `next`. + pub(super) idx: usize, + /// The number of items that have been drained (removed) thus far. + pub(super) del: usize, + /// The original length of `vec` prior to draining. + pub(super) old_len: usize, + /// The filter test predicate. + pub(super) pred: F, + /// A flag that indicates a panic has occurred in the filter test predicate. + /// This is used as a hint in the drop implementation to prevent consumption + /// of the remainder of the `DrainFilter`. Any unprocessed items will be + /// backshifted in the `vec`, but no further items will be dropped or + /// tested by the filter predicate. + pub(super) panic_flag: bool, +} + +impl DrainFilter<'_, T, F, A> +where + F: FnMut(&mut T) -> bool, +{ + /// Returns a reference to the underlying allocator. + #[unstable(feature = "allocator_api", issue = "32838")] + #[inline] + pub fn allocator(&self) -> &A { + self.vec.allocator() + } +} + +#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] +impl Iterator for DrainFilter<'_, T, F, A> +where + F: FnMut(&mut T) -> bool, +{ + type Item = T; + + fn next(&mut self) -> Option { + unsafe { + while self.idx < self.old_len { + let i = self.idx; + let v = slice::from_raw_parts_mut(self.vec.as_mut_ptr(), self.old_len); + self.panic_flag = true; + let drained = (self.pred)(&mut v[i]); + self.panic_flag = false; + // Update the index *after* the predicate is called. If the index + // is updated prior and the predicate panics, the element at this + // index would be leaked. + self.idx += 1; + if drained { + self.del += 1; + return Some(ptr::read(&v[i])); + } else if self.del > 0 { + let del = self.del; + let src: *const T = &v[i]; + let dst: *mut T = &mut v[i - del]; + ptr::copy_nonoverlapping(src, dst, 1); + } + } + None + } + } + + fn size_hint(&self) -> (usize, Option) { + (0, Some(self.old_len - self.idx)) + } +} + +#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] +impl Drop for DrainFilter<'_, T, F, A> +where + F: FnMut(&mut T) -> bool, +{ + fn drop(&mut self) { + struct BackshiftOnDrop<'a, 'b, T, F, A: Allocator> + where + F: FnMut(&mut T) -> bool, + { + drain: &'b mut DrainFilter<'a, T, F, A>, + } + + impl<'a, 'b, T, F, A: Allocator> Drop for BackshiftOnDrop<'a, 'b, T, F, A> + where + F: FnMut(&mut T) -> bool, + { + fn drop(&mut self) { + unsafe { + if self.drain.idx < self.drain.old_len && self.drain.del > 0 { + // This is a pretty messed up state, and there isn't really an + // obviously right thing to do. We don't want to keep trying + // to execute `pred`, so we just backshift all the unprocessed + // elements and tell the vec that they still exist. The backshift + // is required to prevent a double-drop of the last successfully + // drained item prior to a panic in the predicate. 
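// NOTE (usage sketch, not part of this patch; needs `#![feature(drain_filter)]`):
// the net effect of the guard is that kept elements survive in place even if
// the iterator is dropped before being exhausted:
//
//     let mut v = vec![1, 2, 3, 4];
//     let evens: Vec<_> = v.drain_filter(|x| *x % 2 == 0).collect();
//     assert_eq!(evens, [2, 4]);
//     assert_eq!(v, [1, 3]);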
+ let ptr = self.drain.vec.as_mut_ptr(); + let src = ptr.add(self.drain.idx); + let dst = src.sub(self.drain.del); + let tail_len = self.drain.old_len - self.drain.idx; + src.copy_to(dst, tail_len); + } + self.drain.vec.set_len(self.drain.old_len - self.drain.del); + } + } + } + + let backshift = BackshiftOnDrop { drain: self }; + + // Attempt to consume any remaining elements if the filter predicate + // has not yet panicked. We'll backshift any remaining elements + // whether we've already panicked or if the consumption here panics. + if !backshift.drain.panic_flag { + backshift.drain.for_each(drop); + } + } +} diff --git a/library/alloc/src/vec/in_place_drop.rs b/library/alloc/src/vec/in_place_drop.rs new file mode 100644 index 0000000000000..354d25c2389fb --- /dev/null +++ b/library/alloc/src/vec/in_place_drop.rs @@ -0,0 +1,24 @@ +use core::ptr::{self}; +use core::slice::{self}; + +// A helper struct for in-place iteration that drops the destination slice of iteration, +// i.e. the head. The source slice (the tail) is dropped by IntoIter. +pub(super) struct InPlaceDrop { + pub(super) inner: *mut T, + pub(super) dst: *mut T, +} + +impl InPlaceDrop { + fn len(&self) -> usize { + unsafe { self.dst.offset_from(self.inner) as usize } + } +} + +impl Drop for InPlaceDrop { + #[inline] + fn drop(&mut self) { + unsafe { + ptr::drop_in_place(slice::from_raw_parts_mut(self.inner, self.len())); + } + } +} diff --git a/library/alloc/src/vec/into_iter.rs b/library/alloc/src/vec/into_iter.rs new file mode 100644 index 0000000000000..f131d06bb18f9 --- /dev/null +++ b/library/alloc/src/vec/into_iter.rs @@ -0,0 +1,283 @@ +use crate::alloc::{Allocator, Global}; +use crate::raw_vec::RawVec; +use core::fmt; +use core::intrinsics::arith_offset; +use core::iter::{FusedIterator, InPlaceIterable, SourceIter, TrustedLen, TrustedRandomAccess}; +use core::marker::PhantomData; +use core::mem::{self}; +use core::ptr::{self, NonNull}; +use core::slice::{self}; + +/// An iterator that moves out of a vector. +/// +/// This `struct` is created by the `into_iter` method on [`Vec`](super::Vec) +/// (provided by the [`IntoIterator`] trait). +/// +/// # Example +/// +/// ``` +/// let v = vec![0, 1, 2]; +/// let iter: std::vec::IntoIter<_> = v.into_iter(); +/// ``` +#[stable(feature = "rust1", since = "1.0.0")] +pub struct IntoIter< + T, + #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, +> { + pub(super) buf: NonNull, + pub(super) phantom: PhantomData, + pub(super) cap: usize, + pub(super) alloc: A, + pub(super) ptr: *const T, + pub(super) end: *const T, +} + +#[stable(feature = "vec_intoiter_debug", since = "1.13.0")] +impl fmt::Debug for IntoIter { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_tuple("IntoIter").field(&self.as_slice()).finish() + } +} + +impl IntoIter { + /// Returns the remaining items of this iterator as a slice. + /// + /// # Examples + /// + /// ``` + /// let vec = vec!['a', 'b', 'c']; + /// let mut into_iter = vec.into_iter(); + /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']); + /// let _ = into_iter.next().unwrap(); + /// assert_eq!(into_iter.as_slice(), &['b', 'c']); + /// ``` + #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")] + pub fn as_slice(&self) -> &[T] { + unsafe { slice::from_raw_parts(self.ptr, self.len()) } + } + + /// Returns the remaining items of this iterator as a mutable slice. 
+ /// + /// # Examples + /// + /// ``` + /// let vec = vec!['a', 'b', 'c']; + /// let mut into_iter = vec.into_iter(); + /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']); + /// into_iter.as_mut_slice()[2] = 'z'; + /// assert_eq!(into_iter.next().unwrap(), 'a'); + /// assert_eq!(into_iter.next().unwrap(), 'b'); + /// assert_eq!(into_iter.next().unwrap(), 'z'); + /// ``` + #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")] + pub fn as_mut_slice(&mut self) -> &mut [T] { + unsafe { &mut *self.as_raw_mut_slice() } + } + + /// Returns a reference to the underlying allocator. + #[unstable(feature = "allocator_api", issue = "32838")] + #[inline] + pub fn allocator(&self) -> &A { + &self.alloc + } + + fn as_raw_mut_slice(&mut self) -> *mut [T] { + ptr::slice_from_raw_parts_mut(self.ptr as *mut T, self.len()) + } + + pub(super) fn drop_remaining(&mut self) { + unsafe { + ptr::drop_in_place(self.as_mut_slice()); + } + self.ptr = self.end; + } + + /// Relinquishes the backing allocation, equivalent to + /// `ptr::write(&mut self, Vec::new().into_iter())` + pub(super) fn forget_allocation(&mut self) { + self.cap = 0; + self.buf = unsafe { NonNull::new_unchecked(RawVec::NEW.ptr()) }; + self.ptr = self.buf.as_ptr(); + self.end = self.buf.as_ptr(); + } +} + +#[stable(feature = "vec_intoiter_as_ref", since = "1.46.0")] +impl AsRef<[T]> for IntoIter { + fn as_ref(&self) -> &[T] { + self.as_slice() + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +unsafe impl Send for IntoIter {} +#[stable(feature = "rust1", since = "1.0.0")] +unsafe impl Sync for IntoIter {} + +#[stable(feature = "rust1", since = "1.0.0")] +impl Iterator for IntoIter { + type Item = T; + + #[inline] + fn next(&mut self) -> Option { + if self.ptr as *const _ == self.end { + None + } else if mem::size_of::() == 0 { + // purposefully don't use 'ptr.offset' because for + // vectors with 0-size elements this would return the + // same pointer. + self.ptr = unsafe { arith_offset(self.ptr as *const i8, 1) as *mut T }; + + // Make up a value of this ZST. + Some(unsafe { mem::zeroed() }) + } else { + let old = self.ptr; + self.ptr = unsafe { self.ptr.offset(1) }; + + Some(unsafe { ptr::read(old) }) + } + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + let exact = if mem::size_of::() == 0 { + (self.end as usize).wrapping_sub(self.ptr as usize) + } else { + unsafe { self.end.offset_from(self.ptr) as usize } + }; + (exact, Some(exact)) + } + + #[inline] + fn count(self) -> usize { + self.len() + } + + unsafe fn __iterator_get_unchecked(&mut self, i: usize) -> Self::Item + where + Self: TrustedRandomAccess, + { + // SAFETY: the caller must guarantee that `i` is in bounds of the + // `Vec`, so `i` cannot overflow an `isize`, and the `self.ptr.add(i)` + // is guaranteed to pointer to an element of the `Vec` and + // thus guaranteed to be valid to dereference. + // + // Also note the implementation of `Self: TrustedRandomAccess` requires + // that `T: Copy` so reading elements from the buffer doesn't invalidate + // them for `Drop`. + unsafe { + if mem::size_of::() == 0 { mem::zeroed() } else { ptr::read(self.ptr.add(i)) } + } + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl DoubleEndedIterator for IntoIter { + #[inline] + fn next_back(&mut self) -> Option { + if self.end == self.ptr { + None + } else if mem::size_of::() == 0 { + // See above for why 'ptr.offset' isn't used + self.end = unsafe { arith_offset(self.end as *const i8, -1) as *mut T }; + + // Make up a value of this ZST. 
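// NOTE (aside, not part of this patch): conjuring the value is sound only
// because the element type has size zero, so `mem::zeroed` reads no bytes
// and every "value" of the type is trivially valid:
//
//     assert_eq!(core::mem::size_of::<()>(), 0);
//     let unit: () = unsafe { core::mem::zeroed() };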
+ Some(unsafe { mem::zeroed() }) + } else { + self.end = unsafe { self.end.offset(-1) }; + + Some(unsafe { ptr::read(self.end) }) + } + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl ExactSizeIterator for IntoIter { + fn is_empty(&self) -> bool { + self.ptr == self.end + } +} + +#[stable(feature = "fused", since = "1.26.0")] +impl FusedIterator for IntoIter {} + +#[unstable(feature = "trusted_len", issue = "37572")] +unsafe impl TrustedLen for IntoIter {} + +#[doc(hidden)] +#[unstable(issue = "none", feature = "std_internals")] +// T: Copy as approximation for !Drop since get_unchecked does not advance self.ptr +// and thus we can't implement drop-handling +unsafe impl TrustedRandomAccess for IntoIter +where + T: Copy, +{ + fn may_have_side_effect() -> bool { + false + } +} + +#[stable(feature = "vec_into_iter_clone", since = "1.8.0")] +impl Clone for IntoIter { + #[cfg(not(test))] + fn clone(&self) -> Self { + self.as_slice().to_vec_in(self.alloc.clone()).into_iter() + } + #[cfg(test)] + fn clone(&self) -> Self { + crate::slice::to_vec(self.as_slice(), self.alloc.clone()).into_iter() + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +unsafe impl<#[may_dangle] T, A: Allocator> Drop for IntoIter { + fn drop(&mut self) { + struct DropGuard<'a, T, A: Allocator>(&'a mut IntoIter); + + impl Drop for DropGuard<'_, T, A> { + fn drop(&mut self) { + unsafe { + // `IntoIter::alloc` is not used anymore after this + let alloc = ptr::read(&self.0.alloc); + // RawVec handles deallocation + let _ = RawVec::from_raw_parts_in(self.0.buf.as_ptr(), self.0.cap, alloc); + } + } + } + + let guard = DropGuard(self); + // destroy the remaining elements + unsafe { + ptr::drop_in_place(guard.0.as_raw_mut_slice()); + } + // now `guard` will be dropped and do the rest + } +} + +#[unstable(issue = "none", feature = "inplace_iteration")] +unsafe impl InPlaceIterable for IntoIter {} + +#[unstable(issue = "none", feature = "inplace_iteration")] +unsafe impl SourceIter for IntoIter { + type Source = Self; + + #[inline] + unsafe fn as_inner(&mut self) -> &mut Self::Source { + self + } +} + +// internal helper trait for in-place iteration specialization. +#[rustc_specialization_trait] +pub(crate) trait AsIntoIter { + type Item; + fn as_into_iter(&mut self) -> &mut IntoIter; +} + +impl AsIntoIter for IntoIter { + type Item = T; + + fn as_into_iter(&mut self) -> &mut IntoIter { + self + } +} diff --git a/library/alloc/src/vec/is_zero.rs b/library/alloc/src/vec/is_zero.rs new file mode 100644 index 0000000000000..b5739970b6ea4 --- /dev/null +++ b/library/alloc/src/vec/is_zero.rs @@ -0,0 +1,71 @@ +use crate::boxed::Box; + +#[rustc_specialization_trait] +pub(super) unsafe trait IsZero { + /// Whether this value is zero + fn is_zero(&self) -> bool; +} + +macro_rules! 
impl_is_zero { + ($t:ty, $is_zero:expr) => { + unsafe impl IsZero for $t { + #[inline] + fn is_zero(&self) -> bool { + $is_zero(*self) + } + } + }; +} + +impl_is_zero!(i16, |x| x == 0); +impl_is_zero!(i32, |x| x == 0); +impl_is_zero!(i64, |x| x == 0); +impl_is_zero!(i128, |x| x == 0); +impl_is_zero!(isize, |x| x == 0); + +impl_is_zero!(u16, |x| x == 0); +impl_is_zero!(u32, |x| x == 0); +impl_is_zero!(u64, |x| x == 0); +impl_is_zero!(u128, |x| x == 0); +impl_is_zero!(usize, |x| x == 0); + +impl_is_zero!(bool, |x| x == false); +impl_is_zero!(char, |x| x == '\0'); + +impl_is_zero!(f32, |x: f32| x.to_bits() == 0); +impl_is_zero!(f64, |x: f64| x.to_bits() == 0); + +unsafe impl IsZero for *const T { + #[inline] + fn is_zero(&self) -> bool { + (*self).is_null() + } +} + +unsafe impl IsZero for *mut T { + #[inline] + fn is_zero(&self) -> bool { + (*self).is_null() + } +} + +// `Option<&T>` and `Option>` are guaranteed to represent `None` as null. +// For fat pointers, the bytes that would be the pointer metadata in the `Some` +// variant are padding in the `None` variant, so ignoring them and +// zero-initializing instead is ok. +// `Option<&mut T>` never implements `Clone`, so there's no need for an impl of +// `SpecFromElem`. + +unsafe impl IsZero for Option<&T> { + #[inline] + fn is_zero(&self) -> bool { + self.is_none() + } +} + +unsafe impl IsZero for Option> { + #[inline] + fn is_zero(&self) -> bool { + self.is_none() + } +} diff --git a/library/alloc/src/vec.rs b/library/alloc/src/vec/mod.rs similarity index 65% rename from library/alloc/src/vec.rs rename to library/alloc/src/vec/mod.rs index c9fa41138cd41..2a83eb33fe3ec 100644 --- a/library/alloc/src/vec.rs +++ b/library/alloc/src/vec/mod.rs @@ -1,4 +1,3 @@ -// ignore-tidy-filelength //! A contiguous growable array type with heap-allocated contents, written //! `Vec`. //! @@ -59,9 +58,7 @@ use core::convert::TryFrom; use core::fmt; use core::hash::{Hash, Hasher}; use core::intrinsics::{arith_offset, assume}; -use core::iter::{ - FromIterator, FusedIterator, InPlaceIterable, SourceIter, TrustedLen, TrustedRandomAccess, -}; +use core::iter::FromIterator; use core::marker::PhantomData; use core::mem::{self, ManuallyDrop, MaybeUninit}; use core::ops::{self, Index, IndexMut, Range, RangeBounds}; @@ -74,6 +71,61 @@ use crate::boxed::Box; use crate::collections::TryReserveError; use crate::raw_vec::RawVec; +#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] +pub use self::drain_filter::DrainFilter; + +mod drain_filter; + +#[stable(feature = "vec_splice", since = "1.21.0")] +pub use self::splice::Splice; + +mod splice; + +#[stable(feature = "drain", since = "1.6.0")] +pub use self::drain::Drain; + +mod drain; + +mod cow; + +pub(crate) use self::into_iter::AsIntoIter; +#[stable(feature = "rust1", since = "1.0.0")] +pub use self::into_iter::IntoIter; + +mod into_iter; + +use self::is_zero::IsZero; + +mod is_zero; + +mod source_iter_marker; + +mod partial_eq; + +use self::spec_from_elem::SpecFromElem; + +mod spec_from_elem; + +use self::set_len_on_drop::SetLenOnDrop; + +mod set_len_on_drop; + +use self::in_place_drop::InPlaceDrop; + +mod in_place_drop; + +use self::spec_from_iter_nested::SpecFromIterNested; + +mod spec_from_iter_nested; + +use self::spec_from_iter::SpecFromIter; + +mod spec_from_iter; + +use self::spec_extend::SpecExtend; + +mod spec_extend; + /// A contiguous growable array type, written `Vec` but pronounced 'vector'. 
/// /// # Examples @@ -1876,35 +1928,6 @@ impl Vec { } } -// Set the length of the vec when the `SetLenOnDrop` value goes out of scope. -// -// The idea is: The length field in SetLenOnDrop is a local variable -// that the optimizer will see does not alias with any stores through the Vec's data -// pointer. This is a workaround for alias analysis issue #32155 -struct SetLenOnDrop<'a> { - len: &'a mut usize, - local_len: usize, -} - -impl<'a> SetLenOnDrop<'a> { - #[inline] - fn new(len: &'a mut usize) -> Self { - SetLenOnDrop { local_len: *len, len } - } - - #[inline] - fn increment_len(&mut self, increment: usize) { - self.local_len += increment; - } -} - -impl Drop for SetLenOnDrop<'_> { - #[inline] - fn drop(&mut self) { - *self.len = self.local_len; - } -} - impl Vec { /// Removes consecutive repeated elements in the vector according to the /// [`PartialEq`] trait implementation. @@ -1964,131 +1987,6 @@ pub fn from_elem_in(elem: T, n: usize, alloc: A) -> Vec< ::from_elem(elem, n, alloc) } -// Specialization trait used for Vec::from_elem -trait SpecFromElem: Sized { - fn from_elem(elem: Self, n: usize, alloc: A) -> Vec; -} - -impl SpecFromElem for T { - default fn from_elem(elem: Self, n: usize, alloc: A) -> Vec { - let mut v = Vec::with_capacity_in(n, alloc); - v.extend_with(n, ExtendElement(elem)); - v - } -} - -impl SpecFromElem for i8 { - #[inline] - fn from_elem(elem: i8, n: usize, alloc: A) -> Vec { - if elem == 0 { - return Vec { buf: RawVec::with_capacity_zeroed_in(n, alloc), len: n }; - } - unsafe { - let mut v = Vec::with_capacity_in(n, alloc); - ptr::write_bytes(v.as_mut_ptr(), elem as u8, n); - v.set_len(n); - v - } - } -} - -impl SpecFromElem for u8 { - #[inline] - fn from_elem(elem: u8, n: usize, alloc: A) -> Vec { - if elem == 0 { - return Vec { buf: RawVec::with_capacity_zeroed_in(n, alloc), len: n }; - } - unsafe { - let mut v = Vec::with_capacity_in(n, alloc); - ptr::write_bytes(v.as_mut_ptr(), elem, n); - v.set_len(n); - v - } - } -} - -impl SpecFromElem for T { - #[inline] - fn from_elem(elem: T, n: usize, alloc: A) -> Vec { - if elem.is_zero() { - return Vec { buf: RawVec::with_capacity_zeroed_in(n, alloc), len: n }; - } - let mut v = Vec::with_capacity_in(n, alloc); - v.extend_with(n, ExtendElement(elem)); - v - } -} - -#[rustc_specialization_trait] -unsafe trait IsZero { - /// Whether this value is zero - fn is_zero(&self) -> bool; -} - -macro_rules! impl_is_zero { - ($t:ty, $is_zero:expr) => { - unsafe impl IsZero for $t { - #[inline] - fn is_zero(&self) -> bool { - $is_zero(*self) - } - } - }; -} - -impl_is_zero!(i16, |x| x == 0); -impl_is_zero!(i32, |x| x == 0); -impl_is_zero!(i64, |x| x == 0); -impl_is_zero!(i128, |x| x == 0); -impl_is_zero!(isize, |x| x == 0); - -impl_is_zero!(u16, |x| x == 0); -impl_is_zero!(u32, |x| x == 0); -impl_is_zero!(u64, |x| x == 0); -impl_is_zero!(u128, |x| x == 0); -impl_is_zero!(usize, |x| x == 0); - -impl_is_zero!(bool, |x| x == false); -impl_is_zero!(char, |x| x == '\0'); - -impl_is_zero!(f32, |x: f32| x.to_bits() == 0); -impl_is_zero!(f64, |x: f64| x.to_bits() == 0); - -unsafe impl IsZero for *const T { - #[inline] - fn is_zero(&self) -> bool { - (*self).is_null() - } -} - -unsafe impl IsZero for *mut T { - #[inline] - fn is_zero(&self) -> bool { - (*self).is_null() - } -} - -// `Option<&T>` and `Option>` are guaranteed to represent `None` as null. 
-// For fat pointers, the bytes that would be the pointer metadata in the `Some` -// variant are padding in the `None` variant, so ignoring them and -// zero-initializing instead is ok. -// `Option<&mut T>` never implements `Clone`, so there's no need for an impl of -// `SpecFromElem`. - -unsafe impl IsZero for Option<&T> { - #[inline] - fn is_zero(&self) -> bool { - self.is_none() - } -} - -unsafe impl IsZero for Option> { - #[inline] - fn is_zero(&self) -> bool { - self.is_none() - } -} - //////////////////////////////////////////////////////////////////////////////// // Common trait implementations for Vec //////////////////////////////////////////////////////////////////////////////// @@ -2263,351 +2161,6 @@ impl Extend for Vec { } } -/// Specialization trait used for Vec::from_iter -/// -/// ## The delegation graph: -/// -/// ```text -/// +-------------+ -/// |FromIterator | -/// +-+-----------+ -/// | -/// v -/// +-+-------------------------------+ +---------------------+ -/// |SpecFromIter +---->+SpecFromIterNested | -/// |where I: | | |where I: | -/// | Iterator (default)----------+ | | Iterator (default) | -/// | vec::IntoIter | | | TrustedLen | -/// | SourceIterMarker---fallback-+ | | | -/// | slice::Iter | | | -/// | Iterator | +---------------------+ -/// +---------------------------------+ -/// ``` -trait SpecFromIter { - fn from_iter(iter: I) -> Self; -} - -/// Another specialization trait for Vec::from_iter -/// necessary to manually prioritize overlapping specializations -/// see [`SpecFromIter`] for details. -trait SpecFromIterNested { - fn from_iter(iter: I) -> Self; -} - -impl SpecFromIterNested for Vec -where - I: Iterator, -{ - default fn from_iter(mut iterator: I) -> Self { - // Unroll the first iteration, as the vector is going to be - // expanded on this iteration in every case when the iterable is not - // empty, but the loop in extend_desugared() is not going to see the - // vector being full in the few subsequent loop iterations. - // So we get better branch prediction. - let mut vector = match iterator.next() { - None => return Vec::new(), - Some(element) => { - let (lower, _) = iterator.size_hint(); - let mut vector = Vec::with_capacity(lower.saturating_add(1)); - unsafe { - ptr::write(vector.as_mut_ptr(), element); - vector.set_len(1); - } - vector - } - }; - // must delegate to spec_extend() since extend() itself delegates - // to spec_from for empty Vecs - as SpecExtend>::spec_extend(&mut vector, iterator); - vector - } -} - -impl SpecFromIterNested for Vec -where - I: TrustedLen, -{ - fn from_iter(iterator: I) -> Self { - let mut vector = match iterator.size_hint() { - (_, Some(upper)) => Vec::with_capacity(upper), - _ => Vec::new(), - }; - // must delegate to spec_extend() since extend() itself delegates - // to spec_from for empty Vecs - vector.spec_extend(iterator); - vector - } -} - -impl SpecFromIter for Vec -where - I: Iterator, -{ - default fn from_iter(iterator: I) -> Self { - SpecFromIterNested::from_iter(iterator) - } -} - -// A helper struct for in-place iteration that drops the destination slice of iteration, -// i.e. the head. The source slice (the tail) is dropped by IntoIter. 
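// NOTE (usage sketch, not part of this patch): the pipeline shape this
// machinery serves -- same item size, same alignment, non-ZST -- may have
// its source allocation reused instead of allocating a fresh buffer:
//
//     let v: Vec<u32> = (0..1024).collect();
//     let doubled: Vec<u32> = v.into_iter().map(|x| x * 2).collect();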
-struct InPlaceDrop { - inner: *mut T, - dst: *mut T, -} - -impl InPlaceDrop { - fn len(&self) -> usize { - unsafe { self.dst.offset_from(self.inner) as usize } - } -} - -impl Drop for InPlaceDrop { - #[inline] - fn drop(&mut self) { - unsafe { - ptr::drop_in_place(slice::from_raw_parts_mut(self.inner, self.len())); - } - } -} - -impl SpecFromIter> for Vec { - fn from_iter(iterator: IntoIter) -> Self { - // A common case is passing a vector into a function which immediately - // re-collects into a vector. We can short circuit this if the IntoIter - // has not been advanced at all. - // When it has been advanced We can also reuse the memory and move the data to the front. - // But we only do so when the resulting Vec wouldn't have more unused capacity - // than creating it through the generic FromIterator implementation would. That limitation - // is not strictly necessary as Vec's allocation behavior is intentionally unspecified. - // But it is a conservative choice. - let has_advanced = iterator.buf.as_ptr() as *const _ != iterator.ptr; - if !has_advanced || iterator.len() >= iterator.cap / 2 { - unsafe { - let it = ManuallyDrop::new(iterator); - if has_advanced { - ptr::copy(it.ptr, it.buf.as_ptr(), it.len()); - } - return Vec::from_raw_parts(it.buf.as_ptr(), it.len(), it.cap); - } - } - - let mut vec = Vec::new(); - // must delegate to spec_extend() since extend() itself delegates - // to spec_from for empty Vecs - vec.spec_extend(iterator); - vec - } -} - -fn write_in_place_with_drop( - src_end: *const T, -) -> impl FnMut(InPlaceDrop, T) -> Result, !> { - move |mut sink, item| { - unsafe { - // the InPlaceIterable contract cannot be verified precisely here since - // try_fold has an exclusive reference to the source pointer - // all we can do is check if it's still in range - debug_assert!(sink.dst as *const _ <= src_end, "InPlaceIterable contract violation"); - ptr::write(sink.dst, item); - sink.dst = sink.dst.add(1); - } - Ok(sink) - } -} - -/// Specialization marker for collecting an iterator pipeline into a Vec while reusing the -/// source allocation, i.e. executing the pipeline in place. -/// -/// The SourceIter parent trait is necessary for the specializing function to access the allocation -/// which is to be reused. But it is not sufficient for the specialization to be valid. See -/// additional bounds on the impl. -#[rustc_unsafe_specialization_marker] -trait SourceIterMarker: SourceIter {} - -// The std-internal SourceIter/InPlaceIterable traits are only implemented by chains of -// Adapter>> (all owned by core/std). Additional bounds -// on the adapter implementations (beyond `impl Trait for Adapter`) only depend on other -// traits already marked as specialization traits (Copy, TrustedRandomAccess, FusedIterator). -// I.e. the marker does not depend on lifetimes of user-supplied types. Modulo the Copy hole, which -// several other specializations already depend on. -impl SourceIterMarker for T where T: SourceIter + InPlaceIterable {} - -impl SpecFromIter for Vec -where - I: Iterator + SourceIterMarker, -{ - default fn from_iter(mut iterator: I) -> Self { - // Additional requirements which cannot expressed via trait bounds. 
We rely on const eval - // instead: - // a) no ZSTs as there would be no allocation to reuse and pointer arithmetic would panic - // b) size match as required by Alloc contract - // c) alignments match as required by Alloc contract - if mem::size_of::() == 0 - || mem::size_of::() - != mem::size_of::<<::Source as AsIntoIter>::Item>() - || mem::align_of::() - != mem::align_of::<<::Source as AsIntoIter>::Item>() - { - // fallback to more generic implementations - return SpecFromIterNested::from_iter(iterator); - } - - let (src_buf, src_ptr, dst_buf, dst_end, cap) = unsafe { - let inner = iterator.as_inner().as_into_iter(); - ( - inner.buf.as_ptr(), - inner.ptr, - inner.buf.as_ptr() as *mut T, - inner.end as *const T, - inner.cap, - ) - }; - - // use try-fold since - // - it vectorizes better for some iterator adapters - // - unlike most internal iteration methods, it only takes a &mut self - // - it lets us thread the write pointer through its innards and get it back in the end - let sink = InPlaceDrop { inner: dst_buf, dst: dst_buf }; - let sink = iterator - .try_fold::<_, _, Result<_, !>>(sink, write_in_place_with_drop(dst_end)) - .unwrap(); - // iteration succeeded, don't drop head - let dst = ManuallyDrop::new(sink).dst; - - let src = unsafe { iterator.as_inner().as_into_iter() }; - // check if SourceIter contract was upheld - // caveat: if they weren't we may not even make it to this point - debug_assert_eq!(src_buf, src.buf.as_ptr()); - // check InPlaceIterable contract. This is only possible if the iterator advanced the - // source pointer at all. If it uses unchecked access via TrustedRandomAccess - // then the source pointer will stay in its initial position and we can't use it as reference - if src.ptr != src_ptr { - debug_assert!( - dst as *const _ <= src.ptr, - "InPlaceIterable contract violation, write pointer advanced beyond read pointer" - ); - } - - // drop any remaining values at the tail of the source - src.drop_remaining(); - // but prevent drop of the allocation itself once IntoIter goes out of scope - src.forget_allocation(); - - let vec = unsafe { - let len = dst.offset_from(dst_buf) as usize; - Vec::from_raw_parts(dst_buf, len, cap) - }; - - vec - } -} - -impl<'a, T: 'a, I> SpecFromIter<&'a T, I> for Vec -where - I: Iterator, - T: Clone, -{ - default fn from_iter(iterator: I) -> Self { - SpecFromIter::from_iter(iterator.cloned()) - } -} - -// This utilizes `iterator.as_slice().to_vec()` since spec_extend -// must take more steps to reason about the final capacity + length -// and thus do more work. `to_vec()` directly allocates the correct amount -// and fills it exactly. -impl<'a, T: 'a + Clone> SpecFromIter<&'a T, slice::Iter<'a, T>> for Vec { - #[cfg(not(test))] - fn from_iter(iterator: slice::Iter<'a, T>) -> Self { - iterator.as_slice().to_vec() - } - - // HACK(japaric): with cfg(test) the inherent `[T]::to_vec` method, which is - // required for this method definition, is not available. 
Instead use the - // `slice::to_vec` function which is only available with cfg(test) - // NB see the slice::hack module in slice.rs for more information - #[cfg(test)] - fn from_iter(iterator: slice::Iter<'a, T>) -> Self { - crate::slice::to_vec(iterator.as_slice(), Global) - } -} - -// Specialization trait used for Vec::extend -trait SpecExtend { - fn spec_extend(&mut self, iter: I); -} - -impl SpecExtend for Vec -where - I: Iterator, -{ - default fn spec_extend(&mut self, iter: I) { - self.extend_desugared(iter) - } -} - -impl SpecExtend for Vec -where - I: TrustedLen, -{ - default fn spec_extend(&mut self, iterator: I) { - // This is the case for a TrustedLen iterator. - let (low, high) = iterator.size_hint(); - if let Some(high_value) = high { - debug_assert_eq!( - low, - high_value, - "TrustedLen iterator's size hint is not exact: {:?}", - (low, high) - ); - } - if let Some(additional) = high { - self.reserve(additional); - unsafe { - let mut ptr = self.as_mut_ptr().add(self.len()); - let mut local_len = SetLenOnDrop::new(&mut self.len); - iterator.for_each(move |element| { - ptr::write(ptr, element); - ptr = ptr.offset(1); - // NB can't overflow since we would have had to alloc the address space - local_len.increment_len(1); - }); - } - } else { - self.extend_desugared(iterator) - } - } -} - -impl SpecExtend> for Vec { - fn spec_extend(&mut self, mut iterator: IntoIter) { - unsafe { - self.append_elements(iterator.as_slice() as _); - } - iterator.ptr = iterator.end; - } -} - -impl<'a, T: 'a, I, A: Allocator + 'a> SpecExtend<&'a T, I> for Vec -where - I: Iterator, - T: Clone, -{ - default fn spec_extend(&mut self, iterator: I) { - self.spec_extend(iterator.cloned()) - } -} - -impl<'a, T: 'a, A: Allocator + 'a> SpecExtend<&'a T, slice::Iter<'a, T>> for Vec -where - T: Copy, -{ - fn spec_extend(&mut self, iterator: slice::Iter<'a, T>) { - let slice = iterator.as_slice(); - unsafe { self.append_elements(slice) }; - } -} - impl Vec { // leaf method to which various SpecFrom/SpecExtend implementations delegate when // they have no further optimizations to apply @@ -2759,45 +2312,6 @@ impl<'a, T: Copy + 'a, A: Allocator + 'a> Extend<&'a T> for Vec { } } -macro_rules! __impl_slice_eq1 { - ([$($vars:tt)*] $lhs:ty, $rhs:ty $(where $ty:ty: $bound:ident)?, #[$stability:meta]) => { - #[$stability] - impl PartialEq<$rhs> for $lhs - where - T: PartialEq, - $($ty: $bound)? - { - #[inline] - fn eq(&self, other: &$rhs) -> bool { self[..] == other[..] } - #[inline] - fn ne(&self, other: &$rhs) -> bool { self[..] != other[..] } - } - } -} - -__impl_slice_eq1! { [A: Allocator] Vec, Vec, #[stable(feature = "rust1", since = "1.0.0")] } -__impl_slice_eq1! { [A: Allocator] Vec, &[U], #[stable(feature = "rust1", since = "1.0.0")] } -__impl_slice_eq1! { [A: Allocator] Vec, &mut [U], #[stable(feature = "rust1", since = "1.0.0")] } -__impl_slice_eq1! { [A: Allocator] &[T], Vec, #[stable(feature = "partialeq_vec_for_ref_slice", since = "1.46.0")] } -__impl_slice_eq1! { [A: Allocator] &mut [T], Vec, #[stable(feature = "partialeq_vec_for_ref_slice", since = "1.46.0")] } -__impl_slice_eq1! { [A: Allocator] Vec, [U], #[stable(feature = "partialeq_vec_for_slice", since = "1.48.0")] } -__impl_slice_eq1! { [A: Allocator] [T], Vec, #[stable(feature = "partialeq_vec_for_slice", since = "1.48.0")] } -__impl_slice_eq1! { [A: Allocator] Cow<'_, [T]>, Vec where T: Clone, #[stable(feature = "rust1", since = "1.0.0")] } -__impl_slice_eq1! 
{ [] Cow<'_, [T]>, &[U] where T: Clone, #[stable(feature = "rust1", since = "1.0.0")] } -__impl_slice_eq1! { [] Cow<'_, [T]>, &mut [U] where T: Clone, #[stable(feature = "rust1", since = "1.0.0")] } -__impl_slice_eq1! { [A: Allocator, const N: usize] Vec, [U; N], #[stable(feature = "rust1", since = "1.0.0")] } -__impl_slice_eq1! { [A: Allocator, const N: usize] Vec, &[U; N], #[stable(feature = "rust1", since = "1.0.0")] } - -// NOTE: some less important impls are omitted to reduce code bloat -// FIXME(Centril): Reconsider this? -//__impl_slice_eq1! { [const N: usize] Vec, &mut [B; N], } -//__impl_slice_eq1! { [const N: usize] [A; N], Vec, } -//__impl_slice_eq1! { [const N: usize] &[A; N], Vec, } -//__impl_slice_eq1! { [const N: usize] &mut [A; N], Vec, } -//__impl_slice_eq1! { [const N: usize] Cow<'a, [A]>, [B; N], } -//__impl_slice_eq1! { [const N: usize] Cow<'a, [A]>, &[B; N], } -//__impl_slice_eq1! { [const N: usize] Cow<'a, [A]>, &mut [B; N], } - /// Implements comparison of vectors, [lexicographically](core::cmp::Ord#lexicographical-comparison). #[stable(feature = "rust1", since = "1.0.0")] impl PartialOrd for Vec { @@ -2997,729 +2511,3 @@ impl TryFrom> for [T; N] { Ok(array) } } - -//////////////////////////////////////////////////////////////////////////////// -// Clone-on-write -//////////////////////////////////////////////////////////////////////////////// - -#[stable(feature = "cow_from_vec", since = "1.8.0")] -impl<'a, T: Clone> From<&'a [T]> for Cow<'a, [T]> { - fn from(s: &'a [T]) -> Cow<'a, [T]> { - Cow::Borrowed(s) - } -} - -#[stable(feature = "cow_from_vec", since = "1.8.0")] -impl<'a, T: Clone> From> for Cow<'a, [T]> { - fn from(v: Vec) -> Cow<'a, [T]> { - Cow::Owned(v) - } -} - -#[stable(feature = "cow_from_vec_ref", since = "1.28.0")] -impl<'a, T: Clone> From<&'a Vec> for Cow<'a, [T]> { - fn from(v: &'a Vec) -> Cow<'a, [T]> { - Cow::Borrowed(v.as_slice()) - } -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl<'a, T> FromIterator for Cow<'a, [T]> -where - T: Clone, -{ - fn from_iter>(it: I) -> Cow<'a, [T]> { - Cow::Owned(FromIterator::from_iter(it)) - } -} - -//////////////////////////////////////////////////////////////////////////////// -// Iterators -//////////////////////////////////////////////////////////////////////////////// - -/// An iterator that moves out of a vector. -/// -/// This `struct` is created by the `into_iter` method on [`Vec`] (provided -/// by the [`IntoIterator`] trait). -/// -/// # Example -/// -/// ``` -/// let v = vec![0, 1, 2]; -/// let iter: std::vec::IntoIter<_> = v.into_iter(); -/// ``` -#[stable(feature = "rust1", since = "1.0.0")] -pub struct IntoIter< - T, - #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, -> { - buf: NonNull, - phantom: PhantomData, - cap: usize, - alloc: A, - ptr: *const T, - end: *const T, -} - -#[stable(feature = "vec_intoiter_debug", since = "1.13.0")] -impl fmt::Debug for IntoIter { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_tuple("IntoIter").field(&self.as_slice()).finish() - } -} - -impl IntoIter { - /// Returns the remaining items of this iterator as a slice. 
- /// - /// # Examples - /// - /// ``` - /// let vec = vec!['a', 'b', 'c']; - /// let mut into_iter = vec.into_iter(); - /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']); - /// let _ = into_iter.next().unwrap(); - /// assert_eq!(into_iter.as_slice(), &['b', 'c']); - /// ``` - #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")] - pub fn as_slice(&self) -> &[T] { - unsafe { slice::from_raw_parts(self.ptr, self.len()) } - } - - /// Returns the remaining items of this iterator as a mutable slice. - /// - /// # Examples - /// - /// ``` - /// let vec = vec!['a', 'b', 'c']; - /// let mut into_iter = vec.into_iter(); - /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']); - /// into_iter.as_mut_slice()[2] = 'z'; - /// assert_eq!(into_iter.next().unwrap(), 'a'); - /// assert_eq!(into_iter.next().unwrap(), 'b'); - /// assert_eq!(into_iter.next().unwrap(), 'z'); - /// ``` - #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")] - pub fn as_mut_slice(&mut self) -> &mut [T] { - unsafe { &mut *self.as_raw_mut_slice() } - } - - /// Returns a reference to the underlying allocator. - #[unstable(feature = "allocator_api", issue = "32838")] - #[inline] - pub fn allocator(&self) -> &A { - &self.alloc - } - - fn as_raw_mut_slice(&mut self) -> *mut [T] { - ptr::slice_from_raw_parts_mut(self.ptr as *mut T, self.len()) - } - - fn drop_remaining(&mut self) { - unsafe { - ptr::drop_in_place(self.as_mut_slice()); - } - self.ptr = self.end; - } - - /// Relinquishes the backing allocation, equivalent to - /// `ptr::write(&mut self, Vec::new().into_iter())` - fn forget_allocation(&mut self) { - self.cap = 0; - self.buf = unsafe { NonNull::new_unchecked(RawVec::NEW.ptr()) }; - self.ptr = self.buf.as_ptr(); - self.end = self.buf.as_ptr(); - } -} - -#[stable(feature = "vec_intoiter_as_ref", since = "1.46.0")] -impl AsRef<[T]> for IntoIter { - fn as_ref(&self) -> &[T] { - self.as_slice() - } -} - -#[stable(feature = "rust1", since = "1.0.0")] -unsafe impl Send for IntoIter {} -#[stable(feature = "rust1", since = "1.0.0")] -unsafe impl Sync for IntoIter {} - -#[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for IntoIter { - type Item = T; - - #[inline] - fn next(&mut self) -> Option { - if self.ptr as *const _ == self.end { - None - } else if mem::size_of::() == 0 { - // purposefully don't use 'ptr.offset' because for - // vectors with 0-size elements this would return the - // same pointer. - self.ptr = unsafe { arith_offset(self.ptr as *const i8, 1) as *mut T }; - - // Make up a value of this ZST. - Some(unsafe { mem::zeroed() }) - } else { - let old = self.ptr; - self.ptr = unsafe { self.ptr.offset(1) }; - - Some(unsafe { ptr::read(old) }) - } - } - - #[inline] - fn size_hint(&self) -> (usize, Option) { - let exact = if mem::size_of::() == 0 { - (self.end as usize).wrapping_sub(self.ptr as usize) - } else { - unsafe { self.end.offset_from(self.ptr) as usize } - }; - (exact, Some(exact)) - } - - #[inline] - fn count(self) -> usize { - self.len() - } - - unsafe fn __iterator_get_unchecked(&mut self, i: usize) -> Self::Item - where - Self: TrustedRandomAccess, - { - // SAFETY: the caller must guarantee that `i` is in bounds of the - // `Vec`, so `i` cannot overflow an `isize`, and the `self.ptr.add(i)` - // is guaranteed to pointer to an element of the `Vec` and - // thus guaranteed to be valid to dereference. - // - // Also note the implementation of `Self: TrustedRandomAccess` requires - // that `T: Copy` so reading elements from the buffer doesn't invalidate - // them for `Drop`. 
- unsafe { - if mem::size_of::() == 0 { mem::zeroed() } else { ptr::read(self.ptr.add(i)) } - } - } -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl DoubleEndedIterator for IntoIter { - #[inline] - fn next_back(&mut self) -> Option { - if self.end == self.ptr { - None - } else if mem::size_of::() == 0 { - // See above for why 'ptr.offset' isn't used - self.end = unsafe { arith_offset(self.end as *const i8, -1) as *mut T }; - - // Make up a value of this ZST. - Some(unsafe { mem::zeroed() }) - } else { - self.end = unsafe { self.end.offset(-1) }; - - Some(unsafe { ptr::read(self.end) }) - } - } -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl ExactSizeIterator for IntoIter { - fn is_empty(&self) -> bool { - self.ptr == self.end - } -} - -#[stable(feature = "fused", since = "1.26.0")] -impl FusedIterator for IntoIter {} - -#[unstable(feature = "trusted_len", issue = "37572")] -unsafe impl TrustedLen for IntoIter {} - -#[doc(hidden)] -#[unstable(issue = "none", feature = "std_internals")] -// T: Copy as approximation for !Drop since get_unchecked does not advance self.ptr -// and thus we can't implement drop-handling -unsafe impl TrustedRandomAccess for IntoIter -where - T: Copy, -{ - fn may_have_side_effect() -> bool { - false - } -} - -#[stable(feature = "vec_into_iter_clone", since = "1.8.0")] -impl Clone for IntoIter { - #[cfg(not(test))] - fn clone(&self) -> Self { - self.as_slice().to_vec_in(self.alloc.clone()).into_iter() - } - #[cfg(test)] - fn clone(&self) -> Self { - crate::slice::to_vec(self.as_slice(), self.alloc.clone()).into_iter() - } -} - -#[stable(feature = "rust1", since = "1.0.0")] -unsafe impl<#[may_dangle] T, A: Allocator> Drop for IntoIter { - fn drop(&mut self) { - struct DropGuard<'a, T, A: Allocator>(&'a mut IntoIter); - - impl Drop for DropGuard<'_, T, A> { - fn drop(&mut self) { - unsafe { - // `IntoIter::alloc` is not used anymore after this - let alloc = ptr::read(&self.0.alloc); - // RawVec handles deallocation - let _ = RawVec::from_raw_parts_in(self.0.buf.as_ptr(), self.0.cap, alloc); - } - } - } - - let guard = DropGuard(self); - // destroy the remaining elements - unsafe { - ptr::drop_in_place(guard.0.as_raw_mut_slice()); - } - // now `guard` will be dropped and do the rest - } -} - -#[unstable(issue = "none", feature = "inplace_iteration")] -unsafe impl InPlaceIterable for IntoIter {} - -#[unstable(issue = "none", feature = "inplace_iteration")] -unsafe impl SourceIter for IntoIter { - type Source = Self; - - #[inline] - unsafe fn as_inner(&mut self) -> &mut Self::Source { - self - } -} - -// internal helper trait for in-place iteration specialization. -#[rustc_specialization_trait] -pub(crate) trait AsIntoIter { - type Item; - fn as_into_iter(&mut self) -> &mut IntoIter; -} - -impl AsIntoIter for IntoIter { - type Item = T; - - fn as_into_iter(&mut self) -> &mut IntoIter { - self - } -} - -/// A draining iterator for `Vec`. -/// -/// This `struct` is created by [`Vec::drain`]. -/// See its documentation for more. 
-/// -/// # Example -/// -/// ``` -/// let mut v = vec![0, 1, 2]; -/// let iter: std::vec::Drain<_> = v.drain(..); -/// ``` -#[stable(feature = "drain", since = "1.6.0")] -pub struct Drain< - 'a, - T: 'a, - #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + 'a = Global, -> { - /// Index of tail to preserve - tail_start: usize, - /// Length of tail - tail_len: usize, - /// Current remaining range to remove - iter: slice::Iter<'a, T>, - vec: NonNull>, -} - -#[stable(feature = "collection_debug", since = "1.17.0")] -impl fmt::Debug for Drain<'_, T, A> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_tuple("Drain").field(&self.iter.as_slice()).finish() - } -} - -impl<'a, T, A: Allocator> Drain<'a, T, A> { - /// Returns the remaining items of this iterator as a slice. - /// - /// # Examples - /// - /// ``` - /// let mut vec = vec!['a', 'b', 'c']; - /// let mut drain = vec.drain(..); - /// assert_eq!(drain.as_slice(), &['a', 'b', 'c']); - /// let _ = drain.next().unwrap(); - /// assert_eq!(drain.as_slice(), &['b', 'c']); - /// ``` - #[stable(feature = "vec_drain_as_slice", since = "1.46.0")] - pub fn as_slice(&self) -> &[T] { - self.iter.as_slice() - } - - /// Returns a reference to the underlying allocator. - #[unstable(feature = "allocator_api", issue = "32838")] - #[inline] - pub fn allocator(&self) -> &A { - unsafe { self.vec.as_ref().allocator() } - } -} - -#[stable(feature = "vec_drain_as_slice", since = "1.46.0")] -impl<'a, T, A: Allocator> AsRef<[T]> for Drain<'a, T, A> { - fn as_ref(&self) -> &[T] { - self.as_slice() - } -} - -#[stable(feature = "drain", since = "1.6.0")] -unsafe impl Sync for Drain<'_, T, A> {} -#[stable(feature = "drain", since = "1.6.0")] -unsafe impl Send for Drain<'_, T, A> {} - -#[stable(feature = "drain", since = "1.6.0")] -impl Iterator for Drain<'_, T, A> { - type Item = T; - - #[inline] - fn next(&mut self) -> Option { - self.iter.next().map(|elt| unsafe { ptr::read(elt as *const _) }) - } - - fn size_hint(&self) -> (usize, Option) { - self.iter.size_hint() - } -} - -#[stable(feature = "drain", since = "1.6.0")] -impl DoubleEndedIterator for Drain<'_, T, A> { - #[inline] - fn next_back(&mut self) -> Option { - self.iter.next_back().map(|elt| unsafe { ptr::read(elt as *const _) }) - } -} - -#[stable(feature = "drain", since = "1.6.0")] -impl Drop for Drain<'_, T, A> { - fn drop(&mut self) { - /// Continues dropping the remaining elements in the `Drain`, then moves back the - /// un-`Drain`ed elements to restore the original `Vec`. - struct DropGuard<'r, 'a, T, A: Allocator>(&'r mut Drain<'a, T, A>); - - impl<'r, 'a, T, A: Allocator> Drop for DropGuard<'r, 'a, T, A> { - fn drop(&mut self) { - // Continue the same loop we have below. If the loop already finished, this does - // nothing. - self.0.for_each(drop); - - if self.0.tail_len > 0 { - unsafe { - let source_vec = self.0.vec.as_mut(); - // memmove back untouched tail, update to new length - let start = source_vec.len(); - let tail = self.0.tail_start; - if tail != start { - let src = source_vec.as_ptr().add(tail); - let dst = source_vec.as_mut_ptr().add(start); - ptr::copy(src, dst, self.0.tail_len); - } - source_vec.set_len(start + self.0.tail_len); - } - } - } - } - - // exhaust self first - while let Some(item) = self.next() { - let guard = DropGuard(self); - drop(item); - mem::forget(guard); - } - - // Drop a `DropGuard` to move back the non-drained tail of `self`. 
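(The `DropGuard(self)` expression this comment refers to follows right after this aside.) The guard's net effect is observable from safe code; a behavioral sketch, noting that the exact outcome when a `Drain` is leaked is left unspecified by the docs:

```rust
fn main() {
    let mut v = vec![1, 2, 3, 4, 5];
    {
        let mut d = v.drain(1..3);
        assert_eq!(d.next(), Some(2));
        // Dropping `d` drops the undrained `3`, then the guard memmoves
        // the tail `[4, 5]` back and restores the length.
    }
    assert_eq!(v, [1, 4, 5]);

    // Leaking the drain skips the guard entirely: the vector stays
    // truncated at the drain start (this implementation's behavior only;
    // leaking makes the result unspecified).
    let mut w = vec![1, 2, 3, 4, 5];
    std::mem::forget(w.drain(1..3));
    assert_eq!(w, [1]);
}
```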
- DropGuard(self); - } -} - -#[stable(feature = "drain", since = "1.6.0")] -impl ExactSizeIterator for Drain<'_, T, A> { - fn is_empty(&self) -> bool { - self.iter.is_empty() - } -} - -#[unstable(feature = "trusted_len", issue = "37572")] -unsafe impl TrustedLen for Drain<'_, T, A> {} - -#[stable(feature = "fused", since = "1.26.0")] -impl FusedIterator for Drain<'_, T, A> {} - -/// A splicing iterator for `Vec`. -/// -/// This struct is created by [`Vec::splice()`]. -/// See its documentation for more. -/// -/// # Example -/// -/// ``` -/// let mut v = vec![0, 1, 2]; -/// let new = [7, 8]; -/// let iter: std::vec::Splice<_> = v.splice(1.., new.iter().cloned()); -/// ``` -#[derive(Debug)] -#[stable(feature = "vec_splice", since = "1.21.0")] -pub struct Splice< - 'a, - I: Iterator + 'a, - #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + 'a = Global, -> { - drain: Drain<'a, I::Item, A>, - replace_with: I, -} - -#[stable(feature = "vec_splice", since = "1.21.0")] -impl Iterator for Splice<'_, I, A> { - type Item = I::Item; - - fn next(&mut self) -> Option { - self.drain.next() - } - - fn size_hint(&self) -> (usize, Option) { - self.drain.size_hint() - } -} - -#[stable(feature = "vec_splice", since = "1.21.0")] -impl DoubleEndedIterator for Splice<'_, I, A> { - fn next_back(&mut self) -> Option { - self.drain.next_back() - } -} - -#[stable(feature = "vec_splice", since = "1.21.0")] -impl ExactSizeIterator for Splice<'_, I, A> {} - -#[stable(feature = "vec_splice", since = "1.21.0")] -impl Drop for Splice<'_, I, A> { - fn drop(&mut self) { - self.drain.by_ref().for_each(drop); - - unsafe { - if self.drain.tail_len == 0 { - self.drain.vec.as_mut().extend(self.replace_with.by_ref()); - return; - } - - // First fill the range left by drain(). - if !self.drain.fill(&mut self.replace_with) { - return; - } - - // There may be more elements. Use the lower bound as an estimate. - // FIXME: Is the upper bound a better guess? Or something else? - let (lower_bound, _upper_bound) = self.replace_with.size_hint(); - if lower_bound > 0 { - self.drain.move_tail(lower_bound); - if !self.drain.fill(&mut self.replace_with) { - return; - } - } - - // Collect any remaining elements. - // This is a zero-length vector which does not allocate if `lower_bound` was exact. - let mut collected = self.replace_with.by_ref().collect::>().into_iter(); - // Now we have an exact count. - if collected.len() > 0 { - self.drain.move_tail(collected.len()); - let filled = self.drain.fill(&mut collected); - debug_assert!(filled); - debug_assert_eq!(collected.len(), 0); - } - } - // Let `Drain::drop` move the tail back if necessary and restore `vec.len`. - } -} - -/// Private helper methods for `Splice::drop` -impl Drain<'_, T, A> { - /// The range from `self.vec.len` to `self.tail_start` contains elements - /// that have been moved out. - /// Fill that range as much as possible with new elements from the `replace_with` iterator. - /// Returns `true` if we filled the entire range. (`replace_with.next()` didn’t return `None`.) 
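Before `fill` and `move_tail` themselves, the end-to-end behavior they produce, from safe code:

```rust
fn main() {
    let mut v = vec![1, 2, 3, 4, 5];
    // Replace two elements with three: `fill` writes into the drained
    // range first; `move_tail` then shifts `[4, 5]` right by one slot to
    // make room for the extra element.
    let removed: Vec<i32> = v.splice(1..3, vec![10, 20, 30]).collect();
    assert_eq!(removed, [2, 3]);
    assert_eq!(v, [1, 10, 20, 30, 4, 5]);
}
```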
- unsafe fn fill>(&mut self, replace_with: &mut I) -> bool { - let vec = unsafe { self.vec.as_mut() }; - let range_start = vec.len; - let range_end = self.tail_start; - let range_slice = unsafe { - slice::from_raw_parts_mut(vec.as_mut_ptr().add(range_start), range_end - range_start) - }; - - for place in range_slice { - if let Some(new_item) = replace_with.next() { - unsafe { ptr::write(place, new_item) }; - vec.len += 1; - } else { - return false; - } - } - true - } - - /// Makes room for inserting more elements before the tail. - unsafe fn move_tail(&mut self, additional: usize) { - let vec = unsafe { self.vec.as_mut() }; - let len = self.tail_start + self.tail_len; - vec.buf.reserve(len, additional); - - let new_tail_start = self.tail_start + additional; - unsafe { - let src = vec.as_ptr().add(self.tail_start); - let dst = vec.as_mut_ptr().add(new_tail_start); - ptr::copy(src, dst, self.tail_len); - } - self.tail_start = new_tail_start; - } -} - -/// An iterator which uses a closure to determine if an element should be removed. -/// -/// This struct is created by [`Vec::drain_filter`]. -/// See its documentation for more. -/// -/// # Example -/// -/// ``` -/// #![feature(drain_filter)] -/// -/// let mut v = vec![0, 1, 2]; -/// let iter: std::vec::DrainFilter<_, _> = v.drain_filter(|x| *x % 2 == 0); -/// ``` -#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] -#[derive(Debug)] -pub struct DrainFilter< - 'a, - T, - F, - #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, -> where - F: FnMut(&mut T) -> bool, -{ - vec: &'a mut Vec, - /// The index of the item that will be inspected by the next call to `next`. - idx: usize, - /// The number of items that have been drained (removed) thus far. - del: usize, - /// The original length of `vec` prior to draining. - old_len: usize, - /// The filter test predicate. - pred: F, - /// A flag that indicates a panic has occurred in the filter test predicate. - /// This is used as a hint in the drop implementation to prevent consumption - /// of the remainder of the `DrainFilter`. Any unprocessed items will be - /// backshifted in the `vec`, but no further items will be dropped or - /// tested by the filter predicate. - panic_flag: bool, -} - -impl DrainFilter<'_, T, F, A> -where - F: FnMut(&mut T) -> bool, -{ - /// Returns a reference to the underlying allocator. - #[unstable(feature = "allocator_api", issue = "32838")] - #[inline] - pub fn allocator(&self) -> &A { - self.vec.allocator() - } -} - -#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] -impl Iterator for DrainFilter<'_, T, F, A> -where - F: FnMut(&mut T) -> bool, -{ - type Item = T; - - fn next(&mut self) -> Option { - unsafe { - while self.idx < self.old_len { - let i = self.idx; - let v = slice::from_raw_parts_mut(self.vec.as_mut_ptr(), self.old_len); - self.panic_flag = true; - let drained = (self.pred)(&mut v[i]); - self.panic_flag = false; - // Update the index *after* the predicate is called. If the index - // is updated prior and the predicate panics, the element at this - // index would be leaked. 
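For orientation while reading this loop (which continues below), the contract it implements as seen by the caller: matching elements come out of the iterator, and everything kept is backshifted in place. Nightly-only, as in the doc example above:

```rust
#![feature(drain_filter)]

fn main() {
    let mut v = vec![1, 2, 3, 4, 5, 6];
    let evens: Vec<i32> = v.drain_filter(|x| *x % 2 == 0).collect();
    assert_eq!(evens, [2, 4, 6]);
    // Retained elements keep their order: `del` counts the holes so far,
    // and the keep-branch copies each survivor `del` slots to the left.
    assert_eq!(v, [1, 3, 5]);
}
```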
- self.idx += 1; - if drained { - self.del += 1; - return Some(ptr::read(&v[i])); - } else if self.del > 0 { - let del = self.del; - let src: *const T = &v[i]; - let dst: *mut T = &mut v[i - del]; - ptr::copy_nonoverlapping(src, dst, 1); - } - } - None - } - } - - fn size_hint(&self) -> (usize, Option) { - (0, Some(self.old_len - self.idx)) - } -} - -#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] -impl Drop for DrainFilter<'_, T, F, A> -where - F: FnMut(&mut T) -> bool, -{ - fn drop(&mut self) { - struct BackshiftOnDrop<'a, 'b, T, F, A: Allocator> - where - F: FnMut(&mut T) -> bool, - { - drain: &'b mut DrainFilter<'a, T, F, A>, - } - - impl<'a, 'b, T, F, A: Allocator> Drop for BackshiftOnDrop<'a, 'b, T, F, A> - where - F: FnMut(&mut T) -> bool, - { - fn drop(&mut self) { - unsafe { - if self.drain.idx < self.drain.old_len && self.drain.del > 0 { - // This is a pretty messed up state, and there isn't really an - // obviously right thing to do. We don't want to keep trying - // to execute `pred`, so we just backshift all the unprocessed - // elements and tell the vec that they still exist. The backshift - // is required to prevent a double-drop of the last successfully - // drained item prior to a panic in the predicate. - let ptr = self.drain.vec.as_mut_ptr(); - let src = ptr.add(self.drain.idx); - let dst = src.sub(self.drain.del); - let tail_len = self.drain.old_len - self.drain.idx; - src.copy_to(dst, tail_len); - } - self.drain.vec.set_len(self.drain.old_len - self.drain.del); - } - } - } - - let backshift = BackshiftOnDrop { drain: self }; - - // Attempt to consume any remaining elements if the filter predicate - // has not yet panicked. We'll backshift any remaining elements - // whether we've already panicked or if the consumption here panics. - if !backshift.drain.panic_flag { - backshift.drain.for_each(drop); - } - } -} diff --git a/library/alloc/src/vec/partial_eq.rs b/library/alloc/src/vec/partial_eq.rs new file mode 100644 index 0000000000000..ff90b6caf4601 --- /dev/null +++ b/library/alloc/src/vec/partial_eq.rs @@ -0,0 +1,43 @@ +use crate::alloc::Allocator; +use crate::borrow::Cow; + +use super::Vec; + +macro_rules! __impl_slice_eq1 { + ([$($vars:tt)*] $lhs:ty, $rhs:ty $(where $ty:ty: $bound:ident)?, #[$stability:meta]) => { + #[$stability] + impl PartialEq<$rhs> for $lhs + where + T: PartialEq, + $($ty: $bound)? + { + #[inline] + fn eq(&self, other: &$rhs) -> bool { self[..] == other[..] } + #[inline] + fn ne(&self, other: &$rhs) -> bool { self[..] != other[..] } + } + } +} + +__impl_slice_eq1! { [A: Allocator] Vec, Vec, #[stable(feature = "rust1", since = "1.0.0")] } +__impl_slice_eq1! { [A: Allocator] Vec, &[U], #[stable(feature = "rust1", since = "1.0.0")] } +__impl_slice_eq1! { [A: Allocator] Vec, &mut [U], #[stable(feature = "rust1", since = "1.0.0")] } +__impl_slice_eq1! { [A: Allocator] &[T], Vec, #[stable(feature = "partialeq_vec_for_ref_slice", since = "1.46.0")] } +__impl_slice_eq1! { [A: Allocator] &mut [T], Vec, #[stable(feature = "partialeq_vec_for_ref_slice", since = "1.46.0")] } +__impl_slice_eq1! { [A: Allocator] Vec, [U], #[stable(feature = "partialeq_vec_for_slice", since = "1.48.0")] } +__impl_slice_eq1! { [A: Allocator] [T], Vec, #[stable(feature = "partialeq_vec_for_slice", since = "1.48.0")] } +__impl_slice_eq1! { [A: Allocator] Cow<'_, [T]>, Vec where T: Clone, #[stable(feature = "rust1", since = "1.0.0")] } +__impl_slice_eq1! 
{ [] Cow<'_, [T]>, &[U] where T: Clone, #[stable(feature = "rust1", since = "1.0.0")] } +__impl_slice_eq1! { [] Cow<'_, [T]>, &mut [U] where T: Clone, #[stable(feature = "rust1", since = "1.0.0")] } +__impl_slice_eq1! { [A: Allocator, const N: usize] Vec, [U; N], #[stable(feature = "rust1", since = "1.0.0")] } +__impl_slice_eq1! { [A: Allocator, const N: usize] Vec, &[U; N], #[stable(feature = "rust1", since = "1.0.0")] } + +// NOTE: some less important impls are omitted to reduce code bloat +// FIXME(Centril): Reconsider this? +//__impl_slice_eq1! { [const N: usize] Vec, &mut [B; N], } +//__impl_slice_eq1! { [const N: usize] [A; N], Vec, } +//__impl_slice_eq1! { [const N: usize] &[A; N], Vec, } +//__impl_slice_eq1! { [const N: usize] &mut [A; N], Vec, } +//__impl_slice_eq1! { [const N: usize] Cow<'a, [A]>, [B; N], } +//__impl_slice_eq1! { [const N: usize] Cow<'a, [A]>, &[B; N], } +//__impl_slice_eq1! { [const N: usize] Cow<'a, [A]>, &mut [B; N], } diff --git a/library/alloc/src/vec/set_len_on_drop.rs b/library/alloc/src/vec/set_len_on_drop.rs new file mode 100644 index 0000000000000..8b66bc8121296 --- /dev/null +++ b/library/alloc/src/vec/set_len_on_drop.rs @@ -0,0 +1,28 @@ +// Set the length of the vec when the `SetLenOnDrop` value goes out of scope. +// +// The idea is: The length field in SetLenOnDrop is a local variable +// that the optimizer will see does not alias with any stores through the Vec's data +// pointer. This is a workaround for alias analysis issue #32155 +pub(super) struct SetLenOnDrop<'a> { + len: &'a mut usize, + local_len: usize, +} + +impl<'a> SetLenOnDrop<'a> { + #[inline] + pub(super) fn new(len: &'a mut usize) -> Self { + SetLenOnDrop { local_len: *len, len } + } + + #[inline] + pub(super) fn increment_len(&mut self, increment: usize) { + self.local_len += increment; + } +} + +impl Drop for SetLenOnDrop<'_> { + #[inline] + fn drop(&mut self) { + *self.len = self.local_len; + } +} diff --git a/library/alloc/src/vec/source_iter_marker.rs b/library/alloc/src/vec/source_iter_marker.rs new file mode 100644 index 0000000000000..8c0e95559fa15 --- /dev/null +++ b/library/alloc/src/vec/source_iter_marker.rs @@ -0,0 +1,108 @@ +use core::iter::{InPlaceIterable, SourceIter}; +use core::mem::{self, ManuallyDrop}; +use core::ptr::{self}; + +use super::{AsIntoIter, InPlaceDrop, SpecFromIter, SpecFromIterNested, Vec}; + +/// Specialization marker for collecting an iterator pipeline into a Vec while reusing the +/// source allocation, i.e. executing the pipeline in place. +/// +/// The SourceIter parent trait is necessary for the specializing function to access the allocation +/// which is to be reused. But it is not sufficient for the specialization to be valid. See +/// additional bounds on the impl. +#[rustc_unsafe_specialization_marker] +pub(super) trait SourceIterMarker: SourceIter {} + +// The std-internal SourceIter/InPlaceIterable traits are only implemented by chains of +// Adapter>> (all owned by core/std). Additional bounds +// on the adapter implementations (beyond `impl Trait for Adapter`) only depend on other +// traits already marked as specialization traits (Copy, TrustedRandomAccess, FusedIterator). +// I.e. the marker does not depend on lifetimes of user-supplied types. Modulo the Copy hole, which +// several other specializations already depend on. 
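What this specialization buys can be observed via pointer identity. Illustrative only: allocation reuse here is an optimization of this implementation, not a documented guarantee, hence the `println!` rather than an assert:

```rust
fn main() {
    let v: Vec<u32> = (0..1024).collect();
    let before = v.as_ptr();
    // An owning pipeline with matching item size/alignment is eligible
    // for the in-place path, so the collect below can reuse the buffer.
    let w: Vec<u32> = v.into_iter().map(|x| x.wrapping_mul(3)).collect();
    println!("allocation reused: {}", before == w.as_ptr());
}
```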
+impl SourceIterMarker for T where T: SourceIter + InPlaceIterable {} + +impl SpecFromIter for Vec +where + I: Iterator + SourceIterMarker, +{ + default fn from_iter(mut iterator: I) -> Self { + // Additional requirements which cannot be expressed via trait bounds. We rely on const eval + // instead: + // a) no ZSTs as there would be no allocation to reuse and pointer arithmetic would panic + // b) size match as required by Alloc contract + // c) alignments match as required by Alloc contract + if mem::size_of::() == 0 + || mem::size_of::() + != mem::size_of::<<::Source as AsIntoIter>::Item>() + || mem::align_of::() + != mem::align_of::<<::Source as AsIntoIter>::Item>() + { + // fall back to more generic implementations + return SpecFromIterNested::from_iter(iterator); + } + + let (src_buf, src_ptr, dst_buf, dst_end, cap) = unsafe { + let inner = iterator.as_inner().as_into_iter(); + ( + inner.buf.as_ptr(), + inner.ptr, + inner.buf.as_ptr() as *mut T, + inner.end as *const T, + inner.cap, + ) + }; + + // use try-fold since + // - it vectorizes better for some iterator adapters + // - unlike most internal iteration methods, it only takes a &mut self + // - it lets us thread the write pointer through its innards and get it back in the end + let sink = InPlaceDrop { inner: dst_buf, dst: dst_buf }; + let sink = iterator + .try_fold::<_, _, Result<_, !>>(sink, write_in_place_with_drop(dst_end)) + .unwrap(); + // iteration succeeded, don't drop head + let dst = ManuallyDrop::new(sink).dst; + + let src = unsafe { iterator.as_inner().as_into_iter() }; + // check if SourceIter contract was upheld + // caveat: if they weren't we may not even make it to this point + debug_assert_eq!(src_buf, src.buf.as_ptr()); + // check InPlaceIterable contract. This is only possible if the iterator advanced the + // source pointer at all.
If it uses unchecked access via TrustedRandomAccess + // then the source pointer will stay in its initial position and we can't use it as reference + if src.ptr != src_ptr { + debug_assert!( + dst as *const _ <= src.ptr, + "InPlaceIterable contract violation, write pointer advanced beyond read pointer" + ); + } + + // drop any remaining values at the tail of the source + src.drop_remaining(); + // but prevent drop of the allocation itself once IntoIter goes out of scope + src.forget_allocation(); + + let vec = unsafe { + let len = dst.offset_from(dst_buf) as usize; + Vec::from_raw_parts(dst_buf, len, cap) + }; + + vec + } +} + +fn write_in_place_with_drop( + src_end: *const T, +) -> impl FnMut(InPlaceDrop, T) -> Result, !> { + move |mut sink, item| { + unsafe { + // the InPlaceIterable contract cannot be verified precisely here since + // try_fold has an exclusive reference to the source pointer + // all we can do is check if it's still in range + debug_assert!(sink.dst as *const _ <= src_end, "InPlaceIterable contract violation"); + ptr::write(sink.dst, item); + sink.dst = sink.dst.add(1); + } + Ok(sink) + } +} diff --git a/library/alloc/src/vec/spec_extend.rs b/library/alloc/src/vec/spec_extend.rs new file mode 100644 index 0000000000000..b6186a7ebaf73 --- /dev/null +++ b/library/alloc/src/vec/spec_extend.rs @@ -0,0 +1,82 @@ +use crate::alloc::Allocator; +use core::iter::TrustedLen; +use core::ptr::{self}; +use core::slice::{self}; + +use super::{IntoIter, SetLenOnDrop, Vec}; + +// Specialization trait used for Vec::extend +pub(super) trait SpecExtend { + fn spec_extend(&mut self, iter: I); +} + +impl SpecExtend for Vec +where + I: Iterator, +{ + default fn spec_extend(&mut self, iter: I) { + self.extend_desugared(iter) + } +} + +impl SpecExtend for Vec +where + I: TrustedLen, +{ + default fn spec_extend(&mut self, iterator: I) { + // This is the case for a TrustedLen iterator. 
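As a plain-Rust sketch of the write loop that follows (a hypothetical helper, not std code; the real body threads `SetLenOnDrop` through `for_each` so that a panicking iterator still commits the prefix it managed to write):

```rust
use std::ptr;

// Assumed demo helper: extend from an exact-size source by reserving
// once and writing through a raw pointer.
fn extend_exact(v: &mut Vec<u32>, it: impl ExactSizeIterator<Item = u32>) {
    let n = it.len();
    v.reserve(n);
    let start = v.len();
    unsafe {
        let mut dst = v.as_mut_ptr().add(start);
        let mut written = 0;
        for x in it {
            ptr::write(dst, x);
            dst = dst.add(1);
            written += 1;
        }
        // The real code updates the length via SetLenOnDrop instead, so
        // this step effectively runs even if `it` panics partway through.
        v.set_len(start + written);
    }
}

fn main() {
    let mut v = vec![1, 2];
    extend_exact(&mut v, 3u32..6);
    assert_eq!(v, [1, 2, 3, 4, 5]);
}
```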
+ let (low, high) = iterator.size_hint(); + if let Some(high_value) = high { + debug_assert_eq!( + low, + high_value, + "TrustedLen iterator's size hint is not exact: {:?}", + (low, high) + ); + } + if let Some(additional) = high { + self.reserve(additional); + unsafe { + let mut ptr = self.as_mut_ptr().add(self.len()); + let mut local_len = SetLenOnDrop::new(&mut self.len); + iterator.for_each(move |element| { + ptr::write(ptr, element); + ptr = ptr.offset(1); + // NB can't overflow since we would have had to alloc the address space + local_len.increment_len(1); + }); + } + } else { + self.extend_desugared(iterator) + } + } +} + +impl SpecExtend> for Vec { + fn spec_extend(&mut self, mut iterator: IntoIter) { + unsafe { + self.append_elements(iterator.as_slice() as _); + } + iterator.ptr = iterator.end; + } +} + +impl<'a, T: 'a, I, A: Allocator + 'a> SpecExtend<&'a T, I> for Vec +where + I: Iterator, + T: Clone, +{ + default fn spec_extend(&mut self, iterator: I) { + self.spec_extend(iterator.cloned()) + } +} + +impl<'a, T: 'a, A: Allocator + 'a> SpecExtend<&'a T, slice::Iter<'a, T>> for Vec +where + T: Copy, +{ + fn spec_extend(&mut self, iterator: slice::Iter<'a, T>) { + let slice = iterator.as_slice(); + unsafe { self.append_elements(slice) }; + } +} diff --git a/library/alloc/src/vec/spec_from_elem.rs b/library/alloc/src/vec/spec_from_elem.rs new file mode 100644 index 0000000000000..de610174783c4 --- /dev/null +++ b/library/alloc/src/vec/spec_from_elem.rs @@ -0,0 +1,60 @@ +use crate::alloc::Allocator; +use crate::raw_vec::RawVec; +use core::ptr::{self}; + +use super::{ExtendElement, IsZero, Vec}; + +// Specialization trait used for Vec::from_elem +pub(super) trait SpecFromElem: Sized { + fn from_elem(elem: Self, n: usize, alloc: A) -> Vec; +} + +impl SpecFromElem for T { + default fn from_elem(elem: Self, n: usize, alloc: A) -> Vec { + let mut v = Vec::with_capacity_in(n, alloc); + v.extend_with(n, ExtendElement(elem)); + v + } +} + +impl SpecFromElem for i8 { + #[inline] + fn from_elem(elem: i8, n: usize, alloc: A) -> Vec { + if elem == 0 { + return Vec { buf: RawVec::with_capacity_zeroed_in(n, alloc), len: n }; + } + unsafe { + let mut v = Vec::with_capacity_in(n, alloc); + ptr::write_bytes(v.as_mut_ptr(), elem as u8, n); + v.set_len(n); + v + } + } +} + +impl SpecFromElem for u8 { + #[inline] + fn from_elem(elem: u8, n: usize, alloc: A) -> Vec { + if elem == 0 { + return Vec { buf: RawVec::with_capacity_zeroed_in(n, alloc), len: n }; + } + unsafe { + let mut v = Vec::with_capacity_in(n, alloc); + ptr::write_bytes(v.as_mut_ptr(), elem, n); + v.set_len(n); + v + } + } +} + +impl SpecFromElem for T { + #[inline] + fn from_elem(elem: T, n: usize, alloc: A) -> Vec { + if elem.is_zero() { + return Vec { buf: RawVec::with_capacity_zeroed_in(n, alloc), len: n }; + } + let mut v = Vec::with_capacity_in(n, alloc); + v.extend_with(n, ExtendElement(elem)); + v + } +} diff --git a/library/alloc/src/vec/spec_from_iter.rs b/library/alloc/src/vec/spec_from_iter.rs new file mode 100644 index 0000000000000..bbfcc68daeff4 --- /dev/null +++ b/library/alloc/src/vec/spec_from_iter.rs @@ -0,0 +1,97 @@ +use core::mem::ManuallyDrop; +use core::ptr::{self}; +use core::slice::{self}; + +use super::{IntoIter, SpecExtend, SpecFromIterNested, Vec}; + +/// Specialization trait used for Vec::from_iter +/// +/// ## The delegation graph: +/// +/// ```text +/// +-------------+ +/// |FromIterator | +/// +-+-----------+ +/// | +/// v +/// +-+-------------------------------+ +---------------------+ +/// |SpecFromIter 
+---->+SpecFromIterNested | +/// |where I: | | |where I: | +/// | Iterator (default)----------+ | | Iterator (default) | +/// | vec::IntoIter | | | TrustedLen | +/// | SourceIterMarker---fallback-+ | | | +/// | slice::Iter | | | +/// | Iterator | +---------------------+ +/// +---------------------------------+ +/// ``` +pub(super) trait SpecFromIter { + fn from_iter(iter: I) -> Self; +} + +impl SpecFromIter for Vec +where + I: Iterator, +{ + default fn from_iter(iterator: I) -> Self { + SpecFromIterNested::from_iter(iterator) + } +} + +impl SpecFromIter> for Vec { + fn from_iter(iterator: IntoIter) -> Self { + // A common case is passing a vector into a function which immediately + // re-collects into a vector. We can short-circuit this if the IntoIter + // has not been advanced at all. + // When it has been advanced, we can also reuse the memory and move the data to the front. + // But we only do so when the resulting Vec wouldn't have more unused capacity + // than creating it through the generic FromIterator implementation would. That limitation + // is not strictly necessary as Vec's allocation behavior is intentionally unspecified. + // But it is a conservative choice. + let has_advanced = iterator.buf.as_ptr() as *const _ != iterator.ptr; + if !has_advanced || iterator.len() >= iterator.cap / 2 { + unsafe { + let it = ManuallyDrop::new(iterator); + if has_advanced { + ptr::copy(it.ptr, it.buf.as_ptr(), it.len()); + } + return Vec::from_raw_parts(it.buf.as_ptr(), it.len(), it.cap); + } + } + + let mut vec = Vec::new(); + // must delegate to spec_extend() since extend() itself delegates + // to spec_from for empty Vecs + vec.spec_extend(iterator); + vec + } +} + +impl<'a, T: 'a, I> SpecFromIter<&'a T, I> for Vec +where + I: Iterator, + T: Clone, +{ + default fn from_iter(iterator: I) -> Self { + SpecFromIter::from_iter(iterator.cloned()) + } +} + +// This utilizes `iterator.as_slice().to_vec()` since spec_extend +// must take more steps to reason about the final capacity + length +// and thus do more work. `to_vec()` directly allocates the correct amount +// and fills it exactly. +impl<'a, T: 'a + Clone> SpecFromIter<&'a T, slice::Iter<'a, T>> for Vec { + #[cfg(not(test))] + fn from_iter(iterator: slice::Iter<'a, T>) -> Self { + iterator.as_slice().to_vec() + } + + // HACK(japaric): with cfg(test) the inherent `[T]::to_vec` method, which is + // required for this method definition, is not available. Instead use the + // `slice::to_vec` function which is only available with cfg(test) + // NB see the slice::hack module in slice.rs for more information + #[cfg(test)] + fn from_iter(iterator: slice::Iter<'a, T>) -> Self { + crate::slice::to_vec(iterator.as_slice(), crate::alloc::Global) + } +} diff --git a/library/alloc/src/vec/spec_from_iter_nested.rs b/library/alloc/src/vec/spec_from_iter_nested.rs new file mode 100644 index 0000000000000..6abd4ff2a3f0a --- /dev/null +++ b/library/alloc/src/vec/spec_from_iter_nested.rs @@ -0,0 +1,56 @@ +use core::iter::TrustedLen; +use core::ptr::{self}; + +use super::{SpecExtend, Vec}; + +/// Another specialization trait for Vec::from_iter, +/// necessary to manually prioritize overlapping specializations; +/// see [`SpecFromIter`] for details.
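The user-visible payoff of the `TrustedLen` arm defined in the trait below is a single, correctly sized up-front allocation. Illustrative: the exact final capacity is not guaranteed, hence the `println!`:

```rust
fn main() {
    // `Range<u32>` is TrustedLen and `map` preserves it, so from_iter can
    // allocate the final capacity in one call instead of growing
    // geometrically while pushing.
    let v: Vec<u32> = (0..512u32).map(|x| x * x).collect();
    assert_eq!(v.len(), 512);
    println!("capacity = {}", v.capacity()); // typically exactly 512 here
}
```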
+pub(super) trait SpecFromIterNested { + fn from_iter(iter: I) -> Self; +} + +impl SpecFromIterNested for Vec +where + I: Iterator, +{ + default fn from_iter(mut iterator: I) -> Self { + // Unroll the first iteration, as the vector is going to be + // expanded on this iteration in every case when the iterable is not + // empty, but the loop in extend_desugared() is not going to see the + // vector being full in the few subsequent loop iterations. + // So we get better branch prediction. + let mut vector = match iterator.next() { + None => return Vec::new(), + Some(element) => { + let (lower, _) = iterator.size_hint(); + let mut vector = Vec::with_capacity(lower.saturating_add(1)); + unsafe { + ptr::write(vector.as_mut_ptr(), element); + vector.set_len(1); + } + vector + } + }; + // must delegate to spec_extend() since extend() itself delegates + // to spec_from for empty Vecs + as SpecExtend>::spec_extend(&mut vector, iterator); + vector + } +} + +impl SpecFromIterNested for Vec +where + I: TrustedLen, +{ + fn from_iter(iterator: I) -> Self { + let mut vector = match iterator.size_hint() { + (_, Some(upper)) => Vec::with_capacity(upper), + _ => Vec::new(), + }; + // must delegate to spec_extend() since extend() itself delegates + // to spec_from for empty Vecs + vector.spec_extend(iterator); + vector + } +} diff --git a/library/alloc/src/vec/splice.rs b/library/alloc/src/vec/splice.rs new file mode 100644 index 0000000000000..0a27b5b62ecf5 --- /dev/null +++ b/library/alloc/src/vec/splice.rs @@ -0,0 +1,133 @@ +use crate::alloc::{Allocator, Global}; +use core::ptr::{self}; +use core::slice::{self}; + +use super::{Drain, Vec}; + +/// A splicing iterator for `Vec`. +/// +/// This struct is created by [`Vec::splice()`]. +/// See its documentation for more. +/// +/// # Example +/// +/// ``` +/// let mut v = vec![0, 1, 2]; +/// let new = [7, 8]; +/// let iter: std::vec::Splice<_> = v.splice(1.., new.iter().cloned()); +/// ``` +#[derive(Debug)] +#[stable(feature = "vec_splice", since = "1.21.0")] +pub struct Splice< + 'a, + I: Iterator + 'a, + #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + 'a = Global, +> { + pub(super) drain: Drain<'a, I::Item, A>, + pub(super) replace_with: I, +} + +#[stable(feature = "vec_splice", since = "1.21.0")] +impl Iterator for Splice<'_, I, A> { + type Item = I::Item; + + fn next(&mut self) -> Option { + self.drain.next() + } + + fn size_hint(&self) -> (usize, Option) { + self.drain.size_hint() + } +} + +#[stable(feature = "vec_splice", since = "1.21.0")] +impl DoubleEndedIterator for Splice<'_, I, A> { + fn next_back(&mut self) -> Option { + self.drain.next_back() + } +} + +#[stable(feature = "vec_splice", since = "1.21.0")] +impl ExactSizeIterator for Splice<'_, I, A> {} + +#[stable(feature = "vec_splice", since = "1.21.0")] +impl Drop for Splice<'_, I, A> { + fn drop(&mut self) { + self.drain.by_ref().for_each(drop); + + unsafe { + if self.drain.tail_len == 0 { + self.drain.vec.as_mut().extend(self.replace_with.by_ref()); + return; + } + + // First fill the range left by drain(). + if !self.drain.fill(&mut self.replace_with) { + return; + } + + // There may be more elements. Use the lower bound as an estimate. + // FIXME: Is the upper bound a better guess? Or something else? + let (lower_bound, _upper_bound) = self.replace_with.size_hint(); + if lower_bound > 0 { + self.drain.move_tail(lower_bound); + if !self.drain.fill(&mut self.replace_with) { + return; + } + } + + // Collect any remaining elements. 
+ // This is a zero-length vector which does not allocate if `lower_bound` was exact. + let mut collected = self.replace_with.by_ref().collect::>().into_iter(); + // Now we have an exact count. + if collected.len() > 0 { + self.drain.move_tail(collected.len()); + let filled = self.drain.fill(&mut collected); + debug_assert!(filled); + debug_assert_eq!(collected.len(), 0); + } + } + // Let `Drain::drop` move the tail back if necessary and restore `vec.len`. + } +} + +/// Private helper methods for `Splice::drop` +impl Drain<'_, T, A> { + /// The range from `self.vec.len` to `self.tail_start` contains elements + /// that have been moved out. + /// Fill that range as much as possible with new elements from the `replace_with` iterator. + /// Returns `true` if we filled the entire range. (`replace_with.next()` didn’t return `None`.) + unsafe fn fill>(&mut self, replace_with: &mut I) -> bool { + let vec = unsafe { self.vec.as_mut() }; + let range_start = vec.len; + let range_end = self.tail_start; + let range_slice = unsafe { + slice::from_raw_parts_mut(vec.as_mut_ptr().add(range_start), range_end - range_start) + }; + + for place in range_slice { + if let Some(new_item) = replace_with.next() { + unsafe { ptr::write(place, new_item) }; + vec.len += 1; + } else { + return false; + } + } + true + } + + /// Makes room for inserting more elements before the tail. + unsafe fn move_tail(&mut self, additional: usize) { + let vec = unsafe { self.vec.as_mut() }; + let len = self.tail_start + self.tail_len; + vec.buf.reserve(len, additional); + + let new_tail_start = self.tail_start + additional; + unsafe { + let src = vec.as_ptr().add(self.tail_start); + let dst = vec.as_mut_ptr().add(new_tail_start); + ptr::copy(src, dst, self.tail_len); + } + self.tail_start = new_tail_start; + } +} diff --git a/library/core/src/iter/adapters/intersperse.rs b/library/core/src/iter/adapters/intersperse.rs new file mode 100644 index 0000000000000..362326725490f --- /dev/null +++ b/library/core/src/iter/adapters/intersperse.rs @@ -0,0 +1,76 @@ +use super::Peekable; + +/// An iterator adapter that places a separator between all elements. +#[unstable(feature = "iter_intersperse", reason = "recently added", issue = "79524")] +#[derive(Debug, Clone)] +pub struct Intersperse +where + I::Item: Clone, +{ + separator: I::Item, + iter: Peekable, + needs_sep: bool, +} + +impl Intersperse +where + I::Item: Clone, +{ + pub(in crate::iter) fn new(iter: I, separator: I::Item) -> Self { + Self { iter: iter.peekable(), separator, needs_sep: false } + } +} + +#[unstable(feature = "iter_intersperse", reason = "recently added", issue = "79524")] +impl Iterator for Intersperse +where + I: Iterator, + I::Item: Clone, +{ + type Item = I::Item; + + #[inline] + fn next(&mut self) -> Option { + if self.needs_sep && self.iter.peek().is_some() { + self.needs_sep = false; + Some(self.separator.clone()) + } else { + self.needs_sep = true; + self.iter.next() + } + } + + fn fold(mut self, init: B, mut f: F) -> B + where + Self: Sized, + F: FnMut(B, Self::Item) -> B, + { + let mut accum = init; + + // Use `peek()` first to avoid calling `next()` on an empty iterator. 
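The edge cases this peek-guard exists for, from the caller's perspective (the `fold` body continues below; nightly feature gate as in the docs):

```rust
#![feature(iter_intersperse)]

fn main() {
    // Separators go strictly *between* items: empty and single-element
    // sources yield no separator at all.
    let empty: Vec<&str> = std::iter::empty().intersperse(", ").collect();
    assert!(empty.is_empty());

    let one: String = ["a"].iter().copied().intersperse(", ").collect();
    assert_eq!(one, "a");

    let many: String = ["a", "b", "c"].iter().copied().intersperse("-").collect();
    assert_eq!(many, "a-b-c");
}
```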
+ if !self.needs_sep || self.iter.peek().is_some() { + if let Some(x) = self.iter.next() { + accum = f(accum, x); + } + } + + let element = &self.separator; + + self.iter.fold(accum, |mut accum, x| { + accum = f(accum, element.clone()); + accum = f(accum, x); + accum + }) + } + + fn size_hint(&self) -> (usize, Option) { + let (lo, hi) = self.iter.size_hint(); + let next_is_elem = !self.needs_sep; + let lo = lo.saturating_sub(next_is_elem as usize).saturating_add(lo); + let hi = match hi { + Some(hi) => hi.saturating_sub(next_is_elem as usize).checked_add(hi), + None => None, + }; + (lo, hi) + } +} diff --git a/library/core/src/iter/adapters/mod.rs b/library/core/src/iter/adapters/mod.rs index b8d3430f91099..7dfbf32cea7b8 100644 --- a/library/core/src/iter/adapters/mod.rs +++ b/library/core/src/iter/adapters/mod.rs @@ -11,6 +11,7 @@ mod filter_map; mod flatten; mod fuse; mod inspect; +mod intersperse; mod map; mod map_while; mod peekable; @@ -41,6 +42,9 @@ pub use self::flatten::Flatten; #[stable(feature = "iter_copied", since = "1.36.0")] pub use self::copied::Copied; +#[unstable(feature = "iter_intersperse", reason = "recently added", issue = "79524")] +pub use self::intersperse::Intersperse; + #[unstable(feature = "iter_map_while", reason = "recently added", issue = "68537")] pub use self::map_while::MapWhile; diff --git a/library/core/src/iter/mod.rs b/library/core/src/iter/mod.rs index 3e74637b49f1c..569de719d03d6 100644 --- a/library/core/src/iter/mod.rs +++ b/library/core/src/iter/mod.rs @@ -395,6 +395,8 @@ pub use self::adapters::Cloned; pub use self::adapters::Copied; #[stable(feature = "iterator_flatten", since = "1.29.0")] pub use self::adapters::Flatten; +#[unstable(feature = "iter_intersperse", reason = "recently added", issue = "79524")] +pub use self::adapters::Intersperse; #[unstable(feature = "iter_map_while", reason = "recently added", issue = "68537")] pub use self::adapters::MapWhile; #[unstable(feature = "inplace_iteration", issue = "none")] diff --git a/library/core/src/iter/traits/iterator.rs b/library/core/src/iter/traits/iterator.rs index 7ba16f89284e1..633175702d870 100644 --- a/library/core/src/iter/traits/iterator.rs +++ b/library/core/src/iter/traits/iterator.rs @@ -8,7 +8,7 @@ use crate::ops::{Add, ControlFlow, Try}; use super::super::TrustedRandomAccess; use super::super::{Chain, Cloned, Copied, Cycle, Enumerate, Filter, FilterMap, Fuse}; use super::super::{FlatMap, Flatten}; -use super::super::{FromIterator, Product, Sum, Zip}; +use super::super::{FromIterator, Intersperse, Product, Sum, Zip}; use super::super::{ Inspect, Map, MapWhile, Peekable, Rev, Scan, Skip, SkipWhile, StepBy, Take, TakeWhile, }; @@ -569,6 +569,28 @@ pub trait Iterator { Zip::new(self, other.into_iter()) } + /// Places a copy of `separator` between all elements. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// #![feature(iter_intersperse)] + /// + /// let hello = ["Hello", "World"].iter().copied().intersperse(" ").collect::(); + /// assert_eq!(hello, "Hello World"); + /// ``` + #[inline] + #[unstable(feature = "iter_intersperse", reason = "recently added", issue = "79524")] + fn intersperse(self, separator: Self::Item) -> Intersperse + where + Self: Sized, + Self::Item: Clone, + { + Intersperse::new(self, separator) + } + /// Takes a closure and creates an iterator which calls that closure on each /// element. 
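Back to the `size_hint` arithmetic in `Intersperse` above, worked through on a concrete iterator (nightly):

```rust
#![feature(iter_intersperse)]

fn main() {
    let xs = [1, 2, 3];
    let mut it = xs.iter().intersperse(&0);
    // Fresh iterator: 3 elements + 2 separators, i.e. (3 - 1) + 3 = 5.
    assert_eq!(it.size_hint(), (5, Some(5)));

    it.next(); // consumed a source element; a separator is now pending
    // needs_sep is set, so next_is_elem = 0: 2 + 2 = 4.
    assert_eq!(it.size_hint(), (4, Some(4)));
}
```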
/// diff --git a/library/core/src/mem/mod.rs b/library/core/src/mem/mod.rs index 87956787242ac..22e44a3c40904 100644 --- a/library/core/src/mem/mod.rs +++ b/library/core/src/mem/mod.rs @@ -379,7 +379,8 @@ pub const fn size_of_val(val: &T) -> usize { /// ``` #[inline] #[unstable(feature = "layout_for_ptr", issue = "69835")] -pub unsafe fn size_of_val_raw(val: *const T) -> usize { +#[rustc_const_unstable(feature = "const_size_of_val_raw", issue = "46571")] +pub const unsafe fn size_of_val_raw(val: *const T) -> usize { intrinsics::size_of_val(val) } @@ -510,7 +511,8 @@ pub const fn align_of_val(val: &T) -> usize { /// ``` #[inline] #[unstable(feature = "layout_for_ptr", issue = "69835")] -pub unsafe fn align_of_val_raw(val: *const T) -> usize { +#[rustc_const_unstable(feature = "const_align_of_val_raw", issue = "46571")] +pub const unsafe fn align_of_val_raw(val: *const T) -> usize { intrinsics::min_align_of_val(val) } diff --git a/library/core/tests/iter.rs b/library/core/tests/iter.rs index ec4b49da384c3..7376e7848eff5 100644 --- a/library/core/tests/iter.rs +++ b/library/core/tests/iter.rs @@ -3505,3 +3505,85 @@ pub fn extend_for_unit() { } assert_eq!(x, 5); } + +#[test] +fn test_intersperse() { + let xs = ["a", "", "b", "c"]; + let v: Vec<&str> = xs.iter().map(|x| x.clone()).intersperse(", ").collect(); + let text: String = v.concat(); + assert_eq!(text, "a, , b, c".to_string()); + + let ys = [0, 1, 2, 3]; + let mut it = ys[..0].iter().map(|x| *x).intersperse(1); + assert!(it.next() == None); +} + +#[test] +fn test_intersperse_size_hint() { + let xs = ["a", "", "b", "c"]; + let mut iter = xs.iter().map(|x| x.clone()).intersperse(", "); + assert_eq!(iter.size_hint(), (7, Some(7))); + + assert_eq!(iter.next(), Some("a")); + assert_eq!(iter.size_hint(), (6, Some(6))); + assert_eq!(iter.next(), Some(", ")); + assert_eq!(iter.size_hint(), (5, Some(5))); + + assert_eq!([].iter().intersperse(&()).size_hint(), (0, Some(0))); +} + +#[test] +fn test_fold_specialization_intersperse() { + let mut iter = (1..2).intersperse(0); + iter.clone().for_each(|x| assert_eq!(Some(x), iter.next())); + + let mut iter = (1..3).intersperse(0); + iter.clone().for_each(|x| assert_eq!(Some(x), iter.next())); + + let mut iter = (1..4).intersperse(0); + iter.clone().for_each(|x| assert_eq!(Some(x), iter.next())); +} + +#[test] +fn test_try_fold_specialization_intersperse_ok() { + let mut iter = (1..2).intersperse(0); + iter.clone().try_for_each(|x| { + assert_eq!(Some(x), iter.next()); + Some(()) + }); + + let mut iter = (1..3).intersperse(0); + iter.clone().try_for_each(|x| { + assert_eq!(Some(x), iter.next()); + Some(()) + }); + + let mut iter = (1..4).intersperse(0); + iter.clone().try_for_each(|x| { + assert_eq!(Some(x), iter.next()); + Some(()) + }); +} + +#[test] +fn test_try_fold_specialization_intersperse_err() { + let orig_iter = ["a", "b"].iter().copied().intersperse("-"); + + // Abort after the first item. + let mut iter = orig_iter.clone(); + iter.try_for_each(|_| None::<()>); + assert_eq!(iter.next(), Some("-")); + assert_eq!(iter.next(), Some("b")); + assert_eq!(iter.next(), None); + + // Abort after the second item. + let mut iter = orig_iter.clone(); + iter.try_for_each(|item| if item == "-" { None } else { Some(()) }); + assert_eq!(iter.next(), Some("b")); + assert_eq!(iter.next(), None); + + // Abort after the third item. 
diff --git a/library/core/tests/lib.rs b/library/core/tests/lib.rs
index 9e8ec7060216b..fba3294e0bbdb 100644
--- a/library/core/tests/lib.rs
+++ b/library/core/tests/lib.rs
@@ -51,6 +51,7 @@
 #![feature(array_value_iter)]
 #![feature(iter_advance_by)]
 #![feature(iter_partition_in_place)]
+#![feature(iter_intersperse)]
 #![feature(iter_is_partitioned)]
 #![feature(iter_order_by)]
 #![feature(cmp_min_max_by)]
diff --git a/src/bootstrap/bootstrap.py b/src/bootstrap/bootstrap.py
index 97f40815b87b4..b8bae69d06330 100644
--- a/src/bootstrap/bootstrap.py
+++ b/src/bootstrap/bootstrap.py
@@ -351,11 +351,13 @@ def output(filepath):
     with open(tmp, 'w') as f:
         yield f
     try:
-        os.remove(filepath)  # PermissionError/OSError on Win32 if in use
-        os.rename(tmp, filepath)
+        if os.path.exists(filepath):
+            os.remove(filepath)  # PermissionError/OSError on Win32 if in use
     except OSError:
         shutil.copy2(tmp, filepath)
        os.remove(tmp)
+        return
+    os.rename(tmp, filepath)
 
 
 class RustBuild(object):
diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs
index e56556b5af70c..766b02a5acd69 100644
--- a/src/librustdoc/clean/mod.rs
+++ b/src/librustdoc/clean/mod.rs
@@ -941,7 +941,7 @@ impl<'a> Clean<Arguments> for (&'a [hir::Ty<'a>], &'a [Ident]) {
             .iter()
             .enumerate()
             .map(|(i, ty)| {
-                let mut name = self.1.get(i).map(|ident| ident.name).unwrap_or(kw::Invalid);
+                let mut name = self.1.get(i).map(|ident| ident.name).unwrap_or(kw::Empty);
                 if name.is_empty() {
                     name = kw::Underscore;
                 }
@@ -1000,7 +1000,7 @@ impl<'tcx> Clean<FnDecl> for (DefId, ty::PolyFnSig<'tcx>) {
                 .iter()
                 .map(|t| Argument {
                     type_: t.clean(cx),
-                    name: names.next().map(|i| i.name).unwrap_or(kw::Invalid),
+                    name: names.next().map(|i| i.name).unwrap_or(kw::Empty),
                 })
                 .collect(),
         },
diff --git a/src/librustdoc/clean/utils.rs b/src/librustdoc/clean/utils.rs
index 2cde0c209ee9b..a6c090c6576ef 100644
--- a/src/librustdoc/clean/utils.rs
+++ b/src/librustdoc/clean/utils.rs
@@ -8,7 +8,6 @@ use crate::clean::{
 };
 use crate::core::DocContext;
 
-use itertools::Itertools;
 use rustc_data_structures::fx::FxHashSet;
 use rustc_hir as hir;
 use rustc_hir::def::{DefKind, Res};
diff --git a/src/librustdoc/html/render/mod.rs b/src/librustdoc/html/render/mod.rs
index d8fadbf57e8db..a83a3600383ee 100644
--- a/src/librustdoc/html/render/mod.rs
+++ b/src/librustdoc/html/render/mod.rs
@@ -2086,8 +2086,8 @@ fn item_module(w: &mut Buffer, cx: &Context<'_>, item: &clean::Item, items: &[cl
                 (true, false) => return Ordering::Greater,
             }
         }
-        let lhs = i1.name.unwrap_or(kw::Invalid).as_str();
-        let rhs = i2.name.unwrap_or(kw::Invalid).as_str();
+        let lhs = i1.name.unwrap_or(kw::Empty).as_str();
+        let rhs = i2.name.unwrap_or(kw::Empty).as_str();
         compare_names(&lhs, &rhs)
     }
 
@@ -4207,7 +4207,7 @@ fn print_sidebar(cx: &Context<'_>, it: &clean::Item, buffer: &mut Buffer, cache:
         ty: \"{ty}\", \
         relpath: \"{path}\"\
         }};",
-        name = it.name.unwrap_or(kw::Invalid),
+        name = it.name.unwrap_or(kw::Empty),
         ty = it.type_(),
         path = relpath
     );
diff --git a/src/librustdoc/lib.rs b/src/librustdoc/lib.rs
index e8bf664d45c4d..7ed64c5813fcd 100644
--- a/src/librustdoc/lib.rs
+++ b/src/librustdoc/lib.rs
@@ -17,6 +17,7 @@
 #![feature(type_ascription)]
 #![feature(split_inclusive)]
 #![feature(str_split_once)]
+#![feature(iter_intersperse)]
 #![recursion_limit = "256"]
 
 #[macro_use]
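The bootstrap.py change above fixes `output()` so that a destination which cannot be removed (typically a file in use on Windows) is overwritten via copy instead of rename, and a missing destination no longer trips the removal path. A rough Rust rendering of that control flow, for illustration only; the build script itself stays in Python, and `replace_file` plus the file names below are invented for this sketch:

```rust
use std::fs;
use std::io;
use std::path::Path;

// Mirrors the fixed `output()` logic: try remove-then-rename, and fall back
// to copying over the destination when removal fails (e.g. file in use).
fn replace_file(tmp: &Path, dest: &Path) -> io::Result<()> {
    if dest.exists() && fs::remove_file(dest).is_err() {
        // Destination is busy: copy the new contents over it and drop the
        // temporary file, like the shutil.copy2 fallback in bootstrap.py.
        fs::copy(tmp, dest)?;
        fs::remove_file(tmp)?;
        return Ok(());
    }
    fs::rename(tmp, dest)
}

fn main() -> io::Result<()> {
    fs::write("demo.tmp", b"new contents")?;
    replace_file(Path::new("demo.tmp"), Path::new("demo.txt"))?;
    Ok(())
}
```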
diff --git a/src/test/ui/associated-types/defaults-wf.stderr b/src/test/ui/associated-types/defaults-wf.stderr
index 4c43e6a182dc9..d4fa5be742ff3 100644
--- a/src/test/ui/associated-types/defaults-wf.stderr
+++ b/src/test/ui/associated-types/defaults-wf.stderr
@@ -4,7 +4,7 @@ error[E0277]: the size for values of type `[u8]` cannot be known at compilation
 LL | type Ty = Vec<[u8]>;
    | ^^^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time
    |
-  ::: $SRC_DIR/alloc/src/vec.rs:LL:COL
+  ::: $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
    |
 LL | pub struct Vec<T> {
    |            - required by this bound in `Vec`
diff --git a/src/test/ui/bad/bad-sized.stderr b/src/test/ui/bad/bad-sized.stderr
index 60a5bb9f78666..260d78b543a4c 100644
--- a/src/test/ui/bad/bad-sized.stderr
+++ b/src/test/ui/bad/bad-sized.stderr
@@ -15,7 +15,7 @@ error[E0277]: the size for values of type `dyn Trait` cannot be known at compila
 LL | let x: Vec<dyn Trait> = Vec::new();
    |        ^^^^^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time
    |
-  ::: $SRC_DIR/alloc/src/vec.rs:LL:COL
+  ::: $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
    |
 LL | pub struct Vec<T> {
    |            - required by this bound in `Vec`
diff --git a/src/test/ui/const-generics/issues/issue-80062.rs b/src/test/ui/const-generics/issues/issue-80062.rs
new file mode 100644
index 0000000000000..56dc53298fb35
--- /dev/null
+++ b/src/test/ui/const-generics/issues/issue-80062.rs
@@ -0,0 +1,10 @@
+// Regression test for issue #80062 (fixed by `min_const_generics`)
+
+fn sof<T>() -> T { unimplemented!() }
+
+fn test<T>() {
+    let _: [u8; sof::<T>()];
+    //~^ ERROR generic parameters may not be used in const operations
+}
+
+fn main() {}
diff --git a/src/test/ui/const-generics/issues/issue-80062.stderr b/src/test/ui/const-generics/issues/issue-80062.stderr
new file mode 100644
index 0000000000000..aad8907bda2d0
--- /dev/null
+++ b/src/test/ui/const-generics/issues/issue-80062.stderr
@@ -0,0 +1,11 @@
+error: generic parameters may not be used in const operations
+  --> $DIR/issue-80062.rs:6:23
+   |
+LL |     let _: [u8; sof::<T>()];
+   |                       ^ cannot perform const operation using `T`
+   |
+   = note: type parameters may not be used in const expressions
+   = help: use `#![feature(const_generics)]` and `#![feature(const_evaluatable_checked)]` to allow generic const expressions
+
+error: aborting due to previous error
+
diff --git a/src/test/ui/consts/const-size_of_val-align_of_val.rs b/src/test/ui/consts/const-size_of_val-align_of_val.rs
index e8e6f1d390099..5c0d7d94d64f1 100644
--- a/src/test/ui/consts/const-size_of_val-align_of_val.rs
+++ b/src/test/ui/consts/const-size_of_val-align_of_val.rs
@@ -1,6 +1,7 @@
 // run-pass
 
 #![feature(const_size_of_val, const_align_of_val)]
+#![feature(const_size_of_val_raw, const_align_of_val_raw, layout_for_ptr)]
 
 use std::mem;
 
@@ -32,6 +33,9 @@ const ALIGN_OF_UGH: usize = mem::align_of_val(&UGH);
 
 const SIZE_OF_SLICE: usize = mem::size_of_val("foobar".as_bytes());
 
+const SIZE_OF_DANGLING: usize = unsafe { mem::size_of_val_raw(0x100 as *const i32) };
+const ALIGN_OF_DANGLING: usize = unsafe { mem::align_of_val_raw(0x100 as *const i16) };
+
 fn main() {
     assert_eq!(SIZE_OF_FOO, mem::size_of::<Foo>());
     assert_eq!(SIZE_OF_BAR, mem::size_of::<Bar>());
@@ -41,5 +45,8 @@ fn main() {
     assert_eq!(ALIGN_OF_BAR, mem::align_of::<Bar>());
     assert_eq!(ALIGN_OF_UGH, mem::align_of::<Ugh>());
 
+    assert_eq!(SIZE_OF_DANGLING, mem::size_of::<i32>());
+    assert_eq!(ALIGN_OF_DANGLING, mem::align_of::<i16>());
+
     assert_eq!(SIZE_OF_SLICE, "foobar".len());
 }
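To show what the `mem` change buys beyond the test above: with the feature gates enabled, layout queries on raw pointers become usable in `const` items, even for a dangling pointer when the pointee is `Sized`. A nightly-only sketch; the constant names are illustrative:

```rust
#![feature(const_size_of_val_raw, const_align_of_val_raw, layout_for_ptr)]

use std::mem;

// For a `Sized` pointee the pointer is never dereferenced, so a dangling
// address like 0x100 is fine; only the type's static layout is consulted.
const WORD_SIZE: usize = unsafe { mem::size_of_val_raw(0x100 as *const u64) };
const WORD_ALIGN: usize = unsafe { mem::align_of_val_raw(0x100 as *const u64) };

fn main() {
    assert_eq!(WORD_SIZE, 8);
    assert_eq!(WORD_ALIGN, mem::align_of::<u64>());
}
```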
diff --git a/src/test/ui/issues/issue-20433.stderr b/src/test/ui/issues/issue-20433.stderr
index 3f7226c79bf2a..5fed02164b5b7 100644
--- a/src/test/ui/issues/issue-20433.stderr
+++ b/src/test/ui/issues/issue-20433.stderr
@@ -4,7 +4,7 @@ error[E0277]: the size for values of type `[i32]` cannot be known at compilation
 LL | fn iceman(c: Vec<[i32]>) {}
    |              ^^^^^^^^^^ doesn't have a size known at compile-time
    |
-  ::: $SRC_DIR/alloc/src/vec.rs:LL:COL
+  ::: $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
    |
 LL | pub struct Vec<T> {
    |            - required by this bound in `Vec`
diff --git a/src/test/ui/lint/lint-const-item-mutation.stderr b/src/test/ui/lint/lint-const-item-mutation.stderr
index 74505eeb987c6..3973af540c81f 100644
--- a/src/test/ui/lint/lint-const-item-mutation.stderr
+++ b/src/test/ui/lint/lint-const-item-mutation.stderr
@@ -107,7 +107,7 @@ LL |     VEC.push(0);
    = note: each usage of a `const` item creates a new temporary
    = note: the mutable reference will refer to this temporary, not the original `const` item
 note: mutable reference created due to call to this method
-  --> $SRC_DIR/alloc/src/vec.rs:LL:COL
+  --> $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
    |
 LL | / pub fn push(&mut self, value: T) {
 LL | |     // This will panic or abort if we would allocate > isize::MAX bytes
diff --git a/src/tools/clippy/clippy_lints/src/lifetimes.rs b/src/tools/clippy/clippy_lints/src/lifetimes.rs
index 4d737b3f49b03..e84c8b4e5b3e0 100644
--- a/src/tools/clippy/clippy_lints/src/lifetimes.rs
+++ b/src/tools/clippy/clippy_lints/src/lifetimes.rs
@@ -501,7 +501,7 @@ impl<'tcx> Visitor<'tcx> for BodyLifetimeChecker {
 
     // for lifetimes as parameters of generics
     fn visit_lifetime(&mut self, lifetime: &'tcx Lifetime) {
-        if lifetime.name.ident().name != kw::Invalid && lifetime.name.ident().name != kw::StaticLifetime {
+        if lifetime.name.ident().name != kw::Empty && lifetime.name.ident().name != kw::StaticLifetime {
            self.lifetimes_used_in_body = true;
        }
    }
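Context for the clippy hunk: elided lifetimes are recorded with the empty name (hence the rename from `kw::Invalid` to `kw::Empty`), and `BodyLifetimeChecker` deliberately skips both that placeholder and `'static`. A plain-Rust illustration of the two cases it distinguishes, not lint code; the item names are invented:

```rust
// Elided lifetimes: recorded as placeholders with an empty name, which the
// checker above ignores via the `kw::Empty` comparison.
fn first_word(s: &str) -> &str {
    s.split_whitespace().next().unwrap_or("")
}

struct Wrapper<'a>(&'a str);

// An explicit `'a` is a real identifier and does count as a lifetime used in
// the body; `'static` is skipped by the second half of the condition.
fn keep<'a>(w: Wrapper<'a>, fallback: &'static str) -> &'a str {
    if w.0.is_empty() { fallback } else { w.0 }
}

fn main() {
    assert_eq!(first_word("hello world"), "hello");
    assert_eq!(keep(Wrapper(""), "none"), "none");
}
```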