diff --git a/compiler/rustc_ast/src/ast.rs b/compiler/rustc_ast/src/ast.rs
index aa857e48412a4..fc1af3fc3dd11 100644
--- a/compiler/rustc_ast/src/ast.rs
+++ b/compiler/rustc_ast/src/ast.rs
@@ -2837,7 +2837,7 @@ impl UseTree {
 /// Distinguishes between `Attribute`s that decorate items and Attributes that
 /// are contained as statements within items. These two cases need to be
 /// distinguished for pretty-printing.
-#[derive(Clone, PartialEq, Encodable, Decodable, Debug, Copy, HashStable_Generic, Hash)]
+#[derive(Clone, PartialEq, Encodable, Decodable, Debug, Copy, HashStable_Generic)]
 pub enum AttrStyle {
     Outer,
     Inner,
diff --git a/compiler/rustc_ast/src/token.rs b/compiler/rustc_ast/src/token.rs
index 66e1af245cbbb..3cfd5dc271d6c 100644
--- a/compiler/rustc_ast/src/token.rs
+++ b/compiler/rustc_ast/src/token.rs
@@ -67,7 +67,7 @@ pub enum Delimiter {
 // type. This means that float literals like `1f32` are classified by this type
 // as `Int`. Only upon conversion to `ast::LitKind` will such a literal be
 // given the `Float` kind.
-#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic, Hash)]
+#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
 pub enum LitKind {
     Bool, // AST only, must never appear in a `Token`
     Byte,
@@ -84,7 +84,7 @@ pub enum LitKind {
 }
 
 /// A literal token.
-#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic, Hash)]
+#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
 pub struct Lit {
     pub kind: LitKind,
     pub symbol: Symbol,
@@ -229,7 +229,7 @@ fn ident_can_begin_type(name: Symbol, span: Span, is_raw: IdentIsRaw) -> bool {
     .contains(&name)
 }
 
-#[derive(PartialEq, Encodable, Decodable, Debug, Copy, Clone, HashStable_Generic, Hash)]
+#[derive(PartialEq, Encodable, Decodable, Debug, Copy, Clone, HashStable_Generic)]
 pub enum IdentIsRaw {
     No,
     Yes,
@@ -911,7 +911,7 @@ pub enum Nonterminal {
     NtVis(P<ast::Visibility>),
 }
 
-#[derive(Debug, Copy, Clone, PartialEq, Encodable, Decodable, Hash)]
+#[derive(Debug, Copy, Clone, PartialEq, Encodable, Decodable)]
 pub enum NonterminalKind {
     Item,
     Block,
diff --git a/compiler/rustc_ast/src/tokenstream.rs b/compiler/rustc_ast/src/tokenstream.rs
index 4371ff4bd9308..0c6188ae6506b 100644
--- a/compiler/rustc_ast/src/tokenstream.rs
+++ b/compiler/rustc_ast/src/tokenstream.rs
@@ -14,7 +14,6 @@
 //! ownership of the original.
 
 use std::borrow::Cow;
-use std::hash::Hash;
 use std::{cmp, fmt, iter};
 
 use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
@@ -106,15 +105,6 @@ where
         }
     }
 }
-
-/*impl Hash for TokenStream {
-    fn hash<H: Hasher>(&self, state: &mut H) {
-        for sub_tt in self.trees() {
-            sub_tt.hash(state);
-        }
-    }
-}*/
-
 pub trait ToAttrTokenStream: sync::DynSend + sync::DynSync {
     fn to_attr_token_stream(&self) -> AttrTokenStream;
 }
@@ -309,7 +299,7 @@ pub struct TokenStream(pub(crate) Lrc<Vec<TokenTree>>);
 /// compound token. Used for conversions to `proc_macro::Spacing`. Also used to
 /// guide pretty-printing, which is where the `JointHidden` value (which isn't
 /// part of `proc_macro::Spacing`) comes in useful.
-#[derive(Clone, Copy, Debug, PartialEq, Encodable, Decodable, HashStable_Generic, Hash)]
+#[derive(Clone, Copy, Debug, PartialEq, Encodable, Decodable, HashStable_Generic)]
 pub enum Spacing {
     /// The token cannot join with the following token to form a compound
     /// token.
@@ -737,7 +727,7 @@ impl TokenTreeCursor {
     }
 }
 
-#[derive(Debug, Copy, Clone, PartialEq, Encodable, Decodable, HashStable_Generic, Hash)]
+#[derive(Debug, Copy, Clone, PartialEq, Encodable, Decodable, HashStable_Generic)]
 pub struct DelimSpan {
     pub open: Span,
     pub close: Span,
@@ -761,7 +751,7 @@ impl DelimSpan {
     }
 }
 
-#[derive(Copy, Clone, Debug, PartialEq, Encodable, Decodable, HashStable_Generic, Hash)]
+#[derive(Copy, Clone, Debug, PartialEq, Encodable, Decodable, HashStable_Generic)]
 pub struct DelimSpacing {
     pub open: Spacing,
     pub close: Spacing,
diff --git a/compiler/rustc_expand/src/base.rs b/compiler/rustc_expand/src/base.rs
index b57c650eb848a..18404a1671a35 100644
--- a/compiler/rustc_expand/src/base.rs
+++ b/compiler/rustc_expand/src/base.rs
@@ -1083,7 +1083,6 @@ pub trait ResolverExpand {
     fn expand_legacy_bang(
         &self,
         invoc_id: LocalExpnId,
-        span: Span,
         current_expansion: LocalExpnId,
     ) -> Result<(TokenStream, usize), CanRetry>;
 }
diff --git a/compiler/rustc_expand/src/expand.rs b/compiler/rustc_expand/src/expand.rs
index 1dedec03b1ed6..c05ac70b5ce13 100644
--- a/compiler/rustc_expand/src/expand.rs
+++ b/compiler/rustc_expand/src/expand.rs
@@ -400,13 +400,13 @@ pub struct MacroExpander<'a, 'b> {
 
 pub fn expand_legacy_bang<'tcx>(
     tcx: TyCtxt<'tcx>,
-    key: (LocalExpnId, Span, LocalExpnId),
+    key: (LocalExpnId, LocalExpnId),
 ) -> Result<(&'tcx TokenStream, usize), CanRetry> {
-    let (invoc_id, span, current_expansion) = key;
+    let (invoc_id, current_expansion) = key;
     let map = tcx.macro_map.borrow();
-    let (arg, expander) = map.get(&invoc_id).as_ref().unwrap();
+    let (arg, span, expander) = map.get(&invoc_id).as_ref().unwrap();
     expander
-        .expand(&tcx.sess, span, arg.clone(), current_expansion)
+        .expand(&tcx.sess, *span, arg.clone(), current_expansion)
         .map(|(tts, i)| (tcx.arena.alloc(tts) as &TokenStream, i))
 }
 
@@ -709,11 +709,11 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
 
             // Macros defined in the current crate have a real node id,
             // whereas macros from an external crate have a dummy id.\
-            let tok_result: Box = match self.cx.resolver.expand_legacy_bang(
-                invoc.expansion_data.id,
-                span,
-                self.cx.current_expansion.id,
-            ) {
+            let tok_result: Box = match self
+                .cx
+                .resolver
+                .expand_legacy_bang(invoc.expansion_data.id, self.cx.current_expansion.id)
+            {
                 Ok((tts, i)) => {
                     if self.cx.trace_macros() {
                         let msg = format!("to `{}`", pprust::tts_to_string(&tts));
diff --git a/compiler/rustc_expand/src/mbe/macro_rules.rs b/compiler/rustc_expand/src/mbe/macro_rules.rs
index 17954226ee3f3..850e3cd294999 100644
--- a/compiler/rustc_expand/src/mbe/macro_rules.rs
+++ b/compiler/rustc_expand/src/mbe/macro_rules.rs
@@ -2,16 +2,16 @@ use std::borrow::Cow;
 use std::collections::hash_map::Entry;
 use std::{mem, slice};
 
-use ast::token::IdentIsRaw;
 use rustc_ast as ast;
 use rustc_ast::token::NtPatKind::*;
 use rustc_ast::token::TokenKind::*;
-use rustc_ast::token::{self, Delimiter, NonterminalKind, Token, TokenKind};
+use rustc_ast::token::{self, Delimiter, IdentIsRaw, NonterminalKind, Token, TokenKind};
 use rustc_ast::tokenstream::{DelimSpan, TokenStream};
 use rustc_ast::{NodeId, DUMMY_NODE_ID};
 use rustc_ast_pretty::pprust;
 use rustc_attr::{self as attr, TransparencyError};
 use rustc_data_structures::fx::{FxHashMap, FxIndexMap};
+use rustc_data_structures::sync::Lrc;
 use rustc_errors::{Applicability, ErrorGuaranteed};
 use rustc_feature::Features;
 use rustc_lint_defs::builtin::{
diff --git a/compiler/rustc_expand/src/mbe/transcribe.rs b/compiler/rustc_expand/src/mbe/transcribe.rs
index d9d3316aaf3db..b06910595bb25 100644
--- a/compiler/rustc_expand/src/mbe/transcribe.rs
+++ b/compiler/rustc_expand/src/mbe/transcribe.rs
@@ -404,7 +404,8 @@ fn maybe_use_metavar_location(
         })
     );
     if undelimited_seq {
-        // Do not record metavar spans for tokens from undelimited sequences, for perf reasons. return orig_tt.clone();
+        // Do not record metavar spans for tokens from undelimited sequences, for perf reasons.
+        return orig_tt.clone();
     }
 
     let insert = |mspans: &mut FxHashMap<_, _>, s, ms| match mspans.try_insert(s, ms) {
diff --git a/compiler/rustc_middle/src/arena.rs b/compiler/rustc_middle/src/arena.rs
index 5fd74c1ab72dd..1ee50e0bcd87d 100644
--- a/compiler/rustc_middle/src/arena.rs
+++ b/compiler/rustc_middle/src/arena.rs
@@ -119,7 +119,7 @@ macro_rules! arena_types {
             [decode] specialization_graph: rustc_middle::traits::specialization_graph::Graph,
             [] crate_inherent_impls: rustc_middle::ty::CrateInherentImpls,
             [] hir_owner_nodes: rustc_hir::OwnerNodes<'tcx>,
-            [] expand_lagacy_bang: rustc_ast::tokenstream::TokenStream,
+            [] expand_legacy_bang: rustc_ast::tokenstream::TokenStream,
         ]);
     )
 }
diff --git a/compiler/rustc_middle/src/query/keys.rs b/compiler/rustc_middle/src/query/keys.rs
index 37dc77f8f3484..44c2d7c2d1ad7 100644
--- a/compiler/rustc_middle/src/query/keys.rs
+++ b/compiler/rustc_middle/src/query/keys.rs
@@ -592,11 +592,11 @@ impl<'tcx> Key for (ValidityRequirement, ty::ParamEnvAnd<'tcx, Ty<'tcx>>) {
     }
 }
 
-impl Key for (LocalExpnId, Span, LocalExpnId) {
+impl Key for (LocalExpnId, LocalExpnId) {
     type Cache<V> = DefaultCache<Self, V>;
 
     fn default_span(&self, _: TyCtxt<'_>) -> Span {
-        self.1
+        DUMMY_SP
     }
 
     fn ty_def_id(&self) -> Option<DefId> {
diff --git a/compiler/rustc_middle/src/query/mod.rs b/compiler/rustc_middle/src/query/mod.rs
index 8bbe7f7c57c42..7420f65101fe1 100644
--- a/compiler/rustc_middle/src/query/mod.rs
+++ b/compiler/rustc_middle/src/query/mod.rs
@@ -111,10 +111,10 @@ rustc_queries! {
         desc { "triggering a delayed bug for testing incremental" }
     }
 
-    query expand_legacy_bang(key: (LocalExpnId, Span, LocalExpnId)) -> Result<(&'tcx TokenStream, usize), CanRetry> {
+    query expand_legacy_bang(key: (LocalExpnId, LocalExpnId)) -> Result<(&'tcx TokenStream, usize), CanRetry> {
         eval_always
         no_hash
-        desc { "expand lagacy bang" }
+        desc { "expand legacy bang" }
     }
 
     /// Collects the list of all tools registered using `#![register_tool]`.
diff --git a/compiler/rustc_middle/src/ty/context.rs b/compiler/rustc_middle/src/ty/context.rs
index c7f8e9a94d274..c72607ae2acb4 100644
--- a/compiler/rustc_middle/src/ty/context.rs
+++ b/compiler/rustc_middle/src/ty/context.rs
@@ -1310,7 +1310,7 @@ pub struct GlobalCtxt<'tcx> {
     pub macro_map: RwLock<
         FxHashMap<
             LocalExpnId,
-            (TokenStream, Lrc),
+            (TokenStream, Span, Lrc),
         >,
     >,
 }
diff --git a/compiler/rustc_resolve/src/macros.rs b/compiler/rustc_resolve/src/macros.rs
index e71c0b40578dc..aa5796680919b 100644
--- a/compiler/rustc_resolve/src/macros.rs
+++ b/compiler/rustc_resolve/src/macros.rs
@@ -336,11 +336,11 @@ impl<'a, 'tcx> ResolverExpand for Resolver<'a, 'tcx> {
             self.create_stable_hashing_context(),
         );
         if let SyntaxExtensionKind::TcxLegacyBang(tcx_expander) = &ext.kind {
-            if let InvocationKind::Bang { ref mac, .. } = invoc.kind {
+            if let InvocationKind::Bang { ref mac, span } = invoc.kind {
                 self.tcx
                     .macro_map
                     .borrow_mut()
-                    .insert(invoc_id, (mac.args.tokens.clone(), tcx_expander.clone()));
+                    .insert(invoc_id, (mac.args.tokens.clone(), span, tcx_expander.clone()));
             }
         }
 
@@ -538,11 +538,10 @@ impl<'a, 'tcx> ResolverExpand for Resolver<'a, 'tcx> {
     fn expand_legacy_bang(
        &self,
         invoc_id: LocalExpnId,
-        span: Span,
         current_expansion: LocalExpnId,
     ) -> Result<(TokenStream, usize), CanRetry> {
         self.tcx()
-            .expand_legacy_bang((invoc_id, span, current_expansion))
+            .expand_legacy_bang((invoc_id, current_expansion))
             .map(|(tts, i)| (tts.clone(), i))
     }
 }