Skip to content

Commit

Permalink
cleanup hashes and span in the query key
Browse files Browse the repository at this point in the history
Co-authored-by: Felix Rath <[email protected]>
  • Loading branch information
SparrowLii and futile committed Aug 4, 2024
1 parent 116b618 commit a1cec0f
Show file tree
Hide file tree
Showing 12 changed files with 30 additions and 41 deletions.
2 changes: 1 addition & 1 deletion compiler/rustc_ast/src/ast.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2837,7 +2837,7 @@ impl UseTree {
/// Distinguishes between `Attribute`s that decorate items and Attributes that
/// are contained as statements within items. These two cases need to be
/// distinguished for pretty-printing.
#[derive(Clone, PartialEq, Encodable, Decodable, Debug, Copy, HashStable_Generic, Hash)]
#[derive(Clone, PartialEq, Encodable, Decodable, Debug, Copy, HashStable_Generic)]
pub enum AttrStyle {
Outer,
Inner,
Expand Down
8 changes: 4 additions & 4 deletions compiler/rustc_ast/src/token.rs
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,7 @@ pub enum Delimiter {
// type. This means that float literals like `1f32` are classified by this type
// as `Int`. Only upon conversion to `ast::LitKind` will such a literal be
// given the `Float` kind.
#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic, Hash)]
#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
pub enum LitKind {
Bool, // AST only, must never appear in a `Token`
Byte,
Expand All @@ -84,7 +84,7 @@ pub enum LitKind {
}

/// A literal token.
#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic, Hash)]
#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
pub struct Lit {
pub kind: LitKind,
pub symbol: Symbol,
Expand Down Expand Up @@ -229,7 +229,7 @@ fn ident_can_begin_type(name: Symbol, span: Span, is_raw: IdentIsRaw) -> bool {
.contains(&name)
}

#[derive(PartialEq, Encodable, Decodable, Debug, Copy, Clone, HashStable_Generic, Hash)]
#[derive(PartialEq, Encodable, Decodable, Debug, Copy, Clone, HashStable_Generic)]
pub enum IdentIsRaw {
No,
Yes,
Expand Down Expand Up @@ -911,7 +911,7 @@ pub enum Nonterminal {
NtVis(P<ast::Visibility>),
}

#[derive(Debug, Copy, Clone, PartialEq, Encodable, Decodable, Hash)]
#[derive(Debug, Copy, Clone, PartialEq, Encodable, Decodable)]
pub enum NonterminalKind {
Item,
Block,
Expand Down
16 changes: 3 additions & 13 deletions compiler/rustc_ast/src/tokenstream.rs
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@
//! ownership of the original.
use std::borrow::Cow;
use std::hash::Hash;
use std::{cmp, fmt, iter};

use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
Expand Down Expand Up @@ -106,15 +105,6 @@ where
}
}
}

/*impl Hash for TokenStream {
fn hash<H: Hasher>(&self, state: &mut H) {
for sub_tt in self.trees() {
sub_tt.hash(state);
}
}
}*/

pub trait ToAttrTokenStream: sync::DynSend + sync::DynSync {
fn to_attr_token_stream(&self) -> AttrTokenStream;
}
Expand Down Expand Up @@ -309,7 +299,7 @@ pub struct TokenStream(pub(crate) Lrc<Vec<TokenTree>>);
/// compound token. Used for conversions to `proc_macro::Spacing`. Also used to
/// guide pretty-printing, which is where the `JointHidden` value (which isn't
/// part of `proc_macro::Spacing`) comes in useful.
#[derive(Clone, Copy, Debug, PartialEq, Encodable, Decodable, HashStable_Generic, Hash)]
#[derive(Clone, Copy, Debug, PartialEq, Encodable, Decodable, HashStable_Generic)]
pub enum Spacing {
/// The token cannot join with the following token to form a compound
/// token.
Expand Down Expand Up @@ -737,7 +727,7 @@ impl TokenTreeCursor {
}
}

#[derive(Debug, Copy, Clone, PartialEq, Encodable, Decodable, HashStable_Generic, Hash)]
#[derive(Debug, Copy, Clone, PartialEq, Encodable, Decodable, HashStable_Generic)]
pub struct DelimSpan {
pub open: Span,
pub close: Span,
Expand All @@ -761,7 +751,7 @@ impl DelimSpan {
}
}

#[derive(Copy, Clone, Debug, PartialEq, Encodable, Decodable, HashStable_Generic, Hash)]
#[derive(Copy, Clone, Debug, PartialEq, Encodable, Decodable, HashStable_Generic)]
pub struct DelimSpacing {
pub open: Spacing,
pub close: Spacing,
Expand Down
1 change: 0 additions & 1 deletion compiler/rustc_expand/src/base.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1083,7 +1083,6 @@ pub trait ResolverExpand {
fn expand_legacy_bang(
&self,
invoc_id: LocalExpnId,
span: Span,
current_expansion: LocalExpnId,
) -> Result<(TokenStream, usize), CanRetry>;
}
Expand Down
18 changes: 9 additions & 9 deletions compiler/rustc_expand/src/expand.rs
Original file line number Diff line number Diff line change
Expand Up @@ -400,13 +400,13 @@ pub struct MacroExpander<'a, 'b> {

pub fn expand_legacy_bang<'tcx>(
tcx: TyCtxt<'tcx>,
key: (LocalExpnId, Span, LocalExpnId),
key: (LocalExpnId, LocalExpnId),
) -> Result<(&'tcx TokenStream, usize), CanRetry> {
let (invoc_id, span, current_expansion) = key;
let (invoc_id, current_expansion) = key;
let map = tcx.macro_map.borrow();
let (arg, expander) = map.get(&invoc_id).as_ref().unwrap();
let (arg, span, expander) = map.get(&invoc_id).as_ref().unwrap();
expander
.expand(&tcx.sess, span, arg.clone(), current_expansion)
.expand(&tcx.sess, *span, arg.clone(), current_expansion)
.map(|(tts, i)| (tcx.arena.alloc(tts) as &TokenStream, i))
}

Expand Down Expand Up @@ -709,11 +709,11 @@ impl<'a, 'b> MacroExpander<'a, 'b> {

// Macros defined in the current crate have a real node id,
// whereas macros from an external crate have a dummy id.
let tok_result: Box<dyn MacResult> = match self.cx.resolver.expand_legacy_bang(
invoc.expansion_data.id,
span,
self.cx.current_expansion.id,
) {
let tok_result: Box<dyn MacResult> = match self
.cx
.resolver
.expand_legacy_bang(invoc.expansion_data.id, self.cx.current_expansion.id)
{
Ok((tts, i)) => {
if self.cx.trace_macros() {
let msg = format!("to `{}`", pprust::tts_to_string(&tts));
Expand Down
4 changes: 2 additions & 2 deletions compiler/rustc_expand/src/mbe/macro_rules.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,16 +2,16 @@ use std::borrow::Cow;
use std::collections::hash_map::Entry;
use std::{mem, slice};

use ast::token::IdentIsRaw;
use rustc_ast as ast;
use rustc_ast::token::NtPatKind::*;
use rustc_ast::token::TokenKind::*;
use rustc_ast::token::{self, Delimiter, NonterminalKind, Token, TokenKind};
use rustc_ast::token::{self, Delimiter, IdentIsRaw, NonterminalKind, Token, TokenKind};
use rustc_ast::tokenstream::{DelimSpan, TokenStream};
use rustc_ast::{NodeId, DUMMY_NODE_ID};
use rustc_ast_pretty::pprust;
use rustc_attr::{self as attr, TransparencyError};
use rustc_data_structures::fx::{FxHashMap, FxIndexMap};
use rustc_data_structures::sync::Lrc;
use rustc_errors::{Applicability, ErrorGuaranteed};
use rustc_feature::Features;
use rustc_lint_defs::builtin::{
Expand Down
3 changes: 2 additions & 1 deletion compiler/rustc_expand/src/mbe/transcribe.rs
Original file line number Diff line number Diff line change
Expand Up @@ -404,7 +404,8 @@ fn maybe_use_metavar_location(
})
);
if undelimited_seq {
// Do not record metavar spans for tokens from undelimited sequences, for perf reasons. return orig_tt.clone();
// Do not record metavar spans for tokens from undelimited sequences, for perf reasons.
return orig_tt.clone();
}

let insert = |mspans: &mut FxHashMap<_, _>, s, ms| match mspans.try_insert(s, ms) {
Expand Down
2 changes: 1 addition & 1 deletion compiler/rustc_middle/src/arena.rs
Original file line number Diff line number Diff line change
Expand Up @@ -119,7 +119,7 @@ macro_rules! arena_types {
[decode] specialization_graph: rustc_middle::traits::specialization_graph::Graph,
[] crate_inherent_impls: rustc_middle::ty::CrateInherentImpls,
[] hir_owner_nodes: rustc_hir::OwnerNodes<'tcx>,
[] expand_lagacy_bang: rustc_ast::tokenstream::TokenStream,
[] expand_legacy_bang: rustc_ast::tokenstream::TokenStream,
]);
)
}
Expand Down
4 changes: 2 additions & 2 deletions compiler/rustc_middle/src/query/keys.rs
Original file line number Diff line number Diff line change
Expand Up @@ -592,11 +592,11 @@ impl<'tcx> Key for (ValidityRequirement, ty::ParamEnvAnd<'tcx, Ty<'tcx>>) {
}
}

impl Key for (LocalExpnId, Span, LocalExpnId) {
impl Key for (LocalExpnId, LocalExpnId) {
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, _: TyCtxt<'_>) -> Span {
self.1
DUMMY_SP
}

fn ty_def_id(&self) -> Option<DefId> {
Expand Down
4 changes: 2 additions & 2 deletions compiler/rustc_middle/src/query/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -111,10 +111,10 @@ rustc_queries! {
desc { "triggering a delayed bug for testing incremental" }
}

query expand_legacy_bang(key: (LocalExpnId, Span, LocalExpnId)) -> Result<(&'tcx TokenStream, usize), CanRetry> {
query expand_legacy_bang(key: (LocalExpnId, LocalExpnId)) -> Result<(&'tcx TokenStream, usize), CanRetry> {
eval_always
no_hash
desc { "expand lagacy bang" }
desc { "expand legacy bang" }
}

/// Collects the list of all tools registered using `#![register_tool]`.
Expand Down
2 changes: 1 addition & 1 deletion compiler/rustc_middle/src/ty/context.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1310,7 +1310,7 @@ pub struct GlobalCtxt<'tcx> {
pub macro_map: RwLock<
FxHashMap<
LocalExpnId,
(TokenStream, Lrc<dyn TcxMacroExpander + sync::DynSync + sync::DynSend>),
(TokenStream, Span, Lrc<dyn TcxMacroExpander + sync::DynSync + sync::DynSend>),
>,
>,
}
Expand Down
7 changes: 3 additions & 4 deletions compiler/rustc_resolve/src/macros.rs
Original file line number Diff line number Diff line change
Expand Up @@ -336,11 +336,11 @@ impl<'a, 'tcx> ResolverExpand for Resolver<'a, 'tcx> {
self.create_stable_hashing_context(),
);
if let SyntaxExtensionKind::TcxLegacyBang(tcx_expander) = &ext.kind {
if let InvocationKind::Bang { ref mac, .. } = invoc.kind {
if let InvocationKind::Bang { ref mac, span } = invoc.kind {
self.tcx
.macro_map
.borrow_mut()
.insert(invoc_id, (mac.args.tokens.clone(), tcx_expander.clone()));
.insert(invoc_id, (mac.args.tokens.clone(), span, tcx_expander.clone()));
}
}

Expand Down Expand Up @@ -538,11 +538,10 @@ impl<'a, 'tcx> ResolverExpand for Resolver<'a, 'tcx> {
fn expand_legacy_bang(
&self,
invoc_id: LocalExpnId,
span: Span,
current_expansion: LocalExpnId,
) -> Result<(TokenStream, usize), CanRetry> {
self.tcx()
.expand_legacy_bang((invoc_id, span, current_expansion))
.expand_legacy_bang((invoc_id, current_expansion))
.map(|(tts, i)| (tts.clone(), i))
}
}
Expand Down

0 comments on commit a1cec0f

Please sign in to comment.