From f45b080965036d5386087c61b66fd93dff406cdc Mon Sep 17 00:00:00 2001 From: Wyatt Herkamp Date: Fri, 8 Mar 2024 11:10:29 -0500 Subject: [PATCH 1/5] Starting Fix for cfg stripping --- crates/cfg/src/cfg_attr.rs | 70 +++++++++++ crates/cfg/src/cfg_expr.rs | 2 +- crates/cfg/src/lib.rs | 4 +- crates/hir-expand/src/cfg_process.rs | 178 +++++++++++++++++++++++++++ crates/hir-expand/src/db.rs | 28 +++-- crates/hir-expand/src/fixup.rs | 6 +- crates/hir-expand/src/lib.rs | 2 +- crates/mbe/src/syntax_bridge.rs | 37 ++++-- 8 files changed, 302 insertions(+), 25 deletions(-) create mode 100644 crates/cfg/src/cfg_attr.rs create mode 100644 crates/hir-expand/src/cfg_process.rs diff --git a/crates/cfg/src/cfg_attr.rs b/crates/cfg/src/cfg_attr.rs new file mode 100644 index 000000000000..4eb5928b1e14 --- /dev/null +++ b/crates/cfg/src/cfg_attr.rs @@ -0,0 +1,70 @@ +use std::{ + fmt::{self, Debug}, + slice::Iter as SliceIter, +}; + +use crate::{cfg_expr::next_cfg_expr, CfgAtom, CfgExpr}; +use tt::{Delimiter, SmolStr, Span}; +/// Represents a `#[cfg_attr(.., my_attr)]` attribute. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct CfgAttr { + /// Expression in `cfg_attr` attribute. + pub cfg_expr: CfgExpr, + /// Inner attribute. + pub attr: tt::Subtree, +} + +impl CfgAttr { + /// Parses a sub tree in the form of (cfg_expr, inner_attribute) + pub fn parse(tt: &tt::Subtree) -> Option> { + let mut iter = tt.token_trees.iter(); + let cfg_expr = next_cfg_expr(&mut iter).unwrap_or(CfgExpr::Invalid); + // FIXME: This is probably not the right way to do this + // Get's the span of the next token tree + let first_span = iter.as_slice().first().map(|tt| tt.first_span())?; + let attr = tt::Subtree { + delimiter: Delimiter::invisible_spanned(first_span), + token_trees: iter.cloned().collect(), + }; + Some(CfgAttr { cfg_expr, attr: attr }) + } +} + +#[cfg(test)] +mod tests { + use expect_test::{expect, Expect}; + use mbe::{syntax_node_to_token_tree, DummyTestSpanMap, DUMMY}; + use syntax::{ast, AstNode}; + + use crate::{CfgAttr, DnfExpr}; + + fn check_dnf(input: &str, expected_dnf: Expect, expected_attrs: Expect) { + let source_file = ast::SourceFile::parse(input).ok().unwrap(); + let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); + let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY); + let Some(CfgAttr { cfg_expr, attr }) = CfgAttr::parse(&tt) else { + assert!(false, "failed to parse cfg_attr"); + return; + }; + + let actual = format!("#![cfg({})]", DnfExpr::new(cfg_expr)); + expected_dnf.assert_eq(&actual); + let actual_attrs = format!("#![{}]", attr); + expected_attrs.assert_eq(&actual_attrs); + } + + #[test] + fn smoke() { + check_dnf( + r#"#![cfg_attr(feature = "nightly", feature(slice_split_at_unchecked))]"#, + expect![[r#"#![cfg(feature = "nightly")]"#]], + expect![r#"#![feature (slice_split_at_unchecked)]"#], + ); + + check_dnf( + r#"#![cfg_attr(not(feature = "std"), no_std)]"#, + expect![[r#"#![cfg(not(feature = "std"))]"#]], + expect![r#"#![no_std]"#], + ); + } +} diff --git a/crates/cfg/src/cfg_expr.rs b/crates/cfg/src/cfg_expr.rs index 4be6ae7481d8..425fa90efe63 100644 --- a/crates/cfg/src/cfg_expr.rs +++ b/crates/cfg/src/cfg_expr.rs @@ -63,7 +63,7 @@ impl CfgExpr { } } -fn next_cfg_expr(it: &mut SliceIter<'_, tt::TokenTree>) -> Option { +pub(crate) fn next_cfg_expr(it: &mut SliceIter<'_, tt::TokenTree>) -> Option { let name = match it.next() { None => return None, Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) => ident.text.clone(), diff --git 
a/crates/cfg/src/lib.rs b/crates/cfg/src/lib.rs index 454d6fc5384b..4d5483d9561d 100644 --- a/crates/cfg/src/lib.rs +++ b/crates/cfg/src/lib.rs @@ -2,7 +2,8 @@ #![warn(rust_2018_idioms, unused_lifetimes)] -mod cfg_expr; +mod cfg_attr; +pub(crate) mod cfg_expr; mod dnf; #[cfg(test)] mod tests; @@ -12,6 +13,7 @@ use std::fmt; use rustc_hash::FxHashSet; use tt::SmolStr; +pub use cfg_attr::CfgAttr; pub use cfg_expr::{CfgAtom, CfgExpr}; pub use dnf::DnfExpr; diff --git a/crates/hir-expand/src/cfg_process.rs b/crates/hir-expand/src/cfg_process.rs new file mode 100644 index 000000000000..7f6158f6bb3c --- /dev/null +++ b/crates/hir-expand/src/cfg_process.rs @@ -0,0 +1,178 @@ +use std::os::windows::process; + +use mbe::syntax_node_to_token_tree; +use rustc_hash::FxHashSet; +use syntax::{ + ast::{self, Attr, FieldList, HasAttrs, RecordFieldList, TupleFieldList, Variant, VariantList}, + AstNode, SyntaxElement, SyntaxNode, T, +}; +use tracing::info; + +use crate::{db::ExpandDatabase, span_map::SpanMap, MacroCallLoc}; + +fn check_cfg_attr( + attr: &Attr, + loc: &MacroCallLoc, + span_map: &SpanMap, + db: &dyn ExpandDatabase, +) -> Option { + attr.simple_name().as_deref().map(|v| v == "cfg")?; + info!("Checking cfg attr {:?}", attr); + let Some(tt) = attr.token_tree() else { + info!("cfg attr has no expr {:?}", attr); + return Some(true); + }; + info!("Checking cfg {:?}", tt); + let tt = tt.syntax().clone(); + // Convert to a tt::Subtree + let tt = syntax_node_to_token_tree(&tt, span_map, loc.call_site); + let cfg = cfg::CfgExpr::parse(&tt); + let enabled = db.crate_graph()[loc.krate].cfg_options.check(&cfg) != Some(false); + Some(enabled) +} +enum CfgAttrResult { + Enabled(Attr), + Disabled, +} + +fn check_cfg_attr_attr( + attr: &Attr, + loc: &MacroCallLoc, + span_map: &SpanMap, + db: &dyn ExpandDatabase, +) -> Option { + attr.simple_name().as_deref().map(|v| v == "cfg_attr")?; + info!("Checking cfg_attr attr {:?}", attr); + let Some(tt) = attr.token_tree() else { + info!("cfg_attr attr has no expr {:?}", attr); + return None; + }; + info!("Checking cfg_attr {:?}", tt); + let tt = tt.syntax().clone(); + // Convert to a tt::Subtree + let tt = syntax_node_to_token_tree(&tt, span_map, loc.call_site); + let cfg = cfg::CfgExpr::parse(&tt); + let enabled = db.crate_graph()[loc.krate].cfg_options.check(&cfg) != Some(false); + if enabled { + // FIXME: Add the internal attribute + Some(CfgAttrResult::Enabled(attr.clone())) + } else { + Some(CfgAttrResult::Disabled) + } +} + +fn process_has_attrs_with_possible_comma( + items: impl Iterator, + loc: &MacroCallLoc, + span_map: &SpanMap, + db: &dyn ExpandDatabase, + res: &mut FxHashSet, +) -> Option<()> { + for item in items { + let field_attrs = item.attrs(); + 'attrs: for attr in field_attrs { + let Some(enabled) = check_cfg_attr(&attr, loc, span_map, db) else { + continue; + }; + if enabled { + //FIXME: Should we remove the cfg_attr? 
+ } else { + info!("censoring type {:?}", item.syntax()); + res.insert(item.syntax().clone().into()); + // We need to remove the , as well + if let Some(comma) = item.syntax().next_sibling_or_token() { + if comma.kind() == T![,] { + res.insert(comma.into()); + } + } + break 'attrs; + } + let Some(attr_result) = check_cfg_attr_attr(&attr, loc, span_map, db) else { + continue; + }; + match attr_result { + CfgAttrResult::Enabled(attr) => { + //FIXME: Replace the attribute with the internal attribute + } + CfgAttrResult::Disabled => { + info!("censoring type {:?}", item.syntax()); + res.insert(attr.syntax().clone().into()); + continue; + } + } + } + } + Some(()) +} +fn process_enum( + variants: VariantList, + loc: &MacroCallLoc, + span_map: &SpanMap, + db: &dyn ExpandDatabase, + res: &mut FxHashSet, +) -> Option<()> { + for variant in variants.variants() { + 'attrs: for attr in variant.attrs() { + if !check_cfg_attr(&attr, loc, span_map, db)? { + info!("censoring variant {:?}", variant.syntax()); + res.insert(variant.syntax().clone().into()); + if let Some(comma) = variant.syntax().next_sibling_or_token() { + if comma.kind() == T![,] { + res.insert(comma.into()); + } + } + break 'attrs; + } + } + if let Some(fields) = variant.field_list() { + match fields { + ast::FieldList::RecordFieldList(fields) => { + process_has_attrs_with_possible_comma(fields.fields(), loc, span_map, db, res)?; + } + ast::FieldList::TupleFieldList(fields) => { + process_has_attrs_with_possible_comma(fields.fields(), loc, span_map, db, res)?; + } + } + } + } + Some(()) +} +/// Handle +pub(crate) fn process_cfg_attrs( + node: &SyntaxNode, + loc: &MacroCallLoc, + span_map: &SpanMap, + db: &dyn ExpandDatabase, +) -> Option> { + let mut res = FxHashSet::default(); + let item = ast::Item::cast(node.clone())?; + match item { + ast::Item::Struct(it) => match it.field_list()? 
{ + ast::FieldList::RecordFieldList(fields) => { + process_has_attrs_with_possible_comma( + fields.fields(), + loc, + span_map, + db, + &mut res, + )?; + } + ast::FieldList::TupleFieldList(fields) => { + process_has_attrs_with_possible_comma( + fields.fields(), + loc, + span_map, + db, + &mut res, + )?; + } + }, + ast::Item::Enum(it) => { + process_enum(it.variant_list()?, loc, span_map, db, &mut res)?; + } + // FIXME: Implement for other items + _ => {} + } + + Some(res) +} diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs index 6f69ee15acac..5188d8073299 100644 --- a/crates/hir-expand/src/db.rs +++ b/crates/hir-expand/src/db.rs @@ -7,15 +7,17 @@ use mbe::{syntax_node_to_token_tree, ValueResult}; use rustc_hash::FxHashSet; use span::{AstIdMap, SyntaxContextData, SyntaxContextId}; use syntax::{ - ast::{self, HasAttrs}, - AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T, + ast::{self, Attr, HasAttrs}, + AstNode, Parse, SyntaxElement, SyntaxError, SyntaxNode, SyntaxToken, T, }; +use tracing::info; use triomphe::Arc; use crate::{ attrs::collect_attrs, builtin_attr_macro::pseudo_derive_attr_expansion, builtin_fn_macro::EagerExpander, + cfg_process, declarative::DeclarativeMacroExpander, fixup::{self, reverse_fixups, SyntaxFixupUndoInfo}, hygiene::{span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt}, @@ -152,8 +154,8 @@ pub fn expand_speculative( let censor = censor_for_macro_input(&loc, speculative_args); let mut fixups = fixup::fixup_syntax(span_map, speculative_args, loc.call_site); fixups.append.retain(|it, _| match it { - syntax::NodeOrToken::Node(it) => !censor.contains(it), syntax::NodeOrToken::Token(_) => true, + it => !censor.contains(it), }); fixups.remove.extend(censor); ( @@ -408,12 +410,15 @@ fn macro_arg( ), MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => { let censor = censor_for_macro_input(&loc, &syntax); + let censor_cfg = censor_cfg_elements(&syntax, &loc, &map, db); let mut fixups = fixup::fixup_syntax(map.as_ref(), &syntax, loc.call_site); fixups.append.retain(|it, _| match it { - syntax::NodeOrToken::Node(it) => !censor.contains(it), syntax::NodeOrToken::Token(_) => true, + it => !censor.contains(it) && !censor_cfg.contains(it), }); fixups.remove.extend(censor); + fixups.remove.extend(censor_cfg); + { let mut tt = mbe::syntax_node_to_token_tree_modified( &syntax, @@ -456,12 +461,19 @@ fn macro_arg( } } } - +fn censor_cfg_elements( + node: &SyntaxNode, + loc: &MacroCallLoc, + span_map: &SpanMap, + db: &dyn ExpandDatabase, +) -> FxHashSet { + cfg_process::process_cfg_attrs(node, loc, span_map, db).unwrap_or_default() +} // FIXME: Censoring info should be calculated by the caller! 
Namely by name resolution /// Certain macro calls expect some nodes in the input to be preprocessed away, namely: /// - derives expect all `#[derive(..)]` invocations up to the currently invoked one to be stripped /// - attributes expect the invoking attribute to be stripped -fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet { +fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet { // FIXME: handle `cfg_attr` (|| { let censor = match loc.kind { @@ -477,7 +489,7 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet return None, @@ -486,7 +498,7 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet>, - pub(crate) remove: FxHashSet, + pub(crate) remove: FxHashSet, pub(crate) undo_info: SyntaxFixupUndoInfo, } @@ -51,7 +51,7 @@ pub(crate) fn fixup_syntax( call_site: Span, ) -> SyntaxFixups { let mut append = FxHashMap::::default(); - let mut remove = FxHashSet::::default(); + let mut remove = FxHashSet::::default(); let mut preorder = node.preorder(); let mut original = Vec::new(); let dummy_range = FIXUP_DUMMY_RANGE; @@ -68,7 +68,7 @@ pub(crate) fn fixup_syntax( let node_range = node.text_range(); if can_handle_error(&node) && has_error_to_handle(&node) { - remove.insert(node.clone()); + remove.insert(node.clone().into()); // the node contains an error node, we have to completely replace it by something valid let original_tree = mbe::syntax_node_to_token_tree(&node, span_map, call_site); let idx = original.len() as u32; diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index 42dc8c12d60b..8136e7f0470a 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -22,8 +22,8 @@ pub mod proc_macro; pub mod quote; pub mod span_map; +mod cfg_process; mod fixup; - use attrs::collect_attrs; use triomphe::Arc; diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs index 3c270e30a9ba..593a8e5ed32f 100644 --- a/crates/mbe/src/syntax_bridge.rs +++ b/crates/mbe/src/syntax_bridge.rs @@ -9,6 +9,7 @@ use syntax::{ SyntaxKind::*, SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, WalkEvent, T, }; +use tracing::info; use tt::{ buffer::{Cursor, TokenBuffer}, Span, @@ -92,7 +93,7 @@ pub fn syntax_node_to_token_tree_modified( node: &SyntaxNode, map: SpanMap, append: FxHashMap>>>, - remove: FxHashSet, + remove: FxHashSet, call_site: SpanData, ) -> tt::Subtree> where @@ -629,7 +630,7 @@ struct Converter { /// Used to make the emitted text ranges in the spans relative to the span anchor. 
map: SpanMap, append: FxHashMap>>, - remove: FxHashSet, + remove: FxHashSet, call_site: S, } @@ -638,7 +639,7 @@ impl Converter { node: &SyntaxNode, map: SpanMap, append: FxHashMap>>, - remove: FxHashSet, + remove: FxHashSet, call_site: S, ) -> Self { let mut this = Converter { @@ -660,16 +661,30 @@ impl Converter { fn next_token(&mut self) -> Option { while let Some(ev) = self.preorder.next() { match ev { - WalkEvent::Enter(SyntaxElement::Token(t)) => return Some(t), - WalkEvent::Enter(SyntaxElement::Node(n)) if self.remove.contains(&n) => { - self.preorder.skip_subtree(); - if let Some(mut v) = self.append.remove(&n.into()) { - v.reverse(); - self.current_leaves.extend(v); - return None; + WalkEvent::Enter(token) => { + if self.remove.contains(&token) { + match token { + syntax::NodeOrToken::Token(_) => { + continue; + } + node => { + self.preorder.skip_subtree(); + if let Some(mut v) = self.append.remove(&node) { + v.reverse(); + self.current_leaves.extend(v); + return None; + } + } + } + } else { + match token { + syntax::NodeOrToken::Token(token) => { + return Some(token); + } + _ => (), + } } } - WalkEvent::Enter(SyntaxElement::Node(_)) => (), WalkEvent::Leave(ele) => { if let Some(mut v) = self.append.remove(&ele) { v.reverse(); From 79f2651262a72bf72e0f1d3d2f9d815ac75eb147 Mon Sep 17 00:00:00 2001 From: Wyatt Herkamp Date: Sat, 9 Mar 2024 13:25:56 -0500 Subject: [PATCH 2/5] Add cfg_attr and cleanup code --- crates/cfg/Cargo.toml | 4 +- crates/cfg/src/cfg_attr.rs | 70 -------- crates/cfg/src/cfg_expr.rs | 77 ++++++++- crates/cfg/src/lib.rs | 4 +- crates/cfg/src/tests.rs | 26 ++- crates/hir-expand/src/cfg_process.rs | 234 ++++++++++++++------------- crates/hir-expand/src/db.rs | 15 +- crates/mbe/src/syntax_bridge.rs | 1 - 8 files changed, 233 insertions(+), 198 deletions(-) delete mode 100644 crates/cfg/src/cfg_attr.rs diff --git a/crates/cfg/Cargo.toml b/crates/cfg/Cargo.toml index fbda065b10f3..059fd85440ba 100644 --- a/crates/cfg/Cargo.toml +++ b/crates/cfg/Cargo.toml @@ -16,6 +16,7 @@ rustc-hash.workspace = true # locals deps tt.workspace = true +syntax.workspace = true [dev-dependencies] expect-test = "1.4.1" @@ -28,7 +29,6 @@ derive_arbitrary = "1.3.2" # local deps mbe.workspace = true -syntax.workspace = true [lints] -workspace = true \ No newline at end of file +workspace = true diff --git a/crates/cfg/src/cfg_attr.rs b/crates/cfg/src/cfg_attr.rs deleted file mode 100644 index 4eb5928b1e14..000000000000 --- a/crates/cfg/src/cfg_attr.rs +++ /dev/null @@ -1,70 +0,0 @@ -use std::{ - fmt::{self, Debug}, - slice::Iter as SliceIter, -}; - -use crate::{cfg_expr::next_cfg_expr, CfgAtom, CfgExpr}; -use tt::{Delimiter, SmolStr, Span}; -/// Represents a `#[cfg_attr(.., my_attr)]` attribute. -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct CfgAttr { - /// Expression in `cfg_attr` attribute. - pub cfg_expr: CfgExpr, - /// Inner attribute. 
- pub attr: tt::Subtree, -} - -impl CfgAttr { - /// Parses a sub tree in the form of (cfg_expr, inner_attribute) - pub fn parse(tt: &tt::Subtree) -> Option> { - let mut iter = tt.token_trees.iter(); - let cfg_expr = next_cfg_expr(&mut iter).unwrap_or(CfgExpr::Invalid); - // FIXME: This is probably not the right way to do this - // Get's the span of the next token tree - let first_span = iter.as_slice().first().map(|tt| tt.first_span())?; - let attr = tt::Subtree { - delimiter: Delimiter::invisible_spanned(first_span), - token_trees: iter.cloned().collect(), - }; - Some(CfgAttr { cfg_expr, attr: attr }) - } -} - -#[cfg(test)] -mod tests { - use expect_test::{expect, Expect}; - use mbe::{syntax_node_to_token_tree, DummyTestSpanMap, DUMMY}; - use syntax::{ast, AstNode}; - - use crate::{CfgAttr, DnfExpr}; - - fn check_dnf(input: &str, expected_dnf: Expect, expected_attrs: Expect) { - let source_file = ast::SourceFile::parse(input).ok().unwrap(); - let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY); - let Some(CfgAttr { cfg_expr, attr }) = CfgAttr::parse(&tt) else { - assert!(false, "failed to parse cfg_attr"); - return; - }; - - let actual = format!("#![cfg({})]", DnfExpr::new(cfg_expr)); - expected_dnf.assert_eq(&actual); - let actual_attrs = format!("#![{}]", attr); - expected_attrs.assert_eq(&actual_attrs); - } - - #[test] - fn smoke() { - check_dnf( - r#"#![cfg_attr(feature = "nightly", feature(slice_split_at_unchecked))]"#, - expect![[r#"#![cfg(feature = "nightly")]"#]], - expect![r#"#![feature (slice_split_at_unchecked)]"#], - ); - - check_dnf( - r#"#![cfg_attr(not(feature = "std"), no_std)]"#, - expect![[r#"#![cfg(not(feature = "std"))]"#]], - expect![r#"#![no_std]"#], - ); - } -} diff --git a/crates/cfg/src/cfg_expr.rs b/crates/cfg/src/cfg_expr.rs index 425fa90efe63..91731c29d2bf 100644 --- a/crates/cfg/src/cfg_expr.rs +++ b/crates/cfg/src/cfg_expr.rs @@ -2,8 +2,12 @@ //! //! See: -use std::{fmt, slice::Iter as SliceIter}; +use std::{fmt, iter::Peekable, slice::Iter as SliceIter}; +use syntax::{ + ast::{self, Meta}, + NodeOrToken, +}; use tt::SmolStr; /// A simple configuration value passed in from the outside. @@ -47,6 +51,12 @@ impl CfgExpr { pub fn parse(tt: &tt::Subtree) -> CfgExpr { next_cfg_expr(&mut tt.token_trees.iter()).unwrap_or(CfgExpr::Invalid) } + /// Parses a `cfg` attribute from the meta + pub fn parse_from_attr_meta(meta: Meta) -> Option { + let tt = meta.token_tree()?; + let mut iter = tt.token_trees_and_tokens().skip(1).peekable(); + next_cfg_expr_from_syntax(&mut iter) + } /// Fold the cfg by querying all basic `Atom` and `KeyValue` predicates. 
pub fn fold(&self, query: &dyn Fn(&CfgAtom) -> bool) -> Option { match self { @@ -62,8 +72,71 @@ impl CfgExpr { } } } +fn next_cfg_expr_from_syntax(iter: &mut Peekable) -> Option +where + I: Iterator>, +{ + let name = match iter.next() { + None => return None, + Some(NodeOrToken::Token(element)) => match element.kind() { + syntax::T![ident] => SmolStr::new(element.text()), + _ => return Some(CfgExpr::Invalid), + }, + Some(_) => return Some(CfgExpr::Invalid), + }; + let result = match name.as_str() { + "all" | "any" | "not" => { + let mut preds = Vec::new(); + let Some(NodeOrToken::Node(tree)) = iter.next() else { + return Some(CfgExpr::Invalid); + }; + let mut tree_iter = tree.token_trees_and_tokens().skip(1).peekable(); + while tree_iter + .peek() + .filter( + |element| matches!(element, NodeOrToken::Token(token) if (token.kind() != syntax::T![')'])), + ) + .is_some() + { + let pred = next_cfg_expr_from_syntax(&mut tree_iter); + if let Some(pred) = pred { + preds.push(pred); + } + } + let group = match name.as_str() { + "all" => CfgExpr::All(preds), + "any" => CfgExpr::Any(preds), + "not" => CfgExpr::Not(Box::new(preds.pop().unwrap_or(CfgExpr::Invalid))), + _ => unreachable!(), + }; + Some(group) + } + _ => match iter.peek() { + Some(NodeOrToken::Token(element)) if (element.kind() == syntax::T![=]) => { + iter.next(); + match iter.next() { + Some(NodeOrToken::Token(value_token)) + if (value_token.kind() == syntax::SyntaxKind::STRING) => + { + let value = value_token.text(); + let value = SmolStr::new(value.trim_matches('"')); + Some(CfgExpr::Atom(CfgAtom::KeyValue { key: name, value })) + } + _ => None, + } + } + _ => Some(CfgExpr::Atom(CfgAtom::Flag(name))), + }, + }; + if let Some(NodeOrToken::Token(element)) = iter.peek() { + if element.kind() == syntax::T![,] { + iter.next(); + } + } + result +} -pub(crate) fn next_cfg_expr(it: &mut SliceIter<'_, tt::TokenTree>) -> Option { +fn next_cfg_expr(it: &mut SliceIter<'_, tt::TokenTree>) -> Option { let name = match it.next() { None => return None, Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) => ident.text.clone(), diff --git a/crates/cfg/src/lib.rs b/crates/cfg/src/lib.rs index 4d5483d9561d..454d6fc5384b 100644 --- a/crates/cfg/src/lib.rs +++ b/crates/cfg/src/lib.rs @@ -2,8 +2,7 @@ #![warn(rust_2018_idioms, unused_lifetimes)] -mod cfg_attr; -pub(crate) mod cfg_expr; +mod cfg_expr; mod dnf; #[cfg(test)] mod tests; @@ -13,7 +12,6 @@ use std::fmt; use rustc_hash::FxHashSet; use tt::SmolStr; -pub use cfg_attr::CfgAttr; pub use cfg_expr::{CfgAtom, CfgExpr}; pub use dnf::DnfExpr; diff --git a/crates/cfg/src/tests.rs b/crates/cfg/src/tests.rs index 62fb429a63fa..8eca907d8b84 100644 --- a/crates/cfg/src/tests.rs +++ b/crates/cfg/src/tests.rs @@ -1,7 +1,10 @@ use arbitrary::{Arbitrary, Unstructured}; use expect_test::{expect, Expect}; use mbe::{syntax_node_to_token_tree, DummyTestSpanMap, DUMMY}; -use syntax::{ast, AstNode}; +use syntax::{ + ast::{self, Attr}, + AstNode, SourceFile, +}; use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr}; @@ -12,6 +15,22 @@ fn assert_parse_result(input: &str, expected: CfgExpr) { let cfg = CfgExpr::parse(&tt); assert_eq!(cfg, expected); } +fn check_dnf_from_syntax(input: &str, expect: Expect) { + let parse = SourceFile::parse(input); + let node = match parse.tree().syntax().descendants().find_map(Attr::cast) { + Some(it) => it, + None => { + let node = std::any::type_name::(); + panic!("Failed to make ast node `{node}` from text {input}") + } + }; + let node = node.clone_subtree(); + 
assert_eq!(node.syntax().text_range().start(), 0.into()); + + let cfg = CfgExpr::parse_from_attr_meta(node.meta().unwrap()).unwrap(); + let actual = format!("#![cfg({})]", DnfExpr::new(cfg)); + expect.assert_eq(&actual); +} fn check_dnf(input: &str, expect: Expect) { let source_file = ast::SourceFile::parse(input).ok().unwrap(); @@ -86,6 +105,11 @@ fn smoke() { check_dnf("#![cfg(not(a))]", expect![[r#"#![cfg(not(a))]"#]]); } +#[test] +fn cfg_from_attr() { + check_dnf_from_syntax(r#"#[cfg(test)]"#, expect![[r#"#![cfg(test)]"#]]); + check_dnf_from_syntax(r#"#[cfg(not(never))]"#, expect![[r#"#![cfg(not(never))]"#]]); +} #[test] fn distribute() { diff --git a/crates/hir-expand/src/cfg_process.rs b/crates/hir-expand/src/cfg_process.rs index 7f6158f6bb3c..d67402b0b8ea 100644 --- a/crates/hir-expand/src/cfg_process.rs +++ b/crates/hir-expand/src/cfg_process.rs @@ -1,102 +1,65 @@ -use std::os::windows::process; - -use mbe::syntax_node_to_token_tree; +//! Processes out #[cfg] and #[cfg_attr] attributes from the input for the derive macro use rustc_hash::FxHashSet; use syntax::{ - ast::{self, Attr, FieldList, HasAttrs, RecordFieldList, TupleFieldList, Variant, VariantList}, + ast::{self, Attr, HasAttrs, Meta, VariantList}, AstNode, SyntaxElement, SyntaxNode, T, }; -use tracing::info; +use tracing::{info, warn}; -use crate::{db::ExpandDatabase, span_map::SpanMap, MacroCallLoc}; +use crate::{db::ExpandDatabase, MacroCallKind, MacroCallLoc}; -fn check_cfg_attr( - attr: &Attr, - loc: &MacroCallLoc, - span_map: &SpanMap, - db: &dyn ExpandDatabase, -) -> Option { - attr.simple_name().as_deref().map(|v| v == "cfg")?; - info!("Checking cfg attr {:?}", attr); - let Some(tt) = attr.token_tree() else { - info!("cfg attr has no expr {:?}", attr); - return Some(true); - }; - info!("Checking cfg {:?}", tt); - let tt = tt.syntax().clone(); - // Convert to a tt::Subtree - let tt = syntax_node_to_token_tree(&tt, span_map, loc.call_site); - let cfg = cfg::CfgExpr::parse(&tt); +fn check_cfg_attr(attr: &Attr, loc: &MacroCallLoc, db: &dyn ExpandDatabase) -> Option { + if !attr.simple_name().as_deref().map(|v| v == "cfg")? { + return None; + } + info!("Evaluating cfg {}", attr); + let cfg = cfg::CfgExpr::parse_from_attr_meta(attr.meta()?)?; + info!("Checking cfg {:?}", cfg); let enabled = db.crate_graph()[loc.krate].cfg_options.check(&cfg) != Some(false); Some(enabled) } -enum CfgAttrResult { - Enabled(Attr), - Disabled, -} -fn check_cfg_attr_attr( - attr: &Attr, - loc: &MacroCallLoc, - span_map: &SpanMap, - db: &dyn ExpandDatabase, -) -> Option { - attr.simple_name().as_deref().map(|v| v == "cfg_attr")?; - info!("Checking cfg_attr attr {:?}", attr); - let Some(tt) = attr.token_tree() else { - info!("cfg_attr attr has no expr {:?}", attr); +fn check_cfg_attr_attr(attr: &Attr, loc: &MacroCallLoc, db: &dyn ExpandDatabase) -> Option { + if !attr.simple_name().as_deref().map(|v| v == "cfg_attr")? 
{ return None; - }; - info!("Checking cfg_attr {:?}", tt); - let tt = tt.syntax().clone(); - // Convert to a tt::Subtree - let tt = syntax_node_to_token_tree(&tt, span_map, loc.call_site); - let cfg = cfg::CfgExpr::parse(&tt); - let enabled = db.crate_graph()[loc.krate].cfg_options.check(&cfg) != Some(false); - if enabled { - // FIXME: Add the internal attribute - Some(CfgAttrResult::Enabled(attr.clone())) - } else { - Some(CfgAttrResult::Disabled) } + info!("Evaluating cfg_attr {}", attr); + + let cfg_expr = cfg::CfgExpr::parse_from_attr_meta(attr.meta()?)?; + info!("Checking cfg_attr {:?}", cfg_expr); + let enabled = db.crate_graph()[loc.krate].cfg_options.check(&cfg_expr) != Some(false); + Some(enabled) } fn process_has_attrs_with_possible_comma( items: impl Iterator, loc: &MacroCallLoc, - span_map: &SpanMap, db: &dyn ExpandDatabase, - res: &mut FxHashSet, + remove: &mut FxHashSet, ) -> Option<()> { for item in items { let field_attrs = item.attrs(); 'attrs: for attr in field_attrs { - let Some(enabled) = check_cfg_attr(&attr, loc, span_map, db) else { - continue; - }; - if enabled { - //FIXME: Should we remove the cfg_attr? - } else { - info!("censoring type {:?}", item.syntax()); - res.insert(item.syntax().clone().into()); - // We need to remove the , as well - if let Some(comma) = item.syntax().next_sibling_or_token() { - if comma.kind() == T![,] { - res.insert(comma.into()); - } + if let Some(enabled) = check_cfg_attr(&attr, loc, db) { + // Rustc does not strip the attribute if it is enabled. So we will will leave it + if !enabled { + info!("censoring type {:?}", item.syntax()); + remove.insert(item.syntax().clone().into()); + // We need to remove the , as well + add_comma(&item, remove); + break 'attrs; } - break 'attrs; - } - let Some(attr_result) = check_cfg_attr_attr(&attr, loc, span_map, db) else { - continue; }; - match attr_result { - CfgAttrResult::Enabled(attr) => { - //FIXME: Replace the attribute with the internal attribute - } - CfgAttrResult::Disabled => { - info!("censoring type {:?}", item.syntax()); - res.insert(attr.syntax().clone().into()); + + if let Some(enabled) = check_cfg_attr_attr(&attr, loc, db) { + if enabled { + info!("Removing cfg_attr tokens {:?}", attr); + let meta = attr.meta()?; + let removes_from_cfg_attr = remove_tokens_within_cfg_attr(meta)?; + remove.extend(removes_from_cfg_attr); + } else { + info!("censoring type cfg_attr {:?}", item.syntax()); + remove.insert(attr.syntax().clone().into()); continue; } } @@ -104,75 +67,130 @@ fn process_has_attrs_with_possible_comma( } Some(()) } + +fn remove_tokens_within_cfg_attr(meta: Meta) -> Option> { + let mut remove: FxHashSet = FxHashSet::default(); + info!("Enabling attribute {}", meta); + let meta_path = meta.path()?; + info!("Removing {:?}", meta_path.syntax()); + remove.insert(meta_path.syntax().clone().into()); + + let meta_tt = meta.token_tree()?; + info!("meta_tt {}", meta_tt); + // Remove the left paren + remove.insert(meta_tt.l_paren_token()?.into()); + let mut found_comma = false; + for tt in meta_tt.token_trees_and_tokens().skip(1) { + info!("Checking {:?}", tt); + // Check if it is a subtree or a token. If it is a token check if it is a comma. If so, remove it and break. 
+ match tt { + syntax::NodeOrToken::Node(node) => { + // Remove the entire subtree + remove.insert(node.syntax().clone().into()); + } + syntax::NodeOrToken::Token(token) => { + if token.kind() == T![,] { + found_comma = true; + remove.insert(token.into()); + break; + } + remove.insert(token.into()); + } + } + } + if !found_comma { + warn!("No comma found in {}", meta_tt); + return None; + } + // Remove the right paren + remove.insert(meta_tt.r_paren_token()?.into()); + Some(remove) +} +fn add_comma(item: &impl AstNode, res: &mut FxHashSet) { + if let Some(comma) = item.syntax().next_sibling_or_token().filter(|it| it.kind() == T![,]) { + res.insert(comma); + } +} fn process_enum( variants: VariantList, loc: &MacroCallLoc, - span_map: &SpanMap, db: &dyn ExpandDatabase, - res: &mut FxHashSet, + remove: &mut FxHashSet, ) -> Option<()> { - for variant in variants.variants() { - 'attrs: for attr in variant.attrs() { - if !check_cfg_attr(&attr, loc, span_map, db)? { - info!("censoring variant {:?}", variant.syntax()); - res.insert(variant.syntax().clone().into()); - if let Some(comma) = variant.syntax().next_sibling_or_token() { - if comma.kind() == T![,] { - res.insert(comma.into()); - } + 'variant: for variant in variants.variants() { + for attr in variant.attrs() { + if let Some(enabled) = check_cfg_attr(&attr, loc, db) { + // Rustc does not strip the attribute if it is enabled. So we will will leave it + if !enabled { + info!("censoring type {:?}", variant.syntax()); + remove.insert(variant.syntax().clone().into()); + // We need to remove the , as well + add_comma(&variant, remove); + continue 'variant; + } + }; + + if let Some(enabled) = check_cfg_attr_attr(&attr, loc, db) { + if enabled { + info!("Removing cfg_attr tokens {:?}", attr); + let meta = attr.meta()?; + let removes_from_cfg_attr = remove_tokens_within_cfg_attr(meta)?; + remove.extend(removes_from_cfg_attr); + } else { + info!("censoring type cfg_attr {:?}", variant.syntax()); + remove.insert(attr.syntax().clone().into()); + continue; } - break 'attrs; } } if let Some(fields) = variant.field_list() { match fields { ast::FieldList::RecordFieldList(fields) => { - process_has_attrs_with_possible_comma(fields.fields(), loc, span_map, db, res)?; + process_has_attrs_with_possible_comma(fields.fields(), loc, db, remove)?; } ast::FieldList::TupleFieldList(fields) => { - process_has_attrs_with_possible_comma(fields.fields(), loc, span_map, db, res)?; + process_has_attrs_with_possible_comma(fields.fields(), loc, db, remove)?; } } } } Some(()) } -/// Handle + pub(crate) fn process_cfg_attrs( node: &SyntaxNode, loc: &MacroCallLoc, - span_map: &SpanMap, db: &dyn ExpandDatabase, ) -> Option> { + // FIXME: #[cfg_eval] is not implemented. But it is not stable yet + if !matches!(loc.kind, MacroCallKind::Derive { .. }) { + return None; + } let mut res = FxHashSet::default(); + let item = ast::Item::cast(node.clone())?; match item { ast::Item::Struct(it) => match it.field_list()? 
{ ast::FieldList::RecordFieldList(fields) => { - process_has_attrs_with_possible_comma( - fields.fields(), - loc, - span_map, - db, - &mut res, - )?; + process_has_attrs_with_possible_comma(fields.fields(), loc, db, &mut res)?; } ast::FieldList::TupleFieldList(fields) => { - process_has_attrs_with_possible_comma( - fields.fields(), - loc, - span_map, - db, - &mut res, - )?; + process_has_attrs_with_possible_comma(fields.fields(), loc, db, &mut res)?; } }, ast::Item::Enum(it) => { - process_enum(it.variant_list()?, loc, span_map, db, &mut res)?; + process_enum(it.variant_list()?, loc, db, &mut res)?; } - // FIXME: Implement for other items + ast::Item::Union(it) => { + process_has_attrs_with_possible_comma( + it.record_field_list()?.fields(), + loc, + db, + &mut res, + )?; + } + // FIXME: Implement for other items if necessary. As we do not support #[cfg_eval] yet, we do not need to implement it for now _ => {} } - Some(res) } diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs index 5188d8073299..bb69c72be4d4 100644 --- a/crates/hir-expand/src/db.rs +++ b/crates/hir-expand/src/db.rs @@ -7,10 +7,9 @@ use mbe::{syntax_node_to_token_tree, ValueResult}; use rustc_hash::FxHashSet; use span::{AstIdMap, SyntaxContextData, SyntaxContextId}; use syntax::{ - ast::{self, Attr, HasAttrs}, + ast::{self, HasAttrs}, AstNode, Parse, SyntaxElement, SyntaxError, SyntaxNode, SyntaxToken, T, }; -use tracing::info; use triomphe::Arc; use crate::{ @@ -410,7 +409,8 @@ fn macro_arg( ), MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => { let censor = censor_for_macro_input(&loc, &syntax); - let censor_cfg = censor_cfg_elements(&syntax, &loc, &map, db); + let censor_cfg = + cfg_process::process_cfg_attrs(&syntax, &loc, db).unwrap_or_default(); let mut fixups = fixup::fixup_syntax(map.as_ref(), &syntax, loc.call_site); fixups.append.retain(|it, _| match it { syntax::NodeOrToken::Token(_) => true, @@ -461,14 +461,7 @@ fn macro_arg( } } } -fn censor_cfg_elements( - node: &SyntaxNode, - loc: &MacroCallLoc, - span_map: &SpanMap, - db: &dyn ExpandDatabase, -) -> FxHashSet { - cfg_process::process_cfg_attrs(node, loc, span_map, db).unwrap_or_default() -} + // FIXME: Censoring info should be calculated by the caller! 
Namely by name resolution /// Certain macro calls expect some nodes in the input to be preprocessed away, namely: /// - derives expect all `#[derive(..)]` invocations up to the currently invoked one to be stripped diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs index 593a8e5ed32f..972e548d3432 100644 --- a/crates/mbe/src/syntax_bridge.rs +++ b/crates/mbe/src/syntax_bridge.rs @@ -9,7 +9,6 @@ use syntax::{ SyntaxKind::*, SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, WalkEvent, T, }; -use tracing::info; use tt::{ buffer::{Cursor, TokenBuffer}, Span, From 948a2dee0993b77240cb369a26cfd75d350d373c Mon Sep 17 00:00:00 2001 From: Wyatt Herkamp Date: Sat, 9 Mar 2024 14:12:27 -0500 Subject: [PATCH 3/5] Clippy Fix --- crates/mbe/src/syntax_bridge.rs | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs index 972e548d3432..c7ffb0a41a51 100644 --- a/crates/mbe/src/syntax_bridge.rs +++ b/crates/mbe/src/syntax_bridge.rs @@ -675,13 +675,8 @@ impl Converter { } } } - } else { - match token { - syntax::NodeOrToken::Token(token) => { - return Some(token); - } - _ => (), - } + } else if let syntax::NodeOrToken::Token(token) = token { + return Some(token); } } WalkEvent::Leave(ele) => { From 0fb5d0d918e1deae39f1a561707a6b06a2981925 Mon Sep 17 00:00:00 2001 From: Wyatt Herkamp Date: Mon, 11 Mar 2024 06:55:04 -0400 Subject: [PATCH 4/5] Check for cfg_attr on the actual item and Debug instead of info in cfg_process --- crates/hir-expand/src/cfg_process.rs | 84 +++++++++++++++------------- 1 file changed, 46 insertions(+), 38 deletions(-) diff --git a/crates/hir-expand/src/cfg_process.rs b/crates/hir-expand/src/cfg_process.rs index d67402b0b8ea..1d088ada646b 100644 --- a/crates/hir-expand/src/cfg_process.rs +++ b/crates/hir-expand/src/cfg_process.rs @@ -4,7 +4,7 @@ use syntax::{ ast::{self, Attr, HasAttrs, Meta, VariantList}, AstNode, SyntaxElement, SyntaxNode, T, }; -use tracing::{info, warn}; +use tracing::{debug, warn}; use crate::{db::ExpandDatabase, MacroCallKind, MacroCallLoc}; @@ -12,9 +12,9 @@ fn check_cfg_attr(attr: &Attr, loc: &MacroCallLoc, db: &dyn ExpandDatabase) -> O if !attr.simple_name().as_deref().map(|v| v == "cfg")? { return None; } - info!("Evaluating cfg {}", attr); + debug!("Evaluating cfg {}", attr); let cfg = cfg::CfgExpr::parse_from_attr_meta(attr.meta()?)?; - info!("Checking cfg {:?}", cfg); + debug!("Checking cfg {:?}", cfg); let enabled = db.crate_graph()[loc.krate].cfg_options.check(&cfg) != Some(false); Some(enabled) } @@ -23,10 +23,9 @@ fn check_cfg_attr_attr(attr: &Attr, loc: &MacroCallLoc, db: &dyn ExpandDatabase) if !attr.simple_name().as_deref().map(|v| v == "cfg_attr")? { return None; } - info!("Evaluating cfg_attr {}", attr); - + debug!("Evaluating cfg_attr {}", attr); let cfg_expr = cfg::CfgExpr::parse_from_attr_meta(attr.meta()?)?; - info!("Checking cfg_attr {:?}", cfg_expr); + debug!("Checking cfg_attr {:?}", cfg_expr); let enabled = db.crate_graph()[loc.krate].cfg_options.check(&cfg_expr) != Some(false); Some(enabled) } @@ -40,25 +39,22 @@ fn process_has_attrs_with_possible_comma( for item in items { let field_attrs = item.attrs(); 'attrs: for attr in field_attrs { - if let Some(enabled) = check_cfg_attr(&attr, loc, db) { - // Rustc does not strip the attribute if it is enabled. 
So we will will leave it - if !enabled { - info!("censoring type {:?}", item.syntax()); - remove.insert(item.syntax().clone().into()); - // We need to remove the , as well - add_comma(&item, remove); - break 'attrs; - } - }; + if check_cfg_attr(&attr, loc, db).map(|enabled| !enabled).unwrap_or_default() { + debug!("censoring type {:?}", item.syntax()); + remove.insert(item.syntax().clone().into()); + // We need to remove the , as well + add_comma(&item, remove); + break 'attrs; + } if let Some(enabled) = check_cfg_attr_attr(&attr, loc, db) { if enabled { - info!("Removing cfg_attr tokens {:?}", attr); + debug!("Removing cfg_attr tokens {:?}", attr); let meta = attr.meta()?; let removes_from_cfg_attr = remove_tokens_within_cfg_attr(meta)?; remove.extend(removes_from_cfg_attr); } else { - info!("censoring type cfg_attr {:?}", item.syntax()); + debug!("censoring type cfg_attr {:?}", item.syntax()); remove.insert(attr.syntax().clone().into()); continue; } @@ -70,18 +66,18 @@ fn process_has_attrs_with_possible_comma( fn remove_tokens_within_cfg_attr(meta: Meta) -> Option> { let mut remove: FxHashSet = FxHashSet::default(); - info!("Enabling attribute {}", meta); + debug!("Enabling attribute {}", meta); let meta_path = meta.path()?; - info!("Removing {:?}", meta_path.syntax()); + debug!("Removing {:?}", meta_path.syntax()); remove.insert(meta_path.syntax().clone().into()); let meta_tt = meta.token_tree()?; - info!("meta_tt {}", meta_tt); + debug!("meta_tt {}", meta_tt); // Remove the left paren remove.insert(meta_tt.l_paren_token()?.into()); let mut found_comma = false; for tt in meta_tt.token_trees_and_tokens().skip(1) { - info!("Checking {:?}", tt); + debug!("Checking {:?}", tt); // Check if it is a subtree or a token. If it is a token check if it is a comma. If so, remove it and break. match tt { syntax::NodeOrToken::Node(node) => { @@ -119,25 +115,23 @@ fn process_enum( ) -> Option<()> { 'variant: for variant in variants.variants() { for attr in variant.attrs() { - if let Some(enabled) = check_cfg_attr(&attr, loc, db) { + if check_cfg_attr(&attr, loc, db).map(|enabled| !enabled).unwrap_or_default() { // Rustc does not strip the attribute if it is enabled. So we will will leave it - if !enabled { - info!("censoring type {:?}", variant.syntax()); - remove.insert(variant.syntax().clone().into()); - // We need to remove the , as well - add_comma(&variant, remove); - continue 'variant; - } + debug!("censoring type {:?}", variant.syntax()); + remove.insert(variant.syntax().clone().into()); + // We need to remove the , as well + add_comma(&variant, remove); + continue 'variant; }; if let Some(enabled) = check_cfg_attr_attr(&attr, loc, db) { if enabled { - info!("Removing cfg_attr tokens {:?}", attr); + debug!("Removing cfg_attr tokens {:?}", attr); let meta = attr.meta()?; let removes_from_cfg_attr = remove_tokens_within_cfg_attr(meta)?; remove.extend(removes_from_cfg_attr); } else { - info!("censoring type cfg_attr {:?}", variant.syntax()); + debug!("censoring type cfg_attr {:?}", variant.syntax()); remove.insert(attr.syntax().clone().into()); continue; } @@ -166,31 +160,45 @@ pub(crate) fn process_cfg_attrs( if !matches!(loc.kind, MacroCallKind::Derive { .. 
}) { return None; } - let mut res = FxHashSet::default(); + let mut remove = FxHashSet::default(); let item = ast::Item::cast(node.clone())?; + for attr in item.attrs() { + if let Some(enabled) = check_cfg_attr_attr(&attr, loc, db) { + if enabled { + debug!("Removing cfg_attr tokens {:?}", attr); + let meta = attr.meta()?; + let removes_from_cfg_attr = remove_tokens_within_cfg_attr(meta)?; + remove.extend(removes_from_cfg_attr); + } else { + debug!("censoring type cfg_attr {:?}", item.syntax()); + remove.insert(attr.syntax().clone().into()); + continue; + } + } + } match item { ast::Item::Struct(it) => match it.field_list()? { ast::FieldList::RecordFieldList(fields) => { - process_has_attrs_with_possible_comma(fields.fields(), loc, db, &mut res)?; + process_has_attrs_with_possible_comma(fields.fields(), loc, db, &mut remove)?; } ast::FieldList::TupleFieldList(fields) => { - process_has_attrs_with_possible_comma(fields.fields(), loc, db, &mut res)?; + process_has_attrs_with_possible_comma(fields.fields(), loc, db, &mut remove)?; } }, ast::Item::Enum(it) => { - process_enum(it.variant_list()?, loc, db, &mut res)?; + process_enum(it.variant_list()?, loc, db, &mut remove)?; } ast::Item::Union(it) => { process_has_attrs_with_possible_comma( it.record_field_list()?.fields(), loc, db, - &mut res, + &mut remove, )?; } // FIXME: Implement for other items if necessary. As we do not support #[cfg_eval] yet, we do not need to implement it for now _ => {} } - Some(res) + Some(remove) } From 447de3d7887b565d01086a52a93ed418036b81e3 Mon Sep 17 00:00:00 2001 From: Wyatt Herkamp Date: Mon, 11 Mar 2024 11:05:59 -0400 Subject: [PATCH 5/5] Review Updates and added tests. --- crates/cfg/Cargo.toml | 2 +- crates/cfg/src/cfg_expr.rs | 77 +------- crates/cfg/src/tests.rs | 26 +-- .../builtin_derive_macro.rs | 118 ++++++++++++ crates/hir-expand/src/cfg_process.rs | 171 +++++++++++++++--- crates/hir-expand/src/db.rs | 6 +- 6 files changed, 274 insertions(+), 126 deletions(-) diff --git a/crates/cfg/Cargo.toml b/crates/cfg/Cargo.toml index 059fd85440ba..9b3a5026ac80 100644 --- a/crates/cfg/Cargo.toml +++ b/crates/cfg/Cargo.toml @@ -16,7 +16,6 @@ rustc-hash.workspace = true # locals deps tt.workspace = true -syntax.workspace = true [dev-dependencies] expect-test = "1.4.1" @@ -29,6 +28,7 @@ derive_arbitrary = "1.3.2" # local deps mbe.workspace = true +syntax.workspace = true [lints] workspace = true diff --git a/crates/cfg/src/cfg_expr.rs b/crates/cfg/src/cfg_expr.rs index 91731c29d2bf..b7dbb7b5fdd7 100644 --- a/crates/cfg/src/cfg_expr.rs +++ b/crates/cfg/src/cfg_expr.rs @@ -2,12 +2,8 @@ //! //! See: -use std::{fmt, iter::Peekable, slice::Iter as SliceIter}; +use std::{fmt, slice::Iter as SliceIter}; -use syntax::{ - ast::{self, Meta}, - NodeOrToken, -}; use tt::SmolStr; /// A simple configuration value passed in from the outside. @@ -51,12 +47,7 @@ impl CfgExpr { pub fn parse(tt: &tt::Subtree) -> CfgExpr { next_cfg_expr(&mut tt.token_trees.iter()).unwrap_or(CfgExpr::Invalid) } - /// Parses a `cfg` attribute from the meta - pub fn parse_from_attr_meta(meta: Meta) -> Option { - let tt = meta.token_tree()?; - let mut iter = tt.token_trees_and_tokens().skip(1).peekable(); - next_cfg_expr_from_syntax(&mut iter) - } + /// Fold the cfg by querying all basic `Atom` and `KeyValue` predicates. 
pub fn fold(&self, query: &dyn Fn(&CfgAtom) -> bool) -> Option { match self { @@ -72,70 +63,6 @@ impl CfgExpr { } } } -fn next_cfg_expr_from_syntax(iter: &mut Peekable) -> Option -where - I: Iterator>, -{ - let name = match iter.next() { - None => return None, - Some(NodeOrToken::Token(element)) => match element.kind() { - syntax::T![ident] => SmolStr::new(element.text()), - _ => return Some(CfgExpr::Invalid), - }, - Some(_) => return Some(CfgExpr::Invalid), - }; - let result = match name.as_str() { - "all" | "any" | "not" => { - let mut preds = Vec::new(); - let Some(NodeOrToken::Node(tree)) = iter.next() else { - return Some(CfgExpr::Invalid); - }; - let mut tree_iter = tree.token_trees_and_tokens().skip(1).peekable(); - while tree_iter - .peek() - .filter( - |element| matches!(element, NodeOrToken::Token(token) if (token.kind() != syntax::T![')'])), - ) - .is_some() - { - let pred = next_cfg_expr_from_syntax(&mut tree_iter); - if let Some(pred) = pred { - preds.push(pred); - } - } - let group = match name.as_str() { - "all" => CfgExpr::All(preds), - "any" => CfgExpr::Any(preds), - "not" => CfgExpr::Not(Box::new(preds.pop().unwrap_or(CfgExpr::Invalid))), - _ => unreachable!(), - }; - Some(group) - } - _ => match iter.peek() { - Some(NodeOrToken::Token(element)) if (element.kind() == syntax::T![=]) => { - iter.next(); - match iter.next() { - Some(NodeOrToken::Token(value_token)) - if (value_token.kind() == syntax::SyntaxKind::STRING) => - { - let value = value_token.text(); - let value = SmolStr::new(value.trim_matches('"')); - Some(CfgExpr::Atom(CfgAtom::KeyValue { key: name, value })) - } - _ => None, - } - } - _ => Some(CfgExpr::Atom(CfgAtom::Flag(name))), - }, - }; - if let Some(NodeOrToken::Token(element)) = iter.peek() { - if element.kind() == syntax::T![,] { - iter.next(); - } - } - result -} - fn next_cfg_expr(it: &mut SliceIter<'_, tt::TokenTree>) -> Option { let name = match it.next() { None => return None, diff --git a/crates/cfg/src/tests.rs b/crates/cfg/src/tests.rs index 8eca907d8b84..62fb429a63fa 100644 --- a/crates/cfg/src/tests.rs +++ b/crates/cfg/src/tests.rs @@ -1,10 +1,7 @@ use arbitrary::{Arbitrary, Unstructured}; use expect_test::{expect, Expect}; use mbe::{syntax_node_to_token_tree, DummyTestSpanMap, DUMMY}; -use syntax::{ - ast::{self, Attr}, - AstNode, SourceFile, -}; +use syntax::{ast, AstNode}; use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr}; @@ -15,22 +12,6 @@ fn assert_parse_result(input: &str, expected: CfgExpr) { let cfg = CfgExpr::parse(&tt); assert_eq!(cfg, expected); } -fn check_dnf_from_syntax(input: &str, expect: Expect) { - let parse = SourceFile::parse(input); - let node = match parse.tree().syntax().descendants().find_map(Attr::cast) { - Some(it) => it, - None => { - let node = std::any::type_name::(); - panic!("Failed to make ast node `{node}` from text {input}") - } - }; - let node = node.clone_subtree(); - assert_eq!(node.syntax().text_range().start(), 0.into()); - - let cfg = CfgExpr::parse_from_attr_meta(node.meta().unwrap()).unwrap(); - let actual = format!("#![cfg({})]", DnfExpr::new(cfg)); - expect.assert_eq(&actual); -} fn check_dnf(input: &str, expect: Expect) { let source_file = ast::SourceFile::parse(input).ok().unwrap(); @@ -105,11 +86,6 @@ fn smoke() { check_dnf("#![cfg(not(a))]", expect![[r#"#![cfg(not(a))]"#]]); } -#[test] -fn cfg_from_attr() { - check_dnf_from_syntax(r#"#[cfg(test)]"#, expect![[r#"#![cfg(test)]"#]]); - check_dnf_from_syntax(r#"#[cfg(not(never))]"#, expect![[r#"#![cfg(not(never))]"#]]); -} #[test] fn distribute() { 
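Throughout the series, an attribute is treated as enabled when `cfg_options.check(&cfg) != Some(false)`, i.e. a predicate that fails to parse is conservatively kept rather than stripped. Below is a minimal standalone sketch of that rule, assuming the cfg crate's `CfgOptions::default`, an `insert_atom` setter, and `check(&CfgExpr) -> Option<bool>`; the `CfgExpr`/`CfgAtom` variants are the ones shown in this series.

use cfg::{CfgAtom, CfgExpr, CfgOptions};
use tt::SmolStr;

// Enablement rule used by check_cfg_attr/check_cfg_attr_attr in this series:
// only an explicit Some(false) disables; None (an invalid cfg) keeps the item.
fn is_enabled(options: &CfgOptions, cfg: &CfgExpr) -> bool {
    options.check(cfg) != Some(false)
}

fn main() {
    let mut options = CfgOptions::default();
    // Assumed setter on CfgOptions; mirrors compiling with `--cfg test`.
    options.insert_atom(SmolStr::new("test"));

    let test = CfgExpr::Atom(CfgAtom::Flag(SmolStr::new("test")));
    let not_test = CfgExpr::Not(Box::new(test.clone()));
    assert!(is_enabled(&options, &test));
    assert!(!is_enabled(&options, &not_test));
    // A predicate that could not be parsed is conservatively treated as enabled.
    assert!(is_enabled(&options, &CfgExpr::Invalid));
}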
diff --git a/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs b/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs index 86b4466153a3..89c1b446081c 100644 --- a/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs +++ b/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs @@ -528,3 +528,121 @@ impl < > $crate::fmt::Debug for Command< > where { }"#]], ); } +#[test] +fn test_debug_expand_with_cfg() { + check( + r#" + //- minicore: derive, fmt + use core::fmt::Debug; + + #[derive(Debug)] + struct HideAndShow { + #[cfg(never)] + always_hide: u32, + #[cfg(not(never))] + always_show: u32, + } + #[derive(Debug)] + enum HideAndShowEnum { + #[cfg(never)] + AlwaysHide, + #[cfg(not(never))] + AlwaysShow{ + #[cfg(never)] + always_hide: u32, + #[cfg(not(never))] + always_show: u32, + } + } + "#, + expect![[r#" +use core::fmt::Debug; + +#[derive(Debug)] +struct HideAndShow { + #[cfg(never)] + always_hide: u32, + #[cfg(not(never))] + always_show: u32, +} +#[derive(Debug)] +enum HideAndShowEnum { + #[cfg(never)] + AlwaysHide, + #[cfg(not(never))] + AlwaysShow{ + #[cfg(never)] + always_hide: u32, + #[cfg(not(never))] + always_show: u32, + } +} + +impl < > $crate::fmt::Debug for HideAndShow< > where { + fn fmt(&self , f: &mut $crate::fmt::Formatter) -> $crate::fmt::Result { + match self { + HideAndShow { + always_show: always_show, + } + =>f.debug_struct("HideAndShow").field("always_show", &always_show).finish() + } + } +} +impl < > $crate::fmt::Debug for HideAndShowEnum< > where { + fn fmt(&self , f: &mut $crate::fmt::Formatter) -> $crate::fmt::Result { + match self { + HideAndShowEnum::AlwaysShow { + always_show: always_show, + } + =>f.debug_struct("AlwaysShow").field("always_show", &always_show).finish(), + } + } +}"#]], + ); +} +#[test] +fn test_default_expand_with_cfg() { + check( + r#" +//- minicore: derive, default +#[derive(Default)] +struct Foo { + field1: i32, + #[cfg(never)] + field2: (), +} +#[derive(Default)] +enum Bar { + Foo, + #[cfg_attr(not(never), default)] + Bar, +} +"#, + expect![[r#" +#[derive(Default)] +struct Foo { + field1: i32, + #[cfg(never)] + field2: (), +} +#[derive(Default)] +enum Bar { + Foo, + #[cfg_attr(not(never), default)] + Bar, +} + +impl < > $crate::default::Default for Foo< > where { + fn default() -> Self { + Foo { + field1: $crate::default::Default::default(), + } + } +} +impl < > $crate::default::Default for Bar< > where { + fn default() -> Self { + Bar::Bar + } +}"#]], + ); +} diff --git a/crates/hir-expand/src/cfg_process.rs b/crates/hir-expand/src/cfg_process.rs index 1d088ada646b..c74c13a6fd3c 100644 --- a/crates/hir-expand/src/cfg_process.rs +++ b/crates/hir-expand/src/cfg_process.rs @@ -1,10 +1,14 @@ //! 
Processes out #[cfg] and #[cfg_attr] attributes from the input for the derive macro +use std::iter::Peekable; + +use cfg::{CfgAtom, CfgExpr}; use rustc_hash::FxHashSet; use syntax::{ ast::{self, Attr, HasAttrs, Meta, VariantList}, - AstNode, SyntaxElement, SyntaxNode, T, + AstNode, NodeOrToken, SyntaxElement, SyntaxNode, T, }; use tracing::{debug, warn}; +use tt::SmolStr; use crate::{db::ExpandDatabase, MacroCallKind, MacroCallLoc}; @@ -13,7 +17,7 @@ fn check_cfg_attr(attr: &Attr, loc: &MacroCallLoc, db: &dyn ExpandDatabase) -> O return None; } debug!("Evaluating cfg {}", attr); - let cfg = cfg::CfgExpr::parse_from_attr_meta(attr.meta()?)?; + let cfg = parse_from_attr_meta(attr.meta()?)?; debug!("Checking cfg {:?}", cfg); let enabled = db.crate_graph()[loc.krate].cfg_options.check(&cfg) != Some(false); Some(enabled) @@ -24,7 +28,7 @@ fn check_cfg_attr_attr(attr: &Attr, loc: &MacroCallLoc, db: &dyn ExpandDatabase) return None; } debug!("Evaluating cfg_attr {}", attr); - let cfg_expr = cfg::CfgExpr::parse_from_attr_meta(attr.meta()?)?; + let cfg_expr = parse_from_attr_meta(attr.meta()?)?; debug!("Checking cfg_attr {:?}", cfg_expr); let enabled = db.crate_graph()[loc.krate].cfg_options.check(&cfg_expr) != Some(false); Some(enabled) @@ -43,7 +47,7 @@ fn process_has_attrs_with_possible_comma( debug!("censoring type {:?}", item.syntax()); remove.insert(item.syntax().clone().into()); // We need to remove the , as well - add_comma(&item, remove); + remove_possible_comma(&item, remove); break 'attrs; } @@ -63,7 +67,18 @@ fn process_has_attrs_with_possible_comma( } Some(()) } - +#[derive(Debug, PartialEq, Eq, Clone, Copy)] +enum CfgExprStage { + /// Stripping the CFGExpr part of the attribute + StrippigCfgExpr, + /// Found the comma after the CFGExpr. Will keep all tokens until the next comma or the end of the attribute + FoundComma, + /// Everything following the attribute. This could be another attribute or the end of the attribute. + // FIXME: cfg_attr with multiple attributes will not be handled correctly. We will only keep the first attribute + // Related Issue: https://github.com/rust-lang/rust-analyzer/issues/10110 + EverythingElse, +} +/// This function creates its own set of tokens to remove. To help prevent malformed syntax as input. fn remove_tokens_within_cfg_attr(meta: Meta) -> Option> { let mut remove: FxHashSet = FxHashSet::default(); debug!("Enabling attribute {}", meta); @@ -73,36 +88,44 @@ fn remove_tokens_within_cfg_attr(meta: Meta) -> Option> let meta_tt = meta.token_tree()?; debug!("meta_tt {}", meta_tt); - // Remove the left paren - remove.insert(meta_tt.l_paren_token()?.into()); - let mut found_comma = false; - for tt in meta_tt.token_trees_and_tokens().skip(1) { - debug!("Checking {:?}", tt); - // Check if it is a subtree or a token. If it is a token check if it is a comma. If so, remove it and break. - match tt { - syntax::NodeOrToken::Node(node) => { - // Remove the entire subtree + let mut stage = CfgExprStage::StrippigCfgExpr; + for tt in meta_tt.token_trees_and_tokens() { + debug!("Checking {:?}. 
Stage: {:?}", tt, stage); + match (stage, tt) { + (CfgExprStage::StrippigCfgExpr, syntax::NodeOrToken::Node(node)) => { remove.insert(node.syntax().clone().into()); } - syntax::NodeOrToken::Token(token) => { + (CfgExprStage::StrippigCfgExpr, syntax::NodeOrToken::Token(token)) => { if token.kind() == T![,] { - found_comma = true; - remove.insert(token.into()); - break; + stage = CfgExprStage::FoundComma; } remove.insert(token.into()); } + (CfgExprStage::FoundComma, syntax::NodeOrToken::Token(token)) + if (token.kind() == T![,] || token.kind() == T![')']) => + { + // The end of the attribute or separator for the next attribute + stage = CfgExprStage::EverythingElse; + remove.insert(token.into()); + } + (CfgExprStage::EverythingElse, syntax::NodeOrToken::Node(node)) => { + remove.insert(node.syntax().clone().into()); + } + (CfgExprStage::EverythingElse, syntax::NodeOrToken::Token(token)) => { + remove.insert(token.into()); + } + // This is an actual attribute + _ => {} } } - if !found_comma { - warn!("No comma found in {}", meta_tt); + if stage != CfgExprStage::EverythingElse { + warn!("Invalid cfg_attr attribute. {:?}", meta_tt); return None; } - // Remove the right paren - remove.insert(meta_tt.r_paren_token()?.into()); Some(remove) } -fn add_comma(item: &impl AstNode, res: &mut FxHashSet) { +/// Removes a possible comma after the [AstNode] +fn remove_possible_comma(item: &impl AstNode, res: &mut FxHashSet) { if let Some(comma) = item.syntax().next_sibling_or_token().filter(|it| it.kind() == T![,]) { res.insert(comma); } @@ -120,7 +143,7 @@ fn process_enum( debug!("censoring type {:?}", variant.syntax()); remove.insert(variant.syntax().clone().into()); // We need to remove the , as well - add_comma(&variant, remove); + remove_possible_comma(&variant, remove); continue 'variant; }; @@ -202,3 +225,103 @@ pub(crate) fn process_cfg_attrs( } Some(remove) } +/// Parses a `cfg` attribute from the meta +fn parse_from_attr_meta(meta: Meta) -> Option { + let tt = meta.token_tree()?; + let mut iter = tt.token_trees_and_tokens().skip(1).peekable(); + next_cfg_expr_from_syntax(&mut iter) +} + +fn next_cfg_expr_from_syntax(iter: &mut Peekable) -> Option +where + I: Iterator>, +{ + let name = match iter.next() { + None => return None, + Some(NodeOrToken::Token(element)) => match element.kind() { + syntax::T![ident] => SmolStr::new(element.text()), + _ => return Some(CfgExpr::Invalid), + }, + Some(_) => return Some(CfgExpr::Invalid), + }; + let result = match name.as_str() { + "all" | "any" | "not" => { + let mut preds = Vec::new(); + let Some(NodeOrToken::Node(tree)) = iter.next() else { + return Some(CfgExpr::Invalid); + }; + let mut tree_iter = tree.token_trees_and_tokens().skip(1).peekable(); + while tree_iter + .peek() + .filter( + |element| matches!(element, NodeOrToken::Token(token) if (token.kind() != syntax::T![')'])), + ) + .is_some() + { + let pred = next_cfg_expr_from_syntax(&mut tree_iter); + if let Some(pred) = pred { + preds.push(pred); + } + } + let group = match name.as_str() { + "all" => CfgExpr::All(preds), + "any" => CfgExpr::Any(preds), + "not" => CfgExpr::Not(Box::new(preds.pop().unwrap_or(CfgExpr::Invalid))), + _ => unreachable!(), + }; + Some(group) + } + _ => match iter.peek() { + Some(NodeOrToken::Token(element)) if (element.kind() == syntax::T![=]) => { + iter.next(); + match iter.next() { + Some(NodeOrToken::Token(value_token)) + if (value_token.kind() == syntax::SyntaxKind::STRING) => + { + let value = value_token.text(); + let value = SmolStr::new(value.trim_matches('"')); + 
Some(CfgExpr::Atom(CfgAtom::KeyValue { key: name, value })) + } + _ => None, + } + } + _ => Some(CfgExpr::Atom(CfgAtom::Flag(name))), + }, + }; + if let Some(NodeOrToken::Token(element)) = iter.peek() { + if element.kind() == syntax::T![,] { + iter.next(); + } + } + result +} +#[cfg(test)] +mod tests { + use cfg::DnfExpr; + use expect_test::{expect, Expect}; + use syntax::{ast::Attr, AstNode, SourceFile}; + + use crate::cfg_process::parse_from_attr_meta; + + fn check_dnf_from_syntax(input: &str, expect: Expect) { + let parse = SourceFile::parse(input); + let node = match parse.tree().syntax().descendants().find_map(Attr::cast) { + Some(it) => it, + None => { + let node = std::any::type_name::(); + panic!("Failed to make ast node `{node}` from text {input}") + } + }; + let node = node.clone_subtree(); + assert_eq!(node.syntax().text_range().start(), 0.into()); + + let cfg = parse_from_attr_meta(node.meta().unwrap()).unwrap(); + let actual = format!("#![cfg({})]", DnfExpr::new(cfg)); + expect.assert_eq(&actual); + } + #[test] + fn cfg_from_attr() { + check_dnf_from_syntax(r#"#[cfg(test)]"#, expect![[r#"#![cfg(test)]"#]]); + check_dnf_from_syntax(r#"#[cfg(not(never))]"#, expect![[r#"#![cfg(not(never))]"#]]); + } +} diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs index bb69c72be4d4..a7469ae5c88f 100644 --- a/crates/hir-expand/src/db.rs +++ b/crates/hir-expand/src/db.rs @@ -151,12 +151,16 @@ pub fn expand_speculative( ), MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => { let censor = censor_for_macro_input(&loc, speculative_args); + let censor_cfg = + cfg_process::process_cfg_attrs(speculative_args, &loc, db).unwrap_or_default(); let mut fixups = fixup::fixup_syntax(span_map, speculative_args, loc.call_site); fixups.append.retain(|it, _| match it { syntax::NodeOrToken::Token(_) => true, - it => !censor.contains(it), + it => !censor.contains(it) && !censor_cfg.contains(it), }); fixups.remove.extend(censor); + fixups.remove.extend(censor_cfg); + ( mbe::syntax_node_to_token_tree_modified( speculative_args,
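The subtle piece of the final revision is the staged walk in remove_tokens_within_cfg_attr: for an enabled `#[cfg_attr(<predicate>, <attr>, ...)]`, everything up to and including the comma that ends the predicate is removed, the first retained attribute is kept, and the delimiter that follows it (plus anything after it, per the FIXME about multiple attributes) is removed again. The following is a self-contained model of that three-stage rule, using a made-up Tok enum and flattened tokens instead of rust-analyzer's syntax elements; the stages mirror the CfgExprStage variants above.

#[derive(Clone, Copy, PartialEq, Debug)]
enum Tok<'a> {
    LParen,
    RParen,
    Comma,
    Other(&'a str),
}

#[derive(Clone, Copy, PartialEq, Debug)]
enum Stage {
    StrippingCfgExpr, // dropping the predicate
    FoundComma,       // keeping the first attribute
    EverythingElse,   // dropping the trailing `)` and any further attributes
}

// Returns the indices of tokens to remove, or None for malformed input
// (mirroring remove_tokens_within_cfg_attr, which bails out when the walk
// never reaches EverythingElse).
fn cfg_attr_removals(tokens: &[Tok<'_>]) -> Option<Vec<usize>> {
    let mut remove = Vec::new();
    let mut stage = Stage::StrippingCfgExpr;
    for (i, &tok) in tokens.iter().enumerate() {
        match (stage, tok) {
            (Stage::StrippingCfgExpr, Tok::Comma) => {
                stage = Stage::FoundComma;
                remove.push(i);
            }
            (Stage::StrippingCfgExpr, _) => remove.push(i),
            (Stage::FoundComma, Tok::Comma | Tok::RParen) => {
                stage = Stage::EverythingElse;
                remove.push(i);
            }
            (Stage::FoundComma, _) => {} // the attribute that survives
            (Stage::EverythingElse, _) => remove.push(i),
        }
    }
    if stage == Stage::EverythingElse {
        Some(remove)
    } else {
        None
    }
}

fn main() {
    // #[cfg_attr(not(never), default)] with the predicate enabled:
    // only `default` is kept, as in the test_default_expand_with_cfg test above.
    let tokens = [
        Tok::LParen,
        Tok::Other("not"),
        Tok::LParen,
        Tok::Other("never"),
        Tok::RParen,
        Tok::Comma,
        Tok::Other("default"),
        Tok::RParen,
    ];
    assert_eq!(cfg_attr_removals(&tokens), Some(vec![0, 1, 2, 3, 4, 5, 7]));
}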